diff --git a/src/main/java/frc/spectrumLib/vision/LimelightHelpers.java b/src/main/java/frc/spectrumLib/vision/LimelightHelpers.java index 73c8106..41af6c6 100644 --- a/src/main/java/frc/spectrumLib/vision/LimelightHelpers.java +++ b/src/main/java/frc/spectrumLib/vision/LimelightHelpers.java @@ -1,44 +1,48 @@ -// LimelightHelpers v1.11 (REQUIRES LLOS 2025.0 OR LATER) +//LimelightHelpers v1.14 (REQUIRES LLOS 2026.0 OR LATER) package frc.spectrumLib.vision; -import com.fasterxml.jackson.annotation.JsonFormat; -import com.fasterxml.jackson.annotation.JsonFormat.Shape; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import edu.wpi.first.math.geometry.Pose2d; -import edu.wpi.first.math.geometry.Pose3d; -import edu.wpi.first.math.geometry.Rotation2d; -import edu.wpi.first.math.geometry.Rotation3d; -import edu.wpi.first.math.geometry.Translation2d; -import edu.wpi.first.math.geometry.Translation3d; -import edu.wpi.first.math.util.Units; import edu.wpi.first.networktables.DoubleArrayEntry; import edu.wpi.first.networktables.NetworkTable; import edu.wpi.first.networktables.NetworkTableEntry; import edu.wpi.first.networktables.NetworkTableInstance; import edu.wpi.first.networktables.TimestampedDoubleArray; -import java.io.IOException; -import java.net.HttpURLConnection; +import frc.spectrumLib.vision.LimelightHelpers.LimelightResults; +import frc.spectrumLib.vision.LimelightHelpers.PoseEstimate; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.math.util.Units; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Translation2d; + +import java.util.Objects; import java.net.MalformedURLException; import 
java.net.URL; +import java.util.Arrays; import java.util.Map; -import java.util.concurrent.CompletableFuture; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonFormat.Shape; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; import java.util.concurrent.ConcurrentHashMap; +import edu.wpi.first.net.PortForwarder; /** - * LimelightHelpers provides static methods and classes for interfacing with Limelight vision - * cameras in FRC. This library supports all Limelight features including AprilTag tracking, Neural - * Networks, and standard color/retroreflective tracking. + * LimelightHelpers provides static methods and classes for interfacing with Limelight vision cameras in FRC. + * This library supports all Limelight features including AprilTag tracking, Neural Networks, and standard color/retroreflective tracking. 
*/ public class LimelightHelpers { - private static final Map doubleArrayEntries = - new ConcurrentHashMap<>(); + private static final Map doubleArrayEntries = new ConcurrentHashMap<>(); - /** Represents a Color/Retroreflective Target Result extracted from JSON Output */ + /** + * Represents a Color/Retroreflective Target Result extracted from JSON Output + */ public static class LimelightTarget_Retro { @JsonProperty("t6c_ts") @@ -48,7 +52,7 @@ public static class LimelightTarget_Retro { private double[] robotPose_FieldSpace; @JsonProperty("t6r_ts") - private double[] robotPose_TargetSpace; + private double[] robotPose_TargetSpace; @JsonProperty("t6t_cs") private double[] targetPose_CameraSpace; @@ -56,43 +60,45 @@ public static class LimelightTarget_Retro { @JsonProperty("t6t_rs") private double[] targetPose_RobotSpace; - public Pose3d getCameraPose_TargetSpace() { + public Pose3d getCameraPose_TargetSpace() + { return toPose3D(cameraPose_TargetSpace); } - - public Pose3d getRobotPose_FieldSpace() { + public Pose3d getRobotPose_FieldSpace() + { return toPose3D(robotPose_FieldSpace); } - - public Pose3d getRobotPose_TargetSpace() { + public Pose3d getRobotPose_TargetSpace() + { return toPose3D(robotPose_TargetSpace); } - - public Pose3d getTargetPose_CameraSpace() { + public Pose3d getTargetPose_CameraSpace() + { return toPose3D(targetPose_CameraSpace); } - - public Pose3d getTargetPose_RobotSpace() { + public Pose3d getTargetPose_RobotSpace() + { return toPose3D(targetPose_RobotSpace); } - public Pose2d getCameraPose_TargetSpace2D() { + public Pose2d getCameraPose_TargetSpace2D() + { return toPose2D(cameraPose_TargetSpace); } - - public Pose2d getRobotPose_FieldSpace2D() { + public Pose2d getRobotPose_FieldSpace2D() + { return toPose2D(robotPose_FieldSpace); } - - public Pose2d getRobotPose_TargetSpace2D() { + public Pose2d getRobotPose_TargetSpace2D() + { return toPose2D(robotPose_TargetSpace); } - - public Pose2d getTargetPose_CameraSpace2D() { + public Pose2d 
getTargetPose_CameraSpace2D() + { return toPose2D(targetPose_CameraSpace); } - - public Pose2d getTargetPose_RobotSpace2D() { + public Pose2d getTargetPose_RobotSpace2D() + { return toPose2D(targetPose_RobotSpace); } @@ -101,7 +107,7 @@ public Pose2d getTargetPose_RobotSpace2D() { @JsonProperty("tx") public double tx; - + @JsonProperty("ty") public double ty; @@ -127,9 +133,12 @@ public LimelightTarget_Retro() { targetPose_CameraSpace = new double[6]; targetPose_RobotSpace = new double[6]; } + } - /** Represents an AprilTag/Fiducial Target Result extracted from JSON Output */ + /** + * Represents an AprilTag/Fiducial Target Result extracted from JSON Output + */ public static class LimelightTarget_Fiducial { @JsonProperty("fID") @@ -153,46 +162,48 @@ public static class LimelightTarget_Fiducial { @JsonProperty("t6t_rs") private double[] targetPose_RobotSpace; - public Pose3d getCameraPose_TargetSpace() { + public Pose3d getCameraPose_TargetSpace() + { return toPose3D(cameraPose_TargetSpace); } - - public Pose3d getRobotPose_FieldSpace() { + public Pose3d getRobotPose_FieldSpace() + { return toPose3D(robotPose_FieldSpace); } - - public Pose3d getRobotPose_TargetSpace() { + public Pose3d getRobotPose_TargetSpace() + { return toPose3D(robotPose_TargetSpace); } - - public Pose3d getTargetPose_CameraSpace() { + public Pose3d getTargetPose_CameraSpace() + { return toPose3D(targetPose_CameraSpace); } - - public Pose3d getTargetPose_RobotSpace() { + public Pose3d getTargetPose_RobotSpace() + { return toPose3D(targetPose_RobotSpace); } - public Pose2d getCameraPose_TargetSpace2D() { + public Pose2d getCameraPose_TargetSpace2D() + { return toPose2D(cameraPose_TargetSpace); } - - public Pose2d getRobotPose_FieldSpace2D() { + public Pose2d getRobotPose_FieldSpace2D() + { return toPose2D(robotPose_FieldSpace); } - - public Pose2d getRobotPose_TargetSpace2D() { + public Pose2d getRobotPose_TargetSpace2D() + { return toPose2D(robotPose_TargetSpace); } - - public Pose2d 
getTargetPose_CameraSpace2D() { + public Pose2d getTargetPose_CameraSpace2D() + { return toPose2D(targetPose_CameraSpace); } - - public Pose2d getTargetPose_RobotSpace2D() { + public Pose2d getTargetPose_RobotSpace2D() + { return toPose2D(targetPose_RobotSpace); } - + @JsonProperty("ta") public double ta; @@ -216,7 +227,7 @@ public Pose2d getTargetPose_RobotSpace2D() { @JsonProperty("ts") public double ts; - + public LimelightTarget_Fiducial() { cameraPose_TargetSpace = new double[6]; robotPose_FieldSpace = new double[6]; @@ -226,15 +237,21 @@ public LimelightTarget_Fiducial() { } } - /** Represents a Barcode Target Result extracted from JSON Output */ + /** + * Represents a Barcode Target Result extracted from JSON Output + */ public static class LimelightTarget_Barcode { - /** Barcode family type (e.g. "QR", "DataMatrix", etc.) */ + /** + * Barcode family type (e.g. "QR", "DataMatrix", etc.) + */ @JsonProperty("fam") public String family; - /** Gets the decoded data content of the barcode */ - @JsonProperty("data") + /** + * Gets the decoded data content of the barcode + */ + @JsonProperty("data") public String data; @JsonProperty("txp") @@ -261,14 +278,17 @@ public static class LimelightTarget_Barcode { @JsonProperty("pts") public double[][] corners; - public LimelightTarget_Barcode() {} + public LimelightTarget_Barcode() { + } public String getFamily() { return family; } } - /** Represents a Neural Classifier Pipeline Result extracted from JSON Output */ + /** + * Represents a Neural Classifier Pipeline Result extracted from JSON Output + */ public static class LimelightTarget_Classifier { @JsonProperty("class") @@ -295,10 +315,13 @@ public static class LimelightTarget_Classifier { @JsonProperty("typ") public double ty_pixels; - public LimelightTarget_Classifier() {} + public LimelightTarget_Classifier() { + } } - /** Represents a Neural Detector Pipeline Result extracted from JSON Output */ + /** + * Represents a Neural Detector Pipeline Result extracted from 
JSON Output + */ public static class LimelightTarget_Detector { @JsonProperty("class") @@ -331,14 +354,115 @@ public static class LimelightTarget_Detector { @JsonProperty("ty_nocross") public double ty_nocrosshair; - public LimelightTarget_Detector() {} + public LimelightTarget_Detector() { + } } - /** Limelight Results object, parsed from a Limelight's JSON results output. */ - public static class LimelightResults { + /** + * Represents hardware statistics from the Limelight. + */ + public static class HardwareReport { + @JsonProperty("cid") + public String cameraId; - public String error; + @JsonProperty("cpu") + public double cpuUsage; + + @JsonProperty("dfree") + public double diskFree; + + @JsonProperty("dtot") + public double diskTotal; + + @JsonProperty("ram") + public double ramUsage; + + @JsonProperty("temp") + public double temperature; + + public HardwareReport() { + } + } + + /** + * Represents IMU data from the JSON results. + */ + public static class IMUResults { + @JsonProperty("data") + public double[] data; + + @JsonProperty("quat") + public double[] quaternion; + + @JsonProperty("yaw") + public double yaw; + + // Parsed from data array + public double robotYaw; + public double roll; + public double pitch; + public double rawYaw; + public double gyroZ; + public double gyroX; + public double gyroY; + public double accelZ; + public double accelX; + public double accelY; + + public IMUResults() { + data = new double[0]; + quaternion = new double[4]; + } + + public void parseDataArray() { + if (data != null && data.length >= 10) { + robotYaw = data[0]; + roll = data[1]; + pitch = data[2]; + rawYaw = data[3]; + gyroZ = data[4]; + gyroX = data[5]; + gyroY = data[6]; + accelZ = data[7]; + accelX = data[8]; + accelY = data[9]; + } + } + } + + /** + * Represents capture rewind buffer statistics. 
+ */ + public static class RewindStats { + @JsonProperty("bufferUsage") + public double bufferUsage; + + @JsonProperty("enabled") + public int enabled; + @JsonProperty("flushing") + public int flushing; + + @JsonProperty("frameCount") + public int frameCount; + + @JsonProperty("latpen") + public int latencyPenalty; + + @JsonProperty("storedSeconds") + public double storedSeconds; + + public RewindStats() { + } + } + + /** + * Limelight Results object, parsed from a Limelight's JSON results output. + */ + public static class LimelightResults { + + public String error; + @JsonProperty("pID") public double pipelineID; @@ -356,10 +480,37 @@ public static class LimelightResults { @JsonProperty("ts_rio") public double timestamp_RIOFPGA_capture; + @JsonProperty("ts_nt") + public long timestamp_nt; + + @JsonProperty("ts_sys") + public long timestamp_sys; + + @JsonProperty("ts_us") + public long timestamp_us; + @JsonProperty("v") @JsonFormat(shape = Shape.NUMBER) public boolean valid; + @JsonProperty("pTYPE") + public String pipelineType; + + @JsonProperty("tx") + public double tx; + + @JsonProperty("ty") + public double ty; + + @JsonProperty("txnc") + public double tx_nocrosshair; + + @JsonProperty("tync") + public double ty_nocrosshair; + + @JsonProperty("ta") + public double ta; + @JsonProperty("botpose") public double[] botpose; @@ -371,27 +522,48 @@ public static class LimelightResults { @JsonProperty("botpose_tagcount") public double botpose_tagcount; - + @JsonProperty("botpose_span") public double botpose_span; - + @JsonProperty("botpose_avgdist") public double botpose_avgdist; - + @JsonProperty("botpose_avgarea") public double botpose_avgarea; + @JsonProperty("botpose_orb") + public double[] botpose_orb; + + @JsonProperty("botpose_orb_wpiblue") + public double[] botpose_orb_wpiblue; + + @JsonProperty("botpose_orb_wpired") + public double[] botpose_orb_wpired; + @JsonProperty("t6c_rs") public double[] camerapose_robotspace; + @JsonProperty("hw") + public 
HardwareReport hardware; + + @JsonProperty("imu") + public IMUResults imuResults; + + @JsonProperty("rewind") + public RewindStats rewindStats; + + @JsonProperty("PythonOut") + public double[] pythonOutput; + public Pose3d getBotPose3d() { return toPose3D(botpose); } - + public Pose3d getBotPose3d_wpiRed() { return toPose3D(botpose_wpired); } - + public Pose3d getBotPose3d_wpiBlue() { return toPose3D(botpose_wpiblue); } @@ -399,11 +571,11 @@ public Pose3d getBotPose3d_wpiBlue() { public Pose2d getBotPose2d() { return toPose2D(botpose); } - + public Pose2d getBotPose2d_wpiRed() { return toPose2D(botpose_wpired); } - + public Pose2d getBotPose2d_wpiBlue() { return toPose2D(botpose_wpiblue); } @@ -427,16 +599,25 @@ public LimelightResults() { botpose = new double[6]; botpose_wpired = new double[6]; botpose_wpiblue = new double[6]; + botpose_orb = new double[6]; + botpose_orb_wpiblue = new double[6]; + botpose_orb_wpired = new double[6]; camerapose_robotspace = new double[6]; targets_Retro = new LimelightTarget_Retro[0]; targets_Fiducials = new LimelightTarget_Fiducial[0]; targets_Classifier = new LimelightTarget_Classifier[0]; targets_Detector = new LimelightTarget_Detector[0]; targets_Barcode = new LimelightTarget_Barcode[0]; + pythonOutput = new double[0]; + pipelineType = ""; } + + } - /** Represents a Limelight Raw Fiducial result from Limelight's NetworkTables output. */ + /** + * Represents a Limelight Raw Fiducial result from Limelight's NetworkTables output. 
+ */ public static class RawFiducial { public int id = 0; public double txnc = 0; @@ -446,14 +627,8 @@ public static class RawFiducial { public double distToRobot = 0; public double ambiguity = 0; - public RawFiducial( - int id, - double txnc, - double tync, - double ta, - double distToCamera, - double distToRobot, - double ambiguity) { + + public RawFiducial(int id, double txnc, double tync, double ta, double distToCamera, double distToRobot, double ambiguity) { this.id = id; this.txnc = txnc; this.tync = tync; @@ -462,9 +637,61 @@ public RawFiducial( this.distToRobot = distToRobot; this.ambiguity = ambiguity; } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + RawFiducial other = (RawFiducial) obj; + return id == other.id && + Double.compare(txnc, other.txnc) == 0 && + Double.compare(tync, other.tync) == 0 && + Double.compare(ta, other.ta) == 0 && + Double.compare(distToCamera, other.distToCamera) == 0 && + Double.compare(distToRobot, other.distToRobot) == 0 && + Double.compare(ambiguity, other.ambiguity) == 0; + } + + @Override + public int hashCode() { + return Objects.hash(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); + } + + } + + /** + * Represents a Limelight Raw Target/Contour result from Limelight's NetworkTables output. 
+ */ + public static class RawTarget { + public double txnc = 0; + public double tync = 0; + public double ta = 0; + + public RawTarget(double txnc, double tync, double ta) { + this.txnc = txnc; + this.tync = tync; + this.ta = ta; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + RawTarget other = (RawTarget) obj; + return Double.compare(txnc, other.txnc) == 0 && + Double.compare(tync, other.tync) == 0 && + Double.compare(ta, other.ta) == 0; + } + + @Override + public int hashCode() { + return Objects.hash(txnc, tync, ta); + } } - /** Represents a Limelight Raw Neural Detector result from Limelight's NetworkTables output. */ + /** + * Represents a Limelight Raw Neural Detector result from Limelight's NetworkTables output. + */ public static class RawDetection { public int classId = 0; public double txnc = 0; @@ -479,19 +706,12 @@ public static class RawDetection { public double corner3_X = 0; public double corner3_Y = 0; - public RawDetection( - int classId, - double txnc, - double tync, - double ta, - double corner0_X, - double corner0_Y, - double corner1_X, - double corner1_Y, - double corner2_X, - double corner2_Y, - double corner3_X, - double corner3_Y) { + + public RawDetection(int classId, double txnc, double tync, double ta, + double corner0_X, double corner0_Y, + double corner1_X, double corner1_Y, + double corner2_X, double corner2_Y, + double corner3_X, double corner3_Y ) { this.classId = classId; this.txnc = txnc; this.tync = tync; @@ -506,8 +726,10 @@ public RawDetection( this.corner3_Y = corner3_Y; } } - - /** Represents a 3D Pose Estimate. */ + + /** + * Represents a 3D Pose Estimate. 
+ */ public static class PoseEstimate { public Pose2d pose; public double timestampSeconds; @@ -517,10 +739,12 @@ public static class PoseEstimate { public double avgTagDist; public double avgTagArea; - public RawFiducial[] rawFiducials; + public RawFiducial[] rawFiducials; public boolean isMegaTag2; - /** Instantiates a PoseEstimate object with default values */ + /** + * Instantiates a PoseEstimate object with default values + */ public PoseEstimate() { this.pose = new Pose2d(); this.timestampSeconds = 0; @@ -529,20 +753,13 @@ public PoseEstimate() { this.tagSpan = 0; this.avgTagDist = 0; this.avgTagArea = 0; - this.rawFiducials = new RawFiducial[] {}; + this.rawFiducials = new RawFiducial[]{}; this.isMegaTag2 = false; } - public PoseEstimate( - Pose2d pose, - double timestampSeconds, - double latency, - int tagCount, - double tagSpan, - double avgTagDist, - double avgTagArea, - RawFiducial[] rawFiducials, - boolean isMegaTag2) { + public PoseEstimate(Pose2d pose, double timestampSeconds, double latency, + int tagCount, double tagSpan, double avgTagDist, + double avgTagArea, RawFiducial[] rawFiducials, boolean isMegaTag2) { this.pose = pose; this.timestampSeconds = timestampSeconds; @@ -554,9 +771,35 @@ public PoseEstimate( this.rawFiducials = rawFiducials; this.isMegaTag2 = isMegaTag2; } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + PoseEstimate that = (PoseEstimate) obj; + // We don't compare the timestampSeconds as it isn't relevant for equality and makes + // unit testing harder + return Double.compare(that.latency, latency) == 0 + && tagCount == that.tagCount + && Double.compare(that.tagSpan, tagSpan) == 0 + && Double.compare(that.avgTagDist, avgTagDist) == 0 + && Double.compare(that.avgTagArea, avgTagArea) == 0 + && pose.equals(that.pose) + && Arrays.equals(rawFiducials, that.rawFiducials); + } + + @Override + public int hashCode() { + int result = 
Objects.hash(pose, latency, tagCount, tagSpan, avgTagDist, avgTagArea); + result = 31 * result + Arrays.hashCode(rawFiducials); + return result; + } + } - /** Encapsulates the state of an internal Limelight IMU. */ + /** + * Encapsulates the state of an internal Limelight IMU. + */ public static class IMUData { public double robotYaw = 0.0; public double Roll = 0.0; @@ -587,49 +830,50 @@ public IMUData(double[] imuData) { } } + private static ObjectMapper mapper; - /** Print JSON Parse time to the console in milliseconds */ + /** + * Print JSON Parse time to the console in milliseconds + */ static boolean profileJSON = false; static final String sanitizeName(String name) { - if (name == "" || name == null) { + if ("".equals(name) || name == null) { return "limelight"; } return name; } /** - * Takes a 6-length array of pose data and converts it to a Pose3d object. Array format: [x, y, - * z, roll, pitch, yaw] where angles are in degrees. - * + * Takes a 6-length array of pose data and converts it to a Pose3d object. + * Array format: [x, y, z, roll, pitch, yaw] where angles are in degrees. 
* @param inData Array containing pose data [x, y, z, roll, pitch, yaw] * @return Pose3d object representing the pose, or empty Pose3d if invalid data */ - public static Pose3d toPose3D(double[] inData) { - if (inData.length < 6) { - // System.err.println("Bad LL 3D Pose Data!"); + public static Pose3d toPose3D(double[] inData){ + if(inData.length < 6) + { + //System.err.println("Bad LL 3D Pose Data!"); return new Pose3d(); } return new Pose3d( - new Translation3d(inData[0], inData[1], inData[2]), - new Rotation3d( - Units.degreesToRadians(inData[3]), - Units.degreesToRadians(inData[4]), - Units.degreesToRadians(inData[5]))); + new Translation3d(inData[0], inData[1], inData[2]), + new Rotation3d(Units.degreesToRadians(inData[3]), Units.degreesToRadians(inData[4]), + Units.degreesToRadians(inData[5]))); } /** - * Takes a 6-length array of pose data and converts it to a Pose2d object. Uses only x, y, and - * yaw components, ignoring z, roll, and pitch. Array format: [x, y, z, roll, pitch, yaw] where - * angles are in degrees. - * + * Takes a 6-length array of pose data and converts it to a Pose2d object. + * Uses only x, y, and yaw components, ignoring z, roll, and pitch. + * Array format: [x, y, z, roll, pitch, yaw] where angles are in degrees. * @param inData Array containing pose data [x, y, z, roll, pitch, yaw] * @return Pose2d object representing the pose, or empty Pose2d if invalid data */ - public static Pose2d toPose2D(double[] inData) { - if (inData.length < 6) { - // System.err.println("Bad LL 2D Pose Data!"); + public static Pose2d toPose2D(double[] inData){ + if(inData.length < 6) + { + //System.err.println("Bad LL 2D Pose Data!"); return new Pose2d(); } Translation2d tran2d = new Translation2d(inData[0], inData[1]); @@ -640,7 +884,7 @@ public static Pose2d toPose2D(double[] inData) { /** * Converts a Pose3d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. * Translation components are in meters, rotation components are in degrees. 
- * + * * @param pose The Pose3d object to convert * @return A 6-element array containing [x, y, z, roll, pitch, yaw] */ @@ -657,9 +901,9 @@ public static double[] pose3dToArray(Pose3d pose) { /** * Converts a Pose2d object to an array of doubles in the format [x, y, z, roll, pitch, yaw]. - * Translation components are in meters, rotation components are in degrees. Note: z, roll, and - * pitch will be 0 since Pose2d only contains x, y, and yaw. - * + * Translation components are in meters, rotation components are in degrees. + * Note: z, roll, and pitch will be 0 since Pose2d only contains x, y, and yaw. + * * @param pose The Pose2d object to convert * @return A 6-element array containing [x, y, 0, 0, 0, yaw] */ @@ -674,73 +918,64 @@ public static double[] pose2dToArray(Pose2d pose) { return result; } - private static double extractArrayEntry(double[] inData, int position) { - if (inData.length < position + 1) { + private static double extractArrayEntry(double[] inData, int position){ + if(inData.length < position+1) + { return 0; } return inData[position]; } - private static PoseEstimate getBotPoseEstimate( - String limelightName, String entryName, boolean isMegaTag2) { - DoubleArrayEntry poseEntry = - LimelightHelpers.getLimelightDoubleArrayEntry(limelightName, entryName); - + private static PoseEstimate getBotPoseEstimate(String limelightName, String entryName, boolean isMegaTag2) { + DoubleArrayEntry poseEntry = LimelightHelpers.getLimelightDoubleArrayEntry(limelightName, entryName); + TimestampedDoubleArray tsValue = poseEntry.getAtomic(); double[] poseArray = tsValue.value; long timestamp = tsValue.timestamp; - + if (poseArray.length == 0) { // Handle the case where no data is available - return null; // or some default PoseEstimate + return new PoseEstimate(); } - + var pose = toPose2D(poseArray); double latency = extractArrayEntry(poseArray, 6); - int tagCount = (int) extractArrayEntry(poseArray, 7); + int tagCount = (int)extractArrayEntry(poseArray, 7); 
double tagSpan = extractArrayEntry(poseArray, 8); double tagDist = extractArrayEntry(poseArray, 9); double tagArea = extractArrayEntry(poseArray, 10); - + // Convert server timestamp from microseconds to seconds and adjust for latency double adjustedTimestamp = (timestamp / 1000000.0) - (latency / 1000.0); - - RawFiducial[] rawFiducials = new RawFiducial[tagCount]; + int valsPerFiducial = 7; int expectedTotalVals = 11 + valsPerFiducial * tagCount; + RawFiducial[] rawFiducials; if (poseArray.length != expectedTotalVals) { - // Don't populate fiducials + // Array size mismatch - return empty array instead of null-filled array + rawFiducials = new RawFiducial[0]; } else { - for (int i = 0; i < tagCount; i++) { + rawFiducials = new RawFiducial[tagCount]; + for(int i = 0; i < tagCount; i++) { int baseIndex = 11 + (i * valsPerFiducial); - int id = (int) poseArray[baseIndex]; + int id = (int)poseArray[baseIndex]; double txnc = poseArray[baseIndex + 1]; double tync = poseArray[baseIndex + 2]; double ta = poseArray[baseIndex + 3]; double distToCamera = poseArray[baseIndex + 4]; double distToRobot = poseArray[baseIndex + 5]; double ambiguity = poseArray[baseIndex + 6]; - rawFiducials[i] = - new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); + rawFiducials[i] = new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); } } - - return new PoseEstimate( - pose, - adjustedTimestamp, - latency, - tagCount, - tagSpan, - tagDist, - tagArea, - rawFiducials, - isMegaTag2); + + return new PoseEstimate(pose, adjustedTimestamp, latency, tagCount, tagSpan, tagDist, tagArea, rawFiducials, isMegaTag2); } /** * Gets the latest raw fiducial/AprilTag detection results from NetworkTables. 
- * + * * @param limelightName Name/identifier of the Limelight * @return Array of RawFiducial objects containing detection details */ @@ -751,10 +986,10 @@ public static RawFiducial[] getRawFiducials(String limelightName) { if (rawFiducialArray.length % valsPerEntry != 0) { return new RawFiducial[0]; } - + int numFiducials = rawFiducialArray.length / valsPerEntry; RawFiducial[] rawFiducials = new RawFiducial[numFiducials]; - + for (int i = 0; i < numFiducials; i++) { int baseIndex = i * valsPerEntry; int id = (int) extractArrayEntry(rawFiducialArray, baseIndex); @@ -764,11 +999,10 @@ public static RawFiducial[] getRawFiducials(String limelightName) { double distToCamera = extractArrayEntry(rawFiducialArray, baseIndex + 4); double distToRobot = extractArrayEntry(rawFiducialArray, baseIndex + 5); double ambiguity = extractArrayEntry(rawFiducialArray, baseIndex + 6); - - rawFiducials[i] = - new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); + + rawFiducials[i] = new RawFiducial(id, txnc, tync, ta, distToCamera, distToRobot, ambiguity); } - + return rawFiducials; } @@ -785,10 +1019,10 @@ public static RawDetection[] getRawDetections(String limelightName) { if (rawDetectionArray.length % valsPerEntry != 0) { return new RawDetection[0]; } - + int numDetections = rawDetectionArray.length / valsPerEntry; RawDetection[] rawDetections = new RawDetection[numDetections]; - + for (int i = 0; i < numDetections; i++) { int baseIndex = i * valsPerEntry; // Starting index for this detection's data int classId = (int) extractArrayEntry(rawDetectionArray, baseIndex); @@ -803,20 +1037,58 @@ public static RawDetection[] getRawDetections(String limelightName) { double corner2_Y = extractArrayEntry(rawDetectionArray, baseIndex + 9); double corner3_X = extractArrayEntry(rawDetectionArray, baseIndex + 10); double corner3_Y = extractArrayEntry(rawDetectionArray, baseIndex + 11); + + rawDetections[i] = new RawDetection(classId, txnc, tync, ta, corner0_X, corner0_Y, 
corner1_X, corner1_Y, corner2_X, corner2_Y, corner3_X, corner3_Y); + } + + return rawDetections; + } - rawDetections[i] = - new RawDetection( - classId, txnc, tync, ta, corner0_X, corner0_Y, corner1_X, corner1_Y, - corner2_X, corner2_Y, corner3_X, corner3_Y); + /** + * Gets the raw target contours from NetworkTables. + * Returns ungrouped contours in normalized screen space (-1 to 1). + * + * @param limelightName Name/identifier of the Limelight + * @return Array of RawTarget objects containing up to 3 contours + */ + public static RawTarget[] getRawTargets(String limelightName) { + var entry = LimelightHelpers.getLimelightNTTableEntry(limelightName, "rawtargets"); + var rawTargetArray = entry.getDoubleArray(new double[0]); + int valsPerEntry = 3; + if (rawTargetArray.length % valsPerEntry != 0) { + return new RawTarget[0]; } - return rawDetections; + int numTargets = rawTargetArray.length / valsPerEntry; + RawTarget[] rawTargets = new RawTarget[numTargets]; + + for (int i = 0; i < numTargets; i++) { + int baseIndex = i * valsPerEntry; + double txnc = extractArrayEntry(rawTargetArray, baseIndex); + double tync = extractArrayEntry(rawTargetArray, baseIndex + 1); + double ta = extractArrayEntry(rawTargetArray, baseIndex + 2); + + rawTargets[i] = new RawTarget(txnc, tync, ta); + } + + return rawTargets; } /** - * Prints detailed information about a PoseEstimate to standard output. Includes timestamp, - * latency, tag count, tag span, average tag distance, average tag area, and detailed - * information about each detected fiducial. + * Gets the corner coordinates of detected targets from NetworkTables. + * Requires "send contours" to be enabled in the Limelight Output tab. + * + * @param limelightName Name/identifier of the Limelight + * @return Array of doubles containing corner coordinates [x0, y0, x1, y1, ...] 
+ */ + public static double[] getCornerCoordinates(String limelightName) { + return getLimelightNTDoubleArray(limelightName, "tcornxy"); + } + + /** + * Prints detailed information about a PoseEstimate to standard output. + * Includes timestamp, latency, tag count, tag span, average tag distance, + * average tag area, and detailed information about each detected fiducial. * * @param pose The PoseEstimate object to print. If null, prints "No PoseEstimate available." */ @@ -825,7 +1097,7 @@ public static void printPoseEstimate(PoseEstimate pose) { System.out.println("No PoseEstimate available."); return; } - + System.out.printf("Pose Estimate Information:%n"); System.out.printf("Timestamp (Seconds): %.3f%n", pose.timestampSeconds); System.out.printf("Latency: %.3f ms%n", pose.latency); @@ -835,12 +1107,12 @@ public static void printPoseEstimate(PoseEstimate pose) { System.out.printf("Average Tag Area: %.2f%% of image%n", pose.avgTagArea); System.out.printf("Is MegaTag2: %b%n", pose.isMegaTag2); System.out.println(); - + if (pose.rawFiducials == null || pose.rawFiducials.length == 0) { System.out.println("No RawFiducials data available."); return; } - + System.out.println("Raw Fiducials Details:"); for (int i = 0; i < pose.rawFiducials.length; i++) { RawFiducial fiducial = pose.rawFiducials[i]; @@ -872,17 +1144,14 @@ public static NetworkTableEntry getLimelightNTTableEntry(String tableName, Strin return getLimelightNTTable(tableName).getEntry(entryName); } - public static DoubleArrayEntry getLimelightDoubleArrayEntry( - String tableName, String entryName) { + public static DoubleArrayEntry getLimelightDoubleArrayEntry(String tableName, String entryName) { String key = tableName + "/" + entryName; - return doubleArrayEntries.computeIfAbsent( - key, - k -> { - NetworkTable table = getLimelightNTTable(tableName); - return table.getDoubleArrayTopic(entryName).getEntry(new double[0]); - }); + return doubleArrayEntries.computeIfAbsent(key, k -> { + NetworkTable table = 
getLimelightNTTable(tableName); + return table.getDoubleArrayTopic(entryName).getEntry(new double[0]); + }); } - + public static double getLimelightNTDouble(String tableName, String entryName) { return getLimelightNTTableEntry(tableName, entryName).getDouble(0.0); } @@ -899,6 +1168,7 @@ public static double[] getLimelightNTDoubleArray(String tableName, String entryN return getLimelightNTTableEntry(tableName, entryName).getDoubleArray(new double[0]); } + public static String getLimelightNTString(String tableName, String entryName) { return getLimelightNTTableEntry(tableName, entryName).getString(""); } @@ -907,23 +1177,11 @@ public static String[] getLimelightNTStringArray(String tableName, String entryN return getLimelightNTTableEntry(tableName, entryName).getStringArray(new String[0]); } - public static URL getLimelightURLString(String tableName, String request) { - String urlString = "http://" + sanitizeName(tableName) + ".local:5807/" + request; - URL url; - try { - url = new URL(urlString); - return url; - } catch (MalformedURLException e) { - System.err.println("bad LL URL"); - } - return null; - } - ///// + ///// /** * Does the Limelight have a valid target? - * * @param limelightName Name of the Limelight camera ("" for default) * @return True if a valid target is present, false otherwise */ @@ -933,7 +1191,6 @@ public static boolean getTV(String limelightName) { /** * Gets the horizontal offset from the crosshair to the target in degrees. - * * @param limelightName Name of the Limelight camera ("" for default) * @return Horizontal offset angle in degrees */ @@ -943,7 +1200,6 @@ public static double getTX(String limelightName) { /** * Gets the vertical offset from the crosshair to the target in degrees. 
- * * @param limelightName Name of the Limelight camera ("" for default) * @return Vertical offset angle in degrees */ @@ -952,10 +1208,7 @@ public static double getTY(String limelightName) { } /** - * Gets the horizontal offset from the principal pixel/point to the target in degrees. This is - * the most accurate 2d metric if you are using a calibrated camera and you don't need - * adjustable crosshair functionality. - * + * Gets the horizontal offset from the principal pixel/point to the target in degrees. This is the most accurate 2d metric if you are using a calibrated camera and you don't need adjustable crosshair functionality. * @param limelightName Name of the Limelight camera ("" for default) * @return Horizontal offset angle in degrees */ @@ -964,10 +1217,7 @@ public static double getTXNC(String limelightName) { } /** - * Gets the vertical offset from the principal pixel/point to the target in degrees. This is the - * most accurate 2d metric if you are using a calibrated camera and you don't need adjustable - * crosshair functionality. - * + * Gets the vertical offset from the principal pixel/point to the target in degrees. This is the most accurate 2d metric if you are using a calibrated camera and you don't need adjustable crosshair functionality. * @param limelightName Name of the Limelight camera ("" for default) * @return Vertical offset angle in degrees */ @@ -977,8 +1227,7 @@ public static double getTYNC(String limelightName) { /** * Gets the target area as a percentage of the image (0-100%). 
- * - * @param limelightName Name of the Limelight camera ("" for default) + * @param limelightName Name of the Limelight camera ("" for default) * @return Target area percentage (0-100) */ public static double getTA(String limelightName) { @@ -986,84 +1235,77 @@ } /** - * T2D is an array that contains several targeting metrics - * + * T2D is an array that contains several targeting metrics * @param limelightName Name of the Limelight camera - * @return Array containing [targetValid, targetCount, targetLatency, captureLatency, tx, ty, - * txnc, tync, ta, tid, targetClassIndexDetector, targetClassIndexClassifier, - * targetLongSidePixels, targetShortSidePixels, targetHorizontalExtentPixels, - * targetVerticalExtentPixels, targetSkewDegrees] + * @return Array containing [targetValid, targetCount, targetLatency, captureLatency, tx, ty, txnc, tync, ta, tid, targetClassIndexDetector, + * targetClassIndexClassifier, targetLongSidePixels, targetShortSidePixels, targetHorizontalExtentPixels, targetVerticalExtentPixels, targetSkewDegrees] */ public static double[] getT2DArray(String limelightName) { return getLimelightNTDoubleArray(limelightName, "t2d"); } - + /** * Gets the number of targets currently detected. 
- * * @param limelightName Name of the Limelight camera * @return Number of detected targets */ public static int getTargetCount(String limelightName) { - double[] t2d = getT2DArray(limelightName); - if (t2d.length == 17) { - return (int) t2d[1]; - } - return 0; + double[] t2d = getT2DArray(limelightName); + if(t2d.length == 17) + { + return (int)t2d[1]; + } + return 0; } /** * Gets the classifier class index from the currently running neural classifier pipeline - * * @param limelightName Name of the Limelight camera * @return Class index from classifier pipeline */ - public static int getClassifierClassIndex(String limelightName) { - double[] t2d = getT2DArray(limelightName); - if (t2d.length == 17) { - return (int) t2d[10]; - } - return 0; + public static int getClassifierClassIndex (String limelightName) { + double[] t2d = getT2DArray(limelightName); + if(t2d.length == 17) + { + return (int)t2d[11]; + } + return 0; } /** - * Gets the detector class index from the primary result of the currently running neural - * detector pipeline. - * + * Gets the detector class index from the primary result of the currently running neural detector pipeline. * @param limelightName Name of the Limelight camera * @return Class index from detector pipeline */ - public static int getDetectorClassIndex(String limelightName) { - double[] t2d = getT2DArray(limelightName); - if (t2d.length == 17) { - return (int) t2d[11]; - } - return 0; + public static int getDetectorClassIndex (String limelightName) { + double[] t2d = getT2DArray(limelightName); + if(t2d.length == 17) + { + return (int)t2d[10]; + } + return 0; } /** * Gets the current neural classifier result class name. 
- * * @param limelightName Name of the Limelight camera * @return Class name string from classifier pipeline */ - public static String getClassifierClass(String limelightName) { + public static String getClassifierClass (String limelightName) { return getLimelightNTString(limelightName, "tcclass"); } /** * Gets the primary neural detector result class name. - * * @param limelightName Name of the Limelight camera * @return Class name string from detector pipeline */ - public static String getDetectorClass(String limelightName) { + public static String getDetectorClass (String limelightName) { return getLimelightNTString(limelightName, "tdclass"); } /** * Gets the pipeline's processing latency contribution. - * * @param limelightName Name of the Limelight camera * @return Pipeline latency in milliseconds */ @@ -1073,7 +1315,6 @@ public static double getLatency_Pipeline(String limelightName) { /** * Gets the capture latency. - * * @param limelightName Name of the Limelight camera * @return Capture latency in milliseconds */ @@ -1083,7 +1324,6 @@ public static double getLatency_Capture(String limelightName) { /** * Gets the active pipeline index. - * * @param limelightName Name of the Limelight camera * @return Current pipeline index (0-9) */ @@ -1093,9 +1333,8 @@ public static double getCurrentPipelineIndex(String limelightName) { /** * Gets the current pipeline type. - * * @param limelightName Name of the Limelight camera - * @return Pipeline type string (e.g. "retro", "apriltag", etc.) + * @return Pipeline type string (e.g. "retro", "apriltag", etc) */ public static String getCurrentPipelineType(String limelightName) { return getLimelightNTString(limelightName, "getpipetype"); @@ -1103,7 +1342,6 @@ public static String getCurrentPipelineType(String limelightName) { /** * Gets the full JSON results dump. 
- * * @param limelightName Name of the Limelight camera * @return JSON string containing all current results */ @@ -1113,9 +1351,9 @@ public static String getJSONDump(String limelightName) { /** * Switch to getBotPose - * - * @param limelightName Name/identifier of the Limelight - * @return double array [x,y,z,roll,pitch,yaw] in Limelight Coordinate System + * + * @param limelightName + * @return */ @Deprecated public static double[] getBotpose(String limelightName) { @@ -1124,9 +1362,9 @@ public static double[] getBotpose(String limelightName) { /** * Switch to getBotPose_wpiRed - * - * @param limelightName Name/identifier of the Limelight - * @return double array [x,y,z,roll,pitch,yaw] in WPILib Red Alliance Coordinate System + * + * @param limelightName + * @return */ @Deprecated public static double[] getBotpose_wpiRed(String limelightName) { @@ -1135,9 +1373,9 @@ public static double[] getBotpose_wpiRed(String limelightName) { /** * Switch to getBotPose_wpiBlue - * - * @param limelightName Name/identifier of the Limelight - * @return double array [x,y,z,roll,pitch,yaw] in WPILib Blue Alliance Coordinate System + * + * @param limelightName + * @return */ @Deprecated public static double[] getBotpose_wpiBlue(String limelightName) { @@ -1172,6 +1410,11 @@ public static double[] getTargetPose_RobotSpace(String limelightName) { return getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); } + /** + * Gets the average color under the crosshair region as a 3-element array. + * @param limelightName Name of the Limelight camera + * @return Array containing [Blue, Green, Red] color values (BGR order) + */ public static double[] getTargetColor(String limelightName) { return getLimelightNTDoubleArray(limelightName, "tc"); } @@ -1180,6 +1423,15 @@ public static double getFiducialID(String limelightName) { return getLimelightNTDouble(limelightName, "tid"); } + /** + * Gets the Limelight heartbeat value. 
Increments once per frame, allowing you to detect if the Limelight is connected and alive. + * @param limelightName Name of the Limelight camera + * @return Heartbeat value that increments each frame + */ + public static double getHeartbeat(String limelightName) { + return getLimelightNTDouble(limelightName, "hb"); + } + public static String getNeuralClassID(String limelightName) { return getLimelightNTString(limelightName, "tclass"); } @@ -1198,10 +1450,8 @@ public static Pose3d getBotPose3d(String limelightName) { /** * (Not Recommended) Gets the robot's 3D pose in the WPILib Red Alliance Coordinate System. - * * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the robot's position and orientation in Red Alliance field - * space + * @return Pose3d object representing the robot's position and orientation in Red Alliance field space */ public static Pose3d getBotPose3d_wpiRed(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpired"); @@ -1210,10 +1460,8 @@ public static Pose3d getBotPose3d_wpiRed(String limelightName) { /** * (Recommended) Gets the robot's 3D pose in the WPILib Blue Alliance Coordinate System. - * * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the robot's position and orientation in Blue Alliance - * field space + * @return Pose3d object representing the robot's position and orientation in Blue Alliance field space */ public static Pose3d getBotPose3d_wpiBlue(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_wpiblue"); @@ -1222,10 +1470,8 @@ public static Pose3d getBotPose3d_wpiBlue(String limelightName) { /** * Gets the robot's 3D pose with respect to the currently tracked target's coordinate system. 
- * * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the robot's position and orientation relative to the - * target + * @return Pose3d object representing the robot's position and orientation relative to the target */ public static Pose3d getBotPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "botpose_targetspace"); @@ -1234,10 +1480,8 @@ public static Pose3d getBotPose3d_TargetSpace(String limelightName) { /** * Gets the camera's 3D pose with respect to the currently tracked target's coordinate system. - * * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the camera's position and orientation relative to the - * target + * @return Pose3d object representing the camera's position and orientation relative to the target */ public static Pose3d getCameraPose3d_TargetSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_targetspace"); @@ -1246,10 +1490,8 @@ public static Pose3d getCameraPose3d_TargetSpace(String limelightName) { /** * Gets the target's 3D pose with respect to the camera's coordinate system. - * * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the target's position and orientation relative to the - * camera + * @return Pose3d object representing the target's position and orientation relative to the camera */ public static Pose3d getTargetPose3d_CameraSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_cameraspace"); @@ -1258,10 +1500,8 @@ public static Pose3d getTargetPose3d_CameraSpace(String limelightName) { /** * Gets the target's 3D pose with respect to the robot's coordinate system. 
- * * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the target's position and orientation relative to the - * robot + * @return Pose3d object representing the target's position and orientation relative to the robot */ public static Pose3d getTargetPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "targetpose_robotspace"); @@ -1270,10 +1510,8 @@ public static Pose3d getTargetPose3d_RobotSpace(String limelightName) { /** * Gets the camera's 3D pose with respect to the robot's coordinate system. - * * @param limelightName Name/identifier of the Limelight - * @return Pose3d object representing the camera's position and orientation relative to the - * robot + * @return Pose3d object representing the camera's position and orientation relative to the robot */ public static Pose3d getCameraPose3d_RobotSpace(String limelightName) { double[] poseArray = getLimelightNTDoubleArray(limelightName, "camerapose_robotspace"); @@ -1281,10 +1519,11 @@ public static Pose3d getCameraPose3d_RobotSpace(String limelightName) { } /** - * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) - * - * @param limelightName Name/identifier of the Limelight - * @return Pose2d object representing the robot's position and orientation + * Gets the Pose2d for easy use with Odometry vision pose estimator + * (addVisionMeasurement) + * + * @param limelightName + * @return */ public static Pose2d getBotPose2d_wpiBlue(String limelightName) { @@ -1293,31 +1532,30 @@ public static Pose2d getBotPose2d_wpiBlue(String limelightName) { } /** - * Gets the MegaTag1 Pose2d and timestamp for use with WPILib pose estimator - * (addVisionMeasurement) in the WPILib Blue alliance coordinate system. - * + * Gets the MegaTag1 Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) in the WPILib Blue alliance coordinate system. 
+ * * @param limelightName - * @return Position Estimate with Pose2d and timestamp + * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue(String limelightName) { return getBotPoseEstimate(limelightName, "botpose_wpiblue", false); } /** - * Gets the MegaTag2 Pose2d and timestamp for use with WPILib pose estimator - * (addVisionMeasurement) in the WPILib Blue alliance coordinate system. Make sure you are - * calling setRobotOrientation() before calling this method. - * + * Gets the MegaTag2 Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) in the WPILib Blue alliance coordinate system. + * Make sure you are calling setRobotOrientation() before calling this method. + * * @param limelightName - * @return Position Estimate with Pose2d and timestamp + * @return */ public static PoseEstimate getBotPoseEstimate_wpiBlue_MegaTag2(String limelightName) { return getBotPoseEstimate(limelightName, "botpose_orb_wpiblue", true); } /** - * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) - * + * Gets the Pose2d for easy use with Odometry vision pose estimator + * (addVisionMeasurement) + * * @param limelightName * @return */ @@ -1325,12 +1563,12 @@ public static Pose2d getBotPose2d_wpiRed(String limelightName) { double[] result = getBotPose_wpiRed(limelightName); return toPose2D(result); + } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when - * you are on the RED alliance - * + * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when you are on the RED + * alliance * @param limelightName * @return */ @@ -1339,9 +1577,8 @@ public static PoseEstimate getBotPoseEstimate_wpiRed(String limelightName) { } /** - * Gets the Pose2d and timestamp for use with WPILib pose estimator (addVisionMeasurement) when - * you are on the RED alliance - * + * Gets the Pose2d and timestamp for use with WPILib pose estimator 
(addVisionMeasurement) when you are on the RED + * alliance * @param limelightName * @return */ @@ -1350,8 +1587,9 @@ public static PoseEstimate getBotPoseEstimate_wpiRed_MegaTag2(String limelightNa } /** - * Gets the Pose2d for easy use with Odometry vision pose estimator (addVisionMeasurement) - * + * Gets the Pose2d for easy use with Odometry vision pose estimator + * (addVisionMeasurement) + * * @param limelightName * @return */ @@ -1359,20 +1597,21 @@ public static Pose2d getBotPose2d(String limelightName) { double[] result = getBotPose(limelightName); return toPose2D(result); - } + } + /** - * Gets the current IMU data from NetworkTables. IMU data is formatted as [robotYaw, Roll, - * Pitch, Yaw, gyroX, gyroY, gyroZ, accelX, accelY, accelZ]. Returns all zeros if data is - * invalid or unavailable. - * + * Gets the current IMU data from NetworkTables. + * IMU data is formatted as [robotYaw, Roll, Pitch, Yaw, gyroX, gyroY, gyroZ, accelX, accelY, accelZ]. + * Returns all zeros if data is invalid or unavailable. + * * @param limelightName Name/identifier of the Limelight * @return IMUData object containing all current IMU data */ public static IMUData getIMUData(String limelightName) { double[] imuData = getLimelightNTDoubleArray(limelightName, "imu"); if (imuData == null || imuData.length < 10) { - return new IMUData(); // Returns object with all zeros + return new IMUData(); // Returns object with all zeros } return new IMUData(imuData); } @@ -1384,13 +1623,13 @@ public static void setPipelineIndex(String limelightName, int pipelineIndex) { setLimelightNTDouble(limelightName, "pipeline", pipelineIndex); } + public static void setPriorityTagID(String limelightName, int ID) { setLimelightNTDouble(limelightName, "priorityid", ID); } /** * Sets LED mode to be controlled by the current pipeline. 
- * * @param limelightName Name of the Limelight camera */ public static void setLEDMode_PipelineControl(String limelightName) { @@ -1411,7 +1650,6 @@ public static void setLEDMode_ForceOn(String limelightName) { /** * Enables standard side-by-side stream mode. - * * @param limelightName Name of the Limelight camera */ public static void setStreamMode_Standard(String limelightName) { @@ -1420,7 +1658,6 @@ public static void setStreamMode_Standard(String limelightName) { /** * Enables Picture-in-Picture mode with secondary stream in the corner. - * * @param limelightName Name of the Limelight camera */ public static void setStreamMode_PiPMain(String limelightName) { @@ -1429,28 +1666,22 @@ public static void setStreamMode_PiPMain(String limelightName) { /** * Enables Picture-in-Picture mode with primary stream in the corner. - * * @param limelightName Name of the Limelight camera */ public static void setStreamMode_PiPSecondary(String limelightName) { setLimelightNTDouble(limelightName, "stream", 2); } + /** * Sets the crop window for the camera. The crop window in the UI must be completely open. - * * @param limelightName Name of the Limelight camera * @param cropXMin Minimum X value (-1 to 1) * @param cropXMax Maximum X value (-1 to 1) * @param cropYMin Minimum Y value (-1 to 1) * @param cropYMax Maximum Y value (-1 to 1) */ - public static void setCropWindow( - String limelightName, - double cropXMin, - double cropXMax, - double cropYMin, - double cropYMax) { + public static void setCropWindow(String limelightName, double cropXMin, double cropXMax, double cropYMin, double cropYMax) { double[] entries = new double[4]; entries[0] = cropXMin; entries[1] = cropXMax; @@ -1459,9 +1690,23 @@ public static void setCropWindow( setLimelightNTDoubleArray(limelightName, "crop", entries); } - /** Sets 3D offset point for easy 3D targeting. 
*/ - public static void setFiducial3DOffset( - String limelightName, double offsetX, double offsetY, double offsetZ) { + /** + * Sets the keystone modification for the crop window. + * @param limelightName Name of the Limelight camera + * @param horizontal Horizontal keystone value (-0.95 to 0.95) + * @param vertical Vertical keystone value (-0.95 to 0.95) + */ + public static void setKeystone(String limelightName, double horizontal, double vertical) { + double[] entries = new double[2]; + entries[0] = horizontal; + entries[1] = vertical; + setLimelightNTDoubleArray(limelightName, "keystone_set", entries); + } + + /** + * Sets 3D offset point for easy 3D targeting. + */ + public static void setFiducial3DOffset(String limelightName, double offsetX, double offsetY, double offsetZ) { double[] entries = new double[3]; entries[0] = offsetX; entries[1] = offsetY; @@ -1471,48 +1716,30 @@ public static void setFiducial3DOffset( /** * Sets robot orientation values used by MegaTag2 localization algorithm. - * + * * @param limelightName Name/identifier of the Limelight * @param yaw Robot yaw in degrees. 
0 = robot facing red alliance wall in FRC * @param yawRate (Unnecessary) Angular velocity of robot yaw in degrees per second - * @param pitch (Unnecessary) Robot pitch in degrees + * @param pitch (Unnecessary) Robot pitch in degrees * @param pitchRate (Unnecessary) Angular velocity of robot pitch in degrees per second * @param roll (Unnecessary) Robot roll in degrees * @param rollRate (Unnecessary) Angular velocity of robot roll in degrees per second */ - public static void SetRobotOrientation( - String limelightName, - double yaw, - double yawRate, - double pitch, - double pitchRate, - double roll, - double rollRate) { - SetRobotOrientation_INTERNAL( - limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, true); - } - - public static void SetRobotOrientation_NoFlush( - String limelightName, - double yaw, - double yawRate, - double pitch, - double pitchRate, - double roll, - double rollRate) { - SetRobotOrientation_INTERNAL( - limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, false); - } - - private static void SetRobotOrientation_INTERNAL( - String limelightName, - double yaw, - double yawRate, - double pitch, - double pitchRate, - double roll, - double rollRate, - boolean flush) { + public static void SetRobotOrientation(String limelightName, double yaw, double yawRate, + double pitch, double pitchRate, + double roll, double rollRate) { + SetRobotOrientation_INTERNAL(limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, true); + } + + public static void SetRobotOrientation_NoFlush(String limelightName, double yaw, double yawRate, + double pitch, double pitchRate, + double roll, double rollRate) { + SetRobotOrientation_INTERNAL(limelightName, yaw, yawRate, pitch, pitchRate, roll, rollRate, false); + } + + private static void SetRobotOrientation_INTERNAL(String limelightName, double yaw, double yawRate, + double pitch, double pitchRate, + double roll, double rollRate, boolean flush) { double[] entries = new double[6]; entries[0] = 
yaw; @@ -1522,14 +1749,15 @@ private static void SetRobotOrientation_INTERNAL( entries[4] = roll; entries[5] = rollRate; setLimelightNTDoubleArray(limelightName, "robot_orientation_set", entries); - if (flush) { + if(flush) + { Flush(); } } - + /** * Configures the IMU mode for MegaTag2 Localization - * + * * @param limelightName Name/identifier of the Limelight * @param mode IMU mode. */ @@ -1538,26 +1766,29 @@ public static void SetIMUMode(String limelightName, int mode) { } /** - * Sets the 3D point-of-interest offset for the current fiducial pipeline. - * https://docs.limelightvision.io/docs/docs-limelight/pipeline-apriltag/apriltag-3d#point-of-interest-tracking - * + * Configures the complementary filter alpha value for IMU Assist Modes (Modes 3 and 4) + * * @param limelightName Name/identifier of the Limelight - * @param x X offset in meters - * @param y Y offset in meters - * @param z Z offset in meters + * @param alpha Defaults to .001. Higher values will cause the internal IMU to converge onto the assist source more rapidly. */ - public static void SetFidcuial3DOffset(String limelightName, double x, double y, double z) { + public static void SetIMUAssistAlpha(String limelightName, double alpha) { + setLimelightNTDouble(limelightName, "imuassistalpha_set", alpha); + } - double[] entries = new double[3]; - entries[0] = x; - entries[1] = y; - entries[2] = z; - setLimelightNTDoubleArray(limelightName, "fiducial_offset_set", entries); + + /** + * Configures the throttle value. Set to 100-200 while disabled to reduce thermal output/temperature. + * + * @param limelightName Name/identifier of the Limelight + * @param throttle Defaults to 0. Your Limelight will process one frame after skipping frames. + */ + public static void SetThrottle(String limelightName, int throttle) { + setLimelightNTDouble(limelightName, "throttle_set", throttle); } /** - * Overrides the valid AprilTag IDs that will be used for localization. 
Tags not in this list - * will be ignored for robot pose estimation. + * Overrides the valid AprilTag IDs that will be used for localization. + * Tags not in this list will be ignored for robot pose estimation. * * @param limelightName Name/identifier of the Limelight * @param validIDs Array of valid AprilTag IDs to track @@ -1566,41 +1797,45 @@ public static void SetFiducialIDFiltersOverride(String limelightName, int[] vali double[] validIDsDouble = new double[validIDs.length]; for (int i = 0; i < validIDs.length; i++) { validIDsDouble[i] = validIDs[i]; - } + } setLimelightNTDoubleArray(limelightName, "fiducial_id_filters_set", validIDsDouble); } /** - * Sets the downscaling factor for AprilTag detection. Increasing downscale can improve - * performance at the cost of potentially reduced detection range. - * + * Sets the downscaling factor for AprilTag detection. + * Increasing downscale can improve performance at the cost of potentially reduced detection range. + * * @param limelightName Name/identifier of the Limelight - * @param downscale Downscale factor. Valid values: 1.0 (no downscale), 1.5, 2.0, 3.0, 4.0. Set - * to 0 for pipeline control. + * @param downscale Downscale factor. Valid values: 1.0 (no downscale), 1.5, 2.0, 3.0, 4.0. Set to 0 for pipeline control. */ - public static void SetFiducialDownscalingOverride(String limelightName, float downscale) { + public static void SetFiducialDownscalingOverride(String limelightName, float downscale) + { int d = 0; // pipeline - if (downscale == 1.0) { + if (downscale == 1.0) + { d = 1; } - if (downscale == 1.5) { + if (downscale == 1.5) + { d = 2; } - if (downscale == 2) { + if (downscale == 2) + { d = 3; } - if (downscale == 3) { + if (downscale == 3) + { d = 4; } - if (downscale == 4) { + if (downscale == 4) + { d = 5; } setLimelightNTDouble(limelightName, "fiducial_downscale_set", d); } - + /** * Sets the camera pose relative to the robot. 
- * * @param limelightName Name of the Limelight camera * @param forward Forward offset in meters * @param side Side offset in meters @@ -1609,14 +1844,7 @@ public static void SetFiducialDownscalingOverride(String limelightName, float do * @param pitch Pitch angle in degrees * @param yaw Yaw angle in degrees */ - public static void setCameraPose_RobotSpace( - String limelightName, - double forward, - double side, - double up, - double roll, - double pitch, - double yaw) { + public static void setCameraPose_RobotSpace(String limelightName, double forward, double side, double up, double roll, double pitch, double yaw) { double[] entries = new double[6]; entries[0] = forward; entries[1] = side; @@ -1641,38 +1869,42 @@ public static double[] getPythonScriptData(String limelightName) { ///// ///// - /** Asynchronously take snapshot. */ - public static CompletableFuture takeSnapshot(String tableName, String snapshotName) { - return CompletableFuture.supplyAsync( - () -> { - return SYNCH_TAKESNAPSHOT(tableName, snapshotName); - }); + /** + * Triggers a snapshot capture via NetworkTables by incrementing the snapshot counter. + * Rate-limited to once per 10 frames on the Limelight. + * @param limelightName Name of the Limelight camera + */ + public static void triggerSnapshot(String limelightName) { + double current = getLimelightNTDouble(limelightName, "snapshot"); + setLimelightNTDouble(limelightName, "snapshot", current + 1); } - private static boolean SYNCH_TAKESNAPSHOT(String tableName, String snapshotName) { - URL url = getLimelightURLString(tableName, "capturesnapshot"); - try { - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestMethod("GET"); - if (snapshotName != null && snapshotName != "") { - connection.setRequestProperty("snapname", snapshotName); - } + /** + * Enables or pauses the rewind buffer recording. 
+ * @param limelightName Name of the Limelight camera + * @param enabled True to enable recording, false to pause + */ + public static void setRewindEnabled(String limelightName, boolean enabled) { + setLimelightNTDouble(limelightName, "rewind_enable_set", enabled ? 1 : 0); + } - int responseCode = connection.getResponseCode(); - if (responseCode == 200) { - return true; - } else { - System.err.println("Bad LL Request"); - } - } catch (IOException e) { - System.err.println(e.getMessage()); - } - return false; + /** + * Triggers a rewind capture with the specified duration. + * Maximum duration is 165 seconds. Rate-limited on the Limelight. + * @param limelightName Name of the Limelight camera + * @param durationSeconds Duration of rewind capture in seconds (max 165) + */ + public static void triggerRewindCapture(String limelightName, double durationSeconds) { + double[] currentArray = getLimelightNTDoubleArray(limelightName, "capture_rewind"); + double counter = (currentArray.length > 0) ? currentArray[0] : 0; + double[] entries = new double[2]; + entries[0] = counter + 1; + entries[1] = Math.min(durationSeconds, 165); + setLimelightNTDoubleArray(limelightName, "capture_rewind", entries); } /** * Gets the latest JSON results output and returns a LimelightResults object. 
- * * @param limelightName Name of the Limelight camera * @return LimelightResults object containing all current target data */ @@ -1681,13 +1913,19 @@ public static LimelightResults getLatestResults(String limelightName) { long start = System.nanoTime(); LimelightHelpers.LimelightResults results = new LimelightHelpers.LimelightResults(); if (mapper == null) { - mapper = - new ObjectMapper() - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + mapper = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); } try { - results = mapper.readValue(getJSONDump(limelightName), LimelightResults.class); + String jsonString = getJSONDump(limelightName); + if (jsonString == null || jsonString.isEmpty() || jsonString.isBlank()) { + results.error = "lljson error: empty json"; + } else { + results = mapper.readValue(jsonString, LimelightResults.class); + if (results.imuResults != null) { + results.imuResults.parseDataArray(); + } + } } catch (JsonProcessingException e) { results.error = "lljson error: " + e.getMessage(); } @@ -1701,4 +1939,27 @@ public static LimelightResults getLatestResults(String limelightName) { return results; } + + /** + * Sets up port forwarding for a Limelight 3A/3G connected via USB. + * This allows access to the Limelight web interface and video stream + * when connected to the robot over USB. + * + * For usbIndex 0: ports 5800-5809 forward to 172.29.0.1 + * For usbIndex 1: ports 5810-5819 forward to 172.29.1.1 + * etc. + * + * Call this method once during robot initialization. + * To access the interface of the camera with usbIndex0, you would go to roboRIO-(teamnum)-FRC.local:5801. Port 5811 for usb index 1 + * + * @param usbIndex The USB index of the Limelight (0, 1, 2, etc.) + */ + public static void setupPortForwardingUSB(int usbIndex) { + String ip = "172.29." 
+ usbIndex + ".1"; + int basePort = 5800 + (usbIndex * 10); + + for (int i = 0; i < 10; i++) { + PortForwarder.add(basePort + i, ip, 5800 + i); + } + } }