diff --git a/FtcRobotController/build.gradle b/FtcRobotController/build.gradle index e25651fbb315..9fa21699dd27 100644 --- a/FtcRobotController/build.gradle +++ b/FtcRobotController/build.gradle @@ -14,7 +14,11 @@ android { buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"' } - compileSdkVersion 29 + buildFeatures { + buildConfig = true + } + + compileSdkVersion 30 compileOptions { sourceCompatibility JavaVersion.VERSION_1_8 diff --git a/FtcRobotController/src/main/AndroidManifest.xml b/FtcRobotController/src/main/AndroidManifest.xml index 787878b92fe7..c873221b6dd0 100644 --- a/FtcRobotController/src/main/AndroidManifest.xml +++ b/FtcRobotController/src/main/AndroidManifest.xml @@ -1,8 +1,8 @@ + android:versionCode="60" + android:versionName="11.0"> diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOmniOpMode_Linear.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOmniOpMode_Linear.java index 1d14dfb76035..2644143d78db 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOmniOpMode_Linear.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOmniOpMode_Linear.java @@ -69,20 +69,20 @@ public class BasicOmniOpMode_Linear extends LinearOpMode { // Declare OpMode members for each of the 4 motors. private ElapsedTime runtime = new ElapsedTime(); - private DcMotor leftFrontDrive = null; - private DcMotor leftBackDrive = null; - private DcMotor rightFrontDrive = null; - private DcMotor rightBackDrive = null; + private DcMotor frontLeftDrive = null; + private DcMotor backLeftDrive = null; + private DcMotor frontRightDrive = null; + private DcMotor backRightDrive = null; @Override public void runOpMode() { // Initialize the hardware variables. Note that the strings used here must correspond // to the names assigned during the robot configuration step on the DS or RC devices. - leftFrontDrive = hardwareMap.get(DcMotor.class, "left_front_drive"); - leftBackDrive = hardwareMap.get(DcMotor.class, "left_back_drive"); - rightFrontDrive = hardwareMap.get(DcMotor.class, "right_front_drive"); - rightBackDrive = hardwareMap.get(DcMotor.class, "right_back_drive"); + frontLeftDrive = hardwareMap.get(DcMotor.class, "front_left_drive"); + backLeftDrive = hardwareMap.get(DcMotor.class, "back_left_drive"); + frontRightDrive = hardwareMap.get(DcMotor.class, "front_right_drive"); + backRightDrive = hardwareMap.get(DcMotor.class, "back_right_drive"); // ######################################################################################## // !!! IMPORTANT Drive Information. Test your motor directions. !!!!! @@ -94,10 +94,10 @@ public void runOpMode() { // when you first test your robot, push the left joystick forward and observe the direction the wheels turn. // Reverse the direction (flip FORWARD <-> REVERSE ) of any wheel that runs backward // Keep testing until ALL the wheels move the robot forward when you push the left joystick forward. 
- leftFrontDrive.setDirection(DcMotor.Direction.REVERSE); - leftBackDrive.setDirection(DcMotor.Direction.REVERSE); - rightFrontDrive.setDirection(DcMotor.Direction.FORWARD); - rightBackDrive.setDirection(DcMotor.Direction.FORWARD); + frontLeftDrive.setDirection(DcMotor.Direction.REVERSE); + backLeftDrive.setDirection(DcMotor.Direction.REVERSE); + frontRightDrive.setDirection(DcMotor.Direction.FORWARD); + backRightDrive.setDirection(DcMotor.Direction.FORWARD); // Wait for the game to start (driver presses START) telemetry.addData("Status", "Initialized"); @@ -117,22 +117,22 @@ public void runOpMode() { // Combine the joystick requests for each axis-motion to determine each wheel's power. // Set up a variable for each drive wheel to save the power level for telemetry. - double leftFrontPower = axial + lateral + yaw; - double rightFrontPower = axial - lateral - yaw; - double leftBackPower = axial - lateral + yaw; - double rightBackPower = axial + lateral - yaw; + double frontLeftPower = axial + lateral + yaw; + double frontRightPower = axial - lateral - yaw; + double backLeftPower = axial - lateral + yaw; + double backRightPower = axial + lateral - yaw; // Normalize the values so no wheel power exceeds 100% // This ensures that the robot maintains the desired motion. - max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower)); - max = Math.max(max, Math.abs(leftBackPower)); - max = Math.max(max, Math.abs(rightBackPower)); + max = Math.max(Math.abs(frontLeftPower), Math.abs(frontRightPower)); + max = Math.max(max, Math.abs(backLeftPower)); + max = Math.max(max, Math.abs(backRightPower)); if (max > 1.0) { - leftFrontPower /= max; - rightFrontPower /= max; - leftBackPower /= max; - rightBackPower /= max; + frontLeftPower /= max; + frontRightPower /= max; + backLeftPower /= max; + backRightPower /= max; } // This is test code: @@ -146,22 +146,22 @@ public void runOpMode() { // Once the correct motors move in the correct direction re-comment this code. /* - leftFrontPower = gamepad1.x ? 1.0 : 0.0; // X gamepad - leftBackPower = gamepad1.a ? 1.0 : 0.0; // A gamepad - rightFrontPower = gamepad1.y ? 1.0 : 0.0; // Y gamepad - rightBackPower = gamepad1.b ? 1.0 : 0.0; // B gamepad + frontLeftPower = gamepad1.x ? 1.0 : 0.0; // X gamepad + backLeftPower = gamepad1.a ? 1.0 : 0.0; // A gamepad + frontRightPower = gamepad1.y ? 1.0 : 0.0; // Y gamepad + backRightPower = gamepad1.b ? 1.0 : 0.0; // B gamepad */ // Send calculated power to wheels - leftFrontDrive.setPower(leftFrontPower); - rightFrontDrive.setPower(rightFrontPower); - leftBackDrive.setPower(leftBackPower); - rightBackDrive.setPower(rightBackPower); + frontLeftDrive.setPower(frontLeftPower); + frontRightDrive.setPower(frontRightPower); + backLeftDrive.setPower(backLeftPower); + backRightDrive.setPower(backRightPower); // Show the elapsed game time and wheel power. 
telemetry.addData("Status", "Run Time: " + runtime.toString()); - telemetry.addData("Front left/Right", "%4.2f, %4.2f", leftFrontPower, rightFrontPower); - telemetry.addData("Back left/Right", "%4.2f, %4.2f", leftBackPower, rightBackPower); + telemetry.addData("Front left/Right", "%4.2f, %4.2f", frontLeftPower, frontRightPower); + telemetry.addData("Back left/Right", "%4.2f, %4.2f", backLeftPower, backRightPower); telemetry.update(); } }} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java index d90261ef00ab..3cb4d9fb0f58 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java @@ -224,14 +224,17 @@ private void telemetryAprilTag() { for (AprilTagDetection detection : currentDetections) { if (detection.metadata != null) { telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name)); - telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", - detection.robotPose.getPosition().x, - detection.robotPose.getPosition().y, - detection.robotPose.getPosition().z)); - telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", - detection.robotPose.getOrientation().getPitch(AngleUnit.DEGREES), - detection.robotPose.getOrientation().getRoll(AngleUnit.DEGREES), - detection.robotPose.getOrientation().getYaw(AngleUnit.DEGREES))); + // Only use tags that don't have Obelisk in them + if (!detection.metadata.name.contains("Obelisk")) { + telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", + detection.robotPose.getPosition().x, + detection.robotPose.getPosition().y, + detection.robotPose.getPosition().z)); + telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", + detection.robotPose.getOrientation().getPitch(AngleUnit.DEGREES), + detection.robotPose.getOrientation().getRoll(AngleUnit.DEGREES), + detection.robotPose.getOrientation().getYaw(AngleUnit.DEGREES))); + } } else { telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id)); telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y)); diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptBlackboard.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptBlackboard.java new file mode 100644 index 000000000000..2c93abb9257c --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptBlackboard.java @@ -0,0 +1,87 @@ +/* Copyright (c) 2025 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. 
+ * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +/* + * Demonstrates how to use an OpMode to store data in the blackboard and retrieve it. + * This is useful for storing information in Auto and then retrieving it in your TeleOp. + * + * Be aware that this is NOT saved across power reboots or downloads or robot resets. + * + * You also need to make sure that both the storing and retrieving are done using the same + * type. For example, storing a Double in the blackboard and retrieving it as an Integer + * will fail when you typecast it. + * + * The blackboard is a standard hashmap so you can use methods like: + * put, get, containsKey, remove, etc. + */ +@TeleOp(name = "Concept: Blackboard", group = "Concept") +@Disabled +public class ConceptBlackboard extends OpMode { + // We use constants here so we won't have the problem of typos in different places when we + // meant to refer to the same thing. + public static final String TIMES_STARTED_KEY = "Times started"; + public static final String ALLIANCE_KEY = "Alliance"; + + /** + * This method will be called once, when the INIT button is pressed. + */ + @Override + public void init() { + // This gets us what is in the blackboard or the default if it isn't in there. + Object timesStarted = blackboard.getOrDefault(TIMES_STARTED_KEY, 0); + blackboard.put(TIMES_STARTED_KEY, (int) timesStarted + 1); + + telemetry.addData("OpMode started times", blackboard.get(TIMES_STARTED_KEY)); + } + + /** + * This method will be called repeatedly during the period between when + * the START button is pressed and when the OpMode is stopped. + *
+ * If the left bumper is pressed it will store the value "RED" in the blackboard for alliance. + * If the right bumper is pressed it will store the value "BLUE" in the blackboard for alliance. + *
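+ * As an illustrative sketch (not part of the original sample), another OpMode such as your
+ * TeleOp could later read the stored value back, as long as it uses the same key and type:
+ *   String alliance = (String) blackboard.getOrDefault(ALLIANCE_KEY, "UNKNOWN");
+ *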
+ * You'll notice that if you quit the OpMode and come back in, the value will persist. + */ + @Override + public void loop() { + if (gamepad1.left_bumper) { + blackboard.put(ALLIANCE_KEY, "RED"); + } else if (gamepad1.right_bumper) { + blackboard.put(ALLIANCE_KEY, "BLUE"); + } + telemetry.addData("Alliance", blackboard.get(ALLIANCE_KEY)); + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadEdgeDetection.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadEdgeDetection.java new file mode 100644 index 000000000000..90243ac50a2c --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadEdgeDetection.java @@ -0,0 +1,92 @@ +/* +Copyright (c) 2024 Miriam Sinton-Remes + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted (subject to the limitations in the disclaimer below) provided that +the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of FIRST nor the names of its contributors may be used to +endorse or promote products derived from this software without specific prior +written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS +LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +/* + * This OpMode illustrates using edge detection on a gamepad. + * + * Simply checking the state of a gamepad button each time could result in triggering an effect + * multiple times. Edge detection ensures that you only detect one button press, regardless of how + * long the button is held. + * + * There are two main types of edge detection. Rising edge detection will trigger when a button is + * first pressed. Falling edge detection will trigger when the button is released. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
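+ *
+ * As a sketch of typical rising-edge usage (not part of this sample), an action can be
+ * triggered exactly once per press, no matter how long the button is held:
+ *   if (gamepad1.leftBumperWasPressed()) {
+ *       clawOpen = !clawOpen;   // 'clawOpen' is a hypothetical boolean state variable
+ *   }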
+ */ + +@Disabled +@TeleOp(name="Concept: Gamepad Edge Detection", group ="Concept") +public class ConceptGamepadEdgeDetection extends LinearOpMode { + + @Override + public void runOpMode() { + // Wait for the DS start button to be pressed + waitForStart(); + + while (opModeIsActive()) { + // Update the telemetry + telemetryButtonData(); + + // Wait 2 seconds before doing another check + sleep(2000); + } + } + + public void telemetryButtonData() { + // Add the status of the Gamepad 1 Left Bumper + telemetry.addData("Gamepad 1 Left Bumper Pressed", gamepad1.leftBumperWasPressed()); + telemetry.addData("Gamepad 1 Left Bumper Released", gamepad1.leftBumperWasReleased()); + telemetry.addData("Gamepad 1 Left Bumper Status", gamepad1.left_bumper); + + // Add an empty line to separate the buttons in telemetry + telemetry.addLine(); + + // Add the status of the Gamepad 1 Right Bumper + telemetry.addData("Gamepad 1 Right Bumper Pressed", gamepad1.rightBumperWasPressed()); + telemetry.addData("Gamepad 1 Right Bumper Released", gamepad1.rightBumperWasReleased()); + telemetry.addData("Gamepad 1 Right Bumper Status", gamepad1.right_bumper); + + // Add a note that the telemetry is only updated every 2 seconds + telemetry.addLine("\nTelemetry is updated every 2 seconds."); + + // Update the telemetry on the DS screen + telemetry.update(); + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator.java deleted file mode 100644 index 987694db111c..000000000000 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright (c) 2024 Phil Malone - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ - -package org.firstinspires.ftc.robotcontroller.external.samples; - -import android.util.Size; - -import com.qualcomm.robotcore.eventloop.opmode.Disabled; -import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; -import com.qualcomm.robotcore.eventloop.opmode.TeleOp; -import com.qualcomm.robotcore.util.SortOrder; - -import org.firstinspires.ftc.robotcore.external.Telemetry; -import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; -import org.firstinspires.ftc.vision.VisionPortal; -import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor; -import org.firstinspires.ftc.vision.opencv.ColorRange; -import org.firstinspires.ftc.vision.opencv.ImageRegion; -import org.opencv.core.RotatedRect; - -import java.util.List; - -/* - * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions - * - * Unlike a "color sensor" which determines the color of an object in the field of view, this "color locator" - * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that match the requested color range. - * These blobs can be further filtered and sorted to find the one most likely to be the item the user is looking for. - * - * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process. - * The ColorBlobLocatorProcessor process is created first, and then the VisionPortal is built to use this process. - * The ColorBlobLocatorProcessor analyses the ROI and locates pixels that match the ColorRange to form a "mask". - * The matching pixels are then collected into contiguous "blobs" of pixels. The outer boundaries of these blobs are called its "contour". - * For each blob, the process then creates the smallest possible rectangle "boxFit" that will fully encase the contour. - * The user can then call getBlobs() to retrieve the list of Blobs, where each Blob contains the contour and the boxFit data. - * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest contours are listed first. - * - * To aid the user, a colored boxFit rectangle is drawn on the camera preview to show the location of each Blob - * The original Blob contour can also be added to the preview. This is helpful when configuring the ColorBlobLocatorProcessor parameters. - * - * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. - * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list - */ - -@Disabled -@TeleOp(name = "Concept: Vision Color-Locator", group = "Concept") -public class ConceptVisionColorLocator extends LinearOpMode -{ - @Override - public void runOpMode() - { - /* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class. - * - Specify the color range you are looking for. You can use a predefined color, or create you own color range - * .setTargetColorRange(ColorRange.BLUE) // use a predefined color match - * Available predefined colors are: RED, BLUE YELLOW GREEN - * .setTargetColorRange(new ColorRange(ColorSpace.YCrCb, // or define your own color match - * new Scalar( 32, 176, 0), - * new Scalar(255, 255, 132))) - * - * - Focus the color locator by defining a RegionOfInterest (ROI) which you want to search. - * This can be the entire frame, or a sub-region defined using: - * 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system. - * Use one form of the ImageRegion class to define the ROI. 
- * ImageRegion.entireFrame() - * ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixel square near the upper left corner - * ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5) 50% width/height square centered on screen - * - * - Define which contours are included. - * You can get ALL the contours, or you can skip any contours that are completely inside another contour. - * .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY) // return all contours - * .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) // exclude contours inside other contours - * note: EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up areas of solid color. - * - * - turn the display of contours ON or OFF. Turning this on helps debugging but takes up valuable CPU time. - * .setDrawContours(true) - * - * - include any pre-processing of the image or mask before looking for Blobs. - * There are some extra processing you can include to improve the formation of blobs. Using these features requires - * an understanding of how they may effect the final blobs. The "pixels" argument sets the NxN kernel size. - * .setBlurSize(int pixels) Blurring an image helps to provide a smooth color transition between objects, and smoother contours. - * The higher the number of pixels, the more blurred the image becomes. - * Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement. - * Blurring too much may hide smaller features. A "pixels" size of 5 is good for a 320x240 image. - * .setErodeSize(int pixels) Erosion removes floating pixels and thin lines so that only substantive objects remain. - * Erosion can grow holes inside regions, and also shrink objects. - * "pixels" in the range of 2-4 are suitable for low res images. - * .setDilateSize(int pixels) Dilation makes objects more visible by filling in small holes, making lines appear thicker, - * and making filled shapes appear larger. Dilation is useful for joining broken parts of an - * object, such as when removing noise from an image. - * "pixels" in the range of 2-4 are suitable for low res images. - */ - ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder() - .setTargetColorRange(ColorRange.BLUE) // use a predefined color match - .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) // exclude blobs inside blobs - .setRoi(ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5)) // search central 1/4 of camera view - .setDrawContours(true) // Show contours on the Stream Preview - .setBlurSize(5) // Smooth the transitions between different colors in image - .build(); - - /* - * Build a vision portal to run the Color Locator process. - * - * - Add the colorLocator process created above. - * - Set the desired video resolution. - * Since a high resolution will not improve this process, choose a lower resolution that is - * supported by your camera. This will improve overall performance and reduce latency. - * - Choose your video source. This may be - * .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam - * or - * .setCamera(BuiltinCameraDirection.BACK) ... for a Phone Camera - */ - VisionPortal portal = new VisionPortal.Builder() - .addProcessor(colorLocator) - .setCameraResolution(new Size(320, 240)) - .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) - .build(); - - telemetry.setMsTransmissionInterval(50); // Speed up telemetry updates, Just use for debugging. 
- telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE); - - // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode. - while (opModeIsActive() || opModeInInit()) - { - telemetry.addData("preview on/off", "... Camera Stream\n"); - - // Read the current list - List blobs = colorLocator.getBlobs(); - - /* - * The list of Blobs can be filtered to remove unwanted Blobs. - * Note: All contours will be still displayed on the Stream Preview, but only those that satisfy the filter - * conditions will remain in the current list of "blobs". Multiple filters may be used. - * - * Use any of the following filters. - * - * ColorBlobLocatorProcessor.Util.filterByArea(minArea, maxArea, blobs); - * A Blob's area is the number of pixels contained within the Contour. Filter out any that are too big or small. - * Start with a large range and then refine the range based on the likely size of the desired object in the viewfinder. - * - * ColorBlobLocatorProcessor.Util.filterByDensity(minDensity, maxDensity, blobs); - * A blob's density is an indication of how "full" the contour is. - * If you put a rubber band around the contour you would get the "Convex Hull" of the contour. - * The density is the ratio of Contour-area to Convex Hull-area. - * - * ColorBlobLocatorProcessor.Util.filterByAspectRatio(minAspect, maxAspect, blobs); - * A blob's Aspect ratio is the ratio of boxFit long side to short side. - * A perfect Square has an aspect ratio of 1. All others are > 1 - */ - ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs); // filter out very small blobs. - - /* - * The list of Blobs can be sorted using the same Blob attributes as listed above. - * No more than one sort call should be made. Sorting can use ascending or descending order. - * ColorBlobLocatorProcessor.Util.sortByArea(SortOrder.DESCENDING, blobs); // Default - * ColorBlobLocatorProcessor.Util.sortByDensity(SortOrder.DESCENDING, blobs); - * ColorBlobLocatorProcessor.Util.sortByAspectRatio(SortOrder.DESCENDING, blobs); - */ - - telemetry.addLine(" Area Density Aspect Center"); - - // Display the size (area) and center location for each Blob. 
- for(ColorBlobLocatorProcessor.Blob b : blobs) - { - RotatedRect boxFit = b.getBoxFit(); - telemetry.addLine(String.format("%5d %4.2f %5.2f (%3d,%3d)", - b.getContourArea(), b.getDensity(), b.getAspectRatio(), (int) boxFit.center.x, (int) boxFit.center.y)); - } - - telemetry.update(); - sleep(50); - } - } -} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator_Circle.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator_Circle.java new file mode 100644 index 000000000000..4ec17aaba6c8 --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator_Circle.java @@ -0,0 +1,245 @@ +/* + * Copyright (c) 2024 Phil Malone + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.graphics.Color; +import android.util.Size; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.Telemetry; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.vision.VisionPortal; +import org.firstinspires.ftc.vision.opencv.Circle; +import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor; +import org.firstinspires.ftc.vision.opencv.ColorRange; +import org.firstinspires.ftc.vision.opencv.ImageRegion; + +import java.util.List; + +/* + * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions. + * This sample is targeted towards circular blobs. To see rectangles, look at ConceptVisionColorLocator_Rectangle + * + * Unlike a "color sensor" which determines the color of a nearby object, this "color locator" + * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that + * match the requested color range. These blobs can be further filtered and sorted to find the one + * most likely to be the item the user is looking for. + * + * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process. + * The ColorBlobLocatorProcessor (CBLP) process is created first, and then the VisionPortal is built. + * The (CBLP) analyses the ROI and locates pixels that match the ColorRange to form a "mask".
+ * The matching pixels are then collected into contiguous "blobs" of pixels. + * The outer boundary of each blob is called its "contour". For each blob, the process then + * creates the smallest possible circle that will fully encase the contour. The user can then call + * getBlobs() to retrieve the list of Blobs, where each contains the contour and the circle. + * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest + * contours are listed first. + * + * A colored enclosing circle is drawn on the camera preview to show the location of each Blob + * The original Blob contour can also be added to the preview. + * This is helpful when configuring the ColorBlobLocatorProcessor parameters. + * + * Tip: Connect an HDMI monitor to the Control Hub to view the Color Location process in real-time. + * Or use a screen copy utility like ScrCpy.exe to view the video remotely. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@Disabled +@TeleOp(name = "Concept: Vision Color-Locator (Circle)", group = "Concept") +public class ConceptVisionColorLocator_Circle extends LinearOpMode { + @Override + public void runOpMode() { + /* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class. + * - Specify the color range you are looking for. Use a predefined color, or create your own + * + * .setTargetColorRange(ColorRange.BLUE) // use a predefined color match + * Available colors are: RED, BLUE, YELLOW, GREEN, ARTIFACT_GREEN, ARTIFACT_PURPLE + * .setTargetColorRange(new ColorRange(ColorSpace.YCrCb, // or define your own color match + * new Scalar( 32, 176, 0), + * new Scalar(255, 255, 132))) + * + * - Focus the color locator by defining a RegionOfInterest (ROI) which you want to search. + * This can be the entire frame, or a sub-region defined using: + * 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system. + * Use one form of the ImageRegion class to define the ROI. + * ImageRegion.entireFrame() + * ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixels at upper left corner + * ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5) 50% width/height in center + * + * - Define which contours are included. + * You can get ALL the contours, or ignore contours that are completely inside another contour. + * .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY) + * .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) + * EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up solid colors. + * + * - Turn the displays of contours ON or OFF. + * Turning these on helps debugging but takes up valuable CPU time. + * .setDrawContours(true) Draws an outline of each contour. + * .setEnclosingCircleColor(int color) Draws a circle around each contour. 0 to disable. + * .setBoxFitColor(int color) Draws a rectangle around each contour. 0 to disable. ON by default. + * + * + * - include any pre-processing of the image or mask before looking for Blobs. + * There are some extra processing steps you can include to improve the formation of blobs. + * Using these features requires an understanding of how they may affect the final + * blobs. The "pixels" argument sets the NxN kernel size. + * .setBlurSize(int pixels) + * Blurring an image helps to provide a smooth color transition between objects, + * and smoother contours.
The higher the number, the more blurred the image becomes. + * Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement. + * Blurring too much may hide smaller features. A size of 5 is good for a 320x240 image. + * + * .setErodeSize(int pixels) + * Erosion removes floating pixels and thin lines so that only substantive objects remain. + * Erosion can grow holes inside regions, and also shrink objects. + * "pixels" in the range of 2-4 are suitable for low res images. + * + * .setDilateSize(int pixels) + * Dilation makes objects and lines more visible by filling in small holes, and making + * filled shapes appear larger. Dilation is useful for joining broken parts of an + * object, such as when removing noise from an image. + * "pixels" in the range of 2-4 are suitable for low res images. + * + * .setMorphOperationType(MorphOperationType morphOperationType) + * This defines the order in which the Erode/Dilate actions are performed. + * OPENING: Will Erode and then Dilate which will make small noise blobs go away + * CLOSING: Will Dilate and then Erode which will tend to fill in any small holes in blob edges. + */ + ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder() + .setTargetColorRange(ColorRange.ARTIFACT_PURPLE) // Use a predefined color match + .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) + .setRoi(ImageRegion.asUnityCenterCoordinates(-0.75, 0.75, 0.75, -0.75)) + .setDrawContours(true) // Show contours on the Stream Preview + .setBoxFitColor(0) // Disable the drawing of rectangles + .setCircleFitColor(Color.rgb(255, 255, 0)) // Draw a circle + .setBlurSize(5) // Smooth the transitions between different colors in image + + // the following options have been added to fill in perimeter holes. + .setDilateSize(15) // Expand blobs to fill any divots on the edges + .setErodeSize(15) // Shrink blobs back to original size + .setMorphOperationType(ColorBlobLocatorProcessor.MorphOperationType.CLOSING) + + .build(); + /* + * Build a vision portal to run the Color Locator process. + * + * - Add the colorLocator process created above. + * - Set the desired video resolution. + * Since a high resolution will not improve this process, choose a lower resolution + * that is supported by your camera. This will improve overall performance and reduce + * latency. + * - Choose your video source. This may be + * .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam + * or + * .setCamera(BuiltinCameraDirection.BACK) ... for a Phone Camera + */ + VisionPortal portal = new VisionPortal.Builder() + .addProcessor(colorLocator) + .setCameraResolution(new Size(320, 240)) + .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) + .build(); + + telemetry.setMsTransmissionInterval(100); // Speed up telemetry updates for debugging. + telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE); + + // WARNING: To view the stream preview on the Driver Station, this code runs in INIT mode. + while (opModeIsActive() || opModeInInit()) { + telemetry.addData("preview on/off", "... Camera Stream\n"); + + // Read the current list + List blobs = colorLocator.getBlobs(); + + /* + * The list of Blobs can be filtered to remove unwanted Blobs. + * Note: All contours will be still displayed on the Stream Preview, but only those + * that satisfy the filter conditions will remain in the current list of + * "blobs". Multiple filters may be used. 
+ * + * To perform a filter + * ColorBlobLocatorProcessor.Util.filterByCriteria(criteria, minValue, maxValue, blobs); + * + * The following criteria are currently supported. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA + * A Blob's area is the number of pixels contained within the Contour. Filter out any + * that are too big or small. Start with a large range and then refine the range based + * on the likely size of the desired object in the viewfinder. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_DENSITY + * A blob's density is an indication of how "full" the contour is. + * If you put a rubber band around the contour you would get the "Convex Hull" of the + * contour. The density is the ratio of Contour-area to Convex Hull-area. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_ASPECT_RATIO + * A blob's Aspect ratio is the ratio of boxFit long side to short side. + * A perfect Square has an aspect ratio of 1. All others are > 1 + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_ARC_LENGTH + * A blob's arc length is the perimeter of the blob. + * This can be used in conjunction with an area filter to detect oddly shaped blobs. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_CIRCULARITY + * A blob's circularity is how circular it is based on the known area and arc length. + * A perfect circle has a circularity of 1. All others are < 1 + */ + ColorBlobLocatorProcessor.Util.filterByCriteria( + ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA, + 50, 20000, blobs); // filter out very small blobs. + + ColorBlobLocatorProcessor.Util.filterByCriteria( + ColorBlobLocatorProcessor.BlobCriteria.BY_CIRCULARITY, + 0.6, 1, blobs); /* filter out non-circular blobs. + * NOTE: You may want to adjust the minimum value depending on your use case. + * Circularity values will be affected by shadows, and will therefore vary based + * on the location of the camera on your robot and venue lighting. It is strongly + * encouraged to test your vision on the competition field if your event allows + * sensor calibration time. + */ + + /* + * The list of Blobs can be sorted using the same Blob attributes as listed above. + * No more than one sort call should be made. Sorting can use ascending or descending order. + * Here is an example: + * ColorBlobLocatorProcessor.Util.sortByCriteria( + * ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA, SortOrder.DESCENDING, blobs); + */ + + telemetry.addLine("Circularity Radius Center"); + + // Display the Blob's circularity, and the size (radius) and center location of its circleFit. + for (ColorBlobLocatorProcessor.Blob b : blobs) { + + Circle circleFit = b.getCircle(); + telemetry.addLine(String.format("%5.3f %3d (%3d,%3d)", + b.getCircularity(), (int) circleFit.getRadius(), (int) circleFit.getX(), (int) circleFit.getY())); + } + + telemetry.update(); + sleep(100); // Match the telemetry update interval.
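+
+            // A sketch of acting on the result (not part of the original sample): blobs are
+            // sorted by contour area in descending order, so blobs.get(0) is the largest match.
+            // if (!blobs.isEmpty()) {
+            //     Circle largest = blobs.get(0).getCircle();
+            //     double xOffset = largest.getX() - 160;  // pixels from the center of a 320x240 image
+            //     // ...steer or strafe proportionally to xOffset...
+            // }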
+ } + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator_Rectangle.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator_Rectangle.java new file mode 100644 index 000000000000..5681f71231dd --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator_Rectangle.java @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2024 Phil Malone + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.util.Size; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.util.SortOrder; + +import org.firstinspires.ftc.robotcore.external.Telemetry; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.vision.VisionPortal; +import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor; +import org.firstinspires.ftc.vision.opencv.ColorRange; +import org.firstinspires.ftc.vision.opencv.ImageRegion; +import org.opencv.core.RotatedRect; + +import java.util.List; + +/* + * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions + * + * Unlike a "color sensor" which determines the color of a nearby object, this "color locator" + * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that + * match the requested color range. These blobs can be further filtered and sorted to find the one + * most likely to be the item the user is looking for. + * + * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process. + * The ColorBlobLocatorProcessor (CBLP) process is created first, and then the VisionPortal is built. + * The (CBLP) analyses the ROI and locates pixels that match the ColorRange to form a "mask". + * The matching pixels are then collected into contiguous "blobs" of pixels. + * The outer boundary of each blob is called its "contour". For each blob, the process then + * creates the smallest possible rectangle "boxFit" that will fully encase the contour. The user can + * then call getBlobs() to retrieve the list of Blobs, where each contains the contour and the boxFit.
+ * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest + * contours are listed first. + * + * A colored boxFit rectangle is drawn on the camera preview to show the location of each Blob + * The original Blob contour can also be added to the preview. + * This is helpful when configuring the ColorBlobLocatorProcessor parameters. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@Disabled +@TeleOp(name = "Concept: Color-Locator (Rectangle)", group = "Concept") +public class ConceptVisionColorLocator_Rectangle extends LinearOpMode +{ + @Override + public void runOpMode() + { + /* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class. + * - Specify the color range you are looking for. Use a predefined color, or create your own + * + * .setTargetColorRange(ColorRange.BLUE) // use a predefined color match + * Available colors are: RED, BLUE, YELLOW, GREEN, ARTIFACT_GREEN and ARTIFACT_PURPLE + * .setTargetColorRange(new ColorRange(ColorSpace.YCrCb, // or define your own color match + * new Scalar( 32, 176, 0), + * new Scalar(255, 255, 132))) + * + * - Focus the color locator by defining a RegionOfInterest (ROI) which you want to search. + * This can be the entire frame, or a sub-region defined using: + * 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system. + * Use one form of the ImageRegion class to define the ROI. + * ImageRegion.entireFrame() + * ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixels at upper left corner + * ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5) 50% width/height in center + * + * - Define which contours are included. + * You can get ALL the contours, or ignore contours that are completely inside another contour. + * .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY) + * .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) + * EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up solid colors. + * + * - turn the display of contours ON or OFF. Helps debugging but takes up valuable CPU time. + * .setDrawContours(true) + * + * - include any pre-processing of the image or mask before looking for Blobs. + * There are some extra processing steps you can include to improve the formation of blobs. + * Using these features requires an understanding of how they may affect the final blobs. + * The "pixels" argument sets the NxN kernel size. + * .setBlurSize(int pixels) + * Blurring an image helps to provide a smooth color transition between objects, + * and smoother contours. The higher the number, the more blurred the image becomes. + * Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement. + * Blurring too much may hide smaller features. A size of 5 is good for a 320x240 image. + * + * .setErodeSize(int pixels) + * Erosion removes floating pixels and thin lines so that only substantive objects remain. + * Erosion can grow holes inside regions, and also shrink objects. + * "pixels" in the range of 2-4 are suitable for low res images. + * + * .setDilateSize(int pixels) + * Dilation makes objects and lines more visible by filling in small holes, and making + * filled shapes appear larger. Dilation is useful for joining broken parts of an + * object, such as when removing noise from an image.
+ * "pixels" in the range of 2-4 are suitable for low res images. + */ + ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder() + .setTargetColorRange(ColorRange.ARTIFACT_PURPLE) // use a predefined color match + .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) + .setRoi(ImageRegion.asUnityCenterCoordinates(-0.75, 0.75, 0.75, -0.75)) + .setDrawContours(true) // Show contours on the Stream Preview + .setBlurSize(5) // Smooth the transitions between different colors in image + .build(); + + /* + * Build a vision portal to run the Color Locator process. + * + * - Add the colorLocator process created above. + * - Set the desired video resolution. + * A high resolution will not improve this process, so choose a lower resolution that is + * supported by your camera. This will improve overall performance and reduce latency. + * - Choose your video source. This may be + * .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam + * or + * .setCamera(BuiltinCameraDirection.BACK) ... for a Phone Camera + */ + VisionPortal portal = new VisionPortal.Builder() + .addProcessor(colorLocator) + .setCameraResolution(new Size(320, 240)) + .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) + .build(); + + telemetry.setMsTransmissionInterval(100); // Speed up telemetry updates for debugging. + telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE); + + // WARNING: To view the stream preview on the Driver Station, this code runs in INIT mode. + while (opModeIsActive() || opModeInInit()) + { + telemetry.addData("preview on/off", "... Camera Stream\n"); + + // Read the current list + List blobs = colorLocator.getBlobs(); + + /* + * The list of Blobs can be filtered to remove unwanted Blobs. + * Note: All contours will be still displayed on the Stream Preview, but only those + * that satisfy the filter conditions will remain in the current list of "blobs". + * Multiple filters may be used. + * + * To perform a filter + * ColorBlobLocatorProcessor.Util.filterByCriteria(criteria, minValue, maxValue, blobs); + * + * The following criteria are currently supported. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA + * A Blob's area is the number of pixels contained within the Contour. Filter out any + * that are too big or small. Start with a large range and then refine the range + * based on the likely size of the desired object in the viewfinder. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_DENSITY + * A blob's density is an indication of how "full" the contour is. + * If you put a rubber band around the contour you get the "Convex Hull" of the contour. + * The density is the ratio of Contour-area to Convex Hull-area. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_ASPECT_RATIO + * A blob's Aspect ratio is the ratio of boxFit long side to short side. + * A perfect Square has an aspect ratio of 1. All others are > 1 + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_ARC_LENGTH + * A blob's arc length is the perimeter of the blob. + * This can be used in conjunction with an area filter to detect oddly shaped blobs. + * + * ColorBlobLocatorProcessor.BlobCriteria.BY_CIRCULARITY + * A blob's circularity is how circular it is based on the known area and arc length. + * A perfect circle has a circularity of 1. All others are < 1 + */ + ColorBlobLocatorProcessor.Util.filterByCriteria( + ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA, + 50, 20000, blobs); // filter out very small blobs. 
+ + /* + * The list of Blobs can be sorted using the same Blob attributes as listed above. + * No more than one sort call should be made. Sorting can be ASCENDING or DESCENDING. + * Here is an example: + * ColorBlobLocatorProcessor.Util.sortByCriteria( + * ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA, SortOrder.DESCENDING, blobs); + */ + + telemetry.addLine("Ctr:(X,Y) Area Dens Aspect Arc Circ"); + + // Display the size (area) and center location for each Blob. + for(ColorBlobLocatorProcessor.Blob b : blobs) + { + RotatedRect boxFit = b.getBoxFit(); + telemetry.addLine(String.format("(%3d,%3d) %5d %4.2f %5.2f %3d %5.3f ", + (int) boxFit.center.x, (int) boxFit.center.y, b.getContourArea(), b.getDensity(), + b.getAspectRatio(), (int) b.getArcLength(), b.getCircularity())); + } + + telemetry.update(); + sleep(100); // Match the telemetry update interval. + } + } +} \ No newline at end of file diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java index 6be2bc4e62fe..5cce468d7b7d 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java @@ -21,13 +21,13 @@ package org.firstinspires.ftc.robotcontroller.external.samples; -import android.graphics.Color; import android.util.Size; import com.qualcomm.robotcore.eventloop.opmode.Disabled; import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import org.firstinspires.ftc.robotcore.external.Telemetry; import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; import org.firstinspires.ftc.vision.VisionPortal; import org.firstinspires.ftc.vision.opencv.ImageRegion; @@ -40,16 +40,20 @@ * * This sample performs the same function, except it uses a video camera to inspect an object or scene. * The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view. - * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected. + * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching + * color will be selected. * * To perform this function, a VisionPortal runs a PredominantColorProcessor process. - * The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process. - * The PredominantColorProcessor analyses the ROI and splits the colored pixels into several color-clusters. + * The PredominantColorProcessor (PCP) process is created first, and then the VisionPortal is built. + * The (PCP) analyses the ROI and splits the colored pixels into several color-clusters. * The largest of these clusters is then considered to be the "Predominant Color" * The process then matches the Predominant Color with the closest Swatch and returns that match. * * To aid the user, a colored rectangle is drawn on the camera preview to show the RegionOfInterest, - * The Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable. + * The Predominant Color is used to paint the rectangle border, so the user can visualize the color.
+ * + * Tip: Connect an HDMI monitor to the Control Hub to view the Color Sensor process in real-time. + * Or use a screen copy utility like ScrCpy.exe to view the video remotely. * * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list @@ -65,19 +69,20 @@ public void runOpMode() /* Build a "Color Sensor" vision processor based on the PredominantColorProcessor class. * * - Focus the color sensor by defining a RegionOfInterest (ROI) which you want to inspect. - * This can be the entire frame, or a sub-region defined using: - * 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system. - * Use one form of the ImageRegion class to define the ROI. - * ImageRegion.entireFrame() - * ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixel square near the upper left corner - * ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1) 10% width/height square centered on screen + * This can be the entire frame, or a sub-region defined using: + * 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system. + * Use one form of the ImageRegion class to define the ROI. + * ImageRegion.entireFrame() + * ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 square at the top left corner + * ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1) 10% W * H centered square * * - Set the list of "acceptable" color swatches (matches). * Only colors that you assign here will be returned. * If you know the sensor will be pointing to one of a few specific colors, enter them here. - * Or, if the sensor may be pointed randomly, provide some additional colors that may match the surrounding. + * Or, if the sensor may be pointed randomly, provide some additional colors that may match. * Possible choices are: - * RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE; + * RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE + * Note: For the 2026 season ARTIFACT_PURPLE and ARTIFACT_GREEN have been added. * * Note that in the example shown below, only some of the available colors are included. * This will force any other colored region into one of these colors. @@ -86,6 +91,8 @@ public void runOpMode() PredominantColorProcessor colorSensor = new PredominantColorProcessor.Builder() .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1)) .setSwatches( + PredominantColorProcessor.Swatch.ARTIFACT_GREEN, + PredominantColorProcessor.Swatch.ARTIFACT_PURPLE, PredominantColorProcessor.Swatch.RED, PredominantColorProcessor.Swatch.BLUE, PredominantColorProcessor.Swatch.YELLOW, @@ -98,7 +105,7 @@ public void runOpMode() * * - Add the colorSensor process created above. * - Set the desired video resolution. - * Since a high resolution will not improve this process, choose a lower resolution that is + * Since a high resolution will not improve this process, choose a lower resolution * supported by your camera. This will improve overall performance and reduce latency. * - Choose your video source. This may be * .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam @@ -111,23 +118,38 @@ public void runOpMode() .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) .build(); - telemetry.setMsTransmissionInterval(50); // Speed up telemetry updates, Just use for debugging. + telemetry.setMsTransmissionInterval(100); // Speed up telemetry updates, for debugging. 
+ telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE); - // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode. + // WARNING: To view the stream preview on the Driver Station, this code runs in INIT mode. while (opModeIsActive() || opModeInInit()) { - telemetry.addData("DS preview on/off", "3 dots, Camera Stream\n"); + telemetry.addLine("Preview on/off: 3 dots, Camera Stream\n"); + + // Request the most recent color analysis. This will return the closest matching + // colorSwatch and the predominant color in the RGB, HSV and YCrCb color spaces. + // The color space values are returned as three-element int[] arrays as follows: + // RGB Red 0-255, Green 0-255, Blue 0-255 + // HSV Hue 0-180, Saturation 0-255, Value 0-255 + // YCrCb Luminance(Y) 0-255, Cr 0-255 (center 128), Cb 0-255 (center 128) + // + // Note: to take actions based on the detected color, simply use the colorSwatch or + // color space value in a comparison or switch. eg: + + // if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) {.. some code ..} + // or: + // if (result.RGB[0] > 128) {... some code ...} - // Request the most recent color analysis. - // This will return the closest matching colorSwatch and the predominant RGB color. - // Note: to take actions based on the detected color, simply use the colorSwatch in a comparison or switch. - // eg: - // if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) {... some code ...} PredominantColorProcessor.Result result = colorSensor.getAnalysis(); // Display the Color Sensor result. - telemetry.addData("Best Match:", result.closestSwatch); - telemetry.addLine(String.format("R %3d, G %3d, B %3d", Color.red(result.rgb), Color.green(result.rgb), Color.blue(result.rgb))); + telemetry.addData("Best Match", result.closestSwatch); + telemetry.addLine(String.format("RGB (%3d, %3d, %3d)", + result.RGB[0], result.RGB[1], result.RGB[2])); + telemetry.addLine(String.format("HSV (%3d, %3d, %3d)", + result.HSV[0], result.HSV[1], result.HSV[2])); + telemetry.addLine(String.format("YCrCb (%3d, %3d, %3d)", + result.YCrCb[0], result.YCrCb[1], result.YCrCb[2])); telemetry.update(); sleep(20); diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveToAprilTagOmni.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveToAprilTagOmni.java index 9bac0069b911..4b777e2b4a47 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveToAprilTagOmni.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveToAprilTagOmni.java @@ -61,7 +61,7 @@ * To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (5mS) * You can determine the best Exposure and Gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder. * - * The code assumes a Robot Configuration with motors named: leftfront_drive and rightfront_drive, leftback_drive and rightback_drive. + * The code assumes a Robot Configuration with motors named: front_left_drive and front_right_drive, back_left_drive and back_right_drive. * The motor directions must be set so a positive power goes forward on all wheels. 
* This sample assumes that the current game AprilTag Library (usually for the current season) is being loaded by default, * so you should choose to approach a valid tag ID. @@ -103,10 +103,10 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode final double MAX_AUTO_STRAFE= 0.5; // Clip the strafing speed to this max value (adjust for your robot) final double MAX_AUTO_TURN = 0.3; // Clip the turn speed to this max value (adjust for your robot) - private DcMotor leftFrontDrive = null; // Used to control the left front drive wheel - private DcMotor rightFrontDrive = null; // Used to control the right front drive wheel - private DcMotor leftBackDrive = null; // Used to control the left back drive wheel - private DcMotor rightBackDrive = null; // Used to control the right back drive wheel + private DcMotor frontLeftDrive = null; // Used to control the left front drive wheel + private DcMotor frontRightDrive = null; // Used to control the right front drive wheel + private DcMotor backLeftDrive = null; // Used to control the left back drive wheel + private DcMotor backRightDrive = null; // Used to control the right back drive wheel private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera private static final int DESIRED_TAG_ID = -1; // Choose the tag you want to approach or set to -1 for ANY tag. @@ -127,18 +127,18 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode // Initialize the hardware variables. Note that the strings used here as parameters // to 'get' must match the names assigned during the robot configuration. // step (using the FTC Robot Controller app on the phone). - leftFrontDrive = hardwareMap.get(DcMotor.class, "leftfront_drive"); - rightFrontDrive = hardwareMap.get(DcMotor.class, "rightfront_drive"); - leftBackDrive = hardwareMap.get(DcMotor.class, "leftback_drive"); - rightBackDrive = hardwareMap.get(DcMotor.class, "rightback_drive"); + frontLeftDrive = hardwareMap.get(DcMotor.class, "front_left_drive"); + frontRightDrive = hardwareMap.get(DcMotor.class, "front_right_drive"); + backLeftDrive = hardwareMap.get(DcMotor.class, "back_left_drive"); + backRightDrive = hardwareMap.get(DcMotor.class, "back_right_drive"); // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive. // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips - leftFrontDrive.setDirection(DcMotor.Direction.REVERSE); - leftBackDrive.setDirection(DcMotor.Direction.REVERSE); - rightFrontDrive.setDirection(DcMotor.Direction.FORWARD); - rightBackDrive.setDirection(DcMotor.Direction.FORWARD); + frontLeftDrive.setDirection(DcMotor.Direction.REVERSE); + backLeftDrive.setDirection(DcMotor.Direction.REVERSE); + frontRightDrive.setDirection(DcMotor.Direction.FORWARD); + backRightDrive.setDirection(DcMotor.Direction.FORWARD); if (USE_WEBCAM) setManualExposure(6, 250); // Use low exposure time to reduce motion blur @@ -227,28 +227,28 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode */ public void moveRobot(double x, double y, double yaw) { // Calculate wheel powers. 
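+ // In this mixer, x is the axial (forward) request, y is the lateral (strafe) request, and + // yaw is the rotation request. A worked example with illustrative numbers: x = 0, y = 0, + // yaw = 0.5 gives -0.5 on both left wheels and +0.5 on both right wheels, turning the robot + // counter-clockwise in place.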
- double leftFrontPower = x -y -yaw; - double rightFrontPower = x +y +yaw; - double leftBackPower = x +y -yaw; - double rightBackPower = x -y +yaw; + double frontLeftPower = x - y - yaw; + double frontRightPower = x + y + yaw; + double backLeftPower = x + y - yaw; + double backRightPower = x - y + yaw; // Normalize wheel powers to be less than 1.0 - double max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower)); - max = Math.max(max, Math.abs(leftBackPower)); - max = Math.max(max, Math.abs(rightBackPower)); + double max = Math.max(Math.abs(frontLeftPower), Math.abs(frontRightPower)); + max = Math.max(max, Math.abs(backLeftPower)); + max = Math.max(max, Math.abs(backRightPower)); if (max > 1.0) { - leftFrontPower /= max; - rightFrontPower /= max; - leftBackPower /= max; - rightBackPower /= max; + frontLeftPower /= max; + frontRightPower /= max; + backLeftPower /= max; + backRightPower /= max; } // Send powers to the wheels. - leftFrontDrive.setPower(leftFrontPower); - rightFrontDrive.setPower(rightFrontPower); - leftBackDrive.setPower(leftBackPower); - rightBackDrive.setPower(rightBackPower); + frontLeftDrive.setPower(frontLeftPower); + frontRightDrive.setPower(frontRightPower); + backLeftDrive.setPower(backLeftPower); + backRightDrive.setPower(backRightPower); } /** diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopMecanumFieldRelativeDrive.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopMecanumFieldRelativeDrive.java new file mode 100644 index 000000000000..c5c64d09a2a5 --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopMecanumFieldRelativeDrive.java @@ -0,0 +1,163 @@ +/* Copyright (c) 2025 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.rev.RevHubOrientationOnRobot; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.hardware.IMU; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +/* + * This OpMode illustrates how to program your robot to drive field relative. This means + * that the robot drives the direction you push the joystick regardless of the current orientation + * of the robot. + * + * This OpMode assumes that you have four mecanum wheels, each on its own motor, named: + * front_left_drive, front_right_drive, back_left_drive, back_right_drive + * + * and that the left motors are flipped such that when they turn clockwise the wheel moves backwards + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + * + */ +@TeleOp(name = "Robot: Field Relative Mecanum Drive", group = "Robot") +@Disabled +public class RobotTeleopMecanumFieldRelativeDrive extends OpMode { + // This declares the four motors needed + DcMotor frontLeftDrive; + DcMotor frontRightDrive; + DcMotor backLeftDrive; + DcMotor backRightDrive; + + // This declares the IMU needed to get the current direction the robot is facing + IMU imu; + + @Override + public void init() { + frontLeftDrive = hardwareMap.get(DcMotor.class, "front_left_drive"); + frontRightDrive = hardwareMap.get(DcMotor.class, "front_right_drive"); + backLeftDrive = hardwareMap.get(DcMotor.class, "back_left_drive"); + backRightDrive = hardwareMap.get(DcMotor.class, "back_right_drive"); + + // We set the left motors in reverse, which is needed for drive trains where the left + // motors are opposite to the right ones. + backLeftDrive.setDirection(DcMotor.Direction.REVERSE); + frontLeftDrive.setDirection(DcMotor.Direction.REVERSE); + + // This uses RUN_USING_ENCODER to be more accurate.
If you don't have the encoder + // wires, you should remove these + frontLeftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + frontRightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + backLeftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + backRightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + + imu = hardwareMap.get(IMU.class, "imu"); + // This needs to be changed to match the orientation on your robot + RevHubOrientationOnRobot.LogoFacingDirection logoDirection = + RevHubOrientationOnRobot.LogoFacingDirection.UP; + RevHubOrientationOnRobot.UsbFacingDirection usbDirection = + RevHubOrientationOnRobot.UsbFacingDirection.FORWARD; + + RevHubOrientationOnRobot orientationOnRobot = new + RevHubOrientationOnRobot(logoDirection, usbDirection); + imu.initialize(new IMU.Parameters(orientationOnRobot)); + } + + @Override + public void loop() { + telemetry.addLine("Press A to reset Yaw"); + telemetry.addLine("Hold left bumper to drive in robot relative"); + telemetry.addLine("The left joystick sets the robot direction"); + telemetry.addLine("Moving the right joystick left and right turns the robot"); + + // If you press the A button, then you reset the Yaw to be zero from the way + // the robot is currently pointing + if (gamepad1.a) { + imu.resetYaw(); + } + // If you press the left bumper, you get a drive from the point of view of the robot + // (much like driving an RC vehicle) + if (gamepad1.left_bumper) { + drive(-gamepad1.left_stick_y, gamepad1.left_stick_x, gamepad1.right_stick_x); + } else { + driveFieldRelative(-gamepad1.left_stick_y, gamepad1.left_stick_x, gamepad1.right_stick_x); + } + } + + // This routine drives the robot field relative + private void driveFieldRelative(double forward, double right, double rotate) { + // First, convert direction being asked to drive to polar coordinates + double theta = Math.atan2(forward, right); + double r = Math.hypot(right, forward); + + // Second, rotate angle by the angle the robot is pointing + theta = AngleUnit.normalizeRadians(theta - + imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS)); + + // Third, convert back to cartesian + double newForward = r * Math.sin(theta); + double newRight = r * Math.cos(theta); + + // Finally, call the drive method with robot relative forward and right amounts + drive(newForward, newRight, rotate); + } + + // Thanks to FTC16072 for sharing this code!! + public void drive(double forward, double right, double rotate) { + // This calculates the power needed for each wheel based on the amount of forward, + // strafe right, and rotate + double frontLeftPower = forward + right + rotate; + double frontRightPower = forward - right - rotate; + double backRightPower = forward + right - rotate; + double backLeftPower = forward - right + rotate; + + double maxPower = 1.0; + double maxSpeed = 1.0; // make this slower for outreaches + + // This is needed to make sure we don't pass > 1.0 to any wheel + // It allows us to keep all of the motors in proportion to what they should + // be and not get clipped + maxPower = Math.max(maxPower, Math.abs(frontLeftPower)); + maxPower = Math.max(maxPower, Math.abs(frontRightPower)); + maxPower = Math.max(maxPower, Math.abs(backRightPower)); + maxPower = Math.max(maxPower, Math.abs(backLeftPower)); + + // We multiply by maxSpeed so that it can be set lower for outreaches + // When a young child is driving the robot, we may not want to allow full + // speed. 
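+ // A worked example with illustrative numbers: forward = 0.8, right = 0.6, rotate = 0.4 + // gives frontLeftPower = 1.8, so maxPower becomes 1.8 and every wheel power is divided + // by 1.8, preserving the ratios between the wheels rather than clipping the largest one.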
+ frontLeftDrive.setPower(maxSpeed * (frontLeftPower / maxPower)); + frontRightDrive.setPower(maxSpeed * (frontRightPower / maxPower)); + backLeftDrive.setPower(maxSpeed * (backLeftPower / maxPower)); + backRightDrive.setPower(maxSpeed * (backRightPower / maxPower)); + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkIMUNonOrthogonal.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkIMUNonOrthogonal.java new file mode 100644 index 000000000000..cd678efecd99 --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkIMUNonOrthogonal.java @@ -0,0 +1,193 @@ +/* Copyright (c) 2025 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.IMU; +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles; + +import static com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot.xyzOrientation; + +/* + * This OpMode shows how to use the AndyMark IMU sensor. It assumes that the AndyMark IMU is + * configured with the name "imu". + * + * The sample will display the current Yaw, Pitch and Roll of the robot. 
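+ * For example, with the robot sitting level and still facing the direction it started in, + * Yaw, Pitch and Roll should all read near 0 degrees.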
+ * + * With the correct orientation parameters selected, pitch/roll/yaw should act as follows: + * Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) + * Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) + * Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) + * + * The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller). + * + * This specific sample DOES NOT assume that the AndyMark IMU is mounted on one of the three + * orthogonal planes (X/Y, X/Z or Y/Z) OR that the AndyMark IMU has only been rotated in a range of + * 90 degree increments. + * + * Note: if your AndyMark IMU is mounted Orthogonally (on an orthogonal surface, angled at some + * multiple of 90 Degrees), then you should use the simpler SensorAndyMarkIMUOrthogonal sample in + * this folder. + * + * But... If your AndyMark IMU is mounted Non-Orthogonally, you must specify one or more rotational + * angles that transform a "Default" IMU orientation into your desired orientation. That is what is + * illustrated here. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + * + * Finally, edit this OpMode to use at least one angle around an axis to orient your AndyMark IMU. + */ +@TeleOp(name = "Sensor: AndyMark IMU Non-Orthogonal", group = "Sensor") +@Disabled // Comment this out to add to the OpMode list +public class SensorAndyMarkIMUNonOrthogonal extends LinearOpMode +{ + // The AndyMark IMU sensor object + private IMU imu; + + //---------------------------------------------------------------------------------------------- + // Main logic + //---------------------------------------------------------------------------------------------- + + @Override public void runOpMode() throws InterruptedException { + + // Retrieve and initialize the AndyMark IMU. + // This sample expects the AndyMark IMU to be named "imu". + imu = hardwareMap.get(IMU.class, "imu"); + + /* Define how the AndyMark IMU is mounted to the robot to get the correct Yaw, Pitch, and + * Roll values. + * + * You can apply up to three axis rotations to orient your AndyMark IMU according to how + * it's mounted on the robot. + * + * The starting point for these rotations is the "Default" IMU orientation, which is: + * 1) IMU laying flat on a horizontal surface, with the AndyMark logo facing up. + * 2) Rotated such that the I2C port is facing forward on the robot. + * + * The order that the rotations are performed matters, so this sample shows doing them in + * the order X, Y, then Z. + * For specifying non-orthogonal IMU mounting orientations, we must temporarily use axes + * defined relative to the AndyMark IMU itself, instead of the usual Robot Coordinate System + * axes used for the results the AndyMark IMU gives us. In the starting orientation, the + * AndyMark IMU axes are aligned with the Robot Coordinate System: + * + * X Axis: Starting at center of IMU, pointing out towards the side. + * Y Axis: Starting at center of IMU, pointing out towards I2C port. + * Z Axis: Starting at center of IMU, pointing up through the AndyMark logo. + * + * Positive rotation is defined by the right-hand rule, with the thumb pointing in the +ve + * direction on the axis. + * + * Some examples.
+ * + * ---------------------------------------------------------------------------------------------------------------------------------- + * Example A) Assume that the AndyMark IMU is mounted on a sloped plate at the back of the + * robot, with the I2C port coming out the top of the AndyMark IMU. The plate is tilted UP + * 60 degrees from horizontal. + * + * To get the "Default" IMU into this configuration you would just need a single rotation. + * 1) Rotate the AndyMark IMU +60 degrees around the X axis to tilt up the front edge. + * 2) No rotation around the Y or Z axes. + * + * So the X,Y,Z rotations would be 60,0,0 + * + * ---------------------------------------------------------------------------------------------------------------------------------- + * Example B) Assume that the AndyMark IMU is laying flat on the chassis, but it has been + * twisted 30 degrees towards the right front wheel. + * + * To get the "Default" IMU into this configuration you would just need a single rotation, + * but around a different axis. + * 1) No rotation around the X or Y axes. + * 2) Rotate the AndyMark IMU -30 degrees (Clockwise) around the Z axis, since a positive + * angle would be Counter Clockwise. + * + * So the X,Y,Z rotations would be 0,0,-30 + * + * ---------------------------------------------------------------------------------------------------------------------------------- + * Example C) Assume that the AndyMark IMU is mounted on a vertical plate on the right side + * of the robot, with the AndyMark logo facing out, and the AndyMark IMU rotated so that the + * I2C port is facing down 30 degrees towards the back wheels of the robot. + * + * To get the "Default" IMU into this configuration will require several rotations. + * 1) Rotate the AndyMark IMU +90 degrees around the X axis to get it standing upright with + * the logo pointing backwards on the robot + * 2) Next, rotate the AndyMark IMU +90 around the Y axis to get the logo facing to the + * right. + * 3) Finally rotate the AndyMark IMU +120 degrees around the Z axis to take the I2C port + * from vertical to sloping down 30 degrees and facing towards the back of the robot. + * + * So the X,Y,Z rotations would be 90,90,120 + */ + + // The next three lines define the desired axis rotations. + // To Do: EDIT these values to match YOUR mounting configuration. + double xRotation = 0; // enter the desired X rotation angle here. + double yRotation = 0; // enter the desired Y rotation angle here. + double zRotation = 0; // enter the desired Z rotation angle here. + + Orientation imuRotation = xyzOrientation(xRotation, yRotation, zRotation); + + // Now initialize the AndyMark IMU with this mounting orientation. + AndyMarkIMUOrientationOnRobot orientationOnRobot = new AndyMarkIMUOrientationOnRobot(imuRotation); + imu.initialize(new IMU.Parameters(orientationOnRobot)); + + // Loop and update the dashboard. + while (!isStopRequested()) { + telemetry.addData("IMU orientation", "X=%.1f, Y=%.1f, Z=%.1f \n", xRotation, yRotation, zRotation); + + // Check to see if heading reset is requested. + if (gamepad1.y) { + telemetry.addData("Yaw", "Resetting\n"); + imu.resetYaw(); + } else { + telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n"); + } + + // Retrieve rotational angles and velocities. + YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles(); + AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES); + + telemetry.addData("Yaw (Z)", "%.2f Deg. 
(Heading)", orientation.getYaw(AngleUnit.DEGREES)); + telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES)); + telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES)); + telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate); + telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate); + telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate); + telemetry.update(); + } + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkIMUOrthogonal.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkIMUOrthogonal.java new file mode 100644 index 000000000000..58cee6d3cab9 --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkIMUOrthogonal.java @@ -0,0 +1,144 @@ +/* Copyright (c) 2025 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot; +import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot.I2cPortFacingDirection; +import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot.LogoFacingDirection; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.IMU; +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity; +import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles; + +/* + * This OpMode shows how to use the AndyMark IMU sensor. 
It assumes that the AndyMark IMU is + * configured with the name "imu". + * + * The sample will display the current Yaw, Pitch and Roll of the robot. + * + * With the correct orientation parameters selected, pitch/roll/yaw should act as follows: + * Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) + * Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) + * Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) + * + * The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller). + * + * This specific sample assumes that the AndyMark IMU is mounted on one of the three orthogonal + * planes (X/Y, X/Z or Y/Z) and that the AndyMark IMU has only been rotated in a range of 90 degree + * increments. + * + * Note: if your AndyMark IMU is mounted on a surface angled at some non-90 Degree multiple (like + * 30), then you should use the SensorAndyMarkIMUNonOrthogonal sample in this folder. + * + * This "Orthogonal" requirement means that: + * + * 1) The AndyMark logo printed on the top of the AndyMark IMU can ONLY be pointing in one of six directions: + * FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT. + * + * 2) The I2C port can only be pointing in one of the same six directions: + * FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT. + * + * So, to fully define how your AndyMark IMU is mounted to the robot, you must simply specify: + * LogoFacingDirection + * I2cPortFacingDirection + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + * + * Finally, choose the two correct parameters to define how your AndyMark IMU is mounted and edit + * this OpMode to use those parameters. + */ +@TeleOp(name = "Sensor: AndyMark IMU Orthogonal", group = "Sensor") +@Disabled // Comment this out to add to the OpMode list +public class SensorAndyMarkIMUOrthogonal extends LinearOpMode +{ + // The AndyMark IMU sensor object + private IMU imu; + + //---------------------------------------------------------------------------------------------- + // Main logic + //---------------------------------------------------------------------------------------------- + + @Override public void runOpMode() throws InterruptedException { + + // Retrieve and initialize the AndyMark IMU. + // This sample expects the AndyMark IMU to be named "imu". + imu = hardwareMap.get(IMU.class, "imu"); + + /* Define how the AndyMark IMU is mounted to the robot to get the correct Yaw, Pitch, and + * Roll values. + * + * Two input parameters are required to fully specify the Orientation. + * The first parameter specifies the direction the AndyMark logo on the IMU is pointing. + * The second parameter specifies the direction the I2C port on the IMU is pointing. + * All directions are relative to the robot, and left/right is as-viewed from behind the robot. + */ + + /* The next two lines define the IMU orientation. + * To Do: EDIT these two lines to match YOUR mounting configuration. + */ + LogoFacingDirection logoDirection = LogoFacingDirection.UP; + I2cPortFacingDirection i2cDirection = I2cPortFacingDirection.FORWARD; + + AndyMarkIMUOrientationOnRobot orientationOnRobot = new AndyMarkIMUOrientationOnRobot(logoDirection, i2cDirection); + + // Now initialize the AndyMark IMU with this mounting orientation. 
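+ // For example, the Logo UP / I2C FORWARD combination used above describes an IMU mounted + // flat on the chassis with its connector pointing toward the front of the robot.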
+ // Note: if you choose two conflicting directions, this initialization will cause a code exception. + imu.initialize(new IMU.Parameters(orientationOnRobot)); + + // Loop and update the dashboard. + while (!isStopRequested()) { + telemetry.addData("IMU orientation", "Logo=%s I2C=%s\n ", logoDirection, i2cDirection); + + // Check to see if heading reset is requested. + if (gamepad1.y) { + telemetry.addData("Yaw", "Resetting\n"); + imu.resetYaw(); + } else { + telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n"); + } + + // Retrieve rotational angles and velocities. + YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles(); + AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES); + + telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES)); + telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES)); + telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES)); + telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate); + telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate); + telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate); + telemetry.update(); + } + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkTOF.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkTOF.java new file mode 100644 index 000000000000..ce6e943e33e3 --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorAndyMarkTOF.java @@ -0,0 +1,85 @@ +/* +Copyright (c) 2025 FIRST + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted (subject to the limitations in the disclaimer below) provided that +the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of FIRST nor the names of its contributors may be used to +endorse or promote products derived from this software without specific prior +written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS +LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*/ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.andymark.AndyMarkTOF; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DistanceSensor; +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; + +/* + * This OpMode illustrates how to use the AndyMark 2m TOF Lidar Sensor. + * + * The OpMode assumes that the sensor is configured with a name of "sensor_distance". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ +@TeleOp(name = "Sensor: AndyMarkTOF", group = "Sensor") +@Disabled +public class SensorAndyMarkTOF extends LinearOpMode { + + private DistanceSensor sensorDistance; + + @Override + public void runOpMode() { + // you can use this as a regular DistanceSensor. + sensorDistance = hardwareMap.get(DistanceSensor.class, "sensor_distance"); + + // you can also cast this to an AndyMarkTOF if you want to use added + // methods associated with the AndyMarkTOF class. + AndyMarkTOF sensorTimeOfFlight = (AndyMarkTOF) sensorDistance; + + telemetry.addData(">>", "Press start to continue"); + telemetry.update(); + + waitForStart(); + while(opModeIsActive()) { + // generic DistanceSensor methods. + telemetry.addData("deviceName", sensorDistance.getDeviceName() ); + telemetry.addData("range", String.format("%.01f mm", sensorDistance.getDistance(DistanceUnit.MM))); + telemetry.addData("range", String.format("%.01f cm", sensorDistance.getDistance(DistanceUnit.CM))); + telemetry.addData("range", String.format("%.01f m", sensorDistance.getDistance(DistanceUnit.METER))); + telemetry.addData("range", String.format("%.01f in", sensorDistance.getDistance(DistanceUnit.INCH))); + + // AndyMarkTOF specific methods. + telemetry.addData("ID", String.format("%x", sensorTimeOfFlight.getModelID())); + telemetry.addData("did time out", Boolean.toString(sensorTimeOfFlight.didTimeoutOccur())); + + telemetry.update(); + } + } + +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java index 7546c9dea074..d0411af14834 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java @@ -48,18 +48,22 @@ * * There will be some variation in the values measured depending on the specific sensor you are using. * - * You can increase the gain (a multiplier to make the sensor report higher values) by holding down - * the A button on the gamepad, and decrease the gain by holding down the B button on the gamepad. + * If the color sensor supports adjusting the gain, you can increase the gain (a multiplier to make + * the sensor report higher values) by holding down the A button on the gamepad, and decrease the + * gain by holding down the B button on the gamepad. The AndyMark Proximity & Color Sensor does not + * support this. * * If the color sensor has a light which is controllable from software, you can use the X button on * the gamepad to toggle the light on and off.
The REV sensors don't support this, but instead have - * a physical switch on them to turn the light on and off, beginning with REV Color Sensor V2. + * a physical switch on them to turn the light on and off, beginning with REV Color Sensor V2. The + * AndyMark Proximity & Color Sensor does not support this. * * If the color sensor also supports short-range distance measurements (usually via an infrared - * proximity sensor), the reported distance will be written to telemetry. As of September 2020, - * the only color sensors that support this are the ones from REV Robotics. These infrared proximity - * sensor measurements are only useful at very small distances, and are sensitive to ambient light - * and surface reflectivity. You should use a different sensor if you need precise distance measurements. + * proximity sensor), the reported distance will be written to telemetry. As of September 2025, + * the only color sensors that support this are the ones from REV Robotics and the AndyMark + * Proximity & Color Sensor. These infrared proximity sensor measurements are only useful at very + * small distances, and are sensitive to ambient light and surface reflectivity. You should use a + * different sensor if you need precise distance measurements. * * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorGoBildaPinpoint.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorGoBildaPinpoint.java new file mode 100644 index 000000000000..5b0f6e97e38a --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorGoBildaPinpoint.java @@ -0,0 +1,116 @@ +/* +* Copyright (c) 2025 Alan Smith +* +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: + +* The above copyright notice and this permission notice shall be included in all +* copies or substantial portions of the Software. + +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. 
+*/ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.gobilda.GoBildaPinpointDriver; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; +import org.firstinspires.ftc.robotcore.external.navigation.Pose2D; + +/* + * This OpMode illustrates how to use the goBILDA Pinpoint Odometry Computer. + * + * The OpMode assumes that the sensor is configured with a name of "pinpoint". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + * + * See the sensor's product page: https://www.gobilda.com/pinpoint-odometry-computer-imu-sensor-fusion-for-2-wheel-odometry/ + */ +@TeleOp(name = "Sensor: GoBilda Pinpoint", group = "Sensor") +@Disabled +public class SensorGoBildaPinpoint extends OpMode { + // Create an instance of the sensor + GoBildaPinpointDriver pinpoint; + + @Override + public void init() { + // Get a reference to the sensor + pinpoint = hardwareMap.get(GoBildaPinpointDriver.class, "pinpoint"); + + // Configure the sensor + configurePinpoint(); + + // Set the location of the robot - this should be the place you are starting the robot from + pinpoint.setPosition(new Pose2D(DistanceUnit.INCH, 0, 0, AngleUnit.DEGREES, 0)); + } + + @Override + public void loop() { + telemetry.addLine("Push your robot around to see it track"); + telemetry.addLine("Press A to reset the position"); + if(gamepad1.a){ + // You could use readings from AprilTags here to give a new known position to the pinpoint + pinpoint.setPosition(new Pose2D(DistanceUnit.INCH, 0, 0, AngleUnit.DEGREES, 0)); + } + pinpoint.update(); + Pose2D pose2D = pinpoint.getPosition(); + + telemetry.addData("X coordinate (IN)", pose2D.getX(DistanceUnit.INCH)); + telemetry.addData("Y coordinate (IN)", pose2D.getY(DistanceUnit.INCH)); + telemetry.addData("Heading angle (DEGREES)", pose2D.getHeading(AngleUnit.DEGREES)); + } + + public void configurePinpoint(){ + /* + * Set the odometry pod positions relative to the point that you want the position to be measured from. + * + * The X pod offset refers to how far sideways from the tracking point the X (forward) odometry pod is. + * Left of the center is a positive number, right of center is a negative number. + * + * The Y pod offset refers to how far forwards from the tracking point the Y (strafe) odometry pod is. + * Forward of center is a positive number, backwards is a negative number. + */ + pinpoint.setOffsets(-84.0, -168.0, DistanceUnit.MM); //these are tuned for 3110-0002-0001 Product Insight #1 + + /* + * Set the kind of pods used by your robot. If you're using goBILDA odometry pods, select either + * the goBILDA_SWINGARM_POD, or the goBILDA_4_BAR_POD. + * If you're using another kind of odometry pod, uncomment setEncoderResolution and input the + * number of ticks per unit of your odometry pod. For example: + * pinpoint.setEncoderResolution(13.26291192, DistanceUnit.MM); + */ + pinpoint.setEncoderResolution(GoBildaPinpointDriver.GoBildaOdometryPods.goBILDA_4_BAR_POD); + + /* + * Set the direction that each of the two odometry pods count. The X (forward) pod should + * increase when you move the robot forward.
And the Y (strafe) pod should increase when + * you move the robot to the left. + */ + pinpoint.setEncoderDirections(GoBildaPinpointDriver.EncoderDirection.FORWARD, + GoBildaPinpointDriver.EncoderDirection.FORWARD); + + /* + * Before running the robot, recalibrate the IMU. This needs to happen when the robot is stationary. + * The IMU will automatically calibrate when first powered on, but recalibrating before running + * the robot is a good idea to ensure that the calibration is "good". + * resetPosAndIMU will reset the position to 0,0,0 and also recalibrate the IMU. + * This is recommended before you run your autonomous, as a bad initial calibration can cause + * an incorrect starting value for x, y, and heading. + */ + pinpoint.resetPosAndIMU(); + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorLimelight3A.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorLimelight3A.java index 6c1f702a18c2..2579cd033afd 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorLimelight3A.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorLimelight3A.java @@ -100,7 +100,7 @@ public void runOpMode() throws InterruptedException status.getPipelineIndex(), status.getPipelineType()); LLResult result = limelight.getLatestResult(); - if (result != null) { + if (result != null && result.isValid()) { // Access general information Pose3D botpose = result.getBotpose(); double captureLatency = result.getCaptureLatency(); @@ -109,44 +109,42 @@ telemetry.addData("LL Latency", captureLatency + targetingLatency); telemetry.addData("Parse Latency", parseLatency); telemetry.addData("PythonOutput", java.util.Arrays.toString(result.getPythonOutput())); - - if (result.isValid()) { - telemetry.addData("tx", result.getTx()); - telemetry.addData("txnc", result.getTxNC()); - telemetry.addData("ty", result.getTy()); - telemetry.addData("tync", result.getTyNC()); - - telemetry.addData("Botpose", botpose.toString()); - - // Access barcode results - List<LLResultTypes.BarcodeResult> barcodeResults = result.getBarcodeResults(); - for (LLResultTypes.BarcodeResult br : barcodeResults) { - telemetry.addData("Barcode", "Data: %s", br.getData()); - } - - // Access classifier results - List<LLResultTypes.ClassifierResult> classifierResults = result.getClassifierResults(); - for (LLResultTypes.ClassifierResult cr : classifierResults) { - telemetry.addData("Classifier", "Class: %s, Confidence: %.2f", cr.getClassName(), cr.getConfidence()); - } - - // Access detector results - List<LLResultTypes.DetectorResult> detectorResults = result.getDetectorResults(); - for (LLResultTypes.DetectorResult dr : detectorResults) { - telemetry.addData("Detector", "Class: %s, Area: %.2f", dr.getClassName(), dr.getTargetArea()); - } - - // Access fiducial results - List<LLResultTypes.FiducialResult> fiducialResults = result.getFiducialResults(); - for (LLResultTypes.FiducialResult fr : fiducialResults) { - telemetry.addData("Fiducial", "ID: %d, Family: %s, X: %.2f, Y: %.2f", fr.getFiducialId(), fr.getFamily(),fr.getTargetXDegrees(), fr.getTargetYDegrees()); - } - - // Access color results - List<LLResultTypes.ColorResult> colorResults = result.getColorResults(); - for (LLResultTypes.ColorResult cr : colorResults) { - telemetry.addData("Color", "X: %.2f, Y: %.2f", cr.getTargetXDegrees(), cr.getTargetYDegrees()); - } + + telemetry.addData("tx", result.getTx()); + telemetry.addData("txnc", result.getTxNC()); + telemetry.addData("ty", result.getTy()); + 
telemetry.addData("tync", result.getTyNC()); + + telemetry.addData("Botpose", botpose.toString()); + + // Access barcode results + List barcodeResults = result.getBarcodeResults(); + for (LLResultTypes.BarcodeResult br : barcodeResults) { + telemetry.addData("Barcode", "Data: %s", br.getData()); + } + + // Access classifier results + List classifierResults = result.getClassifierResults(); + for (LLResultTypes.ClassifierResult cr : classifierResults) { + telemetry.addData("Classifier", "Class: %s, Confidence: %.2f", cr.getClassName(), cr.getConfidence()); + } + + // Access detector results + List detectorResults = result.getDetectorResults(); + for (LLResultTypes.DetectorResult dr : detectorResults) { + telemetry.addData("Detector", "Class: %s, Area: %.2f", dr.getClassName(), dr.getTargetArea()); + } + + // Access fiducial results + List fiducialResults = result.getFiducialResults(); + for (LLResultTypes.FiducialResult fr : fiducialResults) { + telemetry.addData("Fiducial", "ID: %d, Family: %s, X: %.2f, Y: %.2f", fr.getFiducialId(), fr.getFamily(), fr.getTargetXDegrees(), fr.getTargetYDegrees()); + } + + // Access color results + List colorResults = result.getColorResults(); + for (LLResultTypes.ColorResult cr : colorResults) { + telemetry.addData("Color", "X: %.2f, Y: %.2f", cr.getTargetXDegrees(), cr.getTargetYDegrees()); } } else { telemetry.addData("Limelight", "No data available"); diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuad.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuad.java index f797c6b0dc72..312d7f5110e3 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuad.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuad.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024 DigitalChickenLabs + * Copyright (c) 2025 DigitalChickenLabs * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -25,20 +25,28 @@ import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; import com.qualcomm.robotcore.eventloop.opmode.TeleOp; import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.util.ElapsedTime; + import org.firstinspires.ftc.robotcore.external.Telemetry; /* - * This OpMode illustrates how to use the DigitalChickenLabs OctoQuad Quadrature Encoder & Pulse Width Interface Module + * This OpMode illustrates how to use DigitalChickenLabs OctoQuad Quad Encoder & Pulse Width I/F Module * - * The OctoQuad has 8 input channels that can used to read either Relative Quadrature Encoders or Pulse-Width Absolute Encoder inputs. - * Relative Quadrature encoders are found on most FTC motors, and some stand-alone position sensors like the REV Thru-Bore encoder. - * Pulse-Width encoders are less common. The REV Thru-Bore encoder can provide its absolute position via a variable pulse width, + * The OctoQuad has 8 input channels that can used to read either Relative Quadrature Encoders or + * Pulse-Width Absolute Encoder inputs. Relative Quadrature encoders are found on most FTC motors, + * and some stand-alone position sensors like the REV Thru-Bore encoder. Pulse-Width encoders are + * less common. 
The REV Thru-Bore encoder can provide its absolute position via a variable pulse width, * as can several sonar rangefinders such as the MaxBotix MB1000 series. * - * This basic sample shows how an OctoQuad can be used to read the position three Odometry pods fitted - * with REV Thru-Bore encoders. For a more advanced example showing additional OctoQuad capabilities, see the SensorOctoQuadAdv sample. + * Note: SDK 11.0+ requires that the OctoQuad is running firmware V3.0 or greater. + * Visit https://github.com/DigitalChickenLabs/OctoQuad/tree/master/firmware for instructions + * on how to upgrade your OctoQuad's firmware. + * + * This basic sample shows how an OctoQuad can be used to read the position of three Odometry pods + * fitted with REV Thru-Bore encoders. For a more advanced example with additional OctoQuad + * capabilities, see the SensorOctoQuadAdv sample. + - * This OpMode assumes that the OctoQuad is attached to an I2C interface named "octoquad" in the robot configuration. + * This OpMode assumes the OctoQuad is attached to an I2C interface named "octoquad" in the robot config. * * The code assumes the first three OctoQuad inputs are connected as follows * - Chan 0: for measuring forward motion on the left side of the robot. @@ -54,21 +62,24 @@ * * See the sensor's product page: https://www.tindie.com/products/35114/ */ -@TeleOp(name = "OctoQuad Basic", group="OctoQuad") @Disabled +@TeleOp(name = "OctoQuad Basic", group="OctoQuad") public class SensorOctoQuad extends LinearOpMode { // Identify which encoder OctoQuad inputs are connected to each odometry pod. - private final int ODO_LEFT = 0; // Facing forward direction on left side of robot (Axial motion) - private final int ODO_RIGHT = 1; // Facing forward direction on right side or robot (Axial motion) - private final int ODO_PERP = 2; // Facing perpendicular direction at the center of the robot (Lateral motion) + private final int ODO_LEFT = 0; // Facing forward direction on left side of robot + private final int ODO_RIGHT = 1; // Facing forward direction on right side of robot + private final int ODO_PERP = 2; // Facing perpendicular direction at the center of the robot - // Declare the OctoQuad object and members to store encoder positions and velocities + // Declare the OctoQuad object. private OctoQuad octoquad; private int posLeft; private int posRight; private int posPerp; + private int velLeft; + private int velRight; + private int velPerp; /** * This function is executed when this OpMode is selected from the Driver Station. @@ -83,12 +94,15 @@ public void runOpMode() { telemetry.addData("OctoQuad Firmware Version ", octoquad.getFirmwareVersion()); // Reverse the count-direction of any encoder that is not what you require. - // e.g. if you push the robot forward and the left encoder counts down, then reverse it so it counts up. + // e.g. if you push the robot forward and the left encoder counts down, then reverse it. octoquad.setSingleEncoderDirection(ODO_LEFT, OctoQuad.EncoderDirection.REVERSE); octoquad.setSingleEncoderDirection(ODO_RIGHT, OctoQuad.EncoderDirection.FORWARD); octoquad.setSingleEncoderDirection(ODO_PERP, OctoQuad.EncoderDirection.FORWARD); - // Any changes that are made should be saved in FLASH just in case there is a sensor power glitch. + // Set the interval over which pulses are counted to determine velocity. + octoquad.setAllVelocitySampleIntervals(50); // 50 mSec means 20 velocity updates per second. + + // Save any changes that are made, just in case there is a sensor power glitch.
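+ // saveParametersToFlash() stores the directions and sample interval set above in + // non-volatile memory, so they are restored automatically after a power glitch.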
octoquad.saveParametersToFlash(); telemetry.addLine("\nPress START to read encoder values"); @@ -98,7 +112,7 @@ public void runOpMode() { // Configure the telemetry for optimal display of data. telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE); - telemetry.setMsTransmissionInterval(50); + telemetry.setMsTransmissionInterval(100); // Set all the encoder inputs to zero. octoquad.resetAllPositions(); @@ -117,25 +131,26 @@ public void runOpMode() { readOdometryPods(); // Display the values. - telemetry.addData("Left ", "%8d counts", posLeft); - telemetry.addData("Right", "%8d counts", posRight); - telemetry.addData("Perp ", "%8d counts", posPerp); + telemetry.addData("Left P", "%7d V :%6d CPS ", posLeft, velLeft); + telemetry.addData("Right P", "%7d V :%6d CPS ", posRight, velRight); + telemetry.addData("Perp P", "%7d V :%6d CPS ", posPerp, velPerp); telemetry.update(); } } private void readOdometryPods() { // For best performance, we should only perform ONE transaction with the OctoQuad each cycle. - // Since this example only needs to read positions from a few channels, we could use either - // readPositionRange(idxFirst, idxLast) to get a select number of sequential channels - // or - // readAllPositions() to get all 8 encoder readings + // This can be achieved in one of two ways: + // 1) by doing a block data read once per control cycle + // or + // 2) by doing individual caching reads, but only reading each encoder value ONCE per cycle. // - // Since both calls take almost the same amount of time, and the actual channels may not end up - // being sequential, we will read all of the encoder positions, and then pick out the ones we need. - int[] positions = octoquad.readAllPositions(); - posLeft = positions[ODO_LEFT]; - posRight = positions[ODO_RIGHT]; - posPerp = positions[ODO_PERP]; + // Since method 2 is simplest, we will use it here. + posLeft = octoquad.readSinglePosition_Caching(ODO_LEFT); + posRight = octoquad.readSinglePosition_Caching(ODO_RIGHT); + posPerp = octoquad.readSinglePosition_Caching(ODO_PERP); + velLeft = octoquad.readSingleVelocity_Caching(ODO_LEFT) * 20; // scale up to cps + velRight = octoquad.readSingleVelocity_Caching(ODO_RIGHT) * 20; // scale up to cps + velPerp = octoquad.readSingleVelocity_Caching(ODO_PERP) * 20; // scale up to cps } } diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuadAdv.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuadAdv.java index e763b9aac2da..0e412ef4191c 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuadAdv.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuadAdv.java @@ -22,7 +22,6 @@ package org.firstinspires.ftc.robotcontroller.external.samples; import com.qualcomm.hardware.digitalchickenlabs.OctoQuad; -import com.qualcomm.hardware.digitalchickenlabs.OctoQuadBase; import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; import com.qualcomm.robotcore.eventloop.opmode.TeleOp; import com.qualcomm.robotcore.util.ElapsedTime; @@ -37,50 +36,53 @@ import java.util.List; /* - * This OpMode illustrates how to use advanced features of the DigitalChickenLabs OctoQuad Quadrature Encoder & Pulse Width Interface Module + * This OpMode illustrates how to use advanced features of the DigitalChickenLabs OctoQuad Quadrature + * Encoder & Pulse Width Interface Module. 
The OctoQuad has 8 input channels that can be used to read
+ * either Quadrature Encoder signals (like with most FTC motors) or Pulse-Width style Absolute Encoder
+ * inputs (eg: REV Through Bore encoder pulse width output).
 *
- * The OctoQuad has 8 input channels that can used to read either Quadrature Encoder signals (like with most FTC motors)
- * or Pulse-Width style Absolute Encoder inputs (eg: REV Through Bore encoder pulse width output).
+ * This OpMode illustrates several of the more advanced features of an OctoQuad, including Pulse Width
+ * measurement and velocity measurement. For a more basic OpMode just showing how to read encoder
+ * inputs, see the SensorOctoQuad sample.
 *
- * This OpMode illustrates several of the more advanced features of an OctoQuad, including Pulse Width measurement and velocity measurement.
- * For a more basic OpMode just showing how to read encoder inputs, see the SensorOctoQuad sample.
- *
- * This OpMode assumes that the OctoQuad is attached to an I2C interface named "octoquad" in the robot configuration.
+ * This OpMode assumes the OctoQuad is attached to an I2C interface named "octoquad" in the robot config.
 *
 * One system that requires a lot of encoder inputs is a Swerve Drive system.
- * Such a drive requires two encoders per drive module: one for position & velocity of the Drive motor/wheel, and one for position/angle of the Steering motor.
- * The Drive motor usually requires a quadrature encoder, and the Steering motor requires an Absolute encoder.
- * This quantity and combination of encoder inputs is a challenge, but a single OctoQuad could be used to read them all.
+ * Such a drive requires two encoders per drive module:
+ * One encoder for the Drive motor/wheel position/velocity, and one for Steer motor angle.
+ * The Drive motor requires a quadrature encoder, and the Steering motor requires an Absolute encoder.
 *
 * This sample will configure an OctoQuad for a swerve drive, as follows
 *  - Configure 4 Relative Quadrature Encoder inputs and 4 Absolute Pulse-Width Encoder inputs
 *  - Configure a velocity sample interval of 25 mSec
- *  - Configure a pulse width input range of 1-1024 uSec for a REV Through Bore Encoder's Absolute Pulse output.
+ *  - Configure a pulse width input range of 1-1024 uSec for a REV Encoder's Absolute Pulse output.
 *
- * An OctoSwerveDrive class will be created to initialize the OctoQuad, and manage the 4 swerve modules.
+ * An OctoSwerveDrive class will be created to initialize the OctoQuad, and manage 4 swerve modules.
 * An OctoSwerveModule class will be created to configure and read a single swerve module.
 *
 * Wiring:
- * The OctoQuad will be configured to accept Quadrature encoders on the first four channels and Absolute (pulse width) encoders on the last four channels.
+ * The OctoQuad will be configured to accept Quadrature encoders on the first four channels and
+ * Absolute (pulse width) encoders on the last four channels.
+ *
+ * The standard encoder cable will connect each Drive Motor encoder to the OctoQuad. (channels 0-3)
 *
- * The standard 4-pin to 4-pin cable can be used to connect each Driver Motor encoder to the OctoQuad. (channels 0-3)
+ * A modified version of the REV 6-4 pin cable (shipped with the encoder) connects the steering encoder
+ * to the OctoQuad. (channels 4-7)
+
- * A modified version of the REV 6-4 pin cable (shipped with the encoder) connects the steering encoder to the OctoQuad.
(channels 4-7) - * To connect the Absolute position signal from a REV Thru-Bore encoder to the OctoQuad, the Provided 6-pin to 4-pin cable will need to be modified. - * At the 6-pin connector end, move the yellow wire from its initial pin-3 position to the ABS pin-5 position. This can be done easily - * by using a small flathead screwdriver to lift the small white tab holding the metal wire crimp in place and gently pulling the wire out. + * To connect the Absolute position signal from a REV Thru-Bore encoder to the OctoQuad, the + * Provided 6-pin to 4-pin cable will need to be modified. + * At the 6-pin connector end, move the yellow wire from its initial pin-3 position to the + * ABS pin-5 position. This can be done easily by using a small flathead screwdriver to lift the + * small white tab holding the metal wire crimp in place and gently pulling the wire out. * See the OctoSwerveDrive() constructor below for the correct wheel/channel assignment. * * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list * - * Note: If you prefer, you can move the two support classes from this file, and place them in their own files. - * But leaving them in place is simpler for this example. - * * See the sensor's product page: https://www.tindie.com/products/35114/ */ -@TeleOp(name="OctoQuad Advanced", group="OctoQuad") @Disabled +@TeleOp(name="OctoQuad Advanced", group="OctoQuad") public class SensorOctoQuadAdv extends LinearOpMode { @Override @@ -161,22 +163,25 @@ public OctoSwerveDrive(OctoQuad octoquad) { // Create the four OctoSwerveModules, and add them to a 'list' for easier reference. - // Note: The wheel/channel order is set here. Ensure your encoder cables match. + // Notes: + // The wheel/channel order is set here. Ensure your encoder cables match. // - // Note: The angleOffset must be set for each module so a forward facing wheel shows a steer angle of 0 degrees. - // The process for determining the correct angleOffsets is as follows: - // 1) Set all the angleValues (below) to zero then build and deploy the code. - // 2) Physically rotate all the swerve drives so the wheels are facing forward in the desired 0 degree position - // 3) Run the OpMode, view the telemetry and record the "Degrees" value for each module. - // 4) Update the code by entering the recorded Degrees value for each module as the angleOffset (last) parameter in the lines below. + // The angleOffset must be set for each module so a forward facing wheel shows a steer + // angle of 0 degrees. The process for determining the correct angleOffsets is as follows: + // 1) Set all the angleValues (below) to zero then build and deploy the code. + // 2) Rotate all the swerve drives so the wheels are forward in the desired 0 degree position + // 3) Run the OpMode, view the telemetry and record the "Degrees" value for each module. + // 4) Update the code by entering the recorded Degrees value for each module as the + // angleOffset (last) parameter in the lines below. // - // Rebuild and deploy the new code. Verify that the telemetry now indicates 0 degrees when the wheels are facing forward. - // Also verify that the correct module values change appropriately when you manually spin (drive) and rotate (steer) a wheel. + // Rebuild and deploy the new code. Verify that the telemetry now indicates 0 degrees when + // the wheels are facing forward. 
Also verify that the correct module values change + // appropriately when you manually spin (drive) and rotate (steer) a wheel. - allModules.add(LeftFront = new OctoSwerveModule(octoquad, "LF ",0,0));// Drive = 0, Steer = 4 - allModules.add(RightFront = new OctoSwerveModule(octoquad, "RF ",1,0));// Drive = 1, Steer = 5 - allModules.add(LeftBack = new OctoSwerveModule(octoquad, "LB ",2,0));// Drive = 2, Steer = 6 - allModules.add(RightBack = new OctoSwerveModule(octoquad, "RB ",3,0));// Drive = 3, Steer = 7 + allModules.add(LeftFront = new OctoSwerveModule(octoquad, "LF ",0,0));// Drive=0, Steer=4 + allModules.add(RightFront = new OctoSwerveModule(octoquad, "RF ",1,0));// Drive=1, Steer=5 + allModules.add(LeftBack = new OctoSwerveModule(octoquad, "LB ",2,0));// Drive=2, Steer=6 + allModules.add(RightBack = new OctoSwerveModule(octoquad, "RB ",3,0));// Drive=3, Steer=7 // now make sure the settings persist through any power glitches. octoquad.saveParametersToFlash(); @@ -186,7 +191,7 @@ public OctoSwerveDrive(OctoQuad octoquad) { * Updates all 4 swerve modules */ public void updateModules() { - // Read full OctoQuad data block and then pass DataBlock to each swerve module to update themselves. + // Read full OctoQuad data block and then pass it to each swerve module to update themselves. octoquad.readAllEncoderData(encoderDataBlock); for (OctoSwerveModule module : allModules) { module.updateModule(encoderDataBlock); @@ -219,39 +224,41 @@ class OctoSwerveModule { private final String name; private final int channel; private final double angleOffset; - private final double steerDirMult; - private static final int VELOCITY_SAMPLE_INTERVAL_MS = 25; // To provide 40 updates per second. - private static final double DEGREES_PER_US = (360.0 / 1024.0); // based on REV Through Bore Encoder + private static final int VELOCITY_SAMPLE_INTERVAL_MS = 25; // To provide 40 updates/Sec. + private static final double DEGREES_PER_US = (360.0 / 1024.0); // REV Through Bore Encoder private static final double VELOCITY_SAMPLES_PER_S = (1000.0 / VELOCITY_SAMPLE_INTERVAL_MS); - // The correct drive and turn directions must be set for the Swerve Module based on the specific hardware geometry. + // The correct drive and turn directions must be set for the Swerve Module. // Forward motion must generate an increasing drive count. // Counter Clockwise steer rotation must generate an increasing Steer Angle (degrees) - private static final boolean INVERT_DRIVE_ENCODER = false; // Set true if forward motion decreases drive "Count" - private static final boolean INVERT_STEER_ENCODER = false; // Set true if counter clockwise steer action decreases steer "Degree" + private static final boolean INVERT_DRIVE_ENCODER = false; + private static final boolean INVERT_STEER_ENCODER = false; /*** * @param octoquad provide access to configure OctoQuad * @param name name used for telemetry display * @param quadChannel Quadrature encoder channel. Pulse Width channel is this + 4 - * @param angleOffset Angle to subtract from absolute encoder to calibrate zero position. (see comments above) + * @param angleOffset Angle to subtract from absolute encoder to calibrate zero position. */ public OctoSwerveModule (OctoQuad octoquad, String name, int quadChannel, double angleOffset) { this.name = name; this.channel = quadChannel; this.angleOffset = angleOffset; - this.steerDirMult = INVERT_STEER_ENCODER ? -1 : 1 ; // create a multiplier to flip the steer angle. - // Set the drive encoder direction. 
Note the absolute encoder does not have built-in direction inversion. - octoquad.setSingleEncoderDirection(channel, INVERT_DRIVE_ENCODER ? OctoQuad.EncoderDirection.REVERSE : OctoQuad.EncoderDirection.FORWARD); + // Set both encoder directions. + octoquad.setSingleEncoderDirection(channel, + INVERT_DRIVE_ENCODER ? OctoQuad.EncoderDirection.REVERSE : OctoQuad.EncoderDirection.FORWARD); + octoquad.setSingleEncoderDirection(channel + 4, + INVERT_STEER_ENCODER ? OctoQuad.EncoderDirection.REVERSE : OctoQuad.EncoderDirection.FORWARD); // Set the velocity sample interval on both encoders octoquad.setSingleVelocitySampleInterval(channel, VELOCITY_SAMPLE_INTERVAL_MS); octoquad.setSingleVelocitySampleInterval(channel + 4, VELOCITY_SAMPLE_INTERVAL_MS); // Setup Absolute encoder pulse range to match REV Through Bore encoder. - octoquad.setSingleChannelPulseWidthParams (channel + 4, new OctoQuad.ChannelPulseWidthParams(1,1024)); + octoquad.setSingleChannelPulseWidthParams (channel + 4, + new OctoQuad.ChannelPulseWidthParams(1,1024)); } /*** @@ -259,13 +266,15 @@ public OctoSwerveModule (OctoQuad octoquad, String name, int quadChannel, double * @param encoderDataBlock most recent full data block read from OctoQuad. */ public void updateModule(OctoQuad.EncoderDataBlock encoderDataBlock) { - driveCounts = encoderDataBlock.positions[channel]; // get Counts. - driveCountsPerSec = encoderDataBlock.velocities[channel] * VELOCITY_SAMPLES_PER_S; // convert counts/interval to counts/sec + driveCounts = encoderDataBlock.positions[channel]; + driveCountsPerSec = encoderDataBlock.velocities[channel] * VELOCITY_SAMPLES_PER_S; // convert uS to degrees. Add in any possible direction flip. - steerDegrees = AngleUnit.normalizeDegrees((encoderDataBlock.positions[channel+ 4] * DEGREES_PER_US * steerDirMult) - angleOffset); + steerDegrees = AngleUnit.normalizeDegrees( + (encoderDataBlock.positions[channel+ 4] * DEGREES_PER_US) - angleOffset); // convert uS/interval to deg per sec. Add in any possible direction flip. 
- steerDegreesPerSec = encoderDataBlock.velocities[channel + 4] * DEGREES_PER_US * steerDirMult * VELOCITY_SAMPLES_PER_S; + steerDegreesPerSec = encoderDataBlock.velocities[channel + 4] * + DEGREES_PER_US * VELOCITY_SAMPLES_PER_S; } /** @@ -273,6 +282,7 @@ public void updateModule(OctoQuad.EncoderDataBlock encoderDataBlock) { * @param telemetry OpMode Telemetry object */ public void show(Telemetry telemetry) { - telemetry.addData(name, "%8.0f %7.0f %7.0f %6.0f", driveCounts, driveCountsPerSec, steerDegrees, steerDegreesPerSec); + telemetry.addData(name, "%8.0f %7.0f %7.0f %6.0f", + driveCounts, driveCountsPerSec, steerDegrees, steerDegreesPerSec); } } diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuadLocalization.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuadLocalization.java new file mode 100644 index 000000000000..486c4681b62e --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorOctoQuadLocalization.java @@ -0,0 +1,255 @@ +/* + * Copyright (c) 2025 DigitalChickenLabs + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.digitalchickenlabs.OctoQuad; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.Telemetry; + +/* + * This OpMode demonstrates how to use the "absolute localizer" feature of the + * Digital Chicken OctoQuad FTC Edition MK2. + * + * Note: The MK2 OctoQuad has an Inertial Measurement Unit (IMU) that the MK1 does not, so this + * code will ONLY work with the MK2 version. + * + * It is STRONGLY recommended to read the Quick Start guide for the localizer feature, located here: + * https://github.com/DigitalChickenLabs/OctoQuad/blob/master/documentation/ + * + * This OpMode assumes that the OctoQuad is attached to an I2C interface named "octoquad" in the + * robot configuration. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. 
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + * + * See the sensor's product page: https://www.tindie.com/products/37799/ + */ + +@Disabled +@TeleOp(name="OctoQuad Localizer", group="OctoQuad") +public class SensorOctoQuadLocalization extends LinearOpMode +{ + // ##################################################################################### + // + // YOU MUST ADJUST THESE CONSTANTS FOR YOUR ROBOT! + // SEE THE QUICKSTART GUIDE: + // https://github.com/DigitalChickenLabs/OctoQuad/blob/master/documentation/ + // + // AND THE TUNING OPMODES: + // https://github.com/DigitalChickenLabs/OctoQuad/blob/master/code_examples/FTC + // + // ##################################################################################### + static final int DEADWHEEL_PORT_X = 0; // encoder port on OctoQuad + static final int DEADWHEEL_PORT_Y = 1; // encoder port on OctoQuad + static final OctoQuad.EncoderDirection DEADWHEEL_X_DIR = OctoQuad.EncoderDirection.FORWARD; + static final OctoQuad.EncoderDirection DEADWHEEL_Y_DIR = OctoQuad.EncoderDirection.REVERSE; + static final float X_TICKS_PER_MM = 12.34f; // eg. 19.89f for a goBILDA 4-Bar Odometry Pod + static final float Y_TICKS_PER_MM = 12.34f; // eg. 19.89f for a goBILDA 4-Bar Odometry Pod + static final float TCP_OFFSET_X_MM = 123.4f; // eg. 147.0f from QuickStart Guide diagram + static final float TCP_OFFSET_Y_MM = 123.4f; // eg.-158.0f from QuickStart Guide diagram + static final float IMU_SCALAR = 1.0f; // Rotational scale factor. + // ##################################################################################### + + // Conversion factor for radians --> degrees + static final double RAD2DEG = 180/Math.PI; + + // For tracking the number of CRC mismatches + int badPackets = 0; + int totalPackets = 0; + boolean warn = false; + + // Data structure which will store the localizer data read from the OctoQuad + OctoQuad.LocalizerDataBlock localizer = new OctoQuad.LocalizerDataBlock(); + + /* + * Main OpMode function + */ + public void runOpMode() + { + // Begin by retrieving a handle to the device from the hardware map. + OctoQuad oq = hardwareMap.get(OctoQuad.class, "octoquad"); + + // Increase the telemetry update frequency to make the data display a bit less laggy + telemetry.setMsTransmissionInterval(50); + telemetry.setDisplayFormat(Telemetry.DisplayFormat.HTML); + + // Configure a range of parameters for the absolute localizer + // --> Read the quick start guide for an explanation of these!! + // IMPORTANT: these parameter changes will not take effect until the localizer is reset! + oq.setSingleEncoderDirection(DEADWHEEL_PORT_X, DEADWHEEL_X_DIR); + oq.setSingleEncoderDirection(DEADWHEEL_PORT_Y, DEADWHEEL_Y_DIR); + oq.setLocalizerPortX(DEADWHEEL_PORT_X); + oq.setLocalizerPortY(DEADWHEEL_PORT_Y); + oq.setLocalizerCountsPerMM_X(X_TICKS_PER_MM); + oq.setLocalizerCountsPerMM_Y(Y_TICKS_PER_MM); + oq.setLocalizerTcpOffsetMM_X(TCP_OFFSET_X_MM); + oq.setLocalizerTcpOffsetMM_Y(TCP_OFFSET_Y_MM); + oq.setLocalizerImuHeadingScalar(IMU_SCALAR); + oq.setLocalizerVelocityIntervalMS(25); + oq.setI2cRecoveryMode(OctoQuad.I2cRecoveryMode.MODE_1_PERIPH_RST_ON_FRAME_ERR); + + // Resetting the localizer will apply the parameters configured above. 
+        // This function will NOT block until calibration of the IMU is complete -
+        // for that you need to look at the status returned by getLocalizerStatus()
+        oq.resetLocalizerAndCalibrateIMU();
+
+        /*
+         * Init Loop
+         */
+        while (opModeInInit())
+        {
+            telemetry.addData("OQ Firmware Version", oq.getFirmwareVersionString());
+            telemetry.addData("Localizer Status", oq.getLocalizerStatus());
+            telemetry.addData("Heading Axis Detection", oq.getLocalizerHeadingAxisChoice());
+            telemetry.addLine(" ");
+
+            warnIfNotTuned();
+
+            telemetry.addLine("Press Play to start navigating");
+            telemetry.update();
+
+            sleep(100);
+        }
+
+        /*
+         * Don't proceed to the main loop until calibration is complete
+         */
+        while (!isStopRequested() && (oq.getLocalizerStatus() != OctoQuad.LocalizerStatus.RUNNING))
+        {
+            telemetry.addLine("Waiting for IMU Calibration to complete\n");
+            telemetry.addData("Localizer Status", oq.getLocalizerStatus());
+            telemetry.update();
+            sleep(100);
+        }
+
+        // Establish a starting position for the robot. This will be 0,0,0 by default so this line
+        // is rather redundant, but you could change it to be anything you want
+        oq.setLocalizerPose(0, 0, 0);
+
+        // Not required for localizer, but left in here since raw counts are displayed
+        // on telemetry for debugging
+        oq.resetAllPositions();
+
+        /*
+         * MAIN LOOP
+         */
+        while (opModeIsActive())
+        {
+            // Use the Gamepad A/Cross button to teleport to a new location and heading
+            if (gamepad1.crossWasPressed())
+            {
+                oq.setLocalizerPose(200, 200, (float)(Math.PI/2.0f));
+            }
+
+            // Read updated data from the OctoQuad into the 'localizer' data structure
+            oq.readLocalizerData(localizer);
+
+            // #################################################################################
+            // IMPORTANT! Check whether the received CRC for the data is correct. An incorrect
+            //            CRC indicates that the data was corrupted in transit and cannot be
+            //            trusted. This can be caused by internal or external ESD.
+            //
+            //            If the CRC is NOT reported as OK, you should throw away the data and
+            //            try to read again.
+            //
+            // NOTE: Raw odometry pod counts are displayed to verify correct direction of rotation
+            //       When the robot is pushed FWD, the X pod counts must increase in value
+            //       When the robot is pushed LEFT, the Y pod counts must increase in value
+            //       Use the setSingleEncoderDirection() method to make any reversals.
+            // #################################################################################
+            if (localizer.crcOk)
+            {
+                warnIfNotTuned();
+
+                // Display Robot position data
+                telemetry.addData("Localizer Status", localizer.localizerStatus);
+                telemetry.addData("Heading deg", "%.2f", localizer.heading_rad * RAD2DEG);
+                telemetry.addData("Rotation dps", "%.2f", localizer.velHeading_radS * RAD2DEG);
+                telemetry.addData("X Pos mm", localizer.posX_mm);
+                telemetry.addData("Y Pos mm", localizer.posY_mm);
+                telemetry.addData("X Vel mm/s", localizer.velX_mmS);
+                telemetry.addData("Y Vel mm/s", localizer.velY_mmS);
+                telemetry.addLine("\nPress Gamepad A (Cross) to teleport");
+
+                // #############################################################################
+                // IMPORTANT!!
+                //
+                // These two encoder status lines will slow the loop down,
+                // so they can be removed once the encoder direction has been verified.
+                // #############################################################################
+                telemetry.addData("\nRaw X Pod Counts", oq.readSinglePosition_Caching(DEADWHEEL_PORT_X));
+                telemetry.addData("Raw Y Pod Counts", oq.readSinglePosition_Caching(DEADWHEEL_PORT_Y));
+            }
+            else
+            {
+                badPackets++;
+                telemetry.addLine("Data CRC not valid");
+            }
+            totalPackets++;
+
+            // Print some statistics about CRC validation
+            telemetry.addLine(String.format("%d CRC error(s) in %d packets", badPackets, totalPackets));
+
+            // Send updated telemetry to the Driver Station
+            telemetry.update();
+        }
+    }
+
+    long lastWarnFlashTimeMs = 0;
+    boolean warnFlash = false;
+
+    void warnIfNotTuned()
+    {
+        String warnString = "";
+        if (IMU_SCALAR == 1.0f)
+        {
+            warnString += "WARNING: IMU_SCALAR not tuned.<br>";
+            warn = true;
+        }
+        if (X_TICKS_PER_MM == 12.34f || Y_TICKS_PER_MM == 12.34f)
+        {
+            warnString += "WARNING: TICKS_PER_MM not tuned.<br>";
+            warn = true;
+        }
+        if (TCP_OFFSET_X_MM == 123.4f || TCP_OFFSET_Y_MM == 123.4f)
+        {
+            warnString += "WARNING: TCP_OFFSET not tuned.<br>";
+            warn = true;
+        }
+        if (warn)
+        {
+            warnString += "<br> - Read the code COMMENTS for tuning help.<br>";
+
+            if (System.currentTimeMillis() - lastWarnFlashTimeMs > 1000)
+            {
+                lastWarnFlashTimeMs = System.currentTimeMillis();
+                warnFlash = !warnFlash;
+            }
+
+            telemetry.addLine(String.format("<font color=%s>%s</font>",
+                    warnFlash ? "red" : "white", warnString));
+        }
+    }
+}
diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/UtilityOctoQuadConfigMenu.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/UtilityOctoQuadConfigMenu.java
index a962919fcfe7..60be6c9f2c36 100644
--- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/UtilityOctoQuadConfigMenu.java
+++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/UtilityOctoQuadConfigMenu.java
@@ -69,6 +69,7 @@ public class UtilityOctoQuadConfigMenu extends LinearOpMode
     TelemetryMenu.MenuElement menuAbsParams = new TelemetryMenu.MenuElement("Abs. Encoder Pulse Width Params", false);
     TelemetryMenu.IntegerOption[] optionsAbsParamsMax = new TelemetryMenu.IntegerOption[OctoQuad.NUM_ENCODERS];
     TelemetryMenu.IntegerOption[] optionsAbsParamsMin = new TelemetryMenu.IntegerOption[OctoQuad.NUM_ENCODERS];
+    TelemetryMenu.BooleanOption[] optionsAbsParamsWrapTracking = new TelemetryMenu.BooleanOption[OctoQuad.NUM_ENCODERS];

     TelemetryMenu.OptionElement optionProgramToFlash;
     TelemetryMenu.OptionElement optionSendToRAM;
@@ -161,9 +162,16 @@ void onClick() // called on OpMode thread
                     OctoQuad.MIN_PULSE_WIDTH_US,
                     OctoQuad.MAX_PULSE_WIDTH_US,
                     params.min_length_us);
+
+            optionsAbsParamsWrapTracking[i] = new TelemetryMenu.BooleanOption(
+                    String.format("Chan %d wrap tracking enabled", i),
+                    octoquad.getSingleChannelPulseWidthTracksWrap(i),
+                    "yes",
+                    "no");
         }

         menuAbsParams.addChildren(optionsAbsParamsMin);
         menuAbsParams.addChildren(optionsAbsParamsMax);
+        menuAbsParams.addChildren(optionsAbsParamsWrapTracking);

         optionProgramToFlash = new TelemetryMenu.OptionElement()
         {
@@ -266,6 +274,7 @@ void sendSettingsToRam()
             params.min_length_us = optionsAbsParamsMin[i].getValue();

             octoquad.setSingleChannelPulseWidthParams(i, params);
+            octoquad.setSingleChannelPulseWidthTracksWrap(i, optionsAbsParamsWrapTracking[i].val);
         }

         octoquad.setI2cRecoveryMode((OctoQuad.I2cRecoveryMode) optionI2cResetMode.getValue());
diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/externalhardware/ConceptExternalHardwareClass.java
similarity index 100%
rename from FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java
rename to FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/externalhardware/ConceptExternalHardwareClass.java
diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/externalhardware/RobotHardware.java
similarity index 100%
rename from FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java
rename to FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/externalhardware/RobotHardware.java
diff --git a/README.md b/README.md
index f7a02c95f849..522fb8048e75 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,12 @@
 ## NOTICE

-This
repository contains the public FTC SDK for the INTO THE DEEP (2024-2025) competition season.
+This repository contains the public FTC SDK for the DECODE (2025-2026) competition season.

 ## Welcome!
 This GitHub repository contains the source code that is used to build an Android app to control a *FIRST* Tech Challenge competition robot. To use this SDK, download/clone the entire project to your local computer.

 ## Requirements
-To use this Android Studio project, you will need Android Studio 2021.2 (codename Chipmunk) or later.
+To use this Android Studio project, you will need Android Studio Ladybug (2024.2) or later.

 To program your robot in Blocks or OnBot Java, you do not need Android Studio.
@@ -59,6 +59,119 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc

 # Release Information

+## Version 11.0 (20250827-105138)
+
+### Enhancements
+
+* OnBotJava now has the concept of a project.
+  A project is a collection of related files. A project may be chosen by selecting 'Example Project'
+  from the 'File type:' dropdown. Doing so will populate the dropdown to the immediate right with
+  a list of projects to choose from.
+  When selecting a project, all of the related files appear in the left pane of the workspace
+  underneath a directory with the chosen project name.
+  This is useful, for example, for ConceptExternalHardwareClass, which has a dependency upon
+  RobotHardware. This feature simplifies the usage of this Concept example by automatically
+  pulling in dependent classes.
+* Adds support for AndyMark ToF, IMU, and Color sensors.
+* The Driver Station app indicates if WiFi is disabled on the device.
+* Adds several features to the Color Processing software:
+  * DECODE colors `ARTIFACT_GREEN` and `ARTIFACT_PURPLE`
+  * Choice of the order of pre-processing steps Erode and Dilate
+  * Best-fit preview shape called `circleFit`, an alternate to the existing `boxFit`
+  * Sample OpMode `ConceptVisionColorLocator_Circle`, an alternate to the renamed `ConceptVisionColorLocator_Rectangle`
+* The Driver Station app play button has a green background with a white play symbol if
+  * the driver station and robot controller are connected and have the same team number
+  * there is at least one gamepad attached
+  * the timer is enabled (for an Autonomous OpMode)
+* Updated AprilTag Library for DECODE. Notably, getCurrentGameTagLibrary() now returns DECODE tags.
+  * Since the AprilTags on the Obelisk should not be used for localization, the ConceptAprilTagLocalization samples only use tags that do not have 'Obelisk' in their name.
+* OctoQuad I2C driver updated to support firmware v3.x
+  * Adds support for odometry localizer on MK2 hardware revision
+  * Adds ability to track position for an absolute encoder across multiple rotations
+  * Note that some driver APIs have changed; minor updates to user software may be required
+  * Requires firmware v3.x. For instructions on updating firmware, see
+    https://github.com/DigitalChickenLabs/OctoQuad/blob/master/documentation/OctoQuadDatasheet_Rev_3.0C.pdf
+
+
+## Version 10.3 (20250625-090416)
+
+### Breaking Changes
+* The behavior of setGlobalErrorMsg() is changed. Note that this is an SDK internal method that is not
+  meant to be used by team software or third party libraries. Teams or libraries using this method should
+  find another means to communicate failure.
The design intent of setGlobalErrorMsg() is to report an
+  error and force the user to restart the robot, which in certain circumstances when used inappropriately
+  could cause a robot to continue running while Driver Station controls are disabled. To prevent this,
+  processing of a call to setGlobalErrorMsg() is deferred until the robot is in a known safe state. This may
+  mean that a call to setGlobalErrorMsg() that does not also result in stopping a running OpMode will appear
+  as though nothing happened until the robot is stopped, at which point, if clearGlobalErrorMsg() has not
+  been called, the message will appear on the Driver Station and a restart will be required.
+  Addresses issue [1381](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/1381)
+* Fixes getLatestResult in Limelight3A so that if the Limelight hasn't provided data yet, it still returns an LLResult, but `isValid()` will be false
+  * If you previously checked whether this was `null` to see if the Limelight had been contacted, you now need to use `isValid()` on the result. That is because now it always returns an LLResult even before it talks to the Limelight, but if it doesn't have valid data, the `isValid()` will be `false`.
+* Changed all omni samples to use front_left_drive, front_right_drive, back_left_drive, back_right_drive
+  * This is only breaking for you if you copy one of the changed samples to your own project and expect to use the same robot configuration as before.
+
+### Known Issues
+* The redesigned OnBotJava new file workflow allows the user to use a lowercase letter as the first character of a filename.
+  This is a regression from 10.2, which required the first character to be uppercase. Software will build, but if the user tries
+  to rename the file, the rename will fail.
+
+### Enhancements
+* Improved the OnBotJava (OBJ) new file creation workflow. The new flow allows you to easily use samples, craft new custom OpModes and make new Java classes.
+* Added support for gamepad edge detection.
+  * A new sample program `ConceptGamepadEdgeDetection` demonstrates its use.
+* Adds a blackboard member to the OpMode that maintains state between OpModes (but not between robot resets). See the ConceptBlackboard sample for how to use it.
+* Updated PredominantColorProcessor to also return the predominant color in RGB, HSV and YCrCb color spaces. Updated ConceptVisionColorSensor sample OpMode to display the getAnalysis() result in all three color spaces.
+* Adds support for the GoBilda Pinpoint.
+  * Also adds `SensorGoBildaPinpoint` sample to show how to use it.
+* Added `getArcLength()` and `getCircularity()` to ColorBlobLocatorProcessor.Blob. Added BY_ARC_LENGTH and BY_CIRCULARITY as additional BlobCriteria.
+* Added `filterByCriteria()` and `sortByCriteria()` to ColorBlobLocatorProcessor.Util.
+  * The filter and sort methods for specific criteria have been deprecated.
+  * The updated sample program `ConceptVisionColorLocator` provides more details on the new syntax.
+* Added a Help menu item and Help page, available when connected to the robot controller via Program and Manage. The Help page has links to team resources such as [FTC Documentation](https://ftc-docs.firstinspires.org/), [FTC Discussion Forums](https://ftc-community.firstinspires.org), [Java FTC SDK API Documentation](https://javadoc.io/doc/org.firstinspires.ftc), and [FTC Game Information](https://ftc.game/).
+* Self inspection changes:
+  * List both the Driver Station Name and Robot Controller Name when inspecting the Driver Station.
+  * Report if the team number portion of the device names does not match.
+  * -rc is no longer valid as part of a Robot Controller name, it must be -RC.
+  * Use Robot Controller Name or Driver Station Name labels on the inspection screens instead of WIFI Access Point or WIFI Direct Name.

+### Bug Fixes
+* Fixes issue [1478](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/1478) in AnnotatedHooksClassFilter that ignored exceptions if they occur in one of the SDK app hooks.
+* Fix initialization in the distance sensor (Rev 2m) to prevent bad data in the first call to getDistance.
+* Fixes issue [1470](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/1470): Scaling a servo range is now irrespective of reverse() being called. For example, if you set the scale range to [0.0, 0.5] and the servo is reversed, it will be from 0.5 to 0.0, NOT 1.0 to 0.5.
+* Fixes issue [1232](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/1232), a rare race condition where using the log rapidly along with other telemetry could cause a crash.
+
+## Version 10.2 (20250121-174034)
+
+### Enhancements
+* Add ability to upload the pipeline for Limelight3A, which allows teams to version control their Limelight pipelines.
+
+
+### Bug Fixes
+
+* Fix an internal bug where if the RUN_TO_POSITION run mode was specified before a target position, recovery would require a power cycle. A side effect of this fix is that a stack trace identifying the location of the error is always produced in the log. Fixes issue [1345](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/1345).
+* Throws a helpful exception if region of interest is set to null when building a PredominantColorProcessor. Also sets the default RoI to the full frame. Addresses issue [1076](FIRST-Tech-Challenge/FtcRobotController#1076)
+* Throws a helpful exception if user tries to construct an ImageRegion with malformed boundaries. Addresses issue [1078](FIRST-Tech-Challenge/FtcRobotController#1078)
+
+## Version 10.1.1 (20241102-092223)
+
+### Breaking Changes
+
+* Support for Android Studio Ladybug. Requires Android Studio Ladybug.
+
+### Known Issues
+
+* Android Studio Ladybug's bundled JDK is version 21. JDK 21 has deprecated support for Java 1.8, and Ladybug will warn on this deprecation.
+  OnBotJava only supports Java 1.8; therefore, in order to ensure that software developed using Android Studio will
+  run within the OnBotJava environment, the targetCompatibility and sourceCompatibility versions for the SDK have been left at VERSION_1_8.
+  FIRST has decided that until it can devote the resources to migrating OnBotJava to a newer version of Java, the deprecation is the
+  lesser of two non-optimal situations.
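+
+  As a concrete illustration of that constraint (the class and variable names below are
+  hypothetical, not SDK classes), OnBotJava accepts Java 1.8 syntax but rejects newer
+  language features such as `var`:
+
+  ```java
+  import java.util.ArrayList;
+  import java.util.List;
+
+  public class Java8Example {
+      void configure() {
+          // Java 1.8 style: explicit type plus the diamond operator -- compiles in OnBotJava
+          List<String> motorNames = new ArrayList<>();
+          motorNames.add("front_left_drive");
+
+          // 'var' is Java 10+ syntax and will NOT compile in OnBotJava:
+          // var motorNames = new ArrayList<String>();
+      }
+  }
+  ```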
+ +### Enhancements + +* Added `toString()` method to Pose2D +* Added `toString()` method to SparkFunOTOS.Pose2D + ## Version 10.1 (20240919-122750) ### Enhancements diff --git a/build.common.gradle b/build.common.gradle index 046a4a163f93..a1d2fa4ad51c 100644 --- a/build.common.gradle +++ b/build.common.gradle @@ -21,7 +21,7 @@ apply plugin: 'com.android.application' android { - compileSdkVersion 29 + compileSdkVersion 30 signingConfigs { release { @@ -109,10 +109,6 @@ android { packagingOptions { pickFirst '**/*.so' } - sourceSets.main { - jni.srcDirs = [] - jniLibs.srcDir rootProject.file('libs') - } ndkVersion '21.3.6528147' } diff --git a/build.dependencies.gradle b/build.dependencies.gradle index a2b4ea1130fc..1c07e8cba6c1 100644 --- a/build.dependencies.gradle +++ b/build.dependencies.gradle @@ -4,14 +4,14 @@ repositories { } dependencies { - implementation 'org.firstinspires.ftc:Inspection:10.1.0' - implementation 'org.firstinspires.ftc:Blocks:10.1.0' - implementation 'org.firstinspires.ftc:RobotCore:10.1.0' - implementation 'org.firstinspires.ftc:RobotServer:10.1.0' - implementation 'org.firstinspires.ftc:OnBotJava:10.1.0' - implementation 'org.firstinspires.ftc:Hardware:10.1.0' - implementation 'org.firstinspires.ftc:FtcCommon:10.1.0' - implementation 'org.firstinspires.ftc:Vision:10.1.0' + implementation 'org.firstinspires.ftc:Inspection:11.0.0' + implementation 'org.firstinspires.ftc:Blocks:11.0.0' + implementation 'org.firstinspires.ftc:RobotCore:11.0.0' + implementation 'org.firstinspires.ftc:RobotServer:11.0.0' + implementation 'org.firstinspires.ftc:OnBotJava:11.0.0' + implementation 'org.firstinspires.ftc:Hardware:11.0.0' + implementation 'org.firstinspires.ftc:FtcCommon:11.0.0' + implementation 'org.firstinspires.ftc:Vision:11.0.0' implementation 'androidx.appcompat:appcompat:1.2.0' } diff --git a/build.gradle b/build.gradle index 8969a41590b4..e70f2095b7a9 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ buildscript { } dependencies { // Note for FTC Teams: Do not modify this yourself. - classpath 'com.android.tools.build:gradle:7.2.0' + classpath 'com.android.tools.build:gradle:8.7.0' } } diff --git a/gradle.properties b/gradle.properties index 7a370c51bbe7..f5935e913f4b 100644 --- a/gradle.properties +++ b/gradle.properties @@ -8,3 +8,5 @@ android.enableJetifier=false # Allow Gradle to use up to 1 GB of RAM org.gradle.jvmargs=-Xmx1024M + +android.nonTransitiveRClass=false \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index aa991fceae6e..19cfad969baf 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists
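
One way to reproduce the gradle-wrapper.properties change above without editing the file by
hand (a usage sketch, assuming a standard checkout of this project) is Gradle's built-in
wrapper task, which rewrites distributionUrl and the wrapper scripts together:

    ./gradlew wrapper --gradle-version 8.9

Running this once from the project root and committing the regenerated files yields the same
7.4.2 -> 8.9 upgrade recorded in this diff.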