Merge pull request #1545 from FIRST-Tech-Challenge/20250827-105138-release-candidate

FtcRobotController v11.0
@@ -1,8 +1,8 @@
 <?xml version="1.0" encoding="utf-8"?>
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     xmlns:tools="http://schemas.android.com/tools"
-    android:versionCode="59"
-    android:versionName="10.3">
+    android:versionCode="60"
+    android:versionName="11.0">

     <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
@@ -224,6 +224,8 @@ public class ConceptAprilTagLocalization extends LinearOpMode {
         for (AprilTagDetection detection : currentDetections) {
             if (detection.metadata != null) {
                 telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
+                // Only use tags that don't have Obelisk in them
+                if (!detection.metadata.name.contains("Obelisk")) {
                 telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)",
                         detection.robotPose.getPosition().x,
                         detection.robotPose.getPosition().y,
@@ -232,6 +234,7 @@ public class ConceptAprilTagLocalization extends LinearOpMode {
                         detection.robotPose.getOrientation().getPitch(AngleUnit.DEGREES),
                         detection.robotPose.getOrientation().getRoll(AngleUnit.DEGREES),
                         detection.robotPose.getOrientation().getYaw(AngleUnit.DEGREES)));
+                }
             } else {
                 telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
                 telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
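The new Obelisk check above simply gates the robot-pose telemetry on the tag name. As a hedged illustration (not part of this diff), the same filter written as a stand-alone loop, using only the AprilTagDetection fields visible in this hunk (metadata.name, robotPose, id):

    // Sketch: use only non-Obelisk library tags for localization telemetry.
    // Assumes currentDetections came from an AprilTagProcessor, as in the sample.
    for (AprilTagDetection detection : currentDetections) {
        if (detection.metadata == null) continue;                    // unknown tag: no library metadata
        if (detection.metadata.name.contains("Obelisk")) continue;  // skip Obelisk motif tags
        // Remaining tags are usable for a field-relative robot pose.
        telemetry.addData("Usable tag", "%d at XY %.1f %.1f (inch)",
                detection.id,
                detection.robotPose.getPosition().x,
                detection.robotPose.getPosition().y);
    }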
@@ -75,7 +75,7 @@ public class ConceptGamepadEdgeDetection extends LinearOpMode {
         telemetry.addData("Gamepad 1 Left Bumper Released", gamepad1.leftBumperWasReleased());
         telemetry.addData("Gamepad 1 Left Bumper Status", gamepad1.left_bumper);

-        // Add an empty line to seperate the buttons in telemetry
+        // Add an empty line to separate the buttons in telemetry
         telemetry.addLine();

         // Add the status of the Gamepad 1 Right Bumper
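This sample reports both edge events (leftBumperWasReleased()) and level state (left_bumper). A hedged sketch of why the distinction matters, assuming a matching leftBumperWasPressed() counterpart exists alongside the leftBumperWasReleased() shown above:

    // Sketch: a toggle needs an edge, not a level.
    boolean clawOpen = false;   // hypothetical mechanism state
    while (opModeIsActive()) {
        // Fires exactly once per physical press (edge), so the toggle is stable.
        if (gamepad1.leftBumperWasPressed()) {   // assumed counterpart of leftBumperWasReleased()
            clawOpen = !clawOpen;
        }
        // left_bumper is true on EVERY loop pass while held (level); toggling on it
        // would flip the state many times per second.
        telemetry.addData("Claw open", clawOpen);
        telemetry.addData("Bumper held", gamepad1.left_bumper);
        telemetry.update();
    }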
@@ -0,0 +1,245 @@
/*
 * Copyright (c) 2024 Phil Malone
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import android.graphics.Color;
import android.util.Size;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.Circle;
import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor;
import org.firstinspires.ftc.vision.opencv.ColorRange;
import org.firstinspires.ftc.vision.opencv.ImageRegion;

import java.util.List;

/*
 * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions.
 * This sample is targeted towards circular blobs. To see rectangles, look at ConceptVisionColorLocator_Rectangle
 *
 * Unlike a "color sensor" which determines the color of a nearby object, this "color locator"
 * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that
 * match the requested color range. These blobs can be further filtered and sorted to find the one
 * most likely to be the item the user is looking for.
 *
 * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process.
 * The ColorBlobLocatorProcessor (CBLP) process is created first, and then the VisionPortal is built.
 * The (CBLP) analyses the ROI and locates pixels that match the ColorRange to form a "mask".
 * The matching pixels are then collected into contiguous "blobs" of pixels.
 * The outer boundary of each blob is called its "contour". For each blob, the process then
 * creates the smallest possible circle that will fully encase the contour. The user can then call
 * getBlobs() to retrieve the list of Blobs, where each contains the contour and the circle.
 * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest
 * contours are listed first.
 *
 * A colored enclosing circle is drawn on the camera preview to show the location of each Blob.
 * The original Blob contour can also be added to the preview.
 * This is helpful when configuring the ColorBlobLocatorProcessor parameters.
 *
 * Tip: Connect an HDMI monitor to the Control Hub to view the Color Location process in real-time.
 * Or use a screen copy utility like ScrCpy.exe to view the video remotely.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */

@Disabled
@TeleOp(name = "Concept: Vision Color-Locator (Circle)", group = "Concept")
public class ConceptVisionColorLocator_Circle extends LinearOpMode {

    @Override
    public void runOpMode() {
/* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class.
|
||||||
|
* - Specify the color range you are looking for. Use a predefined color, or create your own
|
||||||
|
*
|
||||||
|
* .setTargetColorRange(ColorRange.BLUE) // use a predefined color match
|
||||||
|
* Available colors are: RED, BLUE, YELLOW, GREEN, ARTIFACT_GREEN, ARTIFACT_PURPLE
|
||||||
|
* .setTargetColorRange(new ColorRange(ColorSpace.YCrCb, // or define your own color match
|
||||||
|
* new Scalar( 32, 176, 0),
|
||||||
|
* new Scalar(255, 255, 132)))
|
||||||
|
*
|
||||||
|
* - Focus the color locator by defining a RegionOfInterest (ROI) which you want to search.
|
||||||
|
* This can be the entire frame, or a sub-region defined using:
|
||||||
|
* 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system.
|
||||||
|
* Use one form of the ImageRegion class to define the ROI.
|
||||||
|
* ImageRegion.entireFrame()
|
||||||
|
* ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixels at upper left corner
|
||||||
|
* ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5) 50% width/height in center
|
||||||
|
*
|
||||||
|
* - Define which contours are included.
|
||||||
|
* You can get ALL the contours, ignore contours that are completely inside another contour.
|
||||||
|
* .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY)
|
||||||
|
* .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)
|
||||||
|
* EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up solid colors.
|
||||||
|
*
|
||||||
|
* - Turn the displays of contours ON or OFF.
|
||||||
|
* Turning these on helps debugging but takes up valuable CPU time.
|
||||||
|
* .setDrawContours(true) Draws an outline of each contour.
|
||||||
|
* .setEnclosingCircleColor(int color) Draws a circle around each contour. 0 to disable.
|
||||||
|
* .setBoxFitColor(int color) Draws a rectangle around each contour. 0 to disable. ON by default.
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* - include any pre-processing of the image or mask before looking for Blobs.
|
||||||
|
* There are some extra processing you can include to improve the formation of blobs.
|
||||||
|
* Using these features requires an understanding of how they may effect the final
|
||||||
|
* blobs. The "pixels" argument sets the NxN kernel size.
|
||||||
|
* .setBlurSize(int pixels)
|
||||||
|
* Blurring an image helps to provide a smooth color transition between objects,
|
||||||
|
* and smoother contours. The higher the number, the more blurred the image becomes.
|
||||||
|
* Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement.
|
||||||
|
* Blurring too much may hide smaller features. A size of 5 is good for a 320x240 image.
|
||||||
|
*
|
||||||
|
* .setErodeSize(int pixels)
|
||||||
|
* Erosion removes floating pixels and thin lines so that only substantive objects remain.
|
||||||
|
* Erosion can grow holes inside regions, and also shrink objects.
|
||||||
|
* "pixels" in the range of 2-4 are suitable for low res images.
|
||||||
|
*
|
||||||
|
* .setDilateSize(int pixels)
|
||||||
|
* Dilation makes objects and lines more visible by filling in small holes, and making
|
||||||
|
* filled shapes appear larger. Dilation is useful for joining broken parts of an
|
||||||
|
* object, such as when removing noise from an image.
|
||||||
|
* "pixels" in the range of 2-4 are suitable for low res images.
|
||||||
|
*
|
||||||
|
* .setMorphOperationType(MorphOperationType morphOperationType)
|
||||||
|
* This defines the order in which the Erode/Dilate actions are performed.
|
||||||
|
* OPENING: Will Erode and then Dilate which will make small noise blobs go away
|
||||||
|
* CLOSING: Will Dilate and then Erode which will tend to fill in any small holes in blob edges.
|
||||||
|
*/
|
||||||
|
        ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder()
                .setTargetColorRange(ColorRange.ARTIFACT_PURPLE)    // Use a predefined color match
                .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.75, 0.75, 0.75, -0.75))
                .setDrawContours(true)                              // Show contours on the Stream Preview
                .setBoxFitColor(0)                                  // Disable the drawing of rectangles
                .setCircleFitColor(Color.rgb(255, 255, 0))          // Draw a circle
                .setBlurSize(5)                                     // Smooth the transitions between different colors in image

                // The following options have been added to fill in perimeter holes.
                .setDilateSize(15)                                  // Expand blobs to fill any divots on the edges
                .setErodeSize(15)                                   // Shrink blobs back to original size
                .setMorphOperationType(ColorBlobLocatorProcessor.MorphOperationType.CLOSING)

                .build();
        /*
         * Build a vision portal to run the Color Locator process.
         *
         *  - Add the colorLocator process created above.
         *  - Set the desired video resolution.
         *      Since a high resolution will not improve this process, choose a lower resolution
         *      that is supported by your camera. This will improve overall performance and reduce
         *      latency.
         *  - Choose your video source. This may be
         *      .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))  .....   for a webcam
         *    or
         *      .setCamera(BuiltinCameraDirection.BACK)    ... for a Phone Camera
         */
        VisionPortal portal = new VisionPortal.Builder()
                .addProcessor(colorLocator)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        telemetry.setMsTransmissionInterval(100);   // Speed up telemetry updates for debugging.
        telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);

        // WARNING: To view the stream preview on the Driver Station, this code runs in INIT mode.
        while (opModeIsActive() || opModeInInit()) {
            telemetry.addData("preview on/off", "... Camera Stream\n");

            // Read the current list
            List<ColorBlobLocatorProcessor.Blob> blobs = colorLocator.getBlobs();

            /*
             * The list of Blobs can be filtered to remove unwanted Blobs.
             *   Note: All contours will still be displayed on the Stream Preview, but only those
             *   that satisfy the filter conditions will remain in the current list of
             *   "blobs". Multiple filters may be used.
             *
             * To perform a filter
             *   ColorBlobLocatorProcessor.Util.filterByCriteria(criteria, minValue, maxValue, blobs);
             *
             * The following criteria are currently supported.
             *
             * ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA
             *   A Blob's area is the number of pixels contained within the Contour. Filter out any
             *   that are too big or small. Start with a large range and then refine the range based
             *   on the likely size of the desired object in the viewfinder.
             *
             * ColorBlobLocatorProcessor.BlobCriteria.BY_DENSITY
             *   A blob's density is an indication of how "full" the contour is.
             *   If you put a rubber band around the contour you would get the "Convex Hull" of the
             *   contour. The density is the ratio of Contour-area to Convex Hull-area.
             *
             * ColorBlobLocatorProcessor.BlobCriteria.BY_ASPECT_RATIO
             *   A blob's Aspect ratio is the ratio of boxFit long side to short side.
             *   A perfect Square has an aspect ratio of 1. All others are > 1
             *
             * ColorBlobLocatorProcessor.BlobCriteria.BY_ARC_LENGTH
             *   A blob's arc length is the perimeter of the blob.
             *   This can be used in conjunction with an area filter to detect oddly shaped blobs.
             *
             * ColorBlobLocatorProcessor.BlobCriteria.BY_CIRCULARITY
             *   A blob's circularity is how circular it is based on the known area and arc length.
             *   A perfect circle has a circularity of 1. All others are < 1
             */
            ColorBlobLocatorProcessor.Util.filterByCriteria(
                    ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA,
                    50, 20000, blobs);  // filter out very small blobs.

            ColorBlobLocatorProcessor.Util.filterByCriteria(
                    ColorBlobLocatorProcessor.BlobCriteria.BY_CIRCULARITY,
                    0.6, 1, blobs);     /* filter out non-circular blobs.
                                         * NOTE: You may want to adjust the minimum value depending on your use case.
                                         * Circularity values will be affected by shadows, and will therefore vary based
                                         * on the location of the camera on your robot and venue lighting. It is strongly
                                         * encouraged to test your vision on the competition field if your event allows
                                         * sensor calibration time.
                                         */

            /*
             * The list of Blobs can be sorted using the same Blob attributes as listed above.
             * No more than one sort call should be made. Sorting can use ascending or descending order.
             * Here is an example:
             *   ColorBlobLocatorProcessor.Util.sortByCriteria(
             *       ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA, SortOrder.DESCENDING, blobs);
             */

telemetry.addLine("Circularity Radius Center");
|
||||||
|
|
||||||
|
// Display the Blob's circularity, and the size (radius) and center location of its circleFit.
|
||||||
|
for (ColorBlobLocatorProcessor.Blob b : blobs) {
|
||||||
|
|
||||||
|
Circle circleFit = b.getCircle();
|
||||||
|
telemetry.addLine(String.format("%5.3f %3d (%3d,%3d)",
|
||||||
|
b.getCircularity(), (int) circleFit.getRadius(), (int) circleFit.getX(), (int) circleFit.getY()));
|
||||||
|
}
|
||||||
|
|
||||||
|
telemetry.update();
|
||||||
|
sleep(100); // Match the telemetry update interval.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
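Because the sample leaves blobs in the default largest-contour-first order after filtering, consuming the result usually means taking the first element. A minimal sketch (not part of this new file), using only the Blob and Circle accessors shown above; the 160-pixel half-width is an assumption matching the 320x240 resolution this sample sets:

    // Sketch: act on the largest blob that survived the area and circularity filters.
    if (!blobs.isEmpty()) {
        ColorBlobLocatorProcessor.Blob largest = blobs.get(0);  // default sort: biggest contour first
        Circle c = largest.getCircle();
        double pixelErrorX = c.getX() - 160;   // horizontal offset from image center
        telemetry.addData("Largest circle", "radius %d, %.0f px off center",
                (int) c.getRadius(), pixelErrorX);
    }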
@@ -41,36 +41,41 @@ import java.util.List;
 /*
  * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions
  *
- * Unlike a "color sensor" which determines the color of an object in the field of view, this "color locator"
- * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that match the requested color range.
- * These blobs can be further filtered and sorted to find the one most likely to be the item the user is looking for.
+ * Unlike a "color sensor" which determines the color of a nearby object, this "color locator"
+ * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that
+ * match the requested color range. These blobs can be further filtered and sorted to find the one
+ * most likely to be the item the user is looking for.
  *
  * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process.
- * The ColorBlobLocatorProcessor process is created first, and then the VisionPortal is built to use this process.
- * The ColorBlobLocatorProcessor analyses the ROI and locates pixels that match the ColorRange to form a "mask".
- * The matching pixels are then collected into contiguous "blobs" of pixels. The outer boundaries of these blobs are called its "contour".
- * For each blob, the process then creates the smallest possible rectangle "boxFit" that will fully encase the contour.
- * The user can then call getBlobs() to retrieve the list of Blobs, where each Blob contains the contour and the boxFit data.
- * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest contours are listed first.
+ * The ColorBlobLocatorProcessor (CBLP) process is created first, and then the VisionPortal is built.
+ * The (CBLP) analyses the ROI and locates pixels that match the ColorRange to form a "mask".
+ * The matching pixels are then collected into contiguous "blobs" of pixels.
+ * The outer boundary of each blob is called its "contour". For each blob, the process then
+ * creates the smallest possible rectangle "boxFit" that will fully encase the contour. The user can
+ * then call getBlobs() to retrieve the list of Blobs, where each contains the contour and the boxFit.
+ * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest
+ * contours are listed first.
  *
- * To aid the user, a colored boxFit rectangle is drawn on the camera preview to show the location of each Blob
- * The original Blob contour can also be added to the preview. This is helpful when configuring the ColorBlobLocatorProcessor parameters.
+ * A colored boxFit rectangle is drawn on the camera preview to show the location of each Blob.
+ * The original Blob contour can also be added to the preview.
+ * This is helpful when configuring the ColorBlobLocatorProcessor parameters.
  *
  * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
  * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
  */

 @Disabled
-@TeleOp(name = "Concept: Vision Color-Locator", group = "Concept")
-public class ConceptVisionColorLocator extends LinearOpMode
+@TeleOp(name = "Concept: Color-Locator (Rectangle)", group = "Concept")
+public class ConceptVisionColorLocator_Rectangle extends LinearOpMode
 {
     @Override
     public void runOpMode()
     {
         /* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class.
-         * - Specify the color range you are looking for. You can use a predefined color, or create you own color range
+         * - Specify the color range you are looking for. Use a predefined color, or create your own
+         *
          *     .setTargetColorRange(ColorRange.BLUE)       // use a predefined color match
-         *       Available predefined colors are: RED, BLUE YELLOW GREEN
+         *       Available colors are: RED, BLUE, YELLOW, GREEN, ARTIFACT_GREEN and ARTIFACT_PURPLE
         *     .setTargetColorRange(new ColorRange(ColorSpace.YCrCb,  // or define your own color match
          *                                         new Scalar( 32, 176,   0),
          *                                         new Scalar(255, 255, 132)))
@@ -80,37 +85,43 @@ public class ConceptVisionColorLocator extends LinearOpMode
          *     1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system.
          *     Use one form of the ImageRegion class to define the ROI.
          *         ImageRegion.entireFrame()
-         *         ImageRegion.asImageCoordinates(50, 50, 150, 150)            100x100 pixel square near the upper left corner
-         *         ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5)  50% width/height square centered on screen
+         *         ImageRegion.asImageCoordinates(50, 50, 150, 150)            100x100 pixels at upper left corner
+         *         ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5)  50% width/height in center
          *
          * - Define which contours are included.
-         *     You can get ALL the contours, or you can skip any contours that are completely inside another contour.
-         *     .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY)  // return all contours
-         *     .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)            // exclude contours inside other contours
-         *     note: EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up areas of solid color.
+         *     You can get ALL the contours, or ignore contours that are completely inside another contour.
+         *     .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY)
+         *     .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)
+         *     EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up solid colors.
          *
-         * - turn the display of contours ON or OFF.  Turning this on helps debugging but takes up valuable CPU time.
+         * - turn the display of contours ON or OFF.  Helps debugging but takes up valuable CPU time.
          *     .setDrawContours(true)
          *
          * - include any pre-processing of the image or mask before looking for Blobs.
-         *     There are some extra processing you can include to improve the formation of blobs.  Using these features requires
-         *     an understanding of how they may effect the final blobs.  The "pixels" argument sets the NxN kernel size.
-         *     .setBlurSize(int pixels)    Blurring an image helps to provide a smooth color transition between objects, and smoother contours.
-         *                                 The higher the number of pixels, the more blurred the image becomes.
+         *     There is some extra processing you can include to improve the formation of blobs.
+         *     Using these features requires an understanding of how they may affect the final blobs.
+         *     The "pixels" argument sets the NxN kernel size.
+         *     .setBlurSize(int pixels)
+         *        Blurring an image helps to provide a smooth color transition between objects,
+         *        and smoother contours.  The higher the number, the more blurred the image becomes.
          *        Note:  Even "pixels" values will be incremented to satisfy the "odd number" requirement.
-         *        Blurring too much may hide smaller features.  A "pixels" size of 5 is good for a 320x240 image.
-         *     .setErodeSize(int pixels)   Erosion removes floating pixels and thin lines so that only substantive objects remain.
+         *        Blurring too much may hide smaller features.  A size of 5 is good for a 320x240 image.
+         *
+         *     .setErodeSize(int pixels)
+         *        Erosion removes floating pixels and thin lines so that only substantive objects remain.
          *        Erosion can grow holes inside regions, and also shrink objects.
          *        "pixels" in the range of 2-4 are suitable for low res images.
-         *     .setDilateSize(int pixels)  Dilation makes objects more visible by filling in small holes, making lines appear thicker,
-         *                                 and making filled shapes appear larger. Dilation is useful for joining broken parts of an
+         *
+         *     .setDilateSize(int pixels)
+         *        Dilation makes objects and lines more visible by filling in small holes, and making
+         *        filled shapes appear larger.  Dilation is useful for joining broken parts of an
          *        object, such as when removing noise from an image.
          *        "pixels" in the range of 2-4 are suitable for low res images.
          */
         ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder()
-                .setTargetColorRange(ColorRange.BLUE)                                  // use a predefined color match
-                .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)  // exclude blobs inside blobs
-                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5))   // search central 1/4 of camera view
+                .setTargetColorRange(ColorRange.ARTIFACT_PURPLE)                      // use a predefined color match
+                .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)
+                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.75, 0.75, 0.75, -0.75))
                 .setDrawContours(true)                                                // Show contours on the Stream Preview
                 .setBlurSize(5)                                                       // Smooth the transitions between different colors in image
                 .build();
@@ -120,7 +131,7 @@ public class ConceptVisionColorLocator extends LinearOpMode
          *
          *  - Add the colorLocator process created above.
          *  - Set the desired video resolution.
-         *      Since a high resolution will not improve this process, choose a lower resolution that is
+         *      A high resolution will not improve this process, so choose a lower resolution that is
          *      supported by your camera. This will improve overall performance and reduce latency.
          *  - Choose your video source. This may be
          *      .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))  .....   for a webcam
@@ -133,10 +144,10 @@ public class ConceptVisionColorLocator extends LinearOpMode
                 .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                 .build();

-        telemetry.setMsTransmissionInterval(50);   // Speed up telemetry updates, Just use for debugging.
+        telemetry.setMsTransmissionInterval(100);  // Speed up telemetry updates for debugging.
         telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);

-        // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
+        // WARNING: To view the stream preview on the Driver Station, this code runs in INIT mode.
         while (opModeIsActive() || opModeInInit())
         {
             telemetry.addData("preview on/off", "... Camera Stream\n");
@@ -146,8 +157,9 @@ public class ConceptVisionColorLocator extends LinearOpMode

             /*
              * The list of Blobs can be filtered to remove unwanted Blobs.
-             *   Note: All contours will be still displayed on the Stream Preview, but only those that satisfy the filter
-             *   conditions will remain in the current list of "blobs".  Multiple filters may be used.
+             *   Note: All contours will still be displayed on the Stream Preview, but only those
+             *   that satisfy the filter conditions will remain in the current list of "blobs".
+             *   Multiple filters may be used.
              *
              * To perform a filter
              *   ColorBlobLocatorProcessor.Util.filterByCriteria(criteria, minValue, maxValue, blobs);
@@ -155,12 +167,13 @@ public class ConceptVisionColorLocator extends LinearOpMode
              * The following criteria are currently supported.
              *
              * ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA
-             *   A Blob's area is the number of pixels contained within the Contour.  Filter out any that are too big or small.
-             *   Start with a large range and then refine the range based on the likely size of the desired object in the viewfinder.
+             *   A Blob's area is the number of pixels contained within the Contour.  Filter out any
+             *   that are too big or small.  Start with a large range and then refine the range
+             *   based on the likely size of the desired object in the viewfinder.
              *
              * ColorBlobLocatorProcessor.BlobCriteria.BY_DENSITY
              *   A blob's density is an indication of how "full" the contour is.
-             *   If you put a rubber band around the contour you would get the "Convex Hull" of the contour.
+             *   If you put a rubber band around the contour you get the "Convex Hull" of the contour.
              *   The density is the ratio of Contour-area to Convex Hull-area.
              *
              * ColorBlobLocatorProcessor.BlobCriteria.BY_ASPECT_RATIO
@@ -181,26 +194,25 @@ public class ConceptVisionColorLocator extends LinearOpMode

             /*
              * The list of Blobs can be sorted using the same Blob attributes as listed above.
-             * No more than one sort call should be made.  Sorting can use ascending or descending order.
-             *   ColorBlobLocatorProcessor.Util.sortByCriteria(ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA SortOrder.DESCENDING, blobs);   // Default
-             *   ColorBlobLocatorProcessor.Util.sortByCriteria(ColorBlobLocatorProcessor.BlobCriteria.BY_DENSITY SortOrder.DESCENDING, blobs);
-             *   ColorBlobLocatorProcessor.Util.sortByCriteria(ColorBlobLocatorProcessor.BlobCriteria.BY_ASPECT_RATIO SortOrder.DESCENDING, blobs);
-             *   ColorBlobLocatorProcessor.Util.sortByCriteria(ColorBlobLocatorProcessor.BlobCriteria.BY_ARC_LENGTH SortOrder.DESCENDING, blobs);
-             *   ColorBlobLocatorProcessor.Util.sortByCriteria(ColorBlobLocatorProcessor.BlobCriteria.BY_CIRCULARITY SortOrder.DESCENDING, blobs);
+             * No more than one sort call should be made.  Sorting can be ASCENDING or DESCENDING.
+             * Here is an example:
+             *   ColorBlobLocatorProcessor.Util.sortByCriteria(
+             *       ColorBlobLocatorProcessor.BlobCriteria.BY_CONTOUR_AREA, SortOrder.DESCENDING, blobs);
              */

-            telemetry.addLine("Area Density Aspect Arc Circle Center");
+            telemetry.addLine("Ctr:(X,Y) Area Dens Aspect Arc Circ");

             // Display the size (area) and center location for each Blob.
             for(ColorBlobLocatorProcessor.Blob b : blobs)
             {
                 RotatedRect boxFit = b.getBoxFit();
-                telemetry.addLine(String.format("%5d %4.2f %5.2f %3d %5.3f (%3d,%3d)",
-                        b.getContourArea(), b.getDensity(), b.getAspectRatio(), (int) b.getArcLength(), b.getCircularity(), (int) boxFit.center.x, (int) boxFit.center.y));
+                telemetry.addLine(String.format("(%3d,%3d) %5d %4.2f %5.2f %3d %5.3f ",
+                        (int) boxFit.center.x, (int) boxFit.center.y, b.getContourArea(), b.getDensity(),
+                        b.getAspectRatio(), (int) b.getArcLength(), b.getCircularity()));
             }

             telemetry.update();
-            sleep(50);
+            sleep(100);   // Match the telemetry update interval.
         }
     }
 }
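The rectangle variant ends the same way; a team consuming it would typically read the boxFit of the first (largest) blob. A hedged sketch, not part of this diff, using only the accessors shown in this file; the 160-pixel half-width assumes the 320x240 resolution set in this sample:

    // Sketch: derive a steering error from the largest blob's boxFit center.
    if (!blobs.isEmpty()) {
        RotatedRect boxFit = blobs.get(0).getBoxFit();  // blobs are largest-first by default
        double pixelErrorX = boxFit.center.x - 160;     // positive = target right of center
        telemetry.addData("Steer error", "%.0f px", pixelErrorX);
    }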
@@ -40,16 +40,20 @@ import org.firstinspires.ftc.vision.opencv.PredominantColorProcessor;
  *
  * This sample performs the same function, except it uses a video camera to inspect an object or scene.
  * The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view.
- * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected.
+ * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching
+ * color will be selected.
  *
  * To perform this function, a VisionPortal runs a PredominantColorProcessor process.
- * The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process.
- * The PredominantColorProcessor analyses the ROI and splits the colored pixels into several color-clusters.
+ * The PredominantColorProcessor (PCP) process is created first, and then the VisionPortal is built.
+ * The (PCP) analyses the ROI and splits the colored pixels into several color-clusters.
  * The largest of these clusters is then considered to be the "Predominant Color"
  * The process then matches the Predominant Color with the closest Swatch and returns that match.
  *
  * To aid the user, a colored rectangle is drawn on the camera preview to show the RegionOfInterest,
- * The Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable.
+ * The Predominant Color is used to paint the rectangle border, so the user can visualize the color.
+ *
+ * Tip: Connect an HDMI monitor to the Control Hub to view the Color Sensor process in real-time.
+ * Or use a screen copy utility like ScrCpy.exe to view the video remotely.
  *
  * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
  * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
@@ -69,15 +73,16 @@ public class ConceptVisionColorSensor extends LinearOpMode
          *     1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system.
          *     Use one form of the ImageRegion class to define the ROI.
          *         ImageRegion.entireFrame()
-         *         ImageRegion.asImageCoordinates(50, 50, 150, 150)            100x100 pixel square near the upper left corner
-         *         ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1)  10% width/height square centered on screen
+         *         ImageRegion.asImageCoordinates(50, 50, 150, 150)            100x100 square at the top left corner
+         *         ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1)  10% W * H centered square
          *
          * - Set the list of "acceptable" color swatches (matches).
          *     Only colors that you assign here will be returned.
          *     If you know the sensor will be pointing to one of a few specific colors, enter them here.
-         *     Or, if the sensor may be pointed randomly, provide some additional colors that may match the surrounding.
+         *     Or, if the sensor may be pointed randomly, provide some additional colors that may match.
          *     Possible choices are:
-         *         RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE;
+         *         RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE
+         *     Note: For the 2026 season ARTIFACT_PURPLE and ARTIFACT_GREEN have been added.
          *
          *     Note that in the example shown below, only some of the available colors are included.
          *     This will force any other colored region into one of these colors.
@@ -86,6 +91,8 @@ public class ConceptVisionColorSensor extends LinearOpMode
         PredominantColorProcessor colorSensor = new PredominantColorProcessor.Builder()
                 .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1))
                 .setSwatches(
+                        PredominantColorProcessor.Swatch.ARTIFACT_GREEN,
+                        PredominantColorProcessor.Swatch.ARTIFACT_PURPLE,
                         PredominantColorProcessor.Swatch.RED,
                         PredominantColorProcessor.Swatch.BLUE,
                         PredominantColorProcessor.Swatch.YELLOW,
@@ -98,7 +105,7 @@ public class ConceptVisionColorSensor extends LinearOpMode
          *
          *  - Add the colorSensor process created above.
          *  - Set the desired video resolution.
-         *      Since a high resolution will not improve this process, choose a lower resolution that is
+         *      Since a high resolution will not improve this process, choose a lower resolution
          *      supported by your camera. This will improve overall performance and reduce latency.
          *  - Choose your video source. This may be
          *      .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))  .....   for a webcam
@@ -111,24 +118,25 @@ public class ConceptVisionColorSensor extends LinearOpMode
                 .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                 .build();

-        telemetry.setMsTransmissionInterval(50);   // Speed up telemetry updates, Just use for debugging.
+        telemetry.setMsTransmissionInterval(100);  // Speed up telemetry updates, for debugging.
         telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);

-        // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
+        // WARNING: To view the stream preview on the Driver Station, this code runs in INIT mode.
         while (opModeIsActive() || opModeInInit())
         {
             telemetry.addLine("Preview on/off: 3 dots, Camera Stream\n");

-            // Request the most recent color analysis.
-            // This will return the closest matching colorSwatch and the predominant color in RGB, HSV and YCrCb color spaces.
+            // Request the most recent color analysis.  This will return the closest matching
+            // colorSwatch and the predominant color in the RGB, HSV and YCrCb color spaces.
             // The color space values are returned as three-element int[] arrays as follows:
             //  RGB   Red 0-255, Green 0-255, Blue 0-255
             //  HSV   Hue 0-180, Saturation 0-255, Value 0-255
             //  YCrCb Luminance(Y) 0-255, Cr 0-255 (center 128), Cb 0-255 (center 128)
             //
-            // Note: to take actions based on the detected color, simply use the colorSwatch or color space value in a comparison or switch.
-            //  eg:
-            //  if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) {... some code ...}
+            // Note: to take actions based on the detected color, simply use the colorSwatch or
+            // color space value in a comparison or switch.  eg:
+            //  if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) {.. some code ..}
             //  or:
             //  if (result.RGB[0] > 128) {... some code ...}

@@ -136,9 +144,12 @@ public class ConceptVisionColorSensor extends LinearOpMode

             // Display the Color Sensor result.
             telemetry.addData("Best Match", result.closestSwatch);
-            telemetry.addLine(String.format("RGB (%3d, %3d, %3d)", result.RGB[0], result.RGB[1], result.RGB[2]));
-            telemetry.addLine(String.format("HSV (%3d, %3d, %3d)", result.HSV[0], result.HSV[1], result.HSV[2]));
-            telemetry.addLine(String.format("YCrCb (%3d, %3d, %3d)", result.YCrCb[0], result.YCrCb[1], result.YCrCb[2]));
+            telemetry.addLine(String.format("RGB (%3d, %3d, %3d)",
+                    result.RGB[0], result.RGB[1], result.RGB[2]));
+            telemetry.addLine(String.format("HSV (%3d, %3d, %3d)",
+                    result.HSV[0], result.HSV[1], result.HSV[2]));
+            telemetry.addLine(String.format("YCrCb (%3d, %3d, %3d)",
+                    result.YCrCb[0], result.YCrCb[1], result.YCrCb[2]));
             telemetry.update();

             sleep(20);
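The comment above suggests branching on result.closestSwatch. A hedged expansion of that one-liner; the analysis call that produces result is elided from this hunk, so its surrounding context here is an assumption:

    // Sketch: act on the matched swatch. ARTIFACT_GREEN / ARTIFACT_PURPLE are the
    // swatches this release adds to the builder above.
    switch (result.closestSwatch) {
        case ARTIFACT_GREEN:
            telemetry.addLine("Green artifact in view");
            break;
        case ARTIFACT_PURPLE:
            telemetry.addLine("Purple artifact in view");
            break;
        default:
            telemetry.addLine("No artifact color matched");
            break;
    }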
@@ -0,0 +1,193 @@
/* Copyright (c) 2025 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;

import static com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot.xyzOrientation;

/*
 * This OpMode shows how to use the AndyMark IMU sensor. It assumes that the AndyMark IMU is
 * configured with the name "imu".
 *
 * The sample will display the current Yaw, Pitch and Roll of the robot.
 *
 * With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
 *   Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X)
 *   Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y)
 *   Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z)
 *
 * The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller).
 *
 * This specific sample DOES NOT assume that the AndyMark IMU is mounted on one of the three
 * orthogonal planes (X/Y, X/Z or Y/Z) OR that the AndyMark IMU has only been rotated in a range of
 * 90 degree increments.
 *
 * Note: if your AndyMark IMU is mounted Orthogonally (on an orthogonal surface, angled at some
 * multiple of 90 Degrees), then you should use the simpler SensorAndyMarkIMUOrthogonal sample in
 * this folder.
 *
 * But... If your AndyMark IMU is mounted Non-Orthogonally, you must specify one or more rotational
 * angles that transform a "Default" IMU orientation into your desired orientation. That is what is
 * illustrated here.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * Finally, edit this OpMode to use at least one angle around an axis to orient your AndyMark IMU.
 */
@TeleOp(name = "Sensor: AndyMark IMU Non-Orthogonal", group = "Sensor")
|
||||||
|
@Disabled // Comment this out to add to the OpMode list
|
||||||
|
public class SensorAndyMarkIMUNonOrthogonal extends LinearOpMode
|
||||||
|
{
|
||||||
|
// The AndyMark IMU sensor object
|
||||||
|
private IMU imu;
|
||||||
|
|
||||||
|
//----------------------------------------------------------------------------------------------
|
||||||
|
// Main logic
|
||||||
|
//----------------------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@Override public void runOpMode() throws InterruptedException {
|
||||||
|
|
||||||
|
// Retrieve and initialize the AndyMark IMU.
|
||||||
|
// This sample expects the AndyMark IMU to be named "imu".
|
||||||
|
imu = hardwareMap.get(IMU.class, "imu");
|
||||||
|
|
||||||
|
        /* Define how the AndyMark IMU is mounted to the robot to get the correct Yaw, Pitch, and
         * Roll values.
         *
         * You can apply up to three axis rotations to orient your AndyMark IMU according to how
         * it's mounted on the robot.
         *
         * The starting point for these rotations is the "Default" IMU orientation, which is:
         * 1) IMU laying flat on a horizontal surface, with the AndyMark logo facing up.
         * 2) Rotated such that the I2C port is facing forward on the robot.
         *
         * The order that the rotations are performed matters, so this sample shows doing them in
         * the order X, Y, then Z.
         * For specifying non-orthogonal IMU mounting orientations, we must temporarily use axes
         * defined relative to the AndyMark IMU itself, instead of the usual Robot Coordinate System
         * axes used for the results the AndyMark IMU gives us. In the starting orientation, the
         * AndyMark IMU axes are aligned with the Robot Coordinate System:
         *
         * X Axis: Starting at center of IMU, pointing out towards the side.
         * Y Axis: Starting at center of IMU, pointing out towards I2C port.
         * Z Axis: Starting at center of IMU, pointing up through the AndyMark logo.
         *
         * Positive rotation is defined by right-hand rule with thumb pointing in +ve direction on
         * axis.
         *
         * Some examples.
         *
         * ----------------------------------------------------------------------------------------------------------------------------------
         * Example A) Assume that the AndyMark IMU is mounted on a sloped plate at the back of the
         * robot, with the I2C port coming out the top of the AndyMark IMU. The plate is tilted UP
         * 60 degrees from horizontal.
         *
         * To get the "Default" IMU into this configuration you would just need a single rotation.
         * 1) Rotate the AndyMark IMU +60 degrees around the X axis to tilt up the front edge.
         * 2) No rotation around the Y or Z axes.
         *
         * So the X,Y,Z rotations would be 60,0,0
         *
         * ----------------------------------------------------------------------------------------------------------------------------------
         * Example B) Assume that the AndyMark IMU is laying flat on the chassis, but it has been
         * twisted 30 degrees towards the right front wheel.
         *
         * To get the "Default" IMU into this configuration you would just need a single rotation,
         * but around a different axis.
         * 1) No rotation around the X or Y axes.
         * 2) Rotate the AndyMark IMU -30 degrees (Clockwise) around the Z axis, since a positive
         *    angle would be Counter Clockwise.
         *
         * So the X,Y,Z rotations would be 0,0,-30
         *
         * ----------------------------------------------------------------------------------------------------------------------------------
         * Example C) Assume that the AndyMark IMU is mounted on a vertical plate on the right side
         * of the robot, with the AndyMark logo facing out, and the AndyMark IMU rotated so that the
         * I2C port is facing down 30 degrees towards the back wheels of the robot.
         *
         * Getting the "Default" IMU into this configuration will require several rotations.
         * 1) Rotate the AndyMark IMU +90 degrees around the X axis to get it standing upright with
         *    the logo pointing backwards on the robot
         * 2) Next, rotate the AndyMark IMU +90 degrees around the Y axis to get the logo facing to
         *    the right.
         * 3) Finally rotate the AndyMark IMU +120 degrees around the Z axis to take the I2C port
         *    from vertical to sloping down 30 degrees and facing towards the back of the robot.
         *
         * So the X,Y,Z rotations would be 90,90,120
         */

        // The next three lines define the desired axis rotations.
        // To Do: EDIT these values to match YOUR mounting configuration.
        double xRotation = 0;  // enter the desired X rotation angle here.
        double yRotation = 0;  // enter the desired Y rotation angle here.
        double zRotation = 0;  // enter the desired Z rotation angle here.
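        // (For instance, the hypothetical mounting described in Example C above would use 90, 90, and 120.)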

        Orientation imuRotation = xyzOrientation(xRotation, yRotation, zRotation);

        // Now initialize the AndyMark IMU with this mounting orientation.
        AndyMarkIMUOrientationOnRobot orientationOnRobot = new AndyMarkIMUOrientationOnRobot(imuRotation);
        imu.initialize(new IMU.Parameters(orientationOnRobot));

        // Loop and update the dashboard.
        while (!isStopRequested()) {
            telemetry.addData("IMU orientation", "X=%.1f, Y=%.1f, Z=%.1f \n", xRotation, yRotation, zRotation);

            // Check to see if a heading reset is requested.
            if (gamepad1.y) {
                telemetry.addData("Yaw", "Resetting\n");
                imu.resetYaw();
            } else {
                telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
            }

            // Retrieve rotational angles and velocities.
            YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
            AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);

            telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
            telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
            telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
            telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
            telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
            telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
            telemetry.update();
        }
    }
}
@@ -0,0 +1,144 @@
/* Copyright (c) 2025 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot;
import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot.I2cPortFacingDirection;
import com.qualcomm.hardware.andymark.AndyMarkIMUOrientationOnRobot.LogoFacingDirection;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;

/*
 * This OpMode shows how to use the AndyMark IMU sensor. It assumes that the AndyMark IMU is
 * configured with the name "imu".
 *
 * The sample will display the current Yaw, Pitch and Roll of the robot.
 *
 * With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
 *   Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X)
 *   Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y)
 *   Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z)
 *
 * The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller).
 *
 * This specific sample assumes that the AndyMark IMU is mounted on one of the three orthogonal
 * planes (X/Y, X/Z or Y/Z) and that the AndyMark IMU has only been rotated in 90 degree
 * increments.
 *
 * Note: if your AndyMark IMU is mounted on a surface angled at something other than a multiple of
 * 90 degrees (like 30 degrees), then you should use the SensorAndyMarkIMUNonOrthogonal sample in
 * this folder.
 *
 * This "Orthogonal" requirement means that:
 *
 * 1) The AndyMark logo printed on the top of the AndyMark IMU can ONLY be pointing in one of six directions:
 *    FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
 *
 * 2) The I2C port can only be pointing in one of the same six directions:
 *    FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
 *
 * So, to fully define how your AndyMark IMU is mounted to the robot, you must simply specify:
 *    LogoFacingDirection
 *    I2cPortFacingDirection
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * Finally, choose the two correct parameters to define how your AndyMark IMU is mounted and edit
 * this OpMode to use those parameters.
 */
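/*
 * For example, an AndyMark IMU lying flat with the logo up and the I2C port facing the robot's
 * right side would plausibly be declared as follows (a sketch; UP and RIGHT are assumed to be
 * among the six enum values listed above):
 *
 *     LogoFacingDirection    logoDirection = LogoFacingDirection.UP;
 *     I2cPortFacingDirection i2cDirection  = I2cPortFacingDirection.RIGHT;
 */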
@TeleOp(name = "Sensor: AndyMark IMU Orthogonal", group = "Sensor")
@Disabled   // Comment this out to add to the OpMode list
public class SensorAndyMarkIMUOrthogonal extends LinearOpMode
{
    // The AndyMark IMU sensor object
    private IMU imu;

    //----------------------------------------------------------------------------------------------
    // Main logic
    //----------------------------------------------------------------------------------------------

    @Override public void runOpMode() throws InterruptedException {

        // Retrieve and initialize the AndyMark IMU.
        // This sample expects the AndyMark IMU to be named "imu".
        imu = hardwareMap.get(IMU.class, "imu");

        /* Define how the AndyMark IMU is mounted to the robot to get the correct Yaw, Pitch, and
         * Roll values.
         *
         * Two input parameters are required to fully specify the Orientation.
         * The first parameter specifies the direction the AndyMark logo on the IMU is pointing.
         * The second parameter specifies the direction the I2C port on the IMU is pointing.
         * All directions are relative to the robot, and left/right is as-viewed from behind the robot.
         */

        /* The next two lines define the IMU orientation.
         * To Do: EDIT these two lines to match YOUR mounting configuration.
         */
        LogoFacingDirection logoDirection = LogoFacingDirection.UP;
        I2cPortFacingDirection i2cDirection = I2cPortFacingDirection.FORWARD;

        AndyMarkIMUOrientationOnRobot orientationOnRobot = new AndyMarkIMUOrientationOnRobot(logoDirection, i2cDirection);

        // Now initialize the AndyMark IMU with this mounting orientation.
        // Note: if you choose two conflicting directions, this initialization will cause a code exception.
        imu.initialize(new IMU.Parameters(orientationOnRobot));

        // Loop and update the dashboard.
        while (!isStopRequested()) {
            telemetry.addData("IMU orientation", "Logo=%s I2C=%s\n ", logoDirection, i2cDirection);

            // Check to see if a heading reset is requested.
            if (gamepad1.y) {
                telemetry.addData("Yaw", "Resetting\n");
                imu.resetYaw();
            } else {
                telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
            }

            // Retrieve rotational angles and velocities.
            YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
            AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);

            telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
            telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
            telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
            telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
            telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
            telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
            telemetry.update();
        }
    }
}
@@ -0,0 +1,85 @@
/*
Copyright (c) 2025 FIRST

All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.

Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

Neither the name of FIRST nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.

NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.hardware.andymark.AndyMarkTOF;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DistanceSensor;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;

/*
 * This OpMode illustrates how to use the AndyMark 2m TOF Lidar Sensor.
 *
 * The OpMode assumes that the sensor is configured with a name of "sensor_distance".
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
 */
@TeleOp(name = "Sensor: AndyMarkTOF", group = "Sensor")
@Disabled
public class SensorAndyMarkTOF extends LinearOpMode {

    private DistanceSensor sensorDistance;

    @Override
    public void runOpMode() {
        // You can use this as a regular DistanceSensor.
        sensorDistance = hardwareMap.get(DistanceSensor.class, "sensor_distance");

        // You can also cast this to an AndyMarkTOF if you want to use the added
        // methods associated with the AndyMarkTOF class.
        AndyMarkTOF sensorTimeOfFlight = (AndyMarkTOF) sensorDistance;
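        // (This downcast assumes the device configured as "sensor_distance" really is an
        // AndyMarkTOF; a DistanceSensor of any other type would throw a ClassCastException here.)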

        telemetry.addData(">>", "Press start to continue");
        telemetry.update();

        waitForStart();
        while (opModeIsActive()) {
            // Generic DistanceSensor methods.
            telemetry.addData("deviceName", sensorDistance.getDeviceName());
            telemetry.addData("range", String.format("%.01f mm", sensorDistance.getDistance(DistanceUnit.MM)));
            telemetry.addData("range", String.format("%.01f cm", sensorDistance.getDistance(DistanceUnit.CM)));
            telemetry.addData("range", String.format("%.01f m", sensorDistance.getDistance(DistanceUnit.METER)));
            telemetry.addData("range", String.format("%.01f in", sensorDistance.getDistance(DistanceUnit.INCH)));

            // AndyMarkTOF-specific methods.
            telemetry.addData("ID", String.format("%x", sensorTimeOfFlight.getModelID()));
            telemetry.addData("did time out", Boolean.toString(sensorTimeOfFlight.didTimeoutOccur()));

            telemetry.update();
        }
    }
}
@@ -48,18 +48,22 @@ import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
 *
 * There will be some variation in the values measured depending on the specific sensor you are using.
 *
 * If the color sensor supports adjusting the gain, you can increase the gain (a multiplier to make
 * the sensor report higher values) by holding down the A button on the gamepad, and decrease the
 * gain by holding down the B button on the gamepad. The AndyMark Proximity & Color Sensor does not
 * support this.
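 *
 * (For sensors that do support gain, a minimal sketch, assuming the sensor was obtained as a
 * NormalizedColorSensor, which exposes getGain()/setGain() in the SDK, and where "colorSensor"
 * is a hypothetical field name:
 *     NormalizedColorSensor ncs = (NormalizedColorSensor) colorSensor;
 *     if (gamepad1.a) ncs.setGain(ncs.getGain() + 0.005f);
 *     if (gamepad1.b) ncs.setGain(Math.max(1, ncs.getGain() - 0.005f));
 * )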
 *
 * If the color sensor has a light which is controllable from software, you can use the X button on
 * the gamepad to toggle the light on and off. The REV sensors don't support this, but instead have
 * a physical switch on them to turn the light on and off, beginning with REV Color Sensor V2. The
 * AndyMark Proximity & Color Sensor does not support this.
 *
 * If the color sensor also supports short-range distance measurements (usually via an infrared
 * proximity sensor), the reported distance will be written to telemetry. As of September 2025,
 * the only color sensors that support this are the ones from REV Robotics and the AndyMark
 * Proximity & Color Sensor. These infrared proximity sensor measurements are only useful at very
 * small distances, and are sensitive to ambient light and surface reflectivity. You should use a
 * different sensor if you need precise distance measurements.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
@@ -1,5 +1,5 @@
/*
 * Copyright (c) 2025 DigitalChickenLabs
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
@@ -25,20 +25,28 @@ import com.qualcomm.hardware.digitalchickenlabs.OctoQuad;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.util.ElapsedTime;

import org.firstinspires.ftc.robotcore.external.Telemetry;

/*
 * This OpMode illustrates how to use the DigitalChickenLabs OctoQuad Quad Encoder & Pulse Width I/F Module
 *
 * The OctoQuad has 8 input channels that can be used to read either Relative Quadrature Encoders or
 * Pulse-Width Absolute Encoder inputs. Relative Quadrature encoders are found on most FTC motors,
 * and some stand-alone position sensors like the REV Thru-Bore encoder. Pulse-Width encoders are
 * less common. The REV Thru-Bore encoder can provide its absolute position via a variable pulse width,
 * as can several sonar rangefinders such as the MaxBotix MB1000 series.
 *
 * Note: SDK 11.0+ requires that the OctoQuad is running firmware V3.0 or greater.
 * Visit https://github.com/DigitalChickenLabs/OctoQuad/tree/master/firmware for instructions
 * on how to upgrade your OctoQuad's firmware.
 *
 * This basic sample shows how an OctoQuad can be used to read the position of three Odometry pods
 * fitted with REV Thru-Bore encoders. For a more advanced example with additional OctoQuad
 * capabilities, see the SensorOctoQuadAdv sample.
 *
 * This OpMode assumes the OctoQuad is attached to an I2C interface named "octoquad" in the robot config.
 *
 * The code assumes the first three OctoQuad inputs are connected as follows
 * - Chan 0: for measuring forward motion on the left side of the robot.
@@ -54,21 +62,24 @@ import org.firstinspires.ftc.robotcore.external.Telemetry;
 *
 * See the sensor's product page: https://www.tindie.com/products/35114/
 */
@Disabled
@TeleOp(name = "OctoQuad Basic", group="OctoQuad")
public class SensorOctoQuad extends LinearOpMode {

    // Identify which encoder OctoQuad inputs are connected to each odometry pod.
    private final int ODO_LEFT  = 0; // Facing forward direction on left side of robot
    private final int ODO_RIGHT = 1; // Facing forward direction on right side of robot
    private final int ODO_PERP  = 2; // Facing perpendicular direction at the center of the robot

    // Declare the OctoQuad object.
    private OctoQuad octoquad;

    private int posLeft;
    private int posRight;
    private int posPerp;
    private int velLeft;
    private int velRight;
    private int velPerp;

    /**
     * This function is executed when this OpMode is selected from the Driver Station.
@@ -83,12 +94,15 @@ public class SensorOctoQuad extends LinearOpMode {
        telemetry.addData("OctoQuad Firmware Version ", octoquad.getFirmwareVersion());

        // Reverse the count-direction of any encoder that is not what you require.
        // e.g. if you push the robot forward and the left encoder counts down, then reverse it.
        octoquad.setSingleEncoderDirection(ODO_LEFT,  OctoQuad.EncoderDirection.REVERSE);
        octoquad.setSingleEncoderDirection(ODO_RIGHT, OctoQuad.EncoderDirection.FORWARD);
        octoquad.setSingleEncoderDirection(ODO_PERP,  OctoQuad.EncoderDirection.FORWARD);

        // Set the interval over which pulses are counted to determine velocity.
        octoquad.setAllVelocitySampleIntervals(50); // 50 mSec means 20 velocity updates per second.
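        // (Because of this interval, each velocity is reported in counts per 50 mSec sample, which
        //  is why readOdometryPods() below multiplies each reading by 20 to get counts per second.)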

        // Save any changes that are made, just in case there is a sensor power glitch.
        octoquad.saveParametersToFlash();

        telemetry.addLine("\nPress START to read encoder values");
@@ -98,7 +112,7 @@ public class SensorOctoQuad extends LinearOpMode {

        // Configure the telemetry for optimal display of data.
        telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);
        telemetry.setMsTransmissionInterval(100);

        // Set all the encoder inputs to zero.
        octoquad.resetAllPositions();
@@ -117,25 +131,26 @@ public class SensorOctoQuad extends LinearOpMode {
            readOdometryPods();

            // Display the values.
            telemetry.addData("Left P",  "%7d V :%6d CPS ", posLeft,  velLeft);
            telemetry.addData("Right P", "%7d V :%6d CPS ", posRight, velRight);
            telemetry.addData("Perp P",  "%7d V :%6d CPS ", posPerp,  velPerp);
            telemetry.update();
        }
    }

    private void readOdometryPods() {
        // For best performance, we should only perform ONE transaction with the OctoQuad each cycle.
        // This can be achieved in one of two ways:
        // 1) by doing a block data read once per control cycle
        // or
        // 2) by doing individual caching reads, but only reading each encoder value ONCE per cycle.
        //
        // Since method 2 is simplest, we will use it here.
        posLeft  = octoquad.readSinglePosition_Caching(ODO_LEFT);
        posRight = octoquad.readSinglePosition_Caching(ODO_RIGHT);
        posPerp  = octoquad.readSinglePosition_Caching(ODO_PERP);
        velLeft  = octoquad.readSingleVelocity_Caching(ODO_LEFT)  * 20; // scale up to cps
        velRight = octoquad.readSingleVelocity_Caching(ODO_RIGHT) * 20; // scale up to cps
        velPerp  = octoquad.readSingleVelocity_Caching(ODO_PERP)  * 20; // scale up to cps
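
        // (Method 1, a block read, would look roughly like the sketch below, using the
        //  readAllPositions() call that returns all 8 encoder readings at once:
        //      int[] positions = octoquad.readAllPositions();
        //      posLeft  = positions[ODO_LEFT];
        //      posRight = positions[ODO_RIGHT];
        //      posPerp  = positions[ODO_PERP];
        //  Both calls take almost the same amount of bus time per cycle.)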
    }
}
@@ -22,7 +22,6 @@
package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.hardware.digitalchickenlabs.OctoQuad;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.ElapsedTime;
@@ -37,50 +36,53 @@ import java.util.ArrayList;
import java.util.List;

/*
 * This OpMode illustrates how to use advanced features of the DigitalChickenLabs OctoQuad Quadrature
 * Encoder & Pulse Width Interface Module. The OctoQuad has 8 input channels that can be used to read
 * either Quadrature Encoder signals (like with most FTC motors) or Pulse-Width style Absolute Encoder
 * inputs (eg: REV Through Bore encoder pulse width output).
 *
 * This OpMode illustrates several of the more advanced features of an OctoQuad, including Pulse Width
 * measurement and velocity measurement. For a more basic OpMode just showing how to read encoder
 * inputs, see the SensorOctoQuad sample.
 *
 * This OpMode assumes the OctoQuad is attached to an I2C interface named "octoquad" in the robot config.
 *
 * One system that requires a lot of encoder inputs is a Swerve Drive system.
 * Such a drive requires two encoders per drive module:
 * One encoder for the Drive motor/wheel position and velocity, and one for the Steer motor angle.
 * The Drive motor requires a quadrature encoder, and the Steering motor requires an Absolute encoder.
 *
 * This sample will configure an OctoQuad for a swerve drive, as follows
 * - Configure 4 Relative Quadrature Encoder inputs and 4 Absolute Pulse-Width Encoder inputs
 * - Configure a velocity sample interval of 25 mSec
 * - Configure a pulse width input range of 1-1024 uSec for a REV Encoder's Absolute Pulse output.
 *
 * An OctoSwerveDrive class will be created to initialize the OctoQuad, and manage 4 swerve modules.
 * An OctoSwerveModule class will be created to configure and read a single swerve module.
 *
 * Wiring:
 * The OctoQuad will be configured to accept Quadrature encoders on the first four channels and
 * Absolute (pulse width) encoders on the last four channels.
 *
 * The standard encoder cable will connect each Drive Motor encoder to the OctoQuad. (channels 0-3)
 *
 * A modified version of the REV 6-4 pin cable (shipped with the encoder) connects the steering encoder
 * to the OctoQuad. (channels 4-7)
 *
 * To connect the Absolute position signal from a REV Thru-Bore encoder to the OctoQuad, the
 * provided 6-pin to 4-pin cable will need to be modified.
 * At the 6-pin connector end, move the yellow wire from its initial pin-3 position to the
 * ABS pin-5 position. This can be done easily by using a small flathead screwdriver to lift the
 * small white tab holding the metal wire crimp in place and gently pulling the wire out.
 * See the OctoSwerveDrive() constructor below for the correct wheel/channel assignment.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
 *
 * See the sensor's product page: https://www.tindie.com/products/35114/
 */
@Disabled
@TeleOp(name="OctoQuad Advanced", group="OctoQuad")
public class SensorOctoQuadAdv extends LinearOpMode {

    @Override
@@ -161,17 +163,20 @@ class OctoSwerveDrive {

        // Create the four OctoSwerveModules, and add them to a 'list' for easier reference.

        // Notes:
        // The wheel/channel order is set here. Ensure your encoder cables match.
        //
        // The angleOffset must be set for each module so a forward facing wheel shows a steer
        // angle of 0 degrees. The process for determining the correct angleOffsets is as follows:
        // 1) Set all the angleValues (below) to zero, then build and deploy the code.
        // 2) Rotate all the swerve drives so the wheels are forward in the desired 0 degree position.
        // 3) Run the OpMode, view the telemetry and record the "Degrees" value for each module.
        // 4) Update the code by entering the recorded Degrees value for each module as the
        //    angleOffset (last) parameter in the lines below.
        //
        // Rebuild and deploy the new code. Verify that the telemetry now indicates 0 degrees when
        // the wheels are facing forward. Also verify that the correct module values change
        // appropriately when you manually spin (drive) and rotate (steer) a wheel.
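        //
        // (e.g. if step 3 showed 35.2 degrees for the LF module, the calibrated line would become
        //      allModules.add(LeftFront = new OctoSwerveModule(octoquad, "LF ", 0, 35.2));
        //  where 35.2 is a purely illustrative value.)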

        allModules.add(LeftFront  = new OctoSwerveModule(octoquad, "LF ", 0, 0)); // Drive=0, Steer=4
        allModules.add(RightFront = new OctoSwerveModule(octoquad, "RF ", 1, 0)); // Drive=1, Steer=5
@@ -186,7 +191,7 @@ class OctoSwerveDrive {
     * Updates all 4 swerve modules
     */
    public void updateModules() {
        // Read the full OctoQuad data block, then pass it to each swerve module so it can update itself.
        octoquad.readAllEncoderData(encoderDataBlock);
        for (OctoSwerveModule module : allModules) {
            module.updateModule(encoderDataBlock);
@@ -219,39 +224,41 @@ class OctoSwerveModule {
    private final String name;
    private final int    channel;
    private final double angleOffset;

    private static final int    VELOCITY_SAMPLE_INTERVAL_MS = 25; // To provide 40 updates/Sec.
    private static final double DEGREES_PER_US = (360.0 / 1024.0); // REV Through Bore Encoder
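    // (The REV Through Bore encoder reports absolute position as a pulse of 1-1024 uS, matching the
    //  ChannelPulseWidthParams(1, 1024) set in the constructor below, so each uS spans 360/1024 degrees.)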
    private static final double VELOCITY_SAMPLES_PER_S = (1000.0 / VELOCITY_SAMPLE_INTERVAL_MS);

    // The correct drive and turn directions must be set for the Swerve Module.
    // Forward motion must generate an increasing drive count.
    // Counter Clockwise steer rotation must generate an increasing Steer Angle (degrees).
    private static final boolean INVERT_DRIVE_ENCODER = false;
    private static final boolean INVERT_STEER_ENCODER = false;

    /***
     * @param octoquad    provide access to configure OctoQuad
     * @param name        name used for telemetry display
     * @param quadChannel Quadrature encoder channel. Pulse Width channel is this + 4
     * @param angleOffset Angle to subtract from absolute encoder to calibrate zero position.
     */
    public OctoSwerveModule(OctoQuad octoquad, String name, int quadChannel, double angleOffset) {
        this.name = name;
        this.channel = quadChannel;
        this.angleOffset = angleOffset;

        // Set both encoder directions.
        octoquad.setSingleEncoderDirection(channel,
            INVERT_DRIVE_ENCODER ? OctoQuad.EncoderDirection.REVERSE : OctoQuad.EncoderDirection.FORWARD);
        octoquad.setSingleEncoderDirection(channel + 4,
            INVERT_STEER_ENCODER ? OctoQuad.EncoderDirection.REVERSE : OctoQuad.EncoderDirection.FORWARD);

        // Set the velocity sample interval on both encoders.
        octoquad.setSingleVelocitySampleInterval(channel, VELOCITY_SAMPLE_INTERVAL_MS);
        octoquad.setSingleVelocitySampleInterval(channel + 4, VELOCITY_SAMPLE_INTERVAL_MS);

        // Set up the Absolute encoder pulse range to match the REV Through Bore encoder.
        octoquad.setSingleChannelPulseWidthParams(channel + 4,
            new OctoQuad.ChannelPulseWidthParams(1, 1024));
    }

    /***
|
     * @param encoderDataBlock most recent full data block read from OctoQuad.
     */
    public void updateModule(OctoQuad.EncoderDataBlock encoderDataBlock) {
        driveCounts = encoderDataBlock.positions[channel];
        driveCountsPerSec = encoderDataBlock.velocities[channel] * VELOCITY_SAMPLES_PER_S;

        // Convert uS to degrees. (Any direction flip is handled by the encoder direction
        // configured in the constructor.)
        steerDegrees = AngleUnit.normalizeDegrees(
            (encoderDataBlock.positions[channel + 4] * DEGREES_PER_US) - angleOffset);
        // Convert uS/interval to degrees per second.
        steerDegreesPerSec = encoderDataBlock.velocities[channel + 4] *
            DEGREES_PER_US * VELOCITY_SAMPLES_PER_S;
    }

    /**
@@ -273,6 +282,7 @@ class OctoSwerveModule {
     * @param telemetry OpMode Telemetry object
     */
    public void show(Telemetry telemetry) {
        telemetry.addData(name, "%8.0f %7.0f %7.0f %6.0f",
            driveCounts, driveCountsPerSec, steerDegrees, steerDegreesPerSec);
    }
}
@@ -0,0 +1,255 @@
/*
 * Copyright (c) 2025 DigitalChickenLabs
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.hardware.digitalchickenlabs.OctoQuad;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.Telemetry;

/*
 * This OpMode demonstrates how to use the "absolute localizer" feature of the
 * Digital Chicken OctoQuad FTC Edition MK2.
 *
 * Note: The MK2 OctoQuad has an Inertial Measurement Unit (IMU) that the MK1 does not, so this
 * code will ONLY work with the MK2 version.
 *
 * It is STRONGLY recommended to read the Quick Start guide for the localizer feature, located here:
 * https://github.com/DigitalChickenLabs/OctoQuad/blob/master/documentation/
 *
 * This OpMode assumes that the OctoQuad is attached to an I2C interface named "octoquad" in the
 * robot configuration.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
 *
 * See the sensor's product page: https://www.tindie.com/products/37799/
 */

@Disabled
@TeleOp(name="OctoQuad Localizer", group="OctoQuad")
public class SensorOctoQuadLocalization extends LinearOpMode
{
    // #####################################################################################
    //
    // YOU MUST ADJUST THESE CONSTANTS FOR YOUR ROBOT!
    // SEE THE QUICKSTART GUIDE:
    // https://github.com/DigitalChickenLabs/OctoQuad/blob/master/documentation/
    //
    // AND THE TUNING OPMODES:
    // https://github.com/DigitalChickenLabs/OctoQuad/blob/master/code_examples/FTC
    //
    // #####################################################################################
    static final int DEADWHEEL_PORT_X = 0; // encoder port on OctoQuad
    static final int DEADWHEEL_PORT_Y = 1; // encoder port on OctoQuad
    static final OctoQuad.EncoderDirection DEADWHEEL_X_DIR = OctoQuad.EncoderDirection.FORWARD;
    static final OctoQuad.EncoderDirection DEADWHEEL_Y_DIR = OctoQuad.EncoderDirection.REVERSE;
    static final float X_TICKS_PER_MM = 12.34f;  // e.g. 19.89f for a goBILDA 4-Bar Odometry Pod
    static final float Y_TICKS_PER_MM = 12.34f;  // e.g. 19.89f for a goBILDA 4-Bar Odometry Pod
    static final float TCP_OFFSET_X_MM = 123.4f; // e.g.  147.0f from QuickStart Guide diagram
    static final float TCP_OFFSET_Y_MM = 123.4f; // e.g. -158.0f from QuickStart Guide diagram
    static final float IMU_SCALAR = 1.0f;        // Rotational scale factor.
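    // (Hypothetical worked example: an odometry pod with 2000 counts per revolution on a 32 mm
    //  diameter wheel gives 2000 / (Math.PI * 32) = 19.89 ticks per mm, the figure quoted above.)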
    // #####################################################################################

    // Conversion factor for radians --> degrees
    static final double RAD2DEG = 180 / Math.PI;

    // For tracking the number of CRC mismatches
    int badPackets = 0;
    int totalPackets = 0;
    boolean warn = false;

    // Data structure which will store the localizer data read from the OctoQuad
    OctoQuad.LocalizerDataBlock localizer = new OctoQuad.LocalizerDataBlock();

    /*
     * Main OpMode function
     */
    public void runOpMode()
    {
        // Begin by retrieving a handle to the device from the hardware map.
        OctoQuad oq = hardwareMap.get(OctoQuad.class, "octoquad");

        // Increase the telemetry update frequency to make the data display a bit less laggy.
        telemetry.setMsTransmissionInterval(50);
        telemetry.setDisplayFormat(Telemetry.DisplayFormat.HTML);

        // Configure a range of parameters for the absolute localizer.
        // --> Read the quick start guide for an explanation of these!!
        // IMPORTANT: these parameter changes will not take effect until the localizer is reset!
        oq.setSingleEncoderDirection(DEADWHEEL_PORT_X, DEADWHEEL_X_DIR);
        oq.setSingleEncoderDirection(DEADWHEEL_PORT_Y, DEADWHEEL_Y_DIR);
        oq.setLocalizerPortX(DEADWHEEL_PORT_X);
        oq.setLocalizerPortY(DEADWHEEL_PORT_Y);
        oq.setLocalizerCountsPerMM_X(X_TICKS_PER_MM);
        oq.setLocalizerCountsPerMM_Y(Y_TICKS_PER_MM);
        oq.setLocalizerTcpOffsetMM_X(TCP_OFFSET_X_MM);
        oq.setLocalizerTcpOffsetMM_Y(TCP_OFFSET_Y_MM);
        oq.setLocalizerImuHeadingScalar(IMU_SCALAR);
        oq.setLocalizerVelocityIntervalMS(25);
        oq.setI2cRecoveryMode(OctoQuad.I2cRecoveryMode.MODE_1_PERIPH_RST_ON_FRAME_ERR);

        // Resetting the localizer will apply the parameters configured above.
        // This function will NOT block until calibration of the IMU is complete -
        // for that you need to look at the status returned by getLocalizerStatus().
        oq.resetLocalizerAndCalibrateIMU();

        /*
         * Init Loop
         */
        while (opModeInInit())
        {
            telemetry.addData("OQ Firmware Version", oq.getFirmwareVersionString());
            telemetry.addData("Localizer Status", oq.getLocalizerStatus());
            telemetry.addData("Heading Axis Detection", oq.getLocalizerHeadingAxisChoice());
            telemetry.addLine(" ");

            warnIfNotTuned();

            telemetry.addLine("Press Play to start navigating");
            telemetry.update();

            sleep(100);
        }

        /*
         * Don't proceed to the main loop until calibration is complete
         */
        while (!isStopRequested() && (oq.getLocalizerStatus() != OctoQuad.LocalizerStatus.RUNNING))
        {
            telemetry.addLine("Waiting for IMU Calibration to complete\n");
            telemetry.addData("Localizer Status", oq.getLocalizerStatus());
            telemetry.update();
            sleep(100);
        }

        // Establish a starting position for the robot. This will be 0,0,0 by default, so this line
        // is rather redundant, but you could change it to be anything you want.
        oq.setLocalizerPose(0, 0, 0);

        // Not required for the localizer, but left in here since raw counts are displayed
        // on telemetry for debugging.
        oq.resetAllPositions();

        /*
         * MAIN LOOP
         */
        while (opModeIsActive())
        {
            // Use the Gamepad A/Cross button to teleport to a new location and heading.
            if (gamepad1.crossWasPressed())
            {
                oq.setLocalizerPose(200, 200, (float)(Math.PI/2.0f));
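                // (i.e. X = 200 mm, Y = 200 mm, heading = pi/2 radians, or 90 degrees Counter Clockwise)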
            }

            // Read updated data from the OctoQuad into the 'localizer' data structure.
            oq.readLocalizerData(localizer);

            // #################################################################################
            // IMPORTANT! Check whether the received CRC for the data is correct. An incorrect
            //            CRC indicates that the data was corrupted in transit and cannot be
            //            trusted. This can be caused by internal or external ESD.
            //
            //            If the CRC is NOT reported as OK, you should throw away the data and
            //            try to read again.
            //
            // NOTE: Raw odometry pod counts are displayed to verify the correct direction of rotation.
            //       When the robot is pushed FWD, the X pod counts must increase in value.
            //       When the robot is pushed LEFT, the Y pod counts must increase in value.
            //       Use the setSingleEncoderDirection() method to make any reversals.
            // #################################################################################
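            // (A minimal bounded-retry sketch, shown as one possible pattern rather than a
            //  required one:
            //      for (int attempt = 0; !localizer.crcOk && attempt < 3; attempt++) {
            //          oq.readLocalizerData(localizer);
            //      }
            //  This re-reads a few times at most instead of blocking the loop indefinitely.)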
|
||||||
|
if (localizer.crcOk)
|
||||||
|
{
|
||||||
|
warnIfNotTuned();
|
||||||
|
|
||||||
|
// Display Robot position data
|
||||||
|
telemetry.addData("Localizer Status", localizer.localizerStatus);
|
||||||
|
telemetry.addData("Heading deg", "%.2f", localizer.heading_rad * RAD2DEG);
|
||||||
|
telemetry.addData("Rotation dps", "%.2f", localizer.velHeading_radS * RAD2DEG);
|
||||||
|
telemetry.addData("X Pos mm", localizer.posX_mm);
|
||||||
|
telemetry.addData("Y Pos mm", localizer.posY_mm);
|
||||||
|
telemetry.addData("X Vel mm/s", localizer.velX_mmS);
|
||||||
|
telemetry.addData("Y Vel mm/s", localizer.velY_mmS);
|
||||||
|
telemetry.addLine("\nPress Gamepad A (Cross) to teleport");
|
||||||
|
|
||||||
|
// #############################################################################
|
||||||
|
// IMPORTANT!!
|
||||||
|
//
|
||||||
|
// These two encoder status lines will slow the loop down,
|
||||||
|
// so they can be removed once the encoder direction has been verified.
|
||||||
|
// #############################################################################
|
||||||
|
telemetry.addData("\nRaw X Pod Counts", oq.readSinglePosition_Caching(DEADWHEEL_PORT_X));
|
||||||
|
telemetry.addData("Raw Y Pod Counts", oq.readSinglePosition_Caching(DEADWHEEL_PORT_Y));
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
badPackets++;
|
||||||
|
telemetry.addLine("Data CRC not valid");
|
||||||
|
}
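            // A bad packet is simply discarded here; the "read again" advised above
            // happens naturally on the next pass through the main loop.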
            totalPackets++;

            // Print some statistics about CRC validation
            telemetry.addLine(String.format("%d CRC error(s) in %d packets", badPackets, totalPackets));

            // Send updated telemetry to the Driver Station
            telemetry.update();
        }
    }

    long lastWarnFlashTimeMs = 0;
    boolean warnFlash = false;

    void warnIfNotTuned()
    {
        String warnString = "";
        boolean warn = false;

        if (IMU_SCALAR == 1.0f)
        {
            warnString += "WARNING: IMU_SCALAR not tuned.<br>";
            warn = true;
        }
        if (X_TICKS_PER_MM == 12.34f || Y_TICKS_PER_MM == 12.34f)
        {
            warnString += "WARNING: TICKS_PER_MM not tuned.<br>";
            warn = true;
        }
        if (TCP_OFFSET_X_MM == 123.4f || TCP_OFFSET_Y_MM == 123.4f)
        {
            warnString += "WARNING: TCP_OFFSET not tuned.<br>";
            warn = true;
        }
        if (warn)
        {
            warnString += "<BR> - Read the code COMMENTS for tuning help.<BR>";

            // Toggle the highlight color at most once per second so the warning flashes
            if (System.currentTimeMillis() - lastWarnFlashTimeMs > 1000)
            {
                lastWarnFlashTimeMs = System.currentTimeMillis();
                warnFlash = !warnFlash;
            }

            telemetry.addLine(String.format("<b><font color='%s'>%s</font></b>",
                    warnFlash ? "red" : "white", warnString));
        }
    }
}
@@ -69,6 +69,7 @@ public class UtilityOctoQuadConfigMenu extends LinearOpMode
     TelemetryMenu.MenuElement menuAbsParams = new TelemetryMenu.MenuElement("Abs. Encoder Pulse Width Params", false);
     TelemetryMenu.IntegerOption[] optionsAbsParamsMax = new TelemetryMenu.IntegerOption[OctoQuad.NUM_ENCODERS];
     TelemetryMenu.IntegerOption[] optionsAbsParamsMin = new TelemetryMenu.IntegerOption[OctoQuad.NUM_ENCODERS];
+    TelemetryMenu.BooleanOption[] optionsAbsParamsWrapTracking = new TelemetryMenu.BooleanOption[OctoQuad.NUM_ENCODERS];
 
     TelemetryMenu.OptionElement optionProgramToFlash;
     TelemetryMenu.OptionElement optionSendToRAM;
@@ -161,9 +162,16 @@ public class UtilityOctoQuadConfigMenu extends LinearOpMode
                     OctoQuad.MIN_PULSE_WIDTH_US,
                     OctoQuad.MAX_PULSE_WIDTH_US,
                     params.min_length_us);
+
+            optionsAbsParamsWrapTracking[i] = new TelemetryMenu.BooleanOption(
+                    String.format("Chan %d wrap tracking enabled", i),
+                    octoquad.getSingleChannelPulseWidthTracksWrap(i),
+                    "yes",
+                    "no");
         }
         menuAbsParams.addChildren(optionsAbsParamsMin);
         menuAbsParams.addChildren(optionsAbsParamsMax);
+        menuAbsParams.addChildren(optionsAbsParamsWrapTracking);
 
         optionProgramToFlash = new TelemetryMenu.OptionElement()
         {
@@ -266,6 +274,7 @@ public class UtilityOctoQuadConfigMenu extends LinearOpMode
             params.min_length_us = optionsAbsParamsMin[i].getValue();
 
             octoquad.setSingleChannelPulseWidthParams(i, params);
+            octoquad.setSingleChannelPulseWidthTracksWrap(i, optionsAbsParamsWrapTracking[i].val);
         }
 
         octoquad.setI2cRecoveryMode((OctoQuad.I2cRecoveryMode) optionI2cResetMode.getValue());

README.md
@@ -1,6 +1,6 @@
 ## NOTICE
 
-This repository contains the public FTC SDK for the INTO THE DEEP (2024-2025) competition season.
+This repository contains the public FTC SDK for the DECODE (2025-2026) competition season.
 
 ## Welcome!
 This GitHub repository contains the source code that is used to build an Android app to control a *FIRST* Tech Challenge competition robot. To use this SDK, download/clone the entire project to your local computer.
@@ -59,6 +59,40 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
 
 # Release Information
 
+## Version 11.0 (20250827-105138)
+
+### Enhancements
+
+* OnBotJava now has the concept of a project.
+  A project is a collection of related files. A project may be chosen by selecting 'Example Project'
+  from the 'File type:' dropdown. Doing so will populate the dropdown to the immediate right with
+  a list of projects to choose from.
+  When a project is selected, all of its related files appear in the left pane of the workspace
+  underneath a directory with the chosen project name.
+  This is useful, for example, for ConceptExternalHardwareClass, which has a dependency upon
+  RobotHardware. This feature simplifies the usage of this Concept example by automatically
+  pulling in dependent classes.
+* Adds support for AndyMark ToF, IMU, and Color sensors.
+* The Driver Station app indicates if WiFi is disabled on the device.
+* Adds several features to the Color Processing software:
+  * DECODE colors `ARTIFACT_GREEN` and `ARTIFACT_PURPLE`
+  * Choice of the order of the pre-processing steps Erode and Dilate
+  * Best-fit preview shape called `circleFit`, an alternative to the existing `boxFit`
+  * Sample OpMode `ConceptVisionColorLocator_Circle`, an alternative to the renamed `ConceptVisionColorLocator_Rectangle`
+* The Driver Station app play button has a green background with a white play symbol if
+  * the Driver Station and Robot Controller are connected and have the same team number
+  * there is at least one gamepad attached
+  * the timer is enabled (for an Autonomous OpMode)
+* Updated AprilTag Library for DECODE. Notably, getCurrentGameTagLibrary() now returns DECODE tags.
+* Since the AprilTags on the Obelisk should not be used for localization, the ConceptAprilTagLocalization samples only use tags that do not have 'Obelisk' in their name.
+* OctoQuad I2C driver updated to support firmware v3.x
+  * Adds support for the odometry localizer on the MK2 hardware revision
+  * Adds the ability to track the position of an absolute encoder across multiple rotations (see the sketch after these notes)
+  * Note that some driver APIs have changed; minor updates to user software may be required
+  * Requires firmware v3.x. For instructions on updating firmware, see
+    https://github.com/DigitalChickenLabs/OctoQuad/blob/master/documentation/OctoQuadDatasheet_Rev_3.0C.pdf
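+
+  As a quick sketch of the new wrap-tracking calls (based on the `UtilityOctoQuadConfigMenu`
+  changes in this release; the already-initialized `octoquad` driver object and the channel
+  number below are assumptions for illustration):
+
+  ```java
+  int channel = 0; // hypothetical channel wired to an absolute (pulse-width) encoder
+
+  // Enable multi-rotation tracking for the absolute encoder on that channel...
+  octoquad.setSingleChannelPulseWidthTracksWrap(channel, true);
+
+  // ...and read the setting back to confirm it was applied.
+  boolean wrapTracking = octoquad.getSingleChannelPulseWidthTracksWrap(channel);
+  ```
+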
 
 ## Version 10.3 (20250625-090416)
 
 ### Breaking Changes
@@ -4,14 +4,14 @@ repositories {
 }
 
 dependencies {
-    implementation 'org.firstinspires.ftc:Inspection:10.3.0'
-    implementation 'org.firstinspires.ftc:Blocks:10.3.0'
-    implementation 'org.firstinspires.ftc:RobotCore:10.3.0'
-    implementation 'org.firstinspires.ftc:RobotServer:10.3.0'
-    implementation 'org.firstinspires.ftc:OnBotJava:10.3.0'
-    implementation 'org.firstinspires.ftc:Hardware:10.3.0'
-    implementation 'org.firstinspires.ftc:FtcCommon:10.3.0'
-    implementation 'org.firstinspires.ftc:Vision:10.3.0'
+    implementation 'org.firstinspires.ftc:Inspection:11.0.0'
+    implementation 'org.firstinspires.ftc:Blocks:11.0.0'
+    implementation 'org.firstinspires.ftc:RobotCore:11.0.0'
+    implementation 'org.firstinspires.ftc:RobotServer:11.0.0'
+    implementation 'org.firstinspires.ftc:OnBotJava:11.0.0'
+    implementation 'org.firstinspires.ftc:Hardware:11.0.0'
+    implementation 'org.firstinspires.ftc:FtcCommon:11.0.0'
+    implementation 'org.firstinspires.ftc:Vision:11.0.0'
     implementation 'androidx.appcompat:appcompat:1.2.0'
 }