FtcRobotController v9.0.1
CalKestis committed Sep 29, 2023
Parent: f3a5a54 | Commit: c023e97
Showing 8 changed files with 154 additions and 36 deletions.
4 changes: 2 additions & 2 deletions FtcRobotController/src/main/AndroidManifest.xml
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="51"
android:versionName="9.0">
android:versionCode="52"
android:versionName="9.0.1">

<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

@@ -44,6 +44,23 @@
* This OpMode illustrates the basics of AprilTag recognition and pose estimation,
* including Java Builder structures for specifying Vision parameters.
*
* For an introduction to AprilTags, see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
* "TagLibrary" will have their position and orientation information displayed. This default TagLibrary contains
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* To experiment with using AprilTags to navigate, try out these two driving samples:
* RobotAutoDriveToAprilTagOmni and RobotAutoDriveToAprilTagTank
*
* There are many "default" VisionPortal and AprilTag configuration parameters that may be overridden if desired.
* These default parameters are shown as comments in the code below.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
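As an illustrative aside (not part of this commit), here is a minimal sketch of how an OpMode might read the "ftcPose" values described above, in the same style as the sample's telemetry code. The helper name reportAprilTags() is hypothetical; it assumes an initialized AprilTagProcessor field named aprilTag, the OpMode's telemetry object, and imports for java.util.List and org.firstinspires.ftc.vision.apriltag.AprilTagDetection.

```java
// Illustrative sketch only; not part of the FtcRobotController sources.
private void reportAprilTags() {
    List<AprilTagDetection> currentDetections = aprilTag.getDetections();
    telemetry.addData("# AprilTags Detected", currentDetections.size());

    for (AprilTagDetection detection : currentDetections) {
        if (detection.metadata != null) {
            // Tag is in the TagLibrary, so the full ftcPose data is available.
            telemetry.addData("ID", "%d (%s)", detection.id, detection.metadata.name);
            telemetry.addData("Range", "%5.1f inches", detection.ftcPose.range);
            telemetry.addData("Bearing", "%3.0f degrees", detection.ftcPose.bearing);
            telemetry.addData("Yaw", "%3.0f degrees", detection.ftcPose.yaw);
        } else {
            // Tag was seen but is not in the TagLibrary, so only its ID is known.
            telemetry.addData("Unknown", "Tag ID %d is not in TagLibrary", detection.id);
        }
    }
    telemetry.update();
}
```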
@@ -106,6 +123,8 @@ private void initAprilTag() {

// Create the AprilTag processor.
aprilTag = new AprilTagProcessor.Builder()

// The following default settings are available to un-comment and edit as needed.
//.setDrawAxes(false)
//.setDrawCubeProjection(false)
//.setDrawTagOutline(true)
@@ -117,11 +136,19 @@ private void initAprilTag() {
// If you do not manually specify calibration parameters, the SDK will attempt
// to load a predefined calibration for your camera.
//.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)

// ... these parameters are fx, fy, cx, cy.

.build();

// Adjust Image Decimation to trade off detection range for detection rate.
// e.g. Some typical detection data using a Logitech C920 WebCam:
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames per second (default)
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames per second (default)
// Note: Decimation can be changed on-the-fly to adapt during a match.
//aprilTag.setDecimation(3);

// Create the vision portal by using a builder.
VisionPortal.Builder builder = new VisionPortal.Builder();

Expand All @@ -136,7 +163,7 @@ private void initAprilTag() {
//builder.setCameraResolution(new Size(640, 480));

// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
- //builder.enableCameraMonitoring(true);
+ //builder.enableLiveView(true);

// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
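To make the options above concrete, here is a hedged sketch (not from this commit) of how the two builders might look with a few of the commented-out defaults enabled. The camera name "Webcam 1", the 640x480 resolution, and the decimation value are arbitrary example choices; setCamera(), addProcessor(), android.util.Size, and WebcamName are assumed from the full sample rather than shown in this excerpt.

```java
// Illustrative sketch only; values are examples, not requirements.
AprilTagProcessor aprilTag = new AprilTagProcessor.Builder()
        .setDrawAxes(true)              // draw XYZ axes on each detected tag
        .setDrawTagOutline(true)        // outline each detected tag in the preview
        //.setLensIntrinsics(fx, fy, cx, cy)   // only if you have calibration values
        .build();

// Trade detection range for detection rate, per the decimation notes above.
aprilTag.setDecimation(2);

VisionPortal visionPortal = new VisionPortal.Builder()
        .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
        .setCameraResolution(new Size(640, 480))
        .enableLiveView(true)                               // show the RC preview
        .setStreamFormat(VisionPortal.StreamFormat.MJPEG)   // less bandwidth than YUY2
        .addProcessor(aprilTag)
        .build();
```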
@@ -44,6 +44,20 @@
* This OpMode illustrates the basics of AprilTag recognition and pose estimation, using
* the easy way.
*
* For an introduction to AprilTags, see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
* "TagLibrary" will have their position and orientation information displayed. This default TagLibrary contains
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* To experiment with using AprilTags to navigate, try out these two driving samples:
* RobotAutoDriveToAprilTagOmni and RobotAutoDriveToAprilTagTank
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
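As an illustrative aside (not part of this commit): the "easy way" referenced above is assumed here to be the SDK's easyCreateWithDefaults() helpers; treat the exact method names as an assumption and verify against the full ConceptAprilTagEasy source before copying.

```java
// Illustrative sketch only; method names assumed, not confirmed by this diff.
AprilTagProcessor aprilTag = AprilTagProcessor.easyCreateWithDefaults();
VisionPortal visionPortal = VisionPortal.easyCreateWithDefaults(
        hardwareMap.get(WebcamName.class, "Webcam 1"), aprilTag);

// Detections are then read the same way as in ConceptAprilTag.
for (AprilTagDetection detection : aprilTag.getDetections()) {
    if (detection.metadata != null) {
        telemetry.addData("Tag", "ID %d at range %.1f in", detection.id, detection.ftcPose.range);
    }
}
telemetry.update();
```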
@@ -53,6 +53,17 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {

private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera

// TFOD_MODEL_ASSET points to a model file stored in the project Assets location;
// this is only used in Android Studio when the model is bundled as an asset.
private static final String TFOD_MODEL_ASSET = "MyModelStoredAsAsset.tflite";
// TFOD_MODEL_FILE points to a model file stored in the Robot Controller's onboard storage;
// this is used when models are uploaded directly to the RC via the model upload interface.
private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/myCustomModel.tflite";
// Define the labels recognized in the model for TFOD (must be in training order!)
private static final String[] LABELS = {
"Pixel",
};

/**
* The variable to store our instance of the TensorFlow Object Detection processor.
*/
@@ -107,11 +118,16 @@ private void initTfod() {
// Create the TensorFlow processor by using a builder.
tfod = new TfodProcessor.Builder()

- // Use setModelAssetName() if the TF Model is built in as an asset.
- // Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
+ // With the following lines commented out, the default TfodProcessor Builder
+ // will load the default model for the season. To define a custom model to load,
+ // choose one of the following:
+ // Use setModelAssetName() if the custom TF Model is built in as an asset (AS only).
+ // Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
//.setModelAssetName(TFOD_MODEL_ASSET)
//.setModelFileName(TFOD_MODEL_FILE)

+ // The following default settings are available to un-comment and edit as needed to
+ // set parameters for custom models.
//.setModelLabels(LABELS)
//.setIsModelTensorFlow2(true)
//.setIsModelQuantized(true)
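For illustration only (not part of this commit), here is the custom-model path the new comments describe, using the TFOD_MODEL_FILE and LABELS constants added above. The setMinResultConfidence() call is an assumption about the wider TfodProcessor API, not something shown in this diff.

```java
// Illustrative sketch only: load a custom team model from the Robot Controller.
// Leave these setters out entirely to load the default model for the season.
TfodProcessor tfod = new TfodProcessor.Builder()
        .setModelFileName(TFOD_MODEL_FILE)   // model uploaded to /sdcard/FIRST/tflitemodels
        .setModelLabels(LABELS)              // labels in the same order used for training
        .build();

// Optionally require higher confidence before a detection is reported (assumed API).
tfod.setMinResultConfidence(0.75f);
```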
@@ -134,7 +150,7 @@ private void initTfod() {
//builder.setCameraResolution(new Size(640, 480));

// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
- //builder.enableCameraMonitoring(true);
+ //builder.enableLiveView(true);

// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
@@ -49,6 +49,13 @@
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
* The code assumes a Holonomic (Mecanum or X Drive) Robot.
*
* For an introduction to AprilTags, see the ftc-docs link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* The drive goal is to rotate to keep the Tag centered in the camera, while strafing to be directly in front of the tag, and
* driving towards the tag to achieve the desired distance.
* To reduce any motion blur (which will interrupt the detection process) the camera exposure is reduced to a very low value (5 ms).
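As a hedged aside (not part of this commit), the proportional control described above can be sketched as below. The gain values, the 12-inch stand-off distance, and the moveRobot() helper are assumptions for this sketch; the real sample defines its own constants and drive helper, and Range.clip() comes from com.qualcomm.robotcore.util.Range.

```java
// Illustrative sketch only: convert the tag's ftcPose errors into drive commands.
final double DESIRED_DISTANCE = 12.0;   // assumed stand-off distance, in inches
final double SPEED_GAIN  = 0.02;        // forward power per inch of range error (assumed)
final double TURN_GAIN   = 0.01;        // turn power per degree of bearing error (assumed)
final double STRAFE_GAIN = 0.015;       // strafe power per degree of yaw error (assumed)

double rangeError   = desiredTag.ftcPose.range - DESIRED_DISTANCE;
double headingError = desiredTag.ftcPose.bearing;
double yawError     = desiredTag.ftcPose.yaw;

// Clip each request to a safe maximum, then hand off to the holonomic drive mixer.
double drive  = Range.clip(rangeError * SPEED_GAIN, -0.5, 0.5);
double turn   = Range.clip(headingError * TURN_GAIN, -0.3, 0.3);
double strafe = Range.clip(-yawError * STRAFE_GAIN, -0.5, 0.5);
moveRobot(drive, strafe, turn);   // assumed helper that mixes powers for the four mecanum motors
```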
@@ -102,7 +109,7 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
private DcMotor rightBackDrive = null; // Used to control the right back drive wheel

private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
- private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
+ private static final int DESIRED_TAG_ID = -1; // Choose the tag you want to approach or set to -1 for ANY tag.
private VisionPortal visionPortal; // Used to manage the video source.
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
@@ -150,25 +157,33 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
// Step through the list of detected tags and look for a matching tag
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
for (AprilTagDetection detection : currentDetections) {
- if ((detection.metadata != null) &&
- ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) ){
- targetFound = true;
- desiredTag = detection;
- break; // don't look any further.
+ // Look to see if we have size info on this tag.
+ if (detection.metadata != null) {
+ // Check to see if we want to track towards this tag.
+ if ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) {
+ // Yes, we want to use this tag.
+ targetFound = true;
+ desiredTag = detection;
+ break; // don't look any further.
+ } else {
+ // This tag is in the library, but we do not want to track it right now.
+ telemetry.addData("Skipping", "Tag ID %d is not desired", detection.id);
+ }
} else {
- telemetry.addData("Unknown Target", "Tag ID %d is not in TagLibrary\n", detection.id);
+ // This tag is NOT in the library, so we don't have enough information to track to it.
+ telemetry.addData("Unknown", "Tag ID %d is not in TagLibrary", detection.id);
}
}
}

// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("\n>","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Found", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
telemetry.addData("Yaw","%3.0f degrees", desiredTag.ftcPose.yaw);
} else {
telemetry.addData(">","Drive using joysticks to find valid target\n");
telemetry.addData("\n>","Drive using joysticks to find valid target\n");
}

// If the Left Bumper is being pressed AND we have found the desired target, drive to the target automatically.
@@ -243,6 +258,15 @@ private void initAprilTag() {
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();

// Adjust Image Decimation to trade off detection range for detection rate.
// e.g. Some typical detection data using a Logitech C920 WebCam:
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames per second
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames per second
// Note: Decimation can be changed on-the-fly to adapt during a match.
aprilTag.setDecimation(2);

// Create the vision portal by using a builder.
if (USE_WEBCAM) {
visionPortal = new VisionPortal.Builder()
@@ -49,6 +49,13 @@
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
* The code assumes a basic two-wheel (Tank) Robot Drivetrain.
*
* For an introduction to AprilTags, see the ftc-docs link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* The driving goal is to rotate to keep the tag centered in the camera, while driving towards the tag to achieve the desired distance.
* To reduce any motion blur (which will interrupt the detection process) the camera exposure is reduced to a very low value (5 ms).
* You can determine the best exposure and gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
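For illustration only (not part of this commit): the tank version needs just drive and turn corrections, since it cannot strafe. The gains, the 12-inch stand-off distance, and the leftDrive/rightDrive mixing are assumptions for this sketch.

```java
// Illustrative sketch only: range error sets forward power, bearing error sets turn power.
double rangeError   = desiredTag.ftcPose.range - 12.0;   // assumed 12-inch stand-off
double headingError = desiredTag.ftcPose.bearing;

double drive = Range.clip(rangeError * 0.02, -0.5, 0.5);      // assumed gains
double turn  = Range.clip(headingError * 0.01, -0.25, 0.25);

// Simple differential mix for a two-wheel drivetrain.
leftDrive.setPower(drive + turn);
rightDrive.setPower(drive - turn);
```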
@@ -97,7 +104,7 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
private DcMotor rightDrive = null; // Used to control the right drive wheel

private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
- private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
+ private static final int DESIRED_TAG_ID = -1; // Choose the tag you want to approach or set to -1 for ANY tag.
private VisionPortal visionPortal; // Used to manage the video source.
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
@@ -140,24 +147,32 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
// Step through the list of detected tags and look for a matching tag
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
for (AprilTagDetection detection : currentDetections) {
- if ((detection.metadata != null) &&
- ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) ){
- targetFound = true;
- desiredTag = detection;
- break; // don't look any further.
+ // Look to see if we have size info on this tag.
+ if (detection.metadata != null) {
+ // Check to see if we want to track towards this tag.
+ if ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) {
+ // Yes, we want to use this tag.
+ targetFound = true;
+ desiredTag = detection;
+ break; // don't look any further.
+ } else {
+ // This tag is in the library, but we do not want to track it right now.
+ telemetry.addData("Skipping", "Tag ID %d is not desired", detection.id);
+ }
} else {
- telemetry.addData("Unknown Target", "Tag ID %d is not in TagLibrary\n", detection.id);
+ // This tag is NOT in the library, so we don't have enough information to track to it.
+ telemetry.addData("Unknown", "Tag ID %d is not in TagLibrary", detection.id);
}
}
}

// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("\n>","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Found", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
} else {
telemetry.addData(">","Drive using joysticks to find valid target\n");
telemetry.addData("\n>","Drive using joysticks to find valid target\n");
}

// If the Left Bumper is being pressed AND we have found the desired target, drive to the target automatically.
@@ -218,6 +233,15 @@ private void initAprilTag() {
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();

// Adjust Image Decimation to trade off detection range for detection rate.
// e.g. Some typical detection data using a Logitech C920 WebCam:
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames per second
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames per second
// Note: Decimation can be changed on-the-fly to adapt during a match.
aprilTag.setDecimation(2);

// Create the vision portal by using a builder.
if (USE_WEBCAM) {
visionPortal = new VisionPortal.Builder()
13 changes: 13 additions & 0 deletions README.md
@@ -59,6 +59,19 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc

# Release Information

## Version 9.0.1 (20230929-083754)

### Enhancements
* Updates AprilTag samples to include Decimation and additional comments. Also corrects misleading tag ID warnings.
* Increases maximum size of Blocks inline comments to 140 characters.
* Adds Blocks sample BasicOmniOpMode.
* Updates CENTERSTAGE library AprilTag orientation quaternions.
  * Thanks [@FromenActual](https://github.com/FromenActual)
* Updates Java sample ConceptTensorFlowObjectDetection.java to include missing elements needed for custom model support.

### Bug Fixes
* Fixes a problem where, after October 1, the Driver Station would report as obsolete on v9.0 and prompt the user to update.

## Version 9.0 (20230830-154348)

### Breaking Changes