Skip to content

Commit 2390680

Browse files
authored
Merge pull request #344 from FIRST-Tech-Challenge/20220907-131644-release-candidate
FtcRobotController v8.0
2 parents aba72e5 + e0282fc commit 2390680

24 files changed

+822
-426
lines changed

FtcRobotController/src/main/AndroidManifest.xml

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,14 @@
11
<?xml version="1.0" encoding="utf-8"?>
22
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
33
xmlns:tools="http://schemas.android.com/tools"
4-
android:versionCode="45"
5-
android:versionName="7.2">
4+
android:versionCode="47"
5+
android:versionName="8.0">
66

77
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
88

99
<application
1010
android:allowBackup="true"
1111
android:largeHeap="true"
12-
android:extractNativeLibs="true"
1312
android:icon="@drawable/ic_launcher"
1413
android:label="@string/app_name"
1514
android:theme="@style/AppThemeRedRC"

FtcRobotController/src/main/assets/qcar_config.xsd

Whitespace-only changes.
Lines changed: 142 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,142 @@
1+
/* Copyright (c) 2022 FIRST. All rights reserved.
2+
*
3+
* Redistribution and use in source and binary forms, with or without modification,
4+
* are permitted (subject to the limitations in the disclaimer below) provided that
5+
* the following conditions are met:
6+
*
7+
* Redistributions of source code must retain the above copyright notice, this list
8+
* of conditions and the following disclaimer.
9+
*
10+
* Redistributions in binary form must reproduce the above copyright notice, this
11+
* list of conditions and the following disclaimer in the documentation and/or
12+
* other materials provided with the distribution.
13+
*
14+
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
15+
* promote products derived from this software without specific prior written permission.
16+
*
17+
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
18+
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19+
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
20+
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21+
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
22+
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23+
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24+
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25+
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26+
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27+
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28+
*/
29+
30+
package org.firstinspires.ftc.robotcontroller.external.samples;
31+
32+
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
33+
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
34+
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
35+
import com.qualcomm.robotcore.util.Range;
36+
37+
/**
38+
* This OpMode Sample illustrates how to use an external "hardware" class to modularize all the robot's sensors and actuators.
39+
* This approach is very efficient because the same hardware class can be used by all of your teleop and autonomous OpModes
40+
* without requiring many copy & paste operations. Once you have defined and tested the hardware class with one OpMode,
41+
* it is instantly available to other OpModes.
42+
*
43+
* The real benefit of this approach is that as you tweak your robot hardware, you only need to make changes in ONE place (the Hardware Class).
44+
* So, to be effective you should put as much of your hardware setup and access code as possible in the hardware class.
45+
* Essentially anything you do with hardware in BOTH Teleop and Auto should likely go in the hardware class.
46+
*
47+
* The Hardware Class is created in a separate file, and then an "instance" of this class is created in each OpMode.
48+
* In order for the class to do typical OpMode things (like send telemetry data) it must be passed a reference to the
49+
* OpMode object when it's created, so it can access all core OpMode functions. This is illustrated below.
50+
*
51+
* In this concept sample, the hardware class file is called RobotHardware.java and it must accompany this sample OpMode.
52+
* So, if you copy ConceptExternalHardwareClass.java into TeamCode (using Android Studio or OnBotJava) then RobotHardware.java
53+
* must also be copied to the same location (maintaining its name).
54+
*
55+
* For comparison purposes, this sample and its accompanying hardware class duplicates the functionality of the
56+
* RobotTelopPOV_Linear opmode. It assumes three motors (left_drive, right_drive and arm) and two servos (left_hand and right_hand)
57+
*
58+
* View the RobotHardware.java class file for more details
59+
*
60+
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
61+
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
62+
*
63+
* In OnBot Java, add a new OpMode, drawing from this Sample; select TeleOp.
64+
* Also add another new file named RobotHardware.java, drawing from the Sample with that name; select Not an OpMode.
65+
*/
66+
67+
@TeleOp(name="Concept: Robot Hardware Class", group="Robot")
68+
@Disabled
69+
public class ConceptExternalHardwareClass extends LinearOpMode {
70+
71+
// Create a RobotHardware object to be used to access robot hardware.
72+
// Prefix any hardware functions with "robot." to access this class.
73+
RobotHardware robot = new RobotHardware(this);
74+
75+
@Override
76+
public void runOpMode() {
77+
double drive = 0;
78+
double turn = 0;
79+
double arm = 0;
80+
double handOffset = 0;
81+
82+
// initialize all the hardware, using the hardware class. See how clean and simple this is?
83+
robot.init();
84+
85+
// Send telemetry message to signify robot waiting;
86+
// Wait for the game to start (driver presses PLAY)
87+
waitForStart();
88+
89+
// run until the end of the match (driver presses STOP)
90+
while (opModeIsActive()) {
91+
92+
// Run wheels in POV mode (note: The joystick goes negative when pushed forward, so negate it)
93+
// In this mode the Left stick moves the robot fwd and back, the Right stick turns left and right.
94+
// This way it's also easy to just drive straight, or just turn.
95+
drive = -gamepad1.left_stick_y;
96+
turn = gamepad1.right_stick_x;
97+
98+
// Combine drive and turn for blended motion. Use RobotHardware class
99+
robot.driveRobot(drive, turn);
100+
101+
// Use gamepad left & right Bumpers to open and close the claw
102+
// Use the SERVO constants defined in RobotHardware class.
103+
// Each time around the loop, the servos will move by a small amount.
104+
// Limit the total offset to half of the full travel range
105+
if (gamepad1.right_bumper)
106+
handOffset += robot.HAND_SPEED;
107+
else if (gamepad1.left_bumper)
108+
handOffset -= robot.HAND_SPEED;
109+
handOffset = Range.clip(handOffset, -0.5, 0.5);
110+
111+
// Move both servos to new position. Use RobotHardware class
112+
robot.setHandPositions(handOffset);
113+
114+
// Use gamepad buttons to move arm up (Y) and down (A)
115+
// Use the MOTOR constants defined in RobotHardware class.
116+
if (gamepad1.y)
117+
arm = robot.ARM_UP_POWER;
118+
else if (gamepad1.a)
119+
arm = robot.ARM_DOWN_POWER;
120+
else
121+
arm = 0;
122+
123+
robot.setArmPower(arm);
124+
125+
// Send telemetry messages to explain controls and show robot status
126+
telemetry.addData("Drive", "Left Stick");
127+
telemetry.addData("Turn", "Right Stick");
128+
telemetry.addData("Arm Up/Down", "Y & A Buttons");
129+
telemetry.addData("Hand Open/Closed", "Left and Right Bumpers");
130+
telemetry.addData("-", "-------");
131+
132+
telemetry.addData("Drive Power", "%.2f", drive);
133+
telemetry.addData("Turn Power", "%.2f", turn);
134+
telemetry.addData("Arm Power", "%.2f", arm);
135+
telemetry.addData("Hand Position", "Offset = %.2f", handOffset);
136+
telemetry.update();
137+
138+
// Pace this loop so hands move at a reasonable speed.
139+
sleep(50);
140+
}
141+
}
142+
}

FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptMotorBulkRead.java

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -46,21 +46,28 @@
4646
4747
Three scenarios are tested:
4848
Cache Mode = OFF This is the normal default, where no cache is used, and every read produces a discrete transaction with
49-
an expansion hub, which is the slowest approach.
49+
an expansion hub, which is the slowest approach, but guarantees that the value is as fresh (recent) as possible.
50+
5051
Cache Mode = AUTO This mode will attempt to minimize the number of discrete read commands, by performing bulk-reads
51-
and then returning values that have been cached. The cache is updated automatically whenever a specific read operation is repeated.
52-
This mode will always return fresh data, but it may perform more bulk-reads than absolutely required.
53-
Extra reads will be performed if multiple identical encoder/velocity reads are performed in one control cycle.
52+
and then returning values that have been cached. The cache is updated automatically whenever any specific encoder is re-read.
53+
This mode will always return new data, but it may perform more bulk-reads than absolutely required.
54+
Extra reads will be performed if multiple encoder/velocity reads are performed on the same encoder in one control cycle.
5455
This mode is a good compromise between the OFF and MANUAL modes.
55-
Cache Mode = MANUAL This mode enables the user's code to determine the best time to refresh the cached bulk-read data.
56-
Well organized code can place all the sensor reads in one location, and then just reset the cache once per control cycle.
57-
The approach will produce the shortest cycle times, but it does require the user to manually clear the cache.
56+
Note: If there are significant user-program delays between encoder reads, the cached value may not be fresh (recent).
57+
You can issue a clearBulkCache() call at any time to force a fresh bulk-read on the next encoder read.
58+
59+
Cache Mode = MANUAL This mode requires the user's code to determine the best time to clear the cached bulk-read data.
60+
Well organized code will reset the cache once at the beginning of the control cycle, and then immediately read and store all the encoder values.
61+
This approach will produce the shortest cycle times, but it does require the user to manually clear the cache.
62+
Since NO automatic Bulk-Reads are performed, neglecting to clear the bulk cache will result in the same values being returned
63+
each time an encoder read is performed.
5864
5965
-------------------------------------
6066
6167
General tip to speed up your control cycles:
68+
6269
No matter what method you use to read encoders and other inputs, you should try to
63-
avoid reading the same input multiple times around a control loop.
70+
avoid reading the same encoder input multiple times around a control loop.
6471
Under normal conditions, this will slow down the control loop.
6572
The preferred method is to read all the required inputs ONCE at the beginning of the loop,
6673
and save the values in variables that can be used by other parts of the control code.

FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java

Lines changed: 40 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -40,8 +40,8 @@
4040
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
4141

4242
/**
43-
* This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
44-
* determine the position of the Freight Frenzy game elements.
43+
* This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to
44+
* determine which image is being presented to the robot.
4545
*
4646
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
4747
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
@@ -52,23 +52,21 @@
5252
@TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept")
5353
@Disabled
5454
public class ConceptTensorFlowObjectDetection extends LinearOpMode {
55-
/* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
56-
* the following 4 detectable objects
57-
* 0: Ball,
58-
* 1: Cube,
59-
* 2: Duck,
60-
* 3: Marker (duck location tape marker)
61-
*
62-
* Two additional model assets are available which only contain a subset of the objects:
63-
* FreightFrenzy_BC.tflite 0: Ball, 1: Cube
64-
* FreightFrenzy_DM.tflite 0: Duck, 1: Marker
65-
*/
66-
private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
55+
56+
/*
57+
* Specify the source for the Tensor Flow Model.
58+
* If the TensorFlowLite object model is included in the Robot Controller App as an "asset",
59+
* the OpMode must load it using loadModelFromAsset(). However, if a team-generated model
60+
* has been downloaded to the Robot Controller's SD FLASH memory, it must be loaded using loadModelFromFile().
61+
* Here we assume it's an Asset. Also see method initTfod() below.
62+
*/
63+
private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite";
64+
// private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite";
65+
6766
private static final String[] LABELS = {
68-
"Ball",
69-
"Cube",
70-
"Duck",
71-
"Marker"
67+
"1 Bolt",
68+
"2 Bulb",
69+
"3 Panel"
7270
};
7371

7472
/*
@@ -114,11 +112,11 @@ public void runOpMode() {
114112

115113
// The TensorFlow software will scale the input images from the camera to a lower resolution.
116114
// This can result in lower detection accuracy at longer distances (> 55cm or 22").
117-
// If your target is at distance greater than 50 cm (20") you can adjust the magnification value
115+
// If your target is at distance greater than 50 cm (20") you can increase the magnification value
118116
// to artificially zoom in to the center of image. For best results, the "aspectRatio" argument
119117
// should be set to the value of the images used to create the TensorFlow Object Detection model
120118
// (typically 16/9).
121-
tfod.setZoom(2.5, 16.0/9.0);
119+
tfod.setZoom(1.0, 16.0/9.0);
122120
}
123121

124122
/** Wait for the game to begin */
@@ -133,19 +131,22 @@ public void runOpMode() {
133131
// the last time that call was made.
134132
List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
135133
if (updatedRecognitions != null) {
136-
telemetry.addData("# Object Detected", updatedRecognitions.size());
137-
138-
// step through the list of recognitions and display boundary info.
139-
int i = 0;
140-
for (Recognition recognition : updatedRecognitions) {
141-
telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
142-
telemetry.addData(String.format(" left,top (%d)", i), "%.03f , %.03f",
143-
recognition.getLeft(), recognition.getTop());
144-
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
145-
recognition.getRight(), recognition.getBottom());
146-
i++;
147-
}
148-
telemetry.update();
134+
telemetry.addData("# Objects Detected", updatedRecognitions.size());
135+
136+
// step through the list of recognitions and display image position/size information for each one
137+
// Note: "Image number" refers to the randomized image orientation/number
138+
for (Recognition recognition : updatedRecognitions) {
139+
double col = (recognition.getLeft() + recognition.getRight()) / 2 ;
140+
double row = (recognition.getTop() + recognition.getBottom()) / 2 ;
141+
double width = Math.abs(recognition.getRight() - recognition.getLeft()) ;
142+
double height = Math.abs(recognition.getTop() - recognition.getBottom()) ;
143+
144+
telemetry.addData(""," ");
145+
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 );
146+
telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col);
147+
telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height);
148+
}
149+
telemetry.update();
149150
}
150151
}
151152
}
@@ -166,8 +167,6 @@ private void initVuforia() {
166167

167168
// Instantiate the Vuforia engine
168169
vuforia = ClassFactory.getInstance().createVuforia(parameters);
169-
170-
// Loading trackables is not necessary for the TensorFlow Object Detection engine.
171170
}
172171

173172
/**
@@ -177,10 +176,14 @@ private void initTfod() {
177176
int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
178177
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
179178
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
180-
tfodParameters.minResultConfidence = 0.8f;
179+
tfodParameters.minResultConfidence = 0.75f;
181180
tfodParameters.isModelTensorFlow2 = true;
182-
tfodParameters.inputSize = 320;
181+
tfodParameters.inputSize = 300;
183182
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
183+
184+
// Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio
185+
// Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH.
184186
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
187+
// tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS);
185188
}
186189
}

0 commit comments

Comments
 (0)