Skip to content

Commit

Permalink
Working spike detection
Browse files Browse the repository at this point in the history
  • Loading branch information
alanwong9664 committed Jan 21, 2024
1 parent 9632b7e commit 9b493d8
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 25 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ public void init(HardwareMap hwMap) {
visionPortal = new VisionPortal.Builder().setCamera(hwMap.get(WebcamName.class, cameraName))
.setCameraResolution(new Size(640, 480))
.setLiveViewContainerId(viewPortID)
.addProcessor(aprilTag)
//.addProcessor(aprilTag)
.addProcessor(teamPropDetector)
.build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,24 @@
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;

import org.firstinspires.ftc.teamcode.ftc16072.OpModes.QQOpMode;
import org.firstinspires.ftc.teamcode.ftc16072.OpModes.VisionBase;
import org.firstinspires.ftc.teamcode.ftc16072.Robot;
import org.firstinspires.ftc.teamcode.ftc16072.Util.TeamPropLocation;

@Autonomous
public class PlacingOnTapeAuto extends OpMode {
public class PlacingOnTapeAuto extends VisionBase {
Robot robot = new Robot();
Trajectory trajectory;
private enum State {BEGIN, FORWARD,TURN,FORWARD_TO_TAPE, DONE}
State state = State.BEGIN;
@Override
public void init() {
// Restrict the robot to drive-train-only hardware before initializing,
// so unneeded mechanisms are not mapped for this autonomous routine.
robot.makeDriveOnly();
robot.init(hardwareMap);
super.init(); // for vision
}
public void init_loop(){
// Intentionally empty: no per-cycle work is needed between INIT and START.
// NOTE(review): vision processing presumably runs in the background via the
// VisionPortal started in init() — confirm against VisionBase.

}

@Override
Expand All @@ -26,7 +31,7 @@ public void loop() {
Pose2d currentPose = robot.nav.getPoseEstimate();
TeamPropLocation teamPropLocation = robot.cameraBack.getTeamPropPosition();

telemetry.addData("STATE", state);
telemetry.addData("team prop", teamPropLocation);
telemetry.addData("POSE", "x = %.2f y = %.2f h = %.1f", currentPose.getX(), currentPose.getY(), Math.toDegrees(currentPose.getHeading()));
switch (state) {
case BEGIN:
Expand All @@ -51,38 +56,56 @@ public void loop() {
robot.nav.follower.followTrajectory(trajectory);
break;
case FORWARD:
state = State.TURN;
if (teamPropLocation == TeamPropLocation.LEFT_SPIKE){
if (robot.nav.isDoneFollowing(currentPose)) {
robot.nav.turnAsync(-0.25*Math.PI);
}
} else if (teamPropLocation == TeamPropLocation.RIGHT_SPIKE) {
if (robot.nav.isDoneFollowing(currentPose)) {
robot.nav.turnAsync(0.25*Math.PI);

if (robot.nav.isDoneFollowing(currentPose)) {
if (teamPropLocation == TeamPropLocation.LEFT_SPIKE){
if (robot.nav.isDoneFollowing(currentPose)) {
robot.nav.turnAsync(-0.25*Math.PI);
}
} else if (teamPropLocation == TeamPropLocation.RIGHT_SPIKE) {
if (robot.nav.isDoneFollowing(currentPose)) {
robot.nav.turnAsync(0.25*Math.PI);
}

}
state = State.TURN;

}



break;


case TURN:
state = State.FORWARD_TO_TAPE;
if (teamPropLocation == TeamPropLocation.LEFT_SPIKE){
trajectory = robot.nav.trajectoryBuilder(currentPose, false)
.back(12)
.build();
if (robot.nav.isDoneFollowing(currentPose)) {


} else if (teamPropLocation == TeamPropLocation.RIGHT_SPIKE){
trajectory = robot.nav.trajectoryBuilder(currentPose, false)
.back(12)
.build();
if (teamPropLocation == TeamPropLocation.LEFT_SPIKE) {
trajectory = robot.nav.trajectoryBuilder(currentPose, false)
.back(7)
.build();


} else if (teamPropLocation == TeamPropLocation.RIGHT_SPIKE) {
trajectory = robot.nav.trajectoryBuilder(currentPose, false)
.back(7)
.build();
}
}
break;
case FORWARD_TO_TAPE:
state = State.DONE;
if (robot.nav.isDoneFollowing(currentPose)) {
state = State.DONE;
}



break;
case DONE:
break;

}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import com.acmerobotics.dashboard.config.Config;

import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibration;
import org.firstinspires.ftc.teamcode.ftc16072.Mechanisms.Camera;
import org.firstinspires.ftc.vision.VisionProcessor;
import org.opencv.core.Core;
import org.opencv.core.Mat;
Expand All @@ -16,10 +17,12 @@
@Config
public class TeamPropDetector implements VisionProcessor {
//makes "detection zones" for each tape zone
TeamPropLocation location = TeamPropLocation.NOT_DETECTED;
public Rect leftTapeDetectionZone = new Rect(100,100,75,75);
public Rect middleTapeDetectionZone = new Rect(400,130,75,75);
public Rect rightTapeDetectionZone = new Rect(800,100,75,75);
TeamPropLocation location = TeamPropLocation.LEFT_SPIKE;
public Rect leftTapeDetectionZone = new Rect(0,155,120,75);
public Rect middleTapeDetectionZone = new Rect(250,165,120,75);
public Rect rightTapeDetectionZone = new Rect(500,155,120,75);



//submats are smaller portions of the frame that you can get values from
Mat submat = new Mat();
Expand All @@ -28,6 +31,7 @@ public class TeamPropDetector implements VisionProcessor {
@Override
public void init(int width, int height, CameraCalibration calibration) {
// Intentionally empty: the detection zones are fixed Rect fields, so no
// one-time setup based on frame size or calibration is required here.
// The width/height/calibration parameters are unused.


}


Expand Down

0 comments on commit 9b493d8

Please sign in to comment.