/**
 * Creates the object and starts the camera server
 *
 * @param usbPort
 *            The USB camera port number. '0' for default.
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (int usbPort, CameraModel camera) {
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .startAutomaticCapture("Vision Camera", usbPort);

    // Based on the selected camera type, set the fields of view and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel) {
        // case LIFECAM: // Not enough information to properly find this data.
        //     // see above.
        //     this.horizontalFieldOfView =
        //     this.verticalFieldOfView =
        //     this.focalLength =
        //     break;
        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
    }
}
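// The fields of view stored above are what make angle estimation possible
// later on. A minimal sketch (not part of the original class) of how a
// target's pixel offset might be converted into a yaw angle; the method
// name and the frameWidth parameter are assumptions.
public double pixelToYawDegrees (double targetPixelX, double frameWidth) {
    // Offset from the image center, normalized to the range [-1, 1].
    double normalizedOffset = (targetPixelX - frameWidth / 2.0)
            / (frameWidth / 2.0);
    // A full half-frame offset corresponds to half the horizontal field
    // of view, so scale accordingly.
    return normalizedOffset * (this.horizontalFieldOfView / 2.0);
}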
public void initDefaultCommand() {
    visionThread = new Thread(() -> {
        // Get the UsbCamera from CameraServer
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        // Set the resolution
        camera.setResolution(640, 480);

        // Get a CvSink. This will capture Mats from the camera
        CvSink cvSink = CameraServer.getInstance().getVideo();
        // Setup a CvSource. This will send images back to the Dashboard
        CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Mats are very memory expensive. Let's reuse this Mat.
        Mat mat = new Mat();

        // This cannot be 'true'. The program will never exit if it is. This
        // lets the robot stop this thread when restarting robot code or
        // deploying.
        while (!Thread.interrupted()) {
            // Tell the CvSink to grab a frame from the camera and put it
            // in the source mat. If there is an error notify the output.
            if (cvSink.grabFrame(mat) == 0) {
                // Send the output the error.
                outputStream.notifyError(cvSink.getError());
                // skip the rest of the current iteration
                continue;
            }
            // Put a rectangle on the image
            Imgproc.rectangle(mat, new Point(100, 100), new Point(400, 400),
                    new Scalar(255, 255, 255), 5);
            // Give the output stream a new image to display
            outputStream.putFrame(mat);
        }
    });
    visionThread.setDaemon(true);
    visionThread.start();
}
public void robotInit() {
    RobotMap.init();
    drivetrain = new Drivetrain();
    climber = new Climber();
    // fuel = new Fuel();
    gear = new Gear();
    oi = new OI();

    // initializes camera server
    server = CameraServer.getInstance();
    // cameraInit();
    // server.startAutomaticCapture(0);

    // autonomousCommand = (Command) new AutoMiddleHook();
    autonomousCommand = (Command) new AutoBaseline();

    // resets all sensors
    resetAllSensors();

    if (RobotConstants.isTestingEnvironment)
        updateTestingEnvironment();
    updateSensorDisplay();
}
public void enable() {
    new Thread(() -> {
        cvSink = CameraServer.getInstance().getVideo(cam2);
        while (!Thread.interrupted()) {
            SmartDashboard.putString("Camera", cvSink.getSource().getName());
            cvSink.grabFrame(source);
            outputStream.putFrame(source);
            if (controls.getToggleCamera() && !buttonHeld && !isToggled) {
                isToggled = true;
                cvSink = CameraServer.getInstance().getVideo(cam2);
                System.out.println("toggled");
            } else if (controls.getToggleCamera() && !buttonHeld && isToggled) {
                isToggled = false;
                cvSink = CameraServer.getInstance().getVideo(cam1);
                System.out.println("toggled");
            }
            buttonHeld = controls.getToggleCamera();
        }
    }).start();
}
/**
 * Initializes the cameras and starts the CameraServer to stream to GRIP /
 * SmartDashboard
 */
public void init() {
    /*
    gearCam.setFPS(fps);
    gearCam.setResolution(xResolution, yResolution);
    // Use zero exposure for bright vision targets and back light
    // gearCam.setExposureManual(0);
    shooterCam.setFPS(fps);
    shooterCam.setResolution(xResolution, yResolution);
    // Use zero exposure for bright vision targets and back light
    // shooterCam.setExposureManual(0);
    CameraServer.getInstance().addCamera(gearCam);
    CameraServer.getInstance().addCamera(shooterCam);
    CameraServer.getInstance().startAutomaticCapture(gearCam);
    */
    CameraServer.getInstance().startAutomaticCapture(0);
}
public SpatialAwarenessSubsystem() {
    super("SpatialAwarenessSubsystem");

    cameraServer = CameraServer.getInstance();
    gearCamera = cameraServer.startAutomaticCapture(0);
    gearCamera.setResolution(IMG_WIDTH, IMG_HEIGHT);
    gearCamera.setFPS(30);
    gearCamera.setBrightness(7);
    gearCamera.setExposureManual(30);
    gearVideo = cameraServer.getVideo(gearCamera);
    visionProcessing = new VisionProcessing();

    leftUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_LEFT);
    rightUltrasonic = new AnalogInput(RobotMap.ANALOG_ULTRASONIC_RIGHT);
    leftUltrasonicKalman = new SimpleKalman(1.4d, 64d,
            leftUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);
    rightUltrasonicKalman = new SimpleKalman(1.4d, 64d,
            rightUltrasonic.getAverageVoltage() * ULTRASONIC_VOLTS_TO_METERS);

    navX = new AHRS(SPI.Port.kMXP);
    System.out.println("SpatialAwarenessSubsystem constructor finished");
}
public void operatorControl() {
    NIVision.IMAQdxStartAcquisition(session);

    /*
     * grab an image, draw the circle, and provide it for the camera server
     * which will in turn send it to the dashboard.
     */
    NIVision.Rect rect = new NIVision.Rect(10, 10, 100, 100);

    while (isOperatorControl() && isEnabled()) {
        NIVision.IMAQdxGrab(session, frame, 1);
        NIVision.imaqDrawShapeOnImage(frame, frame, rect,
                DrawMode.DRAW_VALUE, ShapeMode.SHAPE_OVAL, 0.0f);

        CameraServer.getInstance().setImage(frame);

        /* robot code here! */
        Timer.delay(0.005); // wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
@Override
public void robotInit() {
    RF = new RobotFunctions();
    chooser.addDefault("Default Auto", defaultAuto);
    chooser.addObject("My Auto", customAuto);
    SmartDashboard.putData("Auto modes", chooser);

    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
    camera.setResolution(IMG_WIDTH, IMG_HEIGHT);

    visionThread = new VisionThread(camera, new Grip(), pipeline -> {
        // Only update the shared values when the pipeline found at least
        // one contour.
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            synchronized (imgLock) {
                centerX = r.x + (r.width / 2); // center of the bounding box, X
                centerY = r.y + (r.height / 2);
                rw = r.width;
                rh = r.height;
            }
        }
    });
    visionThread.start();
}
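// A hedged usage sketch (not from the original robot code) of how the
// centerX value shared above might steer the robot toward the target;
// the method name, kP, and the arcadeDrive() call are assumptions.
public void steerTowardTarget() {
    double center;
    synchronized (imgLock) {
        center = centerX;
    }
    double kP = 0.005; // assumed proportional gain, tune on the robot
    double turn = (center - IMG_WIDTH / 2.0) * kP;
    // drive.arcadeDrive(0.4, turn); // hypothetical drivetrain call
}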
public void display() {
    NIVision.IMAQdxStartAcquisition(session);

    /*
     * grab an image, draw the circle, and provide it for the camera server
     * which will in turn send it to the dashboard.
     */
    NIVision.Rect rect = new NIVision.Rect(10, 10, 100, 100);

    // while (teleop && enabled) {
    while (enabled) {
        NIVision.IMAQdxGrab(session, frame, 1);
        NIVision.imaqDrawShapeOnImage(frame, frame, rect,
                DrawMode.DRAW_VALUE, ShapeMode.SHAPE_OVAL, 0.0f);

        CameraServer.getInstance().setImage(frame);

        /* robot code here! */
        Timer.delay(0.005); // wait for a motor update time
    }
    NIVision.IMAQdxStopAcquisition(session);
}
@Override
public void run() {
    if (sessionStarted) {
        NIVision.IMAQdxStartAcquisition(session);
        running = true;
        // The destination image must be allocated before it can receive
        // the thresholded frame; create it once and reuse it every tick.
        Image filteredFrame = NIVision.imaqCreateImage(NIVision.ImageType.IMAGE_U8, 0);
        while (running) {
            tick++;
            SmartDashboard.putNumber("CameraThread Tick", tick);
            NIVision.IMAQdxGrab(session, frame, 1);
            NIVision.imaqColorThreshold(filteredFrame, frame, 0, ColorMode.HSL,
                    new Range(0, 255), new Range(0, 255), new Range(128, 255));
            CameraServer.getInstance().setImage(filteredFrame);
            Timer.delay(0.01);
        }
        NIVision.IMAQdxStopAcquisition(session);
    }
    running = false;
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
@Override
public void robotInit() {
    chooser.addDefault("gear", new HangGear());
    chooser.addObject("baseline", new BaseLine());
    SmartDashboard.putData("Auto", chooser);
    oi = new OI();

    UsbCamera cam = CameraServer.getInstance().startAutomaticCapture(0);
    cam.setResolution(640, 480);
    cam.setFPS(60);
    UsbCamera cam1 = CameraServer.getInstance().startAutomaticCapture(1);
    cam1.setResolution(640, 480);
    cam1.setFPS(60);

    SmartDashboard.putString("Robot State:", "started");
    System.out.println("Robot init");

    chassis.startMonitor();
    intaker.startMonitor();
    shooter.setSleepTime(100);
    shooter.startMonitor();
    stirrer.setSleepTime(300);
    stirrer.startMonitor();
    uSensor.setSleepTime(100);
    uSensor.startMonitor();
}
public void setUpCamera() {
    camera = CameraServer.getInstance().startAutomaticCapture();
    // camera.setResolution(RobotMap.IMG_WIDTH, RobotMap.IMG_HEIGHT);
    // camera.setExposureManual(RobotMap.exposure);
    visionThread = new VisionThread(camera, new GripPipeline(), pipeline -> {
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            synchronized (imgLock) {
                centerX = r.x + (r.width / 2);
            }
        }
    });
}
/**
 * Creates the object and starts the camera server
 *
 * @param ip
 *            the IP of the axis camera (usually 10.3.39.11)
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (String ip, CameraModel camera) {
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .addAxisCamera("Vision Camera", ip);

    // Based on the selected camera type, set the fields of view and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel) {
        case AXIS_M1011:
            this.horizontalFieldOfView = M1011_HORIZ_FOV;
            this.verticalFieldOfView = M1011_VERT_FOV;
            break;
        case AXIS_M1013:
            this.horizontalFieldOfView = M1013_HORIZ_FOV;
            this.verticalFieldOfView = M1013_VERT_FOV;
            break;
        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
    }
}
/**
 * The method that processes the image and inputs it into the particle reports
 */
public void processImage () {
    // grabFrame() returns the frame's timestamp, or 0 if the frame could
    // not be grabbed, so a return value of 0 signals an error.
    long errorCode = CameraServer.getInstance()
            .getVideo("Vision Camera").grabFrame(image);

    if (image.empty()) {
        System.out.println("Image is Empty! Unable to process image!");
        return;
    }

    if (errorCode == 0) {
        System.out.println(
                "There was an error grabbing the image. See below:");
        System.out.println(
                CameraServer.getInstance().getVideo().getError());
    }

    // The process image function found in the AutoGenVision class.
    super.process(image);

    // If this throws an error, make sure the GRIP project ends with a
    // filterContours function.
    this.createParticleReports(super.filterContoursOutput());

    // Sort the particles from largest to smallest
    Arrays.sort(particleReports);
    // for (int i = 0; i < particleReports.length; i++)
    // {
    //     System.out.println(i + " " + particleReports[i].area);
    // }
}
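// Arrays.sort(particleReports) above relies on ParticleReport implementing
// Comparable. A minimal sketch, assuming an 'area' field (the commented-out
// debug loop suggests one exists), that orders reports largest-first:
public class ParticleReport implements Comparable<ParticleReport> {
    public double area;

    @Override
    public int compareTo (ParticleReport other) {
        // Compare in reverse so that larger areas sort to the front.
        return Double.compare(other.area, this.area);
    }
}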
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();
    lift = new Lift();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

    // OI must be constructed after subsystems. If the OI creates Commands
    // (which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // instantiate the command used for the autonomous period
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    autonomousCommand = new AutonomousCommand();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    CameraServer cameraServer = CameraServer.getInstance();
    System.out.println("Camera sources: " + VideoSource.enumerateSources().length);
    for (VideoSource videoSource : VideoSource.enumerateSources()) {
        System.out.println("Camera: " + videoSource.getName());
    }
    UsbCamera camera = cameraServer.startAutomaticCapture();
    System.out.println("Started camera capture.");
    // Hard coded camera address
    cameraServer.addAxisCamera("AxisCam ye", "10.26.67.42");
    // visionThread = new VisionThread(camera, new GripPipeline());

    driveTrainChooser = new SendableChooser();
    driveTrainChooser.addDefault("default PWM", DriveTrain.DriveMode.PWM);
    for (DriveTrain.DriveMode driveMode : DriveTrain.DriveMode.values()) {
        driveTrainChooser.addObject(driveMode.name(), driveMode);
    }
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    shooter = new Shooter();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

    // OI must be constructed after subsystems. If the OI creates Commands
    // (which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // instantiate the command used for the autonomous period
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    autonomousCommand = new AutonomousCommand();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    CameraServer cameraServer = CameraServer.getInstance();
    System.out.println("Camera sources: " + VideoSource.enumerateSources().length);
    for (VideoSource videoSource : VideoSource.enumerateSources()) {
        System.out.println("Camera: " + videoSource.getName());
    }
    UsbCamera camera = cameraServer.startAutomaticCapture();
    System.out.println("Started camera capture.");
    // Hard coded camera address
    cameraServer.addAxisCamera("AxisCam ye", "10.26.67.42");
    // visionThread = new VisionThread(camera, new GripPipeline());
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    driveTrain = new DriveTrain();
    pDP = new PDP();
    intake = new Intake();
    climber = new Climber();
    shooter = new Shooter();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

    // OI must be constructed after subsystems. If the OI creates Commands
    // (which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // initializes networktable
    table = NetworkTable.getTable("HookContoursReport");

    // sets up camera switcher
    server = CameraServer.getInstance();
    server.startAutomaticCapture(0);
    // cameraInit();

    // set up sendable chooser for autonomous
    autoChooser = new SendableChooser();
    autoChooser.addDefault("Middle Hook", new AUTOMiddleHook());
    autoChooser.addObject("Left Hook", new AUTOLeftHook());
    autoChooser.addObject("Right Hook", new AUTORightHook());
    SmartDashboard.putData("Auto Chooser", autoChooser);

    // instantiate the command used for the autonomous period
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
// @Override
public void robotInit() {
    System.out.println("1");
    RobotMap.init();
    drivetrain = new Drivetrain();
    climber = new Climber();
    // fuel = new Fuel();
    gear = new Gear();
    oi = new OI();

    // initializes camera server
    server = CameraServer.getInstance();
    cameraInit();

    // initializes auto chooser
    autoChooser = new SendableChooser();
    autoChooser.addDefault("Middle Hook", new AutoMiddleHook());
    autoChooser.addObject("Loading Station Hook", new AutoLeftHook());
    autoChooser.addObject("Boiler Side Hook", new AutoRightHook());
    SmartDashboard.putData("Auto mode", autoChooser);

    // auto delay
    SmartDashboard.putNumber("Auto delay", 0);

    // resets all sensors
    resetAllSensors();
}
/**
 * Start both the left and right camera streams and combine them into a
 * single one which is then pushed to an output stream titled Concat.
 * This method should only be used for starting the camera stream.
 */
private void concatStreamStart() {
    cameraLeft = CameraServer.getInstance().startAutomaticCapture("Left", 0);
    cameraRight = CameraServer.getInstance().startAutomaticCapture("Right", 1);

    // Dummy sinks to keep camera connections open.
    CvSink cvsinkLeft = new CvSink("leftSink");
    cvsinkLeft.setSource(cameraLeft);
    cvsinkLeft.setEnabled(true);
    CvSink cvsinkRight = new CvSink("rightSink");
    cvsinkRight.setSource(cameraRight);
    cvsinkRight.setEnabled(true);

    // Matrices to store each image from the cameras.
    Mat leftSource = new Mat();
    Mat rightSource = new Mat();

    // The ArrayList of left and right sources is needed for the hconcat
    // method used to combine the streams.
    ArrayList<Mat> sources = new ArrayList<>();
    sources.add(leftSource);
    sources.add(rightSource);

    // Concatenation of both matrices.
    Mat concat = new Mat();

    // Puts the combined video on the SmartDashboard (I think).
    // The width is multiplied by 2 as the dimensions of the stream will
    // have a width two times that of a single webcam.
    CvSource outputStream = CameraServer.getInstance().putVideo("Concat",
            2 * Constants.CAM_WIDTH, Constants.CAM_HEIGHT);

    while (!Thread.interrupted()) {
        // Provide each mat with the current frame.
        cvsinkLeft.grabFrame(leftSource);
        cvsinkRight.grabFrame(rightSource);
        // Combine the frames into a single mat and stream the image.
        Core.hconcat(sources, concat);
        outputStream.putFrame(concat);
    }
}
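// Because concatStreamStart() loops until its thread is interrupted, it
// should not be called from the main robot loop. A minimal usage sketch
// (the thread setup itself is an assumption, not part of the original):
Thread concatThread = new Thread(this::concatStreamStart, "concatStream");
concatThread.setDaemon(true); // so code redeploys are not blocked by the stream
concatThread.start();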
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
// @SuppressWarnings("unused")
public void robotInit() {
    RobotMap.init();
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
    motors = new Motors();
    camera = new Camera();
    electrical = new Electrical();
    bling = new Bling();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

    // OI must be constructed after subsystems. If the OI creates Commands
    // (which it very likely will), subsystems are not guaranteed to be
    // constructed yet. Thus, their requires() statements may grab null
    // pointers. Bad news. Don't move it.
    oi = new OI();

    // instantiate the command used for the autonomous period
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS
    autonomousCommand = new AutonomousCommand();
    // LiveWindow liveWin = new LiveWindow();
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=AUTONOMOUS

    int pulseWidthPos = Motors.TestCANTalon.getPulseWidthPosition();
    int pulseWidthUs = Motors.TestCANTalon.getPulseWidthRiseToFallUs();
    int periodUs = Motors.TestCANTalon.getPulseWidthRiseToRiseUs();
    int pulseWidthVel = Motors.TestCANTalon.getPulseWidthVelocity();

    // Check to see if encoder is plugged in
    FeedbackDeviceStatus sensorStatus = Motors.TestCANTalon
            .isSensorPresent(FeedbackDevice.CtreMagEncoder_Absolute);
    boolean sensorPluggedIn = (FeedbackDeviceStatus.FeedbackStatusPresent == sensorStatus);

    Motors.TestCANTalon.changeControlMode(TalonControlMode.PercentVbus);
    Motors.TestCANTalon.setFeedbackDevice(FeedbackDevice.CtreMagEncoder_Absolute);

    CameraServer.getInstance().startAutomaticCapture();
}
@Override
public void robotInit() {
    compressor.start();
    CameraServer.getInstance().startAutomaticCapture(0);

    autoSelector = new SendableChooser<>();
    autoSelector.addDefault("Drive Forward", new DriveForward());
    autoSelector.addObject("Place Middle Gear", new MidGearAuto());
    autoSelector.addObject("Place Right Gear <<< Feeling lucky?", new RightGearAuto());
    autoSelector.addObject("Shoot on Blue Alliance", new BlueShootAuto());
    autoSelector.addObject("Shoot on Red Alliance", new RedShootAuto());
    SmartDashboard.putData("Autonomous Command", autoSelector);

    visiontracking = new VisionTracking();

    // created this last for ordering issues
    oi = new OperatorInterface();
}
public VisionTest() {
    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture(); // cam0 by default
    camera.setResolution(IMG_WIDTH, IMG_HEIGHT);
    camera.setBrightness(0);
    // camera.setExposureManual(100);
    camera.setExposureAuto();

    CvSource cs = CameraServer.getInstance().putVideo("name", IMG_WIDTH, IMG_HEIGHT);

    visionThread = new VisionThread(camera, new GripPipeline(), pipeline -> {
        Mat IMG_MOD = pipeline.hslThresholdOutput();
        if (!pipeline.filterContoursOutput().isEmpty()) {
            // Rect recCombine = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            Rect recCombine = computeBoundingRectangle(pipeline.filterContoursOutput());
            if (recCombine == null) {
                targetDetected = false;
                return;
            }
            targetDetected = true;
            computeCoords(recCombine);
            synchronized (imgLock) {
                centerX = recCombine.x + (recCombine.width / 2);
            }
            Imgproc.rectangle(IMG_MOD, new Point(recCombine.x, recCombine.y),
                    new Point(recCombine.x + recCombine.width, recCombine.y + recCombine.height),
                    new Scalar(255, 20, 0), 5);
        } else {
            targetDetected = false;
        }
        cs.putFrame(IMG_MOD);
    });
    visionThread.start();

    Relay relay = new Relay(RobotMap.RELAY_CHANNEL, Direction.kForward);
    relay.set(Relay.Value.kOn);
    // this.processImage();
}
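// computeBoundingRectangle() is called above but its body is not shown. A
// hedged sketch of what it likely does: merge the bounding boxes of every
// filtered contour into one Rect, returning null for an empty list.
private Rect computeBoundingRectangle(List<MatOfPoint> contours) {
    if (contours.isEmpty()) {
        return null;
    }
    Rect combined = Imgproc.boundingRect(contours.get(0));
    for (int i = 1; i < contours.size(); i++) {
        Rect r = Imgproc.boundingRect(contours.get(i));
        // Grow 'combined' to the union of the two rectangles.
        int x = Math.min(combined.x, r.x);
        int y = Math.min(combined.y, r.y);
        int x2 = Math.max(combined.x + combined.width, r.x + r.width);
        int y2 = Math.max(combined.y + combined.height, r.y + r.height);
        combined = new Rect(x, y, x2 - x, y2 - y);
    }
    return combined;
}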
public CameraSet(Controls controls, String devpath1, String devpath2) {
    this.controls = controls;
    this.cam1 = CameraServer.getInstance().startAutomaticCapture("Back", devpath2);
    this.cam2 = CameraServer.getInstance().startAutomaticCapture("Front", devpath1);
    // cam1.setResolution((int) (this.multiplier * 160), (int) (this.multiplier * 120));
    // cam2.setResolution((int) (this.multiplier * 160), (int) (this.multiplier * 120));
    outputStream = CameraServer.getInstance().putVideo("camera_set",
            (int) (multiplier * 160), (int) (multiplier * 120));
    source = new Mat();
}
/**
 * Swaps the current camera streaming to GRIP / SmartDashboard with the one
 * not streaming (and not capturing)
 */
public void toggleVisionCamera() {
    if (isGearCamActive) {
        CameraServer.getInstance().getServer().setSource(shooterCam);
        isGearCamActive = false;
    } else {
        CameraServer.getInstance().getServer().setSource(gearCam);
        isGearCamActive = true;
    }
}
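// A hedged usage sketch (the joystick, button index, and field names are
// assumptions): call the toggle on a rising edge so holding the button
// does not flip the stream back and forth every loop iteration.
if (driverStick.getRawButton(5) && !toggleHeld) {
    cameras.toggleVisionCamera();
}
toggleHeld = driverStick.getRawButton(5);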
public void robotInit() {
    // Create subsystems
    drive = new Drive();
    intake = new Intake();
    arm = new Arm();
    shooter = new Shooter();
    hanger = new Hanger();
    oi = new OI();

    // Create motion profiles
    crossLowBar = new Profile(AutoMap.crossLowBar);
    reach = new Profile(AutoMap.reach);
    toShoot = new Profile(AutoMap.toShoot);
    toLowGoal = new Profile(AutoMap.toLowGoal);

    // Pick an auto
    chooser = new SendableChooser();
    chooser.addDefault("Do Nothing", new DoNothing());
    chooser.addObject("Low Bar", new LowBarRawtonomous());
    chooser.addObject("Low Bar (profile)", new CrossLowBar(crossLowBar));
    chooser.addObject("Reach", new Reach(reach));
    chooser.addObject("Forward Rawto", new ForwardRawtonomous());
    chooser.addObject("Backward Rawto", new BackwardRawtonomous());
    chooser.addObject("Shoot", new AutoShoot());
    SmartDashboard.putData("Auto mode", chooser);

    // Start camera stream
    camera = new USBCamera();
    server = CameraServer.getInstance();
    server.setQuality(40);
    server.startAutomaticCapture(camera);

    // Start compressor
    compressor = new Compressor();
    compressor.start();

    navx = new AHRS(SPI.Port.kMXP);
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    server = CameraServer.getInstance();
    server.setQuality(80);
    // the camera name (ex "cam0") can be found through the roborio web interface
    server.startAutomaticCapture("cam0");

    serverThread = new Thread(new ServerLoop(this));
    serverThread.start();
}
/**
 * Creates the object and starts the camera server
 *
 * @param ip
 *            the IP of the .mjpg stream the axis camera outputs
 * @param camera
 *            the brand / model of the camera
 */
public VisionProcessor (String ip, CameraModel camera) {
    // Adds the camera to the cscore CameraServer, in order to grab the
    // stream.
    this.camera = CameraServer.getInstance()
            .addAxisCamera("Vision Camera", ip);

    // Based on the selected camera type, set the fields of view and focal
    // length.
    this.cameraModel = camera;
    switch (this.cameraModel) {
        case AXIS_M1011:
            this.horizontalFieldOfView = M1011_HORIZ_FOV;
            this.verticalFieldOfView = M1011_VERT_FOV;
            break;
        case AXIS_M1013:
            this.horizontalFieldOfView = M1013_HORIZ_FOV;
            this.verticalFieldOfView = M1013_VERT_FOV;
            break;
        default: // Data will default to one to avoid any "divide by zero"
                 // errors.
            this.horizontalFieldOfView = 1;
            this.verticalFieldOfView = 1;
    }
}
public void updateCamera() {
    if (frame == null || currSession == 0)
        return;

    String cameraSelected = (String) cameraSelector.getSelected();
    NIVision.IMAQdxGrab(currSession, frame, 1);
    CameraServer.getInstance().setImage(frame);

    // Compare by value; reference equality on strings is unreliable here.
    if (cameraSelected.equals(lastSelected)) {
        return;
    }
    switch (cameraSelected) {
        case "Front View":
            NIVision.IMAQdxStopAcquisition(currSession);
            currSession = sessionfront;
            NIVision.IMAQdxConfigureGrab(currSession);
            Robot.drivetrain.ForwardDrive();
            lastSelected = "Front View";
            break;
        default: // anything unrecognized falls through to "Back View"
        case "Back View":
            NIVision.IMAQdxStopAcquisition(currSession);
            currSession = sessionback;
            NIVision.IMAQdxConfigureGrab(currSession);
            Robot.drivetrain.ReverseDrive();
            lastSelected = "Back View";
            break;
        case "Manual Change":
            break;
    }
}
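// updateCamera() assumes a SendableChooser populated elsewhere. A minimal
// setup sketch (the dashboard key is an assumption) matching the strings
// handled in the switch above:
cameraSelector = new SendableChooser();
cameraSelector.addDefault("Front View", "Front View");
cameraSelector.addObject("Back View", "Back View");
cameraSelector.addObject("Manual Change", "Manual Change");
SmartDashboard.putData("Camera Selector", cameraSelector);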
@Override
public void robotInit() {
    chooser.addDefault("Default Auto", defaultAuto);
    chooser.addObject("My Auto", customAuto);
    SmartDashboard.putData("Auto modes", chooser);

    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
    camera.setResolution(IMG_WIDTH, IMG_HEIGHT);

    visionThread = new VisionThread(camera, new mar4grip(), pipeline -> {
        // Only compute a center when the pipeline actually found contours.
        if (!pipeline.filterContoursOutput().isEmpty()) {
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            centerX = r.x + (r.width / 2);
        }
    });
}
@Override
public void robotInit() {
    chooser.addDefault("Default Auto", defaultAuto);
    chooser.addObject("My Auto", customAuto);
    SmartDashboard.putData("Auto modes", chooser);

    UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
    camera.setResolution(IMG_WIDTH, IMG_HEIGHT);

    visionThread = new VisionThread(camera, new Grippipeline(), pipeline -> {
        if (!pipeline.filterContoursOutput().isEmpty()) {
            // Use the same contour list that was just checked for emptiness.
            Rect r = Imgproc.boundingRect(pipeline.filterContoursOutput().get(0));
            centerX = r.x + (r.width / 2);
        }
    });
}
/**
 * This function is run when the robot is first started up and should be
 * used for any initialization code.
 */
public void robotInit() {
    NetworkTable.globalDeleteAll();
    oi = new OI();
    teleop = new Teleop();
    uHGSD = new UpdateHighGoalShooterDashboard();
    autonomousCommand = new Autonomous(2, 2);

    CameraServer server = CameraServer.getInstance();
    server.startAutomaticCapture("cam0");

    hood.resetEncoder(hood.HOOD_START);
}
/**
 * Creates the default camera (cam0) and the cvSink
 */
public Cameras() {
    // Define variables
    camNum = 0;
    lastSwitched = 0;
    source = new Mat();
    output = new Mat();

    // Setup default camera
    cam0 = new UsbCamera("cam0", 0);
    cam0.setResolution(320, 240);
    CameraServer.getInstance().addCamera(cam0);
    cvSink = CameraServer.getInstance().getVideo(cam0);
    outputStream = CameraServer.getInstance().putVideo("cams", 320, 240);
}
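// The constructor only wires the sink to the output stream; frames still
// have to be pumped from cvSink to outputStream somewhere. A minimal sketch
// of that loop (the method name is an assumption, not part of the class):
public void streamFrames() {
    while (!Thread.interrupted()) {
        if (cvSink.grabFrame(source) == 0) {
            continue; // skip this iteration on a grab error
        }
        outputStream.putFrame(source);
    }
}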
public static void init() {
    // Drivetrain
    DRIVETRAIN_ROBOT_DRIVE = new RobotDrive(0, 1);
    DRIVETRAIN_ENCODER_LEFT = new Encoder(0, 1);
    DRIVETRAIN_ENCODER_LEFT.setDistancePerPulse(0.0481);
    DRIVETRAIN_ENCODER_RIGHT = new Encoder(2, 3, true);
    DRIVETRAIN_ENCODER_RIGHT.setDistancePerPulse(0.0481);

    // Floor Gear
    FLOORGEAR_INTAKE = new VictorSP(2);
    FLOORGEAR_LIFTER = new DoubleSolenoid(0, 1);
    FLOORGEAR_BLOCKER = new DoubleSolenoid(2, 3);

    // Climber
    CLIMBER = new VictorSP(3);

    // Passive Gear
    SLOTGEAR_HOLDER = new DoubleSolenoid(4, 5);

    // Vision
    VISION_SERVER = CameraServer.getInstance();
    VISION_CAMERA = VISION_SERVER.startAutomaticCapture("FRONT", 0);
    VISION_CAMERA.getProperty("saturation").set(20);
    VISION_CAMERA.getProperty("gain").set(50);
    VISION_CAMERA.getProperty("exposure_auto").set(1);
    VISION_CAMERA.getProperty("brightness").set(50);
    VISION_CAMERA.getProperty("exposure_absolute").set(1);
    VISION_CAMERA.getProperty("exposure_auto_priority").set(0);
}
public Cameras() {
    server = CameraServer.getInstance();
    frame = NIVision.imaqCreateImage(NIVision.ImageType.IMAGE_RGB, 0);
    sessionFront = NIVision.IMAQdxOpenCamera(RobotMap.FRONT_CAMERA,
            NIVision.IMAQdxCameraControlMode.CameraControlModeController);
    NIVision.IMAQdxConfigureGrab(sessionFront);
    currentSession = sessionFront;
    sessionSet = true;
}
public void teleopPeriodic() {
    Scheduler.getInstance().run();
    SmartDashboard.putNumber("Elevator Pot Value: ", Robot.elevator.getPotVal());
    SmartDashboard.putNumber("Arm Pot Value: ", Robot.arm.getPotVal());

    NIVision.IMAQdxGrab(session, frame, 1);
    CameraServer.getInstance().setImage(frame);
}
public void run() {
    NIVision.IMAQdxStartAcquisition(session);
    // Loop until the thread is interrupted so the acquisition can actually
    // be stopped; a bare while (true) would make the stop call unreachable.
    while (!Thread.interrupted()) {
        NIVision.IMAQdxGrab(session, frame, 1);
        CameraServer.getInstance().setImage(frame);
        try {
            Thread.sleep(200);
        } catch (InterruptedException e) {
            // Restore the interrupt flag and exit the loop.
            Thread.currentThread().interrupt();
            break;
        }
    }
    NIVision.IMAQdxStopAcquisition(session);
}