Example source code for the Java class org.webrtc.VideoCapturerAndroid

Project: webrtc-android-codelab    File: MainActivity.java
private VideoCapturer getVideoCapturer(CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    String[] cameraFacing = {"front", "back"};
    int[] cameraIndex = {0, 1};
    int[] cameraOrientation = {0, 90, 180, 270};
    for (String facing : cameraFacing) {
        for (int index : cameraIndex) {
            for (int orientation : cameraOrientation) {
                String name = "Camera " + index + ", Facing " + facing +
                        ", Orientation " + orientation;
                VideoCapturer capturer = VideoCapturerAndroid.create(name, eventsHandler);
                if (capturer != null) {
                    Log.d("Using camera: ", name);
                    return capturer;
                }
            }
        }
    }
    throw new RuntimeException("Failed to open capturer");
}
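The loop above brute-forces every device-name combination the old API might accept. With the same pre-M59 libjingle classes, the enumeration helpers can produce valid names directly; a minimal sketch under that assumption:

private VideoCapturer getVideoCapturerByEnumeration(
        CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    // Sketch: same result as above, without guessing orientations.
    for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); i++) {
        String name = CameraEnumerationAndroid.getDeviceName(i);
        VideoCapturer capturer = VideoCapturerAndroid.create(name, eventsHandler);
        if (capturer != null) {
            Log.d("MainActivity", "Using camera: " + name);
            return capturer;
        }
    }
    throw new RuntimeException("Failed to open capturer");
}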
Project: webrtcpeer-android    File: MediaResourceManager.java
private VideoTrack createCapturerVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);
    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
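The snippet allocates a VideoSource and VideoTrack but shows no teardown; a minimal sketch of the matching cleanup, assuming the same videoSource and localVideoTrack fields:

private void disposeLocalVideoTrack() {
    // Sketch: release what createCapturerVideoTrack allocated.
    if (localVideoTrack != null) {
        localVideoTrack.dispose();   // releases the native track
        localVideoTrack = null;
    }
    if (videoSource != null) {
        videoSource.stop();          // stop capture before releasing
        videoSource.dispose();
        videoSource = null;
    }
}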
Project: WebRTC-VideoCall-Anrdoid    File: VideoCallActivity.java
@OnClick({R.id.btnCamera, R.id.btnVoice, R.id.btnChangeCam, R.id.btnEndCall})
public void onViewClicked(View view) {
    switch (view.getId()) {
        case R.id.btnCamera:
            enableCam = !enableCam;
            btnCamera.setSelected(!enableCam);
            localVideoTrack.setEnabled(enableCam);
            break;
        case R.id.btnVoice:
            enableVoice = !enableVoice;
            btnVoice.setSelected(!enableVoice);
            localAudioTrack.setEnabled(enableVoice);
            break;
        case R.id.btnChangeCam:
            LogUtils.e("change cam");
            videoCapture.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
                @Override
                public void onCameraSwitchDone(final boolean b) {
                    LogUtils.e("is Front Camera: " + b);
                    VideoCallActivity.this.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            isFrontCam = b;
                            btnChangeCam.setSelected(!isFrontCam);
//                          VideoRendererGui.update(localRender, 72, 65, 25, 25, RendererCommon.ScalingType.SCALE_ASPECT_FIT, isFrontCam);
                        }
                    });
                }

                @Override
                public void onCameraSwitchError(String s) {
                    LogUtils.e("onCameraSwitchError: " + s);
                }
            });
            break;
        case R.id.btnEndCall:
            endCall();
            break;
    }
}
Project: webrtc-android    File: PeerConnectionClient.java
public void createPeerConnection(
        final EglBase.Context renderEGLContext,
        final VideoRenderer.Callbacks localRender,
        final VideoRenderer.Callbacks remoteRender,
        final PeerConnectionEvents events,
        final PeerConnectionParameters peerConnectionParameters) {
    this.peerConnectionParameters = peerConnectionParameters;
    this.events = events;
    videoCallEnabled = peerConnectionParameters.videoCallEnabled;
//
//  PeerConnectionFactory.initializeAndroidGlobals(, true, true,
//          false);
//  factory = new PeerConnectionFactory();

//  if (peerConnectionParameters == null) {
//      Log.e(TAG, "Creating peer connection without initializing factory.");
//      return;
//  }
    this.localRender = localRender;
    this.remoteRender = remoteRender;

    executor.execute(new Runnable() {
        @Override
        public void run() {
            createMediaConstraintsInternal();
//          createPeerConnectionInternal(renderEGLContext, iceServers);
            if (mediaStream == null) {
                mediaStream = factory.createLocalMediaStream("ARDAMS");
                if (videoCallEnabled) {
                    String cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
                    String frontCameraDeviceName =
                            CameraEnumerationAndroid.getNameOfFrontFacingDevice();
                    if (numberOfCameras > 1 && frontCameraDeviceName != null) {
                        cameraDeviceName = frontCameraDeviceName;
                    }
                    Log.d(TAG, "Opening camera: " + cameraDeviceName);
                    videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null,
                            peerConnectionParameters.captureToTexture ? renderEGLContext : null);
                    if (videoCapturer == null) {
                        reportError("Failed to open camera");
                        return;
                    }
                    mediaStream.addTrack(createVideoTrack(videoCapturer));
                }

                mediaStream.addTrack(factory.createAudioTrack(
                        AUDIO_TRACK_ID,
                        factory.createAudioSource(audioConstraints)));
            }
            try {
                manager = new Manager(new URI(mHost));
                client = manager.socket("/");
            } catch (URISyntaxException e) {
                e.printStackTrace();
            }
            client
                    .on(INIT_MESSAGE, messageHandler.onInitMessage)
                    .on(TEXT_MESSAGE, messageHandler.onTextMessage)
//                  .on(INVITE_MESSAGE, messageHandler.onInviteMessage)
//                  .on(READY_MESSAGE, messageHandler.onReadyMessage)
//                  .on(OFFER_MESSAGE, messageHandler.onOfferMessage)
//                  .on(ANSWER_MESSAGE, messageHandler.onAnswerMessage)
//                  .on(ICE_CANDIDATE_MESSAGE, messageHandler.onCandidateMessage)
                    .on(RTC_MESSAGE, messageHandler.onRtcMessage)
                    .on(LEAVE_MESSAGE, messageHandler.onLeaveMessage)
                    .on(AVAILABLE_USERS_MESSAGE, messageHandler.onAvailablePeersMessage)
                    .on(PRESENCE_MESSAGE, messageHandler.onPresenceMessage);
            client.connect();
        }
    });
}
Project: webrtc-android    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Project: DeviceConnect-Android    File: MediaStream.java
/**
 * Creates an instance of VideoCapturerAndroid.
 * @return VideoCapturerAndroid, or null if video is disabled
 */
private VideoCapturerAndroid createVideoCapturer() {
    switch (mOption.getVideoType()) {
        default:
        case NONE:
            return null;
        case CAMERA:
            return createCameraCapture();
        case EXTERNAL_RESOURCE:
            return createExternalResource();
    }
}
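A sketch of how such a factory method is typically consumed, using the createVideoTrack helper shown next (mMediaStream is a hypothetical field name):

// Sketch: consuming createVideoCapturer(); null means video is disabled.
VideoCapturerAndroid capturer = createVideoCapturer();
if (capturer != null) {
    mMediaStream.addTrack(createVideoTrack(capturer));
}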
Project: DeviceConnect-Android    File: MediaStream.java
/**
 * Creates an instance of VideoTrack to be used with a VideoCapturerAndroid.
 * @param capturer Instance of VideoCapturerAndroid
 * @return VideoTrack
 */
private VideoTrack createVideoTrack(final VideoCapturerAndroid capturer) {
    mVideoRender = mOption.getRender();
    mVideoSource = mFactory.createVideoSource(capturer, mVideoConstraints);
    mVideoTrack = mFactory.createVideoTrack(VIDEO_TRACK_ID, mVideoSource);
    mVideoTrack.setEnabled(mEnableVideo);
    mVideoTrack.addRenderer(new VideoRenderer(mVideoRender));
    return mVideoTrack;
}
Project: DeviceConnect-Android    File: VideoCapturerExternalResource.java
@Override
public void startCapture(final int width, final int height, final int frameRate,
                         final Context applicationContext,
                         final VideoCapturerAndroid.CapturerObserver frameObserver) {
    if (DEBUG) {
        Log.i(TAG, "@@@ startCapture size:[" + width + ", " + height
                + "] frameRate:" + frameRate);
    }

    mSurfaceHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
    mSurface = new Surface(mSurfaceHelper.getSurfaceTexture());

    mRequestWidth = width;
    mRequestHeight = height;
    mFrameObserver = frameObserver;

    // Stop any previous client before opening a new connection.
    if (mClient != null) {
        mClient.stop();
        mClient = null;
    }

    mClient = new MixedReplaceMediaClient(mUri);
    mClient.setOnMixedReplaceMediaListener(mOnMixedReplaceMediaListener);
    mClient.start();
}
Project: webrtc-android    File: WebRtcClient.java
private VideoCapturer getVideoCapturer() {
    // Note: index 0 is the first enumerated device, which on most Android
    // devices is the back-facing camera, not necessarily the front one.
    String frontCameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
    return VideoCapturerAndroid.create(frontCameraDeviceName);
}
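If the front camera is the actual intent, the facing-specific helper in the same old API resolves it directly; a minimal sketch:

private VideoCapturer getFrontVideoCapturer() {
    // Sketch: resolve the front camera by facing, fall back to device 0.
    String deviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
    if (deviceName == null) {
        deviceName = CameraEnumerationAndroid.getDeviceName(0);
    }
    return VideoCapturerAndroid.create(deviceName);
}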
Project: krankygeek    File: MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(
            VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
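The snippet stops after creating the local stream and renderers; a natural next step is handing the stream to a PeerConnection. A minimal sketch using the same pre-M59 API (peerConnectionObserver is a hypothetical PeerConnection.Observer defined elsewhere):

// Sketch: attaching the local stream above to a new PeerConnection.
List<PeerConnection.IceServer> iceServers = new ArrayList<>();
iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(
        iceServers, new MediaConstraints(), peerConnectionObserver);
peerConnection.addStream(localMediaStream);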
Project: webrtcpeer-android    File: MediaResourceManager.java
void createLocalMediaStream(Object renderEGLContext, final VideoRenderer.Callbacks localRender) {
    if (factory == null) {
        Log.e(TAG, "Peerconnection factory is not created");
        return;
    }
    this.localRender = localRender;
    if (videoCallEnabled) {
        factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
    }

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT), Logging.Severity.LS_INFO);

    localMediaStream = factory.createLocalMediaStream("ARDAMS");

    // If video call is enabled and the device has camera(s)
    if (videoCallEnabled && numberOfCameras > 0) {
        String cameraDeviceName; // = CameraEnumerationAndroid.getDeviceName(0);
        String frontCameraDeviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
        String backCameraDeviceName = CameraEnumerationAndroid.getNameOfBackFacingDevice();

        // If the current camera is set to front and the device has one
        if (currentCameraPosition == NBMCameraPosition.FRONT && frontCameraDeviceName != null) {
            cameraDeviceName = frontCameraDeviceName;
        }
        // If the current camera is set to back and the device has one
        else if (currentCameraPosition == NBMCameraPosition.BACK && backCameraDeviceName != null) {
            cameraDeviceName = backCameraDeviceName;
        }
        // If the current camera is set to any, pick the first camera of the
        // device, which should be a back-facing camera according to the
        // libjingle API
        else {
            cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
            currentCameraPosition = NBMCameraPosition.BACK;
        }

        Log.d(TAG, "Opening camera: " + cameraDeviceName);
        videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
        if (videoCapturer == null) {
            Log.d(TAG, "Error while opening camera");
            return;
        }
        localMediaStream.addTrack(createCapturerVideoTrack(videoCapturer));
    }

    // Create audio track
    localMediaStream.addTrack(factory.createAudioTrack(AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));

    Log.d(TAG, "Local media stream created.");
}
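Once the capturer above exists, switching between front and back cameras is a single asynchronous call; a minimal sketch against the same fields (the NBMCameraPosition update mirrors the surrounding code):

void switchCamera() {
    // Sketch: runtime camera switch on the videoCapturer created above.
    if (videoCapturer == null) {
        return;
    }
    videoCapturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
        @Override
        public void onCameraSwitchDone(boolean isFrontCamera) {
            currentCameraPosition = isFrontCamera
                    ? NBMCameraPosition.FRONT : NBMCameraPosition.BACK;
        }

        @Override
        public void onCameraSwitchError(String error) {
            Log.e(TAG, "Failed to switch camera: " + error);
        }
    });
}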
Project: DeviceConnect-Android    File: VideoCapturerExternalResource.java
@Override
public void switchCamera(VideoCapturerAndroid.CameraSwitchHandler cameraSwitchHandler) {
    if (DEBUG) {
        Log.i(TAG, "switchCamera:");
    }
    // Camera switching does not apply to an external resource; report an
    // error so the caller's handler is not left waiting for a callback.
    if (cameraSwitchHandler != null) {
        cameraSwitchHandler.onCameraSwitchError("switchCamera is not supported");
    }
}