Example source code for the Java class org.webrtc.VideoCapturer

Project: Achilles_Android    File: MainActivity.java
private VideoCapturer createVideoCapturer() {
    VideoCapturer videoCapturer = null;
    Log.d(TAG, "Creating capturer using camera2 API.");

    // Creating camera capturer
    Camera2Enumerator enumerator = new Camera2Enumerator(this);
    final String[] deviceNames = enumerator.getDeviceNames();
    Log.d(TAG, "Looking for back facing cameras.");
    for (String deviceName : deviceNames) {
        if (enumerator.isBackFacing(deviceName)) {
            Log.d(TAG, "Creating back facing camera capturer.");
            videoCapturer = enumerator.createCapturer(deviceName, null);
            break;
        }
    }

    if (videoCapturer == null) {
        Log.e(TAG, "Failed to open camera.");
        return null;
    }
    return videoCapturer;
}
Project: AndroidRTC    File: PeerConnectionClientTest.java
PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
    MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
    VideoCapturer videoCapturer, EglBase.Context eglContext) {
  List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
  SignalingParameters signalingParameters =
      new SignalingParameters(iceServers, true, // iceServers, initiator.
          null, null, null, // clientId, wssUrl, wssPostUrl.
          null, null); // offerSdp, iceCandidates.

  PeerConnectionClient client = PeerConnectionClient.getInstance();
  PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
  options.networkIgnoreMask = 0;
  options.disableNetworkMonitor = true;
  client.setPeerConnectionFactoryOptions(options);
  client.createPeerConnectionFactory(
      InstrumentationRegistry.getTargetContext(), peerConnectionParameters, this);
  client.createPeerConnection(
      eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters);
  client.createOffer();
  return client;
}
Project: webrtc-android-codelab    File: MainActivity.java
private VideoCapturer getVideoCapturer(CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    String[] cameraFacing = {"front", "back"};
    int[] cameraIndex = {0, 1};
    int[] cameraOrientation = {0, 90, 180, 270};
    for (String facing : cameraFacing) {
        for (int index : cameraIndex) {
            for (int orientation : cameraOrientation) {
                String name = "Camera " + index + ", Facing " + facing +
                        ", Orientation " + orientation;
                VideoCapturer capturer = VideoCapturerAndroid.create(name, eventsHandler);
                if (capturer != null) {
                    Log.d("Using camera: ", name);
                    return capturer;
                }
            }
        }
    }
    throw new RuntimeException("Failed to open capture");
}
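Note: the device-name strings brute-forced above follow the "Camera <index>, Facing <facing>, Orientation <angle>" format used by the legacy (pre-CameraEnumerator) capture API. Where that API is available, the name can also be queried instead of guessed, as the webrtc-android WebRtcClient snippet further down this page does. A minimal sketch, reusing the same eventsHandler parameter:

    // Ask the legacy enumeration API for the first device's name instead of
    // iterating over every facing/orientation combination.
    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
    VideoCapturer capturer = VideoCapturerAndroid.create(deviceName, eventsHandler);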
Project: actor-platform    File: AndroidVideoSource.java
private VideoCapturer getVideoCapturer() {
    String[] cameraFacing = {"front", "back"};
    int[] cameraIndex = {0, 1};
    int[] cameraOrientation = {0, 90, 180, 270};
    for (String facing : cameraFacing) {
        for (int index : cameraIndex) {
            for (int orientation : cameraOrientation) {
                String name = "Camera " + index + ", Facing " + facing +
                        ", Orientation " + orientation;
                VideoCapturer capturer = VideoCapturer.create(name);
                if (capturer != null) {
                    return capturer;
                }
            }
        }
    }
    throw new RuntimeException("Failed to open capturer");
}
Project: respoke-sdk-android    File: RespokeCall.java
private void addLocalStreams(Context context) {
    AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

    localStream = peerConnectionFactory.createLocalMediaStream("ARDAMS");

    if (!audioOnly) {
        VideoCapturer capturer = getVideoCapturer();
        MediaConstraints videoConstraints = new MediaConstraints();
        videoSource = peerConnectionFactory.createVideoSource(capturer, videoConstraints);
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        localStream.addTrack(videoTrack);
    }

    localStream.addTrack(peerConnectionFactory.createAudioTrack("ARDAMSa0", peerConnectionFactory.createAudioSource(new MediaConstraints())));

    peerConnection.addStream(localStream);
}
Project: respoke-sdk-android    File: RespokeCall.java
private VideoCapturer getVideoCapturer() {
    String[] cameraFacing = { "front", "back" };
    int[] cameraIndex = { 0, 1 };
    int[] cameraOrientation = { 0, 90, 180, 270 };
    for (String facing : cameraFacing) {
        for (int index : cameraIndex) {
            for (int orientation : cameraOrientation) {
                String name = "Camera " + index + ", Facing " + facing +
                        ", Orientation " + orientation;
                VideoCapturer capturer = VideoCapturer.create(name);
                if (capturer != null) {
                    //logAndToast("Using camera: " + name);
                    Log.d(TAG, "Using camera: " + name);
                    return capturer;
                }
            }
        }
    }
    throw new RuntimeException("Failed to open capturer");
}
Project: WebRTCDemo    File: AppRTCDemoActivity.java
private VideoCapturer getVideoCapturer() {
  String[] cameraFacing = { "front", "back" };
  int[] cameraIndex = { 0, 1 };
  int[] cameraOrientation = { 0, 90, 180, 270 };
  for (String facing : cameraFacing) {
    for (int index : cameraIndex) {
      for (int orientation : cameraOrientation) {
        String name = "Camera " + index + ", Facing " + facing +
            ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer != null) {
          logAndToast("Using camera: " + name);
          return capturer;
        }
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}
Project: apprtc-android    File: AppRTCDemoActivity.java
private VideoCapturer getVideoCapturer() {
  String[] cameraFacing = { "front", "back" };
  int[] cameraIndex = { 0, 1 };
  int[] cameraOrientation = { 0, 90, 180, 270 };
  for (String facing : cameraFacing) {
    for (int index : cameraIndex) {
      for (int orientation : cameraOrientation) {
        String name = "Camera " + index + ", Facing " + facing +
            ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer != null) {
          logAndToast("Using camera: " + name);
          return capturer;
        }
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}
Project: licodeAndroidClient    File: LicodeConnector.java
/** get access to the camera */
private VideoCapturer getVideoCapturer() {
    String[] cameraFacing = { "front", "back" };
    int[] cameraIndex = { 0, 1 };
    int[] cameraOrientation = { 0, 90, 180, 270 };
    for (String facing : cameraFacing) {
        for (int index : cameraIndex) {
            for (int orientation : cameraOrientation) {
                String name = "Camera " + index + ", Facing " + facing
                        + ", Orientation " + orientation;
                VideoCapturer capturer = VideoCapturer.create(name);
                if (capturer != null) {
                    log("Using camera: " + name);
                    return capturer;
                }
            }
        }
    }
    throw new RuntimeException("Failed to open capturer");
}
Project: droidkit-webrtc    File: AppRTCDemoActivity.java
private VideoCapturer getVideoCapturer() {
  String[] cameraFacing = { "front", "back" };
  int[] cameraIndex = { 0, 1 };
  int[] cameraOrientation = { 0, 90, 180, 270 };
  for (String facing : cameraFacing) {
    for (int index : cameraIndex) {
      for (int orientation : cameraOrientation) {
        String name = "Camera " + index + ", Facing " + facing +
            ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer != null) {
          logAndToast("Using camera: " + name);
          return capturer;
        }
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}
Project: appspotdemo-mono    File: AppRTCDemoActivity.java
private VideoCapturer getVideoCapturer() {
  String[] cameraFacing = { "front", "back" };
  int[] cameraIndex = { 0, 1 };
  int[] cameraOrientation = { 0, 90, 180, 270 };
  for (String facing : cameraFacing) {
    for (int index : cameraIndex) {
      for (int orientation : cameraOrientation) {
        String name = "Camera " + index + ", Facing " + facing +
            ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer != null) {
          logAndToast("Using camera: " + name);
          return capturer;
        }
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}
Project: nc-android-webrtcpeer    File: PeerConnectionClient.java
public void createPeerConnection(final EglBase.Context renderEGLContext,
                                 final VideoRenderer.Callbacks localRender,
                                 final VideoRenderer.Callbacks remoteRender,
                                 final VideoCapturer videoCapturer,
                                 final SignalingParameters signalingParameters) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
                         videoCapturer, signalingParameters);
}
Project: nc-android-webrtcpeer    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    if (localRender != null) {
        localVideoTrack.setEnabled(renderVideo);
        localVideoTrack.addRenderer(new VideoRenderer(localRender));
    }

    return localVideoTrack;
}
Project: Achilles_Android    File: MainActivity.java
private VideoTrack createVideoTrack(VideoCapturer videoCapturer) {
    mVideoSource = mPeerConnectionFactory.createVideoSource(videoCapturer);
    videoCapturer.startCapture(videoWidth, videoHeight, videoFps); // third argument is the frame rate, not the height

    mLocalVideoTrack = mPeerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, mVideoSource);
    mLocalVideoTrack.setEnabled(true);
    mLocalVideoTrack.addRenderer(new VideoRenderer(localProxyRenderer));
    return mLocalVideoTrack;
}
Project: AppRTC-Android    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
  videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(videoWidth, videoHeight, videoFps);

  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderVideo);
  localVideoTrack.addSink(localRender);
  return localVideoTrack;
}
Project: AppRTC-Android    File: CallActivity.java
@TargetApi(21)
private VideoCapturer createScreenCapturer() {
  if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
    reportError("User didn't give permission to capture the screen.");
    return null;
  }
  return new ScreenCapturerAndroid(
      mediaProjectionPermissionResultData, new MediaProjection.Callback() {
    @Override
    public void onStop() {
      reportError("User revoked permission to capture the screen.");
    }
  });
}
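Note: mediaProjectionPermissionResultCode and mediaProjectionPermissionResultData are filled in by a screen-capture permission request that is not part of this excerpt. A minimal sketch of how they are commonly populated in the same activity (the CAPTURE_PERMISSION_REQUEST_CODE constant is an assumption):

@TargetApi(21)
private void startScreenCapture() {
  MediaProjectionManager mediaProjectionManager = (MediaProjectionManager) getApplication()
      .getSystemService(Context.MEDIA_PROJECTION_SERVICE);
  startActivityForResult(
      mediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
}

@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
  if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE) {
    return;
  }
  mediaProjectionPermissionResultCode = resultCode;
  mediaProjectionPermissionResultData = data;
  // Only after this point can createScreenCapturer() succeed.
}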
Project: AppRTC-Android    File: CallActivity.java
private void onConnectedToRoomInternal(final SignalingParameters params) {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;

  signalingParameters = params;
  logAndToast("Creating peer connection, delay=" + delta + "ms");
  VideoCapturer videoCapturer = null;
  if (peerConnectionParameters.videoCallEnabled) {
    videoCapturer = createVideoCapturer();
  }
  peerConnectionClient.createPeerConnection(
      localProxyVideoSink, remoteRenderers, videoCapturer, signalingParameters);

  if (signalingParameters.initiator) {
    logAndToast("Creating OFFER...");
    // Create offer. Offer SDP will be sent to answering client in
    // PeerConnectionEvents.onLocalDescription event.
    peerConnectionClient.createOffer();
  } else {
    if (params.offerSdp != null) {
      peerConnectionClient.setRemoteDescription(params.offerSdp);
      logAndToast("Creating ANSWER...");
      // Create answer. Answer SDP will be sent to offering client in
      // PeerConnectionEvents.onLocalDescription event.
      peerConnectionClient.createAnswer();
    }
    if (params.iceCandidates != null) {
      // Add remote ICE candidates from room.
      for (IceCandidate iceCandidate : params.iceCandidates) {
        peerConnectionClient.addRemoteIceCandidate(iceCandidate);
      }
    }
  }
}
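Note: createVideoCapturer() is referenced here but not included in this excerpt. A typical implementation selects the Camera2 or Camera1 enumerator and delegates to a createCameraCapturer helper (sketched below under the webrtc-android-codelab entry); the following is a hedged sketch of that common AppRTC pattern, with the TAG and reportError members assumed:

private VideoCapturer createVideoCapturer() {
  final VideoCapturer videoCapturer;
  if (Camera2Enumerator.isSupported(this)) {
    Logging.d(TAG, "Creating capturer using camera2 API.");
    videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
  } else {
    Logging.d(TAG, "Creating capturer using camera1 API.");
    videoCapturer = createCameraCapturer(new Camera1Enumerator(true));
  }
  if (videoCapturer == null) {
    reportError("Failed to open camera.");
    return null;
  }
  return videoCapturer;
}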
Project: AndroidRTC    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Project: AndroidRTC    File: CallActivity.java
@TargetApi(21)
private VideoCapturer createScreenCapturer() {
    if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
        reportError("User didn't give permission to capture the screen.");
        return null;
    }
    return new ScreenCapturerAndroid(
            mediaProjectionPermissionResultData, new MediaProjection.Callback() {
        @Override
        public void onStop() {
            reportError("User revoked permission to capture the screen.");
        }
    });
}
Project: AndroidRTC    File: CallActivity.java
private void onConnectedToRoomInternal(final SignalingParameters params) {
    final long delta = System.currentTimeMillis() - callStartedTimeMs;

    signalingParameters = params;
    logAndToast("Creating peer connection, delay=" + delta + "ms");
    VideoCapturer videoCapturer = null;
    if (peerConnectionParameters.videoCallEnabled) {
        videoCapturer = createVideoCapturer();
    }
    peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(), localRender,
            remoteRenderers, videoCapturer, signalingParameters);

    if (signalingParameters.initiator) {
        logAndToast("Creating OFFER...");
        // Create offer. Offer SDP will be sent to answering client in
        // PeerConnectionEvents.onLocalDescription event.
        peerConnectionClient.createOffer();
    } else {
        if (params.offerSdp != null) {
            peerConnectionClient.setRemoteDescription(params.offerSdp);
            logAndToast("Creating ANSWER...");
            // Create answer. Answer SDP will be sent to offering client in
            // PeerConnectionEvents.onLocalDescription event.
            peerConnectionClient.createAnswer();
        }
        if (params.iceCandidates != null) {
            // Add remote ICE candidates from room.
            for (IceCandidate iceCandidate : params.iceCandidates) {
                peerConnectionClient.addRemoteIceCandidate(iceCandidate);
            }
        }
    }
}
Project: AndroidRTC    File: PeerConnectionClientTest.java
private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
    boolean decodeToTexture) throws InterruptedException {
  loopback = true;
  MockRenderer localRenderer = null;
  MockRenderer remoteRenderer = null;
  if (parameters.videoCallEnabled) {
    Log.d(TAG, "testLoopback for video " + parameters.videoCodec);
    localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
  } else {
    Log.d(TAG, "testLoopback for audio.");
  }
  pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer,
      decodeToTexture ? eglBase.getEglBaseContext() : null);

  // Wait for local SDP, rename it to answer and set as remote SDP.
  assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
  SessionDescription remoteSdp = new SessionDescription(
      SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
  pcClient.setRemoteDescription(remoteSdp);

  // Wait for ICE connection.
  assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

  if (parameters.videoCallEnabled) {
    // Check that local and remote video frames were rendered.
    assertTrue("Local video frames were not rendered.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
  } else {
    // For audio just sleep for 1 sec.
    // TODO(glaznev): check how we can detect that remote audio was rendered.
    Thread.sleep(AUDIO_RUN_TIMEOUT);
  }

  pcClient.close();
  assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
  Log.d(TAG, "testLoopback done.");
}
Project: VideoCRE    File: VideoActivity.java
private VideoCapturer createVideoCapturer() {
    switch (MainActivity.sVideoSource) {
        case VideoSource.SOURCE_CAMERA1:
            return VideoCapturers.createCamera1Capturer(true);
        case VideoSource.SOURCE_CAMERA2:
            return VideoCapturers.createCamera2Capturer(this);
        case VideoSource.SOURCE_SCREEN:
            return null;
        case VideoSource.SOURCE_FILE:
            return VideoCapturers.createFileVideoCapturer("");
        default:
            return null;
    }
}
Project: VideoCRE    File: VideoCapturers.java
public static VideoCapturer createFileVideoCapturer(String path) {
    try {
        return new FileVideoCapturer(path);
    } catch (IOException e) {
        return null;
    }
}
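Note: FileVideoCapturer feeds frames from a prerecorded clip instead of a camera; its constructor throws IOException when the file cannot be opened, which is why the empty path passed from VideoActivity above yields null. A minimal usage sketch with an assumed .y4m sample path and TAG constant:

// Assumed sample file; FileVideoCapturer expects an uncompressed .y4m clip.
String clipPath = new File(Environment.getExternalStorageDirectory(), "sample.y4m").getAbsolutePath();
VideoCapturer fileCapturer = VideoCapturers.createFileVideoCapturer(clipPath);
if (fileCapturer == null) {
    Log.e(TAG, "Could not open " + clipPath);
} else {
    // Feed it into the same pipeline as a camera capturer, e.g.
    // factory.createVideoSource(fileCapturer) followed by startCapture(width, height, fps).
}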
Project: webrtc-android-codelab    File: MainActivity.java
private VideoCapturer createVideoCapturer() {
    VideoCapturer videoCapturer;
    Logging.d(TAG, "Creating capturer using camera1 API.");
    videoCapturer = createCameraCapturer(new Camera1Enumerator(false));

    return videoCapturer;
}
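Note: the createCameraCapturer(CameraEnumerator) helper called above is not part of this listing. A minimal sketch of the usual pattern, where front-facing devices are tried first and then any remaining camera:

private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
    final String[] deviceNames = enumerator.getDeviceNames();

    // First, try to find a front-facing camera.
    for (String deviceName : deviceNames) {
        if (enumerator.isFrontFacing(deviceName)) {
            VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
            if (videoCapturer != null) {
                return videoCapturer;
            }
        }
    }

    // No front-facing camera could be opened; fall back to any other device.
    for (String deviceName : deviceNames) {
        if (!enumerator.isFrontFacing(deviceName)) {
            VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
            if (videoCapturer != null) {
                return videoCapturer;
            }
        }
    }
    return null;
}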
Project: webrtc-android-codelab    File: MainActivity.java
public void start() {
    start.setEnabled(false);
    call.setEnabled(true);
    //Initialize PeerConnectionFactory globals.
    //Params are context, initAudio,initVideo and videoCodecHwAcceleration
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);

    //Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    peerConnectionFactory = new PeerConnectionFactory(options);


    //Now create a VideoCapturer instance. Callback methods are there if you want to do something! Duh!
    VideoCapturer videoCapturerAndroid = getVideoCapturer(new CustomCameraEventsHandler());

    //Create MediaConstraints - Will be useful for specifying video and audio constraints.
    audioConstraints = new MediaConstraints();
    videoConstraints = new MediaConstraints();

    //Create a VideoSource instance
    videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    //create an AudioSource instance
    audioSource = peerConnectionFactory.createAudioSource(audioConstraints);
    localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);
    localVideoView.setVisibility(View.VISIBLE);

    //create a videoRenderer based on SurfaceViewRenderer instance
    localRenderer = new VideoRenderer(localVideoView);
    // And finally, with our VideoRenderer ready, we
    // can add our renderer to the VideoTrack.
    localVideoTrack.addRenderer(localRenderer);

}
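Note: nothing in start() actually begins capturing; with the single-argument createVideoSource API used here, the capturer still needs an explicit startCapture call before frames reach localVideoTrack (compare the onCreate variant of this activity further down). A one-line sketch with assumed capture settings:

    // Assumed capture format: width, height, frames per second.
    videoCapturerAndroid.startCapture(1280, 720, 30);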
Project: voip_android    File: WebRTCActivity.java
protected void startStream() {
    logAndToast("Creating peer connection");

    peerConnectionClient = PeerConnectionClient.getInstance();

    peerConnectionClient.createPeerConnectionFactory(
            this, peerConnectionParameters, this);


    PeerConnection.IceServer server = new PeerConnection.IceServer("stun:stun.counterpath.net:3478");

    String username = turnUserName;
    String password = turnPassword;
    PeerConnection.IceServer server2 = new PeerConnection.IceServer("turn:turn.gobelieve.io:3478?transport=udp", username, password);

    peerConnectionClient.clearIceServer();
    peerConnectionClient.addIceServer(server);
    peerConnectionClient.addIceServer(server2);

    VideoCapturer videoCapturer = null;
    if (peerConnectionParameters.videoCallEnabled) {
        videoCapturer = createVideoCapturer();
    }
    peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(), localRender,
            remoteRenderers, videoCapturer);

    if (this.isCaller) {
        logAndToast("Creating OFFER...");
        // Create offer. Offer SDP will be sent to answering client in
        // PeerConnectionEvents.onLocalDescription event.
        peerConnectionClient.createOffer();
    }
}
Project: voip_android    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Project: restcomm-android-sdk    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
  videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(videoWidth, videoHeight, videoFps);

  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderLocalVideo);
  localVideoTrack.addSink(localRender);
  return localVideoTrack;
}
Project: AppRTC-Android    File: PeerConnectionClient.java
public void createPeerConnection(final VideoSink localRender,
    final VideoRenderer.Callbacks remoteRender, final VideoCapturer videoCapturer,
    final SignalingParameters signalingParameters) {
  createPeerConnection(
      localRender, Collections.singletonList(remoteRender), videoCapturer, signalingParameters);
}
Project: AndroidRTC    File: PeerConnectionClient.java
public void createPeerConnection(final EglBase.Context renderEGLContext,
                                 final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
                                 final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
            videoCapturer, signalingParameters);
}
Project: VideoCRE    File: VideoActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_video);

    VideoConfig config = VideoConfig.builder()
            .previewWidth(1280)
            .previewHeight(720)
            .outputWidth(448)
            .outputHeight(800)
            .fps(30)
            .outputBitrate(800)
            .build();
    VideoConfig hdConfig = VideoConfig.builder()
            .previewWidth(1280)
            .previewHeight(720)
            .outputWidth(720)
            .outputHeight(1280)
            .fps(30)
            .outputBitrate(2000)
            .build();
    VideoCapturer capturer = createVideoCapturer();

    mVideoView = (SurfaceViewRenderer) findViewById(R.id.mVideoView1);
    try {
        String filename = "video_source_record_" + System.currentTimeMillis();
        mMp4Recorder = new Mp4Recorder(
                new File(Environment.getExternalStorageDirectory(), filename + ".mp4"));
        mHdMp4Recorder = new Mp4Recorder(
                new File(Environment.getExternalStorageDirectory(), filename + "-hd.mp4"));
    } catch (IOException e) {
        e.printStackTrace();
        Toast.makeText(this, "start Mp4Recorder fail!", Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    mHwAvcEncoder = new HwAvcEncoder(config, mMp4Recorder);
    mHdHwAvcEncoder = new HwAvcEncoder(hdConfig, mHdMp4Recorder);
    mVideoSink = new VideoSink(mVideoView, mHwAvcEncoder, mHdHwAvcEncoder);
    mVideoSource = new VideoSource(getApplicationContext(), config, capturer, mVideoSink);

    mVideoView.init(mVideoSource.getRootEglBase().getEglBaseContext(), null);
    mHwAvcEncoder.start(mVideoSource.getRootEglBase());
    mHdHwAvcEncoder.start(mVideoSource.getRootEglBase());

    initView();
}
Project: VideoCRE    File: VideoCapturers.java
public static VideoCapturer createCamera1Capturer(boolean captureToTexture) {
    return createCameraCapturer(new Camera1Enumerator(captureToTexture));
}
Project: VideoCRE    File: VideoCapturers.java
public static VideoCapturer createCamera2Capturer(Context context) {
    return createCameraCapturer(new Camera2Enumerator(context));
}
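Note: capturers produced by these factory methods are started in the createVideoTrack snippets above; on teardown they should also be stopped and disposed together with the video source. A minimal cleanup sketch, assuming videoCapturer and videoSource fields:

private void releaseCapture() {
    if (videoCapturer != null) {
        try {
            videoCapturer.stopCapture(); // may block briefly while the capture session shuts down
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        videoCapturer.dispose();
        videoCapturer = null;
    }
    if (videoSource != null) {
        videoSource.dispose();
        videoSource = null;
    }
}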
Project: webrtc-android-codelab    File: MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    //Initialize PeerConnectionFactory globals.
    //Params are context, initAudio,initVideo and videoCodecHwAcceleration
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);

    //Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);


    //Now create a VideoCapturer instance. Callback methods are there if you want to do something! Duh!
    VideoCapturer videoCapturerAndroid = createVideoCapturer();
    //Create MediaConstraints - Will be useful for specifying video and audio constraints. More on this later!
    MediaConstraints constraints = new MediaConstraints();

    //Create a VideoSource instance
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    //create an AudioSource instance
    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    //we will start capturing the video from the camera
    //width,height and fps
    videoCapturerAndroid.startCapture(1000, 1000, 30);

    //create surface renderer, init it and add the renderer to the track
    SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
    videoView.setMirror(true);

    EglBase rootEglBase = EglBase.create();
    videoView.init(rootEglBase.getEglBaseContext(), null);

    localVideoTrack.addRenderer(new VideoRenderer(videoView));


}
Project: webrtc-android    File: WebRtcClient.java
private VideoCapturer getVideoCapturer() {
    String frontCameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
    return VideoCapturerAndroid.create(frontCameraDeviceName);
}
Project: voip_android    File: PeerConnectionClient.java
public void createPeerConnection(final EglBase.Context renderEGLContext,
                                 final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
                                 final VideoCapturer videoCapturer) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
            videoCapturer);
}
Project: WebRTCDemo    File: AppRTCDemoActivity.java
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
  factory = new PeerConnectionFactory();

  MediaConstraints pcConstraints = appRtcClient.pcConstraints();
  pcConstraints.optional.add(
      new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
  pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

  createDataChannelToRegressionTestBug2302(pc);  // See method comment.

  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);

  {
    final PeerConnection finalPC = pc;
    final Runnable repeatedStatsLogger = new Runnable() {
        public void run() {
          synchronized (quit[0]) {
            if (quit[0]) {
              return;
            }
            final Runnable runnableThis = this;
            if (hudView.getVisibility() == View.INVISIBLE) {
              vsv.postDelayed(runnableThis, 1000);
              return;
            }
            boolean success = finalPC.getStats(new StatsObserver() {
                public void onComplete(final StatsReport[] reports) {
                  runOnUiThread(new Runnable() {
                      public void run() {
                        updateHUD(reports);
                      }
                    });
                  for (StatsReport report : reports) {
                    Log.d(TAG, "Stats: " + report.toString());
                  }
                  vsv.postDelayed(runnableThis, 1000);
                }
              }, null);
            if (!success) {
              throw new RuntimeException("getStats() return false!");
            }
          }
        }
      };
    vsv.postDelayed(repeatedStatsLogger, 1000);
  }

  {
    logAndToast("Creating local video source...");
    MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
    if (appRtcClient.videoConstraints() != null) {
      VideoCapturer capturer = getVideoCapturer();
      videoSource = factory.createVideoSource(
          capturer, appRtcClient.videoConstraints());
      VideoTrack videoTrack =
          factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(localRender));
      lMS.addTrack(videoTrack);
    }
    if (appRtcClient.audioConstraints() != null) {
      lMS.addTrack(factory.createAudioTrack(
          "ARDAMSa0",
          factory.createAudioSource(appRtcClient.audioConstraints())));
    }
    pc.addStream(lMS, new MediaConstraints());
  }
  logAndToast("Waiting for ICE candidates...");
}
Project: restcomm-android-sdk    File: PeerConnectionClient.java
public void createPeerConnection(final VideoSink localRender,
    final VideoRenderer.Callbacks remoteRender, final VideoCapturer videoCapturer,
    final SignalingParameters signalingParameters) {
  createPeerConnection(
      localRender, Collections.singletonList(remoteRender), videoCapturer, signalingParameters);
}
Project: restcomm-android-sdk    File: RCConnection.java
private void onConnectedToRoomInternal(final SignalingParameters params)
{
   RCLogger.i(TAG, "onConnectedToRoomInternal");
   final long delta = System.currentTimeMillis() - callStartedTimeMs;

   signalingParameters = params;
   if (peerConnectionClient == null) {
      RCLogger.w(TAG, "Room is connected, but EGL context is not ready yet.");
      return;
   }

   VideoCapturer videoCapturer = null;
   if (peerConnectionParameters.videoCallEnabled) {
      videoCapturer = createVideoCapturer();
   }

   logAndToast("Creating peer connection, delay=" + delta + "ms");
   peerConnectionClient.createPeerConnection(localRender, remoteRender, videoCapturer, signalingParameters);

   if (signalingParameters.initiator) {
      logAndToast("Creating OFFER...");
      // Create offer. Offer SDP will be sent to answering client in
      // PeerConnectionEvents.onLocalDescription event.
      peerConnectionClient.createOffer();
   }
   else {
      if (params.offerSdp != null) {
         peerConnectionClient.setRemoteDescription(params.offerSdp);
         logAndToast("Creating ANSWER...");
         // Create answer. Answer SDP will be sent to offering client in
         // PeerConnectionEvents.onLocalDescription event.
         peerConnectionClient.createAnswer();
      }
      if (params.iceCandidates != null) {
         // Add remote ICE candidates from room.
         for (IceCandidate iceCandidate : params.iceCandidates) {
            peerConnectionClient.addRemoteIceCandidate(iceCandidate);
         }
      }
   }
}