Java class org.webrtc.VideoTrack: example source code

Project: react-native-webrtc    File: WebRTCView.java
/**
 * Sets the {@code VideoTrack} to be rendered by this {@code WebRTCView}.
 *
 * @param videoTrack The {@code VideoTrack} to be rendered by this
 * {@code WebRTCView} or {@code null}.
 */
private void setVideoTrack(VideoTrack videoTrack) {
    VideoTrack oldValue = this.videoTrack;

    if (oldValue != videoTrack) {
        if (oldValue != null) {
            removeRendererFromVideoTrack();
        }

        this.videoTrack = videoTrack;

        if (videoTrack != null) {
            tryAddRendererToVideoTrack();
        }
    }
}
Project: webrtc-android-codelab    File: MainActivity.java
private void gotRemoteStream(MediaStream stream) {
    //We have the remote video stream; add it to the renderer.
    final VideoTrack videoTrack = stream.videoTracks.getFirst();
    AudioTrack audioTrack = stream.audioTracks.getFirst();
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            try {
                remoteRenderer = new VideoRenderer(remoteVideoView);
                remoteVideoView.setVisibility(View.VISIBLE);
                videoTrack.addRenderer(remoteRenderer);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });

}
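Note: VideoRenderer and VideoTrack.addRenderer(...) were removed in newer releases of the Android WebRTC library; the replacement is VideoTrack.addSink(VideoSink), and SurfaceViewRenderer implements VideoSink directly. A minimal sketch of the same wiring against a recent org.webrtc version, assuming remoteVideoView is an already-initialized SurfaceViewRenderer:

private void gotRemoteStream(MediaStream stream) {
    // On newer versions videoTracks is a plain List, so get(0) replaces getFirst().
    final VideoTrack videoTrack = stream.videoTracks.get(0);
    runOnUiThread(() -> {
        remoteVideoView.setVisibility(View.VISIBLE);
        // SurfaceViewRenderer implements VideoSink; no VideoRenderer wrapper needed.
        videoTrack.addSink(remoteVideoView);
    });
}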
Project: webrtcpeer-android    File: MediaResourceManager.java
private VideoTrack createCapturerVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);
    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Project: respoke-sdk-android    File: RespokeCall.java
private void addLocalStreams(Context context) {
    AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

    localStream = peerConnectionFactory.createLocalMediaStream("ARDAMS");

    if (!audioOnly) {
        VideoCapturer capturer = getVideoCapturer();
        MediaConstraints videoConstraints = new MediaConstraints();
        videoSource = peerConnectionFactory.createVideoSource(capturer, videoConstraints);
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        localStream.addTrack(videoTrack);
    }

    localStream.addTrack(peerConnectionFactory.createAudioTrack("ARDAMSa0", peerConnectionFactory.createAudioSource(new MediaConstraints())));

    peerConnection.addStream(localStream);
}
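createLocalMediaStream and PeerConnection.addStream belong to the legacy Plan B API. Under Unified Plan, the default in newer libwebrtc, tracks are attached individually with PeerConnection.addTrack. A hedged sketch of the equivalent wiring, reusing the videoTrack and factory from the snippet above (audioTrack stands for the audio track the original creates inline; needs java.util.List and java.util.Collections):

// Unified Plan sketch; assumes a newer libwebrtc where addStream is deprecated.
AudioTrack audioTrack = peerConnectionFactory.createAudioTrack(
        "ARDAMSa0", peerConnectionFactory.createAudioSource(new MediaConstraints()));
// The stream ID only groups the tracks for the remote side.
List<String> streamIds = Collections.singletonList("ARDAMS");
peerConnection.addTrack(videoTrack, streamIds);
peerConnection.addTrack(audioTrack, streamIds);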
Project: matrix-android-sdk    File: MXWebRtcView.java
/**
 * Sets the {@code VideoTrack} to be rendered by this {@code WebRTCView}.
 *
 * @param videoTrack The {@code VideoTrack} to be rendered by this
 *                   {@code WebRTCView} or {@code null}.
 */
private void setVideoTrack(VideoTrack videoTrack) {
    VideoTrack oldValue = this.videoTrack;

    if (oldValue != videoTrack) {
        if (oldValue != null) {
            removeRendererFromVideoTrack();
        }

        this.videoTrack = videoTrack;

        if (videoTrack != null) {
            tryAddRendererToVideoTrack();
        }
    }
}
Project: react-native-webrtc    File: WebRTCView.java
/**
 * Sets the {@code MediaStream} to be rendered by this {@code WebRTCView}.
 * The implementation renders the first {@link VideoTrack}, if any, of the
 * specified {@code mediaStream}.
 *
 * @param mediaStream The {@code MediaStream} to be rendered by this
 * {@code WebRTCView} or {@code null}.
 */
public void setStream(MediaStream mediaStream) {
    VideoTrack videoTrack;

    if (mediaStream == null) {
        videoTrack = null;
    } else {
        List<VideoTrack> videoTracks = mediaStream.videoTracks;

        videoTrack = videoTracks.isEmpty() ? null : videoTracks.get(0);
    }

    setVideoTrack(videoTrack);
}
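Because setVideoTrack ignores redundant calls and setStream(null) detaches cleanly, a caller can simply forward whatever stream it currently has. A hypothetical usage fragment from a PeerConnection.Observer (the webRTCView reference and the surrounding Activity are assumptions):

// Hypothetical caller; webRTCView is assumed to be this WebRTCView instance.
@Override
public void onAddStream(final MediaStream stream) {
    runOnUiThread(() -> webRTCView.setStream(stream));
}

@Override
public void onRemoveStream(MediaStream stream) {
    runOnUiThread(() -> webRTCView.setStream(null));
}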
Project: PeSanKita-android    File: WebRtcCallService.java
@Override
public void onAddStream(MediaStream stream) {
  Log.w(TAG, "onAddStream:" + stream);

  for (AudioTrack audioTrack : stream.audioTracks) {
    audioTrack.setEnabled(true);
  }

  if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
    VideoTrack videoTrack = stream.videoTracks.getFirst();
    videoTrack.setEnabled(true);
    videoTrack.addRenderer(new VideoRenderer(remoteRenderer));
  }

}
Project: nc-android-webrtcpeer    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    if (localRender != null) {
        localVideoTrack.setEnabled(renderVideo);
        localVideoTrack.addRenderer(new VideoRenderer(localRender));
    }

    return localVideoTrack;
}
Project: Achilles_Android    File: MainActivity.java
private VideoTrack createVideoTrack(VideoCapturer videoCapturer) {
    mVideoSource = mPeerConnectionFactory.createVideoSource(videoCapturer);
    videoCapturer.startCapture(videoWidth, videoHeight, videoFps);

    mLocalVideoTrack = mPeerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, mVideoSource);
    mLocalVideoTrack.setEnabled(true);
    mLocalVideoTrack.addRenderer(new VideoRenderer(localProxyRenderer));
    return mLocalVideoTrack;
}
Project: AppRTC-Android    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
  videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(videoWidth, videoHeight, videoFps);

  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderVideo);
  localVideoTrack.addSink(localRender);
  return localVideoTrack;
}
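Unlike the addRenderer-based snippets above, AppRTC-Android targets a libwebrtc release where VideoRenderer is gone: addSink(VideoSink) attaches the renderer directly, and localRender here is a VideoSink, typically a SurfaceViewRenderer or a proxy that forwards to one.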
Project: AndroidRTC    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Project: Cable-Android    File: WebRtcCallService.java
@Override
public void onAddStream(MediaStream stream) {
  Log.w(TAG, "onAddStream:" + stream);

  for (AudioTrack audioTrack : stream.audioTracks) {
    audioTrack.setEnabled(true);
  }

  if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
    VideoTrack videoTrack = stream.videoTracks.getFirst();
    videoTrack.setEnabled(true);
    videoTrack.addRenderer(new VideoRenderer(remoteRenderer));
  }
}
Project: webrtc-android    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Project: DeviceConnect-Android    File: MediaStream.java
/**
 * Creates an instance of VideoTrack to be used with a VideoCapturerAndroid.
 * @param capturer Instance of VideoCapturerAndroid
 * @return VideoTrack
 */
private VideoTrack createVideoTrack(final VideoCapturerAndroid capturer) {
    mVideoRender = mOption.getRender();
    mVideoSource = mFactory.createVideoSource(capturer, mVideoConstraints);
    mVideoTrack = mFactory.createVideoTrack(VIDEO_TRACK_ID, mVideoSource);
    mVideoTrack.setEnabled(mEnableVideo);
    mVideoTrack.addRenderer(new VideoRenderer(mVideoRender));
    return mVideoTrack;
}
Project: webrtcpeer-android    File: MediaResourceManager.java
public void run() {
    Log.d(TAG, "Attaching VideoRenderer to remote stream (" + remoteStream + ")");

    // Check if the remote stream has a video track
    if (remoteStream.videoTracks.size() == 1) {
        // Get the video track
        VideoTrack remoteVideoTrack = remoteStream.videoTracks.get(0);
        // Set video track enabled if we have enabled video rendering
        remoteVideoTrack.setEnabled(renderVideo);

        VideoRenderer videoRenderer = remoteVideoRenderers.get(remoteRender);
        if (videoRenderer != null) {
            MediaStream mediaStream = remoteVideoMediaStreams.get(videoRenderer);
            if (mediaStream != null) {
                VideoTrack videoTrack = remoteVideoTracks.get(mediaStream);
                if (videoTrack != null) {
                    videoTrack.removeRenderer(videoRenderer);
                }
            }
        }

        VideoRenderer newVideoRenderer = new VideoRenderer(remoteRender);
        remoteVideoTrack.addRenderer(newVideoRenderer);
        remoteVideoRenderers.put(remoteRender, newVideoRenderer);
        remoteVideoMediaStreams.put(newVideoRenderer, remoteStream);
        remoteVideoTracks.put(remoteStream, remoteVideoTrack);
        Log.d(TAG, "Attached.");
    }
}
Project: webrtcpeer-android    File: MediaResourceManager.java
void setVideoEnabled(final boolean enable) {
    executor.execute(new Runnable() {
        @Override
        public void run() {
            renderVideo = enable;
            if (localVideoTrack != null) {
                localVideoTrack.setEnabled(renderVideo);
            }
            for (VideoTrack tv : remoteVideoTracks.values()) {
                tv.setEnabled(renderVideo);
            }
        }
    });
}
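Because the flag is applied on the manager's executor and cached in renderVideo, which createCapturerVideoTrack above also reads, tracks created later pick up the current setting too. A hypothetical UI hookup (the mediaResourceManager and videoToggle names are assumptions):

// Hypothetical usage; both references are assumed to be in scope.
videoToggle.setOnCheckedChangeListener(
        (button, isChecked) -> mediaResourceManager.setVideoEnabled(isChecked));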
Project: voip_android    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Project: restcomm-android-sdk    File: PeerConnectionClient.java
private VideoTrack createVideoTrack(VideoCapturer capturer) {
  videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(videoWidth, videoHeight, videoFps);

  localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
  localVideoTrack.setEnabled(renderLocalVideo);
  localVideoTrack.addSink(localRender);
  return localVideoTrack;
}
Project: matrix-android-sdk    File: MXWebRtcView.java
/**
 * Sets the {@code MediaStream} to be rendered by this {@code WebRTCView}.
 * The implementation renders the first {@link VideoTrack}, if any, of the
 * specified {@code mediaStream}.
 *
 * @param mediaStream The {@code MediaStream} to be rendered by this
 *                    {@code WebRTCView} or {@code null}.
 */
public void setStream(MediaStream mediaStream) {
    VideoTrack videoTrack;

    if (mediaStream == null) {
        videoTrack = null;
    } else {
        List<VideoTrack> videoTracks = mediaStream.videoTracks;

        videoTrack = videoTracks.isEmpty() ? null : videoTracks.get(0);
    }

    setVideoTrack(videoTrack);
}
Project: webrtc-android-codelab    File: MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    //Initialize PeerConnectionFactory globals.
    //Params are context, initAudio, initVideo and videoCodecHwAcceleration
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);

    //Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);


    //Now create a VideoCapturer instance. Callback hooks are available if you need them.
    VideoCapturer videoCapturerAndroid = createVideoCapturer();
    //Create MediaConstraints - Will be useful for specifying video and audio constraints. More on this later!
    MediaConstraints constraints = new MediaConstraints();

    //Create a VideoSource instance
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    //create an AudioSource instance
    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    //Start capturing video from the camera.
    //Params: width, height and fps.
    videoCapturerAndroid.startCapture(1000, 1000, 30);

    //create surface renderer, init it and add the renderer to the track
    SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
    videoView.setMirror(true);

    EglBase rootEglBase = EglBase.create();
    videoView.init(rootEglBase.getEglBaseContext(), null);

    localVideoTrack.addRenderer(new VideoRenderer(videoView));


}
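PeerConnectionFactory.initializeAndroidGlobals and the bare PeerConnectionFactory constructor used above were removed in later releases. A rough sketch of the same setup against the current builder-style API (names per recent org.webrtc; treat the details as assumptions if you target another version):

// Modern-API sketch; details vary between org.webrtc releases.
PeerConnectionFactory.initialize(
        PeerConnectionFactory.InitializationOptions.builder(this)
                .createInitializationOptions());
PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .createPeerConnectionFactory();

EglBase eglBase = EglBase.create();
SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());

// The capturer must now be initialized against the source's CapturerObserver.
VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
videoCapturerAndroid.initialize(helper, this, videoSource.getCapturerObserver());
videoCapturerAndroid.startCapture(1000, 1000, 30);

VideoTrack localVideoTrack = factory.createVideoTrack("100", videoSource);
localVideoTrack.addSink(videoView); // SurfaceViewRenderer implements VideoSink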
Project: krankygeek    File: MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Project: actor-platform    File: AndroidVideoTrack.java
public AndroidVideoTrack(VideoTrack videoTrack, AndroidMediaStream stream) {
    this.videoTrack = videoTrack;
    this.stream = stream;
}
Project: actor-platform    File: AndroidVideoTrack.java
public VideoTrack getVideoTrack() {
    return videoTrack;
}
Project: WebRTCDemo    File: AppRTCDemoActivity.java
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
  factory = new PeerConnectionFactory();

  MediaConstraints pcConstraints = appRtcClient.pcConstraints();
  pcConstraints.optional.add(
      new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
  pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

  createDataChannelToRegressionTestBug2302(pc);  // See method comment.

  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);

  {
    final PeerConnection finalPC = pc;
    final Runnable repeatedStatsLogger = new Runnable() {
        public void run() {
          synchronized (quit[0]) {
            if (quit[0]) {
              return;
            }
            final Runnable runnableThis = this;
            if (hudView.getVisibility() == View.INVISIBLE) {
              vsv.postDelayed(runnableThis, 1000);
              return;
            }
            boolean success = finalPC.getStats(new StatsObserver() {
                public void onComplete(final StatsReport[] reports) {
                  runOnUiThread(new Runnable() {
                      public void run() {
                        updateHUD(reports);
                      }
                    });
                  for (StatsReport report : reports) {
                    Log.d(TAG, "Stats: " + report.toString());
                  }
                  vsv.postDelayed(runnableThis, 1000);
                }
              }, null);
            if (!success) {
              throw new RuntimeException("getStats() return false!");
            }
          }
        }
      };
    vsv.postDelayed(repeatedStatsLogger, 1000);
  }

  {
    logAndToast("Creating local video source...");
    MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
    if (appRtcClient.videoConstraints() != null) {
      VideoCapturer capturer = getVideoCapturer();
      videoSource = factory.createVideoSource(
          capturer, appRtcClient.videoConstraints());
      VideoTrack videoTrack =
          factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(localRender));
      lMS.addTrack(videoTrack);
    }
    if (appRtcClient.audioConstraints() != null) {
      lMS.addTrack(factory.createAudioTrack(
          "ARDAMSa0",
          factory.createAudioSource(appRtcClient.audioConstraints())));
    }
    pc.addStream(lMS, new MediaConstraints());
  }
  logAndToast("Waiting for ICE candidates...");
}
Project: apprtc-android    File: AppRTCDemoActivity.java
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
  factory = new PeerConnectionFactory();

  MediaConstraints pcConstraints = appRtcClient.pcConstraints();
  pcConstraints.optional.add(
      new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
  pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

  createDataChannelToRegressionTestBug2302(pc);  // See method comment.

  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);

  {
    final PeerConnection finalPC = pc;
    final Runnable repeatedStatsLogger = new Runnable() {
        public void run() {
          synchronized (quit[0]) {
            if (quit[0]) {
              return;
            }
            final Runnable runnableThis = this;
            if (hudView.getVisibility() == View.INVISIBLE) {
              vsv.postDelayed(runnableThis, 1000);
              return;
            }
            boolean success = finalPC.getStats(new StatsObserver() {
                public void onComplete(final StatsReport[] reports) {
                  runOnUiThread(new Runnable() {
                      public void run() {
                        updateHUD(reports);
                      }
                    });
                  for (StatsReport report : reports) {
                    Log.d(TAG, "Stats: " + report.toString());
                  }
                  vsv.postDelayed(runnableThis, 1000);
                }
              }, null);
            if (!success) {
              throw new RuntimeException("getStats() return false!");
            }
          }
        }
      };
    vsv.postDelayed(repeatedStatsLogger, 1000);
  }

  {
    logAndToast("Creating local video source...");
    MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
    if (appRtcClient.videoConstraints() != null) {
      VideoCapturer capturer = getVideoCapturer();
      videoSource = factory.createVideoSource(
          capturer, appRtcClient.videoConstraints());
      VideoTrack videoTrack =
          factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(localRender));
      lMS.addTrack(videoTrack);
    }
    if (appRtcClient.audioConstraints() != null) {
      lMS.addTrack(factory.createAudioTrack(
          "ARDAMSa0",
          factory.createAudioSource(appRtcClient.audioConstraints())));
    }
    pc.addStream(lMS, new MediaConstraints());
  }
  logAndToast("Waiting for ICE candidates...");
}
Project: licodeAndroidClient    File: LicodeConnector.java
/** begin streaming to server - MUST run on VcThread */
void doPublish(VideoStreamsView view) {
    if (mVideoCapturer != null) {
        return;
    }

    MediaConstraints videoConstraints = new MediaConstraints();
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "maxWidth", "320"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "maxHeight", "240"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "maxFrameRate", "10"));
    MediaConstraints audioConstraints = new MediaConstraints();
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair(
            "googEchoCancellation2", "true"));
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair(
            "googNoiseSuppression", "true"));
    lMS = sFactory.createLocalMediaStream("ARDAMS");

    if (videoConstraints != null) {
        mVideoCapturer = getVideoCapturer();
        mVideoSource = sFactory.createVideoSource(mVideoCapturer,
                videoConstraints);
        VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0",
                mVideoSource);
        lMS.addTrack(videoTrack);
    }
    if (audioConstraints != null) {
        AudioTrack audioTrack = sFactory.createAudioTrack("ARDAMSa0",
                sFactory.createAudioSource(audioConstraints));
        lMS.addTrack(audioTrack);
        audioTrack.setEnabled(false);
    }

    StreamDescription stream = new StreamDescription("", false, true, true,
            false, null, mNick);
    MediaConstraints pcConstraints = makePcConstraints();
    MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream,
            true), stream);

    PeerConnection pc = sFactory.createPeerConnection(mIceServers,
            pcConstraints, pcObs);
    pc.addStream(lMS, new MediaConstraints());

    stream.setMedia(lMS);
    if (view != null) {
        stream.attachRenderer(new VideoCallbacks(view,
                VideoStreamsView.LOCAL_STREAM_ID));
    }
    stream.initLocal(pc, pcObs.getSdpObserver());
}
Project: droidkit-webrtc    File: AppRTCDemoActivity.java
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
  factory = new PeerConnectionFactory();

  MediaConstraints pcConstraints = appRtcClient.pcConstraints();
  pcConstraints.optional.add(
      new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
  pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);

  createDataChannelToRegressionTestBug2302(pc);  // See method comment.

  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);

  {
    final PeerConnection finalPC = pc;
    final Runnable repeatedStatsLogger = new Runnable() {
        public void run() {
          synchronized (quit[0]) {
            if (quit[0]) {
              return;
            }
            final Runnable runnableThis = this;
            if (hudView.getVisibility() == View.INVISIBLE) {
              vsv.postDelayed(runnableThis, 1000);
              return;
            }
            boolean success = finalPC.getStats(new StatsObserver() {
                public void onComplete(final StatsReport[] reports) {
                  runOnUiThread(new Runnable() {
                      public void run() {
                        updateHUD(reports);
                      }
                    });
                  for (StatsReport report : reports) {
                    Log.d(TAG, "Stats: " + report.toString());
                  }
                  vsv.postDelayed(runnableThis, 1000);
                }
              }, null);
            if (!success) {
              throw new RuntimeException("getStats() return false!");
            }
          }
        }
      };
    vsv.postDelayed(repeatedStatsLogger, 1000);
  }

  {
    logAndToast("Creating local video source...");
    MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
    if (appRtcClient.videoConstraints() != null) {
      VideoCapturer capturer = getVideoCapturer();
      videoSource = factory.createVideoSource(
          capturer, appRtcClient.videoConstraints());
      VideoTrack videoTrack =
          factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(localRender));
      lMS.addTrack(videoTrack);
    }
    if (appRtcClient.audioConstraints() != null) {
      lMS.addTrack(factory.createAudioTrack(
          "ARDAMSa0",
          factory.createAudioSource(appRtcClient.audioConstraints())));
    }
    pc.addStream(lMS, new MediaConstraints());
  }
  logAndToast("Waiting for ICE candidates...");
}
Project: appspotdemo-mono    File: AppRTCDemoActivity.java
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
  factory = new PeerConnectionFactory();
  pc = factory.createPeerConnection(
      iceServers, appRtcClient.pcConstraints(), pcObserver);

  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);

  {
    final PeerConnection finalPC = pc;
    final Runnable repeatedStatsLogger = new Runnable() {
        public void run() {
          synchronized (quit[0]) {
            if (quit[0]) {
              return;
            }
            final Runnable runnableThis = this;
            boolean success = finalPC.getStats(new StatsObserver() {
                public void onComplete(StatsReport[] reports) {
                  for (StatsReport report : reports) {
                    Log.d(TAG, "Stats: " + report.toString());
                  }
                  vsv.postDelayed(runnableThis, 10000);
                }
              }, null);
            if (!success) {
              throw new RuntimeException("getStats() return false!");
            }
          }
        }
      };
    vsv.postDelayed(repeatedStatsLogger, 10000);
  }

  {
    logAndToast("Creating local video source...");
    MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
    if (appRtcClient.videoConstraints() != null) {
      VideoCapturer capturer = getVideoCapturer();
      videoSource = factory.createVideoSource(
          capturer, appRtcClient.videoConstraints());
      VideoTrack videoTrack =
          factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(new VideoCallbacks(
          vsv, VideoStreamsView.Endpoint.LOCAL)));
      lMS.addTrack(videoTrack);
    }
    lMS.addTrack(factory.createAudioTrack("ARDAMSa0"));
    pc.addStream(lMS, new MediaConstraints());
  }
  logAndToast("Waiting for ICE candidates...");
}