Java Class org.webrtc.AudioTrack Code Examples

Project: webrtc-android-codelab    File: MainActivity.java
private void gotRemoteStream(MediaStream stream) {
    //we have the remote stream; attach its video track to the renderer
    final VideoTrack videoTrack = stream.videoTracks.getFirst();
    //remote audio plays out automatically once enabled; no renderer is needed
    AudioTrack audioTrack = stream.audioTracks.getFirst();
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            try {
                remoteRenderer = new VideoRenderer(remoteVideoView);
                remoteVideoView.setVisibility(View.VISIBLE);
                videoTrack.addRenderer(remoteRenderer);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });
}
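Note: in this generation of the API, stream.videoTracks and stream.audioTracks are plain LinkedList fields, so getFirst() throws NoSuchElementException on an audio-only or video-only stream. A defensive variant of the callback above might look like this (a sketch; only the empty-list guard is new):

private void gotRemoteStream(MediaStream stream) {
    if (stream.videoTracks.isEmpty()) {
        return; //audio-only stream: remote audio plays without a renderer
    }
    final VideoTrack videoTrack = stream.videoTracks.getFirst();
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            remoteRenderer = new VideoRenderer(remoteVideoView);
            remoteVideoView.setVisibility(View.VISIBLE);
            videoTrack.addRenderer(remoteRenderer);
        }
    });
}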
Project: PeSanKita-android    File: WebRtcCallService.java
@Override
public void onAddStream(MediaStream stream) {
  Log.w(TAG, "onAddStream:" + stream);

  for (AudioTrack audioTrack : stream.audioTracks) {
    audioTrack.setEnabled(true);
  }

  if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
    VideoTrack videoTrack = stream.videoTracks.getFirst();
    videoTrack.setEnabled(true);
    videoTrack.addRenderer(new VideoRenderer(remoteRenderer));
  }
}
Project: Cable-Android    File: WebRtcCallService.java
@Override
public void onAddStream(MediaStream stream) {
  Log.w(TAG, "onAddStream:" + stream);

  for (AudioTrack audioTrack : stream.audioTracks) {
    audioTrack.setEnabled(true);
  }

  if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
    VideoTrack videoTrack = stream.videoTracks.getFirst();
    videoTrack.setEnabled(true);
    videoTrack.addRenderer(new VideoRenderer(remoteRenderer));
  }
}
Project: licodeAndroidClient    File: LicodeConnector.java
@Override
public void setAudioEnabled(boolean enabled) {
    if (mState != State.kConnected || lMS == null) {
        return;
    }

    for (AudioTrack audioTrack : lMS.audioTracks) {
        audioTrack.setEnabled(enabled);
    }
}
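A caller typically wires setAudioEnabled() to a mute control. A minimal sketch, assuming a LicodeConnector field named mConnector and a hypothetical mMuted flag (neither appears in the project code above):

private boolean mMuted = false;

private void toggleMute() {
    mMuted = !mMuted;
    mConnector.setAudioEnabled(!mMuted); //disabling the track silences outgoing audio
}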
Project: nc-android-webrtcpeer    File: PeerConnectionClient.java
private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}
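The returned track is normally bundled into a local MediaStream and attached to the PeerConnection. A minimal sketch of that step, assuming factory and peerConnection fields and the "ARDAMS" stream label used elsewhere in this listing:

MediaStream mediaStream = factory.createLocalMediaStream("ARDAMS");
mediaStream.addTrack(createAudioTrack());
peerConnection.addStream(mediaStream); //pre-Unified-Plan API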
Project: Achilles_Android    File: MainActivity.java
private AudioTrack createAudioTrack(MediaConstraints audioConstraints) {
    mAudioSource = mPeerConnectionFactory.createAudioSource(audioConstraints);
    mLocalAudioTrack = mPeerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, mAudioSource);
    mLocalAudioTrack.setEnabled(true);
    return mLocalAudioTrack;
}
Project: AppRTC-Android    File: PeerConnectionClient.java
private AudioTrack createAudioTrack() {
  audioSource = factory.createAudioSource(audioConstraints);
  localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
  localAudioTrack.setEnabled(enableAudio);
  return localAudioTrack;
}
Project: AndroidRTC    File: PeerConnectionClient.java
private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}
Project: webrtc-android-codelab    File: MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    //Initialize PeerConnectionFactory globals.
    //Params: context, initAudio, initVideo and videoCodecHwAcceleration
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);

    //Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);


    //Now create a VideoCapturer instance; its callback methods are available if you need them.
    VideoCapturer videoCapturerAndroid = createVideoCapturer();
    //Create MediaConstraints - Will be useful for specifying video and audio constraints. More on this later!
    MediaConstraints constraints = new MediaConstraints();

    //Create a VideoSource instance
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    //create an AudioSource instance
    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    //Start capturing video from the camera
    //width, height and fps
    videoCapturerAndroid.startCapture(1000, 1000, 30);

    //create surface renderer, init it and add the renderer to the track
    SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
    videoView.setMirror(true);

    EglBase rootEglBase = EglBase.create();
    videoView.init(rootEglBase.getEglBaseContext(), null);

    localVideoTrack.addRenderer(new VideoRenderer(videoView));
}
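The codelab snippet above never releases its resources. A sketch of the matching teardown, assuming videoCapturerAndroid, videoView and rootEglBase are promoted from locals to fields:

@Override
protected void onDestroy() {
    try {
        videoCapturerAndroid.stopCapture(); //stopCapture() may throw InterruptedException
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    videoView.release();
    rootEglBase.release();
    super.onDestroy();
}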
Project: krankygeek    File: MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        // createGui() takes x, y, width and height as percentages of the GLSurfaceView.
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Project: actor-platform    File: AndroidAudioTrack.java
public AndroidAudioTrack(AudioTrack audioTrack, AndroidMediaStream stream) {
    this.audioTrack = audioTrack;
    this.stream = stream;
}
Project: actor-platform    File: AndroidAudioTrack.java
public AudioTrack getAudioTrack() {
    return audioTrack;
}
Project: voip_android    File: PeerConnectionClient.java
private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}
Project: restcomm-android-sdk    File: PeerConnectionClient.java
private AudioTrack createAudioTrack() {
  audioSource = factory.createAudioSource(audioConstraints);
  localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
  localAudioTrack.setEnabled(enableAudio);
  return localAudioTrack;
}
Project: licodeAndroidClient    File: LicodeConnector.java
/** Begin streaming to the server - MUST run on the VcThread. */
void doPublish(VideoStreamsView view) {
    if (mVideoCapturer != null) {
        return;
    }

    MediaConstraints videoConstraints = new MediaConstraints();
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "maxWidth", "320"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "maxHeight", "240"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "maxFrameRate", "10"));
    MediaConstraints audioConstraints = new MediaConstraints();
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair(
            "googEchoCancellation2", "true"));
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair(
            "googNoiseSuppression", "true"));
    lMS = sFactory.createLocalMediaStream("ARDAMS");

    if (videoConstraints != null) {
        mVideoCapturer = getVideoCapturer();
        mVideoSource = sFactory.createVideoSource(mVideoCapturer,
                videoConstraints);
        VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0",
                mVideoSource);
        lMS.addTrack(videoTrack);
    }
    if (audioConstraints != null) {
        AudioTrack audioTrack = sFactory.createAudioTrack("ARDAMSa0",
                sFactory.createAudioSource(audioConstraints));
        lMS.addTrack(audioTrack);
        audioTrack.setEnabled(false);
    }

    StreamDescription stream = new StreamDescription("", false, true, true,
            false, null, mNick);
    MediaConstraints pcConstraints = makePcConstraints();
    MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream,
            true), stream);

    PeerConnection pc = sFactory.createPeerConnection(mIceServers,
            pcConstraints, pcObs);
    pc.addStream(lMS, new MediaConstraints());

    stream.setMedia(lMS);
    if (view != null) {
        stream.attachRenderer(new VideoCallbacks(view,
                VideoStreamsView.LOCAL_STREAM_ID));
    }
    stream.initLocal(pc, pcObs.getSdpObserver());
}
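makePcConstraints() is referenced above but not shown in this listing. A plausible minimal version for this generation of the API (the key name is the standard constraint string of that era; the project's actual contents are not shown here):

private MediaConstraints makePcConstraints() {
    MediaConstraints pcConstraints = new MediaConstraints();
    //required for DTLS-SRTP key agreement with browser endpoints of that era
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    return pcConstraints;
}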