Java 类org.webrtc.SurfaceViewRenderer 实例源码

项目:matrix-android-sdk    文件:MXWebRtcView.java   
/**
 * Sets the {@code ScalingType} applied when laying out the wrapped
 * {@code SurfaceViewRenderer}. A no-op when the value is unchanged.
 *
 * @param scalingType The scaling type to apply.
 */
private void setScalingType(ScalingType scalingType) {
    SurfaceViewRenderer surfaceViewRenderer;

    // Guard the read-compare-write of this.scalingType against concurrent
    // layout passes that read it under the same lock.
    synchronized (layoutSyncRoot) {
        if (this.scalingType == scalingType) {
            return;
        }

        this.scalingType = scalingType;

        surfaceViewRenderer = getSurfaceViewRenderer();
        surfaceViewRenderer.setScalingType(scalingType);
    }
    // Both this instance and its SurfaceViewRenderer take the value of
    // their scalingType properties into account upon their layouts.
    requestSurfaceViewRendererLayout();
}
项目:PeSanKita-android    文件:WebRtcCallService.java   
/**
 * Creates the EGL context and the local/remote video renderers. Executed
 * synchronously on the main thread (via {@code Util.runOnMainSync}) since
 * the renderers are Android views.
 */
private void initializeVideo() {
  Util.runOnMainSync(new Runnable() {
    @Override
    public void run() {
      eglBase        = EglBase.create();
      localRenderer  = new SurfaceViewRenderer(WebRtcCallService.this);
      remoteRenderer = new SurfaceViewRenderer(WebRtcCallService.this);

      // Both renderers render into the shared EGL context.
      localRenderer.init(eglBase.getEglBaseContext(), null);
      remoteRenderer.init(eglBase.getEglBaseContext(), null);

      // Same context is passed for both encode and decode acceleration.
      peerConnectionFactory.setVideoHwAccelerationOptions(eglBase.getEglBaseContext(),
                                                          eglBase.getEglBaseContext());
    }
  });
}
项目:anyRTC-RTCP-Android    文件:RTCVideoView.java   
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) {
    // Identity and placement slot for this participant's video tile.
    this.strPeerId = strPeerId;
    this.index = index;
    this.x = x;
    this.y = y;
    this.w = w;
    this.h = h;
    this.mRTCVideoLayout = videoLayout;

    // Percent-based container that fills its parent.
    mLayout = new PercentFrameLayout(ctx);
    mLayout.setLayoutParams(new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.MATCH_PARENT,
            RelativeLayout.LayoutParams.MATCH_PARENT));

    // Inflate the tile content and bind its child widgets.
    View content = View.inflate(ctx, R.layout.layout_top_right, null);
    mView = (SurfaceViewRenderer) content.findViewById(R.id.suface_view);
    btnClose = (ImageView) content.findViewById(R.id.img_close_render);
    mLocalCamera = (ImageView) content.findViewById(R.id.camera_off);
    mAudioImageView = (ImageView) content.findViewById(R.id.img_audio_close);
    mVideoImageView = (ImageView) content.findViewById(R.id.img_video_close);
    layoutCamera = (RelativeLayout) content.findViewById(R.id.layout_camera);

    // Attach the renderer to the shared EGL context and size it to fill.
    mView.init(eglBase.getEglBaseContext(), null);
    mView.setLayoutParams(new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.MATCH_PARENT,
            RelativeLayout.LayoutParams.MATCH_PARENT));
    mLayout.addView(content);
}
项目:anyRTC-Meeting-Android    文件:RTCVideoView.java   
/**
 * Builds one participant's video tile: a percent-sized container holding an
 * inflated layout whose SurfaceViewRenderer is attached to the shared EGL
 * context.
 *
 * @param strPeerId   Identifier of the peer rendered by this tile.
 * @param ctx         Context used for view creation/inflation.
 * @param eglBase     Shared EGL base whose context backs the renderer.
 * @param index       Slot index of this tile.
 * @param x           Tile x position (units defined by the layout code).
 * @param y           Tile y position.
 * @param w           Tile width.
 * @param h           Tile height.
 * @param videoLayout Layout mode for arranging meeting video tiles.
 */
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) {
    this.strPeerId = strPeerId;
    this.index = index;
    this.x = x;
    this.y = y;
    this.w = w;
    this.h = h;
    this.mRTCVideoLayout = videoLayout;

    mLayout = new PercentFrameLayout(ctx);
    mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
    View view = View.inflate(ctx, org.anyrtc.meet_kit.R.layout.layout_top_right, null);

    // Bind the child widgets of the inflated tile layout.
    mView = (SurfaceViewRenderer) view.findViewById(org.anyrtc.meet_kit.R.id.suface_view);
    btnClose = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_close_render);
    mLocalCamera = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.camera_off);
    mAudioImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_audio_close);
    mVideoImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_video_close);
    layoutCamera = (RelativeLayout) view.findViewById(org.anyrtc.meet_kit.R.id.layout_camera);
    // Attach the renderer to the shared EGL context before first use.
    mView.init(eglBase.getEglBaseContext(), null);
    mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
    mLayout.addView(view);
}
项目:Cable-Android    文件:WebRtcCallService.java   
/**
 * Creates the EGL context and the local/remote video renderers. Executed
 * synchronously on the main thread (via {@code Util.runOnMainSync}) since
 * the renderers are Android views.
 */
private void initializeVideo() {
  Util.runOnMainSync(new Runnable() {
    @Override
    public void run() {
      eglBase        = EglBase.create();
      localRenderer  = new SurfaceViewRenderer(WebRtcCallService.this);
      remoteRenderer = new SurfaceViewRenderer(WebRtcCallService.this);

      // Both renderers render into the shared EGL context.
      localRenderer.init(eglBase.getEglBaseContext(), null);
      remoteRenderer.init(eglBase.getEglBaseContext(), null);

      // Same context is passed for both encode and decode acceleration.
      peerConnectionFactory.setVideoHwAccelerationOptions(eglBase.getEglBaseContext(),
                                                          eglBase.getEglBaseContext());
    }
  });
}
项目:anyRTC-P2P-Android    文件:RTCVideoView.java   
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h) {
    // Identity and placement slot for this peer's video tile.
    this.strPeerId = strPeerId;
    this.index = index;
    this.x = x;
    this.y = y;
    this.w = w;
    this.h = h;

    // Percent-based container filling its parent.
    mLayout = new PercentFrameLayout(ctx);
    // mLayout.setBackgroundResource(R.drawable.background);
    mLayout.setLayoutParams(new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.MATCH_PARENT,
            RelativeLayout.LayoutParams.MATCH_PARENT));

    // Inflate the tile content and bind its widgets.
    View content = View.inflate(ctx, R.layout.layout_top_right, null);
    mView = (SurfaceViewRenderer) content.findViewById(R.id.suface_view);
    layoutCamera = (RelativeLayout) content.findViewById(R.id.layout_camera);

    // Attach the renderer to the shared EGL context and size it to fill.
    mView.init(eglBase.getEglBaseContext(), null);
    mView.setLayoutParams(new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.MATCH_PARENT,
            RelativeLayout.LayoutParams.MATCH_PARENT));
    mLayout.addView(content);
}
项目:yun2win-sdk-android    文件:AVMemberView.java   
/**
 * Builds the preview tile for one call member: inflates the item layout,
 * binds its views, and initializes the WebRTC renderer against the shared
 * EGL context.
 *
 * @param context     Context used for inflation.
 * @param rootEglBase Shared EGL base whose context backs the renderer.
 * @param avMember    The member this view represents.
 * @param trackType   Track type string for the rendered stream.
 */
public AVMemberView(final Context context,EglBase rootEglBase,AVMember avMember, String trackType){
    this.context = context;
    this.rootEglBase = rootEglBase;
    this.avMember = avMember;
    this.trackType = trackType;
    convertView = LayoutInflater.from(context).inflate(R.layout.avcall_member_preview_item, null);
    viewHolder = new ViewHolder();
    viewHolder.sfv_video= (SurfaceViewRenderer)convertView.findViewById(R.id.svr_video_item);
    viewHolder.iv_header = (ImageView) convertView.findViewById(R.id.iv_av_member_avatar);
    viewHolder.rl_bg = (RelativeLayout) convertView.findViewById(R.id.rl_bg);
    viewHolder.sfv_video.init(rootEglBase.getEglBaseContext(), null);
    viewHolder.sfv_video.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    viewHolder.sfv_video.setMirror(false);
    // Draw this preview surface above other (non-overlay) surfaces.
    viewHolder.sfv_video.setZOrderMediaOverlay(true);
    convertView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (onMemberViewClickListener != null)
                onMemberViewClickListener.itemClick(avMemberView);
        }
    });
    // Published before setRenderer() so the click listener above can pass it.
    avMemberView = this;
    setRenderer();
}
项目:restcomm-android-sdk    文件:RCConnection.java   
/**
 * Wires the local/remote SurfaceViewRenderers found inside the supplied
 * percent layouts and initializes them against the peer connection's
 * render context. Silently does nothing when either layout is missing.
 */
private void initializeVideo(boolean videoEnabled, PercentFrameLayout localRenderLayout, PercentFrameLayout remoteRenderLayout)
{
   // Both containers are required; bail out quietly otherwise.
   if (localRenderLayout == null || remoteRenderLayout == null) {
      return;
   }

   scalingType = ScalingType.SCALE_ASPECT_FILL;

   this.localRenderLayout = localRenderLayout;
   this.remoteRenderLayout = remoteRenderLayout;

   // Each renderer is expected to be the first child of its layout.
   localRender = (SurfaceViewRenderer) localRenderLayout.getChildAt(0);
   remoteRender = (SurfaceViewRenderer) remoteRenderLayout.getChildAt(0);

   // Local preview is drawn above the remote video surface.
   localRender.init(peerConnectionClient.getRenderContext(), null);
   localRender.setZOrderMediaOverlay(true);
   remoteRender.init(peerConnectionClient.getRenderContext(), null);
   updateVideoView(VideoViewState.NONE);
}
项目:matrix-android-sdk    文件:MXWebRtcView.java   
/**
 * Sets the z-order of this {@link WebRTCView} in the stacking space of all
 * {@code WebRTCView}s, mirroring the {@code zOrder} property of its
 * JavaScript counterpart {@code RTCView}.
 *
 * @param zOrder The z-order to set on this {@code WebRTCView}.
 */
public void setZOrder(int zOrder) {
    SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();

    if (zOrder == 0) {
        surfaceViewRenderer.setZOrderMediaOverlay(false);
    } else if (zOrder == 1) {
        surfaceViewRenderer.setZOrderMediaOverlay(true);
    } else if (zOrder == 2) {
        surfaceViewRenderer.setZOrderOnTop(true);
    }
    // Any other value is intentionally ignored, as in the original switch.
}
项目:PeSanKita-android    文件:WebRtcCallScreen.java   
/**
 * Places the local preview in a small square overlay and the remote video
 * full-screen. Attaches the renderers only once: does nothing when either
 * container already holds a child.
 */
private void setConnected(SurfaceViewRenderer localRenderer,
                          SurfaceViewRenderer remoteRenderer)
{
  if (localRenderLayout.getChildCount() != 0 || remoteRenderLayout.getChildCount() != 0) {
    return;
  }

  // Detach the renderers from any previous parents before re-homing them.
  ViewGroup localParent = (ViewGroup) localRenderer.getParent();
  if (localParent != null) {
    localParent.removeView(localRenderer);
  }

  ViewGroup remoteParent = (ViewGroup) remoteRenderer.getParent();
  if (remoteParent != null) {
    remoteParent.removeView(remoteRenderer);
  }

  // Small square local preview; remote video spans the whole layout.
  localRenderLayout.setPosition(7, 70, 25, 25);
  localRenderLayout.setSquare(true);
  remoteRenderLayout.setPosition(0, 0, 100, 100);

  localRenderer.setLayoutParams(
      new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                                   ViewGroup.LayoutParams.MATCH_PARENT));
  remoteRenderer.setLayoutParams(
      new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                                   ViewGroup.LayoutParams.MATCH_PARENT));

  // The local preview is mirrored and drawn above the remote surface.
  localRenderer.setMirror(true);
  localRenderer.setZOrderMediaOverlay(true);

  localRenderLayout.addView(localRenderer);
  remoteRenderLayout.addView(remoteRenderer);
}
项目:InsideCodec    文件:RcTest.java   
/**
 * Rate-control test harness: decodes frames from a fixed MP4 file on
 * external storage and feeds them to the encoder under test.
 * NOTE(review): the input file name "alien-covenant.mp4" is hard-coded and
 * must already exist in external storage — confirm before running.
 *
 * @param config   Encoder/test configuration (supplies the output fps).
 * @param eglBase  Shared EGL base used by the frame producer.
 * @param renderer Surface used to display produced frames.
 * @param notifier Callback sink for encoder events.
 */
public RcTest(final Config config, final EglBase eglBase, final SurfaceViewRenderer renderer,
        final Notifier notifier) {
    mEglBase = eglBase;
    mSurfaceViewRenderer = renderer;
    File videoFile = new File(Environment.getExternalStorageDirectory(),
            "alien-covenant.mp4");
    mFrameProducer = new FrameProducer(mEglBase, videoFile, config.outputFps(), this);
    mEncoderWrapper = new EncoderWrapper(config, notifier);
}
项目:Cable-Android    文件:WebRtcCallScreen.java   
/**
 * Places the local preview in a small square overlay and the remote video
 * full-screen. Attaches the renderers only once: runs only while both
 * containers are still empty.
 */
private void setConnected(SurfaceViewRenderer localRenderer,
                          SurfaceViewRenderer remoteRenderer)
{
  if (localRenderLayout.getChildCount() == 0 && remoteRenderLayout.getChildCount() == 0) {
    // Detach from any previous parent before adding to the new container;
    // Android forbids a view having two parents.
    if (localRenderer.getParent() != null) {
      ((ViewGroup)localRenderer.getParent()).removeView(localRenderer);
    }

    if (remoteRenderer.getParent() != null) {
      ((ViewGroup)remoteRenderer.getParent()).removeView(remoteRenderer);
    }

    // Small square local preview; remote video spans the whole layout.
    localRenderLayout.setPosition(7, 70, 25, 25);
    localRenderLayout.setSquare(true);
    remoteRenderLayout.setPosition(0, 0, 100, 100);

    localRenderer.setLayoutParams(new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                                                               ViewGroup.LayoutParams.MATCH_PARENT));

    remoteRenderer.setLayoutParams(new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                                                                ViewGroup.LayoutParams.MATCH_PARENT));

    // Local preview is mirrored and drawn above the remote surface.
    localRenderer.setMirror(true);
    localRenderer.setZOrderMediaOverlay(true);

    localRenderLayout.addView(localRenderer);
    remoteRenderLayout.addView(remoteRenderer);
  }
}
项目:nubo-test    文件:PeerVideoActivity.java   
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    // Keep the display on for the duration of the call screen.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    mHandler = new Handler();
    // Remote (master) and local preview renderers from the layout.
    masterView = (SurfaceViewRenderer) findViewById(R.id.gl_surface);
    localView = (SurfaceViewRenderer) findViewById(R.id.gl_surface_local);
    this.mCallStatus   = (TextView) findViewById(R.id.call_status);
    callState = CallState.IDLE;
    // Observe room events from the shared Kurento room API instance.
    MainActivity.getKurentoRoomAPIInstance().addObserver(this);
}
项目:webrtc-android-codelab    文件:MainActivity.java   
/** Binds the call-control buttons and video surfaces from the layout. */
private void initViews() {
    // Call-control buttons, each dispatching to this activity's onClick.
    start = (Button) findViewById(R.id.start_call);
    start.setOnClickListener(this);

    call = (Button) findViewById(R.id.init_call);
    call.setOnClickListener(this);

    hangup = (Button) findViewById(R.id.end_call);
    hangup.setOnClickListener(this);

    // Video surfaces for the local preview and the remote stream.
    localVideoView = (SurfaceViewRenderer) findViewById(R.id.local_gl_surface_view);
    remoteVideoView = (SurfaceViewRenderer) findViewById(R.id.remote_gl_surface_view);
}
项目:matrix-android-sdk    文件:MXWebRtcView.java   
/**
 * Creates the wrapped {@code SurfaceViewRenderer} child and applies the
 * default mirror/scaling settings.
 * NOTE(review): this constructor calls the overridable methods
 * {@code setMirror} and {@code setScalingType}; safe only while no subclass
 * overrides them — confirm if subclassing is introduced.
 *
 * @param context The Android context for view construction.
 */
public MXWebRtcView(Context context) {
    super(context);

    surfaceViewRenderer = new SurfaceViewRenderer(context);
    addView(surfaceViewRenderer);

    setMirror(false);
    setScalingType(DEFAULT_SCALING_TYPE);
}
项目:matrix-android-sdk    文件:MXWebRtcView.java   
/**
 * Sets whether this {@code WebRTCView} mirrors the video represented by
 * {@link #videoTrack} during its rendering.
 *
 * @param mirror {@code true} to mirror the rendered video; {@code false}
 *               otherwise.
 */
public void setMirror(boolean mirror) {
    // No-op when the value is unchanged.
    if (this.mirror == mirror) {
        return;
    }

    this.mirror = mirror;
    getSurfaceViewRenderer().setMirror(mirror);

    // SurfaceViewRenderer takes the value of its mirror property into
    // account upon its layout, so request a fresh layout pass.
    requestSurfaceViewRendererLayout();
}
项目:quickblox-android    文件:VideoConversationFragment.java   
/**
 * Convenience overload: applies the given mirror flag with the default
 * SCALE_ASPECT_FILL scaling type.
 *
 * @param surfaceViewRenderer The renderer to configure.
 * @param mirror              Whether the rendered video is mirrored.
 */
protected void updateVideoView(SurfaceViewRenderer surfaceViewRenderer, boolean mirror) {
    updateVideoView(surfaceViewRenderer, mirror, RendererCommon.ScalingType.SCALE_ASPECT_FILL);
}
项目:quickblox-android    文件:VideoConversationFragment.java   
/**
 * Applies the given mirror flag and scaling type to a renderer and asks it
 * to re-layout.
 *
 * @param surfaceViewRenderer The renderer to configure.
 * @param mirror              Whether the rendered video is mirrored.
 * @param scalingType         The scaling type to apply.
 */
protected void updateVideoView(SurfaceViewRenderer surfaceViewRenderer, boolean mirror, RendererCommon.ScalingType scalingType) {
    // Trace the applied settings before touching the renderer.
    Log.i(TAG, "updateVideoView mirror:" + mirror + ", scalingType = " + scalingType);
    // Mirroring and scaling are independent renderer properties.
    surfaceViewRenderer.setMirror(mirror);
    surfaceViewRenderer.setScalingType(scalingType);
    surfaceViewRenderer.requestLayout();
}
项目:Achilles_Android    文件:MainActivity.java   
/**
 * One-shot setup for the call screen: binds the local renderer, creates the
 * EGL context, connects the socket, sizes the video to the display, and
 * builds the default WebRTC constraints.
 */
private void init() {
    mLocalVideoView = (SurfaceViewRenderer) findViewById(R.id.local_video_view);

    // Init ExecutorService
    mExecutorService = Executors.newSingleThreadExecutor();

    // Socket.IO initialization
    initSocket();

    // Create video renderer
    rootEglBase = EglBase.create();
    Log.d(TAG, "Created video renderer.");

    mLocalVideoView.init(rootEglBase.getEglBaseContext(), null);
    mLocalVideoView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalVideoView.setEnableHardwareScaler(true);
    // Set ProxyRenderer target to SurfaceViewRenderer
    localProxyRenderer.setTarget(mLocalVideoView);
    mLocalVideoView.setMirror(true);

    // Check permission — left disabled by the original author.
    /*for (String permission : MANDATORY_PERMISSIONS) {
        if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
            Log.w(TAG, "Permission " + permission + " is not granted.");
            // finish();
            return;
        }
    }*/

    // Capture at the full display resolution, 30 fps.
    DisplayMetrics displayMetrics = getDisplayMetrics();
    videoWidth = displayMetrics.widthPixels;
    videoHeight = displayMetrics.heightPixels;
    videoFps = 30;

    initPeerConnectionFactory();

    // Set STUN Server
    mIceServers.add(new PeerConnection.IceServer(googleStunServer));

    // Set default SessionDescription MediaConstraints
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));

    // Set default AudioConstraints — all audio processing disabled.
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));

    // NOTE(review): the original comment said "Enable DTLS", but this
    // constraint actually enables RTP data channels — confirm the intent.
    mPcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
}
项目:VideoCRE    文件:VideoActivity.java   
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Keep the display on while capturing/recording.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_video);

    // Standard-definition pipeline: 448x800 output at 800 kbps.
    VideoConfig config = VideoConfig.builder()
            .previewWidth(1280)
            .previewHeight(720)
            .outputWidth(448)
            .outputHeight(800)
            .fps(30)
            .outputBitrate(800)
            .build();
    // High-definition pipeline: 720x1280 output at 2000 kbps.
    VideoConfig hdConfig = VideoConfig.builder()
            .previewWidth(1280)
            .previewHeight(720)
            .outputWidth(720)
            .outputHeight(1280)
            .fps(30)
            .outputBitrate(2000)
            .build();
    VideoCapturer capturer = createVideoCapturer();

    mVideoView = (SurfaceViewRenderer) findViewById(R.id.mVideoView1);
    try {
        // Two recorders share a timestamped base name: one SD, one HD.
        String filename = "video_source_record_" + System.currentTimeMillis();
        mMp4Recorder = new Mp4Recorder(
                new File(Environment.getExternalStorageDirectory(), filename + ".mp4"));
        mHdMp4Recorder = new Mp4Recorder(
                new File(Environment.getExternalStorageDirectory(), filename + "-hd.mp4"));
    } catch (IOException e) {
        // Recording is essential here; notify and close the activity.
        e.printStackTrace();
        Toast.makeText(this, "start Mp4Recorder fail!", Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    // Fan captured frames out to the preview and both encoders.
    mHwAvcEncoder = new HwAvcEncoder(config, mMp4Recorder);
    mHdHwAvcEncoder = new HwAvcEncoder(hdConfig, mHdMp4Recorder);
    mVideoSink = new VideoSink(mVideoView, mHwAvcEncoder, mHdHwAvcEncoder);
    mVideoSource = new VideoSource(getApplicationContext(), config, capturer, mVideoSink);

    mVideoView.init(mVideoSource.getRootEglBase().getEglBaseContext(), null);
    mHwAvcEncoder.start(mVideoSource.getRootEglBase());
    mHdHwAvcEncoder.start(mVideoSource.getRootEglBase());

    initView();
}
项目:webrtc-android-codelab    文件:MainActivity.java   
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    //Initialize PeerConnectionFactory globals.
    //Params are context, initAudio,initVideo and videoCodecHwAcceleration
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);

    //Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);


    //Now create a VideoCapturer instance. Callback methods are there if you want to do something! Duh!
    VideoCapturer videoCapturerAndroid = createVideoCapturer();
    //Create MediaConstraints - Will be useful for specifying video and audio constraints. More on this later!
    MediaConstraints constraints = new MediaConstraints();

    //Create a VideoSource instance
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    //create an AudioSource instance
    //NOTE(review): localAudioTrack is created but never attached to a stream
    //or renderer anywhere in this method — confirm this is intentional.
    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    //we will start capturing the video from the camera
    //width,height and fps
    videoCapturerAndroid.startCapture(1000, 1000, 30);

    //create surface renderer, init it and add the renderer to the track
    SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
    videoView.setMirror(true);

    EglBase rootEglBase = EglBase.create();
    videoView.init(rootEglBase.getEglBaseContext(), null);

    localVideoTrack.addRenderer(new VideoRenderer(videoView));


}
项目:matrix-android-sdk    文件:MXWebRtcView.java   
/**
 * {@inheritDoc}
 *
 * Positions the child {@code SurfaceViewRenderer} within this view
 * according to the current scaling type and the last known frame
 * dimensions/rotation.
 */
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    int height = b - t;
    int width = r - l;

    // Hoisted out of the else branch: in the original code the renderer was
    // declared inside the else block but referenced after it, which does not
    // compile. It must be in scope for the final layout() call.
    SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();

    if (height == 0 || width == 0) {
        // Degenerate area: collapse the child to nothing.
        l = t = r = b = 0;
    } else {
        int frameHeight;
        int frameRotation;
        int frameWidth;
        ScalingType scalingType;

        // Snapshot the frame geometry under the lock so the layout works on
        // a consistent set of values.
        synchronized (layoutSyncRoot) {
            frameHeight = this.frameHeight;
            frameRotation = this.frameRotation;
            frameWidth = this.frameWidth;
            scalingType = this.scalingType;
        }

        switch (scalingType) {
            case SCALE_ASPECT_FILL:
                // Fill this ViewGroup with surfaceViewRenderer and the latter
                // will take care of filling itself with the video similarly to
                // the cover value of the CSS property object-fit.
                r = width;
                l = 0;
                b = height;
                t = 0;
                break;
            case SCALE_ASPECT_FIT:
            default:
                // Lay surfaceViewRenderer out inside this ViewGroup in accord
                // with the contain value of the CSS property object-fit.
                // SurfaceViewRenderer will fill itself with the video similarly
                // to the cover or contain value of the CSS property object-fit
                // (which will not matter, eventually).
                if (frameHeight == 0 || frameWidth == 0) {
                    // No frame received yet: nothing sensible to show.
                    l = t = r = b = 0;
                } else {
                    // A 90/270-degree rotation swaps the effective aspect ratio.
                    float frameAspectRatio
                            = (frameRotation % 180 == 0)
                            ? frameWidth / (float) frameHeight
                            : frameHeight / (float) frameWidth;
                    Point frameDisplaySize
                            = RendererCommon.getDisplaySize(
                            scalingType,
                            frameAspectRatio,
                            width, height);

                    // Center the scaled frame within the available area.
                    l = (width - frameDisplaySize.x) / 2;
                    t = (height - frameDisplaySize.y) / 2;
                    r = l + frameDisplaySize.x;
                    b = t + frameDisplaySize.y;
                }
                break;
        }
    }
    surfaceViewRenderer.layout(l, t, r, b);
}
项目:matrix-android-sdk    文件:MXWebRtcView.java   
/**
 * Gets the {@code SurfaceViewRenderer} which renders {@link #videoTrack}.
 * Explicitly defined and used in order to facilitate switching the instance
 * at compile time. For example, reduces the number of modifications
 * necessary to switch the implementation from a {@code SurfaceViewRenderer}
 * that is a child of a {@code WebRTCView} to {@code WebRTCView} extending
 * {@code SurfaceViewRenderer}.
 *
 * @return The {@code SurfaceViewRenderer} which renders {@code videoTrack}.
 */
private SurfaceViewRenderer getSurfaceViewRenderer() {
    // Redundant "final" removed: private methods cannot be overridden.
    return surfaceViewRenderer;
}