/**
 * Sets the {@code ScalingType} of this view and of its backing
 * {@code SurfaceViewRenderer}, then requests a layout pass if the value
 * actually changed. No-op (and no relayout) when the value is unchanged.
 *
 * @param scalingType The scaling type to apply.
 */
private void setScalingType(ScalingType scalingType) {
    SurfaceViewRenderer surfaceViewRenderer;

    // The field write and renderer update happen under layoutSyncRoot so a
    // concurrent onLayout reads a consistent scalingType.
    synchronized (layoutSyncRoot) {
        if (this.scalingType == scalingType) {
            return;
        }
        this.scalingType = scalingType;
        surfaceViewRenderer = getSurfaceViewRenderer();
        surfaceViewRenderer.setScalingType(scalingType);
    }
    // Both this instance and its SurfaceViewRenderer take the value of
    // their scalingType properties into account upon their layouts.
    requestSurfaceViewRendererLayout();
}
/**
 * Creates the EGL context and the local/remote renderers on the main
 * thread, then hands the shared EGL context to the PeerConnectionFactory
 * for hardware-accelerated encoding/decoding.
 */
private void initializeVideo() {
    Util.runOnMainSync(() -> {
        eglBase = EglBase.create();

        localRenderer = new SurfaceViewRenderer(WebRtcCallService.this);
        localRenderer.init(eglBase.getEglBaseContext(), null);

        remoteRenderer = new SurfaceViewRenderer(WebRtcCallService.this);
        remoteRenderer.init(eglBase.getEglBaseContext(), null);

        peerConnectionFactory.setVideoHwAccelerationOptions(
                eglBase.getEglBaseContext(), eglBase.getEglBaseContext());
    });
}
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) { this.strPeerId = strPeerId; this.index = index; this.x = x; this.y = y; this.w = w; this.h = h; this.mRTCVideoLayout = videoLayout; mLayout = new PercentFrameLayout(ctx); mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); View view = View.inflate(ctx, R.layout.layout_top_right, null); mView = (SurfaceViewRenderer) view.findViewById(R.id.suface_view); btnClose = (ImageView) view.findViewById(R.id.img_close_render); mLocalCamera = (ImageView) view.findViewById(R.id.camera_off); mAudioImageView = (ImageView) view.findViewById(R.id.img_audio_close); mVideoImageView = (ImageView) view.findViewById(R.id.img_video_close); layoutCamera = (RelativeLayout) view.findViewById(R.id.layout_camera); mView.init(eglBase.getEglBaseContext(), null); mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); mLayout.addView(view); }
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) { this.strPeerId = strPeerId; this.index = index; this.x = x; this.y = y; this.w = w; this.h = h; this.mRTCVideoLayout = videoLayout; mLayout = new PercentFrameLayout(ctx); mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); View view = View.inflate(ctx, org.anyrtc.meet_kit.R.layout.layout_top_right, null); mView = (SurfaceViewRenderer) view.findViewById(org.anyrtc.meet_kit.R.id.suface_view); btnClose = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_close_render); mLocalCamera = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.camera_off); mAudioImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_audio_close); mVideoImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_video_close); layoutCamera = (RelativeLayout) view.findViewById(org.anyrtc.meet_kit.R.id.layout_camera); mView.init(eglBase.getEglBaseContext(), null); mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); mLayout.addView(view); }
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h) { this.strPeerId = strPeerId; this.index = index; this.x = x; this.y = y; this.w = w; this.h = h; mLayout = new PercentFrameLayout(ctx); // mLayout.setBackgroundResource(R.drawable.background); mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); View view = View.inflate(ctx, R.layout.layout_top_right, null); mView = (SurfaceViewRenderer) view.findViewById(R.id.suface_view); layoutCamera = (RelativeLayout) view.findViewById(R.id.layout_camera); mView.init(eglBase.getEglBaseContext(), null); mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); mLayout.addView(view); }
/**
 * One member's preview cell in the call grid: avatar, background and a
 * {@code SurfaceViewRenderer} wired to the shared EGL context.
 */
public AVMemberView(final Context context, EglBase rootEglBase, AVMember avMember, String trackType) {
    this.context = context;
    this.rootEglBase = rootEglBase;
    this.avMember = avMember;
    this.trackType = trackType;

    convertView = LayoutInflater.from(context).inflate(R.layout.avcall_member_preview_item, null);
    viewHolder = new ViewHolder();
    viewHolder.sfv_video = (SurfaceViewRenderer) convertView.findViewById(R.id.svr_video_item);
    viewHolder.iv_header = (ImageView) convertView.findViewById(R.id.iv_av_member_avatar);
    viewHolder.rl_bg = (RelativeLayout) convertView.findViewById(R.id.rl_bg);

    // Renderer setup: letterboxed (FIT) scaling, unmirrored, drawn above
    // the main video surface.
    viewHolder.sfv_video.init(rootEglBase.getEglBaseContext(), null);
    viewHolder.sfv_video.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    viewHolder.sfv_video.setMirror(false);
    viewHolder.sfv_video.setZOrderMediaOverlay(true);

    // Forward taps on the cell to the registered listener, if any.
    convertView.setOnClickListener(v -> {
        if (onMemberViewClickListener != null) {
            onMemberViewClickListener.itemClick(avMemberView);
        }
    });

    avMemberView = this;
    setRenderer();
}
/**
 * Pulls the local/remote {@code SurfaceViewRenderer}s out of their percent
 * layouts and initializes them against the peer connection's render
 * context. Does nothing when either layout is missing.
 * (The {@code videoEnabled} flag is not consulted here.)
 */
private void initializeVideo(boolean videoEnabled,
                             PercentFrameLayout localRenderLayout,
                             PercentFrameLayout remoteRenderLayout) {
    if (localRenderLayout == null || remoteRenderLayout == null) {
        return;
    }

    scalingType = ScalingType.SCALE_ASPECT_FILL;

    this.localRenderLayout = localRenderLayout;
    this.remoteRenderLayout = remoteRenderLayout;

    // Each layout's first child is the renderer itself.
    localRender = (SurfaceViewRenderer) localRenderLayout.getChildAt(0);
    remoteRender = (SurfaceViewRenderer) remoteRenderLayout.getChildAt(0);

    localRender.init(peerConnectionClient.getRenderContext(), null);
    localRender.setZOrderMediaOverlay(true); // local preview sits on top
    remoteRender.init(peerConnectionClient.getRenderContext(), null);

    updateVideoView(VideoViewState.NONE);
}
/**
 * Sets the z-order of this {@link WebRTCView} in the stacking space of all
 * {@code WebRTCView}s. For more details, refer to the documentation of the
 * {@code zOrder} property of the JavaScript counterpart of
 * {@code WebRTCView} i.e. {@code RTCView}.
 *
 * @param zOrder The z-order to set on this {@code WebRTCView}.
 */
public void setZOrder(int zOrder) {
    SurfaceViewRenderer renderer = getSurfaceViewRenderer();

    if (zOrder == 0) {
        renderer.setZOrderMediaOverlay(false);
    } else if (zOrder == 1) {
        renderer.setZOrderMediaOverlay(true);
    } else if (zOrder == 2) {
        renderer.setZOrderOnTop(true);
    }
    // Any other value is intentionally ignored, as in the original switch.
}
/**
 * When neither container holds a renderer yet, places the local preview as
 * a small mirrored picture-in-picture square and the remote video
 * full-screen. Does nothing if either container is already populated.
 */
private void setConnected(SurfaceViewRenderer localRenderer, SurfaceViewRenderer remoteRenderer) {
    if (localRenderLayout.getChildCount() != 0 || remoteRenderLayout.getChildCount() != 0) {
        return; // Already attached; leave the existing hierarchy alone.
    }

    detachFromParent(localRenderer);
    detachFromParent(remoteRenderer);

    // Local preview: a 25%x25% square near the bottom-left corner;
    // remote video covers the whole layout.
    localRenderLayout.setPosition(7, 70, 25, 25);
    localRenderLayout.setSquare(true);
    remoteRenderLayout.setPosition(0, 0, 100, 100);

    localRenderer.setLayoutParams(new FrameLayout.LayoutParams(
            ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
    remoteRenderer.setLayoutParams(new FrameLayout.LayoutParams(
            ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));

    // Mirror the local camera feed and draw it above the remote surface.
    localRenderer.setMirror(true);
    localRenderer.setZOrderMediaOverlay(true);

    localRenderLayout.addView(localRenderer);
    remoteRenderLayout.addView(remoteRenderer);
}

/** Removes {@code renderer} from its current parent view, if it has one. */
private static void detachFromParent(SurfaceViewRenderer renderer) {
    ViewGroup parent = (ViewGroup) renderer.getParent();
    if (parent != null) {
        parent.removeView(renderer);
    }
}
/**
 * Rate-control test harness: decodes frames from a local MP4 clip via a
 * {@code FrameProducer} and feeds them to the encoder wrapper.
 */
public RcTest(final Config config, final EglBase eglBase,
              final SurfaceViewRenderer renderer, final Notifier notifier) {
    mEglBase = eglBase;
    mSurfaceViewRenderer = renderer;

    // NOTE(review): the source clip path is hard-coded; the file must exist
    // on external storage for the test to run.
    File sourceClip = new File(Environment.getExternalStorageDirectory(), "alien-covenant.mp4");
    mFrameProducer = new FrameProducer(mEglBase, sourceClip, config.outputFps(), this);
    mEncoderWrapper = new EncoderWrapper(config, notifier);
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    // Keep the screen awake for the duration of the call UI.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    mHandler = new Handler();

    // Remote (master) surface, local preview surface and the status label.
    masterView = (SurfaceViewRenderer) findViewById(R.id.gl_surface);
    localView = (SurfaceViewRenderer) findViewById(R.id.gl_surface_local);
    this.mCallStatus = (TextView) findViewById(R.id.call_status);

    callState = CallState.IDLE;
    // Subscribe to room events from the shared Kurento room API instance.
    MainActivity.getKurentoRoomAPIInstance().addObserver(this);
}
/** Resolves the call-control buttons and video surfaces, and registers
 *  this activity as the click handler for all three buttons. */
private void initViews() {
    start = (Button) findViewById(R.id.start_call);
    call = (Button) findViewById(R.id.init_call);
    hangup = (Button) findViewById(R.id.end_call);
    localVideoView = (SurfaceViewRenderer) findViewById(R.id.local_gl_surface_view);
    remoteVideoView = (SurfaceViewRenderer) findViewById(R.id.remote_gl_surface_view);

    for (Button button : new Button[] {start, call, hangup}) {
        button.setOnClickListener(this);
    }
}
/**
 * Creates the view and its child {@code SurfaceViewRenderer}, starting
 * from the defaults: no mirroring, {@code DEFAULT_SCALING_TYPE} scaling.
 */
public MXWebRtcView(Context context) {
    super(context);

    // The child renderer actually draws the video frames.
    surfaceViewRenderer = new SurfaceViewRenderer(context);
    addView(surfaceViewRenderer);

    setMirror(false);
    setScalingType(DEFAULT_SCALING_TYPE);
}
/** * Sets the indicator which determines whether this {@code WebRTCView} is to * mirror the video represented by {@link #videoTrack} during its rendering. * * @param mirror If this {@code WebRTCView} is to mirror the video * represented by {@code videoTrack} during its rendering, {@code true}; * otherwise, {@code false}. */ public void setMirror(boolean mirror) { if (this.mirror != mirror) { this.mirror = mirror; SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer(); surfaceViewRenderer.setMirror(mirror); // SurfaceViewRenderer takes the value of its mirror property into // account upon its layout. requestSurfaceViewRendererLayout(); } }
/**
 * Convenience overload: applies the given mirror flag together with the
 * default {@code SCALE_ASPECT_FILL} scaling.
 */
protected void updateVideoView(SurfaceViewRenderer surfaceViewRenderer, boolean mirror) {
    updateVideoView(surfaceViewRenderer, mirror,
            RendererCommon.ScalingType.SCALE_ASPECT_FILL);
}
/**
 * Applies mirror and scaling settings to a renderer and forces a relayout
 * so the new settings take effect immediately.
 */
protected void updateVideoView(SurfaceViewRenderer surfaceViewRenderer, boolean mirror,
                               RendererCommon.ScalingType scalingType) {
    Log.i(TAG, "updateVideoView mirror:" + mirror + ", scalingType = " + scalingType);

    surfaceViewRenderer.setMirror(mirror);
    surfaceViewRenderer.setScalingType(scalingType);
    surfaceViewRenderer.requestLayout();
}
/**
 * One-shot initialization: local preview renderer, worker executor,
 * Socket.IO connection, EGL context, capture resolution, the
 * PeerConnectionFactory, and the default SDP/audio/peer-connection
 * constraints.
 */
private void init() {
    mLocalVideoView = (SurfaceViewRenderer) findViewById(R.id.local_video_view);
    // Init ExecutorService
    mExecutorService = Executors.newSingleThreadExecutor();
    // Socket.IO initialization
    initSocket();
    // Create video renderer
    rootEglBase = EglBase.create();
    Log.d(TAG, "Created video renderer.");
    mLocalVideoView.init(rootEglBase.getEglBaseContext(), null);
    mLocalVideoView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalVideoView.setEnableHardwareScaler(true);
    // Set ProxyRenderer target to SurfaceViewRenderer
    localProxyRenderer.setTarget(mLocalVideoView);
    // Mirror the local preview, as users expect from a front camera.
    mLocalVideoView.setMirror(true);
    // Check permission — currently disabled; re-enable before release.
    /*for (String permission : MANDATORY_PERMISSIONS) { if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) { Log.w(TAG, "Permission " + permission + " is not granted."); // finish(); return; } }*/
    // Capture at full-screen resolution, 30 fps.
    DisplayMetrics displayMetrics = getDisplayMetrics();
    videoWidth = displayMetrics.widthPixels;
    videoHeight = displayMetrics.heightPixels;
    videoFps = 30;
    initPeerConnectionFactory();
    // Set STUN Server
    mIceServers.add(new PeerConnection.IceServer(googleStunServer));
    // Set default SessionDescription MediaConstraints: offer to receive
    // both audio and video.
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    // Set default AudioConstraints. NOTE(review): echo cancellation, auto
    // gain, high-pass filter and noise suppression are all explicitly
    // disabled here — confirm this is intentional.
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
    // Request RTP data channels. (The original comment said "Enable DTLS",
    // but this key actually enables RtpDataChannels.)
    mPcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
}
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_video);

    // Two encoder configurations fed from the same 720p camera preview:
    // a low-bitrate SD output and a full-HD output.
    VideoConfig sdConfig = VideoConfig.builder()
            .previewWidth(1280)
            .previewHeight(720)
            .outputWidth(448)
            .outputHeight(800)
            .fps(30)
            .outputBitrate(800)
            .build();
    VideoConfig fullHdConfig = VideoConfig.builder()
            .previewWidth(1280)
            .previewHeight(720)
            .outputWidth(720)
            .outputHeight(1280)
            .fps(30)
            .outputBitrate(2000)
            .build();

    VideoCapturer videoCapturer = createVideoCapturer();
    mVideoView = (SurfaceViewRenderer) findViewById(R.id.mVideoView1);

    // Each encoder writes its own MP4 on external storage; abandon the
    // activity if either recorder cannot be created.
    try {
        String baseName = "video_source_record_" + System.currentTimeMillis();
        mMp4Recorder = new Mp4Recorder(
                new File(Environment.getExternalStorageDirectory(), baseName + ".mp4"));
        mHdMp4Recorder = new Mp4Recorder(
                new File(Environment.getExternalStorageDirectory(), baseName + "-hd.mp4"));
    } catch (IOException e) {
        e.printStackTrace();
        Toast.makeText(this, "start Mp4Recorder fail!", Toast.LENGTH_SHORT).show();
        finish();
        return;
    }

    mHwAvcEncoder = new HwAvcEncoder(sdConfig, mMp4Recorder);
    mHdHwAvcEncoder = new HwAvcEncoder(fullHdConfig, mHdMp4Recorder);

    // Fan camera frames out to the preview view and both encoders.
    mVideoSink = new VideoSink(mVideoView, mHwAvcEncoder, mHdHwAvcEncoder);
    mVideoSource = new VideoSource(getApplicationContext(), sdConfig, videoCapturer, mVideoSink);

    mVideoView.init(mVideoSource.getRootEglBase().getEglBaseContext(), null);
    mHwAvcEncoder.start(mVideoSource.getRootEglBase());
    mHdHwAvcEncoder.start(mVideoSource.getRootEglBase());

    initView();
}
@Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); //Initialize PeerConnectionFactory globals. //Params are context, initAudio,initVideo and videoCodecHwAcceleration PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true); //Create a new PeerConnectionFactory instance. PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options); //Now create a VideoCapturer instance. Callback methods are there if you want to do something! Duh! VideoCapturer videoCapturerAndroid = createVideoCapturer(); //Create MediaConstraints - Will be useful for specifying video and audio constraints. More on this later! MediaConstraints constraints = new MediaConstraints(); //Create a VideoSource instance VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid); VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource); //create an AudioSource instance AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints); AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource); //we will start capturing the video from the camera //width,height and fps videoCapturerAndroid.startCapture(1000, 1000, 30); //create surface renderer, init it and add the renderer to the track SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer); videoView.setMirror(true); EglBase rootEglBase = EglBase.create(); videoView.init(rootEglBase.getEglBaseContext(), null); localVideoTrack.addRenderer(new VideoRenderer(videoView)); }
/** * {@inheritDoc} */ @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { int height = b - t; int width = r - l; if (height == 0 || width == 0) { l = t = r = b = 0; } else { int frameHeight; int frameRotation; int frameWidth; ScalingType scalingType; synchronized (layoutSyncRoot) { frameHeight = this.frameHeight; frameRotation = this.frameRotation; frameWidth = this.frameWidth; scalingType = this.scalingType; } SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer(); switch (scalingType) { case SCALE_ASPECT_FILL: // Fill this ViewGroup with surfaceViewRenderer and the latter // will take care of filling itself with the video similarly to // the cover value the CSS property object-fit. r = width; l = 0; b = height; t = 0; break; case SCALE_ASPECT_FIT: default: // Lay surfaceViewRenderer out inside this ViewGroup in accord // with the contain value of the CSS property object-fit. // SurfaceViewRenderer will fill itself with the video similarly // to the cover or contain value of the CSS property object-fit // (which will not matter, eventually). if (frameHeight == 0 || frameWidth == 0) { l = t = r = b = 0; } else { float frameAspectRatio = (frameRotation % 180 == 0) ? frameWidth / (float) frameHeight : frameHeight / (float) frameWidth; Point frameDisplaySize = RendererCommon.getDisplaySize( scalingType, frameAspectRatio, width, height); l = (width - frameDisplaySize.x) / 2; t = (height - frameDisplaySize.y) / 2; r = l + frameDisplaySize.x; b = t + frameDisplaySize.y; } break; } } surfaceViewRenderer.layout(l, t, r, b); }
/**
 * Gets the {@code SurfaceViewRenderer} which renders {@link #videoTrack}.
 * Explicitly defined and used in order to facilitate switching the instance
 * at compile time. For example, reduces the number of modifications
 * necessary to switch the implementation from a {@code SurfaceViewRenderer}
 * that is a child of a {@code WebRTCView} to {@code WebRTCView} extending
 * {@code SurfaceViewRenderer}.
 *
 * @return The {@code SurfaceViewRenderer} which renders {@code videoTrack}.
 */
private SurfaceViewRenderer getSurfaceViewRenderer() {
    // The redundant "final" modifier was removed: private methods cannot
    // be overridden, so it had no effect.
    return surfaceViewRenderer;
}