Java 类org.webrtc.VideoRenderer 实例源码

项目:react-native-webrtc    文件:SurfaceViewRenderer.java   
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
  // Called from the decoder thread for every incoming frame. Hands the frame
  // to the render thread, keeping at most ONE pending frame: an older pending
  // frame is dropped (and its buffer recycled) before the new one is queued.
  synchronized (statisticsLock) {
    ++framesReceived;
  }
  synchronized (handlerLock) {
    if (renderThreadHandler == null) {
      Logging.d(TAG, getResourceName()
          + "Dropping frame - Not initialized or already released.");
      // The frame must always be handed back to WebRTC, or its buffer leaks.
      VideoRenderer.renderFrameDone(frame);
      return;
    }
    synchronized (frameLock) {
      if (pendingFrame != null) {
        // Drop old frame.
        synchronized (statisticsLock) {
          ++framesDropped;
        }
        VideoRenderer.renderFrameDone(pendingFrame);
      }
      pendingFrame = frame;
      renderThreadHandler.post(renderFrameRunnable);
    }
  }
}
项目:react-native-webrtc    文件:SurfaceViewRenderer.java   
// Detects resolution/rotation changes of the incoming stream; on a change it
// notifies the registered RendererEvents listener, caches the new geometry,
// and schedules a relayout on the UI thread.
private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
  synchronized (layoutLock) {
    final boolean unchanged = frameWidth == frame.width && frameHeight == frame.height
        && frameRotation == frame.rotationDegree;
    if (unchanged) {
      return;
    }
    Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
        + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
    if (rendererEvents != null) {
      rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
    }
    frameWidth = frame.width;
    frameHeight = frame.height;
    frameRotation = frame.rotationDegree;
    // requestLayout() must run on the UI thread.
    post(new Runnable() {
      @Override public void run() {
        requestLayout();
      }
    });
  }
}
项目:newwebrtc    文件:VideoChatActivity.java   
@Override
public void onAddRemoteStream(final MediaStream remoteStream, final PnPeer peer) {
    super.onAddRemoteStream(remoteStream, peer); // Will log values
    // Toast + renderer layout are UI work, so hop to the main thread.
    VideoChatActivity.this.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Toast.makeText(VideoChatActivity.this,"Connected to " + peer.getId(), Toast.LENGTH_SHORT).show();
            try {
                // Skip streams missing either audio or video.
                if(remoteStream.audioTracks.size()==0 || remoteStream.videoTracks.size()==0) return;
                mCallStatus.setVisibility(View.GONE);
                remoteStream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
                // Remote video fills the screen; local preview becomes a small inset.
                VideoRendererGui.update(remoteRender, 0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
                VideoRendererGui.update(localRender, 72, 65, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FIT, true);
            }
            // NOTE(review): errors are only printed, never surfaced to the user.
            catch (Exception e){ e.printStackTrace(); }
        }
    });
}
项目:nc-android-webrtcpeer    文件:PeerConnectionClient.java   
@Override
public void onAddStream(final MediaStream stream) {
    // Remote stream arrived: on the signaling executor, attach every configured
    // remote renderer to the stream's single video track (if still connected).
    executor.execute(new Runnable() {
        @Override
        public void run() {
            if (peerConnection == null || isError) {
                return; // Connection already closed or in error state.
            }
            if (stream.audioTracks.size() > 1 || stream.videoTracks.size() > 1) {
                reportError("Weird-looking stream: " + stream);
                return;
            }
            if (stream.videoTracks.size() != 1) {
                return; // Audio-only stream: nothing to render.
            }
            remoteVideoTrack = stream.videoTracks.get(0);
            remoteVideoTrack.setEnabled(renderVideo);
            for (VideoRenderer.Callbacks renderer : remoteRenders) {
                remoteVideoTrack.addRenderer(new VideoRenderer(renderer));
            }
        }
    });
}
项目:InsideCodec    文件:EncoderWrapper.java   
// Renders the given texture frame into the encoder's input surface on the
// encoder thread: makes the EGL context current, clears, draws the OES
// texture full-frame, then swaps buffers stamped with the frame's timestamp
// so the encoder picks it up.
public void encodeFrame(final VideoRenderer.I420Frame frame) {
    if (mEncoder == null) {
        return; // Encoder not started (or already released).
    }

    mHandler.post(new Runnable() {
        @Override
        public void run() {
            mEglBase.makeCurrent();
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            // Draw the whole frame (viewport origin 0,0, full width/height).
            mDrawer.drawOes(frame.textureId, frame.samplingMatrix, frame.width, frame.height, 0,
                    0, frame.width, frame.height);
            mEglBase.swapBuffers(frame.timestamp);
        }
    });
}
项目:AppRTC-Android    文件:PeerConnectionClient.java   
@Override
public void onAddStream(final MediaStream stream) {
  // Runs on the executor so all PeerConnection interaction stays on one thread.
  executor.execute(new Runnable() {
    @Override
    public void run() {
      if (peerConnection == null || isError) {
        return;
      }
      final int audioCount = stream.audioTracks.size();
      final int videoCount = stream.videoTracks.size();
      if (audioCount > 1 || videoCount > 1) {
        reportError("Weird-looking stream: " + stream);
        return;
      }
      if (videoCount == 1) {
        remoteVideoTrack = stream.videoTracks.get(0);
        remoteVideoTrack.setEnabled(renderVideo);
        // Fan the single remote track out to every registered renderer.
        for (VideoRenderer.Callbacks sink : remoteRenders) {
          remoteVideoTrack.addRenderer(new VideoRenderer(sink));
        }
      }
    }
  });
}
项目:AndroidRTC    文件:PeerConnectionClient.java   
@Override
public void onAddStream(final MediaStream stream) {
    // Attach the remote renderers once the remote media stream shows up.
    executor.execute(new Runnable() {
        @Override
        public void run() {
            boolean connectionUsable = peerConnection != null && !isError;
            if (!connectionUsable) {
                return;
            }
            if (stream.audioTracks.size() > 1 || stream.videoTracks.size() > 1) {
                reportError("Weird-looking stream: " + stream);
            } else if (stream.videoTracks.size() == 1) {
                remoteVideoTrack = stream.videoTracks.get(0);
                remoteVideoTrack.setEnabled(renderVideo);
                for (VideoRenderer.Callbacks callbacks : remoteRenders) {
                    remoteVideoTrack.addRenderer(new VideoRenderer(callbacks));
                }
            }
        }
    });
}
项目:anyRTC-P2P-Android    文件:RTCVideoView.java   
/**
 * Implements for AnyRTCViewEvents.
 * Opens the local preview: full-screen when it is the first renderer on
 * screen, otherwise a small overlay sub-view. Returns its VideoRenderer.
 */
@Override
public VideoRenderer OnRtcOpenLocalRender() {
    int size = GetVideoRenderSize();
    if (size == 0) {
        // First renderer: occupy the whole view (coords are percentages).
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100);
    } else {
        // Other renderers exist: place the preview as a small sub-window.
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, size, SUB_X, (100 - size * (SUB_HEIGHT + SUB_Y)), SUB_WIDTH, SUB_HEIGHT);
        mLocalRender.mView.setZOrderMediaOverlay(true); // Keep it above the full-screen surface.
    }
    mLocalRender.mView.setBackgroundResource(R.drawable.background);
    mVideoView.addView(mLocalRender.mLayout);
    mLocalRender.mLayout.setPosition(
            mLocalRender.x, mLocalRender.y, mLocalRender.w, mLocalRender.h);
    mLocalRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalRender.mRenderer = new VideoRenderer(mLocalRender.mView);
    mLocalRender.mLayout.setBackgroundResource(R.drawable.background);
    return mLocalRender.mRenderer;
}
项目:anyRTC-P2P-Android    文件:RTCVideoView.java   
// Returns (creating and caching on first use) the preview renderer for the
// given peer. A brand-new view is full-screen when it is the first renderer,
// otherwise a centered sub-window overlaid on the existing video.
public VideoRenderer OnRtcOpenPreViewRender(String strRtcPeerId){
    VideoView remoteRender = mRemoteRenders.get(strRtcPeerId);
    if (remoteRender == null) {
        int size = GetVideoRenderSize();
        if (size == 0) {
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100);
        } else {
            // Horizontally centered: x = (100 - SUB_WIDTH) / 2 (percent units).
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, size, (100-SUB_WIDTH)/2, 12, SUB_WIDTH, SUB_HEIGHT);
            remoteRender.mView.setZOrderMediaOverlay(true);
        }
        mVideoView.addView(remoteRender.mLayout);
        remoteRender.mLayout.setPosition(
                remoteRender.x, remoteRender.y, remoteRender.w, remoteRender.h);
        remoteRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        remoteRender.mRenderer = new VideoRenderer(remoteRender.mView);
        mRemoteRenders.put(strRtcPeerId, remoteRender);
    }
    return remoteRender.mRenderer;
}
项目:webrtc-android-codelab    文件:MainActivity.java   
// Attaches the first remote video track to the on-screen renderer.
// Fix: the original called videoTracks.getFirst() and audioTracks.getFirst()
// unconditionally — both throw NoSuchElementException on an empty list. The
// audio track was fetched but never used, so that read is removed, and the
// video lookup is now guarded.
private void gotRemoteStream(MediaStream stream) {
    //we have remote video stream. add to the renderer.
    if (stream.videoTracks.isEmpty()) {
        return; // No remote video to render.
    }
    final VideoTrack videoTrack = stream.videoTracks.getFirst();
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            try {
                remoteRenderer = new VideoRenderer(remoteVideoView);
                remoteVideoView.setVisibility(View.VISIBLE);
                videoTrack.addRenderer(remoteRenderer);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });

}
项目:webrtc-android    文件:PeerConnectionClient.java   
@Override
public void onAddStream(final MediaStream stream) {
    // Remote stream callback; all work is deferred to the executor thread.
    executor.execute(new Runnable() {
        @Override
        public void run() {
            if (peerConnection == null || isError) {
                return;
            }
            int videoTrackCount = stream.videoTracks.size();
            if (stream.audioTracks.size() > 1 || videoTrackCount > 1) {
                reportError("Weird-looking stream: " + stream);
                return;
            }
            if (videoTrackCount == 1) {
                remoteVideoTrack = stream.videoTracks.get(0);
                remoteVideoTrack.setEnabled(renderVideo);
                remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
            }
        }
    });
}
项目:DeviceConnect-Android    文件:MySurfaceViewRenderer.java   
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
    // Intercepts each frame to mirror it (as JPEG) to mServer before letting
    // the parent SurfaceViewRenderer draw it on screen.
    if (mReleased) {
        return;
    }

    if (mServer != null && mYuvConverter != null) {
        if (!frame.yuvFrame) {
            // Texture-backed frame: read the texture back into YUV first.
            convertTextureToYUV(frame);
        } else {
            convertYuvToRGB(frame);
        }
    }

    super.renderFrame(frame);
}
项目:DeviceConnect-Android    文件:MySurfaceViewRenderer.java   
// Converts a planar-YUV frame to an ARGB bitmap, JPEG-compresses it
// (quality 20) and offers the bytes to the media server.
private void convertYuvToRGB(VideoRenderer.I420Frame frame) {
    if (frame.yuvPlanes == null || frame.yuvPlanes[0] == null) {
        return; // Nothing to convert.
    }

    // Re-create the scratch bitmap only when the frame size changes.
    if (mBitmap == null || mBitmap.getWidth() != frame.width || mBitmap.getHeight() != frame.height) {
        mBitmap = Bitmap.createBitmap(frame.width, frame.height, Bitmap.Config.ARGB_8888);
    }

    ImageUtils.decodeYUV420SP3(mBitmap, frame.yuvPlanes, frame.width, frame.height, frame.yuvStrides);

    mOutputStream.reset();
    mBitmap.compress(Bitmap.CompressFormat.JPEG, 20, mOutputStream);
    mFrameHeight = frame.height;
    mFrameWidth = frame.width;
    mServer.offerMedia(mType, mOutputStream.toByteArray());
}
项目:DeviceConnect-Android    文件:MySurfaceViewRenderer.java   
// Reads a texture-backed frame into a YUV byte buffer, converts it to an ARGB
// bitmap, JPEG-compresses it (quality 20) and offers the bytes to the server.
private void convertTextureToYUV(VideoRenderer.I420Frame frame) {
    int uvHeight = (frame.height + 1) / 2;
    int requiredSize = frame.width * (frame.height + uvHeight);
    // Fix: the buffer used to be allocated only when null, so a mid-stream
    // resolution change left a wrongly sized buffer behind. Reallocate whenever
    // the current capacity cannot hold the frame.
    if (mByteBuffer == null || mByteBuffer.capacity() < requiredSize) {
        mByteBuffer = ByteBuffer.allocateDirect(requiredSize);
        mByteBuffer.order(ByteOrder.nativeOrder());
    }

    // Re-create the scratch bitmap only when the frame size changes.
    if (mBitmap == null || mBitmap.getWidth() != frame.width || mBitmap.getHeight() != frame.height) {
        mBitmap = Bitmap.createBitmap(frame.width, frame.height, Bitmap.Config.ARGB_8888);
    }

    mYuvConverter.convert(mByteBuffer, frame.width, frame.height, frame.width, frame.textureId, frame.samplingMatrix);

    ImageUtils.decodeYUV420SP2(mBitmap, mByteBuffer, frame.width, frame.height);

    mOutputStream.reset();
    mBitmap.compress(Bitmap.CompressFormat.JPEG, 20, mOutputStream);
    mFrameHeight = frame.height;
    mFrameWidth = frame.width;
    mServer.offerMedia(mType, mOutputStream.toByteArray());
}
项目:webrtcpeer-android    文件:MediaResourceManager.java   
// Builds the local capturer-backed video track and wires it to the preview renderer.
private VideoTrack createCapturerVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);
    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    VideoTrack track = localVideoTrack;
    track.setEnabled(renderVideo);
    track.addRenderer(new VideoRenderer(localRender));
    return track;
}
项目:webrtcpeer-android    文件:NBMWebRTCPeer.java   
/**
 * NBMWebRTCPeer constructor.
 * <p>
 *     This constructor should always be used in order to properly create a NBMWebRTCPeer instance.
 *     Starts the internal looper executor and builds the peer-connection
 *     parameters from the supplied media configuration.
 * </p>
 * @param  config            Media configuration instance
 * @param  context           Android context instance
 * @param  localRenderer     Callback for rendering the locally produced media stream
 * @param  observer          An observer instance which implements WebRTC callback functions
 */
   public NBMWebRTCPeer(NBMMediaConfiguration config, Context context,
                        VideoRenderer.Callbacks localRenderer, Observer observer) {

       this.context = context;
       this.localRender = localRenderer;
       this.observer = observer;
       this.masterRenderer = null;
       this.activeMasterStream = null;
       this.config = config;
       executor = new LooperExecutor();

       // Looper thread is started once in private ctor and is used for all
       // peer connection API calls to ensure new peer connection peerConnectionFactory is
       // created on the same thread as previously destroyed peerConnectionFactory.
       executor.requestStart();

       // NOTE(review): "heigth" is the (misspelled) field name exposed by
       // NBMMediaConfiguration's video format — not a typo introduced here.
       peerConnectionParameters = new NBMWebRTCPeer.NBMPeerConnectionParameters(true, false,
                        config.getReceiverVideoFormat().width, config.getReceiverVideoFormat().heigth,
                       (int)config.getReceiverVideoFormat().frameRate, config.getVideoBandwidth(), config.getVideoCodec().toString(), true,
                       config.getAudioBandwidth(), config.getAudioCodec().toString(),false, true);

       iceServers = new LinkedList<>();
       // Add Google's stun as a default ICE server
       addIceServer("stun:stun.l.google.com:19302");
   }
项目:voip_android    文件:PeerConnectionClient.java   
@Override
public void onAddStream(final MediaStream stream) {
    executor.execute(new Runnable() {
        @Override
        public void run() {
            // Ignore late callbacks after close() or a fatal error.
            if (peerConnection == null || isError) {
                return;
            }
            // A sane WebRTC stream carries at most one track of each kind.
            boolean weirdStream = stream.audioTracks.size() > 1 || stream.videoTracks.size() > 1;
            if (weirdStream) {
                reportError("Weird-looking stream: " + stream);
                return;
            }
            if (stream.videoTracks.isEmpty()) {
                return;
            }
            remoteVideoTrack = stream.videoTracks.get(0);
            remoteVideoTrack.setEnabled(renderVideo);
            for (VideoRenderer.Callbacks sink : remoteRenders) {
                remoteVideoTrack.addRenderer(new VideoRenderer(sink));
            }
        }
    });
}
项目:respoke-sdk-android    文件:RespokeCall.java   
// Creates the local media stream ("ARDAMS"): picks audio routing based on the
// wired-headset state, then adds an audio track and — unless audio-only — a
// camera-fed video track, and finally attaches the stream to the connection.
private void addLocalStreams(Context context) {
    AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    // Wired headset: normal in-call routing; otherwise communication mode with speakerphone.
    audioManager.setMode(isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

    localStream = peerConnectionFactory.createLocalMediaStream("ARDAMS");

    if (!audioOnly) {
        VideoCapturer capturer = getVideoCapturer();
        MediaConstraints videoConstraints = new MediaConstraints();
        videoSource = peerConnectionFactory.createVideoSource(capturer, videoConstraints);
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        localStream.addTrack(videoTrack);
    }

    localStream.addTrack(peerConnectionFactory.createAudioTrack("ARDAMSa0", peerConnectionFactory.createAudioSource(new MediaConstraints())));

    peerConnection.addStream(localStream);
}
项目:respoke-sdk-android    文件:RespokeCall.java   
@Override public void onAddStream(final MediaStream stream){
    // Remote stream arrived: on the main thread, attach the remote renderer to
    // the (single) video track, rejecting streams with more than one track of
    // either kind.
    new Handler(Looper.getMainLooper()).post(new Runnable() {
        public void run() {
            if (!isActive()) {
                return; // Call already ended.
            }
            if (stream.audioTracks.size() > 1 || stream.videoTracks.size() > 1) {
                postErrorToListener("An invalid stream was added");
                return;
            }
            if (stream.videoTracks.size() == 1) {
                stream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
            }
        }
    });
}
项目:MediaCodecTest    文件:DecodeWebRTCTest.java   
// Decodes a pre-encoded IVF file (named by the configured resolution) through
// the WebRTC decoder path on a dedicated looper thread and reports the
// average codec time.
// NOTE(review): runCallableNoWait returns immediately, so this method does
// not wait for decoding to finish — confirm callers expect that.
public void testDecoder(
    final  VideoRenderer.Callbacks renderer,
    final boolean useSurface,
    final EGLContext sharedContext) throws Exception {

    final String encodedIvfFilename = SDCARD_DIR + File.separator + ENCODED_IVF_BASE +
            "_" + WIDTH + "x" + HEIGHT + ".ivf";
    final String outputYuvFilename = null;

    Log.d(TAG, "---------- testSurfaceBasic on thread id: " + Thread.currentThread().getId());
    mLooperRunner = new LooperRunner();
    mLooperRunner.requestStart();
    // Configure and open camera on looper thread
    mLooperRunner.runCallableNoWait( new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            //decode(encodedIvfFilename, useSurface, renderer);
            decodeWebRTC(encodedIvfFilename, useSurface, sharedContext, renderer);
            getAverageCodecTime();
            //VideoRendererGui.printStatistics();
            return null;
        }
    } );

}
项目:restcomm-android-sdk    文件:PeerConnectionClient.java   
/**
 * Re-attaches the local and remote video renderers (e.g. after the UI is
 * rebuilt). The actual attachment runs on the executor thread, after which
 * the events listener is notified.
 *
 * @param localRender  sink for the local video feed
 * @param remoteRender callback for the remote video feed
 * @param localEnabled whether local video should be enabled (user mute state)
 */
public void reattachVideo(final VideoSink localRender, final VideoRenderer.Callbacks remoteRender, boolean localEnabled)
{
  Log.d(TAG, "reattachVideo()");

  this.localRender = localRender;
  this.remoteRenders = Collections.singletonList(remoteRender);

  executor.execute(new Runnable() {
    @Override
    public void run()
    {
      // Important: when reattaching local video is subject to user's mute preference
      addLocalRenderer(localRender, localEnabled);
      // When reattaching remote video should always be enabled since it's not controlled by the user
      addRemoteRenderer(remoteRender, true);

      events.onVideoReattached();
    }
  });
}
项目:quickblox-android    文件:VideoConversationFragment.java   
/**
 * Binds {@code videoTrack} to {@code videoView}, replacing any renderer the
 * track already had.
 *
 * @param userId         non-zero to also register the track in the track map
 *                       (used for the fullscreen video track)
 * @param videoView      surface to render into
 * @param videoTrack     track being (re)bound
 * @param remoteRenderer true for remote video; false additionally refreshes
 *                       the local-preview view (camera-dependent mirroring)
 */
private void fillVideoView(int userId, QBRTCSurfaceView videoView, QBRTCVideoTrack videoTrack,
                           boolean remoteRenderer) {
    videoTrack.removeRenderer(videoTrack.getRenderer());
    videoTrack.addRenderer(new VideoRenderer(videoView));
    if (userId != 0) {
        getVideoTrackMap().put(userId, videoTrack);
    }
    if (!remoteRenderer) {
        // Local preview may need mirroring depending on the active camera.
        updateVideoView(videoView, isCurrentCameraFront);
    }
    Log.d(TAG, (remoteRenderer ? "remote" : "local") + " Track is rendering");
}
项目:PeSanKita-android    文件:WebRtcCallService.java   
@Override
public void onAddStream(MediaStream stream) {
  // Remote stream arrived: unmute every audio track and, when exactly one
  // video track is present, enable it and attach the remote renderer.
  Log.w(TAG, "onAddStream:" + stream);

  for (AudioTrack track : stream.audioTracks) {
    track.setEnabled(true);
  }

  boolean hasSingleVideoTrack = stream.videoTracks != null && stream.videoTracks.size() == 1;
  if (hasSingleVideoTrack) {
    VideoTrack track = stream.videoTracks.getFirst();
    track.setEnabled(true);
    track.addRenderer(new VideoRenderer(remoteRenderer));
  }

}
项目:PeSanKita-android    文件:PeerConnectionWrapper.java   
/**
 * Wires up a local PeerConnection: ICE config (bundled STUN + supplied TURN
 * servers, optionally relay-only to hide the caller's IP), a muted-by-default
 * audio track, and — when a camera capturer is available — a muted-by-default
 * video track rendered into {@code localRenderer}.
 */
public PeerConnectionWrapper(@NonNull Context context,
                             @NonNull PeerConnectionFactory factory,
                             @NonNull PeerConnection.Observer observer,
                             @NonNull VideoRenderer.Callbacks localRenderer,
                             @NonNull List<PeerConnection.IceServer> turnServers,
                             boolean hideIp)
{
  List<PeerConnection.IceServer> iceServers = new LinkedList<>();
  iceServers.add(STUN_SERVER);
  iceServers.addAll(turnServers);

  MediaConstraints                constraints      = new MediaConstraints();
  MediaConstraints                audioConstraints = new MediaConstraints();
  PeerConnection.RTCConfiguration configuration    = new PeerConnection.RTCConfiguration(iceServers);

  configuration.bundlePolicy  = PeerConnection.BundlePolicy.MAXBUNDLE;
  configuration.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;

  if (hideIp) {
    // Relay-only candidates keep the local IP address private.
    configuration.iceTransportsType = PeerConnection.IceTransportsType.RELAY;
  }

  constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  audioConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

  this.peerConnection = factory.createPeerConnection(configuration, constraints, observer);
  this.videoCapturer  = createVideoCapturer(context);

  MediaStream mediaStream = factory.createLocalMediaStream("ARDAMS");
  this.audioSource = factory.createAudioSource(audioConstraints);
  this.audioTrack  = factory.createAudioTrack("ARDAMSa0", audioSource);
  // Both tracks start disabled (muted) until explicitly enabled elsewhere.
  this.audioTrack.setEnabled(false);
  mediaStream.addTrack(audioTrack);

  if (videoCapturer != null) {
    this.videoSource = factory.createVideoSource(videoCapturer);
    this.videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);

    this.videoTrack.addRenderer(new VideoRenderer(localRenderer));
    this.videoTrack.setEnabled(false);
    mediaStream.addTrack(videoTrack);
  } else {
    // No camera available: audio-only stream.
    this.videoSource = null;
    this.videoTrack  = null;
  }

  this.peerConnection.addStream(mediaStream);
}
项目:newwebrtc    文件:VideoChatActivity.java   
@Override
public void onLocalStream(final MediaStream localStream) {
    super.onLocalStream(localStream); // Will log values
    // Renderer attachment must happen on the UI thread.
    VideoChatActivity.this.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            // Nothing to attach when the local stream carries no video track.
            if (localStream.videoTracks.isEmpty()) {
                return;
            }
            localStream.videoTracks.get(0).addRenderer(new VideoRenderer(localRender));
        }
    });
}
项目:nc-android-webrtcpeer    文件:ProxyRenderer.java   
// Proxy renderer: forwards each frame to the current target, or hands the
// frame buffer straight back to WebRTC when no target is attached.
// Synchronized so target swaps cannot race with frame delivery.
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
    if (target != null) {
        target.renderFrame(frame);
        return;
    }
    VideoRenderer.renderFrameDone(frame);
}
项目:nc-android-webrtcpeer    文件:PeerConnectionClient.java   
// Convenience overload for a single remote renderer: wraps it in a singleton
// list and delegates to the list-based createPeerConnection.
public void createPeerConnection(final EglBase.Context renderEGLContext,
                                 final VideoRenderer.Callbacks localRender,
                                 final VideoRenderer.Callbacks remoteRender,
                                 final VideoCapturer videoCapturer,
                                 final SignalingParameters signalingParameters) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
                         videoCapturer, signalingParameters);
}
项目:nc-android-webrtcpeer    文件:PeerConnectionClient.java   
// Creates the source, starts capturing at the configured resolution/fps, and
// exposes it as the local video track wired to the preview sink (if any).
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    boolean haveLocalSink = localRender != null;
    if (haveLocalSink) {
        localVideoTrack.setEnabled(renderVideo);
        localVideoTrack.addRenderer(new VideoRenderer(localRender));
    }

    return localVideoTrack;
}
项目:InsideCodec    文件:RcTest.java   
@Override
public void onFrame(final int oesTextureId, final float[] transformMatrix,
        final long timestampNs) {
    // Wraps the incoming OES texture in a texture-backed I420Frame (rotation 0)
    // and feeds it both to the on-screen renderer and the encoder.
    VideoRenderer.I420Frame frame = new VideoRenderer.I420Frame(mVideoWidth, mVideoHeight, 0,
            oesTextureId, transformMatrix, 0, timestampNs);
    mSurfaceViewRenderer.renderFrame(frame);
    mEncoderWrapper.encodeFrame(frame);
}
项目:anyRTC-RTCP-Android    文件:RTCVideoView.java   
/**
 * Implements for AnyRTCViewEvents.
 * Opens the local render view (full-screen when it is the only renderer,
 * otherwise a sub-window) and returns its VideoRenderer.
 */
@Override
public VideoRenderer OnRtcOpenLocalRender() {
    int size = GetVideoRenderSize();
    screenChange();
    if (size == 0) {
        // Fix: the former if/else on mRTCVideoLayout had byte-identical
        // branches, so it is collapsed into a single full-screen construction.
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100, mRTCVideoLayout);
    } else {
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, size, SUB_X, (100 - size * (SUB_HEIGHT + SUB_Y)), SUB_WIDTH, SUB_HEIGHT, mRTCVideoLayout);
    }
    // 1x3 layout appends the local view last (drawn on top); others insert it first.
    if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_1X3) {
        mVideoView.addView(mLocalRender.mLayout, -1);
    } else {
        mVideoView.addView(mLocalRender.mLayout, 0);
    }

    mLocalRender.mLayout.setPosition(
            mLocalRender.x, mLocalRender.y, mLocalRender.w, mLocalRender.h);
    mLocalRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalRender.mRenderer = new VideoRenderer(mLocalRender.mView);
    return mLocalRender.mRenderer;
}
项目:Achilles_Android    文件:MainActivity.java   
// Proxy renderer: delivers the frame to the attached sink, or recycles the
// frame buffer immediately when detached. Synchronized against target swaps.
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
    if (target != null) {
        target.renderFrame(frame);
        return;
    }
    VideoRenderer.renderFrameDone(frame);
}
项目:Achilles_Android    文件:MainActivity.java   
// Creates the local video track fed by the capturer and attaches the preview
// proxy renderer.
private VideoTrack createVideoTrack(VideoCapturer videoCapturer) {
    mVideoSource = mPeerConnectionFactory.createVideoSource(videoCapturer);
    // Fix: the original passed videoHeight as the third argument, but
    // startCapture(width, height, framerate) expects a FRAME RATE there.
    // Capture at a standard 30 fps instead of an absurd height-as-fps value.
    final int captureFps = 30;
    videoCapturer.startCapture(videoWidth, videoHeight, captureFps);

    mLocalVideoTrack = mPeerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, mVideoSource);
    mLocalVideoTrack.setEnabled(true);
    mLocalVideoTrack.addRenderer(new VideoRenderer(localProxyRenderer));
    return mLocalVideoTrack;
}
项目:AppRTC-Android    文件:CallActivity.java   
@Override
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
  // Proxy: hand the frame to the current target, or recycle it right away
  // (logging the drop) when no target renderer is attached yet.
  if (target != null) {
    target.renderFrame(frame);
    return;
  }
  Logging.d(TAG, "Dropping frame in proxy because target is null.");
  VideoRenderer.renderFrameDone(frame);
}
项目:anyRTC-Meeting-Android    文件:MeetingActivity.java   
/**
 * Called when another participant's video is about to be displayed (e.g.
 * someone joined the ongoing meeting); (re)creates that participant's video
 * window here.
 * @param strRTCPeerId ID generated by the RTC service to identify the user
 * @param strUserId user ID
 * @param strPublishId media channel ID
 * @param strUserData user-defined data
 */
@Override
public void onRTCOpenVideoRender(final String strRTCPeerId, final String strPublishId, final String strUserId, final String strUserData) {
    MeetingActivity.this.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Log.d("callback", "onRTCOpenVideoRender strPublishId="+strPublishId+"strRTCPeerId=" + strRTCPeerId + "strUserId=" + strUserId + "strUserData=" +strUserData);
            // Create (or fetch) the remote render view, then hand its native
            // renderer pointer to the meeting kit.
            final VideoRenderer render = mVideoView.OnRtcOpenRemoteRender(strPublishId);
            if (null != render) {
                mMeetKit.setRTCVideoRender(strPublishId, render.GetRenderPointer());
            }
        }
    });
}
项目:anyRTC-Meeting-Android    文件:RTCVideoView.java   
/**
 * Implements for AnyRTCViewEvents.
 * Opens the local render view (full-screen when it is the only renderer,
 * otherwise a sub-window) and returns its VideoRenderer.
 */
@Override
public VideoRenderer OnRtcOpenLocalRender() {
    int size = GetVideoRenderSize();
    screenChange();
    if (size == 0) {
        // Fix: the former if/else on mRTCVideoLayout had byte-identical
        // branches, so it is collapsed into a single full-screen construction.
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100, mRTCVideoLayout);
    } else {
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, size, SUB_X, (100 - size * (SUB_HEIGHT + SUB_Y)), SUB_WIDTH, SUB_HEIGHT, mRTCVideoLayout);
    }
    // 1x3 layout appends the local view last (drawn on top); others insert it first.
    if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_1X3) {
        mVideoView.addView(mLocalRender.mLayout, -1);
    } else {
        mVideoView.addView(mLocalRender.mLayout, 0);
    }

    mLocalRender.mLayout.setPosition(
            mLocalRender.x, mLocalRender.y, mLocalRender.w, mLocalRender.h);
    mLocalRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalRender.mRenderer = new VideoRenderer(mLocalRender.mView);
    return mLocalRender.mRenderer;
}
项目:AndroidRTC    文件:PeerConnectionClient.java   
// Wraps the capturer in a VideoSource, starts capturing at the configured
// format, and exposes it as the preview-rendered local video track.
private VideoTrack createVideoTrack(VideoCapturer capturer) {
    videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(videoWidth, videoHeight, videoFps);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    VideoTrack track = localVideoTrack;
    track.setEnabled(renderVideo);
    track.addRenderer(new VideoRenderer(localRender));
    return track;
}
项目:AndroidRTC    文件:PeerConnectionClientTest.java   
@Override
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
  // Test renderer: log the rotated resolution for the first frame only, then
  // recycle the frame and count down the "done rendering" latch.
  if (!renderFrameCalled) {
    String resolution = frame.rotatedWidth() + " x " + frame.rotatedHeight();
    if (rendererName != null) {
      Log.d(TAG, rendererName + " render frame: " + resolution);
    } else {
      Log.d(TAG, "Render frame: " + resolution);
    }
  }
  renderFrameCalled = true;
  VideoRenderer.renderFrameDone(frame);
  doneRendering.countDown();
}
项目:Cable-Android    文件:WebRtcCallService.java   
@Override
public void onAddStream(MediaStream stream) {
  // Remote stream arrived: unmute every audio track and, when exactly one
  // video track is present, enable it and attach the remote renderer.
  Log.w(TAG, "onAddStream:" + stream);

  for (AudioTrack audioTrack : stream.audioTracks) {
    audioTrack.setEnabled(true);
  }

  if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
    VideoTrack videoTrack = stream.videoTracks.getFirst();
    videoTrack.setEnabled(true);
    videoTrack.addRenderer(new VideoRenderer(remoteRenderer));
  }
}
项目:Cable-Android    文件:PeerConnectionWrapper.java   
/**
 * Wires up a local PeerConnection: ICE config (bundled STUN + supplied TURN
 * servers, optionally relay-only to hide the caller's IP), a muted-by-default
 * audio track, and — when a camera capturer is available — a muted-by-default
 * video track rendered into {@code localRenderer}.
 */
public PeerConnectionWrapper(@NonNull Context context,
                             @NonNull PeerConnectionFactory factory,
                             @NonNull PeerConnection.Observer observer,
                             @NonNull VideoRenderer.Callbacks localRenderer,
                             @NonNull List<PeerConnection.IceServer> turnServers,
                             boolean hideIp)
{
  List<PeerConnection.IceServer> iceServers = new LinkedList<>();
  iceServers.add(STUN_SERVER);
  iceServers.addAll(turnServers);

  MediaConstraints                constraints      = new MediaConstraints();
  MediaConstraints                audioConstraints = new MediaConstraints();
  PeerConnection.RTCConfiguration configuration    = new PeerConnection.RTCConfiguration(iceServers);

  configuration.bundlePolicy  = PeerConnection.BundlePolicy.MAXBUNDLE;
  configuration.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;

  if (hideIp) {
    // Relay-only candidates keep the local IP address private.
    configuration.iceTransportsType = PeerConnection.IceTransportsType.RELAY;
  }

  constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  audioConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

  this.peerConnection = factory.createPeerConnection(configuration, constraints, observer);
  this.videoCapturer  = createVideoCapturer(context);

  MediaStream mediaStream = factory.createLocalMediaStream("ARDAMS");
  this.audioSource = factory.createAudioSource(audioConstraints);
  this.audioTrack  = factory.createAudioTrack("ARDAMSa0", audioSource);
  // Both tracks start disabled (muted) until explicitly enabled elsewhere.
  this.audioTrack.setEnabled(false);
  mediaStream.addTrack(audioTrack);

  if (videoCapturer != null) {
    this.videoSource = factory.createVideoSource(videoCapturer);
    this.videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);

    this.videoTrack.addRenderer(new VideoRenderer(localRenderer));
    this.videoTrack.setEnabled(false);
    mediaStream.addTrack(videoTrack);
  } else {
    // No camera available: audio-only stream.
    this.videoSource = null;
    this.videoTrack  = null;
  }

  this.peerConnection.addStream(mediaStream);
}
项目:anyRTC-P2P-Android    文件:RTCVideoView.java   
@Override
public VideoRenderer OnRtcOpenRemoteRender(final String strRtcPeerId) {
    // Returns (creating and caching on first use) the remote render view for
    // the given peer; the first remote is promoted to fullscreen.
    VideoView remoteRender = mRemoteRenders.get(strRtcPeerId);
    if (remoteRender == null) {
        int size = GetVideoRenderSize();
        if (size == 0) {
            // First renderer on screen: occupy the whole view.
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100);
        } else {
            // Additional renderer: small overlay sub-window near the corner.
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, size, 4, 4, SUB_WIDTH, SUB_HEIGHT);
            remoteRender.mView.setZOrderMediaOverlay(true);
        }

        mVideoView.addView(remoteRender.mLayout);

        remoteRender.mLayout.setPosition(
                remoteRender.x, remoteRender.y, remoteRender.w, remoteRender.h);
        remoteRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        remoteRender.mRenderer = new VideoRenderer(remoteRender.mView);

        mRemoteRenders.put(strRtcPeerId, remoteRender);

        // First remote participant: swap it to fullscreen, shrink local preview.
        if (mRemoteRenders.size() == 1 && mLocalRender != null) {
            SwitchViewToFullscreen(remoteRender, mLocalRender);
        }
    }
    return remoteRender.mRenderer;
}