Java class org.webrtc.RendererCommon usage examples

Project: InsideCodec    File: MainActivity.java
@OnClick(R.id.mBtnStart)
public void startTest() {
    mSurface.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);

    if (mRcTest != null && !mRcTest.finished()) {
        Toast.makeText(this, "Last test still running", Toast.LENGTH_SHORT).show();
        return;
    }

    mConfig = Config.builder()
            .updateBr(mCbUpdateBr.isChecked())
            .asyncEnc(mCbAsyncEnc.isChecked())
            .initBr(Integer.parseInt(mEtInitBr.getText().toString()))
            .brStep(Integer.parseInt(mEtBrStep.getText().toString()))
            .quality(Integer.parseInt(mEtQuality.getText().toString()))
            .brMode(mSpBitrateMode.getSelectedItemPosition())
            .outputWidth(448)
            .outputHeight(800)
            .outputFps(30)
            .outputKeyFrameInterval(2)
            .build();
    mRcTest = new RcTest(mConfig, mEglBase, mSurface, this);
    mRcTest.start();
}
Project: anyRTC-P2P-Android    File: RTCVideoView.java
/**
 * Implements for AnyRTCViewEvents.
 */
@Override
public VideoRenderer OnRtcOpenLocalRender() {
    int size = GetVideoRenderSize();
    if (size == 0) {
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100);
    } else {
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, size, SUB_X, (100 - size * (SUB_HEIGHT + SUB_Y)), SUB_WIDTH, SUB_HEIGHT);
        mLocalRender.mView.setZOrderMediaOverlay(true);
    }
    mLocalRender.mView.setBackgroundResource(R.drawable.background);
    mVideoView.addView(mLocalRender.mLayout);
    mLocalRender.mLayout.setPosition(
            mLocalRender.x, mLocalRender.y, mLocalRender.w, mLocalRender.h);
    mLocalRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalRender.mRenderer = new VideoRenderer(mLocalRender.mView);
    mLocalRender.mLayout.setBackgroundResource(R.drawable.background);
    return mLocalRender.mRenderer;
}
Project: anyRTC-P2P-Android    File: RTCVideoView.java
public VideoRenderer OnRtcOpenPreViewRender(String strRtcPeerId) {
    VideoView remoteRender = mRemoteRenders.get(strRtcPeerId);
    if (remoteRender == null) {
        int size = GetVideoRenderSize();
        if (size == 0) {
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100);
        } else {
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, size, (100-SUB_WIDTH)/2, 12, SUB_WIDTH, SUB_HEIGHT);
            remoteRender.mView.setZOrderMediaOverlay(true);
        }
        mVideoView.addView(remoteRender.mLayout);
        remoteRender.mLayout.setPosition(
                remoteRender.x, remoteRender.y, remoteRender.w, remoteRender.h);
        remoteRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        remoteRender.mRenderer = new VideoRenderer(remoteRender.mView);
        mRemoteRenders.put(strRtcPeerId, remoteRender);
    }
    return remoteRender.mRenderer;
}
Project: yun2win-sdk-android    File: AVMemberView.java
public AVMemberView(final Context context, EglBase rootEglBase, AVMember avMember, String trackType) {
    this.context = context;
    this.rootEglBase = rootEglBase;
    this.avMember = avMember;
    this.trackType = trackType;
    convertView = LayoutInflater.from(context).inflate(R.layout.avcall_member_preview_item, null);
    viewHolder = new ViewHolder();
    viewHolder.sfv_video = (SurfaceViewRenderer) convertView.findViewById(R.id.svr_video_item);
    viewHolder.iv_header = (ImageView) convertView.findViewById(R.id.iv_av_member_avatar);
    viewHolder.rl_bg = (RelativeLayout) convertView.findViewById(R.id.rl_bg);
    viewHolder.sfv_video.init(rootEglBase.getEglBaseContext(), null);
    viewHolder.sfv_video.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    viewHolder.sfv_video.setMirror(false);
    viewHolder.sfv_video.setZOrderMediaOverlay(true);
    convertView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (onMemberViewClickListener != null)
                onMemberViewClickListener.itemClick(avMemberView);
        }
    });
    avMemberView = this;
    setRenderer();
}
Project: voip_android    File: VOIPVideoActivity.java
protected void updateVideoView() {
    remoteRenderLayout.setPosition(REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT);
    remoteRenderScreen.setScalingType(scalingType);
    remoteRenderScreen.setMirror(false);

    if (iceConnected) {
        localRenderLayout.setPosition(
                LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED, LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED);
        localRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    } else {
        localRenderLayout.setPosition(
                LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING, LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING);
        localRender.setScalingType(scalingType);
    }
    localRender.setMirror(true);

    localRender.requestLayout();
    remoteRenderScreen.requestLayout();
}
Project: react-native-webrtc    File: SurfaceViewRenderer.java
private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
  synchronized (layoutLock) {
    final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
    final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
    final Point size =
        RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight);
    if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) {
      size.x = maxWidth;
    }
    if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) {
      size.y = maxHeight;
    }
    return size;
  }
}
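Note: the snippet above delegates all of the sizing math to RendererCommon.getDisplaySize(). A minimal sketch of its contract, not taken from any project on this page (the 16:9 aspect ratio and 1080x1920 container are assumed example values):

// Illustrative sketch: SCALE_ASPECT_FIT returns the largest size with the
// frame's aspect ratio that fits inside the container (letterboxing), while
// SCALE_ASPECT_FILL simply returns the full container size and lets the
// renderer crop the frame.
float frameAspect = 16 / 9f; // assumed example value
Point fit = RendererCommon.getDisplaySize(
    RendererCommon.ScalingType.SCALE_ASPECT_FIT, frameAspect, 1080, 1920);  // -> 1080x608
Point fill = RendererCommon.getDisplaySize(
    RendererCommon.ScalingType.SCALE_ASPECT_FILL, frameAspect, 1080, 1920); // -> 1080x1920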
Project: anyRTC-RTCP-Android    File: RTCVideoView.java
/**
 * Implements for AnyRTCViewEvents.
 */
@Override
public VideoRenderer OnRtcOpenLocalRender() {
    int size = GetVideoRenderSize();
    screenChange();
    if (size == 0) {
        // The original branched on AnyRTC_V_1X3 here, but both branches were
        // identical: every layout mode gets a full-screen local view.
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100, mRTCVideoLayout);
    } else {
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, size, SUB_X, (100 - size * (SUB_HEIGHT + SUB_Y)), SUB_WIDTH, SUB_HEIGHT, mRTCVideoLayout);
    }
    if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_1X3) {
        mVideoView.addView(mLocalRender.mLayout, -1);
    } else {
        mVideoView.addView(mLocalRender.mLayout, 0);
    }

    mLocalRender.mLayout.setPosition(
            mLocalRender.x, mLocalRender.y, mLocalRender.w, mLocalRender.h);
    mLocalRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalRender.mRenderer = new VideoRenderer(mLocalRender.mView);
    return mLocalRender.mRenderer;
}
Project: anyRTC-Meeting-Android    File: RTCVideoView.java
/**
 * Implements for AnyRTCViewEvents.
 */
@Override
public VideoRenderer OnRtcOpenLocalRender() {
    int size = GetVideoRenderSize();
    screenChange();
    if (size == 0) {
        // As above, both layout branches were identical: every layout mode
        // gets a full-screen local view.
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100, mRTCVideoLayout);
    } else {
        mLocalRender = new VideoView("localRender", mVideoView.getContext(), mRootEglBase, size, SUB_X, (100 - size * (SUB_HEIGHT + SUB_Y)), SUB_WIDTH, SUB_HEIGHT, mRTCVideoLayout);
    }
    if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_1X3) {
        mVideoView.addView(mLocalRender.mLayout, -1);
    } else {
        mVideoView.addView(mLocalRender.mLayout, 0);
    }

    mLocalRender.mLayout.setPosition(
            mLocalRender.x, mLocalRender.y, mLocalRender.w, mLocalRender.h);
    mLocalRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalRender.mRenderer = new VideoRenderer(mLocalRender.mView);
    return mLocalRender.mRenderer;
}
Project: anyRTC-P2P-Android    File: RTCVideoView.java
@Override
public VideoRenderer OnRtcOpenRemoteRender(final String strRtcPeerId) {
    VideoView remoteRender = mRemoteRenders.get(strRtcPeerId);
    if (remoteRender == null) {
        int size = GetVideoRenderSize();
        if (size == 0) {
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, 0, 0, 0, 100, 100);
        } else {
            remoteRender = new VideoView(strRtcPeerId, mVideoView.getContext(), mRootEglBase, size, 4, 4, SUB_WIDTH, SUB_HEIGHT);
            remoteRender.mView.setZOrderMediaOverlay(true);
        }

        mVideoView.addView(remoteRender.mLayout);

        remoteRender.mLayout.setPosition(
                remoteRender.x, remoteRender.y, remoteRender.w, remoteRender.h);
        remoteRender.mView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        remoteRender.mRenderer = new VideoRenderer(remoteRender.mView);

        mRemoteRenders.put(strRtcPeerId, remoteRender);

        if (mRemoteRenders.size() == 1 && mLocalRender != null) {
            SwitchViewToFullscreen(remoteRender, mLocalRender);
        }
    }
    return remoteRender.mRenderer;
}
Project: VideoCRE    File: TextureViewRenderer.java
/**
 * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
 * for drawing frames on the EGLSurface. This class is responsible for calling release() on
 * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
 * init()/release() cycle.
 */
public void init(final EglBase.Context sharedContext,
    RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
    RendererCommon.GlDrawer drawer) {
  ThreadUtils.checkIsOnMainThread();
  this.rendererEvents = rendererEvents;
  synchronized (layoutLock) {
    isFirstFrameRendered = false;
    rotatedFrameWidth = 0;
    rotatedFrameHeight = 0;
    frameRotation = 0;
  }
  eglRenderer.init(sharedContext, configAttributes, drawer);
}
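Note: a minimal lifecycle sketch for the init()/release() contract described in the Javadoc above. It assumes a TextureViewRenderer field named textureViewRenderer and the standard EglBase factory; it is not taken from the VideoCRE project itself.

// Hedged sketch: init() must run on the main thread (the method above checks
// this), and release() must be called before the renderer is re-initialized.
EglBase eglBase = EglBase.create();
textureViewRenderer.init(eglBase.getEglBaseContext(), null /* rendererEvents */);
// ... attach a video source and render frames ...
textureViewRenderer.release();
textureViewRenderer.init(eglBase.getEglBaseContext(), null); // re-init after release() is allowed
textureViewRenderer.release();
eglBase.release();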
Project: react-native-twilio-video-webrtc    File: RNVideoViewGroup.java
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    int height = b - t;
    int width = r - l;
    if (height == 0 || width == 0) {
        l = t = r = b = 0;
    } else {
        int videoHeight;
        int videoWidth;
        synchronized (layoutSync) {
            videoHeight = this.videoHeight;
            videoWidth = this.videoWidth;
        }

        if (videoHeight == 0 || videoWidth == 0) {
            // These are Twilio defaults.
            videoHeight = 480;
            videoWidth = 640;
        }

        Point displaySize = RendererCommon.getDisplaySize(
                this.scalingType,
                videoWidth / (float) videoHeight,
                width,
                height
        );

        l = (width - displaySize.x) / 2;
        t = (height - displaySize.y) / 2;
        r = l + displaySize.x;
        b = t + displaySize.y;
    }
    surfaceViewRenderer.layout(l, t, r, b);
}
Project: nubo-test    File: PeerVideoActivity.java
@Override
protected void onStart() {
    super.onStart();

    Bundle extras = getIntent().getExtras();
    this.username = extras.getString(Constants.USER_NAME, "");
    Log.i(TAG, "username: " + username);

    EglBase rootEglBase = EglBase.create();
    masterView.init(rootEglBase.getEglBaseContext(), null);
    masterView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    localView.init(rootEglBase.getEglBaseContext(), null);
    localView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);

    NBMMediaConfiguration peerConnectionParameters = new NBMMediaConfiguration(
            NBMMediaConfiguration.NBMRendererType.OPENGLES,
            NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
            NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
            new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20),
            NBMMediaConfiguration.NBMCameraPosition.FRONT);

    videoRequestUserMapping = new HashMap<>();

    nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, localView, this);
    nbmWebRTCPeer.registerMasterRenderer(masterView);
    Log.i(TAG, "Initializing nbmWebRTCPeer...");
    nbmWebRTCPeer.initialize();
    callState = CallState.PUBLISHING;
    mCallStatus.setText("Publishing...");
}
Project: DeviceConnect-Android    File: VideoChatActivity.java
/**
 * Updated layout of the views.
 */
private void updateVideoView(final String videoUri) {
    mRemoteLayout.setPosition(0, 0, 100, 90);
    mRemoteRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    mRemoteRender.setMirror(false);

    mLocalLayout.setPosition(72, 72, 25, 25);
    mLocalRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    mLocalRender.setMirror("true".equals(videoUri));

    mLocalRender.requestLayout();
    mRemoteRender.requestLayout();
}
Project: q-municate-android    File: RTCGLVideoView.java
public void updateRenderer(RendererSurface rendererSurface, RendererConfig config) {
    boolean mainRenderer = RendererSurface.MAIN.equals(rendererSurface);
    VideoRenderer.Callbacks callbacks = mainRenderer ? mainRendererCallback
            : localRendererCallback;

    if (config.coordinates != null) {
        setViewCoordinates((mainRenderer ? remoteCoords : localCoords),
                config.coordinates);
    }
    setRendererMirror(config.mirror, rendererSurface);
    int[] viewCoordinates = mainRenderer ? remoteCoords : localCoords;
    VideoRendererGui.update(callbacks, viewCoordinates[0], viewCoordinates[1], viewCoordinates[2],
            viewCoordinates[3], RendererCommon.ScalingType.SCALE_ASPECT_FILL,
            (mainRenderer ? mainMirror : secondMirror));
}
Project: react-native-webrtc    File: SurfaceViewRenderer.java
/**
 * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
 * reinitialize the renderer after a previous init()/release() cycle.
 */
public void init(
    EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
  init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
}
Project: react-native-webrtc    File: SurfaceViewRenderer.java
/**
 * Set how the video will fill the allowed layout area.
 */
public void setScalingType(RendererCommon.ScalingType scalingType) {
  synchronized (layoutLock) {
    this.scalingType = scalingType;
  }
}
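Note: the ScalingType values behave much like the CSS object-fit keywords referenced in the onLayout() examples on this page. A hedged summary with a typical call (surfaceViewRenderer is an assumed instance):

// SCALE_ASPECT_FIT      - whole frame visible, container may be letterboxed
// SCALE_ASPECT_FILL     - container fully covered, frame may be cropped
// SCALE_ASPECT_BALANCED - compromise: some cropping, some letterboxing
surfaceViewRenderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);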
Project: react-native-webrtc    File: WebRTCView.java
/**
 * {@inheritDoc}
 */
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    int height = b - t;
    int width = r - l;

    SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();

    if (height == 0 || width == 0) {
        l = t = r = b = 0;
    } else {
        int frameHeight;
        int frameRotation;
        int frameWidth;
        ScalingType scalingType;

        synchronized (layoutSyncRoot) {
            frameHeight = this.frameHeight;
            frameRotation = this.frameRotation;
            frameWidth = this.frameWidth;
            scalingType = this.scalingType;
        }

        switch (scalingType) {
        case SCALE_ASPECT_FILL:
            // Fill this ViewGroup with surfaceViewRenderer and the latter
            // will take care of filling itself with the video similarly to
            // the cover value the CSS property object-fit.
            r = width;
            l = 0;
            b = height;
            t = 0;
            break;
        case SCALE_ASPECT_FIT:
        default:
            // Lay surfaceViewRenderer out inside this ViewGroup in accord
            // with the contain value of the CSS property object-fit.
            // SurfaceViewRenderer will fill itself with the video similarly
            // to the cover or contain value of the CSS property object-fit
            // (which will not matter, eventually).
            if (frameHeight == 0 || frameWidth == 0) {
                l = t = r = b = 0;
            } else {
                float frameAspectRatio
                    = (frameRotation % 180 == 0)
                        ? frameWidth / (float) frameHeight
                        : frameHeight / (float) frameWidth;
                Point frameDisplaySize
                    = RendererCommon.getDisplaySize(
                            scalingType,
                            frameAspectRatio,
                            width, height);

                l = (width - frameDisplaySize.x) / 2;
                t = (height - frameDisplaySize.y) / 2;
                r = l + frameDisplaySize.x;
                b = t + frameDisplaySize.y;
            }
            break;
        }
    }
    surfaceViewRenderer.layout(l, t, r, b);
}
Project: quickblox-android    File: VideoConversationFragment.java
protected void updateVideoView(SurfaceViewRenderer surfaceViewRenderer, boolean mirror) {
    updateVideoView(surfaceViewRenderer, mirror, RendererCommon.ScalingType.SCALE_ASPECT_FILL);
}
Project: quickblox-android    File: VideoConversationFragment.java
protected void updateVideoView(SurfaceViewRenderer surfaceViewRenderer, boolean mirror, RendererCommon.ScalingType scalingType) {
    Log.i(TAG, "updateVideoView mirror:" + mirror + ", scalingType = " + scalingType);
    surfaceViewRenderer.setScalingType(scalingType);
    surfaceViewRenderer.setMirror(mirror);
    surfaceViewRenderer.requestLayout();
}
Project: Achilles_Android    File: MainActivity.java
private void init() {
    mLocalVideoView = (SurfaceViewRenderer) findViewById(R.id.local_video_view);

    // Init ExecutorService
    mExecutorService = Executors.newSingleThreadExecutor();

    // Socket.IO initialization
    initSocket();

    // Create video renderer
    rootEglBase = EglBase.create();
    Log.d(TAG, "Created video renderer.");

    mLocalVideoView.init(rootEglBase.getEglBaseContext(), null);
    mLocalVideoView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalVideoView.setEnableHardwareScaler(true);
    // Set ProxyRenderer target to SurfaceViewRenderer
    localProxyRenderer.setTarget(mLocalVideoView);
    mLocalVideoView.setMirror(true);

    // Check permission
    /*for (String permission : MANDATORY_PERMISSIONS) {
        if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
            Log.w(TAG, "Permission " + permission + " is not granted.");
            // finish();
            return;
        }
    }*/

    DisplayMetrics displayMetrics = getDisplayMetrics();
    videoWidth = displayMetrics.widthPixels;
    videoHeight = displayMetrics.heightPixels;
    videoFps = 30;

    initPeerConnectionFactory();

    // Set STUN Server
    mIceServers.add(new PeerConnection.IceServer(googleStunServer));

    // Set default SessionDescription MediaConstraints
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));

    // Set default AudioConstraints
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));

    // Enable RTP data channels
    mPcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
}
Project: VideoCRE    File: MatrixHelper.java
public MatrixHelper() {
    mFlipHorizontal = RendererCommon.horizontalFlipMatrix();
    mFlipVertical = RendererCommon.verticalFlipMatrix();
    mTemp = new float[32];
}
Project: VideoCRE    File: MatrixHelper.java
public void rotate(float[] matrix, float degree) {
    Matrix.setRotateM(mTemp, 0, degree, 0, 0, 1);
    RendererCommon.adjustOrigin(mTemp);
    Matrix.multiplyMM(mTemp, 16, mTemp, 0, matrix, 0);
    System.arraycopy(mTemp, 16, matrix, 0, 16);
}
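Note: RendererCommon also exposes public matrix helpers for texture transforms. A hedged sketch that mirrors and then rotates a frame's sampling matrix; samplingMatrix and rotationDegree are assumed to come from an I420Frame, as in the snapshot example further down this page.

// Hedged sketch: compose a horizontal mirror with the frame rotation. The
// snapshot renderer below uses the same rotateTextureMatrix/multiplyMatrices
// helpers for its capture path.
float[] mirrored = RendererCommon.multiplyMatrices(
        samplingMatrix, RendererCommon.horizontalFlipMatrix());
float[] texMatrix = RendererCommon.rotateTextureMatrix(mirrored, rotationDegree);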
Project: VideoCRE    File: TextureViewRenderer.java
/**
 * Set how the video will fill the allowed layout area.
 */
public void setScalingType(RendererCommon.ScalingType scalingType) {
  ThreadUtils.checkIsOnMainThread();
  videoLayoutMeasure.setScalingType(scalingType);
  requestLayout();
}
Project: VideoCRE    File: TextureViewRenderer.java
public void setScalingType(RendererCommon.ScalingType scalingTypeMatchOrientation,
    RendererCommon.ScalingType scalingTypeMismatchOrientation) {
  ThreadUtils.checkIsOnMainThread();
  videoLayoutMeasure.setScalingType(scalingTypeMatchOrientation, scalingTypeMismatchOrientation);
  requestLayout();
}
Project: video-quickstart-android    File: SnapshotVideoRenderer.java
private Bitmap captureBitmapFromTexture(I420Frame i420Frame) {
    int width = i420Frame.rotatedWidth();
    int height = i420Frame.rotatedHeight();
    int outputFrameSize = width * height * 3 / 2;
    ByteBuffer outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
    final float frameAspectRatio = (float) i420Frame.rotatedWidth() /
            (float) i420Frame.rotatedHeight();
    final float[] rotatedSamplingMatrix =
            RendererCommon.rotateTextureMatrix(i420Frame.samplingMatrix,
                    i420Frame.rotationDegree);
    final float[] layoutMatrix = RendererCommon.getLayoutMatrix(false,
            frameAspectRatio,
            (float) width / height);
    final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix,
            layoutMatrix);
    /*
     * YuvConverter must be instantiated on a thread that has an active EGL context. We know
     * that renderFrame is called from the correct render thread therefore
     * we defer instantiation of the converter until frame arrives.
     */
    YuvConverter yuvConverter = new YuvConverter();
    yuvConverter.convert(outputFrameBuffer,
            width,
            height,
            width,
            i420Frame.textureId,
            texMatrix);

    // Now we need to unpack the YUV data into planes
    byte[] data = outputFrameBuffer.array();
    int offset = outputFrameBuffer.arrayOffset();
    int stride = width;
    ByteBuffer[] yuvPlanes = new ByteBuffer[] {
            ByteBuffer.allocateDirect(width * height),
            ByteBuffer.allocateDirect(width * height / 4),
            ByteBuffer.allocateDirect(width * height / 4)
    };
    int[] yuvStrides = new int[] {
            width,
            (width + 1) / 2,
            (width + 1) / 2
    };

    // Write Y
    yuvPlanes[0].put(data, offset, width * height);

    // Write U
    for (int r = height; r < height * 3 / 2; ++r) {
        yuvPlanes[1].put(data, offset + r * stride, stride / 2);
    }

    // Write V
    for (int r = height; r < height * 3 / 2; ++r) {
        yuvPlanes[2].put(data, offset + r * stride + stride / 2, stride / 2);
    }

    // Convert the YuvImage
    YuvImage yuvImage = i420ToYuvImage(yuvPlanes, yuvStrides, width, height);

    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    Rect rect = new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight());

    // Compress YuvImage to jpeg
    yuvImage.compressToJpeg(rect, 100, stream);

    // Convert jpeg to Bitmap
    byte[] imageBytes = stream.toByteArray();

    // Release YUV Converter
    yuvConverter.release();

    return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}
Project: react-native-twilio-video-webrtc    File: RNVideoViewGroup.java
public void setScalingType(RendererCommon.ScalingType scalingType) {
    this.scalingType = scalingType;
}
Project: nubo-test-tree    File: MasterVideoActivity.java
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        treeState = TreeState.IDLE;

        setContentView(R.layout.activity_video_master);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        mHandler = new Handler();
        Bundle extras = getIntent().getExtras();
        if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
            Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
                    Toast.LENGTH_SHORT).show();
            finish();
            return;
        }
        this.treeId      = extras.getString(Constants.USER_NAME, "");
        Log.i(TAG, "treeId: " + treeId);

        this.mCallStatus   = (TextView) findViewById(R.id.call_status);

        this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);
        // Set up the video renderers
        RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
        VideoRendererGui.setView(videoView, null);

        remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT,
                scalingType, false);
        localRender = VideoRendererGui.create(  LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                scalingType, true);
        NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
        peerConnectionParameters = new NBMMediaConfiguration(   NBMMediaConfiguration.NBMRendererType.OPENGLES,
                NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
                NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
                receiverVideoFormat,
                NBMMediaConfiguration.NBMCameraPosition.FRONT);
        nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
        nbmWebRTCPeer.initialize();
        Log.i(TAG, "MasterVideoActivity initialized");
//        mHandler.postDelayed(publishDelayed, 4000);

        MainActivity.getKurentoTreeAPIInstance().addObserver(this);

        createTreeRequestId = ++Constants.id;
        MainActivity.getKurentoTreeAPIInstance().sendCreateTree(treeId, createTreeRequestId);

        treeState = TreeState.CREATING;
        mCallStatus.setText("Creating tree...");

    }
Project: nubo-test-tree    File: ViewerVideoActivity.java
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        treeState = TreeState.IDLE;

        setContentView(R.layout.activity_video_master);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        mHandler = new Handler();
        Bundle extras = getIntent().getExtras();
        if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
            Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
                    Toast.LENGTH_SHORT).show();
            finish();
            return;
        }
        this.treeId      = extras.getString(Constants.USER_NAME, "");
        Log.i(TAG, "treeId: " + treeId);

//        if (extras.containsKey(Constants.CALL_USER)) {
//            this.calluser      = extras.getString(Constants.CALL_USER, "");
//            Log.i(TAG, "callUser: " + calluser);
//        }

        this.mCallStatus   = (TextView) findViewById(R.id.call_status);

        this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

        queuedIceCandidates = new Vector<>();
        // Set up the video renderers
        RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
        VideoRendererGui.setView(videoView, null);

        remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT,
                scalingType, false);
        localRender = VideoRendererGui.create(  LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                scalingType, true);
        NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
        peerConnectionParameters = new NBMMediaConfiguration(   NBMMediaConfiguration.NBMRendererType.OPENGLES,
                NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
                NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
                receiverVideoFormat,
                NBMMediaConfiguration.NBMCameraPosition.FRONT);
        nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
        nbmWebRTCPeer.initialize();
        Log.i(TAG, "MasterVideoActivity initialized");
        mHandler.postDelayed(createOfferDelayed, 1000);

        MainActivity.getKurentoTreeAPIInstance().addObserver(this);

        treeState = TreeState.CREATING_OFFER;
        mCallStatus.setText("Creating local offer...");

    }
Project: q-municate-android    File: ConversationCallFragment.java
protected void updateVideoView(QBRTCSurfaceView surfaceViewRenderer, boolean mirror) {
    updateVideoView(surfaceViewRenderer, mirror, RendererCommon.ScalingType.SCALE_ASPECT_FILL);
}
Project: q-municate-android    File: ConversationCallFragment.java
protected void updateVideoView(QBRTCSurfaceView surfaceViewRenderer, boolean mirror, RendererCommon.ScalingType scalingType) {
    Log.i(TAG, "updateVideoView mirror:" + mirror + ", scalintType = " + scalingType);
    surfaceViewRenderer.setScalingType(scalingType);
    surfaceViewRenderer.setMirror(mirror);
    surfaceViewRenderer.requestLayout();
}
Project: q-municate-android    File: RTCGLVideoView.java
private VideoRenderer.Callbacks initRenderer(boolean mirror, int[] viewCoordinates) {
    return VideoRendererGui.createGuiRenderer(viewCoordinates[0], viewCoordinates[1], viewCoordinates[2],
            viewCoordinates[3], RendererCommon.ScalingType.SCALE_ASPECT_FILL, mirror);

}
Project: matrix-android-sdk    File: MXWebRtcView.java
/**
 * {@inheritDoc}
 */
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    int height = b - t;
    int width = r - l;

    SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();

    if (height == 0 || width == 0) {
        l = t = r = b = 0;
    } else {
        int frameHeight;
        int frameRotation;
        int frameWidth;
        ScalingType scalingType;

        synchronized (layoutSyncRoot) {
            frameHeight = this.frameHeight;
            frameRotation = this.frameRotation;
            frameWidth = this.frameWidth;
            scalingType = this.scalingType;
        }

        switch (scalingType) {
            case SCALE_ASPECT_FILL:
                // Fill this ViewGroup with surfaceViewRenderer and the latter
                // will take care of filling itself with the video similarly to
                // the cover value the CSS property object-fit.
                r = width;
                l = 0;
                b = height;
                t = 0;
                break;
            case SCALE_ASPECT_FIT:
            default:
                // Lay surfaceViewRenderer out inside this ViewGroup in accord
                // with the contain value of the CSS property object-fit.
                // SurfaceViewRenderer will fill itself with the video similarly
                // to the cover or contain value of the CSS property object-fit
                // (which will not matter, eventually).
                if (frameHeight == 0 || frameWidth == 0) {
                    l = t = r = b = 0;
                } else {
                    float frameAspectRatio
                            = (frameRotation % 180 == 0)
                            ? frameWidth / (float) frameHeight
                            : frameHeight / (float) frameWidth;
                    Point frameDisplaySize
                            = RendererCommon.getDisplaySize(
                            scalingType,
                            frameAspectRatio,
                            width, height);

                    l = (width - frameDisplaySize.x) / 2;
                    t = (height - frameDisplaySize.y) / 2;
                    r = l + frameDisplaySize.x;
                    b = t + frameDisplaySize.y;
                }
                break;
        }
    }
    surfaceViewRenderer.layout(l, t, r, b);
}
Project: VideoCRE    File: TextureViewRenderer.java
/**
 * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
 * reinitialize the renderer after a previous init()/release() cycle.
 */
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
  init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
}
Project: VideoCRE    File: TextureViewRenderer.java
/**
 * Register a callback to be invoked when a new video frame has been received.
 *
 * @param listener The callback to be invoked. The callback will be invoked on the render thread.
 *                 It should be lightweight and must not call removeFrameListener.
 * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
 *                 required.
 * @param drawer   Custom drawer to use for this frame listener.
 */
public void addFrameListener(
    EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) {
  eglRenderer.addFrameListener(listener, scale, drawerParam);
}
Project: nc-android-webrtcpeer    File: OnCallEvents.java
void onVideoScalingSwitch(RendererCommon.ScalingType scalingType);
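Note: a hedged sketch of implementing this callback in an activity, following the updateVideoView() patterns shown earlier on this page; remoteRenderer is an assumed SurfaceViewRenderer field.

@Override
public void onVideoScalingSwitch(RendererCommon.ScalingType scalingType) {
    // Apply the user-selected scaling type and relayout, as in the
    // updateVideoView() examples above.
    remoteRenderer.setScalingType(scalingType);
    remoteRenderer.requestLayout();
}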