Java 类org.webrtc.EglBase 实例源码

项目:PeSanKita-android    文件:WebRtcCallService.java   
/**
 * Creates the shared EGL context and both video renderers, blocking until the
 * work has run on the main thread (SurfaceViewRenderer is a View and must be
 * constructed/initialized there).
 */
private void initializeVideo() {
  Util.runOnMainSync(new Runnable() {
    @Override
    public void run() {
      eglBase        = EglBase.create();
      localRenderer  = new SurfaceViewRenderer(WebRtcCallService.this);
      remoteRenderer = new SurfaceViewRenderer(WebRtcCallService.this);

      // Both renderers share the one EGL context created above.
      localRenderer.init(eglBase.getEglBaseContext(), null);
      remoteRenderer.init(eglBase.getEglBaseContext(), null);

      // Same context is used for both the encoder and decoder side.
      peerConnectionFactory.setVideoHwAccelerationOptions(eglBase.getEglBaseContext(),
                                                          eglBase.getEglBaseContext());
    }
  });
}
项目:anyRTC-RTCP-Android    文件:RTCVideoView.java   
/**
 * Builds one participant tile: inflates the layout, binds its child views,
 * initializes the renderer against the shared EGL context, and records the
 * tile's position/size in percent-layout coordinates.
 *
 * @param strPeerId   peer this tile renders
 * @param eglBase     shared EGL context used to init the SurfaceViewRenderer
 * @param x,y,w,h     tile geometry consumed by the PercentFrameLayout
 * @param videoLayout layout mode for this view
 */
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) {
    this.strPeerId = strPeerId;
    this.index = index;
    this.x = x;
    this.y = y;
    this.w = w;
    this.h = h;
    this.mRTCVideoLayout = videoLayout;

    mLayout = new PercentFrameLayout(ctx);
    mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
    View view = View.inflate(ctx, R.layout.layout_top_right, null);

    mView = (SurfaceViewRenderer) view.findViewById(R.id.suface_view);
    btnClose = (ImageView) view.findViewById(R.id.img_close_render);
    mLocalCamera = (ImageView) view.findViewById(R.id.camera_off);
    mAudioImageView = (ImageView) view.findViewById(R.id.img_audio_close);
    mVideoImageView = (ImageView) view.findViewById(R.id.img_video_close);
    layoutCamera = (RelativeLayout) view.findViewById(R.id.layout_camera);
    // Renderer must be init()'d before frames can be delivered to it.
    mView.init(eglBase.getEglBaseContext(), null);
    mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
    mLayout.addView(view);
}
项目:anyRTC-Meeting-Android    文件:RTCVideoView.java   
/**
 * Builds one meeting participant tile: inflates the meet-kit layout, binds its
 * child views, initializes the renderer against the shared EGL context, and
 * records the tile's geometry for percent-based layout.
 *
 * @param strPeerId   peer this tile renders
 * @param eglBase     shared EGL context used to init the SurfaceViewRenderer
 * @param x,y,w,h     tile geometry consumed by the PercentFrameLayout
 * @param videoLayout layout mode for this view
 */
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h, AnyRTCVideoLayout videoLayout) {
    this.strPeerId = strPeerId;
    this.index = index;
    this.x = x;
    this.y = y;
    this.w = w;
    this.h = h;
    this.mRTCVideoLayout = videoLayout;

    mLayout = new PercentFrameLayout(ctx);
    mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
    View view = View.inflate(ctx, org.anyrtc.meet_kit.R.layout.layout_top_right, null);

    mView = (SurfaceViewRenderer) view.findViewById(org.anyrtc.meet_kit.R.id.suface_view);
    btnClose = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_close_render);
    mLocalCamera = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.camera_off);
    mAudioImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_audio_close);
    mVideoImageView = (ImageView) view.findViewById(org.anyrtc.meet_kit.R.id.img_video_close);
    layoutCamera = (RelativeLayout) view.findViewById(org.anyrtc.meet_kit.R.id.layout_camera);
    // Renderer must be init()'d before frames can be delivered to it.
    mView.init(eglBase.getEglBaseContext(), null);
    mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
    mLayout.addView(view);
}
项目:AndroidRTC    文件:PeerConnectionClientTest.java   
/**
 * Builds a fully wired PeerConnectionClient for a test run and immediately
 * starts the offer. Signaling is driven locally: no ICE servers, no WSS
 * endpoints, and network monitoring is disabled so the test is hermetic.
 */
PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
    MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
    VideoCapturer videoCapturer, EglBase.Context eglContext) {
  // Empty server list + all-null endpoints: this client acts as initiator
  // with signaling handled inside the test harness.
  List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
  SignalingParameters signalingParameters = new SignalingParameters(
      iceServers,
      true,             // initiator
      null, null, null, // clientId, wssUrl, wssPostUrl
      null, null);      // offerSdp, iceCandidates

  PeerConnectionFactory.Options factoryOptions = new PeerConnectionFactory.Options();
  factoryOptions.networkIgnoreMask = 0;
  factoryOptions.disableNetworkMonitor = true;

  PeerConnectionClient client = PeerConnectionClient.getInstance();
  client.setPeerConnectionFactoryOptions(factoryOptions);
  client.createPeerConnectionFactory(
      InstrumentationRegistry.getTargetContext(), peerConnectionParameters, this);
  client.createPeerConnection(
      eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters);
  client.createOffer();
  return client;
}
项目:Cable-Android    文件:WebRtcCallService.java   
/**
 * Creates the shared EGL context and both video renderers, blocking until the
 * work has run on the main thread (SurfaceViewRenderer is a View and must be
 * constructed/initialized there).
 */
private void initializeVideo() {
  Util.runOnMainSync(new Runnable() {
    @Override
    public void run() {
      eglBase        = EglBase.create();
      localRenderer  = new SurfaceViewRenderer(WebRtcCallService.this);
      remoteRenderer = new SurfaceViewRenderer(WebRtcCallService.this);

      // Both renderers share the one EGL context created above.
      localRenderer.init(eglBase.getEglBaseContext(), null);
      remoteRenderer.init(eglBase.getEglBaseContext(), null);

      // Same context is used for both the encoder and decoder side.
      peerConnectionFactory.setVideoHwAccelerationOptions(eglBase.getEglBaseContext(),
                                                          eglBase.getEglBaseContext());
    }
  });
}
项目:anyRTC-P2P-Android    文件:RTCVideoView.java   
/**
 * Builds one P2P video tile: inflates the layout, binds the renderer and the
 * camera overlay, initializes the renderer against the shared EGL context,
 * and records the tile's geometry for percent-based layout.
 *
 * @param strPeerId peer this tile renders
 * @param eglBase   shared EGL context used to init the SurfaceViewRenderer
 * @param x,y,w,h   tile geometry consumed by the PercentFrameLayout
 */
public VideoView(String strPeerId, Context ctx, EglBase eglBase, int index, int x, int y, int w, int h) {
            this.strPeerId = strPeerId;
            this.index = index;
            this.x = x;
            this.y = y;
            this.w = w;
            this.h = h;

            mLayout = new PercentFrameLayout(ctx);
//            mLayout.setBackgroundResource(R.drawable.background);
            mLayout.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
            View view = View.inflate(ctx, R.layout.layout_top_right, null);
            mView = (SurfaceViewRenderer) view.findViewById(R.id.suface_view);
            layoutCamera = (RelativeLayout) view.findViewById(R.id.layout_camera);
            // Renderer must be init()'d before frames can be delivered to it.
            mView.init(eglBase.getEglBaseContext(), null);
            mView.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT));
            mLayout.addView(view);
        }
项目:yun2win-sdk-android    文件:AVMemberView.java   
/**
 * Builds the preview tile for one call member: inflates the item layout,
 * initializes its renderer against the shared EGL context, installs the
 * click-through listener, and attaches the member's video track.
 *
 * @param rootEglBase shared EGL context for the SurfaceViewRenderer
 * @param avMember    member whose media this tile shows
 * @param trackType   which track of the member to render
 */
public AVMemberView(final Context context,EglBase rootEglBase,AVMember avMember, String trackType){
    this.context = context;
    this.rootEglBase = rootEglBase;
    this.avMember = avMember;
    this.trackType = trackType;
    convertView = LayoutInflater.from(context).inflate(R.layout.avcall_member_preview_item, null);
    viewHolder = new ViewHolder();
    viewHolder.sfv_video= (SurfaceViewRenderer)convertView.findViewById(R.id.svr_video_item);
    viewHolder.iv_header = (ImageView) convertView.findViewById(R.id.iv_av_member_avatar);
    viewHolder.rl_bg = (RelativeLayout) convertView.findViewById(R.id.rl_bg);
    viewHolder.sfv_video.init(rootEglBase.getEglBaseContext(), null);
    viewHolder.sfv_video.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    viewHolder.sfv_video.setMirror(false);
    // Keep video above other surfaces in the window's z-order.
    viewHolder.sfv_video.setZOrderMediaOverlay(true);
    convertView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Forward taps to the registered listener, if any.
            if (onMemberViewClickListener != null)
                onMemberViewClickListener.itemClick(avMemberView);
        }
    });
    avMemberView = this;
    setRenderer();
}
项目:DeviceConnect-Android    文件:YuvConverter.java   
/**
 * Sets up the GL pipeline used to convert OES textures to YUV: creates a
 * pbuffer-backed EGL context shared with |sharedContext|, compiles the
 * shader, and binds its uniforms/attributes. The context is detached at the
 * end so conversion can later be run from a different thread.
 *
 * NOTE: statement order matters — the context must be current while the
 * shader and its uniforms are configured.
 */
public YuvConverter (EglBase.Context sharedContext) {
    eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
    eglBase.createDummyPbufferSurface();
    eglBase.makeCurrent();

    shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
    shader.useProgram();
    texMatrixLoc = shader.getUniformLocation("texMatrix");
    xUnitLoc = shader.getUniformLocation("xUnit");
    coeffsLoc = shader.getUniformLocation("coeffs");
    // Sampler always reads from texture unit 0.
    GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
    GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
    // Initialize vertex shader attributes.
    shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
    // If the width is not a multiple of 4 pixels, the texture
    // will be scaled up slightly and clipped at the right border.
    shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
    eglBase.detachCurrent();
}
项目:DeviceConnect-Android    文件:WebRTCController.java   
/**
 * Terminates the current call: closes and drops the peer connection, releases
 * both video renderers, then frees the shared EGL resources (if any).
 */
public void hangup() {
    if (BuildConfig.DEBUG) {
        Log.d(TAG, "@@@ VideoChatActivity::hangup");
    }

    // Tear down the connection first so no further frames reach the
    // renderers while they are being released.
    if (mConnection != null) {
        mConnection.close();
        mConnection = null;
    }

    mRemoteRender.release();
    mLocalRender.release();

    // The EglBase is owned via the option holder and may be absent.
    final EglBase egl = mOption.getEglBase();
    if (egl != null) {
        egl.release();
    }
}
项目:nc-android-webrtcpeer    文件:PeerConnectionClient.java   
/**
 * Single-remote-renderer convenience overload: wraps |remoteRender| in a
 * one-element list and delegates to the multi-renderer variant.
 */
public void createPeerConnection(final EglBase.Context renderEGLContext,
                                 final VideoRenderer.Callbacks localRender,
                                 final VideoRenderer.Callbacks remoteRender,
                                 final VideoCapturer videoCapturer,
                                 final SignalingParameters signalingParameters) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
                         videoCapturer, signalingParameters);
}
项目:InsideCodec    文件:FrameProducer.java   
/**
 * Prepares a decode-to-texture pipeline for |videoFile|: creates a
 * SurfaceTextureHelper on the shared EGL context, registers this object as
 * the frame listener, and wraps the helper's SurfaceTexture in a Surface
 * the decoder can render into.
 *
 * @param fps      target frame rate for produced frames
 * @param callback sink notified by this producer
 */
public FrameProducer(final EglBase eglBase, final File videoFile, final int fps,
        final Callback callback) {
    mVideoFile = videoFile;
    mFps = fps;
    mCallback = callback;
    mBufferInfo = new MediaCodec.BufferInfo();

    mSurfaceTextureHelper = SurfaceTextureHelper.create("SurfaceTextureHelper",
            eglBase.getEglBaseContext());
    // Frames arriving on the helper's texture are delivered to this object.
    mSurfaceTextureHelper.startListening(this);
    mHubSurface = new Surface(mSurfaceTextureHelper.getSurfaceTexture());
}
项目:InsideCodec    文件:RcTest.java   
/**
 * Wires up the rate-control test: an encoder wrapper under test and a frame
 * producer that decodes the sample clip from external storage into it.
 */
public RcTest(final Config config, final EglBase eglBase, final SurfaceViewRenderer renderer,
        final Notifier notifier) {
    mEglBase = eglBase;
    mSurfaceViewRenderer = renderer;
    mEncoderWrapper = new EncoderWrapper(config, notifier);
    // Sample clip is expected at the root of external storage.
    mFrameProducer = new FrameProducer(mEglBase,
            new File(Environment.getExternalStorageDirectory(), "alien-covenant.mp4"),
            config.outputFps(), this);
}
项目:anyRTC-RTCP-Android    文件:RTCVideoView.java   
/**
 * Initializes the video-view container: caches the shared EGL context and
 * host layout, reads the configured layout mode, and locks the activity to
 * portrait for the auto 3x3 layout. Screen size (minus status bar) is cached
 * for later tile geometry calculations.
 *
 * @param videoView parent layout that tiles are added into
 * @param eglBase   shared EGL context for all renderers
 */
public RTCVideoView(RelativeLayout videoView, Context ctx, EglBase eglBase) {
    mAutoLayout = false;
    mContext = ctx;
    mVideoView = videoView;
    mRootEglBase = eglBase;
    mLocalRender = null;
    mRemoteRenders = new HashMap<>();
    // BUG FIX: removed "this.isHost = isHost;" — there is no isHost
    // parameter, so it assigned the field to itself (a no-op that likely
    // masked a missing constructor argument). TODO(review): confirm whether
    // isHost should instead be supplied by the caller.
    mRTCVideoLayout = AnyRTCRTCPEngine.Inst().getAnyRTCRTCPOption().getmVideoLayout();
    if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_3X3_auto) {
        ((Activity) mContext).setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    }
    mScreenWidth = ScreenUtils.getScreenWidth(mContext);
    mScreenHeight = ScreenUtils.getScreenHeight(mContext) - ScreenUtils.getStatusHeight(mContext);
}
项目:anyRTC-Meeting-Android    文件:RTCVideoView.java   
/**
 * Initializes the meeting video-view container: caches the shared EGL context
 * and host layout, reads the configured layout mode, and locks the activity
 * to portrait for the auto 3x3 layout. Screen size (minus status bar) is
 * cached for later tile geometry calculations.
 *
 * @param videoView parent layout that tiles are added into
 * @param eglBase   shared EGL context for all renderers
 */
public RTCVideoView(RelativeLayout videoView, Context ctx, EglBase eglBase) {
    mAutoLayout = false;
    mContext = ctx;
    mVideoView = videoView;
    mRootEglBase = eglBase;
    mLocalRender = null;
    mRemoteRenders = new HashMap<>();
    mRTCVideoLayout = AnyRTCMeetEngine.Inst().getAnyRTCMeetOption().getmVideoLayout();
    if (mRTCVideoLayout == AnyRTCVideoLayout.AnyRTC_V_3X3_auto) {
        ((Activity) mContext).setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    }
    mScreenWidth = ScreenUtils.getScreenWidth(mContext);
    mScreenHeight = ScreenUtils.getScreenHeight(mContext) - ScreenUtils.getStatusHeight(mContext);
}
项目:AndroidRTC    文件:PeerConnectionClientTest.java   
@Before
public void setUp() {
  signalingExecutor = Executors.newSingleThreadExecutor();
  // EglBase requires API 17+ (JELLY_BEAN_MR1); on older devices the tests
  // run without an EGL context.
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
    eglBase = EglBase.create();
  }
}
项目:anyRTC-P2P-Android    文件:RTCVideoView.java   
/**
 * Initializes the P2P video-view container: caches the shared EGL context
 * and host layout, and records the usable screen size (minus status bar)
 * for later tile geometry calculations.
 *
 * @param videoView parent layout that tiles are added into
 * @param eglBase   shared EGL context for all renderers
 */
public RTCVideoView(RelativeLayout videoView, Context ctx, EglBase eglBase) {
    this.mContext = ctx;
    mAutoLayout = false;
    mVideoView = videoView;
    mRootEglBase = eglBase;
    mLocalRender = null;
    mRemoteRenders = new HashMap<>();
    mScreenWidth = ScreenUtils.getScreenWidth(mContext);
    mScreenHeight = ScreenUtils.getScreenHeight(mContext) - ScreenUtils.getStatusHeight(mContext);
}
项目:VideoCRE    文件:HwAvcEncoder.java   
/**
 * Initializes the hardware H.264 encoder asynchronously on the codec thread,
 * using the constrained-baseline profile and the dimensions/bitrate/fps from
 * the configured video settings. The shared EGL context enables
 * texture-input (surface) encoding.
 */
public void start(final EglBase eglBase) {
    mMediaCodecHandler.post(new Runnable() {
        @Override
        public void run() {
            mVideoEncoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_H264,
                    MediaCodecVideoEncoder.H264Profile.CONSTRAINED_BASELINE.getValue(),
                    mVideoConfig.outputWidth(), mVideoConfig.outputHeight(),
                    mVideoConfig.outputBitrate(), mVideoConfig.fps(),
                    eglBase.getEglBaseContext(), HwAvcEncoder.this);
        }
    });
}
项目:VideoCRE    文件:TextureViewRenderer.java   
/**
 * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
 * for drawing frames on the EGLSurface. This class is responsible for calling release() on
 * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
 * init()/release() cycle. Must be called from the main thread.
 *
 * @param sharedContext    EGL context to share textures with (may be null)
 * @param rendererEvents   callbacks for first-frame / resolution changes
 * @param configAttributes EGL config attributes for the renderer's surface
 * @param drawer           drawer used for each frame; released by this class
 */
public void init(final EglBase.Context sharedContext,
    RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
    RendererCommon.GlDrawer drawer) {
  ThreadUtils.checkIsOnMainThread();
  this.rendererEvents = rendererEvents;
  // Reset per-stream state so a re-init starts from a clean slate.
  synchronized (layoutLock) {
    isFirstFrameRendered = false;
    rotatedFrameWidth = 0;
    rotatedFrameHeight = 0;
    frameRotation = 0;
  }
  eglRenderer.init(sharedContext, configAttributes, drawer);
}
项目:nubo-test    文件:PeerVideoActivity.java   
@Override
protected void onStart() {
    super.onStart();

    // Username is handed over by the launching activity.
    Bundle extras = getIntent().getExtras();
    this.username = extras.getString(Constants.USER_NAME, "");
    Log.i(TAG, "username: " + username);

    // One shared EGL context backs both the master and local renderers.
    EglBase rootEglBase = EglBase.create();
    masterView.init(rootEglBase.getEglBaseContext(), null);
    masterView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    localView.init(rootEglBase.getEglBaseContext(), null);
    localView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);

    // OPUS audio + VP8 video at 352x288@20fps, front camera.
    NBMMediaConfiguration peerConnectionParameters = new NBMMediaConfiguration(
            NBMMediaConfiguration.NBMRendererType.OPENGLES,
            NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
            NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
            new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20),
            NBMMediaConfiguration.NBMCameraPosition.FRONT);

    videoRequestUserMapping = new HashMap<>();

    nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, localView, this);
    nbmWebRTCPeer.registerMasterRenderer(masterView);
    Log.i(TAG, "Initializing nbmWebRTCPeer...");
    nbmWebRTCPeer.initialize();
    callState = CallState.PUBLISHING;
    mCallStatus.setText("Publishing...");
}
项目:webrtc-android-codelab    文件:MainActivity.java   
/**
 * Initializes the local and remote renderers against a freshly created,
 * shared EGL context and places both video surfaces on the media overlay
 * z-order layer.
 *
 * NOTE(review): rootEglBase is a local and its release() is never called
 * here — verify the context's lifetime is managed elsewhere.
 */
private void initVideos() {
    EglBase rootEglBase = EglBase.create();
    localVideoView.init(rootEglBase.getEglBaseContext(), null);
    remoteVideoView.init(rootEglBase.getEglBaseContext(), null);
    localVideoView.setZOrderMediaOverlay(true);
    remoteVideoView.setZOrderMediaOverlay(true);
}
项目:webrtc-android    文件:PeerConnectionClient.java   
/**
 * Creates the local media stream and connects the Socket.IO signaling
 * channel. Despite the name, no PeerConnection is created here (that call is
 * commented out below); this method:
 *  1. records renderers/events/parameters on the instance,
 *  2. on the executor thread, builds the local media stream — video track
 *     (front camera preferred) when video is enabled, plus an audio track,
 *  3. opens the Socket.IO client against mHost and registers all message
 *     handlers, then connects.
 *
 * @param renderEGLContext EGL context used for texture capture when
 *                         captureToTexture is enabled
 */
public void createPeerConnection(
            final EglBase.Context renderEGLContext,
            final VideoRenderer.Callbacks localRender,
            final VideoRenderer.Callbacks remoteRender,
            final PeerConnectionEvents events,
            final PeerConnectionParameters peerConnectionParameters) {
        this.peerConnectionParameters = peerConnectionParameters;
        this.events = events;
        videoCallEnabled = peerConnectionParameters.videoCallEnabled;
//
//      PeerConnectionFactory.initializeAndroidGlobals(, true, true,
//              false);
//      factory = new PeerConnectionFactory();

//      if (peerConnectionParameters == null) {
//          Log.e(TAG, "Creating peer connection without initializing factory.");
//          return;
//      }
        this.localRender = localRender;
        this.remoteRender = remoteRender;

        executor.execute(new Runnable() {
            @Override
            public void run() {
                createMediaConstraintsInternal();
//              createPeerConnectionInternal(renderEGLContext, iceServers);
                // Build the local stream only once; later calls reuse it.
                if(mediaStream == null) {
                    mediaStream = factory.createLocalMediaStream("ARDAMS");
                    if (videoCallEnabled) {
                        // Prefer the front camera when more than one exists.
                        String cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
                        String frontCameraDeviceName =
                                CameraEnumerationAndroid.getNameOfFrontFacingDevice();
                        if (numberOfCameras > 1 && frontCameraDeviceName != null) {
                            cameraDeviceName = frontCameraDeviceName;
                        }
                        Log.d(TAG, "Opening camera: " + cameraDeviceName);
                        videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null,
                                peerConnectionParameters.captureToTexture ? renderEGLContext : null);
                        if (videoCapturer == null) {
                            reportError("Failed to open camera");
                            return;
                        }
                        mediaStream.addTrack(createVideoTrack(videoCapturer));
                    }

                    mediaStream.addTrack(factory.createAudioTrack(
                            AUDIO_TRACK_ID,
                            factory.createAudioSource(audioConstraints)));
                }
                // Signaling: connect the Socket.IO client and wire handlers.
                try {
                    manager = new Manager(new URI(mHost));
                    client = manager.socket("/");
                } catch (URISyntaxException e) {
                    e.printStackTrace();
                }
                client
                        .on(INIT_MESSAGE, messageHandler.onInitMessage)
                        .on(TEXT_MESSAGE, messageHandler.onTextMessage)
//                      .on(INVITE_MESSAGE, messageHandler.onInviteMessage)
//                      .on(READY_MESSAGE, messageHandler.onReadyMessage)
//                      .on(OFFER_MESSAGE, messageHandler.onOfferMessage)
//                      .on(ANSWER_MESSAGE, messageHandler.onAnswerMessage)
//                      .on(ICE_CANDIDATE_MESSAGE, messageHandler.onCandidateMessage)
                        .on(RTC_MESSAGE, messageHandler.onRtcMessage)
                        .on(LEAVE_MESSAGE, messageHandler.onLeaveMessage)
                        .on(AVAILABLE_USERS_MESSAGE, messageHandler.onAvailablePeersMessage)
                        .on(PRESENCE_MESSAGE, messageHandler.onPresenceMessage);
                client.connect();
            }
        });

    }
项目:DeviceConnect-Android    文件:VideoCapturerExternalResource.java   
/**
 * Constructor. Prepares a capturer that pulls frames from an external
 * resource at a fixed 30 FPS: starts a dedicated camera thread and creates a
 * SurfaceTextureHelper bound to it.
 *
 * @param sharedContext EGL context shared with the renderer pipeline
 * @param uri    uri of resource
 * @param width  width
 * @param height height
 */
public VideoCapturerExternalResource(EglBase.Context sharedContext, final String uri, final int width, final int height) {
    mUri = uri;
    mWidth = width;
    mHeight = height;
    mFPS = 30;

    HandlerThread cameraThread = new HandlerThread(TAG);
    cameraThread.start();
    mCameraThreadHandler = new Handler(cameraThread.getLooper());
    mSurfaceHelper = SurfaceTextureHelper.create(sharedContext, mCameraThreadHandler);
}
项目:react-native-webrtc    文件:SurfaceViewRenderer.java   
/**
 * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
 * reinitialize the renderer after a previous init()/release() cycle.
 * Delegates to the full overload with the plain EGL config and the default
 * rectangle drawer.
 */
public void init(
    EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
  init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
}
项目:InsideCodec    文件:EncoderWrapper.java   
/**
 * Starts the H.264 surface-input encoder on the codec thread: creates a
 * recordable EGL context shared with |eglBase|, configures MediaCodec from
 * the test config (bitrate, fps, key-frame interval, bitrate mode, optional
 * CQ quality), binds the encoder's input surface to the new EGL context, and
 * begins draining output (async callback or a dedicated output thread).
 * Optionally schedules periodic bitrate updates on the UI handler.
 *
 * NOTE(review): statement order below is significant (configure before
 * createInputSurface before start); an IOException from codec creation is
 * only printed, leaving the encoder unstarted — confirm that is acceptable
 * for this test harness.
 */
public void start(final EglBase eglBase) {
    mHandler.post(new Runnable() {
        @Override
        public void run() {
            try {
                // Share textures with the producer's context; RECORDABLE is
                // required for encoder input surfaces.
                mEglBase = EglBase.create(eglBase.getEglBaseContext(),
                        EglBase.CONFIG_RECORDABLE);

                mEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
                MediaFormat encodeFormat = MediaFormat.createVideoFormat(
                        MediaFormat.MIMETYPE_VIDEO_AVC,
                        mConfig.outputWidth(), mConfig.outputHeight());
                encodeFormat.setInteger(MediaFormat.KEY_BIT_RATE, mConfig.initBr() * 1000);
                encodeFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mConfig.outputFps());
                encodeFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                        mConfig.outputKeyFrameInterval());
                encodeFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, mConfig.brMode());
                if (mConfig.brMode() == MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ) {
                    // CQ mode ignores KEY_BIT_RATE; quality drives the rate.
                    encodeFormat.setInteger("quality", mConfig.quality());
                }
                encodeFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
                if (mConfig.asyncEnc()) {
                    mEncoder.setCallback(EncoderWrapper.this);
                }
                mEncoder.configure(encodeFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                Log.i(TAG, "VideoFormat encoder " + encodeFormat);

                // Input surface must be created between configure() and start().
                mEncoderSurface = mEncoder.createInputSurface();

                mEncoder.start();

                mCurrentBr = mConfig.initBr();

                mEglBase.createSurface(mEncoderSurface);
                mEglBase.makeCurrent();
                mDrawer = new GlRectDrawer();
                mLastResetBitsTime = System.currentTimeMillis();

                // Sync mode needs an explicit drain loop.
                if (!mConfig.asyncEnc()) {
                    startOutputThread();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    });

    if (mConfig.updateBr()) {
        mUiHandler.postDelayed(mRcRunnable, RC_INTERVAL);
    }
}
项目:Achilles_Android    文件:MainActivity.java   
/**
 * One-shot setup for the activity: signaling socket, local renderer (with a
 * shared EGL context and hardware scaling), capture resolution from the
 * display size, the PeerConnectionFactory, and the default ICE/SDP/audio/PC
 * constraints.
 */
private void init() {
    mLocalVideoView = (SurfaceViewRenderer) findViewById(R.id.local_video_view);

    // Init ExecutorService
    mExecutorService = Executors.newSingleThreadExecutor();

    // Socket.IO initialization
    initSocket();

    // Create video renderer
    rootEglBase = EglBase.create();
    Log.d(TAG, "Created video renderer.");

    mLocalVideoView.init(rootEglBase.getEglBaseContext(), null);
    mLocalVideoView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalVideoView.setEnableHardwareScaler(true);
    // Set ProxyRenderer target to SurfaceViewRenderer
    localProxyRenderer.setTarget(mLocalVideoView);
    mLocalVideoView.setMirror(true);

    // Check permission
    /*for (String permission : MANDATORY_PERMISSIONS) {
        if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
            Log.w(TAG, "Permission " + permission + " is not granted.");
            // finish();
            return;
        }
    }*/

    // Capture at full display resolution, 30 fps.
    DisplayMetrics displayMetrics = getDisplayMetrics();
    videoWidth = displayMetrics.widthPixels;
    videoHeight = displayMetrics.heightPixels;
    videoFps = 30;

    initPeerConnectionFactory();

    // Set STUN Server
    mIceServers.add(new PeerConnection.IceServer(googleStunServer));

    // Set default SessionDescription MediaConstraints
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));

    // Set default AudioConstraints
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
    mAudioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));

    // Enable RTP data channels. NOTE(review): the original comment said
    // "Enable DTLS" but the constraint set here is RtpDataChannels — confirm
    // which was intended.
    mPcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
}
项目:AppRTC-Android    文件:PeerConnectionClient.java   
/** Creates the client and its root EGL context, shared by all renderers. */
public PeerConnectionClient() {
  rootEglBase = EglBase.create();
}
项目:AppRTC-Android    文件:PeerConnectionClient.java   
/** Returns the shared EGL context that renderers should init() with. */
public EglBase.Context getRenderContext() {
  return rootEglBase.getEglBaseContext();
}
项目:AndroidRTC    文件:PeerConnectionClient.java   
/**
 * Single-remote-renderer convenience overload: wraps |remoteRender| in a
 * one-element list and delegates to the multi-renderer variant.
 */
public void createPeerConnection(final EglBase.Context renderEGLContext,
                                 final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
                                 final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
            videoCapturer, signalingParameters);
}
项目:VideoCRE    文件:VideoSource.java   
/** Returns the EglBase owned by this source. */
public EglBase getRootEglBase() {
    return mEglBase;
}
项目:yun2win-sdk-android    文件:AVCallActivity.java   
/**
 * Creates the root EGL context and initializes the call's renderer, keeping
 * the video surface below other overlay surfaces in the window z-order.
 */
private void init(){
    rootEglBase =  EglBase.create();
    svr_video.init(rootEglBase.getEglBaseContext(), null);
    svr_video.setZOrderMediaOverlay(false);
}
项目:webrtc-android-codelab    文件:MainActivity.java   
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    //Initialize PeerConnectionFactory globals.
    //Params are context, initAudio,initVideo and videoCodecHwAcceleration
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);

    //Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory peerConnectionFactory = new PeerConnectionFactory(options);


    //Now create a VideoCapturer instance.
    // NOTE(review): createVideoCapturer() may return null (e.g. no camera);
    // the calls below would then NPE — confirm a device guard exists.
    VideoCapturer videoCapturerAndroid = createVideoCapturer();
    //Create MediaConstraints - Will be useful for specifying video and audio constraints.
    MediaConstraints constraints = new MediaConstraints();

    //Create a VideoSource instance
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    //create an AudioSource instance
    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    //we will start capturing the video from the camera
    //width,height and fps
    videoCapturerAndroid.startCapture(1000, 1000, 30);

    //create surface renderer, init it and add the renderer to the track
    SurfaceViewRenderer videoView = (SurfaceViewRenderer) findViewById(R.id.surface_rendeer);
    videoView.setMirror(true);

    EglBase rootEglBase = EglBase.create();
    videoView.init(rootEglBase.getEglBaseContext(), null);

    localVideoTrack.addRenderer(new VideoRenderer(videoView));


}
项目:webrtc-android    文件:PeerConnectionClient.java   
/** Convenience overload: delegates with no ICE servers (null). */
private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
    createPeerConnectionInternal(renderEGLContext, null);
}
项目:DeviceConnect-Android    文件:VideoChatActivity.java   
/**
 * Boots the video-chat screen: wires up the layouts and local/remote
 * renderers (sharing one EGL context), guards against re-delivery of the
 * same call intent via its timestamp, then builds the WebRTCController from
 * the intent extras (peer config, media URIs, address, offer flag, audio
 * parameters) and shows the video views. An error dialog is raised when the
 * intent is missing or the call is a duplicate.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    mLocalLayout = (PercentFrameLayout) findViewById(R.id.local_view_layout);
    mRemoteLayout = (PercentFrameLayout) findViewById(R.id.remote_video_layout);

    mLocalRender = (MySurfaceViewRenderer) findViewById(R.id.local_video_view);
    mLocalRender.setType(MySurfaceViewRenderer.TYPE_LOCAL);
    mRemoteRender = (MySurfaceViewRenderer) findViewById(R.id.remote_video_view);
    mRemoteRender.setType(MySurfaceViewRenderer.TYPE_REMOTE);

    // One shared EGL context; local view floats above the remote view.
    EglBase eglBase = EglBase.create();
    mRemoteRender.init(eglBase.getEglBaseContext(), null);
    mLocalRender.init(eglBase.getEglBaseContext(), null);
    mLocalRender.setZOrderMediaOverlay(true);

    WebRtcAudioTrack.setAudioTrackModuleFactory(null);

    Intent intent = getIntent();
    if (intent != null) {
        // Reject a re-delivered intent for a call we already handled.
        long prevTimeStamp = ((WebRTCApplication) getApplication()).getCallTimeStamp();
        long callTimeStamp = intent.getLongExtra(EXTRA_CALL_TIMESTAMP, 0);
        if (prevTimeStamp == callTimeStamp) {
            createWebRTCErrorDialog();
            return;
        }
        ((WebRTCApplication) getApplication()).setCallTimeStamp(callTimeStamp);

        PeerConfig config = intent.getParcelableExtra(EXTRA_CONFIG);
        String videoUri = intent.getStringExtra(EXTRA_VIDEO_URI);
        String audioUri = intent.getStringExtra(EXTRA_AUDIO_URI);
        String addressId = intent.getStringExtra(EXTRA_ADDRESS_ID);
        boolean offer = intent.getBooleanExtra(EXTRA_OFFER, false);
        String audioSampleRate = intent.getStringExtra(EXTRA_AUDIOSAMPLERATE);
        int audioSampleRateValue;
        // Default to 48 kHz when the caller did not specify a rate.
        if (audioSampleRate == null) {
            audioSampleRateValue = WebRTCVideoChatProfile.PARAM_RATE_48000;
        } else {
            audioSampleRateValue = Integer.valueOf(audioSampleRate);
        }
        String audioBitDepth = intent.getStringExtra(EXTRA_AUDIOBITDEPTH);
        String audioChannel = intent.getStringExtra(EXTRA_AUDIOCHANNEL);

        WebRTCController.Builder builder = new WebRTCController.Builder();
        builder.setApplication((WebRTCApplication) getApplication());
        builder.setWebRTCEventListener(mListener);
        builder.setContext(this);
        builder.setEglBase(eglBase);
        builder.setConfig(config);
        builder.setRemoteRender(mRemoteRender);
        builder.setLocalRender(mLocalRender);
        builder.setVideoUri(videoUri);
        builder.setAudioUri(audioUri);
        builder.setAddressId(addressId);
        builder.setOffer(offer);
        builder.setAudioSampleRate(audioSampleRateValue);
        builder.setAudioBitDepth(audioBitDepth);
        builder.setAudioChannel(audioChannel);
        builder.setLandscape(isLandscape());
        mWebRTCController = builder.create();
        updateVideoView(videoUri);
    } else {
        openWebRTCErrorDialog();
    }
}
项目:DeviceConnect-Android    文件:MySurfaceViewRenderer.java   
/** Creates the YUV converter that shares textures with |context|. */
public void createYuvConverter(EglBase.Context context) {
    mYuvConverter = new YuvConverter(context);
}
项目:DeviceConnect-Android    文件:Peer.java   
/**
 * Enables hardware video acceleration on the factory, using the same EGL
 * context for both the encoder and decoder side.
 */
public void setVideoHwAccelerationOptions(EglBase.Context renderEGLContext) {
    mFactory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
}
项目:DeviceConnect-Android    文件:PeerOption.java   
/** Returns the EglBase configured on this option, or null if unset. */
public EglBase getEglBase() {
    return mEglBase;
}
项目:DeviceConnect-Android    文件:PeerOption.java   
/** Sets the EglBase this option carries; ownership stays with the caller. */
public void setEglBase(EglBase eglBase) {
    mEglBase = eglBase;
}
项目:DeviceConnect-Android    文件:WebRTCController.java   
/** Builder setter for the shared EglBase; returns this for chaining. */
public Builder setEglBase(final EglBase eglBase) {
    mEglBase = eglBase;
    return this;
}
项目:voip_android    文件:PeerConnectionClient.java   
/**
 * Single-remote-renderer convenience overload: wraps |remoteRender| in a
 * one-element list and delegates to the multi-renderer variant.
 */
public void createPeerConnection(final EglBase.Context renderEGLContext,
                                 final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
                                 final VideoCapturer videoCapturer) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
            videoCapturer);
}
项目:restcomm-android-sdk    文件:PeerConnectionClient.java   
/** Creates the client and its root EGL context, shared by all renderers. */
public PeerConnectionClient() {
  rootEglBase = EglBase.create();
}