Java class org.webrtc.VideoRendererGui usage examples
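The snippets below, collected from open-source Android projects, show the typical lifecycle of the (now long-deprecated) org.webrtc.VideoRendererGui helper: bind it to a GLSurfaceView with setView(), create VideoRenderer.Callbacks instances with create() (x, y, width and height are percentages of the surface, and the last flag mirrors the image), attach them to video tracks via new VideoRenderer(callbacks), reposition them with update(), and release them with remove(). Exact signatures varied across WebRTC revisions (older builds expose createGui()/createGuiRenderer() and a one-argument setView()), so the following is only a sketch of the pattern; the field and resource names are invented for illustration and the usual android.* and org.webrtc.* imports are assumed.

// Minimal VideoRendererGui flow (old WebRTC Android API); all names are illustrative.
GLSurfaceView glView = (GLSurfaceView) findViewById(R.id.glview);  // hypothetical view id
glView.setPreserveEGLContextOnPause(true);
VideoRendererGui.setView(glView, new Runnable() {
    @Override
    public void run() {
        // EGL context is ready; safe to create the PeerConnectionFactory here.
    }
});
// Full-screen remote view plus a mirrored 25%x25% local preview near the top-right corner.
VideoRenderer.Callbacks remoteRender = VideoRendererGui.create(
        0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
VideoRenderer.Callbacks localRender = VideoRendererGui.create(
        70, 5, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
// Once a MediaStream arrives, attach and optionally reposition the renderers:
// stream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
// VideoRendererGui.update(remoteRender, 0, 0, 100, 100,
//         VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);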

Project: newwebrtc    File: VideoChatActivity.java
@Override
public void onAddRemoteStream(final MediaStream remoteStream, final PnPeer peer) {
    super.onAddRemoteStream(remoteStream, peer); // Will log values
    VideoChatActivity.this.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Toast.makeText(VideoChatActivity.this,"Connected to " + peer.getId(), Toast.LENGTH_SHORT).show();
            try {
                if(remoteStream.audioTracks.size()==0 || remoteStream.videoTracks.size()==0) return;
                mCallStatus.setVisibility(View.GONE);
                remoteStream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
                VideoRendererGui.update(remoteRender, 0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
                VideoRendererGui.update(localRender, 72, 65, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FIT, true);
            }
            catch (Exception e){ e.printStackTrace(); }
        }
    });
}
Project: VideoMeeting    File: RoomChatPresenter.java
public RoomChatPresenter(IRoomChatView view) {
    mView = view;
    mSocketAddress = "http://" + App.getInstance().getString(R.string.stream_host);
    mSocketAddress += (":" + App.getInstance().getString(R.string.stream_port) + "/");

    GLSurfaceView surfaceView = mView.getSurfaceView();
    surfaceView.setPreserveEGLContextOnPause(true);
    surfaceView.setKeepScreenOn(true);
    VideoRendererGui.setView(surfaceView, new Runnable() {
        @Override
        public void run() {
            // SurfaceView is ready (EGL context ready callback)
            L.d("eglContextReadyCallback");
            init();
        }
    });

    localRender = VideoRendererGui.create(
            0, 0,
            50, 50, scalingType, true);

}
Project: janus-gateway-android    File: JanusActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    java.lang.System.setProperty("java.net.preferIPv6Addresses", "false");
    java.lang.System.setProperty("java.net.preferIPv4Stack", "true");
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    setContentView(R.layout.activity_janus);

    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

    vsv = (GLSurfaceView) findViewById(R.id.glview);
    vsv.setPreserveEGLContextOnPause(true);
    vsv.setKeepScreenOn(true);
    VideoRendererGui.setView(vsv, new MyInit());

    // Coordinates are percentages of the GLSurfaceView: a 25%x25% local view near the
    // bottom-right corner and a 25%x25% remote view in the top-left corner.
    localRender = VideoRendererGui.create(72, 72, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    remoteRender = VideoRendererGui.create(0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
}
Project: respoke-sdk-android    File: RespokeCall.java
/**
 *  Attach the call's video renderers to the specified GLSurfaceView
 *
 *  @param glView  The GLSurfaceView on which to render video
 */
public void attachVideoRenderer(GLSurfaceView glView) {
    if (null != glView) {
        VideoRendererGui.setView(glView, new Runnable() {
            @Override
            public void run() {
                Log.d(TAG, "VideoRendererGui GL Context ready");
            }
        });

        remoteRender = VideoRendererGui.create(0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
        localRender = VideoRendererGui.create(70, 5, 25, 25,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    }
}
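On the caller side, this method is typically handed the GLSurfaceView from the application's own layout before the call is started or answered; a minimal sketch, where the call field and the R.id.videoView id are made up for illustration:

// Hypothetical usage of RespokeCall.attachVideoRenderer(); field and view id names are illustrative.
GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.videoView);
call.attachVideoRenderer(videoView);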
Project: webrtc-android    File: RtcActivity.java
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().addFlags(
                LayoutParams.FLAG_FULLSCREEN
                        | LayoutParams.FLAG_KEEP_SCREEN_ON
                        | LayoutParams.FLAG_DISMISS_KEYGUARD
                        | LayoutParams.FLAG_SHOW_WHEN_LOCKED
                        | LayoutParams.FLAG_TURN_SCREEN_ON);
//        setContentView(R.layout.main);
//        mSocketAddress = "http://" + getResources().getString(R.string.host);
//        mSocketAddress += (":" + getResources().getString(R.string.port) + "/");
//
//        vsv = (GLSurfaceView) findViewById(R.id.glview_call);
//        vsv.setPreserveEGLContextOnPause(true);
//        vsv.setKeepScreenOn(true);
        VideoRendererGui.setView(vsv, new Runnable() {
            @Override
            public void run() {
                init();
            }
        });

        // local and remote render
        remoteRender = VideoRendererGui.create(
                REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
        localRender = VideoRendererGui.create(
                LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
                LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);

        final Intent intent = getIntent();
        final String action = intent.getAction();

        if (Intent.ACTION_VIEW.equals(action)) {
            final List<String> segments = intent.getData().getPathSegments();
            callerId = segments.get(0);
        }
    }
Project: webrtc-android    File: RtcActivity.java
@Override
public void onLocalStream(MediaStream localStream) {
    localStream.videoTracks.get(0).addRenderer(new VideoRenderer(localRender));
    VideoRendererGui.update(localRender,
            LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
            LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING,
            scalingType, true);
}
Project: webrtc-android    File: RtcActivity.java
@Override
public void onAddRemoteStream(MediaStream remoteStream, int endPoint) {
    remoteStream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
    VideoRendererGui.update(remoteRender,
            REMOTE_X, REMOTE_Y,
            REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, true);
    VideoRendererGui.update(localRender,
            LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
            LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
            scalingType, true);
}
Project: webrtc-android    File: RtcActivity.java
@Override
public void onRemoveRemoteStream(int endPoint) {
    VideoRendererGui.update(localRender,
            LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
            LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING,
            scalingType, true);
}
Project: VideoMeeting    File: RoomChatPresenter.java
private void init() {
    if (mView == null) {
        L.w("view is null, it may be destroyed");
        return;
    }
    Point displaySize = mView.getDisplaySize();
    // TODO: make sure the phone camera actually supports the displaySize resolution
    // (video width and height; see the sketch after this method).
    PeerConnectionParameters params = new PeerConnectionParameters(
            true, false, displaySize.x, displaySize.y, 30, 1, VIDEO_CODEC_VP9, true, 1, AUDIO_CODEC_OPUS, true);

    mWebRtcClient = new WebRtcClient(this, mSocketAddress, params, VideoRendererGui.getEGLContext());
}
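The TODO above asks for a check that the camera supports the requested displaySize; one possible check using the legacy android.hardware.Camera API that this generation of WebRTC code targeted (an illustration only, not something VideoMeeting itself ships):

// Sketch: verify a requested capture size against the camera's supported preview sizes.
private static boolean isPreviewSizeSupported(int width, int height) {
    Camera camera = Camera.open();  // default (back-facing) camera
    try {
        for (Camera.Size size : camera.getParameters().getSupportedPreviewSizes()) {
            if (size.width == width && size.height == height) {
                return true;
            }
        }
        return false;
    } finally {
        camera.release();
    }
}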
Project: VideoMeeting    File: RoomChatPresenter.java
private void addRender(MediaStream stream, int position) {
    VideoRenderer.Callbacks render;
    L.d("addRender position is " + position);
    if (position == 0) {
        render = localRender;
    } else {
        render = VideoRendererGui.create(position % 2 == 0 ? 0 : 50,
                position / 2 * 50,
                50, 50,
                scalingType, false);
    }
    stream.videoTracks.get(0).addRenderer(new VideoRenderer(render));
}
Project: janus-gateway-android    File: EchoTest.java
@Override
public void onRemoteStream(MediaStream stream) {
    stream.videoTracks.get(0).setEnabled(true);
    if(stream.videoTracks.get(0).enabled())
        Log.d("JANUSCLIENT", "video tracks enabled");
    stream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender));
    VideoRendererGui.update(remoteRender, 0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
    VideoRendererGui.update(localRender, 72, 72, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
}
Project: janus-gateway-android    File: JanusActivity.java
private void init() {
    try {
        EGLContext con = VideoRendererGui.getEGLContext();
        echoTest = new EchoTest(localRender, remoteRender);
        echoTest.initializeMediaContext(JanusActivity.this, true, true, true, con);
        echoTest.Start();

    } catch (Exception ex) {
        Log.e("computician.janusclient", ex.getMessage());
    }
}
Project: webrtcpeer-android    File: NBMWebRTCPeer.java
private boolean startLocalMediaSync() {
    if (mediaResourceManager != null && mediaResourceManager.getLocalMediaStream() == null) {
        mediaResourceManager.createLocalMediaStream(VideoRendererGui.getEglBaseContext(), localRender);
        mediaResourceManager.startVideoSource();
        mediaResourceManager.selectCameraPosition(config.getCameraPosition());
        return true;
    } else {
        return false;
    }
}
Project: respoke-sdk-android    File: Respoke.java
/**
 *  Notify the shared SDK instance that the specified client has connected. This is for internal use only, and should never be called by your client application.
 *
 *  @param client  The client that just connected
 */
public void clientConnected(RespokeClient client) {
    if (null != pushToken) {
        registerPushServices();
    }

    if (!factoryStaticInitialized) {
        // Perform a one-time WebRTC global initialization
        PeerConnectionFactory.initializeAndroidGlobals(context, true, true, true, VideoRendererGui.getEGLContext());
        factoryStaticInitialized = true;
    }
}
Project: MediaCodecTest    File: MediaCodecActivity.java
public void onClick(View v) {
    boolean useSurfaceLocal = true;
    boolean useSurfaceRemote = false;
    Log.d(TAG, "Start decoder WebRTC test. GUI thread id = " + Thread.currentThread().getId());
    DecodeWebRTCTest testLocal = new DecodeWebRTCTest();
    //DecodeWebRTCTest testRemote = new DecodeWebRTCTest();
    try {
        testLocal.testDecoder(localRenderer, useSurfaceLocal, VideoRendererGui.getEGLContext());
        //testRemote.testDecoder(remoteRenderer, useSurfaceRemote, VideoRendererGui.getEGLContext());
    } catch (Throwable e) {
        Log.e(TAG, e.toString());
    }
    Log.d(TAG, "Decoder WebRTC test done");
}
Project: q-municate-android    File: RTCGLVideoView.java
public void updateRenderer(RendererSurface rendererSurface, RendererConfig config){
    boolean mainRenderer = RendererSurface.MAIN.equals(rendererSurface);
    VideoRenderer.Callbacks callbacks = mainRenderer ? mainRendererCallback
            :localRendererCallback;

    if (config.coordinates != null) {
        setViewCoordinates((mainRenderer ? remoteCoords : localCoords),
                config.coordinates);
    }
    setRendererMirror(config.mirror, rendererSurface);
    int[] viewCoordinates = mainRenderer ? remoteCoords : localCoords;
    VideoRendererGui.update(callbacks, viewCoordinates[0], viewCoordinates[1], viewCoordinates[2],
            viewCoordinates[3], RendererCommon.ScalingType.SCALE_ASPECT_FILL,
            (mainRenderer ? mainMirror : secondMirror));
}
Project: q-municate-android    File: RTCGLVideoView.java
public void  release(){
    if (localRendererCallback != null) {
        VideoRendererGui.remove(localRendererCallback);
    }
    if (mainRendererCallback != null) {
        VideoRendererGui.remove(mainRendererCallback);
    }
}
Project: q-municate-android    File: RTCGLVideoView.java
private void init(TypedArray typedArray) {
    VideoRendererGui.setView(this, null);
    if (typedArray != null) {
        setValuefromResources(typedArray);
        typedArray.recycle();
    }

    obtainMainVideoRenderer();
}
Project: nubo-test-tree    File: MasterVideoActivity.java
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        treeState = TreeState.IDLE;

        setContentView(R.layout.activity_video_master);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        mHandler = new Handler();
        Bundle extras = getIntent().getExtras();
        if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
            Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
                    Toast.LENGTH_SHORT).show();
            finish();
            return;
        }
        this.treeId      = extras.getString(Constants.USER_NAME, "");
        Log.i(TAG, "treeId: " + treeId);

        this.mCallStatus   = (TextView) findViewById(R.id.call_status);

        this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);
        // Set up the List View for chatting
        RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
        VideoRendererGui.setView(videoView, null);

        remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT,
                scalingType, false);
        localRender = VideoRendererGui.create(  LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                scalingType, true);
        NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
        peerConnectionParameters = new NBMMediaConfiguration(   NBMMediaConfiguration.NBMRendererType.OPENGLES,
                NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
                NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
                receiverVideoFormat,
                NBMMediaConfiguration.NBMCameraPosition.FRONT);
        nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
        nbmWebRTCPeer.initialize();
        Log.i(TAG, "MasterVideoActivity initialized");
//        mHandler.postDelayed(publishDelayed, 4000);

        MainActivity.getKurentoTreeAPIInstance().addObserver(this);

        createTreeRequestId = ++Constants.id;
        MainActivity.getKurentoTreeAPIInstance().sendCreateTree(treeId, createTreeRequestId);

        treeState = TreeState.CREATING;
        mCallStatus.setText("Creating tree...");

    }
Project: nubo-test-tree    File: ViewerVideoActivity.java
@Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        treeState = TreeState.IDLE;

        setContentView(R.layout.activity_video_master);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        mHandler = new Handler();
        Bundle extras = getIntent().getExtras();
        if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
            Toast.makeText(this, "Need to pass username to MasterVideoActivity in intent extras (Constants.USER_NAME).",
                    Toast.LENGTH_SHORT).show();
            finish();
            return;
        }
        this.treeId      = extras.getString(Constants.USER_NAME, "");
        Log.i(TAG, "treeId: " + treeId);

//        if (extras.containsKey(Constants.CALL_USER)) {
//            this.calluser      = extras.getString(Constants.CALL_USER, "");
//            Log.i(TAG, "callUser: " + calluser);
//        }

        this.mCallStatus   = (TextView) findViewById(R.id.call_status);

        this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

        queuedIceCandidates = new Vector<>();
        // Set up the List View for chatting
        RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_FILL;
        VideoRendererGui.setView(videoView, null);

        remoteRender = VideoRendererGui.create( REMOTE_X, REMOTE_Y,
                REMOTE_WIDTH, REMOTE_HEIGHT,
                scalingType, false);
        localRender = VideoRendererGui.create(  LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
                LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
                scalingType, true);
        NBMMediaConfiguration.NBMVideoFormat receiverVideoFormat = new NBMMediaConfiguration.NBMVideoFormat(352, 288, PixelFormat.RGB_888, 20);
        peerConnectionParameters = new NBMMediaConfiguration(   NBMMediaConfiguration.NBMRendererType.OPENGLES,
                NBMMediaConfiguration.NBMAudioCodec.OPUS, 0,
                NBMMediaConfiguration.NBMVideoCodec.VP8, 0,
                receiverVideoFormat,
                NBMMediaConfiguration.NBMCameraPosition.FRONT);
        nbmWebRTCPeer = new NBMWebRTCPeer(peerConnectionParameters, this, remoteRender, this);
        nbmWebRTCPeer.initialize();
        Log.i(TAG, "MasterVideoActivity initialized");
        mHandler.postDelayed(createOfferDelayed, 1000);

        MainActivity.getKurentoTreeAPIInstance().addObserver(this);

        treeState = TreeState.CREATING_OFFER;
        mCallStatus.setText("Creating local offer...");

    }
Project: krankygeek    File: MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);

    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Project: janus-gateway-android    File: EchoTest.java
@Override
public void onLocalStream(MediaStream stream) {
    stream.videoTracks.get(0).addRenderer(new VideoRenderer(localRender));
    VideoRendererGui.update(localRender, 0, 0, 25, 25, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
}
Project: MediaCodecTest    File: MediaCodecActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Log.d(TAG, "onCreate: Model: " + Build.MODEL);

    //getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    //getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    mLayout = new RelativeLayout(this);
    //LinearLayout layout = new LinearLayout(this);
    //layout.setOrientation(LinearLayout.VERTICAL);


    if (useGlSurfaceView) {
        glSv = new GLSurfaceView(this);
        if (useVideoRendererGui) {
            VideoRendererGui.setView(glSv);
            localRenderer = VideoRendererGui.createGuiRenderer(0, 0, 100, 100, ScalingType.SCALE_ASPECT_FIT);
            //localRenderer = VideoRendererGui.createGuiRenderer(0, 0, 50, 100, ScalingType.SCALE_ASPECT_FIT);
            //localRenderer = VideoRendererGui.createGuiRenderer(0, 0, 100, 30, ScalingType.SCALE_ASPECT_FILL);
            //localRenderer = VideoRendererGui.createGuiRenderer(5, 5, 90, 90);
            //localRenderer = VideoRendererGui.createGuiRenderer(5, 5, 40, 90);
            remoteRenderer = VideoRendererGui.createGuiRenderer(50, 0, 50, 100, ScalingType.SCALE_ASPECT_FIT);
        } else {
            mRenderer = new MyGLRenderer(glSv);
        }

        mLayout.addView(glSv);
    }
    else {
        sv = new SurfaceView(this);
        sv.getHolder().addCallback(this);
        mLayout.addView(sv);
    }

    RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(
            LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
    lp.addRule(RelativeLayout.ALIGN_PARENT_TOP);
    buttonStartSurface = new Button(this);
    buttonStartSurface.setText("Start encoder test with surfaces.");
    buttonStartSurface.setLayoutParams(lp);
    buttonStartSurface.setBackgroundColor(0x80E0E0E0);
    buttonStartSurface.setOnClickListener(mStartSurfaceCameraListener);
    mLayout.addView(buttonStartSurface, lp);

    lp = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
    lp.addRule(RelativeLayout.CENTER_IN_PARENT);
    buttonStartBuffer = new Button(this);
    buttonStartBuffer.setText("Start encoder test with byte buffers.");
    buttonStartBuffer.setLayoutParams(lp);
    buttonStartBuffer.setBackgroundColor(0x80E0E0E0);
    buttonStartBuffer.setOnClickListener(mStartBufferCameraListener);
    mLayout.addView(buttonStartBuffer, lp);

    lp = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
    lp.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
    buttonStartDecoder = new Button(this);
    buttonStartDecoder.setText("Start decoder test with surface.");
    buttonStartDecoder.setLayoutParams(lp);
    buttonStartDecoder.setBackgroundColor(0x80E0E0E0);
    //buttonStartDecoder.setOnClickListener(mStartDecodeSurfaceListener);
    buttonStartDecoder.setOnClickListener(mStartDecodeWebRTCListener);
    mLayout.addView(buttonStartDecoder, lp);

    setContentView(mLayout);
}
Project: WebRTCDemo    File: AppRTCDemoActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  Thread.setDefaultUncaughtExceptionHandler(
      new UnhandledExceptionHandler(this));

  getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

  Point displaySize = new Point();
  getWindowManager().getDefaultDisplay().getRealSize(displaySize);

  vsv = new AppRTCGLView(this, displaySize);
  VideoRendererGui.setView(vsv);
  remoteRender = VideoRendererGui.create(0, 0, 100, 100);
  localRender = VideoRendererGui.create(70, 5, 25, 25);

  vsv.setOnClickListener(new View.OnClickListener() {
      @Override public void onClick(View v) {
        toggleHUD();
      }
    });
  setContentView(vsv);
  logAndToast("Tap the screen to toggle stats visibility");

  hudView = new TextView(this);
  hudView.setTextColor(Color.BLACK);
  hudView.setBackgroundColor(Color.WHITE);
  hudView.setAlpha(0.4f);
  hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
  hudView.setVisibility(View.INVISIBLE);
  addContentView(hudView, hudLayout);

  if (!factoryStaticInitialized) {
    abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
        this, true, true),
      "Failed to initializeAndroidGlobals");
    factoryStaticInitialized = true;
  }

  AudioManager audioManager =
      ((AudioManager) getSystemService(AUDIO_SERVICE));
  // TODO(fischman): figure out how to do this Right(tm) and remove the
  // suppression.
  @SuppressWarnings("deprecation")
  boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
  audioManager.setMode(isWiredHeadsetOn ?
      AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
  audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

  sdpMediaConstraints = new MediaConstraints();
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveAudio", "true"));
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveVideo", "true"));

  final Intent intent = getIntent();
  if ("android.intent.action.VIEW".equals(intent.getAction())) {
    connectToRoom(intent.getData().toString());
    return;
  }
  showGetRoomUI();
}
Project: q-municate-android    File: RTCGLVideoView.java
public void removeLocalRendererCallback(){
    if (localRendererCallback != null) {
        VideoRendererGui.remove(localRendererCallback);
        localRendererCallback = null;
    }
}
Project: q-municate-android    File: RTCGLVideoView.java
public void removeMainRendererCallback(){
    if (mainRendererCallback != null) {
        VideoRendererGui.remove(mainRendererCallback);
        mainRendererCallback = null;
    }
}
Project: q-municate-android    File: RTCGLVideoView.java
private VideoRenderer.Callbacks initRenderer(boolean mirror, int[] viewCoordinates) {
    return VideoRendererGui.createGuiRenderer(viewCoordinates[0], viewCoordinates[1], viewCoordinates[2],
            viewCoordinates[3], RendererCommon.ScalingType.SCALE_ASPECT_FILL, mirror);

}
Project: apprtc-android    File: AppRTCDemoActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  Thread.setDefaultUncaughtExceptionHandler(
      new UnhandledExceptionHandler(this));

  getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

  Point displaySize = new Point();
  getWindowManager().getDefaultDisplay().getRealSize(displaySize);

  vsv = new AppRTCGLView(this, displaySize);
  VideoRendererGui.setView(vsv);
  remoteRender = VideoRendererGui.create(0, 0, 100, 100);
  localRender = VideoRendererGui.create(70, 5, 25, 25);

  vsv.setOnClickListener(new View.OnClickListener() {
      @Override public void onClick(View v) {
        toggleHUD();
      }
    });
  setContentView(vsv);
  logAndToast("Tap the screen to toggle stats visibility");

  hudView = new TextView(this);
  hudView.setTextColor(Color.BLACK);
  hudView.setBackgroundColor(Color.WHITE);
  hudView.setAlpha(0.4f);
  hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
  hudView.setVisibility(View.INVISIBLE);
  addContentView(hudView, hudLayout);

  if (!factoryStaticInitialized) {
    abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
        this, true, true),
      "Failed to initializeAndroidGlobals");
    factoryStaticInitialized = true;
  }

  AudioManager audioManager =
      ((AudioManager) getSystemService(AUDIO_SERVICE));
  // TODO(fischman): figure out how to do this Right(tm) and remove the
  // suppression.
  @SuppressWarnings("deprecation")
  boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
  audioManager.setMode(isWiredHeadsetOn ?
      AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
  audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

  sdpMediaConstraints = new MediaConstraints();
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveAudio", "true"));
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveVideo", "true"));

  final Intent intent = getIntent();
  if ("android.intent.action.VIEW".equals(intent.getAction())) {
    connectToRoom(intent.getData().toString());
    return;
  }
  showGetRoomUI();
}
Project: droidkit-webrtc    File: AppRTCDemoActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  Thread.setDefaultUncaughtExceptionHandler(
      new UnhandledExceptionHandler(this));

  getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

  Point displaySize = new Point();
  getWindowManager().getDefaultDisplay().getRealSize(displaySize);

  vsv = new AppRTCGLView(this, displaySize);
  VideoRendererGui.setView(vsv);
  remoteRender = VideoRendererGui.create(0, 0, 100, 100);
  localRender = VideoRendererGui.create(70, 5, 25, 25);

  vsv.setOnClickListener(new View.OnClickListener() {
      @Override public void onClick(View v) {
        toggleHUD();
      }
    });
  setContentView(vsv);
  logAndToast("Tap the screen to toggle stats visibility");

  hudView = new TextView(this);
  hudView.setTextColor(Color.BLACK);
  hudView.setBackgroundColor(Color.WHITE);
  hudView.setAlpha(0.4f);
  hudView.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
  hudView.setVisibility(View.INVISIBLE);
  addContentView(hudView, hudLayout);

  if (!factoryStaticInitialized) {
    abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
        this, true, true),
      "Failed to initializeAndroidGlobals");
    factoryStaticInitialized = true;
  }

  AudioManager audioManager =
      ((AudioManager) getSystemService(AUDIO_SERVICE));
  // TODO(fischman): figure out how to do this Right(tm) and remove the
  // suppression.
  @SuppressWarnings("deprecation")
  boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
  audioManager.setMode(isWiredHeadsetOn ?
      AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
  audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

  sdpMediaConstraints = new MediaConstraints();
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveAudio", "true"));
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveVideo", "true"));

  final Intent intent = getIntent();
  if ("android.intent.action.VIEW".equals(intent.getAction())) {
    connectToRoom(intent.getData().toString());
    return;
  }
  showGetRoomUI();
}