Java 类org.webrtc.MediaConstraints 实例源码

项目:saltyrtc-demo    文件:WebRTC.java   
/**
 * Wires a SaltyRTC signaling task to a fresh WebRTC peer connection.
 * Must run after the Android context is available; callers rely on
 * {@code this.pc} being non-null afterwards.
 */
WebRTC(WebRTCTask task, MainActivity activity) {
    this.task = task;
    this.activity = activity;

    // Initialize Android globals (must happen once, before any factory use).
    // See https://bugs.chromium.org/p/webrtc/issues/detail?id=3416
    PeerConnectionFactory.initializeAndroidGlobals(activity, false);

    // Set ICE servers: always STUN, plus TURN when credentials are configured.
    List<PeerConnection.IceServer> iceServers = new ArrayList<>();
    iceServers.add(new org.webrtc.PeerConnection.IceServer("stun:" + Config.STUN_SERVER));
    if (Config.TURN_SERVER != null) {
        iceServers.add(new org.webrtc.PeerConnection.IceServer("turn:" + Config.TURN_SERVER,
                Config.TURN_USER, Config.TURN_PASS));
    }

    // Create peer connection with empty (default) media constraints.
    final PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    this.factory = new PeerConnectionFactory(options);
    this.constraints = new MediaConstraints();
    this.pc = this.factory.createPeerConnection(iceServers, constraints, new PeerConnectionObserver());

    // Add task message event handler so signaling messages reach this instance.
    this.task.setMessageHandler(new TaskMessageHandler());
}
项目:webrtc-android    文件:WebRtcClient.java   
/**
 * Connects to the signaling server at {@code host} and prepares the WebRTC
 * factory and default peer-connection constraints.
 *
 * @param listener callback sink for connection/stream events
 * @param host     socket.io signaling server URI
 * @param params   peer connection parameters (codec HW acceleration, etc.)
 * @throws IllegalArgumentException if {@code host} is not a valid URI
 */
public WebRtcClient(RtcListener listener, String host, PeerConnectionClient.PeerConnectionParameters params) {
    mListener = listener;
    pcParams = params;
    // Android globals must be initialized once before creating the factory.
    PeerConnectionFactory.initializeAndroidGlobals(listener, true, true,
            params.videoCodecHwAcceleration);
    factory = new PeerConnectionFactory();
    MessageHandler messageHandler = new MessageHandler();

    try {
        client = IO.socket(host);
    } catch (URISyntaxException e) {
        // Fail fast: previously this exception was only printed, leaving
        // "client" null and causing an uninformative NPE at client.on(...).
        throw new IllegalArgumentException("Invalid signaling host URI: " + host, e);
    }
    client.on("id", messageHandler.onId);
    client.on("message", messageHandler.onMessage);
    client.connect();

    iceServers.add(new PeerConnection.IceServer("stun:23.21.150.121"));
    iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

    // Offer to receive both media kinds; DTLS-SRTP is needed for browser interop.
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
}
项目:webrtc-android    文件:WebRtcClient.java   
/** Builds the local media stream ("ARDAMS") and hands it to the listener. */
private void setCamera(){
    localMS = factory.createLocalMediaStream("ARDAMS");
    if (pcParams.videoCallEnabled) {
        // Constrain the capture to the configured resolution and a fixed frame rate.
        String fps = Integer.toString(pcParams.videoFps);
        MediaConstraints videoConstraints = new MediaConstraints();
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", Integer.toString(pcParams.videoHeight)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", Integer.toString(pcParams.videoWidth)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", fps));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minFrameRate", fps));

        videoSource = factory.createVideoSource(getVideoCapturer(), videoConstraints);
        localMS.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource));
    }

    // Audio uses default (unconstrained) capture settings.
    AudioSource micSource = factory.createAudioSource(new MediaConstraints());
    localMS.addTrack(factory.createAudioTrack("ARDAMSa0", micSource));

    mListener.onLocalStream(localMS);
}
项目:respoke-sdk-android    文件:RespokeCall.java   
/**
 * Configures audio routing and attaches the local audio (and, unless
 * audio-only, video) stream to the peer connection.
 */
private void addLocalStreams(Context context) {
    // Route audio: wired headset => in-call mode; otherwise speakerphone.
    AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    // TODO(fischman): figure out how to do this Right(tm) and remove the suppression.
    @SuppressWarnings("deprecation")
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);

    localStream = peerConnectionFactory.createLocalMediaStream("ARDAMS");

    // Video is skipped entirely for audio-only calls.
    if (!audioOnly) {
        VideoCapturer capturer = getVideoCapturer();
        MediaConstraints videoConstraints = new MediaConstraints();
        videoSource = peerConnectionFactory.createVideoSource(capturer, videoConstraints);
        VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("ARDAMSv0", videoSource);
        videoTrack.addRenderer(new VideoRenderer(localRender));
        localStream.addTrack(videoTrack);
    }

    // Audio track with default (unconstrained) capture settings.
    localStream.addTrack(peerConnectionFactory.createAudioTrack("ARDAMSa0", peerConnectionFactory.createAudioSource(new MediaConstraints())));

    peerConnection.addStream(localStream);
}
项目:webrtc-workshop    文件:PeerConnectionWrapper.java   
/**
 * Creates the peer connection plus a local audio-only stream.
 *
 * @return true on success, false if the Android globals failed to initialize
 */
private boolean createPeerConnection(Context context) {
    // Guard clause: nothing can be created until the native globals are up.
    if (!PeerConnectionFactory.initializeAndroidGlobals(context)) {
        return false;
    }

    PeerConnectionFactory factory = new PeerConnectionFactory();

    List<IceServer> iceServers = new ArrayList<IceServer>();
    iceServers.add(new IceServer("stun:stun.l.google.com:19302"));
    // For TURN servers the format would be:
    // new IceServer("turn:url", user, password)

    // DTLS off + RtpDataChannels on, as the workshop's data-channel setup expects.
    MediaConstraints mediaConstraints = new MediaConstraints();
    mediaConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "false"));
    mediaConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
    peerConnection = factory.createPeerConnection(iceServers, mediaConstraints, this);

    localStream = factory.createLocalMediaStream("WEBRTC_WORKSHOP_NS");
    localStream.addTrack(factory.createAudioTrack("WEBRTC_WORKSHOP_NSa1",
            factory.createAudioSource(new MediaConstraints())));
    peerConnection.addStream(localStream, new MediaConstraints());
    return true;
}
项目:matrix-android-sdk    文件:MXWebRtcCall.java   
/**
 * Create the local audio stack
 */
/**
 * Create the local audio stack.
 */
private void createAudioTrack() {
    Log.d(LOG_TAG, "createAudioTrack");

    MediaConstraints audioConstraints = new MediaConstraints();

    // Enable the full set of audio-processing filters to avoid echo;
    // mirroring is the only one explicitly disabled.
    final String[][] audioFilters = {
            {"googEchoCancellation", "true"},
            {"googEchoCancellation2", "true"},
            {"googDAEchoCancellation", "true"},
            {"googTypingNoiseDetection", "true"},
            {"googAutoGainControl", "true"},
            {"googAutoGainControl2", "true"},
            {"googNoiseSuppression", "true"},
            {"googNoiseSuppression2", "true"},
            {"googAudioMirroring", "false"},
            {"googHighpassFilter", "true"},
    };
    for (String[] filter : audioFilters) {
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(filter[0], filter[1]));
    }

    mAudioSource = mPeerConnectionFactory.createAudioSource(audioConstraints);
    mLocalAudioTrack = mPeerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, mAudioSource);
}
项目:PeSanKita-android    文件:PeerConnectionWrapper.java   
/**
 * Builds a peer connection with STUN+TURN servers and attaches a local
 * media stream whose tracks start disabled.
 */
public PeerConnectionWrapper(@NonNull Context context,
                             @NonNull PeerConnectionFactory factory,
                             @NonNull PeerConnection.Observer observer,
                             @NonNull VideoRenderer.Callbacks localRenderer,
                             @NonNull List<PeerConnection.IceServer> turnServers,
                             boolean hideIp)
{
  // The fixed STUN server always goes first, ahead of any provided TURN servers.
  List<PeerConnection.IceServer> iceServers = new LinkedList<>();
  iceServers.add(STUN_SERVER);
  iceServers.addAll(turnServers);

  MediaConstraints                constraints      = new MediaConstraints();
  MediaConstraints                audioConstraints = new MediaConstraints();
  PeerConnection.RTCConfiguration configuration    = new PeerConnection.RTCConfiguration(iceServers);

  configuration.bundlePolicy  = PeerConnection.BundlePolicy.MAXBUNDLE;
  configuration.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;

  // Relay-only ICE keeps the local IP address out of the candidate exchange.
  if (hideIp) {
    configuration.iceTransportsType = PeerConnection.IceTransportsType.RELAY;
  }

  constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  audioConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

  this.peerConnection = factory.createPeerConnection(configuration, constraints, observer);
  this.videoCapturer  = createVideoCapturer(context);

  // Tracks start disabled; presumably enabled elsewhere once the call is
  // accepted — confirm with this class's call-state handling.
  MediaStream mediaStream = factory.createLocalMediaStream("ARDAMS");
  this.audioSource = factory.createAudioSource(audioConstraints);
  this.audioTrack  = factory.createAudioTrack("ARDAMSa0", audioSource);
  this.audioTrack.setEnabled(false);
  mediaStream.addTrack(audioTrack);

  // Video is optional: createVideoCapturer may yield null (e.g. no camera).
  if (videoCapturer != null) {
    this.videoSource = factory.createVideoSource(videoCapturer);
    this.videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);

    this.videoTrack.addRenderer(new VideoRenderer(localRenderer));
    this.videoTrack.setEnabled(false);
    mediaStream.addTrack(videoTrack);
  } else {
    this.videoSource = null;
    this.videoTrack  = null;
  }

  this.peerConnection.addStream(mediaStream);
}
项目:newwebrtc    文件:PnSignalingParams.java   
/**
 * Full constructor; any null argument is replaced by the library default.
 */
public PnSignalingParams(
        List<PeerConnection.IceServer> iceServers,
        MediaConstraints pcConstraints,
        MediaConstraints videoConstraints,
        MediaConstraints audioConstraints) {
    this.iceServers       = (iceServers != null)       ? iceServers       : defaultIceServers();
    this.pcConstraints    = (pcConstraints != null)    ? pcConstraints    : defaultPcConstraints();
    this.videoConstraints = (videoConstraints != null) ? videoConstraints : defaultVideoConstraints();
    this.audioConstraints = (audioConstraints != null) ? audioConstraints : defaultAudioConstraints();
}
项目:newwebrtc    文件:PnSignalingParams.java   
/**
 * Default Ice Servers, but specified parameters.
 * @param pcConstraints
 * @param videoConstraints
 * @param audioConstraints
 */
/**
 * Default ICE servers, caller-specified constraints; any null constraint
 * falls back to the library default.
 */
public PnSignalingParams(
        MediaConstraints pcConstraints,
        MediaConstraints videoConstraints,
        MediaConstraints audioConstraints) {
    this.iceServers       = PnSignalingParams.defaultIceServers();
    this.pcConstraints    = (pcConstraints != null)    ? pcConstraints    : defaultPcConstraints();
    this.videoConstraints = (videoConstraints != null) ? videoConstraints : defaultVideoConstraints();
    this.audioConstraints = (audioConstraints != null) ? audioConstraints : defaultAudioConstraints();
}
项目:newwebrtc    文件:PnSignalingParams.java   
/**
 * The default parameters for media constraints. Might have to tweak in future.
 * @return default parameters
 */
/**
 * All-defaults instance: default ICE servers and default pc/video/audio
 * media constraints. Might have to tweak in future.
 *
 * @return default parameters
 */
public static PnSignalingParams defaultInstance() {
    List<PeerConnection.IceServer> servers = PnSignalingParams.defaultIceServers();
    return new PnSignalingParams(
            servers,
            PnSignalingParams.defaultPcConstraints(),
            PnSignalingParams.defaultVideoConstraints(),
            PnSignalingParams.defaultAudioConstraints());
}
项目:newwebrtc    文件:PnSignalingParams.java   
/** Default peer-connection constraints: receive both media kinds, require DTLS-SRTP. */
private static MediaConstraints defaultPcConstraints(){
    MediaConstraints constraints = new MediaConstraints();
    constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    for (String key : new String[] {"OfferToReceiveAudio", "OfferToReceiveVideo"}) {
        constraints.mandatory.add(new MediaConstraints.KeyValuePair(key, "true"));
    }
    return constraints;
}
项目:newwebrtc    文件:PnSignalingParams.java   
/** Default video constraints: resolution bounded between 640x480 and 1280x720. */
private static MediaConstraints defaultVideoConstraints(){
    MediaConstraints constraints = new MediaConstraints();
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "1280"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "720"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("minWidth", "640"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("minHeight", "480"));
    return constraints;
}
项目:Cable-Android    文件:PeerConnectionWrapper.java   
/**
 * Builds a peer connection with STUN+TURN servers and attaches a local
 * media stream whose tracks start disabled.
 */
public PeerConnectionWrapper(@NonNull Context context,
                             @NonNull PeerConnectionFactory factory,
                             @NonNull PeerConnection.Observer observer,
                             @NonNull VideoRenderer.Callbacks localRenderer,
                             @NonNull List<PeerConnection.IceServer> turnServers,
                             boolean hideIp)
{
  // The fixed STUN server always goes first, ahead of any provided TURN servers.
  List<PeerConnection.IceServer> iceServers = new LinkedList<>();
  iceServers.add(STUN_SERVER);
  iceServers.addAll(turnServers);

  MediaConstraints                constraints      = new MediaConstraints();
  MediaConstraints                audioConstraints = new MediaConstraints();
  PeerConnection.RTCConfiguration configuration    = new PeerConnection.RTCConfiguration(iceServers);

  configuration.bundlePolicy  = PeerConnection.BundlePolicy.MAXBUNDLE;
  configuration.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;

  // Relay-only ICE keeps the local IP address out of the candidate exchange.
  if (hideIp) {
    configuration.iceTransportsType = PeerConnection.IceTransportsType.RELAY;
  }

  constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  audioConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));

  this.peerConnection = factory.createPeerConnection(configuration, constraints, observer);
  this.videoCapturer  = createVideoCapturer(context);

  // Tracks start disabled; presumably enabled elsewhere once the call is
  // accepted — confirm with this class's call-state handling.
  MediaStream mediaStream = factory.createLocalMediaStream("ARDAMS");
  this.audioSource = factory.createAudioSource(audioConstraints);
  this.audioTrack  = factory.createAudioTrack("ARDAMSa0", audioSource);
  this.audioTrack.setEnabled(false);
  mediaStream.addTrack(audioTrack);

  // Video is optional: createVideoCapturer may yield null (e.g. no camera).
  if (videoCapturer != null) {
    this.videoSource = factory.createVideoSource(videoCapturer);
    this.videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);

    this.videoTrack.addRenderer(new VideoRenderer(localRenderer));
    this.videoTrack.setEnabled(false);
    mediaStream.addTrack(videoTrack);
  } else {
    this.videoSource = null;
    this.videoTrack  = null;
  }

  this.peerConnection.addStream(mediaStream);
}
项目:webrtc-android-codelab    文件:MainActivity.java   
/**
 * Initializes the WebRTC stack, creates local audio/video tracks, and starts
 * rendering the local camera preview.
 */
public void start() {
    start.setEnabled(false);
    call.setEnabled(true);
    // Initialize PeerConnectionFactory globals.
    // Params are context, initAudio, initVideo and videoCodecHwAcceleration.
    PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true);

    // Create a new PeerConnectionFactory instance.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    peerConnectionFactory = new PeerConnectionFactory(options);


    // Create a VideoCapturer instance with camera-event callbacks attached.
    VideoCapturer videoCapturerAndroid = getVideoCapturer(new CustomCameraEventsHandler());

    // Create MediaConstraints for audio and video.
    // NOTE(review): videoConstraints is assigned but never passed to
    // createVideoSource below — confirm whether it is used elsewhere or dead.
    audioConstraints = new MediaConstraints();
    videoConstraints = new MediaConstraints();

    // Create a VideoSource instance
    videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
    localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    // Create an AudioSource instance
    audioSource = peerConnectionFactory.createAudioSource(audioConstraints);
    localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);
    localVideoView.setVisibility(View.VISIBLE);

    // Create a VideoRenderer based on the SurfaceViewRenderer instance
    localRenderer = new VideoRenderer(localVideoView);
    // Attach the renderer to the local VideoTrack to show the camera preview.
    localVideoTrack.addRenderer(localRenderer);

}
项目:webrtc-android    文件:WebRtcClient.java   
/** Builds the local media stream ("ARDAMS") and hands it to the listener. */
private void setCamera(){
    localMS = factory.createLocalMediaStream("ARDAMS");
    if (pcParams.videoCallEnabled) {
        // Constrain the capture to the configured resolution and a fixed frame rate.
        String fps = Integer.toString(pcParams.videoFps);
        MediaConstraints videoConstraints = new MediaConstraints();
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", Integer.toString(pcParams.videoHeight)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", Integer.toString(pcParams.videoWidth)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", fps));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minFrameRate", fps));

        videoSource = factory.createVideoSource(getVideoCapturer(), videoConstraints);
        localMS.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource));
    }

    // Audio uses default (unconstrained) capture settings.
    AudioSource micSource = factory.createAudioSource(new MediaConstraints());
    localMS.addTrack(factory.createAudioTrack("ARDAMSa0", micSource));

    mListener.onLocalStream(localMS);
}
项目:webrtc-android    文件:WebRtcClient.java   
/**
 * Connects to the "/hello" socket.io namespace on {@code host} and prepares
 * the WebRTC factory and default peer-connection constraints.
 *
 * @param listener callback sink for connection/stream events
 * @param host     socket.io signaling server URI
 * @param params   peer connection parameters (codec HW acceleration, etc.)
 * @throws IllegalArgumentException if {@code host} is not a valid URI
 */
public WebRtcClient(RtcListener listener, String host, PeerConnectionParameters params) {
        mListener = listener;
        pcParams = params;
        // Android globals must be initialized once before creating the factory.
        PeerConnectionFactory.initializeAndroidGlobals(listener, true, true,
                params.videoCodecHwAcceleration);
        factory = new PeerConnectionFactory();
        MessageHandler messageHandler = new MessageHandler();
        Log.d(TAG, "WebRtcClient..host:" + host);
        try {
            Manager man = new Manager(new URI(host));
            // Connect to the "/hello" namespace rather than the default one.
            client = man.socket("/hello");
        } catch (URISyntaxException e) {
            Log.d(TAG, "WebRtcClient..exception");
            // Fail fast: previously this exception was only printed, leaving
            // "client" null and causing an uninformative NPE at client.on(...).
            throw new IllegalArgumentException("Invalid signaling host URI: " + host, e);
        }
        client.on("id", messageHandler.onId);
        client.on("message", messageHandler.onMessage);
        client.connect();

        iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

        // Offer to receive both media kinds; DTLS-SRTP is needed for browser interop.
        pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
        pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
        pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    }
项目:viska-android    文件:CallingActivity.java   
/**
 * Answers the incoming call: attaches an audio-only local stream and
 * generates the SDP answer.
 */
public void onAnswerButtonClicked(final View view) {
  // Move the UI into the negotiating state before touching WebRTC.
  progressState.changeValue(ProgressState.NEGOTIATING);
  showHangButton();

  // Build an audio-only local stream; stream/track IDs just need to be unique.
  final PeerConnectionFactory factory = ((Application) getApplication()).getWebRtcFactory();
  final MediaStream stream = factory.createLocalMediaStream(UUID.randomUUID().toString());
  stream.addTrack(factory.createAudioTrack(
      UUID.randomUUID().toString(),
      factory.createAudioSource(CONSTRAINTS)
  ));
  peerConnection.addStream(stream);
  // Answer with no additional SDP constraints.
  peerConnection.createAnswer(sdpObserver, new MediaConstraints());
}
项目:DeviceConnect-Android    文件:MediaConnection.java   
/**
 * Creates a PeerConnection.
 * @param config configuration of PeerConnection
 */
/**
 * Creates a PeerConnection with empty media constraints.
 *
 * @param config configuration of PeerConnection (ICE server list)
 * @throws RuntimeException if the underlying factory call fails
 */
private void createPeerConnection(final List<PeerConnection.IceServer> config) {
    final MediaConstraints emptyConstraints = new MediaConstraints();
    try {
        mPeerConnection = mFactory.createPeerConnection(config, emptyConstraints, mObserver);
    } catch (Exception e) {
        if (BuildConfig.DEBUG) {
            Log.e(TAG, "@@@ Failed to create PeerConnection.", e);
        }
        // Propagate with the original cause preserved.
        throw new RuntimeException(e);
    }
}
项目:DeviceConnect-Android    文件:MediaConnection.java   
/**
 * Creates the MediaConstraints of SessionDescription.
 * @return MediaConstraints
 */
/**
 * Creates the MediaConstraints for SessionDescription negotiation:
 * offer to receive both audio and video.
 *
 * @return MediaConstraints
 */
private MediaConstraints createSDPMediaConstraints() {
    MediaConstraints constraints = new MediaConstraints();
    for (String key : new String[] {"OfferToReceiveAudio", "OfferToReceiveVideo"}) {
        constraints.mandatory.add(new MediaConstraints.KeyValuePair(key, "true"));
    }
    return constraints;
}
项目:webrtcpeer-android    文件:NBMPeerConnection.java   
/**
 * Stores the constraints and creates an SDP offer, marking this side as the
 * initiator. A no-op when the peer connection is gone.
 */
public void createOffer(MediaConstraints sdpMediaConstraints) {
    this.sdpMediaConstraints = sdpMediaConstraints;
    if (pc == null) { // original also had an "isError" check, left disabled
        return;
    }
    Log.d(TAG, "PC Create OFFER");
    isInitiator = true;
    pc.createOffer(this, this.sdpMediaConstraints);
}
项目:webrtcpeer-android    文件:NBMPeerConnection.java   
/**
 * Creates an SDP answer on the executor thread, marking this side as the
 * non-initiator. A no-op when the peer connection is gone.
 */
public void createAnswer(final MediaConstraints sdpMediaConstraints) {
    executor.execute(new Runnable() {
        @Override
        public void run() {
            if (pc == null) { // original also had an "isError" check, left disabled
                return;
            }
            Log.d(TAG, "PC create ANSWER");
            isInitiator = false;
            pc.createAnswer(NBMPeerConnection.this, sdpMediaConstraints);
        }
    });
}
项目:webrtcpeer-android    文件:PeerConnectionResourceManager.java   
/**
 * Creates a wrapped peer connection for the given connection id and
 * registers it in the connection map.
 */
NBMPeerConnection createPeerConnection( SignalingParameters signalingParameters,
                                        MediaConstraints pcConstraints,
                                        String connectionId) {

    Log.d(TAG, "Create peer connection.");
    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());

    // TCP candidates are only useful when connecting to a server that supports ICE-TCP.
    PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
    // ECDSA certificates are faster to generate than RSA.
    rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
    //rtcConfig.iceServers IceServer
    NBMPeerConnection connectionWrapper = new NBMPeerConnection(connectionId, preferIsac, videoCallEnabled, preferH264, executor, peerConnectionParameters);
    PeerConnection peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, connectionWrapper);

    connectionWrapper.setPc(peerConnection);
    connections.put(connectionId, connectionWrapper);

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT), Logging.Severity.LS_INFO);

    Log.d(TAG, "Peer connection created.");
    return connectionWrapper;
}
项目:UMA-AndroidWebRTC    文件:WebRTCCliente.java   
/**
 * Creates a peer connection for the given remote id/endpoint and attaches
 * the shared local media stream.
 */
public Peer(String id, int endPoint) {
    Log.d(TAG, "NUEVO HAMIJO new Peer: " + id + " " + endPoint);
  this.pc = factory.createPeerConnection(iceServers, pcConstraints, this);
  this.id = id;
  this.endPoint = endPoint;

  // Attach the shared local media stream (no extra constraints).
  pc.addStream(lMS, new MediaConstraints());

  mListener.onStatusChanged("Conectando al Servidor WebRTC");
    // Open a data channel (currently disabled).
// createDataChannelToRegressionTestBug2302(pc);

}
项目:UMA-AndroidWebRTC    文件:WebRTCCliente.java   
/**
 * Connects to the signaling WebSocket server and prepares ICE servers and
 * default peer-connection constraints (receive video only, no audio).
 */
public WebRTCCliente(RTCListener listener, String host) {
  mListener = listener;
  factory = new PeerConnectionFactory();

  SocketIOClient.connect(host, new ConnectCallback() {

    @Override
    public void onConnectCompleted(Exception ex, SocketIOClient socket) {
      if (ex != null) {
        mListener.onStatusChanged("No se puedo conectar al Servidor WebSocket en la direccion: " + ex.getMessage());
        //Log.e(TAG,"WebRTCCliente connect failed: "+ex.getMessage());
        return;
      }
      mListener.onStatusChanged("Conectado al Servidor WebSocket.");

      //  Log.d(TAG, "Conectado al Servidor WebRTC.");
      MainActivity.conectado_servidor = true;
      client = socket;

      // specify which events you are interested in receiving
      client.addListener("id", messageHandler);
      client.addListener("message", messageHandler);
    }
  }, new Handler());

  iceServers.add(new PeerConnection.IceServer("stun:23.21.150.121"));
  iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

    // Enable the use of data channels (disabled since 2014-09-30):
     //  pcConstraints.optional.add(new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
    //
  // Receive video only: audio reception is deliberately declined here.
  pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "false"));
  pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));

}
项目:UMA-AndroidWebRTC    文件:WebRTCCliente.java   
/**
 * Creates the local media stream. Video/audio track creation is currently
 * disabled, so only an empty stream named "ARDAMS" is produced; the size
 * constraints are built but unused while the capturer lines stay commented.
 */
public void setCamera(String cameraFacing, String height, String width){
  MediaConstraints videoConstraints = new MediaConstraints();
  videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", height));
 videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", width));

  //VideoSource videoSource = factory.createVideoSource(getVideoCapturer(cameraFacing), videoConstraints);
  lMS = factory.createLocalMediaStream("ARDAMS");
 // lMS.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource)); // Disabled: do not send video from the phone to the server
 // lMS.addTrack(factory.createAudioTrack("ARDAMSa0"));  // Disabled: do not send audio from the phone to the server
 // mListener.onLocalStream(lMS);
}
项目:actor-platform    文件:AndroidPeerConnection.java   
/**
 * Builds SDP constraints that offer to receive both audio and video.
 */
@NonNull
public MediaConstraints getMediaConstraints() {
    MediaConstraints constraints = new MediaConstraints();
    for (String key : new String[] {"OfferToReceiveAudio", "OfferToReceiveVideo"}) {
        constraints.mandatory.add(new MediaConstraints.KeyValuePair(key, "true"));
    }
    return constraints;
}
项目:actor-platform    文件:AndroidAudioSource.java   
/**
 * Creates a reference-counted audio source with noise suppression and
 * echo cancellation requested.
 */
public AndroidAudioSource() {
    this.count = 1;
    this.isReleased = false;
    MediaConstraints voiceConstraints = new MediaConstraints();
    voiceConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
    voiceConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googEchoCancellation", "true"));
    this.audioSource = AndroidWebRTCRuntimeProvider.FACTORY.createAudioSource(voiceConstraints);
}
项目:android-webrtc-api    文件:PnSignalingParams.java   
/**
 * Full constructor; any null argument is replaced by the library default.
 */
public PnSignalingParams(
        List<PeerConnection.IceServer> iceServers,
        MediaConstraints pcConstraints,
        MediaConstraints videoConstraints,
        MediaConstraints audioConstraints) {
    this.iceServers       = (iceServers != null)       ? iceServers       : defaultIceServers();
    this.pcConstraints    = (pcConstraints != null)    ? pcConstraints    : defaultPcConstraints();
    this.videoConstraints = (videoConstraints != null) ? videoConstraints : defaultVideoConstraints();
    this.audioConstraints = (audioConstraints != null) ? audioConstraints : defaultAudioConstraints();
}
项目:android-webrtc-api    文件:PnSignalingParams.java   
/**
 * Default Ice Servers, but specified parameters.
 * @param pcConstraints
 * @param videoConstraints
 * @param audioConstraints
 */
/**
 * Default ICE servers, caller-specified constraints; any null constraint
 * falls back to the library default.
 */
public PnSignalingParams(
        MediaConstraints pcConstraints,
        MediaConstraints videoConstraints,
        MediaConstraints audioConstraints) {
    this.iceServers       = PnSignalingParams.defaultIceServers();
    this.pcConstraints    = (pcConstraints != null)    ? pcConstraints    : defaultPcConstraints();
    this.videoConstraints = (videoConstraints != null) ? videoConstraints : defaultVideoConstraints();
    this.audioConstraints = (audioConstraints != null) ? audioConstraints : defaultAudioConstraints();
}
项目:android-webrtc-api    文件:PnSignalingParams.java   
/**
 * The default parameters for media constraints. Might have to tweak in future.
 * @return default parameters
 */
/**
 * All-defaults instance: default ICE servers and default pc/video/audio
 * media constraints. Might have to tweak in future.
 *
 * @return default parameters
 */
public static PnSignalingParams defaultInstance() {
    List<PeerConnection.IceServer> servers = PnSignalingParams.defaultIceServers();
    return new PnSignalingParams(
            servers,
            PnSignalingParams.defaultPcConstraints(),
            PnSignalingParams.defaultVideoConstraints(),
            PnSignalingParams.defaultAudioConstraints());
}
项目:android-webrtc-api    文件:PnSignalingParams.java   
/** Default peer-connection constraints: receive both media kinds, require DTLS-SRTP. */
private static MediaConstraints defaultPcConstraints(){
    MediaConstraints constraints = new MediaConstraints();
    constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    for (String key : new String[] {"OfferToReceiveAudio", "OfferToReceiveVideo"}) {
        constraints.mandatory.add(new MediaConstraints.KeyValuePair(key, "true"));
    }
    return constraints;
}
项目:android-webrtc-api    文件:PnSignalingParams.java   
/** Default video constraints: resolution bounded between 640x480 and 1280x720. */
private static MediaConstraints defaultVideoConstraints(){
    MediaConstraints constraints = new MediaConstraints();
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "1280"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "720"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("minWidth", "640"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("minHeight", "480"));
    return constraints;
}
项目:respoke-sdk-android    文件:RespokeCall.java   
/**
 * Creates the SDP offer. A direct connection carries data only; otherwise
 * audio is always requested and video unless the call is audio-only.
 */
private void createOffer() {
    String receiveAudio = directConnectionOnly ? "false" : "true";
    String receiveVideo = (directConnectionOnly || audioOnly) ? "false" : "true";

    MediaConstraints sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(
            new MediaConstraints.KeyValuePair("OfferToReceiveAudio", receiveAudio));
    sdpMediaConstraints.mandatory.add(
            new MediaConstraints.KeyValuePair("OfferToReceiveVideo", receiveVideo));

    peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
}
项目:respoke-sdk-android    文件:RespokeCall.java   
/**
 * Called when a local or remote description was applied successfully.
 * Drives the offer/answer state machine on the main thread.
 */
@Override public void onSetSuccess() {
    new Handler(Looper.getMainLooper()).post(new Runnable() {
        public void run() {
            if (isActive()) {
                Log.d(TAG, "onSuccess(Set SDP)");
                if (caller) {
                    if (peerConnection.getRemoteDescription() != null) {
                        // We've set our local offer and received & set the remote
                        // answer, so drain candidates.
                        drainRemoteCandidates();
                    }
                } else {
                    if (peerConnection.getLocalDescription() == null) {
                        // We just set the remote offer, time to create our answer.
                        MediaConstraints sdpMediaConstraints = new MediaConstraints();
                        sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
                                "OfferToReceiveAudio", "true"));
                        sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
                                "OfferToReceiveVideo", audioOnly ? "false" : "true"));

                        peerConnection.createAnswer(SDPObserver.this, sdpMediaConstraints);
                    } else {
                        // Our answer is already set; only candidates remain.
                        drainRemoteCandidates();
                    }
                }
            }
        }
    });
}
项目:WebRTCDemo    文件:AppRTCClient.java   
/**
 * Plain value holder for the signaling parameters fetched from the
 * AppRTC/GAE server; fields are assigned verbatim.
 */
public AppRTCSignalingParameters(
    List<PeerConnection.IceServer> iceServers,
    String gaeBaseHref, String channelToken, String postMessageUrl,
    boolean initiator, MediaConstraints pcConstraints,
    MediaConstraints videoConstraints, MediaConstraints audioConstraints) {
  this.iceServers = iceServers;
  this.gaeBaseHref = gaeBaseHref;
  this.channelToken = channelToken;
  this.postMessageUrl = postMessageUrl;
  this.initiator = initiator;
  this.pcConstraints = pcConstraints;
  this.videoConstraints = videoConstraints;
  this.audioConstraints = audioConstraints;
}
项目:webrtc-workshop    文件:PeerConnectionWrapper.java   
/**
 * Creates the peer connection and, on success, generates an audio-only
 * SDP offer; results are reported through the supplied callbacks.
 */
public void createOffer(Context context, final CreateOfferCallback createOfferCallback,
        IceCandidateCallback iceCandidateCallback, ConnectionStateCallback connectionStateCallback) {
    this.createOfferCallback = createOfferCallback;
    this.iceCandidateCallback = iceCandidateCallback;
    this.connectionStateCallback = connectionStateCallback;

    // Silently does nothing if peer-connection setup fails.
    if (createPeerConnection(context)) {
        // Audio-only offer: video reception is explicitly declined.
        MediaConstraints offerConstraints = new MediaConstraints();
        offerConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
        offerConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
        peerConnection.createOffer(new OfferObserver(), offerConstraints);
    }
}
项目:webrtc-workshop    文件:PeerConnectionWrapper.java   
/**
 * After a remote offer is applied, creates an audio-only answer.
 * A non-null local description means the answer was already produced.
 */
public void onSetSuccess() {
    if (peerConnection.getLocalDescription() != null) {
        return;
    }
    MediaConstraints answerConstraints = new MediaConstraints();
    answerConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    answerConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
    peerConnection.createAnswer(new AnswerObserver(), answerConstraints);
}
项目:apprtc-android    文件:AppRTCClient.java   
/**
 * Plain value holder for the signaling parameters fetched from the
 * AppRTC/GAE server; fields are assigned verbatim.
 */
public AppRTCSignalingParameters(
    List<PeerConnection.IceServer> iceServers,
    String gaeBaseHref, String channelToken, String postMessageUrl,
    boolean initiator, MediaConstraints pcConstraints,
    MediaConstraints videoConstraints, MediaConstraints audioConstraints) {
  this.iceServers = iceServers;
  this.gaeBaseHref = gaeBaseHref;
  this.channelToken = channelToken;
  this.postMessageUrl = postMessageUrl;
  this.initiator = initiator;
  this.pcConstraints = pcConstraints;
  this.videoConstraints = videoConstraints;
  this.audioConstraints = audioConstraints;
}
项目:licodeAndroidClient    文件:StreamDescription.java   
/**
 * Marks this stream as local and starts negotiation by creating an offer
 * that requests both audio and video.
 */
public void initLocal(PeerConnection pc, SdpObserver sdpObserver) {
    mLocal = true;
    mState = StreamState.LOCAL;
    this.pc = pc;
    mSdpConstraints = new MediaConstraints();
    for (String key : new String[] {"OfferToReceiveAudio", "OfferToReceiveVideo"}) {
        mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair(key, "true"));
    }
    pc.createOffer(sdpObserver, mSdpConstraints);
}
项目:licodeAndroidClient    文件:StreamDescription.java   
/**
 * Marks this stream as remote (opening) and starts negotiation.
 */
public void initRemote(PeerConnection pc, SdpObserver sdpObserver) {
    mLocal = false;
    mState = StreamState.OPENING;
    this.pc = pc;
    mSdpConstraints = new MediaConstraints();
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "OfferToReceiveAudio", "true"));
    mSdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
            "OfferToReceiveVideo", "true"));
    // NOTE(review): createOffer (not createAnswer) for a remote stream looks
    // like the Licode/Erizo subscribe flow where the client always sends the
    // offer — confirm against the signaling server's expectations.
    pc.createOffer(sdpObserver, mSdpConstraints);
}