Java class org.webrtc.VideoRenderer.I420Frame example source code

Project: MediaCodecTest    File: VideoRendererGui.java
@Override
public void setSize(final int width, final int height) {
  Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
      width + " x " + height);
  videoWidth = width;
  videoHeight = height;
  int[] strides = { width, width / 2, width / 2 };
  // Frame re-allocation needs to be synchronized with copying the
  // frame to textures in draw() to avoid re-allocating the frame
  // while it is being copied.
  synchronized (frameToRenderQueue) {
    // Clear rendering queue.
    frameToRenderQueue.poll();
    // Re-allocate / allocate the frame.
    yuvFrameToRender = new I420Frame(width, height, strides, null);
    textureFrameToRender = new I420Frame(width, height, null, -1);
    updateTextureProperties = true;
  }
}
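In I420, the Y plane is width × height and the U and V planes are each (width / 2) × (height / 2), which is exactly why the stride array above is { width, width / 2, width / 2 }. A minimal sketch, not taken from any of the projects here, that allocates tightly packed backing planes for such a frame:

// Minimal sketch: allocate direct buffers for a tightly packed I420 frame
// (stride == plane width). Class and method names are illustrative only.
import java.nio.ByteBuffer;

public final class I420Planes {
  public static ByteBuffer[] allocate(int width, int height) {
    int[] strides = { width, width / 2, width / 2 };
    int[] heights = { height, height / 2, height / 2 };
    ByteBuffer[] planes = new ByteBuffer[3];
    for (int i = 0; i < 3; ++i) {
      // Direct buffers can later be handed to GLES20.glTexImage2D
      // without an extra copy.
      planes[i] = ByteBuffer.allocateDirect(strides[i] * heights[i]);
    }
    return planes;
  }
}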
Project: droidkit-webrtc    File: VideoRendererGui.java
private YuvImageRenderer(
    GLSurfaceView surface,
    int x, int y, int width, int height) {
  Log.v(TAG, "YuvImageRenderer.Create");
  this.surface = surface;
  frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
  // Create texture vertices.
  float xLeft = (x - 50) / 50.0f;
  float yTop = (50 - y) / 50.0f;
  float xRight = Math.min(1.0f, (x + width - 50) / 50.0f);
  float yBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
  float[] textureVerticesFloat = new float[] {
      xLeft, yTop,
      xLeft, yBottom,
      xRight, yTop,
      xRight, yBottom
  };
  textureVertices = directNativeFloatBuffer(textureVerticesFloat);
}
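The (x, y, width, height) arguments are percentages of the surface, 0..100 with the origin at the top-left, mapped into GL clip space where both axes run -1..1 and y points up. A hypothetical helper that makes the mapping explicit:

// Hypothetical helper: map a rectangle given in percent of the view (0..100,
// origin top-left) to OpenGL clip-space coordinates (-1..1, y pointing up).
static float[] percentRectToClipSpace(int x, int y, int width, int height) {
  float left = (x - 50) / 50.0f;
  float top = (50 - y) / 50.0f;
  float right = Math.min(1.0f, (x + width - 50) / 50.0f);
  float bottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
  // Triangle-strip order: top-left, bottom-left, top-right, bottom-right.
  return new float[] { left, top, left, bottom, right, top, right, bottom };
}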
Project: appspotdemo-mono    File: VideoStreamsView.java
/** Queue |frame| to be uploaded. */
public void queueFrame(final Endpoint stream, I420Frame frame) {
  // Paying for the copy of the YUV data here allows CSC and painting time
  // to get spent on the render thread instead of the UI thread.
  abortUnless(FramePool.validateDimensions(frame), "Frame too large!");
  final I420Frame frameCopy = framePool.takeFrame(frame).copyFrom(frame);
  boolean needToScheduleRender;
  synchronized (framesToRender) {
    // A new render needs to be scheduled (via updateFrames()) iff there isn't
    // already a render scheduled, which is true iff framesToRender is empty.
    needToScheduleRender = framesToRender.isEmpty();
    I420Frame frameToDrop = framesToRender.put(stream, frameCopy);
    if (frameToDrop != null) {
      framePool.returnFrame(frameToDrop);
    }
  }
  if (needToScheduleRender) {
    queueEvent(new Runnable() {
        public void run() {
          updateFrames();
        }
      });
  }
}
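framesToRender holds at most one pending frame per endpoint, so a newer frame simply replaces (and returns to the pool) an older one that was never drawn. A sketch of the declarations the snippet assumes; the real project may declare them differently:

// Assumed declarations backing the snippet above (requires java.util.EnumMap).
public enum Endpoint { LOCAL, REMOTE }

private final EnumMap<Endpoint, I420Frame> framesToRender =
    new EnumMap<Endpoint, I420Frame>(Endpoint.class);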
Project: appspotdemo-mono    File: VideoStreamsView.java
private void updateFrames() {
  I420Frame localFrame = null;
  I420Frame remoteFrame = null;
  synchronized (framesToRender) {
    localFrame = framesToRender.remove(Endpoint.LOCAL);
    remoteFrame = framesToRender.remove(Endpoint.REMOTE);
  }
  if (localFrame != null) {
    texImage2D(localFrame, yuvTextures[0]);
    framePool.returnFrame(localFrame);
  }
  if (remoteFrame != null) {
    texImage2D(remoteFrame, yuvTextures[1]);
    framePool.returnFrame(remoteFrame);
  }
  abortUnless(localFrame != null || remoteFrame != null,
              "Nothing to render!");
  requestRender();
}
Project: AndroidRTC    File: CameraVideoCapturerTestFixtures.java
@Override
public void renderFrame(I420Frame frame) {
  synchronized (frameLock) {
    ++framesRendered;
    width = frame.rotatedWidth();
    height = frame.rotatedHeight();
    frameLock.notify();
  }
  VideoRenderer.renderFrameDone(frame);
}
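The notify() pairs with a wait loop on the same monitor, letting a test block until some number of frames has arrived. A sketch of the waiting side, assuming the frameLock and framesRendered fields used above:

// Sketch of the waiting counterpart to renderFrame() above.
public int waitForFramesRendered(int minFrames) throws InterruptedException {
  synchronized (frameLock) {
    while (framesRendered < minFrames) {
      frameLock.wait();
    }
    return framesRendered;
  }
}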
Project: AndroidRTC    File: CameraVideoCapturerTestFixtures.java
@Override
public void renderFrame(I420Frame frame) {
  synchronized (pendingFrames) {
    pendingFrames.add(frame);
    pendingFrames.notifyAll();
  }
}
Project: AndroidRTC    File: CameraVideoCapturerTestFixtures.java
public List<I420Frame> waitForPendingFrames() throws InterruptedException {
  Logging.d(TAG, "Waiting for pending frames");
  synchronized (pendingFrames) {
    while (pendingFrames.isEmpty()) {
      pendingFrames.wait();
    }
    return new ArrayList<I420Frame>(pendingFrames);
  }
}
Project: AndroidRTC    File: CameraVideoCapturerTestFixtures.java
public void returnBufferLateEndToEnd() throws InterruptedException {
  final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
  final VideoTrackWithRenderer videoTrackWithRenderer =
      createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
  // Wait for at least one frame that has not been returned.
  assertFalse(videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames().isEmpty());

  capturerInstance.capturer.stopCapture();

  // Dispose everything.
  disposeCapturer(capturerInstance);
  disposeVideoTrackWithRenderer(videoTrackWithRenderer);

  // Return the frame(s), on a different thread out of spite.
  final List<I420Frame> pendingFrames =
      videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
  final Thread returnThread = new Thread(new Runnable() {
    @Override
    public void run() {
      for (I420Frame frame : pendingFrames) {
        VideoRenderer.renderFrameDone(frame);
      }
    }
  });
  returnThread.start();
  returnThread.join();
}
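The test leans on the contract that every frame delivered to renderFrame() must eventually be released with VideoRenderer.renderFrameDone(), even after the capturer and track are disposed; otherwise the native buffer leaks. A renderer with several early-out drop paths can centralize the release; a minimal sketch:

// Minimal sketch: release a delivered frame exactly once on drop paths.
private static void dropFrame(I420Frame frame) {
  if (frame != null) {
    VideoRenderer.renderFrameDone(frame);  // hand the buffer back to native code
  }
}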
Project: UMA-AndroidWebRTC    File: VideoStreamsView.java
/** Queue |frame| to be uploaded. */
public void queueFrame(final int stream, I420Frame frame) {
  // Paying for the copy of the YUV data here allows CSC and painting time
  // to get spent on the render thread instead of the UI thread.
  abortUnless(FramePool.validateDimensions(frame), "Frame too large!");
  final I420Frame frameCopy = framePool.takeFrame(frame).copyFrom(frame);
  queueEvent(new Runnable() {
    public void run() {
      updateFrame(stream, frameCopy);
    }
  });
}
Project: UMA-AndroidWebRTC    File: VideoStreamsView.java
private void updateFrame(int stream, I420Frame frame) {
  int[] textures = yuvTextures[stream];
  texImage2D(frame, textures);
  framePool.returnFrame(frame);

  requestRender();
}
Project: UMA-AndroidWebRTC    File: VideoStreamsView.java
private void texImage2D(I420Frame frame, int[] textures) {
  for (int i = 0; i < 3; ++i) {
    ByteBuffer plane = frame.yuvPlanes[i];
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
    int w = i == 0 ? frame.width : frame.width / 2;
    int h = i == 0 ? frame.height : frame.height / 2;
    abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!=" + w);
    GLES20.glTexImage2D(
                        GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
                        GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, plane);
  }
  checkNoGLES2Error();
}
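Uploading the three planes as single-channel GL_LUMINANCE textures leaves the YUV-to-RGB conversion to the fragment shader. A hedged sketch of such a shader, using BT.601 full-range coefficients and uniform names of my own choosing rather than anything from the projects above:

// Hypothetical fragment shader consuming the three luminance textures bound
// in texImage2D() above as y_tex/u_tex/v_tex (BT.601 coefficients).
private static final String YUV_FRAGMENT_SHADER =
    "precision mediump float;\n"
    + "varying vec2 interp_tc;\n"
    + "uniform sampler2D y_tex;\n"
    + "uniform sampler2D u_tex;\n"
    + "uniform sampler2D v_tex;\n"
    + "void main() {\n"
    + "  float y = texture2D(y_tex, interp_tc).r;\n"
    + "  float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
    + "  float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
    + "  gl_FragColor = vec4(y + 1.403 * v,\n"
    + "                      y - 0.344 * u - 0.714 * v,\n"
    + "                      y + 1.770 * u, 1.0);\n"
    + "}\n";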
Project: UMA-AndroidWebRTC    File: FramePool.java
public void returnFrame(I420Frame frame) {
    long desc = summarizeFrameDimensions(frame);
    synchronized (availableFrames) {
        LinkedList<I420Frame> frames = availableFrames.get(desc);
        if (frames == null) {
            throw new IllegalArgumentException("Unexpected frame dimensions");
        }
        frames.add(frame);
    }
}
Project: UMA-AndroidWebRTC    File: FramePool.java
/** Validate that |frame| can be managed by the pool. */
public static boolean validateDimensions(I420Frame frame) {
    return frame.width < MAX_DIMENSION && frame.height < MAX_DIMENSION &&
            frame.yuvStrides[0] < MAX_DIMENSION &&
            frame.yuvStrides[1] < MAX_DIMENSION &&
            frame.yuvStrides[2] < MAX_DIMENSION;
}
Project: UMA-AndroidWebRTC    File: FramePool.java
private static long summarizeFrameDimensions(I420Frame frame) {
    long ret = frame.width;
    ret = ret * MAX_DIMENSION + frame.height;
    ret = ret * MAX_DIMENSION + frame.yuvStrides[0];
    ret = ret * MAX_DIMENSION + frame.yuvStrides[1];
    ret = ret * MAX_DIMENSION + frame.yuvStrides[2];
    return ret;
}
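summarizeFrameDimensions() packs the five values into one long as base-MAX_DIMENSION digits; validateDimensions() guarantees every digit is in range, so equal keys imply identical dimensions (MAX_DIMENSION^5 must stay below 2^63, which holds for, say, MAX_DIMENSION = 4096, giving 2^60). The matching takeFrame() presumably pops a pooled frame under the same key or allocates a fresh one; a hedged sketch, assuming the availableFrames map from returnFrame() above:

// Sketch of the pool's take side; the real project's allocation details
// may differ. Relies on the same availableFrames map as returnFrame().
public I420Frame takeFrame(I420Frame source) {
    long desc = summarizeFrameDimensions(source);
    synchronized (availableFrames) {
        LinkedList<I420Frame> frames = availableFrames.get(desc);
        if (frames == null) {
            frames = new LinkedList<I420Frame>();
            availableFrames.put(desc, frames);
        }
        if (!frames.isEmpty()) {
            return frames.removeFirst();
        }
    }
    // No pooled frame of these dimensions; allocate a new one.
    return new I420Frame(source.width, source.height, source.yuvStrides, null);
}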
Project: MediaCodecTest    File: VideoRendererGui.java
private YuvImageRenderer(
    GLSurfaceView surface, int id,
    int x, int y, int width, int height,
    ScalingType scalingType) {
  Log.d(TAG, "YuvImageRenderer.Create id: " + id);
  this.surface = surface;
  this.id = id;
  this.scalingType = scalingType;
  frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
  // Create texture vertices.
  texLeft = (x - 50) / 50.0f;
  texTop = (50 - y) / 50.0f;
  texRight = Math.min(1.0f, (x + width - 50) / 50.0f);
  texBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
  float[] textureVerticesFloat = new float[] {
      texLeft, texTop,
      texLeft, texBottom,
      texRight, texTop,
      texRight, texBottom
  };
  textureVertices = directNativeFloatBuffer(textureVerticesFloat);
  // Create texture UV coordinates.
  float[] textureCoordinatesFloat = new float[] {
      0, 0, 0, 1, 1, 0, 1, 1
  };
  textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
  updateTextureProperties = false;
}
Project: licodeAndroidClient    File: VideoStreamsView.java
/** Queue |frame| to be uploaded. */
public void queueFrame(final String stream, I420Frame frame) {
    // Paying for the copy of the YUV data here allows CSC and painting time
    // to get spent on the render thread instead of the UI thread.
    abortUnless(FramePool.validateDimensions(frame), "Frame too large!");
    synchronized (frameDescriptions) {
        // Replace any frame queued for this stream that was never drawn,
        // returning the stale copy to the pool.
        FrameDescription desc = frameDescriptions.get(stream);
        if (desc != null && desc.bufferIndex != -1) {
            I420Frame frameToDrop = desc.frameToRender;
            desc.frameToRender = framePool.takeFrame(frame).copyFrom(frame);
            if (frameToDrop != null) {
                framePool.returnFrame(frameToDrop);
            }
        }
    }
    long dt = System.nanoTime() - mLastRendered;
    if (dt > MIN_NANOS_BETWEEN_FRAMES
            && mRenderRequested.compareAndSet(false, true)) {
        queueEvent(new Runnable() {
            public void run() {
                updateFrames();
            }
        });
    }
}
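The compareAndSet/timestamp pair throttles scheduling: at most one updateFrames() call is ever in flight, and never more often than MIN_NANOS_BETWEEN_FRAMES. A sketch of the state this assumes; the 30 fps cap is illustrative, not taken from the project:

// Assumed throttling state for queueFrame() above (requires
// java.util.concurrent.TimeUnit and java.util.concurrent.atomic.AtomicBoolean).
private static final long MIN_NANOS_BETWEEN_FRAMES =
        TimeUnit.SECONDS.toNanos(1) / 30;  // illustrative 30 fps cap
private final AtomicBoolean mRenderRequested = new AtomicBoolean(false);
private volatile long mLastRendered = 0;
// updateFrames() would be expected to reset both:
// mLastRendered = System.nanoTime(); mRenderRequested.set(false);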
Project: licodeAndroidClient    File: VideoStreamsView.java
private void texImage2D(I420Frame frame, int[] textures) {
    for (int i = 0; i < 3; ++i) {
        ByteBuffer plane = frame.yuvPlanes[i];
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
        int w = i == 0 ? frame.width : frame.width / 2;
        int h = i == 0 ? frame.height : frame.height / 2;
        abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!="
                + w);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
                w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
                plane);
    }
    checkNoGLES2Error();
}
Project: licodeAndroidClient    File: FramePool.java
public void returnFrame(I420Frame frame) {
  long desc = summarizeFrameDimensions(frame);
  synchronized (availableFrames) {
    LinkedList<I420Frame> frames = availableFrames.get(desc);
    if (frames == null) {
      throw new IllegalArgumentException("Unexpected frame dimensions");
    }
    frames.add(frame);
  }
}
Project: licodeAndroidClient    File: FramePool.java
/** Validate that |frame| can be managed by the pool. */
public static boolean validateDimensions(I420Frame frame) {
  return frame.width < MAX_DIMENSION && frame.height < MAX_DIMENSION &&
      frame.yuvStrides[0] < MAX_DIMENSION &&
      frame.yuvStrides[1] < MAX_DIMENSION &&
      frame.yuvStrides[2] < MAX_DIMENSION;
}
Project: licodeAndroidClient    File: FramePool.java
private static long summarizeFrameDimensions(I420Frame frame) {
  long ret = frame.width;
  ret = ret * MAX_DIMENSION + frame.height;
  ret = ret * MAX_DIMENSION + frame.yuvStrides[0];
  ret = ret * MAX_DIMENSION + frame.yuvStrides[1];
  ret = ret * MAX_DIMENSION + frame.yuvStrides[2];
  return ret;
}
Project: droidkit-webrtc    File: VideoRendererGui.java
@Override
public void setSize(final int width, final int height) {
  Log.v(TAG, "YuvImageRenderer.setSize: " + width + " x " + height);
  int[] strides = { width, width / 2, width / 2 };
  // Frame re-allocation needs to be synchronized with copying the
  // frame to textures in draw() to avoid re-allocating the frame
  // while it is being copied.
  synchronized (frameToRenderQueue) {
    // Clear rendering queue
    frameToRenderQueue.poll();
    // Re-allocate / allocate the frame
    frameToRender = new I420Frame(width, height, strides, null);
  }
}
Project: droidkit-webrtc    File: VideoRendererGui.java
@Override
public synchronized void renderFrame(I420Frame frame) {
  long now = System.nanoTime();
  framesReceived++;
  // Check input frame parameters.
  if (!(frame.yuvStrides[0] == frame.width &&
      frame.yuvStrides[1] == frame.width / 2 &&
      frame.yuvStrides[2] == frame.width / 2)) {
    Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
        frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
    return;
  }
  // Skip rendering of this frame if setSize() was not called.
  if (frameToRender == null) {
    framesDropped++;
    return;
  }
  // Check incoming frame dimensions
  if (frame.width != frameToRender.width ||
      frame.height != frameToRender.height) {
    throw new RuntimeException("Wrong frame size " +
        frame.width + " x " + frame.height);
  }

  if (frameToRenderQueue.size() > 0) {
    // Skip rendering of this frame if previous frame was not rendered yet.
    framesDropped++;
    return;
  }
  frameToRender.copyFrom(frame);
  copyTimeNs += (System.nanoTime() - now);
  frameToRenderQueue.offer(frameToRender);
  seenFrame = true;
  surface.requestRender();
}
Project: appspotdemo-mono    File: VideoStreamsView.java
private void texImage2D(I420Frame frame, int[] textures) {
  for (int i = 0; i < 3; ++i) {
    ByteBuffer plane = frame.yuvPlanes[i];
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
    int w = i == 0 ? frame.width : frame.width / 2;
    int h = i == 0 ? frame.height : frame.height / 2;
    abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!=" + w);
    GLES20.glTexImage2D(
        GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
        GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, plane);
  }
  checkNoGLES2Error();
}
Project: appspotdemo-mono    File: FramePool.java
public void returnFrame(I420Frame frame) {
  long desc = summarizeFrameDimensions(frame);
  synchronized (availableFrames) {
    LinkedList<I420Frame> frames = availableFrames.get(desc);
    if (frames == null) {
      throw new IllegalArgumentException("Unexpected frame dimensions");
    }
    frames.add(frame);
  }
}
Project: appspotdemo-mono    File: FramePool.java
/** Validate that |frame| can be managed by the pool. */
public static boolean validateDimensions(I420Frame frame) {
  return frame.width < MAX_DIMENSION && frame.height < MAX_DIMENSION &&
      frame.yuvStrides[0] < MAX_DIMENSION &&
      frame.yuvStrides[1] < MAX_DIMENSION &&
      frame.yuvStrides[2] < MAX_DIMENSION;
}
Project: appspotdemo-mono    File: FramePool.java
private static long summarizeFrameDimensions(I420Frame frame) {
  long ret = frame.width;
  ret = ret * MAX_DIMENSION + frame.height;
  ret = ret * MAX_DIMENSION + frame.yuvStrides[0];
  ret = ret * MAX_DIMENSION + frame.yuvStrides[1];
  ret = ret * MAX_DIMENSION + frame.yuvStrides[2];
  return ret;
}
Project: MediaCodecTest    File: VideoRendererGui.java
@Override
public synchronized void renderFrame(I420Frame frame) {
  long now = System.nanoTime();
  framesReceived++;
  // Skip rendering of this frame if setSize() was not called.
  if (yuvFrameToRender == null || textureFrameToRender == null) {
    framesDropped++;
    return;
  }
  // Check input frame parameters.
  if (frame.yuvFrame) {
    if (!(frame.yuvStrides[0] == frame.width &&
        frame.yuvStrides[1] == frame.width / 2 &&
        frame.yuvStrides[2] == frame.width / 2)) {
      Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
          frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
      return;
    }
    // Check incoming frame dimensions.
    if (frame.width != yuvFrameToRender.width ||
        frame.height != yuvFrameToRender.height) {
      throw new RuntimeException("Wrong frame size " +
          frame.width + " x " + frame.height);
    }
  }

  if (frameToRenderQueue.size() > 0) {
    // Skip rendering of this frame if previous frame was not rendered yet.
    framesDropped++;
    return;
  }

  // Create a local copy of the frame.
  if (frame.yuvFrame) {
    yuvFrameToRender.copyFrom(frame);
    rendererType = RendererType.RENDERER_YUV;
    frameToRenderQueue.offer(yuvFrameToRender);
  } else {
    textureFrameToRender.copyFrom(frame);
    rendererType = RendererType.RENDERER_TEXTURE;
    frameToRenderQueue.offer(textureFrameToRender);
  }
  copyTimeNs += (System.nanoTime() - now);
  seenFrame = true;

  // Request rendering.
  surface.requestRender();
}
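The branch assumes the two destination frames pre-allocated in setSize() above, plus a flag the draw path reads to decide between YUV plane upload and texture rendering. A sketch of that assumed declaration:

// Assumed declaration for the snippet above (illustrative): the draw path
// switches on rendererType to pick YUV plane upload or texture rendering.
private enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }
private RendererType rendererType;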
Project: licodeAndroidClient    File: LicodeConnector.java
@Override
public void renderFrame(I420Frame frame) {
    view.queueFrame(streamId, frame);
}
Project: droidkit-webrtc    File: VideoRendererGui.java
private void draw() {
  long now = System.nanoTime();
  if (!seenFrame) {
    // No frame received yet - nothing to render.
    return;
  }
  I420Frame frameFromQueue;
  synchronized (frameToRenderQueue) {
    frameFromQueue = frameToRenderQueue.peek();
    if (frameFromQueue != null && startTimeNs == -1) {
      startTimeNs = now;
    }
    for (int i = 0; i < 3; ++i) {
      int w = (i == 0) ? frameToRender.width : frameToRender.width / 2;
      int h = (i == 0) ? frameToRender.height : frameToRender.height / 2;
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
      if (frameFromQueue != null) {
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
            w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
            frameFromQueue.yuvPlanes[i]);
      }
    }
    if (frameFromQueue != null) {
      frameToRenderQueue.poll();
    }
  }
  int posLocation = GLES20.glGetAttribLocation(program, "in_pos");
  GLES20.glEnableVertexAttribArray(posLocation);
  GLES20.glVertexAttribPointer(
      posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);

  int texLocation = GLES20.glGetAttribLocation(program, "in_tc");
  GLES20.glEnableVertexAttribArray(texLocation);
  GLES20.glVertexAttribPointer(
      texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);

  GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

  GLES20.glDisableVertexAttribArray(posLocation);
  GLES20.glDisableVertexAttribArray(texLocation);

  checkNoGLES2Error();

  if (frameFromQueue != null) {
    framesRendered++;
    drawTimeNs += (System.nanoTime() - now);
    if ((framesRendered % 150) == 0) {
      logStatistics();
    }
  }
}
Project: appspotdemo-mono    File: AppRTCDemoActivity.java
@Override
public void renderFrame(I420Frame frame) {
  view.queueFrame(stream, frame);
}
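All of these renderFrame() implementations plug into the old (pre-VideoSink) org.webrtc Java API the same way: implement VideoRenderer.Callbacks, wrap it in a VideoRenderer, and attach it to a VideoTrack. A hedged wiring sketch, assuming that API era:

// Hedged wiring sketch for the old org.webrtc Java API these projects target:
// wrap a Callbacks implementation (like the ones above) and attach it.
static void attachRenderer(MediaStream stream, VideoRenderer.Callbacks callbacks) {
  VideoTrack videoTrack = stream.videoTracks.get(0);
  videoTrack.addRenderer(new VideoRenderer(callbacks));
}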