My goal is to do a couple of things:

1. Process frames from the camera preview with OpenCV.
2. Record a video of those processed frames.
I have both working, but the way I implemented #2 is ridiculous: for each frame, the processed Mat is written out as a PNG, and when recording stops, JCodec's SequenceEncoder stitches the images into an MP4 (see MatVideoWriter below).
It works, but it has plenty of drawbacks: the frame rate drops unbearably while recording, the stitching step takes roughly half a second per frame, and it runs out of memory for videos longer than a few seconds, so I lower the camera resolution to keep the images as small as possible. Even then, the video frame rate bears no relation to reality, and the playback looks crazily sped up.
This seems absurd for a number of reasons, so my question is: is there a better way to do this?
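For what it's worth, the speed-up is explained by timestamps: as far as I can tell, SequenceEncoder plays the stitched frames back at a fixed rate, while my capture runs far slower, so a minute of wall-clock recording collapses into a few seconds of video. Here is a minimal sketch of a helper (FpsMeter is a hypothetical class, not part of the project below) that could be ticked from onCameraFrame to show the real capture rate:

// Hypothetical diagnostic helper: measures the actual capture frame rate,
// to compare against the fixed playback rate of the stitched video.
public class FpsMeter {

    private long startMs = -1;
    private int frames = 0;

    // Call once per captured frame, e.g. at the end of onCameraFrame().
    public void tick() {
        if (startMs < 0) {
            startMs = System.currentTimeMillis();
        }
        frames++;
    }

    // Average frames per second since the first tick.
    public double fps() {
        long elapsedMs = System.currentTimeMillis() - startMs;
        return elapsedMs > 0 ? frames * 1000.0 / elapsedMs : 0.0;
    }
}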
In case anyone wants to run it, here's a small example. It requires the OpenCV Android project, available here, and the JCodec Android project, available here.
Manifest.xml:
<uses-sdk android:minSdkVersion="8" android:targetSdkVersion="22" />

<application
    android:allowBackup="true"
    android:icon="@drawable/ic_launcher"
    android:label="@string/app_name"
    android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >
    <activity
        android:name=".MainActivity"
        android:screenOrientation="landscape"
        android:configChanges="orientation|keyboardHidden|screenSize"
        android:label="@string/app_name" >
        <intent-filter>
            <action android:name="android.intent.action.MAIN" />
            <category android:name="android.intent.category.LAUNCHER" />
        </intent-filter>
    </activity>
</application>

<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
MainActivity:
package com.example.videotest;

import java.io.File;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import android.app.Activity;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2 {

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0, 255, 0);
    private int resolutionIndex = 0;

    private MatVideoWriter matVideoWriter = new MatVideoWriter();

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                    Log.i("VideoTest", "OpenCV loaded successfully");
                    cameraView.enableView();
                    break;
                default:
                    super.onManagerConnected(status);
                    break;
            }
        }
    };

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);
        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (cameraView != null) {
            cameraView.disableView();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();
        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        //get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        //create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);
        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);
        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();
        rgbaInnerWindow.release();

        if (matVideoWriter.isRecording()) {
            matVideoWriter.write(rgba);
        }

        return rgba;
    }

    public void changeResolution(View v) {
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if (resolutionIndex >= cameraResolutionList.size()) {
            resolutionIndex = 0;
        }
        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x"
                + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
    }

    public void startVideo(View v) {
        if (matVideoWriter.isRecording()) {
            matVideoWriter.stop();
            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            for (String img : file.list()) {
                String scanMe = new File(file, img).getAbsolutePath();
                MediaScannerConnection.scanFile(this, new String[]{scanMe}, null, null);
                Log.i("VideoTest", "Scanning: " + scanMe);
            }
            file = new File(file, "video.mp4");
            MediaScannerConnection.scanFile(this, new String[]{file.getAbsolutePath()}, null, null);
        } else {
            String state = Environment.getExternalStorageState();
            Log.i("VideoTest", "state: " + state);
            File ext = getExternalFilesDir(null);
            Log.i("VideoTest", "ext: " + ext.getAbsolutePath());
            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            if (!file.exists()) {
                boolean success = file.mkdirs();
                Log.i("VideoTest", "mkdirs: " + success);
            } else {
                Log.i("VideoTest", "file exists.");
            }
            Log.i("VideoTest", "starting recording: " + file.getAbsolutePath());
            matVideoWriter.start(file);
        }
    }
}
CameraView:
package com.example.videotest;

import java.io.FileOutputStream;
import java.util.List;

import org.opencv.android.JavaCameraView;

import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.util.AttributeSet;
import android.util.Log;

public class CameraView extends JavaCameraView {

    private String mPictureFileName;

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public List<String> getEffectList() {
        return mCamera.getParameters().getSupportedColorEffects();
    }

    public boolean isEffectSupported() {
        return (mCamera.getParameters().getColorEffect() != null);
    }

    public String getEffect() {
        return mCamera.getParameters().getColorEffect();
    }

    public void setEffect(String effect) {
        Camera.Parameters params = mCamera.getParameters();
        params.setColorEffect(effect);
        mCamera.setParameters(params);
    }

    public List<android.hardware.Camera.Size> getResolutionList() {
        return mCamera.getParameters().getSupportedPreviewSizes();
    }

    public void setResolution(int width, int height) {
        disconnectCamera();
        mMaxHeight = height;
        mMaxWidth = width;
        connectCamera(getWidth(), getHeight());
    }

    public android.hardware.Camera.Size getResolution() {
        return mCamera.getParameters().getPreviewSize();
    }
}
MatVideoWriter:
package com.example.videotest;

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.jcodec.api.android.SequenceEncoder;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

public class MatVideoWriter {

    boolean recording;
    File dir;
    int imageIndex = 0;

    public void start(File dir) {
        this.dir = dir;
        recording = true;
    }

    public void stop() {
        recording = false;
        try {
            File file = new File(dir, "video.mp4");
            SequenceEncoder encoder = new SequenceEncoder(file);
            List<File> files = Arrays.asList(dir.listFiles());
            Collections.sort(files, new Comparator<File>() {
                @Override
                public int compare(File lhs, File rhs) {
                    return lhs.getName().compareTo(rhs.getName());
                }
            });
            for (File f : files) {
                if (!f.getName().endsWith(".png")) {
                    continue; // skip video.mp4 itself and anything else that isn't a frame
                }
                Log.i("VideoTest", "Encoding image: " + f.getAbsolutePath());
                try {
                    Bitmap frame = BitmapFactory.decodeFile(f.getAbsolutePath());
                    encoder.encodeImage(frame);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            encoder.finish();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void write(Mat mat) {
        // swap channels: the camera frame is RGBA, but Highgui.imwrite expects BGR
        Mat bgrMat = new Mat();
        Imgproc.cvtColor(mat, bgrMat, Imgproc.COLOR_RGBA2BGR);
        // zero-pad the index so the lexicographic sort in stop() matches frame order
        File file = new File(dir, String.format("img%06d.png", imageIndex));
        boolean success = Highgui.imwrite(file.toString(), bgrMat);
        bgrMat.release(); // free the native buffer so frames don't leak
        Log.i("VideoTest", "Success writing " + file.getName() + ": " + success);
        imageIndex++;
    }

    public boolean isRecording() {
        return recording;
    }
}
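Most of the per-frame cost above is the PNG encode in write(). One mitigation I can think of (a sketch only; RawFrameWriter is a hypothetical name, not part of the project) is to dump the raw RGBA bytes during capture and defer all image encoding to the stitching step:

// Sketch of an alternative writer: dump the raw pixel bytes of each frame
// and defer all image encoding to the stitching step.
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.opencv.core.Mat;

public class RawFrameWriter {

    // Writes the pixel data of an RGBA Mat to a file, uncompressed.
    public static void write(Mat rgba, File out) throws IOException {
        byte[] buf = new byte[(int) (rgba.total() * rgba.channels())];
        rgba.get(0, 0, buf); // copy pixel data out of the native Mat
        BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(out));
        try {
            os.write(buf);
        } finally {
            os.close();
        }
    }
}

That trades CPU for disk space (a 640x480 RGBA frame is about 1.2 MB uncompressed), and it still doesn't fix the timestamp problem.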
Edit: I didn't get any comments or answers, so I've cross-posted to the OpenCV forum here.
@HaDang pointed me to these links:
http://www.walking-productions.com/notslop/2013/01/16/android-live-streaming-courtesy-of-javacv-and-ffmpeg/
https://code.google.com/p/javacv/source/browse/samples/RecordActivity.java
That example uses a Java wrapper around FFmpeg to do the video recording. This project is a very useful starting point for anyone wanting to do the same thing: https://github.com/vanevery/JavaCV_0.3_stream_test
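Stripped of the camera and audio plumbing, the core of that recording approach looks roughly like this (a sketch against the JavaCV 0.x API used below; RecorderSketch is a hypothetical name, and the path, size, and frame rate are placeholder values):

// Minimal FFmpegFrameRecorder flow: open, record frames with timestamps, close.
import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

public class RecorderSketch {
    public static void main(String[] args) throws Exception {
        int width = 320, height = 240;
        FFmpegFrameRecorder recorder =
                new FFmpegFrameRecorder("/sdcard/test.mp4", width, height, 0); // 0 = no audio
        recorder.setFormat("mp4");
        recorder.setFrameRate(30);
        recorder.start();

        // One blank RGBA frame; in the real app the pixels come from the OpenCV Mat.
        IplImage image = IplImage.create(width, height,
                com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U, 4);
        recorder.setTimestamp(0); // microseconds since start()
        recorder.record(image);

        recorder.stop();
        recorder.release();
    }
}

Because each frame carries its own timestamp, the encoded video plays back at wall-clock speed even if capture is slow, which is exactly what the JCodec approach couldn't do.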
I took that project and worked it into my example. It's very messy, but it works:
package com.example.videotest;

import java.io.File;
import java.io.IOException;
import java.nio.ShortBuffer;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.FrameRecorder.Exception;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2 {

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0, 255, 0);
    private int resolutionIndex = 0;

    private IplImage videoImage = null;

    boolean recording = false;
    private volatile FFmpegFrameRecorder recorder;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private String ffmpeg_link;

    long startTime = 0;

    private String LOG_TAG = "VideoTest";

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                    Log.i("VideoTest", "OpenCV loaded successfully");
                    cameraView.enableView();
                    break;
                default:
                    super.onManagerConnected(status);
                    break;
            }
        }
    };

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);
        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    private void initRecorder() {
        Log.w(LOG_TAG, "initRecorder");

        int depth = com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
        int channels = 4;

        // Recreated after the frame size is set in changeResolution()
        videoImage = IplImage.create(imageWidth, imageHeight, depth, channels);
        Log.v(LOG_TAG, "IplImage.create");

        File videoFile = new File(getExternalFilesDir(null), "VideoTest/images/video.mp4");
        boolean mk = videoFile.getParentFile().mkdirs();
        Log.v(LOG_TAG, "Mkdir: " + mk);

        boolean del = videoFile.delete();
        Log.v(LOG_TAG, "del: " + del);

        try {
            boolean created = videoFile.createNewFile();
            Log.v(LOG_TAG, "Created: " + created);
        } catch (IOException e) {
            e.printStackTrace();
        }

        ffmpeg_link = videoFile.getAbsolutePath();
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link
                + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);

        recorder.setFormat("mp4");
        Log.v(LOG_TAG, "recorder.setFormat(\"mp4\")");

        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

        // re-set in changeResolution() as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

        // Create the audio recording thread; it is started in startVideo()
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (cameraView != null) {
            cameraView.disableView();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();
        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        //get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        //create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);
        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);
        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();
        rgbaInnerWindow.release();

        if (recording) {
            byte[] byteFrame = new byte[(int) (rgba.total() * rgba.channels())];
            rgba.get(0, 0, byteFrame);
            onFrame(byteFrame);
        }

        return rgba;
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;

        if (recorder != null) {
            Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }
        MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
    }

    public void changeResolution(View v) {
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if (resolutionIndex >= cameraResolutionList.size()) {
            resolutionIndex = 0;
        }
        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x"
                + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();

        imageWidth = resolution.width;
        imageHeight = resolution.height;
        // getFrameRate() is an extra accessor added to CameraView, not shown above
        frameRate = cameraView.getFrameRate();
        initRecorder();
    }

    int frames = 0;

    private void onFrame(byte[] data) {
        if (videoImage != null && recording) {
            long videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

            // Put the camera preview frame right into the videoImage object
            videoImage.getByteBuffer().put(data);

            try {
                // Set the correct timestamp (microseconds since start)
                recorder.setTimestamp(videoTimestamp);

                // Record the image into FFmpegFrameRecorder
                recorder.record(videoImage);

                frames++;
                Log.i(LOG_TAG, "Wrote Frame: " + frames);
            } catch (FFmpegFrameRecorder.Exception e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }
    }

    public void startVideo(View v) {
        recording = !recording;
        Log.i(LOG_TAG, "Recording: " + recording);

        if (recording) {
            // initRecorder() must already have run; it is called from changeResolution()
            startTime = System.currentTimeMillis();
            try {
                recorder.start();
                audioThread.start(); // start audio capture alongside the video recorder
                Log.i(LOG_TAG, "STARTED RECORDING.");
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else {
            stopRecording();
        }
    }

    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            // Set the thread priority
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio Capture/Encoding Loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    // Changes in this variable may not be picked up despite it being "volatile"
                    if (recording) {
                        try {
                            // Write to FFmpegFrameRecorder
                            recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG, "AudioThread Finished");

            /* Capture/Encoding finished, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
                Log.v(LOG_TAG, "audioRecord released");
            }
        }
    }
}
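One gotcha if you reuse this version: AudioRecord needs the audio permission, which the manifest earlier in this post doesn't include, so the manifest also needs:

<uses-permission android:name="android.permission.RECORD_AUDIO" />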