2

I want to use the OpenCV4Android sample Tutorial1 to record video, and I used this solution, but my smartphone does not display anything on the screen — just a black view. Can anyone help me?

here is my code

Tutorial1Activity

public class Tutorial1Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";

private CameraBridgeViewBase mOpenCvCameraView;
private boolean              mIsJavaCamera = true;
private MenuItem             mItemSwitchCamera = null;


//*****************writetoSD***********************//
public FileWriter fw; // = new FileWriter(folder_path, false);
public BufferedWriter bw;// = new BufferedWriter(fw);
boolean first_in = true;
String showTimefile = null;
String showTime = null;
String folder_path = Environment.getExternalStorageDirectory().getAbsolutePath();
String folder_name = "Face Detection Signal";
String folder_pathforfile = null;
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd HH_mm_ss");
SimpleDateFormat sdf_fileintxt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
//*****************writetoSD***********************//


//---------------MediaRecorder-------------------//
public MediaRecorder mediaRecorder;
Button bt_Record;
boolean isRecord = false;
Handler mThreadHandler;
HandlerThread mHandlerThread;

//---------------MediaRecorder-------------------//

private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        switch (status) {
            case LoaderCallbackInterface.SUCCESS:
            {
                Log.i(TAG, "OpenCV loaded successfully");
                mOpenCvCameraView.enableView();
            } break;
            default:
            {
                super.onManagerConnected(status);
            } break;
        }
    }
};

public Tutorial1Activity() {
    Log.i(TAG, "Instantiated new " + this.getClass());
}

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.tutorial1_surface_view);

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);

    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);

    mOpenCvCameraView.setCvCameraViewListener(this);
    //---------------------------------------------------------//
    bt_Record = (Button)findViewById(R.id.bt_recorder);
    folder_pathforfile = folder_path + File.separator + folder_name
            + File.separator + "opencv" + "_";
    CreateSDfolder();
    ongetTime();
    //---------------------------------------------------------//
}

@Override
public void onPause()
{
    super.onPause();
    if (mOpenCvCameraView != null)
        mOpenCvCameraView.disableView();
}

@Override
public void onResume()
{
    super.onResume();
    if (!OpenCVLoader.initDebug()) {
        Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
    } else {
        Log.d(TAG, "OpenCV library found inside package. Using it!");
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    }
}

public void onDestroy() {
    super.onDestroy();
    if (mOpenCvCameraView != null)
        mOpenCvCameraView.disableView();
}

public void onCameraViewStarted(int width, int height) {
}

public void onCameraViewStopped() {
}

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    return inputFrame.rgba();
}







private void CreateSDfolder() {
    String filefolderpath = folder_path + File.separator +  folder_name;
    File dir = new File(filefolderpath);
    if (!dir.exists()){
        Log.e("folder", "not exist");
        try{
            //dir.createNewFile(true);
            dir.mkdir();
            Log.e("folder", "creat exist");
        }catch(Exception e){
            Log.e("folder", "creat not exist");
            e.printStackTrace();
        }
    }
    else{
        Log.e("folder", "exist");
    }
}

private void ongetTime() {
    Date dt=new Date();
    showTime=sdf_fileintxt.format(dt);
    showTimefile =sdf.format(dt);
}

private void WritetoSD(String data) {
    try {
        fw = new FileWriter(folder_pathforfile + showTimefile+".txt", true);
        bw = new BufferedWriter(fw);
        if (first_in == true) {
            first_in = false;
            bw.append(showTime);
            bw.newLine();
        }
        bw.append(data);
        bw.newLine();
        bw.flush();
        bw.close();
    } catch (IOException e) {
        Log.e("WriteToSD", "Write To SD ERROR");
        e.printStackTrace();
    }
}

public void onRecordSignal (View v){
    if(!isRecord){
        isRecord = true;
        Log.e(TAG, "button click " + isRecord);
        bt_Record.setText("Stop");
       //new MediaPrepareTask().execute(null, null, null);
        if (prepareMediaRecorder()) {
            // Camera is available and unlocked, MediaRecorder is prepared,
            // now you can start recording
            Log.e("debug_mediarecorder", "prepareMediaRecorder in if");
            mOpenCvCameraView.setRecorder(mediaRecorder);
            mediaRecorder.start();
        } else {
            // prepare didn't work, release the camera
            Log.e("debug_mediarecorder", "prepareMediaRecorder in else");
            // mediaRecorder.stop();
            releaseMediaRecorder();
        }
    } else{
        isRecord = false;
        Log.e(TAG, "button click " + isRecord);
        bt_Record.setText("Record");
        try {
            if(mediaRecorder != null)
                mediaRecorder.stop();  // stop the recording
            else
                Log.e(TAG,"onRecordSignal mediaRecorder is null");

        } catch (RuntimeException e) {
            // RuntimeException is thrown when stop() is called immediately after start().
            // In this case the output file is not properly constructed ans should be deleted.
            Log.d(TAG, "RuntimeException: stop() is called immediately after start()");
            //noinspection ResultOfMethodCallIgnored
        }
        releaseMediaRecorder(); // release the MediaRecorder object
    }
}

public void releaseMediaRecorder() {
    Log.e("debug","releaseMediaRecorder");
    if (mediaRecorder != null) {
        mediaRecorder.reset(); // clear recorder configuration
        mediaRecorder.release(); // release the recorder object
        mediaRecorder = null;
        JavaCameraView.mCamera.lock();
        mOpenCvCameraView.releaseRecord();
    }
}

private String recordfilepath() {
    // TODO Auto-generated method stub
    ongetTime();
    File sddir =  Environment.getExternalStorageDirectory();
    File vrdir = new File(sddir, folder_name);
    File file = new File(vrdir, showTimefile+"_.mp4");
    String filepath = file.getAbsolutePath();
    Log.e("debug mediarecorder", filepath);
    return filepath;
}

public boolean prepareMediaRecorder() {
    // TODO Auto-generated method stub
    Log.e("debug mediarecorder", "in prepareMediaRecorder");
    mediaRecorder = new MediaRecorder();
    try {
        JavaCameraView.mCamera.lock();
        JavaCameraView.mCamera.unlock();
    }catch (RuntimeException e){
        Log.e("debug mediarecorder","JavaCameraView.mCamera.unlock() fail");
    }
    /*mediaRecorder.reset();
    mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
    //mediaRecorder.setPreviewDisplay(CameraBridgeViewBase.mSurfaceHolder.getSurface());
    mediaRecorder.setOutputFile(recordfilepath());
    //mediaRecorder.setOnInfoListener((MediaRecorder.OnInfoListener) this);
    //mediaRecorder.setOnErrorListener((MediaRecorder.OnErrorListener) this);*/
    mediaRecorder.reset();

    mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
    mediaRecorder.setProfile(cpHigh);
    //mediaRecorder.setOutputFile("out.mp4");
    mediaRecorder.setOutputFile(recordfilepath());
    mediaRecorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
    //mediaRecorder.setOnInfoListener(this);
    //mediaRecorder.setOnErrorListener(this);
    try {
        mediaRecorder.prepare();

    } catch (IllegalStateException e) {
        Log.e("debug mediarecorder", "not prepare");
        releaseMediaRecorder();
        return false;
    } catch (IOException e) {
        Log.e("debug mediarecorder", "not prepare IOException");
        //releaseMediaRecorder();
    }
    return true;
}


}

CameraBridgeViewBase

public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {

private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;

private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();

public int mFrameWidth;
public int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;

public static final int CAMERA_ID_ANY   = -1;
public static final int CAMERA_ID_BACK  = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;


//-------------------------------------//
protected  MediaRecorder mRecorder;
protected  Surface mSurface = null;

public void setRecorder(MediaRecorder rec) {
    mRecorder = rec;
    //Log.e(TAG,mRecorder.toString());
    if (mRecorder != null) {
        mSurface = mRecorder.getSurface();
        Log.e(TAG,"mRecorder is not null");
        Log.e(TAG,"mSurface = "+mSurface.toString());
    }
    else{
        Log.e(TAG,"mRecorder is null");
    }
}
public void releaseRecord(){
    mSurface.release();
}
//-------------------------------------//

public CameraBridgeViewBase(Context context, int cameraId) {
    super(context);
    mCameraIndex = cameraId;
    getHolder().addCallback(this);
    mMaxWidth = MAX_UNSPECIFIED;
    mMaxHeight = MAX_UNSPECIFIED;
}

public CameraBridgeViewBase(Context context, AttributeSet attrs) {
    super(context, attrs);

    int count = attrs.getAttributeCount();
    Log.d(TAG, "Attr count: " + Integer.valueOf(count));

    TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
    if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
        enableFpsMeter();

    mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);

    getHolder().addCallback(this);
    mMaxWidth = MAX_UNSPECIFIED;
    mMaxHeight = MAX_UNSPECIFIED;
    styledAttrs.recycle();
}

/**
 * Sets the camera index
 * @param cameraIndex new camera index
 */
public void setCameraIndex(int cameraIndex) {
    this.mCameraIndex = cameraIndex;
}

public interface CvCameraViewListener {
    /**
     * This method is invoked when camera preview has started. After this method is invoked
     * the frames will start to be delivered to client via the onCameraFrame() callback.
     * @param width -  the width of the frames that will be delivered
     * @param height - the height of the frames that will be delivered
     */
    public void onCameraViewStarted(int width, int height);

    /**
     * This method is invoked when camera preview has been stopped for some reason.
     * No frames will be delivered via onCameraFrame() callback after this method is called.
     */
    public void onCameraViewStopped();

    /**
     * This method is invoked when delivery of the frame needs to be done.
     * The returned values - is a modified frame which needs to be displayed on the screen.
     * TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
     */
    public Mat onCameraFrame(Mat inputFrame);
}

public interface CvCameraViewListener2 {
    /**
     * This method is invoked when camera preview has started. After this method is invoked
     * the frames will start to be delivered to client via the onCameraFrame() callback.
     * @param width -  the width of the frames that will be delivered
     * @param height - the height of the frames that will be delivered
     */
    public void onCameraViewStarted(int width, int height);

    /**
     * This method is invoked when camera preview has been stopped for some reason.
     * No frames will be delivered via onCameraFrame() callback after this method is called.
     */
    public void onCameraViewStopped();

    /**
     * This method is invoked when delivery of the frame needs to be done.
     * The returned values - is a modified frame which needs to be displayed on the screen.
     * TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
     */
    public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};

protected class CvCameraViewListenerAdapter implements CvCameraViewListener2  {
    public CvCameraViewListenerAdapter(CvCameraViewListener oldStypeListener) {
        mOldStyleListener = oldStypeListener;
    }

    public void onCameraViewStarted(int width, int height) {
        mOldStyleListener.onCameraViewStarted(width, height);
    }

    public void onCameraViewStopped() {
        mOldStyleListener.onCameraViewStopped();
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
         Mat result = null;
         switch (mPreviewFormat) {
            case RGBA:
                result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
                break;
            case GRAY:
                result = mOldStyleListener.onCameraFrame(inputFrame.gray());
                break;
            default:
                Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
        };

        return result;
    }

    public void setFrameFormat(int format) {
        mPreviewFormat = format;
    }

    private int mPreviewFormat = RGBA;
    private CvCameraViewListener mOldStyleListener;
};

/**
 * This class interface is abstract representation of single frame from camera for onCameraFrame callback
 * Attention: Do not use objects, that represents this interface out of onCameraFrame callback!
 */
public interface CvCameraViewFrame {

    /**
     * This method returns RGBA Mat with frame
     */
    public Mat rgba();

    /**
     * This method returns single channel gray scale Mat with frame
     */
    public Mat gray();
};

public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
    Log.d(TAG, "call surfaceChanged event");
    synchronized(mSyncObject) {
        if (!mSurfaceExist) {
            mSurfaceExist = true;
            checkCurrentState();
        } else {
            /** Surface changed. We need to stop camera and restart with new parameters */
            /* Pretend that old surface has been destroyed */
            mSurfaceExist = false;
            checkCurrentState();
            /* Now use new surface. Say we have it now */
            mSurfaceExist = true;
            checkCurrentState();
        }
    }
}

public void surfaceCreated(SurfaceHolder holder) {
    /* Do nothing. Wait until surfaceChanged delivered */
}

public void surfaceDestroyed(SurfaceHolder holder) {
    synchronized(mSyncObject) {
        mSurfaceExist = false;
        checkCurrentState();
    }
}

/**
 * This method is provided for clients, so they can enable the camera connection.
 * The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available
 */
public void enableView() {
    synchronized(mSyncObject) {
        mEnabled = true;
        checkCurrentState();
    }
}

/**
 * This method is provided for clients, so they can disable camera connection and stop
 * the delivery of frames even though the surface view itself is not destroyed and still stays on the scren
 */
public void disableView() {
    synchronized(mSyncObject) {
        mEnabled = false;
        checkCurrentState();
    }
}

/**
 * This method enables label with fps value on the screen
 */
public void enableFpsMeter() {
    if (mFpsMeter == null) {
        mFpsMeter = new FpsMeter();
        mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
    }
}

public void disableFpsMeter() {
        mFpsMeter = null;
}

/**
 *
 * @param listener
 */

public void setCvCameraViewListener(CvCameraViewListener2 listener) {
    mListener = listener;
}

public void setCvCameraViewListener(CvCameraViewListener listener) {
    CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
    adapter.setFrameFormat(mPreviewFormat);
    mListener = adapter;
}

/**
 * This method sets the maximum size that camera frame is allowed to be. When selecting
 * size - the biggest size which less or equal the size set will be selected.
 * As an example - we set setMaxFrameSize(200,200) and we have 176x152 and 320x240 sizes. The
 * preview frame will be selected with 176x152 size.
 * This method is useful when need to restrict the size of preview frame for some reason (for example for video recording)
 * @param maxWidth - the maximum width allowed for camera frame.
 * @param maxHeight - the maximum height allowed for camera frame
 */
public void setMaxFrameSize(int maxWidth, int maxHeight) {
    mMaxWidth = maxWidth;
    mMaxHeight = maxHeight;
}

public void SetCaptureFormat(int format)
{
    mPreviewFormat = format;
    if (mListener instanceof CvCameraViewListenerAdapter) {
        CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
        adapter.setFrameFormat(mPreviewFormat);
    }
}

/**
 * Called when mSyncObject lock is held
 */
private void checkCurrentState() {
    Log.d(TAG, "call checkCurrentState");
    int targetState;

    if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
        targetState = STARTED;
    } else {
        targetState = STOPPED;
    }

    if (targetState != mState) {
        /* The state change detected. Need to exit the current state and enter target state */
        processExitState(mState);
        mState = targetState;
        processEnterState(mState);
    }
}

private void processEnterState(int state) {
    Log.d(TAG, "call processEnterState: " + state);
    switch(state) {
    case STARTED:
        onEnterStartedState();
        if (mListener != null) {
            mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
        }
        break;
    case STOPPED:
        onEnterStoppedState();
        if (mListener != null) {
            mListener.onCameraViewStopped();
        }
        break;
    };
}

private void processExitState(int state) {
    Log.d(TAG, "call processExitState: " + state);
    switch(state) {
    case STARTED:
        onExitStartedState();
        break;
    case STOPPED:
        onExitStoppedState();
        break;
    };
}

private void onEnterStoppedState() {
    /* nothing to do */
}

private void onExitStoppedState() {
    /* nothing to do */
}

// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
    Log.d(TAG, "call onEnterStartedState");
    /* Connect camera */
    if (!connectCamera(getWidth(), getHeight())) {
        AlertDialog ad = new AlertDialog.Builder(getContext()).create();
        ad.setCancelable(false); // This blocks the 'BACK' button
        ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
        ad.setButton(DialogInterface.BUTTON_NEUTRAL,  "OK", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
                ((Activity) getContext()).finish();
            }
        });
        ad.show();

    }
}

private void onExitStartedState() {
    disconnectCamera();
    if (mCacheBitmap != null) {
        mCacheBitmap.recycle();
    }
}

/**
 * This method shall be called by the subclasses when they have valid
 * object and want it to be delivered to external client (via callback) and
 * then displayed on the screen.
 * @param frame - the current frame to be delivered
 */

protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
    Mat modified;

    if (mListener != null) {
        modified = mListener.onCameraFrame(frame);
    } else {
        modified = frame.rgba();
    }

    boolean bmpValid = true;
    if (modified != null) {
        try {
            Utils.matToBitmap(modified, mCacheBitmap);
        } catch(Exception e) {
            Log.e(TAG, "Mat type: " + modified);
            Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmpValid = false;
        }
    }

    if (bmpValid && mCacheBitmap != null) {
        Canvas canvas;

        if (mRecorder != null) {
            canvas = mSurface.lockCanvas(null);

            canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
            Log.d(TAG, "mStretch value: " + mScale);

            if (mScale != 0) {
                canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
                                (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
                                (int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
                                (int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
            } else {
                canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                                (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
            }

            if (mFpsMeter != null) {
                mFpsMeter.measure();
                mFpsMeter.draw(canvas, 20, 30);
            }
            mSurface.unlockCanvasAndPost(canvas);
        }

    }
}

/**
 * This method is invoked shall perform concrete operation to initialize the camera.
 * CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
 * initialized with the size of the Camera frames that will be delivered to external processor.
 * @param width - the width of this SurfaceView
 * @param height - the height of this SurfaceView
 */
protected abstract boolean connectCamera(int width, int height);

/**
 * Disconnects and release the particular camera object being connected to this surface view.
 * Called when syncObject lock is held
 */
protected abstract void disconnectCamera();

// NOTE: On Android 4.1.x the function must be called before SurfaceTextre constructor!
protected void AllocateCache()
{
    mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}

public interface ListItemAccessor {
    public int getWidth(Object obj);
    public int getHeight(Object obj);
};

/**
 * This helper method can be called by subclasses to select camera preview size.
 * It goes over the list of the supported preview sizes and selects the maximum one which
 * fits both values set via setMaxFrameSize() and surface frame allocated for this view
 * @param supportedSizes
 * @param surfaceWidth
 * @param surfaceHeight
 * @return optimal frame size
 */
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
    int calcWidth = 0;
    int calcHeight = 0;

    int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
    int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;

    for (Object size : supportedSizes) {
        int width = accessor.getWidth(size);
        int height = accessor.getHeight(size);

        if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
            if (width >= calcWidth && height >= calcHeight) {
                calcWidth = (int) width;
                calcHeight = (int) height;
            }
        }
    }

    return new Size(calcWidth, calcHeight);
}
}

And I added these permissions to the manifest:

<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_VIDEO" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
Community
  • 1
  • 1
  • I fixed my problem: if you use MediaRecorder.VideoSource.SURFACE, you can't use the following method: setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_1080P)) – Black student Jul 22 '16 at 14:45
  • How exactly did you come to this solution that setProfile is the problem? I'm also having the same problem –  Oct 12 '16 at 20:48
  • Can you please share the solution? – User Aug 24 '17 at 10:15

0 Answers0