OpenGL ES: rotate the whole scene to match portrait mode

Problem description

Note: I'm new to Android and OpenGL.

I'm building an augmented reality app based on ARToolKitX (GitHub: https://github.com/artoolkitx/artoolkitx/tree/8c6bd4e7be5e80c8439066b23473506aebbb496c/Source/ARXJ/ARXJProj/arxj/src/main/java/org/artoolkitx/arx/arxj).

The app displays the camera frames and renders objects on top of them with OpenGL.

My problem: ARToolKitX forces the app into landscape mode:

setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

But when I change the screen orientation to SCREEN_ORIENTATION_PORTRAIT, the camera image and the OpenGL objects are not rotated to the correct orientation and stay in landscape.

Inside the ARRenderer I can rotate the camera image myself via the drawVideoSettings method, but this does not affect the OpenGL objects.
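For reference, that camera-image rotation goes through the rotate90 flag that the full drawVideoSettings signature in the ARToolKitX sources exposes. A minimal sketch (treating h > w as "portrait" is my own assumption, not something ARToolKitX prescribes):

public void onSurfaceChanged(GL10 unused, int w, int h) {
    this.width = w;
    this.height = h;
    boolean portrait = h > w; // assumption: portrait whenever the surface is taller than wide
    if (ARController.getInstance().isRunning()) {
        // rotate90 turns the native video background into portrait; flipH/flipV stay off
        ARController.getInstance().drawVideoSettings(cameraIndex, w, h,
                portrait /* rotate90 */, false /* flipH */, false /* flipV */,
                ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE,
                ARX_jni.ARW_SCALE_MODE_FILL, viewport);
    }
}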

ARToolKitX also provides a surfaceChanged method inside the CameraSurface class, with the comment: "This is where to create a transformation matrix to scale and then rotate the surface view, if the app is going to handle orientation changes."

But I don't know what this transformation matrix should look like or how to apply it.
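One plausible shape for it, as a sketch rather than an official ARToolKitX recipe: tracking and projection happen in the camera's landscape frame, so pre-multiplying the projection matrix with a 90-degree rotation about the Z axis rotates everything drawn on top together with the video. Inside draw(), using android.opengl.Matrix (the sign of the angle may need flipping depending on the sensor's landscape mounting):

float[] projection = ARController.getInstance().getProjectionMatrix(10.0f, 10000.0f);
float[] rotation = new float[16];
float[] rotatedProjection = new float[16];
// Rotate the finished projection by -90 degrees about +Z so the clip-space
// output lands in portrait orientation on screen.
android.opengl.Matrix.setRotateM(rotation, 0, -90.0f, 0.0f, 0.0f, 1.0f);
android.opengl.Matrix.multiplyMM(rotatedProjection, 0, rotation, 0, projection, 0);
shaderProgram.setProjectionMatrix(rotatedProjection);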

Thanks for any help.

ARRenderer:

public abstract class ARRenderer implements GLSurfaceView.Renderer {

private MyShaderProgram shaderProgram;
private int width,height,cameraIndex;
private int[] viewport = new int[4];
private boolean firstRun = true;

private final static String TAG = ARRenderer.class.getName();


/**
 * Allows subclasses to load markers and prepare the scene. This is called after
 * initialisation is complete.
 */
public boolean configureARScene() {
    return true;
}

public void onSurfaceCreated(GL10 unused,EGLConfig config) {

    // Transparent background
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); // alpha 0 so the video background shows through
    this.shaderProgram = new MyShaderProgram(new MyVertexShader(),new MyFragmentShader());
    GLES20.glUseProgram(shaderProgram.getShaderProgramHandle());
}

public void onSurfaceChanged(GL10 unused,int w,int h) {
    this.width = w;
    this.height = h;
    if(ARController.getInstance().isRunning()) {
        //Update the frame settings for native rendering
        ARController.getInstance().drawVideoSettings(cameraIndex, w, h, false, false, false, ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE, ARX_jni.ARW_SCALE_MODE_FILL, viewport);
    }
}

public void onDrawFrame(GL10 unused) {
    if (ARController.getInstance().isRunning()) {
        // Initialize artoolkitX video background rendering.
        if (firstRun) {
            boolean isDisplayFrameInited = ARController.getInstance().drawVideoInit(cameraIndex);
            if (!isDisplayFrameInited) {
                Log.e(TAG,"Display Frame not inited");
            }

            if (!ARController.getInstance().drawVideoSettings(cameraIndex, this.width, this.height, false, false, false, ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE, ARX_jni.ARW_SCALE_MODE_FILL, viewport)) {
                Log.e(TAG,"Error during call of displayFrameSettings.");
            } else {
                Log.i(TAG,"Viewport {" + viewport[0] + "," + viewport[1] + "," + viewport[2] + "," + viewport[3] + "}.");
            }

            firstRun = false;
        }
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if (!ARController.getInstance().drawVideo(cameraIndex)) {
            Log.e(TAG,"Error during call of displayFrame.");
        }
        draw();
    }
}

/**
 * Should be overridden in subclasses and used to perform rendering.
 */
public void draw() {
    GLES20.glViewport(viewport[0],viewport[1],viewport[2],viewport[3]);

    //TODO: Check how to refactor near and far plane
    shaderProgram.setProjectionMatrix(ARController.getInstance().getProjectionMatrix(10.0f,10000.0f));
    float[] camPosition = {1f,1f,1f};
    shaderProgram.render(camPosition);
}

@SuppressWarnings("unused")
public ShaderProgram getShaderProgram() {
    return shaderProgram;
}

public void setCameraIndex(int cameraIndex) {
    this.cameraIndex = cameraIndex;
}

}

CameraSurface:

class CameraSurfaceImpl implements CameraSurface {

/**
 * Android logging tag for this class.
 */
private static final String TAG = CameraSurfaceImpl.class.getSimpleName();
private CameraDevice mCameraDevice;
private ImageReader mImageReader;
private Size mImageReaderVideoSize;
private final Context mAppContext;

private final CameraDevice.StateCallback mCamera2DeviceStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice camera2DeviceInstance) {
        mCameraDevice = camera2DeviceInstance;
        startCaptureAndForwardFramesSession();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice camera2DeviceInstance) {
        camera2DeviceInstance.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice camera2DeviceInstance,int error) {
        camera2DeviceInstance.close();
        mCameraDevice = null;
    }
};

/**
 * Listener to inform of camera related events: start,frame,and stop.
 */
private final CameraEventListener mCameraEventListener;
/**
 * Tracks if SurfaceView instance was created.
 */
private boolean mImageReaderCreated;

public CameraSurfaceImpl(CameraEventListener cameraEventListener,Context appContext){
    this.mCameraEventListener = cameraEventListener;
    this.mAppContext = appContext;
}


private final ImageReader.OnImageAvailableListener mImageAvailableAndProcessHandler = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader)
    {

        Image imageInstance = reader.acquireLatestImage();
        if (imageInstance == null) {
            //Note: This seems to happen quite often.
            Log.v(TAG,"onImageAvailable(): unable to acquire new image");
            return;
        }

        // Get a ByteBuffer for each plane.
        final Image.Plane[] imagePlanes = imageInstance.getPlanes();
        final int imagePlaneCount = Math.min(4,imagePlanes.length); // We can handle up to 4 planes max.
        final ByteBuffer[] imageBuffers = new ByteBuffer[imagePlaneCount];
        final int[] imageBufferPixelStrides = new int[imagePlaneCount];
        final int[] imageBufferRowStrides = new int[imagePlaneCount];
        for (int i = 0; i < imagePlaneCount; i++) {
            imageBuffers[i] = imagePlanes[i].getBuffer();
            // For ImageFormat.YUV_420_888 the order of planes in the array returned by Image.getPlanes()
            // is guaranteed such that plane #0 is always Y,plane #1 is always U (Cb),and plane #2 is always V (Cr).
            // The Y-plane is guaranteed not to be interleaved with the U/V planes (in particular,pixel stride is
            // always 1 in yPlane.getPixelStride()). The U/V planes are guaranteed to have the same row stride and
            // pixel stride (in particular,uPlane.getRowStride() == vPlane.getRowStride() and uPlane.getPixelStride() == vPlane.getPixelStride(); ).
            imageBufferPixelStrides[i] = imagePlanes[i].getPixelStride();
            imageBufferRowStrides[i] = imagePlanes[i].getRowStride();
        }

        if (mCameraEventListener != null) {
            mCameraEventListener.cameraStreamFrame(imageBuffers,imageBufferPixelStrides,imageBufferRowStrides);
        }

        imageInstance.close();
    }
};

@Override
public void surfaceCreated() {
    Log.i(TAG,"surfaceCreated(): called");

    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(mAppContext);
    int defaultCameraIndexId = mAppContext.getResources().getIdentifier("pref_defaultValue_cameraIndex","string",mAppContext.getPackageName());
    mCamera2DeviceID = Integer.parseInt(prefs.getString("pref_cameraIndex",mAppContext.getResources().getString(defaultCameraIndexId)));
    Log.i(TAG,"surfaceCreated(): will attempt to open camera \"" + mCamera2DeviceID +
            "\",set orientation,set preview surface");

    /*
    Set the resolution from the settings as size for the glView. Because the video stream capture
    is requested based on this size.

    WARNING: While coding the preferences are taken from the res/xml/preferences.xml!!!
    When building for Unity the actual used preferences are taken from the UnityARPlayer project!!!
    */
    int defaultCameraValueId = mAppContext.getResources().getIdentifier("pref_defaultValue_cameraResolution", "string", mAppContext.getPackageName());
    String camResolution = prefs.getString("pref_cameraResolution",mAppContext.getResources().getString(defaultCameraValueId));
    String[] dims = camResolution.split("x",2);
    mImageReaderVideoSize =  new Size(Integer.parseInt(dims[0]),Integer.parseInt(dims[1]));

    // Note that maxImages should be at least 2 for acquireLatestImage() to be any different
    // than acquireNextImage() - discarding all-but-the-newest Image requires temporarily
    // acquiring two Images at once. Or more generally, calling acquireLatestImage() with
    // less than two images of margin, that is (maxImages - currentAcquiredImages < 2),
    // will not discard as expected.
    mImageReader = ImageReader.newInstance(mImageReaderVideoSize.getWidth(),mImageReaderVideoSize.getHeight(),ImageFormat.YUV_420_888,/* The maximum number of images the user will want to access simultaneously:*/ 2 );
    mImageReader.setOnImageAvailableListener(mImageAvailableAndProcessHandler,null);

    mImageReaderCreated = true;

} // end: public void surfaceCreated(SurfaceHolder holder)

/* Interface implemented by this SurfaceView subclass
   holder: SurfaceHolder instance associated with SurfaceView instance that changed
   format: pixel format of the surface
   width: of the SurfaceView instance
   height: of the SurfaceView instance
*/
@Override
public void surfaceChanged() {
    Log.i(TAG,"surfaceChanged(): called");

    // This is where to calculate the optimal size of the display and set the aspect ratio
    // of the surface view (probably the service holder). Also where to Create transformation
    // matrix to scale and then rotate surface view,if the app is going to handle orientation
    // changes.
    if (!mImageReaderCreated) {
        surfaceCreated();
    }
    if (!isCamera2DeviceOpen()) {
        openCamera2(mCamera2DeviceID);
    }
    if (isCamera2DeviceOpen() && (null == mYUV_CaptureAndSendSession)) {
        startCaptureAndForwardFramesSession();
    }


}

private void openCamera2(int camera2DeviceID) {
    Log.i(TAG,"openCamera2(): called");
    CameraManager camera2DeviceMgr = (CameraManager)mAppContext.getSystemService(Context.CAMERA_SERVICE);

    try {
        if (PackageManager.PERMISSION_GRANTED == ContextCompat.checkSelfPermission(mAppContext,Manifest.permission.CAMERA)) {
            camera2DeviceMgr.openCamera(Integer.toString(camera2DeviceID),mCamera2DeviceStateCallback,null);
            return;
        }
    } catch (CameraAccessException ex) {
        Log.e(TAG,"openCamera2(): CameraAccessException caught," + ex.getMessage());
    } catch (Exception ex) {
        Log.e(TAG,"openCamera2(): exception caught," + ex.getMessage());
    }
    if (null == camera2DeviceMgr) {
        Log.e(TAG,"openCamera2(): Camera2 DeviceMgr not set");
    }
    Log.e(TAG,"openCamera2(): abnormal exit");
}

private int mCamera2DeviceID = -1;
private CaptureRequest.Builder mCaptureRequestBuilder;
private CameraCaptureSession mYUV_CaptureAndSendSession;

private void startCaptureAndForwardFramesSession() {

    if ((null == mCameraDevice) || (!mImageReaderCreated) /*|| (null == mPreviewSize)*/) {
        return;
    }

    closeYUV_CaptureAndForwardSession();

    try {
        mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        List<Surface> surfaces = new ArrayList<>();

        Surface surfaceInstance;
        surfaceInstance = mImageReader.getSurface();
        surfaces.add(surfaceInstance);
        mCaptureRequestBuilder.addTarget(surfaceInstance);

        mCameraDevice.createCaptureSession(
                surfaces,// Output surfaces
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        try {
                            if (mCameraEventListener != null) {
                                mCameraEventListener.cameraStreamStarted(mImageReaderVideoSize.getWidth(), mImageReaderVideoSize.getHeight(), "YUV_420_888", mCamera2DeviceID, false);
                            }
                            mYUV_CaptureAndSendSession = session;
                            // Session to repeat request to update passed in camSensorSurface
                            mYUV_CaptureAndSendSession.setRepeatingRequest(mCaptureRequestBuilder.build(),/* CameraCaptureSession.CaptureCallback cameraEventListener: */null,/* Background thread: */ null);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                        Toast.makeText(mAppContext,"Unable to setup camera sensor capture session",Toast.LENGTH_SHORT).show();
                    }
                },// Callback for capture session state updates
                null); // Secondary thread message queue
    } catch (CameraAccessException ex) {
        ex.printStackTrace();
    }
}

@Override
public void closeCameraDevice() {
    closeYUV_CaptureAndForwardSession();
    if (null != mCameraDevice) {
        mCameraDevice.close();
        mCameraDevice = null;
    }
    if (null != mImageReader) {
        mImageReader.close();
        mImageReader = null;
    }
    if (mCameraEventListener != null) {
        mCameraEventListener.cameraStreamStopped();
    }
    mImageReaderCreated = false;
}

private void closeYUV_CaptureAndForwardSession() {
    if (mYUV_CaptureAndSendSession != null) {
        mYUV_CaptureAndSendSession.close();
        mYUV_CaptureAndSendSession = null;
    }
}

/**
 * Indicates whether or not camera2 device instance is available,opened,enabled.
 */
@Override
public boolean isCamera2DeviceOpen() {
    return (null != mCameraDevice);
}

@Override
public boolean isImageReaderCreated() {
    return mImageReaderCreated;
}

}
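For completeness, the transformation matrix that the surfaceChanged() comment alludes to is the standard camera2 scale-then-rotate pattern from Google's Camera2Basic sample. It only applies when the preview is shown through a TextureView; since this CameraSurfaceImpl feeds frames into an ImageReader, there is no view to transform, which is why the rotation has to happen at render time instead. A sketch, with textureView and previewSize as hypothetical members:

// Sketch of the standard camera2 "configureTransform" pattern (as in Google's
// Camera2Basic sample). Hypothetical here: this class has no TextureView.
private void configureTransform(TextureView textureView, Size previewSize,
                                int viewWidth, int viewHeight, int displayRotation) {
    android.graphics.Matrix matrix = new android.graphics.Matrix();
    RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
    float centerX = viewRect.centerX();
    float centerY = viewRect.centerY();
    if (displayRotation == Surface.ROTATION_90 || displayRotation == Surface.ROTATION_270) {
        // Scale the buffer to fill the view, then rotate it into place.
        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
        matrix.setRectToRect(viewRect, bufferRect, android.graphics.Matrix.ScaleToFit.FILL);
        float scale = Math.max(
                (float) viewHeight / previewSize.getHeight(),
                (float) viewWidth / previewSize.getWidth());
        matrix.postScale(scale, scale, centerX, centerY);
        matrix.postRotate(90 * (displayRotation - 2), centerX, centerY);
    } else if (displayRotation == Surface.ROTATION_180) {
        matrix.postRotate(180, centerX, centerY);
    }
    textureView.setTransform(matrix);
}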

Edit:

/**
 * Override the draw function from ARRenderer.
 */
@Override
public void draw() {
    super.draw();
    fpsCounter.frame();
    if(maxfps<fpsCounter.getFPS()){
        maxfps= fpsCounter.getFPS();
    }
    logger.log(Level.INFO,"FPS: " + maxfps);

    // Initialize GL
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);
    GLES20.glFrontFace(GLES20.GL_CCW);
    
    // Look for trackables,and draw on each found one.
    for (int trackableUID : trackables.keySet()) {
        // If the trackable is visible,apply its transformation,and render the object
        float[] modelViewMatrix = new float[16];
        if (ARController.getInstance().queryTrackableVisibilityAndTransformation(trackableUID,modelViewMatrix)) {
            float[] projectionMatrix = ARController.getInstance().getProjectionMatrix(10.0f,10000.0f);
            trackables.get(trackableUID).draw(projectionMatrix,modelViewMatrix);
        }
    }
}
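The same projection-matrix rotation sketched earlier fits into this override. A sketch for the loop body (portrait is a hypothetical flag, e.g. set in onSurfaceChanged() when height > width):

float[] projectionMatrix = ARController.getInstance().getProjectionMatrix(10.0f, 10000.0f);
if (portrait) { // hypothetical flag, not part of ARRenderer
    float[] rotation = new float[16];
    float[] rotated = new float[16];
    android.opengl.Matrix.setRotateM(rotation, 0, -90.0f, 0.0f, 0.0f, 1.0f);
    android.opengl.Matrix.multiplyMM(rotated, 0, rotation, 0, projectionMatrix, 0);
    projectionMatrix = rotated;
}
trackables.get(trackableUID).draw(projectionMatrix, modelViewMatrix);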
