相机正在工作但我无法看到它

时间:2017-09-26 22:31:40

标签: java android camera augmented-reality vuforia

在一个基于Vuforia的应用程序中,我想启动另一个基于Vuforia的演示。于是我先停止当前的Vuforia活动(ARactivity),再用Intent启动新的活动(Md2activity.class),如下所示:

    // Stop the running Vuforia session on the current AR activity
    // before launching the demo activity.
    arActivity.stopAR();

    Intent intent = new Intent(arActivity, Md2activity.class);

    arActivity.startActivity(intent);

和.stopAR方法:

    // Stops the Vuforia session and releases rendering textures.
    // Safe to call more than once.
    public void stopAR(){
        try {
            // SampleApplicationSession.stopAR() stops the camera and
            // deinitializes trackers/Vuforia internally, so the extra
            // stopCamera() the original made afterwards was redundant and
            // ran against an already-deinitialized camera.
            vuforiaaAppSessionStop();
        } catch (SampleApplicationException e) {
            e.printStackTrace();
        }
        // Guard: mTextures is nulled below, so a second call to stopAR()
        // (or a later onDestroy()) would otherwise throw an NPE.
        if (mTextures != null) {
            mTextures.clear();
            mTextures = null;
        }
    }

    // Small seam so the session shutdown is in one place.
    private void vuforiaaAppSessionStop() throws SampleApplicationException {
        vuforiaAppSession.stopAR();
    }

我可以在控制台(logcat)中看到它工作正常:相机能够识别目标等。但屏幕上没有相机画面,只显示一个ProgressBar。

编辑:

public class Md2activity extends Activity implements SampleApplicationControl, SampleAppMenuInterface {     private static final String LOGTAG = "Md2activity";

// The Vuforia session wrapper that drives AR init/start/stop for this activity.
SampleApplicationSession vuforiaAppSession;

// Dataset state: the currently loaded dataset plus the list of dataset XMLs
// selectable from the menu.
private DataSet mCurrentDataset;
private int mCurrentDatasetSelectionIndex = 0;
private int mStartDatasetsIndex = 0; // first menu command id assigned to datasets
private int mDatasetsNumber = 0;
private ArrayList<String> mDatasetStrings = new ArrayList<String>();

// NOTE(review): these two fields are never read anywhere in this file —
// possibly dead code; confirm before removing.
private Activity mActivity;
private SampleApplicationControl mSessionControl;


// Our OpenGL view:
private SampleApplicationGLView mGlView;

// Our renderer:
private ImageTargetRendererMd2 mRenderer;

// Detects the single tap that triggers a one-shot autofocus.
private GestureDetector mGestureDetector;

// The textures we will use for rendering:
private Vector<Texture> mTextures;

// Menu-driven toggles: pending dataset swap, torch, continuous autofocus,
// and extended tracking.
private boolean mSwitchDatasetAsap = false;
private boolean mFlash = false;
private boolean mContAutofocus = false;
private boolean mExtendedTracking = false;


// The menu widget (Switch on JB-MR1+, CheckBox otherwise) controlling the torch.
private View mFlashOptionView;

// Overlay layout holding the loading indicator; drawn above the GL view.
private RelativeLayout mUILayout;

private SampleAppMenu mSampleAppMenu;

// Shows/hides the loading spinner via messages (SHOW_/HIDE_LOADING_DIALOG).
LoadingDialogHandler loadingDialogHandler = new LoadingDialogHandler(this);

// Alert Dialog used to display SDK errors
private AlertDialog mErrorDialog;

// True on Motorola "droid" models, which get an orientation workaround in onResume().
private boolean mIsDroidDevice = false;


// Called when the activity first starts or the user navigates back to an
// activity.
@Override
protected void onCreate(Bundle savedInstanceState) {
    Log.d(LOGTAG, "onCreate");
    super.onCreate(savedInstanceState);
    // NOTE(review): no setContentView() — the UI is assembled later via
    // addContentView() in startLoadingAnimation() and onInitARDone().
    //setContentView(R.layout.camera_md2);
    vuforiaAppSession = new SampleApplicationSession(this);


    // Show the loading overlay while Vuforia initializes asynchronously.
    startLoadingAnimation();
    mDatasetStrings.add("StonesAndChips.xml");
    mDatasetStrings.add("Tarmac.xml");


    // Kick off asynchronous AR init; onInitARDone() fires when finished.
    vuforiaAppSession
            .initAR(this, ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);


    mGestureDetector = new GestureDetector(this, new dr.ar.browser.Md2activity.GestureListener());

    // Load any sample specific textures:
    mTextures = new Vector<Texture>();

    // Droid devices need the orientation workaround applied in onResume().
    mIsDroidDevice = android.os.Build.MODEL.toLowerCase().startsWith(
            "droid");

}

// Process Single Tap event to trigger autofocus
private class GestureListener extends
        GestureDetector.SimpleOnGestureListener
{
    // Posts the delayed autofocus request after a tap.
    private final Handler autofocusHandler = new Handler();


    @Override
    public boolean onDown(MotionEvent e)
    {
        // Must return true so the detector keeps tracking the gesture.
        return true;
    }


    @Override
    public boolean onSingleTapUp(MotionEvent e)
    {
        // Request a one-shot autofocus one second after the tap.
        Runnable triggerFocus = new Runnable()
        {
            public void run()
            {
                if (!CameraDevice.getInstance().setFocusMode(
                        CameraDevice.FOCUS_MODE.FOCUS_MODE_TRIGGERAUTO))
                    Log.e("SingleTapUp", "Unable to trigger focus");
            }
        };
        autofocusHandler.postDelayed(triggerFocus, 1000L);
        return true;
    }
}

// Called when the activity will start interacting with the user.
@Override
protected void onResume()
{
    Log.d(LOGTAG, "onResume");
    super.onResume();

    // This is needed for some Droid devices to force portrait
    // NOTE(review): the session was initialized with
    // SCREEN_ORIENTATION_LANDSCAPE in onCreate(), yet this ends on
    // PORTRAIT — confirm which orientation is actually intended; the
    // mismatch could contribute to the missing camera view.
    if (mIsDroidDevice)
    {
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    }

    // Resume the Vuforia session (camera + trackers).
    try
    {
        vuforiaAppSession.resumeAR();
    } catch (SampleApplicationException e)
    {
        Log.e(LOGTAG, e.getString());
    }

    // Resume the GL view:
    if (mGlView != null)
    {
        mGlView.setVisibility(View.VISIBLE);
        mGlView.onResume();
    }

}


// Callback for configuration changes the activity handles itself
@Override
public void onConfigurationChanged(Configuration config)
{
    Log.d(LOGTAG, "onConfigurationChanged");
    super.onConfigurationChanged(config);

    // Let the Vuforia session adapt the video background to the new
    // configuration (e.g. after rotation).
    vuforiaAppSession.onConfigurationChanged();
}


// Called when the system is about to start resuming a previous activity.
@Override
protected void onPause()
{
    Log.d(LOGTAG, "onPause");
    super.onPause();

    // Hide and pause the GL surface while in the background.
    if (mGlView != null)
    {
        mGlView.setVisibility(View.INVISIBLE);
        mGlView.onPause();
    }

    // Turn off the flash. Unchecking the menu widget fires its
    // OnCheckedChangeListener, which performs the actual switch-off.
    if (mFlash && mFlashOptionView != null)
    {
        boolean hasSwitchWidget =
                Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
        if (hasSwitchWidget)
        {
            ((Switch) mFlashOptionView).setChecked(false);
        } else
        {
            ((CheckBox) mFlashOptionView).setChecked(false);
        }
    }

    // Pause the Vuforia session last, after the UI is quiesced.
    try
    {
        vuforiaAppSession.pauseAR();
    } catch (SampleApplicationException e)
    {
        Log.e(LOGTAG, e.getString());
    }
}


// The final call you receive before your activity is destroyed.
// Stops the AR session and releases the texture list.
@Override
protected void onDestroy()
{
    Log.d(LOGTAG, "onDestroy");
    super.onDestroy();

    try
    {
        vuforiaAppSession.stopAR();
    } catch (SampleApplicationException e)
    {
        Log.e(LOGTAG, e.getString());
    }

    // Unload texture. Guard against mTextures already being null — any
    // earlier external stopAR()-style cleanup that nulls the field would
    // otherwise make this line throw an NPE during teardown.
    if (mTextures != null)
    {
        mTextures.clear();
        mTextures = null;
    }

    System.gc();
}

// Initializes AR application components.
private void initApplicationAR()
{
    // Build the OpenGL ES surface view with the EGL config Vuforia needs.
    boolean needsAlpha = Vuforia.requiresAlpha();
    mGlView = new SampleApplicationGLView(this);
    mGlView.init(needsAlpha, /* depthSize */ 16, /* stencilSize */ 0);

    // Attach the renderer that draws the video background and augmentations.
    mRenderer = new ImageTargetRendererMd2(this, vuforiaAppSession);
    mGlView.setRenderer(mRenderer);
}


// Inflates the camera overlay (black background + spinner) and shows the
// loading indicator while Vuforia initializes. The overlay is made
// transparent later in onInitARDone().
private void startLoadingAnimation()
{
    mUILayout = (RelativeLayout) View.inflate(this, R.layout.camera_overlay,
            null);

    mUILayout.setVisibility(View.VISIBLE);
    mUILayout.setBackgroundColor(Color.BLACK);

    // Gets a reference to the loading dialog
    loadingDialogHandler.mLoadingDialogContainer = mUILayout
            .findViewById(R.id.loading_indicator);

    // Shows the loading indicator at start
    loadingDialogHandler
            .sendEmptyMessage(LoadingDialogHandler.SHOW_LOADING_DIALOG);

    // Adds the inflated layout to the view
    addContentView(mUILayout, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
            ViewGroup.LayoutParams.MATCH_PARENT));

}


// Methods to load and destroy tracking data.
@Override
public boolean doLoadTrackersData()
{
    // Look up the object tracker created in doInitTrackers().
    TrackerManager trackerManager = TrackerManager.getInstance();
    ObjectTracker tracker = (ObjectTracker) trackerManager
            .getTracker(ObjectTracker.getClassType());
    if (tracker == null)
        return false;

    // Lazily create the dataset container on first load.
    if (mCurrentDataset == null)
        mCurrentDataset = tracker.createDataSet();
    if (mCurrentDataset == null)
        return false;

    // Load the currently selected dataset XML from the app resources.
    String datasetFile = mDatasetStrings.get(mCurrentDatasetSelectionIndex);
    if (!mCurrentDataset.load(datasetFile, STORAGE_TYPE.STORAGE_APPRESOURCE))
        return false;

    if (!tracker.activateDataSet(mCurrentDataset))
        return false;

    // Tag every trackable with its dataset name and optionally enable
    // extended tracking on it.
    for (int i = 0; i < mCurrentDataset.getNumTrackables(); i++)
    {
        Trackable trackable = mCurrentDataset.getTrackable(i);
        if (isExtendedTrackingActive())
        {
            trackable.startExtendedTracking();
        }

        String userData = "Current Dataset : " + trackable.getName();
        trackable.setUserData(userData);
        Log.d(LOGTAG, "UserData:Set the following user data "
                + (String) trackable.getUserData());
    }

    return true;
}


// Deactivates and destroys the current dataset, if one is active.
@Override
public boolean doUnloadTrackersData()
{
    TrackerManager trackerManager = TrackerManager.getInstance();
    ObjectTracker tracker = (ObjectTracker) trackerManager
            .getTracker(ObjectTracker.getClassType());
    if (tracker == null)
        return false;

    // Indicate if the trackers were unloaded correctly
    boolean unloaded = true;
    if (mCurrentDataset != null && mCurrentDataset.isActive())
    {
        boolean isActiveSet =
                tracker.getActiveDataSet(0).equals(mCurrentDataset);
        if (isActiveSet && !tracker.deactivateDataSet(mCurrentDataset))
        {
            unloaded = false;
        } else if (!tracker.destroyDataSet(mCurrentDataset))
        {
            unloaded = false;
        }

        mCurrentDataset = null;
    }

    return unloaded;
}


// Callback from SampleApplicationSession once async AR init finishes.
// On success: build the GL view, add it, then start the camera — the GL
// surface must be in the view hierarchy before the video background is
// configured. On failure: log and report the error.
// NOTE(review): SHOW_LOADING_DIALOG is sent in startLoadingAnimation(),
// but HIDE_LOADING_DIALOG is never sent anywhere in this file — which
// matches the reported symptom of only seeing a ProgressBar; confirm the
// renderer (ImageTargetRendererMd2) hides it on first frame.
@Override
public void onInitARDone(SampleApplicationException exception)
{

    if (exception == null)
    {
        initApplicationAR();

        mRenderer.mIsActive = true;

        // Now add the GL surface view. It is important
        // that the OpenGL ES surface view gets added
        // BEFORE the camera is started and video
        // background is configured.
        addContentView(mGlView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT));

        // Sets the UILayout to be drawn in front of the camera
        mUILayout.bringToFront();

        // Sets the layout background to transparent
        mUILayout.setBackgroundColor(Color.TRANSPARENT);

        try
        {
            vuforiaAppSession.startAR(CameraDevice.CAMERA_DIRECTION.CAMERA_DIRECTION_DEFAULT);
        } catch (SampleApplicationException e)
        {
            Log.e(LOGTAG, e.getString());
        }

        // Prefer continuous autofocus; fall back silently if unsupported.
        boolean result = CameraDevice.getInstance().setFocusMode(
                CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);

        if (result)
            mContAutofocus = true;
        else
            Log.e(LOGTAG, "Unable to enable continuous autofocus");

        mSampleAppMenu = new SampleAppMenu(this, this, "Image Targets",
                mGlView, mUILayout, null);
        setSampleAppMenuSettings();

    } else
    {
        Log.e(LOGTAG, exception.getString());
        showInitializationErrorMessage(exception.getString());
    }
}


// Shows initialization error messages as System dialogs.
// Fix: the original dismissed any previous dialog but never built or
// showed a new one — `errorMessage` was captured and then unused, so the
// user got no feedback on init failure.
private void showInitializationErrorMessage(String message)
{
    final String errorMessage = message;
    runOnUiThread(new Runnable()
    {
        public void run()
        {
            if (mErrorDialog != null)
            {
                mErrorDialog.dismiss();
            }

            // Generates an Alert Dialog to show the error message
            AlertDialog.Builder builder = new AlertDialog.Builder(Md2activity.this);
            builder
                .setMessage(errorMessage)
                .setCancelable(false)
                .setPositiveButton(android.R.string.ok,
                    new DialogInterface.OnClickListener()
                    {
                        public void onClick(DialogInterface dialog, int id)
                        {
                            // Nothing useful can run without Vuforia.
                            finish();
                        }
                    });

            mErrorDialog = builder.create();
            mErrorDialog.show();
        }
    });
}


// Per-frame Vuforia callback; performs the deferred dataset swap requested
// by menuProcess() (mSwitchDatasetAsap) on the camera thread.
@Override
public void onVuforiaUpdate(State state)
{

    if (mSwitchDatasetAsap)
    {
        // Fix: was Log.e("onVuforiaUpdate", "成功") — a success message
        // logged at error level under an ad-hoc tag.
        Log.d(LOGTAG, "Switching dataset");
        mSwitchDatasetAsap = false;
        TrackerManager tm = TrackerManager.getInstance();
        ObjectTracker ot = (ObjectTracker) tm.getTracker(ObjectTracker
                .getClassType());
        if (ot == null || mCurrentDataset == null
                || ot.getActiveDataSet(0) == null)
        {
            Log.d(LOGTAG, "Failed to swap datasets");
            return;
        }

        // Unload the old dataset, then load the newly selected one.
        doUnloadTrackersData();
        doLoadTrackersData();
    }
}


// Initializes the object (image target) tracker. Fails if the tracker is
// already initialized or the camera has already been started.
@Override
public boolean doInitTrackers()
{
    TrackerManager manager = TrackerManager.getInstance();
    Tracker imageTracker = manager.initTracker(ObjectTracker.getClassType());

    if (imageTracker != null)
    {
        Log.i(LOGTAG, "Tracker successfully initialized");
        return true;
    }

    Log.e(
            LOGTAG,
            "Tracker not initialized. Tracker already initialized or the camera is already started");
    return false;
}


// Starts the object tracker if it exists; always reports success, matching
// the other tracker lifecycle callbacks.
@Override
public boolean doStartTrackers()
{
    Tracker tracker = TrackerManager.getInstance()
            .getTracker(ObjectTracker.getClassType());
    if (tracker != null)
    {
        tracker.start();
    }
    return true;
}


// Stops the object tracker if it exists; always reports success, matching
// the other tracker lifecycle callbacks.
@Override
public boolean doStopTrackers()
{
    Tracker tracker = TrackerManager.getInstance()
            .getTracker(ObjectTracker.getClassType());
    if (tracker != null)
    {
        tracker.stop();
    }
    return true;
}


// Tears down the object tracker; always reports success.
@Override
public boolean doDeinitTrackers()
{
    TrackerManager.getInstance().deinitTracker(ObjectTracker.getClassType());
    return true;
}


// Routes touches: the sample menu gets first chance at the event;
// otherwise the gesture detector handles it (single tap -> autofocus).
@Override
public boolean onTouchEvent(MotionEvent event)
{
    boolean consumedByMenu =
            mSampleAppMenu != null && mSampleAppMenu.processEvent(event);
    return consumedByMenu || mGestureDetector.onTouchEvent(event);
}


// Reports whether the "extended tracking" menu toggle is currently on.
private boolean isExtendedTrackingActive() {
    return mExtendedTracking;
}

// Menu command ids dispatched to menuProcess(). Dataset entries occupy
// CMD_DATASET_START_INDEX .. CMD_DATASET_START_INDEX + mDatasetsNumber - 1.
final public static int CMD_BACK = -1;
final public static int CMD_EXTENDED_TRACKING = 1;
final public static int CMD_AUTOFOCUS = 2;
final public static int CMD_FLASH = 3;
final public static int CMD_CAMERA_FRONT = 4;
final public static int CMD_CAMERA_REAR = 5;
final public static int CMD_DATASET_START_INDEX = 6;


// This method sets the menu's settings
// Builds the sample menu: back, toggles (extended tracking / autofocus /
// flash), front-vs-back camera (only if both exist), and dataset choices.
private void setSampleAppMenuSettings()
{
    SampleAppMenuGroup group;

    group = mSampleAppMenu.addGroup("", false);
    group.addTextItem(getString(R.string.menu_back), -1);

    group = mSampleAppMenu.addGroup("", true);
    group.addSelectionItem(getString(R.string.menu_extended_tracking),
            CMD_EXTENDED_TRACKING, false);
    group.addSelectionItem(getString(R.string.menu_contAutofocus),
            CMD_AUTOFOCUS, mContAutofocus);
    mFlashOptionView = group.addSelectionItem(
            getString(R.string.menu_flash), CMD_FLASH, false);

    // NOTE(review): android.hardware.Camera is the legacy camera API;
    // used here only to detect which camera facings exist.
    Camera.CameraInfo ci = new Camera.CameraInfo();
    boolean deviceHasFrontCamera = false;
    boolean deviceHasBackCamera = false;
    for (int i = 0; i < Camera.getNumberOfCameras(); i++)
    {
        Camera.getCameraInfo(i, ci);
        if (ci.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
            deviceHasFrontCamera = true;
        else if (ci.facing == Camera.CameraInfo.CAMERA_FACING_BACK)
            deviceHasBackCamera = true;
    }

    if (deviceHasBackCamera && deviceHasFrontCamera)
    {
        group = mSampleAppMenu.addGroup(getString(R.string.menu_camera),
                true);
        group.addRadioItem(getString(R.string.menu_camera_front),
                CMD_CAMERA_FRONT, false);
        group.addRadioItem(getString(R.string.menu_camera_back),
                CMD_CAMERA_REAR, true);
    }

    group = mSampleAppMenu
            .addGroup(getString(R.string.menu_datasets), true);
    mStartDatasetsIndex = CMD_DATASET_START_INDEX;
    mDatasetsNumber = mDatasetStrings.size();

    // NOTE(review): labels are hard-coded to match the two entries added
    // to mDatasetStrings in onCreate(); keep the two in sync.
    group.addRadioItem("Stones & Chips", mStartDatasetsIndex, true);
    group.addRadioItem("Tarmac", mStartDatasetsIndex + 1, false);

    mSampleAppMenu.attachMenu();
}


// Handles a sample-menu command. Returns false if the command failed.
// Fixes vs original: (1) the "stop extended tracking" success branch
// logged "Successfully started ..." (copy-paste); (2. CMD_EXTENDED_TRACKING
// dereferenced mCurrentDataset without a null check, NPE-ing if the menu
// item is tapped before a dataset has loaded.
@Override
public boolean menuProcess(int command)
{

    boolean result = true;

    switch (command)
    {
        case CMD_BACK:
            finish();
            break;

        case CMD_FLASH:
            // Toggle the torch; on failure, report and keep the old state.
            result = CameraDevice.getInstance().setFlashTorchMode(!mFlash);

            if (result)
            {
                mFlash = !mFlash;
            } else
            {
                showToast(getString(mFlash ? R.string.menu_flash_error_off
                        : R.string.menu_flash_error_on));
                Log.e(LOGTAG,
                        getString(mFlash ? R.string.menu_flash_error_off
                                : R.string.menu_flash_error_on));
            }
            break;

        case CMD_AUTOFOCUS:
            // Toggle between continuous autofocus and normal focus mode.
            if (mContAutofocus)
            {
                result = CameraDevice.getInstance().setFocusMode(
                        CameraDevice.FOCUS_MODE.FOCUS_MODE_NORMAL);

                if (result)
                {
                    mContAutofocus = false;
                } else
                {
                    showToast(getString(R.string.menu_contAutofocus_error_off));
                    Log.e(LOGTAG,
                            getString(R.string.menu_contAutofocus_error_off));
                }
            } else
            {
                result = CameraDevice.getInstance().setFocusMode(
                        CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO);

                if (result)
                {
                    mContAutofocus = true;
                } else
                {
                    showToast(getString(R.string.menu_contAutofocus_error_on));
                    Log.e(LOGTAG,
                            getString(R.string.menu_contAutofocus_error_on));
                }
            }

            break;

        case CMD_CAMERA_FRONT:
        case CMD_CAMERA_REAR:

            // Turn off the flash before restarting on another camera.
            // Unchecking the widget fires its OnCheckedChangeListener,
            // which performs the actual switch-off.
            if (mFlashOptionView != null && mFlash)
            {
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1)
                {
                    ((Switch) mFlashOptionView).setChecked(false);
                } else
                {
                    ((CheckBox) mFlashOptionView).setChecked(false);
                }
            }

            vuforiaAppSession.stopCamera();

            try
            {
                vuforiaAppSession
                        .startAR(command == CMD_CAMERA_FRONT ? CameraDevice.CAMERA_DIRECTION.CAMERA_DIRECTION_FRONT
                                : CameraDevice.CAMERA_DIRECTION.CAMERA_DIRECTION_BACK);
            } catch (SampleApplicationException e)
            {
                showToast(e.getString());
                Log.e(LOGTAG, e.getString());
                result = false;
            }
            doStartTrackers();
            break;

        case CMD_EXTENDED_TRACKING:
            // Guard: no dataset loaded yet means nothing to (un)track.
            if (mCurrentDataset == null)
            {
                result = false;
                break;
            }
            for (int tIdx = 0; tIdx < mCurrentDataset.getNumTrackables(); tIdx++)
            {
                Trackable trackable = mCurrentDataset.getTrackable(tIdx);

                if (!mExtendedTracking)
                {
                    if (!trackable.startExtendedTracking())
                    {
                        Log.e(LOGTAG,
                                "Failed to start extended tracking target");
                        result = false;
                    } else
                    {
                        Log.d(LOGTAG,
                                "Successfully started extended tracking target");
                    }
                } else
                {
                    if (!trackable.stopExtendedTracking())
                    {
                        Log.e(LOGTAG,
                                "Failed to stop extended tracking target");
                        result = false;
                    } else
                    {
                        Log.d(LOGTAG,
                                "Successfully stopped extended tracking target");
                    }
                }
            }

            // Only flip the flag if every trackable transitioned cleanly.
            if (result)
                mExtendedTracking = !mExtendedTracking;

            break;

        default:
            // Dataset entries: remember the selection; the swap happens on
            // the next onVuforiaUpdate() callback (mSwitchDatasetAsap).
            if (command >= mStartDatasetsIndex
                    && command < mStartDatasetsIndex + mDatasetsNumber)
            {
                mSwitchDatasetAsap = true;
                mCurrentDatasetSelectionIndex = command
                        - mStartDatasetsIndex;
            }
            break;
    }

    return result;
}


// Shows a short toast with the given text on this activity's context.
private void showToast(String text)
{
    Toast toast = Toast.makeText(this, text, Toast.LENGTH_SHORT);
    toast.show();
}

}

0 个答案:

没有答案
相关问题