目前,我尝试让stettbacher O-3000相机与我的Java客户端一起运行。 为此,我使用JNI来调用驱动程序函数。一切都按预期工作。 这是我的代码的相关部分:
(Java)
...
// Load the native JNI bridge library (o3000jni.dll / libo3000jni.so) exactly
// once, when this class is first used — must run before any native method call.
static {
    System.loadLibrary("o3000jni");
}
...
private Camo3000JNI jni;
private Model model;
...
Model model = Model();
Camo3000JNI jni = new Camo3000JNI(model, 0));
jni.init();
...
jni.start();
...
(JNI 类取自官方演示,稍加修改)
public class Camo3000JNI implements Runnable {

    /** When true, incoming XML responses are echoed to stdout. */
    private boolean debug = true;

    /** Callback receiver; the application Model implements this. */
    private CameraDevsInterface camIface; //Model is implementing this

    /**
     * Loop flag of run(). BUGFIX: declared volatile so a stop request issued
     * from another thread via stopo3000Cam() is guaranteed to become visible
     * to the camera thread.
     */
    private volatile boolean keepRunning = true;

    /** Driver status codes, mirroring the constants in o3000_jni.h. */
    private final static int E_SUCCESS = 0, E_NOCALLBACK = 1, E_NOMEM = 2, E_NODEV = 3, E_OTHER = 4;

    /** Maximum frame size in pixels (must match the native color pipeline). */
    private int w = 1280;
    private int h = 960;

    /** Frame buffer shared with native code; one packed 0xRRGGBB int per pixel. */
    private int[] video_data_array = new int[w * h];

    /** Thread executing run(); started via startThread(). */
    private Thread thd_cam;

    /**
     * Driver session id; a value < 0 means "not initialized yet".
     * BUGFIX: volatile, because init() (caller thread) writes it while
     * run() (camera thread) polls it.
     */
    private volatile int session = -1;

    /** Index of the camera this instance connects to. */
    private int cameraNumber;

    /**
     * Constructor.
     *
     * @param camIface     callback receiver (the Model)
     * @param cameraNumber index of the camera to connect to
     */
    public Camo3000JNI(CameraDevsInterface camIface, int cameraNumber) {
        this.camIface = camIface;
        thd_cam = new Thread(this);
        this.cameraNumber = cameraNumber;
    }

    /**
     * Start blocking thread
     */
    public void startThread() {
        thd_cam.start();
    }

    /**
     * Initialize a driver session and register the shared video buffer.
     *
     * @return the session id (>= 0)
     * @throws RuntimeException if the native driver reports an error
     */
    public int init() {
        System.out.println("Initializing session for camera: " + cameraNumber);
        session = initCamDriver(0x0483, 0xA098, video_data_array);
        // BUGFIX: the native layer returns -1 on failure and a session id >= 0
        // on success; the previous check (0 >= session) wrongly rejected id 0.
        if (session < 0)
            throw new RuntimeException("Session for camera " + cameraNumber + " could not be initialized!");
        camIface.setOnOffStatus(E_SUCCESS);
        // warning: camera not connected, just initialized, kernel will queue any pending XML messages from host to device
        return session;
    }

    // callbacks:

    /**
     * Handling receiving video data (invoked from native code).
     */
    public void video_handler(int width, int height, int format) {
        camIface.streamVid(video_data_array, width, height, format);
    }

    /**
     * Handling receiving xml messages (invoked from native code).
     */
    public void xml_handler(String buf, int len) {
        if (debug) { System.out.println("XXXXXXX IN: " + buf); }
        camIface.setXMLresp(buf);
    }

    /**
     * Handling receiving log messages (invoked from native code).
     */
    public void log_handler(String log) {
        log = log.replace("\n", "").replace("\r", "");
        camIface.setLogresp(log);
    }
    // callbacks: end

    // native calls
    public native int sendXMLcmd(int session, String xmlCmd);
    public native int initCamDriver(int vid, int pid, int[] video_data_array);
    public native void stopCamDriver(int session);
    public native int connectCamDriver(int session, int cameraNumber);
    public native int availableCameraCount(int session);
    // native calls: end

    /** @return the number of cameras the driver currently sees for this session */
    public int getNumberOfCams() {
        return availableCameraCount(session);
    }

    /** Sends an XML command on this instance's session. */
    public int sendXMLcmd(String xml) {
        return sendXMLcmd(session, xml);
    }

    /**
     * Runnable method of this thread: waits until init() produced a valid
     * session, then (re-)enters the blocking native connect call.
     */
    public void run() {
        Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
        while (keepRunning) {
            // BUGFIX: 'session' holds a session id, not a status code. The old
            // test (session != E_SUCCESS) only proceeded when the id happened
            // to be 0; any other valid id made this loop sleep forever.
            if (session < 0) {
                sleepQuietly(100);
                continue;
            }
            if (connectCamDriver(session, cameraNumber) != E_SUCCESS) {
                System.out.println("Not connected!!!");
                // BUGFIX: back off briefly instead of spinning at full CPU speed.
                sleepQuietly(100);
            }
        }
    }

    /** Sleep helper that restores the interrupt flag instead of swallowing it. */
    private void sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            System.out.println("Sleep not successful in jni run: " + e.getMessage());
            Thread.currentThread().interrupt(); // preserve interrupt status
        }
    }

    /**
     * Stop thread is never called out of camera context
     */
    public synchronized void stopo3000Cam() {
        keepRunning = false;
        stopCamDriver(session);
    }
}
(由 Model 类实现的接口)
/**
* Interface for Camera Implementation
*/
/**
 * Callback sink for the camera JNI bridge: the application model implements
 * this to receive frames, XML responses, log lines and status updates.
 */
public interface CameraDevsInterface {

    /** Delivers a decoded frame (one packed pixel per int) plus its geometry. */
    void streamVid(int[] video_data, int width, int height, int format);

    /** Delivers an XML response string received from the camera. */
    void setXMLresp(String xml_response);

    /** Delivers a driver log line. */
    void setLogresp(String log_response);

    /** Reports the driver on/off status code. */
    void setOnOffStatus(int ret);
}
现在我有一个拍摄快照的方法:
// Take one snapshot and write it to snapshot.png.
File file = new File("snapshot.png");
jni.sendXMLcmd("<camera><start></start></camera>");
if(model.getOnOffStatus() == 0) {
    System.out.println("Cam connected!");
} else {
    System.out.println("Not connected!");
}
jni.sendXMLcmd("<camera><stream></stream></camera>");
jni.sendXMLcmd("<camera><snapshot></snapshot></camera>");
// NOTE(review): video_handler is the target of the NATIVE video callback;
// calling it by hand pushes whatever currently sits in video_data_array to
// the model — if no frame has arrived yet, that buffer is still all zeros
// (a black image). Presumably this line should be removed and the native
// callback relied upon instead — TODO confirm.
jni.video_handler(1280, 960, 12);
try {
    // NOTE(review): a fixed 200 ms wait races against frame delivery; there is
    // no guarantee the snapshot has been processed before the image is read.
    Thread.sleep(200);
} catch (InterruptedException e1) {
    // TODO Auto-generated catch block
    e1.printStackTrace();
}
try {
    ImageIO.write(toBufferedImage(model.getImage()), "png", file);
} catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
}
jni.sendXMLcmd("<camera><stop></stop></camera>");
以上所有内容都有效,但前提是我在单独的进程中运行它。也就是说,当我尝试在同一进程中启动多个线程(多个 jni 对象)时,我仍然可以与驱动程序通信(能得到正确数量的摄像头),但一旦尝试拍摄快照,我只会得到一张黑色图像。我猜测问题出在回调机制上,多线程可能必须换一种方式实现(目前我对 JNI 的经验还很少)。
也许有人可以帮我理解问题是什么或者我如何重构代码。
[来源 & 附加信息]
driver:在 Windows 上安装官方来源提供的 .exe 后,即可获得所有资源。
(o3000_jni.h)
/**
* Java Demo Software for the o3000-series: SSP_StreamingCamera
*
* Copyright (C) 2013 by Stettbacher Signal Processing
...cut
**/
/* DO NOT EDIT THIS FILE - it is machine generated */
/* NOTE(review): presumably generated with javah / javac -h from
   Camo3000JNI.java — regenerate instead of editing; manual changes
   would be overwritten. TODO confirm the generation step. */
#include <jni.h>
/* Header for class o3000_Camo3000JNI */
#ifndef _Included_o3000_Camo3000JNI
#define _Included_o3000_Camo3000JNI
#ifdef __cplusplus
extern "C" {
#endif
#undef o3000_Camo3000JNI_E_SUCCESS
#define o3000_Camo3000JNI_E_SUCCESS 0L
#undef o3000_Camo3000JNI_E_NOCALLBACK
#define o3000_Camo3000JNI_E_NOCALLBACK 1L
#undef o3000_Camo3000JNI_E_NOMEM
#define o3000_Camo3000JNI_E_NOMEM 2L
#undef o3000_Camo3000JNI_E_NODEV
#define o3000_Camo3000JNI_E_NODEV 3L
#undef o3000_Camo3000JNI_E_OTHER
#define o3000_Camo3000JNI_E_OTHER 4L
/*
 * Class:     o3000_Camo3000JNI
 * Method:    sendXMLcmd
 * Signature: (ILjava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_o3000_Camo3000JNI_sendXMLcmd
(JNIEnv *, jobject, jint, jstring);
/*
 * Class:     o3000_Camo3000JNI
 * Method:    initCamDriver
 * Signature: (II[I)I
 */
JNIEXPORT jint JNICALL Java_o3000_Camo3000JNI_initCamDriver
(JNIEnv *, jobject, jint, jint, jintArray);
/*
 * Class:     o3000_Camo3000JNI
 * Method:    stopCamDriver
 * Signature: (I)V
 */
JNIEXPORT void JNICALL Java_o3000_Camo3000JNI_stopCamDriver
(JNIEnv *, jobject, jint);
/*
 * Class:     o3000_Camo3000JNI
 * Method:    connectCamDriver
 * Signature: (II)I
 */
JNIEXPORT jint JNICALL Java_o3000_Camo3000JNI_connectCamDriver
(JNIEnv *, jobject, jint, jint);
/*
 * Class:     o3000_Camo3000JNI
 * Method:    availableCameraCount
 * Signature: (I)I
 */
JNIEXPORT jint JNICALL Java_o3000_Camo3000JNI_availableCameraCount
(JNIEnv *, jobject, jint);
#ifdef __cplusplus
}
#endif
#endif
(o3000_jni.c)
/*
...cut
*/
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <o3000/o3000.h>
#include <o3000/color_pipe.h>
#include <o3000/o3000_jni.h>
#define FALSE 0
#define TRUE 1
#define IMAGE_FRAME_WIDTH 1280 // maximum image width in pixels
#define IMAGE_FRAME_HEIGHT 960 // maximum image height in pixels
// ----------------------------------------------------------------------------
// Global references for callbacks
//
// NOTE(review): this is a SINGLE set of globals, not per-session state. Every
// call to initCamDriver()/connectCamDriver() overwrites it, so only one
// Camo3000JNI instance per process can receive callbacks; with several
// instances all frames land in the last-initialized object's buffer while the
// others stay black. To support multiple cameras this state must be keyed by
// session id.
// ----------------------------------------------------------------------------
jmethodID video_handler_id;   // Camo3000JNI.video_handler(III)V
jmethodID xml_handler_id;     // Camo3000JNI.xml_handler(Ljava/lang/String;I)V
jmethodID log_handler_id;     // Camo3000JNI.log_handler(Ljava/lang/String;)V
JNIEnv *env_cached;           // JNIEnv cached in connectCamDriver(); a JNIEnv
                              // is only valid on its own thread — see callbacks
jobject obj_cached;           // global ref to the Camo3000JNI instance
jobject viddata_chached;      // global ref to the shared int[] frame buffer
                              // (sic "chached": keep the name, it is used below)
// ----------------------------------------------------------------------------
// end: Global references for callbacks
// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
// prototypes (not in *.h file because *.h file is automatically generated
// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
// end prototypes
// ----------------------------------------------------------------------------
static int flag_connection = FALSE;  // TRUE once init found >= 1 camera
static char xml_msg_default[] = {"<camera><stop></stop></camera>"};  // sent on connect
static struct color_pipe_t *color_pipe;  // image pipeline (single instance, see NOTE above)
/**
 * \brief XML handler (libo3000 callback)
 *
 * Forwards an XML answer string from the driver to the Java method
 * Camo3000JNI.xml_handler(String, int).
 *
 * \param id   camera/session id reported by the driver — ignored here: the
 *             message always goes to the single cached Java object, so with
 *             several Camo3000JNI instances the last-initialized one receives
 *             every answer (NOTE(review): part of the multi-camera problem)
 * \param *buf Pointer to the XML string
 * \param len  String's length
 *
 * NOTE(review): env_cached is the JNIEnv of the thread that called
 * connectCamDriver(); a JNIEnv is only valid on its own thread, so this is
 * only safe if libo3000 fires the callback on that same thread — TODO confirm.
 * The local ref created by NewStringUTF is never deleted; refs accumulate for
 * as long as the enclosing native call (o3000_connect) blocks.
 */
static __stdcall void xml_handling(int id, char* buf, int len) {
    jstring jstr;
    /* Create a new string and overwrite the instance field */
    jstr = (*env_cached)->NewStringUTF(env_cached, buf);
    if (jstr == NULL) {
        return; /* out of memory */
    }
    (*env_cached)->CallVoidMethod(env_cached,obj_cached,xml_handler_id, jstr, len); // calls Java method "xml_handler"
}
/**
 * Copy a processed (pipelined) image into a Java jint array.
 *
 * Each destination element is one pixel packed as 0x00RRGGBB. The source
 * pixel layout depends on the color/monochrome mode and the bit depth:
 * 8-bit channels are taken as-is, 12-bit channels are right-shifted by 4 so
 * that only the 8 most significant bits survive (a monitor cannot display
 * more than 8 bits per channel anyway).
 *
 * @param img_out     destination located in JVM memory space (width*height jints)
 * @param img_in      source image produced by the color pipeline
 * @param is_color    non-zero for RGB data, 0 for monochrome
 * @param height      image height in number of pixels
 * @param width       image width in number of pixels
 * @param bit_channel bits per color channel (8 or 12; others unsupported)
 */
static void pipe2jint(jint *img_out, void *__restrict__ img_in, const int is_color, const int height, const int width, const int bit_channel) {
    const int npix = height * width;
    const uint8_t *src8 = (const uint8_t *)img_in;
    const uint16_t *src16 = (const uint16_t *)img_in;
    int i, r, g, b, v;

    if (is_color) {
        if (bit_channel == 8) {
            /* color, 8 bit per channel: three consecutive bytes per pixel */
            for (i = 0; i < npix; i++) {
                r = src8[3 * i];
                g = src8[3 * i + 1];
                b = src8[3 * i + 2];
                img_out[i] = (r << 16) | (g << 8) | b;
            }
        }
        else if (bit_channel == 12) {
            /* color, 12 bit per channel: three uint16 per pixel, drop 4 LSBs */
            for (i = 0; i < npix; i++) {
                r = src16[3 * i] >> 4;
                g = src16[3 * i + 1] >> 4;
                b = src16[3 * i + 2] >> 4;
                img_out[i] = (r << 16) | (g << 8) | b;
            }
        }
        else {
            printf("%s: Can't display image with %d bits per color channel. Do implement it here!\n", __func__, bit_channel);
            return;
        }
    }
    else {
        if (bit_channel == 8) {
            /* monochrome, 8 bit: replicate the gray value into R, G and B */
            for (i = 0; i < npix; i++) {
                v = src8[i];
                img_out[i] = (v << 16) | (v << 8) | v;
            }
        }
        else if (bit_channel == 12) {
            /* monochrome, 12 bit: cut off the 4 LSBs, then replicate */
            for (i = 0; i < npix; i++) {
                v = (src16[i] >> 4) & 0xff;
                img_out[i] = (v << 16) | (v << 8) | v;
            }
        }
        else {
            printf("%s: Can't display image with %d bits per color channel. Do implement it here!\n", __func__, bit_channel);
            return;
        }
    }
}
/**
 * \brief Video handler (libo3000 callback)
 *
 * Runs the raw frame through the color pipeline, copies the result into the
 * cached Java int[] buffer and then notifies Java by calling
 * Camo3000JNI.video_handler(width, height, format).
 *
 * \param id         camera id reported by the driver — ignored (see note)
 * \param *buf       Pointer to the raw video data
 * \param img_header Information about the image data
 *
 * NOTE(review): everything used here (env_cached, obj_cached, viddata_chached,
 * color_pipe) is a single set of globals shared by all sessions; with more
 * than one Camo3000JNI instance all frames land in the last-initialized
 * instance's buffer — a likely cause of the black snapshots reported when
 * running several jni objects in one process.
 */
static __stdcall void video_handling(int id, unsigned char* buf, struct img_header_t* img_header) {
    jint *img; // image array located into java virtual machine memory space

    // Pin (or copy) the Java array. Between Get... and Release... no other
    // JNI calls may be made on this thread (critical section).
    img = (*env_cached)->GetPrimitiveArrayCritical(env_cached,viddata_chached,0);
    if (img == NULL){
        return;
    }
    /*
     * process image at color pipe and copy image to JVM memory space
     */
    color_pipe_process(color_pipe, buf, img_header);
    pipe2jint(img, color_pipe->img_out, color_pipe->is_color, color_pipe->height, color_pipe->width, color_pipe->bit_channel);
    (*env_cached)->ReleasePrimitiveArrayCritical( env_cached,
    viddata_chached,
    img,
    0); // mode 0: copy back changes (if any) and un-pin
    // calls java methid "video_handler"
    (*env_cached)->CallVoidMethod( env_cached,
    obj_cached,
    video_handler_id,
    color_pipe->width,
    color_pipe->height,
    img_header->format);
}
/**
 * \brief Log handler (libo3000 callback)
 *
 * Forwards a driver log line to the Java method
 * Camo3000JNI.log_handler(String).
 *
 * \param id   camera id reported by the driver (unused)
 * \param *msg Pointer to the log message
 */
static __stdcall void log_handling(int id, char* msg) {
    jstring jmsg;

    /* No cached JNIEnv yet: nothing to deliver the message to. */
    if (env_cached == NULL) {
        return;
    }
    jmsg = (*env_cached)->NewStringUTF(env_cached, msg);
    if (jmsg == NULL) {
        return; /* out of memory */
    }
    /* invoke Java method "log_handler" */
    (*env_cached)->CallVoidMethod(env_cached, obj_cached, log_handler_id, jmsg);
}
/**
 * \brief Java native call sendXMLcmd(int session, String xmlCmd)
 *
 * Sends the XML string passed from Java to the connected camera.
 *
 * \param *env    Pointer to java env (JVM), automatically supplied by JNI
 * \param obj     Calling java object, automatically supplied by JNI
 * \param session Session to use
 * \param jstr    Message to send
 *
 * \return result of o3000_send_xml(), or -1 on error / no connection
 */
JNIEXPORT jint JNICALL
Java_o3000_Camo3000JNI_sendXMLcmd(JNIEnv *env, jobject obj, jint session, jstring jstr){
    const char *str;
    int len, ret;

    /* refuse to send while no camera connection is established */
    if(flag_connection == FALSE) {
        printf("%s: no connection\n", __func__);
        return -1;
    }
    str = (*env)->GetStringUTFChars(env, jstr, NULL);
    if (str == NULL){
        return -1; /* OutOfMemoryError already pending in the JVM */
    }
    /* BUGFIX: the original re-checked 'str' for NULL here (dead copy-paste
     * code); had it ever fired it would also have leaked the UTF chars.
     * GetStringUTFLength just returns the length in modified-UTF-8 bytes,
     * matching the buffer obtained above — no error check needed. */
    len = (*env)->GetStringUTFLength(env, jstr);

    /* libo3000 function call */
    ret = o3000_send_xml(session, str, len);
    (*env)->ReleaseStringUTFChars(env, jstr, str);
    return ((jint)ret);
}
/**
* \brief Java native call initCamDriver(int vid, int pid, int[] video_data_array);
*
* Initialises the camera with all necessary information.
*
* \param *env Pointer to log java env (JVM), automatically generated by jni
* \param obj Pointer to log java object, automaticall generated by jni
* \param vid Vendor ID (USB purpose)
* \param pid Product ID (USB purpose)
* \param video_data_array Reference to java array for video handling function (callback address)
*/
JNIEXPORT jint JNICALL
Java_o3000_Camo3000JNI_initCamDriver(JNIEnv *env, jobject obj, jint vid, jint pid, jintArray viddata){
int cam_session, num_camera = -1;
env_cached = NULL;
jclass cls = (*env)->GetObjectClass(env,obj);
// make object globally available
obj_cached = (*env)->NewGlobalRef(env,obj);
if (obj_cached == NULL){
return -1;
}
// make array globally available
viddata_chached = (*env)->NewGlobalRef(env,viddata);
if (viddata_chached == NULL){
return -1;
}
// look for "xml_handler" Java method
jmethodID xmlhand = (*env)->GetMethodID(env, cls, "xml_handler", "(Ljava/lang/String;I)V");
if (xmlhand == NULL){
return -1; /* Method not found */
}
xml_handler_id = xmlhand;
// look for "video_handler" Java method
jmethodID vidhand = (*env)->GetMethodID(env, cls, "video_handler", "(III)V");
if (vidhand == NULL){
return -1; /* Method not found */
}
video_handler_id = vidhand;
// look for "log_handler" Java method
jmethodID loghand = (*env)->GetMethodID(env, cls, "log_handler", "(Ljava/lang/String;)V");
if (loghand == NULL){
return -1; /* Method not found */
}
log_handler_id = loghand;
// setup camera session
cam_session = o3000_init(vid, pid, 1024*1024*4, xml_handling, video_handling, log_handling, O3000_LOG_INFO);
if(cam_session < 0) {
printf("%s: Error opening new camera session (code %d)\n", __func__, cam_session);
return -1;
}
num_camera = o3000_device_discovery(cam_session);
if(num_camera < 0) {
printf("%s: device discovery error (code %d)\n", __func__, num_camera);
o3000_exit(cam_session);
cam_session = -1;
return -1;
}
if(num_camera == 0) {
// printf("%s: no camera connected to the system\n", __func__);
o3000_exit(cam_session);
cam_session = -1;
return -1;
}
flag_connection = TRUE;
return cam_session;
}
/**
 * \brief Java native call availableCameraCount(int session)
 *
 * Returns the number of cameras currently visible to the driver.
 *
 * \param *env    Pointer to java env (JVM), automatically supplied by JNI
 * \param obj     Calling java object, automatically supplied by JNI
 * \param session Session to use
 *
 * \return < 0 on device discovery error, 0 if no camera is connected,
 *         otherwise the number of connected cameras
 */
JNIEXPORT jint JNICALL
Java_o3000_Camo3000JNI_availableCameraCount(JNIEnv *env, jobject obj, jint session){
    int count = o3000_device_discovery(session);
    return (jint)count;
}
/**
 * \brief Java native call connectCamDriver(int session, int cameraNumber)
 *
 * Opens the color pipeline and connects to the given O-3000 camera. This call
 * BLOCKS inside o3000_connect() until the camera is disconnected (manually or
 * by the application); afterwards the session and the pipeline are torn down.
 *
 * \param *env      Pointer to java env (JVM), automatically supplied by JNI
 * \param obj       Calling java object, automatically supplied by JNI
 * \param session   Session to use
 * \param camNumber camera index to connect to
 *
 * \return result of o3000_connect(), or -1 on setup error
 */
JNIEXPORT jint JNICALL
Java_o3000_Camo3000JNI_connectCamDriver(JNIEnv *env, jobject obj, jint session, jint camNumber){
    int ret, num_camera = -1;

    if(session < 0) {
        printf("%s: camera not initialized\n", __func__);
        return -1;
    }
    /* Cache the JNIEnv of THIS thread: the driver callbacks fired while
     * o3000_connect() blocks use it, and a JNIEnv is only valid on the
     * thread it belongs to. */
    env_cached = env;

    /* setup color image processing pipeline.
     * NOTE: all optional stages (white balancing, lens/color correction,
     * sharpening, gamma) stay disabled to save CPU time; ideally they would
     * be configurable from the Java side. */
    if(color_pipe_open(&color_pipe, IMAGE_FRAME_HEIGHT, IMAGE_FRAME_WIDTH, 12)) {
        printf("%s: Initializing color pipeline failed\n", __func__);
        return -1;
    }
    num_camera = o3000_device_discovery(session);
    if(num_camera < 0) {
        printf("%s: device discovery error (code %d)\n", __func__, num_camera);
        /* BUGFIX: close the pipeline opened above instead of leaking it */
        color_pipe_close(color_pipe);
        color_pipe = NULL;
        o3000_exit(session);
        return -1;
    }
    if(num_camera == 0) {
        /* no camera connected to the system */
        color_pipe_close(color_pipe);
        color_pipe = NULL;
        o3000_exit(session);
        return -1;
    }
    /* BUGFIX: this printf referenced the undeclared identifier 'currentCam'
     * (a compile error); the parameter is called camNumber. */
    printf("%s: Trying %d\n", __func__, camNumber);
    /* blocks here for the whole lifetime of the connection */
    ret = o3000_connect(session, camNumber, xml_msg_default, strlen(xml_msg_default));
    if(ret != 0){
        /* e.g. O3000_ERROR_BUSY (-6) when another process holds the device */
        printf("%s: device %d is already in use\n", __func__, session);
    }
    /* connection ended: destroy session and pipeline */
    o3000_exit(session);
    if(color_pipe != NULL) {
        color_pipe_close(color_pipe);
        color_pipe = NULL;
    }
    flag_connection = FALSE;
    return ret;
}
/**
 * \brief Java native call stopCamDriver(int session)
 *
 * De-initializes the camera driver session and terminates its connection
 * (this also makes a blocking o3000_connect() for that session return).
 *
 * NOTE(review): the cached global references (obj_cached, viddata_chached)
 * are NOT released here — they stay alive until the JVM exits.
 *
 * \param *env    Pointer to java env (JVM), automatically supplied by JNI
 * \param obj     Calling java object, automatically supplied by JNI
 * \param session Session to use
 */
JNIEXPORT void JNICALL
Java_o3000_Camo3000JNI_stopCamDriver(JNIEnv *env, jobject obj, jint session){
    o3000_exit(session);
}