Recently I've been working on an Android project that needs to connect a USB camera to a device running Android 4.4.4 and use it for face recognition.

So, as the "backbone" of the Android team (allow me a little bragging ^-^), I set out on the USB camera campaign, digging through forum posts and hunting for blogs,

until my eyes finally landed on one post (thank you!):

http://blog.csdn.net/sukhoi27smk/article/details/18269097

I downloaded its source code and began my USB camera driver journey.

The library files I put together are available for download; click to view.

First, set up the environment on Ubuntu Linux: Android Studio and the NDK. If you want a more comfortable way to write C code, you can install Qt Creator (my personal recommendation, but anything works; gedit is perfectly serviceable, for example).

Installing the software and configuring the environment hardly needs explaining here... there are countless guides online that will get you through it.

Next comes reworking the code.

Open ImageProc.c under the jni folder and rename the JNI interface functions so that they point to CameraPreview.java.

Mine looks like the following after the change; rename the other JNI functions the same way (and don't forget to keep the function names in the .h file in sync...):

void Java_com_mojsoft_usbcamera_view_CameraPreview_pixeltobmp(JNIEnv* env, jobject thiz, jobject bitmap)
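As a reminder of the naming rule: a JNI function name is Java_ plus the Java package (dots replaced by underscores), then the class name, then the method name. So the C function above is resolved from this native declaration inside CameraPreview.java (package com.mojsoft.usbcamera.view):

// CameraPreview.java, package com.mojsoft.usbcamera.view
// maps to the C function Java_com_mojsoft_usbcamera_view_CameraPreview_pixeltobmp
public native void pixeltobmp(Bitmap bitmap);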

Press Ctrl+Alt+T to open a terminal, cd into your jni directory, and run ndk-build; the build succeeds.

Run cd .. to step back out of the jni directory; ls now shows two directories, libs and obj. Copy libImageProc.so from libs into the jniLibs directory of your project (Android Studio).

Next is the Java side [note: the Java package name must match the naming format of the functions in the C file].

Put the CameraPreview view into your layout, build, and run...
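If you would rather attach the view from code than from an XML layout, a minimal sketch of an Activity wiring it up could look like the following. PreviewActivity and the frame handling are my own illustration, and the package of AlreadyRGBListener is assumed; only CameraPreview and its methods come from the project below.

import android.app.Activity;
import android.os.Bundle;

import com.mojsoft.usbcamera.view.CameraPreview;
// assumption: the listener interface lives next to CameraPreview
import com.mojsoft.usbcamera.view.AlreadyRGBListener;

public class PreviewActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // create the preview view and make it the activity's content
        CameraPreview preview = new CameraPreview(this);
        setContentView(preview);

        // receive each frame as an RGB byte array, e.g. to hand to a face detector
        preview.setGetBitmapListener(new AlreadyRGBListener() {
            @Override
            public void alreadyRGB(byte[] rgb) {
                // process the frame here
            }
        });
    }
}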

Then it's just a matter of staring at the screen...

And there it is, a pleasant surprise: problem solved, haha.

Below is the main CameraPreview class:

package com.mojsoft.usbcamera.view;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, Runnable {

    private static final boolean DEBUG = true;
    private static final String TAG = "WebCam";

    protected Context context;
    private SurfaceHolder holder;
    Thread mainLoop = null;
    private Bitmap bmp = null;
    private byte[] byteArrary;

    private boolean cameraExists = false;
    private boolean shouldStop = false;

    // /dev/videox (x=cameraId+cameraBase) is used.
    // In some omap devices, system uses /dev/video[0-3],
    // so users must use /dev/video[4-].
    // In such a case, try cameraId=0 and cameraBase=4
    private int cameraId = 0;
    private int cameraBase = 0;

    // This definition also exists in ImageProc.h.
    // Webcam must support the resolution 640x480 with YUYV format.
    static final int IMG_WIDTH = 640;
    static final int IMG_HEIGHT = 480;

    // The following variables are used to draw camera images.
    private int winWidth = 0;
    private int winHeight = 0;
    private Rect rect;
    private int dw, dh;
    private float rate;

    private boolean isFramAready = false;
    private AlreadyRGBListener listener;

    // JNI functions
    public native int prepareCamera(int videoid);
    public native int prepareCameraWithBase(int videoid, int camerabase);
    public native void processCamera();
    public native void stopCamera();
    public native void pixeltobmp(Bitmap bitmap);
    public native byte[] getRgb();

    static {
        System.loadLibrary("ImageProc");
    }

    public CameraPreview(Context context) {
        super(context);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    public CameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.context = context;
        if (DEBUG) Log.d(TAG, "CameraPreview constructed");
        setFocusable(true);

        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    @Override
    public void run() {
        while (cameraExists) {
            // obtaining display area to draw a large image
            if (winWidth == 0) {
                winWidth = this.getWidth();
                winHeight = this.getHeight();

                if (winWidth * 3 / 4 <= winHeight) {
                    dw = 0;
                    dh = (winHeight - winWidth * 3 / 4) / 2;
                    rate = ((float) winWidth) / IMG_WIDTH;
                    rect = new Rect(dw, dh, dw + winWidth - 1, dh + winWidth * 3 / 4 - 1);
                } else {
                    dw = (winWidth - winHeight * 4 / 3) / 2;
                    dh = 0;
                    rate = ((float) winHeight) / IMG_HEIGHT;
                    rect = new Rect(dw, dh, dw + winHeight * 4 / 3 - 1, dh + winHeight - 1);
                }
            }

            // obtaining a camera image (pixel data are stored in an array in JNI).
            processCamera();
            // camera image to bmp
            pixeltobmp(bmp);
            byte[] arr = getRgb();
            isFramAready = true;

            Canvas canvas = getHolder().lockCanvas();
            if (canvas != null) {
                // draw camera bmp on canvas
                canvas.drawBitmap(bmp, null, rect, null);
                getHolder().unlockCanvasAndPost(canvas);
            }

            // hand the RGB frame to the listener, if one has been set
            if (listener != null) {
                listener.alreadyRGB(arr);
            }

            if (shouldStop) {
                shouldStop = false;
                break;
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceCreated");
        if (bmp == null) {
            bmp = Bitmap.createBitmap(IMG_WIDTH, IMG_HEIGHT, Bitmap.Config.ARGB_8888);
        }
        if (byteArrary == null) {
            byteArrary = new byte[IMG_WIDTH * IMG_HEIGHT];
        }
        // /dev/videox (x=cameraId + cameraBase) is used
        // The commented-out line below is the original code. It failed on my device,
        // so I changed it to prepareCamera(0), which then opened the camera successfully.
        // int ret = prepareCameraWithBase(cameraId, cameraBase);
        int ret = prepareCamera(0);

        if (ret != -1) cameraExists = true;

        mainLoop = new Thread(this);
        mainLoop.start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        if (DEBUG) Log.d(TAG, "surfaceChanged");
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (DEBUG) Log.d(TAG, "surfaceDestroyed");
        if (cameraExists) {
            shouldStop = true;
            while (shouldStop) {
                try {
                    Thread.sleep(50); // wait for thread stopping
                } catch (Exception e) {
                    // ignore and keep waiting
                }
            }
        }
        stopCamera();
    }

    public void setGetBitmapListener(AlreadyRGBListener listener) {
        this.listener = listener;
    }
}
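The AlreadyRGBListener callback referenced above is not shown in the original post. Based on how it is used (listener.alreadyRGB(arr) with a byte[] frame), it is presumably a one-method interface along these lines; the name and package are taken from the class above, but this reconstruction is my own.

// Hypothetical reconstruction of the callback interface used by CameraPreview;
// the original project defines it elsewhere.
public interface AlreadyRGBListener {
    // called once per frame with the RGB data returned by the native getRgb()
    void alreadyRGB(byte[] rgb);
}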
