硬件:

  • RK3399开发板 x1
  • unoR3开发板 x1
  • UVC摄像头 x1
  • TB6612fng电机驱动电路 x1
  • 直流蜗杆电机 x2

第三方库(OpenCV、CH34XUARTDriver,导入方法见下文)

开发工具及环境

  • Android Studio 4.1.2
  • Arduino 1.8.9
  • JDK 1.8
  • NDK 21.1.6352462

unoR3引脚定义如下

#define PWMA    5   //Motor1 PWM (duty-cycle) pin
#define PWMB    6   //Motor2 PWM (duty-cycle) pin
#define STBY    10  //TB6612FNG standby pin
#define AIN1    4   //Motor1 IN1 pin
#define AIN2    7   //Motor1 IN2 pin
#define BIN1    8   //Motor2 IN1 pin
#define BIN2    9   //Motor2 IN2 pin
//Motor1 pans the head left/right
//Motor2 tilts the head up/down

OpenCV的导入

太麻烦了,不想截图,参考这位大佬的文章

这里面有一个坑,不知道别人有没有遇到。我的Android项目目录中含有空格,导致OpenCV的导入始终不成功。

CH34XUARTDriver的导入

这个简单,下载下来后将lib目录下的CH34XUARTDriver.jar文件复制到项目libs目录下,然后右键点击Add As Library后即可。

主要代码

unoR3 Arduino
#define PWMA    5   //Motor1 PWM (duty-cycle) pin
#define PWMB    6   //Motor2 PWM (duty-cycle) pin
#define STBY    10  //TB6612FNG standby pin; held high during normal operation
#define AIN1    4   //Motor1 IN1 pin
#define AIN2    7   //Motor1 IN2 pin
#define BIN1    8   //Motor2 IN1 pin
#define BIN2    9   //Motor2 IN2 pin


// Set by serialEvent() once a complete '\n'-terminated command has arrived.
boolean str = false;
// Receive buffer for the command string sent by the Android host.
String IOFlag = "";
void setup() {
  Serial.begin(115200);
  IOFlag.reserve(200);
  Serial.println("Devices Init Successfully!!");
  pinMode(4, OUTPUT);
  pinMode(5, OUTPUT);
  pinMode(6, OUTPUT);
  pinMode(7, OUTPUT);
  pinMode(8, OUTPUT);
  pinMode(9, OUTPUT);
  pinMode(10, OUTPUT);
}

// Main loop: when serialEvent() has flagged a complete command line,
// dispatch it to the motor controller and reset the receive state.
// Fix: removed the unused local `len`.
void loop() {
  if (str) {
    checkMotor(IOFlag);  // act on the received command
    IOFlag = "";         // clear the buffer for the next command
    str = false;
  }
}
// Take the TB6612FNG out of standby (STBY high enables both H-bridges).
void startMotor() {
  digitalWrite(STBY, HIGH);
}
// Put the TB6612FNG into standby (STBY low disables both H-bridges,
// stopping both motors).
void stopMotor() {
  digitalWrite(STBY, LOW);
}
// Drive Motor1 in the "forward" direction (AIN1 high, AIN2 low) at
// PWM duty 200/255, waking the driver out of standby first.
// Motor1 moves the head horizontally (command "0100").
void z_motor1() {
  startMotor();
  digitalWrite(AIN2, LOW);
  digitalWrite(AIN1, HIGH);
  analogWrite(PWMA, 200);
}
// Drive Motor2 in the "forward" direction (BIN1 high, BIN2 low) at
// PWM duty 200/255, waking the driver out of standby first.
// Motor2 moves the head vertically (command "1000").
void z_motor2() {
  startMotor();
  digitalWrite(BIN2, LOW);
  digitalWrite(BIN1, HIGH);
  analogWrite(PWMB, 200);
}
// Drive Motor1 in the "reverse" direction (AIN2 high, AIN1 low) at
// PWM duty 200/255 — the opposite rotation of z_motor1() (command "0101").
void f_motor1() {
  startMotor();
  digitalWrite(AIN1, LOW);
  digitalWrite(AIN2, HIGH);
  analogWrite(PWMA, 200);
}
// Drive Motor2 in the "reverse" direction (BIN2 high, BIN1 low) at
// PWM duty 200/255 — the opposite rotation of z_motor2() (command "1001").
void f_motor2() {
  startMotor();
  digitalWrite(BIN1, LOW);
  digitalWrite(BIN2, HIGH);
  analogWrite(PWMB, 200);
}
// Dispatch a command string received from the Android host:
//   "0100" -> Motor1 forward    "0101" -> Motor1 reverse   (horizontal)
//   "1000" -> Motor2 forward    "1001" -> Motor2 reverse   (vertical)
//   "stopMotor" -> put the driver into standby
// Each recognised motion command is echoed back over serial for debugging.
// Fix: the commands are mutually exclusive, so use an else-if chain instead
// of five independent ifs (no redundant comparisons after a match).
void checkMotor(String IOString) {
  if (IOString == "0100") {
    Serial.println("0100");
    z_motor1();
  } else if (IOString == "0101") {
    Serial.println("0101");
    f_motor1();
  } else if (IOString == "1000") {
    Serial.println("1000");
    z_motor2();
  } else if (IOString == "1001") {
    Serial.println("1001");
    f_motor2();
  } else if (IOString == "stopMotor") {
    stopMotor();
  }
  // Unknown commands are silently ignored, as before.
}
// Called by the Arduino runtime between loop() iterations whenever serial
// data is pending. Accumulates characters into IOFlag until '\n' arrives,
// then raises `str` so loop() processes the completed command.
// Fix: the two conditions are exact opposites, so use if/else rather than
// testing the same character twice. The terminator itself is not stored.
void serialEvent() {
  while (Serial.available()) {
    char inChar = (char)Serial.read();
    if (inChar == '\n') {
      str = true;
    } else {
      IOFlag += inChar;
    }
  }
}
Android Studio
MainActivity
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceView;
import android.view.WindowManager;

import com.xuye.openeye.Head.Compensate;

import org.jetbrains.annotations.NotNull;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;


/**
 * Main screen: streams frames from the camera through OpenCV's camera view,
 * runs LBP face detection on every frame, draws a rectangle around the
 * largest detected face, and hands its coordinates to {@link Compensate},
 * which drives the pan/tilt motors over the CH340 serial link.
 */
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {


    // Used to load the 'native-lib' library on application startup.
    static {
        System.loadLibrary("native-lib");
    }
    /**
     * A native method that is implemented by the 'native-lib' native library,
     * which is packaged with this application.
     */
    //public native String stringFromJNI();
    private CameraBridgeViewBase mCVCamera;
    private CascadeClassifier cascadeClassifier; // OpenCV face detector (LBP cascade)

    // Scratch Mat each frame is colour-converted into before detection.
    // NOTE(review): despite the name, cvtColor writes an RGB image here, not grayscale.
    private Mat grayscaleImage;
    // Minimum face size passed to detectMultiScale: 20% of the frame height.
    private int absoluteFaceSize;

    private static final String TAG = "OCV/BaseLoaderCallback";

    private final int REQUEST_CAMERA_PERMISSION = 0x13;
    /**
     * Async OpenCV initialisation callback (OpenCV Manager service): enables
     * the camera preview once OpenCV reports a successful load.
     */
    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                Log.i(TAG, "OpenCV loaded successfully");
                mCVCamera.enableView();
            } else {
                super.onManagerConnected(status);
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);

        // Request the camera permission at runtime if not yet granted.
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
        }
        mCVCamera = findViewById(R.id.camera_view);
        mCVCamera.setCameraPermissionGranted();

//        mCVCamera.setCameraIndex(1);
        mCVCamera.enableFpsMeter();
        mCVCamera.setVisibility(SurfaceView.VISIBLE);
        mCVCamera.setCvCameraViewListener(this);

        Log.d(TAG, "onCreate: " + OpenCVLoader.OPENCV_VERSION);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NotNull String[] permissions, @NotNull int[] grantResults) {
        // Fix: forward to super so other components (fragments etc.) still
        // receive permission results they requested.
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // Camera permission granted: camera operations may proceed.
                mCVCamera.setCameraPermissionGranted();
                Log.d(TAG, "onRequestPermissionsResult: 摄像头权限已授权,可以进行摄像头相关操作  ");
            } else {
                // Camera permission denied: camera operations are unavailable.
                Log.d(TAG, "onRequestPermissionsResult: 摄像头权限被拒绝,无法进行摄像头相关操作 ");
            }
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        // Try the statically-linked OpenCV first; fall back is just a log.
        boolean flag = OpenCVLoader.initDebug();
        if (!flag) {
            Log.d(TAG, "OpenCV library not found!");
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            initializeOpenCVDependencies();
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        grayscaleImage = new Mat(height, width, CvType.CV_8UC4);
        absoluteFaceSize = (int) (height * 0.2);
    }

    @Override
    public void onCameraViewStopped() {

    }

    /**
     * Per-frame callback: mirrors the frame, detects faces, keeps the widest
     * one, draws it, and starts a {@link Compensate} thread with its corners.
     * NOTE(review): a new thread is spawned for every frame containing a
     * face — consider throttling or a single worker if this proves heavy.
     */
    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat aInputFrame = inputFrame.rgba();
        Core.flip(aInputFrame, aInputFrame, 1); // mirror horizontally
        Imgproc.cvtColor(aInputFrame, grayscaleImage, Imgproc.COLOR_RGBA2RGB);
        MatOfRect faces = new MatOfRect();
        if (cascadeClassifier != null) {
            cascadeClassifier.detectMultiScale(grayscaleImage, faces, 1.1, 3, 2,
                    new Size(absoluteFaceSize, absoluteFaceSize), new Size());
        }
        Rect[] facesArray = faces.toArray();
        // Keep only the widest detected face: mRectFace starts as an empty
        // rect (width 0), and any wider detection replaces it.
        Rect mRectFace = new Rect();
        for (Rect rect : facesArray) {
            // Width of the candidate face.
            double f = rect.br().x - rect.tl().x;
            // Width of the widest face seen so far.
            double _f = mRectFace.br().x - mRectFace.tl().x;
            if (_f < f) {
                mRectFace = rect;
            }
        }
        Imgproc.rectangle(aInputFrame, mRectFace.tl(), mRectFace.br(), new Scalar(0, 255, 0, 255), 3);
        // No face detected (rect still all zeros): return the frame unchanged.
        if (mRectFace.tl().x == 0 && mRectFace.tl().y == 0 && mRectFace.br().x == 0 && mRectFace.br().y == 0)
            return aInputFrame;
        int width = aInputFrame.width();
        int height = aInputFrame.height();
        Point br = mRectFace.br();
        Point tl = mRectFace.tl();
        Log.d(TAG, "onCameraFrame: facesArray x:" + mRectFace.x + ".facesArray y" + mRectFace.y);
        Map<String, Object> map = new HashMap<>();
        map.put("width", width);
        map.put("height", height);
        map.put("br", br);
        map.put("tl", tl);
        Compensate mCompensate = new Compensate(map, getApplication(), this);
        mCompensate.start();
        return aInputFrame;
    }

    /*
     ** Initialise the OpenCV face-detection model: copy the LBP cascade from
     ** res/raw into app-private storage (CascadeClassifier needs a real file
     ** path) and construct the classifier from it.
     */
    private void initializeOpenCVDependencies() {
        try {
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            File mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
            // Fix: try-with-resources so both streams are closed even if the
            // copy fails part-way (the original leaked them on exception).
            try (InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
                 FileOutputStream os = new FileOutputStream(mCascadeFile)) {
                byte[] buffer = new byte[4096];
                int bytesRead;
                while ((bytesRead = is.read(buffer)) != -1) {
                    os.write(buffer, 0, bytesRead);
                }
            }
            cascadeClassifier = new CascadeClassifier(mCascadeFile.getAbsolutePath());
        } catch (Exception e) {
            Log.e("OpenCVActivity", "Error loading cascade", e);
        }
        mCVCamera.enableView();
    }
}
头部位置补偿类
import android.app.Application;
import android.content.Context;
import android.util.Log;

import com.xuye.openeye.Serial.Ch34;

import org.opencv.core.Point;

import java.util.Map;

/**
 * One-shot worker thread: compares the detected face rectangle against the
 * centre of the frame and, if the face is off-centre by more than 100 px on
 * either axis, sends the matching motor command to the Arduino via
 * {@link Ch34}. Commands: "0100"/"0101" horizontal right/left,
 * "1000"/"1001" vertical down/up, "stopMotor" when already centred.
 */
public class Compensate extends Thread {
    private static final String TAG = "Head Compensate";
    // Top-left corner of the detected face rectangle.
    // (Fix: the original comments mislabelled tl/br as top-right/bottom-left.)
    private final Point tl;
    // Bottom-right corner of the detected face rectangle.
    private final Point br;
    private final int width;   // frame width in pixels
    private final int height;  // frame height in pixels
    private final Application application;
    private final Context context;

    /**
     * @param map expects keys "tl"/"br" (org.opencv.core.Point) and
     *            "width"/"height" (Integer) describing the face box and frame
     *            — assembled in MainActivity.onCameraFrame.
     */
    public Compensate(Map<String, Object> map, Application application, Context context) {
        this.br = (Point) map.get("br");
        this.tl = (Point) map.get("tl");
        this.height = (int) map.get("height");
        this.width = (int) map.get("width");
        this.application = application;
        this.context = context;
    }

    @Override
    public void run() {
        super.run();
        // Top-left corner coordinates.
        double tlx = tl.x;
        double tly = tl.y;
        // Bottom-right corner coordinates.
        double brx = br.x;
        double bry = br.y;

        // x position the face's top-left corner would have if centred.
        double f_x = (width - (brx - tlx)) / 2;
        // y position the face's top-left corner would have if centred.
        double f_y = (height - (bry - tly)) / 2;

        // Actual face x coordinate (top-left corner).
        double s_f_x = tl.x;
        // Actual face y coordinate (top-left corner).
        double s_f_y = tl.y;

        Log.d(TAG, "run: 人脸横坐标基准位置:" + f_x);
        Log.d(TAG, "run: 实际人脸横坐标:" + s_f_x);
        String msg = "stopMotor"; // default: face centred, stop the motors
        if (f_x - s_f_x > 100) {
            Log.d(TAG, "HeadCompensate: " + "横坐标基准位置大于实际横坐标,执行横坐标位置向右补偿" + (f_x - s_f_x));
            // Command the motor driver to compensate rightwards.
            msg = "0100";
        }
        if (s_f_x - f_x > 100) {
            // Fix: message previously claimed the baseline was the larger value.
            Log.d(TAG, "HeadCompensate: " + "实际横坐标大于横坐标基准位置,执行横坐标位置向左补偿" + (s_f_x - f_x));
            // Command the motor driver to compensate leftwards.
            msg = "0101";
        }
        Log.d(TAG, "run: 人脸纵坐标基准位置:" + f_y);
        Log.d(TAG, "run: 实际人脸纵坐标:" + s_f_y);
        if (f_y - s_f_y > 100) {
            // Fix: message previously said "横坐标" (x-axis) for this vertical check.
            Log.d(TAG, "HeadCompensate: " + "纵坐标基准位置大于实际纵坐标,执行纵坐标位置向下补偿" + (f_y - s_f_y));
            // Command the motor driver to compensate downwards.
            msg = "1000";
        }
        if (s_f_y - f_y > 100) {
            Log.d(TAG, "HeadCompensate: " + "实际纵坐标大于纵坐标基准位置,执行纵坐标位置向上补偿" + (s_f_y - f_y));
            // Command the motor driver to compensate upwards.
            msg = "1001";
        }
        // NOTE(review): when both axes are off-centre, the vertical command
        // overwrites the horizontal one — only one command is sent per frame.
        Ch34 ch34 = new Ch34(application, context, msg);
        ch34.start();
    }
}
CH340串口通信类
  • 这里我在网上查了很多资料,说的都与我下载的jar包不一致,可能是官方更新了新的jar包。最后再吐槽一下官方的说明文档:只有各方法的说明,没有说明调用流程,我连蒙带猜总算实现了上位机与下位机之间的通信,有没有bug还有待测试。
import android.app.Application;
import android.content.Context;
import android.hardware.usb.UsbDevice;
import android.util.Log;

import com.xuye.openeye.APP;

import java.nio.charset.StandardCharsets;

import cn.wch.uartlib.WCHUARTManager;
import cn.wch.uartlib.callback.IDataCallback;

import static android.content.ContentValues.TAG;

/**
 * One-shot worker thread that sends a single command string to the Arduino
 * over the CH34x USB-serial adapter, configured as 115200 baud 8N1.
 */
public class Ch34 extends Thread {
    private final Application application;
    private final Context context;
    private final String msg; // command to send; '\n' terminator appended on send

    public Ch34(Application application, Context context, String msg) {
        this.application = application;
        this.context = context;
        this.msg = msg;
    }

    // Logs any bytes the Arduino echoes back, for debugging.
    private final IDataCallback callback = (i, bytes, i1) -> {
        Log.i(TAG, "onData: " + i);
        Log.i(TAG, "onData: " + i1);
        Log.i(TAG, "onData: " + new String(bytes));
    };

    /** Opens the first enumerated CH34x device and writes the command. */
    private void sendData() {
        // The trailing '\n' is mandatory: the Arduino's serialEvent() uses it
        // as the end-of-command marker.
        // Fix: encode once with an explicit charset and pass the byte count —
        // the original passed msg.length() (a char count, not a byte count)
        // and relied on the platform default charset.
        byte[] payload = (msg + "\n").getBytes(StandardCharsets.US_ASCII);
        APP.manager = WCHUARTManager.getInstance();
        APP.manager.init(application);
        try {
            UsbDevice device = APP.manager.enumDevice().get(0);
            APP.manager.requestPermission(context, device);
            if (!APP.manager.isConnected(device)) {
                APP.manager.openDevice(device);
            }
            // 115200 baud, 8 data bits, 1 stop bit, no parity, no flow control.
            APP.manager.setSerialParameter(device, 0, 115200, 8, 1, 0, false);
            APP.manager.registerDataCallback(device, callback);
            APP.manager.writeData(device, 0, payload, payload.length, 1000);
        } catch (Exception e) {
            // Best-effort send: a missing or unpermitted device only logs.
            e.printStackTrace();
        }
    }

    @Override
    public void run() {
        super.run();
        sendData();
    }
}
全局CH340Driver对象
import android.app.Application;

import cn.wch.uartlib.WCHUARTManager;


// Application subclass holding the process-wide CH34x UART manager handle,
// assigned by Ch34.sendData() before use.
// NOTE(review): a mutable public static field is fragile; consider
// initialising it once in onCreate() and exposing it via a getter.
public class APP extends Application {
    public static WCHUARTManager manager;
}
Logo

DAMO开发者矩阵,由阿里巴巴达摩院和中国互联网协会联合发起,致力于探讨最前沿的技术趋势与应用成果,搭建高质量的交流与分享平台,推动技术创新与产业应用链接,围绕“人工智能与新型计算”构建开放共享的开发者生态。

更多推荐