zl程序教程

您现在的位置是:首页 >  其它

当前栏目

JavaCPP初体验

初体验
2023-09-14 09:01:36 时间

        Bytedeco通过提供使用共同开发的JavaCPP技术生成的即用型绑定,使本机库可用于Java平台。我们希望这是Java和C/C++之间缺失的桥梁,它将计算密集型科学、多媒体、计算机视觉、深度学习等带入Java平台。

 

一、初步了解

        官网:Bytedeco - Home

        支持非常多的类库啊,下图只是官网的一小部分,真是感觉幸福。

        使用方便:Maven网站里面 

        https://mvnrepository.com/search?q=javacpp-presets 

        https://mvnrepository.com/search?q=JavaCPP+Presets+Platform+For        

        找到添加依赖就可以用了。

        开发入门,及参考网址:

        javacv开发入门之Maven环境搭建_qi531621028的博客-CSDN博客_javacv maven

        CentOS 下通过 JavaCPP 调用 FFMPEG_jTeam的专栏-CSDN博客

        JavaCpp之FFMPEG实战教程 - 简书

        JavaCPP 技术使用经验总结_程序是有生命的精灵-CSDN博客_javacpp

二、试用FFMpeg

        新建spring boot项目,添加依赖如下:

<!-- JavaCPP presets "-platform" artifacts bundle prebuilt native binaries for
     every supported OS/arch, so no manual native-library setup is needed.
     Version scheme is "<native-library-version>-<javacpp-version>":
     here FFmpeg 4.1 / OpenCV 4.0.1 built against JavaCPP 1.4.4. -->
<dependency>
    <groupId>org.bytedeco.javacpp-presets</groupId>
	<artifactId>ffmpeg-platform</artifactId>
	<version>4.1-1.4.4</version>
</dependency>
<dependency>
    <groupId>org.bytedeco.javacpp-presets</groupId>
	<artifactId>opencv-platform</artifactId>
	<version>4.0.1-1.4.4</version>
</dependency>

        视频截取:

public int cutVideo(double from_seconds, double end_seconds, String in_filename, String out_filename){
		AVOutputFormat ofmt = null;
		AVFormatContext ifmt_ctx = new AVFormatContext(null);
		AVFormatContext ofmt_ctx = new AVFormatContext(null);
		AVPacket pkt = new AVPacket();
		int ret, i;

		try{
			av_register_all();

			if (avformat_open_input(ifmt_ctx, in_filename, null, null) < 0) {
				return -1;
			}

			if ((ret = avformat_find_stream_info(ifmt_ctx, (PointerPointer<Pointer>) null)) < 0) {
				return -1;
			}

			av_dump_format(ifmt_ctx, 0, in_filename, 0);

			avformat_alloc_output_context2(ofmt_ctx, null, null, out_filename);
			if (ofmt_ctx == null) {
				return -1;
			}

			ofmt = ofmt_ctx.oformat();

			for (i = 0; i < ifmt_ctx.nb_streams(); i++) {
				AVStream in_stream = ifmt_ctx.streams(i);
				AVStream out_stream = avformat_new_stream(ofmt_ctx, in_stream.codec().codec());
				if (out_stream == null) {
					return -1;
				}

				ret = avcodec_copy_context(out_stream.codec(), in_stream.codec());
				if (ret < 0) {
					return -1;
				}
				out_stream.codec().codec_tag(0);
				if ((ofmt_ctx.oformat().flags() & AVFMT_GLOBALHEADER) > 0)
					ofmt_ctx.oformat().flags(ofmt_ctx.oformat().flags() | AV_CODEC_FLAG_GLOBAL_HEADER) ;
			}
			av_dump_format(ofmt_ctx, 0, out_filename, 1);

			if ((ofmt.flags() & AVFMT_NOFILE) == 0) {
				AVIOContext pb = new AVIOContext();
				ret = avio_open(pb, out_filename, AVIO_FLAG_WRITE);
				if (ret < 0) {
					return -1;
				}
				ofmt_ctx.pb(pb);
			}

			ret = avformat_write_header(ofmt_ctx, (PointerPointer<Pointer>)null);
			if (ret < 0) {
				return -1;
			}

			//    int64_t start_from = 8*AV_TIME_BASE;
			ret = av_seek_frame(ifmt_ctx, -1, new Double(from_seconds*AV_TIME_BASE).longValue(), AVSEEK_FLAG_ANY);
			if (ret < 0) {
				return -1;
			}

//			int dts_start_from = ifmt_ctx.nb_streams();
//			av_memcpy_backptr(dts_start_from, 0, ifmt_ctx.nb_streams());
//			int64_t *pts_start_from = malloc(sizeof(int64_t) * ifmt_ctx->nb_streams);
//			memset(pts_start_from, 0, sizeof(int64_t) * ifmt_ctx->nb_streams);
			long[] dts_start_from = new long[ifmt_ctx.nb_streams()];
			Arrays.fill(dts_start_from, 0);
			long[] pts_start_from = new long[ifmt_ctx.nb_streams()];
			Arrays.fill(pts_start_from, 0);

			while (true) {
				AVStream in_stream, out_stream;

				ret = av_read_frame(ifmt_ctx, pkt);
				if (ret < 0)
					break;

				in_stream  = ifmt_ctx.streams(pkt.stream_index());
				out_stream = ofmt_ctx.streams(pkt.stream_index());

				//log_packet(ifmt_ctx, pkt, "in");

				if (av_q2d(in_stream.time_base()) * pkt.pts() > end_seconds) {
					av_free_packet(pkt);
					break;
				}

				if (dts_start_from[pkt.stream_index()] == 0) {
					dts_start_from[pkt.stream_index()] = pkt.dts();
					//printf("dts_start_from: %s\n", av_ts2str(dts_start_from[pkt.stream_index()]));
				}
				if (pts_start_from[pkt.stream_index()] == 0) {
					pts_start_from[pkt.stream_index()] = pkt.pts();
					//printf("pts_start_from: %s\n", av_ts2str(pts_start_from[pkt.stream_index]));
				}

				/* copy packet */
				pkt.pts(av_rescale_q_rnd(pkt.pts() - pts_start_from[pkt.stream_index()], in_stream.time_base(), out_stream.time_base(), AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
				pkt.dts(av_rescale_q_rnd(pkt.dts() - dts_start_from[pkt.stream_index()], in_stream.time_base(), out_stream.time_base(), AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
				if (pkt.pts() < 0) {
					pkt.pts(0);
				}
				if (pkt.dts() < 0) {
					pkt.dts(0);
				}
				pkt.duration((int)av_rescale_q((int)pkt.duration(), in_stream.time_base(), out_stream.time_base()));
				pkt.pos(-1);
				//log_packet(ofmt_ctx, &pkt, "out");
				//printf("\n");

				ret = av_interleaved_write_frame(ofmt_ctx, pkt);
				if (ret < 0) {
					//fprintf(stderr, "Error muxing packet\n");
					break;
				}
				av_free_packet(pkt);
			}

			dts_start_from = new long[0];
			pts_start_from = new long[0];

			av_write_trailer(ofmt_ctx);

			return 0;
		} finally {
			avformat_close_input(ifmt_ctx);

			/* close output */
			if (ofmt_ctx!=null && (ofmt.flags() & AVFMT_NOFILE)==0)
				avio_closep(ofmt_ctx.pb());
			avformat_free_context(ofmt_ctx);

//			if (ret < 0 && ret != AVERROR_EOF) {
//				fprintf(stderr, "Error occurred: %s\n", av_err2str(ret));
//				return 1;
//			}
		}

        查看视频信息:

/**
 * Prints basic information (total duration) about a media file.
 *
 * @param argc expected argument count; must be exactly 2
 * @param argv argv[0] is the program name, argv[1] the media file path
 * @return 0 on success, 1 on usage error, -1 if the file cannot be opened
 */
public int info(int argc, String[] argv){
		AVFormatContext fmt_ctx = avformat_alloc_context();
		AVDictionaryEntry tag = null;
		if (argc != 2) {
			// NOTE(review): "%s" is a printf-style placeholder; SLF4J loggers use
			// "{}" — confirm the logger type, argv[0] may not be substituted.
			log.error("usage: %s <input_file> example program to demonstrate the use of the libavformat metadata API.", argv[0]);
			return 1;
		}
		if (avformat_open_input(fmt_ctx, argv[1], null, null)!=0){
			return -1;
		}
		// duration() is in AV_TIME_BASE (microsecond) units, not seconds.
		log.info("视频时长:" + fmt_ctx.duration());
		//tag = av_dict_get(fmt_ctx.metadata(), "", tag, AV_DICT_IGNORE_SUFFIX);
//		while ((tag = av_dict_get(fmt_ctx.metadata(), "", tag, AV_DICT_IGNORE_SUFFIX))){
//			log.info("%s=%s\n", tag.key(), tag.value());
//		}
		avformat_close_input(fmt_ctx);
		return 0;
	}

        截取视频某一帧图片

/**
	 * Opens a video stream or file, optionally seeks to {@code time} seconds,
	 * decodes one frame, converts it to YUVJ420P and writes it out as a JPEG
	 * snapshot via saveImg().
	 *
	 * @param url video source address (file path or stream URL)
	 * @param out_file snapshot destination path
	 * @param time position in seconds, 0-based (0 or less = first frame)
	 * @author eguid
	 * @throws IOException declared for interface compatibility
	 * @return saveImg()'s result (>= 0) when a frame was written, -1 on failure
	 */
	public int openVideo(String url, String out_file, double time) throws IOException {
		avformat.AVFormatContext pFormatCtx = new avformat.AVFormatContext(null);
		int i, videoStream;
		avcodec.AVCodecContext pCodecCtx = null;
		avcodec.AVCodec pCodec = null;
		avutil.AVFrame pFrame = null;
		avcodec.AVPacket packet = new avcodec.AVPacket();
		avutil.AVDictionary optionsDict = null;
		avutil.AVFrame pFrameRGB = null;
		int numBytes;
		BytePointer buffer = null;
		swscale.SwsContext sws_ctx = null;

		// Open video file
		// NOTE(review): early returns below leak the contexts allocated so far;
		// a try/finally restructure would fix that but is out of scope here.
		if (avformat_open_input(pFormatCtx, url, null, null) != 0) {
			return -1; // Couldn't open file
		}

		// Retrieve stream information
		if (avformat_find_stream_info(pFormatCtx, (PointerPointer<Pointer>) null) < 0) {
			return -1; // Couldn't find stream information
		}

		// Dump information about the file onto standard error
		av_dump_format(pFormatCtx, 0, url, 0);
		log.info("nbstreams:" + pFormatCtx.nb_streams());

		// Find the first video stream
		videoStream = -1;
		for (i = 0; i < pFormatCtx.nb_streams(); i++) {
			if (pFormatCtx.streams(i).codec().codec_type() == AVMEDIA_TYPE_VIDEO) {
				videoStream = i;
				break;
			}
		}
		if (videoStream == -1) {
			return -1; // Didn't find a video stream
		}

		// Codec context of the chosen video stream
		pCodecCtx = pFormatCtx.streams(videoStream).codec();

		// Find and open the decoder for the video stream
		pCodec = avcodec_find_decoder(pCodecCtx.codec_id());
		if (pCodec == null) {
			System.err.println("Unsupported codec!");
			return -1; // Codec not found
		}
		if (avcodec_open2(pCodecCtx, pCodec, optionsDict) < 0) {
			return -1; // Could not open codec
		}

		pFrame = av_frame_alloc(); // decoded frame, decoder-native pixel format
		pFrameRGB = av_frame_alloc(); // converted YUVJ420P frame
		if (pFrameRGB == null) {
			return -1;
		}
		int width = pCodecCtx.width(), height = pCodecCtx.height();
		pFrameRGB.width(width);
		pFrameRGB.height(height);
		pFrameRGB.format(AV_PIX_FMT_YUVJ420P);

		// Determine required buffer size and allocate the pixel buffer
		numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 1);
		buffer = new BytePointer(av_malloc(numBytes));

		sws_ctx = sws_getContext(pCodecCtx.width(), pCodecCtx.height(), pCodecCtx.pix_fmt(), pCodecCtx.width(),
				pCodecCtx.height(), AV_PIX_FMT_YUVJ420P, SWS_BICUBIC, null, null, (DoublePointer) null);

		// Attach the buffer to pFrameRGB's image planes
		avpicture_fill(new AVPicture(pFrameRGB), buffer, AV_PIX_FMT_YUVJ420P, pCodecCtx.width(), pCodecCtx.height());

		int ret = -1;

		// Seek when time > 0, otherwise grab the first frame.
		if (time > 0) {
			// Convert seconds into the stream's time base units.
			long target = (long) (time / av_q2d(pFormatCtx.streams(videoStream).time_base()));
			log.info("" + target);
			if (av_seek_frame(pFormatCtx, videoStream, target, AVSEEK_FLAG_BACKWARD) < 0) {
				return -1;
			}
		}

		while (av_read_frame(pFormatCtx, packet) >= 0) {
			if (packet.stream_index() == videoStream) {
				avcodec_send_packet(pCodecCtx, packet);
				// BUG FIX: the original ignored avcodec_receive_frame's return value
				// and scaled (twice, identically!) whatever happened to be in pFrame.
				// Only convert and save once a complete frame was actually produced.
				if (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
					sws_scale(sws_ctx, pFrame.data(), pFrame.linesize(), 0, pCodecCtx.height(), pFrameRGB.data(),
							pFrameRGB.linesize());
					if ((ret = saveImg(pFrameRGB, out_file)) >= 0) {
						av_packet_unref(packet);
						break;
					}
				}
			}
			// BUG FIX: unref every packet read; the original only unreffed the last
			// one after the loop, leaking every packet read before it.
			av_packet_unref(packet);
		}

		sws_freeContext(sws_ctx); // BUG FIX: the scaler context was never released
		av_free(buffer); // Free the converted image buffer
		av_free(pFrameRGB);
		av_free(pFrame); // Free the decoded frame
		avcodec_close(pCodecCtx); // Close the codec
		avformat_close_input(pFormatCtx); // Close the video file

		return ret;
	}

	/**
	 * Encodes one YUVJ420P frame into a single-image MJPEG file (a .jpg).
	 *
	 * @param pFrame the image frame to encode (must already be YUVJ420P)
	 * @param out_file destination path for the snapshot file
	 * @author eguid
	 * @return the av_write_frame() result (>= 0 on success), -1 on any failure
	 */
	private int saveImg(AVFrame pFrame, String out_file) {
		AVPacket pkt = null;
		AVStream pAVStream = null;
		AVCodec codec = null; // intentionally null: avformat_new_stream uses defaults
		int ret = -1;

		int width = pFrame.width(), height = pFrame.height();
		// Allocate the output AVFormatContext
		AVFormatContext pFormatCtx = avformat_alloc_context();
		// Use the single-image MJPEG muxer as the output format
		pFormatCtx.oformat(av_guess_format("mjpeg", null, null));
		if (pFormatCtx.oformat() == null) {
			return -1;
		}
		try {
			// Create and initialize an AVIOContext bound to the output file
			AVIOContext pb = new AVIOContext();
			if (avio_open(pb, out_file, AVIO_FLAG_READ_WRITE) < 0) {// dont open file
				return -1;
			}
			pFormatCtx.pb(pb);
			// Add a new stream to the output container
			pAVStream = avformat_new_stream(pFormatCtx, codec);
			if (pAVStream == null) {
				return -1;
			}
			int codec_id = pFormatCtx.oformat().video_codec();
			// Configure the stream's codec context for a single MJPEG video frame
			// AVCodecContext pCodecCtx = pAVStream.codec();
			AVCodecContext pCodecCtx = pAVStream.codec();
			pCodecCtx.codec_id(codec_id);
			pCodecCtx.codec_type(AVMEDIA_TYPE_VIDEO);
			pCodecCtx.pix_fmt(AV_PIX_FMT_YUVJ420P);
			pCodecCtx.width(width);
			pCodecCtx.height(height);
			// Nominal 25 fps time base; irrelevant for a single still image
			pCodecCtx.time_base().num(1);
			pCodecCtx.time_base().den(25);

			// Begin Output some information
			av_dump_format(pFormatCtx, 0, out_file, 1);
			// End Output some information

			// Look up the encoder matching the muxer's default video codec (MJPEG)
			AVCodec pCodec = avcodec_find_encoder(codec_id);
			if (pCodec == null) {// codec not found
				return -1;
			}
			// Open the encoder on pCodecCtx
			if (avcodec_open2(pCodecCtx, pCodec, (PointerPointer<Pointer>) null) < 0) {
				System.err.println("Could not open codec.");
				return -1;
			}

			// Write Header
			avformat_write_header(pFormatCtx, (PointerPointer<Pointer>) null);

			// Allocate an AVPacket large enough for the encoded image
			pkt = new AVPacket();
			if (av_new_packet(pkt, width * height * 3) < 0) {
				return -1;
			}
			int[] got_picture = {0};
			// encode
			if (avcodec_encode_video2(pCodecCtx, pkt, pFrame, got_picture) >= 0) {
				// flush
				if ((ret = av_write_frame(pFormatCtx, pkt)) >= 0) {
					// Write Trailer
					if (av_write_trailer(pFormatCtx) >= 0) {
						System.err.println("Encode Successful.");
					}
				}
			}
			return ret;
			// Cleanup happens in finally regardless of the outcome above
		} finally {
			if (pkt != null) {
				av_packet_unref(pkt);
			}
			if (pAVStream != null) {
				avcodec_close(pAVStream.codec());
			}
			if (pFormatCtx != null) {
				avio_close(pFormatCtx.pb());
				avformat_free_context(pFormatCtx);
			}
		}
	}

         音频处理:

/**
 * Prints stream count, duration and all metadata entries of a media file.
 *
 * @param argc expected argument count; must be exactly 2
 * @param argv argv[0] is the program name, argv[1] the media file path
 * @return 0 on success, 1 on usage error, -1 when the file cannot be opened or probed
 */
public int info(int argc, String[] argv){

		AVFormatContext fmt_ctx = avformat_alloc_context();
		AVDictionary dict = new AVDictionary();
		AVDictionaryEntry tag = null;
		if (argc != 2) {
			log.error("usage: %s <input_file> example program to demonstrate the use of the libavformat metadata API.", argv[0]);
			return 1;
		}

		if (avformat_open_input(fmt_ctx, argv[1], null, null)!=0){
			return -1;
		}
		if (avformat_find_stream_info(fmt_ctx, dict)!=0) {
			return -1;
		}
		int streamsCount = fmt_ctx.nb_streams();
		log.info("有几路流:" + streamsCount);
		// BUG FIX: the original called fmt_ctx.nb_streams(streamsCount) — the
		// SETTER, which mutates the context and returns it — and then read
		// duration() off the return value. Read the duration directly.
		log.info("时长:" + fmt_ctx.duration());
		// Walk every metadata entry; av_dict_get with an empty key plus
		// AV_DICT_IGNORE_SUFFIX iterates the whole dictionary.
		while ((tag = av_dict_get(fmt_ctx.metadata(), "", tag, AV_DICT_IGNORE_SUFFIX)) != null)
		{
			log.info("key:" + tag.key().getString());
			// BUG FIX: this line logged the VALUE under the label "key:".
			log.info("value:" + tag.value().getString());
		}
		avformat_close_input(fmt_ctx);
		return 0;
	}


	public int cutAudio(double from_seconds, double end_seconds, String in_filename, String out_filename){
		AVOutputFormat ofmt = null;
		AVFormatContext ifmt_ctx = new AVFormatContext(null);
		AVFormatContext ofmt_ctx = new AVFormatContext(null);
		avcodec.AVPacket pkt = new avcodec.AVPacket();
		int ret, i;

		try{
			av_register_all();

			if (avformat_open_input(ifmt_ctx, in_filename, null, null) < 0) {
				return -1;
			}

			if ((ret = avformat_find_stream_info(ifmt_ctx, (PointerPointer<Pointer>) null)) < 0) {
				return -1;
			}

			av_dump_format(ifmt_ctx, 0, in_filename, 0);

			avformat_alloc_output_context2(ofmt_ctx, null, null, out_filename);
			if (ofmt_ctx == null) {
				return -1;
			}

			ofmt = ofmt_ctx.oformat();

			for (i = 0; i < ifmt_ctx.nb_streams(); i++) {
				AVStream in_stream = ifmt_ctx.streams(i);
				AVStream out_stream = avformat_new_stream(ofmt_ctx, in_stream.codec().codec());
				if (out_stream == null) {
					return -1;
				}

				ret = avcodec_copy_context(out_stream.codec(), in_stream.codec());
				if (ret < 0) {
					return -1;
				}
				out_stream.codec().codec_tag(0);
				if ((ofmt_ctx.oformat().flags() & AVFMT_GLOBALHEADER) > 0)
					ofmt_ctx.oformat().flags(ofmt_ctx.oformat().flags() | AV_CODEC_FLAG_GLOBAL_HEADER) ;
			}
			av_dump_format(ofmt_ctx, 0, out_filename, 1);

			if ((ofmt.flags() & AVFMT_NOFILE) == 0) {
				AVIOContext pb = new AVIOContext();
				ret = avio_open(pb, out_filename, AVIO_FLAG_WRITE);
				if (ret < 0) {
					return -1;
				}
				ofmt_ctx.pb(pb);
			}

			ret = avformat_write_header(ofmt_ctx, (PointerPointer<Pointer>)null);
			if (ret < 0) {
				return -1;
			}

			//    int64_t start_from = 8*AV_TIME_BASE;
			ret = av_seek_frame(ifmt_ctx, -1, new Double(from_seconds*AV_TIME_BASE).longValue(), AVSEEK_FLAG_ANY);
			if (ret < 0) {
				return -1;
			}

//			int dts_start_from = ifmt_ctx.nb_streams();
//			av_memcpy_backptr(dts_start_from, 0, ifmt_ctx.nb_streams());
//			int64_t *pts_start_from = malloc(sizeof(int64_t) * ifmt_ctx->nb_streams);
//			memset(pts_start_from, 0, sizeof(int64_t) * ifmt_ctx->nb_streams);
			long[] dts_start_from = new long[ifmt_ctx.nb_streams()];
			Arrays.fill(dts_start_from, 0);
			long[] pts_start_from = new long[ifmt_ctx.nb_streams()];
			Arrays.fill(pts_start_from, 0);

			while (true) {
				AVStream in_stream, out_stream;

				ret = av_read_frame(ifmt_ctx, pkt);
				if (ret < 0)
					break;

				in_stream  = ifmt_ctx.streams(pkt.stream_index());
				out_stream = ofmt_ctx.streams(pkt.stream_index());

				//log_packet(ifmt_ctx, pkt, "in");

				if (av_q2d(in_stream.time_base()) * pkt.pts() > end_seconds) {
					av_free_packet(pkt);
					break;
				}

				if (dts_start_from[pkt.stream_index()] == 0) {
					dts_start_from[pkt.stream_index()] = pkt.dts();
					//printf("dts_start_from: %s\n", av_ts2str(dts_start_from[pkt.stream_index()]));
				}
				if (pts_start_from[pkt.stream_index()] == 0) {
					pts_start_from[pkt.stream_index()] = pkt.pts();
					//printf("pts_start_from: %s\n", av_ts2str(pts_start_from[pkt.stream_index]));
				}

				/* copy packet */
				pkt.pts(av_rescale_q_rnd(pkt.pts() - pts_start_from[pkt.stream_index()], in_stream.time_base(), out_stream.time_base(), AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
				pkt.dts(av_rescale_q_rnd(pkt.dts() - dts_start_from[pkt.stream_index()], in_stream.time_base(), out_stream.time_base(), AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
				if (pkt.pts() < 0) {
					pkt.pts(0);
				}
				if (pkt.dts() < 0) {
					pkt.dts(0);
				}
				pkt.duration((int)av_rescale_q((int)pkt.duration(), in_stream.time_base(), out_stream.time_base()));
				pkt.pos(-1);
				//log_packet(ofmt_ctx, &pkt, "out");
				//printf("\n");

				ret = av_interleaved_write_frame(ofmt_ctx, pkt);
				if (ret < 0) {
					//fprintf(stderr, "Error muxing packet\n");
					break;
				}
				av_free_packet(pkt);
			}

			dts_start_from = new long[0];
			pts_start_from = new long[0];

			av_write_trailer(ofmt_ctx);

			return 0;
		} finally {
			avformat_close_input(ifmt_ctx);

			/* close output */
			if (ofmt_ctx!=null && (ofmt.flags() & AVFMT_NOFILE)==0)
				avio_closep(ofmt_ctx.pb());
			avformat_free_context(ofmt_ctx);

//			if (ret < 0 && ret != AVERROR_EOF) {
//				fprintf(stderr, "Error occurred: %s\n", av_err2str(ret));
//				return 1;
//			}
		}

三、试用OpenCV

 车牌识别

package com.joinway.platform.admin.carno;

import java.math.BigDecimal;
import java.util.Vector;

import com.joinway.platform.admin.carno.core.CharsRecognise;
import com.joinway.platform.admin.carno.core.PlateDetect;
import org.bytedeco.javacpp.opencv_imgcodecs;
import org.bytedeco.javacpp.opencv_core.Mat;

/**
 * 车牌识别
 * @author eguid
 *
 */
/**
 * License-plate recognition helpers built on a detect stage (PlateDetect)
 * followed by a character-recognition stage (CharsRecognise).
 *
 * @author eguid
 */
public class PlateRecognition {
	static PlateDetect plateDetect = null;
	static CharsRecognise cr = null;
	static {
		plateDetect = new PlateDetect();
		plateDetect.setPDLifemode(true);
		cr = new CharsRecognise();
	}

	/**
	 * Recognises a single plate in the given image.
	 *
	 * @param mat source image
	 * @return the recognised plate string, or null when nothing was detected
	 */
	public static String plateRecognise(Mat mat) {
		Vector<Mat> plates = new Vector<Mat>(1);
		if (plateDetect.plateDetect(mat, plates) != 0) {
			return null;
		}
		return plates.isEmpty() ? null : cr.charsRecognise(plates.firstElement());
	}

	/**
	 * Recognises multiple plates in the given image.
	 *
	 * @param mat source image
	 * @return one recognised string per detected plate, or null on detect failure
	 */
	public static String[] mutiPlateRecognise(Mat mat) {
		PlateDetect detector = new PlateDetect();
		detector.setPDLifemode(true);
		Vector<Mat> plates = new Vector<Mat>(10);
		if (detector.plateDetect(mat, plates) != 0) {
			return null;
		}
		CharsRecognise recogniser = new CharsRecognise();
		String[] results = new String[plates.size()];
		for (int idx = 0; idx < plates.size(); idx++) {
			results[idx] = recogniser.charsRecognise(plates.get(idx));
		}
		return results;
	}

	/**
	 * Recognises a single plate from an image file.
	 *
	 * @param imgPath image file path
	 * @return the recognised plate string, or null when nothing was detected
	 */
	public static String plateRecognise(String imgPath) {
		return plateRecognise(opencv_imgcodecs.imread(imgPath));
	}

	/**
	 * Recognises multiple plates from an image file.
	 *
	 * @param imgPath image file path
	 * @return one recognised string per detected plate, or null on detect failure
	 */
	public static String[] mutiPlateRecognise(String imgPath) {
		return mutiPlateRecognise(opencv_imgcodecs.imread(imgPath));
	}

	/** Runs the fixed test image 100 times and reports timing and accuracy. */
	public static void main(String[] args) {
		final int total = 100;
		int failures = 0;
		int elapsedTotal = 0;
		long worst = 0;
		for (int remaining = total; remaining > 0; remaining--) {
			String imgPath = "res/image/test_image/plate_judge.jpg";
			Mat src = opencv_imgcodecs.imread(imgPath);
			long start = System.currentTimeMillis();
			String plate = plateRecognise(src);
			System.err.println(plate);
			long cost = System.currentTimeMillis() - start;
			if (cost > worst) {
				worst = cost;
			}
			elapsedTotal += cost;
			if (!"川A0CP56".equals(plate)) {
				failures++;
			}
		}
		System.err.println("总数量:" + total);
		System.err.println("单次最长耗时:" + worst + "ms");

		BigDecimal errSum = new BigDecimal(failures);
		BigDecimal sumNum = new BigDecimal(total);
		BigDecimal c = sumNum.subtract(errSum).divide(sumNum).multiply(new BigDecimal(100));
		System.err.println("总耗时:" + elapsedTotal + "ms,平均处理时长:" + elapsedTotal / total + "ms,错误数量:" + failures + ",正确识别率:" + c + "%");
	}
}