The work here covers three main parts:
1. Decoding the video with OpenH264 (a short decode sketch follows this list);
2. Handing the decoded frames over to a display device;
3. Adapting that display device to the Linux screen.
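In pjsua the H.264 decoding itself is handled by pjmedia's built-in OpenH264 codec wrapper (enabled with PJMEDIA_HAS_OPENH264_CODEC), so the application never calls the decoder directly. Purely as background, here is a minimal standalone sketch of the underlying OpenH264 C API; it is not code from this project, but it shows why the render device later receives plain I420 planes. The function names decode_nal / open_h264_decoder are illustrative.

/* Sketch only: feed one H.264 NAL unit to OpenH264, get back I420 planes. */
#include <string.h>
#include <wels/codec_api.h>

static ISVCDecoder* open_h264_decoder(void)
{
    ISVCDecoder *dec = NULL;
    SDecodingParam prm;
    if (WelsCreateDecoder(&dec) != 0)
        return NULL;
    memset(&prm, 0, sizeof(prm));
    prm.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
    (*dec)->Initialize(dec, &prm);
    return dec;
}

static int decode_nal(ISVCDecoder *dec, const unsigned char *nal, int nal_len)
{
    unsigned char *yuv[3] = { NULL, NULL, NULL };
    SBufferInfo info;
    memset(&info, 0, sizeof(info));

    /* DecodeFrame2 returns pointers into the decoder's internal I420 planes */
    if ((*dec)->DecodeFrame2(dec, nal, nal_len, yuv, &info) != dsErrorFree)
        return -1;

    if (info.iBufferStatus == 1) {
        int w        = info.UsrData.sSystemBuffer.iWidth;
        int h        = info.UsrData.sSystemBuffer.iHeight;
        int stride_y = info.UsrData.sSystemBuffer.iStride[0];
        /* yuv[0]/yuv[1]/yuv[2] are the Y/U/V planes that, after the codec
         * wrapper repacks them, reach the render device's put_frame(). */
        (void)w; (void)h; (void)stride_y;
    }
    return 0;
}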
Continuing from the previous post, an lvgl_dev.c has already been implemented as the bridge to the display device.
Mind the registration order of the display device: in video_dev.c, register the display (render) factory after the camera capture factory, so that even when no capture device is configured, the first device found is still a real capturer by default.
#if PJMEDIA_VIDEO_DEV_HAS_OV5000   /* --vcapture-dev=0 */
    /* Better put colorbar at the last, so the default capturer will be
     * a real capturer, if any. */
    vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_ov5000_factory;
#endif

#if PJMEDIA_VIDEO_DEV_HAS_LVGL     /* --vrender-dev=1 */
    vid_subsys->drv[vid_subsys->drv_cnt++].create = &pjmedia_lvgl_factory;
#endif
In the app's startup config file, specify the capture-device and render-device IDs:
--video --vcapture-dev 0 --vrender-dev 1
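Because the indices depend purely on the registration order above, it is worth verifying at runtime which ID the capture and LVGL render devices actually got. A small sketch using the stock pjsua_vid_enum_devs() API after pjsua has started (dump_vid_devs is an illustrative helper, not part of the post):

/* Sketch: list registered video devices so the --vcapture-dev / --vrender-dev
 * values in the config file can be confirmed. Assumes pjsua is initialized. */
#include <pjsua-lib/pjsua.h>

static void dump_vid_devs(void)
{
    pjmedia_vid_dev_info info[PJMEDIA_VID_DEV_MAX_DEVS];
    unsigned i, count = PJ_ARRAY_SIZE(info);

    if (pjsua_vid_enum_devs(info, &count) != PJ_SUCCESS)
        return;

    for (i = 0; i < count; ++i) {
        PJ_LOG(3, ("app", "vid dev %2d: %s (driver %s) dir=%d",
                   i, info[i].name, info[i].driver, info[i].dir));
    }
}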
Enabling display of the remote video:
/* Callee: answer with video enabled and bidirectional media direction */
static pjsua_call_setting incoming_call_opt;
pjsua_call_setting_default(&incoming_call_opt);
incoming_call_opt.flag |= PJSUA_CALL_SET_MEDIA_DIR;
incoming_call_opt.aud_cnt = app_config.aud_cnt;
incoming_call_opt.vid_cnt = app_config.vid.vid_cnt;
incoming_call_opt.media_dir[1] = PJMEDIA_DIR_ENCODING_DECODING;   /* video: send + receive */
pjsua_call_answer2(entry->id, &incoming_call_opt, app_config.auto_answer, NULL, NULL);

/* Caller: choose the audio/video direction before making the call */
pjsua_call_setting_default(&call_opt);
call_opt.aud_cnt = app_config.aud_cnt;
call_opt.vid_cnt = app_config.vid.vid_cnt;
call_opt.flag |= PJSUA_CALL_SET_MEDIA_DIR;

if (audio_dir == 0){            /* send + receive */
    call_opt.media_dir[0] = PJMEDIA_DIR_ENCODING_DECODING;
}else if (audio_dir == 1){      /* send only */
    call_opt.media_dir[0] = PJMEDIA_DIR_CAPTURE;
}else{                          /* receive only */
    call_opt.media_dir[0] = PJMEDIA_DIR_PLAYBACK;
}

if (video_dir == 0){            /* no video */
    call_opt.media_dir[1] = PJMEDIA_DIR_NONE;
    call_opt.vid_cnt = 0;
    //call_opt.out_auto_transmit = PJ_FALSE;
}else if (video_dir == 1){      /* send only */
    call_opt.media_dir[1] = PJMEDIA_DIR_CAPTURE;
    call_opt.vid_cnt = 1;
    //call_opt.out_auto_transmit = PJ_TRUE;
}else{                          /* receive only */
    call_opt.media_dir[1] = PJMEDIA_DIR_PLAYBACK;
    call_opt.vid_cnt = 1;
    //call_opt.out_auto_transmit = PJ_FALSE;
    //call_opt.in_auto_show = PJ_TRUE;
}

pjsua_call_make_call(current_acc, &tmp, &call_opt, NULL, &msg_data_, &current_call);
Display adaptation in lvgl_dev.c:
The key points are:
1. The pixel format of the incoming frames: whether it is I420 (YUV420P) or something else, and whether it has to be converted to RGB24 or some other RGB format;
2. The orientation of the video: whether it needs to be rotated;
3. The resolution of the screen: whether the frame needs to be scaled or cropped.
// put_frame() path: convert the incoming I420 frame to ARGB and push it to the screen
if (display_argb_data == NULL){
    display_argb_data        = (uint8_t*)malloc(MAX_WIDTH * MAX_HEGITH * 4);
    display_rotate_argb_data = (uint8_t*)malloc(MAX_WIDTH * MAX_HEGITH * 4);
    display_i420_data        = (uint8_t*)malloc(MAX_WIDTH * MAX_HEGITH * 3/2);
}
if (frame_width == 0){
    return PJ_SUCCESS;
}
ConvertYuvI420ToBitmap(frame->buf, frame_width, frame_height,
                       display_argb_data, MAX_WIDTH * MAX_HEGITH * 4);

int ConvertYuvI420ToBitmap(uint8_t* yuvData, int width, int height, uint8_t *argbData, int argbSize)
{
    // Rough size check on the destination buffer (simplified; adjust to your own bitmap handling)
    int need_argbSize = width * height * 3;
    if (argbSize < need_argbSize || argbData == NULL){
        return 0;
    }

    // Source I420 plane pointers and strides
    int yStride  = width;
    int uvStride = width / 2;              // for I420 the U/V planes are half the Y width
    int src_i420_y_size = width * height;
    uint8_t* yPlane = yuvData;
    uint8_t* uPlane = yuvData + yStride * height;
    uint8_t* vPlane = uPlane + uvStride * height / 2;

    // Intermediate I420 buffer at the screen resolution
    uint8_t *dst_i420_data = display_i420_data;
    int dst_i420_y_size = VWIDTH * VHEIGHT;
    int dst_i420_u_size = (VWIDTH >> 1) * (VHEIGHT >> 1);
    uint8_t *dst_i420_y_data = dst_i420_data;
    uint8_t *dst_i420_u_data = dst_i420_data + dst_i420_y_size;
    uint8_t *dst_i420_v_data = dst_i420_data + dst_i420_y_size + dst_i420_u_size;

    int ret = 0;
    char file_name[255] = "";

    if (width < height){
        /* typedef enum FilterMode {
         *     kFilterNone = 0,      // Point sample; Fastest.
         *     kFilterLinear = 1,    // Filter horizontally only.
         *     kFilterBilinear = 2,  // Faster than box, but lower quality scaling down.
         *     kFilterBox = 3        // Highest quality.
         * } FilterModeEnum; */
        ret = I420Scale(yPlane, yStride, uPlane, uvStride, vPlane, uvStride,
                        width, height,
                        dst_i420_y_data, VWIDTH,
                        dst_i420_u_data, VWIDTH >> 1,
                        dst_i420_v_data, VWIDTH >> 1,
                        VWIDTH, VHEIGHT, 3);
    }else{
        // Landscape frame: rotate 270 degrees first, then scale to the screen size
        uint8_t *dst_rotate_i420_y_data = display_rotate_argb_data;
        uint8_t *dst_rotate_i420_u_data = display_rotate_argb_data + src_i420_y_size;
        int src_i420_u_size = (width >> 1) * (height >> 1);
        uint8_t *dst_rotate_i420_v_data = display_rotate_argb_data + src_i420_y_size + src_i420_u_size;

        // rotate
        ret = I420Rotate(yPlane, yStride, uPlane, uvStride, vPlane, uvStride,
                         dst_rotate_i420_y_data, height,
                         dst_rotate_i420_u_data, height >> 1,
                         dst_rotate_i420_v_data, height >> 1,
                         width, height, 270);

        ret = I420Scale(dst_rotate_i420_y_data, height,
                        dst_rotate_i420_u_data, height >> 1,
                        dst_rotate_i420_v_data, height >> 1,
                        height, width,
                        dst_i420_y_data, VWIDTH,
                        dst_i420_u_data, VWIDTH >> 1,
                        dst_i420_v_data, VWIDTH >> 1,
                        VWIDTH, VHEIGHT, 3);
    }
    //printf("I420Scale ret:%d\r\n", ret);

#if 0
    // Convert with libyuv to RGB24 (3 bytes per pixel)
    ret = I420ToRGB24(dst_i420_y_data, VWIDTH,
                      dst_i420_u_data, VWIDTH >> 1,
                      dst_i420_v_data, VWIDTH >> 1,
                      argbData, VWIDTH * 3, VWIDTH, VHEIGHT);
    need_argbSize = VWIDTH * VHEIGHT * 3;
#else
    // Convert with libyuv to ARGB (4 bytes per pixel)
    ret = I420ToARGB(dst_i420_y_data, VWIDTH,
                     dst_i420_u_data, VWIDTH >> 1,
                     dst_i420_v_data, VWIDTH >> 1,
                     argbData, VWIDTH * 4, VWIDTH, VHEIGHT);
    need_argbSize = VWIDTH * VHEIGHT * 4;
#endif

    // Optionally dump a frame to a BMP for debugging
    static int save_index = 0;
    save_index++;
    sprintf(file_name, "/mnt/UDISK/%d_10.bmp", save_index);
    //SaveARGBAsBMP(file_name, argbData, VWIDTH, VHEIGHT);

    display_show_bitmap(argbData, need_argbSize, 4);
    //printf("ConvertYuvI420ToBitmap ret:%d,need_argbSize:%d\r\n", ret, need_argbSize);
    return ret;
}

// Declarations used above
#include <libyuv/convert.h>
#include <libyuv/convert_argb.h>
#include <libyuv/convert_from.h>
#include <libyuv/scale.h>
#include <libyuv/rotate.h>
#include <libyuv/planar_functions.h>

#define MAX_WIDTH  800
#define MAX_HEGITH 600

static int frame_width = 0;
static int frame_height = 0;

// Report the decoded frame size to the display adapter
void set_video_frame_width(int width, int height){
    frame_width = width;
    frame_height = height;
}

// Framebuffer access (Linux /dev/fb0)
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <linux/fb.h>

static int fbfd = -1;
static char *fb_map = NULL;
static uint8_t* display_argb_data = NULL;
static uint8_t* display_rotate_argb_data = NULL;
static uint8_t* display_i420_data = NULL;
static int display_size = 0;
static unsigned long VWIDTH  = 0;
static unsigned long VHEIGHT = 0;
static unsigned long BPP = 0;
static struct fb_fix_screeninfo finfo;
struct fb_var_screeninfo vinfo;     // variable attributes of the framebuffer device

static void close_disply(){
    if (fb_map != NULL){
        munmap(fb_map, display_size);   // unmap the framebuffer
        fb_map = NULL;
    }
    if (fbfd >= 0){
        close(fbfd);
        fbfd = -1;
    }
    display_size = 0;
    printf("close_disply end.\n");
}

static void open_display(){
    // Open the LCD framebuffer device
    fbfd = open("/dev/fb0", O_RDWR|O_EXCL);

#if USE_BSD_FBDEV
    // Get fb type
    if (ioctl(fbfd, FBIOGTYPE, &fb) != 0) {
        perror("ioctl(FBIOGTYPE)");
        return;
    }
    // Get screen width
    if (ioctl(fbfd, FBIO_GETLINEWIDTH, &line_length) != 0) {
        perror("ioctl(FBIO_GETLINEWIDTH)");
        return;
    }
    // Resolution and depth reported by the framebuffer
    VWIDTH  = fb.fb_width;
    VHEIGHT = fb.fb_height;
    BPP     = fb.fb_depth;
    printf("display mem: %lux%lu BPP:%lu\n", VWIDTH, VHEIGHT, BPP);   // e.g. vwidth:320 vheight:960
    int screen_height = 480;
    if (VHEIGHT > screen_height){
        VHEIGHT = screen_height;
    }
    display_size = line_length * vinfo.yres;
#else
    ioctl(fbfd, FBIOGET_VSCREENINFO, &vinfo);   // variable screen information
    // Get fixed screen information
    if(ioctl(fbfd, FBIOGET_FSCREENINFO, &finfo) == -1) {
        perror("Error reading fixed information");
        return;
    }
    printf("screen_info, %dx%d, %dbpp x_offset:%d, y_offset:%d, lines:%d\r\n",
           vinfo.xres, vinfo.yres, vinfo.bits_per_pixel,
           vinfo.xoffset, vinfo.yoffset, finfo.line_length);

    // Resolution and depth reported by the framebuffer
    VWIDTH  = vinfo.xres;   // vinfo.xres_virtual
    VHEIGHT = vinfo.yres;   // vinfo.yres_virtual
    BPP     = vinfo.bits_per_pixel;
    printf("display mem: %luX%lu BPP:%lu\n", VWIDTH, VHEIGHT, BPP);   // e.g. vwidth:320 vheight:960
#if 0
    int screen_height = 480;
    if (VHEIGHT > screen_height){
        VHEIGHT = screen_height;
    }
#endif
    // Figure out the size of the screen in bytes
    display_size = finfo.smem_len;  // finfo.line_length * vinfo.yres
#endif

    // Map the whole framebuffer into user space
    fb_map = mmap(NULL, display_size, PROT_READ|PROT_WRITE, MAP_SHARED, fbfd, 0);
    if(fb_map != MAP_FAILED) {
        printf("open success, fb_map:%p, display_size:%d\n", fb_map, display_size);
    }else{
        fb_map = NULL;
        display_size = 0;
        printf("open failed\n");
    }
}

static void display_show_bitmap(uint8_t *color_p, int total_length, int per_bit){
    if (fb_map == NULL){
        return;
    }
    if(total_length <= display_size) {
#if 1
        // Copy pixel by pixel: the source is ARGB (per_bit bytes per pixel),
        // the framebuffer is assumed to be 24bpp (3 bytes per pixel).
        uint8_t *fbp8 = (uint8_t *)fb_map;
        uint8_t *pixel;
        long int location = 0;
        int32_t x, y;
        for(y = 0; y < VHEIGHT; y++) {
            location = (0 + vinfo.xoffset) + (y + vinfo.yoffset) * finfo.line_length / 3;
            for (x = 0; x < VWIDTH; ++x) {
                pixel = (uint8_t *)(&color_p[x*per_bit]);
                fbp8[3 * (location + x)]     = pixel[0];
                fbp8[3 * (location + x) + 1] = pixel[1];
                fbp8[3 * (location + x) + 2] = pixel[2];
            }
            color_p += VWIDTH*per_bit;
        }
#else
        uint8_t *fbp = (uint8_t *)fb_map;
        for (int y = 0; y < VHEIGHT; y++) {
            for (int x = 0; x < VWIDTH; x++) {
                // Offset of this pixel in the framebuffer
                int offset = (y * finfo.line_length) + (x * (vinfo.bits_per_pixel / 8));
                // Write the RGB values (byte order may need adjusting to match
                // the actual memory layout)
                fbp[offset]     = color_p[(y * VWIDTH + x) * 4];        // B
                fbp[offset + 1] = color_p[(y * VWIDTH + x) * 4 + 1];    // G
                fbp[offset + 2] = color_p[(y * VWIDTH + x) * 4 + 2];    // R
            }
        }
#endif
    }else {
        memcpy(fb_map, color_p, display_size);
    }
}
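Finally, a rough sketch of where the pieces above are assumed to sit inside lvgl_dev.c: pjmedia delivers every decoded frame to the render stream's put_frame() callback. The factory and stream boilerplate (modeled on pjmedia's SDL/fbdev devices) is omitted, and lvgl_stream_put_frame is an illustrative name, not taken from the post.

/* Hypothetical sketch of the render stream callback in lvgl_dev.c,
 * reusing the statics and helpers defined above in the same file. */
#include <pjmedia-videodev/videodev_imp.h>

static pj_status_t lvgl_stream_put_frame(pjmedia_vid_dev_stream *strm,
                                         const pjmedia_frame *frame)
{
    PJ_UNUSED_ARG(strm);

    /* Lazily allocate the conversion buffers on the first frame. */
    if (display_argb_data == NULL) {
        display_argb_data        = (uint8_t *)malloc(MAX_WIDTH * MAX_HEGITH * 4);
        display_rotate_argb_data = (uint8_t *)malloc(MAX_WIDTH * MAX_HEGITH * 4);
        display_i420_data        = (uint8_t *)malloc(MAX_WIDTH * MAX_HEGITH * 3 / 2);
    }

    /* set_video_frame_width() must have reported the decoded size first. */
    if (frame_width == 0 || frame->size == 0)
        return PJ_SUCCESS;

    /* I420 -> rotate/scale -> ARGB -> framebuffer */
    ConvertYuvI420ToBitmap((uint8_t *)frame->buf, frame_width, frame_height,
                           display_argb_data, MAX_WIDTH * MAX_HEGITH * 4);
    return PJ_SUCCESS;
}

In the actual device this callback is wired into the stream's pjmedia_vid_dev_stream_op table together with start/stop/destroy, the same way the stock SDL render device does it.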
This is an original article from 呱牛笔记. Feel free to repost it without contacting me, but please credit 呱牛笔记, it3q.com.