Refactor: Use esp_video component (#1245)

* refactor: migrate camera module to esp-video library

* refactor: migrate boards to esp-video API (1/2)

* refactor: migrate boards to esp-video API (2/2)

* fix: use ESP-IDF 5.5

* refactor: migrate the JPEG encoder to `esp_new_jpeg`

* feat: add YUV422 support

* feat: improve pixelformat and device selection process

* feat: use ESP32-P4 Hardware JPEG Encoder
laride 2025-10-14 10:44:45 +08:00 committed by GitHub
parent 4854bda302
commit 60ad1c5afc
39 changed files with 1724 additions and 1772 deletions
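The central change in this diff: each board's `InitializeCamera()` now builds an `esp_video_init_config_t` instead of the old `camera_config_t`, and `Esp32Camera` talks to the sensor through V4L2 ioctls. The sketch below condenses the pattern repeated across the board files in this PR; the `CAMERA_PIN_*` macros and `XCLK_FREQ_HZ` are placeholders that each board defines in its own `config.h`, so treat it as an illustrative summary rather than a drop-in replacement.

```cpp
// Minimal sketch of the new esp_video-based initialization pattern used in this PR.
// CAMERA_PIN_* and XCLK_FREQ_HZ are assumed to come from the board's config.h.
#include "esp32_camera.h"
#include "esp_video_init.h"

static Camera* InitializeCameraSketch() {
    // DVP parallel bus pins (8-bit data plus sync/clock lines)
    static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
        .data_width = CAM_CTLR_DATA_WIDTH_8,
        .data_io = {
            [0] = CAMERA_PIN_D0, [1] = CAMERA_PIN_D1, [2] = CAMERA_PIN_D2, [3] = CAMERA_PIN_D3,
            [4] = CAMERA_PIN_D4, [5] = CAMERA_PIN_D5, [6] = CAMERA_PIN_D6, [7] = CAMERA_PIN_D7,
        },
        .vsync_io = CAMERA_PIN_VSYNC,
        .de_io = CAMERA_PIN_HREF,
        .pclk_io = CAMERA_PIN_PCLK,
        .xclk_io = CAMERA_PIN_XCLK,
    };
    // SCCB (I2C) link to the sensor; boards that already own an I2C bus
    // instead set .init_sccb = false and pass the existing .i2c_handle.
    esp_video_init_sccb_config_t sccb_config = {
        .init_sccb = true,
        .i2c_config = { .port = 1, .scl_pin = CAMERA_PIN_SIOC, .sda_pin = CAMERA_PIN_SIOD },
        .freq = 100000,
    };
    esp_video_init_dvp_config_t dvp_config = {
        .sccb_config = sccb_config,
        .reset_pin = CAMERA_PIN_RESET,
        .pwdn_pin = CAMERA_PIN_PWDN,
        .dvp_pin = dvp_pin_config,
        .xclk_freq = XCLK_FREQ_HZ,
    };
    esp_video_init_config_t video_config = { .dvp = &dvp_config };
    // Esp32Camera now calls esp_video_init() and drives the sensor via /dev/video* ioctls.
    return new Esp32Camera(video_config);
}
```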

View File

@ -92,7 +92,7 @@ jobs:
include: ${{ fromJson(needs.prepare.outputs.variants) }}
runs-on: ubuntu-latest
container:
image: espressif/idf:release-v5.4
image: espressif/idf:release-v5.5
steps:
- name: Checkout
uses: actions/checkout@v4

View File

@ -24,7 +24,6 @@ set(SOURCES "audio/audio_codec.cc"
"display/lvgl_display/gif/lvgl_gif.cc"
"display/lvgl_display/gif/gifdec.c"
"display/lvgl_display/jpg/image_to_jpeg.cpp"
"display/lvgl_display/jpg/jpeg_encoder.cpp"
"protocols/protocol.cc"
"protocols/mqtt_protocol.cc"
"protocols/websocket_protocol.cc"
@ -594,6 +593,8 @@ if(CONFIG_IDF_TARGET_ESP32)
"audio/codecs/es8388_audio_codec.cc"
"audio/codecs/es8389_audio_codec.cc"
"led/gpio_led.cc"
"${CMAKE_CURRENT_SOURCE_DIR}/boards/common/esp32_camera.cc"
"display/lvgl_display/jpg/image_to_jpeg.cpp"
)
endif()

View File

@ -635,6 +635,38 @@ config RECEIVE_CUSTOM_MESSAGE
help
Enable custom message reception, allowing the device to receive custom messages from the server (preferably through the MQTT protocol)
menu "Camera Configuration"
depends on !IDF_TARGET_ESP32
config XIAOZHI_ENABLE_HARDWARE_JPEG_ENCODER
bool "Enable Hardware JPEG Encoder"
default y
depends on SOC_JPEG_ENCODE_SUPPORTED
help
Use the hardware JPEG encoder on the ESP32-P4 to encode images to JPEG.
See https://docs.espressif.com/projects/esp-idf/en/stable/esp32p4/api-reference/peripherals/jpeg.html for more details.
config XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
bool "Enable Camera Debug Mode"
default n
help
Enable camera debug mode and print camera debug information to the console.
Only works on boards that support camera.
config XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP
bool "Enable software camera buffer endianness swapping (USE WITH CAUTION)"
default n
depends on !CAMERA_SENSOR_SWAP_PIXEL_BYTE_ORDER
help
This option treats the camera buffer as a uint16_t[] array and performs byte-swapping (endianness conversion) on each element.
Should only be modified by development board integration engineers.
**Incorrect usage may result in incorrect image colors!**
ATTENTION: If the option CAMERA_SENSOR_SWAP_PIXEL_BYTE_ORDER is available for your sensor, please use that instead.
endmenu
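For context on the new `XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP` option above: when enabled, `Esp32Camera::Capture()` treats the mmap'ed camera buffer as a `uint16_t[]` and byte-swaps every element while copying the frame into PSRAM. A minimal standalone sketch of that swap (the function name is illustrative, not part of the codebase):

```cpp
#include <stdint.h>
#include <stddef.h>

// Byte-swap each 16-bit word while copying src -> dst.
// This is what the Kconfig option enables inside Esp32Camera::Capture();
// it compensates for sensors that output RGB565/YUYV with the opposite byte order.
static void swap_copy_u16(const uint16_t* src, uint16_t* dst, size_t bytes) {
    size_t count = bytes / 2;               // number of 16-bit words
    for (size_t i = 0; i < count; i++) {
        dst[i] = __builtin_bswap16(src[i]); // swap high/low byte of each word
    }
}
```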
menu "TAIJIPAI_S3_CONFIG"
depends on BOARD_TYPE_ESP32S3_Taiji_Pi
choice I2S_TYPE_TAIJIPI_S3

View File

@ -133,58 +133,53 @@ private:
// Initialize the OV2640 camera
// Parameters follow the official 正点原子 (ALIENTEK) example
void InitializeCamera() {
xl9555_->SetOutputState(OV_PWDN_IO, 0); // PWDN = low (power on)
xl9555_->SetOutputState(OV_RESET_IO, 0); // hold in reset
vTaskDelay(pdMS_TO_TICKS(50)); // extend the reset hold time
xl9555_->SetOutputState(OV_RESET_IO, 1); // release reset
vTaskDelay(pdMS_TO_TICKS(50)); // wait another 50 ms
camera_config_t config = {};
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAM_PIN_D0,
[1] = CAM_PIN_D1,
[2] = CAM_PIN_D2,
[3] = CAM_PIN_D3,
[4] = CAM_PIN_D4,
[5] = CAM_PIN_D5,
[6] = CAM_PIN_D6,
[7] = CAM_PIN_D7,
},
.vsync_io = CAM_PIN_VSYNC,
.de_io = CAM_PIN_HREF,
.pclk_io = CAM_PIN_PCLK,
.xclk_io = CAM_PIN_XCLK,
};
config.pin_pwdn = CAM_PIN_PWDN; // 实际由 XL9555 控制
config.pin_reset = CAM_PIN_RESET;// 实际由 XL9555 控制
config.pin_xclk = CAM_PIN_XCLK;
config.pin_sccb_sda = CAM_PIN_SIOD;
config.pin_sccb_scl = CAM_PIN_SIOC;
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = 1,
.scl_pin = CAM_PIN_SIOC,
.sda_pin = CAM_PIN_SIOD,
},
.freq = 100000,
};
config.pin_d7 = CAM_PIN_D7;
config.pin_d6 = CAM_PIN_D6;
config.pin_d5 = CAM_PIN_D5;
config.pin_d4 = CAM_PIN_D4;
config.pin_d3 = CAM_PIN_D3;
config.pin_d2 = CAM_PIN_D2;
config.pin_d1 = CAM_PIN_D1;
config.pin_d0 = CAM_PIN_D0;
config.pin_vsync = CAM_PIN_VSYNC;
config.pin_href = CAM_PIN_HREF;
config.pin_pclk = CAM_PIN_PCLK;
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAM_PIN_RESET, // actually controlled by the XL9555 expander
.pwdn_pin = CAM_PIN_PWDN, // actually controlled by the XL9555 expander
.dvp_pin = dvp_pin_config,
.xclk_freq = 24000000,
};
/* XCLK 20MHz or 10MHz for OV2640 double FPS (Experimental) */
config.xclk_freq_hz = 24000000;
config.ledc_timer = LEDC_TIMER_0;
config.ledc_channel = LEDC_CHANNEL_0;
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
config.pixel_format = PIXFORMAT_RGB565; /* YUV422,GRAYSCALE,RGB565,JPEG */
config.frame_size = FRAMESIZE_QVGA; /* QQVGA-UXGA, For ESP32, do not use sizes above QVGA when not JPEG. The performance of the ESP32-S series has improved a lot, but JPEG mode always gives better frame rates */
config.jpeg_quality = 12; /* 0-63, for OV series camera sensors, lower number means higher quality */
config.fb_count = 2; /* When jpeg mode is used, if fb_count more than one, the driver will work in continuous mode */
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
esp_err_t err = esp_camera_init(&config); // 测试相机是否存在
if (err != ESP_OK) {
ESP_LOGE(TAG, "Camera is not plugged in or not supported, error: %s", esp_err_to_name(err));
// 如果摄像头初始化失败,设置 camera_ 为 nullptr
camera_ = nullptr;
return;
}else
{
esp_camera_deinit();// 释放之前的摄像头资源,为正确初始化做准备
camera_ = new Esp32Camera(config);
}
camera_ = new Esp32Camera(video_config);
}
public:

View File

@ -14,6 +14,12 @@ AtomS3R CAM、AtomS3R M12 是 M5Stack 推出的基于 ESP32-S3-PICO-1-N8R8 的
Both boards come with **no screen and no extra buttons**, so voice wake-up must be used. When necessary, use `idf.py monitor` to check the logs and confirm the running state.
> [!NOTE]
>
> Since version [TBD], the AtomS3R M12 can no longer use the camera recognition feature, because the dependency library does not support the OV3660 sensor.
>
> The AtomS3R CAM is not affected; AtomS3R M12 units running older Xiaozhi firmware are not affected either.
## Configuration and Build Commands
**Set the build target to ESP32-S3**

View File

@ -123,36 +123,51 @@ private:
ESP_LOGI(TAG, "Camera Power Enabled");
vTaskDelay(pdMS_TO_TICKS(300));
vTaskDelay(pdMS_TO_TICKS(1000));
}
void InitializeCamera() {
camera_config_t config = {};
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = CAMERA_PIN_SIOD;
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1;
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_QVGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
camera_ = new Esp32Camera(config);
void InitializeCamera() {
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = 1,
.scl_pin = CAMERA_PIN_SIOC,
.sda_pin = CAMERA_PIN_SIOD,
},
.freq = 100000,
};
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
camera_->SetHMirror(false);
}

View File

@ -5,7 +5,10 @@
"name": "atoms3r-cam-m12-echo-base",
"sdkconfig_append": [
"CONFIG_ESPTOOLPY_FLASHSIZE_8MB=y",
"CONFIG_PARTITION_TABLE_CUSTOM_FILENAME=\"partitions/v2/8m.csv\""
"CONFIG_PARTITION_TABLE_CUSTOM_FILENAME=\"partitions/v2/8m.csv\"",
"CONFIG_CAMERA_GC0308=y",
"CONFIG_CAMERA_GC0308_AUTO_DETECT_DVP_INTERFACE_SENSOR=y",
"CONFIG_CAMERA_GC0308_DVP_YUV422_320X240_20FPS=y"
]
}
]

View File

@ -126,32 +126,47 @@ private:
}
void InitializeCamera() {
camera_config_t config = {};
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = CAMERA_PIN_SIOD;
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 0;
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_QVGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
camera_ = new Esp32Camera(config);
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = 0,
.scl_pin = CAMERA_PIN_SIOC,
.sda_pin = CAMERA_PIN_SIOD,
},
.freq = 100000,
};
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
camera_->SetHMirror(false);
}

View File

@ -1,37 +1,307 @@
#include "esp32_camera.h"
#include "mcp_server.h"
#include "display.h"
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/param.h>
#include <unistd.h>
#include "board.h"
#include "system_info.h"
#include "lvgl_display.h"
#include "display.h"
#include "esp_video_device.h"
#include "esp_video_init.h"
#include "jpg/image_to_jpeg.h"
#include "linux/videodev2.h"
#include "lvgl_display.h"
#include "mcp_server.h"
#include "system_info.h"
#include <esp_log.h>
#ifdef CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
#undef LOG_LOCAL_LEVEL
#define LOG_LOCAL_LEVEL MAX(CONFIG_LOG_DEFAULT_LEVEL, ESP_LOG_DEBUG)
#endif // CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
#include <errno.h>
#include <esp_heap_caps.h>
#include <esp_log.h>
#include <cstdio>
#include <cstring>
#define TAG "Esp32Camera"
Esp32Camera::Esp32Camera(const camera_config_t& config) {
// camera init
esp_err_t err = esp_camera_init(&config); // 配置上面定义的参数
if (err != ESP_OK) {
ESP_LOGE(TAG, "Camera init failed with error 0x%x", err);
#if defined(CONFIG_CAMERA_SENSOR_SWAP_PIXEL_BYTE_ORDER) || defined(CONFIG_XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP)
#warning "CAMERA_SENSOR_SWAP_PIXEL_BYTE_ORDER or CONFIG_XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP is enabled, which may cause image corruption in YUV422 format!"
#endif
#if CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
#define CAM_PRINT_FOURCC(pixelformat) \
char fourcc[5]; \
fourcc[0] = pixelformat & 0xFF; \
fourcc[1] = (pixelformat >> 8) & 0xFF; \
fourcc[2] = (pixelformat >> 16) & 0xFF; \
fourcc[3] = (pixelformat >> 24) & 0xFF; \
fourcc[4] = '\0'; \
ESP_LOGD(TAG, "FOURCC: '%c%c%c%c'", fourcc[0], fourcc[1], fourcc[2], fourcc[3]);
static void log_available_video_devices() {
for (int i = 0; i < 50; i++) {
char path[16];
snprintf(path, sizeof(path), "/dev/video%d", i);
int fd = open(path, O_RDONLY);
if (fd >= 0) {
ESP_LOGD(TAG, "found video device: %s", path);
close(fd);
}
}
}
#else
#define CAM_PRINT_FOURCC(pixelformat) (void)0;
#endif // CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
Esp32Camera::Esp32Camera(const esp_video_init_config_t& config) {
if (esp_video_init(&config) != ESP_OK) {
ESP_LOGE(TAG, "esp_video_init failed");
return;
}
sensor_t *s = esp_camera_sensor_get(); // 获取摄像头型号
if (s->id.PID == GC0308_PID) {
s->set_hmirror(s, 0); // 这里控制摄像头镜像 写1镜像 写0不镜像
#ifdef CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
esp_log_level_set(TAG, ESP_LOG_DEBUG);
#endif // CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
const char* video_device_name = nullptr;
if (false) { /* placeholder so every branch below can use "else if" */
}
#if CONFIG_ESP_VIDEO_ENABLE_MIPI_CSI_VIDEO_DEVICE
else if (config.csi != nullptr) {
video_device_name = ESP_VIDEO_MIPI_CSI_DEVICE_NAME;
}
#endif
#if CONFIG_ESP_VIDEO_ENABLE_DVP_VIDEO_DEVICE
else if (config.dvp != nullptr) {
video_device_name = ESP_VIDEO_DVP_DEVICE_NAME;
}
#endif
#if CONFIG_ESP_VIDEO_ENABLE_HW_JPEG_VIDEO_DEVICE
else if (config.jpeg != nullptr) {
video_device_name = ESP_VIDEO_JPEG_DEVICE_NAME;
}
#endif
#if CONFIG_ESP_VIDEO_ENABLE_SPI_VIDEO_DEVICE
else if (config.spi != nullptr) {
video_device_name = ESP_VIDEO_SPI_DEVICE_NAME;
}
#endif
#if CONFIG_ESP_VIDEO_ENABLE_USB_UVC_VIDEO_DEVICE
else if (config.usb_uvc != nullptr) {
video_device_name = ESP_VIDEO_USB_UVC_DEVICE_NAME(config.usb_uvc->uvc.uvc_dev_num);
}
#endif
if (video_device_name == nullptr) {
ESP_LOGE(TAG, "no video device is enabled");
return;
}
video_fd_ = open(video_device_name, O_RDWR);
if (video_fd_ < 0) {
ESP_LOGE(TAG, "open %s failed, errno=%d(%s)", video_device_name, errno, strerror(errno));
#if CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
log_available_video_devices();
#endif // CONFIG_XIAOZHI_ENABLE_CAMERA_DEBUG_MODE
return;
}
struct v4l2_capability cap = {};
if (ioctl(video_fd_, VIDIOC_QUERYCAP, &cap) != 0) {
ESP_LOGE(TAG, "VIDIOC_QUERYCAP failed, errno=%d(%s)", errno, strerror(errno));
close(video_fd_);
video_fd_ = -1;
return;
}
ESP_LOGD(
TAG,
"VIDIOC_QUERYCAP: driver=%s, card=%s, bus_info=%s, version=0x%08x, capabilities=0x%08x, device_caps=0x%08x",
cap.driver, cap.card, cap.bus_info, cap.version, cap.capabilities, cap.device_caps);
struct v4l2_format format = {};
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(video_fd_, VIDIOC_G_FMT, &format) != 0) {
ESP_LOGE(TAG, "VIDIOC_G_FMT failed, errno=%d(%s)", errno, strerror(errno));
close(video_fd_);
video_fd_ = -1;
return;
}
ESP_LOGD(TAG, "VIDIOC_G_FMT: pixelformat=0x%08x, width=%d, height=%d", format.fmt.pix.pixelformat,
format.fmt.pix.width, format.fmt.pix.height);
CAM_PRINT_FOURCC(format.fmt.pix.pixelformat);
struct v4l2_format setformat = {};
setformat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setformat.fmt.pix.width = format.fmt.pix.width;
setformat.fmt.pix.height = format.fmt.pix.height;
struct v4l2_fmtdesc fmtdesc = {};
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmtdesc.index = 0;
uint32_t best_fmt = 0;
int best_rank = 1 << 30; // large number
// Priority: YUV422P > RGB565 > RGB24 > GREY
// Note: in the current version YUV422P is actually output as packed YUYV; the YUYV layout saves memory in the subsequent processing.
auto get_rank = [](uint32_t fmt) -> int {
switch (fmt) {
case V4L2_PIX_FMT_YUV422P:
return 0;
case V4L2_PIX_FMT_RGB565:
return 1;
case V4L2_PIX_FMT_RGB24:
return 2;
case V4L2_PIX_FMT_GREY:
return 3;
default:
return 1 << 29; // unsupported
}
};
while (ioctl(video_fd_, VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
ESP_LOGD(TAG, "VIDIOC_ENUM_FMT: pixelformat=0x%08x, description=%s", fmtdesc.pixelformat, fmtdesc.description);
CAM_PRINT_FOURCC(fmtdesc.pixelformat);
int rank = get_rank(fmtdesc.pixelformat);
if (rank < best_rank) {
best_rank = rank;
best_fmt = fmtdesc.pixelformat;
}
fmtdesc.index++;
}
if (best_rank < (1 << 29)) {
setformat.fmt.pix.pixelformat = best_fmt;
sensor_format_ = best_fmt;
}
if (!setformat.fmt.pix.pixelformat) {
ESP_LOGE(TAG, "no supported pixel format found");
close(video_fd_);
video_fd_ = -1;
sensor_format_ = 0;
return;
}
ESP_LOGD(TAG, "selected pixel format: 0x%08x", setformat.fmt.pix.pixelformat);
if (ioctl(video_fd_, VIDIOC_S_FMT, &setformat) != 0) {
ESP_LOGE(TAG, "VIDIOC_S_FMT failed, errno=%d(%s)", errno, strerror(errno));
close(video_fd_);
video_fd_ = -1;
sensor_format_ = 0;
return;
}
frame_.width = setformat.fmt.pix.width;
frame_.height = setformat.fmt.pix.height;
// Request capture buffers and mmap them
struct v4l2_requestbuffers req = {};
req.count = strcmp(video_device_name, ESP_VIDEO_MIPI_CSI_DEVICE_NAME) == 0 ? 2 : 1;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (ioctl(video_fd_, VIDIOC_REQBUFS, &req) != 0) {
ESP_LOGE(TAG, "VIDIOC_REQBUFS failed");
close(video_fd_);
video_fd_ = -1;
sensor_format_ = 0;
return;
}
mmap_buffers_.resize(req.count);
for (uint32_t i = 0; i < req.count; i++) {
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (ioctl(video_fd_, VIDIOC_QUERYBUF, &buf) != 0) {
ESP_LOGE(TAG, "VIDIOC_QUERYBUF failed");
close(video_fd_);
video_fd_ = -1;
sensor_format_ = 0;
return;
}
void* start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, video_fd_, buf.m.offset);
if (start == MAP_FAILED) {
ESP_LOGE(TAG, "mmap failed");
close(video_fd_);
video_fd_ = -1;
sensor_format_ = 0;
return;
}
mmap_buffers_[i].start = start;
mmap_buffers_[i].length = buf.length;
if (ioctl(video_fd_, VIDIOC_QBUF, &buf) != 0) {
ESP_LOGE(TAG, "VIDIOC_QBUF failed");
close(video_fd_);
video_fd_ = -1;
sensor_format_ = 0;
return;
}
}
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(video_fd_, VIDIOC_STREAMON, &type) != 0) {
ESP_LOGE(TAG, "VIDIOC_STREAMON failed");
close(video_fd_);
video_fd_ = -1;
sensor_format_ = 0;
return;
}
#ifdef CONFIG_ESP_VIDEO_ENABLE_ISP_VIDEO_DEVICE
// When the ISP is enabled it needs a number of frames to initialize its parameters, so capture and discard frames in the background for 5 s after startup.
xTaskCreate(
[](void* arg) {
Esp32Camera* self = static_cast<Esp32Camera*>(arg);
uint16_t capture_count = 0;
TickType_t start = xTaskGetTickCount();
TickType_t duration = 5000 / portTICK_PERIOD_MS; // 5s
while ((xTaskGetTickCount() - start) < duration) {
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (ioctl(self->video_fd_, VIDIOC_DQBUF, &buf) != 0) {
ESP_LOGE(TAG, "VIDIOC_DQBUF failed during init");
vTaskDelay(10 / portTICK_PERIOD_MS);
continue;
}
if (ioctl(self->video_fd_, VIDIOC_QBUF, &buf) != 0) {
ESP_LOGE(TAG, "VIDIOC_QBUF failed during init");
}
capture_count++;
}
ESP_LOGI(TAG, "Camera init success, captured %d frames in %dms", capture_count,
(xTaskGetTickCount() - start) * portTICK_PERIOD_MS);
self->streaming_on_ = true;
vTaskDelete(NULL);
},
"CameraInitTask", 4096, this, 5, nullptr);
#else
ESP_LOGI(TAG, "Camera init success");
streaming_on_ = true;
#endif // CONFIG_ESP_VIDEO_ENABLE_ISP_VIDEO_DEVICE
}
Esp32Camera::~Esp32Camera() {
if (fb_) {
esp_camera_fb_return(fb_);
fb_ = nullptr;
if (streaming_on_ && video_fd_ >= 0) {
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ioctl(video_fd_, VIDIOC_STREAMOFF, &type);
}
esp_camera_deinit();
for (auto& b : mmap_buffers_) {
if (b.start && b.length) {
munmap(b.start, b.length);
}
}
if (video_fd_ >= 0) {
close(video_fd_);
video_fd_ = -1;
}
sensor_format_ = 0;
esp_video_deinit();
}
void Esp32Camera::SetExplainUrl(const std::string& url, const std::string& token) {
@ -44,98 +314,266 @@ bool Esp32Camera::Capture() {
encoder_thread_.join();
}
auto start_time = esp_timer_get_time();
int frames_to_get = 2;
// Try to get a stable frame
for (int i = 0; i < frames_to_get; i++) {
if (fb_ != nullptr) {
esp_camera_fb_return(fb_);
}
fb_ = esp_camera_fb_get();
if (fb_ == nullptr) {
ESP_LOGE(TAG, "Camera capture failed");
if (!streaming_on_ || video_fd_ < 0) {
return false;
}
for (int i = 0; i < 3; i++) {
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (ioctl(video_fd_, VIDIOC_DQBUF, &buf) != 0) {
ESP_LOGE(TAG, "VIDIOC_DQBUF failed");
return false;
}
if (i == 2) {
// Save a copy of the frame into PSRAM
if (frame_.data) {
heap_caps_free(frame_.data);
frame_.data = nullptr;
frame_.format = 0;
}
frame_.len = buf.bytesused;
frame_.data = (uint8_t*)heap_caps_malloc(frame_.len, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
if (!frame_.data) {
ESP_LOGE(TAG, "alloc frame copy failed");
return false;
}
ESP_LOGD(TAG, "frame.len = %d, frame.width = %d, frame.height = %d", frame_.len, frame_.width,
frame_.height);
ESP_LOG_BUFFER_HEXDUMP(TAG, frame_.data, MIN(frame_.len, 256), ESP_LOG_DEBUG);
switch (sensor_format_) {
case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_RGB24:
case V4L2_PIX_FMT_YUYV:
#ifdef CONFIG_XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP
{
auto src16 = (uint16_t*)mmap_buffers_[buf.index].start;
auto dst16 = (uint16_t*)frame_.data;
size_t count = (size_t)mmap_buffers_[buf.index].length / 2;
for (size_t i = 0; i < count; i++) {
dst16[i] = __builtin_bswap16(src16[i]);
}
}
#else
memcpy(frame_.data, mmap_buffers_[buf.index].start, frame_.len);
#endif // CONFIG_XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP
frame_.format = sensor_format_;
break;
case V4L2_PIX_FMT_YUV422P: {
// This format is actually packed 4:2:2 YUYV, not planar
frame_.format = V4L2_PIX_FMT_YUYV;
#ifdef CONFIG_XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP
{
auto src16 = (uint16_t*)mmap_buffers_[buf.index].start;
auto dst16 = (uint16_t*)frame_.data;
size_t count = (size_t)mmap_buffers_[buf.index].length / 2;
for (size_t i = 0; i < count; i++) {
dst16[i] = __builtin_bswap16(src16[i]);
}
}
#else
memcpy(frame_.data, mmap_buffers_[buf.index].start, frame_.len);
#endif // CONFIG_XIAOZHI_ENABLE_CAMERA_ENDIANNESS_SWAP
break;
}
case V4L2_PIX_FMT_RGB565X: {
// Big-endian RGB565 must be converted to little-endian.
// Currently esp_video reports RGB565 for either byte order and never RGB565X; this case is kept for forward compatibility.
auto src16 = (uint16_t*)mmap_buffers_[buf.index].start;
auto dst16 = (uint16_t*)frame_.data;
size_t pixel_count = (size_t)frame_.width * (size_t)frame_.height;
for (size_t i = 0; i < pixel_count; i++) {
dst16[i] = __builtin_bswap16(src16[i]);
}
frame_.format = V4L2_PIX_FMT_RGB565;
break;
}
default:
ESP_LOGE(TAG, "unsupported sensor format: 0x%08x", sensor_format_);
return false;
}
}
if (ioctl(video_fd_, VIDIOC_QBUF, &buf) != 0) {
ESP_LOGE(TAG, "VIDIOC_QBUF failed");
}
}
auto end_time = esp_timer_get_time();
ESP_LOGI(TAG, "Camera captured %d frames in %d ms", frames_to_get, int((end_time - start_time) / 1000));
// Show a preview image on the display
auto display = dynamic_cast<LvglDisplay*>(Board::GetInstance().GetDisplay());
if (display != nullptr) {
auto data = (uint8_t*)heap_caps_malloc(fb_->len, MALLOC_CAP_SPIRAM);
if (data == nullptr) {
ESP_LOGE(TAG, "Failed to allocate memory for preview image");
if (!frame_.data) {
return false;
}
uint16_t w = frame_.width;
uint16_t h = frame_.height;
size_t lvgl_image_size = frame_.len;
size_t stride = ((w * 2) + 3) & ~3; // round the stride up to a 4-byte boundary
lv_color_format_t color_format = LV_COLOR_FORMAT_RGB565;
uint8_t* data = nullptr;
auto src = (uint16_t*)fb_->buf;
auto dst = (uint16_t*)data;
size_t pixel_count = fb_->len / 2;
for (size_t i = 0; i < pixel_count; i++) {
// 交换每个16位字内的字节
dst[i] = __builtin_bswap16(src[i]);
switch (frame_.format) {
case V4L2_PIX_FMT_YUYV:
// color_format = LV_COLOR_FORMAT_YUY2;
// [[fallthrough]];
// Rendering with LV_COLOR_FORMAT_YUY2 appears to be broken, so convert to RGB565 for display for now
{
color_format = LV_COLOR_FORMAT_RGB565;
data = (uint8_t*)heap_caps_malloc(w * h * 2, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
lvgl_image_size = w * h * 2;
if (data == nullptr) {
ESP_LOGE(TAG, "Failed to allocate memory for preview image");
return false;
}
const uint8_t* src = (const uint8_t*)frame_.data;
size_t src_len = frame_.len;
size_t dst_off = 0;
auto clamp = [](int v) -> uint8_t {
if (v < 0) return 0;
if (v > 255) return 255;
return (uint8_t)v;
};
// Each 4 bytes encode two pixels: Y0 U Y1 V
for (size_t i = 0; i + 3 < src_len; i += 4) {
int y0 = (int)src[i + 0];
int u = (int)src[i + 1];
int y1 = (int)src[i + 2];
int v = (int)src[i + 3];
int c0 = y0 - 16;
int c1 = y1 - 16;
int d = u - 128;
int e = v - 128;
// Standard integer approximation of the YUV -> RGB conversion
int r0 = (298 * c0 + 409 * e + 128) >> 8;
int g0 = (298 * c0 - 100 * d - 208 * e + 128) >> 8;
int b0 = (298 * c0 + 516 * d + 128) >> 8;
int r1 = (298 * c1 + 409 * e + 128) >> 8;
int g1 = (298 * c1 - 100 * d - 208 * e + 128) >> 8;
int b1 = (298 * c1 + 516 * d + 128) >> 8;
uint8_t cr0 = clamp(r0);
uint8_t cg0 = clamp(g0);
uint8_t cb0 = clamp(b0);
uint8_t cr1 = clamp(r1);
uint8_t cg1 = clamp(g1);
uint8_t cb1 = clamp(b1);
// Pack into RGB565
uint16_t pix0 = (uint16_t)(((cr0 >> 3) << 11) | ((cg0 >> 2) << 5) | (cb0 >> 3));
uint16_t pix1 = (uint16_t)(((cr1 >> 3) << 11) | ((cg1 >> 2) << 5) | (cb1 >> 3));
// Little-endian: write the low byte first
data[dst_off++] = (uint8_t)(pix0 & 0xFF);
data[dst_off++] = (uint8_t)((pix0 >> 8) & 0xFF);
data[dst_off++] = (uint8_t)(pix1 & 0xFF);
data[dst_off++] = (uint8_t)((pix1 >> 8) & 0xFF);
}
break;
}
case V4L2_PIX_FMT_RGB565:
// The default color_format is already LV_COLOR_FORMAT_RGB565
data = (uint8_t*)heap_caps_malloc(w * h * 2, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
if (data == nullptr) {
ESP_LOGE(TAG, "Failed to allocate memory for preview image");
return false;
}
memcpy(data, frame_.data, frame_.len);
lvgl_image_size = frame_.len; // also correct for YUYV if that case ever falls through to here
break;
case V4L2_PIX_FMT_RGB24: {
// RGB888 must be converted to RGB565
color_format = LV_COLOR_FORMAT_RGB565;
data = (uint8_t*)heap_caps_malloc(w * h * 2, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
uint16_t* dst16 = (uint16_t*)data;
if (data == nullptr) {
ESP_LOGE(TAG, "Failed to allocate memory for preview image");
return false;
}
const uint8_t* src = frame_.data;
size_t pixel_count = (size_t)w * (size_t)h;
for (size_t i = 0; i < pixel_count; i++) {
uint8_t r = src[i * 3 + 0];
uint8_t g = src[i * 3 + 1];
uint8_t b = src[i * 3 + 2];
dst16[i] = (uint16_t)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
}
lvgl_image_size = w * h * 2;
break;
}
default:
ESP_LOGE(TAG, "unsupported frame format: 0x%08x", frame_.format);
return false;
}
auto image = std::make_unique<LvglAllocatedImage>(data, fb_->len, fb_->width, fb_->height, fb_->width * 2, LV_COLOR_FORMAT_RGB565);
auto image = std::make_unique<LvglAllocatedImage>(data, lvgl_image_size, w, h, stride, color_format);
display->SetPreviewImage(std::move(image));
}
return true;
}
bool Esp32Camera::SetHMirror(bool enabled) {
sensor_t *s = esp_camera_sensor_get();
if (s == nullptr) {
ESP_LOGE(TAG, "Failed to get camera sensor");
if (video_fd_ < 0)
return false;
struct v4l2_ext_controls ctrls = {};
struct v4l2_ext_control ctrl = {};
ctrl.id = V4L2_CID_HFLIP;
ctrl.value = enabled ? 1 : 0;
ctrls.ctrl_class = V4L2_CTRL_CLASS_USER;
ctrls.count = 1;
ctrls.controls = &ctrl;
if (ioctl(video_fd_, VIDIOC_S_EXT_CTRLS, &ctrls) != 0) {
ESP_LOGE(TAG, "set HFLIP failed");
return false;
}
esp_err_t err = s->set_hmirror(s, enabled);
if (err != ESP_OK) {
ESP_LOGE(TAG, "Failed to set horizontal mirror: %d", err);
return false;
}
ESP_LOGI(TAG, "Camera horizontal mirror set to: %s", enabled ? "enabled" : "disabled");
return true;
}
bool Esp32Camera::SetVFlip(bool enabled) {
sensor_t *s = esp_camera_sensor_get();
if (s == nullptr) {
ESP_LOGE(TAG, "Failed to get camera sensor");
if (video_fd_ < 0)
return false;
struct v4l2_ext_controls ctrls = {};
struct v4l2_ext_control ctrl = {};
ctrl.id = V4L2_CID_VFLIP;
ctrl.value = enabled ? 1 : 0;
ctrls.ctrl_class = V4L2_CTRL_CLASS_USER;
ctrls.count = 1;
ctrls.controls = &ctrl;
if (ioctl(video_fd_, VIDIOC_S_EXT_CTRLS, &ctrls) != 0) {
ESP_LOGE(TAG, "set VFLIP failed");
return false;
}
esp_err_t err = s->set_vflip(s, enabled);
if (err != ESP_OK) {
ESP_LOGE(TAG, "Failed to set vertical flip: %d", err);
return false;
}
ESP_LOGI(TAG, "Camera vertical flip set to: %s", enabled ? "enabled" : "disabled");
return true;
}
/**
* @brief AI analysis and explanation of the captured image.
*
* Encodes the captured frame to JPEG and sends it to the configured explain
* server as an HTTP POST request in multipart/form-data form; the server runs
* AI analysis on the image and returns the result.
*
* Implementation notes:
* - JPEG encoding runs in a separate thread so encoding and upload overlap.
* - The request body is streamed with chunked transfer encoding.
* - Encoding and network transfer run in parallel to reduce end-to-end latency.
* - The device/client ID and the authentication token are carried as HTTP headers.
*
* @param question The question about the image to ask the AI.
* @return std::string JSON-formatted response string with the AI analysis result:
*         on success: {"success": true, "result": "<analysis result>"}
*         on failure: {"success": false, "message": "<error message>"}
*
* @note SetExplainUrl() must be called first to configure the explain URL.
* @note The device must be online with a working network connection.
* @warning
@ -154,17 +592,19 @@ std::string Esp32Camera::Explain(const std::string& question) {
// We spawn a thread to encode the image to JPEG using the optimized encoder (costs about 500 ms and 8 KB of SRAM)
encoder_thread_ = std::thread([this, jpeg_queue]() {
image_to_jpeg_cb(fb_->buf, fb_->len, fb_->width, fb_->height, fb_->format, 80,
uint16_t w = frame_.width ? frame_.width : 320;
uint16_t h = frame_.height ? frame_.height : 240;
v4l2_pix_fmt_t enc_fmt = frame_.format;
image_to_jpeg_cb(
frame_.data, frame_.len, w, h, enc_fmt, 80,
[](void* arg, size_t index, const void* data, size_t len) -> size_t {
auto jpeg_queue = (QueueHandle_t)arg;
JpegChunk chunk = {
.data = (uint8_t*)heap_caps_aligned_alloc(16, len, MALLOC_CAP_SPIRAM),
.len = len
};
memcpy(chunk.data, data, len);
xQueueSend(jpeg_queue, &chunk, portMAX_DELAY);
return len;
}, jpeg_queue);
auto jpeg_queue = (QueueHandle_t)arg;
JpegChunk chunk = {.data = (uint8_t*)heap_caps_aligned_alloc(16, len, MALLOC_CAP_SPIRAM), .len = len};
memcpy(chunk.data, data, len);
xQueueSend(jpeg_queue, &chunk, portMAX_DELAY);
return len;
},
jpeg_queue);
});
auto network = Board::GetInstance().GetNetwork();
@ -195,7 +635,7 @@ std::string Esp32Camera::Explain(const std::string& question) {
vQueueDelete(jpeg_queue);
throw std::runtime_error("Failed to connect to explain URL");
}
{
// First chunk: the question field
std::string question_field;
@ -224,7 +664,7 @@ std::string Esp32Camera::Explain(const std::string& question) {
break;
}
if (chunk.data == nullptr) {
break; // The last chunk
break; // The last chunk
}
http->Write((const char*)chunk.data, chunk.len);
total_sent += chunk.len;
@ -254,7 +694,7 @@ std::string Esp32Camera::Explain(const std::string& question) {
// Get remain task stack size
size_t remain_stack_size = uxTaskGetStackHighWaterMark(nullptr);
ESP_LOGI(TAG, "Explain image size=%dx%d, compressed size=%d, remain stack size=%d, question=%s\n%s",
fb_->width, fb_->height, total_sent, remain_stack_size, question.c_str(), result.c_str());
ESP_LOGI(TAG, "Explain image size=%d bytes, compressed size=%d, remain stack size=%d, question=%s\n%s",
(int)frame_.len, (int)total_sent, (int)remain_stack_size, question.c_str(), result.c_str());
return result;
}
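The `Explain()` documentation above says the method returns a JSON string of the form `{"success": true, "result": ...}` or `{"success": false, "message": ...}`. A minimal caller-side sketch using ESP-IDF's bundled cJSON (an assumption for illustration; the project may parse the response differently):

```cpp
#include <string>
#include "cJSON.h"
#include "esp_log.h"

// Illustrative only: check the JSON string returned by Esp32Camera::Explain().
static bool HandleExplainResult(const std::string& json, std::string& out) {
    cJSON* root = cJSON_Parse(json.c_str());
    if (root == nullptr) {
        ESP_LOGE("ExplainDemo", "response is not valid JSON");
        return false;
    }
    bool ok = cJSON_IsTrue(cJSON_GetObjectItem(root, "success"));
    // On success read "result", otherwise read "message"
    cJSON* field = cJSON_GetObjectItem(root, ok ? "result" : "message");
    if (cJSON_IsString(field)) {
        out = field->valuestring;
    }
    cJSON_Delete(root);
    return ok;
}
```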

View File

@ -1,15 +1,18 @@
#ifndef ESP32_CAMERA_H
#define ESP32_CAMERA_H
#pragma once
#include "sdkconfig.h"
#include <esp_camera.h>
#ifndef CONFIG_IDF_TARGET_ESP32
#include <lvgl.h>
#include <thread>
#include <memory>
#include <vector>
#include <freertos/FreeRTOS.h>
#include <freertos/queue.h>
#include "camera.h"
#include "jpg/image_to_jpeg.h"
#include "esp_video_init.h"
struct JpegChunk {
uint8_t* data;
@ -18,13 +21,24 @@ struct JpegChunk {
class Esp32Camera : public Camera {
private:
camera_fb_t* fb_ = nullptr;
struct FrameBuffer {
uint8_t *data = nullptr;
size_t len = 0;
uint16_t width = 0;
uint16_t height = 0;
v4l2_pix_fmt_t format = 0;
} frame_;
v4l2_pix_fmt_t sensor_format_ = 0;
int video_fd_ = -1;
bool streaming_on_ = false;
struct MmapBuffer { void *start = nullptr; size_t length = 0; };
std::vector<MmapBuffer> mmap_buffers_;
std::string explain_url_;
std::string explain_token_;
std::thread encoder_thread_;
public:
Esp32Camera(const camera_config_t& config);
Esp32Camera(const esp_video_init_config_t& config);
~Esp32Camera();
virtual void SetExplainUrl(const std::string& url, const std::string& token);
@ -35,4 +49,4 @@ public:
virtual std::string Explain(const std::string& question);
};
#endif // ESP32_CAMERA_H
#endif // ndef CONFIG_IDF_TARGET_ESP32

View File

@ -42,15 +42,15 @@
#define DISPLAY_BACKLIGHT_OUTPUT_INVERT false
/* DFRobot K10 Camera pins */
#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 7
#define PWDN_GPIO_NUM GPIO_NUM_NC
#define RESET_GPIO_NUM GPIO_NUM_NC
#define XCLK_GPIO_NUM GPIO_NUM_7
#define VSYNC_GPIO_NUM 4
#define HREF_GPIO_NUM 5
#define PCLK_GPIO_NUM 17
#define SIOD_GPIO_NUM 20
#define SIOC_GPIO_NUM 19
#define VSYNC_GPIO_NUM GPIO_NUM_4
#define HREF_GPIO_NUM GPIO_NUM_5
#define PCLK_GPIO_NUM GPIO_NUM_17
#define SIOD_GPIO_NUM GPIO_NUM_20
#define SIOC_GPIO_NUM GPIO_NUM_19
/* Camera pins */
#define CAMERA_PIN_PWDN PWDN_GPIO_NUM
@ -59,14 +59,14 @@
#define CAMERA_PIN_SIOD SIOD_GPIO_NUM
#define CAMERA_PIN_SIOC SIOC_GPIO_NUM
#define CAMERA_PIN_D9 6
#define CAMERA_PIN_D8 15
#define CAMERA_PIN_D7 16
#define CAMERA_PIN_D6 18
#define CAMERA_PIN_D5 9
#define CAMERA_PIN_D4 11
#define CAMERA_PIN_D3 10
#define CAMERA_PIN_D2 8
#define CAMERA_PIN_D9 GPIO_NUM_6
#define CAMERA_PIN_D8 GPIO_NUM_15
#define CAMERA_PIN_D7 GPIO_NUM_16
#define CAMERA_PIN_D6 GPIO_NUM_18
#define CAMERA_PIN_D5 GPIO_NUM_9
#define CAMERA_PIN_D4 GPIO_NUM_11
#define CAMERA_PIN_D3 GPIO_NUM_10
#define CAMERA_PIN_D2 GPIO_NUM_8
#define CAMERA_PIN_VSYNC VSYNC_GPIO_NUM
#define CAMERA_PIN_HREF HREF_GPIO_NUM
#define CAMERA_PIN_PCLK PCLK_GPIO_NUM

View File

@ -167,36 +167,43 @@ private:
}
void InitializeCamera() {
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D2,
[1] = CAMERA_PIN_D3,
[2] = CAMERA_PIN_D4,
[3] = CAMERA_PIN_D5,
[4] = CAMERA_PIN_D6,
[5] = CAMERA_PIN_D7,
[6] = CAMERA_PIN_D8,
[7] = CAMERA_PIN_D9,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC通道选择 用于生成XCLK时钟 但是S3不用
config.ledc_timer = LEDC_TIMER_2; // LEDC timer选择 用于生成XCLK时钟 但是S3不用
config.pin_d0 = CAMERA_PIN_D2;
config.pin_d1 = CAMERA_PIN_D3;
config.pin_d2 = CAMERA_PIN_D4;
config.pin_d3 = CAMERA_PIN_D5;
config.pin_d4 = CAMERA_PIN_D6;
config.pin_d5 = CAMERA_PIN_D7;
config.pin_d6 = CAMERA_PIN_D8;
config.pin_d7 = CAMERA_PIN_D9;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = -1; // 这里如果写-1 表示使用已经初始化的I2C接口
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1; // 这里如果写1 默认使用I2C1
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_VGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = false,
.i2c_handle = i2c_bus_,
.freq = 100000,
};
camera_ = new Esp32Camera(config);
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
}
void InitializeIli9341Display() {

View File

@ -22,22 +22,22 @@
#define RESET_FACTORY_BUTTON_GPIO GPIO_NUM_NC
/* DFRobot Camera pins */
#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 5
#define Y9_GPIO_NUM 4
#define Y8_GPIO_NUM 6
#define Y7_GPIO_NUM 7
#define Y6_GPIO_NUM 14
#define Y5_GPIO_NUM 17
#define Y4_GPIO_NUM 21
#define Y3_GPIO_NUM 18
#define Y2_GPIO_NUM 16
#define VSYNC_GPIO_NUM 1
#define HREF_GPIO_NUM 2
#define PCLK_GPIO_NUM 15
#define SIOD_GPIO_NUM 8
#define SIOC_GPIO_NUM 9
#define PWDN_GPIO_NUM GPIO_NUM_NC
#define RESET_GPIO_NUM GPIO_NUM_NC
#define XCLK_GPIO_NUM GPIO_NUM_5
#define Y9_GPIO_NUM GPIO_NUM_4
#define Y8_GPIO_NUM GPIO_NUM_6
#define Y7_GPIO_NUM GPIO_NUM_7
#define Y6_GPIO_NUM GPIO_NUM_14
#define Y5_GPIO_NUM GPIO_NUM_17
#define Y4_GPIO_NUM GPIO_NUM_21
#define Y3_GPIO_NUM GPIO_NUM_18
#define Y2_GPIO_NUM GPIO_NUM_16
#define VSYNC_GPIO_NUM GPIO_NUM_1
#define HREF_GPIO_NUM GPIO_NUM_2
#define PCLK_GPIO_NUM GPIO_NUM_15
#define SIOD_GPIO_NUM GPIO_NUM_8
#define SIOC_GPIO_NUM GPIO_NUM_9
/* Camera pins */
#define CAMERA_PIN_PWDN PWDN_GPIO_NUM

View File

@ -30,35 +30,47 @@ class DfrobotEsp32S3AiCam : public WifiBoard {
}
void InitializeCamera() {
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC通道选择 用于生成XCLK时钟 但是S3不用
config.ledc_timer = LEDC_TIMER_2; // LEDC timer选择 用于生成XCLK时钟 但是S3不用
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = CAMERA_PIN_SIOD; // 这里如果写-1 表示使用已经初始化的I2C接口
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1; // 这里如果写1 默认使用I2C1
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_VGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
camera_ = new Esp32Camera(config);
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = 1,
.scl_pin = CAMERA_PIN_SIOC,
.sda_pin = CAMERA_PIN_SIOD,
},
.freq = 100000,
};
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
camera_->SetVFlip(1);
}

View File

@ -3,7 +3,11 @@
"builds": [
{
"name": "esp-sparkbot",
"sdkconfig_append": []
"sdkconfig_append": [
"CONFIG_CAMERA_OV2640=y",
"CONFIG_CAMERA_OV2640_AUTO_DETECT_DVP_INTERFACE_SENSOR=y",
"CONFIG_CAMERA_OV2640_DVP_YUV422_240X240_25FPS=y"
]
}
]
}

View File

@ -121,41 +121,48 @@ private:
}
void InitializeCamera() {
camera_config_t camera_config = {};
camera_config.pin_pwdn = SPARKBOT_CAMERA_PWDN;
camera_config.pin_reset = SPARKBOT_CAMERA_RESET;
camera_config.pin_xclk = SPARKBOT_CAMERA_XCLK;
camera_config.pin_pclk = SPARKBOT_CAMERA_PCLK;
camera_config.pin_sccb_sda = SPARKBOT_CAMERA_SIOD;
camera_config.pin_sccb_scl = SPARKBOT_CAMERA_SIOC;
// DVP pin configuration
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = SPARKBOT_CAMERA_D0,
[1] = SPARKBOT_CAMERA_D1,
[2] = SPARKBOT_CAMERA_D2,
[3] = SPARKBOT_CAMERA_D3,
[4] = SPARKBOT_CAMERA_D4,
[5] = SPARKBOT_CAMERA_D5,
[6] = SPARKBOT_CAMERA_D6,
[7] = SPARKBOT_CAMERA_D7,
},
.vsync_io = SPARKBOT_CAMERA_VSYNC,
.de_io = SPARKBOT_CAMERA_HSYNC,
.pclk_io = SPARKBOT_CAMERA_PCLK,
.xclk_io = SPARKBOT_CAMERA_XCLK,
};
camera_config.pin_d0 = SPARKBOT_CAMERA_D0;
camera_config.pin_d1 = SPARKBOT_CAMERA_D1;
camera_config.pin_d2 = SPARKBOT_CAMERA_D2;
camera_config.pin_d3 = SPARKBOT_CAMERA_D3;
camera_config.pin_d4 = SPARKBOT_CAMERA_D4;
camera_config.pin_d5 = SPARKBOT_CAMERA_D5;
camera_config.pin_d6 = SPARKBOT_CAMERA_D6;
camera_config.pin_d7 = SPARKBOT_CAMERA_D7;
// Reuse the existing I2C bus
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = false, // do not initialize a new SCCB bus; reuse the existing I2C bus
.i2c_handle = i2c_bus_, // handle of the existing I2C bus
.freq = 100000, // 100 kHz
};
camera_config.pin_vsync = SPARKBOT_CAMERA_VSYNC;
camera_config.pin_href = SPARKBOT_CAMERA_HSYNC;
camera_config.pin_pclk = SPARKBOT_CAMERA_PCLK;
camera_config.xclk_freq_hz = SPARKBOT_CAMERA_XCLK_FREQ;
camera_config.ledc_timer = SPARKBOT_LEDC_TIMER;
camera_config.ledc_channel = SPARKBOT_LEDC_CHANNEL;
camera_config.fb_location = CAMERA_FB_IN_PSRAM;
// DVP configuration
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = SPARKBOT_CAMERA_RESET,
.pwdn_pin = SPARKBOT_CAMERA_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = SPARKBOT_CAMERA_XCLK_FREQ,
};
// Main video configuration
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_config.sccb_i2c_port = I2C_NUM_0;
camera_config.pixel_format = PIXFORMAT_RGB565;
camera_config.frame_size = FRAMESIZE_240X240;
camera_config.jpeg_quality = 12;
camera_config.fb_count = 1;
camera_config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
camera_ = new Esp32Camera(camera_config);
camera_ = new Esp32Camera(video_config);
Settings settings("sparkbot", false);
// Some clones use a fixed, non-adjustable camera mount, so flipping is enabled by default

View File

@ -174,51 +174,47 @@ private:
ESP_ERROR_CHECK(spi_bus_initialize(SPI3_HOST, &buscfg, SPI_DMA_CH_AUTO));
}
void InitializeCamera() {
camera_config_t config = {};
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAM_PIN_D0,
[1] = CAM_PIN_D1,
[2] = CAM_PIN_D2,
[3] = CAM_PIN_D3,
[4] = CAM_PIN_D4,
[5] = CAM_PIN_D5,
[6] = CAM_PIN_D6,
[7] = CAM_PIN_D7,
},
.vsync_io = CAM_PIN_VSYNC,
.de_io = CAM_PIN_HREF,
.pclk_io = CAM_PIN_PCLK,
.xclk_io = CAM_PIN_XCLK,
};
config.pin_pwdn = CAM_PIN_PWDN;
config.pin_reset = CAM_PIN_RESET;
config.pin_xclk = CAM_PIN_XCLK;
config.pin_sccb_sda = CAM_PIN_SIOD;
config.pin_sccb_scl = CAM_PIN_SIOC;
config.sccb_i2c_port = I2C_NUM_0;
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = I2C_NUM_0,
.scl_pin = CAM_PIN_SIOC,
.sda_pin = CAM_PIN_SIOD,
},
.freq = 100000,
};
config.pin_d7 = CAM_PIN_D7;
config.pin_d6 = CAM_PIN_D6;
config.pin_d5 = CAM_PIN_D5;
config.pin_d4 = CAM_PIN_D4;
config.pin_d3 = CAM_PIN_D3;
config.pin_d2 = CAM_PIN_D2;
config.pin_d1 = CAM_PIN_D1;
config.pin_d0 = CAM_PIN_D0;
config.pin_vsync = CAM_PIN_VSYNC;
config.pin_href = CAM_PIN_HREF;
config.pin_pclk = CAM_PIN_PCLK;
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAM_PIN_RESET,
.pwdn_pin = CAM_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = 10000000,
};
/* XCLK 20MHz or 10MHz for OV2640 double FPS (Experimental) */
config.xclk_freq_hz = 10000000;
config.ledc_timer = LEDC_TIMER_1;
config.ledc_channel = LEDC_CHANNEL_0;
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
config.pixel_format = PIXFORMAT_RGB565; /* YUV422,GRAYSCALE,RGB565,JPEG */
config.frame_size = FRAMESIZE_240X240; /* QQVGA-UXGA, For ESP32, do not use sizes above QVGA when not JPEG. The performance of the ESP32-S series has improved a lot, but JPEG mode always gives better frame rates */
config.jpeg_quality = 12; /* 0-63, for OV series camera sensors, lower number means higher quality */
config.fb_count = 2; /* When jpeg mode is used, if fb_count more than one, the driver will work in continuous mode */
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
esp_err_t err = esp_camera_init(&config); // 测试相机是否存在
if (err != ESP_OK) {
ESP_LOGE(TAG, "Camera is not plugged in or not supported, error: %s", esp_err_to_name(err));
// 如果摄像头初始化失败,设置 camera_ 为 nullptr
camera_ = nullptr;
return;
}else
{
esp_camera_deinit();// 释放之前的摄像头资源,为正确初始化做准备
camera_ = new Esp32Camera(config);
}
camera_ = new Esp32Camera(video_config);
}

View File

@ -59,23 +59,23 @@
#define DISPLAY_BACKLIGHT_PIN GPIO_NUM_NC
#define DISPLAY_BACKLIGHT_OUTPUT_INVERT false
/* Camera pins */
#define CAMERA_PIN_PWDN -1
#define CAMERA_PIN_RESET -1
#define CAMERA_PIN_XCLK 40
#define CAMERA_PIN_SIOD 17
#define CAMERA_PIN_SIOC 18
#define CAMERA_PIN_PWDN GPIO_NUM_NC
#define CAMERA_PIN_RESET GPIO_NUM_NC
#define CAMERA_PIN_XCLK GPIO_NUM_40
#define CAMERA_PIN_SIOD GPIO_NUM_17
#define CAMERA_PIN_SIOC GPIO_NUM_18
#define CAMERA_PIN_D7 39
#define CAMERA_PIN_D6 41
#define CAMERA_PIN_D5 42
#define CAMERA_PIN_D4 12
#define CAMERA_PIN_D3 3
#define CAMERA_PIN_D2 14
#define CAMERA_PIN_D1 47
#define CAMERA_PIN_D0 13
#define CAMERA_PIN_VSYNC 21
#define CAMERA_PIN_HREF 38
#define CAMERA_PIN_PCLK 11
#define CAMERA_PIN_D7 GPIO_NUM_39
#define CAMERA_PIN_D6 GPIO_NUM_41
#define CAMERA_PIN_D5 GPIO_NUM_42
#define CAMERA_PIN_D4 GPIO_NUM_12
#define CAMERA_PIN_D3 GPIO_NUM_3
#define CAMERA_PIN_D2 GPIO_NUM_14
#define CAMERA_PIN_D1 GPIO_NUM_47
#define CAMERA_PIN_D0 GPIO_NUM_13
#define CAMERA_PIN_VSYNC GPIO_NUM_21
#define CAMERA_PIN_HREF GPIO_NUM_38
#define CAMERA_PIN_PCLK GPIO_NUM_11
#define XCLK_FREQ_HZ 20000000
#endif // _BOARD_CONFIG_H_

View File

@ -333,37 +333,43 @@ private:
}
void InitializeCamera() {
// Open camera power
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC通道选择 用于生成XCLK时钟 但是S3不用
config.ledc_timer = LEDC_TIMER_2; // LEDC timer选择 用于生成XCLK时钟 但是S3不用
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = -1; // 这里写-1 表示使用已经初始化的I2C接口
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1;
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_VGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = false,
.i2c_handle = i2c_bus_,
.freq = 100000,
};
camera_ = new Esp32Camera(config);
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
}
public:

View File

@ -84,37 +84,47 @@ private:
}
void InitializeCamera() {
// Open camera power
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC通道选择 用于生成XCLK时钟 但是S3不用
config.ledc_timer = LEDC_TIMER_2; // LEDC timer选择 用于生成XCLK时钟 但是S3不用
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = CAMERA_PIN_SIOD; // 这里写-1 表示使用已经初始化的I2C接口
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1;
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_VGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = 1,
.scl_pin = CAMERA_PIN_SIOC,
.sda_pin = CAMERA_PIN_SIOD,
},
.freq = 100000,
};
camera_ = new Esp32Camera(config);
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
}
public:

View File

@ -43,23 +43,23 @@
#define ML307_RX_PIN GPIO_NUM_12
#define ML307_TX_PIN GPIO_NUM_13
/* Camera pins */
#define CAMERA_PIN_PWDN -1
#define CAMERA_PIN_RESET -1
#define CAMERA_PIN_XCLK 15
#define CAMERA_PIN_SIOD 4
#define CAMERA_PIN_SIOC 5
#define CAMERA_PIN_PWDN GPIO_NUM_NC
#define CAMERA_PIN_RESET GPIO_NUM_NC
#define CAMERA_PIN_XCLK GPIO_NUM_15
#define CAMERA_PIN_SIOD GPIO_NUM_4
#define CAMERA_PIN_SIOC GPIO_NUM_5
#define CAMERA_PIN_D7 16
#define CAMERA_PIN_D6 17
#define CAMERA_PIN_D5 18
#define CAMERA_PIN_D4 12
#define CAMERA_PIN_D3 10
#define CAMERA_PIN_D2 8
#define CAMERA_PIN_D1 9
#define CAMERA_PIN_D0 11
#define CAMERA_PIN_VSYNC 6
#define CAMERA_PIN_HREF 7
#define CAMERA_PIN_PCLK 13
#define CAMERA_PIN_D7 GPIO_NUM_16
#define CAMERA_PIN_D6 GPIO_NUM_17
#define CAMERA_PIN_D5 GPIO_NUM_18
#define CAMERA_PIN_D4 GPIO_NUM_12
#define CAMERA_PIN_D3 GPIO_NUM_10
#define CAMERA_PIN_D2 GPIO_NUM_8
#define CAMERA_PIN_D1 GPIO_NUM_9
#define CAMERA_PIN_D0 GPIO_NUM_11
#define CAMERA_PIN_VSYNC GPIO_NUM_6
#define CAMERA_PIN_HREF GPIO_NUM_7
#define CAMERA_PIN_PCLK GPIO_NUM_13
#define XCLK_FREQ_HZ 20000000

View File

@ -99,37 +99,47 @@ private:
}
void InitializeCamera() {
// Open camera power
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC通道选择 用于生成XCLK时钟 但是S3不用
config.ledc_timer = LEDC_TIMER_2; // LEDC timer选择 用于生成XCLK时钟 但是S3不用
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = -1; // 这里写-1 表示使用已经初始化的I2C接口
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1;
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_VGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = 1,
.scl_pin = CAMERA_PIN_SIOC,
.sda_pin = GPIO_NUM_NC,
},
.freq = 100000,
};
camera_ = new Esp32Camera(config);
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
}
public:

View File

@ -38,23 +38,23 @@
#define DISPLAY_BACKLIGHT_OUTPUT_INVERT true
/* Camera pins */
#define CAMERA_PIN_PWDN -1
#define CAMERA_PIN_RESET -1
#define CAMERA_PIN_XCLK 5
#define CAMERA_PIN_SIOD 1
#define CAMERA_PIN_SIOC 2
#define CAMERA_PIN_PWDN GPIO_NUM_NC
#define CAMERA_PIN_RESET GPIO_NUM_NC
#define CAMERA_PIN_XCLK GPIO_NUM_5
#define CAMERA_PIN_SIOD GPIO_NUM_1
#define CAMERA_PIN_SIOC GPIO_NUM_2
#define CAMERA_PIN_D7 9
#define CAMERA_PIN_D6 4
#define CAMERA_PIN_D5 6
#define CAMERA_PIN_D4 15
#define CAMERA_PIN_D3 17
#define CAMERA_PIN_D2 8
#define CAMERA_PIN_D1 18
#define CAMERA_PIN_D0 16
#define CAMERA_PIN_VSYNC 3
#define CAMERA_PIN_HREF 46
#define CAMERA_PIN_PCLK 7
#define CAMERA_PIN_D7 GPIO_NUM_9
#define CAMERA_PIN_D6 GPIO_NUM_4
#define CAMERA_PIN_D5 GPIO_NUM_6
#define CAMERA_PIN_D4 GPIO_NUM_15
#define CAMERA_PIN_D3 GPIO_NUM_17
#define CAMERA_PIN_D2 GPIO_NUM_8
#define CAMERA_PIN_D1 GPIO_NUM_18
#define CAMERA_PIN_D0 GPIO_NUM_16
#define CAMERA_PIN_VSYNC GPIO_NUM_3
#define CAMERA_PIN_HREF GPIO_NUM_46
#define CAMERA_PIN_PCLK GPIO_NUM_7
#define XCLK_FREQ_HZ 24000000

View File

@ -195,35 +195,43 @@ private:
// Open camera power
pca9557_->SetOutputState(2, 0);
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC通道选择 用于生成XCLK时钟 但是S3不用
config.ledc_timer = LEDC_TIMER_2; // LEDC timer选择 用于生成XCLK时钟 但是S3不用
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = -1; // 这里写-1 表示使用已经初始化的I2C接口
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1;
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_VGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
camera_ = new Esp32Camera(config);
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = false,
.i2c_handle = i2c_bus_,
.freq = 100000,
};
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
}
public:

View File

@ -224,41 +224,56 @@ private:
}
void InitializeCamera() {
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC通道选择 用于生成XCLK时钟 但是S3不用
config.ledc_timer = LEDC_TIMER_2; // LEDC timer选择 用于生成XCLK时钟 但是S3不用
config.pin_d0 = Y2_GPIO_NUM;
config.pin_d1 = Y3_GPIO_NUM;
config.pin_d2 = Y4_GPIO_NUM;
config.pin_d3 = Y5_GPIO_NUM;
config.pin_d4 = Y6_GPIO_NUM;
config.pin_d5 = Y7_GPIO_NUM;
config.pin_d6 = Y8_GPIO_NUM;
config.pin_d7 = Y9_GPIO_NUM;
config.pin_xclk = XCLK_GPIO_NUM;
config.pin_pclk = PCLK_GPIO_NUM;
config.pin_vsync = VSYNC_GPIO_NUM;
config.pin_href = HREF_GPIO_NUM;
#ifdef CONFIG_BOARD_TYPE_LILYGO_T_CAMERAPLUS_S3_V1_0_V1_1
config.pin_sccb_sda = -1; // 这里如果写-1 表示使用已经初始化的I2C接口
config.pin_sccb_scl = SIOC_GPIO_NUM;
config.sccb_i2c_port = 0; // 这里如果写0 默认使用I2C0
#elif defined CONFIG_BOARD_TYPE_LILYGO_T_CAMERAPLUS_S3_V1_2
config.pin_sccb_sda = SIOD_GPIO_NUM;
config.pin_sccb_scl = SIOC_GPIO_NUM;
config.sccb_i2c_port = 1;
#endif
config.pin_pwdn = PWDN_GPIO_NUM;
config.pin_reset = RESET_GPIO_NUM;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_240X240;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = Y2_GPIO_NUM,
[1] = Y3_GPIO_NUM,
[2] = Y4_GPIO_NUM,
[3] = Y5_GPIO_NUM,
[4] = Y6_GPIO_NUM,
[5] = Y7_GPIO_NUM,
[6] = Y8_GPIO_NUM,
[7] = Y9_GPIO_NUM,
},
.vsync_io = VSYNC_GPIO_NUM,
.de_io = HREF_GPIO_NUM,
.pclk_io = PCLK_GPIO_NUM,
.xclk_io = XCLK_GPIO_NUM,
};
camera_ = new Esp32Camera(config);
esp_video_init_sccb_config_t sccb_config = {
#ifdef CONFIG_BOARD_TYPE_LILYGO_T_CAMERAPLUS_S3_V1_0_V1_1
.init_sccb = true,
.i2c_config = {
.port = 0,
.scl_pin = SIOC_GPIO_NUM,
.sda_pin = GPIO_NUM_NC,
},
#elif defined CONFIG_BOARD_TYPE_LILYGO_T_CAMERAPLUS_S3_V1_2
.init_sccb = true,
.i2c_config = {
.port = 1,
.scl_pin = SIOC_GPIO_NUM,
.sda_pin = SIOD_GPIO_NUM,
},
#endif
.freq = 100000,
};
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = RESET_GPIO_NUM,
.pwdn_pin = PWDN_GPIO_NUM,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
camera_->SetVFlip(1);
camera_->SetHMirror(1);
}

View File

@ -16,90 +16,90 @@
#ifdef T_CameraPlus_S3_V1_0_V1_1
// SPI
#define SPI_SCLK 36
#define SPI_MOSI 35
#define SPI_MISO 37
#define SPI_SCLK GPIO_NUM_36
#define SPI_MOSI GPIO_NUM_35
#define SPI_MISO GPIO_NUM_37
// IIC
#define IIC_SDA 1
#define IIC_SCL 2
#define IIC_SDA GPIO_NUM_1
#define IIC_SCL GPIO_NUM_2
// MSM261
#define MSM261_BCLK 18
#define MSM261_WS 39
#define MSM261_DATA 40
#define MSM261_BCLK GPIO_NUM_18
#define MSM261_WS GPIO_NUM_39
#define MSM261_DATA GPIO_NUM_40
// MAX98357A
#define MAX98357A_DATA 38
#define MAX98357A_DATA GPIO_NUM_38
// FP-133H01D
#define LCD_CS 34
#define LCD_RST 33
#define LCD_CS GPIO_NUM_34
#define LCD_RST GPIO_NUM_33
// OV2640
#define OV2640_PWDN -1
#define OV2640_RESET 3
#define OV2640_VSYNC 4
#define OV2640_PWDN GPIO_NUM_NC
#define OV2640_RESET GPIO_NUM_3
#define OV2640_VSYNC GPIO_NUM_4
// CST816
#define TP_RST 48
#define TP_RST GPIO_NUM_48
// SY6970
#define SY6970_INT 47
#define SY6970_INT GPIO_NUM_47
#endif
#ifdef T_CameraPlus_S3_V1_2
// SPI
#define SPI_SCLK 35
#define SPI_MOSI 34
#define SPI_MISO 48
#define SPI_SCLK GPIO_NUM_35
#define SPI_MOSI GPIO_NUM_34
#define SPI_MISO GPIO_NUM_48
// IIC
#define IIC_SDA 33
#define IIC_SCL 37
#define IIC_SDA GPIO_NUM_33
#define IIC_SCL GPIO_NUM_37
// MP34DT05TR
#define MP34DT05TR_LRCLK 40
#define MP34DT05TR_DATA 38
#define MP34DT05TR_LRCLK GPIO_NUM_40
#define MP34DT05TR_DATA GPIO_NUM_38
#define MP34DT05TR_MAX98357_EN 18
#define MP34DT05TR_MAX98357_EN GPIO_NUM_18
// MAX98357A
#define MAX98357A_DATA 39
#define MAX98357A_DATA GPIO_NUM_39
// FP-133H01D
#define LCD_CS 36
#define LCD_RST -1
#define LCD_CS GPIO_NUM_36
#define LCD_RST GPIO_NUM_NC
// OV2640
#define OV2640_PWDN 4
#define OV2640_RESET -1
#define OV2640_VSYNC 3
#define OV2640_PWDN GPIO_NUM_4
#define OV2640_RESET GPIO_NUM_NC
#define OV2640_VSYNC GPIO_NUM_3
// CST816
#define TP_RST -1
#define TP_RST GPIO_NUM_NC
#endif
// SD
#define SD_CS 21
#define SD_CS GPIO_NUM_21
#define SD_SCLK SPI_SCLK
#define SD_MOSI SPI_MOSI
#define SD_MISO SPI_MISO
// MAX98357A
#define MAX98357A_BCLK 41
#define MAX98357A_LRCLK 42
#define MAX98357A_BCLK GPIO_NUM_41
#define MAX98357A_LRCLK GPIO_NUM_42
// FP-133H01D
#define LCD_WIDTH 240
#define LCD_HEIGHT 240
#define LCD_BL 46
#define LCD_BL GPIO_NUM_46
#define LCD_MOSI SPI_MOSI
#define LCD_SCLK SPI_SCLK
#define LCD_DC 45
#define LCD_DC GPIO_NUM_45
// SY6970
#define SY6970_SDA IIC_SDA
@ -107,19 +107,19 @@
#define SY6970_ADDRESS 0x6A
// OV2640
#define OV2640_XCLK 7
#define OV2640_SDA 1
#define OV2640_SCL 2
#define OV2640_D9 6
#define OV2640_D8 8
#define OV2640_D7 9
#define OV2640_D6 11
#define OV2640_D5 13
#define OV2640_D4 15
#define OV2640_D3 14
#define OV2640_D2 12
#define OV2640_HREF 5
#define OV2640_PCLK 10
#define OV2640_XCLK GPIO_NUM_7
#define OV2640_SDA GPIO_NUM_1
#define OV2640_SCL GPIO_NUM_2
#define OV2640_D9 GPIO_NUM_6
#define OV2640_D8 GPIO_NUM_8
#define OV2640_D7 GPIO_NUM_9
#define OV2640_D6 GPIO_NUM_11
#define OV2640_D5 GPIO_NUM_13
#define OV2640_D4 GPIO_NUM_15
#define OV2640_D3 GPIO_NUM_14
#define OV2640_D2 GPIO_NUM_12
#define OV2640_HREF GPIO_NUM_5
#define OV2640_PCLK GPIO_NUM_10
#define PWDN_GPIO_NUM OV2640_PWDN
#define RESET_GPIO_NUM OV2640_RESET
@ -145,10 +145,10 @@
#define CST816_ADDRESS 0x15
#define TP_SDA IIC_SDA
#define TP_SCL IIC_SCL
#define TP_INT 47
#define TP_INT GPIO_NUM_47
// AP1511B
#define AP1511B_FBC 16
#define AP1511B_FBC GPIO_NUM_16
// KEY
#define KEY1 17
#define KEY1 GPIO_NUM_17
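The macros above switch from bare integers to GPIO_NUM_x enumerators (and -1 to GPIO_NUM_NC) because the esp_cam_ctlr/esp_video structures used elsewhere in this commit type their pin fields as gpio_num_t; with the enum constants the designated initializers compile without casts. Illustrative sketch only (field names follow the DVP pin config used in the board files):

esp_cam_ctlr_dvp_pin_config_t pins = {
    .data_width = CAM_CTLR_DATA_WIDTH_8,
    .vsync_io = OV2640_VSYNC,   // gpio_num_t field: a GPIO_NUM_x enumerator assigns directly
    .pclk_io = OV2640_PCLK,     // a bare integer here would need an explicit gpio_num_t cast in C++
};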


@ -4,7 +4,10 @@
{
"name": "m5stack-core-s3",
"sdkconfig_append": [
"CONFIG_SPIRAM_MODE_QUAD=y"
"CONFIG_SPIRAM_MODE_QUAD=y",
"CONFIG_CAMERA_GC0308=y",
"CONFIG_CAMERA_GC0308_AUTO_DETECT_DVP_INTERFACE_SENSOR=y",
"CONFIG_CAMERA_GC0308_DVP_YUV422_320X240_20FPS=y"
]
}
]


@ -291,33 +291,44 @@ private:
}
void InitializeCamera() {
// Open camera power
camera_config_t config = {};
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = CAMERA_PIN_SIOD;
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 1;
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_QVGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
camera_ = new Esp32Camera(config);
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = false,
.i2c_handle = i2c_bus_,
.freq = 100000,
};
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
camera_->SetHMirror(false);
}
public:
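Note the SCCB setup here compared with the LilyGO hunk above: this board already owns an I2C master bus handle (i2c_bus_) shared with other peripherals, so init_sccb = false hands that handle to esp_video, whereas boards with dedicated camera SIOD/SIOC pins set init_sccb = true and let esp_video create the bus from i2c_config. Side by side (values are placeholders copied from the hunks in this commit):

// Reuse a bus the board has already created.
esp_video_init_sccb_config_t shared_bus = {
    .init_sccb = false,
    .i2c_handle = i2c_bus_,
    .freq = 100000,
};

// Let esp_video create the bus from dedicated camera pins.
esp_video_init_sccb_config_t dedicated_bus = {
    .init_sccb = true,
    .i2c_config = {
        .port = 1,
        .scl_pin = CAMERA_PIN_SIOC,
        .sda_pin = CAMERA_PIN_SIOD,
    },
    .freq = 100000,
};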


@ -39,23 +39,23 @@
#define DISPLAY_SPI_SCLK_HZ (20 * 1000 * 1000)
/* Camera pins */
#define CAMERA_PIN_PWDN -1
#define CAMERA_PIN_RESET -1
#define CAMERA_PIN_XCLK 43
#define CAMERA_PIN_SIOD -1
#define CAMERA_PIN_SIOC -1
#define CAMERA_PIN_PWDN GPIO_NUM_NC
#define CAMERA_PIN_RESET GPIO_NUM_NC
#define CAMERA_PIN_XCLK GPIO_NUM_43
#define CAMERA_PIN_SIOD GPIO_NUM_NC
#define CAMERA_PIN_SIOC GPIO_NUM_NC
#define CAMERA_PIN_D7 48
#define CAMERA_PIN_D6 47
#define CAMERA_PIN_D5 46
#define CAMERA_PIN_D4 45
#define CAMERA_PIN_D3 39
#define CAMERA_PIN_D2 18
#define CAMERA_PIN_D1 17
#define CAMERA_PIN_D0 2
#define CAMERA_PIN_VSYNC 21
#define CAMERA_PIN_HREF 1
#define CAMERA_PIN_PCLK 44
#define CAMERA_PIN_D7 GPIO_NUM_48
#define CAMERA_PIN_D6 GPIO_NUM_47
#define CAMERA_PIN_D5 GPIO_NUM_46
#define CAMERA_PIN_D4 GPIO_NUM_45
#define CAMERA_PIN_D3 GPIO_NUM_39
#define CAMERA_PIN_D2 GPIO_NUM_18
#define CAMERA_PIN_D1 GPIO_NUM_17
#define CAMERA_PIN_D0 GPIO_NUM_2
#define CAMERA_PIN_VSYNC GPIO_NUM_21
#define CAMERA_PIN_HREF GPIO_NUM_1
#define CAMERA_PIN_PCLK GPIO_NUM_44
#define XCLK_FREQ_HZ 20000000


@ -156,35 +156,47 @@ private:
}
void InitializeCamera() {
camera_config_t config = {};
config.ledc_channel = LEDC_CHANNEL_2; // LEDC channel used to generate XCLK; not needed on the S3
config.ledc_timer = LEDC_TIMER_2; // LEDC timer used to generate XCLK; not needed on the S3
config.pin_d0 = CAMERA_PIN_D0;
config.pin_d1 = CAMERA_PIN_D1;
config.pin_d2 = CAMERA_PIN_D2;
config.pin_d3 = CAMERA_PIN_D3;
config.pin_d4 = CAMERA_PIN_D4;
config.pin_d5 = CAMERA_PIN_D5;
config.pin_d6 = CAMERA_PIN_D6;
config.pin_d7 = CAMERA_PIN_D7;
config.pin_xclk = CAMERA_PIN_XCLK;
config.pin_pclk = CAMERA_PIN_PCLK;
config.pin_vsync = CAMERA_PIN_VSYNC;
config.pin_href = CAMERA_PIN_HREF;
config.pin_sccb_sda = CAMERA_PIN_SIOD; // -1 here would mean reuse an already-initialized I2C interface
config.pin_sccb_scl = CAMERA_PIN_SIOC;
config.sccb_i2c_port = 0; // 1 would select I2C1
config.pin_pwdn = CAMERA_PIN_PWDN;
config.pin_reset = CAMERA_PIN_RESET;
config.xclk_freq_hz = XCLK_FREQ_HZ;
config.pixel_format = PIXFORMAT_RGB565;
config.frame_size = FRAMESIZE_QVGA;
config.jpeg_quality = 12;
config.fb_count = 1;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAMERA_PIN_D0,
[1] = CAMERA_PIN_D1,
[2] = CAMERA_PIN_D2,
[3] = CAMERA_PIN_D3,
[4] = CAMERA_PIN_D4,
[5] = CAMERA_PIN_D5,
[6] = CAMERA_PIN_D6,
[7] = CAMERA_PIN_D7,
},
.vsync_io = CAMERA_PIN_VSYNC,
.de_io = CAMERA_PIN_HREF,
.pclk_io = CAMERA_PIN_PCLK,
.xclk_io = CAMERA_PIN_XCLK,
};
camera_ = new Esp32Camera(config);
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = 0,
.scl_pin = CAMERA_PIN_SIOC,
.sda_pin = CAMERA_PIN_SIOD,
},
.freq = 100000,
};
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAMERA_PIN_RESET,
.pwdn_pin = CAMERA_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = XCLK_FREQ_HZ,
};
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
camera_ = new Esp32Camera(video_config);
camera_->SetVFlip(1);
}
public:


@ -177,51 +177,47 @@ private:
}
void InitializeCamera() {
camera_config_t config = {};
static esp_cam_ctlr_dvp_pin_config_t dvp_pin_config = {
.data_width = CAM_CTLR_DATA_WIDTH_8,
.data_io = {
[0] = CAM_PIN_D0,
[1] = CAM_PIN_D1,
[2] = CAM_PIN_D2,
[3] = CAM_PIN_D3,
[4] = CAM_PIN_D4,
[5] = CAM_PIN_D5,
[6] = CAM_PIN_D6,
[7] = CAM_PIN_D7,
},
.vsync_io = CAM_PIN_VSYNC,
.de_io = CAM_PIN_HREF,
.pclk_io = CAM_PIN_PCLK,
.xclk_io = CAM_PIN_XCLK,
};
config.pin_pwdn = CAM_PIN_PWDN;
config.pin_reset = CAM_PIN_RESET;
config.pin_xclk = CAM_PIN_XCLK;
config.pin_sccb_sda = CAM_PIN_SIOD;
config.pin_sccb_scl = CAM_PIN_SIOC;
config.sccb_i2c_port = I2C_NUM_0;
esp_video_init_sccb_config_t sccb_config = {
.init_sccb = true,
.i2c_config = {
.port = I2C_NUM_0,
.scl_pin = CAM_PIN_SIOC,
.sda_pin = CAM_PIN_SIOD,
},
.freq = 100000,
};
config.pin_d7 = CAM_PIN_D7;
config.pin_d6 = CAM_PIN_D6;
config.pin_d5 = CAM_PIN_D5;
config.pin_d4 = CAM_PIN_D4;
config.pin_d3 = CAM_PIN_D3;
config.pin_d2 = CAM_PIN_D2;
config.pin_d1 = CAM_PIN_D1;
config.pin_d0 = CAM_PIN_D0;
config.pin_vsync = CAM_PIN_VSYNC;
config.pin_href = CAM_PIN_HREF;
config.pin_pclk = CAM_PIN_PCLK;
esp_video_init_dvp_config_t dvp_config = {
.sccb_config = sccb_config,
.reset_pin = CAM_PIN_RESET,
.pwdn_pin = CAM_PIN_PWDN,
.dvp_pin = dvp_pin_config,
.xclk_freq = 10000000,
};
/* XCLK 20MHz or 10MHz for OV2640 double FPS (Experimental) */
config.xclk_freq_hz = 10000000;
config.ledc_timer = LEDC_TIMER_1;
config.ledc_channel = LEDC_CHANNEL_0;
esp_video_init_config_t video_config = {
.dvp = &dvp_config,
};
config.pixel_format = PIXFORMAT_RGB565; /* YUV422,GRAYSCALE,RGB565,JPEG */
config.frame_size = FRAMESIZE_240X240; /* QQVGA-UXGA, For ESP32, do not use sizes above QVGA when not JPEG. The performance of the ESP32-S series has improved a lot, but JPEG mode always gives better frame rates */
config.jpeg_quality = 12; /* 0-63, for OV series camera sensors, lower number means higher quality */
config.fb_count = 2; /* When jpeg mode is used, if fb_count more than one, the driver will work in continuous mode */
config.fb_location = CAMERA_FB_IN_PSRAM;
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
esp_err_t err = esp_camera_init(&config); // probe whether a camera is present
if (err != ESP_OK) {
ESP_LOGE(TAG, "Camera is not plugged in or not supported, error: %s", esp_err_to_name(err));
// If camera initialization fails, leave camera_ as nullptr
camera_ = nullptr;
return;
} else {
esp_camera_deinit(); // release the probe's camera resources so the real initialization can proceed cleanly
camera_ = new Esp32Camera(config);
}
camera_ = new Esp32Camera(video_config);
}


@ -1,17 +0,0 @@
# 说明 / Description
## 中文
本目录代码移植自 https://github.com/espressif/esp32-camera/blob/master/conversions/jpge.cpp
由于原版本使用了 8KB 静态全局变量,会导致程序加载后长期占用 SRAM。
本版本改为类成员变量,仅在使用时从堆内存申请,代码由 Cursor 重新生成。
## English
The code in this directory is ported from https://github.com/espressif/esp32-camera/blob/master/conversions/jpge.cpp
The original version used 8 KB of static global variables, which kept that SRAM permanently occupied once the program was loaded.
This version moves them into class member variables, so the memory is allocated from the heap only while the encoder is in use. The code was regenerated by Cursor.


@ -1,228 +1,414 @@
// Based on the original to_jpg.cpp, switched to jpeg_encoder to save SRAM
// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
#include <stddef.h>
#include <string.h>
#include <memory>
#include <esp_attr.h>
#include <esp_heap_caps.h>
#include <esp_log.h>
#include <stddef.h>
#include <string.h>
#include "jpeg_encoder.h" // 使用新的JPEG编码器
#include "esp_jpeg_common.h"
#include "esp_jpeg_enc.h"
#if CONFIG_XIAOZHI_ENABLE_HARDWARE_JPEG_ENCODER
#include "driver/jpeg_encode.h"
#endif
#include "image_to_jpeg.h"
#define TAG "image_to_jpeg"
static void *_malloc(size_t size)
{
void * res = malloc(size);
if(res) {
return res;
}
// check if SPIRAM is enabled and is allocatable
static void* malloc_psram(size_t size) {
void* p = malloc(size);
if (p)
return p;
#if (CONFIG_SPIRAM_SUPPORT && (CONFIG_SPIRAM_USE_CAPS_ALLOC || CONFIG_SPIRAM_USE_MALLOC))
return heap_caps_malloc(size, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
#else
return NULL;
#endif
}
static __always_inline uint8_t expand_5_to_8(uint8_t v) {
return (uint8_t)((v << 3) | (v >> 2));
}
static __always_inline uint8_t expand_6_to_8(uint8_t v) {
return (uint8_t)((v << 2) | (v >> 4));
}
static uint8_t* convert_input_to_encoder_buf(const uint8_t* src, uint16_t width, uint16_t height, v4l2_pix_fmt_t format,
jpeg_pixel_format_t* out_fmt, int* out_size) {
// Formats passed through directly: GRAY, RGB888, YCbYCr (YUYV)
if (format == V4L2_PIX_FMT_GREY) {
int sz = (int)width * (int)height;
uint8_t* buf = (uint8_t*)jpeg_calloc_align(sz, 16);
if (!buf)
return NULL;
memcpy(buf, src, sz);
if (out_fmt)
*out_fmt = JPEG_PIXEL_FORMAT_GRAY;
if (out_size)
*out_size = sz;
return buf;
}
// V4L2 YUYV (Y Cb Y Cr) can be fed directly as JPEG_PIXEL_FORMAT_YCbYCr input
if (format == V4L2_PIX_FMT_YUYV) {
int sz = (int)width * (int)height * 2;
uint8_t* buf = (uint8_t*)jpeg_calloc_align(sz, 16);
if (!buf)
return NULL;
memcpy(buf, src, sz);
if (out_fmt)
*out_fmt = JPEG_PIXEL_FORMAT_YCbYCr;
if (out_size)
*out_size = sz;
return buf;
}
// V4L2 UYVY (Cb Y Cr Y) -> reorder to YUYV, then feed as YCbYCr
if (format == V4L2_PIX_FMT_UYVY) {
int sz = (int)width * (int)height * 2;
const uint8_t* s = src;
uint8_t* buf = (uint8_t*)jpeg_calloc_align(sz, 16);
if (!buf)
return NULL;
uint8_t* d = buf;
for (int i = 0; i < sz; i += 4) {
// src: Cb, Y0, Cr, Y1 -> dst: Y0, Cb, Y1, Cr
d[0] = s[1];
d[1] = s[0];
d[2] = s[3];
d[3] = s[2];
s += 4;
d += 4;
}
if (out_fmt)
*out_fmt = JPEG_PIXEL_FORMAT_YCbYCr;
if (out_size)
*out_size = sz;
return buf;
}
// V4L2 YUV422P (planar YUV422) -> interleave into YUYV (YCbYCr)
if (format == V4L2_PIX_FMT_YUV422P) {
int sz = (int)width * (int)height * 2;
const uint8_t* y_plane = src;
const uint8_t* u_plane = y_plane + (int)width * (int)height;
const uint8_t* v_plane = u_plane + ((int)width / 2) * (int)height;
uint8_t* buf = (uint8_t*)jpeg_calloc_align(sz, 16);
if (!buf)
return NULL;
uint8_t* dst = buf;
for (int y = 0; y < height; y++) {
const uint8_t* y_row = y_plane + y * (int)width;
const uint8_t* u_row = u_plane + y * ((int)width / 2);
const uint8_t* v_row = v_plane + y * ((int)width / 2);
for (int x = 0; x < width; x += 2) {
uint8_t y0 = y_row[x + 0];
uint8_t y1 = y_row[x + 1];
uint8_t cb = u_row[x / 2];
uint8_t cr = v_row[x / 2];
dst[0] = y0;
dst[1] = cb;
dst[2] = y1;
dst[3] = cr;
dst += 4;
}
}
if (out_fmt)
*out_fmt = JPEG_PIXEL_FORMAT_YCbYCr;
if (out_size)
*out_size = sz;
return buf;
}
// All remaining formats are converted to RGB888
int rgb_size = (int)width * (int)height * 3;
uint8_t* rgb = (uint8_t*)jpeg_calloc_align(rgb_size, 16);
if (!rgb)
return NULL;
if (format == V4L2_PIX_FMT_RGB24) {
// V4L2_RGB24 is RGB888
memcpy(rgb, src, rgb_size);
} else if (format == V4L2_PIX_FMT_RGB565) {
// RGB565 (little-endian) needs conversion to RGB888
const uint8_t* p = src;
uint8_t* d = rgb;
int pixels = (int)width * (int)height;
for (int i = 0; i < pixels; i++) {
uint8_t lo = p[0]; // low byte (LSB)
uint8_t hi = p[1]; // high byte (MSB)
p += 2;
uint8_t r5 = (hi >> 3) & 0x1F;
uint8_t g6 = ((hi & 0x07) << 3) | ((lo & 0xE0) >> 5);
uint8_t b5 = lo & 0x1F;
d[0] = expand_5_to_8(r5);
d[1] = expand_6_to_8(g6);
d[2] = expand_5_to_8(b5);
d += 3;
}
} else {
// Any other, unhandled format: zero-fill the buffer
memset(rgb, 0, rgb_size);
}
if (out_fmt)
*out_fmt = JPEG_PIXEL_FORMAT_RGB888;
if (out_size)
*out_size = rgb_size;
return rgb;
}
#if CONFIG_XIAOZHI_ENABLE_HARDWARE_JPEG_ENCODER
static jpeg_encoder_handle_t s_hw_jpeg_handle = NULL;
static bool hw_jpeg_ensure_inited(void) {
if (s_hw_jpeg_handle) {
return true;
}
jpeg_encode_engine_cfg_t eng_cfg = {
.intr_priority = 0,
.timeout_ms = 100,
};
esp_err_t er = jpeg_new_encoder_engine(&eng_cfg, &s_hw_jpeg_handle);
if (er != ESP_OK) {
ESP_LOGE(TAG, "jpeg_new_encoder_engine failed: %d", (int)er);
s_hw_jpeg_handle = NULL;
return false;
}
return true;
}
static uint8_t* convert_input_to_hw_encoder_buf(const uint8_t* src, uint16_t width, uint16_t height, v4l2_pix_fmt_t format,
jpeg_enc_input_format_t* out_fmt, int* out_size) {
if (format == V4L2_PIX_FMT_GREY) {
int sz = (int)width * (int)height;
uint8_t* buf = (uint8_t*)malloc_psram(sz);
if (!buf)
return NULL;
memcpy(buf, src, sz);
if (out_fmt)
*out_fmt = JPEG_ENCODE_IN_FORMAT_GRAY;
if (out_size)
*out_size = sz;
return buf;
}
if (format == V4L2_PIX_FMT_RGB24) {
int sz = (int)width * (int)height * 3;
uint8_t* buf = (uint8_t*)malloc_psram(sz);
if (!buf) {
ESP_LOGE(TAG, "malloc_psram failed");
return NULL;
}
memcpy(buf, src, sz);
if (out_fmt)
*out_fmt = JPEG_ENCODE_IN_FORMAT_RGB888;
if (out_size)
*out_size = sz;
return buf;
}
if (format == V4L2_PIX_FMT_RGB565) {
int sz = (int)width * (int)height * 2;
uint8_t* buf = (uint8_t*)malloc_psram(sz);
if (!buf)
return NULL;
memcpy(buf, src, sz);
if (out_fmt)
*out_fmt = JPEG_ENCODE_IN_FORMAT_RGB565;
if (out_size)
*out_size = sz;
return buf;
}
if (format == V4L2_PIX_FMT_YUYV) {
// The hardware expects the "big-endian" | Y1 V Y0 U | ordering, so each 16-bit word is byte-swapped (bswap16)
int sz = (int)width * (int)height * 2;
uint16_t* buf = (uint16_t*)malloc_psram(sz);
if (!buf)
return NULL;
const uint16_t* bsrc = (const uint16_t*)src;
for (int i = 0; i < sz / 2; i++) {
buf[i] = __builtin_bswap16(bsrc[i]);
}
if (out_fmt)
*out_fmt = JPEG_ENCODE_IN_FORMAT_YUV422;
if (out_size)
*out_size = sz;
return (uint8_t*)buf;
}
return NULL;
}
static IRAM_ATTR void convert_line_format(uint8_t * src, pixformat_t format, uint8_t * dst, size_t width, size_t in_channels, size_t line)
{
int i=0, o=0, l=0;
if(format == PIXFORMAT_GRAYSCALE) {
memcpy(dst, src + line * width, width);
} else if(format == PIXFORMAT_RGB888) {
l = width * 3;
src += l * line;
for(i=0; i<l; i+=3) {
dst[o++] = src[i+2];
dst[o++] = src[i+1];
dst[o++] = src[i];
}
} else if(format == PIXFORMAT_RGB565) {
l = width * 2;
src += l * line;
for(i=0; i<l; i+=2) {
dst[o++] = src[i] & 0xF8;
dst[o++] = (src[i] & 0x07) << 5 | (src[i+1] & 0xE0) >> 3;
dst[o++] = (src[i+1] & 0x1F) << 3;
}
} else if(format == PIXFORMAT_YUV422) {
// Simplified YUV422-to-RGB conversion
l = width * 2;
src += l * line;
for(i=0; i<l; i+=4) {
int y0 = src[i];
int u = src[i+1];
int y1 = src[i+2];
int v = src[i+3];
// Simplified YUV-to-RGB conversion formula
int c = y0 - 16;
int d = u - 128;
int e = v - 128;
int r = (298 * c + 409 * e + 128) >> 8;
int g = (298 * c - 100 * d - 208 * e + 128) >> 8;
int b = (298 * c + 516 * d + 128) >> 8;
dst[o++] = (r < 0) ? 0 : ((r > 255) ? 255 : r);
dst[o++] = (g < 0) ? 0 : ((g > 255) ? 255 : g);
dst[o++] = (b < 0) ? 0 : ((b > 255) ? 255 : b);
// second pixel (Y1)
c = y1 - 16;
r = (298 * c + 409 * e + 128) >> 8;
g = (298 * c - 100 * d - 208 * e + 128) >> 8;
b = (298 * c + 516 * d + 128) >> 8;
dst[o++] = (r < 0) ? 0 : ((r > 255) ? 255 : r);
dst[o++] = (g < 0) ? 0 : ((g > 255) ? 255 : g);
dst[o++] = (b < 0) ? 0 : ((b > 255) ? 255 : b);
}
}
}
// Callback stream - used by the callback variant of the JPEG encoder
class callback_stream : public jpge2_simple::output_stream {
protected:
jpg_out_cb ocb;
void * oarg;
size_t index;
public:
callback_stream(jpg_out_cb cb, void * arg) : ocb(cb), oarg(arg), index(0) { }
virtual ~callback_stream() { }
virtual bool put_buf(const void* data, int len)
{
index += ocb(oarg, index, data, len);
return true;
}
virtual jpge2_simple::uint get_size() const
{
return static_cast<jpge2_simple::uint>(index);
}
};
// Memory stream - writes the output directly into a memory buffer
class memory_stream : public jpge2_simple::output_stream {
protected:
uint8_t *out_buf;
size_t max_len, index;
public:
memory_stream(void *pBuf, uint buf_size) : out_buf(static_cast<uint8_t*>(pBuf)), max_len(buf_size), index(0) { }
virtual ~memory_stream() { }
virtual bool put_buf(const void* pBuf, int len)
{
if (!pBuf) {
//end of image
return true;
}
if ((size_t)len > (max_len - index)) {
//ESP_LOGW(TAG, "JPG output overflow: %d bytes (%d,%d,%d)", len - (max_len - index), len, index, max_len);
len = max_len - index;
}
if (len) {
memcpy(out_buf + index, pBuf, len);
index += len;
}
return true;
}
virtual jpge2_simple::uint get_size() const
{
return static_cast<jpge2_simple::uint>(index);
}
};
// Convert the image with the optimized JPEG encoder; the encoder must be created on the heap
static bool convert_image(uint8_t *src, uint16_t width, uint16_t height, pixformat_t format, uint8_t quality, jpge2_simple::output_stream *dst_stream)
{
int num_channels = 3;
jpge2_simple::subsampling_t subsampling = jpge2_simple::H2V2;
if(format == PIXFORMAT_GRAYSCALE) {
num_channels = 1;
subsampling = jpge2_simple::Y_ONLY;
}
if(!quality) {
static bool encode_with_hw_jpeg(const uint8_t* src, size_t src_len, uint16_t width, uint16_t height,
v4l2_pix_fmt_t format, uint8_t quality, uint8_t** jpg_out, size_t* jpg_out_len,
jpg_out_cb cb, void* cb_arg) {
if (quality < 1)
quality = 1;
} else if(quality > 100) {
if (quality > 100)
quality = 100;
}
jpge2_simple::params comp_params = jpge2_simple::params();
comp_params.m_subsampling = subsampling;
comp_params.m_quality = quality;
// Important: the encoder must be created on the heap (roughly 8 KB is allocated there)
auto dst_image = std::make_unique<jpge2_simple::jpeg_encoder>();
if (!dst_image->init(dst_stream, width, height, num_channels, comp_params)) {
ESP_LOGE(TAG, "JPG encoder init failed");
jpeg_enc_input_format_t enc_src_type = JPEG_ENCODE_IN_FORMAT_RGB888;
int enc_in_size = 0;
uint8_t* enc_in = convert_input_to_hw_encoder_buf(src, width, height, format, &enc_src_type, &enc_in_size);
if (!enc_in) {
ESP_LOGW(TAG, "hw jpeg: unsupported format, fallback to sw");
return false;
}
uint8_t* line = (uint8_t*)_malloc(width * num_channels);
if(!line) {
ESP_LOGE(TAG, "Scan line malloc failed");
if (!hw_jpeg_ensure_inited()) {
free(enc_in);
return false;
}
for (int i = 0; i < height; i++) {
convert_line_format(src, format, line, width, num_channels, i);
if (!dst_image->process_scanline(line)) {
ESP_LOGE(TAG, "JPG process line %u failed", i);
free(line);
return false;
}
}
free(line);
jpeg_encode_cfg_t enc_cfg = {0};
enc_cfg.width = width;
enc_cfg.height = height;
enc_cfg.src_type = enc_src_type;
enc_cfg.image_quality = quality;
enc_cfg.sub_sample = (enc_src_type == JPEG_ENCODE_IN_FORMAT_GRAY) ? JPEG_DOWN_SAMPLING_GRAY : JPEG_DOWN_SAMPLING_YUV422;
if (!dst_image->process_scanline(NULL)) {
ESP_LOGE(TAG, "JPG image finish failed");
size_t out_cap = (size_t)width * (size_t)height * 3 / 2 + 64 * 1024;
if (out_cap < 128 * 1024)
out_cap = 128 * 1024;
jpeg_encode_memory_alloc_cfg_t jpeg_enc_output_mem_cfg = { .buffer_direction = JPEG_ENC_ALLOC_OUTPUT_BUFFER };
size_t out_cap_aligned = 0;
uint8_t* outbuf = (uint8_t*)jpeg_alloc_encoder_mem(out_cap, &jpeg_enc_output_mem_cfg, &out_cap_aligned);
if (!outbuf) {
free(enc_in);
ESP_LOGE(TAG, "alloc out buffer failed");
return false;
}
// dst_image is freed automatically when the unique_ptr goes out of scope
uint32_t out_len = 0;
esp_err_t er = jpeg_encoder_process(s_hw_jpeg_handle, &enc_cfg, enc_in, (uint32_t)enc_in_size, outbuf, (uint32_t)out_cap_aligned, &out_len);
free(enc_in);
if (er != ESP_OK) {
free(outbuf);
ESP_LOGE(TAG, "jpeg_encoder_process failed: %d", (int)er);
return false;
}
if (cb) {
cb(cb_arg, 0, outbuf, (size_t)out_len);
cb(cb_arg, 1, NULL, 0);
free(outbuf);
if (jpg_out)
*jpg_out = NULL;
if (jpg_out_len)
*jpg_out_len = 0;
return true;
}
if (jpg_out && jpg_out_len) {
*jpg_out = outbuf;
*jpg_out_len = (size_t)out_len;
return true;
}
free(outbuf);
return true;
}
#endif // CONFIG_XIAOZHI_ENABLE_HARDWARE_JPEG_ENCODER
static bool encode_with_esp_new_jpeg(const uint8_t* src, size_t src_len, uint16_t width, uint16_t height,
v4l2_pix_fmt_t format, uint8_t quality, uint8_t** jpg_out, size_t* jpg_out_len,
jpg_out_cb cb, void* cb_arg) {
if (quality < 1)
quality = 1;
if (quality > 100)
quality = 100;
jpeg_pixel_format_t enc_src_type = JPEG_PIXEL_FORMAT_RGB888;
int enc_in_size = 0;
uint8_t* enc_in = convert_input_to_encoder_buf(src, width, height, format, &enc_src_type, &enc_in_size);
if (!enc_in) {
ESP_LOGE(TAG, "alloc/convert input failed");
return false;
}
jpeg_enc_config_t cfg = DEFAULT_JPEG_ENC_CONFIG();
cfg.width = width;
cfg.height = height;
cfg.src_type = enc_src_type;
cfg.subsampling = (enc_src_type == JPEG_PIXEL_FORMAT_GRAY) ? JPEG_SUBSAMPLE_GRAY : JPEG_SUBSAMPLE_420;
cfg.quality = quality;
cfg.rotate = JPEG_ROTATE_0D;
cfg.task_enable = false;
jpeg_enc_handle_t h = NULL;
jpeg_error_t ret = jpeg_enc_open(&cfg, &h);
if (ret != JPEG_ERR_OK) {
jpeg_free_align(enc_in);
ESP_LOGE(TAG, "jpeg_enc_open failed: %d", (int)ret);
return false;
}
// Estimate the output buffer: width * height * 1.5 plus 64 KB (for a 320x240 frame that is roughly 177 KB)
size_t out_cap = (size_t)width * (size_t)height * 3 / 2 + 64 * 1024;
if (out_cap < 128 * 1024)
out_cap = 128 * 1024;
uint8_t* outbuf = (uint8_t*)malloc_psram(out_cap);
if (!outbuf) {
jpeg_enc_close(h);
jpeg_free_align(enc_in);
ESP_LOGE(TAG, "alloc out buffer failed");
return false;
}
int out_len = 0;
ret = jpeg_enc_process(h, enc_in, enc_in_size, outbuf, (int)out_cap, &out_len);
jpeg_enc_close(h);
jpeg_free_align(enc_in);
if (ret != JPEG_ERR_OK) {
free(outbuf);
ESP_LOGE(TAG, "jpeg_enc_process failed: %d", (int)ret);
return false;
}
if (cb) {
cb(cb_arg, 0, outbuf, (size_t)out_len);
cb(cb_arg, 1, NULL, 0); // end-of-stream signal
free(outbuf);
if (jpg_out)
*jpg_out = NULL;
if (jpg_out_len)
*jpg_out_len = 0;
return true;
}
if (jpg_out && jpg_out_len) {
*jpg_out = outbuf;
*jpg_out_len = (size_t)out_len;
return true;
}
free(outbuf);
return true;
}
// Main entry point: efficient image-to-JPEG conversion (saves ~8 KB of SRAM)
bool image_to_jpeg(uint8_t *src, size_t src_len, uint16_t width, uint16_t height, pixformat_t format, uint8_t quality, uint8_t ** out, size_t * out_len)
{
ESP_LOGI(TAG, "Using optimized JPEG encoder (saves ~8KB SRAM)");
// Allocate the JPEG output buffer; this size should be enough for most images
int jpg_buf_len = 128*1024;
uint8_t * jpg_buf = (uint8_t *)_malloc(jpg_buf_len);
if(jpg_buf == NULL) {
ESP_LOGE(TAG, "JPG buffer malloc failed");
return false;
bool image_to_jpeg(uint8_t* src, size_t src_len, uint16_t width, uint16_t height, v4l2_pix_fmt_t format,
uint8_t quality, uint8_t** out, size_t* out_len) {
#if CONFIG_XIAOZHI_ENABLE_HARDWARE_JPEG_ENCODER
if (encode_with_hw_jpeg(src, src_len, width, height, format, quality, out, out_len, NULL, NULL)) {
return true;
}
memory_stream dst_stream(jpg_buf, jpg_buf_len);
if(!convert_image(src, width, height, format, quality, &dst_stream)) {
free(jpg_buf);
return false;
}
*out = jpg_buf;
*out_len = dst_stream.get_size();
return true;
// Fallback to esp_new_jpeg
#endif
return encode_with_esp_new_jpeg(src, src_len, width, height, format, quality, out, out_len, NULL, NULL);
}
// Callback variant: delivers the JPEG data through a callback, suited to streaming
bool image_to_jpeg_cb(uint8_t *src, size_t src_len, uint16_t width, uint16_t height, pixformat_t format, uint8_t quality, jpg_out_cb cb, void *arg)
{
callback_stream dst_stream(cb, arg);
return convert_image(src, width, height, format, quality, &dst_stream);
bool image_to_jpeg_cb(uint8_t* src, size_t src_len, uint16_t width, uint16_t height, v4l2_pix_fmt_t format,
uint8_t quality, jpg_out_cb cb, void* arg) {
#if CONFIG_XIAOZHI_ENABLE_HARDWARE_JPEG_ENCODER
if (encode_with_hw_jpeg(src, src_len, width, height, format, quality, NULL, NULL, cb, arg)) {
return true;
}
// Fallback to esp_new_jpeg
#endif
return encode_with_esp_new_jpeg(src, src_len, width, height, format, quality, NULL, NULL, cb, arg);
}
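For reference, a minimal sketch of calling the one-shot API defined above; frame, width and height are placeholders rather than names from this commit. Both encoder paths return a heap buffer that the caller releases with free(); the callback variant avoids holding that extra copy when streaming.

uint8_t* jpg = nullptr;
size_t jpg_len = 0;
if (image_to_jpeg(frame, width * height * 2, width, height,
                  V4L2_PIX_FMT_RGB565, 80, &jpg, &jpg_len)) {
    // send or store jpg[0 .. jpg_len)
    free(jpg);
}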


@ -1,12 +1,14 @@
// image_to_jpeg.h - efficient image-to-JPEG encoding interface
// JPEG encoding implementation that saves about 8 KB of SRAM
#ifndef IMAGE_TO_JPEG_H
#define IMAGE_TO_JPEG_H
#pragma once
#include "sdkconfig.h"
#ifndef CONFIG_IDF_TARGET_ESP32
#include <stdint.h>
#include <stddef.h>
#include <esp_camera.h> // pulls in the ESP32 camera driver definitions so pixformat_t and camera_fb_t are not redefined
#include <linux/videodev2.h>
typedef uint32_t v4l2_pix_fmt_t; // see linux/videodev2.h for details
#ifdef __cplusplus
extern "C" {
@ -37,7 +39,7 @@ typedef size_t (*jpg_out_cb)(void *arg, size_t index, const void *data, size_t l
* @return true on success, false on failure
*/
bool image_to_jpeg(uint8_t *src, size_t src_len, uint16_t width, uint16_t height,
pixformat_t format, uint8_t quality, uint8_t **out, size_t *out_len);
v4l2_pix_fmt_t format, uint8_t quality, uint8_t **out, size_t *out_len);
/**
* @brief Convert an image to JPEG and deliver it through a callback
@ -59,10 +61,10 @@ bool image_to_jpeg(uint8_t *src, size_t src_len, uint16_t width, uint16_t height
* @return true on success, false on failure
*/
bool image_to_jpeg_cb(uint8_t *src, size_t src_len, uint16_t width, uint16_t height,
pixformat_t format, uint8_t quality, jpg_out_cb cb, void *arg);
v4l2_pix_fmt_t format, uint8_t quality, jpg_out_cb cb, void *arg);
#ifdef __cplusplus
}
#endif
#endif /* IMAGE_TO_JPEG_H */
#endif // ndef CONFIG_IDF_TARGET_ESP32
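With the new encoders the callback is invoked once with the complete JPEG and then once more with data == NULL to mark the end of the stream; the return value reports how many bytes the sink consumed. A hedged sketch of a sink that accumulates into a std::string, similar to the LvglDisplay change later in this commit (frame, frame_len, width and height are placeholders):

static size_t collect_jpeg(void* arg, size_t index, const void* data, size_t len) {
    (void)index;  // running index of the delivered chunk, unused here
    auto* out = static_cast<std::string*>(arg);
    if (data && len > 0) {
        out->append(static_cast<const char*>(data), len);  // data == NULL, len == 0 means end of stream
    }
    return len;
}

std::string jpeg;
image_to_jpeg_cb(frame, frame_len, width, height, V4L2_PIX_FMT_YUYV, 80, collect_jpeg, &jpeg);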


@ -1,722 +0,0 @@
// jpeg_encoder.cpp - C++ class for JPEG compression with class member arrays.
// Simple variant: uses plain class member arrays, so instances must be created on the heap
// Modified from jpge.cpp to use class member variables instead of static variables
// Public domain, Rich Geldreich <richgel99@gmail.com>
#include "jpeg_encoder.h"
#include <stdint.h>
#include <stdarg.h>
#include <stddef.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <malloc.h>
#include "esp_heap_caps.h"
#define JPGE_MAX(a,b) (((a)>(b))?(a):(b))
#define JPGE_MIN(a,b) (((a)<(b))?(a):(b))
namespace jpge2_simple {
static inline void *jpge_malloc(size_t nSize) {
void * b = malloc(nSize);
if(b){
return b;
}
// check if SPIRAM is enabled and allocate on SPIRAM if allocatable
#if (CONFIG_SPIRAM_SUPPORT && (CONFIG_SPIRAM_USE_CAPS_ALLOC || CONFIG_SPIRAM_USE_MALLOC))
return heap_caps_malloc(nSize, MALLOC_CAP_SPIRAM | MALLOC_CAP_8BIT);
#else
return NULL;
#endif
}
static inline void jpge_free(void *p) { free(p); }
// Various JPEG enums and tables.
enum { M_SOF0 = 0xC0, M_DHT = 0xC4, M_SOI = 0xD8, M_EOI = 0xD9, M_SOS = 0xDA, M_DQT = 0xDB, M_APP0 = 0xE0 };
enum { DC_LUM_CODES = 12, AC_LUM_CODES = 256, DC_CHROMA_CODES = 12, AC_CHROMA_CODES = 256, MAX_HUFF_SYMBOLS = 257, MAX_HUFF_CODESIZE = 32 };
static const uint8 s_zag[64] = { 0,1,8,16,9,2,3,10,17,24,32,25,18,11,4,5,12,19,26,33,40,48,41,34,27,20,13,6,7,14,21,28,35,42,49,56,57,50,43,36,29,22,15,23,30,37,44,51,58,59,52,45,38,31,39,46,53,60,61,54,47,55,62,63 };
static const int16 s_std_lum_quant[64] = { 16,11,12,14,12,10,16,14,13,14,18,17,16,19,24,40,26,24,22,22,24,49,35,37,29,40,58,51,61,60,57,51,56,55,64,72,92,78,64,68,87,69,55,56,80,109,81,87,95,98,103,104,103,62,77,113,121,112,100,120,92,101,103,99 };
static const int16 s_std_croma_quant[64] = { 17,18,18,24,21,24,47,26,26,47,99,66,56,66,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99 };
static const uint8 s_dc_lum_bits[17] = { 0,0,1,5,1,1,1,1,1,1,0,0,0,0,0,0,0 };
static const uint8 s_dc_lum_val[DC_LUM_CODES] = { 0,1,2,3,4,5,6,7,8,9,10,11 };
static const uint8 s_ac_lum_bits[17] = { 0,0,2,1,3,3,2,4,3,5,5,4,4,0,0,1,0x7d };
static const uint8 s_ac_lum_val[AC_LUM_CODES] = {
0x01,0x02,0x03,0x00,0x04,0x11,0x05,0x12,0x21,0x31,0x41,0x06,0x13,0x51,0x61,0x07,0x22,0x71,0x14,0x32,0x81,0x91,0xa1,0x08,0x23,0x42,0xb1,0xc1,0x15,0x52,0xd1,0xf0,
0x24,0x33,0x62,0x72,0x82,0x09,0x0a,0x16,0x17,0x18,0x19,0x1a,0x25,0x26,0x27,0x28,0x29,0x2a,0x34,0x35,0x36,0x37,0x38,0x39,0x3a,0x43,0x44,0x45,0x46,0x47,0x48,0x49,
0x4a,0x53,0x54,0x55,0x56,0x57,0x58,0x59,0x5a,0x63,0x64,0x65,0x66,0x67,0x68,0x69,0x6a,0x73,0x74,0x75,0x76,0x77,0x78,0x79,0x7a,0x83,0x84,0x85,0x86,0x87,0x88,0x89,
0x8a,0x92,0x93,0x94,0x95,0x96,0x97,0x98,0x99,0x9a,0xa2,0xa3,0xa4,0xa5,0xa6,0xa7,0xa8,0xa9,0xaa,0xb2,0xb3,0xb4,0xb5,0xb6,0xb7,0xb8,0xb9,0xba,0xc2,0xc3,0xc4,0xc5,
0xc6,0xc7,0xc8,0xc9,0xca,0xd2,0xd3,0xd4,0xd5,0xd6,0xd7,0xd8,0xd9,0xda,0xe1,0xe2,0xe3,0xe4,0xe5,0xe6,0xe7,0xe8,0xe9,0xea,0xf1,0xf2,0xf3,0xf4,0xf5,0xf6,0xf7,0xf8,
0xf9,0xfa
};
static const uint8 s_dc_chroma_bits[17] = { 0,0,3,1,1,1,1,1,1,1,1,1,0,0,0,0,0 };
static const uint8 s_dc_chroma_val[DC_CHROMA_CODES] = { 0,1,2,3,4,5,6,7,8,9,10,11 };
static const uint8 s_ac_chroma_bits[17] = { 0,0,2,1,2,4,4,3,4,7,5,4,4,0,1,2,0x77 };
static const uint8 s_ac_chroma_val[AC_CHROMA_CODES] = {
0x00,0x01,0x02,0x03,0x11,0x04,0x05,0x21,0x31,0x06,0x12,0x41,0x51,0x07,0x61,0x71,0x13,0x22,0x32,0x81,0x08,0x14,0x42,0x91,0xa1,0xb1,0xc1,0x09,0x23,0x33,0x52,0xf0,
0x15,0x62,0x72,0xd1,0x0a,0x16,0x24,0x34,0xe1,0x25,0xf1,0x17,0x18,0x19,0x1a,0x26,0x27,0x28,0x29,0x2a,0x35,0x36,0x37,0x38,0x39,0x3a,0x43,0x44,0x45,0x46,0x47,0x48,
0x49,0x4a,0x53,0x54,0x55,0x56,0x57,0x58,0x59,0x5a,0x63,0x64,0x65,0x66,0x67,0x68,0x69,0x6a,0x73,0x74,0x75,0x76,0x77,0x78,0x79,0x7a,0x82,0x83,0x84,0x85,0x86,0x87,
0x88,0x89,0x8a,0x92,0x93,0x94,0x95,0x96,0x97,0x98,0x99,0x9a,0xa2,0xa3,0xa4,0xa5,0xa6,0xa7,0xa8,0xa9,0xaa,0xb2,0xb3,0xb4,0xb5,0xb6,0xb7,0xb8,0xb9,0xba,0xc2,0xc3,
0xc4,0xc5,0xc6,0xc7,0xc8,0xc9,0xca,0xd2,0xd3,0xd4,0xd5,0xd6,0xd7,0xd8,0xd9,0xda,0xe2,0xe3,0xe4,0xe5,0xe6,0xe7,0xe8,0xe9,0xea,0xf2,0xf3,0xf4,0xf5,0xf6,0xf7,0xf8,
0xf9,0xfa
};
const int YR = 19595, YG = 38470, YB = 7471, CB_R = -11059, CB_G = -21709, CB_B = 32768, CR_R = 32768, CR_G = -27439, CR_B = -5329;
static inline uint8 clamp(int i) {
if (i < 0) {
i = 0;
} else if (i > 255){
i = 255;
}
return static_cast<uint8>(i);
}
static void RGB_to_YCC(uint8* pDst, const uint8 *pSrc, int num_pixels) {
for ( ; num_pixels; pDst += 3, pSrc += 3, num_pixels--) {
const int r = pSrc[0], g = pSrc[1], b = pSrc[2];
pDst[0] = static_cast<uint8>((r * YR + g * YG + b * YB + 32768) >> 16);
pDst[1] = clamp(128 + ((r * CB_R + g * CB_G + b * CB_B + 32768) >> 16));
pDst[2] = clamp(128 + ((r * CR_R + g * CR_G + b * CR_B + 32768) >> 16));
}
}
static void RGB_to_Y(uint8* pDst, const uint8 *pSrc, int num_pixels) {
for ( ; num_pixels; pDst++, pSrc += 3, num_pixels--) {
pDst[0] = static_cast<uint8>((pSrc[0] * YR + pSrc[1] * YG + pSrc[2] * YB + 32768) >> 16);
}
}
static void Y_to_YCC(uint8* pDst, const uint8* pSrc, int num_pixels) {
for( ; num_pixels; pDst += 3, pSrc++, num_pixels--) {
pDst[0] = pSrc[0];
pDst[1] = 128;
pDst[2] = 128;
}
}
// Forward DCT - DCT derived from jfdctint.
enum { CONST_BITS = 13, ROW_BITS = 2 };
#define DCT_DESCALE(x, n) (((x) + (((int32)1) << ((n) - 1))) >> (n))
#define DCT_MUL(var, c) (static_cast<int16>(var) * static_cast<int32>(c))
#define DCT1D(s0, s1, s2, s3, s4, s5, s6, s7) \
int32 t0 = s0 + s7, t7 = s0 - s7, t1 = s1 + s6, t6 = s1 - s6, t2 = s2 + s5, t5 = s2 - s5, t3 = s3 + s4, t4 = s3 - s4; \
int32 t10 = t0 + t3, t13 = t0 - t3, t11 = t1 + t2, t12 = t1 - t2; \
int32 u1 = DCT_MUL(t12 + t13, 4433); \
s2 = u1 + DCT_MUL(t13, 6270); \
s6 = u1 + DCT_MUL(t12, -15137); \
u1 = t4 + t7; \
int32 u2 = t5 + t6, u3 = t4 + t6, u4 = t5 + t7; \
int32 z5 = DCT_MUL(u3 + u4, 9633); \
t4 = DCT_MUL(t4, 2446); t5 = DCT_MUL(t5, 16819); \
t6 = DCT_MUL(t6, 25172); t7 = DCT_MUL(t7, 12299); \
u1 = DCT_MUL(u1, -7373); u2 = DCT_MUL(u2, -20995); \
u3 = DCT_MUL(u3, -16069); u4 = DCT_MUL(u4, -3196); \
u3 += z5; u4 += z5; \
s0 = t10 + t11; s1 = t7 + u1 + u4; s3 = t6 + u2 + u3; s4 = t10 - t11; s5 = t5 + u2 + u4; s7 = t4 + u1 + u3;
static void DCT2D(int32 *p) {
int32 c, *q = p;
for (c = 7; c >= 0; c--, q += 8) {
int32 s0 = q[0], s1 = q[1], s2 = q[2], s3 = q[3], s4 = q[4], s5 = q[5], s6 = q[6], s7 = q[7];
DCT1D(s0, s1, s2, s3, s4, s5, s6, s7);
q[0] = s0 << ROW_BITS; q[1] = DCT_DESCALE(s1, CONST_BITS-ROW_BITS); q[2] = DCT_DESCALE(s2, CONST_BITS-ROW_BITS); q[3] = DCT_DESCALE(s3, CONST_BITS-ROW_BITS);
q[4] = s4 << ROW_BITS; q[5] = DCT_DESCALE(s5, CONST_BITS-ROW_BITS); q[6] = DCT_DESCALE(s6, CONST_BITS-ROW_BITS); q[7] = DCT_DESCALE(s7, CONST_BITS-ROW_BITS);
}
for (q = p, c = 7; c >= 0; c--, q++) {
int32 s0 = q[0*8], s1 = q[1*8], s2 = q[2*8], s3 = q[3*8], s4 = q[4*8], s5 = q[5*8], s6 = q[6*8], s7 = q[7*8];
DCT1D(s0, s1, s2, s3, s4, s5, s6, s7);
q[0*8] = DCT_DESCALE(s0, ROW_BITS+3); q[1*8] = DCT_DESCALE(s1, CONST_BITS+ROW_BITS+3); q[2*8] = DCT_DESCALE(s2, CONST_BITS+ROW_BITS+3); q[3*8] = DCT_DESCALE(s3, CONST_BITS+ROW_BITS+3);
q[4*8] = DCT_DESCALE(s4, ROW_BITS+3); q[5*8] = DCT_DESCALE(s5, CONST_BITS+ROW_BITS+3); q[6*8] = DCT_DESCALE(s6, CONST_BITS+ROW_BITS+3); q[7*8] = DCT_DESCALE(s7, CONST_BITS+ROW_BITS+3);
}
}
// Compute the actual canonical Huffman codes/code sizes given the JPEG huff bits and val arrays.
// Simplified version: uses member buffers directly, no dynamic allocation needed
void jpeg_encoder::compute_huffman_table(uint *codes, uint8 *code_sizes, uint8 *bits, uint8 *val)
{
int i, l, last_p, si;
uint8 *huff_size = m_huff_size_temp; // use the member buffer directly
uint *huff_code = m_huff_code_temp; // use the member buffer directly
uint code;
int p = 0;
for (l = 1; l <= 16; l++) {
for (i = 1; i <= bits[l]; i++) {
huff_size[p++] = (char)l;
}
}
huff_size[p] = 0;
last_p = p; // write sentinel
code = 0; si = huff_size[0]; p = 0;
while (huff_size[p]) {
while (huff_size[p] == si) {
huff_code[p++] = code++;
}
code <<= 1;
si++;
}
memset(codes, 0, sizeof(codes[0])*256);
memset(code_sizes, 0, sizeof(code_sizes[0])*256);
for (p = 0; p < last_p; p++) {
codes[val[p]] = huff_code[p];
code_sizes[val[p]] = huff_size[p];
}
}
void jpeg_encoder::flush_output_buffer()
{
if (m_out_buf_left != JPGE_OUT_BUF_SIZE) {
m_all_stream_writes_succeeded = m_all_stream_writes_succeeded && m_pStream->put_buf(m_out_buf, JPGE_OUT_BUF_SIZE - m_out_buf_left);
}
m_pOut_buf = m_out_buf;
m_out_buf_left = JPGE_OUT_BUF_SIZE;
}
void jpeg_encoder::emit_byte(uint8 i)
{
*m_pOut_buf++ = i;
if (--m_out_buf_left == 0) {
flush_output_buffer();
}
}
void jpeg_encoder::put_bits(uint bits, uint len)
{
uint8 c = 0;
m_bit_buffer |= ((uint32)bits << (24 - (m_bits_in += len)));
while (m_bits_in >= 8) {
c = (uint8)((m_bit_buffer >> 16) & 0xFF);
emit_byte(c);
if (c == 0xFF) {
emit_byte(0);
}
m_bit_buffer <<= 8;
m_bits_in -= 8;
}
}
void jpeg_encoder::emit_word(uint i)
{
emit_byte(uint8(i >> 8)); emit_byte(uint8(i & 0xFF));
}
// JPEG marker generation.
void jpeg_encoder::emit_marker(int marker)
{
emit_byte(uint8(0xFF)); emit_byte(uint8(marker));
}
// Emit JFIF marker
void jpeg_encoder::emit_jfif_app0()
{
emit_marker(M_APP0);
emit_word(2 + 4 + 1 + 2 + 1 + 2 + 2 + 1 + 1);
emit_byte(0x4A); emit_byte(0x46); emit_byte(0x49); emit_byte(0x46); /* Identifier: ASCII "JFIF" */
emit_byte(0);
emit_byte(1); /* Major version */
emit_byte(1); /* Minor version */
emit_byte(0); /* Density unit */
emit_word(1);
emit_word(1);
emit_byte(0); /* No thumbnail image */
emit_byte(0);
}
// Emit quantization tables
void jpeg_encoder::emit_dqt()
{
for (int i = 0; i < ((m_num_components == 3) ? 2 : 1); i++)
{
emit_marker(M_DQT);
emit_word(64 + 1 + 2);
emit_byte(static_cast<uint8>(i));
for (int j = 0; j < 64; j++)
emit_byte(static_cast<uint8>(m_quantization_tables[i][j]));
}
}
// Emit start of frame marker
void jpeg_encoder::emit_sof()
{
emit_marker(M_SOF0); /* baseline */
emit_word(3 * m_num_components + 2 + 5 + 1);
emit_byte(8); /* precision */
emit_word(m_image_y);
emit_word(m_image_x);
emit_byte(m_num_components);
for (int i = 0; i < m_num_components; i++)
{
emit_byte(static_cast<uint8>(i + 1)); /* component ID */
emit_byte((m_comp_h_samp[i] << 4) + m_comp_v_samp[i]); /* h and v sampling */
emit_byte(i > 0); /* quant. table num */
}
}
// Emit Huffman table.
void jpeg_encoder::emit_dht(uint8 *bits, uint8 *val, int index, bool ac_flag)
{
emit_marker(M_DHT);
int length = 0;
for (int i = 1; i <= 16; i++)
length += bits[i];
emit_word(length + 2 + 1 + 16);
emit_byte(static_cast<uint8>(index + (ac_flag << 4)));
for (int i = 1; i <= 16; i++)
emit_byte(bits[i]);
for (int i = 0; i < length; i++)
emit_byte(val[i]);
}
// Emit all Huffman tables.
void jpeg_encoder::emit_dhts()
{
emit_dht(m_huff_bits[0+0], m_huff_val[0+0], 0, false);
emit_dht(m_huff_bits[2+0], m_huff_val[2+0], 0, true);
if (m_num_components == 3) {
emit_dht(m_huff_bits[0+1], m_huff_val[0+1], 1, false);
emit_dht(m_huff_bits[2+1], m_huff_val[2+1], 1, true);
}
}
// emit start of scan
void jpeg_encoder::emit_sos()
{
emit_marker(M_SOS);
emit_word(2 * m_num_components + 2 + 1 + 3);
emit_byte(m_num_components);
for (int i = 0; i < m_num_components; i++)
{
emit_byte(static_cast<uint8>(i + 1));
if (i == 0)
emit_byte((0 << 4) + 0);
else
emit_byte((1 << 4) + 1);
}
emit_byte(0); /* spectral selection */
emit_byte(63);
emit_byte(0);
}
void jpeg_encoder::load_block_8_8_grey(int x)
{
uint8 *pSrc;
sample_array_t *pDst = m_sample_array;
x <<= 3;
for (int i = 0; i < 8; i++, pDst += 8)
{
pSrc = m_mcu_lines[i] + x;
pDst[0] = pSrc[0] - 128; pDst[1] = pSrc[1] - 128; pDst[2] = pSrc[2] - 128; pDst[3] = pSrc[3] - 128;
pDst[4] = pSrc[4] - 128; pDst[5] = pSrc[5] - 128; pDst[6] = pSrc[6] - 128; pDst[7] = pSrc[7] - 128;
}
}
void jpeg_encoder::load_block_8_8(int x, int y, int c)
{
uint8 *pSrc;
sample_array_t *pDst = m_sample_array;
x = (x * (8 * 3)) + c;
y <<= 3;
for (int i = 0; i < 8; i++, pDst += 8)
{
pSrc = m_mcu_lines[y + i] + x;
pDst[0] = pSrc[0 * 3] - 128; pDst[1] = pSrc[1 * 3] - 128; pDst[2] = pSrc[2 * 3] - 128; pDst[3] = pSrc[3 * 3] - 128;
pDst[4] = pSrc[4 * 3] - 128; pDst[5] = pSrc[5 * 3] - 128; pDst[6] = pSrc[6 * 3] - 128; pDst[7] = pSrc[7 * 3] - 128;
}
}
void jpeg_encoder::load_block_16_8(int x, int c)
{
uint8 *pSrc1, *pSrc2;
sample_array_t *pDst = m_sample_array;
x = (x * (16 * 3)) + c;
int a = 0, b = 2;
for (int i = 0; i < 16; i += 2, pDst += 8)
{
pSrc1 = m_mcu_lines[i + 0] + x;
pSrc2 = m_mcu_lines[i + 1] + x;
pDst[0] = ((pSrc1[ 0 * 3] + pSrc1[ 1 * 3] + pSrc2[ 0 * 3] + pSrc2[ 1 * 3] + a) >> 2) - 128; pDst[1] = ((pSrc1[ 2 * 3] + pSrc1[ 3 * 3] + pSrc2[ 2 * 3] + pSrc2[ 3 * 3] + b) >> 2) - 128;
pDst[2] = ((pSrc1[ 4 * 3] + pSrc1[ 5 * 3] + pSrc2[ 4 * 3] + pSrc2[ 5 * 3] + a) >> 2) - 128; pDst[3] = ((pSrc1[ 6 * 3] + pSrc1[ 7 * 3] + pSrc2[ 6 * 3] + pSrc2[ 7 * 3] + b) >> 2) - 128;
pDst[4] = ((pSrc1[ 8 * 3] + pSrc1[ 9 * 3] + pSrc2[ 8 * 3] + pSrc2[ 9 * 3] + a) >> 2) - 128; pDst[5] = ((pSrc1[10 * 3] + pSrc1[11 * 3] + pSrc2[10 * 3] + pSrc2[11 * 3] + b) >> 2) - 128;
pDst[6] = ((pSrc1[12 * 3] + pSrc1[13 * 3] + pSrc2[12 * 3] + pSrc2[13 * 3] + a) >> 2) - 128; pDst[7] = ((pSrc1[14 * 3] + pSrc1[15 * 3] + pSrc2[14 * 3] + pSrc2[15 * 3] + b) >> 2) - 128;
int temp = a; a = b; b = temp;
}
}
void jpeg_encoder::load_block_16_8_8(int x, int c)
{
uint8 *pSrc1;
sample_array_t *pDst = m_sample_array;
x = (x * (16 * 3)) + c;
for (int i = 0; i < 8; i++, pDst += 8)
{
pSrc1 = m_mcu_lines[i + 0] + x;
pDst[0] = ((pSrc1[ 0 * 3] + pSrc1[ 1 * 3]) >> 1) - 128; pDst[1] = ((pSrc1[ 2 * 3] + pSrc1[ 3 * 3]) >> 1) - 128;
pDst[2] = ((pSrc1[ 4 * 3] + pSrc1[ 5 * 3]) >> 1) - 128; pDst[3] = ((pSrc1[ 6 * 3] + pSrc1[ 7 * 3]) >> 1) - 128;
pDst[4] = ((pSrc1[ 8 * 3] + pSrc1[ 9 * 3]) >> 1) - 128; pDst[5] = ((pSrc1[10 * 3] + pSrc1[11 * 3]) >> 1) - 128;
pDst[6] = ((pSrc1[12 * 3] + pSrc1[13 * 3]) >> 1) - 128; pDst[7] = ((pSrc1[14 * 3] + pSrc1[15 * 3]) >> 1) - 128;
}
}
void jpeg_encoder::load_quantized_coefficients(int component_num)
{
int32 *q = m_quantization_tables[component_num > 0];
int16 *pDst = m_coefficient_array;
for (int i = 0; i < 64; i++)
{
sample_array_t j = m_sample_array[s_zag[i]];
if (j < 0)
{
if ((j = -j + (*q >> 1)) < *q)
*pDst++ = 0;
else
*pDst++ = static_cast<int16>(-(j / *q));
}
else
{
if ((j = j + (*q >> 1)) < *q)
*pDst++ = 0;
else
*pDst++ = static_cast<int16>((j / *q));
}
q++;
}
}
void jpeg_encoder::code_coefficients_pass_two(int component_num)
{
int i, j, run_len, nbits, temp1, temp2;
int16 *pSrc = m_coefficient_array;
uint *codes[2];
uint8 *code_sizes[2];
if (component_num == 0)
{
codes[0] = m_huff_codes[0 + 0]; codes[1] = m_huff_codes[2 + 0];
code_sizes[0] = m_huff_code_sizes[0 + 0]; code_sizes[1] = m_huff_code_sizes[2 + 0];
}
else
{
codes[0] = m_huff_codes[0 + 1]; codes[1] = m_huff_codes[2 + 1];
code_sizes[0] = m_huff_code_sizes[0 + 1]; code_sizes[1] = m_huff_code_sizes[2 + 1];
}
temp1 = temp2 = pSrc[0] - m_last_dc_val[component_num];
m_last_dc_val[component_num] = pSrc[0];
if (temp1 < 0)
{
temp1 = -temp1; temp2--;
}
nbits = 0;
while (temp1)
{
nbits++; temp1 >>= 1;
}
put_bits(codes[0][nbits], code_sizes[0][nbits]);
if (nbits) put_bits(temp2 & ((1 << nbits) - 1), nbits);
for (run_len = 0, i = 1; i < 64; i++)
{
if ((temp1 = m_coefficient_array[i]) == 0)
run_len++;
else
{
while (run_len >= 16)
{
put_bits(codes[1][0xF0], code_sizes[1][0xF0]);
run_len -= 16;
}
if ((temp2 = temp1) < 0)
{
temp1 = -temp1;
temp2--;
}
nbits = 1;
while (temp1 >>= 1)
nbits++;
j = (run_len << 4) + nbits;
put_bits(codes[1][j], code_sizes[1][j]);
put_bits(temp2 & ((1 << nbits) - 1), nbits);
run_len = 0;
}
}
if (run_len)
put_bits(codes[1][0], code_sizes[1][0]);
}
void jpeg_encoder::code_block(int component_num)
{
DCT2D(m_sample_array);
load_quantized_coefficients(component_num);
code_coefficients_pass_two(component_num);
}
void jpeg_encoder::process_mcu_row()
{
if (m_num_components == 1)
{
for (int i = 0; i < m_mcus_per_row; i++)
{
load_block_8_8_grey(i); code_block(0);
}
}
else if ((m_comp_h_samp[0] == 1) && (m_comp_v_samp[0] == 1))
{
for (int i = 0; i < m_mcus_per_row; i++)
{
load_block_8_8(i, 0, 0); code_block(0); load_block_8_8(i, 0, 1); code_block(1); load_block_8_8(i, 0, 2); code_block(2);
}
}
else if ((m_comp_h_samp[0] == 2) && (m_comp_v_samp[0] == 1))
{
for (int i = 0; i < m_mcus_per_row; i++)
{
load_block_8_8(i * 2 + 0, 0, 0); code_block(0); load_block_8_8(i * 2 + 1, 0, 0); code_block(0);
load_block_16_8_8(i, 1); code_block(1); load_block_16_8_8(i, 2); code_block(2);
}
}
else if ((m_comp_h_samp[0] == 2) && (m_comp_v_samp[0] == 2))
{
for (int i = 0; i < m_mcus_per_row; i++)
{
load_block_8_8(i * 2 + 0, 0, 0); code_block(0); load_block_8_8(i * 2 + 1, 0, 0); code_block(0);
load_block_8_8(i * 2 + 0, 1, 0); code_block(0); load_block_8_8(i * 2 + 1, 1, 0); code_block(0);
load_block_16_8(i, 1); code_block(1); load_block_16_8(i, 2); code_block(2);
}
}
}
void jpeg_encoder::load_mcu(const void *pSrc)
{
const uint8* Psrc = reinterpret_cast<const uint8*>(pSrc);
uint8* pDst = m_mcu_lines[m_mcu_y_ofs]; // OK to write up to m_image_bpl_xlt bytes to pDst
if (m_num_components == 1) {
if (m_image_bpp == 3)
RGB_to_Y(pDst, Psrc, m_image_x);
else
memcpy(pDst, Psrc, m_image_x);
} else {
if (m_image_bpp == 3)
RGB_to_YCC(pDst, Psrc, m_image_x);
else
Y_to_YCC(pDst, Psrc, m_image_x);
}
// Possibly duplicate pixels at end of scanline if not a multiple of 8 or 16
if (m_num_components == 1)
memset(m_mcu_lines[m_mcu_y_ofs] + m_image_bpl_xlt, pDst[m_image_bpl_xlt - 1], m_image_x_mcu - m_image_x);
else
{
const uint8 y = pDst[m_image_bpl_xlt - 3 + 0], cb = pDst[m_image_bpl_xlt - 3 + 1], cr = pDst[m_image_bpl_xlt - 3 + 2];
uint8 *q = m_mcu_lines[m_mcu_y_ofs] + m_image_bpl_xlt;
for (int i = m_image_x; i < m_image_x_mcu; i++)
{
*q++ = y; *q++ = cb; *q++ = cr;
}
}
if (++m_mcu_y_ofs == m_mcu_y)
{
process_mcu_row();
m_mcu_y_ofs = 0;
}
}
// Quantization table generation.
void jpeg_encoder::compute_quant_table(int32 *pDst, const int16 *pSrc)
{
int32 q;
if (m_params.m_quality < 50)
q = 5000 / m_params.m_quality;
else
q = 200 - m_params.m_quality * 2;
for (int i = 0; i < 64; i++)
{
int32 j = *pSrc++; j = (j * q + 50L) / 100L;
*pDst++ = JPGE_MIN(JPGE_MAX(j, 1), 255);
}
}
// Higher-level methods.
bool jpeg_encoder::jpg_open(int p_x_res, int p_y_res, int src_channels)
{
m_num_components = 3;
switch (m_params.m_subsampling)
{
case Y_ONLY:
{
m_num_components = 1;
m_comp_h_samp[0] = 1; m_comp_v_samp[0] = 1;
m_mcu_x = 8; m_mcu_y = 8;
break;
}
case H1V1:
{
m_comp_h_samp[0] = 1; m_comp_v_samp[0] = 1;
m_comp_h_samp[1] = 1; m_comp_v_samp[1] = 1;
m_comp_h_samp[2] = 1; m_comp_v_samp[2] = 1;
m_mcu_x = 8; m_mcu_y = 8;
break;
}
case H2V1:
{
m_comp_h_samp[0] = 2; m_comp_v_samp[0] = 1;
m_comp_h_samp[1] = 1; m_comp_v_samp[1] = 1;
m_comp_h_samp[2] = 1; m_comp_v_samp[2] = 1;
m_mcu_x = 16; m_mcu_y = 8;
break;
}
case H2V2:
{
m_comp_h_samp[0] = 2; m_comp_v_samp[0] = 2;
m_comp_h_samp[1] = 1; m_comp_v_samp[1] = 1;
m_comp_h_samp[2] = 1; m_comp_v_samp[2] = 1;
m_mcu_x = 16; m_mcu_y = 16;
}
}
m_image_x = p_x_res; m_image_y = p_y_res;
m_image_bpp = src_channels;
m_image_bpl = m_image_x * src_channels;
m_image_x_mcu = (m_image_x + m_mcu_x - 1) & (~(m_mcu_x - 1));
m_image_y_mcu = (m_image_y + m_mcu_y - 1) & (~(m_mcu_y - 1));
m_image_bpl_xlt = m_image_x * m_num_components;
m_image_bpl_mcu = m_image_x_mcu * m_num_components;
m_mcus_per_row = m_image_x_mcu / m_mcu_x;
if ((m_mcu_lines[0] = static_cast<uint8*>(jpge_malloc(m_image_bpl_mcu * m_mcu_y))) == NULL) {
return false;
}
for (int i = 1; i < m_mcu_y; i++)
m_mcu_lines[i] = m_mcu_lines[i-1] + m_image_bpl_mcu;
if(m_last_quality != m_params.m_quality){
m_last_quality = m_params.m_quality;
compute_quant_table(m_quantization_tables[0], s_std_lum_quant);
compute_quant_table(m_quantization_tables[1], s_std_croma_quant);
}
if(!m_huff_initialized){
m_huff_initialized = true;
memcpy(m_huff_bits[0+0], s_dc_lum_bits, 17); memcpy(m_huff_val[0+0], s_dc_lum_val, DC_LUM_CODES);
memcpy(m_huff_bits[2+0], s_ac_lum_bits, 17); memcpy(m_huff_val[2+0], s_ac_lum_val, AC_LUM_CODES);
memcpy(m_huff_bits[0+1], s_dc_chroma_bits, 17); memcpy(m_huff_val[0+1], s_dc_chroma_val, DC_CHROMA_CODES);
memcpy(m_huff_bits[2+1], s_ac_chroma_bits, 17); memcpy(m_huff_val[2+1], s_ac_chroma_val, AC_CHROMA_CODES);
compute_huffman_table(m_huff_codes[0+0], m_huff_code_sizes[0+0], m_huff_bits[0+0], m_huff_val[0+0]);
compute_huffman_table(m_huff_codes[2+0], m_huff_code_sizes[2+0], m_huff_bits[2+0], m_huff_val[2+0]);
compute_huffman_table(m_huff_codes[0+1], m_huff_code_sizes[0+1], m_huff_bits[0+1], m_huff_val[0+1]);
compute_huffman_table(m_huff_codes[2+1], m_huff_code_sizes[2+1], m_huff_bits[2+1], m_huff_val[2+1]);
}
m_out_buf_left = JPGE_OUT_BUF_SIZE;
m_pOut_buf = m_out_buf;
m_bit_buffer = 0;
m_bits_in = 0;
m_mcu_y_ofs = 0;
m_pass_num = 2;
memset(m_last_dc_val, 0, 3 * sizeof(m_last_dc_val[0]));
// Emit all markers at beginning of image file.
emit_marker(M_SOI);
emit_jfif_app0();
emit_dqt();
emit_sof();
emit_dhts();
emit_sos();
return m_all_stream_writes_succeeded;
}
bool jpeg_encoder::process_end_of_image()
{
if (m_mcu_y_ofs) {
if (m_mcu_y_ofs < 16) { // check here just to shut up static analysis
for (int i = m_mcu_y_ofs; i < m_mcu_y; i++) {
memcpy(m_mcu_lines[i], m_mcu_lines[m_mcu_y_ofs - 1], m_image_bpl_mcu);
}
}
process_mcu_row();
}
put_bits(0x7F, 7);
emit_marker(M_EOI);
flush_output_buffer();
m_all_stream_writes_succeeded = m_all_stream_writes_succeeded && m_pStream->put_buf(NULL, 0);
m_pass_num++; // purposely bump up m_pass_num, for debugging
return true;
}
void jpeg_encoder::clear()
{
m_mcu_lines[0] = NULL;
m_pass_num = 0;
m_all_stream_writes_succeeded = true;
// Simple variant: the member buffers need no extra handling here
m_last_quality = 0;
m_huff_initialized = false;
}
jpeg_encoder::jpeg_encoder()
{
clear();
}
jpeg_encoder::~jpeg_encoder()
{
deinit();
}
bool jpeg_encoder::init(output_stream *pStream, int width, int height, int src_channels, const params &comp_params)
{
deinit();
if (((!pStream) || (width < 1) || (height < 1)) || ((src_channels != 1) && (src_channels != 3) && (src_channels != 4)) || (!comp_params.check())) return false;
// Simple variant: no dynamic allocation needed, the member buffers already exist
m_pStream = pStream;
m_params = comp_params;
return jpg_open(width, height, src_channels);
}
void jpeg_encoder::deinit()
{
jpge_free(m_mcu_lines[0]);
clear();
// Simple variant: the member buffers do not need to be freed
}
bool jpeg_encoder::process_scanline(const void* pScanline)
{
if ((m_pass_num < 1) || (m_pass_num > 2)) {
return false;
}
if (m_all_stream_writes_succeeded) {
if (!pScanline) {
if (!process_end_of_image()) {
return false;
}
} else {
load_mcu(pScanline);
}
}
return m_all_stream_writes_succeeded;
}
} // namespace jpge2_simple


@ -1,119 +0,0 @@
// jpeg_encoder.h - simple variant that keeps its buffers as class members
// This version declares the arrays directly in the class, so instances must be created on the heap
#ifndef JPEG_ENCODER_H
#define JPEG_ENCODER_H
namespace jpge2_simple
{
typedef unsigned char uint8;
typedef signed short int16;
typedef signed int int32;
typedef unsigned short uint16;
typedef unsigned int uint32;
typedef unsigned int uint;
enum subsampling_t { Y_ONLY = 0, H1V1 = 1, H2V1 = 2, H2V2 = 3 };
struct params {
inline params() : m_quality(85), m_subsampling(H2V2) { }
inline bool check() const {
if ((m_quality < 1) || (m_quality > 100)) return false;
if ((uint)m_subsampling > (uint)H2V2) return false;
return true;
}
int m_quality;
subsampling_t m_subsampling;
};
class output_stream {
public:
virtual ~output_stream() { };
virtual bool put_buf(const void* Pbuf, int len) = 0;
virtual uint get_size() const = 0;
};
// Simple variant: the arrays are declared directly in the class
// Warning: instances must be created on the heap (with new)!
class jpeg_encoder {
public:
jpeg_encoder();
~jpeg_encoder();
bool init(output_stream *pStream, int width, int height, int src_channels, const params &comp_params = params());
bool process_scanline(const void* pScanline);
void deinit();
private:
jpeg_encoder(const jpeg_encoder &);
jpeg_encoder &operator =(const jpeg_encoder &);
typedef int32 sample_array_t;
enum { JPGE_OUT_BUF_SIZE = 512 };
output_stream *m_pStream;
params m_params;
uint8 m_num_components;
uint8 m_comp_h_samp[3], m_comp_v_samp[3];
int m_image_x, m_image_y, m_image_bpp, m_image_bpl;
int m_image_x_mcu, m_image_y_mcu;
int m_image_bpl_xlt, m_image_bpl_mcu;
int m_mcus_per_row;
int m_mcu_x, m_mcu_y;
uint8 *m_mcu_lines[16];
uint8 m_mcu_y_ofs;
sample_array_t m_sample_array[64];
int16 m_coefficient_array[64];
int m_last_dc_val[3];
uint8 m_out_buf[JPGE_OUT_BUF_SIZE];
uint8 *m_pOut_buf;
uint m_out_buf_left;
uint32 m_bit_buffer;
uint m_bits_in;
uint8 m_pass_num;
bool m_all_stream_writes_succeeded;
// Declared directly as class member variables (about 8 KB)
int32 m_last_quality;
int32 m_quantization_tables[2][64]; // 512 bytes
bool m_huff_initialized;
uint m_huff_codes[4][256]; // 4096 bytes
uint8 m_huff_code_sizes[4][256]; // 1024 bytes
uint8 m_huff_bits[4][17]; // 68 bytes
uint8 m_huff_val[4][256]; // 1024 bytes
// temporary buffers for compute_huffman_table, also kept as members
uint8 m_huff_size_temp[257]; // 257 bytes
uint m_huff_code_temp[257]; // 1028 bytes
bool jpg_open(int p_x_res, int p_y_res, int src_channels);
void flush_output_buffer();
void put_bits(uint bits, uint len);
void emit_byte(uint8 i);
void emit_word(uint i);
void emit_marker(int marker);
void emit_jfif_app0();
void emit_dqt();
void emit_sof();
void emit_dht(uint8 *bits, uint8 *val, int index, bool ac_flag);
void emit_dhts();
void emit_sos();
void compute_quant_table(int32 *dst, const int16 *src);
void load_quantized_coefficients(int component_num);
void load_block_8_8_grey(int x);
void load_block_8_8(int x, int y, int c);
void load_block_16_8(int x, int c);
void load_block_16_8_8(int x, int c);
void code_coefficients_pass_two(int component_num);
void code_block(int component_num);
void process_mcu_row();
bool process_end_of_image();
void load_mcu(const void* src);
void clear();
void compute_huffman_table(uint *codes, uint8 *code_sizes, uint8 *bits, uint8 *val);
};
} // namespace jpge2_simple
#endif // JPEG_ENCODER_H


@ -237,7 +237,7 @@ bool LvglDisplay::SnapshotToJpeg(std::string& jpeg_data, int quality) {
jpeg_data.clear();
// Use the callback variant of the JPEG encoder to save further memory
bool ret = image_to_jpeg_cb(draw_buffer->data, draw_buffer->data_size, draw_buffer->header.w, draw_buffer->header.h, PIXFORMAT_RGB565, quality,
bool ret = image_to_jpeg_cb((uint8_t*)draw_buffer->data, draw_buffer->data_size, draw_buffer->header.w, draw_buffer->header.h, V4L2_PIX_FMT_RGB565, quality,
[](void *arg, size_t index, const void *data, size_t len) -> size_t {
std::string* output = static_cast<std::string*>(arg);
if (data && len > 0) {


@ -26,7 +26,10 @@ dependencies:
espressif/esp-sr: ~2.1.5
espressif/button: ~4.1.3
espressif/knob: ^1.0.0
espressif/esp32-camera: ~2.1.2
espressif/esp_video:
version: '==1.2.0' # pinned for compatibility; updating this version may require changes to this project's code
rules:
- if: target not in [esp32]
espressif/esp_lcd_touch_ft5x06: ~1.0.7
espressif/esp_lcd_touch_gt911: ^1
espressif/esp_lcd_touch_gt1151: ^1
@ -36,7 +39,7 @@ dependencies:
esp_lvgl_port: ~2.6.0
espressif/esp_io_expander_tca95xx_16bit: ^2.0.0
espressif2022/image_player: ==1.1.0~1
espressif2022/esp_emote_gfx: ^1.1.0
espressif2022/esp_emote_gfx: ^1.1.2
espressif/adc_mic: ^0.2.1
espressif/esp_mmap_assets: '>=1.2'
txp666/otto-emoji-gif-component:
@ -45,6 +48,7 @@ dependencies:
- if: target not in [esp32c5]
espressif/adc_battery_estimation: ^0.2.0
espressif/esp_new_jpeg: ^0.6.1
# SenseCAP Watcher Board
wvirgil123/sscma_client:
@ -89,4 +93,4 @@ dependencies:
## Required IDF version
idf:
version: '>=5.4.0'
version: '>=5.5.0'


@ -188,7 +188,7 @@ def release(board_type: str, config_filename: str = "config.json", *, filter_nam
for append in sdkconfig_append:
f.write(f"{append}\n")
# Build with macro BOARD_NAME defined to name
if os.system(f"idf.py -DBOARD_NAME={name} build") != 0:
if os.system(f"idf.py -DBOARD_NAME={name} -DBOARD_TYPE={board_type} build") != 0:
print("build failed")
sys.exit(1)