websocket — Fastest way to stream video from an ESP32-CAM to a Node.js server through Socket.IO

Asked by fdbelqdn on 2022-11-11, in: Other
Answers (3) | Views (114)

I want to stream video from an ESP32-CAM to a web browser. To do this I use a Node.js server (which relays the video and serves the HTML) and Socket.IO for the communication (both ESP32-CAM → Node.js and Node.js → web browser). This avoids having multiple clients connect directly to the ESP32-CAM and avoids dealing with NAT/router configuration: the server acts as a relay/repeater rather than a proxy.
I have actually managed to send the video data (JPEG frames encoded as base64) to Node.js and view the stream in the web browser.
Here is the code:
ESP32-CAM:


# include "WiFi.h"

# include "esp_camera.h"

# include "base64.h"

# include <ArduinoJson.h>

# include <WebSocketsClient.h>

# include <SocketIOclient.h>

// Pin definition for CAMERA_MODEL_AI_THINKER

# define PWDN_GPIO_NUM     32

# define RESET_GPIO_NUM    -1

# define XCLK_GPIO_NUM      0

# define SIOD_GPIO_NUM     26

# define SIOC_GPIO_NUM     27

# define Y9_GPIO_NUM       35

# define Y8_GPIO_NUM       34

# define Y7_GPIO_NUM       39

# define Y6_GPIO_NUM       36

# define Y5_GPIO_NUM       21

# define Y4_GPIO_NUM       19

# define Y3_GPIO_NUM       18

# define Y2_GPIO_NUM        5

# define VSYNC_GPIO_NUM    25

# define HREF_GPIO_NUM     23

# define PCLK_GPIO_NUM     22

// Replace with your network credentials
const char* hostname = "ESP32CAM";
const char* ssid = "ssid";
const char* password = "pass";
SocketIOclient socketIO;

void socketIOEvent(socketIOmessageType_t type, uint8_t * payload, size_t length) {
    switch(type) {
        case sIOtype_DISCONNECT:
            Serial.printf("[IOc] Disconnected!\n");
            break;
        case sIOtype_CONNECT:
            Serial.printf("[IOc] Connected to url: %s\n", payload);

            // join default namespace (no auto join in Socket.IO V3)
            socketIO.send(sIOtype_CONNECT, "/");
            break;
        case sIOtype_EVENT:
            Serial.printf("[IOc] get event: %s\n", payload);
            break;
        case sIOtype_ACK:
            Serial.printf("[IOc] get ack: %u\n", length);
            break;
        case sIOtype_ERROR:
            Serial.printf("[IOc] get error: %u\n", length);
            break;
        case sIOtype_BINARY_EVENT:
            Serial.printf("[IOc] get binary: %u\n", length);
            break;
        case sIOtype_BINARY_ACK:
            Serial.printf("[IOc] get binary ack: %u\n", length);
            break;
    }
}

void setupCamera()
{

    camera_config_t config;
    config.ledc_channel = LEDC_CHANNEL_0;
    config.ledc_timer = LEDC_TIMER_0;
    config.pin_d0 = Y2_GPIO_NUM;
    config.pin_d1 = Y3_GPIO_NUM;
    config.pin_d2 = Y4_GPIO_NUM;
    config.pin_d3 = Y5_GPIO_NUM;
    config.pin_d4 = Y6_GPIO_NUM;
    config.pin_d5 = Y7_GPIO_NUM;
    config.pin_d6 = Y8_GPIO_NUM;
    config.pin_d7 = Y9_GPIO_NUM;
    config.pin_xclk = XCLK_GPIO_NUM;
    config.pin_pclk = PCLK_GPIO_NUM;
    config.pin_vsync = VSYNC_GPIO_NUM;
    config.pin_href = HREF_GPIO_NUM;
    config.pin_sscb_sda = SIOD_GPIO_NUM;
    config.pin_sscb_scl = SIOC_GPIO_NUM;
    config.pin_pwdn = PWDN_GPIO_NUM;
    config.pin_reset = RESET_GPIO_NUM;
    config.xclk_freq_hz = 20000000;
    config.pixel_format = PIXFORMAT_JPEG;

    config.frame_size = FRAMESIZE_CIF; // FRAMESIZE_ + QVGA|CIF|VGA|SVGA|XGA|SXGA|UXGA
    config.jpeg_quality = 10;
    config.fb_count = 2;

    // Init Camera
    esp_err_t err = esp_camera_init(&config);
    if (err != ESP_OK) {
      Serial.printf("Camera init failed with error 0x%x", err);
      return;
    }

}

void setup(){
  Serial.begin(115200);

  // Connect to Wi-Fi
  WiFi.begin(ssid, password);
  while (WiFi.status() != WL_CONNECTED) {
    delay(1000);
    Serial.println("Connecting to WiFi..");
  }

  // Print ESP32 Local IP Address
  Serial.println(WiFi.localIP());

  setupCamera();

  // server address, port and URL
  // without ssl to test speed may change later
  socketIO.begin("server", port,"/socket.io/?EIO=4");

  // event handler
  socketIO.onEvent(socketIOEvent);

}

unsigned long messageTimestamp = 0;
void loop() {
    socketIO.loop();

    uint64_t now = millis();

    if(now - messageTimestamp > 10) {
        messageTimestamp = now;

        camera_fb_t * fb = NULL;

        // Take Picture with Camera
        fb = esp_camera_fb_get();  
        if(!fb) {
          Serial.println("Camera capture failed");
          return;
        }

        //Slow
        String picture_encoded = base64::encode(fb->buf,fb->len);

        // create JSON message for Socket.IO (event)
        DynamicJsonDocument doc(15000);
        JsonArray array = doc.to<JsonArray>();

        // add event name
        // Hint: socket.on('event_name', ....
        array.add("jpgstream_server");

        // add payload (parameters) for the event
        JsonObject param1 = array.createNestedObject();
        param1["hostname"] = hostname;
        param1["picture"] = String((char *)fb->buf);

        // JSON to String (serializion)
        String output;
        serializeJson(doc, output);

        // Send event        
        socketIO.sendEVENT(output);
        Serial.println("Image sent");
        Serial.println(output);
        esp_camera_fb_return(fb); 
    }
}

Node.js:

const express = require('express');
const app = express();
const http = require('http').Server(app);
const io = require('socket.io')(http);
const port = 3000;

const express_config= require('./config/express.js');

express_config.init(app);

var cameraArray={};

app.get('/', (req, res) => {
    res.render('index', {});
});

io.on('connection', (socket) => {
  socket.on('jpgstream_server', (msg) => {
    io.to('webusers').emit('jpgstream_client', msg);
  });

  socket.on('webuser', (msg) => {
      socket.join('webusers');      
  });

});

http.listen(port, () => {
      console.log(`App listening at http://localhost:${port}`)
})

Web browser:

<!DOCTYPE html>
<html>
<%- include('./partials/head.ejs') %>
<body class="page_display">
    <div class="main_content">
        <div class="page_title"><h1 class="tcenter">Camera relay</h1></div>
        <div class="tcenter">
            <img id="jpgstream" class="jpgstream" src="" />
        </div>
    </div>

    <script src="/socket.io/socket.io.js"></script>
    <script>
    var socket = io();

    socket.emit("webuser",{});

    socket.on('jpgstream_client', function(msg) {
        console.log(msg);
        var x = document.getElementsByTagName("img").item(0);
        x.setAttribute("src", 'data:image/jpg;base64,'+msg.picture);        
    });
    </script>
</body>
</html>

Given the hardware limitations I don't expect the video to be smooth and sharp, but I'm not even getting 10 fps at this resolution. The bottleneck seems to be the base64 encoding. The ESP32-CAM web-server example (https://github.com/espressif/arduino-esp32/blob/master/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino) is much faster, but it requires clients to access the ESP32-CAM directly.
Is there a way to optimize the base64 encoding, or to send the data over Socket.IO differently, to improve the frame rate?
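A quick way to check that the base64 step (rather than the capture or the Socket.IO send) really dominates the per-frame time is to time each stage of the loop. A rough sketch based on the code above; the printed labels are only for logging:

unsigned long t0 = millis();
camera_fb_t * fb = esp_camera_fb_get();                      // capture
if (!fb) return;
unsigned long t1 = millis();
String picture_encoded = base64::encode(fb->buf, fb->len);   // base64
unsigned long t2 = millis();
// ... build the JSON document and call socketIO.sendEVENT(output) as above ...
unsigned long t3 = millis();
Serial.printf("capture %lu ms, base64 %lu ms, json+send %lu ms\n",
              t1 - t0, t2 - t1, t3 - t2);
esp_camera_fb_return(fb);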

Answer 1 (41zrol4v)

I know I'm a bit late, but I'd still like to share my solution to this problem: sending images from an ESP32 camera over Socket.IO is indeed possible, even if the performance isn't perfect.
I'll focus on the OV2640 camera module, since it is the one used on the AI-Thinker board and the most common. When this camera takes a picture, it processes it according to the settings *inside the camera module* (no ESP32 CPU time is wasted) and then writes it into an ESP32 buffer. At that point the image is already fully encoded (JPEG, uint8) and ready to go; all that remains is to hand that buffer to Socket.IO.
The Socket.IO protocol already provides a way to send binary data. A BINARY_EVENT packet such as

{ "type": 5, "nsp": "/", "data": ["hello", <Buffer 01 02 03>] }

is encoded as

51-["hello",{"_placeholder":true,"num":0}] + <Buffer 01 02 03>

However, the ESP32 Socket.IO class in the WebSocket library used above (WebSocketsClient.h / SocketIOclient.h) is missing a method for this. Fortunately, it is easy to add one yourself. All you need to do is send a correctly formatted WebSocket text frame containing a placeholder (which the server replaces with the binary data when parsing), followed by the actual binary data in a separate frame:

// modified client that can send binary Socket.IO events
class SocketIOclientMod : public SocketIOclient {
    public:
    bool sendBIN(uint8_t * payload, size_t length, bool headerToPayload = false);
};

// text frame content including hostname and a placeholder for the binary data:
// 451-["image",{"hostname":"<hostname>","pic":{"_placeholder":true,"num":0}}]
char binaryLeadFrame[100];

// build the lead frame once, e.g. at the end of setup()
void buildBinaryLeadFrame() {
    strcpy(binaryLeadFrame, "451-[\"image\",{\"hostname\":\"");
    strcat(binaryLeadFrame, hostname);
    strcat(binaryLeadFrame, "\",\"pic\":{\"_placeholder\":true,\"num\":0}}]");
}

// send the text frame followed by the binary frame
bool SocketIOclientMod::sendBIN(uint8_t * payload, size_t length, bool headerToPayload) {
    bool ret = false;
    if (length == 0) {
        length = strlen((const char *) payload);
    }
    ret = sendFrame(&_client, WSop_text, (uint8_t *) binaryLeadFrame,
          strlen((const char*) binaryLeadFrame), true, headerToPayload);

    if (ret) {
        ret = sendFrame(&_client, WSop_binary, payload, length, true, headerToPayload);
    }
    return ret;
}

The server will parse this as an event named "image" with the following payload:

{
  "hostname":"yourHostname",
  "pic":{
    "type":"Buffer",
    "data": [image data encoded as jpeg, uint8]
    }
}

On your server, simply forward this data to your web users, and display the image in the browser when it arrives.
All that is left on the ESP32 is to send the buffer:

SocketIOclientMod socketIO;

// use this whenever you want to send an image:
void sendImage(){
    camera_fb_t *fb = esp_camera_fb_get();
    socketIO.sendBIN(fb->buf,fb->len);
    esp_camera_fb_return(fb);
}

Two more notes on the camera itself. The ESP32's on-board antenna is not ideal; if at all possible, an external antenna should be used. If you are only interested in part of the image, it also helps to avoid sending unnecessary pixels. Overall, the camera module itself tops out at 15 fps at its highest resolutions (UXGA/SXGA), 30 fps at SVGA and 60 fps at CIF (see the chip's datasheet). The rate you actually achieve will be lower because of the limited Wi-Fi throughput and the other processes running on the hardware, but 10 fps at decent quality is certainly possible.
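If the full resolution is not needed, the sensor settings can also be changed at run time rather than only through camera_config_t. A minimal sketch with illustrative values (this lowers the resolution and JPEG quality, it does not crop a region):

sensor_t * s = esp_camera_sensor_get();
if (s) {
    s->set_framesize(s, FRAMESIZE_CIF);   // 400x296, the size at which the OV2640 allows up to 60 fps
    s->set_quality(s, 12);                // higher value = stronger compression, smaller frames
}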
I know that doing the same thing without Socket.IO would be easier and have less overhead, but not everyone has that option. So this is for anyone reading this who, like me, has no choice but to use Socket.IO.
I hope this helps.

Answer 2 (mf98qq94)

Honestly, all of this data conversion on the ESP32 (raw → base64 → JSON → WebSockets) is hardly ideal for performance. But assuming your diagnosis is correct and you are using this base64 library, the problem may come from the fact that although the ESP32 core runs fairly fast (240 MHz), all of its code and data live in external SPI-connected flash. As you can guess, fetching anything from there is slow. There is a 32 KB flash cache, but the base64 encoding code and its lookup table most likely get evicted from it between consecutive frames.
The first thing is to make sure the core and the SPI bus are running at their maximum frequencies (240 MHz and 80 MHz respectively). Sorry, I don't know how that's done in Arduino-land; under ESP-IDF it is configured with idf.py menuconfig.
Second, you could tweak the base64 library to move its code and data from flash into RAM. Move the encoding function into instruction RAM by adding IRAM_ATTR. The character table is marked constexpr, which makes the compiler put it in flash; I suspect that removing constexpr would load it into data RAM instead.
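A minimal sketch of the kind of change meant here, assuming a simple lookup-table encoder; the function and table names are illustrative, not those of the actual library:

#include "esp_attr.h"   // IRAM_ATTR / DRAM_ATTR

// keep the lookup table in data RAM instead of flash (no constexpr, pinned with DRAM_ATTR)
static const char DRAM_ATTR b64_alphabet[] =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

// keep the hot encoding loop in instruction RAM so it is never fetched from SPI flash
size_t IRAM_ATTR b64_encode(char * out, const uint8_t * in, size_t len) {
    size_t o = 0;
    for (size_t i = 0; i < len; i += 3) {
        uint32_t v = (uint32_t) in[i] << 16;
        if (i + 1 < len) v |= (uint32_t) in[i + 1] << 8;
        if (i + 2 < len) v |= in[i + 2];
        out[o++] = b64_alphabet[(v >> 18) & 0x3F];
        out[o++] = b64_alphabet[(v >> 12) & 0x3F];
        out[o++] = (i + 1 < len) ? b64_alphabet[(v >> 6) & 0x3F] : '=';
        out[o++] = (i + 2 < len) ? b64_alphabet[v & 0x3F] : '=';
    }
    out[o] = '\0';   // caller must provide at least 4 * ((len + 2) / 3) + 1 bytes
    return o;
}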

Answer 3 (gwbalxhn)

Building on @Tarmo's answer, Socket.IO does not seem to be a good choice performance-wise: even with its binary support, the extra conversions still appear to be required.
I switched to a plain binary WebSocket and the performance is much better.
A sample project is available here: https://github.com/Inglebard/esp32cam-relay
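For reference, a minimal sketch of what the ESP32 side of that approach can look like with the same WebSocketsClient library; the host, port, path and function names are placeholders, not taken from the linked project:

#include "esp_camera.h"
#include <WebSocketsClient.h>

WebSocketsClient webSocket;

void setupStream() {
    // plain (non-Socket.IO) WebSocket endpoint on the relay server
    webSocket.begin("server", 3000, "/ws");
    webSocket.setReconnectInterval(5000);
}

void sendFrame() {
    camera_fb_t * fb = esp_camera_fb_get();
    if (!fb) return;
    // the frame buffer already holds a complete JPEG, so it can go out as a single binary frame
    webSocket.sendBIN(fb->buf, fb->len);
    esp_camera_fb_return(fb);
}

// call webSocket.loop() and sendFrame() from loop(), as in the Socket.IO version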
