Question/Issue:
[ESP32-CAM can't control the servo motor]
Project ID:
[823139]
Context/Use case:
[Material-sorting project: an Edge Impulse image model running on the ESP32-CAM classifies plastic/metal/carton and should drive the corresponding servo]
Steps Taken:
- [Step 1]
- [Step 2]
- [Step 3]
Expected Outcome:
[Servo control: the servo matching the detected material is actuated]
Actual Outcome:
[The serial monitor shows "Camera capture failed" and "Failed to capture image", and the servos are never actuated]
Reproducibility:
- [x] Always
- [ ] Sometimes
- [ ] Rarely
Environment:
- **Platform:** ESP32-CAM (AI Thinker)

Code:
/* Edge Impulse Arduino examples
 * Copyright (c) 2022 EdgeImpulse Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
// These sketches are tested with 2.0.4 ESP32 Arduino Core
// Release ESP32 Arduino 2.0.4 based on ESP-IDF 4.4.1 · espressif/arduino-esp32 · GitHub
/* Includes ---------------------------------------------------------------- */
#include <Grade_Project_inferencing.h>
#include "edge-impulse-sdk/dsp/image/image.hpp"
#include <ESP32Servo.h>
#include "esp_camera.h"
// Select camera model - find more camera models in camera_pins.h file here
// https://github.com/espressif/arduino-esp32/blob/master/libraries/ESP32/examples/Camera/CameraWebServer/camera_pins.h
//#define CAMERA_MODEL_ESP_EYE // Has PSRAM
#define CAMERA_MODEL_AI_THINKER // Has PSRAM
#if defined(CAMERA_MODEL_ESP_EYE)
#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 4
#define SIOD_GPIO_NUM 18
#define SIOC_GPIO_NUM 23
#define Y9_GPIO_NUM 36
#define Y8_GPIO_NUM 37
#define Y7_GPIO_NUM 38
#define Y6_GPIO_NUM 39
#define Y5_GPIO_NUM 35
#define Y4_GPIO_NUM 14
#define Y3_GPIO_NUM 13
#define Y2_GPIO_NUM 34
#define VSYNC_GPIO_NUM 5
#define HREF_GPIO_NUM 27
#define PCLK_GPIO_NUM 25
#elif defined(CAMERA_MODEL_AI_THINKER)
#define PWDN_GPIO_NUM 32
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 0
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27
#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 21
#define Y4_GPIO_NUM 19
#define Y3_GPIO_NUM 18
#define Y2_GPIO_NUM 5
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22
#else
#error "Camera model not selected"
#endif
/* Constant defines -------------------------------------------------------- */
#define EI_CAMERA_RAW_FRAME_BUFFER_COLS 320
#define EI_CAMERA_RAW_FRAME_BUFFER_ROWS 240
#define EI_CAMERA_FRAME_BYTE_SIZE 3
/* Servo Control ----------------------------------------------------------- */
// Define servo pins for each material (adjust these pins according to your setup)
#define SERVO_PLASTIC_PIN 12
#define SERVO_METAL_PIN 13
#define SERVO_CARTON_PIN 14
// Confidence threshold (adjust as needed, 0.7 = 70% confidence)
#define CONFIDENCE_THRESHOLD 0.7
// Servo objects
Servo servoPlastic;
Servo servoMetal;
Servo servoCarton;
// Variables to prevent continuous triggering
unsigned long lastDetectionTime = 0;
const unsigned long DEBOUNCE_DELAY = 3000; // 3 seconds between detections
String lastDetectedMaterial = "";
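// Added note (my assumption about the AI-Thinker wiring, not from the original sketch):
// on the ESP32-CAM, GPIO 12, 13 and 14 are shared with the microSD interface, and
// GPIO 12 (MTDI) is also a strapping pin sampled at reset, so a load on it during boot
// can interfere with startup. These pins can work as servo outputs when the SD slot is
// unused, but the wiring is worth double-checking.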
/* Private variables ------------------------------------------------------- */
static bool debug_nn = false; // Set this to true to see e.g. features generated from the raw signal
static bool is_initialised = false;
uint8_t *snapshot_buf; //points to the output of the capture
static camera_config_t camera_config = {
    .pin_pwdn = PWDN_GPIO_NUM,
    .pin_reset = RESET_GPIO_NUM,
    .pin_xclk = XCLK_GPIO_NUM,
    .pin_sscb_sda = SIOD_GPIO_NUM,
    .pin_sscb_scl = SIOC_GPIO_NUM,
    .pin_d7 = Y9_GPIO_NUM,
    .pin_d6 = Y8_GPIO_NUM,
    .pin_d5 = Y7_GPIO_NUM,
    .pin_d4 = Y6_GPIO_NUM,
    .pin_d3 = Y5_GPIO_NUM,
    .pin_d2 = Y4_GPIO_NUM,
    .pin_d1 = Y3_GPIO_NUM,
    .pin_d0 = Y2_GPIO_NUM,
    .pin_vsync = VSYNC_GPIO_NUM,
    .pin_href = HREF_GPIO_NUM,
    .pin_pclk = PCLK_GPIO_NUM,

    // XCLK 20MHz or 10MHz for OV2640 double FPS (Experimental)
    .xclk_freq_hz = 20000000,
    .ledc_timer = LEDC_TIMER_0,
    .ledc_channel = LEDC_CHANNEL_0,

    .pixel_format = PIXFORMAT_JPEG, // YUV422, GRAYSCALE, RGB565, JPEG
    .frame_size = FRAMESIZE_QVGA,   // QQVGA-UXGA; do not use sizes above QVGA when not JPEG
    .jpeg_quality = 12,             // 0-63; lower number means higher quality
    .fb_count = 1,                  // if more than one, i2s runs in continuous mode; use only with JPEG
    .fb_location = CAMERA_FB_IN_PSRAM,
    .grab_mode = CAMERA_GRAB_WHEN_EMPTY,
};
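// Added note: the config above hands LEDC_TIMER_0 / LEDC_CHANNEL_0 to the camera driver
// for XCLK generation. The ESP32Servo library also allocates LEDC timers for its PWM
// output, so the servos and the camera can end up competing for the same timer unless
// the servo library is restricted to other timers (see the sketch after setup() below).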
/* Function definitions ------------------------------------------------------- */
bool ei_camera_init(void);
void ei_camera_deinit(void);
bool ei_camera_capture(uint32_t img_width, uint32_t img_height, uint8_t *out_buf);
void controlServo(const String& material, float confidence);
void activateServo(Servo& servo, int pin, const String& materialName);
/**
 * @brief Arduino setup function
 */
void setup()
{
    // put your setup code here, to run once:
    Serial.begin(115200);
    // comment out the below line to start inference immediately after upload
    while (!Serial);
    Serial.println("Edge Impulse Inferencing Demo");

    // Initialize servos
    servoPlastic.attach(SERVO_PLASTIC_PIN);
    servoMetal.attach(SERVO_METAL_PIN);
    servoCarton.attach(SERVO_CARTON_PIN);

    // Move all servos to initial position (0 degrees)
    servoPlastic.write(0);
    servoMetal.write(0);
    servoCarton.write(0);
    Serial.println("Servos initialized and set to initial position");

    if (ei_camera_init() == false) {
        ei_printf("Failed to initialize Camera!\r\n");
    }
    else {
        ei_printf("Camera initialized\r\n");
    }

    ei_printf("\nStarting continuous inference in 2 seconds...\n");
    ei_sleep(2000);
}
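/* Added sketch (illustration only, not part of the original code): one way to keep
 * ESP32Servo off the LEDC timer the camera claims for XCLK. It assumes the ESP32Servo
 * library's ESP32PWM::allocateTimer() / setPeriodHertz() API and typical 500-2400 us
 * hobby-servo pulse widths; adjust for your hardware. If this approach is used, call it
 * from setup() instead of the plain attach()/write() calls above.
 */
static void initServosAvoidingCameraTimer() {
    // Let ESP32Servo use LEDC timers 1-3 only, leaving timer 0 for the camera XCLK.
    ESP32PWM::allocateTimer(1);
    ESP32PWM::allocateTimer(2);
    ESP32PWM::allocateTimer(3);

    // Standard 50 Hz servo refresh rate.
    servoPlastic.setPeriodHertz(50);
    servoMetal.setPeriodHertz(50);
    servoCarton.setPeriodHertz(50);

    // Attach with explicit pulse-width limits (assumed values; check the servo datasheet).
    servoPlastic.attach(SERVO_PLASTIC_PIN, 500, 2400);
    servoMetal.attach(SERVO_METAL_PIN, 500, 2400);
    servoCarton.attach(SERVO_CARTON_PIN, 500, 2400);

    // Rest position.
    servoPlastic.write(0);
    servoMetal.write(0);
    servoCarton.write(0);
}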
/**
 * @brief Get data and run inferencing
 *
 * @param[in] debug Get debug info if true
 */
void loop()
{
    // instead of wait_ms, we'll wait on the signal, this allows threads to cancel us...
    if (ei_sleep(5) != EI_IMPULSE_OK) {
        return;
    }

    snapshot_buf = (uint8_t*)malloc(EI_CAMERA_RAW_FRAME_BUFFER_COLS * EI_CAMERA_RAW_FRAME_BUFFER_ROWS * EI_CAMERA_FRAME_BYTE_SIZE);

    // check if allocation was successful
    if (snapshot_buf == nullptr) {
        ei_printf("ERR: Failed to allocate snapshot buffer!\n");
        return;
    }

    ei::signal_t signal;
    signal.total_length = EI_CLASSIFIER_INPUT_WIDTH * EI_CLASSIFIER_INPUT_HEIGHT;
    signal.get_data = &ei_camera_get_data;

    if (ei_camera_capture((size_t)EI_CLASSIFIER_INPUT_WIDTH, (size_t)EI_CLASSIFIER_INPUT_HEIGHT, snapshot_buf) == false) {
        ei_printf("Failed to capture image\r\n");
        free(snapshot_buf);
        return;
    }

    // Run the classifier
    ei_impulse_result_t result = { 0 };

    EI_IMPULSE_ERROR err = run_classifier(&signal, &result, debug_nn);
    if (err != EI_IMPULSE_OK) {
        ei_printf("ERR: Failed to run classifier (%d)\n", err);
        free(snapshot_buf);
        return;
    }

    // print the predictions
    ei_printf("Predictions (DSP: %d ms., Classification: %d ms., Anomaly: %d ms.): \n",
              result.timing.dsp, result.timing.classification, result.timing.anomaly);

    // Check classifications and control servos
    bool detectionMade = false;
    String detectedMaterial = "";
    float highestConfidence = 0;

#if EI_CLASSIFIER_OBJECT_DETECTION == 1
    // Object detection mode
    ei_printf("Object detection bounding boxes:\r\n");
    for (uint32_t i = 0; i < result.bounding_boxes_count; i++) {
        ei_impulse_result_bounding_box_t bb = result.bounding_boxes[i];
        if (bb.value == 0) {
            continue;
        }
        ei_printf(" %s (%f) [ x: %u, y: %u, width: %u, height: %u ]\r\n",
                  bb.label,
                  bb.value,
                  bb.x,
                  bb.y,
                  bb.width,
                  bb.height);

        // Check if this detection meets our confidence threshold
        if (bb.value > highestConfidence) {
            highestConfidence = bb.value;
            detectedMaterial = String(bb.label);
        }
    }
    if (highestConfidence >= CONFIDENCE_THRESHOLD) {
        detectionMade = true;
    }
#else
    // Classification mode
    ei_printf("Predictions:\r\n");
    for (uint16_t i = 0; i < EI_CLASSIFIER_LABEL_COUNT; i++) {
        ei_printf(" %s: ", ei_classifier_inferencing_categories[i]);
        ei_printf("%.5f\r\n", result.classification[i].value);

        // Check if this classification meets our confidence threshold
        if (result.classification[i].value > highestConfidence) {
            highestConfidence = result.classification[i].value;
            detectedMaterial = String(ei_classifier_inferencing_categories[i]);
        }
    }
    if (highestConfidence >= CONFIDENCE_THRESHOLD) {
        detectionMade = true;
    }
#endif

    // Control servo based on detection
    if (detectionMade && (millis() - lastDetectionTime > DEBOUNCE_DELAY || detectedMaterial != lastDetectedMaterial)) {
        controlServo(detectedMaterial, highestConfidence);
        lastDetectionTime = millis();
        lastDetectedMaterial = detectedMaterial;
    }

    // Print anomaly result (if it exists)
#if EI_CLASSIFIER_HAS_ANOMALY
    ei_printf("Anomaly prediction: %.3f\r\n", result.anomaly);
#endif

#if EI_CLASSIFIER_HAS_VISUAL_ANOMALY
    ei_printf("Visual anomalies:\r\n");
    for (uint32_t i = 0; i < result.visual_ad_count; i++) {
        ei_impulse_result_bounding_box_t bb = result.visual_ad_grid_cells[i];
        if (bb.value == 0) {
            continue;
        }
        ei_printf(" %s (%f) [ x: %u, y: %u, width: %u, height: %u ]\r\n",
                  bb.label,
                  bb.value,
                  bb.x,
                  bb.y,
                  bb.width,
                  bb.height);
    }
#endif

    free(snapshot_buf);
}
/**
 * Control servo based on detected material
 */
void controlServo(const String& material, float confidence) {
    Serial.println("=== MATERIAL DETECTED ===");
    Serial.print("Material: ");
    Serial.println(material);
    Serial.print("Confidence: ");
    Serial.println(confidence);

    // Convert material to lowercase for case-insensitive comparison
    String materialLower = material;
    materialLower.toLowerCase();

    if (materialLower.indexOf("plastic") >= 0) {
        Serial.println("Activating PLASTIC servo");
        activateServo(servoPlastic, SERVO_PLASTIC_PIN, "PLASTIC");
    }
    else if (materialLower.indexOf("metal") >= 0) {
        Serial.println("Activating METAL servo");
        activateServo(servoMetal, SERVO_METAL_PIN, "METAL");
    }
    else if (materialLower.indexOf("carton") >= 0) {
        Serial.println("Activating CARTON servo");
        activateServo(servoCarton, SERVO_CARTON_PIN, "CARTON");
    }
    else {
        Serial.println("Unknown material - no action taken");
    }
    Serial.println("=========================");
}
/**
 * Activate a specific servo
 */
void activateServo(Servo& servo, int pin, const String& materialName) {
    Serial.print("Moving servo for ");
    Serial.println(materialName);

    // Move servo to 90 degrees (active position)
    servo.write(90);
    delay(1000);

    // Return servo to 0 degrees (rest position)
    servo.write(0);

    Serial.print("Servo for ");
    Serial.print(materialName);
    Serial.println(" completed movement");
}
/**
 * @brief Setup image sensor & start streaming
 */
bool ei_camera_init(void) {
    if (is_initialised) return true;

#if defined(CAMERA_MODEL_ESP_EYE)
    pinMode(13, INPUT_PULLUP);
    pinMode(14, INPUT_PULLUP);
#endif

    // initialize the camera
    esp_err_t err = esp_camera_init(&camera_config);
    if (err != ESP_OK) {
        Serial.printf("Camera init failed with error 0x%x\n", err);
        return false;
    }

    sensor_t * s = esp_camera_sensor_get();
    // initial sensors are flipped vertically and colors are a bit saturated
    if (s->id.PID == OV3660_PID) {
        s->set_vflip(s, 1);       // flip it back
        s->set_brightness(s, 1);  // up the brightness just a bit
        s->set_saturation(s, 0);  // lower the saturation
    }

#if defined(CAMERA_MODEL_M5STACK_WIDE)
    s->set_vflip(s, 1);
    s->set_hmirror(s, 1);
#elif defined(CAMERA_MODEL_ESP_EYE)
    s->set_vflip(s, 1);
    s->set_hmirror(s, 1);
    s->set_awb_gain(s, 1);
#endif

    is_initialised = true;
    return true;
}
/**
 * @brief Stop streaming of sensor data
 */
void ei_camera_deinit(void) {
    // deinitialize the camera
    esp_err_t err = esp_camera_deinit();

    if (err != ESP_OK) {
        ei_printf("Camera deinit failed\n");
        return;
    }

    is_initialised = false;
}
/**
 * @brief Capture, rescale and crop image
 */
bool ei_camera_capture(uint32_t img_width, uint32_t img_height, uint8_t *out_buf) {
    bool do_resize = false;

    if (!is_initialised) {
        ei_printf("ERR: Camera is not initialized\r\n");
        return false;
    }

    camera_fb_t *fb = esp_camera_fb_get();
    if (!fb) {
        ei_printf("Camera capture failed\n");
        return false;
    }

    bool converted = fmt2rgb888(fb->buf, fb->len, PIXFORMAT_JPEG, snapshot_buf);
    esp_camera_fb_return(fb);

    if (!converted) {
        ei_printf("Conversion failed\n");
        return false;
    }

    if ((img_width != EI_CAMERA_RAW_FRAME_BUFFER_COLS)
        || (img_height != EI_CAMERA_RAW_FRAME_BUFFER_ROWS)) {
        do_resize = true;
    }

    if (do_resize) {
        ei::image::processing::crop_and_interpolate_rgb888(
            out_buf,
            EI_CAMERA_RAW_FRAME_BUFFER_COLS,
            EI_CAMERA_RAW_FRAME_BUFFER_ROWS,
            out_buf,
            img_width,
            img_height);
    }

    return true;
}
static int ei_camera_get_data(size_t offset, size_t length, float *out_ptr)
{
// we already have a RGB888 buffer, so recalculate offset into pixel index
size_t pixel_ix = offset * 3;
size_t pixels_left = length;
size_t out_ptr_ix = 0;
while (pixels_left != 0) {
// Swap BGR to RGB here
// due to https://github.com/espressif/esp32-camera/issues/379
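// (added comment: each output float carries one pixel packed as 0xRRGGBB, which is the
// layout the Edge Impulse image DSP block expects)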
out_ptr[out_ptr_ix] = (snapshot_buf[pixel_ix + 2] << 16) + (snapshot_buf[pixel_ix + 1] << 8) + snapshot_buf[pixel_ix];
// go to the next pixel
out_ptr_ix++;
pixel_ix+=3;
pixels_left--;
}
// and done!
return 0;
}
#if !defined(EI_CLASSIFIER_SENSOR) || EI_CLASSIFIER_SENSOR != EI_CLASSIFIER_SENSOR_CAMERA
#error "Invalid model for current sensor"
#endif
Additional information:
The setup is powered from an external 5 V, 20 A supply.
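One more thing worth checking (my suggestion, not something verified on this project): the camera_config in the sketch places the frame buffer in PSRAM (CAMERA_FB_IN_PSRAM), so if PSRAM is not detected (wrong board selection, or PSRAM disabled in the Arduino board options) camera initialization or frame grabs can fail with the same "Camera capture failed" symptom. A quick sanity check, called from setup() before ei_camera_init(), could look like this (checkPsram is a hypothetical helper added for illustration):

void checkPsram() {
    // psramFound() and ESP.getFreePsram() come from the ESP32 Arduino core.
    if (!psramFound()) {
        Serial.println("PSRAM not found - frame buffer cannot be placed in PSRAM");
    } else {
        Serial.printf("PSRAM OK, free: %u bytes\n", ESP.getFreePsram());
    }
}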