Question/Issue:
I followed your tutorial on ‘Continuous motion recognition’. Everything worked fine and it recognizes the right results when I run it in the browser. I deployed the build, added it via ‘Add .ZIP Library’ in the Arduino IDE, and when I run it there it only recognizes two gestures. When I move the board it says ‘acht’ (I am doing a figure-eight motion), and when it is sitting on my desk it correctly recognizes ‘idle’. The problem: no matter which motion I do, it is only recognized as ‘acht’. I have two more gestures that always come out at 0.00 to 0.03. Additionally, the anomaly factor is quite high.
I have tried different microcontrollers (ESP32 and Arduino Nano ESP32) but the same accelerometer, an MPU-6050.
I have asked multiple people, but we can’t find a mistake in my Arduino code, and the motion recognition in the browser works fine.
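In case it helps with diagnosis: one check I could add to setup() is printing what the generated library expects versus what my sketch is configured to do. This is only a rough sketch; print_model_settings() is a helper name I made up, EI_CLASSIFIER_FREQUENCY and EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE come from the generated library, and FREQUENCY_HZ is the define from my sketch below:

// sanity check (sketch only): compare the model's expected sampling settings
// with the values hard-coded in my sketch
void print_model_settings() {
    Serial.print("EI_CLASSIFIER_FREQUENCY: ");
    Serial.println(EI_CLASSIFIER_FREQUENCY);            // sampling rate the impulse was trained at
    Serial.print("EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE: ");
    Serial.println(EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE); // raw values per classification window
    Serial.print("Sketch FREQUENCY_HZ: ");
    Serial.println(FREQUENCY_HZ);                       // what my loop() actually samples at
}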
Project ID:
504353
Additional Information:
This is my Arduino code:
#include <Naezz-project-1_inferencing.h>
#include <Arduino.h>
#include <Adafruit_MPU6050.h>
#include <Adafruit_Sensor.h>
#include <Wire.h>
Adafruit_MPU6050 mpu;
#define CONVERT_G_TO_MS2 9.80665f
#define FREQUENCY_HZ 94 /*EI_CLASSIFIER_FREQUENCY*/
#define INTERVAL_MS (1000 / (FREQUENCY_HZ + 1))
static unsigned long last_interval_ms = 0;
// to classify 1 frame of data you need EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE values
float features[EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE];
// keep track of where we are in the feature array
size_t feature_ix = 0;
void setup() {
    Serial.begin(115200);
    Serial.println("Started");

    if (!mpu.begin()) {
        Serial.println("Failed to initialize IMU!");
        while (1);
    }

    mpu.setAccelerometerRange(MPU6050_RANGE_2_G);
    mpu.setGyroRange(MPU6050_RANGE_250_DEG);
    mpu.setFilterBandwidth(MPU6050_BAND_21_HZ);
    Serial.println("MPU6050 ready.");
}
void loop() {
    //float x, y, z;
    if (millis() > last_interval_ms + INTERVAL_MS) {
        last_interval_ms = millis();

        // read sensor data in exactly the same way as in the Data Forwarder example
        //IMU.readAcceleration(x, y, z);
        sensors_event_t a, g, temp;
        mpu.getEvent(&a, &g, &temp);

        // fill the features buffer
        features[feature_ix++] = a.acceleration.x * CONVERT_G_TO_MS2;
        features[feature_ix++] = a.acceleration.y * CONVERT_G_TO_MS2;
        features[feature_ix++] = a.acceleration.z * CONVERT_G_TO_MS2;

        // features buffer full? then classify!
        if (feature_ix == EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE) {
            ei_impulse_result_t result;

            // create signal from features frame
            signal_t signal;
            numpy::signal_from_buffer(features, EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE, &signal);

            // run classifier
            EI_IMPULSE_ERROR res = run_classifier(&signal, &result, false);
            ei_printf("run_classifier returned: %d\n", res);
            if (res != 0) return;

            // print the predictions
            ei_printf("Predictions (DSP: %d ms., Classification: %d ms., Anomaly: %d ms.): \n",
                      result.timing.dsp, result.timing.classification, result.timing.anomaly);
            for (size_t ix = 0; ix < EI_CLASSIFIER_LABEL_COUNT; ix++) {
                ei_printf("%s:\t%.5f\n", result.classification[ix].label, result.classification[ix].value);
            }
#if EI_CLASSIFIER_HAS_ANOMALY == 1
            ei_printf("anomaly:\t%.3f\n", result.anomaly);
#endif

            // reset features frame
            feature_ix = 0;
        }
    }
}
void ei_printf(const char *format, ...) {
    static char print_buf[1024] = { 0 };

    va_list args;
    va_start(args, format);
    int r = vsnprintf(print_buf, sizeof(print_buf), format, args);
    va_end(args);

    if (r > 0) {
        Serial.write(print_buf);
    }
}
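If it would help, I could also dump the raw feature buffer just before run_classifier() so the values can be compared with what I see in the Studio when classifying in the browser. A rough sketch of what I mean; print_features_csv() is just a helper I made up, it only uses the features buffer and ei_printf() already in my sketch:

// debug helper (sketch only): print the raw feature buffer as comma-separated
// values so the same window can be compared against the browser classification
void print_features_csv() {
    for (size_t ix = 0; ix < EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE; ix++) {
        ei_printf("%.4f", features[ix]);
        if (ix < EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE - 1) {
            ei_printf(", ");
        }
    }
    ei_printf("\n");
}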