/*
    An example sketch for Edge Impulse trained model inference for Rock Paper Scissors classification with a single light sensor
    (continuous inference)

    Copyright (c) 2021 Seeed technology co., ltd.
    Author : Dmitry Maslov
    Create Time : February 2021
    Change Log :

    The MIT License (MIT)

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
    in the Software without restriction, including without limitation the rights
    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    THE SOFTWARE.
*/

/* Includes ---------------------------------------------------------------- */
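// The first header is the Arduino library exported from the Edge Impulse Studio project
// (its exact name depends on the project name chosen at deployment time); the second is
// Seeed's FreeRTOS port for SAMD-based boards such as the Wio Terminal.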
#include <video_tinyml_raw_inference.h>
#include <Seeed_Arduino_FreeRTOS.h>

#define ERROR_LED_LIGHTUP_STATE HIGH

/* Private variables ------------------------------------------------------- */
static bool debug_nn = false; // Set this to true to see e.g. features generated from the raw signal
static uint32_t run_inference_every_ms = 250;

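// Two buffers of EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE floats: `buffer` is filled continuously
// by the sampling task, while `inference_buffer` receives a snapshot of it for classification.
// axis_num is 1 because only the single light-sensor channel is sampled.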
static float buffer[EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE] = {0};
static float inference_buffer[EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE];
uint8_t axis_num = 1;

TaskHandle_t Handle_aTask;
TaskHandle_t Handle_bTask;

/**
* @brief Arduino setup function
*/
void setup()
{
    // put your setup code here, to run once:
    Serial.begin(115200);
    Serial.println("Edge Impulse Inferencing Demo");

    if (EI_CLASSIFIER_RAW_SAMPLES_PER_FRAME != axis_num) {
        ei_printf("ERR: EI_CLASSIFIER_RAW_SAMPLES_PER_FRAME should be equal to %d (the number of sensor axes)\n", axis_num);
        return;
    }
    vSetErrorLed(LED_BUILTIN, ERROR_LED_LIGHTUP_STATE);

    // Create the tasks that will be managed by the RTOS.
    // Each call sets the task's stack size and priority,
    // and stores a task handle that can later be used to communicate with or query the task.

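    // Note: the FreeRTOS stack-depth argument is given in words rather than bytes, and the
    // inference task is created with a higher priority than the data-collection task.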
    xTaskCreate(run_inference_background, "Inference", 512, NULL, tskIDLE_PRIORITY + 2, &Handle_aTask);
    xTaskCreate(read_data, "Data collection", 256, NULL, tskIDLE_PRIORITY + 1, &Handle_bTask);

    // Start the RTOS scheduler; this call never returns and takes over scheduling of the tasks.
    vTaskStartScheduler();
}

/**
* @brief printf-style helper that formats with vsnprintf and writes the output to the Arduino Serial port
*
* @param[in] format Format string, followed by a variable argument list
*/
void ei_printf(const char *format, ...) {
    static char print_buf[1024] = { 0 };

    va_list args;
    va_start(args, format);
    int r = vsnprintf(print_buf, sizeof(print_buf), format, args);
    va_end(args);

    if (r > 0) {
        Serial.write(print_buf);
    }
}

/**
 * @brief Run inferencing in the background.
 */
static void run_inference_background(void* pvParameters)
{
    // wait until we have a full buffer
    delay((EI_CLASSIFIER_INTERVAL_MS * EI_CLASSIFIER_RAW_SAMPLE_COUNT) + 100);
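    // EI_CLASSIFIER_INTERVAL_MS * EI_CLASSIFIER_RAW_SAMPLE_COUNT is the time the sampling
    // task needs to fill one complete window; 100 ms is added as a safety margin.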

    // This structure smooths the output result.
    // With these settings at least 7 of the last 10 readings (70%) must agree,
    // with a minimum confidence of 0.8, before a label is reported.
    ei_classifier_smooth_t smooth;
    ei_classifier_smooth_init(&smooth, 10 /* no. of readings */, 7 /* min. readings the same */, 0.8 /* min. confidence */, 0.3 /* max anomaly */);

    while (1) {
        // copy the buffer
        memcpy(inference_buffer, buffer, EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE * sizeof(float));
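        // The copy is a plain memcpy with no critical section, so the sampling task may update
        // `buffer` while it runs; that race is tolerable for this demo but worth keeping in mind.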

        // Turn the raw buffer into a signal which we can then classify
        signal_t signal;
        int err = numpy::signal_from_buffer(inference_buffer, EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE, &signal);
        if (err != 0) {
            ei_printf("Failed to create signal from buffer (%d)\n", err);
            return;
        }

        // Run the classifier
        ei_impulse_result_t result = { 0 };

        err = run_classifier(&signal, &result, debug_nn);
        if (err != EI_IMPULSE_OK) {
            ei_printf("ERR: Failed to run classifier (%d)\n", err);
            return;
        }

        // print the predictions
        ei_printf("Predictions ");
        ei_printf("(DSP: %d ms., Classification: %d ms., Anomaly: %d ms.)",
            result.timing.dsp, result.timing.classification, result.timing.anomaly);
        ei_printf(": ");

        // ei_classifier_smooth_update yields the predicted label
        const char *prediction = ei_classifier_smooth_update(&smooth, &result);
        ei_printf("%s ", prediction);
        // print the cumulative results
        ei_printf(" [ ");
        for (size_t ix = 0; ix < smooth.count_size; ix++) {
            ei_printf("%u", smooth.count[ix]);
            if (ix != smooth.count_size - 1) {
                ei_printf(", ");
            }
            else {
                ei_printf(" ");
            }
        }
        ei_printf("]\n");

        delay(run_inference_every_ms);
    }

    ei_classifier_smooth_free(&smooth);
}

/**
* @brief Sample the light sensor into the rolling buffer at the interval expected by the classifier
*/
static void read_data(void* pvParameters)
{
    while (1) {
        // Determine the next tick (and then sleep later)
        uint64_t next_tick = micros() + (EI_CLASSIFIER_INTERVAL_MS * 1000);

        // roll the buffer -1 point so we can overwrite the oldest sample with a fresh reading
        numpy::roll(buffer, EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE, -1);

        buffer[EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE - 1] = analogRead(WIO_LIGHT);
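        // WIO_LIGHT is the Wio Terminal's built-in light sensor; analogRead() returns the raw
        // ADC value (typically 0-1023 at the default 10-bit resolution), so the model is assumed
        // to have been trained on data captured at the same scale.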

        // and wait for the next tick
        uint64_t time_to_wait = next_tick - micros();
        delay((int)floor((float)time_to_wait / 1000.0f));
        delayMicroseconds(time_to_wait % 1000);
    }
}

void loop()
{
    // nothing to do here; all the work is done in the two RTOS tasks
}