diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 9a7f176..7633a12 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -8,3 +8,9 @@ add_subdirectory(ExternalInterfaceLib)
 add_subdirectory(Identity)
 add_subdirectory(Dahlquist)
 add_subdirectory(mass_spring_damper)
+
+find_package(OpenCV QUIET)
+if (OpenCV_FOUND)
+    message("OpenCV found, building OpenCV example(s)...")
+    add_subdirectory(opencv)
+endif ()
diff --git a/examples/opencv/CMakeLists.txt b/examples/opencv/CMakeLists.txt
new file mode 100644
index 0000000..32f48ef
--- /dev/null
+++ b/examples/opencv/CMakeLists.txt
@@ -0,0 +1,9 @@
+
+generateFMU(onnx_tracker
+        SOURCES "tracking.cpp"
+        RESOURCE_FOLDER
+        "${CMAKE_CURRENT_SOURCE_DIR}/resources"
+        LINK_TARGETS ${OpenCV_LIBRARIES}
+        FMI_VERSIONS
+        "fmi3"
+)
diff --git a/examples/opencv/Lenna_(test_image).png b/examples/opencv/Lenna_(test_image).png
new file mode 100644
index 0000000..59ef68a
Binary files /dev/null and b/examples/opencv/Lenna_(test_image).png differ
diff --git a/examples/opencv/readme.txt b/examples/opencv/readme.txt
new file mode 100644
index 0000000..9301a44
--- /dev/null
+++ b/examples/opencv/readme.txt
@@ -0,0 +1 @@
+This FMU must be built with dynamic MSVC runtime.
\ No newline at end of file
diff --git a/examples/opencv/resources/yolo11n.onnx b/examples/opencv/resources/yolo11n.onnx
new file mode 100644
index 0000000..43dd51f
Binary files /dev/null and b/examples/opencv/resources/yolo11n.onnx differ
diff --git a/examples/opencv/run_tracking.py b/examples/opencv/run_tracking.py
new file mode 100644
index 0000000..f791c96
--- /dev/null
+++ b/examples/opencv/run_tracking.py
@@ -0,0 +1,35 @@
+
+from ecospy import *
+
+from pathlib import Path
+from time import sleep
+
+def read_png_file_to_bytes(path: str) -> bytes:
+    with open(path, "rb") as f:
+        return f.read()
+
+def main():
+    print(f"Ecoslib version: {EcosLib.version()}")
+
+    EcosLib.set_log_level("debug")
+
+    fmu_path = str((Path(__file__).parent.parent.parent / 'cmake-build-debug' / 'models' / 'fmi3' / 'onnx_tracker' / 'onnx_tracker.fmu').resolve())
+    image = read_png_file_to_bytes("Lenna_(test_image).png")
+
+    print(f"image size: {len(image)} bytes")
+
+    with EcosSimulationStructure() as ss:
+        ss.add_model("model", fmu_path)
+
+        with(EcosSimulation(structure=ss, step_size=1/100)) as sim:
+
+            sim.init()
+            sim.set_binary("model::blob", image)
+            sim.step()
+            sleep(1)
+            sim.terminate()
+
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/opencv/tracking.cpp b/examples/opencv/tracking.cpp
new file mode 100644
index 0000000..daddd51
--- /dev/null
+++ b/examples/opencv/tracking.cpp
@@ -0,0 +1,206 @@
+
+#include "fmu4cpp/fmu_base.hpp"
+
+
+#include <iomanip>
+#include <iostream>
+
+#include <opencv2/dnn.hpp>
+#include <opencv2/opencv.hpp>
+
+const std::vector<std::string> &coco_names();
+
+class BoxDrawer {
+public:
+    BoxDrawer(double confThreshold = 0.5, double nmsThreshold = 0.4)
+        : confThreshold(confThreshold),
+          nmsThreshold(nmsThreshold),
+          classNames(coco_names()) {
+    }
+
+    void setConfThreshold(double confThreshold) {
+        this->confThreshold = confThreshold;
+    }
+
+    void draw(cv::Mat &frame, const std::vector<int> &classIds,
+              const std::vector<float> &confidences,
+              const std::vector<cv::Rect> &boxes) const {
+        // Non-maximum suppression to remove redundant overlapping boxes
+        std::vector<int> indices;
+        cv::dnn::NMSBoxes(boxes, confidences, confThreshold, nmsThreshold, indices);
+
+        for (const int idx: indices) {
+            const cv::Rect &box = boxes[idx];
+            drawPred(classIds[idx], confidences[idx], box.x, box.y, box.x + box.width, box.y + box.height, frame,
+                     classNames);
+        }
+    }
+
+private:
+    float confThreshold;
+    float nmsThreshold;
+    std::vector<std::string> classNames;
+
+    static void drawPred(int classId, float conf, int left, int top, int right, int bottom, cv::Mat &frame,
+                         const std::vector<std::string> &classNames) {
+        rectangle(frame, cv::Point(left, top), cv::Point(right, bottom), cv::Scalar(255, 178, 50), 3);
+
+        std::ostringstream oss;
+        oss << std::fixed << std::setprecision(2) << conf;
+        std::string label = oss.str();
+        if (!classNames.empty()) {
+            CV_Assert(classId < classNames.size());
+            label = classNames[classId] + ": " + label;
+        }
+
+        int baseLine;
+        const auto labelSize = getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);
+        top = cv::max(top, labelSize.height);
+        rectangle(frame, cv::Point(left, top - labelSize.height),
+                  cv::Point(left + labelSize.width, top + baseLine), cv::Scalar::all(255), cv::FILLED);
+        putText(frame, label, cv::Point(left, top), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 0), 1);
+    }
+};
+
+class Tracking : public fmu4cpp::fmu_base {
+public:
+    FMU4CPP_CTOR(Tracking) {
+
+        register_binary("blob", &binary_).setCausality(fmu4cpp::causality_t::INPUT);
+
+        register_real("confThreshold", &confThreshold)
+                .setCausality(fmu4cpp::causality_t::PARAMETER)
+                .setVariability(fmu4cpp::variability_t::TUNABLE)
+                .setMin(0.0)
+                .setMax(1.0);
+
+        Tracking::reset();
+    }
+
+    void exit_initialisation_mode() override {
+        const auto onnxPath = resourceLocation() / "yolo11n.onnx";
+
+        if (!std::filesystem::exists(onnxPath)) {
+            throw std::runtime_error("ONNX model file not found: " + onnxPath.string());
+        }
+
+        net_ = cv::dnn::readNetFromONNX(onnxPath.string());
+
+        net_.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);
+        net_.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);
+    }
+
+    bool do_step(double) override {
+
+        static cv::Size size(640, 640);
+
+        std::cout << "Running inference..." << std::endl;
+        std::cout << binary_.size() << " bytes received." << std::endl;
+
+        cv::Mat frame, blob;
+        if (!binary_.empty()) {
+            frame = cv::imdecode(binary_, cv::IMREAD_COLOR);
+
+            int height = frame.rows;
+            int width = frame.cols;
+            std::cout << "Frame size: " << width << "x" << height << std::endl;
+            int length = std::max(height, width);
+            cv::Mat image = cv::Mat::zeros(length, length, CV_8UC3);
+            frame.copyTo(image(cv::Rect(0, 0, width, height)));
+
+            // Create a 4D blob from the frame
+            cv::dnn::blobFromImage(image, blob, 1 / 255.0, size, {}, true, false);
+            net_.setInput(blob);
+
+            // Run forward pass
+            std::vector<cv::Mat> outs;
+            net_.forward(outs);
+
+            // Extract the first output and reshape if necessary
+            cv::Mat output = outs.front();
+
+            // If output is not 2D, reshape it (e.g., for YOLO, it may be 3D: [1, N, M])
+            if (output.dims > 2) {
+                output = output.reshape(1, output.size[1]);// Flatten to 2D if required
+            }
+
+            // Transpose if needed (verify first)
+            const cv::Mat& transposedOutput = output.t();// Transpose output for processing
+            int rows = transposedOutput.rows;
+
+            // Post-process detections
+            std::vector<int> classIds;
+            std::vector<float> confidences;
+            std::vector<cv::Rect> boxes;
+
+            for (int i = 0; i < rows; ++i) {
+                cv::Mat scores = transposedOutput.row(i).colRange(4, transposedOutput.cols);
+                cv::Point classIdPoint;
+                double confidence;
+                minMaxLoc(scores, nullptr, &confidence, nullptr, &classIdPoint);
+                if (confidence > confThreshold) {
+                    // Extract bounding box coordinates
+                    float centerX = transposedOutput.at<float>(i, 0);
+                    float centerY = transposedOutput.at<float>(i, 1);
+                    float width = transposedOutput.at<float>(i, 2);
+                    float height = transposedOutput.at<float>(i, 3);
+
+                    // Calculate top-left corner of the bounding box
+                    int x = static_cast<int>(centerX - 0.5 * width);
+                    int y = static_cast<int>(centerY - 0.5 * height);
+                    int w = static_cast<int>(width);
+                    int h = static_cast<int>(height);
+
+                    classIds.push_back(classIdPoint.x);
+                    confidences.emplace_back(static_cast<float>(confidence));
+                    boxes.emplace_back(x, y, w, h);
+                }
+            }
+
+            drawer.setConfThreshold(confThreshold);
+            drawer.draw(frame, classIds, confidences, boxes);
+            imshow(windowName_, frame);
+
+            cv::waitKey(1);
+        }
+
+        return true;
+    }
+
+    void reset() override {
+        // do nothing
+    }
+
+private:
+    cv::dnn::Net net_;
+
+    std::string windowName_ = "Tracking";
+    std::vector<std::uint8_t> binary_;
+
+    double confThreshold = 0.5;
+    BoxDrawer drawer{};
+};
+
+const std::vector<std::string> &coco_names() {
+    static std::vector<std::string> names = {
+            "person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat", "traffic light",
+            "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
+            "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
+            "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard",
+            "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl",
+            "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake",
+            "chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse",
+            "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator",
+            "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};
+    return names;
+}
+
+
+fmu4cpp::model_info fmu4cpp::get_model_info() {
+    model_info info;
+    info.modelName = "Tracking";
+    info.description = "A tracking model using ONNX and OpenCV";
+    return info;
+}
+
+
+FMU4CPP_INSTANTIATE(Tracking);
\ No newline at end of file