C++ opencv4 ANN (neural network) examples
Source: chatgpt/gemini
ChatGPT code
#include <opencv2/opencv.hpp>
#include <opencv2/ml.hpp>
#include <iostream>
#include <vector>
using namespace cv;
using namespace cv::ml;
using namespace std;
int main() {
    // Create training data
    // Four 2-D samples (the XOR truth-table inputs); 2 features each for simplicity
    Mat trainData = (Mat_<float>(4, 2) << 0, 0,
                                          0, 1,
                                          1, 0,
                                          1, 1);
    // Labels for each training sample (XOR problem).
    // ANN_MLP requires floating-point responses, one column per output neuron.
    Mat labels = (Mat_<float>(4, 1) << 0, 1, 1, 0);
    // Create and configure the ANN
    Ptr<ANN_MLP> ann = ANN_MLP::create();
    // Network architecture: input layer (2), hidden layer (3), output layer (1)
    vector<int> layerSizes = {2, 3, 1}; // 2 inputs, 3 hidden neurons, 1 output
    ann->setLayerSizes(layerSizes);
    ann->setActivationFunction(ANN_MLP::SIGMOID_SYM, 1, 1); // symmetric sigmoid (tanh-like)
    ann->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 1000, 1e-6)); // stop after 1000 iterations or when the error change is tiny
    ann->setTrainMethod(ANN_MLP::BACKPROP, 0.1); // backpropagation with learning rate 0.1
    // Train the ANN
    ann->train(trainData, ROW_SAMPLE, labels);
    // Test the network (XOR of both test inputs is 0, so both outputs should be near 0)
    Mat testData = (Mat_<float>(2, 2) << 0, 0,
                                         1, 1);
    Mat results;
    ann->predict(testData, results);
    // Print the raw network outputs
    cout << "Prediction for (0,0): " << results.at<float>(0, 0) << endl;
    cout << "Prediction for (1,1): " << results.at<float>(1, 0) << endl;
    return 0;
}
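Note that predict() returns the raw activation of the single output neuron, not a hard class. Below is a minimal sketch of turning those raw values into 0/1 labels; the toLabel helper and the 0.5 threshold are illustrative assumptions, not part of the OpenCV API.

#include <opencv2/opencv.hpp>
#include <iostream>

// Sketch: map the raw output of a 1-neuron ANN_MLP (trained on 0/1 float targets)
// to a hard class label. toLabel and the 0.5 threshold are illustrative choices.
static int toLabel(const cv::Mat& results, int row) {
    float raw = results.at<float>(row, 0); // raw activation of the output neuron
    return raw > 0.5f ? 1 : 0;             // halfway between the 0 and 1 targets
}

// Usage, right after ann->predict(testData, results) in the example above:
//   for (int i = 0; i < results.rows; ++i)
//       std::cout << "sample " << i << " -> class " << toLabel(results, i) << std::endl;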
Gemini code
#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>
#include <opencv2/ml.hpp>
using namespace cv;
using namespace cv::ml;
using namespace std;
int main() {
    // 1. Prepare the training data
    // Assume we have 4 two-dimensional data points and their labels (0 or 1), i.e. the XOR pattern.
    Mat trainingData = (Mat_<float>(4, 2) << 0, 0,
                                             0, 1,
                                             1, 0,
                                             1, 1);
    // ANN_MLP needs floating-point responses, one column per output neuron.
    Mat labels = (Mat_<float>(4, 1) << 0,
                                       1,
                                       1,
                                       0);
    // 2. Build the ANN model
    // Define the number of layers and the number of neurons per layer:
    // an input layer (2 neurons), one hidden layer (3 neurons), and an output layer (1 neuron).
    Ptr<ANN_MLP> ann = ANN_MLP::create();
    Mat layerSizes = (Mat_<int>(1, 3) << 2, 3, 1);
    ann->setLayerSizes(layerSizes);
    ann->setActivationFunction(ANN_MLP::SIGMOID_SYM); // symmetric sigmoid activation
    ann->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 1000, 1e-6)); // termination criteria
    ann->setTrainMethod(ANN_MLP::BACKPROP); // backpropagation
    // 3. Train the ANN model
    cout << "Training started..." << endl;
    ann->train(trainingData, ROW_SAMPLE, labels);
    cout << "Training finished!" << endl;

    // 4. Use the trained model for prediction
    cout << "\nPredictions:" << endl;
    Mat testData = (Mat_<float>(2, 2) << 0.5, 0.5,
                                         1.5, 0.5);
    Mat predictedLabels;
    ann->predict(testData, predictedLabels);
    for (int i = 0; i < predictedLabels.rows; ++i) {
        cout << "Input: (" << testData.at<float>(i, 0) << ", " << testData.at<float>(i, 1)
             << ") -> predicted output: " << predictedLabels.at<float>(i, 0) << endl;
    }
    // 5. (Optional) Save the trained model
    ann->save("trained_ann.xml");
    cout << "\nModel saved as trained_ann.xml" << endl;

    // 6. (Optional) Load the saved model back
    Ptr<ANN_MLP> loadedAnn = ANN_MLP::load("trained_ann.xml");
    cout << "\nPredictions from the loaded model:" << endl;
    Mat loadedPredictedLabels;
    loadedAnn->predict(testData, loadedPredictedLabels);
    for (int i = 0; i < loadedPredictedLabels.rows; ++i) {
        cout << "Input: (" << testData.at<float>(i, 0) << ", " << testData.at<float>(i, 1)
             << ") -> predicted output: " << loadedPredictedLabels.at<float>(i, 0) << endl;
    }
    return 0;
}
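Both examples use a single float response column because there is a single output neuron. For more than two classes, ANN_MLP still expects CV_32F responses with one column per output neuron, which usually means one-hot targets. The sketch below illustrates that layout with a made-up 3-class, 2-5-3 network; the sample values and layer sizes are assumptions for illustration, not taken from the examples above.

#include <opencv2/opencv.hpp>
#include <opencv2/ml.hpp>
#include <iostream>
using namespace cv;
using namespace cv::ml;
using namespace std;

int main() {
    // Three made-up 2-D samples, one per class (illustration only).
    Mat samples = (Mat_<float>(3, 2) << 0.1f, 0.2f,
                                        0.9f, 0.8f,
                                        0.5f, 0.9f);
    // One-hot float targets: row i has a 1 in the column of sample i's class.
    Mat targets = (Mat_<float>(3, 3) << 1, 0, 0,
                                        0, 1, 0,
                                        0, 0, 1);

    Ptr<ANN_MLP> ann = ANN_MLP::create();
    Mat layerSizes = (Mat_<int>(1, 3) << 2, 5, 3); // 2 inputs, 5 hidden, 3 outputs
    ann->setLayerSizes(layerSizes);
    ann->setActivationFunction(ANN_MLP::SIGMOID_SYM);
    ann->setTrainMethod(ANN_MLP::BACKPROP);
    ann->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 1000, 1e-6));
    ann->train(samples, ROW_SAMPLE, targets);

    // predict() fills one row of 3 outputs per sample; the predicted class is
    // the column with the largest activation (argmax).
    Mat out;
    ann->predict(samples, out);
    for (int i = 0; i < out.rows; ++i) {
        Point maxLoc;
        minMaxLoc(out.row(i), nullptr, nullptr, nullptr, &maxLoc);
        cout << "sample " << i << " -> class " << maxLoc.x << endl;
    }
    return 0;
}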