diff --git a/README.md b/README.md
index 2219a68..f9ae57b 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-
+
diff --git a/conf/facerec/common_capturer_pb_uld_fda.xml b/conf/facerec/common_capturer_pb_uld_fda.xml
new file mode 100644
index 0000000..27b0303
--- /dev/null
+++ b/conf/facerec/common_capturer_pb_uld_fda.xml
@@ -0,0 +1,23 @@
+
+
+
+fld_capturer
+
+pb_fd
+uld
+
+150
+1
+
+0.2
+0.3
+0.7
+
+frontal
+fda
+share/fda/fda0017.3500.enc
+
+0
+conf/facerec/iris_fitter.xml
+
+
diff --git a/conf/facerec/common_capturer_ssyv_fda_v2.xml b/conf/facerec/common_capturer_ssyv_fda_v2.xml
index 98a276c..1c62e98 100644
--- a/conf/facerec/common_capturer_ssyv_fda_v2.xml
+++ b/conf/facerec/common_capturer_ssyv_fda_v2.xml
@@ -3,8 +3,8 @@
fld_capturer
-
ssyv
-
+
pb_fd
+
ssyv
2
0.4
@@ -14,4 +14,7 @@
fda
share/fda/fda0017.3500.enc
+
0
+
conf/facerec/iris_fitter.xml
+
diff --git a/conf/facerec/common_capturer_ssyv_fda_v3.xml b/conf/facerec/common_capturer_ssyv_fda_v3.xml
new file mode 100644
index 0000000..e29fa06
--- /dev/null
+++ b/conf/facerec/common_capturer_ssyv_fda_v3.xml
@@ -0,0 +1,20 @@
+
+
+
+fld_capturer
+
+pb_fd
+ssyv
+3
+
+0.4
+0.45
+
+frontal
+fda
+share/fda/fda0017.3500.enc
+
+0
+conf/facerec/iris_fitter.xml
+
+
diff --git a/conf/facerec/common_capturer_ssyv_fda_v5.xml b/conf/facerec/common_capturer_ssyv_fda_v5.xml
index f502861..7ccb106 100644
--- a/conf/facerec/common_capturer_ssyv_fda_v5.xml
+++ b/conf/facerec/common_capturer_ssyv_fda_v5.xml
@@ -3,9 +3,9 @@
fld_capturer
-
ssyv
-
-
3
+
pb_fd
+
ssyv
+
4
0.4
0.45
@@ -14,4 +14,7 @@
fda
share/fda/fda0017.3500.enc
+
0
+
conf/facerec/iris_fitter.xml
+
diff --git a/conf/facerec/safety_city_q1.xml b/conf/facerec/safety_city_q1.xml
index f502861..a9e4003 100644
--- a/conf/facerec/safety_city_q1.xml
+++ b/conf/facerec/safety_city_q1.xml
@@ -3,9 +3,9 @@
fld_capturer
-
ssyv
-
-
3
+
pb_fd
+
ssyv
+
4
0.4
0.45
diff --git a/conf/facerec/safety_city_q2.xml b/conf/facerec/safety_city_q2.xml
index 12d462c..374ae91 100644
--- a/conf/facerec/safety_city_q2.xml
+++ b/conf/facerec/safety_city_q2.xml
@@ -2,36 +2,16 @@
fld_capturer
-frontal
-
-uld
-50
-10000
-1
-uld-part1
-share/facedetectors/uld/uld_part1.enc
-uld-part2
-share/facedetectors/uld/uld_part2.enc
-uld-part3
-share/facedetectors/uld/uld_part3.enc
+pb_fd
+ssyv
+3
-0.3
-0.2
-0.6
-1.0
-
-0
-0
-0
+0.4
+0.45
+frontal
fda
share/fda/fda0017.3500.enc
-0
-conf/facerec/iris_fitter.xml
-
-
-1
-
diff --git a/conf/facerec/safety_city_q3.xml b/conf/facerec/safety_city_q3.xml
new file mode 100644
index 0000000..12d462c
--- /dev/null
+++ b/conf/facerec/safety_city_q3.xml
@@ -0,0 +1,37 @@
+
+
+
+fld_capturer
+frontal
+
+uld
+50
+10000
+1
+
+uld-part1
+share/facedetectors/uld/uld_part1.enc
+uld-part2
+share/facedetectors/uld/uld_part2.enc
+uld-part3
+share/facedetectors/uld/uld_part3.enc
+
+0.3
+0.2
+0.6
+1.0
+
+0
+0
+0
+
+fda
+share/fda/fda0017.3500.enc
+
+0
+conf/facerec/iris_fitter.xml
+
+
+1
+
+
diff --git a/conf/facerec/video_worker_fdatracker_pb_uld_fda.xml b/conf/facerec/video_worker_fdatracker_pb_uld_fda.xml
new file mode 100644
index 0000000..7ac9311
--- /dev/null
+++ b/conf/facerec/video_worker_fdatracker_pb_uld_fda.xml
@@ -0,0 +1,106 @@
+
+
+
+libface_video_worker
+
+fdatracker
+
+
+
+share/fda/fda0017.3500.enc
+
+5
+30
+0.7
+0.2
+
+
+0
+conf/facerec/iris_fitter.xml
+
+0.1
+0
+8
+640
+640
+
+
+pb_fd
+uld
+150
+1
+
+0.2
+0.3
+0.7
+
+0
+0
+0
+
+0
+conf/facerec/depth_liveness_estimator_cnn.xml
+5000
+150
+3
+
+share/face_quality/fda_fqm.bin
+
+2
+
+ -40
+ 40
+
+ -40
+ 40
+
+5
+
+3
+
+10
+
+32
+32
+0.20
+
+0
+
+0
+
+0
+
+1
+
+
+10000
+1000
+
+0
+
+0
+
+0
+
+
+
+0
+
+0
+
+1
+
+-1
+-1
+-1
+
+0
+
+
+conf/facerec/age_gender_estimator.xml
+conf/facerec/emotions_estimator.xml
+
+0
+conf/facerec/active_liveness_estimator.xml
+
+
diff --git a/conf/facerec/video_worker_fdatracker_ssyv_fda_v2.xml b/conf/facerec/video_worker_fdatracker_ssyv_fda_v2.xml
new file mode 100644
index 0000000..b4d59eb
--- /dev/null
+++ b/conf/facerec/video_worker_fdatracker_ssyv_fda_v2.xml
@@ -0,0 +1,105 @@
+
+
+
+libface_video_worker
+
+fdatracker
+
+
+
+share/fda/fda0017.3500.enc
+
+5
+30
+0.7
+0.2
+
+
+0
+conf/facerec/iris_fitter.xml
+
+0.1
+0
+8
+640
+640
+
+
+pb_fd
+ssyv
+2
+1
+
+0.45
+0.4
+
+0
+0
+0
+
+0
+conf/facerec/depth_liveness_estimator_cnn.xml
+5000
+150
+3
+
+share/face_quality/fda_fqm.bin
+
+2
+
+ -40
+ 40
+
+ -40
+ 40
+
+5
+
+3
+
+10
+
+32
+32
+0.20
+
+0
+
+0
+
+0
+
+1
+
+
+10000
+1000
+
+0
+
+0
+
+0
+
+
+
+0
+
+0
+
+1
+
+-1
+-1
+-1
+
+0
+
+
+conf/facerec/age_gender_estimator.xml
+conf/facerec/emotions_estimator.xml
+
+0
+conf/facerec/active_liveness_estimator.xml
+
+
diff --git a/conf/facerec/video_worker_fdatracker_ssyv_fda_v3.xml b/conf/facerec/video_worker_fdatracker_ssyv_fda_v3.xml
new file mode 100644
index 0000000..5a09938
--- /dev/null
+++ b/conf/facerec/video_worker_fdatracker_ssyv_fda_v3.xml
@@ -0,0 +1,105 @@
+
+
+
+libface_video_worker
+
+fdatracker
+
+
+
+share/fda/fda0017.3500.enc
+
+5
+30
+0.7
+0.2
+
+
+0
+conf/facerec/iris_fitter.xml
+
+0.1
+0
+8
+640
+640
+
+
+pb_fd
+ssyv
+3
+1
+
+0.45
+0.4
+
+0
+0
+0
+
+0
+conf/facerec/depth_liveness_estimator_cnn.xml
+5000
+150
+3
+
+share/face_quality/fda_fqm.bin
+
+2
+
+ -40
+ 40
+
+ -40
+ 40
+
+5
+
+3
+
+10
+
+32
+32
+0.20
+
+0
+
+0
+
+0
+
+1
+
+
+10000
+1000
+
+0
+
+0
+
+0
+
+
+
+0
+
+0
+
+1
+
+-1
+-1
+-1
+
+0
+
+
+conf/facerec/age_gender_estimator.xml
+conf/facerec/emotions_estimator.xml
+
+0
+conf/facerec/active_liveness_estimator.xml
+
+
diff --git a/conf/facerec/video_worker_fdatracker_ssyv_fda_v5.xml b/conf/facerec/video_worker_fdatracker_ssyv_fda_v5.xml
new file mode 100644
index 0000000..5fbf4a7
--- /dev/null
+++ b/conf/facerec/video_worker_fdatracker_ssyv_fda_v5.xml
@@ -0,0 +1,105 @@
+
+
+
+libface_video_worker
+
+fdatracker
+
+
+
+share/fda/fda0017.3500.enc
+
+5
+30
+0.7
+0.2
+
+
+0
+conf/facerec/iris_fitter.xml
+
+0.1
+0
+8
+640
+640
+
+
+pb_fd
+ssyv
+4
+1
+
+0.45
+0.4
+
+0
+0
+0
+
+0
+conf/facerec/depth_liveness_estimator_cnn.xml
+5000
+150
+3
+
+share/face_quality/fda_fqm.bin
+
+2
+
+ -40
+ 40
+
+ -40
+ 40
+
+5
+
+3
+
+10
+
+32
+32
+0.20
+
+0
+
+0
+
+0
+
+1
+
+
+10000
+1000
+
+0
+
+0
+
+0
+
+
+
+0
+
+0
+
+1
+
+-1
+-1
+-1
+
+0
+
+
+conf/facerec/age_gender_estimator.xml
+conf/facerec/emotions_estimator.xml
+
+0
+conf/facerec/active_liveness_estimator.xml
+
+
diff --git a/examples/android/demo/build.gradle b/examples/android/demo/build.gradle
index ebdf9f2..28771bc 100644
--- a/examples/android/demo/build.gradle
+++ b/examples/android/demo/build.gradle
@@ -50,7 +50,8 @@ android {
"share/quality_iso/**",
"share/face_quality/**",
"share/faceanalysis/**",
- "share/processing_block/**",
+ "share/processing_block/age_estimator/**",
+ "share/processing_block/gender_estimator/**",
"share/age_second/**",
"share/liveness_2d_v2/**",
"share/faceattributes/**",
diff --git a/examples/android/telpo_liveness_video_recognition/src/main/res/layout/about.xml b/examples/android/telpo_liveness_video_recognition/src/main/res/layout/about.xml
index 29105ee..89f67ec 100644
--- a/examples/android/telpo_liveness_video_recognition/src/main/res/layout/about.xml
+++ b/examples/android/telpo_liveness_video_recognition/src/main/res/layout/about.xml
@@ -16,7 +16,7 @@
android:id="@+id/aboutText1"
android:layout_width="match_parent"
android:layout_height="wrap_content"
- android:text="Video recognition with depth and IR liveness demo for Telpo (v 0.2)\n\nFace SDK version 3.19.1\n"
+ android:text="Video recognition with depth and IR liveness demo for Telpo (v 0.2)\n\nFace SDK version 3.20.0\n"
android:layout_below="@+id/aboutLogo"
/>
diff --git a/examples/cpp/CMakeLists.txt b/examples/cpp/CMakeLists.txt
index 52a9c0b..7f85d77 100644
--- a/examples/cpp/CMakeLists.txt
+++ b/examples/cpp/CMakeLists.txt
@@ -17,4 +17,7 @@ add_subdirectory(test_calibration)
add_subdirectory(demo)
add_subdirectory(video_recognition_demo)
add_subdirectory(test_videocap)
-add_subdirectory(processing_block)
+
+if(NOT WITHOUT_PROCESSING_BLOCK)
+ add_subdirectory(processing_block)
+endif()
diff --git a/examples/cpp/processing_block/processing_block.cpp b/examples/cpp/processing_block/processing_block.cpp
index b7844b7..3fb68c4 100644
--- a/examples/cpp/processing_block/processing_block.cpp
+++ b/examples/cpp/processing_block/processing_block.cpp
@@ -101,7 +101,7 @@ void drawFaceKeypoint(const pbio::Context& data, cv::Mat& image)
{
for(const auto& point : obj.at("keypoints").at("points"))
{
- cv::circle(image, cv::Point2f(point["x"].getDouble() * image.size[1], point["y"].getDouble() * image.size[0]), 2, {0, 255, 0}, 5);
+ cv::circle(image, cv::Point2f(point["proj"][0].getDouble() * image.size[1], point["proj"][1].getDouble() * image.size[0]), 2, {0, 255, 0}, 5);
}
}
}
@@ -322,10 +322,9 @@ int main(int argc, char **argv)
configCtx["facerec_conf_dir"] = sdk_dir + "/conf/facerec/";
}
-
pbio::ProcessingBlock processingBlock = service->createProcessingBlock(configCtx);
- if(unit_type == "quality" || (unit_type == "liveness" && modification == "v4"))
+ if(unit_type == "quality" || (unit_type == "liveness" && modification == "2d"))
{
// create capturer
const pbio::Capturer::Ptr capturer = service->createCapturer("common_capturer_refa_fda_a.xml");
@@ -347,14 +346,21 @@ int main(int argc, char **argv)
{
auto faceCtx = service->createContext();
faceCtx["unit_type"] = unitTypes.at("face");
- faceCtx["ONNXRuntime"]["library_path"] = lib_dir;
- faceCtx["use_cuda"] = use_cuda;
- faceCtx["confidence_threshold"] = 0.4;
+ faceCtx["version"] = static_cast
(2);
pbio::ProcessingBlock faceBlock = service->createProcessingBlock(faceCtx);
auto faceImageCtx = ioData["image"];
pbio::context_utils::putImage(faceImageCtx, input_rawimg);
faceBlock(ioData);
+
+ if (unit_type.compare("face_keypoint"))
+ {
+ auto fitterCtx = service->createContext();
+ fitterCtx["unit_type"] = unitTypes.at("face_keypoint");
+ pbio::ProcessingBlock fitterBlock = service->createProcessingBlock(fitterCtx);
+ fitterBlock(ioData);
+ }
+
processingBlock(ioData);
}
else // just put the whole image to the Context
diff --git a/examples/cpp/processing_block_recognition_demo/main.cpp b/examples/cpp/processing_block_recognition_demo/main.cpp
index 15f1eb9..ef3c2ed 100644
--- a/examples/cpp/processing_block_recognition_demo/main.cpp
+++ b/examples/cpp/processing_block_recognition_demo/main.cpp
@@ -15,7 +15,7 @@ using Context = api::Context;
#include "../console_arguments_parser/ConsoleArgumentsParser.h"
-void recognitionSample(std::string sdk_path, std::string input_image_path1, std::string input_image_path2, std::string window, std::string output, std::string mode);
+void recognitionSample(std::string sdk_path, std::string input_image_path1, std::string input_image_path2, std::string window, std::string output, std::string mode, std::string modification);
int main(int argc, char **argv)
{
@@ -26,6 +26,7 @@ int main(int argc, char **argv)
" [--sdk_path ..]"
" [--window ]"
" [--output ]"
+ " [--modification <30, 50, 100, 1000>]"
<< std::endl;
ConsoleArgumentsParser parser(argc, argv);
@@ -35,12 +36,13 @@ int main(int argc, char **argv)
const std::string sdk_dir = parser.get("--sdk_path", "..");
const std::string window = parser.get("--window", "yes");
const std::string output = parser.get("--output", "no");
+ const std::string modification = parser.get("--modification", "1000");
try{
if (mode != "verify" && mode != "identify")
throw std::runtime_error("there is no modifier " + mode);
- recognitionSample(sdk_dir, input_image_path, input_image_path2, window, output, mode);
+ recognitionSample(sdk_dir, input_image_path, input_image_path2, window, output, mode, modification);
}catch(const std::exception &e){
std::cout << "! exception catched: '" << e.what() << "' ... exiting" << std::endl;
return 1;
@@ -117,7 +119,7 @@ void checkFileExist(std::string path)
}
-api::Context getFaces(api::Service &service, api::ProcessingBlock &faceDetector, api::ProcessingBlock &faceFitter, cv::Mat &image){
+api::Context getFaces(pbio::FacerecService &service, api::ProcessingBlock &faceDetector, api::ProcessingBlock &faceFitter, cv::Mat &image){
cv::Mat input_image;
cv::cvtColor(image, input_image, cv::COLOR_BGR2RGB);
@@ -132,30 +134,28 @@ api::Context getFaces(api::Service &service, api::ProcessingBlock &faceDetector,
return ioData;
}
-void recognitionSample(std::string sdk_path, std::string input_image_path1, std::string input_image_path2, std::string window, std::string output, std::string mode)
+void recognitionSample(std::string sdk_path, std::string input_image_path1, std::string input_image_path2, std::string window, std::string output, std::string mode, std::string modification)
{
api::Service service = api::Service::createService(sdk_path);
Context detectorCtx = service.createContext();
Context fitterCtx = service.createContext();
- Context recognizerCtx = service.createContext();
- Context matcherCtx = service.createContext();
-
+ Context faceTemplateExtractorCtx = service.createContext();
+
detectorCtx["unit_type"] = "FACE_DETECTOR";
detectorCtx["modification"] = "uld";
- detectorCtx["min_size"] = 50l;
+ detectorCtx["precision_level"] = 3;
detectorCtx["confidence_threshold"] = 0.6;
fitterCtx["unit_type"] = "FACE_FITTER";
fitterCtx["modification"] = "tddfa_faster";
- recognizerCtx["unit_type"] = "FACE_RECOGNIZER";
- matcherCtx["unit_type"] = "MATCHER_MODULE";
+ faceTemplateExtractorCtx["unit_type"] = "FACE_TEMPLATE_EXTRACTOR";
+ faceTemplateExtractorCtx["modification"] = modification;
api::ProcessingBlock faceDetector = service.createProcessingBlock(detectorCtx);
api::ProcessingBlock faceFitter = service.createProcessingBlock(fitterCtx);
- api::ProcessingBlock recognizerModule = service.createProcessingBlock(recognizerCtx);
- api::ProcessingBlock matcherModule = service.createProcessingBlock(matcherCtx);
+ api::ProcessingBlock faceTemplateExtractor = service.createProcessingBlock(faceTemplateExtractorCtx);
checkFileExist(input_image_path1);
checkFileExist(input_image_path2);
@@ -179,25 +179,31 @@ void recognitionSample(std::string sdk_path, std::string input_image_path1, std:
throw std::runtime_error("many faces on" + input_image_path2 + " image");
///////////Recognizer////////////////
- recognizerModule(ioData);
- recognizerModule(ioData2);
+ faceTemplateExtractor(ioData);
+ faceTemplateExtractor(ioData2);
/////////////////////////////////////
- Context matcherData = service.createContext();
-
if (mode == "verify")
{
- matcherData["verification"]["objects"].push_back(ioData["objects"][0]);
- matcherData["verification"]["objects"].push_back(ioData2["objects"][0]);
+ Context verificationConfig = service.createContext();
+ Context verificationData = service.createContext();
- ///////////Matcher////////////////
- matcherModule(matcherData);
+ verificationConfig["unit_type"] = "VERIFICATION_MODULE";
+ verificationConfig["modification"] = modification;
+
+ api::ProcessingBlock verificationModule = service.createProcessingBlock(verificationConfig);
+
+ verificationData["template1"] = ioData["objects"][0]["template"];
+ verificationData["template2"] = ioData2["objects"][0]["template"];
+
+ ///////////Verification////////////////
+ verificationModule(verificationData);
//////////////////////////////////
- double distance = matcherData["verification"]["result"]["distance"].getDouble();
- bool verdict = matcherData["verification"]["result"]["verdict"].getBool();
+ double distance = verificationData["result"]["distance"].getDouble();
+ double score = verificationData["result"]["score"].getDouble();
- cv::Scalar color = verdict ? cv::Scalar(0, 255, 0) : cv::Scalar(0, 0, 255);
+ cv::Scalar color = score >= 0.85 ? cv::Scalar(0, 255, 0) : cv::Scalar(0, 0, 255);
drawBBox(ioData["objects"][0], image, output, color);
drawBBox(ioData2["objects"][0], image2, output, color);
@@ -212,7 +218,8 @@ void recognitionSample(std::string sdk_path, std::string input_image_path1, std:
crop2.copyTo(result(cv::Rect(crop1.cols, 0, crop2.cols, crop2.rows)));
std::cout << "distance = " << distance << "\n";
- std::cout << "verdict = " << (verdict ? "True" : "False") << "\n";
+ std::cout << "verdict = " << (score >= 0.85 ? "True" : "False") << "\n";
+ std::cout << "score = " << score << "\n";
if (window == "yes"){
cv::imshow("result", result);
@@ -222,27 +229,50 @@ void recognitionSample(std::string sdk_path, std::string input_image_path1, std:
}
else
{
- matcherData["search"]["knn"] = 1l;
- matcherData["search"]["type_index"] = "array";
+ Context matcherConfig = service.createContext();
+ Context templateIndexConfig = service.createContext();
+ Context templates = service.createContext();
+ Context matcherData = service.createContext();
+
+ matcherConfig["unit_type"] = "MATCHER_MODULE";
+ matcherConfig["modification"] = modification;
- matcherData["search"]["template_index"] = ioData["objects"];
- matcherData["search"]["queries"].push_back(ioData2["objects"][0]);
+ templateIndexConfig["unit_type"] = "TEMPLATE_INDEX";
+ templateIndexConfig["modification"] = modification;
+
+ api::ProcessingBlock matcherModule = service.createProcessingBlock(matcherConfig);
+ api::ProcessingBlock templateIndex = service.createProcessingBlock(templateIndexConfig);
+
+ for (const Context& object : ioData["objects"])
+ {
+ templates.push_back(object["template"]);
+ }
+
+ ioData["templates"] = std::move(templates);
+
+ templateIndex(ioData);
+
+ matcherData["knn"] = 1l;
+ matcherData["template_index"] = ioData["template_index"];
+ matcherData["queries"].push_back(ioData2["objects"][0]);
///////////Matcher////////////////
matcherModule(matcherData);
//////////////////////////////////
- int find_index = static_cast(matcherData["search"]["results"][0]["index"].getLong());
- double distance = matcherData["search"]["results"][0]["distance"].getDouble();
- bool verdict = matcherData["search"]["results"][0]["verdict"].getBool();
+ int find_index = static_cast(matcherData["results"][0]["index"].getLong());
+ double distance = matcherData["results"][0]["distance"].getDouble();
+ double score = matcherData["results"][0]["score"].getDouble();
+
std::cout << "distance = " << distance <<" \n";
- std::cout << "verdict = " << verdict <<" \n";
+ std::cout << "verdict = " << (score >= 0.85 ? "True" : "False") <<" \n";
std::cout << "index = " << find_index <<" \n";
+ std::cout << "score = " << score << " \n";
for(int i = 0; i < ioData["objects"].size(); i++)
{
const api::Context &obj = ioData["objects"][i];
- cv::Scalar color = (i == find_index && verdict) ? cv::Scalar(0, 255, 0) : cv::Scalar(0, 0, 255);
+ cv::Scalar color = (i == find_index && score >= 0.85) ? cv::Scalar(0, 255, 0) : cv::Scalar(0, 0, 255);
drawBBox(obj, image, output, color);
}
diff --git a/examples/csharp/demo/csharp_demo.csproj b/examples/csharp/demo/csharp_demo.csproj
index f80f509..91fa534 100644
--- a/examples/csharp/demo/csharp_demo.csproj
+++ b/examples/csharp/demo/csharp_demo.csproj
@@ -1,21 +1,20 @@
-
+
Exe
- netcoreapp2.0
+ net6.0
+
..\..\..\bin\FacerecCSharpWrapper.dll
../../../lib/FacerecCSharpWrapper.dll
-
-
-
-
+
-
-
+
+
+
diff --git a/examples/csharp/demo/vs/Properties/launchSettings.json b/examples/csharp/demo/vs/Properties/launchSettings.json
new file mode 100644
index 0000000..20f9ca4
--- /dev/null
+++ b/examples/csharp/demo/vs/Properties/launchSettings.json
@@ -0,0 +1,8 @@
+{
+ "profiles": {
+ "csharp_demo": {
+ "commandName": "Project",
+ "commandLineArgs": "--config_dir ../../../../../../../conf/facerec"
+ }
+ }
+}
\ No newline at end of file
diff --git a/examples/csharp/demo/vs/csharp_demo.csproj b/examples/csharp/demo/vs/csharp_demo.csproj
index 998faef..7269e33 100644
--- a/examples/csharp/demo/vs/csharp_demo.csproj
+++ b/examples/csharp/demo/vs/csharp_demo.csproj
@@ -1,68 +1,21 @@
-
-
-
+
+
- Debug
- x64
- csharp_demo
Exe
- v4.6
- {9C504E04-5D81-4B01-8482-3B94D27BED43}
- --config_dir ../../../../../../conf/facerec
-
-
- x64
- true
- full
- false
- bin\Debug\
- DEBUG;TRACE
- prompt
- 4
-
-
- x64
- pdbonly
- true
- bin\Release\
- TRACE
- prompt
- 4
+ net6.0
+
- ..\..\..\..\bin\FacerecCSharpWrapper.dll
- True
-
-
- ..\..\..\..\bin\csharp_demo\deps\CommandLine.dll
- True
-
-
- ..\..\..\..\bin\csharp_demo\deps\OpenCvSharp.Blob.dll
- True
+ ..\..\..\..\bin\FacerecCSharpWrapper.dll
-
- ..\..\..\..\bin\csharp_demo\deps\OpenCvSharp.dll
- True
-
-
-
-
-
-
-
-
-
+
PreserveNewest
-
- PreserveNewest
-
-
+
PreserveNewest
@@ -77,13 +30,17 @@
PreserveNewest
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+
+
+
+
-
-
-
\ No newline at end of file
+
+
diff --git a/examples/csharp/demo/vs/csharp_demo.sln b/examples/csharp/demo/vs/csharp_demo.sln
index da047af..7f6d9c3 100644
--- a/examples/csharp/demo/vs/csharp_demo.sln
+++ b/examples/csharp/demo/vs/csharp_demo.sln
@@ -1,22 +1,22 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio 14
-VisualStudioVersion = 14.0.25420.1
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "csharp_demo", "csharp_demo.csproj", "{9C504E04-5D81-4B01-8482-3B94D27BED43}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|x64 = Debug|x64
- Release|x64 = Release|x64
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {9C504E04-5D81-4B01-8482-3B94D27BED43}.Debug|x64.ActiveCfg = Debug|x64
- {9C504E04-5D81-4B01-8482-3B94D27BED43}.Debug|x64.Build.0 = Debug|x64
- {9C504E04-5D81-4B01-8482-3B94D27BED43}.Release|x64.ActiveCfg = Release|x64
- {9C504E04-5D81-4B01-8482-3B94D27BED43}.Release|x64.Build.0 = Release|x64
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
-EndGlobal
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 14
+VisualStudioVersion = 14.0.25420.1
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "csharp_demo", "csharp_demo.csproj", "{9C504E04-5D81-4B01-8482-3B94D27BED43}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {9C504E04-5D81-4B01-8482-3B94D27BED43}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {9C504E04-5D81-4B01-8482-3B94D27BED43}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9C504E04-5D81-4B01-8482-3B94D27BED43}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {9C504E04-5D81-4B01-8482-3B94D27BED43}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/examples/csharp/demo/demo.cs b/examples/csharp/demo/vs/demo.cs
similarity index 78%
rename from examples/csharp/demo/demo.cs
rename to examples/csharp/demo/vs/demo.cs
index 61b461f..8202107 100644
--- a/examples/csharp/demo/demo.cs
+++ b/examples/csharp/demo/vs/demo.cs
@@ -72,7 +72,7 @@ public Worker(string facerec_conf_dir, string capturer_conf, string license_dir)
//_age_geder_estimator = _service.createAgeGenderEstimator("age_gender_estimator_v2.xml");
_emotions_estimator = _service.createEmotionsEstimator("emotions_estimator.xml");
_face_quality_estimator = _service.createFaceQualityEstimator("face_quality_estimator.xml");
- _liveness_2d_estimator = _service.createLiveness2DEstimator("liveness_2d_estimator_v3.xml");
+ _liveness_2d_estimator = _service.createLiveness2DEstimator("liveness_2d_estimator_v3.xml");
_face_mask_estimator = _service.createFaceAttributesEstimator("face_mask_estimator.xml");
_eyes_openness_estimator = _service.createFaceAttributesEstimator("eyes_openness_estimator_v2.xml");
@@ -96,18 +96,18 @@ public Worker(string facerec_conf_dir, string capturer_conf, string license_dir)
bool get_flag(int i)
{
- switch(i)
+ switch (i)
{
- case 0: return _flag_positions;
- case 1: return _flag_angles;
- case 2: return _flag_quality;
- case 3: return _flag_liveness;
- case 4: return _flag_age_gender;
- case 5: return _flag_cutting_base;
- case 6: return _flag_cutting_full;
- case 7: return _flag_cutting_token;
- case 8: return _flag_points;
- case 9: return _flag_face_quality;
+ case 0: return _flag_positions;
+ case 1: return _flag_angles;
+ case 2: return _flag_quality;
+ case 3: return _flag_liveness;
+ case 4: return _flag_age_gender;
+ case 5: return _flag_cutting_base;
+ case 6: return _flag_cutting_full;
+ case 7: return _flag_cutting_token;
+ case 8: return _flag_points;
+ case 9: return _flag_face_quality;
case 10: return _flag_angles_vectors;
case 11: return _flag_emotions;
case 12: return _flag_masked_face;
@@ -119,28 +119,28 @@ bool get_flag(int i)
void set_flag(int i, bool value)
{
- switch(i)
+ switch (i)
{
- case 0: _flag_positions = value; return;
- case 1: _flag_angles = value; return;
- case 2: _flag_quality = value; return;
- case 3: _flag_liveness = value; return;
- case 4: _flag_age_gender = value; return;
- case 5: _flag_cutting_base = value; return;
- case 6: _flag_cutting_full = value; return;
- case 7: _flag_cutting_token = value; return;
- case 8: _flag_points = value; return;
- case 9: _flag_face_quality = value; return;
+ case 0: _flag_positions = value; return;
+ case 1: _flag_angles = value; return;
+ case 2: _flag_quality = value; return;
+ case 3: _flag_liveness = value; return;
+ case 4: _flag_age_gender = value; return;
+ case 5: _flag_cutting_base = value; return;
+ case 6: _flag_cutting_full = value; return;
+ case 7: _flag_cutting_token = value; return;
+ case 8: _flag_points = value; return;
+ case 9: _flag_face_quality = value; return;
case 10: _flag_angles_vectors = value; return;
- case 11: _flag_emotions = value; return;
- case 12: _flag_masked_face = value; return;
- case 13: _flag_eyes_openness = value; return;
+ case 11: _flag_emotions = value; return;
+ case 12: _flag_masked_face = value; return;
+ case 13: _flag_eyes_openness = value; return;
}
}
string flag_name(int i)
{
- switch(i)
+ switch (i)
{
case 0: return "rectangles";
case 1: return "angles";
@@ -167,25 +167,25 @@ OpenCvSharp.Rect flag_rect(int i)
}
// mouse click callback for flags control
- static void onMouse(OpenCvSharp.MouseEvent mouseEvent, int x, int y, OpenCvSharp.MouseEvent flags)
+ static void onMouse(OpenCvSharp.MouseEventTypes MouseEventTypes, int x, int y, OpenCvSharp.MouseEventFlags flags, nint userData)
{
- if(mouseEvent != OpenCvSharp.MouseEvent.LButtonDown && mouseEvent != OpenCvSharp.MouseEvent.LButtonDown && mouseEvent != OpenCvSharp.MouseEvent.MButtonDown)
+ if (MouseEventTypes != OpenCvSharp.MouseEventTypes.LButtonDown && MouseEventTypes != OpenCvSharp.MouseEventTypes.LButtonDown && MouseEventTypes != OpenCvSharp.MouseEventTypes.MButtonDown)
{
return;
}
// if mouse click is in some flag's rectangle
// change flag state
- for(int i = 0; i < flags_count; ++i)
+ for (int i = 0; i < flags_count; ++i)
{
OpenCvSharp.Rect r = Worker.Instance.flag_rect(i);
- if(x >= r.X && y >= r.Y && x <= r.X + r.Width && y <= r.Y + r.Height)
+ if (x >= r.X && y >= r.Y && x <= r.X + r.Width && y <= r.Y + r.Height)
Worker.Instance.set_flag(i, !Worker.Instance.get_flag(i));
}
}
- void puttext(OpenCvSharp.Mat image, string text, OpenCvSharp.Point2f position)
+ void puttext(OpenCvSharp.Mat image, string text, OpenCvSharp.Point position)
{
// twice - for better reading
// since we are drawing on the frame from webcam
@@ -224,7 +224,7 @@ public void work(OpenCvSharp.Mat frame)
// clone the frame for drawing on it
OpenCvSharp.Mat draw_image = frame.Clone();
// handle each face on the frame separately
- for(int i = 0; i < samples.Count; ++i)
+ for (int i = 0; i < samples.Count; ++i)
{
RawSample sample = samples[i];
@@ -232,11 +232,11 @@ public void work(OpenCvSharp.Mat frame)
RawSample.Rectangle rectangle = sample.getRectangle();
// set a point to place information for this face
- OpenCvSharp.Point2f text_point = new OpenCvSharp.Point2f(
+ OpenCvSharp.Point text_point = new OpenCvSharp.Point(
rectangle.x + rectangle.width + 3,
rectangle.y + 10);
- const float text_line_height = 22;
+ const int text_line_height = 22;
// draw facial points
// red color for all points
@@ -244,12 +244,12 @@ public void work(OpenCvSharp.Mat frame)
// yellow for right eye
// (yes, there is a mess with left and right eyes in face_sdk api,
// but if we fix it now we will lose compatibility with previous versions)
- if(_flag_points)
+ if (_flag_points)
{
List points = sample.getLandmarks();
List iris_points = sample.getIrisLandmarks();
- for(int j = -2; j < points.Count; ++j)
+ for (int j = -2; j < points.Count; ++j)
{
Point p =
j == -2 ?
@@ -268,7 +268,7 @@ public void work(OpenCvSharp.Mat frame)
OpenCvSharp.Cv2.Circle(
draw_image,
- new OpenCvSharp.Point2f(p.x, p.y),
+ new OpenCvSharp.Point(p.x, p.y),
j < 0 ? 4 : 2,
color,
-1,
@@ -276,31 +276,32 @@ public void work(OpenCvSharp.Mat frame)
}
// draw iris points
- for(int j = 0; j < iris_points.Count; ++j)
+ for (int j = 0; j < iris_points.Count; ++j)
{
int ms = 1;
OpenCvSharp.Scalar color = new OpenCvSharp.Scalar(0, 255, 255);
int oi = j - 20 * Convert.ToInt32(j >= 20);
Point pt1 = iris_points[j];
Point pt2 = iris_points[(oi < 19 ? j : j - 15) + 1];
- OpenCvSharp.Point2f cv_pt1 = new OpenCvSharp.Point2f(pt1.x, pt1.y);
- OpenCvSharp.Point2f cv_pt2 = new OpenCvSharp.Point2f(pt2.x, pt2.y);
+ OpenCvSharp.Point cv_pt1 = new OpenCvSharp.Point(pt1.x, pt1.y);
+ OpenCvSharp.Point cv_pt2 = new OpenCvSharp.Point(pt2.x, pt2.y);
- if(oi < 5)
+ if (oi < 5)
{
color = new OpenCvSharp.Scalar(0, 165, 255);
- if(oi == 0)
+ if (oi == 0)
{
double radius = Math.Sqrt(Math.Pow(pt1.x - pt2.x, 2) + Math.Pow(pt1.y - pt2.y, 2));
OpenCvSharp.Cv2.Circle(
draw_image,
cv_pt1,
- (int) radius,
+ (int)radius,
color,
ms,
OpenCvSharp.LineTypes.AntiAlias);
}
- }else
+ }
+ else
{
OpenCvSharp.Cv2.Line(
draw_image,
@@ -322,7 +323,7 @@ public void work(OpenCvSharp.Mat frame)
}
// draw rectangle
- if(_flag_positions)
+ if (_flag_positions)
{
OpenCvSharp.Cv2.Rectangle(
@@ -338,18 +339,18 @@ public void work(OpenCvSharp.Mat frame)
}
// draw age and gender
- if( _flag_age_gender )
+ if (_flag_age_gender)
{
- AgeGenderEstimator.AgeGender age_gender = _age_geder_estimator.estimateAgeGender(sample);
+ AgeGenderEstimator.AgeGender age_gender = _age_geder_estimator.estimateAgeGender(sample);
string age_text = "age: ";
- switch(age_gender.age)
+ switch (age_gender.age)
{
- case AgeGenderEstimator.Age.AGE_KID : age_text += "kid "; break;
- case AgeGenderEstimator.Age.AGE_YOUNG : age_text += "young "; break;
- case AgeGenderEstimator.Age.AGE_ADULT : age_text += "adult "; break;
- case AgeGenderEstimator.Age.AGE_SENIOR : age_text += "senior "; break;
+ case AgeGenderEstimator.Age.AGE_KID: age_text += "kid "; break;
+ case AgeGenderEstimator.Age.AGE_YOUNG: age_text += "young "; break;
+ case AgeGenderEstimator.Age.AGE_ADULT: age_text += "adult "; break;
+ case AgeGenderEstimator.Age.AGE_SENIOR: age_text += "senior "; break;
}
age_text += string.Format("years: {0:G3}", age_gender.age_years);
@@ -371,12 +372,12 @@ public void work(OpenCvSharp.Mat frame)
}
// draw emotions
- if( _flag_emotions )
+ if (_flag_emotions)
{
List emotions =
_emotions_estimator.estimateEmotions(sample);
- for(int j = 0; j < emotions.Count; ++j)
+ for (int j = 0; j < emotions.Count; ++j)
{
EmotionsEstimator.Emotion emotion = emotions[j].emotion;
float confidence = emotions[j].confidence;
@@ -388,23 +389,23 @@ public void work(OpenCvSharp.Mat frame)
(int)text_point.Y - (int)text_line_height / 2,
(int)(100 * confidence),
(int)text_line_height),
- emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ? new OpenCvSharp.Scalar(255, 0, 0) :
- emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ? new OpenCvSharp.Scalar(0, 255, 0) :
- emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ? new OpenCvSharp.Scalar(0, 0, 255) :
+ emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ? new OpenCvSharp.Scalar(255, 0, 0) :
+ emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ? new OpenCvSharp.Scalar(0, 255, 0) :
+ emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ? new OpenCvSharp.Scalar(0, 0, 255) :
emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? new OpenCvSharp.Scalar(0, 255, 255) :
new OpenCvSharp.Scalar(0, 0, 0),
-1);
puttext(
draw_image,
- emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ? "neutral" :
- emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ? "happy" :
- emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ? "angry" :
+ emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL ? "neutral" :
+ emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY ? "happy" :
+ emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY ? "angry" :
emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? "surprise" :
emotion == EmotionsEstimator.Emotion.EMOTION_DISGUSTED ? "disgusted" :
emotion == EmotionsEstimator.Emotion.EMOTION_SAD ? "sad" :
emotion == EmotionsEstimator.Emotion.EMOTION_SCARED ? "scared" : "?",
- text_point + new OpenCvSharp.Point2f(100, 0));
+ text_point + new OpenCvSharp.Point(100, 0));
text_point.Y += text_line_height;
@@ -414,9 +415,9 @@ public void work(OpenCvSharp.Mat frame)
// draw angles text
- if( _flag_angles )
+ if (_flag_angles)
{
-
+
string yaw, pitch, roll;
yaw = string.Format("yaw: {0}", (0.1f * (int)10 * sample.getAngles().yaw + 0.5f));
pitch = string.Format("pitch: {0}", (0.1f * (int)10 * sample.getAngles().pitch + 0.5f));
@@ -435,36 +436,40 @@ public void work(OpenCvSharp.Mat frame)
}
// draw angles vectors
- if( _flag_angles_vectors )
+ if (_flag_angles_vectors)
{
RawSample.Angles angles = sample.getAngles();
- float cos_a = (float)Math.Cos( angles.yaw * OpenCvSharp.Cv2.PI / 180 );
- float sin_a = (float)Math.Sin( angles.yaw * OpenCvSharp.Cv2.PI / 180 );
+ float cos_a = (float)Math.Cos(angles.yaw * OpenCvSharp.Cv2.PI / 180);
+ float sin_a = (float)Math.Sin(angles.yaw * OpenCvSharp.Cv2.PI / 180);
- float cos_b = (float)Math.Cos( angles.pitch * OpenCvSharp.Cv2.PI / 180 );
- float sin_b = (float)Math.Sin( angles.pitch * OpenCvSharp.Cv2.PI / 180 );
+ float cos_b = (float)Math.Cos(angles.pitch * OpenCvSharp.Cv2.PI / 180);
+ float sin_b = (float)Math.Sin(angles.pitch * OpenCvSharp.Cv2.PI / 180);
- float cos_c = (float)Math.Cos( angles.roll * OpenCvSharp.Cv2.PI / 180 );
- float sin_c = (float)Math.Sin( angles.roll * OpenCvSharp.Cv2.PI / 180 );
+ float cos_c = (float)Math.Cos(angles.roll * OpenCvSharp.Cv2.PI / 180);
+ float sin_c = (float)Math.Sin(angles.roll * OpenCvSharp.Cv2.PI / 180);
OpenCvSharp.Point3f[] xyz = {
new OpenCvSharp.Point3f(cos_a * cos_c, -sin_c, -sin_a),
new OpenCvSharp.Point3f(sin_c, cos_b * cos_c, -sin_b),
new OpenCvSharp.Point3f(sin_a, sin_b, cos_a * cos_b) };
- OpenCvSharp.Point2f center = new OpenCvSharp.Point2f(
+ OpenCvSharp.Point center = new OpenCvSharp.Point(
+ (sample.getLeftEye().x + sample.getRightEye().x) * 0.5f,
+ (sample.getLeftEye().y + sample.getRightEye().y) * 0.5f);
+
+ OpenCvSharp.Point2f centerf = new OpenCvSharp.Point(
(sample.getLeftEye().x + sample.getRightEye().x) * 0.5f,
(sample.getLeftEye().y + sample.getRightEye().y) * 0.5f);
float length = (rectangle.width + rectangle.height) * 0.3f;
- for(int c = 0; c < 3; ++c)
+ for (int c = 0; c < 3; ++c)
{
OpenCvSharp.Cv2.Line(
draw_image,
center,
- center + new OpenCvSharp.Point2f(xyz[c].X, -xyz[c].Y) * length,
+ center + new OpenCvSharp.Point(xyz[c].X * length, -xyz[c].Y * length),
c == 0 ? new OpenCvSharp.Scalar(50, 255, 255) :
c == 1 ? new OpenCvSharp.Scalar(50, 255, 50) :
c == 2 ? new OpenCvSharp.Scalar(50, 50, 255) : new OpenCvSharp.Scalar(),
@@ -474,7 +479,7 @@ public void work(OpenCvSharp.Mat frame)
}
// draw quality text
- if( _flag_quality )
+ if (_flag_quality)
{
QualityEstimator.Quality q =
_quality_estimator.estimateQuality(sample);
@@ -501,7 +506,7 @@ public void work(OpenCvSharp.Mat frame)
}
// draw liveness text
- if( _flag_liveness )
+ if (_flag_liveness)
{
Liveness2DEstimator.LivenessAndScore liveness_2d_result = _liveness_2d_estimator.estimate(sample);
string score_str = Math.Round(liveness_2d_result.score, 3).ToString();
@@ -518,7 +523,7 @@ public void work(OpenCvSharp.Mat frame)
}
// draw face quality
- if( _flag_face_quality )
+ if (_flag_face_quality)
{
float quality = _face_quality_estimator.estimateQuality(sample);
@@ -529,7 +534,8 @@ public void work(OpenCvSharp.Mat frame)
}
// draw face attribute (masked_face)
- if(_flag_masked_face) {
+ if (_flag_masked_face)
+ {
FaceAttributesEstimator.Attribute attr = _face_mask_estimator.estimate(sample);
string score_str = Math.Round(attr.score, 3).ToString();
puttext(
@@ -541,7 +547,8 @@ public void work(OpenCvSharp.Mat frame)
}
// draw face attribute (eyes_openness)
- if(_flag_eyes_openness) {
+ if (_flag_eyes_openness)
+ {
FaceAttributesEstimator.Attribute attr = _eyes_openness_estimator.estimate(sample);
string left_score_str = Math.Round(attr.left_eye_state.score, 3).ToString();
string right_score_str = Math.Round(attr.right_eye_state.score, 3).ToString();
@@ -570,11 +577,11 @@ public void work(OpenCvSharp.Mat frame)
}
// draw face cuts
- for(int cut_i = 0; cut_i < 3; ++cut_i)
+ for (int cut_i = 0; cut_i < 3; ++cut_i)
{
- if( (cut_i == 0 && ! _flag_cutting_base) ||
- (cut_i == 1 && ! _flag_cutting_full) ||
- (cut_i == 2 && ! _flag_cutting_token) )
+ if ((cut_i == 0 && !_flag_cutting_base) ||
+ (cut_i == 1 && !_flag_cutting_full) ||
+ (cut_i == 2 && !_flag_cutting_token))
{
continue;
}
@@ -616,7 +623,7 @@ public void work(OpenCvSharp.Mat frame)
img.Rows - img_rect_y,
draw_image.Rows - Math.Max(0, text_point.Y));
- if(img_rect_width <= 0 || img_rect_height <= 0)
+ if (img_rect_width <= 0 || img_rect_height <= 0)
continue;
OpenCvSharp.Rect img_rect = new OpenCvSharp.Rect(img_rect_x, img_rect_y, img_rect_width, img_rect_height);
@@ -637,7 +644,7 @@ draw_image[new OpenCvSharp.Rect(
}
// draw checkboxes
- for(int i = 0; i < flags_count; ++i)
+ for (int i = 0; i < flags_count; ++i)
{
OpenCvSharp.Rect rect = flag_rect(i);
OpenCvSharp.Rect rect2 = new OpenCvSharp.Rect(rect.X + 5, rect.Y + 5, rect.Width - 10, rect.Height - 10);
@@ -645,7 +652,7 @@ draw_image[new OpenCvSharp.Rect(
OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(255), -1);
OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(0), 2, OpenCvSharp.LineTypes.AntiAlias);
- if( get_flag(i) )
+ if (get_flag(i))
{
OpenCvSharp.Cv2.Rectangle(draw_image, rect2, OpenCvSharp.Scalar.All(0), -1, OpenCvSharp.LineTypes.AntiAlias);
}
@@ -653,15 +660,15 @@ draw_image[new OpenCvSharp.Rect(
puttext(
draw_image,
flag_name(i),
- new OpenCvSharp.Point2f(rect.X + rect.Width + 3, rect.Y + rect.Height - 3));
+ new OpenCvSharp.Point(rect.X + rect.Width + 3, rect.Y + rect.Height - 3));
}
-
-
+
+
// show image with drawed information
OpenCvSharp.Cv2.ImShow("demo", draw_image);
// register callback on mouse events
- OpenCvSharp.Cv2.SetMouseCallback("demo", (OpenCvSharp.CvMouseCallback)onMouse);
+ OpenCvSharp.Cv2.SetMouseCallback("demo", onMouse);
}
public void dispose()
@@ -713,7 +720,7 @@ static int Main(string[] args)
// print values of arguments
Console.WriteLine("Arguments:");
- foreach(var opt in options.GetType().GetProperties())
+ foreach (var opt in options.GetType().GetProperties())
{
Console.WriteLine("--{0} = {1}", opt.Name, opt.GetValue(options, null));
}
@@ -738,17 +745,16 @@ static int Main(string[] args)
// VideoCapture camera;
OpenCvSharp.VideoCapture camera = new OpenCvSharp.VideoCapture();
-
- for(; camera_id < 10; ++camera_id)
+ for (; camera_id < 10; ++camera_id)
{
camera.Open(camera_id);
- camera.Set(OpenCvSharp.CaptureProperty.FrameWidth, desiredFrameWidht);
- camera.Set(OpenCvSharp.CaptureProperty.FrameHeight, desiredFrameHeight);
+ camera.Set(OpenCvSharp.VideoCaptureProperties.FrameWidth, desiredFrameWidht);
+ camera.Set(OpenCvSharp.VideoCaptureProperties.FrameHeight, desiredFrameHeight);
OpenCvSharp.Mat frame = new OpenCvSharp.Mat();
for (int i = 0; i < 10; i++)
frame = camera.RetrieveMat();
- if(frame.Empty())
+ if (frame.Empty())
{
Console.WriteLine("webcam " + camera_id + " not opened");
camera.Release();
@@ -759,22 +765,22 @@ static int Main(string[] args)
}
}
- for(;;)
+ for (; ; )
{
OpenCvSharp.Mat frame;
//camera.Grab();
frame = camera.RetrieveMat();
- if(frame.Empty())
+ if (frame.Empty())
break;
-
+
// give a frame to the worker
worker.work(frame);
- if(27 == (char) OpenCvSharp.Cv2.WaitKey(10))
+ if (27 == (char)OpenCvSharp.Cv2.WaitKey(10))
{
- camera.Release();
+ camera.Release();
break;
}
}
@@ -783,11 +789,11 @@ static int Main(string[] args)
// otherwise licence error may occur
// when create sdk object in next time
worker.dispose();
-
}
- catch(Exception e)
- {
- Console.WriteLine("! exception catched: '" + e + "' ... exiting");
+ catch (Exception e)
+ {
+ Console.WriteLine($"! exception catched: '{e}' ... exiting");
+
return 1;
}
Console.WriteLine("close program");
diff --git a/examples/csharp/processing_block_demo/README.txt b/examples/csharp/processing_block_demo/README.txt
new file mode 100644
index 0000000..8fa75df
--- /dev/null
+++ b/examples/csharp/processing_block_demo/README.txt
@@ -0,0 +1,28 @@
+=== Build with Visual Studio ===
+
+1. extract windows_x86_64.zip
+1.1 please make sure the onnxruntime library (onnxruntime.dll on Windows, libonnxruntime.so on Linux) can be found when building and running the project
+2. open examples/csharp/processing_block_demo/vs/csharp_processing_block_demo.sln in Visual Studio, build and run.
+
+=== Build with dotnet ===
+
+Requirements:
+ * dotnet https://docs.microsoft.com/en-us/dotnet/core/get-started
+ * opencvsharp https://github.com/shimat/opencvsharp
+ * command line parser https://github.com/commandlineparser/commandline
+
+
+Build
+cd examples/csharp/processing_block_demo
+dotnet publish -o publish
+
+
+Run
+
+Windows:
+set PATH=%PATH%;..\..\..\bin
+dotnet publish\csharp_processing_block_demo.dll --sdk_path=../../../ --unit_type=face --modification uld --min_size 50
+
+
+Linux:
+LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:../../../lib:/path/to/opencvsharp/native/libs dotnet publish/csharp_processing_block_demo.dll --sdk_path=../../../ --unit_type=face --modification uld --min_size 50
diff --git a/examples/csharp/processing_block_demo/csharp_processing_block_demo.csproj b/examples/csharp/processing_block_demo/csharp_processing_block_demo.csproj
new file mode 100644
index 0000000..51cdfe9
--- /dev/null
+++ b/examples/csharp/processing_block_demo/csharp_processing_block_demo.csproj
@@ -0,0 +1,21 @@
+
+
+
+ Exe
+ net6.0
+ true
+
+
+
+
+ ..\..\..\bin\FacerecCSharpWrapper.dll
+ ../../../lib/FacerecCSharpWrapper.dll
+
+
+
+
+
+
+
+
+
diff --git a/examples/csharp/processing_block_demo/vs/csharp_processing_block_demo.csproj b/examples/csharp/processing_block_demo/vs/csharp_processing_block_demo.csproj
new file mode 100644
index 0000000..b966035
--- /dev/null
+++ b/examples/csharp/processing_block_demo/vs/csharp_processing_block_demo.csproj
@@ -0,0 +1,48 @@
+
+
+
+ Exe
+ net6.0
+ true
+ {F6F992D0-AF99-4A65-98AF-E7DC073BBA5C}
+
+
+
+
+ ..\..\..\..\bin\FacerecCSharpWrapper.dll
+
+
+
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+
+
+
+
+
+
+
diff --git a/examples/csharp/processing_block_demo/vs/csharp_processing_block_demo.sln b/examples/csharp/processing_block_demo/vs/csharp_processing_block_demo.sln
new file mode 100644
index 0000000..deebd7d
--- /dev/null
+++ b/examples/csharp/processing_block_demo/vs/csharp_processing_block_demo.sln
@@ -0,0 +1,22 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 14
+VisualStudioVersion = 14.0.25420.1
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "csharp_processing_block_demo", "csharp_processing_block_demo.csproj", "{F6F992D0-AF99-4A65-98AF-E7DC073BBA5C}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {F6F992D0-AF99-4A65-98AF-E7DC073BBA5C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F6F992D0-AF99-4A65-98AF-E7DC073BBA5C}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F6F992D0-AF99-4A65-98AF-E7DC073BBA5C}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F6F992D0-AF99-4A65-98AF-E7DC073BBA5C}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/examples/csharp/processing_block_demo/vs/processing_block_demo.cs b/examples/csharp/processing_block_demo/vs/processing_block_demo.cs
new file mode 100644
index 0000000..d03b465
--- /dev/null
+++ b/examples/csharp/processing_block_demo/vs/processing_block_demo.cs
@@ -0,0 +1,633 @@
+using System;
+using System.Collections.Generic;
+using System.ComponentModel.Design;
+using System.Data.SqlTypes;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using CommandLine;
+using OpenCvSharp;
+
+using VDT.FaceRecognition.SDK;
+
+public class Options
+{
+ [Option("sdk_path", Default = "../../../", HelpText = "Path to FaceSDK directory.")]
+ public string SdkPath { get; set; }
+
+ [Option("input_image")]
+ public string InputImage { get; set; }
+
+ [Option("unit_type", Default = "objects", HelpText = "body|face|face_keypoint|pose|objects|emotions|age|gender|mask|liveness|quality")]
+ public string UnitType { get; set; }
+
+ [Option("version", Default = 1)]
+ public int Version { get; set; }
+
+ [Option("modification", Default = "")]
+ public string Modification { get; set; }
+
+ [Option("use_cuda", Default = false)]
+ public bool UseCuda { get; set; }
+};
+
+namespace csharp_processing_block_demo
+{
+ class Program
+ {
+ private static readonly Dictionary unitTypes = new()
+ {
+ { "body", "HUMAN_BODY_DETECTOR" },
+ { "face", "FACE_DETECTOR" },
+ { "objects", "OBJECT_DETECTOR" },
+ { "face_keypoint", "FACE_FITTER" },
+ { "emotions", "EMOTION_ESTIMATOR" },
+ { "age", "AGE_ESTIMATOR" },
+ { "gender","GENDER_ESTIMATOR" },
+ { "mask", "MASK_ESTIMATOR" },
+ { "liveness", "LIVENESS_ESTIMATOR" },
+ { "quality", "QUALITY_ASSESSMENT_ESTIMATOR" },
+ { "pose", "HUMAN_POSE_ESTIMATOR" },
+ };
+
+ static int Main(string[] args)
+ {
+ Console.WriteLine
+ (
+ $@"Usage: dotnet csharp_csharp_processing_block_demo.dll {System.Reflection.Assembly.GetExecutingAssembly().Location}
+ [--input_image ]
+ [--unit_type body|face|face_keypoint|pose|objects|emotions|age|gender|mask|liveness|quality]
+ [--sdk_path ../../../]
+ [--use_cuda]"
+ );
+
+ bool error = false;
+ Options options = new Options();
+ CommandLine.Parser.Default.ParseArguments(args)
+ .WithParsed(opts => options = opts)
+ .WithNotParsed(errs => error = true);
+
+ if (error)
+ {
+ return 1;
+ }
+
+ Console.WriteLine("Arguments:");
+ foreach (var opt in options.GetType().GetProperties())
+ {
+ Console.WriteLine($"--{ToSnakeCase(opt.Name)} = {opt.GetValue(options, null)}");
+ }
+ Console.WriteLine();
+
+ try
+ {
+ FacerecService service = FacerecService.createService(options.SdkPath + "/conf/facerec/", options.SdkPath + "/license/");
+ string libDir = options.SdkPath + (OperatingSystem.IsWindows() ? "\\bin" : "/lib");
+
+ string unitType = options.UnitType;
+ Dictionary