Commit 42665126 by wangquyuan

add by wqy

1 parent 279308ca
 cmake_minimum_required(VERSION 2.8)
-project(YgydServer)
+project(yuexinserver)
 set(SOLUTION_DIR ${CMAKE_CURRENT_SOURCE_DIR})
......
# yuexinserver deployment guide
- OS: `ubuntu16.04`
## Prerequisite: install Docker 19.03 or later, which supports the `--gpus` flag when creating containers
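A quick sanity check (a sketch, not part of the shipped package) that Docker is new enough and, for the GPU path, that the NVIDIA container toolkit is in place; the CUDA image tag below is only an example and can be replaced with any image that contains `nvidia-smi`:
```shell
# Docker must report version 19.03 or newer for the --gpus flag to exist
sudo docker --version

# Only needed when device will be set to "gpu": this throwaway container
# should print the GPU list via nvidia-smi if the NVIDIA container toolkit is installed
sudo docker run --rm --gpus all nvidia/cuda:10.0-base nvidia-smi
```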
## Installation
First copy `package.tar.gz` to the `/home/test` directory and extract it, then run the commands below.
Start the installation:
```shell
cd /home/test
sudo tar xvf package.tar.gz
cd package
```
## Configuration file
config.json is the configuration file.
The parameters you may need to change are:
- http_port: listening port of the backend service
- logpath: log directory
- datapath: directory where the service stores its data; use an absolute path, and the directory must already exist. If savevideo=0, this parameter may be left empty.
- savevideo: whether the service saves the videos it receives. 0: do not save, 1: save. If set to 1, the response for each video is also saved alongside it with the ".json" extension.
- device: "gpu" or "cpu". If the host has a GPU, "gpu" is recommended; otherwise processing is noticeably slower.
```shell
cd /home/test/package
sudo vi config.json
```
Modify the parameters as needed; a reference snippet is shown below.
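For orientation only, here are the fields discussed above with illustrative values (the shipped config.json also contains other fields, such as the model list, which should normally be left unchanged):
```shell
# illustrative values only – edit the shipped config.json in place with vi,
# the heredoc below just prints the snippet
cat <<'EOF'
{
    "http_port": 4000,
    "logpath": "/tmp",
    "datapath": "/tmp/data",
    "savevideo": 1,
    "device": "gpu"
}
EOF
```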
## Build the image with the Dockerfile (make sure the device and datapath parameters are configured first)
```shell
cd /home/test/package
sudo docker build -t yuexinserver:1 .
```
## When device is set to "gpu", create the container. This example assumes datapath is set to /tmp/data and the corresponding host directory is /home/test/data;
## /home/test/data must be created first.
```shell
sudo mkdir /home/test/data
sudo docker run --name yuexinserver -d --restart always --gpus all --net host -p 4000:4000 -v /home/test/data:/tmp/data yuexinserver:1 ./yuexinserver
```
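To confirm the container came up (generic Docker checks, not specific to this package; the service prints the values it read from config.json on startup, so they should appear in the logs):
```shell
# the container should be listed as "Up"
sudo docker ps --filter name=yuexinserver
# startup output: datapath, savevideo, device and the model file names
sudo docker logs yuexinserver
```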
## When device is set to "cpu", create the container
```shell
sudo mkdir /home/test/data
sudo docker run --name yuexinserver -d --restart always --net host -p 4000:4000 -v /home/test/data:/tmp/data yuexinserver:1 ./yuexinserver
```
##
## Test program (test) usage
```shell
./test --help
```
Usage of ./test:
  -f string
        image file
  -h string
        http url (default "http://127.0.0.1:60000")
  -t string
        feature,action,emotion,video
## Get face features
```shell
./test -t feature -h http://127.0.0.1:4000 -f ./test.jpeg
```
## Get micro-expressions (action units)
```shell
./test -t action -h http://127.0.0.1:4000 -f ./test.jpeg
```
## Get emotions
```shell
./test -t emotion -h http://127.0.0.1:4000 -f ./test.jpeg
```
## Combined video analysis; on success the result is saved to /tmp/response.json
```shell
./test -t video -h http://127.0.0.1:4000 -f ./test.mp4
```
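To inspect the saved result (a sketch; it assumes python3 is available on the host):
```shell
# pretty-print the combined result written by the video test
python3 -m json.tool /tmp/response.json
```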
##
## api.txt is the API reference
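As a rough illustration of the raw HTTP interface documented in api.txt, the video test above amounts to a multipart POST against the /query/video endpoint (the path matches the bundled test client; this is a sketch only — the form field names "file" and "frameNum" are assumptions here, and api.txt remains the authoritative reference):
```shell
# upload a video as multipart/form-data and store the JSON response
# (field names "file" and "frameNum" are illustrative)
curl -X POST "http://127.0.0.1:4000/query/video" \
     -F "file=@./test.mp4" \
     -F "frameNum=1" \
     -o /tmp/response.json
```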
...@@ -108,7 +108,8 @@ Content-Type:"multipart/form-data"
     "emotion": [0.50, 0.05, 0.02, 0.01, 0.37, 0.05, 0.00],   # emotion scores, [calm, happy, angry, surprised, disgusted, sad, fearful]
     "faceInfo": [-6.42, -13.49, -2.15],                      # head pose on the x/y/z axes, [yaw, pitch, roll]
-    "eyeInfo": 2            # eye state, 0 - eye not detected, 1 - not an eye, 2 - eyes open, 3 - eyes closed
+    "eyeInfo": 2,           # eye state, 0 - eye not detected, 1 - not an eye, 2 - eyes open, 3 - eyes closed
+    "heartInfo":60          # heart rate
 }
...@@ -129,17 +130,20 @@ Content-Type:"multipart/form-data"
     "microAction": [0.01, 0.03, 0.02, 0.00, 0.00, 0.03, 0.04, 0.01, 0.01, 0.99, 0.00, 0.02, 0.08, 0.22, 0.51, 0.30, 0.31, 0.09],
     "emotion": [0.50, 0.05, 0.02, 0.01, 0.37, 0.05, 0.00],
     "faceInfo": [-6.42, -13.49, -2.15],
-    "eyeInfo": 2
+    "eyeInfo": 2,
+    "heartInfo":60
 }, {
     "microAction": [0.02, 0.03, 0.02, 0.00, 0.00, 0.07, 0.04, 0.02, 0.01, 1.00, 0.00, 0.03, 0.10, 0.19, 0.68, 0.41, 0.32, 0.08],
     "emotion": [0.53, 0.05, 0.02, 0.01, 0.34, 0.06, 0.00],
     "faceInfo": [-5.83, -13.58, -2.46],
-    "eyeInfo": 2
+    "eyeInfo": 2,
+    "heartInfo":60
 }, {
     "microAction": [0.02, 0.03, 0.02, 0.00, 0.00, 0.07, 0.04, 0.02, 0.01, 0.99, 0.00, 0.03, 0.10, 0.19, 0.67, 0.40, 0.31, 0.08],
     "emotion": [0.53, 0.05, 0.02, 0.01, 0.34, 0.06, 0.00],
     "faceInfo": [-5.83, -13.58, -2.46],
-    "eyeInfo": 2
+    "eyeInfo": 2,
+    "heartInfo":60
 }]
 }
 }
......
FROM ubuntu:16.04
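# image codec libraries (libjpeg/libpng/libjasper) and the OpenMP runtime used by the bundled binaries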
RUN apt-get update && apt-get install -y libjpeg8 libpng12-dev libjasper-dev libgomp1
COPY ./ /package
ENV LD_LIBRARY_PATH=/package/lib:/package:$LD_LIBRARY_PATH
WORKDIR /package
...@@ -6,7 +6,8 @@
     "work_timeout":10,
     "logpath":"/tmp",
-    "frame_fps":25,
+    "datapath":"/tmp",
+    "savevideo":1,
     "device":"gpu",
...@@ -14,11 +15,11 @@
     "face_detector":"SeetaFaceDetector6.0.IPC.sta",
     "face_landmarker81":"SeetaFaceLandmarker5.0.pts81.tsm.sta",
     "face_landmarker5":"SeetaFaceLandmarker5.0.pts5.tsm.sta",
-    "face_recognizer":"SeetaFaceRecognizer6.0.Resnet101.Arcface.20200713.tsm.json",
+    "face_recognizer":"SeetaFaceRecognzier.Rest50.Arcface.lffd-plfd.1219.sta",
     "pose_model":"SeetaPoseEstimation1.1.0.sta",
     "actionunit_model":"SeetaActionUnit1.0.0.ext.sta",
-    "emotion_model":"SeetaEmotionRecognizer1.0.tsm.sta",
+    "emotion_model":"SeetaEmotionRecognizer.v1.sta",
     "eye_model":"SeetaEyeBlink.squeezenet.4class.214000.1010.sta"
 }
......
...@@ -140,9 +140,9 @@ target_link_libraries(${PROJECT_NAME} -L/usr/local/lib -lORZ_static)
 include_directories(/wqy/tools/opencv4_home/include/opencv4)
 target_link_libraries(${PROJECT_NAME} -L/wqy/tools/opencv4_home/lib -lopencv_core -lopencv_imgproc -lopencv_highgui -lopencv_imgcodecs -lopencv_video -lopencv_videoio)
-include_directories(/wqy/test/qtproject/emotions/include)
+include_directories(/wqy/test/qtproject/sdk-nolimits/include)
-target_link_libraries(${PROJECT_NAME} -L/wqy/test/qtproject/emotions/lib64 -lSeetaPoseEstimation600 -lSeetaActionUnit600 -lSeetaEmotionRecognizer200 -lSeetaEyeStateDetector200 -lSeetaFaceTracking600 -lSeetaFaceLandmarker600 -lSeetaAuthorize -ltennis -lslm_runtime -lSeetaFaceDetector600 -lSeetaFaceRecognizer610 -lSeetaHeartRateDetector600)
+target_link_libraries(${PROJECT_NAME} -L/wqy/test/qtproject/sdk-nolimits/lib64 -lSeetaPoseEstimation600 -lSeetaActionUnit600 -lSeetaEmotionRecognizer200 -lSeetaEyeStateDetector200 -lSeetaFaceTracking600 -lSeetaFaceLandmarker600 -ltennis -lSeetaFaceDetector600 -lSeetaFaceRecognizer610 -lSeetaHeartRateDetector600)
......
...@@ -49,7 +49,8 @@ public:
     //JSONField( self, MysqlConfig, db );
-    //JSONField( self, int, frame_fps ) = 25;
+    JSONField( self, std::string, datapath );
+    JSONField( self, int, savevideo ) = 0;
     JSONField( self, std::string, device ) = "cpu";
     JSONField( self, ModelsConfig, models );
......
...@@ -27,7 +27,7 @@ struct bone_config : public websocketpp::config::asio
     static bool const enable_multithreading = true;
     static size_t const max_message_size = 2048 * 1000 * 1000;
-    static long const timeout_open_handshake = 60 * 1000;
+    static long const timeout_open_handshake = 3 * 60 * 1000;
     struct transport_config : public core::transport_config
     {
......
...@@ -91,7 +91,7 @@ using websocketpp::lib::bind;
 const int VIDEO_WIDTH = 800;
 const int VIDEO_HEIGHT = 600;
-std::string g_response;
+//std::string g_response;
 //seeta::FaceTracker *g_track = NULL;//create_face_detector();
 seeta::FaceDetector *g_fd = NULL;//create_face_detector();
...@@ -893,12 +893,16 @@ static int recognize( seeta::HeartRateDetector *heartrate, cv::Mat &mat, const S
 }
-static void do_heart_rate( const std::string &videofile, int *rate )
+static void do_heart_rate( const std::string &videofile, int *rate, void *hearts )
 {
     *rate = 0;
+    std::vector<int> * phearts = (std::vector<int> *)hearts;
+    phearts->clear();
     cv::VideoCapture *m_capture = NULL;
     seeta::HeartRateDetector *m_heartrate = NULL;
+    double value = 0.0;
+    std::vector<double> rates;
     int nheartrate = 0;
     try
     {
...@@ -926,7 +930,7 @@ static void do_heart_rate( const std::string &videofile, int *rate )
     std::chrono::system_clock::time_point starttimer = std::chrono::system_clock::now();
     std::chrono::system_clock::time_point lasttimer;
-    std::vector<double> rates;
+    //std::vector<double> rates;
     cv::Mat mat;
     int num = 0;
...@@ -992,13 +996,15 @@ static void do_heart_rate( const std::string &videofile, int *rate )
             }
         }
+        value = 0.0;
         //std::cout << "-----num:" << num << std::endl;
-        double value = 0.0;
         int nret = recognize( m_heartrate, mat, img, faces.data[index].pos, value );
         if( nret == 0 )
         {
             //std::cout << "-----rate:" << value << std::endl;
             rates.push_back( value );
+            phearts->push_back((int)value);
             if( rates.size() <= 1 )
             {
                 lasttimer = std::chrono::system_clock::now();
...@@ -1009,17 +1015,19 @@ static void do_heart_rate( const std::string &videofile, int *rate )
             auto timer_duration = std::chrono::duration_cast<std::chrono::milliseconds>( endtimer - lasttimer );
             if( timer_duration.count() >= 4 * 1000 )
             {
-                value = ( double )( compute_heart_rate( rates ) );
-                nheartrate = ( int )value;
-                std::cout << "heart rate:" << nheartrate << std::endl;
+                //std::cout << "heart compute frame is ok" << std::endl;
+                //value = ( double )( compute_heart_rate( rates ) );
+                //nheartrate = ( int )value;
+                //std::cout << "heart rate:" << nheartrate << std::endl;
                 //strresponse = "{\"code\":0,\"msg\":\"" + GetError( 0 ) + "\"}";
-                break;
+                //break;
             }
         }
     }
     else
     {
+        phearts->push_back(0);
     }
 }//end while
...@@ -1029,6 +1037,11 @@ static void do_heart_rate( const std::string &videofile, int *rate )
         LOG( _ERROR_, "heartrate compute failed:%s", GETNULLPTR( e.what() ) );
     }
+    value = ( double )( compute_heart_rate( rates ) );
+    nheartrate = ( int )value;
+    std::cout << "heart rate:" << nheartrate << std::endl;
     if(!m_heartrate)
     {
         delete m_heartrate;
...@@ -1226,7 +1239,7 @@ static void do_video( const std::string &videofile, int frameNum, int *eyes,
     if ((count > 0) && (count2 > 0))
     {
         *eyes = (*eyes) + 1;
-        std::cout << "eyes:" << *eyes << std::endl;
+        //std::cout << "eyes:" << *eyes << std::endl;
     }
     if((leftstate == seeta::EyeStateDetector::EYE_OPEN) ||
...@@ -1273,7 +1286,7 @@ struct UploadFileData
     std::string filename;
 };
-static int parse_boundary_paramters( const std::string &filename, const std::string &boundary, const std::string &body, int &frameNum )
+static int parse_boundary_paramters( std::string &filename, const std::string &boundary, const std::string &body, int &frameNum )
 {
     int nret = -1;
     size_t nbegin, nend, nfind1, nfind2, nfind3;
...@@ -1403,6 +1416,8 @@ static int parse_boundary_paramters( const std::string &filename, const std::str
     //strtmp = "/tmp/" + uploadfiles[i].filename;
     //int fd = open(strtmp.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666);
     //std::cout << "---chunkpath:" << chunkpath << std::endl;
+    filename += "_" + uploadfiles[i].filename;
     int fd = open( filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666 );
     if( fd >= 0 )
     {
...@@ -1476,14 +1491,14 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
         return;
     }
+    /*
     if (g_response.length() > 0)
     {
         std::cout << "send:" << g_response.length() << std::endl;
         create_http_response( con, g_response, 200 );
         return;
     }
+    */
     std::string filename;
     int frameNum = 1;
...@@ -1493,8 +1508,9 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
     if( nfind != string::npos )
     {
         strboundary = strContentType.substr( nfind + 9 );
-        filename = "/tmp/" + get_uuid( "" );
-        std::cout << "upload file:" << filename << std::endl;
+        //createdir("/data");
+        filename = g_config->datapath + "/" + get_uuid( "" );
+        //std::cout << "upload file:" << filename << std::endl;
         nret = parse_boundary_paramters( filename, strboundary, strBody, frameNum );
         if(nret == -1)
         {
...@@ -1515,11 +1531,12 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
     std::vector<std::vector<float>> face_poses;
     std::vector<std::vector<float>> face_actions;
     std::vector<std::vector<float>> face_emotions;
+    std::vector<int> heart_beats;
     std::vector<int> blink_eyes;
     if(nret == 0)
     {
-        std::thread heartrate_thread(do_heart_rate, filename, &hearts);
+        std::cout << "upload file:" << filename << std::endl;
+        std::thread heartrate_thread(do_heart_rate, filename, &hearts, &heart_beats);
         do_video(filename, frameNum, &eyes, face_features,
                  face_poses,face_actions,face_emotions, blink_eyes);
...@@ -1527,7 +1544,7 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
     }
-    remove( filename.c_str() );
+    //remove( filename.c_str() );
     strresponse = "{\"code\":" + std::to_string(nret) + ",\"msg\":\"" + GetError( nret ) + "\",\"data\":{";
     strresponse += "\"resultData\":{\"blinkNum\":" + std::to_string(eyes) + ",\"heartRate\":" + std::to_string(hearts) + "}";
     strresponse += ",\"userFaces\":[";
...@@ -1556,6 +1573,21 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
     if (num > blink_eyes.size())
         num = blink_eyes.size();
+    std::vector<int> heart_beats2;
+    heart_beats2.resize(num);
+    if (heart_beats.size() < num)
+    {
+        for(int i=0; i<num - heart_beats.size(); i++)
+        {
+            heart_beats2.push_back(0);
+        }
+        memcpy(heart_beats2.data() + num - heart_beats.size(), heart_beats.data(), sizeof(int) * num);
+    }else
+    {
+        memcpy(heart_beats2.data(), heart_beats.data(), sizeof(int) * num);
+    }
     std::cout << "num:" << num << std::endl;
     //num = 3;
     for(int i=0; i<num; i++)
...@@ -1593,7 +1625,9 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
             }
             strresponse += floattostring(face_poses[i][m]);
         }
-        strresponse += "],\"eyeInfo\":" + std::to_string(blink_eyes[i]);
+        strresponse += "],\"heartInfo\":" + std::to_string(heart_beats2[i]);
+        strresponse += ",\"eyeInfo\":" + std::to_string(blink_eyes[i]);
         //for(int m=0; m<blink_eyes[i].size(); m++)
         //{
...@@ -1608,19 +1642,30 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
     }
     strresponse += "]}}";
-    g_response = strresponse;
+    //g_response = strresponse;
     std::cout << "response:" << strresponse.length() << std::endl;
     create_http_response(con, strresponse, 200);
-    /*
-    int fd = open( "/tmp/response.txt", O_WRONLY | O_CREAT | O_TRUNC, 0666 );
-    if( fd >= 0 )
-    {
-        write( fd, strresponse.data(), strresponse.length() );
-        close( fd );
-    }
-    sleep(1);
-    */
+    if (g_config->savevideo) {
+        //remove( filename.c_str() );
+        std::string res = filename;
+        nfind = res.find(".");
+        if(nfind >= 0)
+        {
+            res = res.substr(0, nfind);
+        }
+        res += ".json";
+        int fd = open( res.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666 );
+        if( fd >= 0 )
+        {
+            write( fd, strresponse.data(), strresponse.length() );
+            close( fd );
+        }
+    }else
+    {
+        remove( filename.c_str() );
+    }
     return;
 }
......
...@@ -182,7 +182,8 @@ int main( int argc, char *argv[] )
         return -1;
     }
-    //std::cout << "frame_fps:" << g_config->frame_fps << std::endl;
+    std::cout << "datapath:" << g_config->datapath << std::endl;
+    std::cout << "savevideo:" << g_config->savevideo << std::endl;
     std::cout << "device:" << g_config->device << std::endl;
     std::cout << "face_detector:" << g_config->models.face_detector << std::endl;
     std::cout << "face_landmarker81:" << g_config->models.face_landmarker81 << std::endl;
...@@ -195,6 +196,18 @@ int main( int argc, char *argv[] )
     std::cout << "eye_model:" << g_config->models.eye_model << std::endl;
+    if (g_config->datapath.length() <= 0)
+    {
+        if (g_config->savevideo > 0)
+        {
+            std::cout << "the datapath is empty!" << std::endl;
+            return -1;
+        }else
+        {
+            g_config->datapath = "/tmp";
+        }
+    }
     logpath = g_config->logpath;
     //std::string logpath(argv[8]);
......
...@@ -3,6 +3,16 @@
 ./test -t action -h http://192.168.1.33:4000 -f /wqy/Downloads/test2.jpeg
 ./test -t feature -h http://192.168.1.33:4000 -f /wqy/Downloads/test2.jpeg
+./test -t video -h http://192.168.1.33:4000 -f ./cap1.mp4
+mkdir /tmp/data
+sudo docker build -t ygyd:1 .
+sudo docker run -t -d --rm --net host -p 4000:4000 -v /tmp/data:/data ygyd:1 ./YgydServer
 ffmpeg -t 20 -f v4l2 -i /dev/video0 -r 20 -f mp4 cap1.mp4
 ffplay ./cap1.mp4
...@@ -9,7 +9,7 @@ import (
     //"sync"
     "encoding/json"
     //"strconv"
-    "time"
+    //"time"
     //"math/rand"
     //"crypto/md5"
     "bytes"
...@@ -363,7 +363,7 @@ func get_video2(filename, url string) bool {
     //seq := "http://192.168.1.33:4000/query/feature"
     seq := url + "/query/video"
-    client := &http.Client{Timeout: 60 * time.Second,}
+    client := &http.Client{}
     request, err := http.NewRequest("POST", seq, body)
     if err != nil {
...@@ -421,21 +421,13 @@ func get_video2(filename, url string) bool {
     body2, err := io.ReadAll(resp.Body)
     if err != nil {
-        if strings.Contains(err.Error(), "unexpected EOF") && len(body2) > 0 {
-            return false
-        }else {
-            fmt.Println("recv ReadAll failed: ", err)
-            return false
-        }
-    }else {
-        fmt.Println("length:", len(body2))
+        fmt.Println("recv ReadAll failed: ", err)
     }
-    fmt.Println("length:", len(body2))
+    fmt.Println("recv response ok, save to /tmp/response.json, length:", len(body2))
     //fmt.Println(string(body2))
-    ioutil.WriteFile("/tmp/recv.txt",body2, 0666);
+    ioutil.WriteFile("/tmp/response.json",body2, 0666);
     /*
     var res Response2
......