Commit 744458c2 by wangquyuan

add by wqy

1 parent 0e485ae2
......@@ -142,7 +142,7 @@ target_link_libraries(${PROJECT_NAME} -L/wqy/tools/opencv4_home/lib -lopencv_cor
include_directories(/wqy/test/qtproject/emotions/include)
target_link_libraries(${PROJECT_NAME} -L/wqy/test/qtproject/emotions/lib64 -lSeetaPoseEstimation600 -lSeetaActionUnit600 -lSeetaEmotionRecognizer200 -lSeetaEyeStateDetector200 -lSeetaFaceTracking600 -lSeetaFaceLandmarker600 -lSeetaAuthorize -ltennis -lslm_runtime -lSeetaFaceDetector600 -lSeetaFaceRecognizer610)
target_link_libraries(${PROJECT_NAME} -L/wqy/test/qtproject/emotions/lib64 -lSeetaPoseEstimation600 -lSeetaActionUnit600 -lSeetaEmotionRecognizer200 -lSeetaEyeStateDetector200 -lSeetaFaceTracking600 -lSeetaFaceLandmarker600 -lSeetaAuthorize -ltennis -lslm_runtime -lSeetaFaceDetector600 -lSeetaFaceRecognizer610 -lSeetaHeartRateDetector600)
......
......@@ -49,6 +49,7 @@
#include "seeta/ActionUnit.h"
#include "seeta/EyeStateDetector.h"
#include "seeta/HeartRateDetector.h"
......@@ -84,6 +85,9 @@ using websocketpp::lib::placeholders::_1;
using websocketpp::lib::placeholders::_2;
using websocketpp::lib::bind;
const int VIDEO_WIDTH = 800;
const int VIDEO_HEIGHT = 600;
seeta::FaceTracker *g_track = NULL;//create_face_detector();
seeta::FaceDetector *g_fd = NULL;//create_face_detector();
......@@ -299,7 +303,7 @@ int init_engine()
return -1;
}
g_track = create_face_tracker(400,400);
g_track = create_face_tracker(VIDEO_WIDTH,VIDEO_HEIGHT);
if (!g_track )
{
return -1;
......@@ -734,6 +738,265 @@ static std::string do_query_face_emotion( const std::string &body )
return strresponse;
}
// Estimate a single heart-rate value from the per-frame readings:
// smooth the series with a 10-sample moving average, then return the
// most frequent integer value (the mode of the floored averages).
static int compute_heart_rate(std::vector<double> &vecs)
{
    if (vecs.empty())
        return 0;

    double value = 0.0;
    std::vector<double> data;
    for (size_t i = 0; i < vecs.size(); i++)
    {
        value += vecs[i];
        if (i >= 10)
        {
            value -= vecs[i - 10];
        }
        if (i >= 9)
        {
            data.push_back(value / 10);
        }
    }
    // fewer than 10 samples: fall back to the plain average
    if (vecs.size() < 10)
    {
        data.push_back(value / vecs.size());
    }

    // histogram of the floored averages
    std::map<int, int> totals;
    std::map<int, int>::iterator iter;
    for (size_t i = 0; i < data.size(); i++)
    {
        int m = floor(data[i]);
        iter = totals.find(m);
        if (iter != totals.end())
        {
            iter->second++;
        }
        else
        {
            totals.insert(std::map<int, int>::value_type(m, 1));
        }
    }

    // the most frequent value is the reported heart rate
    int max = 0;
    int maxvalue = 0;
    for (iter = totals.begin(); iter != totals.end(); ++iter)
    {
        if (iter->second > max)
        {
            max = iter->second;
            maxvalue = iter->first;
        }
    }
    return maxvalue;
}
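Standalone sanity check of the smoothing-plus-mode estimate above. This is not part of the commit: the helper below reimplements the same idea for illustration, and the sample readings are invented.

#include <cmath>
#include <iostream>
#include <map>
#include <vector>

static int estimate_rate(const std::vector<double> &samples)
{
    if (samples.empty())
        return 0;

    // 10-sample moving average
    std::vector<double> smoothed;
    double window_sum = 0.0;
    for (std::size_t i = 0; i < samples.size(); ++i)
    {
        window_sum += samples[i];
        if (i >= 10)
            window_sum -= samples[i - 10];
        if (i >= 9)
            smoothed.push_back(window_sum / 10.0);
    }
    if (samples.size() < 10)
        smoothed.push_back(window_sum / samples.size());

    // mode of the floored averages
    std::map<int, int> histogram;
    for (double v : smoothed)
        ++histogram[(int)std::floor(v)];

    int best_count = 0, best_value = 0;
    for (std::map<int, int>::const_iterator it = histogram.begin(); it != histogram.end(); ++it)
    {
        if (it->second > best_count)
        {
            best_count = it->second;
            best_value = it->first;
        }
    }
    return best_value;
}

int main()
{
    // per-frame readings hovering around 71 bpm
    std::vector<double> samples = {71.2, 70.8, 72.1, 71.5, 70.9, 71.3,
                                   71.7, 72.0, 71.1, 70.6, 71.4, 71.9};
    std::cout << "estimated rate: " << estimate_rate(samples) << std::endl; // prints 71
    return 0;
}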
// Feed one frame to the heart-rate detector for the given face.
// Returns 0 when a heart-rate estimate is available (written to `value`),
// 1 while the detector is still waiting/buffering (`value` is set from
// get_waiting_time()), and -1 if no signal could be extracted from this frame.
static int recognize(seeta::HeartRateDetector *heartrate, cv::Mat &mat, const SeetaImageData &image, const SeetaRect face, double &value)
{
    // the heart-rate detector works on 81-point landmarks
    std::vector<SeetaPointF> spoints81(81);
    face_landmarker81(image, face, spoints81.data());

    // timestamp of this frame in milliseconds
    auto time_now = std::chrono::system_clock::now();
    auto duration_in_ms = std::chrono::duration_cast<std::chrono::milliseconds>(time_now.time_since_epoch());
    double time = duration_in_ms.count();

    cv::Mat mat2 = mat.clone();
    int x = heartrate->get_signal(mat2, time, spoints81.data());
    if (x == -1)
    {
        return -1;
    }
    if (heartrate->is_waiting())
    {
        value = heartrate->get_waiting_time();
        return 1;
    }
    else
    {
        value = heartrate->get_heart_rate();
        return 0;
    }
}
// Handle POST /query/video: decode the uploaded clip, run per-frame heart-rate
// detection on the largest face, and answer once a stable rate is available.
static std::string do_query_face_video( const std::string &body )
{
    std::string strresponse;
    try
    {
        std::map<std::string, std::string> parameters;
        int n = parse_http_parameters(body, parameters);

        std::map<std::string, std::string>::iterator iter;
        iter = parameters.find("userVideo");
        if (iter == parameters.end())
        {
            LOG( _ERROR_, "do not find the parameter userVideo" );
            strresponse = "{\"code\":4,\"msg\":\"" + GetError( 4 ) + "\"}";
            return strresponse;
        }

        // write the base64-decoded video to a temporary file so OpenCV can open it
        // NOTE: the temporary file is not removed afterwards
        std::string strbase64 = base64_decode( iter->second );
        std::string filename = "/tmp/" + get_uuid("");
        std::ofstream outf(filename, std::ios::out | std::ios::binary);
        outf.write((const char *)strbase64.data(), strbase64.length());
        outf.close();

        iter = parameters.find("frameNum");
        if (iter == parameters.end())
        {
            LOG( _ERROR_, "do not find the parameter frameNum" );
            strresponse = "{\"code\":4,\"msg\":\"" + GetError( 4 ) + "\"}";
            return strresponse;
        }
        int frameNum = atoi(iter->second.c_str());
        if (frameNum <= 0)
        {
            frameNum = 1;
        }
        // NOTE: frameNum is validated here but not used further below

        cv::VideoCapture *m_capture = new cv::VideoCapture;
        m_capture->open(filename.c_str());
        if (!m_capture->isOpened())
        {
            m_capture->release();
            delete m_capture;
            m_capture = NULL;
            std::cout << "open video failed" << std::endl;
            strresponse = "{\"code\":8,\"msg\":\"" + GetError( 8 ) + "\"}";
            return strresponse;
        }

        // the detector buffers 300 frames before producing estimates
        seeta::HeartRateDetector *m_heartrate = new seeta::HeartRateDetector;
        m_heartrate->set_frame_number(300);
        m_heartrate->reset();

        std::chrono::system_clock::time_point lasttimer;
        std::vector<double> rates;
        cv::Mat mat;
        int nheartrate = 0;
        int num = 0;
        while (true)
        {
            if (!m_capture->read(mat))
            {
                std::cout << "read end" << std::endl;
                break;
            }
            if (mat.channels() == 4)
            {
                std::cout << "channels:" << mat.channels() << std::endl;
                //cv::cvtColor(mat, mat, cv::COLOR_RGBA2BGR);
            }
            if (!mat.data)
            {
                std::cout << "skip invalid frame" << std::endl;
                continue;
            }

            // wrap the OpenCV frame for the Seeta detectors
            SeetaImageData img;
            img.width = mat.cols;
            img.height = mat.rows;
            img.channels = mat.channels();
            img.data = mat.data;

            auto faces = face_detector(img);
            if (faces.size <= 0)
            {
                std::cout << "no face found, skip frame" << std::endl;
                continue;
            }
            num++;

            // pick the largest detected face
            int index = 0;
            int maxarea = faces.data[0].pos.width * faces.data[0].pos.height;
            for (int i = 1; i < faces.size; i++)
            {
                if (maxarea < faces.data[i].pos.width * faces.data[i].pos.height)
                {
                    index = i;
                    maxarea = faces.data[i].pos.width * faces.data[i].pos.height;
                }
            }
            std::cout << "frame num:" << num << std::endl;

            double value = 0.0;
            int nret = recognize(m_heartrate, mat, img, faces.data[index].pos, value);
            if (nret == 0)
            {
                std::cout << "rate:" << value << std::endl;
                rates.push_back(value);
                if (rates.size() <= 1)
                {
                    // start the aggregation window on the first estimate
                    lasttimer = std::chrono::system_clock::now();
                }
                else
                {
                    std::chrono::system_clock::time_point endtimer = std::chrono::system_clock::now();
                    auto timer_duration = std::chrono::duration_cast<std::chrono::milliseconds>(endtimer - lasttimer);
                    if (timer_duration.count() >= 4 * 1000)
                    {
                        // 4 seconds of estimates collected: report the aggregate
                        value = (double)(compute_heart_rate(rates));
                        nheartrate = (int)value;
                        std::cout << "heart rate:" << nheartrate << std::endl;
                        strresponse = "{\"code\":0,\"msg\":\"" + GetError( 0 ) + "\"}";
                        break;
                    }
                }
            }
            // nret == 1: detector still warming up; nret == -1: no signal in this frame
        } // end while

        // NOTE: if the clip ends before a stable rate is found, strresponse is returned
        // empty; the computed nheartrate is only logged, not included in the JSON response
        delete m_heartrate;
        delete m_capture;
    }
    catch( std::exception &e )
    {
        LOG( _ERROR_, "parse message failed:%s", GETNULLPTR( e.what() ) );
        strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1 ) + "\"}";
    }
    return strresponse;
}
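As a rough guide to how long an uploaded clip needs to be: assuming the detector produces its first estimate only after its 300-frame buffer fills (set_frame_number(300) above), estimates are then aggregated for 4 more seconds before the handler answers. The sketch below is not from the commit; the 30 fps frame rate is an assumption.

#include <iostream>

int main()
{
    const double fps = 30.0;          // assumed frame rate of the uploaded clip
    const int warmup_frames = 300;    // set_frame_number(300) in the handler above
    const double aggregation_s = 4.0; // aggregation window before answering

    double min_seconds = warmup_frames / fps + aggregation_s;
    std::cout << "minimum clip length: ~" << min_seconds
              << " s at " << fps << " fps" << std::endl; // ~14 s
    return 0;
}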
void on_http( httpserver *s, websocketpp::connection_hdl hdl )
{
......@@ -793,7 +1056,11 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
strresponse = do_query_face_emotion( strBody );
create_http_response( con, strresponse, 200 );
}
else
else if( strUri == "/query/video" )
{
strresponse = do_query_face_video( strBody );
create_http_response( con, strresponse, 200 );
}else
{
//std::cout << "----no method:" << strMethod << std::endl;
......
......@@ -10,7 +10,7 @@ static std::vector<ErrorCode> errors =
{5, "request is not support yet"},
{6, "database is not avilable"},
{7, "image data parse failed"},
{8, "this pot is not existed"},
{8, "open video failed"},
{9, "this pot is offline"},
};
......