Commit 4f094e67 by wangquyuan

add by wqy

1 parent 9cefd768
......@@ -104,6 +104,7 @@ typedef websocketpp::server<bone_config> httpserver;
int init_engine();
//void do_video2(const std::string &videofile, int nrotate);
void on_http( httpserver *s, websocketpp::connection_hdl hdl );
......
......@@ -1110,12 +1110,218 @@ static int geteyecount(seeta::EyeStateDetector::EYE_STATE oldstate, seeta::EyeSt
return 0;
}
static void do_video2( const std::string &videofile, int nrotate, void * poses)
{
//*eyes = 0;
//*total_frames = 0;
//*valid_frames = 0;
cv::VideoCapture *m_capture = NULL;
int num = 0;
unsigned long valid_frames = 0;
unsigned long total_frames =0 ;
std::vector< std::vector<float> > *face_poses = (std::vector< std::vector<float> > *)(poses);
try
{
m_capture = new cv::VideoCapture;
m_capture->open( videofile.c_str() );
//m_capture->set( cv::CAP_PROP_FPS, 25 );
if( !m_capture->isOpened() )
{
m_capture->release();
delete m_capture;
m_capture = NULL;
std::cout << "------open video---failed" << std::endl;
return;
}
/*
int fps = m_capture->get( cv::CAP_PROP_FPS);
if(fps < 1 )
{
fps = 20;
}
*/
//std::chrono::system_clock::time_point starttimer = std::chrono::system_clock::now();
//std::chrono::system_clock::time_point lasttimer;
//std::vector<double> rates;
cv::Mat mat;
//int nheartrate = 0;
//int num = 0;
//seeta::EyeStateDetector::EYE_STATE m_leftstate, m_rightstate;
//m_leftstate = m_rightstate = seeta::EyeStateDetector::EYE_UNKNOWN;
//int per_frame = 1000 / fps;
while( 1 )
{
//std::chrono::system_clock::time_point cur = std::chrono::system_clock::now();
//auto timer_duration2 = std::chrono::duration_cast<std::chrono::milliseconds>( cur - starttimer );
//if( timer_duration2.count() < per_frame )
//{
// std::this_thread::sleep_for( std::chrono::milliseconds( per_frame - timer_duration2.count() ) );
//}
//starttimer = std::chrono::system_clock::now();
//std::this_thread::sleep_for( std::chrono::milliseconds( 10 ) );
if( !m_capture->read( mat ) )
{
std::cout << "read end" << std::endl;
break;
}
if( mat.channels() == 4 )
{
std::cout << "channels:" << mat.channels() << std::endl;
//cv::cvtColor(mat, mat, cv::COLOR_RGBA2GBR);
}
if( !mat.data )
{
std::cout << "skip invalid frame" << std::endl;
continue;
}
//cv::Mat mat2;
//cv::Size size (VIDEO_WIDTH, VIDEO_HEIGHT);
//cv::resize(mat, mat2, size, 0, 0, cv::INTER_CUBIC);
//mat = mat2.clone();
cv::Mat mat2;
SeetaImageData img;
if(nrotate != 0)
{
Rotate(mat, mat2, nrotate);
img.width = mat2.cols;
img.height = mat2.rows;
img.channels = mat2.channels();
img.data = mat2.data;
}else
{
img.width = mat.cols;
img.height = mat.rows;
img.channels = mat.channels();
img.data = mat.data;
}
total_frames = (total_frames) + 1;
//SeetaImageData img;
//img.width = mat.cols;
//img.height = mat.rows;
//img.channels = mat.channels();
//img.data = mat.data;
//cv::cvtColor(mat, mat2, cv::COLOR_BGR2RGB);
auto faces = face_detector( img );
if( faces.size <= 0 )
{
//flags.push_back(0);
std::cout << "no find face,skip frame" << std::endl;
continue;
}
//flags.push_back(1);
num++;
int index = 0;
int maxarea = faces.data[0].pos.width * faces.data[0].pos.height;
for( int i = 1; i < faces.size; i++ )
{
if( maxarea < faces.data[i].pos.width * faces.data[i].pos.height )
{
index = i;
maxarea = faces.data[i].pos.width * faces.data[i].pos.height;
}
}
//std::cout << "-----num:" << num << std::endl;
//SeetaPointF points[5];
//face_landmarker5(img, faces.data[index].pos, points);
/*
if((num % frameNum) == 0)
{
std::vector<float> features = face_recognizer(img, points);
face_features.push_back(features);
}
*/
/*
std::vector<float> actions;
std::vector<float> features2 = face_action(img, points);
for(int i=0; i<features2.size(); i++)
{
if( i== 11)
{
continue;
}
actions.push_back(features2[i]);
}
face_actions.push_back(actions);
std::vector<float> emotions = face_emotion(img, points);
face_emotions.push_back(emotions);
*/
float yaw,pitch,roll;
yaw = pitch = roll = 0.0;
pose_estimate(img, faces.data[index].pos, &yaw, &pitch, &roll);
std::vector<float> poses;
poses.push_back(yaw);
poses.push_back(pitch);
poses.push_back(roll);
std::cout << "yaw:" << yaw << ",pitch:" << pitch << ",roll:" << roll << std::endl;
face_poses->push_back(poses);
//seeta::EyeStateDetector::EYE_STATE leftstate, rightstate;
//leftstate = rightstate = seeta::EyeStateDetector::EYE_UNKNOWN;
//eye_detect(img, points,leftstate,rightstate);
///int count = geteyecount(m_leftstate, leftstate);
/*
if(count >= 0)
{
//*eyes += count;
m_leftstate = leftstate;
}
*/
//m_leftstate = leftstate;
//int count2 = geteyecount(m_rightstate, rightstate);
/*
if(count2 >= 0)
{
//*eyes += count;
m_rightstate = rightstate;
}
*/
}//end while
}
catch( std::exception &e )
{
LOG( _ERROR_, "heartrate compute failed:%s", GETNULLPTR( e.what() ) );
}
if(!m_capture)
{
m_capture->release();
delete m_capture;
}
//*rate = nheartrate;
valid_frames = num;
std::cout << "total:" << total_frames << ", valid:" << valid_frames << std::endl;
return;
}
static void do_video( const std::string &videofile, int frameNum, int nrotate, int *eyes,
std::vector<std::vector<float>> &face_features,
std::vector<std::vector<float>> &face_poses,
//std::vector<std::vector<float>> &face_poses,
std::vector<std::vector<float>> &face_actions,
std::vector<std::vector<float>> &face_emotions,
std::vector<int> &blink_eyes, unsigned long *total_frames, unsigned long *valid_frames,
......@@ -1265,6 +1471,7 @@ static void do_video( const std::string &videofile, int frameNum, int nrotate, i
std::vector<float> emotions = face_emotion(img, points);
face_emotions.push_back(emotions);
/*
float yaw,pitch,roll;
yaw = pitch = roll = 0.0;
pose_estimate(img, faces.data[index].pos, &yaw, &pitch, &roll);
......@@ -1273,8 +1480,9 @@ static void do_video( const std::string &videofile, int frameNum, int nrotate, i
poses.push_back(pitch);
poses.push_back(roll);
//std::cout << "yaw:" << yaw << ",pitch:" << pitch << ",roll:" << roll << std::endl;
std::cout << "yaw:" << yaw << ",pitch:" << pitch << ",roll:" << roll << std::endl;
face_poses.push_back(poses);
*/
seeta::EyeStateDetector::EYE_STATE leftstate, rightstate;
leftstate = rightstate = seeta::EyeStateDetector::EYE_UNKNOWN;
......@@ -1639,9 +1847,13 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
int nrotate = get_video_rotate(filename);
nrotate = nrotate * -1;
std::thread heartrate_thread(do_heart_rate, filename, nrotate, &hearts, &heart_beats, &fps);
std::thread pose_thread(do_video2, filename, nrotate, (void *)&face_poses);
do_video(filename, frameNum, nrotate, &eyes, face_features,
face_poses,face_actions,face_emotions, blink_eyes, &total_frames, &valid_frames, flags);
/*face_poses,*/face_actions,face_emotions, blink_eyes, &total_frames, &valid_frames, flags);
pose_thread.join();
heartrate_thread.join();
}
......@@ -1729,13 +1941,14 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
strresponse += floattostring(face_emotions[i][m]);
}
strresponse += "],\"faceInfo\":[";
for(int m=0; m<face_poses[i].size(); m++)
{
if(m > 0)
{
strresponse += ",";
}
strresponse += floattostring(face_poses[i][m]);
}
......@@ -1806,13 +2019,13 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
strresponse = do_query_face_emotion( strBody );
create_http_response( con, strresponse, 200 );
}
/*
else if( strUri == "/query/video" )
/*
else if( strUri == "/query/pose" )
{
strresponse = do_query_face_video( strBody );
strresponse = do_query_face_pose( strBody );
create_http_response( con, strresponse, 200 );
}
*/
*/
else
{
......
......@@ -284,6 +284,16 @@ int main( int argc, char *argv[] )
LOG( _ERROR_, "init engine failed, system exited" );
return -1;
}
/*
if( argc != 3 ) {
std::cout << "parameters error" << std::endl;
return -1;
}
do_video2(argv[1], atoi(argv[2]));
return 0;
*/
/*
CMysqlConnectPool *pmysqlclient = CMysqlConnectPool::GetInstance();
if( pmysqlclient == nullptr )
......
......@@ -30,3 +30,9 @@ IP:8.142.64.229
项目路径:/usr/local/zkyx
服务器IP:8.142.64.229
路径:/mnt/zkyxEmotion/package
用户名:root
密码:<REDACTED — plaintext credentials must not be committed; move to a secrets manager or environment configuration and rotate this password>
Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!