Commit 262f2763 by wangquyuan

add by wqy

1 parent 58ac7830
@@ -5,7 +5,7 @@
"work_timeout":10,
"logpath":"/tmp",
"device":"gpu",
"models": { "models": {
"face_detector":"SeetaFaceDetector6.0.IPC.sta", "face_detector":"SeetaFaceDetector6.0.IPC.sta",
......
@@ -134,8 +134,17 @@ target_link_libraries(${PROJECT_NAME} -L/usr/local/lib -lORZ_static)
#include_directories(${SeetaCV_INCLUDE_DIRS})
#target_link_libraries(${PROJECT_NAME} ${SeetaCV_LIBRARIES})
#include_directories(/wqy/tools/opencv4_home/include/opencv4)
#target_link_libraries(${PROJECT_NAME} -L/wqy/tools/opencv4_home/lib -lopencv_core -lopencv_imgproc -lopencv_highgui -lopencv_imgcodecs)
include_directories(/wqy/tools/opencv4_home/include/opencv4)
target_link_libraries(${PROJECT_NAME} -L/wqy/tools/opencv4_home/lib -lopencv_core -lopencv_imgproc -lopencv_highgui -lopencv_imgcodecs -lopencv_video -lopencv_videoio)
include_directories(/wqy/test/qtproject/emotions/include)
target_link_libraries(${PROJECT_NAME} -L/wqy/test/qtproject/emotions/lib64 -lSeetaPoseEstimation600 -lSeetaActionUnit600 -lSeetaEmotionRecognizer200 -lSeetaEyeStateDetector200 -lSeetaFaceTracking600 -lSeetaFaceLandmarker600 -lSeetaAuthorize -ltennis -lslm_runtime -lSeetaFaceDetector600 -lSeetaFaceRecognizer610)
target_link_libraries(${PROJECT_NAME} -pthread -lrt)
......
@@ -49,6 +49,7 @@ public:
//JSONField( self, MysqlConfig, db );
JSONField( self, std::string, device ) = "cpu";
JSONField( self, ModelsConfig, models );
};
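The new device field defaults to "cpu" here and is switched to "gpu" by the config.json hunk above. A minimal hedged sketch of how that string is meant to drive model loading, mirroring the branch the create_* factories repeat later in this diff; the helper name is an assumption, not part of the commit:

// hypothetical helper, assuming the seeta::ModelSetting API included later in this diff
// only the exact string "cpu" selects the CPU path; any other value falls through to GPU
static void apply_config_device(seeta::ModelSetting &model)
{
    if (g_config->device == "cpu")
        model.set_device(seeta::ModelSetting::CPU);
    else
        model.set_device(seeta::ModelSetting::GPU);
}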
......
@@ -35,4 +35,11 @@ void geturlparameters(const std::string &str, std::map<std::string, std::string>
std::string encodetojson(const std::string &str);
bool is_space_char(unsigned char c);
int skip_space_chars(const std::string &str, std::string::size_type nbegin);
int parse_http_parameters(const std::string & body, std::map<std::string,std::string> &paramters);
#endif
@@ -9,7 +9,7 @@
#include <mutex>
#include <future>
#include <chrono>
#include <set>
#include <vector>
//#include "eupulogger4system.h" //#include "eupulogger4system.h"
#include "util.h" #include "util.h"
#include "errorcode.h" #include "errorcode.h"
...@@ -30,14 +30,34 @@ ...@@ -30,14 +30,34 @@
//#include "user.h" //#include "user.h"
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include "seeta/FaceLandmarker.h"
#include "seeta/FaceDetector.h"
#include "seeta/Common/Struct.h"
#include "seeta/CTrackingFaceInfo.h"
#include "seeta/FaceTracker.h"
#include "seeta/PoseEstimator.h"
#include "seeta/FaceRecognizer.h"
#include "seeta/EmotionRecognizer.h"
#include "seeta/SeetaEmotionRecognizerConfig.h"
#include "seeta/ActionUnit.h"
#include "seeta/EyeStateDetector.h"
extern CSequence *g_seq;
using std::string;
extern Config *g_config;
extern std::string gmodelpath;
std::mutex g_mutex;
@@ -97,55 +117,253 @@ void create_http_response( httpserver::connection_ptr con, const std::string &bo
//con->append_header("Access-Control-Request-Method","POST,GET");
}
static seeta::FaceDetector* create_face_detector()
{
seeta::ModelSetting fd_model;
fd_model.append(gmodelpath + g_config->models.face_detector);
if (g_config->device == "cpu" )
{
fd_model.set_device( seeta::ModelSetting::CPU );
}else
{
fd_model.set_device( seeta::ModelSetting::GPU );
}
fd_model.set_id(0);
seeta::FaceDetector *m_fd = new seeta::FaceDetector(fd_model);
m_fd->set(seeta::FaceDetector::PROPERTY_MIN_FACE_SIZE, 100);
return m_fd;
}
static seeta::FaceTracker * create_face_tracker(int width, int height)
{
seeta::ModelSetting fd_model;
fd_model.append(gmodelpath + g_config->models.face_detector);
if (g_config->device == "cpu" )
{
fd_model.set_device( seeta::ModelSetting::CPU );
}else
{
fd_model.set_device( seeta::ModelSetting::GPU );
}
fd_model.set_id(0);
seeta::FaceTracker *m_tracker = new seeta::FaceTracker(fd_model, width, height);
m_tracker->SetMinFaceSize(100); //set(seeta::FaceTracker::PROPERTY_MIN_FACE_SIZE, 100);
return m_tracker;
}
static std::string do_query_device( const std::string &body )   // removed by this commit; its old body appears further below
static seeta::FaceLandmarker * create_face_landmarker5()
{
seeta::ModelSetting pd_model;
pd_model.append(gmodelpath + g_config->models.face_landmarker5);
if (g_config->device == "cpu" )
{
pd_model.set_device( seeta::ModelSetting::CPU );
}else
{
pd_model.set_device( seeta::ModelSetting::GPU );
}
pd_model.set_id(0);
seeta::FaceLandmarker *m_pd5 = new seeta::FaceLandmarker(pd_model);
return m_pd5;
}
static seeta::FaceLandmarker * create_face_landmarker81()
{
seeta::ModelSetting pd_model;
pd_model.append(gmodelpath + g_config->models.face_landmarker81);
if (g_config->device == "cpu" )
{
pd_model.set_device( seeta::ModelSetting::CPU );
}else
{
pd_model.set_device( seeta::ModelSetting::GPU );
}
pd_model.set_id(0);
seeta::FaceLandmarker *m_pd81 = new seeta::FaceLandmarker(pd_model);
return m_pd81;
}
static seeta::FaceRecognizer * create_face_recognizer()
{
seeta::ModelSetting fr_model;
fr_model.append(gmodelpath + g_config->models.face_recognizer);
if (g_config->device == "cpu" )
{
fr_model.set_device( seeta::ModelSetting::CPU );
}else
{
fr_model.set_device( seeta::ModelSetting::GPU );
}
fr_model.set_id(0);
seeta::FaceRecognizer *m_fr = new seeta::FaceRecognizer(fr_model);
return m_fr;
}
static seeta::PoseEstimator* create_pose_estimator()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.pose_model);
if (g_config->device == "cpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
}
pose_model.set_id(0);
seeta::PoseEstimator *m_pose = new seeta::PoseEstimator(pose_model);
return m_pose;
}
static seeta::ActionUnit* create_actionunit()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.actionunit_model);
if (g_config->device == "cpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
}
pose_model.set_id(0);
seeta::ActionUnit *m_pose = new seeta::ActionUnit(pose_model);
return m_pose;
}
static seeta::EmotionRecognizer* create_emotion_recognizer()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.emotion_model);
if (g_config->device == "cpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
}
pose_model.set_id(0);
seeta::EmotionRecognizer *m_pose = new seeta::EmotionRecognizer(pose_model);
return m_pose;
}
static seeta::EyeStateDetector* create_eye_detection()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.eye_model);
if (g_config->device == "cpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
}
pose_model.set_id(0);
seeta::EyeStateDetector *m_pose = new seeta::EyeStateDetector(pose_model);
return m_pose;
}
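Each factory above returns a raw pointer allocated with new, and do_query_face_feature below has to delete every engine by hand. A hedged sketch, not part of this commit, of wrapping one factory in std::unique_ptr so ownership is explicit; the remaining factories could be wrapped the same way:

// #include <memory> would join the other includes at the top of this file
// hypothetical RAII wrapper around the raw-pointer factory above
static std::unique_ptr<seeta::FaceDetector> make_face_detector()
{
    return std::unique_ptr<seeta::FaceDetector>(create_face_detector());
}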
static std::string do_query_face_feature( const std::string &body )
{
std::string strresponse;
// old body of do_query_device (removed by this commit):
try
{
/*
orz::jug devicejug = orz::json2jug( body );
QueryDeviceJson dev;
dev.parse( devicejug );
bool bfind = false;
g_mutex.lock();
std::map<std::string, PotInfo *>::iterator iter = g_pots.find( dev.potuid );
if( iter != g_pots.end() )
{
bfind = true;
strresponse = "{\"code\":0,\"msg\":\"ok\",\"servers\":";
strresponse += "{\"secretkey\":\"" + iter->second->secretkey + "\",\"ip\":";
strresponse += "\"" + iter->second->ip + "\",\"port\":" ;
strresponse += std::to_string( iter->second->port ) + ",\"desc\":\"";
strresponse += iter->second->desc + "\"}}";
}
g_mutex.unlock();
if( !bfind )
{
PotInfo info;
std::string sql = "select deviceid,appid,ip,port,state,type,secretkey,description,create_date,";
sql += "update_date ,potuid,os from " + g_config->db.db + ".pots where potuid='" + dev.potuid + "'";
int nret = doQueryMySql_pots( sql, info );
if( nret > 0 )
{
strresponse = "{\"code\":0,\"msg\":\"ok\",\"servers\":";
strresponse += "{\"secretkey\":\"" + info.secretkey + "\",\"ip\":";
strresponse += "\"" + info.ip + "\",\"port\":" ;
strresponse += std::to_string( info.port ) + ",\"desc\":\"";
strresponse += info.desc + "\"}}";
}
else
{
LOG( _ERROR_, "access db failed:%s", GETNULLSTR( sql ) );
strresponse = "{\"code\":8,\"msg\":\"" + GetError( 8 ) + "\"}";
}
}
*/
//create_http_response( con, strresponse, 200 );
// new body of do_query_face_feature (added by this commit):
seeta::FaceDetector *fd = NULL;//create_face_detector();
seeta::FaceLandmarker *pd = NULL; //create_face_landmarker();
seeta::FaceRecognizer *fr = NULL; //create_face_recognizer();
try
{
std::map<std::string, std::string> parameters;
int n = parse_http_parameters(body, parameters);
std::map<std::string, std::string>::iterator iter;
iter = parameters.find("imgStr");
if (iter == parameters.end())
{
LOG( _ERROR_, "do not find the parameter imgStr" );
strresponse = "{\"code\":4,\"msg\":\"" + GetError( 4 ) + "\"}";
return strresponse;
}
std::string strbase64 = base64_decode( iter->second );
std::vector<unsigned char> imagedatas( strbase64.begin(), strbase64.end() );
cv::Mat mat = cv::imdecode( imagedatas, 1 ); //COLOR_LOAD_IMAGE_COLOR);
std::cout << "------cv::imdecode---begin" << std::endl;
if( !mat.data )
{
std::cout << "------cv::imdecode---failed" << std::endl;
strresponse = "{\"code\":7,\"msg\":\"" + GetError( 7) + "\"}";
return strresponse;
}
std::cout << "------cv::imdecode---end" << std::endl;
SeetaImageData img;
img.width = mat.cols;
img.height = mat.rows;
img.channels = mat.channels();
img.data = mat.data;
//seeta::FaceDetector *fd = NULL;//create_face_detector();
//seeta::FaceLandmarker *pd = NULL; //create_face_landmarker();
//seeta::FaceRecognizer *fr = NULL; //create_face_recognizer();
do
{
fd = create_face_detector();
if (!fd ) {
strresponse = "{\"code\":2,\"msg\":\"" + GetError( 2) + "\"}";
break;
}
pd = create_face_landmarker5();
if (!pd ) {
strresponse = "{\"code\":2,\"msg\":\"" + GetError( 2) + "\"}";
break;
}
fr = create_face_recognizer();
if (!fr ) {
strresponse = "{\"code\":2,\"msg\":\"" + GetError( 2) + "\"}";
break;
}
auto faces = fd->detect(img);
if (faces.size <= 0) {
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1) + "\"}";
break;
}
int index = 0;
int maxarea = faces.data[0].pos.width * faces.data[0].pos.height;
for (int i=1; i<faces.size; i++)
{
if(maxarea < faces.data[i].pos.width * faces.data[i].pos.height)
{
index = i;
maxarea = faces.data[i].pos.width * faces.data[i].pos.height;
}
}
SeetaPointF points[5];
pd->mark(img, faces.data[index].pos, points);
std::vector<float> features(fr->GetExtractFeatureSize());
if(!fr->Extract(img, points, features.data()))
{
strresponse = "{\"code\":3,\"msg\":\"" + GetError( 3) + "\"}";
break;
}
std::string strtmp((const char *)features.data(), int(features.size() * sizeof(float)));
std::string enbase64 = base64_encode(strtmp);
strresponse = "{\"code\":0,\"msg\":\"" + GetError(0) + "\",\"data\":\"";
strresponse += enbase64 + "\"}";
}while(0);
}
catch( std::exception &e )
@@ -154,7 +372,25 @@ static std::string do_query_device( const std::string &body )
//std::cout << "parse message failed:" << e.what() << std::endl;
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1 ) + "\"}";
}
// release the engines created above
if(fd)
{
delete fd;
fd = NULL;
}
if(pd)
{
delete pd;
pd = NULL;
}
if(fr)
{
delete fr;
fr = NULL;
}
return strresponse;
}
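For reference, a hedged sketch of how a caller inside this file could assemble the body that do_query_face_feature expects. base64_encode is the helper already used above; read_file_to_string and encodeuricomponent are hypothetical names used only for illustration (the diff only shows the decoding direction):

std::string img = read_file_to_string("face.jpg");                      // hypothetical helper
std::string body = "imgStr=" + encodeuricomponent(base64_encode(img));  // hypothetical encoder
std::string json = do_query_face_feature(body);
// on success: {"code":0,"msg":"ok","data":"<base64-encoded float feature>"}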
@@ -191,11 +427,20 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
if( strMethod == "POST" )
{
//std::cout << "post:body:" << strBody << std::endl;
string strContentType = rt.get_header("Content-Type");
std::cout << "----ContentType:" << strContentType << std::endl;
if( strUri == "/query/device" )   // removed by this commit
if (strContentType != "application/x-www-form-urlencoded" )
{
strresponse = "the request is not implement";
LOG( _WARN_, "no support get method:%s", GETNULLSTR( strUri ) );
create_http_response( con, strresponse, 404 );
return;
}
strresponse = do_query_device( strBody );   // removed by this commit
if( strUri == "/query/feature" )
{
strresponse = do_query_face_feature( strBody );
create_http_response( con, strresponse, 200 );
}
else
......
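The rewritten on_http handler only forwards application/x-www-form-urlencoded POST bodies, routes /query/feature to do_query_face_feature, and returns its JSON. A hedged client-side sketch of turning the returned data field back into floats; it assumes the base64_decode helper used above is available, the buffer layout follows the Extract/base64_encode call in the handler, and the JSON parsing step is left out:

// hypothetical client-side helper; data_field is the "data" value from /query/feature
std::vector<float> decode_feature(const std::string &data_field)
{
    std::string raw = base64_decode(data_field);               // inverse of the base64_encode above
    std::vector<float> feature(raw.size() / sizeof(float));
    std::memcpy(feature.data(), raw.data(), raw.size());        // needs <cstring> and <vector>
    return feature;
}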
@@ -3,45 +3,16 @@
static std::vector<ErrorCode> errors =
{
{0, "ok"},
{1, "json format is incorrect"}, {1, "do not find face"},
{2, "signature is incorrect" }, {2, "create detector engine failed" },
{3, "authorization parameter is incorrect"}, {3, "face recognize failed "},
{4, "parameters are incorrect"}, {4, "parameters are incorrect"},
{5, "request is not support yet"}, {5, "request is not support yet"},
{6, "database is not avilable"}, {6, "database is not avilable"},
{7, "request format is incorrect"}, {7, "image data parse failed"},
{8, "this pot is not existed"}, {8, "this pot is not existed"},
{9, "this pot is offline"}, {9, "this pot is offline"},
{10, "this pot is timeout"},
{11, "this transactionid is invalid"},
{12, "this template's child template is not empty"},
{13, "this template is being used by pots"},
{14, "this parameter version is not found"},
{15, "this table is not existed"},
{16, "this data version is too old"},
{17, "this table already exists"},
{18, "this template already exists"},
{19, "this pot's parameter is empty"},
{20, "this template is not existed"},
{21, "user do not register"},
{22, "user do not login"},
{23, "do not find token parameter"},
{24, "the image size is more than 64k"},
{25, "the username already exists"},
{26, "the insert data do not match the table columns"},
{27, "this template type already exists"},
};
......
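A small worked example of how the trimmed table is consumed; it assumes GetError(code) returns the message column above, which is how the handlers in this commit use it:

std::string resp = "{\"code\":1,\"msg\":\"" + GetError( 1 ) + "\"}";
// with the table above this yields: {"code":1,"msg":"do not find face"}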
@@ -32,7 +32,7 @@ Config *g_config = NULL;
CSequence *g_seq = NULL;
//extern DataTableUpdate g_datatableupdate;
std::string gmodelpath;
using websocketpp::lib::placeholders::_1;
using websocketpp::lib::placeholders::_2;
@@ -172,6 +172,7 @@ int main( int argc, char *argv[] )
return 0;
}
gmodelpath = exepath + "models/";
g_config = parse_config( exepath + "config.json" );
if( !g_config )
{
@@ -179,6 +180,7 @@ int main( int argc, char *argv[] )
return -1;
}
std::cout << "device:" << g_config->device << std::endl;
std::cout << "face_detector:" << g_config->models.face_detector << std::endl; std::cout << "face_detector:" << g_config->models.face_detector << std::endl;
std::cout << "face_landmarker81:" << g_config->models.face_landmarker81 << std::endl; std::cout << "face_landmarker81:" << g_config->models.face_landmarker81 << std::endl;
std::cout << "face_landmarker5:" << g_config->models.face_landmarker5 << std::endl; std::cout << "face_landmarker5:" << g_config->models.face_landmarker5 << std::endl;
......
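A hedged example of the model path the factories end up loading; the install directory /opt/faceserver/ is an assumption, while the concatenation and the model file name come from the code and config above:

// gmodelpath = exepath + "models/"  ->  "/opt/faceserver/models/"  (directory assumed)
std::string model_path = gmodelpath + g_config->models.face_detector;
// -> "/opt/faceserver/models/SeetaFaceDetector6.0.IPC.sta"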
@@ -483,4 +483,73 @@ std::string encodetojson(const std::string &str)
}
return oss.str();
}
bool is_space_char(unsigned char c)
{
return (c == 9 || c == 32);
}
int skip_space_chars(const std::string &str, std::string::size_type nbegin)
{
while(is_space_char(str[nbegin]))
{
if(nbegin >= str.length() - 1)
return nbegin;
nbegin++;
}
return nbegin;
}
static int parse_paramter(const std::string &str, std::string &name, std::string &value)
{
std::string::size_type nfind = str.find("=");
if(nfind != std::string::npos)
{
name = str.substr(0, nfind);
value = str.substr(nfind + 1);
name = decodeuricomponent(name);//urldecode(name);
value = decodeuricomponent(value);//urldecode(value);
return 0;
}
return -1;
}
int parse_http_parameters(const std::string & body, std::map<std::string,std::string> &paramters)
{
//LOG(_INFO_,"recv post req:%s ",GETNULLSTR(command));
std::string::size_type nbegin,nend, nfind;
nbegin = nend = 0;
nbegin = skip_space_chars(body, 0);
std::string name,value;
std::string strtmp;
//std::map<std::string, std::string> paramters;
while(1)
{
name = value = "";
nfind = body.find("&", nbegin);
if(nfind != std::string::npos)
{
strtmp = body.substr(nbegin, nfind - nbegin);
if(parse_paramter(strtmp, name, value) >= 0)
{
paramters[name] = value;
}
nbegin = nfind + 1;
continue;
}else if(nbegin < body.length())
{
strtmp = body.substr(nbegin);
if(parse_paramter(strtmp, name, value) >= 0)
{
paramters[name] = value;
}
break;
}else
{
// nothing left after the final '&' (or the body was empty): stop instead of spinning forever
break;
}
}
return paramters.size();
}
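A hedged usage sketch for the new body parser, assuming decodeuricomponent performs ordinary percent-decoding:

std::map<std::string, std::string> params;
int n = parse_http_parameters("imgStr=abc%3D%3D&foo=bar", params);
// n == 2, params["imgStr"] == "abc==", params["foo"] == "bar"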