Commit 0e485ae2 by wangquyuan

add by wqy

1 parent 262f2763
......@@ -101,7 +101,7 @@ typedef websocketpp::server<bone_config> httpserver;
int init_engine();
void on_http( httpserver *s, websocketpp::connection_hdl hdl );
......
......@@ -5,6 +5,9 @@
#include <string>
#include <vector>
std::string floattostring(float value);
std::string get_uuid(const std::string &prefix);
std::string create_transactionid(const std::string &prefix);
std::string get_format_date();
......
......@@ -59,7 +59,7 @@ using std::string;
extern Config *g_config;
extern std::string gmodelpath;
std::mutex g_mutex;
//std::mutex g_mutex;
/*
std::map<std::string, PotInfo *> g_pots;
......@@ -84,6 +84,27 @@ using websocketpp::lib::placeholders::_1;
using websocketpp::lib::placeholders::_2;
using websocketpp::lib::bind;
seeta::FaceTracker *g_track = NULL;//create_face_detector();
seeta::FaceDetector *g_fd = NULL;//create_face_detector();
seeta::FaceLandmarker *g_pd5 = NULL; //create_face_landmarker();
seeta::FaceLandmarker *g_pd81 = NULL; //create_face_landmarker();
seeta::FaceRecognizer *g_fr = NULL; //create_face_recognizer();
seeta::PoseEstimator *g_pose = NULL;
seeta::ActionUnit *g_action = NULL;
seeta::EmotionRecognizer *g_emotion = NULL;
seeta::EyeStateDetector *g_eye = NULL;
/////////////////////////////////
std::mutex g_fd_lock;
std::mutex g_track_lock;
std::mutex g_pd5_lock;
std::mutex g_pd81_lock;
std::mutex g_fr_lock;
std::mutex g_pose_lock;
std::mutex g_action_lock;
std::mutex g_emotion_lock;
std::mutex g_eye_lock;
// pull out the type of messages sent by our config
//typedef server::message_ptr message_ptr;
......@@ -121,12 +142,12 @@ static seeta::FaceDetector* create_face_detector()
{
seeta::ModelSetting fd_model;
fd_model.append(gmodelpath + g_config->models.face_detector);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
fd_model.set_device( seeta::ModelSetting::CPU );
fd_model.set_device( seeta::ModelSetting::GPU );
}else
{
fd_model.set_device( seeta::ModelSetting::GPU );
fd_model.set_device( seeta::ModelSetting::CPU );
}
fd_model.set_id(0);
seeta::FaceDetector *m_fd = new seeta::FaceDetector(fd_model);
......@@ -138,12 +159,12 @@ static seeta::FaceTracker * create_face_tracker(int width, int height)
{
seeta::ModelSetting fd_model;
fd_model.append(gmodelpath + g_config->models.face_detector);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
fd_model.set_device( seeta::ModelSetting::CPU );
fd_model.set_device( seeta::ModelSetting::GPU );
}else
{
fd_model.set_device( seeta::ModelSetting::GPU );
fd_model.set_device( seeta::ModelSetting::CPU );
}
fd_model.set_id(0);
seeta::FaceTracker *m_tracker = new seeta::FaceTracker(fd_model, width, height);
......@@ -155,12 +176,12 @@ static seeta::FaceLandmarker * create_face_landmarker5()
{
seeta::ModelSetting pd_model;
pd_model.append(gmodelpath + g_config->models.face_landmarker5);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
pd_model.set_device( seeta::ModelSetting::CPU );
pd_model.set_device( seeta::ModelSetting::GPU );
}else
{
pd_model.set_device( seeta::ModelSetting::GPU );
pd_model.set_device( seeta::ModelSetting::CPU );
}
pd_model.set_id(0);
seeta::FaceLandmarker *m_pd5 = new seeta::FaceLandmarker(pd_model);
......@@ -171,12 +192,12 @@ static seeta::FaceLandmarker * create_face_landmarker81()
{
seeta::ModelSetting pd_model;
pd_model.append(gmodelpath + g_config->models.face_landmarker81);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
pd_model.set_device( seeta::ModelSetting::CPU );
pd_model.set_device( seeta::ModelSetting::GPU );
}else
{
pd_model.set_device( seeta::ModelSetting::GPU );
pd_model.set_device( seeta::ModelSetting::CPU );
}
pd_model.set_id(0);
seeta::FaceLandmarker *m_pd81 = new seeta::FaceLandmarker(pd_model);
......@@ -188,12 +209,12 @@ static seeta::FaceRecognizer * create_face_recognizer()
{
seeta::ModelSetting fr_model;
fr_model.append(gmodelpath + g_config->models.face_recognizer);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
fr_model.set_device( seeta::ModelSetting::CPU );
fr_model.set_device( seeta::ModelSetting::GPU );
}else
{
fr_model.set_device( seeta::ModelSetting::GPU );
fr_model.set_device( seeta::ModelSetting::CPU );
}
fr_model.set_id(0);
seeta::FaceRecognizer *m_fr = new seeta::FaceRecognizer(fr_model);
......@@ -204,12 +225,12 @@ static seeta::PoseEstimator* create_pose_estimator()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.pose_model);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
pose_model.set_device( seeta::ModelSetting::GPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
pose_model.set_device( seeta::ModelSetting::CPU );
}
pose_model.set_id(0);
seeta::PoseEstimator *m_pose = new seeta::PoseEstimator(pose_model);
......@@ -221,12 +242,12 @@ static seeta::ActionUnit* create_actionunit()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.actionunit_model);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
pose_model.set_device( seeta::ModelSetting::GPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
pose_model.set_device( seeta::ModelSetting::CPU );
}
pose_model.set_id(0);
seeta::ActionUnit *m_pose = new seeta::ActionUnit(pose_model);
......@@ -239,12 +260,12 @@ static seeta::EmotionRecognizer* create_emotion_recognizer()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.emotion_model);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
pose_model.set_device( seeta::ModelSetting::GPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
pose_model.set_device( seeta::ModelSetting::CPU );
}
pose_model.set_id(0);
seeta::EmotionRecognizer *m_pose = new seeta::EmotionRecognizer(pose_model);
......@@ -255,26 +276,219 @@ static seeta::EyeStateDetector* create_eye_detection()
{
seeta::ModelSetting pose_model;
pose_model.append(gmodelpath + g_config->models.eye_model);
if (g_config->device == "cpu" )
if (g_config->device == "gpu" )
{
pose_model.set_device( seeta::ModelSetting::CPU );
pose_model.set_device( seeta::ModelSetting::GPU );
}else
{
pose_model.set_device( seeta::ModelSetting::GPU );
pose_model.set_device( seeta::ModelSetting::CPU );
}
pose_model.set_id(0);
seeta::EyeStateDetector *m_pose = new seeta::EyeStateDetector(pose_model);
return m_pose;
}
// Creates every global SeetaFace engine instance used by the HTTP handlers.
// Returns 0 on success; -1 if any factory returns NULL or throws.
// NOTE(review): on failure, engines created before the failing one are left
// allocated in their globals — acceptable since the caller exits the process.
int init_engine()
{
    try
    {
        if ((g_fd = create_face_detector()) == NULL)
            return -1;
        if ((g_track = create_face_tracker(400, 400)) == NULL)
            return -1;
        if ((g_pd5 = create_face_landmarker5()) == NULL)
            return -1;
        if ((g_pd81 = create_face_landmarker81()) == NULL)
            return -1;
        if ((g_fr = create_face_recognizer()) == NULL)
            return -1;
        if ((g_pose = create_pose_estimator()) == NULL)
            return -1;
        if ((g_action = create_actionunit()) == NULL)
            return -1;
        if ((g_emotion = create_emotion_recognizer()) == NULL)
            return -1;
        if ((g_eye = create_eye_detection()) == NULL)
            return -1;
    }
    catch (std::exception &e)
    {
        LOG(_ERROR_, "init engine failed:%s", e.what());
        return -1;
    }
    return 0;
}
// Thread-safe wrapper around the shared face-detector engine.
// Serializes access via g_fd_lock; on exception the error is logged and a
// default-constructed (empty) result is returned.
static SeetaFaceInfoArray face_detector(const SeetaImageData &image)
{
    SeetaFaceInfoArray result;
    std::lock_guard<std::mutex> lock(g_fd_lock);
    try
    {
        result = g_fd->detect(image);
    }
    catch (std::exception &ex)
    {
        LOG(_ERROR_,"face detector exception:%s", ex.what());
    }
    return result;
}
// Thread-safe wrapper around the shared face-tracker engine.
// Serializes access via g_track_lock; on exception the error is logged and a
// default-constructed (empty) result is returned.
static SeetaTrackingFaceInfoArray face_tracker(const SeetaImageData &image)
{
    SeetaTrackingFaceInfoArray result;
    std::lock_guard<std::mutex> lock(g_track_lock);
    try
    {
        result = g_track->Track(image);
    }
    catch (std::exception &ex)
    {
        LOG(_ERROR_,"face tracker exception:%s", ex.what());
    }
    return result;
}
// Thread-safe 5-point landmark detection for one face rectangle.
// `points` must have room for 5 SeetaPointF entries; on exception the error is
// logged and the output buffer is left untouched.
static void face_landmarker5(const SeetaImageData &image, const SeetaRect &face, SeetaPointF *points)
{
    std::lock_guard<std::mutex> lock(g_pd5_lock);
    try
    {
        g_pd5->mark(image, face, points);
    }
    catch (std::exception &ex)
    {
        LOG(_ERROR_,"face landmarker 5 exception:%s", ex.what());
    }
}
// Thread-safe 81-point landmark detection for one face rectangle.
// `points` must have room for 81 SeetaPointF entries; on exception the error is
// logged and the output buffer is left untouched.
static void face_landmarker81(const SeetaImageData &image, const SeetaRect &face, SeetaPointF *points)
{
    std::lock_guard<std::mutex> lock(g_pd81_lock);
    try
    {
        g_pd81->mark(image, face, points);
    }
    catch (std::exception &ex)
    {
        LOG(_ERROR_,"face landmarker 81 exception:%s", ex.what());
    }
}
// Thread-safe feature extraction for the face aligned by `points`.
// Returns the embedding vector sized per the model; if Extract() reports
// failure a warning is logged and the (zero-initialized) vector is still
// returned — original behavior, preserved.
static std::vector<float> face_recognizer(const SeetaImageData &image, const SeetaPointF *points)
{
    std::vector<float> embedding;
    std::lock_guard<std::mutex> lock(g_fr_lock);
    try
    {
        embedding.resize(g_fr->GetExtractFeatureSize());
        const bool ok = g_fr->Extract(image, points, embedding.data());
        if (!ok)
        {
            LOG(_WARN_,"face recognizer failed");
        }
    }
    catch (std::exception &ex)
    {
        LOG(_ERROR_,"face recognizer exception:%s", ex.what());
    }
    return embedding;
}
// Thread-safe head-pose estimation for one face rectangle.
// Writes yaw/pitch/roll (degrees, per the SeetaFace API) through the out
// pointers; on exception the error is logged and the outputs are untouched.
static void pose_estimate(const SeetaImageData &image, const SeetaRect face, float *yaw, float *pitch, float *roll)
{
    std::lock_guard<std::mutex> lock(g_pose_lock);
    try
    {
        g_pose->Estimate(image, face, yaw, pitch, roll);
    }
    catch (std::exception &ex)
    {
        LOG(_ERROR_,"pose estimate exception:%s", ex.what());
    }
}
// Thread-safe open/closed eye-state detection for the face aligned by `points`.
// Writes the left/right states through the reference parameters; on exception
// the error is logged and the outputs are untouched.
static void eye_detect(const SeetaImageData &image, const SeetaPointF *points, seeta::EyeStateDetector::EYE_STATE &leftstate, seeta::EyeStateDetector::EYE_STATE &rightstate)
{
    std::lock_guard<std::mutex> lock(g_eye_lock);
    try
    {
        g_eye->Detect(image, points, leftstate, rightstate);
    }
    catch (std::exception &ex)
    {
        LOG(_ERROR_,"pose eye detector exception:%s", ex.what());
    }
}
// Thread-safe action-unit feature extraction for the face aligned by `points`.
// Returns the per-unit feature vector (size given by the model); on exception
// the error is logged and the vector may be empty.
// Fix: removed a leftover debug std::cout that wrote to stdout on every
// request — while holding g_action_lock — spamming the server console.
static std::vector<float> face_action(const SeetaImageData &image, const SeetaPointF *points)
{
    std::vector<float> features;
    // Serialize access: the underlying model object is shared by all requests.
    std::lock_guard<std::mutex> guard( g_action_lock );
    try
    {
        features.resize(g_action->GetExtractFeatureSize());
        g_action->Extract(image, points, features.data());
    }catch(std::exception &e)
    {
        LOG(_ERROR_,"face action detector exception:%s", e.what());
    }
    return features;
}
// Thread-safe emotion recognition for the face aligned by `points`.
// Returns one score per emotion class (size = emotion_count()); on exception
// the error is logged and the vector may be empty.
// Fix: removed a leftover debug std::cout that wrote to stdout on every
// request — while holding g_emotion_lock — spamming the server console.
static std::vector<float> face_emotion(const SeetaImageData &image, const SeetaPointF *points)
{
    std::vector<float> features;
    // Serialize access: the underlying model object is shared by all requests.
    std::lock_guard<std::mutex> guard( g_emotion_lock );
    try
    {
        features.resize(g_emotion->emotion_count());
        g_emotion->recognize_emotion(image, points, features.data());
    }catch(std::exception &e)
    {
        LOG(_ERROR_,"face emotion detector exception:%s", e.what());
    }
    return features;
}
static std::string do_query_face_feature( const std::string &body )
{
std::string strresponse;
seeta::FaceDetector *fd = NULL;//create_face_detector();
seeta::FaceLandmarker *pd = NULL; //create_face_landmarker();
seeta::FaceRecognizer *fr = NULL; //create_face_recognizer();
try
{
std::map<std::string, std::string> parameters;
......@@ -309,29 +523,87 @@ static std::string do_query_face_feature( const std::string &body )
img.channels = mat.channels();
img.data = mat.data;
//seeta::FaceDetector *fd = NULL;//create_face_detector();
//seeta::FaceLandmarker *pd = NULL; //create_face_landmarker();
//seeta::FaceRecognizer *fr = NULL; //create_face_recognizer();
do
{
auto faces = face_detector(img);//fd->detect(img);
if (faces.size <= 0) {
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1) + "\"}";
break;
}
int index = 0;
int maxarea = faces.data[0].pos.width * faces.data[0].pos.height;
for (int i=1; i<faces.size; i++)
{
if(maxarea < faces.data[i].pos.width * faces.data[i].pos.height)
{
index = i;
maxarea = faces.data[i].pos.width * faces.data[i].pos.height;
}
}
SeetaPointF points[5];
face_landmarker5(img, faces.data[index].pos, points);
std::vector<float> features = face_recognizer(img, points);
std::string strtmp((const char *)features.data(), int(features.size() * sizeof(float)));
std::string enbase64 = base64_encode(strtmp);
strresponse = "{\"code\":0,\"msg\":\"" + GetError(0) + "\",\"data\":\"";
strresponse += enbase64 + "\"}";
}while(0);
fd = create_face_detector();
if (!fd ) {
strresponse = "{\"code\":2,\"msg\":\"" + GetError( 2) + "\"}";
break;
}
pd = create_face_landmarker5();
if (!pd ) {
strresponse = "{\"code\":2,\"msg\":\"" + GetError( 2) + "\"}";
break;
catch( std::exception &e )
{
LOG( _ERROR_, "parse message failed:%s", GETNULLPTR( e.what() ) );
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1 ) + "\"}";
}
fr = create_face_recognizer();
if (!fr ) {
strresponse = "{\"code\":2,\"msg\":\"" + GetError( 2) + "\"}";
break;
return strresponse;
}
static std::string do_query_face_action( const std::string &body )
{
std::string strresponse;
try
{
std::map<std::string, std::string> parameters;
int n = parse_http_parameters(body, parameters);
std::map<std::string, std::string>::iterator iter;
iter = parameters.find("imgStr");
if (iter == parameters.end())
{
LOG( _ERROR_, "do not find the parameter imgStr" );
strresponse = "{\"code\":4,\"msg\":\"" + GetError( 4 ) + "\"}";
return strresponse;
}
std::string strbase64 = base64_decode( iter->second );
std::vector<unsigned char> imagedatas( strbase64.begin(), strbase64.end() );
cv::Mat mat = cv::imdecode( imagedatas, 1 ); //COLOR_LOAD_IMAGE_COLOR);
std::cout << "------cv::imdecode---begin" << std::endl;
if( !mat.data )
{
std::cout << "------cv::imdecode---failed" << std::endl;
strresponse = "{\"code\":7,\"msg\":\"" + GetError( 7) + "\"}";
return strresponse;
}
auto faces = fd->detect(img);
std::cout << "------cv::imdecode---end" << std::endl;
SeetaImageData img;
img.width = mat.cols;
img.height = mat.rows;
img.channels = mat.channels();
img.data = mat.data;
do
{
auto faces = face_detector(img);//fd->detect(img);
if (faces.size <= 0) {
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1) + "\"}";
break;
......@@ -349,48 +621,116 @@ static std::string do_query_face_feature( const std::string &body )
}
SeetaPointF points[5];
pd->mark(img, faces.data[index].pos, points);
face_landmarker5(img, faces.data[index].pos, points);
std::vector<float> features = face_action(img, points);
//std::string strtmp((const char *)features.data(), int(features.size() * sizeof(float)));
//std::string enbase64 = base64_encode(strtmp);
std::vector<float> features(fr->GetExtractFeatureSize());
if(!fr->Extract(img, points, features.data()))
strresponse = "{\"code\":0,\"msg\":\"" + GetError(0) + "\",\"data\":[";
for(int i=0; i<features.size(); i++)
{
strresponse = "{\"code\":3,\"msg\":\"" + GetError( 3) + "\"}";
break;
if( i== 11) {
continue;
}
std::string strtmp((const char *)features.data(), int(features.size() * sizeof(float)));
std::string enbase64 = base64_encode(strtmp);
strresponse = "{\"code\":0,\"msg\":\"" + GetError(0) + "\",\"data\":\"";
strresponse += enbase64 + "\"}";
if( i > 0)
{
strresponse += ",";
}
strresponse += floattostring(features[i]);
}
strresponse += "]}";
}while(0);
}
catch( std::exception &e )
{
LOG( _ERROR_, "parse message failed:%s", GETNULLPTR( e.what() ) );
//std::cout << "parse message failed:" << e.what() << std::endl;
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1 ) + "\"}";
}
return strresponse;
}
static std::string do_query_face_emotion( const std::string &body )
{
std::string strresponse;
if(!fd)
try
{
delete fd;
fd = NULL;
std::map<std::string, std::string> parameters;
int n = parse_http_parameters(body, parameters);
std::map<std::string, std::string>::iterator iter;
iter = parameters.find("imgStr");
if (iter == parameters.end())
{
LOG( _ERROR_, "do not find the parameter imgStr" );
strresponse = "{\"code\":4,\"msg\":\"" + GetError( 4 ) + "\"}";
return strresponse;
}
if(!pd)
std::string strbase64 = base64_decode( iter->second );
std::vector<unsigned char> imagedatas( strbase64.begin(), strbase64.end() );
cv::Mat mat = cv::imdecode( imagedatas, 1 ); //COLOR_LOAD_IMAGE_COLOR);
std::cout << "------cv::imdecode---begin" << std::endl;
if( !mat.data )
{
delete pd;
pd = NULL;
std::cout << "------cv::imdecode---failed" << std::endl;
strresponse = "{\"code\":7,\"msg\":\"" + GetError( 7) + "\"}";
return strresponse;
}
if(!fr)
std::cout << "------cv::imdecode---end" << std::endl;
SeetaImageData img;
img.width = mat.cols;
img.height = mat.rows;
img.channels = mat.channels();
img.data = mat.data;
do
{
delete fr;
fr = NULL;
auto faces = face_detector(img);//fd->detect(img);
if (faces.size <= 0) {
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1) + "\"}";
break;
}
int index = 0;
int maxarea = faces.data[0].pos.width * faces.data[0].pos.height;
for (int i=1; i<faces.size; i++)
{
if(maxarea < faces.data[i].pos.width * faces.data[i].pos.height)
{
index = i;
maxarea = faces.data[i].pos.width * faces.data[i].pos.height;
}
}
SeetaPointF points[5];
face_landmarker5(img, faces.data[index].pos, points);
std::vector<float> features = face_emotion(img, points);
strresponse = "{\"code\":0,\"msg\":\"" + GetError(0) + "\",\"data\":[";
for(int i=0; i<features.size(); i++)
{
if( i > 0)
{
strresponse += ",";
}
strresponse += floattostring(features[i]);
}
strresponse += "]}";
}while(0);
}
catch( std::exception &e )
{
LOG( _ERROR_, "parse message failed:%s", GETNULLPTR( e.what() ) );
strresponse = "{\"code\":1,\"msg\":\"" + GetError( 1 ) + "\"}";
}
return strresponse;
}
......@@ -443,6 +783,16 @@ void on_http( httpserver *s, websocketpp::connection_hdl hdl )
strresponse = do_query_face_feature( strBody );
create_http_response( con, strresponse, 200 );
}
else if( strUri == "/query/action" )
{
strresponse = do_query_face_action( strBody );
create_http_response( con, strresponse, 200 );
}
else if( strUri == "/query/emotion" )
{
strresponse = do_query_face_emotion( strBody );
create_http_response( con, strresponse, 200 );
}
else
{
......
......@@ -223,6 +223,13 @@ int main( int argc, char *argv[] )
CSimpleLog::instance()->setDebugMode( true );
if (init_engine() < 0 )
{
std::cout << "init engine failed, system exited!" << std::endl;
LOG(_ERROR_, "init engine failed, system exited");
return -1;
}
/*
CMysqlConnectPool *pmysqlclient = CMysqlConnectPool::GetInstance();
if( pmysqlclient == nullptr )
......
......@@ -16,6 +16,16 @@
#include <sstream>
#include <string.h>
// Formats a float with exactly two decimal places, e.g. 1.5 -> "1.50".
std::string floattostring(float value)
{
    char text[100] = {0};
    // snprintf NUL-terminates within the given size; -1 keeps the original
    // extra safety margin.
    snprintf(text, sizeof(text) - 1, "%0.2f", value);
    return std::string(text);
}
std::string get_uuid( const std::string &prefix )
{
std::string str = "";
......@@ -44,26 +54,6 @@ std::string get_uuid( const std::string &prefix )
return str;
}
// Builds a transaction id: the 32-character lowercase hex MD5 of a freshly
// generated UUID, prepended with `prefix`.
// NOTE(review): MD5() presumably comes from OpenSSL (<openssl/md5.h>) —
// confirm; get_uuid is the sibling helper declared earlier in this header set.
std::string create_transactionid( const std::string &prefix )
{
// Fresh UUID (no prefix) is the hash input, so ids are effectively unique.
std::string id = get_uuid( "" );
unsigned char MD5result[16];
char res[100];
memset( res, 0, sizeof( res ) );
MD5( ( unsigned char * ) id.c_str(), id.length(), MD5result );
// Render the 16 digest bytes as 32 hex characters (2 chars per byte).
for( int i = 0; i < 16; i++ )
{
sprintf( res + i * 2, "%02x", MD5result[i] );
}
std::string str( res, 32 );
str = prefix + str;
return str;
}
std::string get_format_date()
{
std::time_t t = std::time( nullptr );
......
No preview for this file type
./test -t emotion -h http://192.168.1.33:4000 -f /wqy/Downloads/test2.jpeg
./test -t action -h http://192.168.1.33:4000 -f /wqy/Downloads/test2.jpeg
./test -t feature -h http://192.168.1.33:4000 -f /wqy/Downloads/test2.jpeg
ffmpeg -t 20 -f v4l2 -i /dev/video0 -r 20 -f mp4 cap1.mp4
ffplay ./cap1.mp4
No preview for this file type
package main
import (
"fmt"
"io/ioutil"
"net/http"
//"net/url"
"strings"
//"sync"
"encoding/json"
//"strconv"
//"time"
//"math/rand"
//"crypto/md5"
//"bytes"
//"os"
"encoding/base64"
"flag"
)
// Response is the JSON reply from /query/feature: Data carries the
// base64-encoded feature blob returned by the server.
type Response struct {
Code int `json:"code"`
Msg string `json:"msg"`
Data string `json:"data"`
}
// Response2 is the JSON reply from /query/action and /query/emotion:
// Data carries the numeric score array instead of a base64 string.
type Response2 struct {
Code int `json:"code"`
Msg string `json:"msg"`
Data []float32 `json:"data"`
}
// get_feature POSTs the base64-encoded image in `filename` to
// url+"/query/feature" as a form body ("imgStr=<base64>") and prints the
// server's JSON reply. Returns false on any I/O or decode error.
// Fix: corrected the user-facing typo "falied" -> "failed" and removed
// large blocks of dead commented-out code.
// NOTE(review): like the original, this returns true even when the server
// reports res.Code != 0 — callers ignore the result, so behavior is kept.
func get_feature(filename, url string) bool {
	data1, err := ioutil.ReadFile(filename)
	if err != nil {
		fmt.Println("read file failed: ", err.Error())
		return false
	}
	// The server expects a form-encoded body: imgStr=<base64 image>.
	strdata := base64.StdEncoding.EncodeToString(data1)
	data := "imgStr=" + strdata
	seq := url + "/query/feature"
	resp, err := http.Post(seq, "application/x-www-form-urlencoded",
		strings.NewReader(data))
	if err != nil {
		fmt.Println("send request error: ", err)
		return false
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("LoginSeetaAuthCenter ReadAll failed: ", err)
		return false
	}
	fmt.Println(string(body))
	var res Response
	err = json.Unmarshal(body, &res)
	if err != nil {
		fmt.Println("Unmarshal error:", err)
		return false
	}
	if res.Code == 0 {
		fmt.Println("face detector ok")
	} else {
		fmt.Println("face detector failed")
	}
	return true
}
// get_action POSTs the base64-encoded image in `filename` to
// url+"/query/action" as a form body ("imgStr=<base64>") and prints the
// server's JSON reply (action-unit scores). Returns false on any I/O or
// decode error.
// Fix: corrected the user-facing typo "falied" -> "failed" and removed
// large blocks of dead commented-out code.
// NOTE(review): like the original, this returns true even when the server
// reports res.Code != 0 — callers ignore the result, so behavior is kept.
func get_action(filename, url string) bool {
	data1, err := ioutil.ReadFile(filename)
	if err != nil {
		fmt.Println("read file failed: ", err.Error())
		return false
	}
	// The server expects a form-encoded body: imgStr=<base64 image>.
	strdata := base64.StdEncoding.EncodeToString(data1)
	data := "imgStr=" + strdata
	seq := url + "/query/action"
	resp, err := http.Post(seq, "application/x-www-form-urlencoded",
		strings.NewReader(data))
	if err != nil {
		fmt.Println("send request error: ", err)
		return false
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("LoginSeetaAuthCenter ReadAll failed: ", err)
		return false
	}
	fmt.Println(string(body))
	var res Response2
	err = json.Unmarshal(body, &res)
	if err != nil {
		fmt.Println("Unmarshal error:", err)
		return false
	}
	if res.Code == 0 {
		fmt.Println("face detector ok")
		fmt.Println(res)
	} else {
		fmt.Println("face detector failed")
	}
	return true
}
// get_emotion POSTs the base64-encoded image in `filename` to
// url+"/query/emotion" as a form body ("imgStr=<base64>") and prints the
// server's JSON reply (per-emotion scores). Returns false on any I/O or
// decode error.
// Fix: corrected the user-facing typo "falied" -> "failed" and removed
// large blocks of dead commented-out code.
// NOTE(review): like the original, this returns true even when the server
// reports res.Code != 0 — callers ignore the result, so behavior is kept.
func get_emotion(filename, url string) bool {
	data1, err := ioutil.ReadFile(filename)
	if err != nil {
		fmt.Println("read file failed: ", err.Error())
		return false
	}
	// The server expects a form-encoded body: imgStr=<base64 image>.
	strdata := base64.StdEncoding.EncodeToString(data1)
	data := "imgStr=" + strdata
	seq := url + "/query/emotion"
	resp, err := http.Post(seq, "application/x-www-form-urlencoded",
		strings.NewReader(data))
	if err != nil {
		fmt.Println("send request error: ", err)
		return false
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("LoginSeetaAuthCenter ReadAll failed: ", err)
		return false
	}
	fmt.Println(string(body))
	var res Response2
	err = json.Unmarshal(body, &res)
	if err != nil {
		fmt.Println("Unmarshal error:", err)
		return false
	}
	if res.Code == 0 {
		fmt.Println("face detector ok")
		fmt.Println(res)
	} else {
		fmt.Println("face detector failed")
	}
	return true
}
// Command-line options, populated in init() via the flag package.
var gname string // -t: request type (feature, action, emotion, video)
var gfilename string // -f: path of the image file to upload
var gurl string // -h: base URL of the face service
// init registers the command-line flags before main runs.
func init() {
// -t selects which endpoint to exercise.
flag.StringVar(&gname, "t", "", "feature,action,emotion,video")
// -f is the image sent (base64-encoded) in the request body.
flag.StringVar(&gfilename, "f", "", "image file")
// -h is the server base URL; default targets a local instance.
flag.StringVar(&gurl, "h", "http://127.0.0.1:60000", "http url")
}
// main parses the flags, echoes them, and dispatches one test request of the
// selected type against the configured server URL.
func main() {
	flag.Parse()
	fmt.Println("name:" + gname)
	fmt.Println("filename:" + gfilename)
	fmt.Println("url:" + gurl)
	switch gname {
	case "feature":
		get_feature(gfilename, gurl)
	case "action":
		get_action(gfilename, gurl)
	case "emotion":
		get_emotion(gfilename, gurl)
	default:
		fmt.Println("not support type:", gname)
	}
}
Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!