Merge branch 'feature/logging' into develop
commit 96f9f5d06f
7 changed files with 839 additions and 836 deletions
@@ -224,7 +224,7 @@ void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF
 	featuresFile << "au intensities: " << au_intensities.size() << endl;
 	featuresFile << "{" << endl;
 
-	for (int i = 0; i < au_intensities.size(); ++i)
+	for (size_t i = 0; i < au_intensities.size(); ++i)
 	{
 		// Use matlab format, so + 1
 		featuresFile << au_intensities[i].first << " " << au_intensities[i].second << endl;
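The change in this hunk, and in most hunks below, is the same one-line fix: loop counters that index a std::vector move from int to size_t, so the comparison against .size() (which returns the unsigned size_t) no longer mixes signed and unsigned integers and no longer trips -Wsign-compare / C4018. A minimal standalone sketch of the warning and the fix (the vector here is illustrative, not from the OpenFace sources):

    #include <cstddef>
    #include <iostream>
    #include <vector>

    int main()
    {
        std::vector<double> values = {0.25, 0.5, 0.75};

        // Before: 'int i' vs the unsigned 'values.size()' draws
        // -Wsign-compare (GCC/Clang) or C4018 (MSVC):
        //   for (int i = 0; i < values.size(); ++i) ...

        // After: the counter uses the container's own size type.
        for (size_t i = 0; i < values.size(); ++i)
            std::cout << values[i] << std::endl;
        return 0;
    }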
@@ -236,7 +236,7 @@ void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF
 	featuresFile << "au occurences: " << au_occurences.size() << endl;
 	featuresFile << "{" << endl;
 
-	for (int i = 0; i < au_occurences.size(); ++i)
+	for (size_t i = 0; i < au_occurences.size(); ++i)
 	{
 		// Use matlab format, so + 1
 		featuresFile << au_occurences[i].first << " " << au_occurences[i].second << endl;
@@ -786,7 +786,7 @@ void post_process_output_file(FaceAnalysis::FaceAnalyser& face_analyser, string
 
 	int begin_ind = -1;
 
-	for (int i = 0; i < tokens.size(); ++i)
+	for (size_t i = 0; i < tokens.size(); ++i)
 	{
 		if (tokens[i].find("AU") != string::npos && begin_ind == -1)
 		{
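One subtlety in this hunk: the index i becomes size_t, but begin_ind stays int because -1 serves as a "not found yet" sentinel. The body of the if falls outside the hunk, but if it records the position (begin_ind = i), that assignment narrows unsigned to signed. A hedged sketch of the whole pattern, with an explicit cast to make the narrowing deliberate; the function name is hypothetical:

    #include <cstddef>
    #include <string>
    #include <vector>

    // Illustrative only: mirrors the sentinel pattern in this hunk.
    int find_first_au_column(const std::vector<std::string>& tokens)
    {
        int begin_ind = -1;             // -1 means "no AU column seen yet"
        for (size_t i = 0; i < tokens.size(); ++i)
        {
            if (tokens[i].find("AU") != std::string::npos && begin_ind == -1)
            {
                begin_ind = (int)i;     // explicit narrowing from size_t
            }
        }
        return begin_ind;
    }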
@@ -802,14 +802,14 @@ void post_process_output_file(FaceAnalysis::FaceAnalyser& face_analyser, string
 	outfile << output_file_contents[0].c_str() << endl;
 
 	// Write the contents
-	for (int i = 1; i < output_file_contents.size(); ++i)
+	for (int i = 1; i < (int)output_file_contents.size(); ++i)
 	{
 		std::vector<std::string> tokens;
 		boost::split(tokens, output_file_contents[i], boost::is_any_of(","));
 
 		outfile << tokens[0];
 
-		for (int t = 1; t < tokens.size(); ++t)
+		for (int t = 1; t < (int)tokens.size(); ++t)
 		{
 			if (t >= begin_ind && t < end_ind)
 			{
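This hunk takes the opposite route: the counters stay int and .size() is cast instead. That is a reasonable choice here because t is compared against begin_ind and end_ind, which are signed; making t a size_t would only move the mixed-sign comparison into the loop body. A small sketch of that trade-off (function and parameters are illustrative):

    #include <vector>

    void copy_range(const std::vector<int>& tokens, int begin_ind, int end_ind)
    {
        // Keeping 't' signed avoids a signed/unsigned comparison against
        // begin_ind and end_ind below; the cast silences -Wsign-compare
        // on the loop condition instead.
        for (int t = 1; t < (int)tokens.size(); ++t)
        {
            if (t >= begin_ind && t < end_ind)
            {
                // ... process tokens[t] ...
            }
        }
    }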
File diff suppressed because it is too large
@@ -1595,9 +1595,10 @@ namespace dlib
            explicit literal_assign_helper(matrix* m_): m(m_), r(0), c(0),has_been_used(false) {next();}
            ~literal_assign_helper()
            {
-               DLIB_CASSERT(!has_been_used || r == m->nr(),
-                   "You have used the matrix comma based assignment incorrectly by failing to\n"
-                   "supply a full set of values for every element of a matrix object.\n");
+               assert(!has_been_used || r == m->nr());
+               //DLIB_CASSERT(!has_been_used || r == m->nr(),
+               //    "You have used the matrix comma based assignment incorrectly by failing to\n"
+               //    "supply a full set of values for every element of a matrix object.\n");
            }
 
            const literal_assign_helper& operator, (
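In the two dlib headers the commit swaps DLIB_CASSERT for the standard assert and leaves the original macro commented out. The practical difference, going by dlib's assert.h: DLIB_CASSERT stays enabled even in release builds and reports failures with the supplied message by throwing dlib::fatal_error, whereas assert prints the failed expression and aborts, and compiles to nothing once NDEBUG is defined. A minimal sketch of the assert side (illustrative, not the dlib source):

    #include <cassert>

    // Mirrors the shape of the check in ~literal_assign_helper().
    void check_fully_assigned(bool has_been_used, long r, long expected_rows)
    {
        // Active only when NDEBUG is not defined; on failure it prints
        // the expression, file, and line, then calls abort().
        assert(!has_been_used || r == expected_rows);

        // The replaced form carried an explanatory message instead:
        // DLIB_CASSERT(!has_been_used || r == expected_rows,
        //     "supply a full set of values for every element of a matrix object.");
    }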
@@ -23,9 +23,11 @@ namespace dlib
 
        cv_image (const cv::Mat img)
        {
-           DLIB_CASSERT(img.depth() == cv::DataType<typename pixel_traits<pixel_type>::basic_pixel_type>::depth &&
-               img.channels() == pixel_traits<pixel_type>::num,
-               "The pixel type you gave doesn't match pixel used by the open cv Mat object.");
+           assert(img.depth() == cv::DataType<typename pixel_traits<pixel_type>::basic_pixel_type>::depth &&
+               img.channels() == pixel_traits<pixel_type>::num);
+           //DLIB_CASSERT(img.depth() == cv::DataType<typename pixel_traits<pixel_type>::basic_pixel_type>::depth &&
+           //    img.channels() == pixel_traits<pixel_type>::num,
+           //    "The pixel type you gave doesn't match pixel used by the open cv Mat object.");
            IplImage temp = img;
            init(&temp);
        }
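For context on this second dlib hunk: cv_image wraps an existing cv::Mat as a dlib image without copying, and the (now assert-based) check verifies that the Mat's element depth and channel count match the dlib pixel type requested. A hedged usage sketch, assuming the usual dlib/OpenCV headers are available:

    #include <opencv2/core/core.hpp>
    #include <dlib/opencv.h>

    int main()
    {
        // An 8-bit, 3-channel image: matches dlib::bgr_pixel
        // (depth CV_8U, 3 channels), so the constructor's check passes.
        cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 0, 0));
        dlib::cv_image<dlib::bgr_pixel> img(frame);

        // Constructing dlib::cv_image<unsigned char> from this 3-channel
        // Mat would fail the depth/channel assertion instead.
        return 0;
    }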
@@ -707,7 +707,7 @@ void FaceAnalyser::ExtractAllPredictionsOfflineReg(vector<std::pair<std::string,
 
 	// Find the current id of the AU and the corresponding cutoff
 	int au_id = -1;
-	for (int a = 0; a < dyn_au_names.size(); ++a)
+	for (size_t a = 0; a < dyn_au_names.size(); ++a)
 	{
 		if (au_name.compare(dyn_au_names[a]) == 0)
 		{
@@ -645,7 +645,7 @@ bool CLNF::DetectLandmarks(const cv::Mat_<uchar> &image, const cv::Mat_<float> &
 	if(parts_used)
 	{
 
-		for (int part_model = 0; part_model < hierarchical_models.size(); ++part_model)
+		for (size_t part_model = 0; part_model < hierarchical_models.size(); ++part_model)
 		{
 			vector<pair<int, int>> mappings = this->hierarchical_mapping[part_model];
 