Merge branch 'feature/logging' into develop

Tadas Baltrusaitis 2016-09-06 10:26:10 -04:00
commit 96f9f5d06f
7 changed files with 839 additions and 836 deletions

@@ -224,7 +224,7 @@ void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF
     featuresFile << "au intensities: " << au_intensities.size() << endl;
     featuresFile << "{" << endl;
-    for (int i = 0; i < au_intensities.size(); ++i)
+    for (size_t i = 0; i < au_intensities.size(); ++i)
     {
         // Use matlab format, so + 1
         featuresFile << au_intensities[i].first << " " << au_intensities[i].second << endl;
@@ -236,7 +236,7 @@ void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF
     featuresFile << "au occurences: " << au_occurences.size() << endl;
     featuresFile << "{" << endl;
-    for (int i = 0; i < au_occurences.size(); ++i)
+    for (size_t i = 0; i < au_occurences.size(); ++i)
     {
         // Use matlab format, so + 1
         featuresFile << au_occurences[i].first << " " << au_occurences[i].second << endl;
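
The two hunks above switch the loop counter from int to size_t so that the comparison against std::vector::size(), which returns an unsigned type, no longer mixes signed and unsigned values (and no longer provokes sign-compare warnings). A minimal standalone sketch of the pattern, using a hypothetical stand-in for the au_intensities / au_occurences vectors:

    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    int main()
    {
        // Hypothetical stand-in for au_intensities / au_occurences.
        std::vector<std::pair<std::string, double>> aus = { {"AU01", 0.4}, {"AU12", 1.7} };

        // size() returns an unsigned size_type, so the counter is size_t as well;
        // an int counter here would be compared against an unsigned value.
        for (size_t i = 0; i < aus.size(); ++i)
        {
            std::cout << aus[i].first << " " << aus[i].second << std::endl;
        }
        return 0;
    }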

@@ -786,7 +786,7 @@ void post_process_output_file(FaceAnalysis::FaceAnalyser& face_analyser, string
     int begin_ind = -1;
-    for (int i = 0; i < tokens.size(); ++i)
+    for (size_t i = 0; i < tokens.size(); ++i)
     {
         if (tokens[i].find("AU") != string::npos && begin_ind == -1)
         {
@@ -802,14 +802,14 @@ void post_process_output_file(FaceAnalysis::FaceAnalyser& face_analyser, string
     outfile << output_file_contents[0].c_str() << endl;
     // Write the contents
-    for (int i = 1; i < output_file_contents.size(); ++i)
+    for (int i = 1; i < (int)output_file_contents.size(); ++i)
     {
         std::vector<std::string> tokens;
         boost::split(tokens, output_file_contents[i], boost::is_any_of(","));
         outfile << tokens[0];
-        for (int t = 1; t < tokens.size(); ++t)
+        for (int t = 1; t < (int)tokens.size(); ++t)
        {
            if (t >= begin_ind && t < end_ind)
            {
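
This hunk takes a different route: the loop counters stay int and size() is cast to (int) instead, presumably because the indices are compared against the int variables begin_ind and end_ind. A rough sketch of that trade-off, with hypothetical names:

    #include <string>
    #include <vector>

    // Hypothetical sketch: when the index has to be compared with existing int
    // bounds, casting size() once keeps the whole comparison signed instead of
    // converting begin_ind/end_ind and the loop to size_t.
    void copy_au_columns(const std::vector<std::string>& tokens, int begin_ind, int end_ind)
    {
        for (int t = 1; t < (int)tokens.size(); ++t)
        {
            if (t >= begin_ind && t < end_ind)
            {
                // an output stream would receive tokens[t] here
            }
        }
    }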

File diff suppressed because it is too large.

@@ -1595,9 +1595,10 @@ namespace dlib
         explicit literal_assign_helper(matrix* m_): m(m_), r(0), c(0),has_been_used(false) {next();}
         ~literal_assign_helper()
         {
-            DLIB_CASSERT(!has_been_used || r == m->nr(),
-                "You have used the matrix comma based assignment incorrectly by failing to\n"
-                "supply a full set of values for every element of a matrix object.\n");
+            assert(!has_been_used || r == m->nr());
+            //DLIB_CASSERT(!has_been_used || r == m->nr(),
+            //    "You have used the matrix comma based assignment incorrectly by failing to\n"
+            //    "supply a full set of values for every element of a matrix object.\n");
         }
         const literal_assign_helper& operator, (
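
Here the DLIB_CASSERT in the literal_assign_helper destructor is replaced with a plain assert, with the original macro left commented out. One behavioural difference worth noting: assert is compiled out when NDEBUG is defined and reports only the stringified condition, whereas DLIB_CASSERT always fires and prints dlib's longer diagnostic. A minimal sketch of the replacement check, assuming only <cassert> and hypothetical parameter names:

    #include <cassert>

    // Hypothetical sketch of the same completeness check: it only fires in
    // debug builds (NDEBUG not defined), and on failure prints the condition
    // text rather than dlib's explanatory message.
    void check_comma_assignment_complete(bool has_been_used, long rows_written, long expected_rows)
    {
        assert(!has_been_used || rows_written == expected_rows);
    }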

@@ -23,9 +23,11 @@ namespace dlib
     cv_image (const cv::Mat img)
     {
-        DLIB_CASSERT(img.depth() == cv::DataType<typename pixel_traits<pixel_type>::basic_pixel_type>::depth &&
-            img.channels() == pixel_traits<pixel_type>::num,
-            "The pixel type you gave doesn't match pixel used by the open cv Mat object.");
+        assert(img.depth() == cv::DataType<typename pixel_traits<pixel_type>::basic_pixel_type>::depth &&
+            img.channels() == pixel_traits<pixel_type>::num);
+        //DLIB_CASSERT(img.depth() == cv::DataType<typename pixel_traits<pixel_type>::basic_pixel_type>::depth &&
+        //    img.channels() == pixel_traits<pixel_type>::num,
+        //    "The pixel type you gave doesn't match pixel used by the open cv Mat object.");
         IplImage temp = img;
         init(&temp);
     }
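
The same assert-for-DLIB_CASSERT swap is applied in the cv_image constructor, which checks that the wrapped cv::Mat has the depth and channel count implied by the requested dlib pixel type. A simplified sketch of that precondition written against plain OpenCV types, assuming a 3-channel 8-bit image meant for a BGR pixel type:

    #include <cassert>
    #include <opencv2/core/core.hpp>

    // Hypothetical sketch: for an image intended to be wrapped as a BGR pixel
    // type, the Mat must be 8-bit with 3 channels; as with any assert, the
    // check vanishes when NDEBUG is defined.
    void check_mat_is_8u_bgr(const cv::Mat& img)
    {
        assert(img.depth() == CV_8U && img.channels() == 3);
    }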

@@ -707,7 +707,7 @@ void FaceAnalyser::ExtractAllPredictionsOfflineReg(vector<std::pair<std::string,
     // Find the current id of the AU and the corresponding cutoff
     int au_id = -1;
-    for (int a = 0; a < dyn_au_names.size(); ++a)
+    for (size_t a = 0; a < dyn_au_names.size(); ++a)
     {
         if (au_name.compare(dyn_au_names[a]) == 0)
         {

@@ -645,7 +645,7 @@ bool CLNF::DetectLandmarks(const cv::Mat_<uchar> &image, const cv::Mat_<float> &
     if(parts_used)
     {
-        for (int part_model = 0; part_model < hierarchical_models.size(); ++part_model)
+        for (size_t part_model = 0; part_model < hierarchical_models.size(); ++part_model)
         {
             vector<pair<int, int>> mappings = this->hierarchical_mapping[part_model];