
I need code to find entropy of an image.

for(int i=0;i<grey_image.rows;i++)
{
    for(int j=1;j<grey_image.cols;j++)
    {
        //cout<<i<<" "<<j<<" "<<(int)grey_image.at<uchar>(i,j)<<endl;
        int a=(int)grey_image.at<uchar>(i,j);
        int b=(int)grey_image.at<uchar>(i,j-1);
        int x=a-b;
        if(x<0)
            x=0-x;
        probability_array[x]++;
        //grey_image.at<uchar>(i,j) = 255;
    }
}
//calculating probability
int n=rows*cols;
for(int i=0;i<256;i++)
{
    probability_array[i]/=n;
    //cout<<probability_array[i]<<endl;
}
// Galileo team formula
float entropy=0;
for(int i=0;i<256;i++)
{
    if (probability_array[i]>0)
    {
        float x=probability_array[i]*log(probability_array[i]);
        entropy+=x;
    }
}
return 0-entropy;

I originally wrote this to run on a programmable camera to measure entropy, and now I want to use it on a Windows system. I am getting an entropy of zero for a grey image. Please help me out: where did I go wrong?

user3872220
    Shouldn't it be 0 anyway? A low entropy value means that there is a lot of order in the image; high entropy means that there is a lot going on (random noise has higher entropy than a uniform image). – ButterDog Jul 24 '14 at 09:46
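
To make that point concrete: for a perfectly flat grey image every pixel falls into a single histogram bin, so that bin has probability 1 and every other bin has probability 0, giving H = -Σ p·log(p) = -(1 · log 1) = 0. A result of zero is therefore exactly what you should expect for a uniform image; it only indicates a bug if the input actually has varied content.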

3 Answers


Without knowing what image you are using, we cannot tell whether a zero entropy result is actually wrong (as suggested by @Xocoatzin). Besides, your code can benefit from some of the latest OpenCV features. Here is a working implementation using OpenCV histograms and matrix expressions:

    if (frame.channels()==3) cvtColor(frame,frame,CV_BGR2GRAY);
    /// Establish the number of bins
    int histSize = 256;
    /// Set the range of grey values
    float range[] = { 0, 256 } ;
    const float* histRange = { range };
    bool uniform = true; bool accumulate = false;
    Mat hist;
    /// Compute the histogram:
    calcHist( &frame, 1, 0, Mat(), hist, 1, &histSize, &histRange, uniform, accumulate );
    hist /= frame.total(); // counts -> probabilities
    hist += 1e-4;          // prevent log(0)

    Mat logP;
    cv::log(hist,logP);

    float entropy = -1*sum(hist.mul(logP)).val[0];

    cout << entropy << endl;
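
For completeness, here is a minimal sketch of how the snippet above could be wired into a standalone program. The `imageEntropy` wrapper, the `test.png` path and the OpenCV 2.x-style headers and constants are assumptions added here, not part of the original answer:

    #include <opencv2/opencv.hpp>
    #include <iostream>
    using namespace cv;
    using namespace std;

    // Shannon entropy (natural log) of an 8-bit image's grey-level histogram
    float imageEntropy(Mat frame)
    {
        if (frame.channels()==3) cvtColor(frame,frame,CV_BGR2GRAY);

        int histSize = 256;                  // one bin per grey level
        float range[] = { 0, 256 };
        const float* histRange = { range };
        Mat hist;
        calcHist( &frame, 1, 0, Mat(), hist, 1, &histSize, &histRange, true, false );

        hist /= frame.total();               // counts -> probabilities
        hist += 1e-4;                        // prevent log(0)

        Mat logP;
        cv::log(hist, logP);
        return -1*sum(hist.mul(logP)).val[0];
    }

    int main()
    {
        Mat img = imread("test.png");        // replace with your own image
        if (img.empty()) { cerr << "could not load image" << endl; return 1; }
        cout << "entropy: " << imageEntropy(img) << endl;
        return 0;
    }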
Яois

Here is what I am using; hope it helps: https://github.com/samidalati/OpenCV-Entropy. There you can find a couple of methods for calculating the entropy of color and grayscale images using OpenCV.

float entropy(Mat seq, Size size, int index)
{
  int cnt = 0;
  float entr = 0;
  float total_size = size.height * size.width; // total number of symbols (pixels) in the image

  for(int i=0;i<index;i++)
  {
    float sym_occur = seq.at<float>(0, i); // the number of times a symbol has occurred
    if(sym_occur>0) // log of zero goes to infinity
    {
      cnt++;
      entr += (sym_occur/total_size)*(log2(total_size/sym_occur));
    }
  }
  cout<<"cnt: "<<cnt<<endl;
  return entr;
}

// myEntropy calculates the relative occurrence of the different symbols within the given input sequence using a histogram
Mat myEntropy(Mat seq, int histSize)
{
  float range[] = { 0, 256 } ;
  const float* histRange = { range };

  bool uniform = true; bool accumulate = false;

  Mat hist;

  /// Compute the histogram:
  calcHist( &seq, 1, 0, Mat(), hist, 1, &histSize, &histRange, uniform, accumulate );

  return hist;
}
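
For example, the two functions can be combined like this (a rough sketch; the `test.png` path and the surrounding `main` are assumptions added here, and an 8-bit grayscale input is expected):

    #include <opencv2/opencv.hpp>
    #include <iostream>
    using namespace cv;
    using namespace std;

    // entropy() and myEntropy() as defined above

    int main()
    {
        Mat gray = imread("test.png", 0);           // load as 8-bit grayscale
        if (gray.empty()) { cerr << "could not load image" << endl; return 1; }

        Mat hist = myEntropy(gray, 256);            // 256-bin grey-level histogram
        float H = entropy(hist, gray.size(), 256);  // Shannon entropy in bits (entropy() uses log2)
        cout << "entropy: " << H << endl;
        return 0;
    }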
Sami Dalati
For each candidate threshold k, the loop below accumulates the probability mass and the p·ln(p) terms of the normalized grey-level histogram HiGreyN on either side of k, and combines them into a split entropy EiGrey[k] (negative values are clamped to zero):

//Calculate Entropy of 2D histogram
double Sum_prob_1k = 0, Sum_prob_kl = 0, Sum_prob_ln_1k = 0, Sum_prob_ln_kl = 0;
for (int k = start; k < end; k++)
{
    Sum_prob_1k = 0; Sum_prob_kl = 0;
    Sum_prob_ln_1k = 0; Sum_prob_ln_kl = 0;
    //i=1 need to be start = 1
    for (int i = 1; i < k; i++)
    {
        Sum_prob_1k += HiGreyN[i];
        if (HiGreyN[i] != 0)
            Sum_prob_ln_1k += (HiGreyN[i] * System.Math.Log(HiGreyN[i]));
    }
    for (int i = k; i < end; i++)
    {
        Sum_prob_kl += HiGreyN[i];
        if (HiGreyN[i] != 0)
            Sum_prob_ln_kl += (HiGreyN[i] * System.Math.Log(HiGreyN[i]));
    }
    //Final equation of entropy for each K
    EiGrey[k] = System.Math.Log(Sum_prob_1k) + System.Math.Log(Sum_prob_kl) -
               (Sum_prob_ln_1k / Sum_prob_1k) - (Sum_prob_ln_kl / Sum_prob_kl);
    if (EiGrey[k] < 0)
        EiGrey[k] = 0;
}
//End calculating 2D Entropy
Georgi
    When giving an answer it is preferable to give [some explanation as to WHY your answer](http://stackoverflow.com/help/how-to-answer) is the one. – Stephen Rauch Feb 12 '17 at 00:20