
Here is my example code (I am sorry it is quite long):

#include "stdafx.h"
#ifndef WIN32_LEAN_AND_MEAN
  #define WIN32_LEAN_AND_MEAN
#endif
#include <winsock2.h>
#include <windows.h>
#pragma comment(lib, "Ws2_32.lib")

typedef BOOL OPT_TYPE;

int _tmain(int argc, _TCHAR* argv[])
{
    WSADATA wsaData;
    int startupResult = WSAStartup(MAKEWORD(2, 2), &wsaData);
    if (0 != startupResult)
    {
      printf("ERROR in WSAStartup: %d\n", startupResult);
      return startupResult;
    }

    SOCKET handle = socket(AF_INET, SOCK_STREAM, 0);
    if (INVALID_SOCKET == handle)
    {
      int error =  WSAGetLastError();
      printf("ERROR in socket: errno: %d\n", error);
      return error;
    }

   OPT_TYPE flag = -1;              // deliberately neither TRUE nor FALSE
   int flagLength = sizeof(flag);   // 4 when OPT_TYPE is BOOL
   if (SOCKET_ERROR == getsockopt(handle, IPPROTO_TCP, TCP_NODELAY,
         reinterpret_cast<char*>(&flag), &flagLength))
   {
      int error =  WSAGetLastError();
      printf("ERROR in getsockopt: errno: %d\n", error);
      return error;
   }
   if(TRUE != flag && FALSE != flag)
   {
      printf("ERROR in getsockopt (default value): flag is invalid: %d, length is %d\n", flag, flagLength);
   }

   OPT_TYPE setflag = TRUE;
   if (SOCKET_ERROR == setsockopt(handle, IPPROTO_TCP, TCP_NODELAY,
         reinterpret_cast<char*>(&setflag), flagLength))
   {
      int error =  WSAGetLastError();
      printf("ERROR in getsockopt: errno: %d\n", error);
      return error;
   }
   if (SOCKET_ERROR == getsockopt(handle, IPPROTO_TCP, TCP_NODELAY,
         reinterpret_cast<char*>(&flag), &flagLength))
   {
      int error =  WSAGetLastError();
      printf("ERROR in getsockopt: errno: %d\n", error);
      return error;
   }
   if(TRUE != flag && FALSE != flag)
   {
      printf("ERROR in getsockopt (default value): flag is invalid: %d, length is %d\n", flag, flagLength);
   }

   setflag = FALSE;
   if (SOCKET_ERROR == setsockopt(handle, IPPROTO_TCP, TCP_NODELAY,
         reinterpret_cast<char*>(&setflag), flagLength))
   {
      int error =  WSAGetLastError();
      printf("ERROR in getsockopt: errno: %d\n", error);
      return error;
   }
   if (SOCKET_ERROR == getsockopt(handle, IPPROTO_TCP, TCP_NODELAY,
         reinterpret_cast<char*>(&flag), &flagLength))
   {
      int error =  WSAGetLastError();
      printf("ERROR in getsockopt: errno: %d\n", error);
      return error;
   }
   if(TRUE != flag && FALSE != flag)
   {
      printf("ERROR in getsockopt (default value): flag is invalid: %d, length is %d\n", flag, flagLength);
   }
   closesocket(handle);
   WSACleanup();
   return 0;
}

Prints:

ERROR in getsockopt (default value): flag is invalid: -256, length is 1
ERROR in getsockopt (default value): flag is invalid: -255, length is 1
ERROR in getsockopt (default value): flag is invalid: -256, length is 1

Can anyone reproduce this? Note: I am on Windows 7 x64, compiling a 32-bit application with VS2010.

The documentation on getsockopt says I should use a BOOL. The documentation on the IPPROTO_TCP socket options says I should use a DWORD. However, neither works. When I set OPT_TYPE to an 8-bit type (e.g. char), it works. Did I miss something in the documentation?
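
For reference, here is a minimal sketch of the char variant that does work for me (it reuses handle from the code above; flag8 and flag8Length are just local names for this snippet):

// 8-bit option type, as mentioned above: getsockopt() then reports length 1
// and the value comes back as 0 or 1.
char flag8 = 0;
int flag8Length = sizeof(flag8);   // 1
if (SOCKET_ERROR == getsockopt(handle, IPPROTO_TCP, TCP_NODELAY,
      &flag8, &flag8Length))
{
   printf("ERROR in getsockopt: errno: %d\n", WSAGetLastError());
}
else
{
   printf("TCP_NODELAY = %d, reported length = %d\n", flag8, flag8Length);
}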

kuga
  • Can you check the value of "flagLength" before and after the _first_ call to `getsockopt()`? Before, it should be 4, and I guess that it is "1" after, because only one byte is written to your flag, which perfectly explains the results -256 and -255. And you are right, flagLength should be 4 after the call to getsockopt() according to the documentation. – Ctx Nov 13 '17 at 15:15
  • There is a reason why you are obliged to pass a *pointer to* the option length: the function updates the pointed-to length to tell you how many bytes it wrote into the option value. It looks like it must be writing only one. A quick and dirty workaround might be to set the option value to `0` before the call instead of to `-1`. More robust would be to ignore any un-set bytes when you evaluate the value (see the sketch after these comments). – John Bollinger Nov 13 '17 at 15:27
  • @Ctx You are right! I updated the question accordingly. – kuga Nov 13 '17 at 15:28
  • @kuga needless, since there is no question anymore... Winsock takes a char optval (1 byte) for TCP_NODELAY instead of BOOL/DWORD/int (4 byte) and the documentation is buggy. Case closed. – Ctx Nov 13 '17 at 15:29
  • @JohnBollinger well, that's only half of the truth, you usually initialize the optlen with the number of bytes you expect to get (if it is fixed size); that the number of returned bytes doesn't match the documented datatype to use for the specific socket option is something I wouldn't expect either. It might even be a case for an `assert(flagLength == sizeof );` – Ctx Nov 13 '17 at 15:32
  • That's a good point, @Ctx. And an even better reason not to set the length to `-1` initially. – John Bollinger Nov 13 '17 at 15:34
  • @JohnBollinger You mean, to not set the flag to -1? Ok, but it shouldn't matter anyway, if the API doesn't work as documented anything might happen ;) – Ctx Nov 13 '17 at 15:36
  • No, @Ctx, I mean to set it to the size of the option value, just as you say, instead of setting it to -1. – John Bollinger Nov 13 '17 at 15:39
  • @JohnBollinger Yes, of course, setting it to -1 shouldn't even work since getsockopt expects the size of the reserved area to be preset in that parameter. The OP did set it to `sizeof(BOOL)`, which looks correct to me. – Ctx Nov 13 '17 at 15:42
  • @Ctx, you're right again. I guess it's still too early on a Monday morning for me to be looking at code. But even though winsock's `getsockopt()` behaves differently from what its documentation says with respect to option length, the problem still could have been detected and even automatically accommodated by looking at the option length it actually reports. – John Bollinger Nov 13 '17 at 16:01
  • When `flag` is `BOOL`, the code is initializing `flag` to `-1` (`0xFFFFFFFF`), and then `getsockopt()` returns `flaglength=1`, which means only the 1st byte is being set (to `0x00`), leaving the other 3 bytes as `0xFF`. That is where `-256` (`0xFFFFFF00`) comes from. If `flag` had been initialized to `0` instead of `-1`, `flag` would have ended up as `0x00000000` and the check for `FALSE` after the 1st `getsockopt()` would have succeeded. But yes, Microsoft should fix the documentation to say `TCP_NODELAY` uses `char`, however, `BOOL` "works" fine when initialized correctly. – Remy Lebeau Nov 13 '17 at 22:19
  • @RemyLebeau No, BOOL works fine when initialized to "0", which is clearly a _workaround_. "Correctly" implies that this had to be done in sane code, but this is not the case. It shouldn't be necessary to initialize `flag` at all! – Ctx Nov 13 '17 at 23:01
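
Putting the workaround from these comments together, here is a minimal sketch (based only on the behavior observed above, i.e. Winsock writing a single byte for TCP_NODELAY): keep OPT_TYPE as BOOL, zero-initialize it, and check the length getsockopt() actually reports instead of trusting the documented size.

// Workaround sketch: zero-initialize so unwritten bytes stay 0, then
// verify how many bytes getsockopt() says it wrote.
OPT_TYPE flag = 0;
int flagLength = sizeof(flag);   // size of the buffer we provide (4)
if (SOCKET_ERROR == getsockopt(handle, IPPROTO_TCP, TCP_NODELAY,
      reinterpret_cast<char*>(&flag), &flagLength))
{
   printf("ERROR in getsockopt: errno: %d\n", WSAGetLastError());
}
else
{
   if (flagLength != sizeof(flag))
      printf("getsockopt wrote %d byte(s) instead of %d\n", flagLength, static_cast<int>(sizeof(flag)));
   printf("TCP_NODELAY = %d\n", flag);   // 0 or 1, since the upper bytes stayed 0
}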

0 Answers