Consider the following code segment:
static int const MAX = 1024; // limit on the number of directory entries
int actual = 0; // counter of directory entires
int ptrsize = sizeof(char*);
int i = 0;
char cwd[1024];
DIR* d;
struct dirent** buffer;
struct dirent** file;
getcwd ( cwd, sizeof ( cwd ) );
d = opendir ( cwd );
buffer = (struct dirent**)malloc(MAX * ptrsize); // allocate the large buffer
/* fill the buffer with dirents for each file in the cwd */
while ( ( buffer[actual] = readdir ( d ) ) != NULL ) actual++;
/* copy the active part of the buffer to the file array and free the buffer */
file = (struct dirent**)malloc(actual * ptrsize);
while ( i < actual ) {
file[i] = buffer[i];
i++;
}
free ( buffer );
Would it be more efficient to achieve the same result in the following way, relying on automatic (stack) storage and the operating system's virtual memory?
static int const MAX = 1024; // limit on the number of directory entries
int actual = 0; // counter of directory entires
char cwd[1024];
DIR* d;
struct dirent* file[MAX];
getcwd ( cwd, sizeof ( cwd ) );
d = opendir ( cwd );
while ( ( file[actual] = readdir ( d ) ) != NULL ) actual++;
If there are 10 files in the directory, 1014 of the 1024 elements of file go unused — nearly 99% of the array is wasted. Would the stack-array version still be more efficient, on the assumption that virtual memory can reclaim the untouched array elements for other purposes?