I am testing a red-black tree implementation (from a public repository) and I find that on Windows 10 with gcc, malloc starts returning NULL after inserting approximately 50 million nodes, whereas on Linux the same program works at least up to 100 million nodes.
What conclusion can I draw from this result? Is it a bug in my program, or is malloc somehow "more efficient" on Linux — and if so, how?
/*
 * Recursively free every node of the tree.
 * Skips NULL children and the shared LEAF sentinel, which is allocated
 * once and must be freed exactly once by the caller.
 * NOTE(review): assumes insert() links children either to LEAF or leaves
 * them NULL — confirm against the insert implementation.
 * Depth is bounded by ~2*log2(n) for a valid red-black tree, so recursion
 * is safe even for hundreds of millions of nodes.
 */
static void free_tree(struct node *p) {
    if (p == NULL || p == LEAF)
        return;
    free_tree(p->left);
    free_tree(p->right);
    free(p);
}

/*
 * Stress-test: attempt to insert up to one billion randomly keyed nodes
 * into the red-black tree, then release all memory.
 *
 * Returns 0 on normal completion, 1 if the sentinel leaf cannot be
 * allocated. Running out of memory mid-loop is reported and stops the
 * test, but still returns 0 after cleanup.
 */
int test() {
    int T = 1000000000; /* number of insertions to attempt: 1,000,000,000 */
    struct node *root = NULL;
    srand(time(NULL));

    /* Shared sentinel leaf used by the red-black tree implementation. */
    LEAF = malloc(sizeof *LEAF);
    if (LEAF == NULL) {
        printf("malloc failed for sentinel leaf\n");
        return 1;
    }
    LEAF->color = BLACK;
    LEAF->left = NULL;
    LEAF->right = NULL;
    LEAF->key = 0;

    while (T-- > 0) {
        /*
         * The original (2 + T) * (rand() % 100) overflows int for large T,
         * which is undefined behavior. Do the multiply in unsigned
         * arithmetic (well-defined wraparound), then convert back; the
         * resulting key distribution is equivalent on common platforms.
         */
        int r2 = (int)((unsigned)(2 + T) * (unsigned)(rand() % 100)); /* data */

        struct node *z = malloc(sizeof *z);
        if (z == NULL) {
            /*
             * Stop the test instead of continuing the loop: once malloc
             * fails, retrying up to a billion more times just floods
             * stdout with error lines.
             */
            printf("malloc failed at node number %d\n", T);
            break;
        }
        z->key = r2;
        z->left = NULL;
        z->right = NULL;
        z->parent = NULL;
        z->color = RED;
        root = insert(root, z);
    }

    /*
     * The original code leaked the entire tree (root = NULL drops every
     * node) — a leak of this size is itself a plausible cause of malloc
     * failures across repeated runs. Free all nodes and the sentinel.
     */
    free_tree(root);
    free(LEAF);
    LEAF = NULL;
    root = NULL;
    return 0;
}