A = 0
B = 1
A, B = B, A + B
Running the update five times, the values printed for B in Python will be 1, 1, 2, 3, 5, while the equivalent C code prints 1, 1, 1, 1, 1.
Could anyone explain this difference to me?
#include <stdio.h>
/* Boolean constants.
 * Bug fixed: the original definitions ended with a semicolon, which gets
 * pasted into every use site.  `return TRUE;` only compiled because the
 * extra `;` landed in statement position; something like `if (x == TRUE)`
 * would not compile at all.  Object-like macros must not carry a trailing
 * semicolon. */
#define TRUE  (1 == 1)
#define FALSE (1 == 0)
/* Forward declaration: the definition follows main(). */
int fibonacci(int sequencia);

/* Demo entry point: prints the first five Fibonacci numbers.
 * Bug fixed: `return FALSE;` expanded to `return (1 == 0);;` and only
 * compiled because of the stray semicolon inside the FALSE macro.
 * Plain `return 0;` is the conventional success status and has identical
 * behavior.  `main(void)` gives the prototype-correct signature. */
int main(void) {
    fibonacci(5);
    return 0;
}
/*
 * Prints the first `sequencia` Fibonacci numbers (1 1 2 3 5 ...), one per
 * line, recursing downward while keeping the running pair in static state.
 *
 * Bugs fixed:
 *  - `antigo, novo = novo, antigo + novo;` is NOT parallel assignment in C.
 *    The comma operator evaluates `antigo` and discards it, performs the
 *    no-op `novo = novo`, then evaluates and discards `antigo + novo`.
 *    Nothing ever changed, which is exactly why the C version printed
 *    1, 1, 1, 1, 1.  C has no tuple assignment; the update must go through
 *    a temporary.
 *  - The recursive path fell off the end of a non-void function (undefined
 *    behavior if the caller used the value); the result is now propagated.
 *
 * Returns 1 (true) when the countdown reaches zero.
 *
 * NOTE(review): the static locals mean a second top-level call continues
 * the sequence instead of restarting it — confirm that is intended.
 */
int fibonacci(int sequencia) {
    static int antigo = 0;  /* previous term */
    static int novo = 1;    /* current term  */

    if (sequencia == 0) {
        return 1;
    }
    printf("%i\n", novo);

    /* Parallel update via a temporary — the C equivalent of Python's
     * `antigo, novo = novo, antigo + novo`: both right-hand values are
     * computed before either name is overwritten. */
    int copia = antigo + novo;
    antigo = novo;
    novo = copia;

    return fibonacci(sequencia - 1);
}
# Seed state for the Fibonacci walk: previous term and current term.
antigo, novo = 0, 1
def fibonacci(sequencia):
    """Print the first `sequencia` Fibonacci numbers (1 1 2 3 5 ...),
    one per line, counting down recursively.

    Uses the module-level globals `antigo`/`novo` as running state, so a
    second call continues the sequence rather than restarting it.

    Returns True once the countdown reaches zero.
    """
    global antigo, novo
    if sequencia == 0:
        return True
    # Fixed: `print novo` is Python-2-only syntax (SyntaxError on Python 3).
    # print(novo) produces identical output for a single argument on both.
    print(novo)
    # Tuple assignment: both right-hand sides are evaluated before either
    # name is rebound -- this is the semantics the C comma operator lacks.
    antigo, novo = novo, antigo + novo
    # Propagate the result instead of implicitly returning None.
    return fibonacci(sequencia - 1)
def _main():
    """Demo: print the first five Fibonacci numbers (1 1 2 3 5)."""
    fibonacci(5)


if __name__ == "__main__":
    _main()