I have the below, where I am converting decimals to fractions. It works, 0.6 becomes 3/5.
However, if I had 0.666666666666. I'd expect it to be 2/3. How can I achieve this?
#set starting parameters
decimal = 0.6
starting_denominator = 100
starting_numerator = int(decimal * 100)
print('starting fraction: '+str(starting_numerator) + '/' + str(starting_denominator))
#find the common factors for the numerator and denominator
i = 1
numerator_factors = []
denominator_factors = []
while i < starting_numerator+1:
if starting_numerator%i == 0:
numerator_factors.append(i)
i = i+1
i = 1
while i < starting_denominator+1:
if starting_denominator%i == 0:
denominator_factors.append(i)
i = i+1
print('numerator factors: '+ str(numerator_factors))
print('denominator factors: '+ str(denominator_factors))
#Find matching factors and find highest across both
matches = []
for numfactor in numerator_factors:
for denfactor in denominator_factors:
if numfactor == denfactor:
matches.append(numfactor)
end_numerator = starting_numerator/max(matches)
end_denominator = starting_denominator/max(matches)
print('end fraction: '+str(int(end_numerator)) + '/' + str(int(end_denominator)))