I'm trying to append Coin objects to a list, counting down from a given value (amount) to one. The problem with what I have right now is that range() raises a TypeError because a Coin object can't be interpreted as an integer. Is there a workaround for the add_to_table method so it produces the expected output shown below?
class Test:
    def __init__(self, table=[]):
        """(Test, int) -> NoneType
        """
        self.table = [(0, []), (1, []), (2, [])]

    def add_to_table(self, amount):
        """(Test, int) -> NoneType
        Adds to the first table Coin(amount) to Coin(1)
        ex.
        [(0, [Coin(3), Coin(2), Coin(1)]), (1, []), (2, [])]
        """
        self.table[0][1].extend(reversed(range(Coin(1), Coin(amount + 1))))


class Coin:
    def __init__(self, length):
        """(Coin, int) -> NoneType
        """
        self.length = length
Expected output:
>>> t1 = Test()
>>> t1.table
[(0, []), (1, []), (2, [])]
>>> t1.add_to_table(3)
>>> t1.table
[(0, [Coin(3), Coin(2), Coin(1)]), (1, []), (2, [])]
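
For reference, a minimal sketch of one possible workaround: let range() work on plain integers, counting down from amount to 1, and wrap each integer in a Coin before extending the list. The __repr__ method is my own addition, assumed only so the table prints as Coin(3), Coin(2), ... instead of default object addresses.

class Coin:
    def __init__(self, length):
        """(Coin, int) -> NoneType
        """
        self.length = length

    def __repr__(self):
        # Assumed for readable output only; not part of the original class.
        return "Coin({})".format(self.length)


class Test:
    def __init__(self, table=[]):
        """(Test, int) -> NoneType
        """
        self.table = [(0, []), (1, []), (2, [])]

    def add_to_table(self, amount):
        """(Test, int) -> NoneType
        Adds Coin(amount) down to Coin(1) to the first table.
        """
        # range() only accepts integers, so count down with plain ints
        # and wrap each one in a Coin before extending the list.
        self.table[0][1].extend(Coin(n) for n in range(amount, 0, -1))

With that change the session above behaves as expected:

>>> t1 = Test()
>>> t1.add_to_table(3)
>>> t1.table
[(0, [Coin(3), Coin(2), Coin(1)]), (1, []), (2, [])]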