My print statements, inside certain function invocations, are not displaying to the console.

The purpose of my program is to display the "working out" of the math behind the forward pass in a perceptron. However, the math isn't important to know here; let's crudely assume any math referenced is sound.

My problem occurs at # -- OUTPUT -- in Perceptron.py. Excuse the size of the program.
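Specifically, this is the call whose prints never appear (from Perceptron.py below); everything printed before this line shows up fine:

out = af.invoker('softmax', LR, ans, dp)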
main.py
import os
os.system('clear')  # clear the terminal; this runs before Perceptron is imported
import Perceptron   # Perceptron.py runs (and prints) at import time
Perceptron.py
import ActivationFunctions as af
import numpy as np
from math import e

X = [[0.5, 0.3], [-0.5, 0.9], [0, -0.1], [1, 0]]  # [input, weight] pairs
target = 0.7
LR = 0.01  # only used by the leaky ReLU
dp = 5     # decimal places for rounding

# ----- Forward Pass -----
print('Forward Pass')

# -- INPUT --
# build the 'in = (x1 x w1) + (x2 x w2) + ...' string
in_str = 'in = '
for input in X:
    substr = '(' + str(input[0]) + ' x ' + str(input[1]) + ') + '
    in_str += substr
in_str = in_str[:-3]  # trim the trailing ' + '
print(in_str)

# same again, but with each product evaluated
calcs = [x * y for x, y in X]
in_str = ' = '
for c in calcs:
    substr = '(' + str(c) + ') + '
    in_str += substr
in_str = in_str[:-3]
print(in_str)

ans = round(sum([x * y for x, y in X]), dp)
print(' = ' + str(ans))
print()

# PROBLEM OCCURS HERE
# -- OUTPUT --
# SIGMOID
out = af.invoker('softmax', LR, ans, dp)
print()
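For reference, if my math is right, the sigmoid step at # -- OUTPUT -- should work out to (rounded to dp = 5 places):

out = 1 / (1 + e^-(-0.3))
    = 1 / (1 + e^0.3)
    ≈ 0.42556

so I'd expect softmax to print its two lines at that point.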
ActivationFunctions.py
import numpy as np
from math import e

def binary_step(ans, dp):
    if ans >= 0: return 1
    else: return 0

def identity(ans, dp):
    return round(ans, dp)

def logistic(ans, dp):
    return round((1)/(1+(e**-ans)), dp)

def tanh(ans, dp):
    return round(((e**ans) - (e**-ans))/((e**ans) + (e**-ans)), dp)

def relu(ans, dp):
    if ans < 0: return 0
    else: return round(ans, dp)

def leaky_relu(LR, ans, dp):
    if ans < 0: return round(LR*ans, dp)
    else: return round(ans, dp)

def softmax(ans, dp):
    print('out = 1 / (1 + e^-'+str(+ans)+')')
    out = round(1 / (1 + e**-ans), dp)
    print(' = '+str(out))
    return out

def invoker(name, LR, ans, dp):
    name = name.lower()
    if 'binary' or 'step' in name: return binary_step(ans, dp)
    elif name == 'identity': return identity(ans, dp)
    elif name == 'logistic': return logistic(ans, dp)
    elif name == 'tanh': return tanh(ans, dp)
    elif name == 'relu': return relu(ans, dp)
    elif name == 'lrelu' or 'leaky' in name: return leaky_relu(LR, ans, dp)
    elif name == 'softmax': return softmax(ans, dp)
    else: print("ENTER VALID ACTIVATION FUNCTION")
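What's confusing is that softmax itself prints fine when I call it directly; for example, in a REPL (with an arbitrary input of 0.5):

>>> import ActivationFunctions as af
>>> af.softmax(0.5, 5)
out = 1 / (1 + e^-0.5)
 = 0.62246
0.62246

It only goes quiet when I go through af.invoker from Perceptron.py.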
This is what I currently get, and the missing output should appear right after it:
Forward Pass
in = (0.5 x 0.3) + (-0.5 x 0.9) + (0 x -0.1) + (1 x 0)
= (0.15) + (-0.45) + (-0.0) + (0)
= -0.3
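Based on softmax's two print statements, I'd expect roughly this to follow (worked out by hand from ans = -0.3, since it never actually prints):

out = 1 / (1 + e^--0.3)
 = 0.42556

Instead, nothing appears there at all.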