Sorry, Alone! I think TensorFlow's normalization functions are ratio-style transforms that take the beta, gamma, and sigma values into account.
import tensorflow as tf

model = tf.keras.models.Sequential([
    # "input shape to have value 25088 but received input with shape (None, 784)"
    tf.keras.layers.InputLayer(input_shape=(1, 32, 32, 3)),
    tf.keras.layers.Normalization(mean=3., variance=2., name='Layer_1'),
    tf.keras.layers.Normalization(mean=4., variance=6., name='Layer_2'),
    tf.keras.layers.Dense(256, activation='relu', name='Layer_3'),
])
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(6, activation=tf.nn.softmax, name='Layer_4'))
model.summary()
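For what the Normalization layers above actually do: when you pass a fixed mean and variance, the layer just rescales each input as (x - mean) / sqrt(variance). A minimal sketch to check that by hand (the sample batch here is made up, not from the model above):

import numpy as np
import tensorflow as tf

# made-up sample batch, only to illustrate the arithmetic
x = np.array([[1.0, 3.0, 5.0]], dtype=np.float32)

norm = tf.keras.layers.Normalization(mean=3., variance=2.)
print(norm(x))                    # output of the layer
print((x - 3.) / np.sqrt(2.))     # the same values computed by hand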
# create some variables inside the 'Layer_1' scope (output in the comments is truncated)
with tf.compat.v1.variable_scope('Layer_1', reuse=tf.compat.v1.AUTO_REUSE):
    v2 = tf.compat.v1.get_variable('v2', shape=[256])        # <tf.Variable 'Layer_1/v2:0' shape=(256,) dtype=float32, numpy=array([-0.06715409, 0.10130859, 0.05591007, -0.05931217, 0.10036706, ...
    x1 = tf.compat.v1.get_variable('x', shape=[256])         # <tf.Variable 'Layer_1/x:0' shape=(256,) dtype=float32, numpy=array([-6.63143843e-02, 3.17198113e-02, 1.04614533e-01, -2.30028257e-02, ...
    y1 = tf.compat.v1.get_variable('y', shape=[256])         # <tf.Variable 'Layer_1/y:0' shape=(256,) dtype=float32, numpy=array([-0.10782533, 0.01488321, -0.04950972, -0.09561327, 0.10698273, ...
    y2 = tf.compat.v1.get_variable('y_', shape=[256])        # <tf.Variable 'Layer_1/y_:0' shape=(256,) dtype=float32, numpy=array([-0.04931336, -0.10670284, -0.10054329, -0.09619174, 0.08752564, ...
    mu = tf.compat.v1.get_variable('mu', shape=[256])        # <tf.Variable 'Layer_1/mu:0' shape=(256,) dtype=float32, numpy=array([-0.06098992, 0.02202646, -0.05624849, 0.0602672 , -0.02878931, ...
    sigma = tf.compat.v1.get_variable('sigma', shape=[256])  # <tf.Variable 'Layer_1/sigma:0' shape=(256,) dtype=float32, numpy=array([ 2.84786597e-02, 1.00004725e-01, -8.51654559e-02, -5.34656569e-02, ...
    gamma = tf.compat.v1.get_variable('gamma', shape=[256])  # <tf.Variable 'Layer_1/gamma:0' shape=(256,) dtype=float32, numpy=array([ 0.10177503, 0.04634983, -0.02325767, 0.04158259, 0.10051229, ...
    beta = tf.compat.v1.get_variable('beta', shape=[256])    # <tf.Variable 'Layer_1/beta:0' shape=(256,) dtype=float32, numpy=array([-7.85651207e-02, -4.94908020e-02, 8.88925046e-03, 9.37148184e-03, ...
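As for how those values fit together: my understanding is that they are combined in the usual batch-normalization formula, y = gamma * (x - mu) / sigma + beta. A rough sketch with placeholder values (not taken from the model above), cross-checked against tf.nn.batch_normalization:

import tensorflow as tf

# placeholder values, only to show the formula
x = tf.constant([[2.0, 4.0, 6.0]])
mu = tf.constant([3.0, 3.0, 3.0])       # mean
sigma = tf.constant([1.5, 1.5, 1.5])    # standard deviation
gamma = tf.constant([1.0, 2.0, 0.5])    # scale
beta = tf.constant([0.0, 0.1, -0.1])    # shift

y_manual = gamma * (x - mu) / sigma + beta
y_tf = tf.nn.batch_normalization(x, mean=mu, variance=sigma ** 2,
                                 offset=beta, scale=gamma,
                                 variance_epsilon=0.0)
print(y_manual)
print(y_tf)     # matches the hand-computed version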