I have a class called RL_Brain
:
JavaScript
x
30
30
1
class RL_Brain():
2
def __init__(self, n_features, n_action, memory_size=10, batch_size=32, gamma=0.9, fi_size=10):
3
self.n_features = n_features
4
self.n_actions = n_action
5
6
self.encoder = keras.Sequential([
7
Input((self.n_features,)),
8
Dense(16, activation='relu', kernel_initializer='glorot_normal', name='encoder_1'),
9
Dense(16, activation='relu', kernel_initializer='glorot_normal', name='encoder_2'),
10
Dense(16, activation='relu', kernel_initializer='glorot_normal', name='encoder_3'),
11
Dense(self.fi_size, activation='softmax', name='fi'),
12
])
13
14
self.decoder = keras.Sequential([
15
Input((self.fi_size,)),
16
Dense(16, activation='relu', name='decoder_1', trainable=True),
17
Dense(16, activation='relu', name='decoder_2', trainable=True),
18
Dense(16, activation='relu', name='decoder_3', trainable=True),
19
Dense(self.n_features, activation=None, name='decoder_output', trainable=True)
20
])
21
22
def learn(self, state, r, a, state_):
23
encoded = tf.one_hot(tf.argmax(self.encoder(state), axis=1), depth=self.fi_size)
24
encoded_ = tf.one_hot(tf.argmax(self.encoder(state_), axis=1), depth=self.fi_size)
25
decoded_state = self.decoder(encoded).numpy()
26
with tf.GradientTape() as tape:
27
loss1 = mean_squared_error(state, decoded_state)
28
grads = tape.gradient(loss1, self.decoder.trainable_variables)
29
self.opt.apply_gradients(zip(grads, self.decoder.trainable_variables))
30
When I run the learn
function, I get the following error:
JavaScript
1
4
1
File "/Users/wangheng/app/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/utils.py", line 78, in filter_empty_gradients raise ValueError("No gradients provided for any variable: %s." %
2
3
ValueError: No gradients provided for any variable: ['decoder_1/kernel:0', 'decoder_1/bias:0', 'decoder_2/kernel:0', 'decoder_2/bias:0', 'decoder_3/kernel:0', 'decoder_3/bias:0', 'decoder_output/kernel:0', 'decoder_output/bias:0'].
4
Advertisement
Answer
the following line is causing that error
JavaScript
1
2
1
decoded_state = self.decoder(encoded).numpy()
2
Once you do that, there is no path from your loss function to your trainable variables so no gradient can be calculated.