I am using an AdamW optimizer with a cosine-decay learning-rate schedule and warmup. I have written the custom scheduler from scratch and am using the AdamW optimizer provided by the TensorFlow Addons library.
import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
import wandb

class CosineScheduler(tf.keras.optimizers.schedules.LearningRateSchedule):
    def __init__(self, learning_rate_base, total_steps, warmup_learning_rate=0.0, warmup_steps=0):
        super().__init__()
        self.learning_rate_base = learning_rate_base
        self.total_steps = total_steps
        self.warmup_learning_rate = warmup_learning_rate
        self.warmup_steps = warmup_steps

    def __call__(self, step):
        # Cosine decay from learning_rate_base down to 0 after the warmup phase.
        learning_rate = 0.5 * self.learning_rate_base * (1 + tf.cos(
            np.pi * (tf.cast(step, tf.float32) - self.warmup_steps)
            / float(self.total_steps - self.warmup_steps)))
        if self.warmup_steps > 0:
            # Linear warmup from warmup_learning_rate up to learning_rate_base.
            slope = (self.learning_rate_base - self.warmup_learning_rate) / self.warmup_steps
            warmup_rate = slope * tf.cast(step, tf.float32) + self.warmup_learning_rate
            learning_rate = tf.where(step < self.warmup_steps, warmup_rate, learning_rate)
        # Clamp the rate to zero once training passes total_steps.
        lr = tf.where(step > self.total_steps, 0.0, learning_rate, name='learning_rate')
        wandb.log({"lr": lr})
        return lr

learning_rate = CosineScheduler(learning_rate_base=0.001, total_steps=23000,
                                warmup_learning_rate=0.0, warmup_steps=1660)
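As a quick sanity check (a minimal sketch; the step values are arbitrary, and since __call__ logs to wandb it assumes wandb.init() has already been called), the schedule can be evaluated directly:

sched = CosineScheduler(learning_rate_base=0.001, total_steps=23000,
                        warmup_learning_rate=0.0, warmup_steps=1660)
for s in [0, 830, 1660, 12000, 23000]:
    # During warmup (step < 1660) the rate climbs linearly; afterwards it follows the cosine.
    print(s, float(sched(tf.constant(s))))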
loss_func = tf.keras.losses.CategoricalCrossentropy(label_smoothing=0.1)
optimizer = tfa.optimizers.AdamW(learning_rate, weight_decay=0.1)
I get the following error, which says that weight_decay received multiple values:
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-12-6f9fd0a9c1cb> in <module>
      1 loss_func = tf.keras.losses.CategoricalCrossentropy(label_smoothing=0.1)
----> 2 optimizer = tfa.optimizers.AdamW(learning_rate,weight_decay=0.1)

/opt/conda/lib/python3.7/site-packages/typeguard/__init__.py in wrapper(*args, **kwargs)
    923
    924     def wrapper(*args, **kwargs):
--> 925         memo = _CallMemo(python_func, _localns, args=args, kwargs=kwargs)
    926         check_argument_types(memo)
    927         retval = func(*args, **kwargs)

/opt/conda/lib/python3.7/site-packages/typeguard/__init__.py in __init__(self, func, frame_locals, args, kwargs, forward_refs_policy)
    126
    127         if args is not None and kwargs is not None:
--> 128             self.arguments = signature.bind(*args, **kwargs).arguments
    129         else:
    130             assert frame_locals is not None, 'frame must be specified if args or kwargs is None'

/opt/conda/lib/python3.7/inspect.py in bind(*args, **kwargs)
   3013         if the passed arguments can not be bound.
   3014         """
-> 3015         return args[0]._bind(args[1:], kwargs)
   3016
   3017     def bind_partial(*args, **kwargs):

/opt/conda/lib/python3.7/inspect.py in _bind(self, args, kwargs, partial)
   2954                     raise TypeError(
   2955                         'multiple values for argument {arg!r}'.format(
-> 2956                             arg=param.name)) from None
   2957
   2958             arguments[param.name] = arg_val

TypeError: multiple values for argument 'weight_decay'
What is causing the problem and how do I resolve it?
Answer
The problem is that weight_decay is the first positional argument of tfa.optimizers.AdamW. In

optimizer = tfa.optimizers.AdamW(learning_rate, weight_decay=0.1)

the positional argument learning_rate is bound to the weight_decay parameter, and you also pass weight_decay as a keyword argument, so the parameter receives two values. This causes the error. According to the documentation, learning_rate is the second positional parameter (even though optional), not the first.
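The mechanism is plain Python argument binding, not anything TensorFlow-specific. A minimal sketch with a hypothetical function whose parameter order mirrors AdamW's:

# Hypothetical stand-in mirroring tfa.optimizers.AdamW's parameter order:
def adamw(weight_decay, learning_rate=0.001):
    pass

adamw(0.001, weight_decay=0.1)
# TypeError: adamw() got multiple values for argument 'weight_decay'
# 0.001 is bound positionally to weight_decay, then the keyword tries to bind it again.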
Just write
optimizer = tfa.optimizers.AdamW(0.1, learning_rate)
or
optimizer = tfa.optimizers.AdamW(weight_decay=0.1, learning_rate=learning_rate)
or
optimizer = tfa.optimizers.AdamW(learning_rate=learning_rate, weight_decay=0.1)
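Any of these keeps learning_rate out of the weight_decay slot. For completeness, a sketch of how the corrected optimizer plugs into training (model here is a hypothetical compiled Keras model, not from the question):

optimizer = tfa.optimizers.AdamW(learning_rate=learning_rate, weight_decay=0.1)
model.compile(optimizer=optimizer, loss=loss_func)  # model is hypothetical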