Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix warnings during GAN training #3334

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
18 changes: 9 additions & 9 deletions fastai/vision/gan.py
Expand Up @@ -138,8 +138,8 @@ def __init__(self, switch_eval=False, clip=None, beta=0.98, gen_first=False, sho
self.gen_loss,self.crit_loss = AvgSmoothLoss(beta=beta),AvgSmoothLoss(beta=beta)

def _set_trainable(self):
train_model = self.generator if self.gen_mode else self.critic
loss_model = self.generator if not self.gen_mode else self.critic
train_model = self.gan_generator if self.gan_gen_mode else self.gan_critic
loss_model = self.gan_generator if not self.gan_gen_mode else self.gan_critic
set_freeze_model(train_model, True)
set_freeze_model(loss_model, False)
if self.switch_eval:
Expand All @@ -148,9 +148,9 @@ def _set_trainable(self):

def before_fit(self):
"Initialize smootheners."
self.generator,self.critic = self.model.generator,self.model.critic
self.gen_mode = self.gen_first
self.switch(self.gen_mode)
self.gan_generator,self.gan_critic = self.model.generator,self.model.critic
self.learn.gan_gen_mode = self.gen_first
self.switch(self.gan_gen_mode)
self.crit_losses,self.gen_losses = [],[]
self.gen_loss.reset() ; self.crit_loss.reset()
#self.recorder.no_val=True
Expand All @@ -165,13 +165,13 @@ def before_batch(self):
"Clamp the weights with `self.clip` if it's not None, set the correct input/target."
if self.training and self.clip is not None:
for p in self.critic.parameters(): p.data.clamp_(-self.clip, self.clip)
if not self.gen_mode:
if not self.gan_gen_mode:
(self.learn.xb,self.learn.yb) = (self.yb,self.xb)

def after_batch(self):
"Record `last_loss` in the proper list."
if not self.training: return
if self.gen_mode:
if self.gan_gen_mode:
self.gen_loss.accumulate(self.learn)
self.gen_losses.append(self.gen_loss.value)
self.last_gen = self.learn.to_detach(self.pred)
Expand All @@ -181,7 +181,7 @@ def after_batch(self):

def before_epoch(self):
"Put the critic or the generator back to eval if necessary."
self.switch(self.gen_mode)
self.switch(self.gan_gen_mode)

#def after_epoch(self):
# "Show a sample image."
Expand All @@ -198,7 +198,7 @@ def before_epoch(self):

def switch(self, gen_mode=None):
"Switch the model and loss function, if `gen_mode` is provided, in the desired mode."
self.gen_mode = (not self.gen_mode) if gen_mode is None else gen_mode
self.learn.gan_gen_mode = (not self.gen_mode) if gen_mode is None else gen_mode
self._set_trainable()
self.model.switch(gen_mode)
self.loss_func.switch(gen_mode)
Expand Down
18 changes: 9 additions & 9 deletions nbs/24_vision.gan.ipynb
Expand Up @@ -394,8 +394,8 @@
" self.gen_loss,self.crit_loss = AvgSmoothLoss(beta=beta),AvgSmoothLoss(beta=beta)\n",
"\n",
" def _set_trainable(self):\n",
" train_model = self.generator if self.gen_mode else self.critic\n",
" loss_model = self.generator if not self.gen_mode else self.critic\n",
" train_model = self.gan_generator if self.gan_gen_mode else self.gan_critic\n",
" loss_model = self.gan_generator if not self.gan_gen_mode else self.gan_critic\n",
" set_freeze_model(train_model, True)\n",
" set_freeze_model(loss_model, False)\n",
" if self.switch_eval:\n",
Expand All @@ -404,9 +404,9 @@
"\n",
" def before_fit(self):\n",
" \"Initialize smootheners.\"\n",
" self.generator,self.critic = self.model.generator,self.model.critic\n",
" self.gen_mode = self.gen_first\n",
" self.switch(self.gen_mode)\n",
" self.gan_generator,self.gan_critic = self.model.generator,self.model.critic\n",
" self.learn.gan_gen_mode = self.gen_first\n",
" self.switch(self.gan_gen_mode)\n",
" self.crit_losses,self.gen_losses = [],[]\n",
" self.gen_loss.reset() ; self.crit_loss.reset()\n",
" #self.recorder.no_val=True\n",
Expand All @@ -421,13 +421,13 @@
" \"Clamp the weights with `self.clip` if it's not None, set the correct input/target.\"\n",
" if self.training and self.clip is not None:\n",
" for p in self.critic.parameters(): p.data.clamp_(-self.clip, self.clip)\n",
" if not self.gen_mode:\n",
" if not self.gan_gen_mode:\n",
" (self.learn.xb,self.learn.yb) = (self.yb,self.xb)\n",
"\n",
" def after_batch(self):\n",
" \"Record `last_loss` in the proper list.\"\n",
" if not self.training: return\n",
" if self.gen_mode:\n",
" if self.gan_gen_mode:\n",
" self.gen_loss.accumulate(self.learn)\n",
" self.gen_losses.append(self.gen_loss.value)\n",
" self.last_gen = self.learn.to_detach(self.pred)\n",
Expand All @@ -437,7 +437,7 @@
"\n",
" def before_epoch(self):\n",
" \"Put the critic or the generator back to eval if necessary.\"\n",
" self.switch(self.gen_mode)\n",
" self.switch(self.gan_gen_mode)\n",
"\n",
" #def after_epoch(self):\n",
" # \"Show a sample image.\"\n",
Expand All @@ -454,7 +454,7 @@
"\n",
" def switch(self, gen_mode=None):\n",
" \"Switch the model and loss function, if `gen_mode` is provided, in the desired mode.\"\n",
" self.gen_mode = (not self.gen_mode) if gen_mode is None else gen_mode\n",
" self.learn.gan_gen_mode = (not self.gen_mode) if gen_mode is None else gen_mode\n",
" self._set_trainable()\n",
" self.model.switch(gen_mode)\n",
" self.loss_func.switch(gen_mode)"
Expand Down