You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
File "/mnt/home/.conda/envs/sequoia/bin/sequoia_sweep", line 33, in <module>
sys.exit(load_entry_point('sequoia', 'console_scripts', 'sequoia_sweep')())
File "/mnt/home/dev/Sequoia/sequoia/experiments/hpo_sweep.py", line 129, in main
return HPOSweep.main()
File "/mnt/home/dev/Sequoia/sequoia/experiments/hpo_sweep.py", line 125, in main
return experiment.launch(argv, strict_args=strict_args)
File "/mnt/home/dev/Sequoia/sequoia/experiments/hpo_sweep.py", line 72, in launch
best_params, best_objective = self.method.hparam_sweep(
File "/mnt/home/dev/Sequoia/sequoia/settings/base/bases.py", line 732, in hparam_sweep
result: Results = setting.apply(self)
File "/mnt/home/dev/Sequoia/sequoia/settings/passive/cl/class_incremental_setting.py", line 362, in apply
results: ClassIncrementalResults = super().main_loop(method)
File "/mnt/home/dev/Sequoia/sequoia/settings/assumptions/incremental.py", line 222, in main_loop
method.fit(
File "/mnt/home/dev/Sequoia/sequoia/methods/experience_replay.py", line 112, in fit
loss_replay = F.cross_entropy(b_logits, b_samples['y'])
File "/mnt/home/.conda/envs/sequoia/lib/python3.8/site-packages/torch/nn/functional.py", line 2690, in cross_entropy
return nll_loss(log_softmax(input, 1), target, weight, None, ignore_index, None, reduction)
File "/mnt/home/.conda/envs/sequoia/lib/python3.8/site-packages/torch/nn/functional.py", line 1672, in log_softmax
ret = input.log_softmax(dim)
(function _print_stack)
Training Epoch 1: 1%| | 1/127 [00:00<01:03, 2.00it/s, loss=5.3e+3, replay loss=3.07e+4]
2021-03-20:17:23:26,572 ERROR [sequoia/settings/base/bases.py:735] Encountered an error, this trial will be dropped:
2021-03-20:17:23:26,573 ERROR [sequoia/settings/base/bases.py:736] ------------------------------------------------------------
2021-03-20:17:23:26,579 ERROR [sequoia/settings/base/bases.py:740] Traceback (most recent call last):
File "/mnt/home/dev/Sequoia/sequoia/settings/base/bases.py", line 732, in hparam_sweep
result: Results = setting.apply(self)
File "/mnt/home/dev/Sequoia/sequoia/settings/passive/cl/class_incremental_setting.py", line 362, in apply
results: ClassIncrementalResults = super().main_loop(method)
File "/mnt/home/dev/Sequoia/sequoia/settings/assumptions/incremental.py", line 222, in main_loop
method.fit(
File "/mnt/home/dev/Sequoia/sequoia/methods/experience_replay.py", line 116, in fit
loss.backward()
File "/mnt/home/.conda/envs/sequoia/lib/python3.8/site-packages/torch/tensor.py", line 245, in backward
torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs=inputs)
File "/mnt/home/.conda/envs/sequoia/lib/python3.8/site-packages/torch/autograd/__init__.py", line 145, in backward
Variable._execution_engine.run_backward(
RuntimeError: Function 'LogSoftmaxBackward' returned nan values in its 0th output.
The text was updated successfully, but these errors were encountered: