diff --git a/CHANGELOG.md b/CHANGELOG.md
index 15e2957..2e9205b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+## neptune-fastai 1.1.1
+
+### Fixes
+- Don't error if `optim.__name__` is not present. (https://github.com/neptune-ai/neptune-fastai/pull/54)
+
 ## neptune-fastai 1.1.0
 
 ### Changes
diff --git a/src/neptune_fastai/impl/__init__.py b/src/neptune_fastai/impl/__init__.py
index abe784c..7107b7c 100644
--- a/src/neptune_fastai/impl/__init__.py
+++ b/src/neptune_fastai/impl/__init__.py
@@ -154,7 +154,19 @@ def _batch_size(self) -> int:
 
     @property
     def _optimizer_name(self) -> Optional[str]:
-        return self.opt_func.__name__
+        # Fall back to "N/A" when the optimizer factory has no __name__
+        # (e.g. a functools.partial wrapper). Use a None sentinel so an
+        # optimizer legitimately named "N/A" cannot trigger a spurious warning.
+        optim_name = getattr(self.opt_func, "__name__", None)
+        if optim_name is None:
+            warnings.warn(
+                "NeptuneCallback: Couldn't retrieve the optimizer name, "
+                "so it will be logged as 'N/A'. You can set the optimizer "
+                "name by assigning it to the __name__ attribute. "
+                "Eg. >>> optimizer.__name__ = 'NAME'"
+            )
+            return "N/A"
+        return optim_name
 
     @property
     def _device(self) -> str:
diff --git a/tests/neptune_fastai/test_e2e.py b/tests/neptune_fastai/test_e2e.py
index 8f06735..e6b0c49 100644
--- a/tests/neptune_fastai/test_e2e.py
+++ b/tests/neptune_fastai/test_e2e.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
# +from functools import partial from itertools import islice from pathlib import Path @@ -24,6 +25,7 @@ untar_data, ) from fastai.callback.all import SaveModelCallback +from fastai.optimizer import Adam from fastai.tabular.all import ( Categorify, FillMissing, @@ -71,12 +73,15 @@ def test_vision_classification_with_handler(self): device=torch.device("cpu"), ) + opt_func = partial(Adam, lr=3e-3, wd=0.01) + learn = cnn_learner( dls, squeezenet1_0, metrics=error_rate, cbs=[NeptuneCallback(run, "experiment")], pretrained=False, + opt_func=opt_func, ) learn.fit(1) @@ -91,6 +96,7 @@ def test_vision_classification_with_handler(self): exp_config = run["experiment/config"].fetch() assert exp_config["batch_size"] == 64 assert exp_config["criterion"] == "CrossEntropyLoss()" + assert exp_config["optimizer"]["name"] == "N/A" assert exp_config["input_shape"] == {"x": "[3, 224, 224]", "y": 1} # and