Include contrib folder and pytorch stringifier
This PR introduces a new contrib folder under pudb for community customization. A stringifier for PyTorch is included.

Detailed changelog:

* A module was added under pudb/contrib/stringifiers as a place to store custom stringifiers.
* A stringifier for PyTorch tensors and modules is included.
* An option was added to the settings menu to enable or disable the contrib content, so users who want to stick to the core pudb installation can leave it off.
* Changes were made in var_view.py and settings.py to allow the contrib/stringifiers to be included in the configuration menu.

Signed-off-by: Giorgos Paraskevopoulos <geopar@central.ntua.gr>
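For a quick sense of what the new stringifier shows in the variables pane, the snippet below calls it directly on a tensor and a small module. The expected strings are taken from the tests added in this commit and assume a CPU build of torch; the `CONTRIB_STRINGIFIERS` lookup mirrors how the dict introduced below is keyed.

```python
import torch

from pudb.contrib.stringifiers import CONTRIB_STRINGIFIERS

stringify = CONTRIB_STRINGIFIERS["contrib.pytorch"]

# Tensors are rendered as: type, dtype, device, shape
print(stringify(torch.randn(10, 5, 4)))
# Tensor[float32][cpu] [10, 5, 4]

# Modules are rendered as: repr (or class name if the repr is long),
# device, and trainable parameter count
print(stringify(torch.nn.Conv2d(20, 10, 3)))
# Conv2d(20, 10, kernel_size=(3, 3), stride=(1, 1))[cpu] Params: 1810
```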
Showing 9 changed files with 159 additions and 5 deletions.
@@ -0,0 +1,13 @@
# Community contributed extensions for pudb

Here the community can extend pudb with custom stringifiers, themes and shells.


## How to contribute your stringifiers

Simply add a new python module inside `contrib/stringifiers` that contains your custom stringifier.

Then add your stringifier to the `CONTRIB_STRINGIFIERS` dict inside
`contrib/stringifiers/__init__.py`.

The new options should appear in the pudb settings pane after setting the `Enable community contributed content` option.
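As a rough sketch of the steps described above: the module and function names below (`fraction_stringifier.py`, `fraction_stringifier_fn`, `contrib.fraction`) are made up for illustration and are not part of this PR; only the fallback to `vv.default_stringifier` follows the pattern used by the pytorch stringifier in this commit.

```python
# contrib/stringifiers/fraction_stringifier.py  (hypothetical example module)
from fractions import Fraction
from typing import Any

import pudb.var_view as vv


def fraction_stringifier_fn(value: Any) -> str:
    # Render Fractions compactly; everything else falls back to pudb's default
    if isinstance(value, Fraction):
        return "Fraction {}/{}".format(value.numerator, value.denominator)
    return vv.default_stringifier(value)
```

Then register it by adding `"contrib.fraction": fraction_stringifier_fn` to the `CONTRIB_STRINGIFIERS` dict in `contrib/stringifiers/__init__.py`, next to the existing `"contrib.pytorch"` entry.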
@@ -0,0 +1 @@
from pudb.contrib.stringifiers import CONTRIB_STRINGIFIERS
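This one-line re-export just shortens the import path; assuming the package layout in this PR, both of the following should resolve to the same dict:

```python
from pudb.contrib import CONTRIB_STRINGIFIERS as via_package
from pudb.contrib.stringifiers import CONTRIB_STRINGIFIERS as via_module

assert via_package is via_module
```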
@@ -0,0 +1,8 @@
from pudb.contrib.stringifiers.torch_stringifier import torch_stringifier_fn

CONTRIB_STRINGIFIERS = {
    # User contributed stringifiers
    # Use the contrib prefix for all keys to avoid clashes with the core stringifiers
    # and make known to the user that this is community contributed code
    "contrib.pytorch": torch_stringifier_fn,
}
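One way the mapping can be consumed, shown here only as a sketch and not as pudb's actual internal wiring: treat the keys as the option names offered in the settings menu and the values as callables that turn a variable into its display string. The helper `stringify_with` below is hypothetical.

```python
from pudb.contrib.stringifiers import CONTRIB_STRINGIFIERS


def stringify_with(name, value):
    # Look up a community stringifier by its "contrib.*" key and apply it;
    # raises KeyError if no stringifier is registered under that name.
    return CONTRIB_STRINGIFIERS[name](value)


# e.g. stringify_with("contrib.pytorch", some_tensor)
```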
@@ -0,0 +1,37 @@
from typing import Any

try:
    import torch

    HAVE_TORCH = 1
except ImportError:
    HAVE_TORCH = 0

import pudb.var_view as vv


def torch_stringifier_fn(value: Any) -> str:
    if not HAVE_TORCH:
        # Fall back to default stringifier

        return vv.default_stringifier(value)

    if isinstance(value, torch.nn.Module):
        device: str = str(next(value.parameters()).device)
        params: int = sum([p.numel() for p in value.parameters() if p.requires_grad])
        rep: str = value.__repr__() if len(value.__repr__()) < 55 else type(
            value
        ).__name__

        return "{}[{}] Params: {}".format(rep, device, params)
    elif isinstance(value, torch.Tensor):
        return "{}[{}][{}] {}".format(
            type(value).__name__,
            str(value.dtype).replace("torch.", ""),
            str(value.device),
            str(list(value.shape)),
        )
    else:
        # Fall back to default stringifier

        return vv.default_stringifier(value)
@@ -0,0 +1,46 @@
try:
    import torch
    HAVE_TORCH = True
except ImportError:
    HAVE_TORCH = False

from pudb.var_view import default_stringifier
from pudb.contrib.stringifiers.torch_stringifier import torch_stringifier_fn

def test_tensor():
    if HAVE_TORCH:
        x = torch.randn(10, 5, 4)
        assert torch_stringifier_fn(x) == "Tensor[float32][cpu] [10, 5, 4]"


def test_conv_module():
    if HAVE_TORCH:
        x = torch.nn.Conv2d(20, 10, 3)
        assert torch_stringifier_fn(x) == "Conv2d(20, 10, kernel_size=(3, 3), stride=(1, 1))[cpu] Params: 1810"


def test_linear_module():
    if HAVE_TORCH:
        x = torch.nn.Linear(5, 2, bias=False)
        assert torch_stringifier_fn(x) == "Linear(in_features=5, out_features=2, bias=False)[cpu] Params: 10"


def test_long_module_repr_should_revert_to_type():
    if HAVE_TORCH:
        x = torch.nn.Transformer()
        assert torch_stringifier_fn(x) == "Transformer[cpu] Params: 44140544"


def test_reverts_to_default_for_str():
    x = "Everyone has his day, and some days last longer than others."
    assert torch_stringifier_fn(x) == default_stringifier(x)


def test_reverts_to_default_for_dict():
    x = {"a": 1, "b": 2, "c": 3}
    assert torch_stringifier_fn(x) == default_stringifier(x)


def test_reverts_to_default_for_list():
    x = list(range(1000))
    assert torch_stringifier_fn(x) == default_stringifier(x)