From 8bb459c7ba4e5b91eb3a684a7a57e798e2a862f6 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 27 May 2024 17:24:07 +0000
Subject: [PATCH 1/6] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.4.1 → v0.4.5](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.1...v0.4.5)
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 964e292ab85..9811dbd3910 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -27,7 +27,7 @@ repos:
           - --exclude=binder/
           - --exclude=versioneer.py
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.4.1
+    rev: v0.4.5
     hooks:
       - id: ruff
         args: ["--fix", "--output-format=full"]

From 6bd28039a459eaf8069ea8b0a5b34c4b10005765 Mon Sep 17 00:00:00 2001
From: Virgile Andreani
Date: Tue, 28 May 2024 05:32:58 -0400
Subject: [PATCH 2/6] Add _version.py to exclude list of linter

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 83afadc67bb..8458b8f13f3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ addopts = ["--color=yes"]
 [tool.ruff]
 line-length = 100
 target-version = "py310"
-exclude = ["versioneer.py"]
+extend-exclude = ["versioneer.py", "_version.py"]
 
 [tool.ruff.lint]
 select = ["D", "E", "F", "I", "UP", "W", "RUF"]

From a7b3e07ef4b155a3d75de55c9966b99f9bab3602 Mon Sep 17 00:00:00 2001
From: Virgile Andreani
Date: Tue, 28 May 2024 05:33:29 -0400
Subject: [PATCH 3/6] Remove deprecated ruff config option

---
 pyproject.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 8458b8f13f3..160374d25fd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,7 +11,6 @@ extend-exclude = ["versioneer.py", "_version.py"]
 
 [tool.ruff.lint]
 select = ["D", "E", "F", "I", "UP", "W", "RUF"]
-ignore-init-module-imports = true
 ignore = [
   "E501",
   "F841", # Local variable name is assigned to but never used

From a5844f8cdb562945427079ca9565256b9a9fd54e Mon Sep 17 00:00:00 2001
From: Virgile Andreani
Date: Tue, 28 May 2024 05:34:51 -0400
Subject: [PATCH 4/6] Apply safe ruff fix

---
 pymc/model/core.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pymc/model/core.py b/pymc/model/core.py
index 0cf8bd26cf6..874d52f9bd6 100644
--- a/pymc/model/core.py
+++ b/pymc/model/core.py
@@ -209,7 +209,7 @@ def __init_subclass__(cls, **kwargs):
     def __call__(cls, *args, **kwargs):
         # We type hint Model here so type checkers understand that Model is a context manager.
         # This metaclass is only used for Model, so this is safe to do. See #6809 for more info.
- instance: "Model" = cls.__new__(cls, *args, **kwargs) + instance: Model = cls.__new__(cls, *args, **kwargs) with instance: # appends context instance.__init__(*args, **kwargs) return instance From aea690adc8458b480562a5ed56fc3c66a99d2c52 Mon Sep 17 00:00:00 2001 From: Virgile Andreani Date: Tue, 28 May 2024 05:37:07 -0400 Subject: [PATCH 5/6] Apply unsafe f-string related ruff fixes --- pymc/backends/base.py | 8 ++++---- pymc/distributions/multivariate.py | 8 ++++---- pymc/sampling/forward.py | 5 ++--- pymc/sampling/mcmc.py | 5 +---- pymc/sampling/parallel.py | 8 ++++---- pymc/step_methods/hmc/integration.py | 2 +- pymc/step_methods/metropolis.py | 2 +- pymc/variational/inference.py | 4 ++-- pymc/variational/opvi.py | 12 ++++++------ tests/variational/test_opvi.py | 2 +- 10 files changed, 26 insertions(+), 30 deletions(-) diff --git a/pymc/backends/base.py b/pymc/backends/base.py index 7854cc09313..477723a6970 100644 --- a/pymc/backends/base.py +++ b/pymc/backends/base.py @@ -186,7 +186,7 @@ def _set_sampler_vars(self, sampler_vars): for stats in sampler_vars: for key, dtype in stats.items(): if dtypes.setdefault(key, dtype) != dtype: - raise ValueError("Sampler statistic %s appears with " "different types." % key) + raise ValueError(f"Sampler statistic {key} appears with different types.") self.sampler_vars = sampler_vars @@ -247,7 +247,7 @@ def get_sampler_stats( sampler_idxs = [i for i, s in enumerate(self.sampler_vars) if stat_name in s] if not sampler_idxs: - raise KeyError("Unknown sampler stat %s" % stat_name) + raise KeyError(f"Unknown sampler stat {stat_name}") vals = np.stack( [self._get_sampler_stats(stat_name, i, burn, thin) for i in sampler_idxs], axis=-1 @@ -388,7 +388,7 @@ def __getitem__(self, idx): return self.get_values(var, burn=burn, thin=thin) if var in self.stat_names: return self.get_sampler_stats(var, burn=burn, thin=thin) - raise KeyError("Unknown variable %s" % var) + raise KeyError(f"Unknown variable {var}") _attrs = {"_straces", "varnames", "chains", "stat_names", "_report"} @@ -512,7 +512,7 @@ def get_sampler_stats( List or ndarray depending on parameters. """ if stat_name not in self.stat_names: - raise KeyError("Unknown sampler statistic %s" % stat_name) + raise KeyError(f"Unknown sampler statistic {stat_name}") if chains is None: chains = self.chains diff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py index 347a6e2539a..359b0743dd8 100644 --- a/pymc/distributions/multivariate.py +++ b/pymc/distributions/multivariate.py @@ -1047,11 +1047,11 @@ def WishartBartlett(name, S, nu, is_cholesky=False, return_cholesky=False, initv tril_testval = None c = pt.sqrt( - ChiSquared("%s_c" % name, nu - np.arange(2, 2 + n_diag), shape=n_diag, initval=diag_testval) + ChiSquared(f"{name}_c", nu - np.arange(2, 2 + n_diag), shape=n_diag, initval=diag_testval) ) - pm._log.info("Added new variable %s_c to model diagonal of Wishart." % name) - z = Normal("%s_z" % name, 0.0, 1.0, shape=n_tril, initval=tril_testval) - pm._log.info("Added new variable %s_z to model off-diagonals of Wishart." 
% name) + pm._log.info(f"Added new variable {name}_c to model diagonal of Wishart.") + z = Normal(f"{name}_z", 0.0, 1.0, shape=n_tril, initval=tril_testval) + pm._log.info(f"Added new variable {name}_z to model off-diagonals of Wishart.") # Construct A matrix A = pt.zeros(S.shape, dtype=np.float32) A = pt.set_subtensor(A[diag_idx], c) diff --git a/pymc/sampling/forward.py b/pymc/sampling/forward.py index 23b3a601658..ce51f5cc720 100644 --- a/pymc/sampling/forward.py +++ b/pymc/sampling/forward.py @@ -419,7 +419,7 @@ def sample_prior_predictive( data = {k: np.stack(v) for k, v in zip(names, values)} if data is None: - raise AssertionError("No variables sampled: attempting to sample %s" % names) + raise AssertionError(f"No variables sampled: attempting to sample {names}") prior: dict[str, np.ndarray] = {} for var_name in vars_: @@ -765,8 +765,7 @@ def sample_posterior_predictive( samples = len(_trace) else: raise TypeError( - "Do not know how to compute number of samples for trace argument of type %s" - % type(_trace) + f"Do not know how to compute number of samples for trace argument of type {type(_trace)}" ) assert samples is not None diff --git a/pymc/sampling/mcmc.py b/pymc/sampling/mcmc.py index f2ef43e8b58..c09cb163f5a 100644 --- a/pymc/sampling/mcmc.py +++ b/pymc/sampling/mcmc.py @@ -697,10 +697,7 @@ def joined_blas_limiter(): msg = "Tuning was enabled throughout the whole trace." _log.warning(msg) elif draws < 100: - msg = ( - "Only %s samples per chain. Reliable r-hat and ESS diagnostics require longer chains for accurate estimate." - % draws - ) + msg = f"Only {draws} samples per chain. Reliable r-hat and ESS diagnostics require longer chains for accurate estimate." _log.warning(msg) auto_nuts_init = True diff --git a/pymc/sampling/parallel.py b/pymc/sampling/parallel.py index 9f950f621f1..a34947c7064 100644 --- a/pymc/sampling/parallel.py +++ b/pymc/sampling/parallel.py @@ -58,7 +58,7 @@ def __init__(self, exc, tb): tb = traceback.format_exception(type(exc), exc, tb) tb = "".join(tb) self.exc = exc - self.tb = '\n"""\n%s"""' % tb + self.tb = f'\n"""\n{tb}"""' def __reduce__(self): return rebuild_exc, (self.exc, self.tb) @@ -216,7 +216,7 @@ def __init__( mp_ctx, ): self.chain = chain - process_name = "worker_chain_%s" % chain + process_name = f"worker_chain_{chain}" self._msg_pipe, remote_conn = multiprocessing.Pipe() self._shared_point = {} @@ -228,7 +228,7 @@ def __init__( size *= int(dim) size *= dtype.itemsize if size != ctypes.c_size_t(size).value: - raise ValueError("Variable %s is too large" % name) + raise ValueError(f"Variable {name} is too large") array = mp_ctx.RawArray("c", size) self._shared_point[name] = (array, shape, dtype) @@ -388,7 +388,7 @@ def __init__( mp_ctx=None, ): if any(len(arg) != chains for arg in [seeds, start_points]): - raise ValueError("Number of seeds and start_points must be %s." 
% chains) + raise ValueError(f"Number of seeds and start_points must be {chains}.") if mp_ctx is None or isinstance(mp_ctx, str): # Closes issue https://github.com/pymc-devs/pymc/issues/3849 diff --git a/pymc/step_methods/hmc/integration.py b/pymc/step_methods/hmc/integration.py index c8defa2e819..2d1e725cde9 100644 --- a/pymc/step_methods/hmc/integration.py +++ b/pymc/step_methods/hmc/integration.py @@ -51,7 +51,7 @@ def __init__(self, potential: QuadPotential, logp_dlogp_func): def compute_state(self, q: RaveledVars, p: RaveledVars): """Compute Hamiltonian functions using a position and momentum.""" if q.data.dtype != self._dtype or p.data.dtype != self._dtype: - raise ValueError("Invalid dtype. Must be %s" % self._dtype) + raise ValueError(f"Invalid dtype. Must be {self._dtype}") logp, dlogp = self._logp_dlogp_func(q) diff --git a/pymc/step_methods/metropolis.py b/pymc/step_methods/metropolis.py index 6c3f2b8a095..a816728cefb 100644 --- a/pymc/step_methods/metropolis.py +++ b/pymc/step_methods/metropolis.py @@ -178,7 +178,7 @@ def __init__( elif S.ndim == 2: self.proposal_dist = MultivariateNormalProposal(S) else: - raise ValueError("Invalid rank for variance: %s" % S.ndim) + raise ValueError(f"Invalid rank for variance: {S.ndim}") self.scaling = np.atleast_1d(scaling).astype("d") self.tune = tune diff --git a/pymc/variational/inference.py b/pymc/variational/inference.py index 42dad4a404b..0c596d2d981 100644 --- a/pymc/variational/inference.py +++ b/pymc/variational/inference.py @@ -72,8 +72,8 @@ def _maybe_score(self, score): score = returns_loss elif score and not returns_loss: warnings.warn( - "method `fit` got `score == True` but %s " - "does not return loss. Ignoring `score` argument" % self.objective.op + f"method `fit` got `score == True` but {self.objective.op} " + "does not return loss. 
Ignoring `score` argument" ) score = False else: diff --git a/pymc/variational/opvi.py b/pymc/variational/opvi.py index 35f924c1a7b..e1165a874cd 100644 --- a/pymc/variational/opvi.py +++ b/pymc/variational/opvi.py @@ -375,7 +375,7 @@ def step_function( if fn_kwargs is None: fn_kwargs = {} if score and not self.op.returns_loss: - raise NotImplementedError("%s does not have loss" % self.op) + raise NotImplementedError(f"{self.op} does not have loss") updates = self.updates( obj_n_mc=obj_n_mc, tf_n_mc=tf_n_mc, @@ -416,7 +416,7 @@ def score_function( if fn_kwargs is None: fn_kwargs = {} if not self.op.returns_loss: - raise NotImplementedError("%s does not have loss" % self.op) + raise NotImplementedError(f"{self.op} does not have loss") if more_replacements is None: more_replacements = {} loss = self(sc_n_mc, more_replacements=more_replacements) @@ -496,13 +496,13 @@ def apply(self, f): # pragma: no cover def __call__(self, f=None): if self.has_test_function: if f is None: - raise ParametrizationError("Operator %s requires TestFunction" % self) + raise ParametrizationError(f"Operator {self} requires TestFunction") else: if not isinstance(f, TestFunction): f = TestFunction.from_function(f) else: if f is not None: - warnings.warn("TestFunction for %s is redundant and removed" % self, stacklevel=3) + warnings.warn(f"TestFunction for {self} is redundant and removed", stacklevel=3) else: pass f = TestFunction() @@ -555,7 +555,7 @@ def setup(self, approx): @classmethod def from_function(cls, f): if not callable(f): - raise ParametrizationError("Need callable, got %r" % f) + raise ParametrizationError(f"Need callable, got {f!r}") obj = TestFunction() obj.__call__ = f return obj @@ -1512,7 +1512,7 @@ def vars_names(vs): found.name = name + "_vi_random_slice" break else: - raise KeyError("%r not found" % name) + raise KeyError(f"{name!r} not found") return found @node_property diff --git a/tests/variational/test_opvi.py b/tests/variational/test_opvi.py index a196a2b60c2..5b02571898a 100644 --- a/tests/variational/test_opvi.py +++ b/tests/variational/test_opvi.py @@ -261,7 +261,7 @@ def test_logq_mini_2_sample_2_var(parametric_grouped_approxes, three_var_model): def test_logq_globals(three_var_approx): if not three_var_approx.has_logq: - pytest.skip("%s does not implement logq" % three_var_approx) + pytest.skip(f"{three_var_approx} does not implement logq") approx = three_var_approx logq, symbolic_logq = approx.set_size_and_deterministic( [approx.logq, approx.symbolic_logq], 1, 0 From d96ccf8cb9ab09f03f0484bed624f6c84a359f41 Mon Sep 17 00:00:00 2001 From: Virgile Andreani Date: Tue, 28 May 2024 05:47:16 -0400 Subject: [PATCH 6/6] Remove a few more uses of .format --- pymc/distributions/shape_utils.py | 14 ++++---------- pymc/gp/util.py | 5 ++--- pymc/printing.py | 12 ++++++------ 3 files changed, 12 insertions(+), 19 deletions(-) diff --git a/pymc/distributions/shape_utils.py b/pymc/distributions/shape_utils.py index 7a4b0a95c18..1d0fee588d0 100644 --- a/pymc/distributions/shape_utils.py +++ b/pymc/distributions/shape_utils.py @@ -141,11 +141,8 @@ def broadcast_dist_samples_shape(shapes, size=None): if size is None: broadcasted_shape = np.broadcast_shapes(*shapes) if broadcasted_shape is None: - raise ValueError( - "Cannot broadcast provided shapes {} given size: {}".format( - ", ".join([f"{s}" for s in shapes]), size - ) - ) + tmp = ", ".join([f"{s}" for s in shapes]) + raise ValueError(f"Cannot broadcast provided shapes {tmp} given size: {size}") return broadcasted_shape shapes = [_check_shape_type(s) 
     _size = to_tuple(size)
@@ -154,11 +151,8 @@ def broadcast_dist_samples_shape(shapes, size=None):
     try:
         broadcast_shape = np.broadcast_shapes(*sp_shapes)
     except ValueError:
-        raise ValueError(
-            "Cannot broadcast provided shapes {} given size: {}".format(
-                ", ".join([f"{s}" for s in shapes]), size
-            )
-        )
+        tmp = ", ".join([f"{s}" for s in shapes])
+        raise ValueError(f"Cannot broadcast provided shapes {tmp} given size: {size}")
     broadcastable_shapes = []
     for shape, sp_shape in zip(shapes, sp_shapes):
         if _size == shape[: len(_size)]:
diff --git a/pymc/gp/util.py b/pymc/gp/util.py
index ba20130a3d6..4a4a18fda42 100644
--- a/pymc/gp/util.py
+++ b/pymc/gp/util.py
@@ -143,9 +143,8 @@ def getter(self):
         value = getattr(self, name, None)
         if value is None:
             raise AttributeError(
-                "'{}' not set. Provide as argument "
-                "to condition, or call 'prior' "
-                "first".format(name.lstrip("_"))
+                f"'{name.lstrip('_')}' not set. Provide as argument "
+                "to condition, or call 'prior' first"
             )
         else:
             return value
diff --git a/pymc/printing.py b/pymc/printing.py
index 6c6bdbb71d2..f1a34c6f95a 100644
--- a/pymc/printing.py
+++ b/pymc/printing.py
@@ -65,12 +65,11 @@ def str_for_dist(
             else r"\\operatorname{Unknown}"
         )
         if include_params:
+            params = ",~".join([d.strip("$") for d in dist_args])
             if print_name:
-                return r"${} \sim {}({})$".format(
-                    print_name, op_name, ",~".join([d.strip("$") for d in dist_args])
-                )
+                return rf"${print_name} \sim {op_name}({params})$"
             else:
-                return r"${}({})$".format(op_name, ",~".join([d.strip("$") for d in dist_args]))
+                return rf"${op_name}({params})$"
 
         else:
             if print_name:
@@ -83,10 +82,11 @@ def str_for_dist(
             dist.owner.op._print_name[0] if hasattr(dist.owner.op, "_print_name") else "Unknown"
         )
         if include_params:
+            params = ", ".join(dist_args)
             if print_name:
-                return r"{} ~ {}({})".format(print_name, dist_name, ", ".join(dist_args))
+                return rf"{print_name} ~ {dist_name}({params})"
             else:
-                return r"{}({})".format(dist_name, ", ".join(dist_args))
+                return rf"{dist_name}({params})"
         else:
             if print_name:
                 return rf"{print_name} ~ {dist_name}"