From 6defa80deda0049454604e44f2328621b5f4c42a Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Fri, 18 Aug 2023 16:32:35 -0400 Subject: [PATCH 01/18] refactor to create a class for afids An `afids.py` file has been added in order to create an AfidSet dict class, enabling the storing of both metadata information, as well as loading fiducials from files directly into a dataframe. The choice of using a dataframe here is for easier downstream manipulation. - `get_afid` has been moved into here as a method of the AfidSet class Moved extension-specific file handling into its own `.py` files (e.g. `extensions/fcsv.py`). This choice was made to simplify the maintenance of handling different file extensions. - Separate semi-private methods are used for grabbing the metadata, as well as loading the fiducials into a polars dataframe. Additionally `io.py` has been refactored to generalize the handling of different file types (calling the methods within the different `extensions`). Updates poetry.lock and pyproject.toml to include the `attrs` library --- afids_utils/afids.py | 61 ++++++ afids_utils/exceptions.py | 15 +- afids_utils/ext/__init__.py | 0 afids_utils/ext/fcsv.py | 130 ++++++++++++ afids_utils/io.py | 191 ++++++++---------- afids_utils/resources/afids_descs.json | 36 ++++ afids_utils/resources/template.fcsv | 8 +- .../data/tpl-MNI152NLin2009cAsym_afids.fcsv | 8 +- poetry.lock | 35 +--- pyproject.toml | 8 +- 10 files changed, 341 insertions(+), 151 deletions(-) create mode 100644 afids_utils/afids.py create mode 100644 afids_utils/ext/__init__.py create mode 100644 afids_utils/ext/fcsv.py create mode 100644 afids_utils/resources/afids_descs.json diff --git a/afids_utils/afids.py b/afids_utils/afids.py new file mode 100644 index 00000000..b1cb7736 --- /dev/null +++ b/afids_utils/afids.py @@ -0,0 +1,61 @@ +"""Anatomical fiducial classes""" +from __future__ import annotations + +import attr +import numpy as np +import polars as pl +from numpy.typing import NDArray + +from 
afids_utils.exceptions import InvalidFiducialError + + +@attr.define +class AfidSet(dict): + """Base class for a set of fiducials""" + + slicer_version: str = attr.field() + coord_system: str = attr.field() + afids_df: pl.DataFrame = attr.field() + + def __attrs_post_init__(self): + self["metadata"] = { + "slicer_version": self.slicer_version, + "coord_system": self.coord_system, + } + self["afids"] = self.afids_df + + def get_afid(self, label: int) -> NDArray[np.single]: + """ + Extract a specific AFID's spatial coordinates + + Parameters + ---------- + label + Unique fiducial number to extract from + + Returns + ------- + numpy.ndarray[shape=(3,), dtype=numpy.single] + NumPy array containing spatial coordinates (x, y, z) of single AFID + coordinate + + Raises + ------ + InvalidFiducialError + If none or more than expected number of fiducials exist + """ + + # Filter based off of integer type + if isinstance(label, int): + # Fiducial selection out of bounds + if label < 1 or label > len(self["afids"]): + raise InvalidFiducialError( + f"Fiducial number {label} is not valid." 
+ ) + + return ( + self["afids"] + .filter(pl.col("id") == str(label)) + .select("x_mm", "y_mm", "z_mm") + .to_numpy()[0] + ) diff --git a/afids_utils/exceptions.py b/afids_utils/exceptions.py index beee5569..aa90c5a6 100644 --- a/afids_utils/exceptions.py +++ b/afids_utils/exceptions.py @@ -1,8 +1,15 @@ """Custom exceptions""" -class InvalidFiducialNumberError(Exception): - """Exception for invalid fiducial number""" +class InvalidFileError(Exception): + """Exception raised when file to be parsed is invalid""" - def __init__(self, fid_num: int) -> None: - super().__init__(f"Provided fiducial {fid_num} is not valid.") + def __init__(self, message): + super().__init__(message) + + +class InvalidFiducialError(Exception): + """Exception for invalid fiducial selection""" + + def __init__(self, message) -> None: + super().__init__(message) diff --git a/afids_utils/ext/__init__.py b/afids_utils/ext/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/afids_utils/ext/fcsv.py b/afids_utils/ext/fcsv.py new file mode 100644 index 00000000..32be2c46 --- /dev/null +++ b/afids_utils/ext/fcsv.py @@ -0,0 +1,130 @@ +"""Methods for handling .fcsv files associated with AFIDs""" +from __future__ import annotations + +import json +import re +from importlib import resources +from itertools import islice +from os import PathLike +from typing import Dict + +import polars as pl + +from afids_utils.afids import AfidSet +from afids_utils.exceptions import InvalidFileError + +HEADER_ROWS: int = 2 +FCSV_COLS: Dict[str] = { + "x": pl.Float32, + "y": pl.Float32, + "z": pl.Float32, + "label": pl.Utf8, + "desc": pl.Utf8, +} + + +def _get_metadata(fcsv_path: PathLike[str] | str) -> tuple[str, str]: + """ + Internal function to extract metadata from header of fcsv files + + Parameters + ---------- + fcsv_path + Path to .fcsv file containing AFIDs coordinates + + Returns + ------- + parsed_version + Slicer version associated with fiducial file + + parsed_coord + Coordinate system 
of fiducials + + Raises + ------ + InvalidFileError + If header is missing or invalid from .fcsv file + """ + try: + with open(fcsv_path, "r") as fcsv: + header = list(islice(fcsv, HEADER_ROWS)) + + parsed_version = re.findall(r"\d+\.\d+", header[0])[0] + parsed_coord = re.split(r"\s", header[1])[-2] + except IndexError: + raise InvalidFileError("Missing or invalid header in .fcsv file") + + # Set to human-understandable coordinate system + if parsed_coord == "0": + parsed_coord = "LPS" + elif parsed_coord == "1": + parsed_coord = "RAS" + + if parsed_coord not in ["LPS", "RAS"]: + raise InvalidFileError("Invalid coordinate system in header") + + return parsed_version, parsed_coord + + +def _get_afids(fcsv_path: PathLike[str] | str) -> pl.DataFrame: + """ + Internal function for converting .fcsv file to a pl.DataFrame + + Parameters + ---------- + fcsv_path + Path to .fcsv file containing AFID coordinates + + Returns + ------- + pl.DataFrame + Dataframe containing afids ids, descriptions, and coordinates + """ + # Read in fiducials to dataframe, shortening id header + afids_df = pl.read_csv( + fcsv_path, + skip_rows=HEADER_ROWS, + columns=list(FCSV_COLS.keys()), + new_columns=["x_mm", "y_mm", "z_mm"], + dtypes=FCSV_COLS, + ) + + return afids_df + + +def load_fcsv( + fcsv_path: PathLike[str] | str, species: str = "human" +) -> AfidSet: + """ + Read in fcsv to an AfidSet + + Parameters + ---------- + fcsv_path + Path to .fcsv file containing AFIDs coordinates + + species + The species associated with the .fcsv file (default: human) + + Returns + ------- + AfidSet + Set of anatomical fiducials containing spatial coordinates and metadata + """ + # Load expected mappings + with resources.open_text( + "afids_utils.resources", "afids_descs.json" + ) as json_fpath: + json.load(json_fpath) + + # Grab metadata + slicer_version, coord_system = _get_metadata(fcsv_path) + + # Grab afids + afids_set = AfidSet( + slicer_version=slicer_version, + coord_system=coord_system, + 
afids_df=_get_afids(fcsv_path), + ) + + return afids_set diff --git a/afids_utils/io.py b/afids_utils/io.py index e8b96080..a98565b5 100644 --- a/afids_utils/io.py +++ b/afids_utils/io.py @@ -1,118 +1,99 @@ -"""Methods for loading and saving files associated with AFIDs""" +"""General methods for loading and saving files associated with AFIDs""" from __future__ import annotations -import csv -from importlib import resources from os import PathLike -import numpy as np -import polars as pl -from numpy.typing import NDArray - -from afids_utils.exceptions import InvalidFiducialNumberError - -FCSV_FIELDNAMES = { - "# columns = id": pl.Utf8, - "x": pl.Float32, - "y": pl.Float32, - "z": pl.Float32, - "ow": pl.UInt8, - "ox": pl.UInt8, - "oy": pl.UInt8, - "oz": pl.UInt8, - "vis": pl.UInt8, - "sel": pl.UInt8, - "lock": pl.UInt8, - "label": pl.UInt8, - "desc": pl.Utf8, - "associatedNodeID": pl.Utf8, -} - - -def get_afid( - fcsv_path: PathLike[str] | str, fid_num: int -) -> NDArray[np.single]: +from afids_utils.afids import AfidSet +from afids_utils.extensions.fcsv import load_fcsv + +def load(afids_fpath: PathLike[str] | str) -> AfidSet: """ - Extract specific fiducial's spatial coordinates + Load an AFIDs file Parameters ---------- - fcsv_path : os.PathLike[str] | str - Path to .fcsv file to extract AFID coordinates from - - fid_num : int - Unique fiducial number to extract from .fcsv file + afids_fpath : os.PathLike[str] | str + Path to .fcsv or .json file containing AFIDs information Returns ------- - numpy.ndarray[shape=(3,), dtype=numpy.single] - NumPy array containing spatial coordinates (x, y, z) of single AFID - coordinate - """ - if fid_num < 1 or fid_num > 32: - raise InvalidFiducialNumberError(fid_num) - fcsv_df = pl.scan_csv( - fcsv_path, separator=",", skip_rows=2, dtypes=FCSV_FIELDNAMES - ) - - return ( - fcsv_df.filter(pl.col("label") == fid_num) - .select("x", "y", "z") - .collect() - .to_numpy()[0] - ) - - -def afids_to_fcsv( - afid_coords: NDArray[np.single], 
- fcsv_output: PathLike[str] | str, -) -> None: - """ - Save AFIDS to Slicer-compatible .fcsv file - - Parameters - ---------- - afid_coords : numpy.ndarray[shape=(N, 3), dtype=numpy.single] - Floating-point NumPy array containing spatial coordinates (x, y, z) of - `N` AFIDs - - fcsv_output : os.PathLike[str] | str - Path of file (including filename and extension) to save AFIDs to + AfidSet + Set of anatomical fiducials containing spatial coordinates and metadata + Raises + ------ + IOError + If extension to fiducial file is not .fcsv or .json or does not exist """ - # Read in fcsv template - with resources.open_text( - "afids_utils.resources", "template.fcsv" - ) as template_fcsv_file: - header = [template_fcsv_file.readline() for _ in range(3)] - reader = csv.DictReader( - template_fcsv_file, fieldnames=list(FCSV_FIELDNAMES.keys()) - ) - fcsv = list(reader) - - # Check to make sure shape of AFIDs array matches expected template - if afid_coords.shape[0] != len(fcsv): - raise TypeError( - f"Expected {len(fcsv)} AFIDs, but received {afid_coords.shape[0]}." - ) - if afid_coords.shape[1] != 3: - raise TypeError( - "Expected 3 spatial dimensions (x, y, z)," - f"but received {afid_coords.shape[1]}." 
- ) - - # Loop over fiducials and update with fiducial spatial coordinates - for idx, row in enumerate(fcsv): - row["x"] = afid_coords[idx][0] - row["y"] = afid_coords[idx][1] - row["z"] = afid_coords[idx][2] - - # Write output fcsv - with open(fcsv_output, "w", encoding="utf-8", newline="") as out_fcsv_file: - for line in header: - out_fcsv_file.write(line) - writer = csv.DictWriter( - out_fcsv_file, fieldnames=list(FCSV_FIELDNAMES.keys()) - ) - for row in fcsv: - writer.writerow(row) + afids_fpath = Path(afids_fpath) + + # Check if file exists + if not afids_fpath.exists(): + raise IOError("Provided AFID file does not exist") + + afids_fpath_ext = afids_fpath.suffix + + # Loading fcsv + if afids_fpath_ext = ".fcsv": + load_fcsv(afids_fpath) + # Loading json + # if afids_fpath_ext = ".json": + # load_json(afids_path) + else: + raise IOError("Invalid file extension") + + + +# def afids_to_fcsv( +# afid_coords: NDArray[np.single], +# fcsv_output: PathLike[str] | str, +# ) -> None: +# """ +# Save AFIDS to Slicer-compatible .fcsv file + +# Parameters +# ---------- +# afid_coords : numpy.ndarray[shape=(N, 3), dtype=numpy.single] +# Floating-point NumPy array containing spatial coordinates (x, y, z) of +# `N` AFIDs + +# fcsv_output : os.PathLike[str] | str +# Path of file (including filename and extension) to save AFIDs to + +# """ +# # Read in fcsv template +# with resources.open_text( +# "afids_utils.resources", "template.fcsv" +# ) as template_fcsv_file: +# header = [template_fcsv_file.readline() for _ in range(3)] +# reader = csv.DictReader( +# template_fcsv_file, fieldnames=list(FCSV_FIELDNAMES.keys()) +# ) +# fcsv = list(reader) + +# # Check to make sure shape of AFIDs array matches expected template +# if afid_coords.shape[0] != len(fcsv): +# raise TypeError( +# f"Expected {len(fcsv)} AFIDs, but received {afid_coords.shape[0]}." 
+# ) +# if afid_coords.shape[1] != 3: +# raise TypeError( +# "Expected 3 spatial dimensions (x, y, z)," +# f"but received {afid_coords.shape[1]}." +# ) + +# # Loop over fiducials and update with fiducial spatial coordinates +# for idx, row in enumerate(fcsv): +# row["x"] = afid_coords[idx][0] +# row["y"] = afid_coords[idx][1] +# row["z"] = afid_coords[idx][2] + +# # Write output fcsv +# with open(fcsv_output, "w", encoding="utf-8", newline="") as out_fcsv_file: +# for line in header: +# out_fcsv_file.write(line) +# writer = csv.DictWriter( +# out_fcsv_file, fieldnames=list(FCSV_FIELDNAMES.keys()) +# ) +# for row in fcsv: +# writer.writerow(row) diff --git a/afids_utils/resources/afids_descs.json b/afids_utils/resources/afids_descs.json new file mode 100644 index 00000000..2ffe8e2a --- /dev/null +++ b/afids_utils/resources/afids_descs.json @@ -0,0 +1,36 @@ +{ + "human": [ + "AC", + "PC", + ["infracollicular sulcus", "ICS"], + "PMJ", + ["superior interpeduncular fossa", "SIPF"], + ["R superior LMS", "RSLMS"], + ["L superior LMS", "LSLMS"], + ["R inferior LMS", "RILMS"], + ["L inferior LMS", "LILMS"], + ["Culmen", "CUL"], + ["Intermammillary sulcus", "IMS"], + ["R MB", "RMB"], + ["L MB", "LMB"], + ["pineal gland", "PG"], + ["R LV at AC", "RLVAC"], + ["L LV at AC", "LLVAC"], + ["R LV at PC", "RLVPC"], + ["L LV at PC", "LLVPC"], + ["Genu of CC", "GENU"], + ["Splenium of CC", "SPLE"], + ["R AL temporal horn", "RALTH"], + ["L AL temporal horn", "LALTH"], + ["R superior AM temporal horn", "RSAMTH"], + ["L superior AM temporal horn", "LSAMTH"], + ["R inferior AM temporal horn", "RIAMTH"], + ["L inferior AM temporal horn", "LIAMTH"], + ["R indusium griseum origin", "RIGO"], + ["L indusium griseum origin", "LIGO"], + ["R ventral occipital horn", "RVOH"], + ["L ventral occipital horn", "LVOH"], + ["R olfactory sulcal fundus", "ROSF"], + ["L olfactory sulcal fundus", "LOSF"] + ] +} diff --git a/afids_utils/resources/template.fcsv b/afids_utils/resources/template.fcsv index 
43e8f6c5..c88414b9 100644 --- a/afids_utils/resources/template.fcsv +++ b/afids_utils/resources/template.fcsv @@ -10,8 +10,8 @@ vtkMRMLMarkupsFiducialNode_6,0,0,0,0,0,0,1,1,1,1,6,R superior LMS,vtkMRMLScalarV vtkMRMLMarkupsFiducialNode_7,0,0,0,0,0,0,1,1,1,1,7,L superior LMS,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_8,0,0,0,0,0,0,1,1,1,1,8,R inferior LMS,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_9,0,0,0,0,0,0,1,1,1,1,9,L inferior LMS,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_10,0,0,0,0,0,0,1,1,1,1,10,culmen,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_11,0,0,0,0,0,0,1,1,1,1,11,intermammillary sulcus,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_10,0,0,0,0,0,0,1,1,1,1,10,Culmen,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_11,0,0,0,0,0,0,1,1,1,1,11,Intermammillary sulcus,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_12,0,0,0,0,0,0,1,1,1,1,12,R MB,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_13,0,0,0,0,0,0,1,1,1,1,13,L MB,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_14,0,0,0,0,0,0,1,1,1,1,14,pineal gland,vtkMRMLScalarVolumeNode1 @@ -19,8 +19,8 @@ vtkMRMLMarkupsFiducialNode_15,0,0,0,0,0,0,1,1,1,1,15,R LV at AC,vtkMRMLScalarVol vtkMRMLMarkupsFiducialNode_16,0,0,0,0,0,0,1,1,1,1,16,L LV at AC,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_17,0,0,0,0,0,0,1,1,1,1,17,R LV at PC,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_18,0,0,0,0,0,0,1,1,1,1,18,L LV at PC,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_19,0,0,0,0,0,0,1,1,1,1,19,genu of CC,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_20,0,0,0,0,0,0,1,1,1,1,20,splenium,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_19,0,0,0,0,0,0,1,1,1,1,19,Genu of CC,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_20,0,0,0,0,0,0,1,1,1,1,20,Splenium,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_21,0,0,0,0,0,0,1,1,1,1,21,R AL temporal horn,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_22,0,0,0,0,0,0,1,1,1,1,22,L AL 
temporal horn,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_23,0,0,0,0,0,0,1,1,1,1,23,R superior AM temporal horn,vtkMRMLScalarVolumeNode1 diff --git a/afids_utils/tests/data/tpl-MNI152NLin2009cAsym_afids.fcsv b/afids_utils/tests/data/tpl-MNI152NLin2009cAsym_afids.fcsv index bba9bf7f..00218287 100644 --- a/afids_utils/tests/data/tpl-MNI152NLin2009cAsym_afids.fcsv +++ b/afids_utils/tests/data/tpl-MNI152NLin2009cAsym_afids.fcsv @@ -10,8 +10,8 @@ vtkMRMLMarkupsFiducialNode_6,12.67275,-26.960749999999997,-10.38225,0,0,0,1,1,1, vtkMRMLMarkupsFiducialNode_7,-13.004999999999999,-27.190749999999998,-10.32375,0,0,0,1,1,1,0,7,L superior LMS,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_8,10.873,-30.96475,-21.533,0,0,0,1,1,1,0,8,R inferior LMS,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_9,-11.18,-30.437000000000005,-21.537,0,0,0,1,1,1,0,9,L inferior LMS,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_10,-0.004750000000000004,-52.3695,2.06825,0,0,0,1,1,1,0,10,culmen,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_11,-0.0665,-8.131499999999999,-14.7755,0,0,0,1,1,1,0,11,intermammillary sulcus,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_10,-0.004750000000000004,-52.3695,2.06825,0,0,0,1,1,1,0,10,Culmen,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_11,-0.0665,-8.131499999999999,-14.7755,0,0,0,1,1,1,0,11,Intermammillary sulcus,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_12,2.0255,-8.143500000000001,-14.752749999999999,0,0,0,1,1,1,0,12,R MB,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_13,-2.3954999999999997,-8.0565,-14.8675,0,0,0,1,1,1,0,13,L MB,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_14,0.04425,-31.73675,0.76675,0,0,0,1,1,1,0,14,pineal gland,vtkMRMLScalarVolumeNode1 @@ -19,8 +19,8 @@ vtkMRMLMarkupsFiducialNode_15,15.141750000000002,5.3445,24.358249999999998,0,0,0 vtkMRMLMarkupsFiducialNode_16,-15.6095,5.22575,24.63125,0,0,0,1,1,1,0,16,L LV at AC,vtkMRMLScalarVolumeNode1 
vtkMRMLMarkupsFiducialNode_17,18.4955,-22.061999999999998,27.66,0,0,0,1,1,1,0,17,R LV at PC,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_18,-18.999000000000002,-22.046,27.284,0,0,0,1,1,1,0,18,L LV at PC,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_19,0.17799999999999996,33.423500000000004,2.6755,0,0,0,1,1,1,0,19,genu of CC,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_20,0.037000000000000005,-37.6845,6.14675,0,0,0,1,1,1,0,20,splenium of CC,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_19,0.17799999999999996,33.423500000000004,2.6755,0,0,0,1,1,1,0,19,Genu of CC,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_20,0.037000000000000005,-37.6845,6.14675,0,0,0,1,1,1,0,20,Splenium of CC,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_21,34.312250000000006,-4.982,-26.87875,0,0,0,1,1,1,0,21,R AL temporal horn,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_22,-34.72725,-7.2475,-25.203,0,0,0,1,1,1,0,22,L AL temporal horn,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_23,18.69625,-10.56775,-17.7725,0,0,0,1,1,1,0,23,R superior AM temporal horn,vtkMRMLScalarVolumeNode1 diff --git a/poetry.lock b/poetry.lock index 558dab8e..c0c8324c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -23,7 +22,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "black" version = "23.3.0" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -73,7 +71,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -88,7 +85,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -100,7 +96,6 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -176,7 +171,6 @@ toml = ["tomli"] name = "distro" version = "1.8.0" description = "Distro - an OS platform information API" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -188,7 +182,6 @@ files = [ name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -203,7 +196,6 @@ test = ["pytest (>=6)"] name = "hypothesis" version = "6.77.0" description = "A library for property-based testing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -237,7 +229,6 @@ zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.3)"] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -249,7 +240,6 @@ files = [ name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." 
-category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -267,7 +257,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "maison" version = "1.4.0" description = "Maison" -category = "dev" optional = false python-versions = ">=3.6.1,<4.0.0" files = [ @@ -284,7 +273,6 @@ toml = ">=0.10.2,<0.11.0" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -296,7 +284,6 @@ files = [ name = "numpy" version = "1.24.3" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -334,7 +321,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -346,7 +332,6 @@ files = [ name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -358,7 +343,6 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -370,7 +354,6 @@ files = [ name = "platformdirs" version = "3.5.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -386,7 +369,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -402,7 +384,6 @@ testing = ["pytest", "pytest-benchmark"] name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -421,7 +402,6 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "polars" version = "0.18.3" description = "Blazingly fast DataFrame library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -454,7 +434,6 @@ xlsxwriter = ["xlsxwriter"] name = "pydantic" version = "1.10.8" description = "Data validation and settings management using python type hints" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -507,7 +486,6 @@ email = ["email-validator (>=1.0.3)"] name = "pytest" version = "7.3.1" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -530,7 +508,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -549,7 +526,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "ruff" version = "0.0.270" description = "An extremely fast Python linter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -576,7 +552,6 @@ files = [ name = "ruyaml" version = "0.91.0" description = "ruyaml is a fork of ruamel.yaml" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -595,7 +570,6 @@ docs = ["Sphinx"] name = "setuptools" version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -612,7 +586,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "dev" optional = false python-versions = "*" files = [ @@ -624,7 +597,6 @@ files = [ name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -636,7 +608,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -648,7 +619,6 @@ files = [ name = "typing-extensions" version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -660,7 +630,6 @@ files = [ name = "yamlfix" version = "1.10.0" description = "A simple opionated yaml formatter that keeps your comments!" 
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -676,4 +645,4 @@ ruyaml = ">=0.91.0" [metadata] lock-version = "2.0" python-versions = ">=3.8, <3.11" -content-hash = "6df38d38052f742632a5f44cc85b774e3ca01babbfd2d1340cf31d3268164be9" +content-hash = "12f7c55b8940c1e5739172158cafa827fc11a0ac60c7caba9abae60dacfdfe44" diff --git a/pyproject.toml b/pyproject.toml index 9d581f08..5b3eedb1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,7 @@ exclude = ["afids_utils/tests"] [tool.poetry.dependencies] python = ">=3.8, <3.11" +attrs = "^23.1.0" numpy = "^1.24.3" polars = {extras = ["numpy"], version = "^0.18.3"} @@ -42,7 +43,6 @@ ruff-check = "ruff afids_utils" quality = ["yamlfix", "isort", "black", "ruff"] quality-check = ["yamlfix-check", "isort-check", "black-check", "ruff-check"] test = "pytest --doctest-modules afids_utils/tests" -test_cov = "pytest --doctest-modules --cov=afids_utils afids_utils/tests/ --cov-report xml:../cov.xml" [tool.coverage] omit = "afids_utils/tests/*" @@ -73,4 +73,10 @@ shell = """ find . 
-type f \\( \ -iname \\*.yaml -o -iname \\*.yml \ \\) -exec yamlfix --check {} \\; +""" + +[tool.poe.tasks.test_cov] +shell = """ +pytest --doctest-modules --cov=afids_utils afids_utils/tests/ \ +--cov-report xml:../cov.xml """ \ No newline at end of file From ddaea1be05141cb33e4a6e3f7609713dc3683b7d Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Tue, 22 Aug 2023 08:33:45 -0400 Subject: [PATCH 02/18] add tests for handling fcsv extension --- afids_utils/ext/fcsv.py | 82 +++++++++++-- afids_utils/io.py | 40 +++++- afids_utils/tests/test_ext.py | 221 ++++++++++++++++++++++++++++++++++ 3 files changed, 330 insertions(+), 13 deletions(-) create mode 100644 afids_utils/tests/test_ext.py diff --git a/afids_utils/ext/fcsv.py b/afids_utils/ext/fcsv.py index 32be2c46..40356a97 100644 --- a/afids_utils/ext/fcsv.py +++ b/afids_utils/ext/fcsv.py @@ -1,6 +1,7 @@ """Methods for handling .fcsv files associated with AFIDs""" from __future__ import annotations +import csv import json import re from importlib import resources @@ -9,11 +10,28 @@ from typing import Dict import polars as pl +from numpy.typing import NDArray from afids_utils.afids import AfidSet from afids_utils.exceptions import InvalidFileError HEADER_ROWS: int = 2 +FCSV_FIELDNAMES = ( + "# columns = id", + "x", + "y", + "z", + "ow", + "ox", + "oy", + "oz", + "vis", + "sel", + "lock", + "label", + "desc", + "associatedNodeID", +) FCSV_COLS: Dict[str] = { "x": pl.Float32, "y": pl.Float32, @@ -93,7 +111,7 @@ def _get_afids(fcsv_path: PathLike[str] | str) -> pl.DataFrame: def load_fcsv( - fcsv_path: PathLike[str] | str, species: str = "human" + fcsv_path: PathLike[str] | str, ) -> AfidSet: """ Read in fcsv to an AfidSet @@ -103,20 +121,11 @@ def load_fcsv( fcsv_path Path to .fcsv file containing AFIDs coordinates - species - The species associated with the .fcsv file (default: human) - Returns ------- AfidSet Set of anatomical fiducials containing spatial coordinates and metadata """ - # Load expected mappings - with 
resources.open_text( - "afids_utils.resources", "afids_descs.json" - ) as json_fpath: - json.load(json_fpath) - # Grab metadata slicer_version, coord_system = _get_metadata(fcsv_path) @@ -128,3 +137,56 @@ def load_fcsv( ) return afids_set + + +# TODO: Handle metadata - specifically setting the coordinate system +def save_fcsv( + afid_coords: NDArray[np.single], + out_fcsv: PathLike[str] | str, +) -> None: + """ + Save fiducials to output fcsv file + + Parameters + ---------- + afid_coords + Floating-point NumPy array containing spatial coordinates (x, y, z) + + out_fcsv + Path of fcsv file to save AFIDs to + + """ + # Read in fcsv template + with resources.open_text( + "afids_utils.resources", "template.fcsv" + ) as template_fcsv_file: + header = [template_fcsv_file.readline() for _ in range(HEADER_ROWS+1)] + reader = csv.DictReader(template_fcsv_file, fieldnames=FCSV_FIELDNAMES) + fcsv = list(reader) + + # Check to make sure shape of AFIDs array matches expected template + if afid_coords.shape[0] != len(fcsv): + raise TypeError( + f"Expected {len(fcsv)} AFIDs, but received {afid_coords.shape[0]}" + ) + if afid_coords.shape[1] != 3: + raise TypeError( + "Expected 3 spatial dimensions (x, y, z)," + f"but received {afid_coords.shape[1]}" + ) + + # Loop over fiducials and update with fiducial spatial coordinates + for idx, row in enumerate(fcsv): + row["x"] = afid_coords[idx][0] + row["y"] = afid_coords[idx][1] + row["z"] = afid_coords[idx][2] + + # Write output fcsv + with open(out_fcsv, "w", encoding="utf-8", newline="") as out_fcsv_file: + for line in header: + out_fcsv_file.write(line) + writer = csv.DictWriter(out_fcsv_file, fieldnames=FCSV_FIELDNAMES) + + for row in fcsv: + writer.writerow(row) + \ No newline at end of file diff --git a/afids_utils/io.py b/afids_utils/io.py index a98565b5..24fced7f 100644 --- a/afids_utils/io.py +++ b/afids_utils/io.py @@ -2,9 +2,12 @@ from __future__ import annotations from os import PathLike +from pathlib import Path from 
afids_utils.afids import AfidSet -from afids_utils.extensions.fcsv import load_fcsv +from afids_utils.exceptions import InvalidFiducialError, InvalidFileError +from afids_utils.ext.fcsv import load_fcsv + def load(afids_fpath: PathLike[str] | str) -> AfidSet: """ @@ -24,6 +27,12 @@ def load(afids_fpath: PathLike[str] | str) -> AfidSet: ------ IOError If extension to fiducial file is not .fcsv or .json or does not exist + + InvalidFileError + If fiducial file has none or more than expected number of fiducials + + InvalidFiducialError + If description in fiducial file does not match expected """ afids_fpath = Path(afids_fpath) @@ -34,14 +43,39 @@ def load(afids_fpath: PathLike[str] | str) -> AfidSet: afids_fpath_ext = afids_fpath.suffix # Loading fcsv - if afids_fpath_ext = ".fcsv": - load_fcsv(afids_fpath) + if afids_fpath_ext == ".fcsv": + afids_set = load_fcsv(afids_fpath) # Loading json # if afids_fpath_ext = ".json": # load_json(afids_path) else: raise IOError("Invalid file extension") + # Perform validation of loaded file + # Check fiducials exist and don't exceed expected number of fiducials + if len(afids_set["afids"]) < 1: + raise InvalidFileError("No fiducials exist") + if len(afids_set["afids"]) > len(mappings[species]): + raise InvalidFileError("More fiducials than expected") + + # Validate descriptions, before dropping + for label in range(1, len(afids_set["afids"] + 1)): + desc = ( + afids_set["afids"] + .filter(pl.col("label") == str(label)) + .select("desc") + .item() + ) + + if desc not in mappings[species][label - 1]: + raise InvalidFiducialError( + f"Description for label {label} does not match expected" + ) + + # Drop description column + afids_set["afids"] = afids_set["afids"].drop("desc") + + return afids_set # def afids_to_fcsv( diff --git a/afids_utils/tests/test_ext.py b/afids_utils/tests/test_ext.py new file mode 100644 index 00000000..49e894a9 --- /dev/null +++ b/afids_utils/tests/test_ext.py @@ -0,0 +1,221 @@ +from __future__ import 
annotations + +import csv +import re +import tempfile +from os import PathLike +from pathlib import Path + +import polars as pl +import pytest +from hypothesis import HealthCheck, assume, given, settings +from hypothesis import strategies as st + +from afids_utils.afids import AfidSet +from afids_utils.ext.fcsv import ( + FCSV_FIELDNAMES, _get_metadata, load_fcsv, save_fcsv +) +from afids_utils.exceptions import InvalidFileError +from afids_utils.tests.strategies import afid_coords + + +@pytest.fixture +def valid_fcsv_file() -> PathLike[str]: + return ( + Path(__file__).parent / "data" / "tpl-MNI152NLin2009cAsym_afids.fcsv" + ) + + +class TestLoadFcsv: + def test_load_valid_fcsv(self, valid_fcsv_file: PathLike[str]): + afids_set = load_fcsv(valid_fcsv_file) + + assert isinstance(afids_set, AfidSet) + + # Check to make sure internal types are correct + assert isinstance(afids_set["metadata"], dict) + assert isinstance(afids_set["metadata"]["slicer_version"], str) + assert isinstance(afids_set["metadata"]["coord_system"], str) + assert isinstance(afids_set["afids"], pl.DataFrame) + + @given(coord_num=st.integers(min_value=0, max_value=1)) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_get_valid_metadata( + self, valid_fcsv_file: PathLike[str], coord_num: int + ): + # Randomize coordinate system + with open(valid_fcsv_file) as valid_fcsv: + valid_fcsv_data = valid_fcsv.readlines() + valid_fcsv_data[1] = valid_fcsv_data[1].replace( + "# CoordinateSystem = 0", + f"# CoordinateSystem = {str(coord_num)}", + ) + + with tempfile.NamedTemporaryFile( + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv", + ) as temp_valid_fcsv_file: + temp_valid_fcsv_file.writelines(valid_fcsv_data) + temp_valid_fcsv_file.flush() + + parsed_ver, parsed_coord = _get_metadata(temp_valid_fcsv_file.name) + + # Check version pattern matches expected + ver_regex = re.compile(r"\d+\.\d+") + assert ver_regex.match(parsed_ver) + + # Check to make 
sure coordinate system is correct + if coord_num == 0: + assert parsed_coord == "LPS" + else: + assert parsed_coord == "RAS" + + @given(coord_num=st.integers(min_value=2)) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_invalid_num_coord( + self, valid_fcsv_file: PathLike[str], coord_num: int + ): + with open(valid_fcsv_file) as valid_fcsv: + fcsv_data = valid_fcsv.readlines() + fcsv_data[1] = fcsv_data[1].replace( + "# CoordinateSystem = 0", + f"# CoordinateSystem = {str(coord_num)}", + ) + + with tempfile.NamedTemporaryFile( + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv", + ) as temp_invalid_fcsv_file: + temp_invalid_fcsv_file.writelines(fcsv_data) + temp_invalid_fcsv_file.flush() + + with pytest.raises( + InvalidFileError, match="Invalid coordinate.*" + ): + _get_metadata(temp_invalid_fcsv_file.name) + + + @given(coord_str=st.text(min_size=3, alphabet=st.characters(whitelist_categories=['Lu', 'Ll', 'Lt']))) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_invalid_str_coord( + self, valid_fcsv_file: PathLike[str], coord_str: int + ): + assume(coord_str not in ["LPS", "RAS"]) + + with open(valid_fcsv_file) as valid_fcsv: + fcsv_data = valid_fcsv.readlines() + fcsv_data[1] = fcsv_data[1].replace( + "# CoordinateSystem = 0", + f"# CoordinateSystem = {coord_str}", + ) + + with tempfile.NamedTemporaryFile( + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv", + ) as temp_invalid_fcsv_file: + temp_invalid_fcsv_file.writelines(fcsv_data) + temp_invalid_fcsv_file.flush() + + with pytest.raises( + InvalidFileError, match="Invalid coordinate.*" + ): + _get_metadata(temp_invalid_fcsv_file.name) + + + def test_invalid_header( + self, valid_fcsv_file: PathLike[str] + ): + with open(valid_fcsv_file) as valid_fcsv: + valid_fcsv_data = valid_fcsv.readlines() + invalid_fcsv_data = valid_fcsv_data[0] + + with tempfile.NamedTemporaryFile( + mode="w", + 
prefix="sub-test_desc-", + suffix="_afids.fcsv", + ) as temp_invalid_fcsv_file: + temp_invalid_fcsv_file.writelines(invalid_fcsv_data) + temp_invalid_fcsv_file.flush() + + with pytest.raises(InvalidFileError, match="Missing or invalid.*"): + _get_metadata(temp_invalid_fcsv_file.name) + + +class TestSaveFcsv: + @given(afids_coords=afid_coords()) + def test_save_fcsv_invalid_template( + self, afids_coords: NDArray[np.single] + ): + with pytest.raises(FileNotFoundError): + save_fcsv(afids_coords, "/invalid/template/path.fcsv") + + @given(afids_coords=afid_coords()) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_save_fcsv_valid_template( + self, afids_coords: NDArray[np.single], valid_fcsv_file: PathLike[str], + ): + with tempfile.NamedTemporaryFile( + mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" + ) as out_fcsv_file: + # Create and check output file + save_fcsv(afids_coords, out_fcsv_file.name) + + # Load files + with open( + valid_fcsv_file, "r", encoding="utf-8", newline="" + ) as template_fcsv_file, open( + out_fcsv_file.name, "r", encoding="utf-8", newline="" + ) as output_fcsv_file: + template_header = [ + template_fcsv_file.readline() for _ in range(3) + ] + output_header = [output_fcsv_file.readline() for _ in range(3)] + reader = csv.DictReader( + output_fcsv_file, fieldnames=FCSV_FIELDNAMES + ) + output_fcsv = list(reader) + + # Check header + assert output_header == template_header + # Check contents + for idx, row in enumerate(output_fcsv): + assert (row["x"], row["y"], row["z"]) == ( + str(afids_coords[idx][0]), + str(afids_coords[idx][1]), + str(afids_coords[idx][2]), + ) + + + @given(afids_coords=afid_coords(bad_range=True)) + def test_invalid_num_afids(self, afids_coords: NDArray[np.single]) -> None: + with tempfile.NamedTemporaryFile( + mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" + ) as out_fcsv_file: + with pytest.raises(TypeError) as err: + save_fcsv(afids_coords, out_fcsv_file) + + 
assert "AFIDs, but received" in str(err.value) + + + @given(afids_coords=afid_coords(bad_dims=True)) + def test_invalid_afids_dims( + self, afids_coords: NDArray[np.single] + ) -> None: + with tempfile.NamedTemporaryFile( + mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" + ) as out_fcsv_file: + with pytest.raises(TypeError) as err: + save_fcsv(afids_coords, out_fcsv_file) + + assert "Expected 3 spatial dimensions" in str(err.value) \ No newline at end of file From 8c7dfee042e2bc8be9f5512d7b0fff590886644c Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Tue, 22 Aug 2023 16:45:06 -0400 Subject: [PATCH 03/18] first pass at refactoring io.py and tests --- afids_utils/io.py | 116 ++++++-------- afids_utils/resources/afids_descs.json | 159 +++++++++++++++---- afids_utils/tests/test_io.py | 204 +++++++++++++++---------- 3 files changed, 301 insertions(+), 178 deletions(-) diff --git a/afids_utils/io.py b/afids_utils/io.py index 24fced7f..2cea7104 100644 --- a/afids_utils/io.py +++ b/afids_utils/io.py @@ -1,21 +1,25 @@ """General methods for loading and saving files associated with AFIDs""" from __future__ import annotations +import json +import polars as pl +from importlib import resources from os import PathLike from pathlib import Path from afids_utils.afids import AfidSet from afids_utils.exceptions import InvalidFiducialError, InvalidFileError -from afids_utils.ext.fcsv import load_fcsv +from afids_utils.ext.fcsv import load_fcsv, save_fcsv - -def load(afids_fpath: PathLike[str] | str) -> AfidSet: +def load( + afids_fpath: PathLike[str] | str, +) -> AfidSet: """ Load an AFIDs file Parameters ---------- - afids_fpath : os.PathLike[str] | str + afids_fpath Path to .fcsv or .json file containing AFIDs information Returns @@ -34,9 +38,8 @@ def load(afids_fpath: PathLike[str] | str) -> AfidSet: InvalidFiducialError If description in fiducial file does not match expected """ - afids_fpath = Path(afids_fpath) - # Check if file exists + afids_fpath = Path(afids_fpath) 
if not afids_fpath.exists(): raise IOError("Provided AFID file does not exist") @@ -49,14 +52,17 @@ def load(afids_fpath: PathLike[str] | str) -> AfidSet: # if afids_fpath_ext = ".json": # load_json(afids_path) else: - raise IOError("Invalid file extension") + raise IOError("Unsupported file extension") # Perform validation of loaded file - # Check fiducials exist and don't exceed expected number of fiducials - if len(afids_set["afids"]) < 1: - raise InvalidFileError("No fiducials exist") - if len(afids_set["afids"]) > len(mappings[species]): - raise InvalidFileError("More fiducials than expected") + # Load expected mappings + with resources.open_text( + "afids_utils.resources", "afids_descs.json" + ) as json_fpath: + mappings = json.load(json_fpath) + # Check expected number of fiducials exist + if len(afids_set["afids"]) != len(mappings['human']): + raise InvalidFileError("Unexpected number of fiducials") # Validate descriptions, before dropping for label in range(1, len(afids_set["afids"] + 1)): @@ -67,7 +73,7 @@ def load(afids_fpath: PathLike[str] | str) -> AfidSet: .item() ) - if desc not in mappings[species][label - 1]: + if desc not in mappings['human'][label - 1]: raise InvalidFiducialError( f"Description for label {label} does not match expected" ) @@ -77,57 +83,35 @@ def load(afids_fpath: PathLike[str] | str) -> AfidSet: return afids_set +# TODO: Handle the metadata +def save( + afids_coords: NDArray[np.single], + out_fpath: PathLike[str] | str, +) -> None: + """Save AFIDs to Slicer-compatible file + + Parameters + ---------- + afid_coords : numpy.ndarray[shape=(N, 3), dtype=numpy.single] + Floating-point NumPy array containing spatial coordinates (x, y, z) of + `N` AFIDs + + fcsv_output : os.PathLike[str] | str + Path of file (including filename and extension) to save AFIDs to -# def afids_to_fcsv( -# afid_coords: NDArray[np.single], -# fcsv_output: PathLike[str] | str, -# ) -> None: -# """ -# Save AFIDS to Slicer-compatible .fcsv file - -# Parameters -# 
---------- -# afid_coords : numpy.ndarray[shape=(N, 3), dtype=numpy.single] -# Floating-point NumPy array containing spatial coordinates (x, y, z) of -# `N` AFIDs - -# fcsv_output : os.PathLike[str] | str -# Path of file (including filename and extension) to save AFIDs to - -# """ -# # Read in fcsv template -# with resources.open_text( -# "afids_utils.resources", "template.fcsv" -# ) as template_fcsv_file: -# header = [template_fcsv_file.readline() for _ in range(3)] -# reader = csv.DictReader( -# template_fcsv_file, fieldnames=list(FCSV_FIELDNAMES.keys()) -# ) -# fcsv = list(reader) - -# # Check to make sure shape of AFIDs array matches expected template -# if afid_coords.shape[0] != len(fcsv): -# raise TypeError( -# f"Expected {len(fcsv)} AFIDs, but received {afid_coords.shape[0]}." -# ) -# if afid_coords.shape[1] != 3: -# raise TypeError( -# "Expected 3 spatial dimensions (x, y, z)," -# f"but received {afid_coords.shape[1]}." -# ) - -# # Loop over fiducials and update with fiducial spatial coordinates -# for idx, row in enumerate(fcsv): -# row["x"] = afid_coords[idx][0] -# row["y"] = afid_coords[idx][1] -# row["z"] = afid_coords[idx][2] - -# # Write output fcsv -# with open(fcsv_output, "w", encoding="utf-8", newline="") as out_fcsv_file: -# for line in header: -# out_fcsv_file.write(line) -# writer = csv.DictWriter( -# out_fcsv_file, fieldnames=list(FCSV_FIELDNAMES.keys()) -# ) -# for row in fcsv: -# writer.writerow(row) + Raises + ------ + IOError + If file extension is not supported + """ + + out_fpath_ext = Path(out_fpath).suffix + + # Saving fcsv + if out_fpath_ext == ".fcsv": + save_fcsv(afids_coords, out_fpath) + # Saving json + # if out_fpath_ext = ".json": + # save_json(afids_coords, out_fpath) + else: + raise IOError("Unsupported file extension") diff --git a/afids_utils/resources/afids_descs.json b/afids_utils/resources/afids_descs.json index 2ffe8e2a..2896a9c4 100644 --- a/afids_utils/resources/afids_descs.json +++ 
b/afids_utils/resources/afids_descs.json @@ -1,36 +1,129 @@ { "human": [ - "AC", - "PC", - ["infracollicular sulcus", "ICS"], - "PMJ", - ["superior interpeduncular fossa", "SIPF"], - ["R superior LMS", "RSLMS"], - ["L superior LMS", "LSLMS"], - ["R inferior LMS", "RILMS"], - ["L inferior LMS", "LILMS"], - ["Culmen", "CUL"], - ["Intermammillary sulcus", "IMS"], - ["R MB", "RMB"], - ["L MB", "LMB"], - ["pineal gland", "PG"], - ["R LV at AC", "RLVAC"], - ["L LV at AC", "LLVAC"], - ["R LV at PC", "RLVPC"], - ["L LV at PC", "LLVPC"], - ["Genu of CC", "GENU"], - ["Splenium of CC", "SPLE"], - ["R AL temporal horn", "RALTH"], - ["L AL temporal horn", "LALTH"], - ["R superior AM temporal horn", "RSAMTH"], - ["L superior AM temporal horn", "LSAMTH"], - ["R inferior AM temporal horn", "RIAMTH"], - ["L inferior AM temporal horn", "LIAMTH"], - ["R indusium griseum origin", "RIGO"], - ["L indusium griseum origin", "LIGO"], - ["R ventral occipital horn", "RVOH"], - ["L ventral occipital horn", "LVOH"], - ["R olfactory sulcal fundus", "ROSF"], - ["L olfactory sulcal fundus", "LOSF"] + [ + "AC" + ], + [ + "PC" + ], + [ + "infracollicular sulcus", + "ICS" + ], + [ + "PMJ" + ], + [ + "superior interpeduncular fossa", + "SIPF" + ], + [ + "R superior LMS", + "RSLMS" + ], + [ + "L superior LMS", + "LSLMS" + ], + [ + "R inferior LMS", + "RILMS" + ], + [ + "L inferior LMS", + "LILMS" + ], + [ + "Culmen", + "CUL" + ], + [ + "Intermammillary sulcus", + "IMS" + ], + [ + "R MB", + "RMB" + ], + [ + "L MB", + "LMB" + ], + [ + "pineal gland", + "PG" + ], + [ + "R LV at AC", + "RLVAC" + ], + [ + "L LV at AC", + "LLVAC" + ], + [ + "R LV at PC", + "RLVPC" + ], + [ + "L LV at PC", + "LLVPC" + ], + [ + "Genu of CC", + "GENU" + ], + [ + "Splenium of CC", + "SPLE" + ], + [ + "R AL temporal horn", + "RALTH" + ], + [ + "L AL temporal horn", + "LALTH" + ], + [ + "R superior AM temporal horn", + "RSAMTH" + ], + [ + "L superior AM temporal horn", + "LSAMTH" + ], + [ + "R inferior AM temporal horn", + 
"RIAMTH" + ], + [ + "L inferior AM temporal horn", + "LIAMTH" + ], + [ + "R indusium griseum origin", + "RIGO" + ], + [ + "L indusium griseum origin", + "LIGO" + ], + [ + "R ventral occipital horn", + "RVOH" + ], + [ + "L ventral occipital horn", + "LVOH" + ], + [ + "R olfactory sulcal fundus", + "ROSF" + ], + [ + "L olfactory sulcal fundus", + "LOSF" + ] ] -} +} \ No newline at end of file diff --git a/afids_utils/tests/test_io.py b/afids_utils/tests/test_io.py index 12bf8e74..952eafac 100644 --- a/afids_utils/tests/test_io.py +++ b/afids_utils/tests/test_io.py @@ -1,20 +1,24 @@ from __future__ import annotations -import csv +import json import tempfile from os import PathLike +from importlib import resources from pathlib import Path import numpy as np +import polars as pl import pytest from hypothesis import HealthCheck, assume, given, settings from hypothesis import strategies as st from numpy.typing import NDArray -from afids_utils.exceptions import InvalidFiducialNumberError -from afids_utils.io import FCSV_FIELDNAMES, afids_to_fcsv, get_afid +from afids_utils.afids import AfidSet +from afids_utils.io import load, save +from afids_utils.exceptions import InvalidFiducialError, InvalidFileError from afids_utils.tests.strategies import afid_coords +whitelist_strs = ['Lu', 'Ll', 'Lt'] @pytest.fixture def valid_fcsv_file() -> PathLike[str]: @@ -23,103 +27,145 @@ def valid_fcsv_file() -> PathLike[str]: ) -class TestGetAfid: - @given(afid_num=st.integers(min_value=1, max_value=32)) - @settings( - suppress_health_check=[HealthCheck.function_scoped_fixture], - ) - def test_valid_num_get_afid( - self, valid_fcsv_file: PathLike[str], afid_num: int - ): - afid = get_afid(valid_fcsv_file, afid_num) +@pytest.fixture +def human_mappings() -> List[List[str] | str]: + with resources.open_text( + "afids_utils.resources", "afids_descs.json" + ) as json_fpath: + mappings = json.load(json_fpath) + + return mappings['human'] + + +class TestLoad: + def test_valid_file(self, 
valid_fcsv_file: PathLike[str]): + afids_set = load(valid_fcsv_file) - # Check array type - assert isinstance(afid, np.ndarray) - # Check array values - assert afid.dtype == np.single + assert isinstance(afids_set, AfidSet) + - @given(afid_num=st.integers(min_value=-1000, max_value=1000)) + def test_invalid_fpath(self): + with pytest.raises(IOError, match=".*does not exist"): + load('invalid/fpath.fcsv') + + + @given( + ext=st.text( + min_size=2, + max_size=5, + alphabet=st.characters(whitelist_categories=whitelist_strs) + ) + ) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], ) - def test_invalid_num_get_afid( - self, - valid_fcsv_file: PathLike[str], - afid_num: int, - ): - assume(afid_num < 1 or afid_num > 32) + def test_invalid_ext(self, valid_fcsv_file: PathLike[str], ext: str): + assume(not ext == "fcsv" or not ext == "json") - with pytest.raises( - InvalidFiducialNumberError, match=".*is not valid." - ): - get_afid(valid_fcsv_file, afid_num) + with tempfile.NamedTemporaryFile( + mode="w", prefix="sub-test_desc-", suffix=f"_afids.{ext}", + ) as invalid_file_ext: + with pytest.raises(IOError, match="Unsupported .* extension"): + load(invalid_file_ext.name) -class TestAfidsToFcsv: - @given(afids_coords=afid_coords()) - def test_invalid_template(self, afids_coords: NDArray[np.single]) -> None: - with pytest.raises(FileNotFoundError): - afids_to_fcsv( - afids_coords, - "/invalid/fcsv/path", - ) + def test_invalid_label_range(self, valid_fcsv_file: PathLike[str]): + # Create additional line of fiducials + with open(valid_fcsv_file) as valid_fcsv: + fcsv_data = valid_fcsv.readlines() + fcsv_data.append(fcsv_data[-1]) - @given(afids_coords=afid_coords(bad_range=True)) - def test_invalid_num_afids(self, afids_coords: NDArray[np.single]) -> None: with tempfile.NamedTemporaryFile( - mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv", ) as out_fcsv_file: - with 
pytest.raises(TypeError) as err: - afids_to_fcsv(afids_coords, out_fcsv_file) + out_fcsv_file.writelines(fcsv_data) + out_fcsv_file.flush() - assert "AFIDs, but received" in str(err.value) + # Test that InvalidFileError raised containing correct message + with pytest.raises(InvalidFileError, match="Unexpected number.*"): + load(out_fcsv_file.name) - @given(afids_coords=afid_coords(bad_dims=True)) - def test_invalid_afids_dims( - self, afids_coords: NDArray[np.single] + + @given( + label=st.integers(min_value=0, max_value=31), + desc=st.text( + min_size=2, + max_size=5, + alphabet=st.characters(whitelist_categories=whitelist_strs), + ), + ) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_invalid_desc( + self, + valid_fcsv_file: PathLike[str], + human_mappings: List[List[str] | str], + label: int, + desc: str ) -> None: + assume(desc not in human_mappings[label]) + + # Replace valid description with a mismatch + with open(valid_fcsv_file) as valid_fcsv: + fcsv_data = valid_fcsv.readlines() + fcsv_data[label+3] = fcsv_data[label+3].replace( + human_mappings[label][0], desc + ) + print(fcsv_data) + + # Write to temp file with tempfile.NamedTemporaryFile( - mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv", ) as out_fcsv_file: - with pytest.raises(TypeError) as err: - afids_to_fcsv(afids_coords, out_fcsv_file) + out_fcsv_file.writelines(fcsv_data) + out_fcsv_file.flush() + + # Test for description match error raised + with pytest.raises( + InvalidFiducialError, match="Description for label .*" + ): + load(out_fcsv_file.name) - assert "Expected 3 spatial dimensions" in str(err.value) +class TestSave: @given(afids_coords=afid_coords()) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], ) - def test_write_fcsv( - self, afids_coords: NDArray[np.single], valid_fcsv_file: PathLike[str] - ) -> None: + def test_save_fcsv(self, afids_coords: 
NDArray[np.single]): with tempfile.NamedTemporaryFile( - mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv" ) as out_fcsv_file: - # Create and check output file - afids_to_fcsv(afids_coords, out_fcsv_file.name) - - # Load files - with open( - valid_fcsv_file, "r", encoding="utf-8", newline="" - ) as template_fcsv_file, open( - out_fcsv_file.name, "r", encoding="utf-8", newline="" - ) as output_fcsv_file: - template_header = [ - template_fcsv_file.readline() for _ in range(3) - ] - output_header = [output_fcsv_file.readline() for _ in range(3)] - reader = csv.DictReader( - output_fcsv_file, fieldnames=FCSV_FIELDNAMES - ) - output_fcsv = list(reader) - - # Check header - assert output_header == template_header - # Check contents - for idx, row in enumerate(output_fcsv): - assert (row["x"], row["y"], row["z"]) == ( - str(afids_coords[idx][0]), - str(afids_coords[idx][1]), - str(afids_coords[idx][2]), - ) + save(afids_coords, out_fcsv_file.name) + + assert Path(out_fcsv_file.name).exists() + + + @given( + afids_coords=afid_coords(), + ext=st.text( + min_size=2, + max_size=5, + alphabet=st.characters(whitelist_categories=whitelist_strs) + ) + ) + def test_save_invalid_ext( + self, afids_coords: NDArray[np.single], ext: str + ): + assume(not ext == "fcsv" or not ext == "json") + + with tempfile.NamedTemporaryFile( + mode="w", + prefix="sub-test_desc-", + suffix=f"_afids.{ext}" + ) as out_file: + + with pytest.raises(IOError, match="Unsupported file extension"): + save(afids_coords, out_file.name) \ No newline at end of file From 485314e1290449f09df05a0050b457683be7d441 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Tue, 22 Aug 2023 17:50:20 -0400 Subject: [PATCH 04/18] first pass at refactoring afids.py and associated tests --- afids_utils/afids.py | 6 ++-- afids_utils/tests/test_afids.py | 55 +++++++++++++++++++++++++++++++++ afids_utils/tests/test_ext.py | 6 ---- 3 files changed, 58 insertions(+), 9 
deletions(-) create mode 100644 afids_utils/tests/test_afids.py diff --git a/afids_utils/afids.py b/afids_utils/afids.py index b1cb7736..f5d578e2 100644 --- a/afids_utils/afids.py +++ b/afids_utils/afids.py @@ -31,7 +31,7 @@ def get_afid(self, label: int) -> NDArray[np.single]: Parameters ---------- label - Unique fiducial number to extract from + Unique AFID label to extract from Returns ------- @@ -50,12 +50,12 @@ def get_afid(self, label: int) -> NDArray[np.single]: # Fiducial selection out of bounds if label < 1 or label > len(self["afids"]): raise InvalidFiducialError( - f"Fiducial number {label} is not valid." + f"AFID label {label} is not valid." ) return ( self["afids"] - .filter(pl.col("id") == str(label)) + .filter(pl.col("label") == str(label)) .select("x_mm", "y_mm", "z_mm") .to_numpy()[0] ) diff --git a/afids_utils/tests/test_afids.py b/afids_utils/tests/test_afids.py new file mode 100644 index 00000000..b9bc7dd8 --- /dev/null +++ b/afids_utils/tests/test_afids.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from pathlib import Path + +import numpy as np +import polars as pl +import pytest +from hypothesis import HealthCheck, assume, given, settings +from hypothesis import strategies as st + +from afids_utils.io import load +from afids_utils.exceptions import InvalidFiducialError + + +@pytest.fixture +def valid_fcsv_file() -> PathLike[str]: + return ( + Path(__file__).parent / "data" / "tpl-MNI152NLin2009cAsym_afids.fcsv" + ) + +class TestAfids: + def test_init(self, valid_fcsv_file: PathLike[str]): + # Load valid file to check internal types + afids_set = load(valid_fcsv_file) + + # Check to make sure internal types are correct + assert isinstance(afids_set["metadata"], dict) + assert isinstance(afids_set["metadata"]["slicer_version"], str) + assert isinstance(afids_set["metadata"]["coord_system"], str) + assert isinstance(afids_set["afids"], pl.DataFrame) + + + @given(label=st.integers(min_value=1, max_value=32)) + @settings( + 
suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_valid_get_afid(self, valid_fcsv_file: PathLike[str], label: int): + afids_set = load(valid_fcsv_file) + afid_pos = afids_set.get_afid(label) + + # Check array type + assert isinstance(afid_pos, np.ndarray) + # Check array values + assert afid_pos.dtype == np.single + + @given(label=st.integers(min_value=-100, max_value=100)) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_invalid_get_afid(self, valid_fcsv_file: PathLike[str], label: int): + afids_set = load(valid_fcsv_file) + assume(not 1 <= label <= len(afids_set['afids'])) + + with pytest.raises(InvalidFiducialError, match=".*not valid"): + afids_set.get_afid(label) \ No newline at end of file diff --git a/afids_utils/tests/test_ext.py b/afids_utils/tests/test_ext.py index 49e894a9..8fc26f95 100644 --- a/afids_utils/tests/test_ext.py +++ b/afids_utils/tests/test_ext.py @@ -32,12 +32,6 @@ def test_load_valid_fcsv(self, valid_fcsv_file: PathLike[str]): assert isinstance(afids_set, AfidSet) - # Check to make sure internal types are correct - assert isinstance(afids_set["metadata"], dict) - assert isinstance(afids_set["metadata"]["slicer_version"], str) - assert isinstance(afids_set["metadata"]["coord_system"], str) - assert isinstance(afids_set["afids"], pl.DataFrame) - @given(coord_num=st.integers(min_value=0, max_value=1)) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], From af9a15abc7a9840d2099660f5fa50c4da86fba6b Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Tue, 22 Aug 2023 18:35:59 -0400 Subject: [PATCH 05/18] change saving to use AfidSet --- afids_utils/afids.py | 2 +- afids_utils/ext/fcsv.py | 1 - afids_utils/io.py | 12 +++++----- afids_utils/tests/test_io.py | 44 +++++++++++++++++++----------------- 4 files changed, 30 insertions(+), 29 deletions(-) diff --git a/afids_utils/afids.py b/afids_utils/afids.py index f5d578e2..34c049e8 100644 --- a/afids_utils/afids.py 
+++ b/afids_utils/afids.py @@ -50,7 +50,7 @@ def get_afid(self, label: int) -> NDArray[np.single]: # Fiducial selection out of bounds if label < 1 or label > len(self["afids"]): raise InvalidFiducialError( - f"AFID label {label} is not valid." + f"AFID label {label} is not valid" ) return ( diff --git a/afids_utils/ext/fcsv.py b/afids_utils/ext/fcsv.py index 40356a97..bfac7f6f 100644 --- a/afids_utils/ext/fcsv.py +++ b/afids_utils/ext/fcsv.py @@ -139,7 +139,6 @@ def load_fcsv( return afids_set -# TODO: Handle metadata - specifically setting the coordinate system def save_fcsv( afid_coords: NDArray[np.single], out_fcsv: PathLike[str] | str, diff --git a/afids_utils/io.py b/afids_utils/io.py index 2cea7104..8cd0cabe 100644 --- a/afids_utils/io.py +++ b/afids_utils/io.py @@ -72,7 +72,6 @@ def load( .select("desc") .item() ) - if desc not in mappings['human'][label - 1]: raise InvalidFiducialError( f"Description for label {label} does not match expected" @@ -83,18 +82,17 @@ def load( return afids_set -# TODO: Handle the metadata +# TODO: Handle the metadata - specifically setting the coordinate system def save( - afids_coords: NDArray[np.single], + afids_set: AfidSet, out_fpath: PathLike[str] | str, ) -> None: """Save AFIDs to Slicer-compatible file Parameters ---------- - afid_coords : numpy.ndarray[shape=(N, 3), dtype=numpy.single] - Floating-point NumPy array containing spatial coordinates (x, y, z) of - `N` AFIDs + afids_set + An AFID dataset containing metadata and coordinates fcsv_output : os.PathLike[str] | str Path of file (including filename and extension) to save AFIDs to @@ -107,6 +105,8 @@ def save( out_fpath_ext = Path(out_fpath).suffix + afids_coords = afids_set["afids"].select("x_mm", "y_mm", "z_mm").to_numpy() + # Saving fcsv if out_fpath_ext == ".fcsv": save_fcsv(afids_coords, out_fpath) diff --git a/afids_utils/tests/test_io.py b/afids_utils/tests/test_io.py index 952eafac..e8511e86 100644 --- a/afids_utils/tests/test_io.py +++ 
b/afids_utils/tests/test_io.py @@ -18,8 +18,6 @@ from afids_utils.exceptions import InvalidFiducialError, InvalidFileError from afids_utils.tests.strategies import afid_coords -whitelist_strs = ['Lu', 'Ll', 'Lt'] - @pytest.fixture def valid_fcsv_file() -> PathLike[str]: return ( @@ -53,7 +51,9 @@ def test_invalid_fpath(self): ext=st.text( min_size=2, max_size=5, - alphabet=st.characters(whitelist_categories=whitelist_strs) + alphabet=st.characters( + min_codepoint=ord('A'), max_codepoint=ord('z') + ) ) ) @settings( @@ -89,11 +89,13 @@ def test_invalid_label_range(self, valid_fcsv_file: PathLike[str]): @given( - label=st.integers(min_value=0, max_value=31), + label=st.integers(min_value=1, max_value=32), desc=st.text( min_size=2, max_size=5, - alphabet=st.characters(whitelist_categories=whitelist_strs), + alphabet=st.characters( + min_codepoint=ord('A'), max_codepoint=ord('z') + ), ), ) @settings( @@ -106,15 +108,14 @@ def test_invalid_desc( label: int, desc: str ) -> None: - assume(desc not in human_mappings[label]) + assume(desc not in human_mappings[label-1]) # Replace valid description with a mismatch with open(valid_fcsv_file) as valid_fcsv: fcsv_data = valid_fcsv.readlines() - fcsv_data[label+3] = fcsv_data[label+3].replace( - human_mappings[label][0], desc + fcsv_data[label+2] = fcsv_data[label+2].replace( + human_mappings[label-1][0], desc ) - print(fcsv_data) # Write to temp file with tempfile.NamedTemporaryFile( @@ -127,37 +128,38 @@ def test_invalid_desc( # Test for description match error raised with pytest.raises( - InvalidFiducialError, match="Description for label .*" + InvalidFiducialError, match="Description for label.*" ): load(out_fcsv_file.name) class TestSave: - @given(afids_coords=afid_coords()) - @settings( - suppress_health_check=[HealthCheck.function_scoped_fixture], - ) - def test_save_fcsv(self, afids_coords: NDArray[np.single]): + def test_save_fcsv(self, valid_fcsv_file: PathLike[str]): with tempfile.NamedTemporaryFile( mode="w", 
prefix="sub-test_desc-", suffix="_afids.fcsv" ) as out_fcsv_file: - save(afids_coords, out_fcsv_file.name) + afids_set = load(valid_fcsv_file) + save(afids_set, out_fcsv_file.name) assert Path(out_fcsv_file.name).exists() @given( - afids_coords=afid_coords(), ext=st.text( min_size=2, max_size=5, - alphabet=st.characters(whitelist_categories=whitelist_strs) + alphabet=st.characters( + min_codepoint=ord('A'), max_codepoint=ord('z') + ) ) ) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) def test_save_invalid_ext( - self, afids_coords: NDArray[np.single], ext: str + self, valid_fcsv_file: PathLike[str], ext: str ): assume(not ext == "fcsv" or not ext == "json") @@ -166,6 +168,6 @@ def test_save_invalid_ext( prefix="sub-test_desc-", suffix=f"_afids.{ext}" ) as out_file: - + afids_set = load(valid_fcsv_file) with pytest.raises(IOError, match="Unsupported file extension"): - save(afids_coords, out_file.name) \ No newline at end of file + save(afids_set, out_file.name) \ No newline at end of file From f8c229996e6ec78e6b7d8511367f3fdc8e96fe36 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Tue, 22 Aug 2023 18:36:07 -0400 Subject: [PATCH 06/18] update code coverage --- codecov.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/codecov.yml b/codecov.yml index 0accf9ba..612d9bc5 100644 --- a/codecov.yml +++ b/codecov.yml @@ -12,6 +12,12 @@ component_management: - component_id: afids-utils_io name: afids_utils/io.py paths: [afids_utils/io.py] + - component_id: afids-utils_ext + name: afids_utils/ext + paths: [afids_utils/ext/*.py] + - component_id: afids-utils_afids + name: afids_utils/afids.py + paths: [afids_utils/afids.py] - component_id: afids-utils_transforms name: afids_utils/transforms.py paths: [afids_utils/transforms.py] From df85426a6d543176583d83ec8655f25fd20afc03 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Tue, 22 Aug 2023 19:59:04 -0400 Subject: [PATCH 07/18] quality fixes and updates to exceptions thrown --- 
afids_utils/afids.py | 4 +- afids_utils/ext/fcsv.py | 7 +-- afids_utils/io.py | 17 ++++--- afids_utils/tests/test_afids.py | 17 ++++--- afids_utils/tests/test_ext.py | 41 ++++++++--------- afids_utils/tests/test_io.py | 79 ++++++++++++++------------------- 6 files changed, 80 insertions(+), 85 deletions(-) diff --git a/afids_utils/afids.py b/afids_utils/afids.py index 34c049e8..669e872c 100644 --- a/afids_utils/afids.py +++ b/afids_utils/afids.py @@ -49,9 +49,7 @@ def get_afid(self, label: int) -> NDArray[np.single]: if isinstance(label, int): # Fiducial selection out of bounds if label < 1 or label > len(self["afids"]): - raise InvalidFiducialError( - f"AFID label {label} is not valid" - ) + raise InvalidFiducialError(f"AFID label {label} is not valid") return ( self["afids"] diff --git a/afids_utils/ext/fcsv.py b/afids_utils/ext/fcsv.py index bfac7f6f..276fc3b7 100644 --- a/afids_utils/ext/fcsv.py +++ b/afids_utils/ext/fcsv.py @@ -2,13 +2,13 @@ from __future__ import annotations import csv -import json import re from importlib import resources from itertools import islice from os import PathLike from typing import Dict +import numpy as np import polars as pl from numpy.typing import NDArray @@ -159,7 +159,9 @@ def save_fcsv( with resources.open_text( "afids_utils.resources", "template.fcsv" ) as template_fcsv_file: - header = [template_fcsv_file.readline() for _ in range(HEADER_ROWS+1)] + header = [ + template_fcsv_file.readline() for _ in range(HEADER_ROWS + 1) + ] reader = csv.DictReader(template_fcsv_file, fieldnames=FCSV_FIELDNAMES) fcsv = list(reader) @@ -188,4 +190,3 @@ def save_fcsv( for row in fcsv: writer.writerow(row) - \ No newline at end of file diff --git a/afids_utils/io.py b/afids_utils/io.py index 8cd0cabe..d0ac59e4 100644 --- a/afids_utils/io.py +++ b/afids_utils/io.py @@ -2,15 +2,17 @@ from __future__ import annotations import json -import polars as pl from importlib import resources from os import PathLike from pathlib import Path +import 
polars as pl + from afids_utils.afids import AfidSet from afids_utils.exceptions import InvalidFiducialError, InvalidFileError from afids_utils.ext.fcsv import load_fcsv, save_fcsv + def load( afids_fpath: PathLike[str] | str, ) -> AfidSet: @@ -31,7 +33,7 @@ def load( ------ IOError If extension to fiducial file is not .fcsv or .json or does not exist - + InvalidFileError If fiducial file has none or more than expected number of fiducials @@ -41,7 +43,7 @@ def load( # Check if file exists afids_fpath = Path(afids_fpath) if not afids_fpath.exists(): - raise IOError("Provided AFID file does not exist") + raise FileNotFoundError("Provided AFID file does not exist") afids_fpath_ext = afids_fpath.suffix @@ -52,7 +54,7 @@ def load( # if afids_fpath_ext = ".json": # load_json(afids_path) else: - raise IOError("Unsupported file extension") + raise ValueError("Unsupported file extension") # Perform validation of loaded file # Load expected mappings @@ -61,7 +63,7 @@ def load( ) as json_fpath: mappings = json.load(json_fpath) # Check expected number of fiducials exist - if len(afids_set["afids"]) != len(mappings['human']): + if len(afids_set["afids"]) != len(mappings["human"]): raise InvalidFileError("Unexpected number of fiducials") # Validate descriptions, before dropping @@ -72,7 +74,7 @@ def load( .select("desc") .item() ) - if desc not in mappings['human'][label - 1]: + if desc not in mappings["human"][label - 1]: raise InvalidFiducialError( f"Description for label {label} does not match expected" ) @@ -82,6 +84,7 @@ def load( return afids_set + # TODO: Handle the metadata - specifically setting the coordinate system def save( afids_set: AfidSet, @@ -114,4 +117,4 @@ def save( # if out_fpath_ext = ".json": # save_json(afids_coords, out_fpath) else: - raise IOError("Unsupported file extension") + raise ValueError("Unsupported file extension") diff --git a/afids_utils/tests/test_afids.py b/afids_utils/tests/test_afids.py index b9bc7dd8..cb4a8f95 100644 --- 
a/afids_utils/tests/test_afids.py +++ b/afids_utils/tests/test_afids.py @@ -1,5 +1,6 @@ from __future__ import annotations +from os import PathLike from pathlib import Path import numpy as np @@ -8,8 +9,8 @@ from hypothesis import HealthCheck, assume, given, settings from hypothesis import strategies as st -from afids_utils.io import load from afids_utils.exceptions import InvalidFiducialError +from afids_utils.io import load @pytest.fixture @@ -18,6 +19,7 @@ def valid_fcsv_file() -> PathLike[str]: Path(__file__).parent / "data" / "tpl-MNI152NLin2009cAsym_afids.fcsv" ) + class TestAfids: def test_init(self, valid_fcsv_file: PathLike[str]): # Load valid file to check internal types @@ -29,7 +31,6 @@ def test_init(self, valid_fcsv_file: PathLike[str]): assert isinstance(afids_set["metadata"]["coord_system"], str) assert isinstance(afids_set["afids"], pl.DataFrame) - @given(label=st.integers(min_value=1, max_value=32)) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], @@ -46,10 +47,12 @@ def test_valid_get_afid(self, valid_fcsv_file: PathLike[str], label: int): @given(label=st.integers(min_value=-100, max_value=100)) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], - ) - def test_invalid_get_afid(self, valid_fcsv_file: PathLike[str], label: int): + ) + def test_invalid_get_afid( + self, valid_fcsv_file: PathLike[str], label: int + ): afids_set = load(valid_fcsv_file) - assume(not 1 <= label <= len(afids_set['afids'])) - + assume(not 1 <= label <= len(afids_set["afids"])) + with pytest.raises(InvalidFiducialError, match=".*not valid"): - afids_set.get_afid(label) \ No newline at end of file + afids_set.get_afid(label) diff --git a/afids_utils/tests/test_ext.py b/afids_utils/tests/test_ext.py index 8fc26f95..79471a5d 100644 --- a/afids_utils/tests/test_ext.py +++ b/afids_utils/tests/test_ext.py @@ -6,16 +6,20 @@ from os import PathLike from pathlib import Path -import polars as pl +import numpy as np import pytest from 
hypothesis import HealthCheck, assume, given, settings from hypothesis import strategies as st +from numpy.typing import NDArray from afids_utils.afids import AfidSet +from afids_utils.exceptions import InvalidFileError from afids_utils.ext.fcsv import ( - FCSV_FIELDNAMES, _get_metadata, load_fcsv, save_fcsv + FCSV_FIELDNAMES, + _get_metadata, + load_fcsv, + save_fcsv, ) -from afids_utils.exceptions import InvalidFileError from afids_utils.tests.strategies import afid_coords @@ -89,13 +93,15 @@ def test_invalid_num_coord( temp_invalid_fcsv_file.writelines(fcsv_data) temp_invalid_fcsv_file.flush() - with pytest.raises( - InvalidFileError, match="Invalid coordinate.*" - ): + with pytest.raises(InvalidFileError, match="Invalid coordinate.*"): _get_metadata(temp_invalid_fcsv_file.name) - - @given(coord_str=st.text(min_size=3, alphabet=st.characters(whitelist_categories=['Lu', 'Ll', 'Lt']))) + @given( + coord_str=st.text( + min_size=3, + alphabet=st.characters(whitelist_categories=["Lu", "Ll", "Lt"]), + ) + ) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], ) @@ -119,19 +125,14 @@ def test_invalid_str_coord( temp_invalid_fcsv_file.writelines(fcsv_data) temp_invalid_fcsv_file.flush() - with pytest.raises( - InvalidFileError, match="Invalid coordinate.*" - ): + with pytest.raises(InvalidFileError, match="Invalid coordinate.*"): _get_metadata(temp_invalid_fcsv_file.name) - - def test_invalid_header( - self, valid_fcsv_file: PathLike[str] - ): + def test_invalid_header(self, valid_fcsv_file: PathLike[str]): with open(valid_fcsv_file) as valid_fcsv: valid_fcsv_data = valid_fcsv.readlines() invalid_fcsv_data = valid_fcsv_data[0] - + with tempfile.NamedTemporaryFile( mode="w", prefix="sub-test_desc-", @@ -157,7 +158,9 @@ def test_save_fcsv_invalid_template( suppress_health_check=[HealthCheck.function_scoped_fixture], ) def test_save_fcsv_valid_template( - self, afids_coords: NDArray[np.single], valid_fcsv_file: PathLike[str], + self, + afids_coords: 
NDArray[np.single], + valid_fcsv_file: PathLike[str], ): with tempfile.NamedTemporaryFile( mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" @@ -190,7 +193,6 @@ def test_save_fcsv_valid_template( str(afids_coords[idx][2]), ) - @given(afids_coords=afid_coords(bad_range=True)) def test_invalid_num_afids(self, afids_coords: NDArray[np.single]) -> None: with tempfile.NamedTemporaryFile( @@ -201,7 +203,6 @@ def test_invalid_num_afids(self, afids_coords: NDArray[np.single]) -> None: assert "AFIDs, but received" in str(err.value) - @given(afids_coords=afid_coords(bad_dims=True)) def test_invalid_afids_dims( self, afids_coords: NDArray[np.single] @@ -212,4 +213,4 @@ def test_invalid_afids_dims( with pytest.raises(TypeError) as err: save_fcsv(afids_coords, out_fcsv_file) - assert "Expected 3 spatial dimensions" in str(err.value) \ No newline at end of file + assert "Expected 3 spatial dimensions" in str(err.value) diff --git a/afids_utils/tests/test_io.py b/afids_utils/tests/test_io.py index e8511e86..6a029623 100644 --- a/afids_utils/tests/test_io.py +++ b/afids_utils/tests/test_io.py @@ -2,21 +2,19 @@ import json import tempfile -from os import PathLike from importlib import resources +from os import PathLike from pathlib import Path +from typing import List -import numpy as np -import polars as pl import pytest from hypothesis import HealthCheck, assume, given, settings from hypothesis import strategies as st -from numpy.typing import NDArray from afids_utils.afids import AfidSet -from afids_utils.io import load, save from afids_utils.exceptions import InvalidFiducialError, InvalidFileError -from afids_utils.tests.strategies import afid_coords +from afids_utils.io import load, save + @pytest.fixture def valid_fcsv_file() -> PathLike[str]: @@ -32,7 +30,7 @@ def human_mappings() -> List[List[str] | str]: ) as json_fpath: mappings = json.load(json_fpath) - return mappings['human'] + return mappings["human"] class TestLoad: @@ -40,20 +38,18 @@ def test_valid_file(self, 
valid_fcsv_file: PathLike[str]): afids_set = load(valid_fcsv_file) assert isinstance(afids_set, AfidSet) - def test_invalid_fpath(self): - with pytest.raises(IOError, match=".*does not exist"): - load('invalid/fpath.fcsv') - + with pytest.raises(FileNotFoundError, match=".*does not exist"): + load("invalid/fpath.fcsv") @given( ext=st.text( - min_size=2, - max_size=5, + min_size=2, + max_size=5, alphabet=st.characters( - min_codepoint=ord('A'), max_codepoint=ord('z') - ) + min_codepoint=ord("A"), max_codepoint=ord("z") + ), ) ) @settings( @@ -63,12 +59,13 @@ def test_invalid_ext(self, valid_fcsv_file: PathLike[str], ext: str): assume(not ext == "fcsv" or not ext == "json") with tempfile.NamedTemporaryFile( - mode="w", prefix="sub-test_desc-", suffix=f"_afids.{ext}", + mode="w", + prefix="sub-test_desc-", + suffix=f"_afids.{ext}", ) as invalid_file_ext: - with pytest.raises(IOError, match="Unsupported .* extension"): + with pytest.raises(ValueError, match="Unsupported .* extension"): load(invalid_file_ext.name) - def test_invalid_label_range(self, valid_fcsv_file: PathLike[str]): # Create additional line of fiducials with open(valid_fcsv_file) as valid_fcsv: @@ -87,14 +84,13 @@ def test_invalid_label_range(self, valid_fcsv_file: PathLike[str]): with pytest.raises(InvalidFileError, match="Unexpected number.*"): load(out_fcsv_file.name) - @given( label=st.integers(min_value=1, max_value=32), desc=st.text( min_size=2, max_size=5, alphabet=st.characters( - min_codepoint=ord('A'), max_codepoint=ord('z') + min_codepoint=ord("A"), max_codepoint=ord("z") ), ), ) @@ -102,19 +98,19 @@ def test_invalid_label_range(self, valid_fcsv_file: PathLike[str]): suppress_health_check=[HealthCheck.function_scoped_fixture], ) def test_invalid_desc( - self, - valid_fcsv_file: PathLike[str], - human_mappings: List[List[str] | str], - label: int, - desc: str + self, + valid_fcsv_file: PathLike[str], + human_mappings: List[List[str] | str], + label: int, + desc: str, ) -> None: - assume(desc 
not in human_mappings[label-1]) + assume(desc not in human_mappings[label - 1]) # Replace valid description with a mismatch with open(valid_fcsv_file) as valid_fcsv: fcsv_data = valid_fcsv.readlines() - fcsv_data[label+2] = fcsv_data[label+2].replace( - human_mappings[label-1][0], desc + fcsv_data[label + 2] = fcsv_data[label + 2].replace( + human_mappings[label - 1][0], desc ) # Write to temp file @@ -136,38 +132,31 @@ def test_invalid_desc( class TestSave: def test_save_fcsv(self, valid_fcsv_file: PathLike[str]): with tempfile.NamedTemporaryFile( - mode="w", - prefix="sub-test_desc-", - suffix="_afids.fcsv" + mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" ) as out_fcsv_file: afids_set = load(valid_fcsv_file) save(afids_set, out_fcsv_file.name) assert Path(out_fcsv_file.name).exists() - @given( ext=st.text( - min_size=2, - max_size=5, + min_size=2, + max_size=5, alphabet=st.characters( - min_codepoint=ord('A'), max_codepoint=ord('z') - ) + min_codepoint=ord("A"), max_codepoint=ord("z") + ), ) ) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], ) - def test_save_invalid_ext( - self, valid_fcsv_file: PathLike[str], ext: str - ): + def test_save_invalid_ext(self, valid_fcsv_file: PathLike[str], ext: str): assume(not ext == "fcsv" or not ext == "json") - + with tempfile.NamedTemporaryFile( - mode="w", - prefix="sub-test_desc-", - suffix=f"_afids.{ext}" + mode="w", prefix="sub-test_desc-", suffix=f"_afids.{ext}" ) as out_file: afids_set = load(valid_fcsv_file) - with pytest.raises(IOError, match="Unsupported file extension"): - save(afids_set, out_file.name) \ No newline at end of file + with pytest.raises(ValueError, match="Unsupported file extension"): + save(afids_set, out_file.name) From bef365d8ed4d8095e88bdecf2b072d63ac47a22f Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Tue, 22 Aug 2023 20:01:31 -0400 Subject: [PATCH 08/18] update docstrings for afids_utils.io.save --- afids_utils/io.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/afids_utils/io.py b/afids_utils/io.py index d0ac59e4..00c75321 100644 --- a/afids_utils/io.py +++ b/afids_utils/io.py @@ -97,7 +97,7 @@ def save( afids_set An AFID dataset containing metadata and coordinates - fcsv_output : os.PathLike[str] | str + out_fpath Path of file (including filename and extension) to save AFIDs to Raises From 29cdcef079502a279c1ac1243873f658ac39e281 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 10:33:24 -0400 Subject: [PATCH 09/18] update doc requirements --- docs/requirements.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index ba91c21c..4e789acd 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,8 @@ +sphinx>=7,<8 sphinx-argparse>=0.4.0,<0.5 sphinx-copybutton>=0.5.1,<0.6 -sphinx-design>=0.4.1,<0.5.0 +sphinx-design>=0.5,<0.6 sphinx-reredirects>=0.1,<0.2 furo>=2023.3.23,<2024 -myst-parser>=1.0.0,<2.0 +myst-parser>=2.0,<3 numpydoc>=1.5.0,<1.6 \ No newline at end of file From 7cb62be37bc992ea898934eecc5013e057351dbb Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 11:09:10 -0400 Subject: [PATCH 10/18] update attr -> attrs --- afids_utils/afids.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/afids_utils/afids.py b/afids_utils/afids.py index 669e872c..e0624a76 100644 --- a/afids_utils/afids.py +++ b/afids_utils/afids.py @@ -1,7 +1,7 @@ """Anatomical fiducial classes""" from __future__ import annotations -import attr +import attrs import numpy as np import polars as pl from numpy.typing import NDArray @@ -9,13 +9,13 @@ from afids_utils.exceptions import InvalidFiducialError -@attr.define +@attrs.define class AfidSet(dict): """Base class for a set of fiducials""" - slicer_version: str = attr.field() - coord_system: str = attr.field() - afids_df: pl.DataFrame = attr.field() + slicer_version: str = attrs.field() + coord_system: str = attrs.field() + afids_df: 
pl.DataFrame = attrs.field() def __attrs_post_init__(self): self["metadata"] = { From 8c51097487f2e54ed8b76c45ed289e4126e81af6 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 16:42:44 -0400 Subject: [PATCH 11/18] drop polars from dependency --- poetry.lock | 415 ++++++++++++++++++++++--------------------------- pyproject.toml | 1 - 2 files changed, 186 insertions(+), 230 deletions(-) diff --git a/poetry.lock b/poetry.lock index c0c8324c..54d3ef3e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -20,36 +20,33 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "black" -version = "23.3.0" +version = "23.7.0" description = "The uncompromising code formatter." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, - {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, - {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, - {file = 
"black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, - {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, - {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, - {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, - {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, - {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, - {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, - {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, - {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, - {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, - {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = 
"sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, ] [package.dependencies] @@ -69,13 +66,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "click" -version = "8.1.3" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = 
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -94,71 +91,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.0" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash 
= "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = 
"coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = 
"sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, + {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, + {file = "coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, + {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, + {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, + {file = 
"coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, + {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, + {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, + {file = 
"coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, + {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, + {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, + 
{file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, + {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, + {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, + {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, ] [package.dependencies] @@ -180,13 +169,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -194,23 +183,23 @@ test = ["pytest (>=6)"] [[package]] name = "hypothesis" -version = "6.77.0" +version = "6.82.6" description = "A library for property-based testing" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "hypothesis-6.77.0-py3-none-any.whl", hash = "sha256:bfa03bb5c7d8b775378fc795d9251c885cd1be7d8ddf677f3ebd8f3892efc782"}, - {file = "hypothesis-6.77.0.tar.gz", hash = "sha256:23bfe3743d0c88e61330debca60fc813db2ef83be5777f2baf113d035ac76081"}, + {file = "hypothesis-6.82.6-py3-none-any.whl", hash = "sha256:e99c445140e43f1cceda07b569f2f2d920d95435c6b0e6b507b35b01bb025e9d"}, + {file = "hypothesis-6.82.6.tar.gz", hash = 
"sha256:f52ac4180a16208224e3d648fbf0fef8b9ca24863ba4b41bfef30a78c42646bd"}, ] [package.dependencies] attrs = ">=19.2.0" exceptiongroup = {version = ">=1.0.0", markers = "python_version < \"3.11\""} -numpy = {version = ">=1.16.0", optional = true, markers = "extra == \"numpy\""} +numpy = {version = ">=1.17.3", optional = true, markers = "extra == \"numpy\""} sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] -all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "importlib-metadata (>=3.6)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.16.0)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2023.3)"] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2023.3)"] cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] dateutil = ["python-dateutil (>=1.4)"] @@ -218,7 +207,7 @@ django = ["django (>=3.2)"] dpcontracts = ["dpcontracts (>=0.4)"] ghostwriter = ["black (>=19.10b0)"] lark = ["lark (>=0.10.1)"] -numpy = ["numpy (>=1.16.0)"] +numpy = ["numpy (>=1.17.3)"] pandas = ["pandas (>=1.1)"] pytest = ["pytest (>=4.6)"] pytz = ["pytz (>=2014.1)"] @@ -282,39 +271,39 @@ files = [ [[package]] name = "numpy" -version = "1.24.3" +version = "1.24.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.8" files = [ - {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, - {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, - {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, - {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, - {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, - {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, - {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, - {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, - {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, - {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, - {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, + {file = 
"numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] [[package]] @@ -341,39 +330,39 @@ files = [ [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.extras] @@ -398,81 +387,49 @@ tomli = ">=1.2.2" [package.extras] poetry-plugin = ["poetry (>=1.0,<2.0)"] -[[package]] -name = "polars" -version = "0.18.3" -description = "Blazingly fast DataFrame library" -optional = false -python-versions = 
">=3.7" -files = [ - {file = "polars-0.18.3-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:0e7d03d0f4a53892ae095f4274a3f8776eb6b6da2ee82a2076636c8319180e32"}, - {file = "polars-0.18.3-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:90d1ff55d18fa9eafec6455230ace27607ecd5b51c68795c724b6338e3a3314b"}, - {file = "polars-0.18.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:961471351c31645f2db76cbf42c52e84b8a04832cae0111fb16f83599e033cd7"}, - {file = "polars-0.18.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96befd1c18c0d668a8c50d1833559cbe00536d7b018857a0c0935d6ae60a0052"}, - {file = "polars-0.18.3-cp37-abi3-win_amd64.whl", hash = "sha256:0950441f464a33da42c8facef6110ece93d15a27a14190176de5cc27dd573d3f"}, - {file = "polars-0.18.3.tar.gz", hash = "sha256:efc3f629fddb060dc90c2dbd575824c1d662f37294c18c769d28e2bc4d5a1413"}, -] - -[package.dependencies] -numpy = {version = ">=1.16.0", optional = true, markers = "extra == \"numpy\""} - -[package.extras] -all = ["polars[connectorx,deltalake,fsspec,matplotlib,numpy,pandas,pyarrow,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] -connectorx = ["connectorx"] -deltalake = ["deltalake (>=0.8.0)"] -fsspec = ["fsspec"] -matplotlib = ["matplotlib"] -numpy = ["numpy (>=1.16.0)"] -pandas = ["pandas", "pyarrow (>=7.0.0)"] -pyarrow = ["pyarrow (>=7.0.0)"] -sqlalchemy = ["pandas", "sqlalchemy"] -timezone = ["backports.zoneinfo", "tzdata"] -xlsx2csv = ["xlsx2csv (>=0.8.0)"] -xlsxwriter = ["xlsxwriter"] - [[package]] name = "pydantic" -version = "1.10.8" +version = "1.10.12" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = 
"pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = 
"sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = 
"pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, ] [package.dependencies] @@ -484,13 +441,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [package.dependencies] @@ -502,7 +459,7 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version 
< \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -568,18 +525,18 @@ docs = ["Sphinx"] [[package]] name = "setuptools" -version = "67.8.0" +version = "68.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, + {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", 
"rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -617,24 +574,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.6.3" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, - {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "yamlfix" -version = "1.10.0" +version = "1.11.0" description = "A simple opionated yaml formatter that keeps your comments!" 
optional = false python-versions = ">=3.7.2" files = [ - {file = "yamlfix-1.10.0-py3-none-any.whl", hash = "sha256:742aad5594cae33187acc8b1e802437ead444598c7d586285d7bba68f0dc1e48"}, - {file = "yamlfix-1.10.0.tar.gz", hash = "sha256:80b1c31aadff7bf4f0eac7219c4f461728029e5da773e812428b07e43a613851"}, + {file = "yamlfix-1.11.0-py3-none-any.whl", hash = "sha256:e4435f2ca5cb38f399d8ea9baf41d2815732df2d2ae15f72b20c467902ee8290"}, + {file = "yamlfix-1.11.0.tar.gz", hash = "sha256:7b462f94b14c9982827c65028ddb79ad98535f77021a1f7a4470fd86341e61ce"}, ] [package.dependencies] @@ -645,4 +602,4 @@ ruyaml = ">=0.91.0" [metadata] lock-version = "2.0" python-versions = ">=3.8, <3.11" -content-hash = "12f7c55b8940c1e5739172158cafa827fc11a0ac60c7caba9abae60dacfdfe44" +content-hash = "75480edbe0b67bd83a83342d4b9883214e66be5379367e2faf857d64499b88a8" diff --git a/pyproject.toml b/pyproject.toml index 5b3eedb1..19af3e0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ exclude = ["afids_utils/tests"] python = ">=3.8, <3.11" attrs = "^23.1.0" numpy = "^1.24.3" -polars = {extras = ["numpy"], version = "^0.18.3"} [tool.poetry.group.dev] optional = true From 488abd57d03b0645d464815ccc0c30dc082d86c4 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 16:45:56 -0400 Subject: [PATCH 12/18] partially tkkuehn comments - Remove `io.py`, moving methods to the AfidSet class. `load` is now a ClassMethod while `save` is a method of the instance. 
- Added an `AfidPosition` class, which is now used to store AFIDs - Drops polars from use, favouring `List[AfidPosition]` instead - Update mismatched desc in template.fcsv - Update strategies.py to generate coords using `AfidPosition` - Update all tests to correspond with changes made --- afids_utils/afids.py | 155 ++++++++++++++++++++----- afids_utils/ext/fcsv.py | 117 ++++++++++--------- afids_utils/io.py | 120 ------------------- afids_utils/resources/template.fcsv | 2 +- afids_utils/tests/strategies.py | 51 ++++++--- afids_utils/tests/test_afids.py | 172 +++++++++++++++++++++++++--- afids_utils/tests/test_ext.py | 72 ++++++------ afids_utils/tests/test_io.py | 162 -------------------------- 8 files changed, 411 insertions(+), 440 deletions(-) delete mode 100644 afids_utils/io.py delete mode 100644 afids_utils/tests/test_io.py diff --git a/afids_utils/afids.py b/afids_utils/afids.py index e0624a76..fd31c41f 100644 --- a/afids_utils/afids.py +++ b/afids_utils/afids.py @@ -1,30 +1,133 @@ """Anatomical fiducial classes""" from __future__ import annotations +import json +from importlib import resources +from os import PathLike +from pathlib import Path + import attrs -import numpy as np -import polars as pl -from numpy.typing import NDArray -from afids_utils.exceptions import InvalidFiducialError +from afids_utils.exceptions import InvalidFiducialError, InvalidFileError + + +@attrs.define +class AfidPosition: + """Base class for a single AFID position""" + + label: int = attrs.field() + x: float = attrs.field() + y: float = attrs.field() + z: float = attrs.field() + desc: str = attrs.field() @attrs.define -class AfidSet(dict): - """Base class for a set of fiducials""" +class AfidSet: + """Base class for a set of AFIDs""" slicer_version: str = attrs.field() coord_system: str = attrs.field() - afids_df: pl.DataFrame = attrs.field() + afids: list[AfidPosition] = attrs.field() + + @classmethod + def load(cls, afids_fpath: PathLike[str] | str) -> AfidSet: + """ + Load an 
AFIDs file + + Parameters + ---------- + afids_fpath + Path to .fcsv or .json file containing AFIDs information - def __attrs_post_init__(self): - self["metadata"] = { - "slicer_version": self.slicer_version, - "coord_system": self.coord_system, - } - self["afids"] = self.afids_df + Returns + ------- + AfidSet + Set of anatomical fiducials containing coordinates and metadata + + Raises + ------ + IOError + If extension to fiducial file is not supported - def get_afid(self, label: int) -> NDArray[np.single]: + InvalidFileError + If fiducial file has none or more than expected number of fiducials + + InvalidFiducialError + If description in fiducial file does not match expected + """ + # Check if file exists + afids_fpath = Path(afids_fpath) + if not afids_fpath.exists(): + raise FileNotFoundError("Provided AFID file does not exist") + + afids_fpath_ext = afids_fpath.suffix + + # Loading fcsv + if afids_fpath_ext == ".fcsv": + from afids_utils.ext.fcsv import load_fcsv + + slicer_version, coord_system, afids_positions = load_fcsv( + afids_fpath + ) + # Loading json + # if afids_fpath_ext = ".json": + # load_json(afids_path) + else: + raise ValueError("Unsupported file extension") + + # Perform validation of loaded file + # Load expected mappings + with resources.open_text( + "afids_utils.resources", "afids_descs.json" + ) as json_fpath: + mappings = json.load(json_fpath) + # Check expected number of fiducials exist + if len(afids_positions) != len(mappings["human"]): + raise InvalidFileError("Unexpected number of fiducials") + + # Validate descriptions, before dropping + for label in range(len(afids_positions)): + if afids_positions[label].desc not in mappings["human"][label]: + raise InvalidFiducialError( + f"Description for label {label+1} does not match expected" + ) + + return cls( + slicer_version=slicer_version, + coord_system=coord_system, + afids=afids_positions, + ) + + # TODO: Handle the metadata - specifically setting the coordinate system + def save(self, 
out_fpath: PathLike[str] | str) -> None: + """Save AFIDs to Slicer-compatible file + + Parameters + ---------- + out_fpath + Path of file (including filename and extension) to save AFIDs to + + Raises + ------ + ValueError + If file extension is not supported + """ + + out_fpath_ext = Path(out_fpath).suffix + + # Saving fcsv + if out_fpath_ext == ".fcsv": + from afids_utils.ext.fcsv import save_fcsv + + save_fcsv(self.afids, out_fpath) + # Saving json + # if out_fpath_ext = ".json": + # save_json(afids_coords, out_fpath) + else: + raise ValueError("Unsupported file extension") + + def get_afid(self, label: int) -> AfidPosition: """ Extract a specific AFID's spatial coordinates @@ -35,25 +138,17 @@ def get_afid(self, label: int) -> NDArray[np.single]: Returns ------- - numpy.ndarray[shape=(3,), dtype=numpy.single] - NumPy array containing spatial coordinates (x, y, z) of single AFID - coordinate + afid_position + Spatial position of Afid (as class AfidPosition) Raises ------ InvalidFiducialError - If none or more than expected number of fiducials exist + If AFID label given out of valid range """ - # Filter based off of integer type - if isinstance(label, int): - # Fiducial selection out of bounds - if label < 1 or label > len(self["afids"]): - raise InvalidFiducialError(f"AFID label {label} is not valid") - - return ( - self["afids"] - .filter(pl.col("label") == str(label)) - .select("x_mm", "y_mm", "z_mm") - .to_numpy()[0] - ) + # Fiducial selection out of bounds + if label < 1 or label > len(self.afids): + raise InvalidFiducialError(f"AFID label {label} is not valid") + + return self.afids[label - 1] diff --git a/afids_utils/ext/fcsv.py b/afids_utils/ext/fcsv.py index 276fc3b7..859abf29 100644 --- a/afids_utils/ext/fcsv.py +++ b/afids_utils/ext/fcsv.py @@ -2,21 +2,17 @@ from __future__ import annotations import csv +import io import re from importlib import resources from itertools import islice from os import PathLike -from typing import Dict -import numpy as np 
-import polars as pl -from numpy.typing import NDArray - -from afids_utils.afids import AfidSet +from afids_utils.afids import AfidPosition from afids_utils.exceptions import InvalidFileError HEADER_ROWS: int = 2 -FCSV_FIELDNAMES = ( +FCSV_FIELDNAMES: tuple[str] = ( "# columns = id", "x", "y", @@ -32,23 +28,16 @@ "desc", "associatedNodeID", ) -FCSV_COLS: Dict[str] = { - "x": pl.Float32, - "y": pl.Float32, - "z": pl.Float32, - "label": pl.Utf8, - "desc": pl.Utf8, -} -def _get_metadata(fcsv_path: PathLike[str] | str) -> tuple[str, str]: +def _get_metadata(in_fcsv: io.TextIO) -> tuple[str, str]: """ Internal function to extract metadata from header of fcsv files Parameters ---------- - fcsv_path - Path to .fcsv file containing AFIDs coordinates + in_fcsv + Data from provided fcsv file to parse metadata from Returns ------- @@ -64,15 +53,16 @@ def _get_metadata(fcsv_path: PathLike[str] | str) -> tuple[str, str]: If header is missing or invalid from .fcsv file """ try: - with open(fcsv_path, "r") as fcsv: - header = list(islice(fcsv, HEADER_ROWS)) + header = list(islice(in_fcsv, HEADER_ROWS)) + # Parse version and coordinate system parsed_version = re.findall(r"\d+\.\d+", header[0])[0] parsed_coord = re.split(r"\s", header[1])[-2] + except IndexError: raise InvalidFileError("Missing or invalid header in .fcsv file") - # Set to human-understandable coordinate system + # Transform coordinate system so human-understandable if parsed_coord == "0": parsed_coord = "LPS" elif parsed_coord == "1": @@ -84,35 +74,43 @@ def _get_metadata(fcsv_path: PathLike[str] | str) -> tuple[str, str]: return parsed_version, parsed_coord -def _get_afids(fcsv_path: PathLike[str] | str) -> pl.DataFrame: +def _get_afids(in_fcsv: io.TextIO) -> list[AfidPosition]: """ Internal function for converting .fcsv file to a pl.DataFrame Parameters ---------- - fcsv_path - Path to .fcsv file containing AFID coordinates + in_fcsv + Data from provided fcsv file to parse metadata from Returns ------- - 
pl.DataFrame - Dataframe containing afids ids, descriptions, and coordinates + afid_positions + List containing spatial position of afids """ - # Read in fiducials to dataframe, shortening id header - afids_df = pl.read_csv( - fcsv_path, - skip_rows=HEADER_ROWS, - columns=list(FCSV_COLS.keys()), - new_columns=["x_mm", "y_mm", "z_mm"], - dtypes=FCSV_COLS, - ) + # Read in AFIDs from fcsv (set to start from 1 to skip header fields) + afids = list(islice(in_fcsv, 1, None)) + + # Add to list of AfidPosition + afids_positions = [] + for afid in afids: + afid = afid.split(",") + afids_positions.append( + AfidPosition( + label=int(afid[-3]), + x=float(afid[1]), + y=float(afid[2]), + z=float(afid[3]), + desc=afid[-2], + ) + ) - return afids_df + return afids_positions def load_fcsv( fcsv_path: PathLike[str] | str, -) -> AfidSet: +) -> tuple[str, str, list[AfidPosition]]: """ Read in fcsv to an AfidSet @@ -123,24 +121,26 @@ def load_fcsv( Returns ------- - AfidSet - Set of anatomical fiducials containing spatial coordinates and metadata - """ - # Grab metadata - slicer_version, coord_system = _get_metadata(fcsv_path) + slicer_version + Slicer version associated with fiducial file - # Grab afids - afids_set = AfidSet( - slicer_version=slicer_version, - coord_system=coord_system, - afids_df=_get_afids(fcsv_path), - ) + coord_system + Coordinate system of fiducials + + afids_positions + List containing spatial position of afids + """ + with open(fcsv_path) as in_fcsv: + # Grab metadata + slicer_version, coord_system = _get_metadata(in_fcsv) + # Grab afids + afids_positions = _get_afids(in_fcsv) - return afids_set + return slicer_version, coord_system, afids_positions def save_fcsv( - afid_coords: NDArray[np.single], + afid_coords: list[AfidPosition], out_fcsv: PathLike[str] | str, ) -> None: """ @@ -149,11 +149,15 @@ def save_fcsv( Parameters ---------- afid_coords - Floating-point NumPy array containing spatial coordinates (x, y, z) + List of AFID spatial positions out_fcsv 
Path of fcsv file to save AFIDs to + Raises + ------ + TypeError + If number of fiducials to write does not match expected number """ # Read in fcsv template with resources.open_text( @@ -166,21 +170,16 @@ def save_fcsv( fcsv = list(reader) # Check to make sure shape of AFIDs array matches expected template - if afid_coords.shape[0] != len(fcsv): - raise TypeError( - f"Expected {len(fcsv)} AFIDs, but received {afid_coords.shape[0]}" - ) - if afid_coords.shape[1] != 3: + if len(afid_coords) != len(fcsv): raise TypeError( - "Expected 3 spatial dimensions (x, y, z)," - f"but received {afid_coords.shape[1]}" + f"Expected {len(fcsv)} AFIDs, but received {len(afid_coords)}" ) # Loop over fiducials and update with fiducial spatial coordinates for idx, row in enumerate(fcsv): - row["x"] = afid_coords[idx][0] - row["y"] = afid_coords[idx][1] - row["z"] = afid_coords[idx][2] + row["x"] = afid_coords[idx].x + row["y"] = afid_coords[idx].y + row["z"] = afid_coords[idx].z # Write output fcsv with open(out_fcsv, "w", encoding="utf-8", newline="") as out_fcsv_file: diff --git a/afids_utils/io.py b/afids_utils/io.py deleted file mode 100644 index 00c75321..00000000 --- a/afids_utils/io.py +++ /dev/null @@ -1,120 +0,0 @@ -"""General methods for loading and saving files associated with AFIDs""" -from __future__ import annotations - -import json -from importlib import resources -from os import PathLike -from pathlib import Path - -import polars as pl - -from afids_utils.afids import AfidSet -from afids_utils.exceptions import InvalidFiducialError, InvalidFileError -from afids_utils.ext.fcsv import load_fcsv, save_fcsv - - -def load( - afids_fpath: PathLike[str] | str, -) -> AfidSet: - """ - Load an AFIDs file - - Parameters - ---------- - afids_fpath - Path to .fcsv or .json file containing AFIDs information - - Returns - ------- - AfidSet - Set of anatomical fiducials containing spatial coordinates and metadata - - Raises - ------ - IOError - If extension to fiducial file is not 
.fcsv or .json or does not exist - - InvalidFileError - If fiducial file has none or more than expected number of fiducials - - InvalidFiducialError - If description in fiducial file does not match expected - """ - # Check if file exists - afids_fpath = Path(afids_fpath) - if not afids_fpath.exists(): - raise FileNotFoundError("Provided AFID file does not exist") - - afids_fpath_ext = afids_fpath.suffix - - # Loading fcsv - if afids_fpath_ext == ".fcsv": - afids_set = load_fcsv(afids_fpath) - # Loading json - # if afids_fpath_ext = ".json": - # load_json(afids_path) - else: - raise ValueError("Unsupported file extension") - - # Perform validation of loaded file - # Load expected mappings - with resources.open_text( - "afids_utils.resources", "afids_descs.json" - ) as json_fpath: - mappings = json.load(json_fpath) - # Check expected number of fiducials exist - if len(afids_set["afids"]) != len(mappings["human"]): - raise InvalidFileError("Unexpected number of fiducials") - - # Validate descriptions, before dropping - for label in range(1, len(afids_set["afids"] + 1)): - desc = ( - afids_set["afids"] - .filter(pl.col("label") == str(label)) - .select("desc") - .item() - ) - if desc not in mappings["human"][label - 1]: - raise InvalidFiducialError( - f"Description for label {label} does not match expected" - ) - - # Drop description column - afids_set["afids"] = afids_set["afids"].drop("desc") - - return afids_set - - -# TODO: Handle the metadata - specifically setting the coordinate system -def save( - afids_set: AfidSet, - out_fpath: PathLike[str] | str, -) -> None: - """Save AFIDs to Slicer-compatible file - - Parameters - ---------- - afids_set - An AFID dataset containing metadata and coordinates - - out_fpath - Path of file (including filename and extension) to save AFIDs to - - Raises - ------ - IOError - If file extension is not supported - """ - - out_fpath_ext = Path(out_fpath).suffix - - afids_coords = afids_set["afids"].select("x_mm", "y_mm", 
"z_mm").to_numpy() - - # Saving fcsv - if out_fpath_ext == ".fcsv": - save_fcsv(afids_coords, out_fpath) - # Saving json - # if out_fpath_ext = ".json": - # save_json(afids_coords, out_fpath) - else: - raise ValueError("Unsupported file extension") diff --git a/afids_utils/resources/template.fcsv b/afids_utils/resources/template.fcsv index c88414b9..dd641f63 100644 --- a/afids_utils/resources/template.fcsv +++ b/afids_utils/resources/template.fcsv @@ -20,7 +20,7 @@ vtkMRMLMarkupsFiducialNode_16,0,0,0,0,0,0,1,1,1,1,16,L LV at AC,vtkMRMLScalarVol vtkMRMLMarkupsFiducialNode_17,0,0,0,0,0,0,1,1,1,1,17,R LV at PC,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_18,0,0,0,0,0,0,1,1,1,1,18,L LV at PC,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_19,0,0,0,0,0,0,1,1,1,1,19,Genu of CC,vtkMRMLScalarVolumeNode1 -vtkMRMLMarkupsFiducialNode_20,0,0,0,0,0,0,1,1,1,1,20,Splenium,vtkMRMLScalarVolumeNode1 +vtkMRMLMarkupsFiducialNode_20,0,0,0,0,0,0,1,1,1,1,20,Splenium of CC,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_21,0,0,0,0,0,0,1,1,1,1,21,R AL temporal horn,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_22,0,0,0,0,0,0,1,1,1,1,22,L AL temporal horn,vtkMRMLScalarVolumeNode1 vtkMRMLMarkupsFiducialNode_23,0,0,0,0,0,0,1,1,1,1,23,R superior AM temporal horn,vtkMRMLScalarVolumeNode1 diff --git a/afids_utils/tests/strategies.py b/afids_utils/tests/strategies.py index 1f42a6de..c779acc4 100644 --- a/afids_utils/tests/strategies.py +++ b/afids_utils/tests/strategies.py @@ -5,6 +5,8 @@ from hypothesis.extra.numpy import arrays from numpy.typing import NDArray +from afids_utils.afids import AfidPosition + @st.composite def afid_coords( @@ -13,28 +15,41 @@ def afid_coords( max_value: float = 50.0, width: int = 16, bad_range: bool = False, - bad_dims: bool = False, -) -> NDArray[np.single]: - # Set (in)valid dimensions for array containing AFID coords - num_afids, spatial_dims = 32, 3 - if bad_range: - num_afids = draw( +) -> list[AfidPosition]: + # Set (in)valid number of 
Afid coordinates for array containing AFID coords + num_afids = ( + 32 + if not bad_range + else draw( st.integers(min_value=0, max_value=100).filter(lambda x: x != 32) ) - if bad_dims: - spatial_dims = draw( - st.integers(min_value=0, max_value=10).filter(lambda x: x != 3) - ) + ) - return draw( - arrays( - shape=(num_afids, spatial_dims), - dtype=np.single, - elements=st.floats( - min_value=min_value, max_value=max_value, width=width - ), + afid_pos = [] + for afid in range(num_afids): + afid_pos.append( + AfidPosition( + label=afid + 1, + x=draw( + st.floats( + min_value=min_value, max_value=max_value, width=width + ) + ), + y=draw( + st.floats( + min_value=min_value, max_value=max_value, width=width + ) + ), + z=draw( + st.floats( + min_value=min_value, max_value=max_value, width=width + ) + ), + desc="", + ) ) - ) + + return afid_pos @st.composite diff --git a/afids_utils/tests/test_afids.py b/afids_utils/tests/test_afids.py index cb4a8f95..e5dd979c 100644 --- a/afids_utils/tests/test_afids.py +++ b/afids_utils/tests/test_afids.py @@ -1,16 +1,17 @@ from __future__ import annotations +import json +import tempfile +from importlib import resources from os import PathLike from pathlib import Path -import numpy as np -import polars as pl import pytest from hypothesis import HealthCheck, assume, given, settings from hypothesis import strategies as st -from afids_utils.exceptions import InvalidFiducialError -from afids_utils.io import load +from afids_utils.afids import AfidPosition, AfidSet +from afids_utils.exceptions import InvalidFiducialError, InvalidFileError @pytest.fixture @@ -20,29 +21,166 @@ def valid_fcsv_file() -> PathLike[str]: ) -class TestAfids: - def test_init(self, valid_fcsv_file: PathLike[str]): +@pytest.fixture +def human_mappings() -> list[list[str] | str]: + with resources.open_text( + "afids_utils.resources", "afids_descs.json" + ) as json_fpath: + mappings = json.load(json_fpath) + + return mappings["human"] + + +class TestAfidsIO: + 
@given(label=st.integers(min_value=0, max_value=31)) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_valid_load(self, valid_fcsv_file: PathLike[str], label: int): # Load valid file to check internal types - afids_set = load(valid_fcsv_file) + afids_set = AfidSet.load(valid_fcsv_file) + + # Check correct type created after loading + assert isinstance(afids_set, AfidSet) # Check to make sure internal types are correct - assert isinstance(afids_set["metadata"], dict) - assert isinstance(afids_set["metadata"]["slicer_version"], str) - assert isinstance(afids_set["metadata"]["coord_system"], str) - assert isinstance(afids_set["afids"], pl.DataFrame) + assert isinstance(afids_set.slicer_version, str) + assert isinstance(afids_set.coord_system, str) + assert isinstance(afids_set.afids, list) + assert isinstance(afids_set.afids[label], AfidPosition) + + def test_invalid_fpath(self): + with pytest.raises(FileNotFoundError, match=".*does not exist"): + AfidSet.load("invalid/fpath.fcsv") + + @given( + ext=st.text( + min_size=2, + max_size=5, + alphabet=st.characters( + min_codepoint=ord("A"), max_codepoint=ord("z") + ), + ) + ) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_invalid_ext(self, valid_fcsv_file: PathLike[str], ext: str): + assume(not ext == "fcsv" or not ext == "json") + + with tempfile.NamedTemporaryFile( + mode="w", + prefix="sub-test_desc-", + suffix=f"_afids.{ext}", + ) as invalid_file_ext: + with pytest.raises(ValueError, match="Unsupported .* extension"): + AfidSet.load(invalid_file_ext.name) + + def test_invalid_label_range(self, valid_fcsv_file: PathLike[str]): + # Create additional line of fiducials + with open(valid_fcsv_file) as valid_fcsv: + fcsv_data = valid_fcsv.readlines() + fcsv_data.append(fcsv_data[-1]) + + with tempfile.NamedTemporaryFile( + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv", + ) as out_fcsv_file: + 
out_fcsv_file.writelines(fcsv_data) + out_fcsv_file.flush() + + # Test that InvalidFileError raised containing correct message + with pytest.raises(InvalidFileError, match="Unexpected number.*"): + AfidSet.load(out_fcsv_file.name) + + @given( + label=st.integers(min_value=0, max_value=31), + desc=st.text( + min_size=2, + max_size=5, + alphabet=st.characters( + min_codepoint=ord("A"), max_codepoint=ord("z") + ), + ), + ) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_invalid_desc( + self, + valid_fcsv_file: PathLike[str], + human_mappings: list[list[str] | str], + label: int, + desc: str, + ) -> None: + assume(desc not in human_mappings[label]) + + # Replace valid description with a mismatch + with open(valid_fcsv_file) as valid_fcsv: + fcsv_data = valid_fcsv.readlines() + fcsv_data[label + 3] = fcsv_data[label + 3].replace( + human_mappings[label][0], desc + ) + + # Write to temp file + with tempfile.NamedTemporaryFile( + mode="w", + prefix="sub-test_desc-", + suffix="_afids.fcsv", + ) as out_fcsv_file: + out_fcsv_file.writelines(fcsv_data) + out_fcsv_file.flush() + + # Test for description match error raised + with pytest.raises( + InvalidFiducialError, match="Description for label.*" + ): + AfidSet.load(out_fcsv_file.name) + + def test_valid_save(self, valid_fcsv_file: PathLike[str]): + with tempfile.NamedTemporaryFile( + mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" + ) as out_fcsv_file: + afids_set = AfidSet.load(valid_fcsv_file) + afids_set.save(out_fcsv_file.name) + + assert Path(out_fcsv_file.name).exists() + + @given( + ext=st.text( + min_size=2, + max_size=5, + alphabet=st.characters( + min_codepoint=ord("A"), max_codepoint=ord("z") + ), + ) + ) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_invalid_ext_save(self, valid_fcsv_file: PathLike[str], ext: str): + assume(not ext == "fcsv" or not ext == "json") + + with tempfile.NamedTemporaryFile( + mode="w", 
prefix="sub-test_desc-", suffix=f"_afids.{ext}" + ) as out_file: + afids_set = AfidSet.load(valid_fcsv_file) + with pytest.raises(ValueError, match="Unsupported file extension"): + afids_set.save(out_file.name) + +class TestAfidsCore: @given(label=st.integers(min_value=1, max_value=32)) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], ) def test_valid_get_afid(self, valid_fcsv_file: PathLike[str], label: int): - afids_set = load(valid_fcsv_file) + afids_set = AfidSet.load(valid_fcsv_file) afid_pos = afids_set.get_afid(label) # Check array type - assert isinstance(afid_pos, np.ndarray) - # Check array values - assert afid_pos.dtype == np.single + assert isinstance(afid_pos, AfidPosition) @given(label=st.integers(min_value=-100, max_value=100)) @settings( @@ -51,8 +189,8 @@ def test_valid_get_afid(self, valid_fcsv_file: PathLike[str], label: int): def test_invalid_get_afid( self, valid_fcsv_file: PathLike[str], label: int ): - afids_set = load(valid_fcsv_file) - assume(not 1 <= label <= len(afids_set["afids"])) + afids_set = AfidSet.load(valid_fcsv_file) + assume(not 1 <= label <= len(afids_set.afids)) with pytest.raises(InvalidFiducialError, match=".*not valid"): afids_set.get_afid(label) diff --git a/afids_utils/tests/test_ext.py b/afids_utils/tests/test_ext.py index 79471a5d..aa2eb8b1 100644 --- a/afids_utils/tests/test_ext.py +++ b/afids_utils/tests/test_ext.py @@ -6,18 +6,16 @@ from os import PathLike from pathlib import Path -import numpy as np import pytest from hypothesis import HealthCheck, assume, given, settings from hypothesis import strategies as st -from numpy.typing import NDArray -from afids_utils.afids import AfidSet +from afids_utils.afids import AfidPosition from afids_utils.exceptions import InvalidFileError from afids_utils.ext.fcsv import ( FCSV_FIELDNAMES, + _get_afids, _get_metadata, - load_fcsv, save_fcsv, ) from afids_utils.tests.strategies import afid_coords @@ -31,11 +29,6 @@ def valid_fcsv_file() -> PathLike[str]: 
class TestLoadFcsv: - def test_load_valid_fcsv(self, valid_fcsv_file: PathLike[str]): - afids_set = load_fcsv(valid_fcsv_file) - - assert isinstance(afids_set, AfidSet) - @given(coord_num=st.integers(min_value=0, max_value=1)) @settings( suppress_health_check=[HealthCheck.function_scoped_fixture], @@ -59,7 +52,8 @@ def test_get_valid_metadata( temp_valid_fcsv_file.writelines(valid_fcsv_data) temp_valid_fcsv_file.flush() - parsed_ver, parsed_coord = _get_metadata(temp_valid_fcsv_file.name) + with open(temp_valid_fcsv_file.name) as temp_in_fcsv: + parsed_ver, parsed_coord = _get_metadata(temp_in_fcsv) # Check version pattern matches expected ver_regex = re.compile(r"\d+\.\d+") @@ -93,13 +87,18 @@ def test_invalid_num_coord( temp_invalid_fcsv_file.writelines(fcsv_data) temp_invalid_fcsv_file.flush() - with pytest.raises(InvalidFileError, match="Invalid coordinate.*"): - _get_metadata(temp_invalid_fcsv_file.name) + with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: + with pytest.raises( + InvalidFileError, match="Invalid coordinate.*" + ): + _get_metadata(temp_in_fcsv) @given( coord_str=st.text( min_size=3, - alphabet=st.characters(whitelist_categories=["Lu", "Ll", "Lt"]), + alphabet=st.characters( + min_codepoint=ord("A"), max_codepoint=ord("z") + ), ) ) @settings( @@ -125,8 +124,11 @@ def test_invalid_str_coord( temp_invalid_fcsv_file.writelines(fcsv_data) temp_invalid_fcsv_file.flush() - with pytest.raises(InvalidFileError, match="Invalid coordinate.*"): - _get_metadata(temp_invalid_fcsv_file.name) + with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: + with pytest.raises( + InvalidFileError, match="Invalid coordinate.*" + ): + _get_metadata(temp_in_fcsv) def test_invalid_header(self, valid_fcsv_file: PathLike[str]): with open(valid_fcsv_file) as valid_fcsv: @@ -144,11 +146,27 @@ def test_invalid_header(self, valid_fcsv_file: PathLike[str]): with pytest.raises(InvalidFileError, match="Missing or invalid.*"): _get_metadata(temp_invalid_fcsv_file.name) + 
@given(label=st.integers(min_value=0, max_value=31)) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) + def test_valid_get_afids(self, valid_fcsv_file: PathLike[str], label: int): + with open(valid_fcsv_file) as valid_fcsv: + afids_positions = _get_afids(valid_fcsv) + + assert isinstance(afids_positions, list) + assert isinstance(afids_positions[label], AfidPosition) + class TestSaveFcsv: @given(afids_coords=afid_coords()) + @settings( + suppress_health_check=[HealthCheck.function_scoped_fixture], + ) def test_save_fcsv_invalid_template( - self, afids_coords: NDArray[np.single] + self, + afids_coords: list[AfidPosition], + valid_fcsv_file: PathLike[str], ): with pytest.raises(FileNotFoundError): save_fcsv(afids_coords, "/invalid/template/path.fcsv") @@ -159,7 +177,7 @@ def test_save_fcsv_invalid_template( ) def test_save_fcsv_valid_template( self, - afids_coords: NDArray[np.single], + afids_coords: list[AfidPosition], valid_fcsv_file: PathLike[str], ): with tempfile.NamedTemporaryFile( @@ -188,13 +206,13 @@ def test_save_fcsv_valid_template( # Check contents for idx, row in enumerate(output_fcsv): assert (row["x"], row["y"], row["z"]) == ( - str(afids_coords[idx][0]), - str(afids_coords[idx][1]), - str(afids_coords[idx][2]), + str(afids_coords[idx].x), + str(afids_coords[idx].y), + str(afids_coords[idx].z), ) @given(afids_coords=afid_coords(bad_range=True)) - def test_invalid_num_afids(self, afids_coords: NDArray[np.single]) -> None: + def test_invalid_num_afids(self, afids_coords: list[AfidPosition]) -> None: with tempfile.NamedTemporaryFile( mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" ) as out_fcsv_file: @@ -202,15 +220,3 @@ def test_invalid_num_afids(self, afids_coords: NDArray[np.single]) -> None: save_fcsv(afids_coords, out_fcsv_file) assert "AFIDs, but received" in str(err.value) - - @given(afids_coords=afid_coords(bad_dims=True)) - def test_invalid_afids_dims( - self, afids_coords: NDArray[np.single] - ) -> None: - 
with tempfile.NamedTemporaryFile( - mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" - ) as out_fcsv_file: - with pytest.raises(TypeError) as err: - save_fcsv(afids_coords, out_fcsv_file) - - assert "Expected 3 spatial dimensions" in str(err.value) diff --git a/afids_utils/tests/test_io.py b/afids_utils/tests/test_io.py deleted file mode 100644 index 6a029623..00000000 --- a/afids_utils/tests/test_io.py +++ /dev/null @@ -1,162 +0,0 @@ -from __future__ import annotations - -import json -import tempfile -from importlib import resources -from os import PathLike -from pathlib import Path -from typing import List - -import pytest -from hypothesis import HealthCheck, assume, given, settings -from hypothesis import strategies as st - -from afids_utils.afids import AfidSet -from afids_utils.exceptions import InvalidFiducialError, InvalidFileError -from afids_utils.io import load, save - - -@pytest.fixture -def valid_fcsv_file() -> PathLike[str]: - return ( - Path(__file__).parent / "data" / "tpl-MNI152NLin2009cAsym_afids.fcsv" - ) - - -@pytest.fixture -def human_mappings() -> List[List[str] | str]: - with resources.open_text( - "afids_utils.resources", "afids_descs.json" - ) as json_fpath: - mappings = json.load(json_fpath) - - return mappings["human"] - - -class TestLoad: - def test_valid_file(self, valid_fcsv_file: PathLike[str]): - afids_set = load(valid_fcsv_file) - - assert isinstance(afids_set, AfidSet) - - def test_invalid_fpath(self): - with pytest.raises(FileNotFoundError, match=".*does not exist"): - load("invalid/fpath.fcsv") - - @given( - ext=st.text( - min_size=2, - max_size=5, - alphabet=st.characters( - min_codepoint=ord("A"), max_codepoint=ord("z") - ), - ) - ) - @settings( - suppress_health_check=[HealthCheck.function_scoped_fixture], - ) - def test_invalid_ext(self, valid_fcsv_file: PathLike[str], ext: str): - assume(not ext == "fcsv" or not ext == "json") - - with tempfile.NamedTemporaryFile( - mode="w", - prefix="sub-test_desc-", - 
suffix=f"_afids.{ext}", - ) as invalid_file_ext: - with pytest.raises(ValueError, match="Unsupported .* extension"): - load(invalid_file_ext.name) - - def test_invalid_label_range(self, valid_fcsv_file: PathLike[str]): - # Create additional line of fiducials - with open(valid_fcsv_file) as valid_fcsv: - fcsv_data = valid_fcsv.readlines() - fcsv_data.append(fcsv_data[-1]) - - with tempfile.NamedTemporaryFile( - mode="w", - prefix="sub-test_desc-", - suffix="_afids.fcsv", - ) as out_fcsv_file: - out_fcsv_file.writelines(fcsv_data) - out_fcsv_file.flush() - - # Test that InvalidFileError raised containing correct message - with pytest.raises(InvalidFileError, match="Unexpected number.*"): - load(out_fcsv_file.name) - - @given( - label=st.integers(min_value=1, max_value=32), - desc=st.text( - min_size=2, - max_size=5, - alphabet=st.characters( - min_codepoint=ord("A"), max_codepoint=ord("z") - ), - ), - ) - @settings( - suppress_health_check=[HealthCheck.function_scoped_fixture], - ) - def test_invalid_desc( - self, - valid_fcsv_file: PathLike[str], - human_mappings: List[List[str] | str], - label: int, - desc: str, - ) -> None: - assume(desc not in human_mappings[label - 1]) - - # Replace valid description with a mismatch - with open(valid_fcsv_file) as valid_fcsv: - fcsv_data = valid_fcsv.readlines() - fcsv_data[label + 2] = fcsv_data[label + 2].replace( - human_mappings[label - 1][0], desc - ) - - # Write to temp file - with tempfile.NamedTemporaryFile( - mode="w", - prefix="sub-test_desc-", - suffix="_afids.fcsv", - ) as out_fcsv_file: - out_fcsv_file.writelines(fcsv_data) - out_fcsv_file.flush() - - # Test for description match error raised - with pytest.raises( - InvalidFiducialError, match="Description for label.*" - ): - load(out_fcsv_file.name) - - -class TestSave: - def test_save_fcsv(self, valid_fcsv_file: PathLike[str]): - with tempfile.NamedTemporaryFile( - mode="w", prefix="sub-test_desc-", suffix="_afids.fcsv" - ) as out_fcsv_file: - afids_set = 
load(valid_fcsv_file) - save(afids_set, out_fcsv_file.name) - - assert Path(out_fcsv_file.name).exists() - - @given( - ext=st.text( - min_size=2, - max_size=5, - alphabet=st.characters( - min_codepoint=ord("A"), max_codepoint=ord("z") - ), - ) - ) - @settings( - suppress_health_check=[HealthCheck.function_scoped_fixture], - ) - def test_save_invalid_ext(self, valid_fcsv_file: PathLike[str], ext: str): - assume(not ext == "fcsv" or not ext == "json") - - with tempfile.NamedTemporaryFile( - mode="w", prefix="sub-test_desc-", suffix=f"_afids.{ext}" - ) as out_file: - afids_set = load(valid_fcsv_file) - with pytest.raises(ValueError, match="Unsupported file extension"): - save(afids_set, out_file.name) From 6001c0028fc0d2adaaa6f08f0e9aab6a5032a658 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 16:46:10 -0400 Subject: [PATCH 13/18] update codecov components --- codecov.yml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/codecov.yml b/codecov.yml index 612d9bc5..27221636 100644 --- a/codecov.yml +++ b/codecov.yml @@ -9,15 +9,12 @@ coverage: threshold: 5% component_management: individual_components: - - component_id: afids-utils_io - name: afids_utils/io.py - paths: [afids_utils/io.py] - - component_id: afids-utils_ext - name: afids_utils/ext - paths: [afids_utils/ext/*.py] - component_id: afids-utils_afids name: afids_utils/afids.py paths: [afidS_utils/afids.py] + - component_id: afids-utils_ext + name: afids_utils/ext + paths: [afids_utils/ext/*.py] - component_id: afids-utils_transforms name: afids_utils/transforms.py paths: [afids_utils/transforms.py] From c3fa44191b90c7da266f8b1179a0927316462943 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 16:50:18 -0400 Subject: [PATCH 14/18] update doc's toctree --- docs/api/afids.md | 11 +++++++++++ docs/api/io.md | 6 ------ docs/index.md | 2 +- 3 files changed, 12 insertions(+), 7 deletions(-) create mode 100644 docs/api/afids.md delete mode 100644 docs/api/io.md diff 
--git a/docs/api/afids.md b/docs/api/afids.md new file mode 100644 index 00000000..d8fa2257 --- /dev/null +++ b/docs/api/afids.md @@ -0,0 +1,11 @@ +## afids_utils.afids + +```{eval-rst} + .. autoclass: afids_utils.afids.AfidPosition + :members: +``` + +```{eval-rst} + .. autoclass:: afids_utils.afids.AfidSet + :members: +``` \ No newline at end of file diff --git a/docs/api/io.md b/docs/api/io.md deleted file mode 100644 index f1344d0e..00000000 --- a/docs/api/io.md +++ /dev/null @@ -1,6 +0,0 @@ -## afids_utils.io - -```{eval-rst} - .. automodule:: afids_utils.io - :members: -``` \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index 3beca1fb..a1194058 100644 --- a/docs/index.md +++ b/docs/index.md @@ -7,6 +7,6 @@ :hidden: :maxdepth: 1 -api/io +api/afids api/transform ``` \ No newline at end of file From e5e83cde33ae22581ce3aac8b87354bb175deb64 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 17:09:49 -0400 Subject: [PATCH 15/18] minor refactor for better handling of load_fcsv --- afids_utils/ext/fcsv.py | 22 +++++++++++----------- afids_utils/tests/test_ext.py | 19 +++++++++++-------- 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/afids_utils/ext/fcsv.py b/afids_utils/ext/fcsv.py index 859abf29..293085b8 100644 --- a/afids_utils/ext/fcsv.py +++ b/afids_utils/ext/fcsv.py @@ -2,10 +2,8 @@ from __future__ import annotations import csv -import io import re from importlib import resources -from itertools import islice from os import PathLike from afids_utils.afids import AfidPosition @@ -30,7 +28,7 @@ ) -def _get_metadata(in_fcsv: io.TextIO) -> tuple[str, str]: +def _get_metadata(in_fcsv: list[str]) -> tuple[str, str]: """ Internal function to extract metadata from header of fcsv files @@ -53,7 +51,7 @@ def _get_metadata(in_fcsv: io.TextIO) -> tuple[str, str]: If header is missing or invalid from .fcsv file """ try: - header = list(islice(in_fcsv, HEADER_ROWS)) + header = in_fcsv[:HEADER_ROWS+1] # Parse version and 
coordinate system parsed_version = re.findall(r"\d+\.\d+", header[0])[0] @@ -74,7 +72,7 @@ def _get_metadata(in_fcsv: io.TextIO) -> tuple[str, str]: return parsed_version, parsed_coord -def _get_afids(in_fcsv: io.TextIO) -> list[AfidPosition]: +def _get_afids(in_fcsv: list[str]) -> list[AfidPosition]: """ Internal function for converting .fcsv file to a pl.DataFrame @@ -89,7 +87,7 @@ def _get_afids(in_fcsv: io.TextIO) -> list[AfidPosition]: List containing spatial position of afids """ # Read in AFIDs from fcsv (set to start from 1 to skip header fields) - afids = list(islice(in_fcsv, 1, None)) + afids = in_fcsv[HEADER_ROWS+1:] # Add to list of AfidPosition afids_positions = [] @@ -130,11 +128,13 @@ def load_fcsv( afids_positions List containing spatial position of afids """ - with open(fcsv_path) as in_fcsv: - # Grab metadata - slicer_version, coord_system = _get_metadata(in_fcsv) - # Grab afids - afids_positions = _get_afids(in_fcsv) + with open(fcsv_path) as in_fcsv_fpath: + in_fcsv = in_fcsv_fpath.readlines() + + # Grab metadata + slicer_version, coord_system = _get_metadata(in_fcsv) + # Grab afids + afids_positions = _get_afids(in_fcsv) return slicer_version, coord_system, afids_positions diff --git a/afids_utils/tests/test_ext.py b/afids_utils/tests/test_ext.py index aa2eb8b1..ab3712af 100644 --- a/afids_utils/tests/test_ext.py +++ b/afids_utils/tests/test_ext.py @@ -53,7 +53,9 @@ def test_get_valid_metadata( temp_valid_fcsv_file.flush() with open(temp_valid_fcsv_file.name) as temp_in_fcsv: - parsed_ver, parsed_coord = _get_metadata(temp_in_fcsv) + parsed_ver, parsed_coord = _get_metadata( + temp_in_fcsv.readlines() + ) # Check version pattern matches expected ver_regex = re.compile(r"\d+\.\d+") @@ -90,8 +92,8 @@ def test_invalid_num_coord( with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: with pytest.raises( InvalidFileError, match="Invalid coordinate.*" - ): - _get_metadata(temp_in_fcsv) + ): + _get_metadata(temp_in_fcsv.readlines()) @given( 
coord_str=st.text( @@ -124,11 +126,11 @@ def test_invalid_str_coord( temp_invalid_fcsv_file.writelines(fcsv_data) temp_invalid_fcsv_file.flush() - with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: + with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: with pytest.raises( InvalidFileError, match="Invalid coordinate.*" ): - _get_metadata(temp_in_fcsv) + _get_metadata(temp_in_fcsv.readlines()) def test_invalid_header(self, valid_fcsv_file: PathLike[str]): with open(valid_fcsv_file) as valid_fcsv: @@ -143,8 +145,9 @@ def test_invalid_header(self, valid_fcsv_file: PathLike[str]): temp_invalid_fcsv_file.writelines(invalid_fcsv_data) temp_invalid_fcsv_file.flush() - with pytest.raises(InvalidFileError, match="Missing or invalid.*"): - _get_metadata(temp_invalid_fcsv_file.name) + with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: + with pytest.raises(InvalidFileError, match="Missing or invalid.*"): + _get_metadata(temp_in_fcsv.readlines()) @given(label=st.integers(min_value=0, max_value=31)) @settings( @@ -152,7 +155,7 @@ def test_invalid_header(self, valid_fcsv_file: PathLike[str]): ) def test_valid_get_afids(self, valid_fcsv_file: PathLike[str], label: int): with open(valid_fcsv_file) as valid_fcsv: - afids_positions = _get_afids(valid_fcsv) + afids_positions = _get_afids(valid_fcsv.readlines()) assert isinstance(afids_positions, list) assert isinstance(afids_positions[label], AfidPosition) From 6fa217796474cd4a3dd5d48a9c6ff4625d4244ed Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 17:12:14 -0400 Subject: [PATCH 16/18] add exceptions to the docs --- docs/exceptions/exceptions.md | 11 +++++++++++ docs/index.md | 8 ++++++++ 2 files changed, 19 insertions(+) create mode 100644 docs/exceptions/exceptions.md diff --git a/docs/exceptions/exceptions.md b/docs/exceptions/exceptions.md new file mode 100644 index 00000000..83150c22 --- /dev/null +++ b/docs/exceptions/exceptions.md @@ -0,0 +1,11 @@ +## afids_utils.exceptions + +```{eval-rst} + .. 
autoclass: afids_utils.exceptions.InvalidFileError + :members: +``` + +```{eval-rst} + .. autoclass:: afids_utils.exceptions.InvalidFiducialError + :members: +``` \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index a1194058..fa8521cc 100644 --- a/docs/index.md +++ b/docs/index.md @@ -9,4 +9,12 @@ api/afids api/transform +``` +```{toctree} +:caption: Exceptions +:name: afids_exceptions +:hidden: +:maxdepth: 1 + +exceptions/exceptions ``` \ No newline at end of file From edd92bedbb9bf0343617d14cd31d4f41db5ad7a1 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 17:13:55 -0400 Subject: [PATCH 17/18] linting / formatting updates --- afids_utils/ext/fcsv.py | 4 ++-- afids_utils/tests/test_ext.py | 12 +++++++----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/afids_utils/ext/fcsv.py b/afids_utils/ext/fcsv.py index 293085b8..41e2afb6 100644 --- a/afids_utils/ext/fcsv.py +++ b/afids_utils/ext/fcsv.py @@ -51,7 +51,7 @@ def _get_metadata(in_fcsv: list[str]) -> tuple[str, str]: If header is missing or invalid from .fcsv file """ try: - header = in_fcsv[:HEADER_ROWS+1] + header = in_fcsv[: HEADER_ROWS + 1] # Parse version and coordinate system parsed_version = re.findall(r"\d+\.\d+", header[0])[0] @@ -87,7 +87,7 @@ def _get_afids(in_fcsv: list[str]) -> list[AfidPosition]: List containing spatial position of afids """ # Read in AFIDs from fcsv (set to start from 1 to skip header fields) - afids = in_fcsv[HEADER_ROWS+1:] + afids = in_fcsv[HEADER_ROWS + 1 :] # Add to list of AfidPosition afids_positions = [] diff --git a/afids_utils/tests/test_ext.py b/afids_utils/tests/test_ext.py index ab3712af..c23c83c3 100644 --- a/afids_utils/tests/test_ext.py +++ b/afids_utils/tests/test_ext.py @@ -53,8 +53,8 @@ def test_get_valid_metadata( temp_valid_fcsv_file.flush() with open(temp_valid_fcsv_file.name) as temp_in_fcsv: - parsed_ver, parsed_coord = _get_metadata( - temp_in_fcsv.readlines() + parsed_ver, parsed_coord = _get_metadata( + 
temp_in_fcsv.readlines() ) # Check version pattern matches expected @@ -92,7 +92,7 @@ def test_invalid_num_coord( with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: with pytest.raises( InvalidFileError, match="Invalid coordinate.*" - ): + ): _get_metadata(temp_in_fcsv.readlines()) @given( @@ -126,7 +126,7 @@ def test_invalid_str_coord( temp_invalid_fcsv_file.writelines(fcsv_data) temp_invalid_fcsv_file.flush() - with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: + with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: with pytest.raises( InvalidFileError, match="Invalid coordinate.*" ): @@ -146,7 +146,9 @@ def test_invalid_header(self, valid_fcsv_file: PathLike[str]): temp_invalid_fcsv_file.flush() with open(temp_invalid_fcsv_file.name) as temp_in_fcsv: - with pytest.raises(InvalidFileError, match="Missing or invalid.*"): + with pytest.raises( + InvalidFileError, match="Missing or invalid.*" + ): _get_metadata(temp_in_fcsv.readlines()) @given(label=st.integers(min_value=0, max_value=31)) From e13eebd7120915fdc4491124ef2a25e44774c7a8 Mon Sep 17 00:00:00 2001 From: Jason Kai Date: Wed, 23 Aug 2023 17:17:47 -0400 Subject: [PATCH 18/18] fix doc rst eval --- docs/api/afids.md | 2 -- docs/exceptions/exceptions.md | 2 -- 2 files changed, 4 deletions(-) diff --git a/docs/api/afids.md b/docs/api/afids.md index d8fa2257..e47b3cb9 100644 --- a/docs/api/afids.md +++ b/docs/api/afids.md @@ -3,9 +3,7 @@ ```{eval-rst} .. autoclass: afids_utils.afids.AfidPosition :members: -``` -```{eval-rst} .. autoclass:: afids_utils.afids.AfidSet :members: ``` \ No newline at end of file diff --git a/docs/exceptions/exceptions.md b/docs/exceptions/exceptions.md index 83150c22..33b1fc91 100644 --- a/docs/exceptions/exceptions.md +++ b/docs/exceptions/exceptions.md @@ -3,9 +3,7 @@ ```{eval-rst} .. autoclass: afids_utils.exceptions.InvalidFileError :members: -``` -```{eval-rst} .. autoclass:: afids_utils.exceptions.InvalidFiducialError :members: ``` \ No newline at end of file