diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 59b81b3a..c672d5a4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,6 +17,7 @@ repos: entry: "mypy --strict ./python" pass_filenames: false additional_dependencies: + - "types-requests" - "pytest-asyncio" - "pydantic" - "marimo" diff --git a/pyproject.toml b/pyproject.toml index 8f1cd85d..bd105ad9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,6 +11,7 @@ dependencies = [ "typing-extensions>=4.12.2", "ariadne-codegen[subscriptions]>=0.14.0", "plotly>4", + "requests>=2.32.3", ] [project.optional-dependencies] @@ -20,6 +21,7 @@ dev = [ "pytest-asyncio==0.24.0", "ruff==0.6.4", "mypy==1.11.2", + "types-requests>=2.32.0", "coverage==7.6.1", "twine==5.1.1", "marimo==0.8.14", diff --git a/python/queries.gql b/python/queries.gql index 9c5b751b..b4d80cf6 100644 --- a/python/queries.gql +++ b/python/queries.gql @@ -17,6 +17,21 @@ fragment CollectionNotFound on CollectionNotFound { id } +fragment CollectionFileReference on CollectionFile { + id + key + downloadURL + uploadURL + tags { + key + value + } +} + +fragment CollectionFileNotFound on CollectionFileNotFound { + id +} + mutation CollectionCreate($organizationID: ID!, $key: ID!, $parentID: ID) { collectionCreate( organizationID: $organizationID @@ -142,3 +157,80 @@ query CollectionDocuments( } } } + +mutation CollectionFileCreate($collectionID: ID!, $key: ID!) { + collectionFileCreate(collectionID: $collectionID, key: $key) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } +} + +mutation CollectionFileDelete($id: ID!) { + collectionFileDelete(id: $id) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + ... on CollectionFileNotFound { + ...CollectionFileNotFound + } + } +} + +query CollectionFiles( + $collectionID: ID! + $tag: TagInput + $after: ID + $first: Int +) { + collection(id: $collectionID) { + __typename + ... on Collection { + id + key + files(after: $after, first: $first, tag: $tag) { + edges { + node { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } +} + +mutation CollectionFileTagAdd($id: ID!, $tag: TagInput!) { + collectionFileTagAdd(id: $id, tag: $tag) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } +} + +mutation CollectionFileTagDelete($id: ID!, $tag_key: String!) { + collectionFileTagDelete(id: $id, key: $tag_key) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } +} + +query CollectionFile($id: ID!) { + collectionFile(id: $id) { + __typename + ... 
on CollectionFile {
+ ...CollectionFileReference
+ }
+ }
+}
\ No newline at end of file
diff --git a/python/src/numerous/_client/_fs_client.py b/python/src/numerous/_client/_fs_client.py
index 8a3e42e8..a6e7ea9b 100644
--- a/python/src/numerous/_client/_fs_client.py
+++ b/python/src/numerous/_client/_fs_client.py
@@ -1,24 +1,31 @@
+from __future__ import annotations
+
import json
from dataclasses import asdict, dataclass
from pathlib import Path
-from typing import Any, Optional
+from typing import TYPE_CHECKING, Any, BinaryIO
+from numerous.collection.file_reference import FileReference
from numerous.generated.graphql.fragments import (
CollectionDocumentReference,
CollectionDocumentReferenceTags,
+ CollectionFileReferenceTags,
CollectionReference,
)
-from numerous.generated.graphql.input_types import TagInput
from numerous.jsonbase64 import base64_to_dict, dict_to_base64
+if TYPE_CHECKING:
+ from numerous.generated.graphql.input_types import TagInput
+
+
@dataclass
class FileSystemCollectionTag:
key: str
value: str
@staticmethod
- def load(tag: dict[str, Any]) -> "FileSystemCollectionTag":
+ def load(tag: dict[str, Any]) -> FileSystemCollectionTag:
key = tag.get("key")
if not isinstance(key, str):
tname = type(key).__name__
@@ -33,13 +40,87 @@ def load(tag: dict[str, Any]) -> "FileSystemCollectionTag":
return FileSystemCollectionTag(key=key, value=value)
- def to_reference_tag(self) -> CollectionDocumentReferenceTags:
+ def to_file_reference_tag(self) -> CollectionFileReferenceTags:
+ return CollectionFileReferenceTags(
+ key=self.key,
+ value=self.value,
+ )
+
+ def to_document_reference_tag(self) -> CollectionDocumentReferenceTags:
return CollectionDocumentReferenceTags(
key=self.key,
value=self.value,
)
+@dataclass
+class FileSystemFileMetadata:
+ file_id: str
+ file_key: str
+ tags: list[FileSystemCollectionTag]
+
+ def save(self, path: Path) -> None:
+ def convert_to_serializable(obj: Path) -> str:
+ if isinstance(obj, Path):
+ return str(obj)
+ return ""
+
+ with path.open("w") as f:
+ json.dump(asdict(self), f, default=convert_to_serializable)
+
+ @staticmethod
+ def load(file_path: Path) -> FileSystemFileMetadata:
+ with file_path.open("r") as f:
+ file_content = json.load(f)
+
+ if not isinstance(file_content, dict):
+ tname = type(file_content).__name__
+ msg = f"FileSystemCollection file must be a dict, found {tname}"
+ raise TypeError(msg)
+
+ file_id = file_content.get("file_id")
+ if not isinstance(file_id, str):
+ tname = type(file_id).__name__
+ msg = f"FileSystemCollection file id must be a str, found {tname}"
+ raise TypeError(msg)
+
+ file_key = file_content.get("file_key")
+ if not isinstance(file_key, str):
+ tname = type(file_key).__name__
+ msg = f"FileSystemCollection file key must be a str, found {tname}"
+ raise TypeError(msg)
+
+ tags = file_content.get("tags", [])
+ if not isinstance(tags, list):
+ tname = type(tags).__name__
+ msg = f"FileSystemCollection tags must be a list, found {tname}"
+ raise TypeError(msg)
+
+ return FileSystemFileMetadata(
+ file_id=file_id,
+ file_key=file_key,
+ tags=[FileSystemCollectionTag.load(tag) for tag in tags],
+ )
+
+ def reference_tags(self) -> list[CollectionFileReferenceTags]:
+ return [
+ CollectionFileReferenceTags(key=tag.key, value=tag.value)
+ for tag in self.tags
+ ]
+
+ def tag_matches(self, tag_input: TagInput) -> bool:
+ matching_tag = next(
+ (
+ tag
+ for tag in self.tags
+ if tag.key == tag_input.key and tag.value == tag_input.value
+ ),
+ None,
+ )
+
+ return matching_tag is not None
+
+
@dataclass
class FileSystemCollectionDocument: data: dict[str, Any] @@ -50,7 +131,7 @@ def save(self, path: Path) -> None: json.dump(asdict(self), f) @staticmethod - def load(path: Path) -> "FileSystemCollectionDocument": + def load(path: Path) -> FileSystemCollectionDocument: with path.open("r") as f: file_content = json.load(f) @@ -97,13 +178,55 @@ def tag_matches(self, tag_input: TagInput) -> bool: return matching_tag is not None +@dataclass +class FileIndexEntry: + collection_id: str + file_key: str + _path: Path | None = None + + @staticmethod + def load(path: Path) -> FileIndexEntry: + return FileIndexEntry(**json.loads(path.read_text()), _path=path) + + def remove(self) -> None: + if self._path is not None: + self._path.unlink() + + def save(self, path: Path) -> None: + if self._path: + msg = "Cannot save file index entry that was already saved." + raise RuntimeError(msg) + self._path = path + path.write_text( + json.dumps({"collection_id": self.collection_id, "file_key": self.file_key}) + ) + + class FileSystemClient: + FILE_INDEX_DIR = "__file_index__" + def __init__(self, base_path: Path) -> None: self._base_path = base_path self._base_path.mkdir(exist_ok=True) + (self._base_path / self.FILE_INDEX_DIR).mkdir(exist_ok=True) + + def _file_index_entry(self, file_id: str) -> FileIndexEntry: + return FileIndexEntry.load(self._base_path / self.FILE_INDEX_DIR / file_id) + + def _file_metadata_path(self, collection_id: str, file_key: str) -> Path: + return self._base_path / collection_id / f"{_escape(file_key)}.file.meta.json" + + def _file_data_path(self, collection_id: str, file_key: str) -> Path: + return self._base_path / collection_id / f"{_escape(file_key)}.file.data" + + def _document_path(self, collection_id: str, document_key: str) -> Path: + return self._base_path / collection_id / f"{document_key}.doc.json" + + def _document_path_from_id(self, document_id: str) -> Path: + return self._base_path / (document_id + ".doc.json") def get_collection_reference( - self, collection_key: str, parent_collection_id: Optional[str] = None + self, collection_key: str, parent_collection_id: str | None = None ) -> CollectionReference: collection_relpath = ( Path(parent_collection_id) / collection_key @@ -117,8 +240,8 @@ def get_collection_reference( def get_collection_document( self, collection_id: str, document_key: str - ) -> Optional[CollectionDocumentReference]: - path = self._base_path / collection_id / f"{document_key}.json" + ) -> CollectionDocumentReference | None: + path = self._document_path(collection_id, document_key) if not path.exists(): return None @@ -129,20 +252,20 @@ def get_collection_document( id=doc_id, key=document_key, data=dict_to_base64(doc.data), - tags=[tag.to_reference_tag() for tag in doc.tags], + tags=[tag.to_document_reference_tag() for tag in doc.tags], ) def set_collection_document( self, collection_id: str, document_key: str, encoded_data: str - ) -> Optional[CollectionDocumentReference]: - doc_path = self._base_path / collection_id / f"{document_key}.json" + ) -> CollectionDocumentReference | None: + path = self._document_path(collection_id, document_key) data = base64_to_dict(encoded_data) - if doc_path.exists(): - doc = FileSystemCollectionDocument.load(doc_path) + if path.exists(): + doc = FileSystemCollectionDocument.load(path) doc.data = data else: doc = FileSystemCollectionDocument(data, []) - doc.save(doc_path) + doc.save(path) doc_id = str(Path(collection_id) / document_key) return CollectionDocumentReference( @@ -154,8 +277,8 @@ def set_collection_document( def 
delete_collection_document( self, document_id: str - ) -> Optional[CollectionDocumentReference]: - doc_path = self._base_path / (document_id + ".json") + ) -> CollectionDocumentReference | None: + doc_path = self._document_path_from_id(document_id) if not doc_path.exists(): return None @@ -165,15 +288,15 @@ def delete_collection_document( return CollectionDocumentReference( id=document_id, - key=doc_path.stem, + key=doc_path.name.removesuffix(".doc.json"), data=dict_to_base64(doc.data), tags=doc.reference_tags(), ) def add_collection_document_tag( self, document_id: str, tag: TagInput - ) -> Optional[CollectionDocumentReference]: - doc_path = self._base_path / (document_id + ".json") + ) -> CollectionDocumentReference | None: + doc_path = self._document_path_from_id(document_id) if not doc_path.exists(): return None @@ -190,8 +313,8 @@ def add_collection_document_tag( def delete_collection_document_tag( self, document_id: str, tag_key: str - ) -> Optional[CollectionDocumentReference]: - doc_path = self._base_path / (document_id + ".json") + ) -> CollectionDocumentReference | None: + doc_path = self._document_path_from_id(document_id) if not doc_path.exists(): return None @@ -210,15 +333,15 @@ def get_collection_documents( self, collection_id: str, end_cursor: str, # noqa: ARG002 - tag_input: Optional[TagInput], - ) -> tuple[Optional[list[Optional[CollectionDocumentReference]]], bool, str]: + tag_input: TagInput | None, + ) -> tuple[list[CollectionDocumentReference | None], bool, str]: col_path = self._base_path / collection_id if not col_path.exists(): return [], False, "" - documents: list[Optional[CollectionDocumentReference]] = [] + documents: list[CollectionDocumentReference | None] = [] for doc_path in col_path.iterdir(): - if doc_path.suffix != ".json": + if not doc_path.name.endswith(".doc.json"): continue doc = FileSystemCollectionDocument.load(doc_path) @@ -227,11 +350,15 @@ def get_collection_documents( # skips files that do not match tag input, if it is given continue - doc_id = str(doc_path.relative_to(self._base_path).with_suffix("")) + doc_id = str( + doc_path.relative_to(self._base_path).with_name( + doc_path.name.removesuffix(".doc.json") + ) + ) documents.append( CollectionDocumentReference( id=doc_id, - key=doc_path.stem, + key=doc_path.name.removesuffix(".doc.json"), data=dict_to_base64(doc.data), tags=doc.reference_tags(), ) @@ -239,11 +366,113 @@ def get_collection_documents( return sorted(documents, key=lambda d: d.id if d else ""), False, "" + def create_collection_file_reference( + self, collection_id: str, file_key: str + ) -> FileReference | None: + meta_path = self._file_metadata_path(collection_id, file_key) + if meta_path.exists(): + meta = FileSystemFileMetadata.load(meta_path) + else: + file_id = _escape(collection_id + "_" + file_key) + index_entry = FileIndexEntry(collection_id=collection_id, file_key=file_key) + index_entry.save(self._base_path / self.FILE_INDEX_DIR / file_id) + meta = FileSystemFileMetadata(file_id=file_id, file_key=file_key, tags=[]) + meta.save(self._file_metadata_path(collection_id, file_key)) + + return FileReference( + client=self, + file_id=meta.file_id, + key=file_key, + ) + + def collection_file_tags(self, file_id: str) -> dict[str, str] | None: + try: + index_entry = self._file_index_entry(file_id) + except FileNotFoundError: + return None + + meta_path = self._file_metadata_path( + index_entry.collection_id, index_entry.file_key + ) + + if not meta_path.exists(): + return None + + meta = FileSystemFileMetadata.load(meta_path) + 
return {tag.key: tag.value for tag in meta.tags} + + def delete_collection_file(self, file_id: str) -> None: + index_entry = self._file_index_entry(file_id) + meta_path = self._file_metadata_path( + index_entry.collection_id, index_entry.file_key + ) + data_path = self._file_data_path( + index_entry.collection_id, index_entry.file_key + ) + + if not meta_path.exists(): + return + + meta_path.unlink() + data_path.unlink() + + def get_collection_files( + self, + collection_id: str, + end_cursor: str, # noqa: ARG002 + tag_input: TagInput | None, + ) -> tuple[list[FileReference], bool, str]: + col_path = self._base_path / collection_id + if not col_path.exists(): + return [], False, "" + + files: list[FileReference] = [] + for file_path in col_path.iterdir(): + if not file_path.name.endswith(".file.meta.json"): + continue + + meta = FileSystemFileMetadata.load(file_path) + + if tag_input and not meta.tag_matches(tag_input): + # skips files that do not match tag input, if it is given + continue + + files.append( + FileReference(client=self, file_id=meta.file_id, key=meta.file_key) + ) + + return files, False, "" + + def add_collection_file_tag(self, file_id: str, tag: TagInput) -> None: + index_entry = self._file_index_entry(file_id) + meta_path = self._file_metadata_path( + index_entry.collection_id, index_entry.file_key + ) + if not meta_path.exists(): + return + + meta = FileSystemFileMetadata.load(meta_path) + if not meta.tag_matches(tag): + meta.tags.append(FileSystemCollectionTag(key=tag.key, value=tag.value)) + meta.save(meta_path) + + def delete_collection_file_tag(self, file_id: str, tag_key: str) -> None: + index_entry = self._file_index_entry(file_id) + meta_path = self._file_metadata_path( + index_entry.collection_id, index_entry.file_key + ) + if not meta_path.exists(): + return + + meta = FileSystemFileMetadata.load(meta_path) + meta.tags = [tag for tag in meta.tags if tag.key != tag_key] + meta.save(meta_path) + def get_collection_collections( self, collection_key: str, end_cursor: str, # noqa: ARG002 - ) -> tuple[Optional[list[CollectionReference]], bool, str]: + ) -> tuple[list[CollectionReference], bool, str]: col_path = self._base_path / collection_key if not col_path.exists(): return [], False, "" @@ -255,3 +484,48 @@ def get_collection_collections( collections.append(CollectionReference(id=col_id, key=item.name)) return sorted(collections, key=lambda c: c.id), False, "" + + def read_text(self, file_id: str) -> str: + index_entry = self._file_index_entry(file_id) + data_path = self._file_data_path( + index_entry.collection_id, index_entry.file_key + ) + return data_path.read_text() + + def read_bytes(self, file_id: str) -> bytes: + index_entry = self._file_index_entry(file_id) + data_path = self._file_data_path( + index_entry.collection_id, index_entry.file_key + ) + return data_path.read_bytes() + + def save_file(self, file_id: str, data: bytes | str) -> None: + index_entry = self._file_index_entry(file_id) + data_path = self._file_data_path( + index_entry.collection_id, index_entry.file_key + ) + if isinstance(data, bytes): + data_path.write_bytes(data) + else: + data_path.write_text(data) + + def open_file(self, file_id: str) -> BinaryIO: + index_entry = self._file_index_entry(file_id) + data_path = self._file_data_path( + index_entry.collection_id, index_entry.file_key + ) + return data_path.open("rb") + + def file_exists(self, file_id: str) -> bool: + try: + index_entry = self._file_index_entry(file_id) + except FileNotFoundError: + return False + + return 
self._file_data_path( + index_entry.collection_id, index_entry.file_key + ).exists() + + +def _escape(key: str) -> str: + return key.replace("/", "__") diff --git a/python/src/numerous/_client/_graphql_client.py b/python/src/numerous/_client/_graphql_client.py index 2e1e9ce7..7fd6f6f2 100644 --- a/python/src/numerous/_client/_graphql_client.py +++ b/python/src/numerous/_client/_graphql_client.py @@ -1,10 +1,15 @@ """GraphQL client wrapper for numerous.""" +from __future__ import annotations + +import io import os -from typing import Optional, Union +from typing import TYPE_CHECKING, BinaryIO + +import requests from numerous.collection.exceptions import ParentCollectionNotFoundError -from numerous.generated.graphql.client import Client as GQLClient +from numerous.collection.file_reference import FileReference from numerous.generated.graphql.collection_collections import ( CollectionCollectionsCollectionCollection, CollectionCollectionsCollectionCollectionCollectionsEdgesNode, @@ -13,36 +18,65 @@ CollectionDocumentCollectionCollectionDocument, CollectionDocumentCollectionCollectionNotFound, ) -from numerous.generated.graphql.collection_document_delete import ( - CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocument, - CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocumentNotFound, -) -from numerous.generated.graphql.collection_document_set import ( - CollectionDocumentSetCollectionDocumentSetCollectionDocument, - CollectionDocumentSetCollectionDocumentSetCollectionNotFound, -) -from numerous.generated.graphql.collection_document_tag_add import ( - CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocument, - CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocumentNotFound, -) -from numerous.generated.graphql.collection_document_tag_delete import ( - CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocument, - CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocumentNotFound, -) from numerous.generated.graphql.collection_documents import ( CollectionDocumentsCollectionCollection, CollectionDocumentsCollectionCollectionDocumentsEdgesNode, ) +from numerous.generated.graphql.collection_file import ( + CollectionFileCollectionFileCollectionFile, + CollectionFileCollectionFileCollectionFileNotFound, +) +from numerous.generated.graphql.collection_files import ( + CollectionFilesCollectionCollection, + CollectionFilesCollectionCollectionFilesEdgesNode, +) from numerous.generated.graphql.fragments import ( CollectionDocumentReference, + CollectionFileReference, CollectionNotFound, CollectionReference, ) -from numerous.generated.graphql.input_types import TagInput from numerous.threaded_event_loop import ThreadedEventLoop +if TYPE_CHECKING: + from numerous.generated.graphql.client import Client as GQLClient + from numerous.generated.graphql.collection_document_delete import ( + CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocument, + CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocumentNotFound, + ) + from numerous.generated.graphql.collection_document_set import ( + CollectionDocumentSetCollectionDocumentSetCollectionDocument, + CollectionDocumentSetCollectionDocumentSetCollectionNotFound, + ) + from numerous.generated.graphql.collection_document_tag_add import ( + CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocument, + CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocumentNotFound, + ) + from numerous.generated.graphql.collection_document_tag_delete import ( + 
CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocument, + CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocumentNotFound, + ) + from numerous.generated.graphql.collection_file_create import ( + CollectionFileCreateCollectionFileCreateCollectionFile, + CollectionFileCreateCollectionFileCreateCollectionNotFound, + ) + from numerous.generated.graphql.collection_file_delete import ( + CollectionFileDeleteCollectionFileDeleteCollectionFile, + CollectionFileDeleteCollectionFileDeleteCollectionFileNotFound, + ) + from numerous.generated.graphql.collection_file_tag_add import ( + CollectionFileTagAddCollectionFileTagAddCollectionFile, + CollectionFileTagAddCollectionFileTagAddCollectionFileNotFound, + ) + from numerous.generated.graphql.collection_file_tag_delete import ( + CollectionFileTagDeleteCollectionFileTagDeleteCollectionFile, + CollectionFileTagDeleteCollectionFileTagDeleteCollectionFileNotFound, + ) + from numerous.generated.graphql.input_types import TagInput + COLLECTED_OBJECTS_NUMBER = 100 +_REQUEST_TIMEOUT_SECONDS_ = 1.5 class APIURLMissingError(Exception): @@ -69,8 +103,8 @@ def __init__(self) -> None: class GraphQLClient: def __init__(self, gql: GQLClient) -> None: self._gql = gql - self._threaded_event_loop = ThreadedEventLoop() - self._threaded_event_loop.start() + self._loop = ThreadedEventLoop() + self._loop.start() organization_id = os.getenv("NUMEROUS_ORGANIZATION_ID") if not organization_id: @@ -85,11 +119,9 @@ def __init__(self, gql: GQLClient) -> None: def _create_collection_ref( self, - collection_response: Union[ - CollectionReference, - CollectionCollectionsCollectionCollectionCollectionsEdgesNode, - CollectionNotFound, - ], + collection_response: CollectionReference + | CollectionCollectionsCollectionCollectionCollectionsEdgesNode + | CollectionNotFound, ) -> CollectionReference: if isinstance(collection_response, CollectionNotFound): raise ParentCollectionNotFoundError(collection_id=collection_response.id) @@ -99,7 +131,7 @@ def _create_collection_ref( ) async def _create_collection( - self, collection_key: str, parent_collection_id: Optional[str] = None + self, collection_key: str, parent_collection_id: str | None = None ) -> CollectionReference: response = await self._gql.collection_create( self._organization_id, @@ -110,7 +142,7 @@ async def _create_collection( return self._create_collection_ref(response.collection_create) def get_collection_reference( - self, collection_key: str, parent_collection_id: Optional[str] = None + self, collection_key: str, parent_collection_id: str | None = None ) -> CollectionReference: """ Retrieve a collection by its key and parent key. @@ -118,39 +150,36 @@ def get_collection_reference( This method retrieves a collection based on its key and parent key, or creates it if it doesn't exist. 
""" - return self._threaded_event_loop.await_coro( + return self._loop.await_coro( self._create_collection(collection_key, parent_collection_id) ) def _create_collection_document_ref( self, - collection_response: Optional[ - Union[ - CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocument, - CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocument, - CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocument, - CollectionDocumentSetCollectionDocumentSetCollectionDocument, - CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocumentNotFound, - CollectionDocumentSetCollectionDocumentSetCollectionNotFound, - CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocumentNotFound, - CollectionDocumentCollectionCollectionDocument, - CollectionDocumentsCollectionCollectionDocumentsEdgesNode, - CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocumentNotFound, - ] - ], - ) -> Optional[CollectionDocumentReference]: - if isinstance(collection_response, CollectionDocumentReference): + resp: CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocument + | CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocument + | CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocument + | CollectionDocumentSetCollectionDocumentSetCollectionDocument + | CollectionDocumentTagAddCollectionDocumentTagAddCollectionDocumentNotFound + | CollectionDocumentSetCollectionDocumentSetCollectionNotFound + | CollectionDocumentDeleteCollectionDocumentDeleteCollectionDocumentNotFound + | CollectionDocumentCollectionCollectionDocument + | CollectionDocumentsCollectionCollectionDocumentsEdgesNode + | CollectionDocumentTagDeleteCollectionDocumentTagDeleteCollectionDocumentNotFound # noqa: E501 + | None, + ) -> CollectionDocumentReference | None: + if isinstance(resp, CollectionDocumentReference): return CollectionDocumentReference( - id=collection_response.id, - key=collection_response.key, - data=collection_response.data, - tags=collection_response.tags, + id=resp.id, + key=resp.key, + data=resp.data, + tags=resp.tags, ) return None async def _get_collection_document( self, collection_id: str, document_key: str - ) -> Optional[CollectionDocumentReference]: + ) -> CollectionDocumentReference | None: response = await self._gql.collection_document( collection_id, document_key, @@ -167,14 +196,14 @@ async def _get_collection_document( def get_collection_document( self, collection_id: str, document_key: str - ) -> Optional[CollectionDocumentReference]: - return self._threaded_event_loop.await_coro( + ) -> CollectionDocumentReference | None: + return self._loop.await_coro( self._get_collection_document(collection_id, document_key) ) async def _set_collection_document( self, collection_id: str, document_key: str, document_data: str - ) -> Optional[CollectionDocumentReference]: + ) -> CollectionDocumentReference | None: response = await self._gql.collection_document_set( collection_id, document_key, @@ -185,14 +214,14 @@ async def _set_collection_document( def set_collection_document( self, collection_id: str, document_key: str, document_data: str - ) -> Optional[CollectionDocumentReference]: - return self._threaded_event_loop.await_coro( + ) -> CollectionDocumentReference | None: + return self._loop.await_coro( self._set_collection_document(collection_id, document_key, document_data) ) async def _delete_collection_document( self, document_id: str - ) -> Optional[CollectionDocumentReference]: + ) -> CollectionDocumentReference | None: response = 
await self._gql.collection_document_delete( document_id, headers=self._headers ) @@ -200,14 +229,12 @@ async def _delete_collection_document( def delete_collection_document( self, document_id: str - ) -> Optional[CollectionDocumentReference]: - return self._threaded_event_loop.await_coro( - self._delete_collection_document(document_id) - ) + ) -> CollectionDocumentReference | None: + return self._loop.await_coro(self._delete_collection_document(document_id)) async def _add_collection_document_tag( self, document_id: str, tag: TagInput - ) -> Optional[CollectionDocumentReference]: + ) -> CollectionDocumentReference | None: response = await self._gql.collection_document_tag_add( document_id, tag, headers=self._headers ) @@ -217,14 +244,14 @@ async def _add_collection_document_tag( def add_collection_document_tag( self, document_id: str, tag: TagInput - ) -> Optional[CollectionDocumentReference]: - return self._threaded_event_loop.await_coro( + ) -> CollectionDocumentReference | None: + return self._loop.await_coro( self._add_collection_document_tag(document_id, tag) ) async def _delete_collection_document_tag( self, document_id: str, tag_key: str - ) -> Optional[CollectionDocumentReference]: + ) -> CollectionDocumentReference | None: response = await self._gql.collection_document_tag_delete( document_id, tag_key, headers=self._headers ) @@ -234,8 +261,8 @@ async def _delete_collection_document_tag( def delete_collection_document_tag( self, document_id: str, tag_key: str - ) -> Optional[CollectionDocumentReference]: - return self._threaded_event_loop.await_coro( + ) -> CollectionDocumentReference | None: + return self._loop.await_coro( self._delete_collection_document_tag(document_id, tag_key) ) @@ -243,8 +270,8 @@ async def _get_collection_documents( self, collection_id: str, end_cursor: str, - tag_input: Optional[TagInput], - ) -> tuple[Optional[list[Optional[CollectionDocumentReference]]], bool, str]: + tag_input: TagInput | None, + ) -> tuple[list[CollectionDocumentReference | None] | None, bool, str]: response = await self._gql.collection_documents( collection_id, tag_input, @@ -269,15 +296,119 @@ async def _get_collection_documents( return result, has_next_page, end_cursor def get_collection_documents( - self, collection_id: str, end_cursor: str, tag_input: Optional[TagInput] - ) -> tuple[Optional[list[Optional[CollectionDocumentReference]]], bool, str]: - return self._threaded_event_loop.await_coro( + self, collection_id: str, end_cursor: str, tag_input: TagInput | None + ) -> tuple[list[CollectionDocumentReference | None] | None, bool, str]: + return self._loop.await_coro( self._get_collection_documents(collection_id, end_cursor, tag_input) ) + def _create_collection_files_ref( + self, + resp: ( + CollectionFileCreateCollectionFileCreateCollectionFile + | CollectionFileCreateCollectionFileCreateCollectionNotFound + | CollectionFileDeleteCollectionFileDeleteCollectionFile + | CollectionFileDeleteCollectionFileDeleteCollectionFileNotFound + | CollectionFilesCollectionCollectionFilesEdgesNode + | CollectionFileTagDeleteCollectionFileTagDeleteCollectionFile + | CollectionFileTagAddCollectionFileTagAddCollectionFile + | CollectionFileTagAddCollectionFileTagAddCollectionFileNotFound + | CollectionFileTagDeleteCollectionFileTagDeleteCollectionFileNotFound + | None + ), + ) -> FileReference | None: + if not isinstance(resp, CollectionFileReference): + return None + + return FileReference(client=self, key=resp.key, file_id=resp.id) + + async def _create_collection_file_reference( + self, 
collection_id: str, file_key: str + ) -> FileReference | None: + response = await self._gql.collection_file_create( + collection_id, + file_key, + headers=self._headers, + ) + return self._create_collection_files_ref(response.collection_file_create) + + def create_collection_file_reference( + self, collection_id: str, file_key: str + ) -> FileReference | None: + return self._loop.await_coro( + self._create_collection_file_reference(collection_id, file_key) + ) + + def collection_file_tags(self, file_id: str) -> dict[str, str] | None: + file = self._collection_file(file_id) + + if not isinstance(file, CollectionFileCollectionFileCollectionFile): + return None + + return {tag.key: tag.value for tag in file.tags} + + async def _delete_collection_file(self, file_id: str) -> None: + await self._gql.collection_file_delete(file_id, headers=self._headers) + + def delete_collection_file(self, file_id: str) -> None: + self._loop.await_coro(self._delete_collection_file(file_id)) + + async def _get_collection_files( + self, + collection_id: str, + end_cursor: str, + tag_input: TagInput | None, + ) -> tuple[list[FileReference], bool, str]: + response = await self._gql.collection_files( + collection_id, + tag_input, + after=end_cursor, + first=COLLECTED_OBJECTS_NUMBER, + headers=self._headers, + ) + + collection = response.collection + if not isinstance(collection, CollectionFilesCollectionCollection): + return [], False, "" + + files = collection.files + edges = files.edges + page_info = files.page_info + + result: list[FileReference] = [] + for edge in edges: + if ref := self._create_collection_files_ref(edge.node): + result.append(ref) # noqa: PERF401 + + end_cursor = page_info.end_cursor or "" + has_next_page = page_info.has_next_page + + return result, has_next_page, end_cursor + + def get_collection_files( + self, collection_id: str, end_cursor: str, tag_input: TagInput | None + ) -> tuple[list[FileReference], bool, str]: + return self._loop.await_coro( + self._get_collection_files(collection_id, end_cursor, tag_input) + ) + + async def _add_collection_file_tag(self, file_id: str, tag: TagInput) -> None: + await self._gql.collection_file_tag_add(file_id, tag, headers=self._headers) + + def add_collection_file_tag(self, file_id: str, tag: TagInput) -> None: + self._loop.await_coro(self._add_collection_file_tag(file_id, tag)) + + async def _delete_collection_file_tag(self, file_id: str, tag_key: str) -> None: + await self._gql.collection_file_tag_delete( + file_id, tag_key, headers=self._headers + ) + + def delete_collection_file_tag(self, file_id: str, tag_key: str) -> None: + return self._loop.await_coro(self._delete_collection_file_tag(file_id, tag_key)) + async def _get_collection_collections( self, collection_id: str, end_cursor: str - ) -> tuple[Optional[list[CollectionReference]], bool, str]: + ) -> tuple[list[CollectionReference] | None, bool, str]: response = await self._gql.collection_collections( collection_id, after=end_cursor, @@ -304,7 +435,79 @@ async def _get_collection_collections( def get_collection_collections( self, collection_key: str, end_cursor: str - ) -> tuple[Optional[list[CollectionReference]], bool, str]: - return self._threaded_event_loop.await_coro( + ) -> tuple[list[CollectionReference] | None, bool, str]: + return self._loop.await_coro( self._get_collection_collections(collection_key, end_cursor) ) + + def save_file(self, file_id: str, data: bytes | str) -> None: + file = self._collection_file(file_id) + if file is None or isinstance( + file, 
CollectionFileCollectionFileCollectionFileNotFound + ): + return + + if file.upload_url is None: + msg = "No upload URL for this file." + raise ValueError(msg) + + content_type = "application/octet-stream" + if isinstance(data, str): + content_type = "text/plain" + data = data.encode() # Convert string to bytes + + response = requests.put( + file.upload_url, + timeout=_REQUEST_TIMEOUT_SECONDS_, + headers={"Content-Type": content_type, "Content-Length": str(len(data))}, + data=data, + ) + response.raise_for_status() + + def read_text(self, file_id: str) -> str: + return self._request_file(file_id).text + + def read_bytes(self, file_id: str) -> bytes: + return self._request_file(file_id).content + + def open_file(self, file_id: str) -> BinaryIO: + return io.BytesIO(self._request_file(file_id).content) + + def _collection_file( + self, file_id: str + ) -> ( + CollectionFileCollectionFileCollectionFileNotFound + | CollectionFileCollectionFileCollectionFile + | None + ): + return self._loop.await_coro( + self._gql.collection_file(file_id, headers=self._headers) + ).collection_file + + def _request_file(self, file_id: str) -> requests.Response: + file = self._collection_file(file_id) + + if file is None or isinstance( + file, CollectionFileCollectionFileCollectionFileNotFound + ): + msg = "Collection file not found" + raise ValueError(msg) + + if file.download_url is None: + msg = "No download URL for this file." + raise ValueError(msg) + + response = requests.get(file.download_url, timeout=_REQUEST_TIMEOUT_SECONDS_) + response.raise_for_status() + + return response + + def file_exists(self, file_id: str) -> bool: + file = self._collection_file(file_id) + + if file is None or isinstance( + file, CollectionFileCollectionFileCollectionFileNotFound + ): + return False + + return file.download_url is not None and file.download_url.strip() != "" diff --git a/python/src/numerous/collection/__init__.py b/python/src/numerous/collection/__init__.py index 5fe5445b..fe63183a 100644 --- a/python/src/numerous/collection/__init__.py +++ b/python/src/numerous/collection/__init__.py @@ -1,7 +1,7 @@ """The Python SDK for numerous collections.""" -__all__ = ["collection", "NumerousCollection", "NumerousDocument"] +__all__ = ["collection", "CollectionReference", "DocumentReference"] from .collection import collection -from .numerous_collection import NumerousCollection -from .numerous_document import NumerousDocument +from .collection_reference import CollectionReference +from .document_reference import DocumentReference diff --git a/python/src/numerous/collection/_client.py b/python/src/numerous/collection/_client.py index 7381c425..621cbb01 100644 --- a/python/src/numerous/collection/_client.py +++ b/python/src/numerous/collection/_client.py @@ -5,44 +5,75 @@ manage collections, documents and files. 
""" -from typing import Optional, Protocol +from __future__ import annotations -from numerous.generated.graphql.fragments import ( - CollectionDocumentReference, - CollectionReference, -) -from numerous.generated.graphql.input_types import TagInput +from typing import TYPE_CHECKING, BinaryIO, Protocol + + +if TYPE_CHECKING: + from numerous.collection.file_reference import FileReference + from numerous.generated.graphql.fragments import ( + CollectionDocumentReference, + CollectionReference, + ) + from numerous.generated.graphql.input_types import TagInput class Client(Protocol): def get_collection_reference( - self, collection_key: str, parent_collection_id: Optional[str] = None + self, collection_key: str, parent_collection_id: str | None = None ) -> CollectionReference: ... def get_collection_document( self, collection_id: str, document_key: str - ) -> Optional[CollectionDocumentReference]: ... + ) -> CollectionDocumentReference | None: ... def set_collection_document( self, collection_id: str, document_key: str, document_data: str - ) -> Optional[CollectionDocumentReference]: ... + ) -> CollectionDocumentReference | None: ... def delete_collection_document( self, document_id: str - ) -> Optional[CollectionDocumentReference]: ... + ) -> CollectionDocumentReference | None: ... def add_collection_document_tag( self, document_id: str, tag: TagInput - ) -> Optional[CollectionDocumentReference]: ... + ) -> CollectionDocumentReference | None: ... def delete_collection_document_tag( self, document_id: str, tag_key: str - ) -> Optional[CollectionDocumentReference]: ... + ) -> CollectionDocumentReference | None: ... def get_collection_documents( - self, collection_id: str, end_cursor: str, tag_input: Optional[TagInput] - ) -> tuple[Optional[list[Optional[CollectionDocumentReference]]], bool, str]: ... + self, collection_id: str, end_cursor: str, tag_input: TagInput | None + ) -> tuple[list[CollectionDocumentReference | None] | None, bool, str]: ... def get_collection_collections( self, collection_key: str, end_cursor: str - ) -> tuple[Optional[list[CollectionReference]], bool, str]: ... + ) -> tuple[list[CollectionReference] | None, bool, str]: ... + + def get_collection_files( + self, collection_id: str, end_cursor: str, tag_input: TagInput | None + ) -> tuple[list[FileReference], bool, str]: ... + + def create_collection_file_reference( + self, collection_id: str, file_key: str + ) -> FileReference | None: ... + + def collection_file_tags(self, file_id: str) -> dict[str, str] | None: ... + + def delete_collection_file(self, file_id: str) -> None: ... + + def add_collection_file_tag(self, file_id: str, tag: TagInput) -> None: ... + + def delete_collection_file_tag(self, file_id: str, tag_key: str) -> None: ... + + def read_text(self, file_id: str) -> str: ... + + def read_bytes(self, file_id: str) -> bytes: ... + + def save_file(self, file_id: str, data: bytes | str) -> None: ... + + def open_file(self, file_id: str) -> BinaryIO: ... + + def file_exists(self, file_id: str) -> bool: ... 
diff --git a/python/src/numerous/collection/collection.py b/python/src/numerous/collection/collection.py index 8cc025a1..f7c2dceb 100644 --- a/python/src/numerous/collection/collection.py +++ b/python/src/numerous/collection/collection.py @@ -3,14 +3,14 @@ from typing import Optional from numerous._client._get_client import get_client -from numerous.collection.numerous_collection import NumerousCollection +from numerous.collection.collection_reference import CollectionReference from ._client import Client def collection( collection_key: str, _client: Optional[Client] = None -) -> NumerousCollection: +) -> CollectionReference: """ Get or create a root collection by key. @@ -28,4 +28,4 @@ def collection( if _client is None: _client = get_client() collection_ref = _client.get_collection_reference(collection_key) - return NumerousCollection(collection_ref, _client) + return CollectionReference(collection_ref.id, collection_ref.key, _client) diff --git a/python/src/numerous/collection/numerous_collection.py b/python/src/numerous/collection/collection_reference.py similarity index 51% rename from python/src/numerous/collection/numerous_collection.py rename to python/src/numerous/collection/collection_reference.py index eb8b67b6..5729a5b4 100644 --- a/python/src/numerous/collection/numerous_collection.py +++ b/python/src/numerous/collection/collection_reference.py @@ -4,8 +4,8 @@ from typing import Generator, Iterator, Optional from numerous.collection._client import Client -from numerous.collection.numerous_document import NumerousDocument -from numerous.generated.graphql.fragments import CollectionReference +from numerous.collection.document_reference import DocumentReference +from numerous.collection.file_reference import FileReference from numerous.generated.graphql.input_types import TagInput @@ -15,13 +15,15 @@ class CollectionNotFoundError(Exception): key: str -class NumerousCollection: - def __init__(self, collection_ref: CollectionReference, _client: Client) -> None: - self.key = collection_ref.key - self.id = collection_ref.id +class CollectionReference: + def __init__( + self, collection_id: str, collection_key: str, _client: Client + ) -> None: + self.key = collection_key + self.id = collection_id self._client = _client - def collection(self, collection_key: str) -> "NumerousCollection": + def collection(self, collection_key: str) -> "CollectionReference": """ Get or create a child collection of this collection by key. @@ -34,16 +36,16 @@ def collection(self, collection_key: str) -> "NumerousCollection": NumerousCollection: The child collection identified by the given key. """ - collection_ref = self._client.get_collection_reference( + ref = self._client.get_collection_reference( collection_key=collection_key, parent_collection_id=self.id ) - if collection_ref is None: + if ref is None: raise CollectionNotFoundError(parent_id=self.id, key=collection_key) - return NumerousCollection(collection_ref, self._client) + return CollectionReference(ref.id, ref.key, self._client) - def document(self, key: str) -> NumerousDocument: + def document(self, key: str) -> DocumentReference: """ Get or create a document by key. 
@@ -57,20 +59,86 @@ def document(self, key: str) -> NumerousDocument: """ numerous_doc_ref = self._client.get_collection_document(self.id, key) if numerous_doc_ref is not None: - numerous_document = NumerousDocument( + numerous_document = DocumentReference( self._client, numerous_doc_ref.key, (self.id, self.key), numerous_doc_ref, ) else: - numerous_document = NumerousDocument(self._client, key, (self.id, self.key)) + numerous_document = DocumentReference( + self._client, key, (self.id, self.key) + ) return numerous_document + def file(self, key: str) -> FileReference: + """ + Get or create a file by key. + + Args: + key: The key of the file. + + """ + file = self._client.create_collection_file_reference(self.id, key) + if file is None: + msg = "Failed to retrieve or create the file." + raise ValueError(msg) + + return file + + def save_file(self, file_key: str, file_data: str) -> None: + """ + Save data to a file in the collection. + + If the file with the specified key already exists, + it will be overwritten with the new data. + + Args: + file_key: The key of the file to save or update. + file_data: The data to be written to the file. + + Raises: + ValueError: If the file cannot be created or saved. + + """ + file = self.file(file_key) + file.save(file_data) + + def files( + self, tag_key: Optional[str] = None, tag_value: Optional[str] = None + ) -> Iterator[FileReference]: + """ + Retrieve files from the collection, filtered by a tag key and value. + + Args: + tag_key: The key of the tag used to filter files. + tag_value: The value of the tag used to filter files. + + Yields: + File references from the collection. + + """ + end_cursor = "" + tag_input = None + if tag_key is not None and tag_value is not None: + tag_input = TagInput(key=tag_key, value=tag_value) + has_next_page = True + while has_next_page: + result = self._client.get_collection_files(self.id, end_cursor, tag_input) + if result is None: + break + numerous_files, has_next_page, end_cursor = result + if numerous_files is None: + break + for numerous_file in numerous_files: + if numerous_file is None: + continue + yield numerous_file + def documents( self, tag_key: Optional[str] = None, tag_value: Optional[str] = None - ) -> Iterator[NumerousDocument]: + ) -> Iterator[DocumentReference]: """ Retrieve documents from the collection, filtered by a tag key and value. @@ -101,14 +169,14 @@ def documents( for numerous_doc_ref in numerous_doc_refs: if numerous_doc_ref is None: continue - yield NumerousDocument( + yield DocumentReference( self._client, numerous_doc_ref.key, (self.id, self.key), numerous_doc_ref, ) - def collections(self) -> Generator["NumerousCollection", None, None]: + def collections(self) -> Generator["CollectionReference", None, None]: """ Retrieve nested collections from the collection. 
@@ -122,8 +190,8 @@ def collections(self) -> Generator["NumerousCollection", None, None]:
"""
result = self._client.get_collection_collections(self.id, end_cursor)
if result is None:
break
- collection_refs, has_next_page, end_cursor = result
- if collection_refs is None:
+ refs, has_next_page, end_cursor = result
+ if refs is None:
break
- for collection_ref in collection_refs:
- yield NumerousCollection(collection_ref, self._client)
+ for ref in refs:
+ yield CollectionReference(ref.id, ref.key, self._client)
diff --git a/python/src/numerous/collection/numerous_document.py b/python/src/numerous/collection/document_reference.py
similarity index 99%
rename from python/src/numerous/collection/numerous_document.py
rename to python/src/numerous/collection/document_reference.py
index 50d23185..83b6846d 100644
--- a/python/src/numerous/collection/numerous_document.py
+++ b/python/src/numerous/collection/document_reference.py
@@ -8,7 +8,7 @@ from numerous.jsonbase64 import base64_to_dict, dict_to_base64
-class NumerousDocument:
+class DocumentReference:
"""
Represents a document in a Numerous collection.
diff --git a/python/src/numerous/collection/file_reference.py b/python/src/numerous/collection/file_reference.py
new file mode 100644
index 00000000..c556a720
--- /dev/null
+++ b/python/src/numerous/collection/file_reference.py
@@ -0,0 +1,151 @@
+"""Class for working with numerous files."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, BinaryIO
+
+from numerous.generated.graphql.input_types import TagInput
+
+
+if TYPE_CHECKING:
+ from io import TextIOWrapper
+
+ from numerous.collection._client import Client
+
+NO_FILE_ERROR_MSG = "File does not exist."
+
+
+class FileReference:
+ """
+ Represents a file in a collection.
+
+ Attributes:
+ key: The key of the file.
+ file_id: The unique identifier of the file.
+
+ """
+
+ def __init__(
+ self,
+ *,
+ client: Client,
+ key: str,
+ file_id: str,
+ ) -> None:
+ """
+ Initialize a file reference.
+
+ Args:
+ client: The client used to interact with the Numerous collection.
+ key: The key of the file.
+ file_id: The unique identifier of the file.
+
+ """
+ self.key: str = key
+ self.file_id: str = file_id
+ self._client: Client = client
+
+ @property
+ def exists(self) -> bool:
+ """
+ Indicate whether the file exists.
+
+ Returns:
+ True if the file exists; False otherwise.
+
+ """
+ return self._client.file_exists(self.file_id)
+
+ @property
+ def tags(self) -> dict[str, str]:
+ """
+ Return the tags associated with the file.
+
+ Returns:
+ A dictionary of tag key-value pairs.
+
+ """
+ tags = self._client.collection_file_tags(self.file_id)
+ if tags is None:
+ raise ValueError(NO_FILE_ERROR_MSG)
+ return tags
+
+ def read_text(self) -> str:
+ """
+ Read the file's content as text.
+
+ Returns:
+ The text content of the file.
+
+ """
+ return self._client.read_text(self.file_id)
+
+ def read_bytes(self) -> bytes:
+ """
+ Read the file's content as bytes.
+
+ Returns:
+ The byte content of the file.
+
+ """
+ return self._client.read_bytes(self.file_id)
+
+ def open(self) -> BinaryIO:
+ """
+ Open the file for reading in binary mode.
+
+ Returns:
+ A binary file-like object for reading the file.
+
+ """
+ return self._client.open_file(self.file_id)
+
+ def save(self, data: bytes | str) -> None:
+ """
+ Upload and save data to the file on the server.
+
+ Args:
+ data: The content to save to the file, either as bytes or string.
+ + """ + self._client.save_file(self.file_id, data) + + def save_file(self, data: TextIOWrapper) -> None: + """ + Upload and saves a text file to the server. + + Args: + data: A file-like object containing the text content to upload. + + """ + self._client.save_file(self.file_id, data.read()) + + def delete(self) -> None: + """Delete the file from the server.""" + self._client.delete_collection_file(self.file_id) + + def tag(self, key: str, value: str) -> None: + """ + Add a tag to the file. + + Args: + key: The tag key. + value: The tag value. + + """ + self._client.add_collection_file_tag( + self.file_id, TagInput(key=key, value=value) + ) + + def tag_delete(self, tag_key: str) -> None: + """ + Delete a tag from the file. + + Args: + tag_key: The key of the tag to delete. + + Raises: + ValueError: If the file does not exist. + + """ + self._client.delete_collection_file_tag(self.file_id, tag_key) diff --git a/python/src/numerous/generated/graphql/__init__.py b/python/src/numerous/generated/graphql/__init__.py index 89758ff9..4bc0c3f1 100644 --- a/python/src/numerous/generated/graphql/__init__.py +++ b/python/src/numerous/generated/graphql/__init__.py @@ -52,6 +52,40 @@ CollectionDocumentsCollectionCollectionDocumentsPageInfo, CollectionDocumentsCollectionCollectionNotFound, ) +from .collection_file import ( + CollectionFile, + CollectionFileCollectionFileCollectionFile, + CollectionFileCollectionFileCollectionFileNotFound, +) +from .collection_file_create import ( + CollectionFileCreate, + CollectionFileCreateCollectionFileCreateCollectionFile, + CollectionFileCreateCollectionFileCreateCollectionNotFound, +) +from .collection_file_delete import ( + CollectionFileDelete, + CollectionFileDeleteCollectionFileDeleteCollectionFile, + CollectionFileDeleteCollectionFileDeleteCollectionFileNotFound, +) +from .collection_file_tag_add import ( + CollectionFileTagAdd, + CollectionFileTagAddCollectionFileTagAddCollectionFile, + CollectionFileTagAddCollectionFileTagAddCollectionFileNotFound, +) +from .collection_file_tag_delete import ( + CollectionFileTagDelete, + CollectionFileTagDeleteCollectionFileTagDeleteCollectionFile, + CollectionFileTagDeleteCollectionFileTagDeleteCollectionFileNotFound, +) +from .collection_files import ( + CollectionFiles, + CollectionFilesCollectionCollection, + CollectionFilesCollectionCollectionFiles, + CollectionFilesCollectionCollectionFilesEdges, + CollectionFilesCollectionCollectionFilesEdgesNode, + CollectionFilesCollectionCollectionFilesPageInfo, + CollectionFilesCollectionCollectionNotFound, +) from .enums import ( AppDeploymentStatus, AppSubscriptionStatus, @@ -71,6 +105,9 @@ from .fragments import ( CollectionDocumentReference, CollectionDocumentReferenceTags, + CollectionFileNotFound, + CollectionFileReference, + CollectionFileReferenceTags, CollectionNotFound, CollectionReference, ) @@ -149,6 +186,31 @@ "CollectionDocumentsCollectionCollectionDocumentsEdgesNode", "CollectionDocumentsCollectionCollectionDocumentsPageInfo", "CollectionDocumentsCollectionCollectionNotFound", + "CollectionFile", + "CollectionFileCollectionFileCollectionFile", + "CollectionFileCollectionFileCollectionFileNotFound", + "CollectionFileCreate", + "CollectionFileCreateCollectionFileCreateCollectionFile", + "CollectionFileCreateCollectionFileCreateCollectionNotFound", + "CollectionFileDelete", + "CollectionFileDeleteCollectionFileDeleteCollectionFile", + "CollectionFileDeleteCollectionFileDeleteCollectionFileNotFound", + "CollectionFileNotFound", + "CollectionFileReference", + 
"CollectionFileReferenceTags", + "CollectionFileTagAdd", + "CollectionFileTagAddCollectionFileTagAddCollectionFile", + "CollectionFileTagAddCollectionFileTagAddCollectionFileNotFound", + "CollectionFileTagDelete", + "CollectionFileTagDeleteCollectionFileTagDeleteCollectionFile", + "CollectionFileTagDeleteCollectionFileTagDeleteCollectionFileNotFound", + "CollectionFiles", + "CollectionFilesCollectionCollection", + "CollectionFilesCollectionCollectionFiles", + "CollectionFilesCollectionCollectionFilesEdges", + "CollectionFilesCollectionCollectionFilesEdgesNode", + "CollectionFilesCollectionCollectionFilesPageInfo", + "CollectionFilesCollectionCollectionNotFound", "CollectionNotFound", "CollectionReference", "GraphQLClientError", diff --git a/python/src/numerous/generated/graphql/client.py b/python/src/numerous/generated/graphql/client.py index a6021df8..943f63d9 100644 --- a/python/src/numerous/generated/graphql/client.py +++ b/python/src/numerous/generated/graphql/client.py @@ -13,6 +13,12 @@ from .collection_document_tag_add import CollectionDocumentTagAdd from .collection_document_tag_delete import CollectionDocumentTagDelete from .collection_documents import CollectionDocuments +from .collection_file import CollectionFile +from .collection_file_create import CollectionFileCreate +from .collection_file_delete import CollectionFileDelete +from .collection_file_tag_add import CollectionFileTagAdd +from .collection_file_tag_delete import CollectionFileTagDelete +from .collection_files import CollectionFiles from .input_types import TagInput @@ -368,3 +374,243 @@ async def collection_documents( ) data = self.get_data(response) return CollectionDocuments.model_validate(data) + + async def collection_file_create( + self, collection_id: str, key: str, **kwargs: Any + ) -> CollectionFileCreate: + query = gql( + """ + mutation CollectionFileCreate($collectionID: ID!, $key: ID!) { + collectionFileCreate(collectionID: $collectionID, key: $key) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } + } + + fragment CollectionFileReference on CollectionFile { + id + key + downloadURL + uploadURL + tags { + key + value + } + } + """ + ) + variables: Dict[str, object] = {"collectionID": collection_id, "key": key} + response = await self.execute( + query=query, + operation_name="CollectionFileCreate", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return CollectionFileCreate.model_validate(data) + + async def collection_file_delete( + self, id: str, **kwargs: Any + ) -> CollectionFileDelete: + query = gql( + """ + mutation CollectionFileDelete($id: ID!) { + collectionFileDelete(id: $id) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + ... 
on CollectionFileNotFound { + ...CollectionFileNotFound + } + } + } + + fragment CollectionFileNotFound on CollectionFileNotFound { + id + } + + fragment CollectionFileReference on CollectionFile { + id + key + downloadURL + uploadURL + tags { + key + value + } + } + """ + ) + variables: Dict[str, object] = {"id": id} + response = await self.execute( + query=query, + operation_name="CollectionFileDelete", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return CollectionFileDelete.model_validate(data) + + async def collection_files( + self, + collection_id: str, + tag: Union[Optional[TagInput], UnsetType] = UNSET, + after: Union[Optional[str], UnsetType] = UNSET, + first: Union[Optional[int], UnsetType] = UNSET, + **kwargs: Any + ) -> CollectionFiles: + query = gql( + """ + query CollectionFiles($collectionID: ID!, $tag: TagInput, $after: ID, $first: Int) { + collection(id: $collectionID) { + __typename + ... on Collection { + id + key + files(after: $after, first: $first, tag: $tag) { + edges { + node { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } + } + + fragment CollectionFileReference on CollectionFile { + id + key + downloadURL + uploadURL + tags { + key + value + } + } + """ + ) + variables: Dict[str, object] = { + "collectionID": collection_id, + "tag": tag, + "after": after, + "first": first, + } + response = await self.execute( + query=query, operation_name="CollectionFiles", variables=variables, **kwargs + ) + data = self.get_data(response) + return CollectionFiles.model_validate(data) + + async def collection_file_tag_add( + self, id: str, tag: TagInput, **kwargs: Any + ) -> CollectionFileTagAdd: + query = gql( + """ + mutation CollectionFileTagAdd($id: ID!, $tag: TagInput!) { + collectionFileTagAdd(id: $id, tag: $tag) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } + } + + fragment CollectionFileReference on CollectionFile { + id + key + downloadURL + uploadURL + tags { + key + value + } + } + """ + ) + variables: Dict[str, object] = {"id": id, "tag": tag} + response = await self.execute( + query=query, + operation_name="CollectionFileTagAdd", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return CollectionFileTagAdd.model_validate(data) + + async def collection_file_tag_delete( + self, id: str, tag_key: str, **kwargs: Any + ) -> CollectionFileTagDelete: + query = gql( + """ + mutation CollectionFileTagDelete($id: ID!, $tag_key: String!) { + collectionFileTagDelete(id: $id, key: $tag_key) { + __typename + ... on CollectionFile { + ...CollectionFileReference + } + } + } + + fragment CollectionFileReference on CollectionFile { + id + key + downloadURL + uploadURL + tags { + key + value + } + } + """ + ) + variables: Dict[str, object] = {"id": id, "tag_key": tag_key} + response = await self.execute( + query=query, + operation_name="CollectionFileTagDelete", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return CollectionFileTagDelete.model_validate(data) + + async def collection_file(self, id: str, **kwargs: Any) -> CollectionFile: + query = gql( + """ + query CollectionFile($id: ID!) { + collectionFile(id: $id) { + __typename + ... 
on CollectionFile { + ...CollectionFileReference + } + } + } + + fragment CollectionFileReference on CollectionFile { + id + key + downloadURL + uploadURL + tags { + key + value + } + } + """ + ) + variables: Dict[str, object] = {"id": id} + response = await self.execute( + query=query, operation_name="CollectionFile", variables=variables, **kwargs + ) + data = self.get_data(response) + return CollectionFile.model_validate(data) diff --git a/python/src/numerous/generated/graphql/collection_file.py b/python/src/numerous/generated/graphql/collection_file.py new file mode 100644 index 00000000..cc5d6cc6 --- /dev/null +++ b/python/src/numerous/generated/graphql/collection_file.py @@ -0,0 +1,32 @@ +# Generated by ariadne-codegen +# Source: queries.gql + +from typing import Annotated, Literal, Optional, Union + +from pydantic import Field + +from .base_model import BaseModel +from .fragments import CollectionFileReference + + +class CollectionFile(BaseModel): + collection_file: Optional[ + Annotated[ + Union[ + "CollectionFileCollectionFileCollectionFile", + "CollectionFileCollectionFileCollectionFileNotFound", + ], + Field(discriminator="typename__"), + ] + ] = Field(alias="collectionFile") + + +class CollectionFileCollectionFileCollectionFile(CollectionFileReference): + typename__: Literal["CollectionFile"] = Field(alias="__typename") + + +class CollectionFileCollectionFileCollectionFileNotFound(BaseModel): + typename__: Literal["CollectionFileNotFound"] = Field(alias="__typename") + + +CollectionFile.model_rebuild() diff --git a/python/src/numerous/generated/graphql/collection_file_create.py b/python/src/numerous/generated/graphql/collection_file_create.py new file mode 100644 index 00000000..e0395f9b --- /dev/null +++ b/python/src/numerous/generated/graphql/collection_file_create.py @@ -0,0 +1,27 @@ +# Generated by ariadne-codegen +# Source: queries.gql + +from typing import Literal, Union + +from pydantic import Field + +from .base_model import BaseModel +from .fragments import CollectionFileReference + + +class CollectionFileCreate(BaseModel): + collection_file_create: Union[ + "CollectionFileCreateCollectionFileCreateCollectionFile", + "CollectionFileCreateCollectionFileCreateCollectionNotFound", + ] = Field(alias="collectionFileCreate", discriminator="typename__") + + +class CollectionFileCreateCollectionFileCreateCollectionFile(CollectionFileReference): + typename__: Literal["CollectionFile"] = Field(alias="__typename") + + +class CollectionFileCreateCollectionFileCreateCollectionNotFound(BaseModel): + typename__: Literal["CollectionNotFound"] = Field(alias="__typename") + + +CollectionFileCreate.model_rebuild() diff --git a/python/src/numerous/generated/graphql/collection_file_delete.py b/python/src/numerous/generated/graphql/collection_file_delete.py new file mode 100644 index 00000000..7eb4d3c5 --- /dev/null +++ b/python/src/numerous/generated/graphql/collection_file_delete.py @@ -0,0 +1,29 @@ +# Generated by ariadne-codegen +# Source: queries.gql + +from typing import Literal, Union + +from pydantic import Field + +from .base_model import BaseModel +from .fragments import CollectionFileNotFound, CollectionFileReference + + +class CollectionFileDelete(BaseModel): + collection_file_delete: Union[ + "CollectionFileDeleteCollectionFileDeleteCollectionFile", + "CollectionFileDeleteCollectionFileDeleteCollectionFileNotFound", + ] = Field(alias="collectionFileDelete", discriminator="typename__") + + +class CollectionFileDeleteCollectionFileDeleteCollectionFile(CollectionFileReference): + 
typename__: Literal["CollectionFile"] = Field(alias="__typename") + + +class CollectionFileDeleteCollectionFileDeleteCollectionFileNotFound( + CollectionFileNotFound +): + typename__: Literal["CollectionFileNotFound"] = Field(alias="__typename") + + +CollectionFileDelete.model_rebuild() diff --git a/python/src/numerous/generated/graphql/collection_file_tag_add.py b/python/src/numerous/generated/graphql/collection_file_tag_add.py new file mode 100644 index 00000000..0c8347d4 --- /dev/null +++ b/python/src/numerous/generated/graphql/collection_file_tag_add.py @@ -0,0 +1,27 @@ +# Generated by ariadne-codegen +# Source: queries.gql + +from typing import Literal, Union + +from pydantic import Field + +from .base_model import BaseModel +from .fragments import CollectionFileReference + + +class CollectionFileTagAdd(BaseModel): + collection_file_tag_add: Union[ + "CollectionFileTagAddCollectionFileTagAddCollectionFile", + "CollectionFileTagAddCollectionFileTagAddCollectionFileNotFound", + ] = Field(alias="collectionFileTagAdd", discriminator="typename__") + + +class CollectionFileTagAddCollectionFileTagAddCollectionFile(CollectionFileReference): + typename__: Literal["CollectionFile"] = Field(alias="__typename") + + +class CollectionFileTagAddCollectionFileTagAddCollectionFileNotFound(BaseModel): + typename__: Literal["CollectionFileNotFound"] = Field(alias="__typename") + + +CollectionFileTagAdd.model_rebuild() diff --git a/python/src/numerous/generated/graphql/collection_file_tag_delete.py b/python/src/numerous/generated/graphql/collection_file_tag_delete.py new file mode 100644 index 00000000..5dce0558 --- /dev/null +++ b/python/src/numerous/generated/graphql/collection_file_tag_delete.py @@ -0,0 +1,29 @@ +# Generated by ariadne-codegen +# Source: queries.gql + +from typing import Literal, Union + +from pydantic import Field + +from .base_model import BaseModel +from .fragments import CollectionFileReference + + +class CollectionFileTagDelete(BaseModel): + collection_file_tag_delete: Union[ + "CollectionFileTagDeleteCollectionFileTagDeleteCollectionFile", + "CollectionFileTagDeleteCollectionFileTagDeleteCollectionFileNotFound", + ] = Field(alias="collectionFileTagDelete", discriminator="typename__") + + +class CollectionFileTagDeleteCollectionFileTagDeleteCollectionFile( + CollectionFileReference +): + typename__: Literal["CollectionFile"] = Field(alias="__typename") + + +class CollectionFileTagDeleteCollectionFileTagDeleteCollectionFileNotFound(BaseModel): + typename__: Literal["CollectionFileNotFound"] = Field(alias="__typename") + + +CollectionFileTagDelete.model_rebuild() diff --git a/python/src/numerous/generated/graphql/collection_files.py b/python/src/numerous/generated/graphql/collection_files.py new file mode 100644 index 00000000..036b73e4 --- /dev/null +++ b/python/src/numerous/generated/graphql/collection_files.py @@ -0,0 +1,58 @@ +# Generated by ariadne-codegen +# Source: queries.gql + +from typing import Annotated, List, Literal, Optional, Union + +from pydantic import Field + +from .base_model import BaseModel +from .fragments import CollectionFileReference + + +class CollectionFiles(BaseModel): + collection: Optional[ + Annotated[ + Union[ + "CollectionFilesCollectionCollection", + "CollectionFilesCollectionCollectionNotFound", + ], + Field(discriminator="typename__"), + ] + ] + + +class CollectionFilesCollectionCollection(BaseModel): + typename__: Literal["Collection"] = Field(alias="__typename") + id: str + key: str + files: "CollectionFilesCollectionCollectionFiles" + + 
+class CollectionFilesCollectionCollectionFiles(BaseModel): + edges: List["CollectionFilesCollectionCollectionFilesEdges"] + page_info: "CollectionFilesCollectionCollectionFilesPageInfo" = Field( + alias="pageInfo" + ) + + +class CollectionFilesCollectionCollectionFilesEdges(BaseModel): + node: "CollectionFilesCollectionCollectionFilesEdgesNode" + + +class CollectionFilesCollectionCollectionFilesEdgesNode(CollectionFileReference): + typename__: Literal["CollectionFile"] = Field(alias="__typename") + + +class CollectionFilesCollectionCollectionFilesPageInfo(BaseModel): + has_next_page: bool = Field(alias="hasNextPage") + end_cursor: Optional[str] = Field(alias="endCursor") + + +class CollectionFilesCollectionCollectionNotFound(BaseModel): + typename__: Literal["CollectionNotFound"] = Field(alias="__typename") + + +CollectionFiles.model_rebuild() +CollectionFilesCollectionCollection.model_rebuild() +CollectionFilesCollectionCollectionFiles.model_rebuild() +CollectionFilesCollectionCollectionFilesEdges.model_rebuild() diff --git a/python/src/numerous/generated/graphql/fragments.py b/python/src/numerous/generated/graphql/fragments.py index 58006f08..30c69e61 100644 --- a/python/src/numerous/generated/graphql/fragments.py +++ b/python/src/numerous/generated/graphql/fragments.py @@ -1,7 +1,9 @@ # Generated by ariadne-codegen # Source: queries.gql -from typing import Any, List +from typing import Any, List, Optional + +from pydantic import Field from .base_model import BaseModel @@ -18,6 +20,23 @@ class CollectionDocumentReferenceTags(BaseModel): value: str +class CollectionFileNotFound(BaseModel): + id: str + + +class CollectionFileReference(BaseModel): + id: str + key: str + download_url: Optional[str] = Field(alias="downloadURL") + upload_url: Optional[str] = Field(alias="uploadURL") + tags: List["CollectionFileReferenceTags"] + + +class CollectionFileReferenceTags(BaseModel): + key: str + value: str + + class CollectionNotFound(BaseModel): id: str @@ -28,5 +47,7 @@ class CollectionReference(BaseModel): CollectionDocumentReference.model_rebuild() +CollectionFileNotFound.model_rebuild() +CollectionFileReference.model_rebuild() CollectionNotFound.model_rebuild() CollectionReference.model_rebuild() diff --git a/python/src/numerous/user.py b/python/src/numerous/user.py index 2d4c7bde..642e8414 100644 --- a/python/src/numerous/user.py +++ b/python/src/numerous/user.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import Any, Optional -from numerous.collection import NumerousCollection, collection +from numerous.collection import CollectionReference, collection from numerous.collection._client import Client @@ -23,7 +23,7 @@ class User: _client: Optional[Client] = None @property - def collection(self) -> NumerousCollection: + def collection(self) -> CollectionReference: """ A user's collection. 
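
A minimal usage sketch of the file API introduced above, pieced together from the calls exercised in the new test_collection_files.py further down. The collection and file keys here are placeholders, and constructing the collection without an explicit client is an assumption: the tests build a GraphQLClient and pass it as the second argument, with credentials read from the NUMEROUS_* environment variables.

    from numerous import collection

    col = collection("my-collection")       # assumption: client resolved implicitly; tests pass one explicitly

    file = col.file("report.csv")           # create or look up a file reference by key
    file.save("timestamp;value\n0;1.0")     # str payload; bytes payloads are accepted as well

    text = file.read_text()                 # download contents as text
    data = file.read_bytes()                # download contents as bytes
    with file.open() as fd:                 # or read through a file-like object
        payload = fd.read()

    file.tag("source", "sensor-1")          # attach a tag to the file
    print(file.tags)                        # tags are returned as a dict of key -> value
    matching = list(col.files(tag_key="source", tag_value="sensor-1"))  # tag-filtered listing
    file.tag_delete("source")

    col.save_file("summary.txt", "done")    # convenience upload directly on the collection
    file.delete()                           # remove the file
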
diff --git a/python/tests/test_collection_documents.py b/python/tests/test_collection_documents.py new file mode 100644 index 00000000..37215925 --- /dev/null +++ b/python/tests/test_collection_documents.py @@ -0,0 +1,462 @@ +from unittest.mock import Mock, call + +import pytest + +from numerous import collection +from numerous._client._graphql_client import COLLECTED_OBJECTS_NUMBER, GraphQLClient +from numerous.collection.document_reference import DocumentReference +from numerous.generated.graphql.client import Client as GQLClient +from numerous.generated.graphql.collection_collections import CollectionCollections +from numerous.generated.graphql.collection_create import CollectionCreate +from numerous.generated.graphql.collection_document import CollectionDocument +from numerous.generated.graphql.collection_document_delete import ( + CollectionDocumentDelete, +) +from numerous.generated.graphql.collection_document_set import CollectionDocumentSet +from numerous.generated.graphql.collection_document_tag_add import ( + CollectionDocumentTagAdd, +) +from numerous.generated.graphql.collection_document_tag_delete import ( + CollectionDocumentTagDelete, +) +from numerous.generated.graphql.collection_documents import CollectionDocuments +from numerous.generated.graphql.input_types import TagInput +from numerous.jsonbase64 import dict_to_base64 + + +ORGANIZATION_ID = "test_org" +COLLECTION_NAME = "test_collection" +COLLECTION_REFERENCE_KEY = "test_key" +COLLECTION_REFERENCE_ID = "test_id" +COLLECTION_DOCUMENT_KEY = "test_document" +DOCUMENT_DATA = {"test": "test"} +BASE64_DOCUMENT_DATA = dict_to_base64(DOCUMENT_DATA) +DOCUMENT_ID = "915b75c5-9e95-4fa7-aaa2-2214c8d251ce" +HEADERS_WITH_AUTHORIZATION = {"headers": {"Authorization": "Bearer token"}} + + +def _collection_create_collection_reference(key: str, ref_id: str) -> CollectionCreate: + return CollectionCreate.model_validate( + {"collectionCreate": {"typename__": "Collection", "key": key, "id": ref_id}} + ) + + +def _collection_document_set_reference(key: str) -> CollectionDocumentSet: + return CollectionDocumentSet.model_validate( + { + "collectionDocumentSet": { + "__typename": "CollectionDocument", + "id": DOCUMENT_ID, + "key": key, + "data": BASE64_DOCUMENT_DATA, + "tags": [], + } + } + ) + + +def _collection_document_tag_delete_found(_id: str) -> CollectionDocumentTagDelete: + return CollectionDocumentTagDelete.model_validate( + { + "collectionDocumentTagDelete": { + "__typename": "CollectionDocument", + "id": _id, + "key": "t21", + "data": BASE64_DOCUMENT_DATA, + "tags": [], + } + } + ) + + +def _collection_document_tag_add_found(_id: str) -> CollectionDocumentTagAdd: + return CollectionDocumentTagAdd.model_validate( + { + "collectionDocumentTagAdd": { + "__typename": "CollectionDocument", + "id": _id, + "key": "t21", + "data": BASE64_DOCUMENT_DATA, + "tags": [{"key": "key", "value": "test"}], + } + } + ) + + +def _collection_document_delete_found(_id: str) -> CollectionDocumentDelete: + return CollectionDocumentDelete.model_validate( + { + "collectionDocumentDelete": { + "__typename": "CollectionDocument", + "id": _id, + "key": "t21", + "data": BASE64_DOCUMENT_DATA, + "tags": [], + } + } + ) + + +def _collection_collections(_id: str) -> CollectionCollections: + return CollectionCollections.model_validate( + { + "collection": { + "__typename": "Collection", + "id": "1a9299d1-5c81-44bb-b94f-ba40afc05f3a", + "key": "root_collection", + "collections": { + "edges": [ + { + "node": { + "__typename": "Collection", + "id": 
"496da1f7-5378-4962-8373-5c30663848cf", + "key": "collection0", + } + }, + { + "node": { + "__typename": "Collection", + "id": "6ae8ee18-8ebb-4206-aba1-8d2b44c22682", + "key": "collection1", + } + }, + { + "node": { + "__typename": "Collection", + "id": "deb5ee57-e4ba-470c-a913-a6a619e9661d", + "key": "collection2", + } + }, + ], + "pageInfo": { + "hasNextPage": "false", + "endCursor": "deb5ee57-e4ba-470c-a913-a6a619e9661d", + }, + }, + } + } + ) + + +def _collection_documents_reference(key: str) -> CollectionDocuments: + return CollectionDocuments.model_validate( + { + "collection": { + "__typename": "Collection", + "id": "0d2f82fa-1546-49a4-a034-3392eefc3e4e", + "key": "t1", + "documents": { + "edges": [ + { + "node": { + "__typename": "CollectionDocument", + "id": "10634601-67b5-4015-840c-155d9faf9591", + "key": key, + "data": "ewogICJoZWxsbyI6ICJ3b3JsZCIKfQ==", + "tags": [{"key": "key", "value": "test"}], + } + }, + { + "node": { + "__typename": "CollectionDocument", + "id": "915b75c5-9e95-4fa7-aaa2-2214c8d251ce", + "key": key + "1", + "data": "ewogICJoZWxsbyI6ICJ3b3JsZCIKfQ==", + "tags": [], + } + }, + ], + "pageInfo": { + "hasNextPage": "false", + "endCursor": "915b75c5-9e95-4fa7-aaa2-2214c8d251ce", + }, + }, + } + } + ) + + +def _collection_document_reference(key: str) -> CollectionDocument: + return CollectionDocument.model_validate( + { + "collection": { + "__typename": "Collection", + "document": { + "__typename": "CollectionDocument", + "id": DOCUMENT_ID, + "key": key, + "data": BASE64_DOCUMENT_DATA, + "tags": [], + }, + } + } + ) + + +def _collection_create_collection_not_found(ref_id: str) -> CollectionCreate: + return CollectionCreate.model_validate( + {"collectionCreate": {"typename__": "CollectionNotFound", "id": ref_id}} + ) + + +@pytest.fixture(autouse=True) +def _set_env_vars(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("NUMEROUS_API_URL", "url_value") + monkeypatch.setenv("NUMEROUS_ORGANIZATION_ID", ORGANIZATION_ID) + monkeypatch.setenv("NUMEROUS_API_ACCESS_TOKEN", "token") + + +def test_collection_document_returns_new_document() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + test_collection = collection(COLLECTION_NAME, _client) + + document = test_collection.document(COLLECTION_DOCUMENT_KEY) + + gql.collection_document.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_DOCUMENT_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + assert isinstance(document, DocumentReference) + assert document.exists is False + + +def test_collection_document_returns_existing_document() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_document.return_value = _collection_document_reference( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + + document = test_collection.document(COLLECTION_DOCUMENT_KEY) + + gql.collection_document.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_DOCUMENT_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + assert isinstance(document, DocumentReference) + assert document.exists + + +def test_collection_document_set_data_uploads_document() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = 
_collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_document_set.return_value = _collection_document_set_reference( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + document = test_collection.document(COLLECTION_DOCUMENT_KEY) + assert isinstance(document, DocumentReference) + assert document.exists is False + + document.set({"test": "test"}) + + gql.collection_document_set.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_DOCUMENT_KEY, + BASE64_DOCUMENT_DATA, + **HEADERS_WITH_AUTHORIZATION, + ) + assert document.exists + + +def test_collection_document_get_returns_dict() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_document.return_value = _collection_document_reference( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + document = test_collection.document(COLLECTION_DOCUMENT_KEY) + + data = document.get() + + assert isinstance(document, DocumentReference) + gql.collection_document.assert_has_calls( + [ + call( + COLLECTION_REFERENCE_ID, + COLLECTION_DOCUMENT_KEY, + **HEADERS_WITH_AUTHORIZATION, + ), + call( + COLLECTION_REFERENCE_ID, + COLLECTION_DOCUMENT_KEY, + **HEADERS_WITH_AUTHORIZATION, + ), + ] + ) + assert document.exists + assert data == DOCUMENT_DATA + + +def test_collection_document_delete_marks_document_exists_false() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_document.return_value = _collection_document_reference( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + document = test_collection.document(COLLECTION_DOCUMENT_KEY) + assert document.document_id is not None + gql.collection_document_delete.return_value = _collection_document_delete_found( + document.document_id + ) + assert document.exists + + document.delete() + + gql.collection_document_delete.assert_called_once_with( + DOCUMENT_ID, **HEADERS_WITH_AUTHORIZATION + ) + assert document.exists is False + + +def test_collection_document_tag_add() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_document.return_value = _collection_document_reference( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + document = test_collection.document(COLLECTION_DOCUMENT_KEY) + assert document.document_id is not None + gql.collection_document_tag_add.return_value = _collection_document_tag_add_found( + document.document_id + ) + assert document.exists + + document.tag("key", "test") + + gql.collection_document_tag_add.assert_called_once_with( + DOCUMENT_ID, TagInput(key="key", value="test"), **HEADERS_WITH_AUTHORIZATION + ) + assert document.tags == {"key": "test"} + + +def test_collection_document_tag_delete() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_document.return_value = _collection_document_reference( + COLLECTION_DOCUMENT_KEY + ) 
+ test_collection = collection(COLLECTION_NAME, _client) + document = test_collection.document(COLLECTION_DOCUMENT_KEY) + assert document.document_id is not None + gql.collection_document_tag_add.return_value = _collection_document_tag_add_found( + document.document_id + ) + gql.collection_document_tag_delete.return_value = ( + _collection_document_tag_delete_found(document.document_id) + ) + assert document.exists + document.tag("key", "test") + assert document.tags == {"key": "test"} + + document.tag_delete("key") + + assert document.tags == {} + gql.collection_document_tag_delete.assert_called_once_with( + DOCUMENT_ID, "key", **HEADERS_WITH_AUTHORIZATION + ) + + +def test_collection_documents_return_more_than_one() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_documents.return_value = _collection_documents_reference( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + + result = [] + expected_number_of_documents = 2 + for document in test_collection.documents(): + assert document.exists + result.append(document) + + assert len(result) == expected_number_of_documents + gql.collection_documents.assert_called_once_with( + COLLECTION_REFERENCE_ID, + None, + after="", + first=COLLECTED_OBJECTS_NUMBER, + **HEADERS_WITH_AUTHORIZATION, + ) + + +def test_collection_documents_query_tag_specific_document() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_documents.return_value = _collection_documents_reference( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + + tag_key = "key" + tag_value = "value" + for document in test_collection.documents(tag_key=tag_key, tag_value=tag_value): + assert document.exists + + gql.collection_documents.assert_called_once_with( + COLLECTION_REFERENCE_ID, + TagInput(key=tag_key, value=tag_value), + after="", + first=COLLECTED_OBJECTS_NUMBER, + **HEADERS_WITH_AUTHORIZATION, + ) + + +def test_collection_collections_return_more_than_one() -> None: + gql = Mock(GQLClient) + _client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_collections.return_value = _collection_collections( + COLLECTION_DOCUMENT_KEY + ) + test_collection = collection(COLLECTION_NAME, _client) + result = [] + expected_number_of_collections = 3 + for collection_element in test_collection.collections(): + assert collection_element.key + result.append(collection_element) + + assert len(result) == expected_number_of_collections + gql.collection_collections.assert_called_once_with( + COLLECTION_REFERENCE_ID, + after="", + first=COLLECTED_OBJECTS_NUMBER, + **HEADERS_WITH_AUTHORIZATION, + ) diff --git a/python/tests/test_collection_files.py b/python/tests/test_collection_files.py new file mode 100644 index 00000000..c3f54469 --- /dev/null +++ b/python/tests/test_collection_files.py @@ -0,0 +1,577 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Generator +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from numerous import collection +from numerous._client._graphql_client import COLLECTED_OBJECTS_NUMBER, GraphQLClient +from 
numerous.collection.file_reference import FileReference +from numerous.generated.graphql.client import Client as GQLClient +from numerous.generated.graphql.collection_create import CollectionCreate +from numerous.generated.graphql.collection_file import CollectionFile +from numerous.generated.graphql.collection_file_create import CollectionFileCreate +from numerous.generated.graphql.collection_file_delete import CollectionFileDelete +from numerous.generated.graphql.collection_file_tag_add import CollectionFileTagAdd +from numerous.generated.graphql.collection_file_tag_delete import ( + CollectionFileTagDelete, +) +from numerous.generated.graphql.collection_files import CollectionFiles +from numerous.generated.graphql.input_types import TagInput +from numerous.jsonbase64 import dict_to_base64 + + +if TYPE_CHECKING: + from pathlib import Path + + +ORGANIZATION_ID = "test-org-id" +COLLECTION_KEY = "test-collection-key" +NESTED_COLLECTION_ID = "nested_test_collection" +COLLECTION_REFERENCE_KEY = "test_key" +COLLECTION_REFERENCE_ID = "test_id" +NESTED_COLLECTION_REFERENCE_KEY = "nested_test_key" +NESTED_COLLECTION_REFERENCE_ID = "nested_test_id" +COLLECTION_DOCUMENT_KEY = "test_document" +COLLECTION_FILE_KEY = "test-file.txt" +DOCUMENT_DATA = {"test": "test"} +BASE64_DOCUMENT_DATA = dict_to_base64(DOCUMENT_DATA) +TEST_FILE_ID = "ce5aba38-842d-4ee0-877b-4af9d426c848" +HEADERS_WITH_AUTHORIZATION = {"headers": {"Authorization": "Bearer token"}} +_REQUEST_TIMEOUT_SECONDS = 1.5 + + +TEST_DOWNLOAD_URL = "http://127.0.0.1:8082/download/collection_files/" + TEST_FILE_ID +TEST_UPLOAD_URL = "http://127.0.0.1:8082/upload/collection_files/" + TEST_FILE_ID + + +TEST_FILE_TEXT_CONTENT = "File content 1;2;3;4;\n1;2;3;4" +TEST_FILE_BYTES_CONTENT = TEST_FILE_TEXT_CONTENT.encode() + + +def _collection_create_collection_reference(key: str, ref_id: str) -> CollectionCreate: + return CollectionCreate.model_validate( + {"collectionCreate": {"typename__": "Collection", "key": key, "id": ref_id}} + ) + + +def _collection_file_tag_delete_found(file_id: str) -> CollectionFileTagDelete: + return CollectionFileTagDelete.model_validate( + { + "collectionFileTagDelete": _collection_file_data( + file_id, + "t22", + "http://127.0.0.1:8082/download/collection_files/0ac6436b-f044-4616-97c6-2bb5a8dbf7a1", + "http://127.0.0.1:8082/upload/collection_files/0ac6436b-f044-4616-97c6-2bb5a8dbf7a1", + ) + } + ) + + +def _collection_file_tag_add_found(file_id: str) -> CollectionFileTagAdd: + return CollectionFileTagAdd.model_validate( + { + "collectionFileTagAdd": _collection_file_data( + file_id, + "t22", + TEST_DOWNLOAD_URL, + TEST_UPLOAD_URL, + tags={"key": "test"}, + ) + } + ) + + +def _collection_file_delete_found(file_id: str) -> CollectionFileDelete: + return CollectionFileDelete.model_validate( + { + "collectionFileDelete": _collection_file_data( + file_id, + "t21", + TEST_DOWNLOAD_URL, + TEST_UPLOAD_URL, + ) + } + ) + + +def _collection_files_reference() -> CollectionFiles: + return CollectionFiles.model_validate( + { + "collection": { + "__typename": "Collection", + "id": "0d2f82fa-1546-49a4-a034-3392eefc3e4e", + "key": "t1", + "files": { + "edges": [ + { + "node": _collection_file_data( + "0ac6436b-f044-4616-97c6-2bb5a8dbf7a1", + "t22", + TEST_DOWNLOAD_URL, + TEST_UPLOAD_URL, + ) + }, + { + "node": _collection_file_data( + "14ea9afd-41ba-42eb-8a55-314d161e32c6", + "t21", + "http://127.0.0.1:8082/download/collection_files/14ea9afd-41ba-42eb-8a55-314d161e32c6", + 
"http://127.0.0.1:8082/upload/collection_files/14ea9afd-41ba-42eb-8a55-314d161e32c6", + ), + }, + ], + "pageInfo": { + "hasNextPage": "false", + "endCursor": "14ea9afd-41ba-42eb-8a55-314d161e32c6", + }, + }, + } + } + ) + + +def _collection_file_reference( + key: str, tags: dict[str, str] | None = None +) -> CollectionFile: + return CollectionFile.model_validate( + { + "collectionFile": _collection_file_data( + TEST_FILE_ID, key, TEST_DOWNLOAD_URL, TEST_UPLOAD_URL, tags + ) + } + ) + + +def _collection_file_reference_not_found() -> CollectionFile: + return CollectionFile.model_validate( + {"collectionFile": {"__typename": "CollectionFileNotFound", "id": TEST_FILE_ID}} + ) + + +def _collection_file_create_reference(key: str) -> CollectionFileCreate: + return CollectionFileCreate.model_validate( + { + "collectionFileCreate": _collection_file_data( + TEST_FILE_ID, key, TEST_DOWNLOAD_URL, TEST_UPLOAD_URL + ) + } + ) + + +def _collection_file_reference_no_urls(key: str) -> CollectionFileCreate: + return CollectionFileCreate.model_validate( + {"collectionFileCreate": _collection_file_data(TEST_FILE_ID, key)} + ) + + +def _collection_file_data( + file_id: str, + key: str, + download_url: str | None = None, + upload_url: str | None = None, + tags: dict[str, str] | None = None, +) -> dict[str, Any]: + return { + "__typename": "CollectionFile", + "id": file_id, + "key": key, + "downloadURL": download_url or "", + "uploadURL": upload_url or "", + "tags": [{"key": key, "value": value} for key, value in tags.items()] + if tags + else [], + } + + +@pytest.fixture +def mock_get() -> Generator[MagicMock, None, None]: + with patch("requests.get") as m: + yield m + + +@pytest.fixture +def mock_put() -> Generator[MagicMock, None, None]: + with patch("requests.put") as m: + yield m + + +@pytest.fixture(autouse=True) +def _set_env_vars(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setenv("NUMEROUS_API_URL", "url_value") + monkeypatch.setenv("NUMEROUS_ORGANIZATION_ID", ORGANIZATION_ID) + monkeypatch.setenv("NUMEROUS_API_ACCESS_TOKEN", "token") + + +@pytest.fixture +def base_path(tmp_path: Path) -> Path: + return tmp_path + + +def test_exists_is_true_when_file_exists_and_has_download_url() -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_reference_no_urls( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + + gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + assert file.exists is True + + +def test_file_returns_file_exists_after_load(mock_get: MagicMock) -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + mock_get.return_value.status_code = 200 + mock_get.return_value.content = TEST_FILE_BYTES_CONTENT + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + + 
gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + assert file.exists is True + + +def test_read_file_returns_expected_text( + mock_get: MagicMock, +) -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + mock_get.return_value.status_code = 200 + mock_get.return_value.text = TEST_FILE_TEXT_CONTENT + + col = collection(COLLECTION_KEY, client) + + file = col.file(COLLECTION_FILE_KEY) + text = file.read_text() + + mock_get.assert_called_once_with( + TEST_DOWNLOAD_URL, timeout=_REQUEST_TIMEOUT_SECONDS + ) + gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + assert text == TEST_FILE_TEXT_CONTENT + + +def test_read_bytes_returns_expected_bytes(mock_get: MagicMock) -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + mock_get.return_value.status_code = 200 + mock_get.return_value.content = TEST_FILE_BYTES_CONTENT + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + bytes_data = file.read_bytes() + + mock_get.assert_called_once_with( + TEST_DOWNLOAD_URL, timeout=_REQUEST_TIMEOUT_SECONDS + ) + gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + + assert bytes_data == TEST_FILE_BYTES_CONTENT + + +def test_open_read_returns_expected_file_content( + mock_get: MagicMock, +) -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + mock_get.return_value.status_code = 200 + mock_get.return_value.content = TEST_FILE_BYTES_CONTENT + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + with file.open() as fd: + bytes_data = fd.read() + + mock_get.assert_called_once_with( + TEST_DOWNLOAD_URL, timeout=_REQUEST_TIMEOUT_SECONDS + ) + gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + + assert bytes_data == TEST_FILE_BYTES_CONTENT + + +def test_save_with_bytes_makes_put_request(mock_put: MagicMock) -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + 
mock_put.return_value.status_code = 200 + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + file.save(TEST_FILE_BYTES_CONTENT) + + mock_put.assert_called_once_with( + TEST_UPLOAD_URL, + timeout=_REQUEST_TIMEOUT_SECONDS, + headers={ + "Content-Type": "application/octet-stream", + "Content-Length": str(len(TEST_FILE_BYTES_CONTENT)), + }, + data=TEST_FILE_BYTES_CONTENT, + ) + gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + + assert isinstance(file, FileReference) + + +def test_save_makes_expected_put_request(mock_put: MagicMock) -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + mock_put.return_value.status_code = 200 + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + file.save(TEST_FILE_TEXT_CONTENT) + + mock_put.assert_called_once_with( + TEST_UPLOAD_URL, + timeout=_REQUEST_TIMEOUT_SECONDS, + headers={ + "Content-Type": "text/plain", + "Content-Length": str(len(TEST_FILE_TEXT_CONTENT)), + }, + data=TEST_FILE_BYTES_CONTENT, + ) + gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + + +def test_save_file_makes_expected_put_request(mock_put: MagicMock) -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file.return_value = _collection_file_reference(COLLECTION_FILE_KEY) + mock_put.return_value.status_code = 200 + + col = collection(COLLECTION_KEY, client) + col.save_file(COLLECTION_FILE_KEY, TEST_FILE_TEXT_CONTENT) + + mock_put.assert_called_once_with( + TEST_UPLOAD_URL, + timeout=_REQUEST_TIMEOUT_SECONDS, + headers={ + "Content-Type": "text/plain", + "Content-Length": str(len(TEST_FILE_TEXT_CONTENT)), + }, + data=TEST_FILE_BYTES_CONTENT, + ) + gql.collection_file_create.assert_called_once_with( + COLLECTION_REFERENCE_ID, + COLLECTION_FILE_KEY, + **HEADERS_WITH_AUTHORIZATION, + ) + + +def test_delete_calls_expected_mutation() -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file_delete.return_value = _collection_file_delete_found( + TEST_FILE_ID + ) + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + file.delete() + + gql.collection_file_delete.assert_called_once_with( + TEST_FILE_ID, **HEADERS_WITH_AUTHORIZATION + ) + + +def test_collection_files_makes_expected_query_and_returns_expected_file_count() -> ( + None +): + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_files.return_value = 
_collection_files_reference() + + col = collection(COLLECTION_KEY, client) + result = list(col.files()) + + expected_number_of_files = 2 + assert len(result) == expected_number_of_files + gql.collection_files.assert_called_once_with( + COLLECTION_REFERENCE_ID, + None, + after="", + first=COLLECTED_OBJECTS_NUMBER, + **HEADERS_WITH_AUTHORIZATION, + ) + + +def test_tag_add_makes_expected_mutation() -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file_tag_add.return_value = _collection_file_tag_add_found( + TEST_FILE_ID + ) + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + file.tag("key", "test") + + gql.collection_file_tag_add.assert_called_once_with( + TEST_FILE_ID, TagInput(key="key", value="test"), **HEADERS_WITH_AUTHORIZATION + ) + + +def test_tag_delete_makes_expected_mutation() -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + gql.collection_file_tag_delete.return_value = _collection_file_tag_delete_found( + TEST_FILE_ID + ) + tag_key = "key" + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + file.tag_delete(tag_key) + + gql.collection_file_tag_delete.assert_called_once_with( + TEST_FILE_ID, tag_key, **HEADERS_WITH_AUTHORIZATION + ) + + +def test_collection_files_passes_tag_filter_on_to_client() -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_files.return_value = _collection_files_reference() + tag_key = "key" + tag_value = "value" + + col = collection(COLLECTION_KEY, client) + list(col.files(tag_key=tag_key, tag_value=tag_value)) + + gql.collection_files.assert_called_once_with( + COLLECTION_REFERENCE_ID, + TagInput(key=tag_key, value=tag_value), + after="", + first=COLLECTED_OBJECTS_NUMBER, + **HEADERS_WITH_AUTHORIZATION, + ) + + +def test_tags_property_queries_and_returns_expected_tags() -> None: + gql = Mock(GQLClient) + client = GraphQLClient(gql) + gql.collection_create.return_value = _collection_create_collection_reference( + COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID + ) + gql.collection_file_create.return_value = _collection_file_create_reference( + COLLECTION_FILE_KEY + ) + expected_tags = {"tag_1_key": "tag_1_value", "tag_2_key": "tag_2_value"} + gql.collection_file.return_value = _collection_file_reference( + COLLECTION_FILE_KEY, tags=expected_tags + ) + + col = collection(COLLECTION_KEY, client) + file = col.file(COLLECTION_FILE_KEY) + tags = file.tags + + assert tags == expected_tags diff --git a/python/tests/test_collections.py b/python/tests/test_collections.py index 3db05617..cbb3935d 100644 --- a/python/tests/test_collections.py +++ b/python/tests/test_collections.py @@ -1,28 +1,12 @@ -from unittest.mock import Mock, call +from unittest.mock import Mock import pytest from numerous import collection -from numerous._client._graphql_client import COLLECTED_OBJECTS_NUMBER, GraphQLClient +from 
numerous._client._graphql_client import GraphQLClient from numerous.collection.exceptions import ParentCollectionNotFoundError -from numerous.collection.numerous_document import NumerousDocument from numerous.generated.graphql.client import Client as GQLClient -from numerous.generated.graphql.collection_collections import CollectionCollections from numerous.generated.graphql.collection_create import CollectionCreate -from numerous.generated.graphql.collection_document import CollectionDocument -from numerous.generated.graphql.collection_document_delete import ( - CollectionDocumentDelete, -) -from numerous.generated.graphql.collection_document_set import CollectionDocumentSet -from numerous.generated.graphql.collection_document_tag_add import ( - CollectionDocumentTagAdd, -) -from numerous.generated.graphql.collection_document_tag_delete import ( - CollectionDocumentTagDelete, -) -from numerous.generated.graphql.collection_documents import CollectionDocuments -from numerous.generated.graphql.input_types import TagInput -from numerous.jsonbase64 import dict_to_base64 ORGANIZATION_ID = "test_org" @@ -32,10 +16,6 @@ COLLECTION_REFERENCE_ID = "test_id" NESTED_COLLECTION_REFERENCE_KEY = "nested_test_key" NESTED_COLLECTION_REFERENCE_ID = "nested_test_id" -COLLECTION_DOCUMENT_KEY = "test_document" -DOCUMENT_DATA = {"test": "test"} -BASE64_DOCUMENT_DATA = dict_to_base64(DOCUMENT_DATA) -DOCUMENT_ID = "915b75c5-9e95-4fa7-aaa2-2214c8d251ce" HEADERS_WITH_AUTHORIZATION = {"headers": {"Authorization": "Bearer token"}} @@ -45,158 +25,6 @@ def _collection_create_collection_reference(key: str, ref_id: str) -> Collection ) -def _collection_document_set_reference(key: str) -> CollectionDocumentSet: - return CollectionDocumentSet.model_validate( - { - "collectionDocumentSet": { - "__typename": "CollectionDocument", - "id": DOCUMENT_ID, - "key": key, - "data": BASE64_DOCUMENT_DATA, - "tags": [], - } - } - ) - - -def _collection_document_tag_delete_found(_id: str) -> CollectionDocumentTagDelete: - return CollectionDocumentTagDelete.model_validate( - { - "collectionDocumentTagDelete": { - "__typename": "CollectionDocument", - "id": _id, - "key": "t21", - "data": BASE64_DOCUMENT_DATA, - "tags": [], - } - } - ) - - -def _collection_document_tag_add_found(_id: str) -> CollectionDocumentTagAdd: - return CollectionDocumentTagAdd.model_validate( - { - "collectionDocumentTagAdd": { - "__typename": "CollectionDocument", - "id": _id, - "key": "t21", - "data": BASE64_DOCUMENT_DATA, - "tags": [{"key": "key", "value": "test"}], - } - } - ) - - -def _collection_document_delete_found(_id: str) -> CollectionDocumentDelete: - return CollectionDocumentDelete.model_validate( - { - "collectionDocumentDelete": { - "__typename": "CollectionDocument", - "id": _id, - "key": "t21", - "data": BASE64_DOCUMENT_DATA, - "tags": [], - } - } - ) - - -def _collection_collections(_id: str) -> CollectionCollections: - return CollectionCollections.model_validate( - { - "collection": { - "__typename": "Collection", - "id": "1a9299d1-5c81-44bb-b94f-ba40afc05f3a", - "key": "root_collection", - "collections": { - "edges": [ - { - "node": { - "__typename": "Collection", - "id": "496da1f7-5378-4962-8373-5c30663848cf", - "key": "collection0", - } - }, - { - "node": { - "__typename": "Collection", - "id": "6ae8ee18-8ebb-4206-aba1-8d2b44c22682", - "key": "collection1", - } - }, - { - "node": { - "__typename": "Collection", - "id": "deb5ee57-e4ba-470c-a913-a6a619e9661d", - "key": "collection2", - } - }, - ], - "pageInfo": { - "hasNextPage": "false", - 
"endCursor": "deb5ee57-e4ba-470c-a913-a6a619e9661d", - }, - }, - } - } - ) - - -def _collection_documents_reference(key: str) -> CollectionDocuments: - return CollectionDocuments.model_validate( - { - "collection": { - "__typename": "Collection", - "id": "0d2f82fa-1546-49a4-a034-3392eefc3e4e", - "key": "t1", - "documents": { - "edges": [ - { - "node": { - "__typename": "CollectionDocument", - "id": "10634601-67b5-4015-840c-155d9faf9591", - "key": key, - "data": "ewogICJoZWxsbyI6ICJ3b3JsZCIKfQ==", - "tags": [{"key": "key", "value": "test"}], - } - }, - { - "node": { - "__typename": "CollectionDocument", - "id": "915b75c5-9e95-4fa7-aaa2-2214c8d251ce", - "key": key + "1", - "data": "ewogICJoZWxsbyI6ICJ3b3JsZCIKfQ==", - "tags": [], - } - }, - ], - "pageInfo": { - "hasNextPage": "false", - "endCursor": "915b75c5-9e95-4fa7-aaa2-2214c8d251ce", - }, - }, - } - } - ) - - -def _collection_document_reference(key: str) -> CollectionDocument: - return CollectionDocument.model_validate( - { - "collection": { - "__typename": "Collection", - "document": { - "__typename": "CollectionDocument", - "id": DOCUMENT_ID, - "key": key, - "data": BASE64_DOCUMENT_DATA, - "tags": [], - }, - } - } - ) - - def _collection_create_collection_not_found(ref_id: str) -> CollectionCreate: return CollectionCreate.model_validate( {"collectionCreate": {"typename__": "CollectionNotFound", "id": ref_id}} @@ -261,259 +89,3 @@ def test_nested_collection_not_found_raises_parent_not_found_error() -> None: result.collection(NESTED_COLLECTION_REFERENCE_KEY) assert exc_info.value.collection_id == COLLECTION_REFERENCE_ID - - -def test_collection_document_returns_new_document() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - test_collection = collection(COLLECTION_NAME, _client) - - document = test_collection.document(COLLECTION_DOCUMENT_KEY) - - gql.collection_document.assert_called_once_with( - COLLECTION_REFERENCE_ID, - COLLECTION_DOCUMENT_KEY, - **HEADERS_WITH_AUTHORIZATION, - ) - assert isinstance(document, NumerousDocument) - assert document.exists is False - - -def test_collection_document_returns_existing_document() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_document.return_value = _collection_document_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - - document = test_collection.document(COLLECTION_DOCUMENT_KEY) - - gql.collection_document.assert_called_once_with( - COLLECTION_REFERENCE_ID, - COLLECTION_DOCUMENT_KEY, - **HEADERS_WITH_AUTHORIZATION, - ) - assert isinstance(document, NumerousDocument) - assert document.exists - - -def test_collection_document_set_data_uploads_document() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_document_set.return_value = _collection_document_set_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - document = test_collection.document(COLLECTION_DOCUMENT_KEY) - assert isinstance(document, NumerousDocument) - assert document.exists is False - - document.set({"test": "test"}) - - 
gql.collection_document_set.assert_called_once_with( - COLLECTION_REFERENCE_ID, - COLLECTION_DOCUMENT_KEY, - BASE64_DOCUMENT_DATA, - **HEADERS_WITH_AUTHORIZATION, - ) - assert document.exists - - -def test_collection_document_get_returns_dict() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_document.return_value = _collection_document_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - document = test_collection.document(COLLECTION_DOCUMENT_KEY) - - data = document.get() - - assert isinstance(document, NumerousDocument) - gql.collection_document.assert_has_calls( - [ - call( - COLLECTION_REFERENCE_ID, - COLLECTION_DOCUMENT_KEY, - **HEADERS_WITH_AUTHORIZATION, - ), - call( - COLLECTION_REFERENCE_ID, - COLLECTION_DOCUMENT_KEY, - **HEADERS_WITH_AUTHORIZATION, - ), - ] - ) - assert document.exists - assert data == DOCUMENT_DATA - - -def test_collection_document_delete_marks_document_exists_false() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_document.return_value = _collection_document_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - document = test_collection.document(COLLECTION_DOCUMENT_KEY) - assert document.document_id is not None - gql.collection_document_delete.return_value = _collection_document_delete_found( - document.document_id - ) - assert document.exists - - document.delete() - - gql.collection_document_delete.assert_called_once_with( - DOCUMENT_ID, **HEADERS_WITH_AUTHORIZATION - ) - assert document.exists is False - - -def test_collection_document_tag_add() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_document.return_value = _collection_document_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - document = test_collection.document(COLLECTION_DOCUMENT_KEY) - assert document.document_id is not None - gql.collection_document_tag_add.return_value = _collection_document_tag_add_found( - document.document_id - ) - assert document.exists - - document.tag("key", "test") - - gql.collection_document_tag_add.assert_called_once_with( - DOCUMENT_ID, TagInput(key="key", value="test"), **HEADERS_WITH_AUTHORIZATION - ) - assert document.tags == {"key": "test"} - - -def test_collection_document_tag_delete() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_document.return_value = _collection_document_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - document = test_collection.document(COLLECTION_DOCUMENT_KEY) - assert document.document_id is not None - gql.collection_document_tag_add.return_value = _collection_document_tag_add_found( - document.document_id - ) - gql.collection_document_tag_delete.return_value = ( - _collection_document_tag_delete_found(document.document_id) - ) - assert document.exists - document.tag("key", "test") - 
assert document.tags == {"key": "test"} - - document.tag_delete("key") - - assert document.tags == {} - gql.collection_document_tag_delete.assert_called_once_with( - DOCUMENT_ID, "key", **HEADERS_WITH_AUTHORIZATION - ) - - -def test_collection_documents_return_more_than_one() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_documents.return_value = _collection_documents_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - - result = [] - expected_number_of_documents = 2 - for document in test_collection.documents(): - assert document.exists - result.append(document) - - assert len(result) == expected_number_of_documents - gql.collection_documents.assert_called_once_with( - COLLECTION_REFERENCE_ID, - None, - after="", - first=COLLECTED_OBJECTS_NUMBER, - **HEADERS_WITH_AUTHORIZATION, - ) - - -def test_collection_documents_query_tag_specific_document() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_documents.return_value = _collection_documents_reference( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - - tag_key = "key" - tag_value = "value" - for document in test_collection.documents(tag_key=tag_key, tag_value=tag_value): - assert document.exists - - gql.collection_documents.assert_called_once_with( - COLLECTION_REFERENCE_ID, - TagInput(key=tag_key, value=tag_value), - after="", - first=COLLECTED_OBJECTS_NUMBER, - **HEADERS_WITH_AUTHORIZATION, - ) - - -def test_collection_collections_return_more_than_one() -> None: - gql = Mock(GQLClient) - _client = GraphQLClient(gql) - gql.collection_create.return_value = _collection_create_collection_reference( - COLLECTION_REFERENCE_KEY, COLLECTION_REFERENCE_ID - ) - gql.collection_collections.return_value = _collection_collections( - COLLECTION_DOCUMENT_KEY - ) - test_collection = collection(COLLECTION_NAME, _client) - result = [] - expected_number_of_collections = 3 - for collection_element in test_collection.collections(): - assert collection_element.key - result.append(collection_element) - - assert len(result) == expected_number_of_collections - gql.collection_collections.assert_called_once_with( - COLLECTION_REFERENCE_ID, - after="", - first=COLLECTED_OBJECTS_NUMBER, - **HEADERS_WITH_AUTHORIZATION, - ) diff --git a/python/tests/test_fs_client.py b/python/tests/test_fs_client.py index 7d734dbb..a5311de9 100644 --- a/python/tests/test_fs_client.py +++ b/python/tests/test_fs_client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from pathlib import Path from typing import Any @@ -15,7 +17,7 @@ _TEST_COLLECTION_KEY = "collection_key" -_TEST_COLLECTION_ID = _TEST_COLLECTION_KEY +TEST_COLLECTION_ID = _TEST_COLLECTION_KEY _TEST_NESTED_COLLECTION_KEY = "nested_collection_key" _TEST_NESTED_COLLECTION_ID = str( @@ -33,6 +35,9 @@ _TEST_DOCUMENT_KEY = "document_key" _TEST_ANOTHER_DOCUMENT_KEY = "another_document_key" +TEST_FILE_KEY = "file_key" +TEST_FILE_ID = "file_id" + @pytest.fixture def base_path(tmp_path: Path) -> Path: @@ -52,11 +57,11 @@ def test_get_document_returns_expected_existing_document_reference( {"key": "tag-1-key", "value": "tag-1-value"}, {"key": "tag-2-key", "value": "tag-2-value"}, ] - 
_create_test_file_system_document( + _create_test_document( base_path / _TEST_COLLECTION_KEY, _TEST_DOCUMENT_KEY, data=data, tags=tags ) - doc = client.get_collection_document(_TEST_COLLECTION_ID, _TEST_DOCUMENT_KEY) + doc = client.get_collection_document(TEST_COLLECTION_ID, _TEST_DOCUMENT_KEY) assert doc == CollectionDocumentReference( id=str(Path(_TEST_COLLECTION_KEY) / _TEST_DOCUMENT_KEY), @@ -77,7 +82,7 @@ def test_get_document_returns_expected_nested_existing_document_reference( {"key": "tag-1-key", "value": "tag-1-value"}, {"key": "tag-2-key", "value": "tag-2-value"}, ] - _create_test_file_system_document( + _create_test_document( base_path / _TEST_COLLECTION_KEY / _TEST_NESTED_COLLECTION_KEY, _TEST_DOCUMENT_KEY, data=data, @@ -102,7 +107,7 @@ def test_get_document_returns_expected_none_for_nonexisting_document( ) -> None: (base_path / _TEST_COLLECTION_KEY).mkdir() - doc = client.get_collection_document(_TEST_COLLECTION_ID, _TEST_DOCUMENT_KEY) + doc = client.get_collection_document(TEST_COLLECTION_ID, _TEST_DOCUMENT_KEY) assert doc is None @@ -115,12 +120,14 @@ def test_set_document_creates_expected_file( encoded_data = dict_to_base64(data) doc = client.set_collection_document( - _TEST_COLLECTION_ID, _TEST_DOCUMENT_KEY, encoded_data + TEST_COLLECTION_ID, _TEST_DOCUMENT_KEY, encoded_data ) assert doc is not None assert doc.data == dict_to_base64(data) - stored_doc_path = base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.json" + stored_doc_path = ( + base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.doc.json" + ) assert stored_doc_path.exists() is True assert stored_doc_path.read_text() == json.dumps({"data": data, "tags": []}) @@ -129,14 +136,14 @@ def test_delete_collection_document_removes_expected_file( client: FileSystemClient, base_path: Path ) -> None: data = {"field1": 123, "field2": "text"} - _create_test_file_system_document( + _create_test_document( base_path / _TEST_COLLECTION_KEY, _TEST_DOCUMENT_KEY, data=data, tags=[] ) - doc_id = str(Path(_TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY) + doc_id = str(Path(TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY) doc = client.delete_collection_document(doc_id) - doc_path = base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.json" + doc_path = base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.doc.json" assert doc_path.exists() is False assert doc == CollectionDocumentReference( id=doc_id, @@ -149,7 +156,7 @@ def test_delete_collection_document_removes_expected_file( def test_delete_collection_document_for_nonexisting_returns_none( client: FileSystemClient, ) -> None: - doc_id = str(Path(_TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY) + doc_id = str(Path(TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY) doc = client.delete_collection_document(doc_id) assert doc is None @@ -159,19 +166,19 @@ def test_add_collection_document_tag_adds_expected_tag( base_path: Path, client: FileSystemClient ) -> None: data = {"field1": 123, "field2": "text"} - _create_test_file_system_document( + _create_test_document( base_path / _TEST_COLLECTION_KEY, _TEST_DOCUMENT_KEY, data=data, tags=[{"key": "pre-existing-tag-key", "value": "pre-existing-tag-value"}], ) - doc_id = str(Path(_TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY) + doc_id = str(Path(TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY) client.add_collection_document_tag( doc_id, TagInput(key="added-tag-key", value="added-tag-value") ) - doc_path = base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.json" + doc_path = base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.doc.json" assert 
     assert json.loads(doc_path.read_text())["tags"] == [
         {"key": "pre-existing-tag-key", "value": "pre-existing-tag-value"},
         {"key": "added-tag-key", "value": "added-tag-value"},
@@ -182,7 +189,7 @@ def test_delete_collection_document_tag_deletes_expected_tag(
     base_path: Path, client: FileSystemClient
 ) -> None:
     data = {"field1": 123, "field2": "text"}
-    _create_test_file_system_document(
+    _create_test_document(
         base_path / _TEST_COLLECTION_KEY,
         _TEST_DOCUMENT_KEY,
         data=data,
@@ -192,10 +199,10 @@ def test_delete_collection_document_tag_deletes_expected_tag(
         ],
     )
-    doc_id = str(Path(_TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY)
+    doc_id = str(Path(TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY)
 
     client.delete_collection_document_tag(doc_id, "tag-to-be-deleted-key")
 
-    doc_path = base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.json"
+    doc_path = base_path / _TEST_COLLECTION_KEY / f"{_TEST_DOCUMENT_KEY}.doc.json"
     assert json.loads(doc_path.read_text())["tags"] == [
         {"key": "tag-key", "value": "tag-value"},
     ]
@@ -205,35 +212,41 @@ def test_get_collection_documents_returns_all_documents(
     base_path: Path, client: FileSystemClient
 ) -> None:
     test_data = {"name": "test document"}
-    _create_test_file_system_document(
+    _create_test_document(
         base_path / _TEST_COLLECTION_KEY, _TEST_DOCUMENT_KEY, data=test_data, tags=[]
     )
     test_another_data = {"name": "another test document"}
-    _create_test_file_system_document(
+    _create_test_document(
         base_path / _TEST_COLLECTION_KEY,
         _TEST_ANOTHER_DOCUMENT_KEY,
         data=test_another_data,
         tags=[],
     )
+    expected_number_of_documents = 2
 
     result, has_next_page, end_cursor = client.get_collection_documents(
         _TEST_COLLECTION_KEY, "", None
     )
 
-    assert result == [
-        CollectionDocumentReference(
-            id=str(Path(_TEST_COLLECTION_ID) / _TEST_ANOTHER_DOCUMENT_KEY),
-            key=_TEST_ANOTHER_DOCUMENT_KEY,
-            data=dict_to_base64(test_another_data),
-            tags=[],
-        ),
+    assert (
         CollectionDocumentReference(
-            id=str(Path(_TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY),
+            id=str(Path(TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY),
             key=_TEST_DOCUMENT_KEY,
             data=dict_to_base64(test_data),
             tags=[],
-        ),
-    ]
+        )
+        in result
+    )
+    assert (
+        CollectionDocumentReference(
+            id=str(Path(TEST_COLLECTION_ID) / _TEST_ANOTHER_DOCUMENT_KEY),
+            key=_TEST_ANOTHER_DOCUMENT_KEY,
+            data=dict_to_base64(test_another_data),
+            tags=[],
+        )
+        in result
+    )
+    assert len(result) == expected_number_of_documents
     assert has_next_page is False
     assert end_cursor == ""
 
@@ -242,14 +255,14 @@ def test_get_collection_documents_returns_documents_with_tag(
     base_path: Path, client: FileSystemClient
 ) -> None:
     test_tagged_data = {"name": "test document"}
-    _create_test_file_system_document(
+    _create_test_document(
         base_path / _TEST_COLLECTION_KEY,
         _TEST_DOCUMENT_KEY,
         data=test_tagged_data,
         tags=[{"key": "tag-key", "value": "tag-value"}],
     )
     test_untagged_data = {"name": "another test document"}
-    _create_test_file_system_document(
+    _create_test_document(
         base_path / _TEST_COLLECTION_KEY,
         _TEST_ANOTHER_DOCUMENT_KEY,
         data=test_untagged_data,
@@ -264,7 +277,7 @@ def test_get_collection_documents_returns_documents_with_tag(
 
     assert result == [
         CollectionDocumentReference(
-            id=str(Path(_TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY),
+            id=str(Path(TEST_COLLECTION_ID) / _TEST_DOCUMENT_KEY),
             key=_TEST_DOCUMENT_KEY,
             data=dict_to_base64(test_tagged_data),
             tags=[CollectionDocumentReferenceTags(key="tag-key", value="tag-value")],
@@ -277,28 +290,188 @@ def test_get_collection_collections_returns_expected_collections(
     base_path: Path, client: FileSystemClient
 ) -> None:
-    (base_path / _TEST_COLLECTION_ID).mkdir()
+    (base_path / TEST_COLLECTION_ID).mkdir()
     (base_path / _TEST_NESTED_COLLECTION_ID).mkdir()
     (base_path / _TEST_ANOTHER_NESTED_COLLECTION_ID).mkdir()
     collections, has_next_page, end_cursor = client.get_collection_collections(
         _TEST_COLLECTION_KEY, ""
     )
+    expected_number_of_collections = 2
     assert has_next_page is False
     assert end_cursor == ""
-    assert [
+    assert (
+        CollectionReference(
+            id=_TEST_NESTED_COLLECTION_ID, key=_TEST_NESTED_COLLECTION_KEY
+        )
+        in collections
+    )
+    assert (
         CollectionReference(
             id=_TEST_ANOTHER_NESTED_COLLECTION_ID,
             key=_TEST_ANOTHER_NESTED_COLLECTION_KEY,
-        ),
-        CollectionReference(
-            id=_TEST_NESTED_COLLECTION_ID, key=_TEST_NESTED_COLLECTION_KEY
-        ),
-    ] == collections
+        )
+        in collections
+    )
+    assert len(collections) == expected_number_of_collections
+
+
+def test_get_collection_file_returns_expected_existing_file_reference(
+    client: FileSystemClient, base_path: Path
+) -> None:
+    data = "File content 1;2;3;4;\n1;2;3;4"
+    tags = [
+        {"key": "tag-1-key", "value": "tag-1-value"},
+        {"key": "tag-2-key", "value": "tag-2-value"},
+    ]
+    _create_test_file(base_path, data=data, tags=tags)
+
+    file = client.create_collection_file_reference(TEST_COLLECTION_ID, TEST_FILE_KEY)
+
+    assert file is not None
+    assert file.file_id == TEST_FILE_ID
+    assert file.key == TEST_FILE_KEY
+    assert file.exists is True
+    assert file.tags == {"tag-1-key": "tag-1-value", "tag-2-key": "tag-2-value"}
+
+
+def test_get_collection_file_returns_expected_nonexisting_file_reference(
+    client: FileSystemClient, base_path: Path
+) -> None:
+    (base_path / TEST_COLLECTION_ID).mkdir(parents=True)
+
+    file = client.create_collection_file_reference(TEST_COLLECTION_ID, TEST_FILE_KEY)
+
+    assert file is not None
+    assert file.key == TEST_FILE_KEY
+    assert file.exists is False
+    assert file.tags == {}
+
+
+def test_get_collection_files_returns_all_files(
+    base_path: Path, client: FileSystemClient
+) -> None:
+    test_files = {
+        TEST_FILE_ID + "_1": (TEST_FILE_KEY + "_1", "File content 1;2;3;4;\n1;2;3;4"),
+        TEST_FILE_ID + "_2": (TEST_FILE_KEY + "_2", "File content 4;5;6;7;\n4;5;6;7"),
+    }
+    for file_id, (file_key, data) in test_files.items():
+        _create_test_file(base_path, TEST_COLLECTION_ID, file_key, file_id, data)
+
+    result, has_next_page, end_cursor = client.get_collection_files(
+        TEST_COLLECTION_ID, "", None
+    )
+
+    assert result is not None
+    result_files = {
+        file.file_id: (file.key, file.read_text()) for file in result if file
+    }
+    assert result_files == test_files
+    assert has_next_page is False
+    assert end_cursor == ""
+
+
+def test_delete_collection_file_removes_expected_file(
+    client: FileSystemClient, base_path: Path
+) -> None:
+    data = "File content 1;2;3;4;\n1;2;3;4"
+    _create_test_file(base_path, data=data)
+    data_path = base_path / _TEST_COLLECTION_KEY / f"{TEST_FILE_KEY}.file.data"
+    meta_path = base_path / _TEST_COLLECTION_KEY / f"{TEST_FILE_KEY}.file.meta.json"
+
+    client.delete_collection_file(TEST_FILE_ID)
+
+    assert meta_path.exists() is False
+    assert data_path.exists() is False
 
 
-def _create_test_file_system_document(
+def test_add_collection_file_tag_adds_expected_tag(
+    base_path: Path, client: FileSystemClient
+) -> None:
+    data = "File content 1;2;3;4;\n1;2;3;4"
+    tags = [{"key": "pre-existing-tag-key", "value": "pre-existing-tag-value"}]
+
+    _create_test_file(base_path, data=data, tags=tags)
+
+    meta_path = base_path / _TEST_COLLECTION_KEY / f"{TEST_FILE_KEY}.file.meta.json"
+
+    client.add_collection_file_tag(
+        TEST_FILE_ID, TagInput(key="added-tag-key", value="added-tag-value")
+    )
+
+    assert json.loads(meta_path.read_text())["tags"] == [
+        {"key": "pre-existing-tag-key", "value": "pre-existing-tag-value"},
+        {"key": "added-tag-key", "value": "added-tag-value"},
+    ]
+
+
+def test_delete_collection_file_tag_deletes_expected_tag(
+    base_path: Path, client: FileSystemClient
+) -> None:
+    data = "File content 1;2;3;4;\n1;2;3;4"
+    tags = [
+        {"key": "tag-key", "value": "tag-value"},
+        {"key": "tag-to-be-deleted-key", "value": "tag-to-be-deleted-value"},
+    ]
+    _create_test_file(base_path, data=data, tags=tags)
+
+    client.delete_collection_file_tag(TEST_FILE_ID, "tag-to-be-deleted-key")
+
+    meta_path = base_path / TEST_COLLECTION_ID / f"{TEST_FILE_KEY}.file.meta.json"
+    assert json.loads(meta_path.read_text())["tags"] == [
+        {"key": "tag-key", "value": "tag-value"},
+    ]
+
+
+def test_file_exists_returns_true_for_existing_file(
+    base_path: Path, client: FileSystemClient
+) -> None:
+    _create_test_file(base_path, file_id=TEST_FILE_ID, data="some data")
+
+    assert client.file_exists(TEST_FILE_ID) is True
+
+
+def test_file_exists_returns_false_for_nonexisting_file(
+    client: FileSystemClient,
+) -> None:
+    assert client.file_exists(TEST_FILE_ID) is False
+
+
+def test_file_exists_returns_false_for_nonexisting_referenced_file(
+    base_path: Path,
+    client: FileSystemClient,
+) -> None:
+    (base_path / TEST_COLLECTION_ID).mkdir(parents=True)
+    f = client.create_collection_file_reference(TEST_COLLECTION_ID, TEST_FILE_KEY)
+
+    assert f is not None
+    assert client.file_exists(f.file_id) is False
+
+
+def test_collection_file_tags_returns_expected_tags(
+    base_path: Path, client: FileSystemClient
+) -> None:
+    _create_test_file(
+        base_path,
+        tags=[
+            {"key": "tag-1", "value": "value-1"},
+            {"key": "tag-2", "value": "value-2"},
+        ],
+    )
+
+    tags = client.collection_file_tags(TEST_FILE_ID)
+
+    assert tags == {"tag-1": "value-1", "tag-2": "value-2"}
+
+
+def test_collection_file_tags_returns_none_for_nonexisting_file(
+    client: FileSystemClient,
+) -> None:
+    assert client.collection_file_tags(TEST_FILE_ID) is None
+
+
+def _create_test_document(
     collection_path: Path,
     document_key: str,
     data: dict[str, Any],
@@ -306,5 +479,30 @@
 ) -> None:
     collection_path.mkdir(exist_ok=True, parents=True)
     stored_doc_data = json.dumps({"data": data, "tags": tags})
-    doc_path = collection_path / f"{document_key}.json"
+    doc_path = collection_path / f"{document_key}.doc.json"
     doc_path.write_text(stored_doc_data)
+
+
+def _create_test_file(  # noqa: PLR0913
+    base_path: Path,
+    collection_id: str = TEST_COLLECTION_ID,
+    file_key: str = TEST_FILE_KEY,
+    file_id: str = TEST_FILE_ID,
+    data: str | None = None,
+    tags: list[dict[str, str]] | None = None,
+) -> None:
+    index_path = base_path / FileSystemClient.FILE_INDEX_DIR
+    index_path.mkdir(parents=True, exist_ok=True)
+    index_entry_path = index_path / file_id
+    index_entry_path.write_text(
+        json.dumps({"file_key": file_key, "collection_id": collection_id})
+    )
+    collection_path = base_path / collection_id
+    collection_path.mkdir(parents=True, exist_ok=True)
+    meta_path = collection_path / f"{file_key}.file.meta.json"
+    meta_path.write_text(
+        json.dumps({"file_id": file_id, "file_key": file_key, "tags": tags or []})
+    )
+    data_path = collection_path / f"{file_key}.file.data"
+    if data:
+        data_path.write_text(data)
diff --git a/python/tests/test_numerous_client.py b/python/tests/test_get_client.py
similarity index 100%
rename from python/tests/test_numerous_client.py
rename to python/tests/test_get_client.py
diff --git a/shared/schema.gql b/shared/schema.gql
index 65ec73e5..9c89b87d 100644
--- a/shared/schema.gql
+++ b/shared/schema.gql
@@ -1022,6 +1022,10 @@ union CollectionFileResult = CollectionFile | CollectionFileNotFound
 union CollectionFileCreateResult = CollectionFile | CollectionNotFound
 union CollectionFileDeleteResult = CollectionFile | CollectionFileNotFound
 
+extend type Query {
+  collectionFile(id: ID!): CollectionFileResult @canAccessCollectionFile
+}
+
 extend type Mutation {
   # idempotent
   collectionFileCreate(
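Usage note (illustrative only, not part of the change set): a minimal sketch of how the collection-file API exercised by the new test_fs_client.py tests might be driven directly through the local FileSystemClient. The base directory and the "my-collection"/"results.csv" keys are hypothetical, and the constructor signature is an assumption; only methods that appear in the tests above are called.

# Hedged sketch: exercises the collection-file API seen in the tests above.
# Assumption: FileSystemClient accepts a base directory path (the client fixture
# used by the tests is not shown here, so this constructor call is illustrative).
from pathlib import Path

from numerous._client._fs_client import FileSystemClient
from numerous.generated.graphql.input_types import TagInput

client = FileSystemClient(Path("./collections"))  # assumed constructor signature

# Create (or fetch) a reference for a file key inside a collection.
file = client.create_collection_file_reference("my-collection", "results.csv")
if file is not None:
    # Tag the file and read the tags back.
    client.add_collection_file_tag(file.file_id, TagInput(key="kind", value="csv"))
    print(client.collection_file_tags(file.file_id))  # {"kind": "csv"}
    # A freshly referenced file has no data yet, so existence is reported as False,
    # mirroring test_file_exists_returns_false_for_nonexisting_referenced_file.
    print(client.file_exists(file.file_id))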