diff --git a/cloudpathlib/__init__.py b/cloudpathlib/__init__.py index da4fe28e..c51fc45e 100644 --- a/cloudpathlib/__init__.py +++ b/cloudpathlib/__init__.py @@ -4,9 +4,11 @@ from .azure.azblobclient import AzureBlobClient from .azure.azblobpath import AzureBlobPath from .cloudpath import CloudPath, implementation_registry -from .s3.s3client import S3Client -from .gs.gspath import GSPath from .gs.gsclient import GSClient +from .gs.gspath import GSPath +from .http.httpclient import HttpClient +from .http.httppath import HttpPath +from .s3.s3client import S3Client from .s3.s3path import S3Path @@ -27,6 +29,8 @@ "implementation_registry", "GSClient", "GSPath", + "HttpClient", + "HttpPath", "S3Client", "S3Path", ] diff --git a/cloudpathlib/cloudpath.py b/cloudpathlib/cloudpath.py index 4aa895a6..01c9d8e7 100644 --- a/cloudpathlib/cloudpath.py +++ b/cloudpathlib/cloudpath.py @@ -27,7 +27,6 @@ Generator, List, Optional, - Sequence, Tuple, Type, TYPE_CHECKING, @@ -286,11 +285,11 @@ def __setstate__(self, state: Dict[str, Any]) -> None: @property def _no_prefix(self) -> str: - return self._str[len(self.cloud_prefix) :] + return self._str[len(self.anchor) :] @property def _no_prefix_no_drive(self) -> str: - return self._str[len(self.cloud_prefix) + len(self.drive) :] + return self._str[len(self.anchor) + len(self.drive) :] @overload @classmethod @@ -881,9 +880,9 @@ def relative_to(self, other: Self, walk_up: bool = False) -> PurePosixPath: # absolute) if not isinstance(other, CloudPath): raise ValueError(f"{self} is a cloud path, but {other} is not") - if self.cloud_prefix != other.cloud_prefix: + if self.anchor != other.anchor: raise ValueError( - f"{self} is a {self.cloud_prefix} path, but {other} is a {other.cloud_prefix} path" + f"{self} is a {self.anchor} path, but {other} is a {other.anchor} path" ) kwargs = dict(walk_up=walk_up) @@ -921,7 +920,7 @@ def parent(self) -> Self: return self._dispatch_to_path("parent") @property - def parents(self) -> Sequence[Self]: + def parents(self) -> Tuple[Self, ...]: return self._dispatch_to_path("parents") @property @@ -1176,7 +1175,7 @@ def copytree(self, destination, force_overwrite_to_cloud=None, ignore=None): ) elif subpath.is_dir(): subpath.copytree( - destination / subpath.name, + destination / (subpath.name + ("" if subpath.name.endswith("/") else "/")), force_overwrite_to_cloud=force_overwrite_to_cloud, ignore=ignore, ) @@ -1210,8 +1209,8 @@ def _new_cloudpath(self, path: Union[str, os.PathLike]) -> Self: path = path[1:] # add prefix/anchor if it is not already - if not path.startswith(self.cloud_prefix): - path = f"{self.cloud_prefix}{path}" + if not path.startswith(self.anchor): + path = f"{self.anchor}{path}" return self.client.CloudPath(path) diff --git a/cloudpathlib/http/__init__.py b/cloudpathlib/http/__init__.py new file mode 100644 index 00000000..5ad785be --- /dev/null +++ b/cloudpathlib/http/__init__.py @@ -0,0 +1,7 @@ +from .httpclient import HttpClient +from .httppath import HttpPath + +__all__ = [ + "HttpClient", + "HttpPath", +] diff --git a/cloudpathlib/http/httpclient.py b/cloudpathlib/http/httpclient.py new file mode 100644 index 00000000..b9febac7 --- /dev/null +++ b/cloudpathlib/http/httpclient.py @@ -0,0 +1,173 @@ +from datetime import datetime +import os +import re +import urllib.request +import urllib.parse +import urllib.error +from pathlib import Path +from typing import Iterable, Optional, Tuple, Union, Callable +import shutil +import mimetypes +import urllib.response + +import pytz + +from cloudpathlib.client import 
Client, register_client_class +from cloudpathlib.enums import FileCacheMode + +from .httppath import HttpPath + + +@register_client_class("http") +class HttpClient(Client): + def __init__( + self, + file_cache_mode: Optional[Union[str, FileCacheMode]] = None, + local_cache_dir: Optional[Union[str, os.PathLike]] = None, + content_type_method: Optional[Callable] = mimetypes.guess_type, + auth: Optional[urllib.request.BaseHandler] = None, + custom_list_page_parser: Optional[Callable[[str], Iterable[str]]] = None, + custom_dir_matcher: Optional[Callable[[str], bool]] = None, + ): + super().__init__(file_cache_mode, local_cache_dir, content_type_method) + self.auth = auth + + if self.auth is None: + self.opener = urllib.request.build_opener() + else: + self.opener = urllib.request.build_opener(self.auth) + + self.custom_list_page_parser = custom_list_page_parser + + self.dir_matcher = ( + custom_dir_matcher if custom_dir_matcher is not None else lambda x: x.endswith("/") + ) + + def _get_metadata(self, cloud_path: HttpPath) -> dict: + with self.opener.open(cloud_path.as_url()) as response: + last_modified = response.headers.get("Last-Modified", None) + + if last_modified is not None: + # per https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified + last_modified = datetime.strptime(last_modified, "%a, %d %b %Y %H:%M:%S %Z") + + # should always be utc https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified#gmt + last_modified = last_modified.replace(tzinfo=pytz.UTC) + + return { + "size": int(response.headers.get("Content-Length", 0)), + "last_modified": last_modified, + "content_type": response.headers.get("Content-Type", None), + } + + def _download_file(self, cloud_path: HttpPath, local_path: Union[str, os.PathLike]) -> Path: + local_path = Path(local_path) + with self.opener.open(cloud_path.as_url()) as response: + with open(local_path, "wb") as out_file: + shutil.copyfileobj(response, out_file) + return local_path + + def _exists(self, cloud_path: HttpPath) -> bool: + request = urllib.request.Request(cloud_path.as_url(), method="HEAD") + try: + with self.opener.open(request) as response: + return response.status == 200 + except (urllib.error.HTTPError, urllib.error.URLError) as e: + if isinstance(e, urllib.error.URLError) or e.code == 404: + return False + raise + + def _move_file(self, src: HttpPath, dst: HttpPath, remove_src: bool = True) -> HttpPath: + self._upload_file(src, dst) + if remove_src: + self._remove(src) + return dst + + def _remove(self, cloud_path: HttpPath, missing_ok: bool = True) -> None: + request = urllib.request.Request(cloud_path.as_url(), method="DELETE") + try: + with self.opener.open(request) as response: + if response.status != 204: + raise Exception(f"Failed to delete {cloud_path}.") + except urllib.error.HTTPError as e: + if e.code == 404 and missing_ok: + pass + else: + raise FileNotFoundError(f"Failed to delete {cloud_path}.") + + def _list_dir(self, cloud_path: HttpPath, recursive: bool) -> Iterable[Tuple[HttpPath, bool]]: + try: + with self.opener.open(cloud_path.as_url()) as response: + # Parse the directory listing + for path, is_dir in self._parse_list_dir_response( + response.read().decode(), base_url=str(cloud_path) + ): + yield path, is_dir + + # If it's a directory and recursive is True, list the contents of the directory + if recursive and is_dir: + yield from self._list_dir(path, recursive=True) + + except Exception as e: # noqa E722 + raise NotImplementedError( + f"Unable to parse response as a listing of files; 
please provide a custom parser as `custom_list_page_parser`. Error raised: {e}" + ) + + def _upload_file(self, local_path: Union[str, os.PathLike], cloud_path: HttpPath) -> HttpPath: + local_path = Path(local_path) + if self.content_type_method is not None: + content_type, _ = self.content_type_method(local_path) + + headers = {"Content-Type": content_type or "application/octet-stream"} + + with open(local_path, "rb") as file_data: + request = urllib.request.Request( + cloud_path.as_url(), data=file_data.read(), method="PUT", headers=headers + ) + with self.opener.open(request) as response: + if response.status != 201 and response.status != 200: + raise Exception(f"Failed to upload {local_path} to {cloud_path}.") + return cloud_path + + def _get_public_url(self, cloud_path: HttpPath) -> str: + return cloud_path.as_url() + + def _generate_presigned_url(self, cloud_path: HttpPath, expire_seconds: int = 60 * 60) -> str: + raise NotImplementedError("Presigned URLs are not supported using urllib.") + + def _parse_list_dir_response( + self, response: str, base_url: str + ) -> Iterable[Tuple[HttpPath, bool]]: + # Ensure base_url ends with a trailing slash so joining works + if not base_url.endswith("/"): + base_url += "/" + + def _simple_links(html: str) -> Iterable[str]: + return re.findall(r' None: + request = urllib.request.Request(url.as_url(), method=method, **kwargs) + with self.opener.open(request) as response: + return response + + +HttpClient.HttpPath = HttpClient.CloudPath # type: ignore + + +@register_client_class("https") +class HttpsClient(HttpClient): + pass + + +HttpsClient.HttpsPath = HttpsClient.CloudPath # type: ignore diff --git a/cloudpathlib/http/httppath.py b/cloudpathlib/http/httppath.py new file mode 100644 index 00000000..96e16033 --- /dev/null +++ b/cloudpathlib/http/httppath.py @@ -0,0 +1,144 @@ +from pathlib import PurePosixPath +from typing import Any, Tuple, Union, Optional + +import os +from pathlib import Path +from tempfile import TemporaryDirectory +from typing import TYPE_CHECKING + +from ..cloudpath import CloudPath, NoStatError, register_path_class + + +if TYPE_CHECKING: + from .httpclient import HttpClient + + +@register_path_class("http") +class HttpPath(CloudPath): + cloud_prefix = "http://" + client: "HttpClient" + + def __init__( + self, + cloud_path: Union[str, "HttpPath"], + client: Optional["HttpClient"] = None, + ) -> None: + super().__init__(cloud_path, client) + + self._path = ( + PurePosixPath(self._url.path) + if self._url.path.startswith("/") + else PurePosixPath(f"/{self._url.path}") + ) + + def _dispatch_to_path(self, func: str, *args, **kwargs) -> Any: + sup = super()._dispatch_to_path(func, *args, **kwargs) + + # some dispatch methods like "__truediv__" strip trailing slashes; + # for http paths, we need to keep them to indicate directories + if func == "__truediv__" and str(args[0]).endswith("/"): + return self._new_cloudpath(str(sup) + "/") + + else: + return sup + + @property + def drive(self) -> str: + # For HTTP paths, no drive; use .anchor for scheme + netloc + return self._url.netloc + + @property + def anchor(self) -> str: + return f"{self._url.scheme}://{self._url.netloc}/" + + @property + def _no_prefix_no_drive(self) -> str: + # netloc appears in anchor and drive for httppath; so don't double count + return self._str[len(self.anchor) - 1 :] + + def is_dir(self) -> bool: + if not self.exists(): + return False + + # Use client default to iden + return self.client.dir_matcher(str(self)) + + def is_file(self) -> bool: + if not 
self.exists(): + return False + + return not self.client.dir_matcher(str(self)) + + def mkdir(self, parents: bool = False, exist_ok: bool = False) -> None: + pass # no-op for HTTP Paths + + def touch(self, exist_ok: bool = True) -> None: + if self.exists(): + if not exist_ok: + raise FileExistsError(f"File already exists: {self}") + + raise NotImplementedError( + "Touch not implemented for existing HTTP files since we can't update the modified time." + ) + else: + empty_file = Path(TemporaryDirectory().name) / "empty_file.txt" + empty_file.parent.mkdir(parents=True, exist_ok=True) + empty_file.write_text("") + self.client._upload_file(empty_file, self) + + def stat(self, follow_symlinks: bool = True) -> os.stat_result: + try: + meta = self.client._get_metadata(self) + except: # noqa E722 + raise NoStatError(f"Could not get metadata for {self}") + + return os.stat_result( + ( # type: ignore + None, # mode + None, # ino + self.cloud_prefix, # dev, + None, # nlink, + None, # uid, + None, # gid, + meta.get("size", 0), # size, + None, # atime, + meta.get("last_modified", 0).timestamp(), # mtime, + None, # ctime, + ) + ) + + def as_url(self, presign: bool = False, expire_seconds: int = 60 * 60) -> str: + if presign: + raise NotImplementedError("Presigning not supported for HTTP paths") + + return ( + self._url.geturl() + ) # recreate from what was initialized so we have the same query params, etc. + + @property + def name(self) -> str: + return self._path.name + + @property + def parents(self) -> Tuple["HttpPath", ...]: + return super().parents + (self._new_cloudpath(""),) + + def get(self, **kwargs): + return self.client.request(self, "GET", **kwargs) + + def put(self, **kwargs): + return self.client.request(self, "PUT", **kwargs) + + def post(self, **kwargs): + return self.client.request(self, "POST", **kwargs) + + def delete(self, **kwargs): + return self.client.request(self, "DELETE", **kwargs) + + def head(self, **kwargs): + return self.client.request(self, "HEAD", **kwargs) + + +@register_path_class("https") +class HttpsPath(HttpPath): + cloud_prefix: str = "https://" diff --git a/requirements-dev.txt b/requirements-dev.txt index 7c526692..c8291861 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -33,4 +33,5 @@ tabulate tenacity tqdm typer +types-pytz wheel diff --git a/tests/conftest.py b/tests/conftest.py index 301ffe87..e5c47211 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,11 @@ import os from pathlib import Path, PurePosixPath import shutil +import ssl from tempfile import TemporaryDirectory from typing import Dict, Optional +from urllib.parse import urlparse +from urllib.request import HTTPSHandler from azure.storage.blob import BlobServiceClient from azure.storage.filedatalake import ( @@ -18,6 +21,8 @@ from cloudpathlib import AzureBlobClient, AzureBlobPath, GSClient, GSPath, S3Client, S3Path from cloudpathlib.cloudpath import implementation_registry +from cloudpathlib.http.httpclient import HttpClient, HttpsClient +from cloudpathlib.http.httppath import HttpPath, HttpsPath from cloudpathlib.local import ( local_azure_blob_implementation, LocalAzureBlobClient, @@ -42,6 +47,8 @@ from .mock_clients.mock_s3 import mocked_session_class_factory, DEFAULT_S3_BUCKET_NAME +from .http_fixtures import http_server, https_server, utilities_dir # noqa: F401 + if os.getenv("USE_LIVE_CLOUD") == "1": load_dotenv(find_dotenv()) @@ -469,6 +476,78 @@ def local_s3_rig(request, monkeypatch, assets_dir): rig.client_class.reset_default_storage_dir() # reset local storage 
directory +class HttpProviderTestRig(CloudProviderTestRig): + def create_cloud_path(self, path: str, client=None): + """Http version needs to include netloc as well""" + if client: + return client.CloudPath( + cloud_path=f"{self.path_class.cloud_prefix}{self.drive}/{self.test_dir}/{path}" + ) + else: + return self.path_class( + cloud_path=f"{self.path_class.cloud_prefix}{self.drive}/{self.test_dir}/{path}" + ) + + +@fixture() +def http_rig(request, assets_dir, http_server): # noqa: F811 + test_dir = create_test_dir_name(request) + + host, server_dir = http_server + drive = urlparse(host).netloc + + # copy test assets + shutil.copytree(assets_dir, server_dir / test_dir) + + rig = CloudProviderTestRig( + path_class=HttpPath, + client_class=HttpClient, + drive=drive, + test_dir=test_dir, + ) + + rig.http_server_dir = server_dir + rig.client_class(**rig.required_client_kwargs).set_as_default_client() # set default client + + yield rig + + rig.client_class._default_client = None # reset default client + shutil.rmtree(server_dir) + + +@fixture() +def https_rig(request, assets_dir, https_server): # noqa: F811 + test_dir = create_test_dir_name(request) + + host, server_dir = https_server + drive = urlparse(host).netloc + + # copy test assets + shutil.copytree(assets_dir, server_dir / test_dir) + + skip_verify_ctx = ssl.SSLContext() + skip_verify_ctx.check_hostname = False + skip_verify_ctx.load_verify_locations(utilities_dir / "insecure-test.pem") + + rig = CloudProviderTestRig( + path_class=HttpsPath, + client_class=HttpsClient, + drive=drive, + test_dir=test_dir, + required_client_kwargs=dict( + auth=HTTPSHandler(context=skip_verify_ctx, check_hostname=False) + ), + ) + + rig.http_server_dir = server_dir + rig.client_class(**rig.required_client_kwargs).set_as_default_client() # set default client + + yield rig + + rig.client_class._default_client = None # reset default client + shutil.rmtree(server_dir) + + # create azure fixtures for both blob and gen2 storage azure_rigs = fixture_union( "azure_rigs", @@ -489,6 +568,7 @@ def local_s3_rig(request, monkeypatch, assets_dir): local_azure_rig, local_s3_rig, local_gs_rig, + http_rig, ], ) diff --git a/tests/http_fixtures.py b/tests/http_fixtures.py new file mode 100644 index 00000000..754d4a65 --- /dev/null +++ b/tests/http_fixtures.py @@ -0,0 +1,141 @@ +from datetime import datetime +from functools import partial +from http.server import HTTPServer, SimpleHTTPRequestHandler +import os +from pathlib import Path +import shutil +import ssl +import threading +import time +from urllib.request import urlopen + +from pytest import fixture + + +utilities_dir = Path(__file__).parent / "utilities" + + +class TestHTTPRequestHandler(SimpleHTTPRequestHandler): + """Also allows PUT and DELETE requests for testing.""" + + def do_PUT(self): + length = int(self.headers["Content-Length"]) + path = Path(self.translate_path(self.path)) + + if path.is_dir(): + path.mkdir(parents=True, exist_ok=True) + else: + path.parent.mkdir(parents=True, exist_ok=True) + + with path.open("wb") as f: + f.write(self.rfile.read(length)) + + now = datetime.now().timestamp() + os.utime(path, (now, now)) + + self.send_response(201) + self.end_headers() + + def do_DELETE(self): + path = Path(self.translate_path(self.path)) + + try: + if path.is_dir(): + shutil.rmtree(path) + else: + path.unlink() + self.send_response(204) + except FileNotFoundError: + self.send_response(404) + + self.end_headers() + + +def _http_server( + root_dir, port, hostname="localhost", use_ssl=False, certfile=None, 
keyfile=None, threaded=True +): + root_dir.mkdir(exist_ok=True) + + scheme = "http" if not use_ssl else "https" + + def start_server(): + handler = partial(TestHTTPRequestHandler, directory=str(root_dir)) + httpd = HTTPServer((hostname, port), handler) + + if use_ssl: + if not certfile or not keyfile: + raise ValueError("certfile and keyfile must be provided if `ssl=True`") + + context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + context.load_cert_chain(certfile=certfile, keyfile=keyfile) + context.check_hostname = False + httpd.socket = context.wrap_socket(httpd.socket, server_side=True) + + httpd.serve_forever() + + if threaded: + server_thread = threading.Thread(target=start_server, daemon=True) + server_thread.start() + + else: + start_server() + + # Wait for the server to start + for _ in range(10): + try: + if use_ssl: + req_context = ssl.SSLContext() + req_context.check_hostname = False + req_context.verify_mode = ssl.CERT_NONE + else: + req_context = None + + urlopen(f"{scheme}://{hostname}:{port}", context=req_context) + + break + except Exception: + time.sleep(0.1) + + return f"{scheme}://{hostname}:{port}", server_thread + + +@fixture(scope="module") +def http_server(tmp_path_factory, worker_id): + port = 9077 + ( + int(worker_id.lstrip("gw")) if worker_id != "master" else 0 + ) # don't collide if tests running in parallel with multiple servers + + server_dir = tmp_path_factory.mktemp("server_files").resolve() + + host, server_thread = _http_server(server_dir, port) + + yield host, server_dir + + server_thread.join(0) + + if server_dir.exists(): + shutil.rmtree(server_dir) + + +@fixture(scope="module") +def https_server(tmp_path_factory, worker_id): + port = 4443 + ( + int(worker_id.lstrip("gw")) if worker_id != "master" else 0 + ) # don't collide if tests running in parallel with multiple servers + + server_dir = tmp_path_factory.mktemp("server_files").resolve() + + host, server_thread = _http_server( + server_dir, + port, + use_ssl=True, + certfile=utilities_dir / "insecure-test.pem", + keyfile=utilities_dir / "insecure-test.key", + ) + + yield host, server_dir + + server_thread.join(0) + + if server_dir.exists(): + shutil.rmtree(server_dir) diff --git a/tests/test_client.py b/tests/test_client.py index fd58535b..78505696 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -9,6 +9,7 @@ from cloudpathlib import CloudPath from cloudpathlib.client import register_client_class from cloudpathlib.cloudpath import implementation_registry, register_path_class +from cloudpathlib.http.httpclient import HttpClient from cloudpathlib.s3.s3client import S3Client from cloudpathlib.s3.s3path import S3Path @@ -96,6 +97,10 @@ def _test_write_content_type(suffix, expected, rig_ref, check=True): for suffix, content_type in mimes: _test_write_content_type(suffix, content_type, rig, check=False) + if rig.client_class is HttpClient: + # HTTP client doesn't support custom content types + return + # custom mime type method def my_content_type(path): # do lookup for content types I define; fallback to diff --git a/tests/test_cloudpath_file_io.py b/tests/test_cloudpath_file_io.py index 7dc5b149..0bd29f63 100644 --- a/tests/test_cloudpath_file_io.py +++ b/tests/test_cloudpath_file_io.py @@ -14,17 +14,25 @@ CloudPathNotImplementedError, DirectoryNotEmptyError, ) +from cloudpathlib.http.httpclient import HttpClient +from cloudpathlib.http.httppath import HttpPath def test_file_discovery(rig): p = rig.create_cloud_path("dir_0/file0_0.txt") assert p.exists() - p2 = 
rig.create_cloud_path("dir_0/not_a_file") + p2 = rig.create_cloud_path("dir_0/not_a_file_yet.file") assert not p2.exists() p2.touch() assert p2.exists() - p2.touch(exist_ok=True) + + if rig.client_class not in [HttpClient]: # not supported to touch existing + p2.touch(exist_ok=True) + else: + with pytest.raises(NotImplementedError): + p2.touch(exist_ok=True) + with pytest.raises(FileExistsError): p2.touch(exist_ok=False) p2.unlink(missing_ok=False) @@ -83,19 +91,19 @@ def glob_test_dirs(rig, tmp_path): def _make_glob_directory(root): (root / "dirB").mkdir() - (root / "dirB" / "fileB").write_text("fileB") + (root / "dirB" / "fileB.txt").write_text("fileB") (root / "dirC").mkdir() (root / "dirC" / "dirD").mkdir() - (root / "dirC" / "dirD" / "fileD").write_text("fileD") - (root / "dirC" / "fileC").write_text("fileC") - (root / "fileA").write_text("fileA") + (root / "dirC" / "dirD" / "fileD.txt").write_text("fileD") + (root / "dirC" / "fileC.txt").write_text("fileC") + (root / "fileA.txt").write_text("fileA") - cloud_root = rig.create_cloud_path("glob-tests") + cloud_root = rig.create_cloud_path("glob-tests/") cloud_root.mkdir() _make_glob_directory(cloud_root) - local_root = tmp_path / "glob-tests" + local_root = tmp_path / "glob-tests/" local_root.mkdir() _make_glob_directory(local_root) @@ -108,7 +116,7 @@ def _make_glob_directory(root): def _lstrip_path_root(path, root): rel_path = str(path)[len(str(root)) :] - return rel_path.rstrip("/") # agnostic to trailing slash + return rel_path.strip("/") def _assert_glob_results_match(cloud_results, local_results, cloud_root, local_root): @@ -181,6 +189,9 @@ def test_walk(glob_test_dirs): def test_list_buckets(rig): + if rig.path_class in [HttpPath]: + return # no bucket listing for HTTP + # test we can list buckets buckets = list(rig.path_class(f"{rig.path_class.cloud_prefix}").iterdir()) assert len(buckets) > 0 @@ -319,6 +330,10 @@ def test_is_dir_is_file(rig, tmp_path): dir_nested_no_slash = rig.create_cloud_path("dir_1/dir_1_0") for test_case in [dir_slash, dir_no_slash, dir_nested_slash, dir_nested_no_slash]: + # skip no-slash cases, which are interpreted as files for http paths + if not str(test_case).endswith("/") and rig.path_class in [HttpPath]: + continue + assert test_case.is_dir() assert not test_case.is_file() @@ -337,7 +352,7 @@ def test_is_dir_is_file(rig, tmp_path): def test_file_read_writes(rig, tmp_path): p = rig.create_cloud_path("dir_0/file0_0.txt") - p2 = rig.create_cloud_path("dir_0/not_a_file") + p2 = rig.create_cloud_path("dir_0/not_a_file.txt") p3 = rig.create_cloud_path("") text = "lalala" * 10_000 @@ -355,16 +370,20 @@ def test_file_read_writes(rig, tmp_path): before_touch = datetime.now() sleep(1) - p.touch() - if not getattr(rig, "is_custom_s3", False): - # Our S3Path.touch implementation does not update mod time for MinIO - assert datetime.fromtimestamp(p.stat().st_mtime) > before_touch + + if rig.path_class not in [HttpPath]: # not supported to touch existing + p.touch() + + if not getattr(rig, "is_custom_s3", False): + # Our S3Path.touch implementation does not update mod time for MinIO + assert datetime.fromtimestamp(p.stat().st_mtime) > before_touch # no-op if not getattr(rig, "is_adls_gen2", False): p.mkdir() - assert p.etag is not None + if rig.path_class not in [HttpPath]: # not supported to touch existing + assert p.etag is not None dest = rig.create_cloud_path("dir2/new_file0_0.txt") assert not dest.exists() @@ -445,7 +464,7 @@ def test_cloud_path_download_to(rig, tmp_path): def test_fspath(rig): - p = 
rig.create_cloud_path("dir_0") + p = rig.create_cloud_path("dir_0/") assert os.fspath(p) == p.fspath diff --git a/tests/test_cloudpath_instantiation.py b/tests/test_cloudpath_instantiation.py index de139593..003a7e00 100644 --- a/tests/test_cloudpath_instantiation.py +++ b/tests/test_cloudpath_instantiation.py @@ -7,6 +7,7 @@ from cloudpathlib import AzureBlobPath, CloudPath, GSPath, S3Path from cloudpathlib.exceptions import InvalidPrefixError, MissingDependenciesError +from cloudpathlib.http.httppath import HttpPath @pytest.mark.parametrize( @@ -44,6 +45,9 @@ def test_dispatch_error(): @pytest.mark.parametrize("path", ["b/k", "b/k", "b/k.file", "b/k", "b"]) def test_instantiation(rig, path): + if rig.path_class in [HttpPath]: + path = "example-url.com/" + path + # check two cases of prefix for prefix in [rig.cloud_prefix.lower(), rig.cloud_prefix.upper()]: expected = prefix + path @@ -51,13 +55,17 @@ def test_instantiation(rig, path): assert repr(p) == f"{rig.path_class.__name__}('{expected}')" assert str(p) == expected - assert p._no_prefix == expected.split("://", 1)[-1] + if rig.path_class in [HttpPath]: + assert p._no_prefix == path.replace("example-url.com/", "") + assert str(p._path) == path.replace("example-url.com", "") + + else: + assert p._no_prefix == expected.split("://", 1)[-1] + assert str(p._path) == expected.split(":/", 1)[-1] assert p._url.scheme == expected.split("://", 1)[0].lower() assert p._url.netloc == expected.split("://", 1)[-1].split("/")[0] - assert str(p._path) == expected.split(":/", 1)[-1] - def test_default_client_lazy(rig): cp = rig.path_class(rig.cloud_prefix + "testing/file.txt") @@ -97,7 +105,7 @@ def test_dependencies_not_loaded(rig, monkeypatch): def test_is_pathlike(rig): - p = rig.create_cloud_path("dir_0") + p = rig.create_cloud_path("dir_0/") assert isinstance(p, os.PathLike) diff --git a/tests/test_cloudpath_manipulation.py b/tests/test_cloudpath_manipulation.py index aaf4098c..91e29c77 100644 --- a/tests/test_cloudpath_manipulation.py +++ b/tests/test_cloudpath_manipulation.py @@ -4,6 +4,7 @@ import pytest from cloudpathlib import CloudPath +from cloudpathlib.http.httppath import HttpPath def test_properties(rig): @@ -83,16 +84,27 @@ def test_joins(rig): if sys.version_info >= (3, 12): assert rig.create_cloud_path("a/b/c/d").match("A/*/C/D", case_sensitive=False) - assert rig.create_cloud_path("a/b/c/d").anchor == rig.cloud_prefix + if rig.path_class not in [HttpPath]: + assert rig.create_cloud_path("a/b/c/d").anchor == rig.cloud_prefix + assert rig.create_cloud_path("a/b/c/d").parent == rig.create_cloud_path("a/b/c") - assert rig.create_cloud_path("a/b/c/d").parents == ( - rig.create_cloud_path("a/b/c"), - rig.create_cloud_path("a/b"), - rig.create_cloud_path("a"), - rig.path_class(f"{rig.cloud_prefix}{rig.drive}/{rig.test_dir}"), - rig.path_class(f"{rig.cloud_prefix}{rig.drive}"), - ) + if rig.path_class not in [HttpPath]: + assert rig.create_cloud_path("a/b/c/d").parents == ( + rig.create_cloud_path("a/b/c"), + rig.create_cloud_path("a/b"), + rig.create_cloud_path("a"), + rig.path_class(f"{rig.cloud_prefix}{rig.drive}/{rig.test_dir}"), + rig.path_class(f"{rig.cloud_prefix}{rig.drive}"), + ) + else: + assert rig.create_cloud_path("a/b/c/d").parents == ( + rig.create_cloud_path("a/b/c"), + rig.create_cloud_path("a/b"), + rig.create_cloud_path("a"), + rig.path_class(f"{rig.cloud_prefix}{rig.drive}/{rig.test_dir}"), + rig.path_class(f"{rig.cloud_prefix}{rig.drive}/"), + ) assert rig.create_cloud_path("a").joinpath("b", "c") == 
rig.create_cloud_path("a/b/c") assert rig.create_cloud_path("a").joinpath(PurePosixPath("b"), "c") == rig.create_cloud_path( @@ -106,21 +118,32 @@ def test_joins(rig): == f"{rig.cloud_prefix}{rig.drive}/{rig.test_dir}/a/b/c" ) - assert rig.create_cloud_path("a/b/c/d").parts == ( - rig.cloud_prefix, - rig.drive, - rig.test_dir, - "a", - "b", - "c", - "d", - ) + if rig.path_class in [HttpPath]: + assert rig.create_cloud_path("a/b/c/d").parts == ( + rig.cloud_prefix + rig.drive + "/", + rig.test_dir, + "a", + "b", + "c", + "d", + ) + else: + assert rig.create_cloud_path("a/b/c/d").parts == ( + rig.cloud_prefix, + rig.drive, + rig.test_dir, + "a", + "b", + "c", + "d", + ) def test_with_segments(rig): - assert rig.create_cloud_path("a/b/c/d").with_segments("x", "y", "z") == rig.client_class( - **rig.required_client_kwargs - ).CloudPath(f"{rig.cloud_prefix}x/y/z") + to_test = rig.create_cloud_path("a/b/c/d").with_segments("x", "y", "z") + assert to_test == rig.client_class(**rig.required_client_kwargs).CloudPath( + f"{to_test.anchor}x/y/z" + ) def test_is_junction(rig): diff --git a/tests/test_cloudpath_upload_copy.py b/tests/test_cloudpath_upload_copy.py index acf5e5ec..e913d23f 100644 --- a/tests/test_cloudpath_upload_copy.py +++ b/tests/test_cloudpath_upload_copy.py @@ -4,6 +4,7 @@ import pytest +from cloudpathlib.http.httppath import HttpPath from cloudpathlib.local import LocalGSPath, LocalS3Path, LocalS3Client from cloudpathlib.exceptions import ( CloudPathFileExistsError, @@ -64,19 +65,19 @@ def test_upload_from_file(rig, upload_assets_dir): assert p.read_text() == "Hello from 2" # to file, file exists and is newer - p.touch() + p.write_text("newer") with pytest.raises(OverwriteNewerCloudError): p.upload_from(upload_assets_dir / "upload_1.txt") # to file, file exists and is newer; overwrite - p.touch() + p.write_text("even newer") sleep(1.1) p.upload_from(upload_assets_dir / "upload_1.txt", force_overwrite_to_cloud=True) assert p.exists() assert p.read_text() == "Hello from 1" # to dir, dir exists - p = rig.create_cloud_path("dir_0") # created by fixtures + p = rig.create_cloud_path("dir_0/") # created by fixtures assert p.exists() p.upload_from(upload_assets_dir / "upload_1.txt") assert (p / "upload_1.txt").exists() @@ -92,7 +93,7 @@ def test_upload_from_dir(rig, upload_assets_dir): assert assert_mirrored(p, upload_assets_dir) # to dir, dir exists - p2 = rig.create_cloud_path("dir_0") # created by fixtures + p2 = rig.create_cloud_path("dir_0/") # created by fixtures assert p2.exists() p2.upload_from(upload_assets_dir) @@ -100,12 +101,12 @@ def test_upload_from_dir(rig, upload_assets_dir): # a newer file exists on cloud sleep(1) - (p / "upload_1.txt").touch() + (p / "upload_1.txt").write_text("newer") with pytest.raises(OverwriteNewerCloudError): p.upload_from(upload_assets_dir) # force overwrite - (p / "upload_1.txt").touch() + (p / "upload_1.txt").write_text("even newer") (p / "upload_2.txt").unlink() p.upload_from(upload_assets_dir, force_overwrite_to_cloud=True) assert assert_mirrored(p, upload_assets_dir) @@ -135,9 +136,11 @@ def test_copy(rig, upload_assets_dir, tmpdir): # cloud to cloud -> make sure no local cache p_new = p.copy(p.parent / "new_upload_1.txt") assert p_new.exists() - assert not p_new._local.exists() # cache should never have been downloaded - assert not p._local.exists() # cache should never have been downloaded - assert p_new.read_text() == "Hello from 1" + + if rig.path_class not in [HttpPath]: + assert not p_new._local.exists() # cache should never have been 
downloaded + assert not p._local.exists() # cache should never have been downloaded + assert p_new.read_text() == "Hello from 1" # cloud to cloud path as string cloud_dest = str(p.parent / "new_upload_0.txt") @@ -146,14 +149,14 @@ def test_copy(rig, upload_assets_dir, tmpdir): assert p_new.read_text() == "Hello from 1" # cloud to cloud directory - cloud_dest = rig.create_cloud_path("dir_1") # created by fixtures + cloud_dest = rig.create_cloud_path("dir_1/") # created by fixtures p_new = p.copy(cloud_dest) assert str(p_new) == str(p_new.parent / p.name) # file created assert p_new.exists() assert p_new.read_text() == "Hello from 1" # cloud to cloud overwrite - p_new.touch() + p_new.write_text("p_new") with pytest.raises(OverwriteNewerCloudError): p_new = p.copy(p_new) @@ -193,7 +196,7 @@ def test_copy(rig, upload_assets_dir, tmpdir): (other_dir / p2.name).unlink() # cloud dir raises - cloud_dir = rig.create_cloud_path("dir_1") # created by fixtures + cloud_dir = rig.create_cloud_path("dir_1/") # created by fixtures with pytest.raises(ValueError) as e: p_new = cloud_dir.copy(Path(tmpdir.mkdir("test_copy_dir_fails"))) assert "use the method copytree" in str(e) @@ -207,12 +210,12 @@ def test_copytree(rig, tmpdir): p.copytree(local_out) with pytest.raises(CloudPathFileExistsError): - p = rig.create_cloud_path("dir_0") + p = rig.create_cloud_path("dir_0/") p_out = rig.create_cloud_path("dir_0/file0_0.txt") p.copytree(p_out) # cloud dir to local dir that exists - p = rig.create_cloud_path("dir_1") + p = rig.create_cloud_path("dir_1/") local_out = Path(tmpdir.mkdir("copytree_from_cloud")) p.copytree(local_out) assert assert_mirrored(p, local_out) @@ -228,12 +231,12 @@ def test_copytree(rig, tmpdir): assert assert_mirrored(p, local_out) # cloud dir to cloud dir that does not exist - p2 = rig.create_cloud_path("new_dir") + p2 = rig.create_cloud_path("new_dir/") p.copytree(p2) assert assert_mirrored(p2, p) # cloud dir to cloud dir that exists - p2 = rig.create_cloud_path("new_dir2") + p2 = rig.create_cloud_path("new_dir2/") (p2 / "existing_file.txt").write_text("asdf") # ensures p2 exists p.copytree(p2) assert assert_mirrored(p2, p, check_no_extra=False) @@ -251,7 +254,7 @@ def test_copytree(rig, tmpdir): (p / "dir2" / "file2.txt").write_text("ignore") # cloud dir to local dir but ignoring files (shutil.ignore_patterns) - p3 = rig.create_cloud_path("new_dir3") + p3 = rig.create_cloud_path("new_dir3/") p.copytree(p3, ignore=ignore_patterns("*.py", "dir*")) assert assert_mirrored(p, p3, check_no_extra=False) assert not (p3 / "ignored.py").exists() @@ -259,7 +262,7 @@ def test_copytree(rig, tmpdir): assert not (p3 / "dir2").exists() # cloud dir to local dir but ignoring files (custom function) - p4 = rig.create_cloud_path("new_dir4") + p4 = rig.create_cloud_path("new_dir4/") def _custom_ignore(path, names): ignore = [] diff --git a/tests/test_http.py b/tests/test_http.py new file mode 100644 index 00000000..d75c523f --- /dev/null +++ b/tests/test_http.py @@ -0,0 +1,36 @@ +from tests.conftest import CloudProviderTestRig + + +def test_https(https_rig: CloudProviderTestRig): + """Basic tests for https; we run the full suite against the http_rig""" + existing_file = https_rig.create_cloud_path("dir_0/file0_0.txt") + + # existence and listing + assert existing_file.exists() + assert existing_file.parent.exists() + assert existing_file.name in [f.name for f in existing_file.parent.iterdir()] + + # root level checks + root = list(existing_file.parents)[-1] + assert root.exists() + assert 
len(list(root.iterdir())) > 0 + + # reading and wrirting + existing_file.write_text("Hello from 0") + assert existing_file.read_text() == "Hello from 0" + + # creating new files + not_existing_file = https_rig.create_cloud_path("dir_0/new_file.txt") + + assert not not_existing_file.exists() + + not_existing_file.upload_from(existing_file) + + assert not_existing_file.read_text() == "Hello from 0" + + # deleteing + not_existing_file.unlink() + assert not not_existing_file.exists() + + # metadata + assert existing_file.stat().st_mtime != 0 diff --git a/tests/test_s3_specific.py b/tests/test_s3_specific.py index 4b12f7b9..45d2bfea 100644 --- a/tests/test_s3_specific.py +++ b/tests/test_s3_specific.py @@ -176,7 +176,7 @@ def test_directories(s3_like_rig): assert super_path.exists() assert not super_path.is_dir() - super_path = s3_like_rig.create_cloud_path("dir_0") + super_path = s3_like_rig.create_cloud_path("dir_0/") assert super_path.exists() assert super_path.is_dir() diff --git a/tests/utilities/insecure-test.csr b/tests/utilities/insecure-test.csr new file mode 100644 index 00000000..01625734 --- /dev/null +++ b/tests/utilities/insecure-test.csr @@ -0,0 +1,27 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIIEqjCCApICAQAwSDELMAkGA1UEBhMCVVMxETAPBgNVBAgMCENvbG9yYWRvMQ8w +DQYDVQQHDAZEZW52ZXIxFTATBgNVBAoMDGNsb3VkcGF0aGxpYjCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAK5PvMKSP46Sf+8kEFEQdbMkcr9Oph1pzPK6 +yIRwWJK2CRTduLKYjzeivyS3roqKf2RK8CI3/aPRdMENADdAlUvRkfHYy1VyJey+ +9kuZ/DZfcmMXcUkNfiezv2PltGSL0eGYlWCCH2sAZc51LZrBwfnma1NAXiqDe0yD +36izMxIKgoGQ+DoatxNhQVYprDOi4VRW7qtw6V2Y/zqBFXctjBVeLyEm4c0MLdUQ +I/Ftw1mcttPmFWgfkGuOEeDdL7HFTbRj6PpzIC4mh1OSDONmv455XSQmia4egrDS +bpIrBOH8Al3fukD8R+Bwv0thWjVezFUQCxiynfASq6Lhb/kqTp93XcWw4DVaVPox +xGUDqDgfPq4XGxrKQR3ah94c/7jyhz4ih6td5KLf4hvExK77i3l61dgqW/86uj7g +gJEkWcAAY/SVnZneZSEClM82P/YyGavTTzw6ibi1n2zaRnRjuzEqiC6C92VoYlWF +F4S50o/gHhCHYWb775IIt8CAYqqryBHrN0r2vvJVU6lOmHTsnfbVv+XzGgNroBP9 +NsP1jDJA04XGMCq6DT8B5V5GO6kVn37Uqb5ER6RTBTxlcHh6oqtzdoHlVxMjdLwh +HPAug/DTZn4a1b9zTyK1YqSzNIM8eV/ckmySG5YMZJQovMHd7YVzB4hjq9kVupxa +bfPhjIHxAgMBAAGgHTAbBgkqhkiG9w0BCQcxDgwMY2xvdWRwYXRobGliMA0GCSqG +SIb3DQEBCwUAA4ICAQBeTRNKjo+ol3zuKfteuKt7+mLWzL3/qtUSCmCwt6t+NebN +ebkOwZA4HW0uUt/rdht58CJAvFri+DnkEidt/ldcg/CQ/tpWALZFdNa2z0hb+qEL +Q7wHO1QkwHG8/Q7yrcBNGSDsp4l7cH+8FQBcAVJxn++ixTe4dIiyscUdNRkXywsT +/UdQlK3oULR7Zv9k3nDErXTow/6QazjxtUyrfyuFdSDTAKJaKCOLt5NcJif/Ev3G +rUMJQElNz3W0P73ci+ueuihYdaveDx1vptO9VCBnwFOyTgjCYPS9g3MB8KIh5cJz +sj2J5J5tEUsyAa8ky4hvoLyP7GE29XvPA8pH1rOtQ++lmMzpP1vkPEGe0ezXrw2y +h4LBJXeMCg3/r3otEHnppI5PRTX3m1WlHyInpFIjets6VLDKjwENyreDmO5hIfRd +4ZxjxYzG97Tekoa+v9Y9qf3YCCGvbswOwfyj8hNheoMKv2f+rG2MwSPWfYlML/oT +4UA/C3o9Y7oa7H9FdEiTuXRgLcKUZqZJ0JuVhSbdPAAYSdrQE/EF06jyU6ZENxUu +0UJRwaXLETIIii99TUxyTmJTrvWAEbo5hpwfA1P6aaCLtWj0Qm6WSD3uLjU56yaX +6Q2kdspxv1BiT2TC4RO/ZH/8OwmSfe0dSg8jEOQf2+B0DcTPD+yHjo2hZWpT0A== +-----END CERTIFICATE REQUEST----- diff --git a/tests/utilities/insecure-test.key b/tests/utilities/insecure-test.key new file mode 100644 index 00000000..2c6f5248 --- /dev/null +++ b/tests/utilities/insecure-test.key @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQwIBADANBgkqhkiG9w0BAQEFAASCCS0wggkpAgEAAoICAQCuT7zCkj+Okn/v +JBBREHWzJHK/TqYdaczyusiEcFiStgkU3biymI83or8kt66Kin9kSvAiN/2j0XTB +DQA3QJVL0ZHx2MtVciXsvvZLmfw2X3JjF3FJDX4ns79j5bRki9HhmJVggh9rAGXO +dS2awcH55mtTQF4qg3tMg9+oszMSCoKBkPg6GrcTYUFWKawzouFUVu6rcOldmP86 +gRV3LYwVXi8hJuHNDC3VECPxbcNZnLbT5hVoH5BrjhHg3S+xxU20Y+j6cyAuJodT +kgzjZr+OeV0kJomuHoKw0m6SKwTh/AJd37pA/EfgcL9LYVo1XsxVEAsYsp3wEqui 
+4W/5Kk6fd13FsOA1WlT6McRlA6g4Hz6uFxsaykEd2ofeHP+48oc+IoerXeSi3+Ib +xMSu+4t5etXYKlv/Oro+4ICRJFnAAGP0lZ2Z3mUhApTPNj/2Mhmr0088Oom4tZ9s +2kZ0Y7sxKogugvdlaGJVhReEudKP4B4Qh2Fm+++SCLfAgGKqq8gR6zdK9r7yVVOp +Tph07J321b/l8xoDa6AT/TbD9YwyQNOFxjAqug0/AeVeRjupFZ9+1Km+REekUwU8 +ZXB4eqKrc3aB5VcTI3S8IRzwLoPw02Z+GtW/c08itWKkszSDPHlf3JJskhuWDGSU +KLzB3e2FcweIY6vZFbqcWm3z4YyB8QIDAQABAoICAAvBHOdCSd7CTLYZY/kkl4D8 +sbkO+nOumUPx3F+MynaKzGKw5lczESpz7EaFp4pl9zdbE7yFRVchK/LeIzXNeSz8 +ecZXeCGP3A/XH+cgyTUdJr0GmuEf4XIpyzKN+8qoO+0KfhjKb9GBuABZdYKaSh2z +S2kLRMnCaip3FKgKjzbclm1auA8F3E50CWc7rXPYhXk5RqQxG6gUoVaNRR+BnbVy +T4kl+7gv9/09NsBrIcqTQ97pKWf03zl7y3D8DfODkVhbQLAttfa/4V/Y0BRkuAEk +wYumvVh6IvGQRNxjK0F6y8U0EmNSLYt+yAQgyENIXEzobozXmFtU1dX/fZxNix7n +9fRXFBjOHVJNyW2nYgdVPeENbG+3u68baVsYG8sjsbk6XJyh9SMozEPaOCIQGWcr +pFz9yZb2rCZKvqlz09Qnhx1TKblMnUkC1VmVXLZOgylhJY12aueibNpaPw6LHPu1 +8JUnN0e2PIUjl4wWn6GPmkN+PSMm6khUTwYZx199fC9QFuxkij1qG5iQwvvsuMIH +gxvjO3XP2RAR01UNxhPPG+PgM6g3TBCfRd2B21toKgKNC9kzwsVLg251czxeTVh1 +2/uK0h06MkqHl11fJvBrWKLUhsnpgNqMSGusDIvf9vA39LvJSVxAcE550/dhdbY9 +VSjPnS5jcsK7JA4RgJ3rAoIBAQD09k5m8H+pky3+RMP0BP0apsJk5feLFl++FQc0 +otLgPzKSBWPqdhY/2R/UJKBQEc28CkWrtoe5wr51gM3t/ytU4xltYY+o6MyaAChD +rtwhm62Uu0X/CA1G9FTmjQJkCmNybwHzaqoHZ4kEax3WVGx0FC6Zxp2rl/wIDYuJ +z1tls+MMsVAoeoDCoxpRzSxWqY4xeEROuJoEOPdesPCkUqqCga1rT6+I8IUA7lmb +wjrOD7RB3RyEuM5oxfIJBuXZKlgHGjF1M0eCo9xjQFZPCG2lkoNn5UJofEz8Ktbv +Cazx6YvHSMYuowEsonbuz2C3er2ydyCNIuE+n1oLGBz9RmKjAoIBAQC2KnWvhfM4 +sz31lxKDg5xPszU7wozItTWzMXsg6hXi/wIFtFc7Y23IY8al5WiUaO9fV42nOUDB +gNk684lsKPR144XE5jxUSzVqM9DCLj931fHpuAkmxr6bkhxnDMK37QQ3YUib68ca +nBucqozaoS15sdgzTc25xNWgPuLHxq3wVBi1bELbSgLrrWVHr8hB3xTLF1WbCLxC +RlNlSc7EnJ841xx1mZmTwxsWG+bHfs6NjgD4zVqbjLSj5Orv8f0pD4AE8pyISlr+ ++rJTT6iaHQvCKMYv4Ynfa74YA168BBR+9IcstrIkdno25uHOXDb97V32ab5S3yFW +YlRE0lEHA+ZbAoIBADrPX2dLWfrmQOaIr9rHz9Q0MPt0Uofr6TSDa+VxKdt4kLWJ +4cEKdLEDeaa+3FYc0B3TAYMGIXOxk3Q2Zjo7RrXsh9BA2fKdYXGflAsb0fGnpHbO +tzFRR46/Xhqzw90suU9h40ADXarFapnK9bDdN+Rua/mzO2tU48czKUr+o1y5YUtM +zofJUVxpOApnjbuInYC29P9JRoC5BHqRVFS/G/yVEYNv8B6aT/Q3RQAmE2QhVQ9y +/EPI8pUo4MDWDRykE9owqasPkp2EpYaWjaIPzfMwR6gL3HOlU/4+creUxRaXEV3Y +1OuhasjCgHc5BmlGaICOJRx9QUJ9k2qScXNFEK0CggEBALYazhkQdCtLa/YV9wkH +yXwXL3E1NJ30IOGo5mjp+cU5neDr/oQ9CmNX8x6mrhwmpp0OHmp8HpMSZpa7HLbG +XlN3hqNmcgrmQFiRwfBMYWA/vR0iv+tGpQdKUiBmLkXFqABgvUA5vImKY0QDbtmk +ZJySQApRjgZWkiQmmXiS0hE9UJIUzuT/INpPNb8rJ6tKAjRgeFCKtAAg43+PACem +VrlwuV+KlG+VjH9Wlyb5Si1SNwCB8UEssOxijMYfiC/C8fyAOCE7C6p4HUqRiH+/ +56BKOI1nDvgNcjP5MnwMLB0aAAOgA4fV9Kjrt/IeV08TOmp6HSwlKON9WraN9Thp +Gp8CggEBAIeGkjASPQsy41wK+9TFY2tPfDFee1pJ22JywGYasK1ZuZh/003bOYjs +cg4fpp0/1/yYe+Xgebb3xzHIUlauRMiNQUPFAQTTWlUnGyHVuTpxEVbLhcqY2FG/ +t5SPgmu1H31bdfpA4LoA2ewLFeGIjKQOTMX5aCgPyZaqW/BAG0BcPEntYlLJpGbG +zSPIw8qUL3n/Bm0zpI3SrcUQoe0qOVr6UdeGTNO0dCkhED53ZzvoeMjsBv2IGegC +OPGzJCiW8NYquIRXSu0N9MHPYYy9XJU8rwkdOPyzNMvw0duBedT9wY3cimAD3KtQ +MTfJlrjd23Xn+aEmf/4M35SFl7OFxts= +-----END PRIVATE KEY----- diff --git a/tests/utilities/insecure-test.pem b/tests/utilities/insecure-test.pem new file mode 100644 index 00000000..6a1469ba --- /dev/null +++ b/tests/utilities/insecure-test.pem @@ -0,0 +1,30 @@ +-----BEGIN CERTIFICATE----- +MIIFFzCCAv8CFBtqKeSAcQf/bQBPZaROIpbzIQ7UMA0GCSqGSIb3DQEBCwUAMEgx +CzAJBgNVBAYTAlVTMREwDwYDVQQIDAhDb2xvcmFkbzEPMA0GA1UEBwwGRGVudmVy +MRUwEwYDVQQKDAxjbG91ZHBhdGhsaWIwHhcNMjQwOTEzMTExNzQzWhcNMzMxMTMw +MTExNzQzWjBIMQswCQYDVQQGEwJVUzERMA8GA1UECAwIQ29sb3JhZG8xDzANBgNV +BAcMBkRlbnZlcjEVMBMGA1UECgwMY2xvdWRwYXRobGliMIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEArk+8wpI/jpJ/7yQQURB1syRyv06mHWnM8rrIhHBY 
+krYJFN24spiPN6K/JLeuiop/ZErwIjf9o9F0wQ0AN0CVS9GR8djLVXIl7L72S5n8 +Nl9yYxdxSQ1+J7O/Y+W0ZIvR4ZiVYIIfawBlznUtmsHB+eZrU0BeKoN7TIPfqLMz +EgqCgZD4Ohq3E2FBVimsM6LhVFbuq3DpXZj/OoEVdy2MFV4vISbhzQwt1RAj8W3D +WZy20+YVaB+Qa44R4N0vscVNtGPo+nMgLiaHU5IM42a/jnldJCaJrh6CsNJukisE +4fwCXd+6QPxH4HC/S2FaNV7MVRALGLKd8BKrouFv+SpOn3ddxbDgNVpU+jHEZQOo +OB8+rhcbGspBHdqH3hz/uPKHPiKHq13kot/iG8TErvuLeXrV2Cpb/zq6PuCAkSRZ +wABj9JWdmd5lIQKUzzY/9jIZq9NPPDqJuLWfbNpGdGO7MSqILoL3ZWhiVYUXhLnS +j+AeEIdhZvvvkgi3wIBiqqvIEes3Sva+8lVTqU6YdOyd9tW/5fMaA2ugE/02w/WM +MkDThcYwKroNPwHlXkY7qRWfftSpvkRHpFMFPGVweHqiq3N2geVXEyN0vCEc8C6D +8NNmfhrVv3NPIrVipLM0gzx5X9ySbJIblgxklCi8wd3thXMHiGOr2RW6nFpt8+GM +gfECAwEAATANBgkqhkiG9w0BAQsFAAOCAgEAVIRLRR5bitzThcTsmSCPAzqbVCf1 +HSsTWGnISwiI3GD+2d+TykY+g9fw2eKbXzbfHu9VHFAMdpHfQc7Ud3d+tM45LnCo +cnvdXrpQg2EEdZaFJ76SmFMFoAnMd9LkuSdzt0P28nOlXVn/KDFp2ea8ROUUaM55 +oGjo6Cj7i9h5fEnuAEE2Gcepjp9DRjJRIuwAxcihEcQSxzv4mOHqwMuCk6dpOG5S +MgVoCMiWz/9vn9U+Vyn5cjTzLgbmEQPVm5BL57QfPUhFW8cAMR5NeIeizLSpiBZQ ++RvzW/S2T+s8Cc0GgUjgiAmOLRCVMLTJ+jv1KvWFzu762POqXpreTD9UGLHnUvxI +RbhEgxj8p4169CeJSa0A19U6pFWFsZU2MLJkjHTIGlpzk5Vg5qzMyybcbk9wQQZ/ +CMOg5pVaCZHyTUwrFxKF51oIv9a/tuQSe/ryj8GIj7t0mq0+7klvEn1a6wrkSr73 +FzMNaEm4eLRVWYbHj8m4314vvaDjtUXCcMDRLb8j3fjyrcPPTkbO99rt1jVfU5wS +Ji7tVksGrTIHHlWkqZdbPhfZyTBIG34FjtjSClNVsOBeX+VqUuku8uQaM/9iVNZS +QamZuURGQ1x5+XHMjUQpoqAII+zXegJ1RiVfequYcF7F0bermVVVGdb/Ly2yNH1F +O5/LKKZ32+d5sm4= +-----END CERTIFICATE-----
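A minimal usage sketch of the HttpClient / HttpPath classes introduced by this patch. This snippet is not part of the diff: the localhost URL is hypothetical, and it assumes a server that exposes a directory index page the default regex-based listing parser can read and that accepts PUT/DELETE requests (as the test handler in tests/http_fixtures.py does); otherwise pass custom_list_page_parser / custom_dir_matcher or an auth handler to the client.

from cloudpathlib import HttpClient, HttpPath

client = HttpClient()            # optionally HttpClient(auth=<urllib BaseHandler>)
client.set_as_default_client()

root = HttpPath("http://localhost:8000/data/")   # trailing slash marks a directory
f = root / "hello.txt"

f.write_text("hello")            # uploads via PUT through the client's opener
print(f.read_text())             # downloads via GET (through the local cache)
print(f.exists(), f.is_file())   # HEAD request / trailing-slash heuristic
print(f.stat().st_mtime)         # taken from the Last-Modified header, if present

for p in root.iterdir():         # parsed from the server's directory listing page
    print(p.name, p.is_dir())

f.unlink()                       # DELETE request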