diff --git a/.flake8 b/.flake8 index e034c121..579a9c35 100644 --- a/.flake8 +++ b/.flake8 @@ -4,5 +4,4 @@ extend-ignore = E203, W503 exclude = .git __pycache__ - setup.py .venv \ No newline at end of file diff --git a/README.md b/README.md index cb343d9a..075e8960 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,7 @@ After migrations and development database loading are in place, you can rebuild ./scripts/update ``` -`pip` dependencies in `setup.py` are collected and installed through requirements files. +`pip` dependencies in `pyproject.toml` are collected and installed through requirements files. If you modify dependencies, run `./scripts/generate-requirements` to regenerate `requirements-*.txt` used by Dockerfiles otherwise your dependency change will not be realized. diff --git a/deployment/Dockerfile b/deployment/Dockerfile index 027f3567..fd877fd1 100644 --- a/deployment/Dockerfile +++ b/deployment/Dockerfile @@ -1,51 +1,44 @@ -FROM ubuntu:20.04 - -RUN apt-get update --fix-missing -RUN DEBIAN_FRONTEND=noninteractive apt-get install -y wget unzip curl gnupg \ - apt-transport-https \ - python3-pip \ +FROM mcr.microsoft.com/azurelinux/base/python:3.12 + +RUN tdnf install -y \ + ca-certificates \ + build-essential \ + tar \ + wget \ + unzip \ jq \ git \ - libicu66 + azure-cli \ + && tdnf clean all # Install Azure Function Tools - -RUN curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /etc/apt/trusted.gpg.d/microsoft.gpg -RUN echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-ubuntu-focal-prod focal main" \ - > /etc/apt/sources.list.d/dotnetdev.list - -RUN apt-get update && apt-get install -y azure-functions-core-tools-4 - -# Install Terraform 1.8.2 - -RUN wget -O terraform.zip https://releases.hashicorp.com/terraform/1.8.2/terraform_1.8.2_linux_amd64.zip -RUN unzip terraform.zip -RUN mv terraform /usr/local/bin +RUN wget 
https://github.com/Azure/azure-functions-core-tools/releases/download/4.0.5700/Azure.Functions.Cli.linux-x64.4.0.5700.zip \ + && unzip Azure.Functions.Cli.linux-x64.4.0.5700.zip -d /usr/local/azure-functions-core-tools-4 \ + && chmod +x /usr/local/azure-functions-core-tools-4/func \ + && chmod +x /usr/local/azure-functions-core-tools-4/gozip \ + && ln -s /usr/local/azure-functions-core-tools-4/func /usr/local/bin/func \ + && ln -s /usr/local/azure-functions-core-tools-4/gozip /usr/local/bin/gozip + +# Install Terraform +RUN wget -O terraform.zip https://releases.hashicorp.com/terraform/1.11.2/terraform_1.11.2_linux_amd64.zip \ + && unzip terraform.zip \ + && mv terraform /usr/local/bin # Install kubectl - -RUN curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" -RUN install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl - +RUN curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" \ + && install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl # Install Helm - -RUN curl https://baltocdn.com/helm/signing.asc | apt-key add - -RUN echo "deb https://baltocdn.com/helm/stable/debian/ all main" | tee /etc/apt/sources.list.d/helm-stable-debian.list -RUN apt-get update -RUN apt-get install helm=3.14.0-1 +RUN wget https://get.helm.sh/helm-v3.14.4-linux-amd64.tar.gz \ + && tar -zxvf helm-v3.14.4-linux-amd64.tar.gz \ + && mv linux-amd64/helm /usr/local/bin/helm # Install kubelogin - -RUN curl -sL https://github.com/Azure/kubelogin/releases/download/v0.0.18/kubelogin-linux-amd64.zip --output kubelogin.zip \ +RUN curl -sL https://github.com/Azure/kubelogin/releases/download/v0.2.8/kubelogin-linux-amd64.zip --output kubelogin.zip \ && unzip -j kubelogin.zip bin/linux_amd64/kubelogin -d /usr/local/bin/ \ && rm -rf kubelogin.zip -# Install azure client -RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash - # Install Jinja -RUN pip3 install Jinja2 
pyyaml==6.0 - +RUN pip install Jinja2 pyyaml==6.0.2 WORKDIR /opt/src diff --git a/deployment/bin/deploy b/deployment/bin/deploy index 70b88e8e..49cad396 100755 --- a/deployment/bin/deploy +++ b/deployment/bin/deploy @@ -176,6 +176,7 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then --wait \ --timeout 2m0s \ -f ${DEPLOY_VALUES_FILE} \ + --debug echo "================" echo "==== Tiler =====" @@ -188,6 +189,7 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then --wait \ --timeout 2m0s \ -f ${DEPLOY_VALUES_FILE} \ + --debug echo "==================" echo "==== Ingress =====" @@ -199,7 +201,8 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then --kube-context "${KUBE_CONTEXT}" \ --wait \ --timeout 2m0s \ - -f ${DEPLOY_VALUES_FILE} + -f ${DEPLOY_VALUES_FILE} \ + --debug echo "Installing ingress-nginx..." helm upgrade --install nginx-ingress helm/ingress-nginx-4.8.3.tgz \ @@ -215,7 +218,8 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then --version "4.8.3"\ --wait \ --timeout 2m0s \ - -f bin/nginx-values.yaml + -f bin/nginx-values.yaml \ + --debug ######################### # Deploy Azure Function # diff --git a/deployment/terraform/resources/ai.tf b/deployment/terraform/resources/ai.tf index b1f3b854..a39d546a 100644 --- a/deployment/terraform/resources/ai.tf +++ b/deployment/terraform/resources/ai.tf @@ -3,4 +3,10 @@ resource "azurerm_application_insights" "pc_application_insights" { location = azurerm_resource_group.pc.location resource_group_name = azurerm_resource_group.pc.name application_type = "web" + + lifecycle { + ignore_changes = [ + workspace_id + ] + } } \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 48528498..416ae3d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -117,7 +117,7 @@ services: redis: image: redis:6.2.6-buster - command: redis-server --port 6380 --requirepass devcache + command: redis-server --port 6380 --requirepass devcache --loglevel debug ports: - "6380:6380" volumes: diff --git a/pccommon/pccommon/constants.py 
b/pccommon/pccommon/constants.py index 423c99ca..dbac8e08 100644 --- a/pccommon/pccommon/constants.py +++ b/pccommon/pccommon/constants.py @@ -2,6 +2,8 @@ from opencensus.trace.attributes_helper import COMMON_ATTRIBUTES +CACHE_KEY_ITEM = "/item" + DEFAULT_COLLECTION_CONFIG_TABLE_NAME = "collectionconfig" DEFAULT_CONTAINER_CONFIG_TABLE_NAME = "containerconfig" DEFAULT_IP_EXCEPTION_CONFIG_TABLE_NAME = "ipexceptionlist" diff --git a/pccommon/pccommon/redis.py b/pccommon/pccommon/redis.py index b3ac0fcf..d166af59 100644 --- a/pccommon/pccommon/redis.py +++ b/pccommon/pccommon/redis.py @@ -14,6 +14,7 @@ from pccommon.config.core import PCAPIsConfig from pccommon.constants import ( BACKPRESSURE_KEY_PREFIX, + CACHE_KEY_ITEM, HTTP_429_TOO_MANY_REQUESTS, RATE_LIMIT_KEY_PREFIX, ) @@ -104,9 +105,15 @@ async def register_scripts(state: State) -> None: async def cached_result( - fn: Callable[[], Coroutine[Any, Any, T]], cache_key: str, request: Request + fn: Callable[[], Coroutine[Any, Any, T]], + cache_key: str, + request: Request, + read_only: bool = False, ) -> T: - """Either get the result from redis or run the function and cache the result.""" + """Either get the result from redis or run the function and cache the result. + + If `read_only` is True, only attempt to read from the cache, do not write to it. 
+ """ host = request.url.hostname host_cache_key = f"{cache_key}:{host}" settings = PCAPIsConfig.from_environment() @@ -124,7 +131,7 @@ async def cached_result( except Exception as e: # Don't fail on redis failure logger.error( - f"Error in cache: {e}", + f"Error in cache read: {e}", extra=get_custom_dimensions({"cache_key": host_cache_key}, request), ) if settings.debug: @@ -139,14 +146,19 @@ async def cached_result( {"cache_key": host_cache_key, "duration": f"{te - ts:0.4f}"}, request ), ) + if read_only: + return result + try: if r: await r.set(host_cache_key, orjson.dumps(result), settings.redis_ttl) except Exception as e: # Don't fail on redis failure logger.error( - f"Error in cache: {e}", - extra=get_custom_dimensions({"cache_key": host_cache_key}, request), + f"Error in cache write: {e}", + extra=get_custom_dimensions( + {"cache_key": host_cache_key, "cache_value_type": type(result)}, request + ), ) if settings.debug: raise @@ -321,3 +333,8 @@ async def _wrapper(*args: Any, **kwargs: Any) -> T: return _wrapper return _decorator + + +def stac_item_cache_key(collection: str, item: str) -> str: + """Generate a cache key for a STAC item.""" + return f"{CACHE_KEY_ITEM}:{collection}:{item}" diff --git a/pcstac/pcstac/client.py b/pcstac/pcstac/client.py index dfc30eeb..e2f68475 100644 --- a/pcstac/pcstac/client.py +++ b/pcstac/pcstac/client.py @@ -18,15 +18,14 @@ from pccommon.config import get_all_render_configs, get_render_config from pccommon.config.collections import DefaultRenderConfig -from pccommon.constants import DEFAULT_COLLECTION_REGION +from pccommon.constants import CACHE_KEY_ITEM, DEFAULT_COLLECTION_REGION from pccommon.logging import get_custom_dimensions -from pccommon.redis import back_pressure, cached_result, rate_limit +from pccommon.redis import back_pressure, cached_result, rate_limit, stac_item_cache_key from pccommon.tracing import add_stac_attributes_from_search from pcstac.config import API_DESCRIPTION, API_LANDING_PAGE_ID, API_TITLE, 
get_settings from pcstac.contants import ( CACHE_KEY_COLLECTION, CACHE_KEY_COLLECTIONS, - CACHE_KEY_ITEM, CACHE_KEY_ITEMS, CACHE_KEY_LANDING_PAGE, CACHE_KEY_SEARCH, @@ -288,7 +287,7 @@ async def _fetch() -> Item: ) return item - cache_key = f"{CACHE_KEY_ITEM}:{collection_id}:{item_id}" + cache_key = stac_item_cache_key(collection_id, item_id) return await cached_result(_fetch, cache_key, request) @classmethod diff --git a/pcstac/pcstac/contants.py b/pcstac/pcstac/contants.py index 35f8e569..9e509b35 100644 --- a/pcstac/pcstac/contants.py +++ b/pcstac/pcstac/contants.py @@ -1,7 +1,6 @@ CACHE_KEY_COLLECTIONS = "/collections" CACHE_KEY_COLLECTION = "/collection" CACHE_KEY_ITEMS = "/items" -CACHE_KEY_ITEM = "/item" CACHE_KEY_QUERYABLES = "/queryables" CACHE_KEY_SEARCH = "/search" CACHE_KEY_LANDING_PAGE = "/landing-page" diff --git a/pcstac/pyproject.toml b/pcstac/pyproject.toml new file mode 100644 index 00000000..89c2257c --- /dev/null +++ b/pcstac/pyproject.toml @@ -0,0 +1,38 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "pcstac" +dynamic = ["version"] +description = "Planetary Computer API - STAC." 
+license = { text = "MIT" } +requires-python = ">=3.7" +dependencies = [ + "idna>=3.7.0", + "orjson==3.10.4", + "pypgstac[psycopg]>=0.8.5,<0.9", + "pystac==1.10.1", + "stac-fastapi.api==3.0.0b2", + "stac-fastapi.extensions==3.0.0b2", + "stac-fastapi.pgstac==3.0.0a4", + "stac-fastapi.types==3.0.0b2", + "typing_extensions>=4.6.1", + "urllib3>=2.2.2", +] + +[project.optional-dependencies] +dev = [ + "types-requests", +] +server = [ + "uvicorn[standard]==0.30.1", +] + +[tool.hatch.version] +path = "pcstac/version.py" + +[tool.hatch.build.targets.sdist] +include = [ + "/pcstac", +] diff --git a/pcstac/requirements-server.txt b/pcstac/requirements-server.txt index 3110bffa..44ca2839 100644 --- a/pcstac/requirements-server.txt +++ b/pcstac/requirements-server.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=dev --extra=server --output-file=pcstac/requirements-server.txt ./pcstac/setup.py +# pip-compile --extra=dev --extra=server --output-file=pcstac/requirements-server.txt ./pcstac/pyproject.toml # annotated-types==0.7.0 # via pydantic @@ -49,14 +49,14 @@ httptools==0.6.1 idna==3.7 # via # anyio - # pcstac (pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) iso8601==2.1.0 # via stac-fastapi-types lark==0.12.0 # via pygeofilter orjson==3.10.4 # via - # pcstac (pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) # pypgstac # stac-fastapi-pgstac plpygis==0.2.2 @@ -85,10 +85,10 @@ pygeoif==1.5.0 # via pygeofilter pypgstac[psycopg]==0.8.6 # via - # pcstac (pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) # stac-fastapi-pgstac pystac==1.10.1 - # via pcstac (pcstac/setup.py) + # via pcstac (pcstac/pyproject.toml) python-dateutil==2.8.2 # via # dateparser @@ -114,18 +114,18 @@ sniffio==1.3.1 # via anyio stac-fastapi-api==3.0.0b2 # via - # pcstac (pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) # stac-fastapi-extensions # stac-fastapi-pgstac stac-fastapi-extensions==3.0.0b2 # via - # pcstac 
(pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) # stac-fastapi-pgstac stac-fastapi-pgstac==3.0.0a4 - # via pcstac (pcstac/setup.py) + # via pcstac (pcstac/pyproject.toml) stac-fastapi-types==3.0.0b2 # via - # pcstac (pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) # stac-fastapi-api # stac-fastapi-extensions # stac-fastapi-pgstac @@ -142,12 +142,12 @@ tenacity==8.1.0 termcolor==2.4.0 # via fire types-requests==2.32.0.20250328 - # via pcstac (pcstac/setup.py) + # via pcstac (pcstac/pyproject.toml) typing-extensions==4.12.2 # via # anyio # fastapi-slim - # pcstac (pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) # psycopg # psycopg-pool # pydantic @@ -159,10 +159,10 @@ tzlocal==5.2 # via dateparser urllib3==2.2.2 # via - # pcstac (pcstac/setup.py) + # pcstac (pcstac/pyproject.toml) # types-requests uvicorn[standard]==0.30.1 - # via pcstac (pcstac/setup.py) + # via pcstac (pcstac/pyproject.toml) uvloop==0.19.0 # via uvicorn version-parser==1.0.1 diff --git a/pcstac/setup.cfg b/pcstac/setup.cfg deleted file mode 100644 index f57bfaa9..00000000 --- a/pcstac/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[metadata] -version = attr: pcstac.version.__version__ diff --git a/pcstac/setup.py b/pcstac/setup.py deleted file mode 100644 index 956e4136..00000000 --- a/pcstac/setup.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Setup for pcstac.""" - -from setuptools import find_packages, setup - -# Runtime requirements. 
-inst_reqs = [ - "idna>=3.7.0", - "stac-fastapi.api==3.0.0b2", - "stac-fastapi.extensions==3.0.0b2", - "stac-fastapi.pgstac==3.0.0a4", - "stac-fastapi.types==3.0.0b2", - "orjson==3.10.4", - # Required due to some imports related to pypgstac CLI usage in startup script - "pypgstac[psycopg]>=0.8.5,<0.9", - "pystac==1.10.1", - "typing_extensions>=4.6.1", - "urllib3>=2.2.2", -] - -extra_reqs = { - "server": [ - "uvicorn[standard]==0.30.1", - ], - "dev": [ - "types-requests" - ] -} - -setup( - name="pcstac", - python_requires=">=3.7", - description="Planetary Computer API - STAC.", - packages=find_packages(exclude=["tests"]), - include_package_data=True, - zip_safe=False, - install_requires=inst_reqs, - extras_require=extra_reqs, -) diff --git a/pctiler/Dockerfile b/pctiler/Dockerfile index cadb53d0..d15661e3 100644 --- a/pctiler/Dockerfile +++ b/pctiler/Dockerfile @@ -10,14 +10,14 @@ COPY pctiler /opt/src/pctiler # Install the local modules in the new environment RUN --mount=type=cache,target=/root/.cache \ - /bin/sh -c "python3 -m pip install -U 'setuptools>=65.5.1'" + /bin/sh -c "python3 -m pip install -U 'setuptools>=65.5.1' uv" # The order of these pip installs is important :( RUN --mount=type=cache,target=/root/.cache \ - /bin/sh -c "python3 -m pip install -r ./pccommon/requirements.txt" + /bin/sh -c "uv pip install --system -r ./pccommon/requirements.txt" RUN --mount=type=cache,target=/root/.cache \ - /bin/sh -c "python3 -m pip install -r ./pctiler/requirements-server.txt" + /bin/sh -c "uv pip install --system -r ./pctiler/requirements-server.txt" RUN --mount=type=cache,target=/root/.cache \ - /bin/sh -c "python3 -m pip install --no-deps -e ./pccommon -e ./pctiler[server]" + /bin/sh -c "uv pip install --system --no-deps -e ./pccommon -e ./pctiler[server]" # GDAL config ENV GDAL_CACHEMAX 200 diff --git a/pctiler/pctiler/endpoints/item.py b/pctiler/pctiler/endpoints/item.py index 187c75b3..9af89b8a 100644 --- a/pctiler/pctiler/endpoints/item.py +++ 
b/pctiler/pctiler/endpoints/item.py @@ -1,3 +1,4 @@ +import asyncio import logging from typing import Annotated, Optional from urllib.parse import quote_plus, urljoin @@ -15,6 +16,7 @@ from titiler.pgstac.dependencies import get_stac_item from pccommon.config import get_render_config +from pccommon.redis import cached_result, stac_item_cache_key from pctiler.colormaps import PCColorMapParams from pctiler.config import get_settings from pctiler.endpoints.dependencies import get_endpoint_function @@ -29,13 +31,52 @@ logger = logging.getLogger(__name__) -def ItemPathParams( +async def ItemPathParams( request: Request, collection: str = Query(..., description="STAC Collection ID"), item: str = Query(..., description="STAC Item ID"), ) -> pystac.Item: - """STAC Item dependency.""" - return get_stac_item(request.app.state.dbpool, collection, item) + """ + STAC Item dependency. + + We attempt to read the STAC item in from the redis cache to ameliorate high + call volumes to the tiler, which can bottleneck on reading from pgstac. + For example, say you have a few thousand calls/second to the tiler to get + crops of STAC item assets. Presumably, someone will have run a STAC query to + enumerate those items and fill the cache. Without the cache the bottleneck + will become the large number of small, single-item queries to pgstac. + Pretty soon, pgstac will be overwhelmed with queued queries and all the + client requests will start to timeout. + """ + + # Async to sync nonsense + def _get_stac_item_dict() -> dict: + stac_item: pystac.Item = get_stac_item( + request.app.state.dbpool, collection, item + ) + return stac_item.to_dict() + + async def _fetch() -> dict: + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, _get_stac_item_dict) + + # It would have been great to reuse the cached value that the STAC service + # fills, but there are two problems: + # 1. 
titiler's get_stac_item() returns just the STAC item content from the database + without injected links, and calling stac_fastapi here to reproduce + the behavior of the STAC API feels wrong. + 2. We have no guarantee of temporal locality between the two services. + In practice, say a client first enumerates many STAC items for analysis + (cache filled). Then some time has passed, the cache expires, and the + client starts issuing tiler calls to read asset data. Then, the cache, + which was full, will be empty and we will have to call pgstac again. + It remains to be seen how we will handle this situation in general, + but for now we will make the STAC service and the tiler use different + keys in the cache, so they can individually fill their own caches. + _item = await cached_result( + _fetch, f"tiler:{stac_item_cache_key(collection, item)}", request + ) + return pystac.Item.from_dict(_item) # TODO: mypy fails in python 3.9, we need to find a proper way to do this diff --git a/pctiler/pctiler/main.py b/pctiler/pctiler/main.py index f6557303..f82ff17d 100755 --- a/pctiler/pctiler/main.py +++ b/pctiler/pctiler/main.py @@ -23,6 +23,7 @@ from pccommon.logging import ServiceName, init_logging from pccommon.middleware import TraceMiddleware, add_timeout, http_exception_handler from pccommon.openapi import fixup_schema +from pccommon.redis import connect_to_redis from pctiler.config import get_settings from pctiler.endpoints import ( configuration, @@ -47,6 +48,7 @@ async def lifespan(app: FastAPI) -> AsyncGenerator: """FastAPI Lifespan.""" await connect_to_db(app) + await connect_to_redis(app) yield await close_db_connection(app) diff --git a/pctiler/pyproject.toml b/pctiler/pyproject.toml new file mode 100644 index 00000000..a776bf18 --- /dev/null +++ b/pctiler/pyproject.toml @@ -0,0 +1,45 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "pctiler" +dynamic = ["version"] +description = "Planetary 
Computer API - Tiler." +license = { text = "MIT" } +requires-python = ">=3.7" +dependencies = [ + "fastapi-slim==0.111.0", + "geojson-pydantic==1.1.0", + "idna>=3.7.0", + "importlib_resources>=1.1.0;python_version<'3.9'", + "jinja2==3.1.5", + "matplotlib==3.9.0", + "orjson==3.10.4", + "pillow==10.3.0", + "planetary-computer==1.0.0", + "psycopg[binary,pool]", + "pydantic>=2.7,<2.8", + "pystac==1.10.1", + "rasterio==1.3.10", + "requests==2.32.3", + "titiler.core==0.18.3", + "titiler.mosaic==0.18.3", + "titiler.pgstac==1.3.0", +] + +[project.optional-dependencies] +dev = [ + "types-requests", +] +server = [ + "uvicorn[standard]==0.30.1", +] + +[tool.hatch.version] +path = "pctiler/version.py" + +[tool.hatch.build.targets.sdist] +include = [ + "/pctiler", +] diff --git a/pctiler/requirements-dev.txt b/pctiler/requirements-dev.txt index 0267a535..a96f8449 100644 --- a/pctiler/requirements-dev.txt +++ b/pctiler/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=dev --output-file=pctiler/requirements-dev.txt ./pctiler/setup.py +# pip-compile --extra=dev --output-file=pctiler/requirements-dev.txt ./pctiler/pyproject.toml # affine==2.4.0 # via rasterio @@ -53,13 +53,13 @@ exceptiongroup==1.2.0 # via anyio fastapi-slim==0.111.0 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-core fonttools==4.53.0 # via matplotlib geojson-pydantic==1.1.0 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-core # titiler-pgstac h11==0.14.0 @@ -74,7 +74,7 @@ idna==3.7 # via # anyio # httpx - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # requests importlib-metadata==7.1.0 # via rasterio @@ -82,14 +82,14 @@ importlib-resources==6.4.0 # via matplotlib jinja2==3.1.5 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-core kiwisolver==1.4.5 # via matplotlib markupsafe==2.1.5 # via 
jinja2 matplotlib==3.9.0 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) morecantile==5.3.0 # via # cogeo-mosaic @@ -110,7 +110,7 @@ numpy==1.26.4 # snuggs # titiler-core orjson==3.10.4 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) packaging==24.1 # via # matplotlib @@ -118,11 +118,11 @@ packaging==24.1 pillow==10.3.0 # via # matplotlib - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) planetary-computer==1.0.0 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) psycopg[binary,pool]==3.1.18 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) psycopg-binary==3.1.18 # via psycopg psycopg-pool==3.2.1 @@ -133,7 +133,7 @@ pydantic==2.7.4 # fastapi-slim # geojson-pydantic # morecantile - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # planetary-computer # pydantic-settings # rio-tiler @@ -153,7 +153,7 @@ pyproj==3.6.1 # via morecantile pystac==1.10.1 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # planetary-computer # pystac-client # rio-tiler @@ -173,13 +173,13 @@ pytz==2024.1 rasterio==1.3.10 # via # cogeo-mosaic - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # rio-tiler # supermorecado # titiler-core requests==2.32.3 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # planetary-computer # pystac-client rio-tiler==6.6.1 @@ -204,17 +204,17 @@ supermorecado==0.1.2 # via cogeo-mosaic titiler-core==0.18.3 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-mosaic # titiler-pgstac titiler-mosaic==0.18.3 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-pgstac titiler-pgstac==1.3.0 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) types-requests==2.31.0.6 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) types-urllib3==1.26.25.14 # via 
types-requests typing-extensions==4.10.0 diff --git a/pctiler/requirements-server.txt b/pctiler/requirements-server.txt index 9f0d4c29..bb396935 100644 --- a/pctiler/requirements-server.txt +++ b/pctiler/requirements-server.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=server --output-file=pctiler/requirements-server.txt ./pctiler/setup.py +# pip-compile --extra=server --output-file=pctiler/requirements-server.txt ./pctiler/pyproject.toml # affine==2.4.0 # via rasterio @@ -55,13 +55,13 @@ exceptiongroup==1.2.0 # via anyio fastapi-slim==0.111.0 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-core fonttools==4.53.0 # via matplotlib geojson-pydantic==1.1.0 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-core # titiler-pgstac h11==0.14.0 @@ -80,7 +80,7 @@ idna==3.7 # via # anyio # httpx - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # requests importlib-metadata==7.1.0 # via rasterio @@ -88,14 +88,14 @@ importlib-resources==6.4.0 # via matplotlib jinja2==3.1.5 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-core kiwisolver==1.4.5 # via matplotlib markupsafe==2.1.5 # via jinja2 matplotlib==3.9.0 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) morecantile==5.3.0 # via # cogeo-mosaic @@ -116,7 +116,7 @@ numpy==1.26.4 # snuggs # titiler-core orjson==3.10.4 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) packaging==24.1 # via # matplotlib @@ -124,11 +124,11 @@ packaging==24.1 pillow==10.3.0 # via # matplotlib - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) planetary-computer==1.0.0 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) psycopg[binary,pool]==3.1.18 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) psycopg-binary==3.1.18 # via 
psycopg psycopg-pool==3.2.1 @@ -139,7 +139,7 @@ pydantic==2.7.4 # fastapi-slim # geojson-pydantic # morecantile - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # planetary-computer # pydantic-settings # rio-tiler @@ -159,7 +159,7 @@ pyproj==3.6.1 # via morecantile pystac==1.10.1 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # planetary-computer # pystac-client # rio-tiler @@ -182,13 +182,13 @@ pyyaml==6.0.1 rasterio==1.3.10 # via # cogeo-mosaic - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # rio-tiler # supermorecado # titiler-core requests==2.32.3 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # planetary-computer # pystac-client rio-tiler==6.6.1 @@ -213,15 +213,15 @@ supermorecado==0.1.2 # via cogeo-mosaic titiler-core==0.18.3 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-mosaic # titiler-pgstac titiler-mosaic==0.18.3 # via - # pctiler (pctiler/setup.py) + # pctiler (pctiler/pyproject.toml) # titiler-pgstac titiler-pgstac==1.3.0 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) typing-extensions==4.10.0 # via # fastapi-slim @@ -235,7 +235,7 @@ typing-extensions==4.10.0 urllib3==1.26.19 # via requests uvicorn[standard]==0.30.1 - # via pctiler (pctiler/setup.py) + # via pctiler (pctiler/pyproject.toml) uvloop==0.19.0 # via uvicorn watchfiles==0.22.0 diff --git a/pctiler/setup.cfg b/pctiler/setup.cfg deleted file mode 100644 index f8fd3bcb..00000000 --- a/pctiler/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[metadata] -version = attr: pctiler.version.__version__ diff --git a/pctiler/setup.py b/pctiler/setup.py deleted file mode 100644 index b21671e8..00000000 --- a/pctiler/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Setup for pctiler.""" - -from typing import List -from setuptools import find_packages, setup - -# Runtime requirements, see environment.yaml -inst_reqs: List[str] = [ - "fastapi-slim==0.111.0", - 
"geojson-pydantic==1.1.0", - "jinja2==3.1.5", - "pystac==1.10.1", - "planetary-computer==1.0.0", - "rasterio==1.3.10", - "titiler.core==0.18.3", - "titiler.mosaic==0.18.3", - "pillow==10.3.0", - "pydantic>=2.7,<2.8", - "idna>=3.7.0", - "requests==2.32.3", - # titiler-pgstac - "psycopg[binary,pool]", - "titiler.pgstac==1.3.0", - # colormap dependencies - "matplotlib==3.9.0", - "orjson==3.10.4", - "importlib_resources>=1.1.0;python_version<'3.9'", -] - -extra_reqs = { - "dev": ["types-requests"], - "server": [ - "uvicorn[standard]==0.30.1", - ], -} - -setup( - name="pctiler", - python_requires=">=3.7", - description="Planetary Computer API - Tiler.", - packages=find_packages(exclude=["tests"]), - package_data={"pctiler": ["endpoints/templates/*.html"]}, - include_package_data=True, - zip_safe=False, - install_requires=inst_reqs, - extras_require=extra_reqs, -) diff --git a/pctiler/tests/conftest.py b/pctiler/tests/conftest.py index 4c75dbe7..ea288f40 100644 --- a/pctiler/tests/conftest.py +++ b/pctiler/tests/conftest.py @@ -4,6 +4,8 @@ from httpx import ASGITransport, AsyncClient from pytest import Config, Item, Parser +from pccommon.redis import connect_to_redis + def pytest_addoption(parser: Parser) -> None: parser.addoption( @@ -36,6 +38,7 @@ async def client() -> AsyncClient: from pctiler.main import app await connect_to_db(app) + await connect_to_redis(app) async with AsyncClient( transport=ASGITransport(app=app), base_url="http://test", diff --git a/scripts/cipublish b/scripts/cipublish index a6a85a99..076b7c15 100755 --- a/scripts/cipublish +++ b/scripts/cipublish @@ -51,18 +51,31 @@ if [[ -z ${IMAGE_TAG} ]]; then fi function install_oras() { - # https://oras.land/docs/installation/ - VERSION="1.1.0" - curl -LO "https://github.com/oras-project/oras/releases/download/v${VERSION}/oras_${VERSION}_linux_amd64.tar.gz" - mkdir -p oras-install/ - tar -zxf oras_${VERSION}_*.tar.gz -C oras-install/ - sudo mv oras-install/oras /usr/local/bin/ - rm -rf 
oras_${VERSION}_*.tar.gz oras-install/ + if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS: install with Homebrew + if ! command -v brew &> /dev/null; then + echo "Homebrew is not installed. Please install Homebrew first." + exit 1 + fi + brew install oras + else + # https://oras.land/docs/installation/ + VERSION="1.2.2" + curl -LO "https://github.com/oras-project/oras/releases/download/v${VERSION}/oras_${VERSION}_linux_amd64.tar.gz" + mkdir -p oras-install/ + tar -zxf oras_${VERSION}_*.tar.gz -C oras-install/ + sudo mv oras-install/oras /usr/local/bin/ + rm -rf oras_${VERSION}_*.tar.gz oras-install/ + fi } function deprecate_image() { local full_image_name_with_digest=$1 - deprecated_since=$(date --utc --iso-8601=seconds) + if [[ "$OSTYPE" == "darwin"* ]]; then + deprecated_since=$(date -u +"%Y-%m-%dT%H:%M:%SZ") + else + deprecated_since=$(date --utc --iso-8601=seconds) + fi if oras discover $full_image_name_with_digest -o json | jq '.manifests[].annotations' | grep -q "vnd.microsoft.lifecycle.end-of-life.date" ; then echo "Lifecycle metadata annotation for $full_image_name_with_digest already exists, skip." 
else diff --git a/scripts/console b/scripts/console index abae3947..3011ad92 100755 --- a/scripts/console +++ b/scripts/console @@ -42,6 +42,10 @@ while [[ "$#" > 0 ]]; do case $1 in DEPLOY_CONSOLE=1 shift ;; + --redis) + REDIS=1 + shift + ;; --help) usage exit 0 @@ -64,6 +68,16 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then exit 0 fi + if [[ "${REDIS}" ]]; then + docker compose \ + -f docker-compose.yml \ + exec -it \ + redis /bin/bash + # example commands: + # redis-cli -a devcache -p 6380 keys "*" + exit 0 + fi + if [[ "${DEV_TILER_CONSOLE}" ]]; then docker compose \ -f docker-compose.yml \ diff --git a/scripts/generate-requirements b/scripts/generate-requirements index 70ae10a7..c4853991 100755 --- a/scripts/generate-requirements +++ b/scripts/generate-requirements @@ -10,21 +10,21 @@ docker compose \ -f docker-compose.dev.yml \ run --rm \ stac-dev \ - pip-compile ./pcstac/setup.py --extra server --extra dev -o pcstac/requirements-server.txt $pip_compile_options + pip-compile ./pcstac/pyproject.toml --extra server --extra dev -o pcstac/requirements-server.txt $pip_compile_options docker compose \ -f docker-compose.yml \ -f docker-compose.dev.yml \ run --rm \ tiler-dev \ - pip-compile ./pctiler/setup.py --extra server -o pctiler/requirements-server.txt $pip_compile_options + pip-compile ./pctiler/pyproject.toml --extra server -o pctiler/requirements-server.txt $pip_compile_options docker compose \ -f docker-compose.yml \ -f docker-compose.dev.yml \ run --rm \ tiler-dev \ - pip-compile ./pctiler/setup.py --extra dev -o pctiler/requirements-dev.txt $pip_compile_options + pip-compile ./pctiler/pyproject.toml --extra dev -o pctiler/requirements-dev.txt $pip_compile_options docker compose \ -f docker-compose.yml \ diff --git a/scripts/test b/scripts/test index 26a41272..f5c7ec35 100755 --- a/scripts/test +++ b/scripts/test @@ -63,6 +63,14 @@ while [[ $# -gt 0 ]]; do case $1 in if [ "${BASH_SOURCE[0]}" = "${0}" ]; then + if docker ps | grep -q redis; then + echo 
"Flushing redis" + docker compose \ + -f docker-compose.yml \ + exec redis \ + redis-cli -a devcache -p 6380 FLUSHALL + fi + if [ -z "${TILER_ONLY}${STAC_ONLY}${FUNCS_ONLY}" ]; then docker compose \