Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,8 @@ repos:
]
additional_dependencies: [
"types-attrs",
"types-requests"
"types-requests",
"types-redis"
]
- repo: https://github.com/PyCQA/pydocstyle
rev: 6.1.1
Expand Down
26 changes: 26 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Runtime image for the stac-fastapi OpenSearch backend.
FROM python:3.13-slim

# build-essential is needed to compile any dependency wheels that ship
# without prebuilt binaries for this Python version.
RUN apt-get update && apt-get install -y \
    build-essential \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# setup.py reads README.md for the long description, so it must be in place
# before the packages are installed.
COPY README.md .

RUN pip install --no-cache-dir --upgrade pip setuptools wheel

COPY stac_fastapi/ stac_fastapi/

# Install the three local packages in dependency order (core -> helpers ->
# backend). A single RUN keeps the installs in one image layer; the previous
# per-setup.py COPY steps were dead layers, since nothing installed from them
# before the full source tree COPY above overwrote them.
RUN pip install --no-cache-dir ./stac_fastapi/core \
    && pip install --no-cache-dir ./stac_fastapi/sfeos_helpers \
    && pip install --no-cache-dir "./stac_fastapi/opensearch[server]"

EXPOSE 8080

CMD ["uvicorn", "stac_fastapi.opensearch.app:app", "--host", "0.0.0.0", "--port", "8080"]
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ docker-shell-os:

.PHONY: test-elasticsearch
test-elasticsearch:
-$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
-$(run_es) /bin/bash -c 'pip install redis==6.4.0 && export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
docker compose down

.PHONY: test-opensearch
Expand Down
10 changes: 10 additions & 0 deletions compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ services:
- ES_VERIFY_CERTS=false
- BACKEND=elasticsearch
- DATABASE_REFRESH=true
- REDIS_HOST=redis
- REDIS_PORT=6379
ports:
- "8080:8080"
volumes:
Expand All @@ -30,6 +32,7 @@ services:
- ./esdata:/usr/share/elasticsearch/data
depends_on:
- elasticsearch
- redis
command:
bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app"

Expand Down Expand Up @@ -94,3 +97,10 @@ services:
- ./opensearch/snapshots:/usr/share/opensearch/snapshots
ports:
- "9202:9202"

redis:
container_name: stac-redis
image: redis:7.2-alpine
restart: always
ports:
- "6379:6379"
2 changes: 2 additions & 0 deletions mypy.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[mypy-stac_fastapi.core.stac_fastapi.core.core]
ignore_errors = True
1 change: 1 addition & 0 deletions stac_fastapi/core/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
"pygeofilter~=0.3.1",
"jsonschema~=4.0.0",
"slowapi~=0.1.9",
"redis==6.4.0",
]

setup(
Expand Down
95 changes: 95 additions & 0 deletions stac_fastapi/core/stac_fastapi/core/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,12 @@
from stac_fastapi.core.base_settings import ApiBaseSettings
from stac_fastapi.core.datetime_utils import format_datetime_range
from stac_fastapi.core.models.links import PagingLinks
from stac_fastapi.core.redis_utils import (
add_previous_link,
cache_current_url,
cache_previous_url,
connect_redis,
)
from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
from stac_fastapi.core.session import Session
from stac_fastapi.core.utilities import filter_fields
Expand Down Expand Up @@ -237,6 +243,12 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
base_url = str(request.base_url)
limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10)))
token = request.query_params.get("token")
current_url = str(request.url)
redis = None
try:
redis = await connect_redis()
except Exception:
redis = None

collections, next_token = await self.database.get_all_collections(
token=token, limit=limit, request=request
Expand All @@ -252,6 +264,10 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
},
]

await add_previous_link(redis, links, "collections", current_url, token)
if redis:
await cache_previous_url(redis, current_url, "collections")

if next_token:
next_link = PagingLinks(next=next_token, request=request).link_next()
links.append(next_link)
Expand Down Expand Up @@ -323,6 +339,31 @@ async def item_collection(
Raises:
HTTPException: 404 if the collection does not exist.
"""
request: Request = kwargs["request"]
token = request.query_params.get("token")
base_url = str(request.base_url)

current_url = str(request.url)

try:
redis = await connect_redis()
except Exception:
redis = None

if redis:
await cache_current_url(redis, current_url, collection_id)

collection = await self.get_collection(
collection_id=collection_id, request=request
)
collection_id = collection.get("id")
if collection_id is None:
raise HTTPException(status_code=404, detail="Collection not found")

search = self.database.make_search()
search = self.database.apply_collections_filter(
search=search, collection_ids=[collection_id]
)
try:
await self.get_collection(collection_id=collection_id, request=request)
except Exception:
Expand All @@ -336,6 +377,45 @@ async def item_collection(
datetime=datetime,
limit=limit,
token=token,
collection_ids=[collection_id],
datetime_search=datetime_search,
)

items = [
self.item_serializer.db_to_stac(item, base_url=base_url) for item in items
]

collection_links = [
{
"rel": "collection",
"type": "application/json",
"href": urljoin(str(request.base_url), f"collections/{collection_id}"),
},
{
"rel": "parent",
"type": "application/json",
"href": urljoin(str(request.base_url), f"collections/{collection_id}"),
},
]

paging_links = await PagingLinks(request=request, next=next_token).get_links()

if redis:
await add_previous_link(
redis, paging_links, collection_id, current_url, token
)

if redis:
await cache_previous_url(redis, current_url, collection_id)

links = collection_links + paging_links

return stac_types.ItemCollection(
type="FeatureCollection",
features=items,
links=links,
numReturned=len(items),
numMatched=maybe_count,
sortby=sortby,
query=query,
filter_expr=filter_expr,
Expand Down Expand Up @@ -482,7 +562,14 @@ async def post_search(
HTTPException: If there is an error with the cql2_json filter.
"""
base_url = str(request.base_url)
current_url = str(request.url)
try:
redis = await connect_redis()
except Exception:
redis = None

if redis:
await cache_current_url(redis, current_url, "search_result")
search = self.database.make_search()

if search_request.ids:
Expand Down Expand Up @@ -592,6 +679,14 @@ async def post_search(
]
links = await PagingLinks(request=request, next=next_token).get_links()

if redis:
await add_previous_link(
redis, links, "search_result", current_url, search_request.token
)

if redis:
await cache_previous_url(redis, current_url, "search_result")

return stac_types.ItemCollection(
type="FeatureCollection",
features=items,
Expand Down
Loading
Loading