
Commit 75e61d2

Yuri Zmytrakov authored and committed
add redis
1 parent 041b729 commit 75e61d2

File tree

11 files changed (+295, -24 lines)

.pre-commit-config.yaml

Lines changed: 2 additions & 1 deletion
@@ -31,7 +31,8 @@ repos:
         ]
         additional_dependencies: [
           "types-attrs",
-          "types-requests"
+          "types-requests",
+          "types-redis"
         ]
   - repo: https://github.com/PyCQA/pydocstyle
     rev: 6.1.1

Dockerfile

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+FROM python:3.13-slim
+
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+COPY README.md .
+COPY stac_fastapi/opensearch/setup.py stac_fastapi/opensearch/
+COPY stac_fastapi/core/setup.py stac_fastapi/core/
+COPY stac_fastapi/sfeos_helpers/setup.py stac_fastapi/sfeos_helpers/
+
+
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel
+
+COPY stac_fastapi/ stac_fastapi/
+
+RUN pip install --no-cache-dir ./stac_fastapi/core
+RUN pip install --no-cache-dir ./stac_fastapi/sfeos_helpers
+RUN pip install --no-cache-dir ./stac_fastapi/opensearch[server]
+
+EXPOSE 8080
+
+CMD ["uvicorn", "stac_fastapi.opensearch.app:app", "--host", "0.0.0.0", "--port", "8080"]

Makefile

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ docker-shell-os:

 .PHONY: test-elasticsearch
 test-elasticsearch:
-	-$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
+	-$(run_es) /bin/bash -c 'pip install redis==6.4.0 export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
 	docker compose down

 .PHONY: test-opensearch

compose.yml

Lines changed: 10 additions & 0 deletions
@@ -22,6 +22,8 @@ services:
       - ES_VERIFY_CERTS=false
       - BACKEND=elasticsearch
       - DATABASE_REFRESH=true
+      - REDIS_HOST=redis
+      - REDIS_PORT=6379
     ports:
       - "8080:8080"
     volumes:
@@ -30,6 +32,7 @@ services:
       - ./esdata:/usr/share/elasticsearch/data
     depends_on:
       - elasticsearch
+      - redis
     command:
       bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app"

@@ -94,3 +97,10 @@ services:
       - ./opensearch/snapshots:/usr/share/opensearch/snapshots
     ports:
       - "9202:9202"
+
+  redis:
+    container_name: stac-redis
+    image: redis:7.2-alpine
+    restart: always
+    ports:
+      - "6379:6379"

stac_fastapi/core/setup.py

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@
     "pygeofilter~=0.3.1",
     "jsonschema~=4.0.0",
     "slowapi~=0.1.9",
+    "redis==6.4.0",
 ]

 setup(

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 52 additions & 22 deletions
@@ -24,6 +24,12 @@
 from stac_fastapi.core.base_settings import ApiBaseSettings
 from stac_fastapi.core.datetime_utils import format_datetime_range
 from stac_fastapi.core.models.links import PagingLinks
+from stac_fastapi.core.redis_utils import (
+    add_previous_link,
+    cache_current_url,
+    cache_previous_url,
+    connect_redis,
+)
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.session import Session
 from stac_fastapi.core.utilities import filter_fields
@@ -237,6 +243,12 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
         base_url = str(request.base_url)
         limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10)))
         token = request.query_params.get("token")
+        current_url = str(request.url)
+        redis = None
+        try:
+            redis = await connect_redis()
+        except Exception:
+            redis = None

         collections, next_token = await self.database.get_all_collections(
             token=token, limit=limit, request=request
@@ -252,6 +264,10 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
             },
         ]

+        await add_previous_link(redis, links, "collections", current_url, token)
+        if redis:
+            await cache_previous_url(redis, current_url, "collections")
+
         if next_token:
             next_link = PagingLinks(next=next_token, request=request).link_next()
             links.append(next_link)
@@ -310,20 +326,18 @@ async def item_collection(
         """
         request: Request = kwargs["request"]
         token = request.query_params.get("token")
-        if not hasattr(self, '_prev_links'):
-            self._prev_links = {}
-
-        session_id = request.cookies.get('stac_session', 'default_session')
-        current_self_link = str(request.url)
-
-        if session_id not in self._prev_links:
-            self._prev_links[session_id] = []
-
-        history = self._prev_links[session_id]
-        if not history or current_self_link != history[-1]:
-            history.append(current_self_link)
         base_url = str(request.base_url)

+        current_url = str(request.url)
+
+        try:
+            redis = await connect_redis()
+        except Exception:
+            redis = None
+
+        if redis:
+            await cache_current_url(redis, current_url, collection_id)
+
         collection = await self.get_collection(
             collection_id=collection_id, request=request
         )
@@ -374,21 +388,22 @@ async def item_collection(
                 "href": urljoin(str(request.base_url), f"collections/{collection_id}"),
             },
             {
-               "rel": "parent",
+                "rel": "parent",
                 "type": "application/json",
                 "href": urljoin(str(request.base_url), f"collections/{collection_id}"),
-            }
+            },
         ]

         paging_links = await PagingLinks(request=request, next=next_token).get_links()
-        history = self._prev_links.get(session_id, [])
-        if len(history) > 1:
-            previous_self_link = history[-2]
-            paging_links.append({
-                "rel": "previous",
-                "type": "application/json",
-                "href": previous_self_link,
-            })
+
+        if redis:
+            await add_previous_link(
+                redis, paging_links, collection_id, current_url, token
+            )
+
+        if redis:
+            await cache_previous_url(redis, current_url, collection_id)
+
         links = collection_links + paging_links

         return stac_types.ItemCollection(
@@ -529,7 +544,14 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
+        current_url = str(request.url)
+        try:
+            redis = await connect_redis()
+        except Exception:
+            redis = None

+        if redis:
+            await cache_current_url(redis, current_url, "search_result")
         search = self.database.make_search()

         if search_request.ids:
@@ -628,6 +650,14 @@ async def post_search(
         ]
         links = await PagingLinks(request=request, next=next_token).get_links()

+        if redis:
+            await add_previous_link(
+                redis, links, "search_result", current_url, search_request.token
+            )
+
+        if redis:
+            await cache_previous_url(redis, current_url, "search_result")
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
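
core.py now calls connect_redis, cache_current_url, cache_previous_url, and add_previous_link from stac_fastapi.core.redis_utils, one of the 11 changed files that is not included in this excerpt. Inferring the signatures from the call sites above, a minimal sketch of how the previous-link caching could work is shown below; the key names, TTL, and exact semantics are assumptions, not the committed implementation:

# Hypothetical sketch of redis_utils.py helpers; signatures inferred from the
# call sites in core.py (redis, links, key, current_url, token).
from typing import Any, Dict, List, Optional

from redis.asyncio import Redis

_TTL_SECONDS = 600  # assumed expiry for cached pagination URLs


async def cache_current_url(redis: Redis, current_url: str, key: str) -> None:
    # Remember the URL being served for this collection/search context.
    await redis.set(f"current:{key}", current_url, ex=_TTL_SECONDS)


async def cache_previous_url(redis: Redis, current_url: str, key: str) -> None:
    # Once the response is built, the current URL becomes the candidate
    # "previous" URL for the next page request under the same key.
    await redis.set(f"previous:{key}", current_url, ex=_TTL_SECONDS)


async def add_previous_link(
    redis: Optional[Redis],
    links: List[Dict[str, Any]],
    key: str,
    current_url: str,
    token: Optional[str],
) -> None:
    # Only paginated requests (token present) can have a previous page, and
    # only when Redis is reachable and holds a URL cached for this key.
    if not redis or not token:
        return
    previous_url = await redis.get(f"previous:{key}")
    if previous_url and previous_url != current_url:
        links.append(
            {
                "rel": "previous",
                "type": "application/json",
                "href": previous_url,
            }
        )

Reading the cached URL before overwriting it is why the handlers call add_previous_link before cache_previous_url: a request carrying a pagination token can then recover the URL of the page that issued that token and expose it as a "previous" link.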
