Skip to content

Commit bea818d

Browse files
Yuri ZmytrakovYuri Zmytrakov
authored andcommitted
dummy
1 parent 7d6b741 commit bea818d

File tree

6 files changed

+220
-4
lines changed

6 files changed

+220
-4
lines changed

Makefile

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -63,22 +63,22 @@ docker-shell-os:
6363

6464
.PHONY: test-elasticsearch
# Run the full test suite inside the Elasticsearch compose stack.
# FIX: the commit dropped the '&&' between `pip install redis==6.4.0` and
# `export`, so "export" was passed as an extra argument to pip instead of
# running as a shell builtin. Restore the '&&' chain.
# The leading '-' tells make to keep going on test failure so that
# `docker compose down` always runs.
test-elasticsearch:
	-$(run_es) /bin/bash -c 'pip install redis==6.4.0 && export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
	docker compose down
6868

6969
.PHONY: test-opensearch
# Run the full test suite inside the OpenSearch compose stack.
# FIX: the commit dropped the '&&' between `pip install redis==6.4.0` and
# `export`, so "export" was passed as an extra argument to pip instead of
# running as a shell builtin. Restore the '&&' chain.
# The leading '-' tells make to keep going on test failure so that
# `docker compose down` always runs.
test-opensearch:
	-$(run_os) /bin/bash -c 'pip install redis==6.4.0 && export && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest'
	docker compose down
7373

7474
.PHONY: test-datetime-filtering-es
# Run only the datetime-index-filtering tests against Elasticsearch, with
# coverage reporting. redis is installed first to satisfy the core package's
# new runtime dependency inside the test container (see stac_fastapi/core/setup.py).
test-datetime-filtering-es:
	-$(run_es) /bin/bash -c 'pip install redis==6.4.0 && export ENABLE_DATETIME_INDEX_FILTERING=true && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest -s --cov=stac_fastapi --cov-report=term-missing -m datetime_filtering'
	docker compose down
7878

7979
.PHONY: test-datetime-filtering-os
# Run only the datetime-index-filtering tests against OpenSearch, with
# coverage reporting. redis is installed first to satisfy the core package's
# new runtime dependency inside the test container (see stac_fastapi/core/setup.py).
test-datetime-filtering-os:
	-$(run_os) /bin/bash -c 'pip install redis==6.4.0 && export ENABLE_DATETIME_INDEX_FILTERING=true && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest -s --cov=stac_fastapi --cov-report=term-missing -m datetime_filtering'
	docker compose down
8383

8484
.PHONY: test

docker-compose.redis.yml

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
# Standalone Redis service used by stac-fastapi to cache pagination links.
# NOTE(review): the top-level `version` key is ignored by Compose v2+ but is
# kept for compatibility with older docker-compose releases.
version: '3.8'

services:
  redis:
    image: redis:7-alpine
    container_name: stac-fastapi-redis
    ports:
      - "6379:6379"
    # Append-only-file persistence so cached links survive a container restart.
    command: redis-server --appendonly yes
    volumes:
      - redis_data:/data
    environment:
      - REDIS_PORT=6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 3
    networks:
      - stac-network

volumes:
  redis_data:

networks:
  stac-network:
    driver: bridge

mypy.ini

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
[mypy]

# Treat the redis package as untyped: silence missing-import errors for it
# only, so the rest of the codebase keeps strict import checking.
[mypy-redis.*]
ignore_missing_imports = True

stac_fastapi/core/setup.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
"pygeofilter~=0.3.1",
2020
"jsonschema~=4.0.0",
2121
"slowapi~=0.1.9",
22+
"redis==6.4.0",
2223
]
2324

2425
setup(

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
from stac_fastapi.core.base_settings import ApiBaseSettings
2525
from stac_fastapi.core.datetime_utils import format_datetime_range
2626
from stac_fastapi.core.models.links import PagingLinks
27+
from stac_fastapi.core.redis_utils import connect_redis_sentinel, get_prev_link, save_self_link
2728
from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
2829
from stac_fastapi.core.session import Session
2930
from stac_fastapi.core.utilities import filter_fields
@@ -255,6 +256,13 @@ async def all_collections(
255256
if parsed_sort:
256257
sort = parsed_sort
257258

259+
current_url = str(request.url)
260+
redis = None
261+
try:
262+
redis = await connect_redis_sentinel()
263+
except Exception:
264+
redis = None
265+
258266
collections, next_token = await self.database.get_all_collections(
259267
token=token, limit=limit, request=request, sort=sort
260268
)
@@ -269,6 +277,22 @@ async def all_collections(
269277
},
270278
]
271279

280+
if redis:
281+
if next_token:
282+
await save_self_link(redis, next_token, current_url)
283+
284+
prev_link = await get_prev_link(redis, token)
285+
if prev_link:
286+
links.insert(
287+
0,
288+
{
289+
"rel": "prev",
290+
"type": "application/json",
291+
"method": "GET",
292+
"href": prev_link,
293+
},
294+
)
295+
272296
if next_token:
273297
next_link = PagingLinks(next=next_token, request=request).link_next()
274298
links.append(next_link)
@@ -499,6 +523,10 @@ async def post_search(
499523
HTTPException: If there is an error with the cql2_json filter.
500524
"""
501525
base_url = str(request.base_url)
526+
try:
527+
redis = await connect_redis_sentinel()
528+
except Exception:
529+
redis = None
502530

503531
search = self.database.make_search()
504532

@@ -609,6 +637,39 @@ async def post_search(
609637
]
610638
links = await PagingLinks(request=request, next=next_token).get_links()
611639

640+
collection_links = []
641+
if search_request.collections:
642+
for collection_id in search_request.collections:
643+
collection_links.extend([
644+
{
645+
"rel": "collection",
646+
"type": "application/json",
647+
"href": urljoin(base_url, f"collections/{collection_id}"),
648+
},
649+
{
650+
"rel": "parent",
651+
"type": "application/json",
652+
"href": urljoin(base_url, f"collections/{collection_id}"),
653+
},
654+
])
655+
links.extend(collection_links)
656+
657+
if redis:
658+
self_link = str(request.url)
659+
await save_self_link(redis, next_token, self_link)
660+
661+
prev_link = await get_prev_link(redis, token_param)
662+
if prev_link:
663+
links.insert(
664+
0,
665+
{
666+
"rel": "prev",
667+
"type": "application/json",
668+
"method": "GET",
669+
"href": prev_link,
670+
},
671+
)
672+
612673
return stac_types.ItemCollection(
613674
type="FeatureCollection",
614675
features=items,
Lines changed: 124 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,124 @@
1+
"""Utilities for connecting to and managing Redis connections."""
2+
3+
from typing import Optional
4+
5+
from pydantic_settings import BaseSettings
6+
from redis import asyncio as aioredis
7+
from redis.asyncio.sentinel import Sentinel
8+
9+
redis_pool: Optional[aioredis.Redis] = None
10+
11+
12+
class RedisSentinelSettings(BaseSettings):
    """Configuration for connecting to Redis Sentinel.

    Values are read from the environment (pydantic ``BaseSettings``); an
    empty ``REDIS_SENTINEL_HOSTS`` disables the Sentinel connection.
    """

    # Comma-separated Sentinel hostnames, e.g. "sentinel1,sentinel2".
    REDIS_SENTINEL_HOSTS: str = ""
    # Comma-separated ports, paired positionally with the hosts above.
    REDIS_SENTINEL_PORTS: str = "26379"
    REDIS_SENTINEL_MASTER_NAME: str = "master"
    REDIS_DB: int = 15

    # Connection-pool / client behaviour.
    REDIS_MAX_CONNECTIONS: int = 10
    REDIS_RETRY_TIMEOUT: bool = True
    REDIS_DECODE_RESPONSES: bool = True
    REDIS_CLIENT_NAME: str = "stac-fastapi-app"
    REDIS_HEALTH_CHECK_INTERVAL: int = 30
25+
26+
27+
class RedisSettings(BaseSettings):
    """Configuration for connecting to a single (non-Sentinel) Redis node.

    Values are read from the environment (pydantic ``BaseSettings``); an
    empty ``REDIS_HOST`` disables the Redis connection.
    """
    # FIX: the docstring previously said "Redis Sentinel" — a copy-paste
    # error from RedisSentinelSettings; this class is plain single-node Redis.

    REDIS_HOST: str = ""
    REDIS_PORT: int = 6379
    REDIS_DB: int = 0

    # Connection-pool / client behaviour.
    REDIS_MAX_CONNECTIONS: int = 10
    REDIS_RETRY_TIMEOUT: bool = True
    REDIS_DECODE_RESPONSES: bool = True
    REDIS_CLIENT_NAME: str = "stac-fastapi-app"
    REDIS_HEALTH_CHECK_INTERVAL: int = 30
39+
40+
41+
# Select the Redis or Redis Sentinel configuration used as the module default.
# Annotated with the concrete class (not BaseSettings) so mypy can check the
# REDIS_SENTINEL_* attribute accesses in connect_redis_sentinel().
redis_settings: RedisSentinelSettings = RedisSentinelSettings()
43+
44+
45+
async def connect_redis(
    settings: Optional[RedisSettings] = None,
) -> Optional[aioredis.Redis]:
    """Return a pooled Redis connection, creating the pool on first use.

    Args:
        settings: Single-node Redis configuration; defaults to a fresh
            ``RedisSettings()`` read from the environment.

    Returns:
        A shared ``aioredis.Redis`` client, or ``None`` when no Redis host
        is configured.
    """
    global redis_pool

    # BUG FIX: the previous fallback was the module-level `redis_settings`,
    # which is a RedisSentinelSettings instance with no REDIS_HOST/REDIS_PORT
    # attributes — calling connect_redis() with no argument raised
    # AttributeError. Default to single-node settings instead.
    settings = settings or RedisSettings()

    if not settings.REDIS_HOST or not settings.REDIS_PORT:
        return None

    if redis_pool is None:
        pool = aioredis.ConnectionPool(
            host=settings.REDIS_HOST,
            port=settings.REDIS_PORT,
            db=settings.REDIS_DB,
            max_connections=settings.REDIS_MAX_CONNECTIONS,
            decode_responses=settings.REDIS_DECODE_RESPONSES,
            retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
            health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
        )
        redis_pool = aioredis.Redis(
            connection_pool=pool, client_name=settings.REDIS_CLIENT_NAME
        )
    return redis_pool
67+
68+
69+
async def connect_redis_sentinel(
    settings: Optional[RedisSentinelSettings] = None,
) -> Optional[aioredis.Redis]:
    """Return a Redis client for the Sentinel-managed master, or ``None``.

    The client is cached in the module-level ``redis_pool`` so repeated calls
    share one connection. Redis is treated as optional by callers, so any
    configuration gap or connection error yields ``None`` rather than raising.

    Args:
        settings: Sentinel configuration; defaults to the module-level
            ``redis_settings``.

    Returns:
        A shared ``aioredis.Redis`` master client, or ``None`` when Sentinel
        is not configured or cannot be reached.
    """
    global redis_pool

    settings = settings or redis_settings

    if (
        not settings.REDIS_SENTINEL_HOSTS
        or not settings.REDIS_SENTINEL_PORTS
        or not settings.REDIS_SENTINEL_MASTER_NAME
    ):
        return None

    hosts = [h.strip() for h in settings.REDIS_SENTINEL_HOSTS.split(",") if h.strip()]
    ports = [
        int(p.strip()) for p in settings.REDIS_SENTINEL_PORTS.split(",") if p.strip()
    ]
    # BUG FIX: zip() silently dropped every host past the first when a single
    # port was configured for several Sentinel hosts (the common deployment
    # shape). Broadcast a lone port across all hosts instead.
    if len(ports) == 1 and len(hosts) > 1:
        ports = ports * len(hosts)

    if redis_pool is None:
        try:
            sentinel = Sentinel(
                list(zip(hosts, ports)),
                decode_responses=settings.REDIS_DECODE_RESPONSES,
            )
            # master_for returns a lazily-connecting client; no network I/O
            # happens until the first command is issued.
            redis_pool = sentinel.master_for(
                service_name=settings.REDIS_SENTINEL_MASTER_NAME,
                db=settings.REDIS_DB,
                decode_responses=settings.REDIS_DECODE_RESPONSES,
                retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
                client_name=settings.REDIS_CLIENT_NAME,
                max_connections=settings.REDIS_MAX_CONNECTIONS,
                health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
            )
        except Exception:
            # Best-effort: pagination links degrade gracefully without Redis.
            return None

    return redis_pool
110+
111+
112+
async def save_self_link(
    redis: aioredis.Redis, token: Optional[str], self_href: str
) -> None:
    """Cache *self_href* under the pagination *token* with a 30-minute TTL.

    A falsy token means there is no page to remember, so nothing is stored.
    """
    if not token:
        return
    ttl_seconds = 30 * 60  # 1800s, matches the previous-link cache lifetime
    await redis.setex(f"nav:self:{token}", ttl_seconds, self_href)
118+
119+
120+
async def get_prev_link(redis: aioredis.Redis, token: Optional[str]) -> Optional[str]:
    """Look up the previously cached self link for *token*, if one exists."""
    if token:
        return await redis.get(f"nav:self:{token}")
    return None

0 commit comments

Comments
 (0)