@@ -24,6 +24,12 @@
 from stac_fastapi.core.base_settings import ApiBaseSettings
 from stac_fastapi.core.datetime_utils import format_datetime_range
 from stac_fastapi.core.models.links import PagingLinks
+from stac_fastapi.core.redis_utils import (
+    add_previous_link,
+    cache_current_url,
+    cache_previous_url,
+    connect_redis,
+)
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.session import Session
 from stac_fastapi.core.utilities import filter_fields
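
For orientation, here is a minimal sketch of what the helpers imported from stac_fastapi.core.redis_utils could look like. Only the four function names come from the diff; the signatures, Redis key scheme, TTL, and the REDIS_URL setting below are assumptions for illustration, not the implementation added by this change.

```python
# Hypothetical sketch only: one way the four helpers above could be written;
# the real redis_utils module may differ in every detail.
import os
from typing import List, Optional

from redis import asyncio as aioredis  # redis-py >= 4.2


async def connect_redis() -> aioredis.Redis:
    """Open a Redis client from an assumed REDIS_URL environment variable."""
    return aioredis.Redis.from_url(
        os.getenv("REDIS_URL", "redis://localhost:6379/0"), decode_responses=True
    )


async def cache_current_url(redis: aioredis.Redis, current_url: str, key: str) -> None:
    """Remember the URL currently being served under an endpoint-specific key."""
    await redis.set(f"current_url:{key}", current_url, ex=3600)


async def cache_previous_url(redis: aioredis.Redis, current_url: str, key: str) -> None:
    """Store this page's URL so the next page can link back to it."""
    await redis.set(f"previous_url:{key}", current_url, ex=3600)


async def add_previous_link(
    redis: Optional[aioredis.Redis],
    links: List[dict],
    key: str,
    current_url: str,
    token: Optional[str],
) -> None:
    """Append a 'previous' link when a cached URL exists and this is not page one."""
    if redis is None or not token:
        return
    previous_url = await redis.get(f"previous_url:{key}")
    if previous_url and previous_url != current_url:
        links.append(
            {"rel": "previous", "type": "application/json", "href": previous_url}
        )
```

The 3600-second expiry is arbitrary in this sketch; the relevant design point is that the pagination history lives in Redis rather than in per-process memory, so it survives restarts and is shared across workers.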
@@ -237,6 +243,12 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
         base_url = str(request.base_url)
         limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10)))
         token = request.query_params.get("token")
+        current_url = str(request.url)
+        redis = None
+        try:
+            redis = await connect_redis()
+        except Exception:
+            redis = None

         collections, next_token = await self.database.get_all_collections(
             token=token, limit=limit, request=request
@@ -252,6 +264,10 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
             },
         ]

+        await add_previous_link(redis, links, "collections", current_url, token)
+        if redis:
+            await cache_previous_url(redis, current_url, "collections")
+
         if next_token:
             next_link = PagingLinks(next=next_token, request=request).link_next()
             links.append(next_link)
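
Illustratively, once a previous URL has been cached under the "collections" key, a later /collections request that carries a token would end up with both pagination directions in its links, with the "previous" entry appended ahead of the "next" one. The hrefs below are made up; only the previous-link shape is taken from the code this change replaces, and the exact fields of the next link produced by PagingLinks may differ.

```python
# Assumed shape of the pagination portion of a second /collections page
# (hypothetical hrefs and token values).
links = [
    # ... root / self / parent links built earlier in all_collections ...
    {
        "rel": "previous",
        "type": "application/json",
        "href": "https://stac.example.com/collections?limit=10",
    },
    # appended afterwards by PagingLinks(...).link_next() when next_token is set
    {
        "rel": "next",
        "type": "application/json",
        "href": "https://stac.example.com/collections?limit=10&token=<next_token>",
    },
]
```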
@@ -310,20 +326,18 @@ async def item_collection(
         """
         request: Request = kwargs["request"]
         token = request.query_params.get("token")
-        if not hasattr(self, '_prev_links'):
-            self._prev_links = {}
-
-        session_id = request.cookies.get('stac_session', 'default_session')
-        current_self_link = str(request.url)
-
-        if session_id not in self._prev_links:
-            self._prev_links[session_id] = []
-
-        history = self._prev_links[session_id]
-        if not history or current_self_link != history[-1]:
-            history.append(current_self_link)
         base_url = str(request.base_url)

+        current_url = str(request.url)
+
+        try:
+            redis = await connect_redis()
+        except Exception:
+            redis = None
+
+        if redis:
+            await cache_current_url(redis, current_url, collection_id)
+
         collection = await self.get_collection(
             collection_id=collection_id, request=request
         )
@@ -374,21 +388,22 @@ async def item_collection(
                 "href": urljoin(str(request.base_url), f"collections/{collection_id}"),
             },
             {
-                "rel": "parent",
+                "rel": "parent",
                 "type": "application/json",
                 "href": urljoin(str(request.base_url), f"collections/{collection_id}"),
-            }
+            },
         ]

         paging_links = await PagingLinks(request=request, next=next_token).get_links()
-        history = self._prev_links.get(session_id, [])
-        if len(history) > 1:
-            previous_self_link = history[-2]
-            paging_links.append({
-                "rel": "previous",
-                "type": "application/json",
-                "href": previous_self_link,
-            })
+
+        if redis:
+            await add_previous_link(
+                redis, paging_links, collection_id, current_url, token
+            )
+
+        if redis:
+            await cache_previous_url(redis, current_url, collection_id)
+
         links = collection_links + paging_links

         return stac_types.ItemCollection(
@@ -529,7 +544,14 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
+        current_url = str(request.url)
+        try:
+            redis = await connect_redis()
+        except Exception:
+            redis = None

+        if redis:
+            await cache_current_url(redis, current_url, "search_result")
         search = self.database.make_search()

         if search_request.ids:
@@ -628,6 +650,14 @@ async def post_search(
         ]
         links = await PagingLinks(request=request, next=next_token).get_links()

+        if redis:
+            await add_previous_link(
+                redis, links, "search_result", current_url, search_request.token
+            )
+
+        if redis:
+            await cache_previous_url(redis, current_url, "search_result")
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
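
Taken together, the three handlers follow the same flow, differing only in the cache key they use ("collections", the collection_id, or "search_result"). A condensed, illustrative restatement follows; the _paginate_with_previous name is invented for this sketch and does not exist in the codebase.

```python
# Illustrative condensation of the shared flow; not a function in this change.
from stac_fastapi.core.redis_utils import (
    add_previous_link,
    cache_current_url,
    cache_previous_url,
    connect_redis,
)


async def _paginate_with_previous(request, links, cache_key, token):
    current_url = str(request.url)
    try:
        redis = await connect_redis()
    except Exception:
        # Redis being unavailable should not break the request; the handler
        # simply skips previous-link handling and serves plain token pagination.
        redis = None

    if redis:
        await cache_current_url(redis, current_url, cache_key)

    # ... the handler builds its response links (including any "next" link) ...

    await add_previous_link(redis, links, cache_key, current_url, token)
    if redis:
        await cache_previous_url(redis, current_url, cache_key)
    return links
```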