from stac_fastapi.pgstac.db import close_db_connection, connect_to_db

-root_path_value = "/stac/v1"
-
-# Append the root path to the base URL, this is key to reproducing the issue where the root path appears twice in some links
-base_url = f"http://api.acme.com{root_path_value}"
+BASE_URL = "http://api.acme.com"
+ROOT_PATH = "/stac/v1"


@pytest.fixture(scope="function")
@@ -18,7 +16,7 @@ async def app_with_root_path(database, monkeypatch):
    specific ROOT_PATH environment variable and connected to the test database.
    """

-    monkeypatch.setenv("ROOT_PATH", root_path_value)
+    monkeypatch.setenv("ROOT_PATH", ROOT_PATH)
    monkeypatch.setenv("PGUSER", database.user)
    monkeypatch.setenv("PGPASSWORD", database.password)
    monkeypatch.setenv("PGHOST", database.host)
@@ -35,8 +33,8 @@ async def app_with_root_path(database, monkeypatch):

    # Ensure the app's root_path is configured as expected
    assert (
-        app.root_path == root_path_value
-    ), f"app_with_root_path fixture: app.root_path is '{app.root_path}', expected '{root_path_value}'"
+        app.root_path == ROOT_PATH
+    ), f"app_with_root_path fixture: app.root_path is '{app.root_path}', expected '{ROOT_PATH}'"

    await connect_to_db(app, add_write_connection_pool=with_transactions)
    yield app
@@ -47,7 +45,8 @@ async def app_with_root_path(database, monkeypatch):
def client_with_root_path(app_with_root_path):
    with TestClient(
        app_with_root_path,
-        root_path=root_path_value,
+        base_url=BASE_URL,
+        root_path=ROOT_PATH,
    ) as c:
        yield c

@@ -75,66 +74,48 @@ def loaded_client(client_with_root_path, load_test_data):
    yield client_with_root_path


-def test_search_links_are_valid(loaded_client):
-    resp = loaded_client.get("/search?limit=1")
-    assert resp.status_code == 200
-    response_json = resp.json()
-    assert_links_href(response_json.get("links", []), base_url)
-
-
-def test_collection_links_are_valid(loaded_client):
-    resp = loaded_client.get("/collections?limit=1")
-    assert resp.status_code == 200
-    response_json = resp.json()
-    assert_links_href(response_json.get("links", []), base_url)
-
-
-def test_items_collection_links_are_valid(loaded_client):
-    resp = loaded_client.get("/collections/test-collection/items?limit=1")
+@pytest.mark.parametrize(
+    "path",
+    [
+        "/search?limit=1",
+        "/collections?limit=1",
+        "/collections/test-collection/items?limit=1",
+    ],
+)
+def test_search_links_are_valid(loaded_client, path):
+    resp = loaded_client.get(path)
    assert resp.status_code == 200
    response_json = resp.json()
-    assert_links_href(response_json.get("links", []), base_url)
-
-
-def assert_links_href(links, url_prefix):
-    """
-    Ensure all links start with the expected URL prefix and check that
-    there is no root_path duplicated in the URL.
-
-    Args:
-        links: List of link dictionaries with 'href' keys
-        url_prefix: Expected URL prefix (e.g., 'http://test/stac/v1')
-    """
-    from urllib.parse import urlparse

+    # Ensure all links start with the expected URL prefix and check that
+    # there is no root_path duplicated in the URL.
    failed_links = []
-    parsed_prefix = urlparse(url_prefix)
-    root_path = parsed_prefix.path  # e.g., '/stac/v1'
+    expected_prefix = f"{BASE_URL}{ROOT_PATH}"

-    for link in links:
+    for link in response_json.get("links", []):
        href = link["href"]
        rel = link.get("rel", "unknown")

        # Check if link starts with the expected prefix
-        if not href.startswith(url_prefix):
+        if not href.startswith(expected_prefix):
            failed_links.append(
                {
                    "rel": rel,
                    "href": href,
-                    "error": f"does not start with expected prefix '{url_prefix}'",
+                    "error": f"does not start with expected prefix '{expected_prefix}'",
                }
            )
            continue

        # Check for duplicated root path
-        if root_path and root_path != "/":
-            remainder = href[len(url_prefix) :]
-            if remainder.startswith(root_path):
+        if ROOT_PATH and ROOT_PATH != "/":
+            remainder = href[len(expected_prefix) :]
+            if remainder.startswith(ROOT_PATH):
                failed_links.append(
                    {
                        "rel": rel,
                        "href": href,
-                        "error": f"contains duplicated root path '{root_path}'",
+                        "error": f"contains duplicated root path '{ROOT_PATH}'",
                    }
                )