Skip to content

Commit 13afdd7

Browse files
mathbou (Mathieu)
authored and committed
✨ query objects.inv from homepage url
1 parent 7560389 commit 13afdd7

File tree

1 file changed

+24
-19
lines changed

1 file changed

+24
-19
lines changed

seed_intersphinx_mapping/__init__.py

Lines changed: 24 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -33,13 +33,15 @@
3333
# stdlib
3434
import functools
3535
import json
36+
import os.path
3637
import re
38+
from urllib.parse import urlparse
3739
from typing import Dict, Optional, Tuple, Union
3840

3941
# 3rd party
42+
4043
import dist_meta
4144
import requests
42-
from dist_meta.metadata_mapping import MetadataMapping
4345
from domdf_python_tools.compat import importlib_resources
4446
from domdf_python_tools.utils import stderr_writer
4547
from packaging.requirements import Requirement
@@ -60,7 +62,7 @@
6062
_DOCUMENTATION_RE = re.compile(r"^[dD]oc(s|umentation)")
6163

6264

63-
def _get_project_links(project_name: str) -> MetadataMapping:
65+
def _get_project_links(project_name: str) -> list:
6466
"""
6567
Returns the web links for the given project.
6668
@@ -69,17 +71,19 @@ def _get_project_links(project_name: str) -> MetadataMapping:
6971
:param project_name:
7072
"""
7173

72-
urls = MetadataMapping()
74+
urls = []
7375

7476
# Try a local package first
7577
try:
7678
dist = dist_meta.distributions.get_distribution(project_name)
7779
raw_urls = dist.get_metadata().get_all("Project-URL", default=())
7880

7981
for url in raw_urls:
80-
label, url, *_ = map(str.strip, url.split(','))
82+
label, url = url.split(",", 1)
8183
if _DOCUMENTATION_RE.match(label):
82-
urls[label] = url
84+
urls.append(url)
85+
86+
urls.append(dist.get_metadata().get("Home-Page"))
8387

8488
except dist_meta.distributions.DistributionNotFoundError:
8589
# Fall back to PyPI
@@ -90,8 +94,11 @@ def _get_project_links(project_name: str) -> MetadataMapping:
9094
if "project_urls" in metadata and metadata["project_urls"]:
9195
for label, url in metadata["project_urls"].items():
9296
if _DOCUMENTATION_RE.match(label):
93-
urls[label] = url
97+
urls.append(url)
9498

99+
urls.append(metadata["home_page"])
100+
101+
urls = [url.strip() for url in filter(None, urls)]
95102
return urls
96103

97104

@@ -126,26 +133,24 @@ def get_sphinx_doc_url(pypi_name: str) -> str:
126133
Now raises :exc:`~packaging.requirements.InvalidRequirement` rather than
127134
:exc:`apeye.slumber_url.exceptions.HttpNotFoundError` if the project could not be found on PyPI.
128135
"""
129-
130-
for key, value in _get_project_links(pypi_name).items():
131-
136+
docs_urls = []
137+
for value in _get_project_links(pypi_name):
132138
# Follow redirects to get actual URL
133139
r = requests.head(value, allow_redirects=True, timeout=10)
134-
if r.status_code != 200: # pragma: no cover
135-
raise ValueError(f"Documentation URL not found: HTTP Status {r.status_code}.")
136140

137-
docs_url = r.url
141+
if r.status_code == 200:
142+
has_extension = os.path.splitext(urlparse(r.url).path)[-1]
143+
url = os.path.dirname(r.url) if has_extension else r.url
144+
docs_urls.append(url)
138145

139-
if docs_url.endswith('/'):
140-
objects_inv_url = f"{docs_url}objects.inv"
141-
else: # pragma: no cover
142-
objects_inv_url = f"{docs_url}/objects.inv"
146+
for docs_url in docs_urls:
147+
objects_inv_url = f"{docs_url.rstrip('/')}/objects.inv"
143148

144149
r = requests.head(objects_inv_url, allow_redirects=True, timeout=10)
145150
if r.status_code != 200:
146-
raise ValueError(f"objects.inv not found at url {objects_inv_url}: HTTP Status {r.status_code}.")
147-
148-
return docs_url
151+
stderr_writer(f"WARNING: objects.inv not found at url {objects_inv_url}: HTTP Status {r.status_code}.")
152+
else:
153+
return docs_url
149154

150155
raise ValueError("Documentation URL not found in data from PyPI.")
151156

0 commit comments

Comments (0)