33
33
# stdlib
34
34
import functools
35
35
import json
36
+ import os .path
36
37
import re
38
+ from urllib .parse import urlparse
37
39
from typing import Dict , Optional , Tuple , Union
38
40
39
41
# 3rd party
42
+
40
43
import dist_meta
41
44
import requests
42
- from dist_meta .metadata_mapping import MetadataMapping
43
45
from domdf_python_tools .compat import importlib_resources
44
46
from domdf_python_tools .utils import stderr_writer
45
47
from packaging .requirements import Requirement
60
62
# Pre-compiled pattern for documentation-related link labels: matches strings
# beginning with "docs"/"Docs" or "documentation"/"Documentation" (only the
# leading letter is case-flexible; the "(s|umentation)" suffix is required,
# so a bare "doc" label does NOT match). Used elsewhere in this module to
# filter Project-URL labels down to documentation links.
_DOCUMENTATION_RE = re.compile(r"^[dD]oc(s|umentation)")
61
63
62
64
63
- def _get_project_links (project_name : str ) -> MetadataMapping :
65
+ def _get_project_links (project_name : str ) -> list :
64
66
"""
65
67
Returns the web links for the given project.
66
68
@@ -69,17 +71,19 @@ def _get_project_links(project_name: str) -> MetadataMapping:
69
71
:param project_name:
70
72
"""
71
73
72
- urls = MetadataMapping ()
74
+ urls = []
73
75
74
76
# Try a local package first
75
77
try :
76
78
dist = dist_meta .distributions .get_distribution (project_name )
77
79
raw_urls = dist .get_metadata ().get_all ("Project-URL" , default = ())
78
80
79
81
for url in raw_urls :
80
- label , url , * _ = map ( str . strip , url .split (',' ) )
82
+ label , url = url .split ("," , 1 )
81
83
if _DOCUMENTATION_RE .match (label ):
82
- urls [label ] = url
84
+ urls .append (url )
85
+
86
+ urls .append (dist .get_metadata ().get ("Home-Page" ))
83
87
84
88
except dist_meta .distributions .DistributionNotFoundError :
85
89
# Fall back to PyPI
@@ -90,8 +94,11 @@ def _get_project_links(project_name: str) -> MetadataMapping:
90
94
if "project_urls" in metadata and metadata ["project_urls" ]:
91
95
for label , url in metadata ["project_urls" ].items ():
92
96
if _DOCUMENTATION_RE .match (label ):
93
- urls [ label ] = url
97
+ urls . append ( url )
94
98
99
+ urls .append (metadata ["home_page" ])
100
+
101
+ urls = [url .strip () for url in filter (None , urls )]
95
102
return urls
96
103
97
104
@@ -126,26 +133,24 @@ def get_sphinx_doc_url(pypi_name: str) -> str:
126
133
Now raises :exc:`~packaging.requirements.InvalidRequirement` rather than
127
134
:exc:`apeye.slumber_url.exceptions.HttpNotFoundError` if the project could not be found on PyPI.
128
135
"""
129
-
130
- for key , value in _get_project_links (pypi_name ).items ():
131
-
136
+ docs_urls = []
137
+ for value in _get_project_links (pypi_name ):
132
138
# Follow redirects to get actual URL
133
139
r = requests .head (value , allow_redirects = True , timeout = 10 )
134
- if r .status_code != 200 : # pragma: no cover
135
- raise ValueError (f"Documentation URL not found: HTTP Status { r .status_code } ." )
136
140
137
- docs_url = r .url
141
+ if r .status_code == 200 :
142
+ has_extension = os .path .splitext (urlparse (r .url ).path )[- 1 ]
143
+ url = os .path .dirname (r .url ) if has_extension else r .url
144
+ docs_urls .append (url )
138
145
139
- if docs_url .endswith ('/' ):
140
- objects_inv_url = f"{ docs_url } objects.inv"
141
- else : # pragma: no cover
142
- objects_inv_url = f"{ docs_url } /objects.inv"
146
+ for docs_url in docs_urls :
147
+ objects_inv_url = f"{ docs_url .rstrip ('/' )} /objects.inv"
143
148
144
149
r = requests .head (objects_inv_url , allow_redirects = True , timeout = 10 )
145
150
if r .status_code != 200 :
146
- raise ValueError (f"objects.inv not found at url { objects_inv_url } : HTTP Status { r .status_code } ." )
147
-
148
- return docs_url
151
+ stderr_writer (f"WARNING: objects.inv not found at url { objects_inv_url } : HTTP Status { r .status_code } ." )
152
+ else :
153
+ return docs_url
149
154
150
155
raise ValueError ("Documentation URL not found in data from PyPI." )
151
156
0 commit comments