@@ -33,14 +33,15 @@
 # stdlib
 import functools
 import json
+import os.path
 import re
+from urllib.parse import urlparse
 from typing import Dict, Optional, Tuple, Union
 
 # 3rd party
+
 import dist_meta
 import requests
-from apeye.requests_url import RequestsURL
-from dist_meta.metadata_mapping import MetadataMapping
 from domdf_python_tools.compat import importlib_resources
 from domdf_python_tools.utils import stderr_writer
 from packaging.requirements import Requirement
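apeye's RequestsURL wrapper is dropped in favour of calling requests directly, with os.path and urllib.parse covering the URL handling. A minimal sketch of the equivalent redirect-following lookup, assuming a placeholder URL:

# Sketch only: resolve a documentation link's final location with plain requests,
# using the same allow_redirects/timeout arguments as the diff.
import requests

r = requests.head("https://example.com/docs", allow_redirects=True, timeout=10)
print(r.status_code, r.url)  # r.url is the final URL after any redirects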
@@ -61,7 +62,7 @@
 _DOCUMENTATION_RE = re.compile(r"^[dD]oc(s|umentation)")
 
 
-def _get_project_links(project_name: str) -> MetadataMapping:
+def _get_project_links(project_name: str) -> list:
 	"""
 	Returns the web links for the given project.
 
@@ -70,17 +71,19 @@ def _get_project_links(project_name: str) -> MetadataMapping:
 	:param project_name:
 	"""
 
-	urls = MetadataMapping()
+	urls = []
 
 	# Try a local package first
 	try:
 		dist = dist_meta.distributions.get_distribution(project_name)
 		raw_urls = dist.get_metadata().get_all("Project-URL", default=())
 
 		for url in raw_urls:
-			label, url, *_ = map(str.strip, url.split(','))
+			label, url = url.split(",", 1)
 			if _DOCUMENTATION_RE.match(label):
-				urls[label] = url
+				urls.append(url)
+
+		urls.append(dist.get_metadata().get("Home-Page"))
 
 	except dist_meta.distributions.DistributionNotFoundError:
 		# Fall back to PyPI
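A Project-URL metadata value has the form "label, url", and only the first comma separates the two. Splitting once from the left keeps any commas inside the URL intact, where the old map(str.strip, url.split(',')) unpacking silently truncated the URL at its first comma. A small illustration with a hypothetical entry:

# Sketch: split a Project-URL value into label and URL at the first comma only.
raw = "Documentation, https://example.com/docs?page=1,2"
label, url = raw.split(",", 1)
print(label)        # "Documentation"
print(url.strip())  # "https://example.com/docs?page=1,2" (comma preserved)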
@@ -91,8 +94,11 @@ def _get_project_links(project_name: str) -> MetadataMapping:
 		if "project_urls" in metadata and metadata["project_urls"]:
 			for label, url in metadata["project_urls"].items():
 				if _DOCUMENTATION_RE.match(label):
-					urls[label] = url
+					urls.append(url)
 
+		urls.append(metadata["home_page"])
+
+	urls = [url.strip() for url in filter(None, urls)]
 	return urls
 
 
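The PyPI fallback reads the project_urls and home_page fields of the JSON API's info block. The diff does not show how metadata is fetched; a rough sketch assuming the standard /pypi/<name>/json route, with "requests" as a placeholder project and error handling omitted:

# Sketch: fetch the metadata dict the fallback path consumes.
import requests

response = requests.get("https://pypi.org/pypi/requests/json", timeout=10)
metadata = response.json()["info"]
print(metadata["project_urls"])  # e.g. {"Documentation": "...", "Source": "..."}
print(metadata["home_page"])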
@@ -127,26 +133,24 @@ def get_sphinx_doc_url(pypi_name: str) -> str:
 		Now raises :exc:`~packaging.requirements.InvalidRequirement` rather than
 		:exc:`apeye.slumber_url.exceptions.HttpNotFoundError` if the project could not be found on PyPI.
 	"""
-
-	for key, value in _get_project_links(pypi_name).items():
-
+	docs_urls = []
+	for value in _get_project_links(pypi_name):
 		# Follow redirects to get actual URL
-		r = RequestsURL(value).head(allow_redirects=True, timeout=10)
-		if r.status_code != 200:  # pragma: no cover
-			raise ValueError(f"Documentation URL not found: HTTP Status {r.status_code}.")
+		r = requests.head(value, allow_redirects=True, timeout=10)
 
-		docs_url = r.url
+		if r.status_code == 200:
+			has_extension = os.path.splitext(urlparse(r.url).path)[-1]
+			url = os.path.dirname(r.url) if has_extension else r.url
+			docs_urls.append(url)
 
-		if docs_url.endswith('/'):
-			objects_inv_url = f"{docs_url}objects.inv"
-		else:  # pragma: no cover
-			objects_inv_url = f"{docs_url}/objects.inv"
+	for docs_url in docs_urls:
+		objects_inv_url = f"{docs_url.rstrip('/')}/objects.inv"
 
 		r = requests.head(objects_inv_url)
 		if r.status_code != 200:
-			raise ValueError(f"objects.inv not found at url {objects_inv_url}: HTTP Status {r.status_code}.")
-
-		return docs_url
+			stderr_writer(f"WARNING: objects.inv not found at url {objects_inv_url}: HTTP Status {r.status_code}.")
+		else:
+			return docs_url
 
 
 	raise ValueError("Documentation URL not found in data from PyPI.")
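When the final URL points at a file such as .../index.html, the extension check trims it back to its directory before objects.inv is probed; URLs already ending at a directory pass through unchanged, and rstrip('/') normalises both forms. A short illustration with hypothetical URLs:

# Sketch: a page URL and a directory URL reduce to the same objects.inv probe.
import os.path
from urllib.parse import urlparse

for final_url in ("https://example.com/en/latest/index.html", "https://example.com/en/latest/"):
	has_extension = os.path.splitext(urlparse(final_url).path)[-1]
	docs_url = os.path.dirname(final_url) if has_extension else final_url
	print(f"{docs_url.rstrip('/')}/objects.inv")

# Both iterations print https://example.com/en/latest/objects.inv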