@@ -4,13 +4,15 @@
from glob import iglob
from pathlib import Path
from urllib.parse import unquote, quote
-
+ import ast
import frontmatter
import markdown
from bs4 import BeautifulSoup
from mdx_wikilink_plus.mdx_wikilink_plus import WikiLinkPlusExtension
from mkdocs.config import config_options
from mkdocs.plugins import BasePlugin
+ from mkdocs_callouts.plugin import CalloutsPlugin
+ from custom_attributes.plugin import read_custom, convert_hashtags, convert_text_attributes


def search_in_file(citation_part: str, contents: str):
@@ -53,38 +55,43 @@ def search_in_file(citation_part: str, contents: str):


def mini_ez_links(urlo, base, end, url_whitespace, url_case):
-     base, url_blog = base
+     base, url_blog, md_link_path = base
    url_blog_path = [x for x in url_blog.split('/') if len(x) > 0]
    url_blog_path = url_blog_path[len(url_blog_path) - 1]
-     file_name = urlo[2].replace('index', '')
-     file_name = file_name.replace('../', '')
-     file_name = file_name.replace('./', '')
+     internal_link = Path(md_link_path, urlo[2]).resolve()
+     if os.path.isfile(internal_link):
+         internal_link = str(internal_link).replace(base, '')
+     else:  # fallback to searching
+         file_name = urlo[2].replace('index', '')
+         file_name = file_name.replace('../', '')
+         file_name = file_name.replace('./', '')

-     all_docs = [
-         re.sub(rf"(.*)({url_blog_path})?/docs/*", '', x.replace('\\', '/')).replace(
-             '.md', ''
-         )
-         for x in iglob(str(base) + os.sep + '**', recursive=True)
-         if os.path.isfile(x)
-     ]
-     file_found = [
-         '/' + x for x in all_docs if os.path.basename(x) == file_name or x == file_name
-     ]
-     if file_found:
-         file_path = file_found[0].replace(base, '')
-         url = file_path.replace('\\', '/').replace('.md', '')
-         url = url.replace('//', '/')
-         url = url_blog[:-1] + quote(url)
-         if not url.startswith(('https:/', 'http:/')):
-             url = 'https://' + url
-         if not url.endswith('/') and not url.endswith(('png', 'jpg', 'jpeg', 'gif', 'webm')):
-             url = url + '/'
-     else:
-         url = file_name
+         all_docs = [
+             re.sub(rf"(.*)({url_blog_path})?/docs/*", '', x.replace('\\', '/')).replace(
+                 '.md', ''
+             )
+             for x in iglob(str(base) + os.sep + '**', recursive=True)
+             if os.path.isfile(x)
+         ]
+         file_found = [
+             '/' + x for x in all_docs if os.path.basename(x) == file_name or x == file_name
+         ]
+         if file_found:
+             internal_link = file_found[0]
+         else:
+             return file_name
+     file_path = internal_link.replace(base, '')
+     url = file_path.replace('\\', '/').replace('.md', '')
+     url = url.replace('//', '/')
+     url = url_blog[:-1] + quote(url)
+     if not url.startswith(('https:/', 'http:/')):
+         url = 'https://' + url
+     if not url.endswith('/') and not url.endswith(('png', 'jpg', 'jpeg', 'gif', 'webm')):
+         url = url + '/'
    return url


- def cite(md_link_path, link, soup, citation_part, config):
+ def cite(md_link_path, link, soup, citation_part, config, callouts, custom_attr):
    """Append the content of the founded file to the original file.

    Args:
@@ -97,9 +104,10 @@ def cite(md_link_path, link, soup, citation_part, config):
    """
    docs = config['docs_dir']
    url = config['site_url']
+
    md_config = {
        'mdx_wikilink_plus': {
-             'base_url': (docs, url),
+             'base_url': (docs, url, md_link_path),
            'build_url': mini_ez_links,
            'image_class': 'wikilink',
        }
@@ -115,6 +123,14 @@ def cite(md_link_path, link, soup, citation_part, config):
    contents = frontmatter.loads(text).content
    quote = search_in_file(citation_part, contents)
    if len(quote) > 0:
+         if callouts:
+             quote = CalloutsPlugin().on_page_markdown(quote, None, None, None)
+         if len(custom_attr) > 0:
+             config_attr = {
+                 'file': custom_attr,
+                 'docs_dir': docs
+             }
+             quote = convert_text_attributes(quote, config_attr)
        html = markdown.markdown(
            quote,
            extensions=[
@@ -132,19 +148,19 @@ def cite(md_link_path, link, soup, citation_part, config):
        link_soup = BeautifulSoup(html, 'html.parser')
        if link_soup:
            tooltip_template = (
-                 "<a href='"
-                 + str(new_uri)
-                 + "' class='link_citation'><i class='fas fa-link'></i> </a> <div"
-                 " class='citation'>"
-                 + str(link_soup).replace(
-                     '!<img class="wikilink', '<img class="wikilink'
-                 )
-                 + '</div>'
+                 "<a href='"
+                 + str(new_uri)
+                 + "' class='link_citation'><i class='fas fa-link'></i> </a> <div"
+                 " class='citation'>"
+                 + str(link_soup).replace(
+                     '!<img class="wikilink', '<img class="wikilink'
+                 )
+                 + '</div>'
            )
        else:
            tooltip_template = (
-                 "<div class='not_found'>" +
-                 str(link['src'].replace('/', '')) + '</div>'
+                 "<div class='not_found'>" +
+                 str(link['src'].replace('/', '')) + '</div>'
            )
        new_soup = str(soup).replace(str(link), str(tooltip_template))
        soup = BeautifulSoup(new_soup, 'html.parser')
@@ -170,14 +186,19 @@ def search_doc(md_link_path, all_docs):
        return file[0]
    return 0

+
def create_link(link):
    if link.endswith('/'):
        return link[:-1] + '.md'
    else:
        return link + '.md'

+
class EmbedFile(BasePlugin):
-     config_scheme = (('param', config_options.Type(str, default='')),)
+     config_scheme = (
+         ('callouts', config_options.Type(str | bool, default='false')),
+         ('custom-attributes', config_options.Type(str, default=''))
+     )

    def __init__(self):
        self.enabled = True
@@ -187,11 +208,9 @@ def on_post_page(self, output_content, page, config):
        soup = BeautifulSoup(output_content, 'html.parser')
        docs = Path(config['docs_dir'])
        md_link_path = ''
-         all_docs = [
-             x
-             for x in iglob(str(docs) + os.sep + '**', recursive=True)
-             if x.endswith('.md')
-         ]
+         callout = self.config['callouts']
+         if isinstance(callout, str):
+             callout = ast.literal_eval(callout.title())

        for link in soup.findAll(
            'img',
@@ -200,19 +219,19 @@ def on_post_page(self, output_content, page, config):
        ):
            if len(link['src']) > 0:

-                 if link['src'][0] == '.':  # relative links
-                     md_src = create_link(unquote(link['src']))
-                     md_link_path = Path(
+                 if link['src'][0] == '.':  # relative links
+                     md_src = create_link(unquote(link['src']))
+                     md_link_path = Path(
                        os.path.dirname(page.file.abs_src_path), md_src).resolve()

                elif link['src'][0] == '/':
-                     md_src_path = create_link(unquote(link['src']))
+                     md_src_path = create_link(unquote(link['src']))
                    md_link_path = os.path.join(
                        config['docs_dir'], md_src_path)
                    md_link_path = Path(unquote(md_link_path)).resolve()

                elif link['src'][0] != '#':
-                     md_src_path = create_link(unquote(link['src']))
+                     md_src_path = create_link(unquote(link['src']))

                    md_link_path = os.path.join(
                        os.path.dirname(page.file.abs_src_path), md_src_path
@@ -240,10 +259,15 @@ def on_post_page(self, output_content, page, config):
            md_link_path = Path(md_link_path)
            if os.path.isfile(md_link_path):
                soup = cite(md_link_path, link, soup,
-                             citation_part, config)
+                             citation_part, config, callout, self.config['custom-attributes'])
            else:
+                 all_docs = [
+                     x
+                     for x in iglob(str(docs) + os.sep + '**', recursive=True)
+                     if x.endswith('.md')
+                 ]
                link_found = search_doc(md_link_path, all_docs)
                if link_found != 0:
                    soup = cite(link_found, link, soup,
-                                 citation_part, config)
+                                 citation_part, config, callout, self.config['custom-attributes'])
        return str(soup)
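
For context on the last two hunks: the new `callouts` option is declared as `str | bool` and typically arrives from the site configuration as the string 'true' or 'false', which `on_post_page` coerces with `ast.literal_eval(callout.title())`. A minimal standalone sketch of that coercion follows; the `coerce_callouts` helper name is illustrative only and not part of the plugin.

import ast

def coerce_callouts(value):
    # 'true' / 'false' strings are title-cased to 'True' / 'False' and parsed
    # into real booleans; an actual bool (or any non-string value) passes through.
    if isinstance(value, str):
        return ast.literal_eval(value.title())
    return value

assert coerce_callouts('false') is False
assert coerce_callouts('true') is True
assert coerce_callouts(True) is True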