From bb0a7927d8cf6dcabea3afb70f71610e26c2481e Mon Sep 17 00:00:00 2001 From: Arun Persaud Date: Sat, 29 Mar 2025 08:54:41 -0700 Subject: [PATCH 01/11] Replacing io.open with pathlib --- nikola/filters.py | 23 +++++++++++------------ nikola/nikola.py | 11 +++++------ 2 files changed, 16 insertions(+), 18 deletions(-) diff --git a/nikola/filters.py b/nikola/filters.py index 576efad656..b014864cb4 100644 --- a/nikola/filters.py +++ b/nikola/filters.py @@ -29,10 +29,10 @@ All filters defined in this module are registered in Nikola.__init__. """ -import io import json import os import re +from pathlib import Path import shutil import shlex import subprocess @@ -92,11 +92,10 @@ def apply_to_text_file(f): @wraps(f) def f_in_file(fname, *args, **kwargs): - with io.open(fname, 'r', encoding='utf-8-sig') as inf: - data = inf.read() + fpath = Path(fname) + data = fpath.read_text(encoding='utf-8-sig') data = f(data, *args, **kwargs) - with io.open(fname, 'w+', encoding='utf-8') as outf: - outf.write(data) + fpath.write_text(data, encoding='utf-8') return f_in_file @@ -407,9 +406,8 @@ def php_template_injection(data): r'<\!-- __NIKOLA_PHP_TEMPLATE_INJECTION source\:(.*) checksum\:(.*)__ -->', data ) if template: - source = template.group(1) - with io.open(source, 'r', encoding='utf-8-sig') as in_file: - phpdata = in_file.read() + source = Path(template.group(1)) + phpdata = source.read_text(encoding='utf-8-sig') _META_SEPARATOR = ( '(' + os.linesep * 2 + '|' + ('\n' * 2) + '|' + ('\r\n' * 2) + ')' ) @@ -498,8 +496,7 @@ def add_header_permalinks(fname, xpath_list=None, file_blacklist=None): file_blacklist = file_blacklist or [] if fname in file_blacklist: return - with io.open(fname, 'r', encoding='utf-8-sig') as inf: - data = inf.read() + data = Path(fname).read_text(encoding='utf-8-sig') doc = lxml.html.document_fromstring(data) # Get language for slugify try: @@ -538,8 +535,10 @@ def add_header_permalinks(fname, xpath_list=None, file_blacklist=None): ) node.append(new_node) - with io.open(fname, 'w', encoding='utf-8') as outf: - outf.write('\n' + lxml.html.tostring(doc, encoding='unicode')) + Path(fname).write_text( + '\n' + lxml.html.tostring(doc, encoding='unicode'), + encoding='utf-8', + ) @_ConfigurableFilter(top_classes='DEDUPLICATE_IDS_TOP_CLASSES') diff --git a/nikola/nikola.py b/nikola/nikola.py index 6f80377c1f..d18fe15c1c 100644 --- a/nikola/nikola.py +++ b/nikola/nikola.py @@ -27,10 +27,10 @@ """The main Nikola site object.""" import datetime -import io import json import functools import logging +from pathlib import Path import operator import os import pathlib @@ -2571,11 +2571,10 @@ def atom_post_text(post, text): dst_dir = os.path.dirname(output_path) utils.makedirs(dst_dir) - with io.open(output_path, "w+", encoding="utf-8") as atom_file: - data = lxml.etree.tostring(feed_root.getroottree(), encoding="UTF-8", pretty_print=True, xml_declaration=True) - if isinstance(data, bytes): - data = data.decode('utf-8') - atom_file.write(data) + data = lxml.etree.tostring(feed_root.getroottree(), encoding="UTF-8", pretty_print=True, xml_declaration=True) + if isinstance(data, bytes): + data = data.decode('utf-8') + Path(output_path).write_text(data, encoding="utf-8") def generic_index_renderer(self, lang, posts, indexes_title, template_name, context_source, kw, basename, page_link, page_path, additional_dependencies=None): """Create an index page. 
From 52f9352d8c7f0272772463f5274bef5e2e880514 Mon Sep 17 00:00:00 2001 From: Arun Persaud Date: Sat, 29 Mar 2025 09:26:16 -0700 Subject: [PATCH 02/11] replace io.open with Pathlib --- nikola/plugin_categories.py | 13 ++++++------ nikola/plugins/basic_import.py | 17 +++++++-------- nikola/plugins/command/import_wordpress.py | 5 ++--- nikola/plugins/command/init.py | 6 +++--- nikola/plugins/command/new_post.py | 9 ++++---- nikola/plugins/command/rst2html/__init__.py | 11 ++++------ nikola/plugins/command/theme.py | 23 ++++++++++----------- nikola/plugins/compile/html.py | 20 +++++++----------- 8 files changed, 47 insertions(+), 57 deletions(-) diff --git a/nikola/plugin_categories.py b/nikola/plugin_categories.py index 140f12779d..dc3f7314ef 100644 --- a/nikola/plugin_categories.py +++ b/nikola/plugin_categories.py @@ -26,9 +26,9 @@ """Nikola plugin categories.""" -import io import logging import os +from pathlib import Path from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple @@ -289,11 +289,12 @@ def get_dep_filename(self, post: Post, lang: str) -> str: def _read_extra_deps(self, post: Post, lang: str) -> List[str]: """Read contents of .dep file and return them as a list.""" - dep_path = self.get_dep_filename(post, lang) - if os.path.isfile(dep_path): - with io.open(dep_path, 'r+', encoding='utf-8-sig') as depf: - deps = [l.strip() for l in depf.readlines()] - return deps + dep_path = Path(self.get_dep_filename(post, lang)) + + if dep_path.is_file(): + deps = dep_path.read_text(encoding='utf-8-sig') + deps = [line.strip() for line in depf.splitlines()] + return deps return [] def register_extra_dependencies(self, post: Post): diff --git a/nikola/plugins/basic_import.py b/nikola/plugins/basic_import.py index 59fb053411..9ca2fbe74c 100644 --- a/nikola/plugins/basic_import.py +++ b/nikola/plugins/basic_import.py @@ -26,10 +26,10 @@ """Mixin for importer plugins.""" -import io import csv import datetime import os +from pathlib import Path from urllib.parse import urlparse from lxml import etree, html @@ -157,17 +157,17 @@ def write_metadata(self, filename, title, slug, post_date, description, tags, ** description = "" utils.makedirs(os.path.dirname(filename)) - with io.open(filename, "w+", encoding="utf8") as fd: - data = {'title': title, 'slug': slug, 'date': post_date, 'tags': ','.join(tags), 'description': description} - data.update(kwargs) - fd.write(utils.write_metadata(data, site=self.site, comment_wrap=False)) + data = {'title': title, 'slug': slug, 'date': post_date, 'tags': ','.join(tags), 'description': description} + data.update(kwargs) + metadata = utils.write_metadata(data, site=self.site, comment_wrap=False) + Path(filename).write_text(metadata, encoding="utf8") @staticmethod def write_urlmap_csv(output_file, url_map): """Write urlmap to csv file.""" utils.makedirs(os.path.dirname(output_file)) - fmode = 'w+' - with io.open(output_file, fmode) as fd: + + with Path(output_file_.open('w+') as fd: csv_writer = csv.writer(fd) for item in url_map.items(): csv_writer.writerow(item) @@ -189,8 +189,7 @@ def get_configuration_output_path(self): def write_configuration(filename, rendered_template): """Write the configuration file.""" utils.makedirs(os.path.dirname(filename)) - with io.open(filename, 'w+', encoding='utf8') as fd: - fd.write(rendered_template) + Path(filename).write_text(rendered_template, encoding='utf8') def replacer(dst): diff --git a/nikola/plugins/command/import_wordpress.py b/nikola/plugins/command/import_wordpress.py index 9a8b130bd3..1e64bf5e92 
100644 --- a/nikola/plugins/command/import_wordpress.py +++ b/nikola/plugins/command/import_wordpress.py @@ -27,10 +27,10 @@ """Import a WordPress dump.""" import datetime -import io import json import os import re +from pathlib import Path import sys from collections import defaultdict from urllib.parse import urlparse, unquote @@ -1120,8 +1120,7 @@ def process_item_if_attachment(self, item): def write_attachments_info(self, path, attachments): """Write attachments info file.""" - with io.open(path, "wb") as file: - file.write(json.dumps(attachments).encode('utf-8')) + Path(path).write_text(json.dumps(attachments), encoding='utf-8') def process_item_if_post_or_page(self, item): """Process posts and pages.""" diff --git a/nikola/plugins/command/init.py b/nikola/plugins/command/init.py index b42af58256..9c3c453f24 100644 --- a/nikola/plugins/command/init.py +++ b/nikola/plugins/command/init.py @@ -280,9 +280,9 @@ def create_configuration(target): """Create configuration file.""" template_path = pkg_resources_path('nikola', 'conf.py.in') conf_template = Template(filename=template_path) - conf_path = os.path.join(target, 'conf.py') - with io.open(conf_path, 'w+', encoding='utf8') as fd: - fd.write(conf_template.render(**prepare_config(SAMPLE_CONF))) + conf_path = Path(target) / 'conf.py' + output = conf_template.render(**prepare_config(SAMPLE_CONF)) + conf_path.write_text(output, encoding='utf8') @staticmethod def create_configuration_to_string(): diff --git a/nikola/plugins/command/new_post.py b/nikola/plugins/command/new_post.py index 0734ec8b84..15a6bc0324 100644 --- a/nikola/plugins/command/new_post.py +++ b/nikola/plugins/command/new_post.py @@ -26,10 +26,10 @@ """Create a new post.""" -import io import datetime import operator import os +from pathlib import Path import shutil import subprocess import sys @@ -414,8 +414,7 @@ def _execute(self, options, args): LOGGER.warning('This compiler does not support one-file posts.') if onefile and import_file: - with io.open(import_file, 'r', encoding='utf-8-sig') as fh: - content = fh.read() + content = Path(import_file).read_text(encoding='utf-8-sig') elif not import_file: if is_page: content = self.site.MESSAGES[self.site.default_lang]["Write your page here."] @@ -433,8 +432,8 @@ def _execute(self, options, args): event = dict(path=txt_path) if not onefile: # write metadata file - with io.open(meta_path, "w+", encoding="utf8") as fd: - fd.write(utils.write_metadata(data, comment_wrap=False, site=self.site)) + output = utils.write_metadata(data, comment_wrap=False, site=self.site) + Path(meta_path).write_text(output, encoding="utf8") LOGGER.info("Your {0}'s metadata is at: {1}".format(content_type, meta_path)) event['meta_path'] = meta_path LOGGER.info("Your {0}'s text is at: {1}".format(content_type, txt_path)) diff --git a/nikola/plugins/command/rst2html/__init__.py b/nikola/plugins/command/rst2html/__init__.py index 4e8fcf6bfa..a8ed354264 100644 --- a/nikola/plugins/command/rst2html/__init__.py +++ b/nikola/plugins/command/rst2html/__init__.py @@ -26,8 +26,7 @@ """Compile reStructuredText to HTML, using Nikola architecture.""" - -import io +from pathlib import Path import lxml.html from mako.template import Template from nikola.plugin_categories import Command @@ -49,13 +48,11 @@ def _execute(self, options, args): print("This command takes only one argument (input file name).") return 2 source = args[0] - with io.open(source, "r", encoding="utf-8-sig") as in_file: - data = in_file.read() - output, error_level, deps, shortcode_deps = 
compiler.compile_string(data, source, True) + data = Path(source).read_text(encoding="utf-8-sig") + output, error_level, deps, shortcode_deps = compiler.compile_string(data, source, True) rstcss_path = pkg_resources_path('nikola', 'data/themes/base/assets/css/rst_base.css') - with io.open(rstcss_path, "r", encoding="utf-8-sig") as fh: - rstcss = fh.read() + rstcss = Path(rstcss_path).read_text(encoding="utf-8-sig") template_path = pkg_resources_path('nikola', 'plugins/command/rst2html/rst2html.tmpl') template = Template(filename=template_path) diff --git a/nikola/plugins/command/theme.py b/nikola/plugins/command/theme.py index 23eec6882f..1bc16f1e63 100644 --- a/nikola/plugins/command/theme.py +++ b/nikola/plugins/command/theme.py @@ -30,6 +30,7 @@ import io import json.decoder import os +from pathlib import Path import shutil import sys import time @@ -235,11 +236,11 @@ def do_install(self, name, data): if os.path.exists(confpypath): LOGGER.warning('This theme has a sample config file. Integrate it with yours in order to make this theme work!') print('Contents of the conf.py.sample file:\n') - with io.open(confpypath, 'r', encoding='utf-8-sig') as fh: - if self.site.colorful: - print(pygments.highlight(fh.read(), PythonLexer(), TerminalFormatter())) - else: - print(fh.read()) + text = Path(confpypath).read_text(encoding='utf-8-sig') + if self.site.colorful: + print(pygments.highlight(text, PythonLexer(), TerminalFormatter())) + else: + print(text) return True def do_uninstall(self, name): @@ -355,17 +356,15 @@ def new_theme(self, name, engine, parent, create_legacy_meta=False): } theme_meta_path = os.path.join(themedir, name + '.theme') - with io.open(theme_meta_path, 'w', encoding='utf-8') as fh: + with Path(theme_meta_path).open('w', encoding='utf-8') as fh: cp.write(fh) LOGGER.info("Created file {0}".format(theme_meta_path)) if create_legacy_meta: - with io.open(os.path.join(themedir, 'parent'), 'w', encoding='utf-8') as fh: - fh.write(parent + '\n') - LOGGER.info("Created file {0}".format(os.path.join(themedir, 'parent'))) - with io.open(os.path.join(themedir, 'engine'), 'w', encoding='utf-8') as fh: - fh.write(engine + '\n') - LOGGER.info("Created file {0}".format(os.path.join(themedir, 'engine'))) + (Path(themedir) / 'parent').write_text(parent + '\n', encoding='utf-8') + LOGGER.info("Created file {0}".format(os.path.join(themedir, 'parent'))) + (Path(themedir) / 'engine').write_text(engine + '\n', encoding='utf-8') + LOGGER.info("Created file {0}".format(os.path.join(themedir, 'engine'))) LOGGER.info("Theme {0} created successfully.".format(themedir)) LOGGER.info('Remember to set THEME="{0}" in conf.py to use this theme.'.format(name)) diff --git a/nikola/plugins/compile/html.py b/nikola/plugins/compile/html.py index f476f8aa71..78833d6a1a 100644 --- a/nikola/plugins/compile/html.py +++ b/nikola/plugins/compile/html.py @@ -27,8 +27,8 @@ """Page compiler plugin for HTML source files.""" -import io import os +from pathlib import Path import lxml.html @@ -54,11 +54,9 @@ def compile_string(self, data, source_path=None, is_two_file=True, post=None, la def compile(self, source, dest, is_two_file=True, post=None, lang=None): """Compile the source file into HTML and save as dest.""" makedirs(os.path.dirname(dest)) - with io.open(dest, "w+", encoding="utf-8") as out_file: - with io.open(source, "r", encoding="utf-8-sig") as in_file: - data = in_file.read() - data, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang) - out_file.write(data) + data = 
Path(source).read_text(encoding="utf-8-sig") + data, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang) + Path(dest).write_text(data, encoding="utf-8") if post is None: if shortcode_deps: self.logger.error( @@ -80,10 +78,9 @@ def create_post(self, path, **kw): makedirs(os.path.dirname(path)) if not content.endswith('\n'): content += '\n' - with io.open(path, "w+", encoding="utf-8") as fd: - if onefile: - fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self)) - fd.write(content) + if onefile: + content = write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self) + content + Path(path).write_text(content, encoding="utf-8") def read_metadata(self, post, file_metadata_regexp=None, unslugify_titles=False, lang=None): """Read the metadata from a post's meta tags, and return a metadata dict.""" @@ -91,8 +88,7 @@ def read_metadata(self, post, file_metadata_regexp=None, unslugify_titles=False, lang = LocaleBorg().current_lang source_path = post.translated_source_path(lang) - with io.open(source_path, 'r', encoding='utf-8-sig') as inf: - data = inf.read() + data = Path(source_path).read_text(encoding='utf-8-sig') metadata = {} try: From b330e9ecd437b566fc85b180dfdb85190a2c1e7e Mon Sep 17 00:00:00 2001 From: Arun Persaud Date: Sat, 29 Mar 2025 09:32:08 -0700 Subject: [PATCH 03/11] fix typos --- nikola/plugin_categories.py | 2 +- nikola/plugins/basic_import.py | 2 +- nikola/plugins/command/init.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nikola/plugin_categories.py b/nikola/plugin_categories.py index dc3f7314ef..c1fdefa34c 100644 --- a/nikola/plugin_categories.py +++ b/nikola/plugin_categories.py @@ -293,7 +293,7 @@ def _read_extra_deps(self, post: Post, lang: str) -> List[str]: if dep_path.is_file(): deps = dep_path.read_text(encoding='utf-8-sig') - deps = [line.strip() for line in depf.splitlines()] + deps = [line.strip() for line in deps.splitlines()] return deps return [] diff --git a/nikola/plugins/basic_import.py b/nikola/plugins/basic_import.py index 9ca2fbe74c..f82910d172 100644 --- a/nikola/plugins/basic_import.py +++ b/nikola/plugins/basic_import.py @@ -167,7 +167,7 @@ def write_urlmap_csv(output_file, url_map): """Write urlmap to csv file.""" utils.makedirs(os.path.dirname(output_file)) - with Path(output_file_.open('w+') as fd: + with Path(output_file).open('w+') as fd: csv_writer = csv.writer(fd) for item in url_map.items(): csv_writer.writerow(item) diff --git a/nikola/plugins/command/init.py b/nikola/plugins/command/init.py index 9c3c453f24..c5e155ce17 100644 --- a/nikola/plugins/command/init.py +++ b/nikola/plugins/command/init.py @@ -27,9 +27,9 @@ """Create a new site.""" import datetime -import io import json import os +from pathlib import Path import shutil import textwrap import unidecode From 7a4ce94738fa0b30b272988d031ccae6a25ded7e Mon Sep 17 00:00:00 2001 From: Arun Persaud Date: Sat, 29 Mar 2025 09:43:08 -0700 Subject: [PATCH 04/11] remove unused os import --- nikola/plugin_categories.py | 169 +++++++++++++++++++++--------------- 1 file changed, 101 insertions(+), 68 deletions(-) diff --git a/nikola/plugin_categories.py b/nikola/plugin_categories.py index c1fdefa34c..b2bdd5812f 100644 --- a/nikola/plugin_categories.py +++ b/nikola/plugin_categories.py @@ -27,7 +27,6 @@ """Nikola plugin categories.""" import logging -import os from pathlib import Path from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple @@ -60,7 +59,7 @@ class BasePlugin: """Base plugin class.""" 
logger = None - site: Optional['nikola.nikola.Nikola'] = None # NOQA # Circular import not easy to fix. + site: Optional['nikola.nikola.Nikola'] = None # NOQA # Circular import not easy to fix. def set_site(self, site): """Set site, which is a Nikola instance.""" @@ -100,10 +99,10 @@ def supported_extensions(self) -> Optional[List[str]]: class Command(BasePlugin, DoitCommand): """Doit command implementation.""" - name = "dummy_command" + name = 'dummy_command' - doc_purpose = "A short explanation." - doc_usage = "" + doc_purpose = 'A short explanation.' + doc_usage = '' doc_description = None # None value will completely omit line from doc # see https://pydoit.org/cmd_run.html#parameters cmd_options = () @@ -126,7 +125,7 @@ def execute(self, options=None, args=None) -> int: args = args or [] if self.needs_config and not self.site.configured: - LOGGER.error("This command needs to run inside an existing Nikola site.") + LOGGER.error('This command needs to run inside an existing Nikola site.') return 3 try: return self._execute(options, args) @@ -136,6 +135,7 @@ def execute(self, options=None, args=None) -> int: else: # Do the import only now to evade a circular import problems: from .__main__ import _print_exception + _print_exception() return 3 @@ -151,19 +151,19 @@ def _execute(self, options, args) -> int: def help(self): """Return help text for a command.""" text = [] - text.append("Purpose: %s" % self.doc_purpose) - text.append("Usage: nikola %s %s" % (self.name, self.doc_usage)) + text.append('Purpose: %s' % self.doc_purpose) + text.append('Usage: nikola %s %s' % (self.name, self.doc_usage)) text.append('') - text.append("Options:") + text.append('Options:') for opt in self.cmdparser.options: text.extend(opt.help_doc()) if self.doc_description is not None: - text.append("") - text.append("Description:") + text.append('') + text.append('Description:') text.append(self.doc_description) - return "\n".join(text) + return '\n'.join(text) # we need to patch DoitCommand.help with doit <0.31.0 @@ -174,7 +174,7 @@ def help(self): class BaseTask(BasePlugin): """Base for task generators.""" - name = "dummy_task" + name = 'dummy_task' # default tasks are executed by default. # the others have to be specifie in the command line. @@ -196,19 +196,19 @@ def group_task(self) -> Dict[str, Optional[str]]: class Task(BaseTask): """Task generator.""" - name = "dummy_task" + name = 'dummy_task' class LateTask(BaseTask): """Late task generator (plugin executed after all Task plugins).""" - name = "dummy_latetask" + name = 'dummy_latetask' class TemplateSystem(BasePlugin): """Provide support for templating systems.""" - name = "dummy_templates" + name = 'dummy_templates' def set_directories(self, directories: List[str], cache_folder: str) -> None: """Set the list of folders where templates are located and cache.""" @@ -230,7 +230,9 @@ def get_string_deps(self, text: str, context=None): """Find dependencies for a template string.""" raise NotImplementedError() - def render_template(self, template_name: str, output_name: str, context: Dict[str, str]) -> str: + def render_template( + self, template_name: str, output_name: str, context: Dict[str, str] + ) -> str: """Render template to a file using context. 
This must save the data to output_name *and* return it @@ -254,7 +256,7 @@ def get_template_path(self, template_name: str) -> str: class TaskMultiplier(BasePlugin): """Take a task and return *more* tasks.""" - name = "dummy multiplier" + name = 'dummy multiplier' def process(self, task) -> list: """Examine task and create more tasks. Returns extra tasks only.""" @@ -264,7 +266,7 @@ def process(self, task) -> list: class PageCompiler(BasePlugin): """Compile text files into HTML.""" - name = "dummy_compiler" + name = 'dummy_compiler' friendly_name = '' demote_headers = False supports_onefile = True @@ -299,6 +301,7 @@ def _read_extra_deps(self, post: Post, lang: str) -> List[str]: def register_extra_dependencies(self, post: Post): """Add dependency to post object to check .dep file.""" + def create_lambda(lang: str) -> Callable: # We create a lambda like this so we can pass `lang` to it, because if we didn’t # add that function, `lang` would always be the last language in TRANSLATIONS. @@ -319,7 +322,9 @@ def compile(self, source: str, dest: str, is_two_file=True, post=None, lang=None """Compile the source file into HTML and save as dest.""" raise NotImplementedError() - def compile_string(self, data: str, source_path=None, is_two_file=True, post=None, lang=None) -> str: + def compile_string( + self, data: str, source_path=None, is_two_file=True, post=None, lang=None + ) -> str: """Compile the source file into HTML strings (with shortcode support). Returns a tuple of at least two elements: HTML string [0] and shortcode dependencies [last]. @@ -333,7 +338,7 @@ def create_post(self, path: str, content=None, onefile=False, is_page=False, **k def extension(self) -> str: """Return the preferred extension for the output of this compiler.""" - return ".html" + return '.html' def read_metadata(self, post: Post, lang=None) -> Dict[str, str]: """Read the metadata from a post, and return a metadata dict.""" @@ -345,6 +350,7 @@ def split_metadata(self, data: str, post=None, lang=None) -> Tuple[str, str]: extractor = post.used_extractor[lang] else: import nikola.metadata_extractors + extractor = nikola.metadata_extractors.DEFAULT_EXTRACTOR if isinstance(extractor, MetadataExtractor): @@ -356,7 +362,10 @@ def get_compiler_extensions(self) -> list: """Activate all the compiler extension plugins for a given compiler and return them.""" plugins = [] for plugin_info in self.site.compiler_extensions: - if plugin_info.compiler == self.name or plugin_info.plugin_object.compiler_name == self.name: + if ( + plugin_info.compiler == self.name + or plugin_info.plugin_object.compiler_name == self.name + ): plugins.append(plugin_info) return plugins @@ -372,29 +381,29 @@ class CompilerExtension(BasePlugin): this category, getting the compiler name with `plugin_info.compiler`. """ - name = "dummy_compiler_extension" - compiler_name = "dummy_compiler" + name = 'dummy_compiler_extension' + compiler_name = 'dummy_compiler' class RestExtension(CompilerExtension): """Extensions for reStructuredText.""" - name = "dummy_rest_extension" - compiler_name = "rest" + name = 'dummy_rest_extension' + compiler_name = 'rest' class MarkdownExtension(CompilerExtension): """Extensions for Markdown.""" - name = "dummy_markdown_extension" - compiler_name = "markdown" + name = 'dummy_markdown_extension' + compiler_name = 'markdown' class MetadataExtractor(BasePlugin): """Plugins that can extract meta information from post files.""" # Name of the extractor. (required) - name = "unknown" + name = 'unknown' # Where to get metadata from. 
(MetaSource; required) source = None # Priority of extractor. (MetaPriority; required) @@ -417,9 +426,11 @@ def _extract_metadata_from_text(self, source_text: str) -> Dict[str, str]: def split_metadata_from_text(self, source_text: str) -> Tuple[str, str]: """Split text into metadata and content (both strings).""" if self.split_metadata_re is None: - return "", source_text + return '', source_text else: - split_result = self.split_metadata_re.split(source_text.lstrip(), maxsplit=1) + split_result = self.split_metadata_re.split( + source_text.lstrip(), maxsplit=1 + ) if len(split_result) == 1: return split_result[0], split_result[0] else: @@ -454,31 +465,36 @@ def check_requirements(self): try: __import__(import_name) except ImportError: - req_missing([pip_name], "use {0} metadata".format(friendly_name), python=True, optional=False) + req_missing( + [pip_name], + 'use {0} metadata'.format(friendly_name), + python=True, + optional=False, + ) class SignalHandler(BasePlugin): """Signal handlers.""" - name = "dummy_signal_handler" + name = 'dummy_signal_handler' class ConfigPlugin(BasePlugin): """A plugin that can edit config (or modify the site) on-the-fly.""" - name = "dummy_config_plugin" + name = 'dummy_config_plugin' class CommentSystem(BasePlugin): """A plugn that offers a new comment system.""" - name = "dummy_comment_system" + name = 'dummy_comment_system' class ShortcodePlugin(BasePlugin): """A plugin that adds a shortcode.""" - name = "dummy_shortcode_plugin" + name = 'dummy_shortcode_plugin' def set_site(self, site): """Set Nikola site.""" @@ -512,7 +528,7 @@ class Importer(Command): write_urlmap """ - name = "dummy_importer" + name = 'dummy_importer' def _execute(self, options={}, args=[]): """Import the data into Nikola.""" @@ -689,22 +705,22 @@ class Taxonomy(BasePlugin): handler will not be created. """ - name = "dummy_taxonomy" + name = 'dummy_taxonomy' # Adjust the following values in your plugin! - classification_name = "taxonomy" - overview_page_variable_name = "taxonomy" - overview_page_items_variable_name = "items" - overview_page_hierarchy_variable_name = "taxonomy_hierarchy" + classification_name = 'taxonomy' + overview_page_variable_name = 'taxonomy' + overview_page_items_variable_name = 'items' + overview_page_hierarchy_variable_name = 'taxonomy_hierarchy' more_than_one_classifications_per_post = False has_hierarchy = False include_posts_from_subhierarchies = False include_posts_into_hierarchy_root = False show_list_as_subcategories_list = False show_list_as_index = False - subcategories_list_template = "taxonomy_list.tmpl" - template_for_single_list = "tagindex.tmpl" - template_for_classification_overview = "list.tmpl" + subcategories_list_template = 'taxonomy_list.tmpl' + template_for_single_list = 'tagindex.tmpl' + template_for_classification_overview = 'list.tmpl' always_disable_atom = False always_disable_rss = False apply_to_posts = True @@ -762,7 +778,9 @@ def sort_classifications(self, classifications: List[str], lang: str, level=None """ pass - def get_classification_friendly_name(self, classification: str, lang: str, only_last_component=False) -> str: + def get_classification_friendly_name( + self, classification: str, lang: str, only_last_component=False + ) -> str: """Extract a friendly name from the classification. 
The result of this function is usually displayed to the user, instead @@ -848,7 +866,9 @@ def provide_overview_context_and_uptodate(self, lang: str) -> str: """ raise NotImplementedError() - def provide_context_and_uptodate(self, classification: str, lang: str, node=None) -> Tuple[Dict, Dict]: + def provide_context_and_uptodate( + self, classification: str, lang: str, node=None + ) -> Tuple[Dict, Dict]: """Provide data for the context and the uptodate list for the list of the given classification. Must return a tuple of two dicts. The first is merged into the page's context, @@ -861,19 +881,30 @@ def provide_context_and_uptodate(self, classification: str, lang: str, node=None """ raise NotImplementedError() - def should_generate_classification_page(self, classification: str, post_list: List[Post], lang: str) -> bool: + def should_generate_classification_page( + self, classification: str, post_list: List[Post], lang: str + ) -> bool: """Only generates list of posts for classification if this function returns True.""" return True - def should_generate_atom_for_classification_page(self, classification: str, post_list: List[Post], lang: str) -> bool: + def should_generate_atom_for_classification_page( + self, classification: str, post_list: List[Post], lang: str + ) -> bool: """Only generates Atom feed for list of posts for classification if this function returns True.""" return self.should_generate_classification_page(classification, post_list, lang) - def should_generate_rss_for_classification_page(self, classification: str, post_list: List[Post], lang: str) -> bool: + def should_generate_rss_for_classification_page( + self, classification: str, post_list: List[Post], lang: str + ) -> bool: """Only generates RSS feed for list of posts for classification if this function returns True.""" return self.should_generate_classification_page(classification, post_list, lang) - def postprocess_posts_per_classification(self, posts_per_classification_per_language: List[Post], flat_hierarchy_per_lang=None, hierarchy_lookup_per_lang=None) -> None: + def postprocess_posts_per_classification( + self, + posts_per_classification_per_language: List[Post], + flat_hierarchy_per_lang=None, + hierarchy_lookup_per_lang=None, + ) -> None: """Rearrange, modify or otherwise use the list of posts per classification and per language. For compatibility reasons, the list could be stored somewhere else as well. @@ -885,7 +916,9 @@ def postprocess_posts_per_classification(self, posts_per_classification_per_lang """ pass - def get_other_language_variants(self, classification: str, lang: str, classifications_per_language: List[str]) -> List[str]: + def get_other_language_variants( + self, classification: str, lang: str, classifications_per_language: List[str] + ) -> List[str]: """Return a list of variants of the same classification in other languages. 
Given a `classification` in a language `lang`, return a list of pairs @@ -901,20 +934,20 @@ def get_other_language_variants(self, classification: str, lang: str, classifica CATEGORIES = { - "Command": Command, - "Task": Task, - "LateTask": LateTask, - "TemplateSystem": TemplateSystem, - "PageCompiler": PageCompiler, - "TaskMultiplier": TaskMultiplier, - "CompilerExtension": CompilerExtension, - "MarkdownExtension": MarkdownExtension, - "RestExtension": RestExtension, - "MetadataExtractor": MetadataExtractor, - "ShortcodePlugin": ShortcodePlugin, - "SignalHandler": SignalHandler, - "ConfigPlugin": ConfigPlugin, - "CommentSystem": CommentSystem, - "PostScanner": PostScanner, - "Taxonomy": Taxonomy, + 'Command': Command, + 'Task': Task, + 'LateTask': LateTask, + 'TemplateSystem': TemplateSystem, + 'PageCompiler': PageCompiler, + 'TaskMultiplier': TaskMultiplier, + 'CompilerExtension': CompilerExtension, + 'MarkdownExtension': MarkdownExtension, + 'RestExtension': RestExtension, + 'MetadataExtractor': MetadataExtractor, + 'ShortcodePlugin': ShortcodePlugin, + 'SignalHandler': SignalHandler, + 'ConfigPlugin': ConfigPlugin, + 'CommentSystem': CommentSystem, + 'PostScanner': PostScanner, + 'Taxonomy': Taxonomy, } From ee5af2794d673af49ee39b5a71033f0e4a9d899f Mon Sep 17 00:00:00 2001 From: Arun Persaud Date: Sun, 30 Mar 2025 09:35:03 -0700 Subject: [PATCH 05/11] more io.open replacements --- nikola/nikola.py | 6 +- nikola/plugins/compile/ipynb.py | 18 +++--- nikola/plugins/compile/markdown/__init__.py | 34 +++++----- nikola/plugins/compile/pandoc.py | 8 +-- nikola/plugins/compile/php.py | 17 +++-- nikola/plugins/compile/rest/__init__.py | 38 +++++------ nikola/plugins/compile/rest/listing.py | 14 ++-- nikola/plugins/task/galleries.py | 11 ++-- nikola/plugins/template/jinja.py | 8 +-- nikola/plugins/template/mako.py | 5 +- nikola/post.py | 14 ++-- scripts/import_po.py | 5 +- tests/integration/helper.py | 16 ++--- .../test_check_absolute_subfolder.py | 8 +-- tests/integration/test_empty_build.py | 17 ++--- .../test_page_index_normal_urls.py | 64 +++++++------------ tests/test_rss_feeds.py | 4 +- 17 files changed, 119 insertions(+), 168 deletions(-) diff --git a/nikola/nikola.py b/nikola/nikola.py index d18fe15c1c..1ae14d8d2f 100644 --- a/nikola/nikola.py +++ b/nikola/nikola.py @@ -30,10 +30,10 @@ import json import functools import logging -from pathlib import Path import operator import os import pathlib +from pathlib import Path import sys import typing from typing import Any, Callable, Dict, Iterable, List, Optional, Set @@ -1003,7 +1003,7 @@ def _filter_duplicate_plugins(self, plugin_list: Iterable[PluginCandidate]): def plugin_position_in_places(plugin: PluginInfo): # plugin here is a tuple: # (path to the .plugin file, path to plugin module w/o .py, plugin metadata) - place: pathlib.Path + place: Path for i, place in enumerate(self._plugin_places): try: # Path.is_relative_to backport @@ -1036,7 +1036,7 @@ def init_plugins(self, commands_only=False, load_all=False) -> None: os.path.expanduser(os.path.join('~', '.nikola', 'plugins')), os.path.join(os.getcwd(), 'plugins'), ] + [path for path in extra_plugins_dirs if path] - self._plugin_places = [pathlib.Path(p) for p in self._plugin_places] + self._plugin_places = [Path(p) for p in self._plugin_places] self.plugin_manager = PluginManager(plugin_places=self._plugin_places) diff --git a/nikola/plugins/compile/ipynb.py b/nikola/plugins/compile/ipynb.py index 32a19624fb..e87b41d088 100644 --- a/nikola/plugins/compile/ipynb.py +++ 
b/nikola/plugins/compile/ipynb.py @@ -26,9 +26,9 @@ """Page compiler plugin for nbconvert.""" -import io import json import os +from pathlib import Path try: import nbconvert @@ -87,13 +87,11 @@ def compile_string(self, data, source_path=None, is_two_file=True, post=None, la def compile(self, source, dest, is_two_file=False, post=None, lang=None): """Compile the source file into HTML and save as dest.""" makedirs(os.path.dirname(dest)) - with io.open(dest, "w+", encoding="utf-8") as out_file: - with io.open(source, "r", encoding="utf-8-sig") as in_file: - nb_str = in_file.read() - output, shortcode_deps = self.compile_string(nb_str, source, - is_two_file, post, - lang) - out_file.write(output) + nb_str = Path(source).read_text(encoding="utf-8-sig") + output, shortcode_deps = self.compile_string(nb_str, source, + is_two_file, post, + lang) + Path(dest).write_text(output, encoding="utf-8") if post is None: if shortcode_deps: self.logger.error( @@ -112,7 +110,7 @@ def read_metadata(self, post, lang=None): if lang is None: lang = LocaleBorg().current_lang source = post.translated_source_path(lang) - with io.open(source, "r", encoding="utf-8-sig") as in_file: + with Path(source).open("r", encoding="utf-8-sig") as in_file: nb_json = nbformat.read(in_file, current_nbformat) # Metadata might not exist in two-file posts or in hand-crafted # .ipynb files. @@ -161,7 +159,7 @@ def create_post(self, path, **kw): if onefile: nb["metadata"]["nikola"] = metadata - with io.open(path, "w+", encoding="utf-8") as fd: + with Path(path).open("w+", encoding="utf-8") as fd: nbformat.write(nb, fd, 4) diff --git a/nikola/plugins/compile/markdown/__init__.py b/nikola/plugins/compile/markdown/__init__.py index d6f137e431..43378e7816 100644 --- a/nikola/plugins/compile/markdown/__init__.py +++ b/nikola/plugins/compile/markdown/__init__.py @@ -26,9 +26,9 @@ """Page compiler plugin for Markdown.""" -import io import json import os +from pathlib import Path import threading from nikola import shortcodes as sc @@ -123,11 +123,9 @@ def compile(self, source, dest, is_two_file=True, post=None, lang=None): if Markdown is None: req_missing(['markdown'], 'build this site (compile Markdown)') makedirs(os.path.dirname(dest)) - with io.open(dest, "w+", encoding="utf-8") as out_file: - with io.open(source, "r", encoding="utf-8-sig") as in_file: - data = in_file.read() - output, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang) - out_file.write(output) + data = Path(source).read_text(encoding="utf-8-sig") + output, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang) + Path(dest).write_text(output, encoding="utf-8") if post is None: if shortcode_deps: self.logger.error( @@ -149,10 +147,9 @@ def create_post(self, path, **kw): makedirs(os.path.dirname(path)) if not content.endswith('\n'): content += '\n' - with io.open(path, "w+", encoding="utf-8") as fd: - if onefile: - fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self)) - fd.write(content) + if onefile: + content = write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self) + content + Path(path).write_text(content, encoding="utf-8") def read_metadata(self, post, lang=None): """Read the metadata from a post, and return a metadata dict.""" @@ -164,15 +161,14 @@ def read_metadata(self, post, lang=None): if lang is None: lang = LocaleBorg().current_lang source = post.translated_source_path(lang) - with io.open(source, 'r', encoding='utf-8-sig') as inf: - # Note: markdown meta returns 
lowercase keys - data = inf.read() - # If the metadata starts with "---" it's actually YAML and - # we should not let markdown parse it, because it will do - # bad things like setting empty tags to "''" - if data.startswith('---\n'): - return {} - _, meta = self.converters[lang].convert(data) + data = Path(source).read_text(encoding='utf-8-sig') + # Note: markdown meta returns lowercase keys + # If the metadata starts with "---" it's actually YAML and + # we should not let markdown parse it, because it will do + # bad things like setting empty tags to "''" + if data.startswith('---\n'): + return {} + _, meta = self.converters[lang].convert(data) # Map metadata from other platforms to names Nikola expects (Issue #2817) map_metadata(meta, 'markdown_metadata', self.site.config) return meta diff --git a/nikola/plugins/compile/pandoc.py b/nikola/plugins/compile/pandoc.py index 8e6c6a8101..9771590ad8 100644 --- a/nikola/plugins/compile/pandoc.py +++ b/nikola/plugins/compile/pandoc.py @@ -30,7 +30,6 @@ """ -import io import os import subprocess from typing import List @@ -105,7 +104,6 @@ def create_post(self, path, **kw): makedirs(os.path.dirname(path)) if not content.endswith('\n'): content += '\n' - with io.open(path, "w+", encoding="utf8") as fd: - if onefile: - fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self)) - fd.write(content) + if onefile: + content = write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self) + content + Path(path).write_text(content, encoding="utf8") diff --git a/nikola/plugins/compile/php.py b/nikola/plugins/compile/php.py index 3438bcb72a..915b75cd08 100644 --- a/nikola/plugins/compile/php.py +++ b/nikola/plugins/compile/php.py @@ -26,9 +26,9 @@ """Page compiler plugin for PHP.""" -import io import os from hashlib import md5 +from pathlib import Path from nikola.plugin_categories import PageCompiler from nikola.utils import makedirs, write_metadata @@ -43,10 +43,10 @@ class CompilePhp(PageCompiler): def compile(self, source, dest, is_two_file=True, post=None, lang=None): """Compile the source file into HTML and save as dest.""" makedirs(os.path.dirname(dest)) - with io.open(dest, "w+", encoding="utf8") as out_file: - with open(source, "rb") as in_file: - hash = md5(in_file.read()).hexdigest() - out_file.write('<!-- __NIKOLA_PHP_TEMPLATE_INJECTION source:{0} checksum:{1}__ -->'.format(source, hash)) + text = Path(source).read_bytes() + hash = md5(text).hexdigest() + out = f'<!-- __NIKOLA_PHP_TEMPLATE_INJECTION source:{source} checksum:{hash}__ -->' + Path(dest).write_text(out, encoding="utf8") return True def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None): @@ -76,10 +76,9 @@ def create_post(self, path, **kw): makedirs(os.path.dirname(path)) if not content.endswith('\n'): content += '\n' - with io.open(path, "w+", encoding="utf8") as fd: - if onefile: - fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self)) - fd.write(content) + if onefile: + content = write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self) + content + Path(path).write_text(content, encoding="utf8") def extension(self): """Return extension used for PHP files.""" diff --git a/nikola/plugins/compile/rest/__init__.py b/nikola/plugins/compile/rest/__init__.py index 97b57a5e36..716f8b239a 100644 --- a/nikola/plugins/compile/rest/__init__.py +++ b/nikola/plugins/compile/rest/__init__.py @@ -26,9 +26,9 @@ """reStructuredText compiler for Nikola.""" -import io import logging import os +from pathlib import Path import docutils.core import docutils.nodes @@ -71,9 +71,8 @@ def read_metadata(self, post, lang=None): #
environment. Real issues will be reported while compiling. null_logger = logging.getLogger('NULL') null_logger.setLevel(1000) - with io.open(source_path, 'r', encoding='utf-8-sig') as inf: - data = inf.read() - _, _, _, document = rst2html(data, logger=null_logger, source_path=source_path, transforms=self.site.rst_transforms) + data = Path(source_path).read_text(encoding='utf-8-sig') + _, _, _, document = rst2html(data, logger=null_logger, source_path=source_path, transforms=self.site.rst_transforms) meta = {} if 'title' in document: meta['title'] = document['title'] @@ -145,19 +144,17 @@ def compile(self, source, dest, is_two_file=True, post=None, lang=None): """Compile the source file into HTML and save as dest.""" makedirs(os.path.dirname(dest)) error_level = 100 - with io.open(dest, "w+", encoding="utf-8") as out_file: - with io.open(source, "r", encoding="utf-8-sig") as in_file: - data = in_file.read() - output, error_level, deps, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang) - out_file.write(output) - if post is None: - if deps.list: - self.logger.error( - "Cannot save dependencies for post {0} (post unknown)", - source) - else: - post._depfile[dest] += deps.list - post._depfile[dest] += shortcode_deps + data = Path(source).read_text(encoding="utf-8-sig") + output, error_level, deps, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang) + Path(dest).write_text(output, encoding="utf-8") + if post is None: + if deps.list: + self.logger.error( + "Cannot save dependencies for post {0} (post unknown)", + source) + else: + post._depfile[dest] += deps.list + post._depfile[dest] += shortcode_deps if error_level < 3: return True else: @@ -175,10 +172,9 @@ def create_post(self, path, **kw): makedirs(os.path.dirname(path)) if not content.endswith('\n'): content += '\n' - with io.open(path, "w+", encoding="utf-8") as fd: - if onefile: - fd.write(write_metadata(metadata, comment_wrap=False, site=self.site, compiler=self)) - fd.write(content) + if onefile: + content = write_metadata(metadata, comment_wrap=False, site=self.site, compiler=self) + content + Path(path).write_text(content, encoding="utf-8") def set_site(self, site): """Set Nikola site.""" diff --git a/nikola/plugins/compile/rest/listing.py b/nikola/plugins/compile/rest/listing.py index f358f281f7..17acb5b213 100644 --- a/nikola/plugins/compile/rest/listing.py +++ b/nikola/plugins/compile/rest/listing.py @@ -28,8 +28,8 @@ """Define and register a listing directive using the existing CodeBlock.""" -import io import os +from pathlib import Path import uuid from urllib.parse import urlunsplit @@ -209,16 +209,16 @@ def run(self): self.arguments.insert(0, fpath) if 'linenos' in self.options: self.options['number-lines'] = self.options['linenos'] - with io.open(fpath, 'r+', encoding='utf-8-sig') as fileobject: - self.content = fileobject.read().splitlines() + self.content = Path(fpath).read_text(encoding='utf-8-sig').splitlines() self.state.document.settings.record_dependencies.add(fpath) target = urlunsplit(("link", 'listing', fpath.replace('\\', '/'), '', '')) src_target = urlunsplit(("link", 'listing_source', fpath.replace('\\', '/'), '', '')) src_label = self.site.MESSAGES('Source') - generated_nodes = ( - [core.publish_doctree('`{0} <{1}>`_ `({2}) <{3}>`_' .format( - _fname, target, src_label, src_target))[0]]) - generated_nodes += self.get_code_from_file(fileobject) + with Path(fpath).open('r+', encoding='utf-8-sig') as fileobject: + generated_nodes = ( + [core.publish_doctree('`{0} 
<{1}>`_ `({2}) <{3}>`_' .format( + _fname, target, src_label, src_target))[0]]) + generated_nodes += self.get_code_from_file(fileobject) return generated_nodes def get_code_from_file(self, data): diff --git a/nikola/plugins/task/galleries.py b/nikola/plugins/task/galleries.py index bb4a458512..190a060eec 100644 --- a/nikola/plugins/task/galleries.py +++ b/nikola/plugins/task/galleries.py @@ -28,11 +28,11 @@ import datetime import glob -import io import json import mimetypes import os import pathlib +from pathlib import Path from collections import OrderedDict from urllib.parse import urljoin @@ -774,8 +774,7 @@ def forward_slashes(path): rss_obj.rss_attrs["xmlns:atom"] = "http://www.w3.org/2005/Atom" dst_dir = os.path.dirname(output_path) utils.makedirs(dst_dir) - with io.open(output_path, "w+", encoding="utf-8") as rss_file: - data = rss_obj.to_xml(encoding='utf-8') - if isinstance(data, bytes): - data = data.decode('utf-8') - rss_file.write(data) + data = rss_obj.to_xml(encoding='utf-8') + if isinstance(data, bytes): + data = data.decode('utf-8') + Path(output_path).write_text(data, encoding="utf-8") diff --git a/nikola/plugins/template/jinja.py b/nikola/plugins/template/jinja.py index 16467b5987..af4893f8c7 100644 --- a/nikola/plugins/template/jinja.py +++ b/nikola/plugins/template/jinja.py @@ -26,9 +26,9 @@ """Jinja template handler.""" -import io import json import os +from pathlib import Path from typing import Callable, Optional from nikola.plugin_categories import TemplateSystem @@ -114,8 +114,7 @@ def render_template(self, template_name, output_name, context): data = template.render(**context) if output_name is not None: makedirs(os.path.dirname(output_name)) - with io.open(output_name, 'w', encoding='utf-8') as output: - output.write(data) + Path(output_name).write_text(data, encoding='utf-8') return data def render_template_to_string(self, template, context): @@ -142,8 +141,7 @@ def get_string_deps(self, text, context=None): def get_deps(self, filename, context=None): """Return paths to dependencies for the template loaded from filename.""" - with io.open(filename, 'r', encoding='utf-8-sig') as fd: - text = fd.read() + text = Path(filename).read_text(encoding='utf-8-sig') return self.get_string_deps(text, context) def template_deps(self, template_name, context=None): diff --git a/nikola/plugins/template/mako.py b/nikola/plugins/template/mako.py index ed555b31bc..a0fbc22a64 100644 --- a/nikola/plugins/template/mako.py +++ b/nikola/plugins/template/mako.py @@ -26,9 +26,9 @@ """Mako template handler.""" -import io import os import re +from pathlib import Path import shutil from typing import Callable @@ -131,8 +131,7 @@ def render_template(self, template_name, output_name, context): data = template.render_unicode(**context) if output_name is not None: makedirs(os.path.dirname(output_name)) - with io.open(output_name, 'w', encoding='utf-8') as output: - output.write(data) + Path(output_name).write_text(data, encoding='utf-8') return data def render_template_to_string(self, template, context): diff --git a/nikola/post.py b/nikola/post.py index d7c1b9e38a..66a0acc105 100644 --- a/nikola/post.py +++ b/nikola/post.py @@ -26,12 +26,12 @@ """The Post class.""" -import io import datetime import hashlib import json import os import re +from pathlib import Path from collections import defaultdict from math import ceil # for reading time feature from urllib.parse import urljoin @@ -664,8 +664,7 @@ def write_depfile(dest, deps_list, post=None, lang=None): deps_path = 
post.compiler.get_dep_filename(post, lang) if deps_list or (post.compiler.use_dep_file if post else False): deps_list = [p for p in deps_list if p != dest] # Don't depend on yourself (#1671) - with io.open(deps_path, "w+", encoding="utf-8") as deps_file: - deps_file.write('\n'.join(deps_list)) + Path(deps_path).write_text('\n'.join(deps_list), encoding="utf-8") else: if os.path.isfile(deps_path): os.unlink(deps_path) @@ -892,8 +891,7 @@ def text(self, lang=None, teaser_only=False, strip_html=False, show_read_more_li if not os.path.isfile(file_name): self.compile(lang) - with io.open(file_name, "r", encoding="utf-8-sig") as post_file: - data = post_file.read().strip() + data = Path(file_name).read_text(encoding="utf-8-sig").strip() if self.compiler.extension() == '.php': return data @@ -987,8 +985,7 @@ def paragraph_count(self): # duplicated with Post.text() lang = nikola.utils.LocaleBorg().current_lang file_name, _ = self._translated_file_path(lang) - with io.open(file_name, "r", encoding="utf-8-sig") as post_file: - data = post_file.read().strip() + data = Path(file_name).read_text(encoding="utf-8-sig").strip() try: document = lxml.html.fragment_fromstring(data, "body") except lxml.etree.ParserError as e: @@ -1121,8 +1118,7 @@ def get_metadata_from_file(source_path, post, config, lang, metadata_extractors_ source_path = get_translation_candidate(config, source_path, lang) elif lang: source_path += '.' + lang - with io.open(source_path, "r", encoding="utf-8-sig") as meta_file: - source_text = meta_file.read() + source_text = Path(source_path).read_text(encoding="utf-8-sig") except (UnicodeDecodeError, UnicodeEncodeError): msg = 'Error reading {0}: Nikola only supports UTF-8 files'.format(source_path) LOGGER.error(msg) diff --git a/scripts/import_po.py b/scripts/import_po.py index 3f2c984294..cb4c2c1067 100755 --- a/scripts/import_po.py +++ b/scripts/import_po.py @@ -2,9 +2,9 @@ # -*- coding: utf-8 -*- """Download translations from transifex and regenerate files.""" -import io from glob import glob import os +from pathlib import Path import sys import polib @@ -31,5 +31,4 @@ lines.extend(sorted(lines2)) lines.append("}\n") print("Generating:", outf) - with io.open(outf, "w+", encoding="utf8") as outfile: - outfile.write('\n'.join(lines)) + Path(outf).write_text('\n'.join(lines), encoding="utf8") diff --git a/tests/integration/helper.py b/tests/integration/helper.py index c120721fdc..8a210dcbde 100644 --- a/tests/integration/helper.py +++ b/tests/integration/helper.py @@ -1,5 +1,5 @@ -import io import os +from pathlib import Path import shutil from ..helper import cd @@ -17,14 +17,11 @@ def create_simple_post(directory, filename, title_slug, text='', date='2013-03-0 """Create a simple post in a given directory.""" path = os.path.join(directory, filename) text_processed = '\n' + text if text else '' - with io.open(path, "w+", encoding="utf8") as outf: - outf.write( - """ + Path(path).write_text(""" .. title: {0} .. slug: {0} .. 
date: {1} -{2}""".format(title_slug, date, text_processed) - ) +{2}""".format(title_slug, date, text_processed), encoding="utf8") def copy_example_post(destination_dir): @@ -38,19 +35,18 @@ def copy_example_post(destination_dir): def append_config(config_dir, appendix): """Append text to the config file.""" config_path = os.path.join(config_dir, "conf.py") - with io.open(config_path, "a", encoding="utf8") as outf: + with Path(config_path).open("a", encoding="utf8") as outf: outf.write(appendix) def patch_config(config_dir, *replacements): """Patch the config file with new values (find and replace).""" config_path = os.path.join(config_dir, "conf.py") - with io.open(config_path, "r", encoding="utf-8") as inf: - data = inf.read() + data = Path(config_path).read_text(encoding="utf-8") for old, new in replacements: data = data.replace(old, new) - with io.open(config_path, "w+", encoding="utf8") as outf: + with Path(config_path).open("w+", encoding="utf8") as outf: outf.write(data) outf.flush() diff --git a/tests/integration/test_check_absolute_subfolder.py b/tests/integration/test_check_absolute_subfolder.py index 86af065d64..8fc659bf23 100644 --- a/tests/integration/test_check_absolute_subfolder.py +++ b/tests/integration/test_check_absolute_subfolder.py @@ -5,8 +5,7 @@ * deployable to a subfolder (BASE_URL="https://example.com/foo/") """ -import io -import os +from pathlib import Path import pytest @@ -28,9 +27,8 @@ def test_index_in_sitemap(build, output_dir): The correct path ends in /foo/ because this is where we deploy to. """ - sitemap_path = os.path.join(output_dir, "sitemap.xml") - with io.open(sitemap_path, "r", encoding="utf8") as inf: - sitemap_data = inf.read() + sitemap_path = Path(output_dir) / "sitemap.xml" + sitemap_data = sitemap_path.read_text(encoding="utf8") assert "https://example.com/foo/" in sitemap_data diff --git a/tests/integration/test_empty_build.py b/tests/integration/test_empty_build.py index a21bf73f6e..bd11688a79 100644 --- a/tests/integration/test_empty_build.py +++ b/tests/integration/test_empty_build.py @@ -1,7 +1,6 @@ """Performaning the build of an empty site.""" -import io -import os +from pathlib import Path import pytest @@ -22,25 +21,23 @@ def test_check_files(build, target_dir): def test_index_in_sitemap(build, output_dir): - sitemap_path = os.path.join(output_dir, "sitemap.xml") - with io.open(sitemap_path, "r", encoding="utf8") as inf: - sitemap_data = inf.read() + sitemap_path = Path(output_dir) / "sitemap.xml" + sitemap_data = sitemap_path.read_text(encoding="utf8") assert "https://example.com/" in sitemap_data def test_avoid_double_slash_in_rss(build, output_dir): - rss_path = os.path.join(output_dir, "rss.xml") - with io.open(rss_path, "r", encoding="utf8") as inf: - rss_data = inf.read() + rss_path = Path(output_dir) / "rss.xml" + rss_data = rss_path.read_text(encoding="utf8") assert "https://example.com//" not in rss_data def test_archive_exists(build, output_dir): """Ensure the build did something.""" - index_path = os.path.join(output_dir, "archive.html") - assert os.path.isfile(index_path) + index_path = Path(output_dir) / "archive.html" + assert index_path.is_file() @pytest.fixture(scope="module") diff --git a/tests/integration/test_page_index_normal_urls.py b/tests/integration/test_page_index_normal_urls.py index 4dbedfd3de..c6c36acee0 100644 --- a/tests/integration/test_page_index_normal_urls.py +++ b/tests/integration/test_page_index_normal_urls.py @@ -1,7 +1,7 @@ """Test if PAGE_INDEX works, with different PRETTY_URLS=False 
settings.""" -import io import os +from pathlib import Path import pytest @@ -77,10 +77,9 @@ def output_path(dir, name): def test_page_index_content_in_pages(build, output_dir): """Do the indexes only contain the pages the should?""" - pages = os.path.join(output_dir, "pages") + pages = Path(output_dir) / "pages" - with io.open(os.path.join(pages, "index.html"), "r", encoding="utf-8") as fh: - pages_index = fh.read() + pages_index = (pages / "index.html").read_text(encoding="utf-8") assert "Page 0" in pages_index assert "Page 1" not in pages_index @@ -92,10 +91,9 @@ def test_page_index_content_in_pages(build, output_dir): def test_page_index_content_in_subdir1(build, output_dir): """Do the indexes only contain the pages the should?""" - subdir1 = os.path.join(output_dir, "pages", "subdir1") + subdir1 = Path(output_dir) / "pages" / "subdir1" - with io.open(os.path.join(subdir1, "index.html"), "r", encoding="utf-8") as fh: - subdir1_index = fh.read() + subdir1_index = (subdir1 / "index.html").read_text(encoding="utf-8") assert "Page 0" not in subdir1_index assert "Page 1" in subdir1_index @@ -107,10 +105,9 @@ def test_page_index_content_in_subdir1(build, output_dir): def test_page_index_content_in_subdir2(build, output_dir): """Do the indexes only contain the pages the should?""" - subdir2 = os.path.join(output_dir, "pages", "subdir2") + subdir2 = Path(output_dir) / "pages" / "subdir2" - with io.open(os.path.join(subdir2, "index.html"), "r", encoding="utf-8") as fh: - subdir2_index = fh.read() + subdir2_index = (subdir2 / "index.html").read_text(encoding="utf-8") assert "Page 0" not in subdir2_index assert "Page 1" not in subdir2_index @@ -122,10 +119,9 @@ def test_page_index_content_in_subdir2(build, output_dir): def test_page_index_content_in_subdir3(build, output_dir): """Do the indexes only contain the pages the should?""" - subdir3 = os.path.join(output_dir, "pages", "subdir3") + subdir3 = Path(output_dir) / "pages" / "subdir3" - with io.open(os.path.join(subdir3, "index.php"), "r", encoding="utf-8") as fh: - subdir3_index = fh.read() + subdir3_index = (subdir3 / "index.php").read_text(encoding="utf-8") assert "Page 0" not in subdir3_index assert "Page 1" not in subdir3_index @@ -167,72 +163,58 @@ def create_pages(target_dir): makedirs(subdir2) makedirs(subdir3) - with io.open(os.path.join(pages, "page0.txt"), "w+", encoding="utf8") as outf: - outf.write( + (Path(pages) / "page0.txt").write_text( """\ .. title: Page 0 .. slug: page0 This is page 0. -""" - ) +""", encoding="utf8") - with io.open(os.path.join(subdir1, "page1.txt"), "w+", encoding="utf8") as outf: - outf.write( + (Path(subdir1) / "page1.txt").write_text( """\ .. title: Page 1 .. slug: page1 This is page 1. -""" - ) +""", encoding="utf8") - with io.open(os.path.join(subdir1, "page2.txt"), "w+", encoding="utf8") as outf: - outf.write( + (Path(subdir1) / "page2.txt").write_text( """\ .. title: Page 2 .. slug: page2 This is page 2. -""" - ) +""", encoding="utf8") - with io.open(os.path.join(subdir2, "page3.txt"), "w+", encoding="utf8") as outf: - outf.write( + (Path(subdir2) / "page3.txt").write_text( """\ .. title: Page 3 .. slug: page3 This is page 3. -""" - ) +""", encoding="utf8") - with io.open(os.path.join(subdir2, "foo.txt"), "w+", encoding="utf8") as outf: - outf.write( + (Path(subdir2) / "foo.txt").write_text( """\ .. title: Not the page index .. slug: index This is not the page index. 
-""" - ) +""", encoding="utf8") - with io.open(os.path.join(subdir3, "page4.txt"), "w+", encoding="utf8") as outf: - outf.write( + (Path(subdir3) / "page4.txt").write_text( """\ .. title: Page 4 .. slug: page4 This is page 4. -""" - ) +""", encoding="utf8") - with io.open(os.path.join(subdir3, "bar.php"), "w+", encoding="utf8") as outf: - outf.write( + (Path(subdir3) / "bar.php").write_text( """\ .. title: Still not the page index .. slug: index This is not the page index either. -""" - ) +""", encoding="utf8") diff --git a/tests/test_rss_feeds.py b/tests/test_rss_feeds.py index d976579d63..b214667c5b 100644 --- a/tests/test_rss_feeds.py +++ b/tests/test_rss_feeds.py @@ -120,7 +120,7 @@ def rss_feed_content(blog_url, config, default_locale): filename = "testfeed.rss" opener_mock = mock.mock_open() - with mock.patch("nikola.nikola.io.open", opener_mock, create=True): + with mock.patch("nikola.nikola.pathlib.Path.write_text", opener_mock, create=True): Nikola().generic_rss_renderer( default_locale, "blog_title", @@ -132,7 +132,7 @@ def rss_feed_content(blog_url, config, default_locale): False, ) - opener_mock.assert_called_once_with(filename, "w+", encoding="utf-8") + opener_mock.assert_called_once_with(encoding="utf-8") # Python 3 / unicode strings workaround # lxml will complain if the encoding is specified in the From 5e5669b61172a75e2e16ad0691b5a7eec7a1afde Mon Sep 17 00:00:00 2001 From: Arun Persaud Date: Sun, 30 Mar 2025 10:01:11 -0700 Subject: [PATCH 06/11] fix some more test for io.open replacements --- tests/integration/test_future_post.py | 52 ++++++++++----------------- tests/integration/test_redirection.py | 49 ++++++++++++------------- tests/test_utils.py | 15 +++----- 3 files changed, 46 insertions(+), 70 deletions(-) diff --git a/tests/integration/test_future_post.py b/tests/integration/test_future_post.py index 4645464b37..620fa1e831 100644 --- a/tests/integration/test_future_post.py +++ b/tests/integration/test_future_post.py @@ -1,8 +1,8 @@ """Test a site with future posts.""" -import io import os from datetime import timedelta +from pathlib import Path import pytest @@ -43,11 +43,11 @@ def test_future_post_deployment(build, output_dir, target_dir): @pytest.mark.parametrize("filename", ["index.html", "sitemap.xml"]) def test_future_post_not_in_indexes(build, output_dir, filename): """ Ensure that the future post is not present in the index and sitemap.""" - filepath = os.path.join(output_dir, filename) - assert os.path.isfile(filepath) + filepath = Path(output_dir) / filename + assert filepath.is_file() + + content = filepath.read_text(encoding="utf8") - with io.open(filepath, "r", encoding="utf8") as inf: - content = inf.read() assert "foo/" in content assert "bar/" not in content assert "baz" not in content @@ -67,43 +67,29 @@ def format_datetime(datetime): return datetime.strftime("%Y-%m-%d %H:%M:%S") past_datetime = format_datetime(current_time() + timedelta(days=-1)) - with io.open( - os.path.join(target_dir, "posts", "empty1.txt"), "w+", encoding="utf8" - ) as past_post: - past_post.write( - """\ + (Path(target_dir) / "posts" / "empty1.txt").write_text( + f"""\ .. title: foo .. slug: foo -.. date: %s -""" - % past_datetime - ) +.. date: {past_datetime} +""", encoding="utf8") future_datetime = format_datetime(current_time() + timedelta(days=1)) - with io.open( - os.path.join(target_dir, "posts", "empty2.txt"), "w+", encoding="utf8" - ) as future_post: - future_post.write( - """\ + + (Path(target_dir) / "posts" / "empty2.txt").write_text( + f"""\ .. title: bar .. 
slug: bar -.. date: %s -""" - % future_datetime - ) - - with io.open( - os.path.join(target_dir, "posts", "empty3.txt"), "w+", encoding="utf8" - ) as future_post: - future_post.write( - """\ +.. date: {future_datetime} +""", encoding="utf8") + + (Path(target_dir) / "posts" / "empty3.txt").write_text( + f"""\ .. title: baz .. slug: baz -.. date: %s +.. date: {future_datetime} .. pretty_url: false -""" - % future_datetime - ) +""", encoding="utf8") with cd(target_dir): __main__.main(["build"]) diff --git a/tests/integration/test_redirection.py b/tests/integration/test_redirection.py index 323740d53d..88992ea0a4 100644 --- a/tests/integration/test_redirection.py +++ b/tests/integration/test_redirection.py @@ -5,8 +5,8 @@ Each of the different redirect types is specified in the config and then tested by at least one test.""" -import io import os +from pathlib import Path import pytest @@ -25,49 +25,46 @@ def test_absolute_redirection(build, output_dir): - abs_source = os.path.join(output_dir, "redirects", "absolute_source.html") - assert os.path.exists(abs_source) + abs_source = Path(output_dir) / "redirects" / "absolute_source.html" + assert abs_source.exists() - abs_destination = os.path.join(output_dir, "posts", "absolute.html") - assert os.path.exists(abs_destination) + abs_destination = Path(output_dir) / "posts" / "absolute.html" + assert abs_destination.exists() - with open(abs_destination) as abs_destination_fd: - abs_destination_content = abs_destination_fd.read() + abs_destination_content = abs_destination.read_text() redirect_tag = '' assert redirect_tag in abs_destination_content - with open(abs_source) as abs_source_fd: - absolute_source_content = abs_source_fd.read() + absolute_source_content = abs_source.read_text() assert absolute_source_content == "absolute" def test_external_redirection(build, output_dir): - ext_link = os.path.join(output_dir, "external.html") + ext_link = Path(output_dir) / "external.html" - assert os.path.exists(ext_link) - with open(ext_link) as ext_link_fd: - ext_link_content = ext_link_fd.read() + assert ext_link.exists() + + ext_link_content = ext_link.read_text() redirect_tag = '' assert redirect_tag in ext_link_content def test_relative_redirection(build, output_dir): - rel_destination = os.path.join(output_dir, "relative.html") - assert os.path.exists(rel_destination) - rel_source = os.path.join(output_dir, "redirects", "rel_src.html") - assert os.path.exists(rel_source) + rel_destination = Path(output_dir) / "relative.html" + assert rel_destination.exists() + + rel_source = Path(output_dir) / "redirects" / "rel_src.html" + assert rel_source.exists() - with open(rel_destination) as rel_destination_fd: - rel_destination_content = rel_destination_fd.read() + rel_destination_content = rel_destination.read_text() redirect_tag = '' assert redirect_tag in rel_destination_content - with open(rel_source) as rel_source_fd: - rel_source_content = rel_source_fd.read() + rel_source_content = rel_source.read_text() assert rel_source_content == "relative" @@ -81,14 +78,12 @@ def build(target_dir): nikola.utils.makedirs(redirects_dir) # Source file for absolute redirect - target_path = os.path.join(redirects_dir, "absolute_source.html") - with io.open(target_path, "w+", encoding="utf8") as outf: - outf.write("absolute") + target_path = Path(redirects_dir) / "absolute_source.html" + target_path.write_text("absolute", encoding="utf8") # Source file for relative redirect - target_path = os.path.join(redirects_dir, "rel_src.html") - with io.open(target_path, "w+", 
encoding="utf8") as outf: - outf.write("relative") + target_path = Path(redirects_dir) / "rel_src.html" + target_path.write_text("relative", encoding="utf8") # Configure usage of specific redirects append_config( diff --git a/tests/test_utils.py b/tests/test_utils.py index 764eec2c2e..e4734bd274 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -41,8 +41,7 @@ def test_getting_metadata_from_content(post): Post content """ - opener_mock = mock.mock_open(read_data=file_content) - with mock.patch("nikola.post.io.open", opener_mock, create=True): + with mock.patch("nikola.post.Path.read_text", return_value=file_content, create=True): meta = get_meta(post, None)[0] assert "Nikola needs more tests!" == meta["title"] @@ -64,8 +63,7 @@ def test_get_title_from_fname(post): .. link: .. description: """ - opener_mock = mock.mock_open(read_data=file_content) - with mock.patch("nikola.post.io.open", opener_mock, create=True): + with mock.patch("nikola.post.Path.read_text", return_value=file_content, create=True): meta = get_meta(post, None)[0] assert "file_with_metadata" == meta["title"] @@ -89,8 +87,7 @@ def test_use_filename_as_slug_fallback(post): Post content """ - opener_mock = mock.mock_open(read_data=file_content) - with mock.patch("nikola.post.io.open", opener_mock, create=True): + with mock.patch("nikola.post.Path.read_text", return_value=file_content, create=True): meta = get_meta(post, None)[0] assert "Nikola needs more tests!" == meta["title"] @@ -113,8 +110,7 @@ def test_extracting_metadata_from_filename(post, unslugify, expected_title): ] = r"(?P\d{4}-\d{2}-\d{2})-(?P.*)-(?P.*)\.md" post.config["FILE_METADATA_UNSLUGIFY_TITLES"] = unslugify - no_metadata_opener = mock.mock_open(read_data="No metadata in the file!") - with mock.patch("nikola.post.io.open", no_metadata_opener, create=True): + with mock.patch("nikola.post.Path.read_text", return_value="No metadata in the file!", create=True): meta = get_meta(post, None)[0] assert expected_title == meta["title"] @@ -126,8 +122,7 @@ def test_get_meta_slug_only_from_filename(post): post.source_path = "some/path/the_slug.md" post.metadata_path = "some/path/the_slug.meta" - no_metadata_opener = mock.mock_open(read_data="No metadata in the file!") - with mock.patch("nikola.post.io.open", no_metadata_opener, create=True): + with mock.patch("nikola.post.Path.read_text", return_value="No metadata in the file!", create=True): meta = get_meta(post, None)[0] assert "the_slug" == meta["slug"] From f4baf18c15e571d369f948e1bc611d27771c51e7 Mon Sep 17 00:00:00 2001 From: Arun Persaud <apersaud@lbl.gov> Date: Sun, 30 Mar 2025 10:12:50 -0700 Subject: [PATCH 07/11] final round of io.open replacements --- tests/integration/test_check_failure.py | 8 ++++---- tests/integration/test_relative_links.py | 13 +++++-------- .../test_relative_links_with_pages_in_root.py | 13 +++++-------- tests/test_compile_markdown.py | 8 +++----- 4 files changed, 17 insertions(+), 25 deletions(-) diff --git a/tests/integration/test_check_failure.py b/tests/integration/test_check_failure.py index 08e944753c..63635c59ef 100644 --- a/tests/integration/test_check_failure.py +++ b/tests/integration/test_check_failure.py @@ -7,6 +7,7 @@ import io import os +from pathlib import Path import pytest @@ -21,7 +22,7 @@ def test_check_links_fail(build, output_dir, target_dir): - os.unlink(os.path.join(output_dir, "archive.html")) + os.unlink(Path(output_dir) / "archive.html") with cd(target_dir): result = __main__.main(["check", "-l"]) @@ -29,9 +30,8 @@ def test_check_links_fail(build, 
output_dir, target_dir): def test_check_files_fail(build, output_dir, target_dir): - manually_added_file = os.path.join(output_dir, "foobar") - with io.open(manually_added_file, "w+", encoding="utf8") as outf: - outf.write("foo") + manually_added_file = Path(output_dir) / "foobar" + manually_added_file.write_text("foo", encoding="utf8") with cd(target_dir): result = __main__.main(["check", "-f"]) diff --git a/tests/integration/test_relative_links.py b/tests/integration/test_relative_links.py index ea1545566c..239126a618 100644 --- a/tests/integration/test_relative_links.py +++ b/tests/integration/test_relative_links.py @@ -1,7 +1,6 @@ """Check that SITE_URL with a path doesn't break links.""" -import io -import os +from pathlib import Path import lxml.html import pytest @@ -20,10 +19,9 @@ def test_relative_links(build, output_dir): """Check that the links in output/index.html are correct""" - test_path = os.path.join(output_dir, "index.html") + test_path = Path(output_dir) / "index.html" - with io.open(test_path, "rb") as inf: - data = inf.read() + data = test_path.read_bytes() assert not any( url.startswith("..") @@ -34,9 +32,8 @@ def test_relative_links(build, output_dir): def test_index_in_sitemap(build, output_dir): """Test that the correct path is in sitemap, and not the wrong one.""" - sitemap_path = os.path.join(output_dir, "sitemap.xml") - with io.open(sitemap_path, "r", encoding="utf8") as inf: - sitemap_data = inf.read() + sitemap_path = Path(output_dir) / "sitemap.xml" + sitemap_data = sitemap_path.read_text(encoding="utf8") assert "<loc>https://example.com/</loc>" not in sitemap_data assert "<loc>https://example.com/foo/bar/</loc>" in sitemap_data diff --git a/tests/integration/test_relative_links_with_pages_in_root.py b/tests/integration/test_relative_links_with_pages_in_root.py index b072682453..4193905943 100644 --- a/tests/integration/test_relative_links_with_pages_in_root.py +++ b/tests/integration/test_relative_links_with_pages_in_root.py @@ -1,7 +1,6 @@ """Check that dropping pages to the root doesn't break links.""" -import io -import os +from pathlib import Pagesath import lxml.html import pytest @@ -19,10 +18,9 @@ def test_relative_links(build, output_dir): """Check that the links in a page are correct""" - test_path = os.path.join(output_dir, "about-nikola.html") + test_path = Path(output_dir) / "about-nikola.html" - with io.open(test_path, "rb") as inf: - data = inf.read() + data = test_path.read_bytes() assert not any( url.startswith("..") @@ -33,9 +31,8 @@ def test_relative_links(build, output_dir): def test_index_in_sitemap(build, output_dir): """Test that the correct path is in sitemap, and not the wrong one.""" - sitemap_path = os.path.join(output_dir, "sitemap.xml") - with io.open(sitemap_path, "r", encoding="utf8") as inf: - sitemap_data = inf.read() + sitemap_path = Path(output_dir) / "sitemap.xml" + sitemap_data = sitemap_path.read_text(encoding="utf8") assert "<loc>https://example.com/</loc>" not in sitemap_data assert "<loc>https://example.com/blog/index.html</loc>" in sitemap_data diff --git a/tests/test_compile_markdown.py b/tests/test_compile_markdown.py index c9e71fd061..315e66321e 100644 --- a/tests/test_compile_markdown.py +++ b/tests/test_compile_markdown.py @@ -1,6 +1,6 @@ -import io import sys from os import path +from pathlib import Path import pytest @@ -72,10 +72,8 @@ def output_path(tmpdir): def markdown_compile(compiler, input_path, output_path, text): - with io.open(input_path, "w+", encoding="utf8") as input_file: - input_file.write(text) 
+ Path(input_path).write_text(text, encoding="utf8") compiler.compile(input_path, output_path, lang="en") - with io.open(output_path, "r", encoding="utf8") as output_path: - return output_path.read() + return Path(output_path).read_text(encoding="utf8") From 96a8e7f01a46c56998326d59f5ddc83e18176fe7 Mon Sep 17 00:00:00 2001 From: Arun Persaud <apersaud@lbl.gov> Date: Sat, 17 May 2025 08:54:51 -0700 Subject: [PATCH 08/11] fix import error (typo) --- tests/integration/test_relative_links_with_pages_in_root.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_relative_links_with_pages_in_root.py b/tests/integration/test_relative_links_with_pages_in_root.py index 4193905943..0a6465b7a2 100644 --- a/tests/integration/test_relative_links_with_pages_in_root.py +++ b/tests/integration/test_relative_links_with_pages_in_root.py @@ -1,6 +1,6 @@ """Check that dropping pages to the root doesn't break links.""" -from pathlib import Pagesath +from pathlib import Path import lxml.html import pytest From 0519e95cce7ab6220207e34f509ce27b544836f8 Mon Sep 17 00:00:00 2001 From: Arun Persaud <apersaud@lbl.gov> Date: Sat, 17 May 2025 09:00:56 -0700 Subject: [PATCH 09/11] cleaned up some flake8 warnings --- nikola/plugin_categories.py | 4 ++-- nikola/plugins/compile/php.py | 2 +- nikola/plugins/compile/rest/__init__.py | 2 +- nikola/plugins/compile/rest/listing.py | 8 ++++---- tests/integration/test_check_failure.py | 1 - 5 files changed, 8 insertions(+), 9 deletions(-) diff --git a/nikola/plugin_categories.py b/nikola/plugin_categories.py index b2bdd5812f..5f42ceba51 100644 --- a/nikola/plugin_categories.py +++ b/nikola/plugin_categories.py @@ -363,8 +363,8 @@ def get_compiler_extensions(self) -> list: plugins = [] for plugin_info in self.site.compiler_extensions: if ( - plugin_info.compiler == self.name - or plugin_info.plugin_object.compiler_name == self.name + plugin_info.compiler == self.name or + plugin_info.plugin_object.compiler_name == self.name ): plugins.append(plugin_info) return plugins diff --git a/nikola/plugins/compile/php.py b/nikola/plugins/compile/php.py index 915b75cd08..a699d7e4bb 100644 --- a/nikola/plugins/compile/php.py +++ b/nikola/plugins/compile/php.py @@ -46,7 +46,7 @@ def compile(self, source, dest, is_two_file=True, post=None, lang=None): text = Path(source).read_bytes() hash = md5(text).hexdigest() out = f'<!-- __NIKOLA_PHP_TEMPLATE_INJECTION source:{source} checksum:{hash}__ -->' - Path(dest).write_text(out, encoding="utf8") + Path(dest).write_text(out, encoding="utf8") return True def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None): diff --git a/nikola/plugins/compile/rest/__init__.py b/nikola/plugins/compile/rest/__init__.py index 716f8b239a..940a9544b6 100644 --- a/nikola/plugins/compile/rest/__init__.py +++ b/nikola/plugins/compile/rest/__init__.py @@ -173,7 +173,7 @@ def create_post(self, path, **kw): if not content.endswith('\n'): content += '\n' if onefile: - content = write_metadata(metadata, comment_wrap=False, site=self.site, compiler=self) + content + content = write_metadata(metadata, comment_wrap=False, site=self.site, compiler=self) + content Path(path).write_text(content, encoding="utf-8") def set_site(self, site): diff --git a/nikola/plugins/compile/rest/listing.py b/nikola/plugins/compile/rest/listing.py index 17acb5b213..7080b0ec91 100644 --- a/nikola/plugins/compile/rest/listing.py +++ b/nikola/plugins/compile/rest/listing.py @@ -215,10 +215,10 @@ def run(self): src_target = 
urlunsplit(("link", 'listing_source', fpath.replace('\\', '/'), '', '')) src_label = self.site.MESSAGES('Source') with Path(fpath).open('r+', encoding='utf-8-sig') as fileobject: - generated_nodes = ( - [core.publish_doctree('`{0} <{1}>`_ `({2}) <{3}>`_' .format( - _fname, target, src_label, src_target))[0]]) - generated_nodes += self.get_code_from_file(fileobject) + generated_nodes = ( + [core.publish_doctree('`{0} <{1}>`_ `({2}) <{3}>`_' .format( + _fname, target, src_label, src_target))[0]]) + generated_nodes += self.get_code_from_file(fileobject) return generated_nodes def get_code_from_file(self, data): diff --git a/tests/integration/test_check_failure.py b/tests/integration/test_check_failure.py index 63635c59ef..c6b44f040d 100644 --- a/tests/integration/test_check_failure.py +++ b/tests/integration/test_check_failure.py @@ -5,7 +5,6 @@ Green path tests (working as expected) can be found in `test_demo_build`. """ -import io import os from pathlib import Path From a343a20b41dbb49297e14d6f3ab41fe5c2447bb7 Mon Sep 17 00:00:00 2001 From: Arun Persaud <apersaud@lbl.gov> Date: Sat, 17 May 2025 09:11:26 -0700 Subject: [PATCH 10/11] more flak8 fixes --- tests/integration/test_future_post.py | 36 ++++---- .../test_page_index_normal_urls.py | 88 ++++++++++--------- 2 files changed, 68 insertions(+), 56 deletions(-) diff --git a/tests/integration/test_future_post.py b/tests/integration/test_future_post.py index 620fa1e831..f2b6e18609 100644 --- a/tests/integration/test_future_post.py +++ b/tests/integration/test_future_post.py @@ -3,6 +3,7 @@ import os from datetime import timedelta from pathlib import Path +from textwrap import dedent import pytest @@ -68,28 +69,31 @@ def format_datetime(datetime): past_datetime = format_datetime(current_time() + timedelta(days=-1)) (Path(target_dir) / "posts" / "empty1.txt").write_text( - f"""\ -.. title: foo -.. slug: foo -.. date: {past_datetime} -""", encoding="utf8") + dedent(f"""\ + .. title: foo + .. slug: foo + .. date: {past_datetime} + """), + encoding="utf8") future_datetime = format_datetime(current_time() + timedelta(days=1)) (Path(target_dir) / "posts" / "empty2.txt").write_text( - f"""\ -.. title: bar -.. slug: bar -.. date: {future_datetime} -""", encoding="utf8") + dedent(f"""\ + .. title: bar + .. slug: bar + .. date: {future_datetime} + """), + encoding="utf8") (Path(target_dir) / "posts" / "empty3.txt").write_text( - f"""\ -.. title: baz -.. slug: baz -.. date: {future_datetime} -.. pretty_url: false -""", encoding="utf8") + dedent(f"""\ + .. title: baz + .. slug: baz + .. date: {future_datetime} + .. pretty_url: false + """), + encoding="utf8") with cd(target_dir): __main__.main(["build"]) diff --git a/tests/integration/test_page_index_normal_urls.py b/tests/integration/test_page_index_normal_urls.py index c6c36acee0..33affc6c01 100644 --- a/tests/integration/test_page_index_normal_urls.py +++ b/tests/integration/test_page_index_normal_urls.py @@ -2,6 +2,7 @@ import os from pathlib import Path +from textwrap import dedent import pytest @@ -142,11 +143,11 @@ def build(target_dir): append_config( target_dir, - """ -PAGE_INDEX = True -PRETTY_URLS = False -PAGES = PAGES + (('pages/*.php', 'pages', 'page.tmpl'),) -""", + dedent("""\ + PAGE_INDEX = True + PRETTY_URLS = False + PAGES = PAGES + (('pages/*.php', 'pages', 'page.tmpl'),) + """), ) with cd(target_dir): @@ -164,57 +165,64 @@ def create_pages(target_dir): makedirs(subdir3) (Path(pages) / "page0.txt").write_text( - """\ -.. title: Page 0 -.. slug: page0 + dedent("""\ + .. 
title: Page 0 + .. slug: page0 -This is page 0. -""", encoding="utf8") + This is page 0. + """), + encoding="utf8") (Path(subdir1) / "page1.txt").write_text( - """\ -.. title: Page 1 -.. slug: page1 + dedent("""\ + .. title: Page 1 + .. slug: page1 -This is page 1. -""", encoding="utf8") + This is page 1. + """), + encoding="utf8") (Path(subdir1) / "page2.txt").write_text( - """\ -.. title: Page 2 -.. slug: page2 + dedent("""\ + .. title: Page 2 + .. slug: page2 -This is page 2. -""", encoding="utf8") + This is page 2. + """), + encoding="utf8") (Path(subdir2) / "page3.txt").write_text( - """\ -.. title: Page 3 -.. slug: page3 + dedent("""\ + .. title: Page 3 + .. slug: page3 -This is page 3. -""", encoding="utf8") + This is page 3. + """), + encoding="utf8") (Path(subdir2) / "foo.txt").write_text( - """\ -.. title: Not the page index -.. slug: index + dedent("""\ + .. title: Not the page index + .. slug: index -This is not the page index. -""", encoding="utf8") + This is not the page index. + """), + encoding="utf8") (Path(subdir3) / "page4.txt").write_text( - """\ -.. title: Page 4 -.. slug: page4 + dedent("""\ + .. title: Page 4 + .. slug: page4 -This is page 4. -""", encoding="utf8") + This is page 4. + """), + encoding="utf8") (Path(subdir3) / "bar.php").write_text( - """\ -.. title: Still not the page index -.. slug: index + dedent("""\ + .. title: Still not the page index + .. slug: index -This is not the page index either. -""", encoding="utf8") + This is not the page index either. + """), + encoding="utf8") From af778ad31263bfbc6b487193731a64c3d84bca60 Mon Sep 17 00:00:00 2001 From: Arun Persaud <apersaud@lbl.gov> Date: Sat, 17 May 2025 09:31:51 -0700 Subject: [PATCH 11/11] fix a test (that is still using io.open) --- tests/test_rss_feeds.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_rss_feeds.py b/tests/test_rss_feeds.py index b214667c5b..002e195733 100644 --- a/tests/test_rss_feeds.py +++ b/tests/test_rss_feeds.py @@ -120,7 +120,7 @@ def rss_feed_content(blog_url, config, default_locale): filename = "testfeed.rss" opener_mock = mock.mock_open() - with mock.patch("nikola.nikola.pathlib.Path.write_text", opener_mock, create=True): + with mock.patch("nikola.nikola.utils.io.open", opener_mock, create=True): Nikola().generic_rss_renderer( default_locale, "blog_title", @@ -132,7 +132,7 @@ def rss_feed_content(blog_url, config, default_locale): False, ) - opener_mock.assert_called_once_with(encoding="utf-8") + opener_mock.assert_called_once_with(filename, "w+", encoding="utf-8") # Python 3 / unicode strings workaround # lxml will complain if the encoding is specified in the