Embrace most Ruff lint rules
Because Ruff is fast and has many nice rules. Nasty ones we can ignore,
but most of them do make sense.
ikalnytskyi committed Dec 24, 2024
1 parent b8f23b5 commit b10dfdc
Showing 34 changed files with 227 additions and 303 deletions.
48 changes: 42 additions & 6 deletions pyproject.toml
@@ -8,9 +8,7 @@ description = "An extendable static site generator powered by the Force. =/"
readme = "README.rst"
requires-python = ">= 3.10"
license = "BSD-3-Clause"
authors = [
{ name = "Ihor Kalnytskyi", email = "[email protected]" }
]
authors = [{ name = "Ihor Kalnytskyi", email = "[email protected]" }]
keywords = ["static", "site", "blog", "generator", "markdown"]
classifiers = [
"Environment :: Console",
@@ -76,12 +74,50 @@ dependencies = ["ruff == 0.8.*"]
scripts.check = ["ruff check {args:.}", "ruff format --check --diff {args:.}"]
scripts.fmt = ["ruff check --fix {args:.}", "ruff format {args:.}"]

[tool.ruff]
line-length = 100

[tool.ruff.lint]
select = ["F", "E", "W", "I", "S", "FBT", "B", "C4", "DTZ", "T10", "ISC", "RET", "SLF", "RUF"]
ignore = ["S603", "S701", "B904", "ISC001"]
select = ["ALL"]
ignore = [
"A001",
"A002",
"ANN",
"ARG001",
"ARG002",
"B904",
"C",
"COM812",
"D",
"INP001",
"ISC001",
"N801",
"N802",
"PERF203",
"PLR",
"PLW2901",
"PTH",
"S603",
"S701",
"SIM117",
"TID252",
]

[tool.ruff.lint.isort]
known-first-party = ["holocron"]

[tool.ruff.lint.per-file-ignores]
"tests/*" = ["E501", "S101", "S607", "SLF001", "RUF001"]
"src/holocron/__main__.py" = ["T201"]
"tests/*" = [
"BLE001",
"D",
"E501",
"PT011",
"PT012",
"RUF001",
"S101",
"S607",
"SIM108",
"SLF001",
"UP031",
]
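
The lint table above now selects every Ruff rule family with select = ["ALL"] and opts out of individual codes. Ruff itself can explain any of those codes on demand; the following is a minimal sketch, assuming the ruff executable pinned above (ruff == 0.8.*) is on PATH, with a few of the ignored codes used purely as an example:

import subprocess

# A few of the codes ignored in the configuration above; any rule code works.
ignored = ["S603", "S701", "B904", "ISC001"]

for code in ignored:
    # `ruff rule <code>` prints the rule's documentation; the first line
    # carries the rule's name, which is enough for a quick review.
    doc = subprocess.run(
        ["ruff", "rule", code],
        capture_output=True,
        text=True,
        check=True,
    ).stdout
    print(code, "->", doc.splitlines()[0].lstrip("# "))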
17 changes: 6 additions & 11 deletions src/holocron/__main__.py
@@ -19,9 +19,8 @@

def create_app_from_yml(path):
"""Return an application instance created from YAML."""

try:
with open(path, "rt", encoding="UTF-8") as f:
with open(path, encoding="UTF-8") as f:
try:
# Substitute ALL occurrences of '%(here)s' with a path to a
# directory with '.holocron.yml'. Please note, we also want
@@ -34,9 +33,7 @@ def create_app_from_yml(path):

conf = yaml.safe_load(interpolated)
except yaml.YAMLError as exc:
raise RuntimeError(
"Cannot parse a configuration file. Context: " + str(exc)
)
raise RuntimeError("Cannot parse a configuration file. Context: " + str(exc))

except FileNotFoundError:
conf = {"metadata": None, "pipes": {}}
@@ -46,8 +43,7 @@

@contextlib.contextmanager
def configure_logger(level):
"""
Configure a root logger to print records in pretty format.
"""Configure a root logger to print records in pretty format.
The format is more readable for end users, since it's not necessary at
all to know a record's datetime and a source of the record.
@@ -63,15 +59,15 @@ def configure_logger(level):

class _PendingHandler(logging.handlers.MemoryHandler):
def __init__(self, target):
return super(_PendingHandler, self).__init__(capacity=-1, target=target)
super().__init__(capacity=-1, target=target)

def shouldFlush(self, record):
return False

class _Formatter(logging.Formatter):
def format(self, record):
record.levelname = record.levelname[:4]
return super(_Formatter, self).format(record)
return super().format(record)

# create stream handler with custom formatter
stream_handler = logging.StreamHandler()
@@ -90,8 +86,7 @@ def format(self, record):


def parse_command_line(args):
"""
Builds a command line interface, and parses its arguments. Returns
"""Builds a command line interface, and parses its arguments. Returns
an object with attributes that represent CLI arguments.
:param args: a list of command line arguments
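The comment in create_app_from_yml above describes substituting every '%(here)s' placeholder with the directory holding '.holocron.yml' before the YAML is parsed. The project's own implementation is not fully shown in this hunk; the following is only an illustrative sketch of that idea:

import pathlib

def interpolate_here(text: str, config_path: str) -> str:
    # Replace every '%(here)s' placeholder with the directory that
    # contains the configuration file, as the comment above describes.
    here = str(pathlib.Path(config_path).parent.resolve())
    return text.replace("%(here)s", here)

# Example: interpolate_here("path: %(here)s/content", "/site/.holocron.yml")
# would yield "path: /site/content".
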
20 changes: 9 additions & 11 deletions src/holocron/_core/application.py
@@ -3,7 +3,7 @@
import collections
import logging

from .._processors import _misc
from holocron._processors import _misc

_logger = logging.getLogger("holocron")

@@ -55,7 +55,8 @@ def add_processor(self, name, processor):

def add_processor_wrapper(self, name, processor):
if name in self._processor_reserved_props:
raise ValueError(f"illegal wrapper name: {name}")
msg = f"illegal wrapper name: {name}"
raise ValueError(msg)

self.add_processor(name, processor)
self._processor_wrappers.add(name)
@@ -73,7 +74,8 @@ def invoke(self, pipe, stream=None):
# from some processor.
if isinstance(pipe, str):
if pipe not in self._pipes:
raise ValueError(f"no such pipe: '{pipe}'")
msg = f"no such pipe: '{pipe}'"
raise ValueError(msg)
pipe = self._pipes[pipe]

# Since processors expect an input stream to be an iterator, we cast a
@@ -87,16 +89,15 @@
# parameters. Please note, we're doing this so late because we
# want to take into account metadata and other changes produced
# by previous processors in the pipe.
processor = _misc.resolve_json_references(
processor, {"metadata:": self.metadata}
)
processor = _misc.resolve_json_references(processor, {"metadata:": self.metadata})

name, args, kwargs = _unpack_and_wrap_processor(
processor, self._processor_reserved_props
)

if name not in self._processors:
raise ValueError(f"no such processor: '{name}'")
msg = f"no such processor: '{name}'"
raise ValueError(msg)

processfn = self._processors[name]
stream = processfn(self, stream, *args, **kwargs)
@@ -123,15 +124,12 @@ def _unpack_and_wrap_processor(processor, processor_reserved_props):
processor. So this function naturally wraps `commonmark` and so we
effectively resolve syntax sugar.
"""

processor_name = processor["name"]
processor_args = []
processor_kwrs = {}
processor_opts = processor.get("args", {})

wrapper_name = next(
(k for k in processor if k not in processor_reserved_props), None
)
wrapper_name = next((k for k in processor if k not in processor_reserved_props), None)

if wrapper_name:
processor_name = wrapper_name
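One change that recurs in this file and in several below is building the exception message in a variable before raising, instead of passing a literal or f-string straight to the exception constructor. This is presumably driven by Ruff's EM (flake8-errmsg) rules, which select = ["ALL"] now enables. A minimal before/after sketch reusing the message from the hunk above (the helper name is hypothetical):

# Before: an f-string inside the raise; Ruff's EM102 flags this style.
def get_pipe_before(pipes, name):
    if name not in pipes:
        raise ValueError(f"no such pipe: '{name}'")
    return pipes[name]

# After: the message is assigned to a variable first, which is the
# style adopted throughout this commit.
def get_pipe_after(pipes, name):
    if name not in pipes:
        msg = f"no such pipe: '{name}'"
        raise ValueError(msg)
    return pipes[name]
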
4 changes: 2 additions & 2 deletions src/holocron/_core/factories.py
@@ -1,12 +1,12 @@
"""Factory functions to create core instances."""

from .._processors import import_processors, when
from holocron._processors import import_processors, when

from . import Application


def create_app(metadata, processors=None, pipes=None):
"""Return an application instance with processors & pipes setup."""

instance = Application(metadata)

# In order to avoid code duplication, we use existing built-in import
15 changes: 7 additions & 8 deletions src/holocron/_core/items.py
@@ -16,7 +16,8 @@ def __init__(self, *mappings, **properties):
# behaviour. Anyway, passing more than one mapping to '__init__' is
# senseless and confusing.
if len(mappings) > 1:
raise TypeError("expected at most 1 argument, got 2")
msg = "expected at most 1 argument, got 2"
raise TypeError(msg)

for mapping in itertools.chain(mappings, (properties,)):
self._mapping.update(mapping)
@@ -64,9 +65,7 @@ def as_mapping(self):
key: value.__get__(self)
for key, value in vars(self.__class__).items()
if not key.startswith("_")
and (
inspect.isdatadescriptor(value) or inspect.ismethoddescriptor(value)
)
and (inspect.isdatadescriptor(value) or inspect.ismethoddescriptor(value))
},
**self._mapping,
)
@@ -76,14 +75,14 @@
"""Pipeline item wrapper for a static web site."""

def __init__(self, *mappings, **properties):
super(WebSiteItem, self).__init__(*mappings, **properties)
super().__init__(*mappings, **properties)

missing = {"destination", "baseurl"} - self.keys()
if missing:
raise TypeError(
"WebSiteItem is missing some required properties: %s"
% ", ".join(("'%s'" % prop for prop in sorted(missing)))
msg = "WebSiteItem is missing some required properties: {}".format(
", ".join(f"'{prop}'" for prop in sorted(missing))
)
raise TypeError(msg)

@property
def url(self):
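The reworked message above does not change behaviour: constructing a WebSiteItem without its required properties still raises a TypeError naming them in sorted order. A small usage sketch, assuming the package is installed and importing from the private module path shown in the diff:

from holocron._core.items import WebSiteItem

try:
    # Neither 'destination' nor 'baseurl' is provided, so this must fail.
    WebSiteItem({"content": "hello world"})
except TypeError as exc:
    # Prints: WebSiteItem is missing some required properties: 'baseurl', 'destination'
    print(exc)
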
3 changes: 2 additions & 1 deletion src/holocron/_processors/chain.py
@@ -18,7 +18,8 @@
)
def process(app, stream, *, order_by=None, direction=None):
if direction and not order_by:
raise ValueError("'direction' cannot be set without 'order_by'")
msg = "'direction' cannot be set without 'order_by'"
raise ValueError(msg)

if order_by:
# WARNING: Sorting the stream requires evaluating all items from the
3 changes: 1 addition & 2 deletions src/holocron/_processors/commonmark.py
@@ -3,7 +3,6 @@
import json
import logging
import subprocess
import typing as t

import markdown_it
import markdown_it.renderer
@@ -40,7 +39,7 @@ def fence(self, tokens, idx, options, env) -> str:
return super().fence(tokens, idx, options, env)


def _exec_pipe(args: t.List[str], input_: t.ByteString, timeout: int = 1000) -> bytes:
def _exec_pipe(args: list[str], input_: bytes, timeout: int = 1000) -> bytes:
try:
completed_process = subprocess.run(
args,
30 changes: 8 additions & 22 deletions src/holocron/_processors/feed.py
@@ -55,19 +55,15 @@ def _resolvefeed(name):
return resolve_json_references(feed.get(name), {"feed:": feed})

def _resolveitem(name, streamitem):
return resolve_json_references(
item.get(name), {"item:": streamitem, "feed:": feed}
)
return resolve_json_references(item.get(name), {"item:": streamitem, "feed:": feed})

feed_generator = feedgen.feed.FeedGenerator()

if any((key.startswith("itunes_") for key in feed)):
if any(key.startswith("itunes_") for key in feed):
feed_generator.load_extension("podcast")
feed_generator.podcast.itunes_author(_resolvefeed("itunes_author"))
feed_generator.podcast.itunes_block(_resolvefeed("itunes_block"))
feed_generator.podcast.itunes_category(
_resolvefeed("itunes_category"), replace=True
)
feed_generator.podcast.itunes_category(_resolvefeed("itunes_category"), replace=True)
feed_generator.podcast.itunes_image(_resolvefeed("itunes_image"))
feed_generator.podcast.itunes_explicit(_resolvefeed("itunes_explicit"))
feed_generator.podcast.itunes_complete(_resolvefeed("itunes_complete"))
@@ -125,25 +121,15 @@ def _resolveitem(name, streamitem):
feed_entry.podcast.itunes_author(_resolveitem("itunes_author", streamitem))
feed_entry.podcast.itunes_block(_resolveitem("itunes_block", streamitem))
feed_entry.podcast.itunes_image(_resolveitem("itunes_image", streamitem))
feed_entry.podcast.itunes_duration(
_resolveitem("itunes_duration", streamitem)
)
feed_entry.podcast.itunes_duration(
_resolveitem("itunes_duration", streamitem)
)
feed_entry.podcast.itunes_explicit(
_resolveitem("itunes_explicit", streamitem)
)
feed_entry.podcast.itunes_duration(_resolveitem("itunes_duration", streamitem))
feed_entry.podcast.itunes_duration(_resolveitem("itunes_duration", streamitem))
feed_entry.podcast.itunes_explicit(_resolveitem("itunes_explicit", streamitem))
feed_entry.podcast.itunes_is_closed_captioned(
_resolveitem("itunes_is_closed_captioned", streamitem)
)
feed_entry.podcast.itunes_order(_resolveitem("itunes_order", streamitem))
feed_entry.podcast.itunes_subtitle(
_resolveitem("itunes_subtitle", streamitem)
)
feed_entry.podcast.itunes_summary(
_resolveitem("itunes_summary", streamitem)
)
feed_entry.podcast.itunes_subtitle(_resolveitem("itunes_subtitle", streamitem))
feed_entry.podcast.itunes_summary(_resolveitem("itunes_summary", streamitem))

to_bytes = {"atom": feed_generator.atom_str, "rss": feed_generator.rss_str}
to_bytes = to_bytes[syndication_format]
6 changes: 2 additions & 4 deletions src/holocron/_processors/frontmatter.py
@@ -48,10 +48,8 @@ def process(app, stream, *, format="yaml", delimiter=None, overwrite=True):
frontmatter = loader(match.group("frontmatter"))

if not isinstance(frontmatter, collections.abc.Mapping):
raise ValueError(
"Frontmatter must be a mapping (i.e. key-value pairs), "
"not arrays."
)
msg = "Frontmatter must be a mapping (i.e. key-value pairs), " "not arrays."
raise ValueError(msg)

for key, value in frontmatter.items():
if overwrite or key not in item:
5 changes: 1 addition & 4 deletions src/holocron/_processors/jinja2/__init__.py
@@ -33,10 +33,7 @@ def process(app, stream, *, template="item.j2", context=None, themes=None):

env = jinja2.Environment(
loader=jinja2.ChoiceLoader(
[
jinja2.FileSystemLoader(str(pathlib.Path(theme, "templates")))
for theme in themes
]
[jinja2.FileSystemLoader(str(pathlib.Path(theme, "templates"))) for theme in themes]
),
trim_blocks=True,
lstrip_blocks=True,
4 changes: 1 addition & 3 deletions src/holocron/_processors/restructuredtext.py
@@ -48,9 +48,7 @@ def process(app, stream, *, settings=None):
# translator to fit our needs.
writer.translator_class = _HTMLTranslator

parts = publish_parts(
item["content"], writer=writer, settings_overrides=settings
)
parts = publish_parts(item["content"], writer=writer, settings_overrides=settings)

item["content"] = parts["fragment"].strip()
item["destination"] = item["destination"].with_suffix(".html")
3 changes: 2 additions & 1 deletion src/holocron/_processors/sitemap.py
@@ -59,12 +59,13 @@ def _create_sitemap_xml(stream, sitemap, pretty):

for item in stream:
if not item["absurl"].startswith(owned_url):
raise ValueError(
msg = (
f"The location of a Sitemap file determines the set of URLs "
f"that can be included in that Sitemap. A Sitemap file located "
f"at {sitemap['absurl']} can include any URLs starting with "
f"{owned_url} but can not include {item['absurl']}."
)
raise ValueError(msg)

url = dom.createElement("url")
loc = dom.createElement("loc")
4 changes: 1 addition & 3 deletions src/holocron/_processors/source.py
@@ -47,9 +47,7 @@ def _finditems(app, path, pattern, encoding, tzinfo):
if pattern and not re_name.match(str(source)):
continue

yield _createitem(
app, root / filename, source, encoding=encoding, tzinfo=tzinfo
)
yield _createitem(app, root / filename, source, encoding=encoding, tzinfo=tzinfo)


@parameters(
3 changes: 2 additions & 1 deletion src/holocron/_processors/when.py
@@ -62,7 +62,8 @@ def process(app, stream, processor, *_condition, condition=None):
condition = _condition or condition

if not condition:
raise TypeError("missing argument or value: 'condition'")
msg = "missing argument or value: 'condition'"
raise TypeError(msg)

def smartstream():
for item in stream: