Cleanup: spelling in comments, strings (make check_spelling_*)

Also replace some triple-quoted non-doc-string strings with commented
blocks in examples.
This commit is contained in:
Campbell Barton 2025-05-06 00:18:39 +00:00
parent b47332c40d
commit fd6ac498b0
17 changed files with 44 additions and 39 deletions

View File

@@ -93,7 +93,7 @@ def openBlendFile(filename):
def Align(handle):
'''
Aligns the filehandle on 4 bytes
Aligns the file-handle on 4 bytes
'''
offset = handle.tell()
trim = offset % 4
@@ -107,7 +107,7 @@ def Align(handle):
class BlendFile:
'''
Reads a blend-file and store the header, all the file-blocks, and catalogue
Reads a blend-file and store the header, all the file-blocks, and catalog
structs found in the DNA file-block
- BlendFile.Header (BlendFileHeader instance)

View File

@ -12,7 +12,7 @@ Introduction
To add translations to your Python script, you must define a dictionary formatted like that:
``{locale: {msg_key: msg_translation, ...}, ...}`` where:
- locale is either a lang iso code (e.g. ``fr``), a lang+country code (e.g. ``pt_BR``),
- locale is either a lang ISO code (e.g. ``fr``), a lang+country code (e.g. ``pt_BR``),
a lang+variant code (e.g. ``sr@latin``), or a full code (e.g. ``uz_UZ@cyrilic``).
- msg_key is a tuple (context, org message) - use, as much as possible, the predefined :const:`contexts`.
- msg_translation is the translated message in given language!

View File

@@ -26,14 +26,14 @@ import bpy
class CurveTextImport(bpy.types.Operator):
""" Test importer that creates a text object from a .txt file """
"""
Test importer that creates a text object from a text file.
"""
bl_idname = "curve.text_import"
bl_label = "Import a text file as text object"
"""
This Operator supports import one .txt file at the time, we need the
following filepath property that the file handler will use to set file path data.
"""
# This Operator supports import one `.txt` file at the time, we need the
# following file-path property that the file handler will use to set file path data.
filepath: bpy.props.StringProperty(subtype='FILE_PATH', options={'SKIP_SAVE'})
@classmethod
@@ -41,7 +41,7 @@ class CurveTextImport(bpy.types.Operator):
return (context.area and context.area.type == "VIEW_3D")
def execute(self, context):
""" Calls to this Operator can set unfiltered filepaths, ensure the file extension is .txt. """
# Calls to this Operator can set unfiltered file-paths, ensure the file extension is `.txt`.
if not self.filepath or not self.filepath.endswith(".txt"):
return {'CANCELLED'}
@@ -52,13 +52,11 @@ class CurveTextImport(bpy.types.Operator):
bpy.context.scene.collection.objects.link(text_object)
return {'FINISHED'}
"""
By default the file handler invokes the operator with the filepath property set.
In this example if this property is set the operator is executed, if not the
file select window is invoked.
This depends on setting ``options={'SKIP_SAVE'}`` to the property options to avoid
to reuse filepath data between operator calls.
"""
# By default the file handler invokes the operator with the file-path property set.
# In this example if this property is set the operator is executed, if not the
# file select window is invoked.
# This depends on setting `options={'SKIP_SAVE'}` to the property options to avoid
# to reuse file-path data between operator calls.
def invoke(self, context, event):
if self.filepath:

View File

@@ -108,7 +108,7 @@ class MESH_UL_vgroups_slow(bpy.types.UIList):
def filter_items_empty_vgroups(self, context, vgroups):
# This helper function checks vgroups to find out whether they are empty, and what's their average weights.
# TODO: This should be RNA helper actually (a vgroup prop like "raw_data: ((vidx, vweight), etc.)").
# TODO: This should be RNA helper actually (a vgroup prop like `"raw_data: ((vidx, vweight), etc.)"`).
# Too slow for Python!
obj_data = context.active_object.data
ret = {vg.index: [True, 0.0] for vg in vgroups}
@@ -131,7 +131,7 @@ class MESH_UL_vgroups_slow(bpy.types.UIList):
ret[vg.group][0] = False
ret[vg.group][1] += vg.weight * fact
elif hasattr(obj_data, "points"): # Lattice data
# XXX no access to lattice editdata?
# XXX: no access to lattice edit-data?
fact = 1 / len(obj_data.points)
for v in obj_data.points:
for vg in v.groups:
@@ -142,11 +142,11 @@ class MESH_UL_vgroups_slow(bpy.types.UIList):
def filter_items(self, context, data, propname):
# This function gets the collection property (as the usual tuple (data, propname)), and must return two lists:
# * The first one is for filtering, it must contain 32bit integers were self.bitflag_filter_item marks the
# matching item as filtered (i.e. to be shown). The upper 16 bits (including self.bitflag_filter_item) are
# matching item as filtered (i.e. to be shown). The upper 16 bits (including `self.bitflag_filter_item`) are
# reserved for internal use, the lower 16 bits are free for custom use. Here we use the first bit to mark
# VGROUP_EMPTY.
# * The second one is for reordering, it must return a list containing the new indices of the items (which
# gives us a mapping org_idx -> new_idx).
# gives us a mapping `org_idx -> new_idx`).
# Please note that the default UI_UL_list defines helper functions for common tasks (see its doc for more info).
# If you do not make filtering and/or ordering, return empty list(s) (this will be more efficient than
# returning full lists doing nothing!).

View File

@@ -2628,7 +2628,7 @@ def main():
BPY_LOGGER.debug(" %s.py", f)
BPY_LOGGER.debug(" %d total\n", len(EXAMPLE_SET_UNUSED))
# Eventually, build the html docs.
# Eventually, build the HTML docs.
if ARGS.sphinx_build:
import subprocess
subprocess.call(SPHINX_BUILD)

View File

@@ -267,7 +267,7 @@ void BVHSpatialSplit::split(BVHBuild *builder,
/* Duplicate or unsplit references intersecting both sides.
*
* Duplication happens into a temporary pre-allocated vector in order to
* reduce number of memmove() calls happening in vector.insert().
* reduce number of `memmove()` calls happening in `vector.insert()`.
*/
vector<BVHReference> &new_refs = storage_->new_references;
new_refs.clear();

View File

@@ -298,7 +298,7 @@ TEST_F(LibQueryTest, libquery_subdata)
EXPECT_EQ(context.scene->id.us, 0);
EXPECT_EQ(context.object->id.us, 0);
/* The material's nodetre input node IDProperty uses the target object. */
/* The material's node-tree input node IDProperty uses the target object. */
EXPECT_EQ(context.target->id.us, 42);
EXPECT_EQ(context.mesh->id.us, 0);
}

View File

@@ -1007,7 +1007,7 @@ static void write_compositor_legacy_properties(bNodeTree &node_tree)
if (node->type_legacy == CMP_NODE_BILATERALBLUR) {
NodeBilateralBlurData *storage = static_cast<NodeBilateralBlurData *>(node->storage);
/* The size input is ceil(iterations + sigma_space). */
/* The size input is `ceil(iterations + sigma_space)`. */
const bNodeSocket *size_input = blender::bke::node_find_socket(*node, SOCK_IN, "Size");
storage->iter = size_input->default_value_typed<bNodeSocketValueInt>()->value - 1;
storage->sigma_space = 1.0f;

View File

@@ -116,7 +116,7 @@ static void invalidate_outdated_caches_if_necessary(const Span<StringRefNull> fi
/* Isolate because a mutex is locked. */
threading::isolate_task([&]() {
/* Invalidation is done while the mutex is locked so that other threads won't see the old
* cached value anymore after we've detected that it's oudated. */
* cached value anymore after we've detected that it's outdated. */
memory_cache::remove_if([&](const GenericKey &other_key) {
if (const auto *other_key_typed = dynamic_cast<const LoadFileKey *>(&other_key)) {
const Span<std::string> other_key_paths = other_key_typed->file_paths();

View File

@@ -194,7 +194,7 @@ static bool bake_strokes(Object *ob,
MOD_lineart_destroy_render_data_v3(lmd);
}
else {
/* Use the cacued result, *lc is already valid. */
/* Use the cached result, `*lc` is already valid. */
local_lc = *lc;
}
MOD_lineart_chain_clear_picked_flag(local_lc);

View File

@@ -774,7 +774,7 @@ class Preprocessor {
/* To be run before `argument_decorator_macro_injection()`. */
std::string argument_reference_mutation(std::string &str)
{
/* Next two regexes are expensive. Check if they are needed at all. */
/* Next two REGEX checks are expensive. Check if they are needed at all. */
bool valid_match = false;
reference_search(str, [&](int parenthesis_depth, int bracket_depth, char &c) {
/* Check if inside a function signature.

View File

@@ -112,9 +112,10 @@ typedef enum eIDPropertyFlag {
* Written to #BHead.code (for file IO)
* and the first 2 bytes of #ID.name (for runtime checks, see #GS macro).
*
* These types should also be available on their corresponding DNA struct. It must be a static
* constexpr data member so that it can be used in compile-time expressions and does not take up
* space in the struct. This is used by e.g. #BKE_id_new_nomain for improved type safety.
* These types should also be available on their corresponding DNA struct.
* It must be a static `constexpr` data member so that it can be used in
* compile-time expressions and does not take up space in the struct.
* This is used by e.g. #BKE_id_new_nomain for improved type safety.
*
* Update #ID_TYPE_IS_DEPRECATED() when deprecating types.
*/

View File

@@ -122,7 +122,7 @@ static float map_range(const float value,
static void node_build_multi_function(blender::nodes::NodeMultiFunctionBuilder &builder)
{
static auto no_clamp_function = mf::build::SI5_SO<float, float, float, float, float, float>(
"Map Range No CLamp",
"Map Range No Clamp",
[](const float value,
const float from_min,
const float from_max,

View File

@@ -510,7 +510,7 @@ def main() -> None:
"./lib",
# Needs manual handling as it mixes two licenses.
"./intern/atomic",
# Practically an "extern" within an "intern" module, leave as-is.
# Practically an `./extern` within an `./intern` module, leave as-is.
"./intern/itasc/kdl",
# TODO: Files in these directories should be handled but the files have valid licenses.

View File

@@ -528,7 +528,9 @@ dict_custom = {
"unsetting",
"unshadowed",
"unshared",
"unsharing",
"unsharp",
"unshearing",
"unspecialized",
"unsqueezed",
"unstretch",

View File

@@ -34,6 +34,9 @@ SOURCE_EXT = (
def sort_struct_lists(fn: str, data_src: str) -> str | None:
import re
# Disable for now.
use_datatoc_match = False
# eg:
# struct Foo;
re_match_struct = re.compile(r"struct\s+[A-Za-z_][A-Za-z_0-9]*\s*;")
@@ -47,9 +50,10 @@ def sort_struct_lists(fn: str, data_src: str) -> str | None:
re_match_enum = re.compile(r"enum\s+[A-Za-z_][A-Za-z_0-9]*\s*;")
# eg:
# extern char datatoc_splash_png[];
# re_match_datatoc = re.compile(r"extern\s+(char)\s+datatoc_[A-Za-z_].*;")
if use_datatoc_match:
# eg:
# `extern char datatoc_splash_png[];`
re_match_datatoc = re.compile(r"extern\s+(char)\s+datatoc_[A-Za-z_].*;")
lines = data_src.splitlines(keepends=True)
@@ -62,9 +66,9 @@ def sort_struct_lists(fn: str, data_src: str) -> str | None:
return 3
if re_match_enum.match(l):
return 4
# Disable for now.
# if re_match_datatoc.match(l):
# return 5
if use_datatoc_match:
if re_match_datatoc.match(l):
return 5
return None
i = 0

View File

@@ -1974,7 +1974,7 @@ def run_edits_on_directory(
# needed for when arguments are referenced relatively
os.chdir(build_dir)
# Weak, but we probably don't want to handle extern.
# Weak, but we probably don't want to handle `./extern/`.
# this limit could be removed.
source_paths = (
os.path.join("intern", "ghost"),