2023-08-16 00:20:26 +10:00
|
|
|
# SPDX-FileCopyrightText: 2011-2023 Blender Authors
|
2023-06-15 13:09:04 +10:00
|
|
|
#
|
2022-02-11 09:07:11 +11:00
|
|
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
2011-02-21 07:07:44 +00:00
|
|
|
|
|
|
|
# Public API of this module (re-exported via `from addon_utils import *`).
__all__ = (
    "paths",
    "modules",
    "check",
    "check_extension",
    "enable",
    "disable",
    "disable_all",
    "reset_all",
    "module_bl_info",
    "extensions_refresh",
    "stale_pending_remove_paths",
    "stale_pending_stage_paths",
)
|
2011-02-21 07:07:44 +00:00
|
|
|
|
2011-02-22 22:24:50 +00:00
|
|
|
import bpy as _bpy
# Convenience shortcut, used throughout this module.
_preferences = _bpy.context.preferences

# Set when a file could not be read as UTF-8 (see `_fake_module`),
# cleared at the start of each `modules_refresh` call.
error_encoding = False
# List of `(name, file, path)` tuples for add-ons found more than once.
error_duplicates = []
# Cache of light-weight stand-in modules holding only meta-data,
# keyed by module name (filled by `modules_refresh`).
addons_fake_modules = {}

# Global cached extensions, set before loading extensions on startup.
# `{addon_module_name: "Reason for incompatibility", ...}`
_extensions_incompatible = {}

# Global extension warnings, lazily calculated when displaying extensions.
# `{addon_module_name: "Warning", ...}`
_extensions_warnings = {}

# Filename used for stale files (which we can't delete).
_stale_filename = ".~stale~"
|
|
|
|
|
2013-11-20 03:38:18 +11:00
|
|
|
|
2015-01-23 16:20:37 +11:00
|
|
|
# called only once at startup, avoids calling 'reset_all', correct but slower.
def _initialize_once():
    # Make every add-on directory importable.
    for path in paths():
        _bpy.utils._sys_path_ensure_append(path)

    _stale_pending_check_and_remove_once()

    _initialize_extensions_repos_once()

    # Enable every add-on stored in the user preferences.
    for addon in _preferences.addons:
        enable(
            addon.module,
            # Ensured by `_initialize_extensions_repos_once`.
            refresh_handled=True,
        )

    _initialize_ensure_extensions_addon()
|
2015-01-23 16:20:37 +11:00
|
|
|
|
2011-02-21 07:07:44 +00:00
|
|
|
def paths():
    """Return a list of existing add-on directories from all script paths."""
    import os

    addon_dirs = []
    for index, script_path in enumerate(_bpy.utils.script_paths()):
        # Bundled add-ons are always first.
        is_bundled = (index == 0)
        addon_dir = os.path.join(script_path, "addons_core" if is_bundled else "addons")
        if os.path.isdir(addon_dir):
            addon_dirs.append(addon_dir)
        elif is_bundled:
            # Since this isn't officially part of the API, print an error so this never silently fails.
            print("Internal error:", addon_dir, "was not found!")
    return addon_dirs
|
2011-02-21 07:07:44 +00:00
|
|
|
|
|
|
|
|
2023-08-09 20:34:15 +10:00
|
|
|
# A version of `paths` that includes extension repositories returning a list `(path, package)` pairs.
#
# Notes on the ``package`` value.
#
# - For top-level modules (the "addons" directories, the value is an empty string)
#   because those add-ons can be imported directly.
# - For extension repositories the value is a module string (which can be imported for example)
#   where any modules within the `path` can be imported as a sub-module.
#   So for example, given a list value of: `("/tmp/repo", "bl_ext.temp_repo")`.
#
#   An add-on located at `/tmp/repo/my_handy_addon.py` will have a unique module path of:
#   `bl_ext.temp_repo.my_handy_addon`, which can be imported and will be the value of it's `Addon.module`.
def _paths_with_extension_repos():
    import os

    # Regular add-on directories use an empty package (importable directly).
    result = [(path, "") for path in paths()]

    # Append one entry per enabled extension repository that exists on disk.
    for repo in _preferences.extensions.repos:
        if not repo.enabled:
            continue
        dirpath = repo.directory
        if os.path.isdir(dirpath):
            result.append((dirpath, "{:s}.{:s}".format(_ext_base_pkg_idname, repo.module)))

    return result
|
|
|
|
|
|
|
|
|
2024-05-16 09:02:21 +10:00
|
|
|
def _fake_module(mod_name, mod_path, speedy=True):
    """
    Create a light-weight stand-in module containing only the add-on's
    ``bl_info`` meta-data (without executing the add-on).

    :arg mod_name: Name given to the resulting module.
    :arg mod_path: Path of the add-on source file to extract ``bl_info`` from.
    :arg speedy: When true, only scan up to the end of the ``bl_info``
       assignment instead of reading the whole file.
    :return: The stand-in module or None on failure.
    """
    global error_encoding
    import os

    if _bpy.app.debug_python:
        print("fake_module", mod_path, mod_name)

    # Extensions store their meta-data in a manifest, not `bl_info`.
    if mod_name.startswith(_ext_base_pkg_idname_with_dot):
        return _fake_module_from_extension(mod_name, mod_path)

    import ast
    ModuleType = type(ast)
    try:
        file_mod = open(mod_path, "r", encoding='UTF-8')
    except OSError as ex:
        print("Error opening file:", mod_path, ex)
        return None

    with file_mod:
        if speedy:
            lines = []
            line_iter = iter(file_mod)
            line = ""
            # Skip ahead to the `bl_info` assignment.
            while not line.startswith("bl_info"):
                try:
                    line = line_iter.readline()
                except UnicodeDecodeError as ex:
                    if not error_encoding:
                        error_encoding = True
                        print("Error reading file as UTF-8:", mod_path, ex)
                    return None

                if len(line) == 0:
                    # End of file reached without finding `bl_info`.
                    break
            # Collect lines until the first blank line, assumed to
            # terminate the `bl_info` declaration.
            while line.rstrip():
                lines.append(line)
                try:
                    line = line_iter.readline()
                except UnicodeDecodeError as ex:
                    if not error_encoding:
                        error_encoding = True
                        print("Error reading file as UTF-8:", mod_path, ex)
                    return None

            data = "".join(lines)

        else:
            data = file_mod.read()
    del file_mod

    try:
        ast_data = ast.parse(data, filename=mod_path)
    except Exception:
        print("Syntax error 'ast.parse' can't read:", repr(mod_path))
        import traceback
        traceback.print_exc()
        ast_data = None

    # The `ast.Assign` node for the `bl_info` assignment (if found).
    body_info = None

    if ast_data:
        for body in ast_data.body:
            if body.__class__ == ast.Assign:
                if len(body.targets) == 1:
                    if getattr(body.targets[0], "id", "") == "bl_info":
                        body_info = body
                        break

    if body_info:
        try:
            mod = ModuleType(mod_name)
            # Use `body_info` explicitly instead of relying on the leaked
            # `for` loop variable after `break` (they reference the same
            # node here, but this makes the data-flow explicit & robust).
            mod.bl_info = ast.literal_eval(body_info.value)
            mod.__file__ = mod_path
            mod.__time__ = os.path.getmtime(mod_path)
        except Exception:
            print("AST error parsing bl_info for:", repr(mod_path))
            import traceback
            traceback.print_exc()
            return None

        return mod
    else:
        print("Warning: add-on missing 'bl_info', this can cause poor performance!:", repr(mod_path))
        return None
|
|
|
|
|
|
|
|
|
|
|
|
def modules_refresh(*, module_cache=addons_fake_modules):
    """
    Re-scan all add-on paths, updating ``module_cache`` in-place:
    stale entries are dropped, meta-data is re-loaded for files whose
    time-stamp changed, and duplicates are recorded in `error_duplicates`.

    :arg module_cache: Mapping of module name to fake-module
       (defaults to the global `addons_fake_modules`).
    """
    global error_encoding
    import os

    # Reset per-refresh error state.
    error_encoding = False
    error_duplicates.clear()

    # Names still present from the previous refresh; entries found on disk
    # are discarded from this set, whatever remains is removed at the end.
    modules_stale = set(module_cache.keys())

    for path, pkg_id in _paths_with_extension_repos():
        for mod_name, mod_path in _bpy.path.module_names(path, package=pkg_id):
            modules_stale.discard(mod_name)
            mod = module_cache.get(mod_name)
            if mod is not None:
                if mod.__file__ != mod_path:
                    # Same module name from two different locations.
                    print(
                        "multiple addons with the same name:\n"
                        "  {!r}\n"
                        "  {!r}".format(mod.__file__, mod_path)
                    )
                    error_duplicates.append((mod.bl_info["name"], mod.__file__, mod_path))

                elif (
                        (mod.__time__ != os.path.getmtime(metadata_path := mod_path)) if not pkg_id else
                        # Check the manifest time as this is the source of the cache.
                        (mod.__time_manifest__ != os.path.getmtime(metadata_path := mod.__file_manifest__))
                ):
                    # Time-stamp changed on disk: drop the cached entry so it is re-created below.
                    print("reloading addon meta-data:", mod_name, repr(metadata_path), "(time-stamp change detected)")
                    del module_cache[mod_name]
                    mod = None

            if mod is None:
                mod = _fake_module(
                    mod_name,
                    mod_path,
                )
                if mod:
                    module_cache[mod_name] = mod

    # just in case we get stale modules, not likely
    for mod_stale in modules_stale:
        del module_cache[mod_stale]
    del modules_stale
|
|
|
|
|
2013-08-28 06:36:54 +00:00
|
|
|
|
PyAPI: use keyword only arguments
Use keyword only arguments for the following functions.
- addon_utils.module_bl_info 2nd arg `info_basis`.
- addon_utils.modules 1st `module_cache`, 2nd arg `refresh`.
- addon_utils.modules_refresh 1st arg `module_cache`.
- bl_app_template_utils.activate 1nd arg `template_id`.
- bl_app_template_utils.import_from_id 2nd arg `ignore_not_found`.
- bl_app_template_utils.import_from_path 2nd arg `ignore_not_found`.
- bl_keymap_utils.keymap_from_toolbar.generate 2nd & 3rd args `use_fallback_keys` & `use_reset`.
- bl_keymap_utils.platform_helpers.keyconfig_data_oskey_from_ctrl 2nd arg `filter_fn`.
- bl_ui_utils.bug_report_url.url_prefill_from_blender 1st arg `addon_info`.
- bmesh.types.BMFace.copy 1st & 2nd args `verts`, `edges`.
- bmesh.types.BMesh.calc_volume 1st arg `signed`.
- bmesh.types.BMesh.from_mesh 2nd..4th args `face_normals`, `use_shape_key`, `shape_key_index`.
- bmesh.types.BMesh.from_object 3rd & 4th args `cage`, `face_normals`.
- bmesh.types.BMesh.transform 2nd arg `filter`.
- bmesh.types.BMesh.update_edit_mesh 2nd & 3rd args `loop_triangles`, `destructive`.
- bmesh.types.{BMVertSeq,BMEdgeSeq,BMFaceSeq}.sort 1st & 2nd arg `key`, `reverse`.
- bmesh.utils.face_split 4th..6th args `coords`, `use_exist`, `example`.
- bpy.data.libraries.load 2nd..4th args `link`, `relative`, `assets_only`.
- bpy.data.user_map 1st..3rd args `subset`, `key_types, `value_types`.
- bpy.msgbus.subscribe_rna 5th arg `options`.
- bpy.path.abspath 2nd & 3rd args `start` & `library`.
- bpy.path.clean_name 2nd arg `replace`.
- bpy.path.ensure_ext 3rd arg `case_sensitive`.
- bpy.path.module_names 2nd arg `recursive`.
- bpy.path.relpath 2nd arg `start`.
- bpy.types.EditBone.transform 2nd & 3rd arg `scale`, `roll`.
- bpy.types.Operator.as_keywords 1st arg `ignore`.
- bpy.types.Struct.{keyframe_insert,keyframe_delete} 2nd..5th args `index`, `frame`, `group`, `options`.
- bpy.types.WindowManager.popup_menu 2nd & 3rd arg `title`, `icon`.
- bpy.types.WindowManager.popup_menu_pie 3rd & 4th arg `title`, `icon`.
- bpy.utils.app_template_paths 1st arg `subdir`.
- bpy.utils.app_template_paths 1st arg `subdir`.
- bpy.utils.blend_paths 1st..3rd args `absolute`, `packed`, `local`.
- bpy.utils.execfile 2nd arg `mod`.
- bpy.utils.keyconfig_set 2nd arg `report`.
- bpy.utils.load_scripts 1st & 2nd `reload_scripts` & `refresh_scripts`.
- bpy.utils.preset_find 3rd & 4th args `display_name`, `ext`.
- bpy.utils.resource_path 2nd & 3rd arg `major`, `minor`.
- bpy.utils.script_paths 1st..4th args `subdir`, `user_pref`, `check_all`, `use_user`.
- bpy.utils.smpte_from_frame 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.smpte_from_seconds 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.system_resource 2nd arg `subdir`.
- bpy.utils.time_from_frame 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.time_to_frame 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.units.to_string 4th..6th `precision`, `split_unit`, `compatible_unit`.
- bpy.utils.units.to_value 4th arg `str_ref_unit`.
- bpy.utils.user_resource 2nd & 3rd args `subdir`, `create`
- bpy_extras.view3d_utils.location_3d_to_region_2d 4th arg `default`.
- bpy_extras.view3d_utils.region_2d_to_origin_3d 4th arg `clamp`.
- gpu.offscreen.unbind 1st arg `restore`.
- gpu_extras.batch.batch_for_shader 4th arg `indices`.
- gpu_extras.batch.presets.draw_circle_2d 4th arg `segments`.
- gpu_extras.presets.draw_circle_2d 4th arg `segments`.
- imbuf.types.ImBuf.resize 2nd arg `resize`.
- imbuf.write 2nd arg `filepath`.
- mathutils.kdtree.KDTree.find 2nd arg `filter`.
- nodeitems_utils.NodeCategory 3rd & 4th arg `descriptions`, `items`.
- nodeitems_utils.NodeItem 2nd..4th args `label`, `settings`, `poll`.
- nodeitems_utils.NodeItemCustom 1st & 2nd arg `poll`, `draw`.
- rna_prop_ui.draw 5th arg `use_edit`.
- rna_prop_ui.rna_idprop_ui_get 2nd arg `create`.
- rna_prop_ui.rna_idprop_ui_prop_clear 3rd arg `remove`.
- rna_prop_ui.rna_idprop_ui_prop_get 3rd arg `create`.
- rna_xml.xml2rna 2nd arg `root_rna`.
- rna_xml.xml_file_write 4th arg `skip_typemap`.
2021-06-08 18:03:14 +10:00
|
|
|
def modules(*, module_cache=addons_fake_modules, refresh=True):
    """
    Return the cached fake-modules for all add-ons, sorted by display name.

    :arg module_cache: Mapping of module name to fake-module
       (defaults to the global cache).
    :arg refresh: When true, re-scan add-on paths before returning.
    """
    first_access = (module_cache is addons_fake_modules) and modules._is_first
    if refresh or first_access:
        modules_refresh(module_cache=module_cache)
        modules._is_first = False

    # Dictionaries are ordered in more recent versions of Python,
    # so re-building the dictionary from sorted items keeps it sorted
    # and avoids sorting on every call to this function.
    def sort_key(item):
        mod_name, mod = item
        # Sort by display name with the module name as a tie breaker.
        display = mod.bl_info.get("name") or mod_name
        return (display.casefold(), mod_name)

    ordered_items = sorted(module_cache.items(), key=sort_key)
    module_cache.clear()
    module_cache.update(ordered_items)

    return module_cache.values()


modules._is_first = True
|
2011-02-21 07:07:44 +00:00
|
|
|
|
|
|
|
|
|
|
|
def check(module_name):
    """
    Returns the loaded state of the addon.

    :arg module_name: The name of the addon and module.
    :type module_name: str
    :return: (loaded_default, loaded_state)
    :rtype: tuple[bool, bool]
    """
    import sys

    loaded_default = module_name in _preferences.addons

    mod = sys.modules.get(module_name)
    if mod is None:
        loaded_state = False
    else:
        loaded_state = getattr(mod, "__addon_enabled__", Ellipsis)
        if loaded_state is Ellipsis:
            # A module with this name exists but was not loaded by this API.
            print(
                "Warning: addon-module", module_name, "found module "
                "but without '__addon_enabled__' field, "
                "possible name collision from file:",
                repr(getattr(mod, "__file__", "<unknown>")),
            )
            loaded_state = False

        if getattr(mod, "__addon_persistent__", False):
            loaded_default = True

    return loaded_default, loaded_state
|
|
|
|
|
2024-01-25 13:45:54 +11:00
|
|
|
|
|
|
|
def check_extension(module_name):
    """
    Return true if the module is an extension.
    """
    prefix = _ext_base_pkg_idname_with_dot
    return module_name.startswith(prefix)
|
|
|
|
|
|
|
|
|
2013-05-27 16:12:06 +00:00
|
|
|
# utility functions
|
|
|
|
|
|
|
|
|
|
|
|
def _addon_ensure(module_name):
    """Ensure a user-preference entry exists for the given add-on module."""
    addons = _preferences.addons
    if not addons.get(module_name):
        entry = addons.new()
        entry.module = module_name
|
|
|
|
|
|
|
|
|
|
|
|
def _addon_remove(module_name):
    """Remove every user-preference entry referencing the add-on module."""
    addons = _preferences.addons

    # Loop because duplicate entries with the same module name may exist.
    while module_name in addons:
        entry = addons.get(module_name)
        if entry:
            addons.remove(entry)
|
|
|
|
|
2011-02-21 07:07:44 +00:00
|
|
|
|
2024-11-09 11:44:31 +11:00
|
|
|
def enable(module_name, *, default_set=False, persistent=False, refresh_handled=False, handle_error=None):
    """
    Enables an addon by name.

    :arg module_name: the name of the addon and module.
    :type module_name: str
    :arg default_set: Set the user-preference.
    :type default_set: bool
    :arg persistent: Ensure the addon is enabled for the entire session (after loading new files).
    :type persistent: bool
    :arg refresh_handled: When true, :func:`extensions_refresh` must have been called with ``module_name``
       included in ``addon_modules_pending``.
       This should be used to avoid many calls to refresh extensions when enabling multiple add-ons at once.
    :type refresh_handled: bool
    :arg handle_error: Called in the case of an error, taking an exception argument.
    :type handle_error: Callable[[Exception], None] | None
    :return: the loaded module or None on failure.
    :rtype: ModuleType | None
    """

    import os
    import sys
    import importlib
    from bpy_restrict_state import RestrictBlend

    if handle_error is None:
        # Default error handler: print a short message for missing add-ons,
        # a full trace-back for anything else.
        def handle_error(ex):
            if isinstance(ex, ImportError):
                # NOTE: checking "Add-on " prefix is rather weak,
                # it's just a way to avoid the noise of a full trace-back when
                # an add-on is simply missing on the file-system.
                if (type(msg := ex.msg) is str) and msg.startswith("Add-on "):
                    print(msg)
                    return
            import traceback
            traceback.print_exc()

    if (is_extension := module_name.startswith(_ext_base_pkg_idname_with_dot)):
        if not refresh_handled:
            extensions_refresh(
                addon_modules_pending=[module_name],
                handle_error=handle_error,
            )

        # Ensure the extensions are compatible.
        if _extensions_incompatible:
            # Key is a `(repo_module, submodule)` pair extracted from the full module name.
            if (error := _extensions_incompatible.get(
                    module_name[len(_ext_base_pkg_idname_with_dot):].partition(".")[0::2],
            )):
                try:
                    raise RuntimeError("Extension {:s} is incompatible ({:s})".format(module_name, error))
                except RuntimeError as ex:
                    handle_error(ex)
                # No need to call `extensions_refresh` because incompatible extensions
                # will not have their wheels installed.
                return None

    # NOTE: from now on, before returning None, `extensions_refresh()` must be called
    # to ensure wheels setup in anticipation for this extension being used are removed upon failure.

    # reload if the mtime changes
    mod = sys.modules.get(module_name)
    # chances of the file _not_ existing are low, but it could be removed

    # Set to `mod.__file__` or None.
    mod_file = None

    if (
            (mod is not None) and
            (mod_file := mod.__file__) is not None and
            os.path.exists(mod_file)
    ):

        if getattr(mod, "__addon_enabled__", False):
            # This is an unlikely situation,
            # re-register if the module is enabled.
            # Note: the UI doesn't allow this to happen,
            # in most cases the caller should 'check()' first.
            try:
                mod.unregister()
            except Exception as ex:
                print("Exception in module unregister():", (mod_file or module_name))
                handle_error(ex)
                if is_extension and not refresh_handled:
                    extensions_refresh(handle_error=handle_error)
                return None

        mod.__addon_enabled__ = False
        mtime_orig = getattr(mod, "__time__", 0)
        mtime_new = os.path.getmtime(mod_file)
        if mtime_orig != mtime_new:
            print("module changed on disk:", repr(mod_file), "reloading...")

            try:
                importlib.reload(mod)
            except Exception as ex:
                handle_error(ex)
                del sys.modules[module_name]

                if is_extension and not refresh_handled:
                    extensions_refresh(handle_error=handle_error)
                return None
            mod.__addon_enabled__ = False

    # add the addon first it may want to initialize its own preferences.
    # must remove on fail through.
    if default_set:
        _addon_ensure(module_name)

    # Split registering up into 3 steps so we can undo
    # if it fails part way through.

    # Disable the context: using the context at all
    # while loading an addon is really bad, don't do it!
    with RestrictBlend():

        # 1) try import
        try:
            # Use instead of `__import__` so that sub-modules can eventually be supported.
            # This is also documented to be the preferred way to import modules.
            mod = importlib.import_module(module_name)
            if (mod_file := mod.__file__) is None:
                # This can happen when:
                # - The add-on has been removed but there are residual `.pyc` files left behind.
                # - An extension is a directory that doesn't contain an `__init__.py` file.
                #
                # Include a message otherwise the "cause:" for failing to load the module is left blank.
                # Include the `__path__` when available so there is a reference to the location that failed to load.
                raise ImportError(
                    "module loaded with no associated file, __path__={!r}, aborting!".format(
                        getattr(mod, "__path__", None)
                    ),
                    name=module_name,
                )
            mod.__time__ = os.path.getmtime(mod_file)
            mod.__addon_enabled__ = False
        except Exception as ex:
            # If the add-on doesn't exist, don't print full trace-back because the back-trace is in this case
            # is verbose without any useful details. A missing path is better communicated in a short message.
            # Account for `ImportError` & `ModuleNotFoundError`.
            if isinstance(ex, ImportError):
                if ex.name == module_name:
                    ex.msg = "Add-on not loaded: \"{:s}\", cause: {:s}".format(module_name, str(ex))

                # Issue with an add-on from an extension repository, report a useful message.
                elif is_extension and module_name.startswith(ex.name + "."):
                    repo_id = module_name[len(_ext_base_pkg_idname_with_dot):].rpartition(".")[0]
                    repo = next(
                        (repo for repo in _preferences.extensions.repos if repo.module == repo_id),
                        None,
                    )
                    if repo is None:
                        ex.msg = (
                            "Add-on not loaded: \"{:s}\", cause: extension repository \"{:s}\" doesn't exist".format(
                                module_name, repo_id,
                            )
                        )
                    elif not repo.enabled:
                        ex.msg = (
                            "Add-on not loaded: \"{:s}\", cause: extension repository \"{:s}\" is disabled".format(
                                module_name, repo_id,
                            )
                        )
                    else:
                        # The repository exists and is enabled, it should have imported.
                        ex.msg = "Add-on not loaded: \"{:s}\", cause: {:s}".format(module_name, str(ex))

            handle_error(ex)

            if default_set:
                _addon_remove(module_name)
            if is_extension and not refresh_handled:
                extensions_refresh(handle_error=handle_error)
            return None

        if is_extension:
            # Handle the case where an extension has `bl_info` (which is not used for extensions).
            # Note that internally a `bl_info` is added based on the extensions manifest - for compatibility.
            # So it's important not to use this one.
            bl_info = getattr(mod, "bl_info", None)
            if bl_info is not None:
                # Use `_init` to detect when `bl_info` was generated from the manifest, see: `_bl_info_from_extension`.
                if type(bl_info) is dict and "_init" not in bl_info:
                    # This print is noisy, hide behind a debug flag.
                    # Once `bl_info` is fully deprecated this should be changed to always print a warning.
                    if _bpy.app.debug_python:
                        print(
                            "Add-on \"{:s}\" has a \"bl_info\" which will be ignored in favor of \"{:s}\"".format(
                                module_name, _ext_manifest_filename_toml,
                            )
                        )
                # Always remove as this is not expected to exist and will be lazily initialized.
                del mod.bl_info

        # 2) Try register collected modules.
        # Removed register_module, addons need to handle their own registration now.

        from _bpy import _bl_owner_id_get, _bl_owner_id_set
        owner_id_prev = _bl_owner_id_get()
        _bl_owner_id_set(module_name)

        # 3) Try run the modules register function.
        try:
            mod.register()
        except Exception as ex:
            print("Exception in module register():", (mod_file or module_name))
            handle_error(ex)
            del sys.modules[module_name]
            if default_set:
                _addon_remove(module_name)
            if is_extension and not refresh_handled:
                extensions_refresh(handle_error=handle_error)
            return None
        finally:
            _bl_owner_id_set(owner_id_prev)

    # * OK loaded successfully! *
    mod.__addon_enabled__ = True
    mod.__addon_persistent__ = persistent

    if _bpy.app.debug_python:
        print("\taddon_utils.enable", mod.__name__)

    return mod
|
|
|
|
|
|
|
|
|
2024-11-09 11:44:31 +11:00
|
|
|
def disable(module_name, *, default_set=False, refresh_handled=False, handle_error=None):
    """
    Disables an addon by name.

    :arg module_name: The name of the addon and module.
    :type module_name: str
    :arg default_set: Set the user-preference.
    :type default_set: bool
    :arg refresh_handled: When true the caller takes responsibility for refreshing
       extension state, so no ``extensions_refresh()`` call is made here.
    :type refresh_handled: bool
    :arg handle_error: Called in the case of an error, taking an exception argument.
    :type handle_error: Callable[[Exception], None] | None
    """
    import sys

    # Default error handler: print the stack-trace.
    if handle_error is None:
        def handle_error(_ex):
            import traceback
            traceback.print_exc()

    mod = sys.modules.get(module_name)

    # Possible this add-on is from a previous session and didn't load a
    # module this time. So even if the module is not found, still disable
    # the add-on in the user preferences.
    if mod and getattr(mod, "__addon_enabled__", False) is not False:
        # Clear the flags first so the add-on counts as disabled
        # even if `unregister()` raises below.
        mod.__addon_enabled__ = False
        mod.__addon_persistent__ = False

        try:
            mod.unregister()
        except Exception as ex:
            mod_path = getattr(mod, "__file__", module_name)
            print("Exception in module unregister():", repr(mod_path))
            del mod_path
            handle_error(ex)
    else:
        print(
            "addon_utils.disable: {:s} not {:s}".format(
                module_name,
                "loaded" if mod is None else "enabled",
            )
        )

    # could be in more than once, unlikely but better do this just in case.
    if default_set:
        _addon_remove(module_name)

    # Keep extension run-time data in sync unless the caller opted to do it
    # (e.g. when disabling many add-ons in a batch).
    if not refresh_handled:
        extensions_refresh(handle_error=handle_error)

    if _bpy.app.debug_python:
        print("\taddon_utils.disable", module_name)
|
|
|
|
|
|
|
|
|
2015-11-16 09:41:42 +11:00
|
|
|
def reset_all(*, reload_scripts=False):
    """
    Sets the addon state based on the user preferences.

    :arg reload_scripts: When true, reload add-on modules already imported
       before synchronizing their enabled/loaded state.
    :type reload_scripts: bool
    """
    import sys

    # Ensures stale `addons_fake_modules` isn't used.
    modules._is_first = True
    addons_fake_modules.clear()

    # Update extensions compatibility (after reloading preferences).
    # Potentially refreshing wheels too.
    extensions_refresh()

    for path, pkg_id in _paths_with_extension_repos():
        # Only plain add-on paths (no package) are added to `sys.path`.
        if not pkg_id:
            _bpy.utils._sys_path_ensure_append(path)

        for mod_name, _mod_path in _bpy.path.module_names(path, package=pkg_id):
            is_enabled, is_loaded = check(mod_name)

            # first check if reload is needed before changing state.
            if reload_scripts:
                import importlib
                mod = sys.modules.get(mod_name)
                if mod:
                    importlib.reload(mod)

            if is_enabled == is_loaded:
                # Already in the requested state, nothing to do.
                pass
            elif is_enabled:
                # Extensions were refreshed above, skip a per-add-on refresh.
                enable(mod_name, refresh_handled=True)
            elif is_loaded:
                print("\taddon_utils.reset_all unloading", mod_name)
                disable(mod_name)
|
2011-02-21 07:07:44 +00:00
|
|
|
|
|
|
|
|
2017-03-24 05:20:26 +11:00
|
|
|
def disable_all():
    """Disable every currently enabled add-on module."""
    import sys
    # Collect modules to disable first because dict can be modified as we disable.

    # NOTE: don't use `getattr(item[1], "__addon_enabled__", False)` because this runs on all modules,
    # including 3rd party modules unrelated to Blender.
    #
    # Some modules may have their own `__getattr__` and either:
    # - Not raise an `AttributeError` (as they should),
    #   causing `hasattr` & `getattr` to raise an exception instead of treating the attribute as missing.
    # - Generate modules dynamically, modifying `sys.modules` which is being iterated over,
    #   causing a RuntimeError: "dictionary changed size during iteration".
    #
    # Either way, running 3rd party logic here can cause undefined behavior.
    # Use direct `__dict__` access to bypass `__getattr__`, see: #111649.
    modules = sys.modules.copy()
    addon_modules = [
        item for item in modules.items()
        if type(mod_dict := getattr(item[1], "__dict__", None)) is dict
        if mod_dict.get("__addon_enabled__")
    ]
    # Check the enabled state again since it's possible the disable call
    # of one add-on disables others.
    for mod_name, mod in addon_modules:
        if getattr(mod, "__addon_enabled__", False):
            disable(mod_name, refresh_handled=True)
|
2017-03-24 05:20:26 +11:00
|
|
|
|
|
|
|
|
2020-03-04 23:24:13 -05:00
|
|
|
def _blender_manual_url_prefix():
    """Return the versioned Blender manual base URL for the current UI language."""
    language = _bpy.utils.manual_language_code()
    version_major, version_minor = _bpy.app.version[:2]
    return "https://docs.blender.org/manual/{:s}/{:d}.{:d}".format(language, version_major, version_minor)
|
2020-03-04 23:24:13 -05:00
|
|
|
|
|
|
|
|
2024-01-25 13:56:36 +11:00
|
|
|
def _bl_info_basis():
    """Return a fresh dict holding the default value for every `bl_info` key."""
    return dict(
        name="",
        author="",
        version=(),
        blender=(),
        location="",
        description="",
        doc_url="",
        support='COMMUNITY',
        category="",
        warning="",
        show_expanded=False,
    )
|
|
|
|
|
|
|
|
|
PyAPI: use keyword only arguments
Use keyword only arguments for the following functions.
- addon_utils.module_bl_info 2nd arg `info_basis`.
- addon_utils.modules 1st `module_cache`, 2nd arg `refresh`.
- addon_utils.modules_refresh 1st arg `module_cache`.
- bl_app_template_utils.activate 1nd arg `template_id`.
- bl_app_template_utils.import_from_id 2nd arg `ignore_not_found`.
- bl_app_template_utils.import_from_path 2nd arg `ignore_not_found`.
- bl_keymap_utils.keymap_from_toolbar.generate 2nd & 3rd args `use_fallback_keys` & `use_reset`.
- bl_keymap_utils.platform_helpers.keyconfig_data_oskey_from_ctrl 2nd arg `filter_fn`.
- bl_ui_utils.bug_report_url.url_prefill_from_blender 1st arg `addon_info`.
- bmesh.types.BMFace.copy 1st & 2nd args `verts`, `edges`.
- bmesh.types.BMesh.calc_volume 1st arg `signed`.
- bmesh.types.BMesh.from_mesh 2nd..4th args `face_normals`, `use_shape_key`, `shape_key_index`.
- bmesh.types.BMesh.from_object 3rd & 4th args `cage`, `face_normals`.
- bmesh.types.BMesh.transform 2nd arg `filter`.
- bmesh.types.BMesh.update_edit_mesh 2nd & 3rd args `loop_triangles`, `destructive`.
- bmesh.types.{BMVertSeq,BMEdgeSeq,BMFaceSeq}.sort 1st & 2nd arg `key`, `reverse`.
- bmesh.utils.face_split 4th..6th args `coords`, `use_exist`, `example`.
- bpy.data.libraries.load 2nd..4th args `link`, `relative`, `assets_only`.
- bpy.data.user_map 1st..3rd args `subset`, `key_types`, `value_types`.
- bpy.msgbus.subscribe_rna 5th arg `options`.
- bpy.path.abspath 2nd & 3rd args `start` & `library`.
- bpy.path.clean_name 2nd arg `replace`.
- bpy.path.ensure_ext 3rd arg `case_sensitive`.
- bpy.path.module_names 2nd arg `recursive`.
- bpy.path.relpath 2nd arg `start`.
- bpy.types.EditBone.transform 2nd & 3rd arg `scale`, `roll`.
- bpy.types.Operator.as_keywords 1st arg `ignore`.
- bpy.types.Struct.{keyframe_insert,keyframe_delete} 2nd..5th args `index`, `frame`, `group`, `options`.
- bpy.types.WindowManager.popup_menu 2nd & 3rd arg `title`, `icon`.
- bpy.types.WindowManager.popup_menu_pie 3rd & 4th arg `title`, `icon`.
- bpy.utils.app_template_paths 1st arg `subdir`.
- bpy.utils.app_template_paths 1st arg `subdir`.
- bpy.utils.blend_paths 1st..3rd args `absolute`, `packed`, `local`.
- bpy.utils.execfile 2nd arg `mod`.
- bpy.utils.keyconfig_set 2nd arg `report`.
- bpy.utils.load_scripts 1st & 2nd `reload_scripts` & `refresh_scripts`.
- bpy.utils.preset_find 3rd & 4th args `display_name`, `ext`.
- bpy.utils.resource_path 2nd & 3rd arg `major`, `minor`.
- bpy.utils.script_paths 1st..4th args `subdir`, `user_pref`, `check_all`, `use_user`.
- bpy.utils.smpte_from_frame 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.smpte_from_seconds 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.system_resource 2nd arg `subdir`.
- bpy.utils.time_from_frame 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.time_to_frame 2nd & 3rd args `fps`, `fps_base`.
- bpy.utils.units.to_string 4th..6th `precision`, `split_unit`, `compatible_unit`.
- bpy.utils.units.to_value 4th arg `str_ref_unit`.
- bpy.utils.user_resource 2nd & 3rd args `subdir`, `create`
- bpy_extras.view3d_utils.location_3d_to_region_2d 4th arg `default`.
- bpy_extras.view3d_utils.region_2d_to_origin_3d 4th arg `clamp`.
- gpu.offscreen.unbind 1st arg `restore`.
- gpu_extras.batch.batch_for_shader 4th arg `indices`.
- gpu_extras.batch.presets.draw_circle_2d 4th arg `segments`.
- gpu_extras.presets.draw_circle_2d 4th arg `segments`.
- imbuf.types.ImBuf.resize 2nd arg `resize`.
- imbuf.write 2nd arg `filepath`.
- mathutils.kdtree.KDTree.find 2nd arg `filter`.
- nodeitems_utils.NodeCategory 3rd & 4th arg `descriptions`, `items`.
- nodeitems_utils.NodeItem 2nd..4th args `label`, `settings`, `poll`.
- nodeitems_utils.NodeItemCustom 1st & 2nd arg `poll`, `draw`.
- rna_prop_ui.draw 5th arg `use_edit`.
- rna_prop_ui.rna_idprop_ui_get 2nd arg `create`.
- rna_prop_ui.rna_idprop_ui_prop_clear 3rd arg `remove`.
- rna_prop_ui.rna_idprop_ui_prop_get 3rd arg `create`.
- rna_xml.xml2rna 2nd arg `root_rna`.
- rna_xml.xml_file_write 4th arg `skip_typemap`.
2021-06-08 18:03:14 +10:00
|
|
|
def module_bl_info(mod, *, info_basis=None):
    """
    Return the ``bl_info`` dict for the add-on module `mod`,
    creating and caching it on the module when missing.

    :arg mod: The add-on's module object.
    :arg info_basis: Optional dict of default key/values,
       falling back to ``_bl_info_basis()``.
    :return: The module's ``bl_info`` dict with defaults filled in
       and ``"_init"`` set to mark it as initialized.
    """
    if info_basis is None:
        info_basis = _bl_info_basis()

    addon_info = getattr(mod, "bl_info", {})

    # avoid re-initializing
    if "_init" in addon_info:
        return addon_info

    if not addon_info:
        # Extensions don't declare `bl_info`, generate one from the manifest.
        if mod.__name__.startswith(_ext_base_pkg_idname_with_dot):
            addon_info, filepath_toml = _bl_info_from_extension(mod.__name__, mod.__file__)
            if addon_info is None:
                # Unexpected, this is a malformed extension if meta-data can't be loaded.
                print("module_bl_info: failed to extract meta-data from", filepath_toml)
                # Continue to initialize dummy data.
                addon_info = {}

        mod.bl_info = addon_info

    # Fill in any missing keys with their default values.
    for key, value in info_basis.items():
        addon_info.setdefault(key, value)

    if not addon_info["name"]:
        addon_info["name"] = mod.__name__

    # Expand the manual-URL placeholder into a versioned URL.
    doc_url = addon_info["doc_url"]
    if doc_url:
        doc_url_prefix = "{BLENDER_MANUAL_URL}"
        if doc_url_prefix in doc_url:
            addon_info["doc_url"] = doc_url.replace(
                doc_url_prefix,
                _blender_manual_url_prefix(),
            )

    # Remove the maintainers email while it's not private, showing prominently
    # could cause maintainers to get direct emails instead of issue tracking systems.
    import re
    if "author" in addon_info:
        addon_info["author"] = re.sub(r"\s*<.*?>", "", addon_info["author"])

    # Mark as initialized so subsequent calls return early.
    addon_info["_init"] = None
    return addon_info
|
2023-08-09 20:15:34 +10:00
|
|
|
|
|
|
|
|
2024-08-05 09:49:14 +10:00
|
|
|
# -----------------------------------------------------------------------------
|
|
|
|
# Stale File Handling
|
|
|
|
#
|
|
|
|
# Notes:
|
|
|
|
# - On startup, a file exists that indicates cleanup is needed.
|
|
|
|
# In the common case the file doesn't exist.
|
|
|
|
# Otherwise module paths are scanned for files to remove.
|
|
|
|
# - Since errors resolving paths to remove could result in user data loss,
|
|
|
|
# ensure the paths are always within the (extension/add-on/app-template) directory.
|
|
|
|
# - File locking isn't used, if multiple Blender instances start at the
|
|
|
|
# same time and try to remove the same files, this won't cause errors.
|
|
|
|
# Even so, remove the checking file immediately to avoid unnecessary
|
|
|
|
# file-system access overhead for other Blender instances.
|
|
|
|
#
|
|
|
|
# For more implementation details see `_bpy_internal.extensions.stale_file_manager`.
|
|
|
|
# This mainly impacts WIN32 which can't remove open file handles, see: #77837 & #125049.
|
|
|
|
#
|
|
|
|
# Use for all systems as the problem can impact any system if file removal fails
|
|
|
|
# for any reason (typically permissions or file-system error).
|
|
|
|
|
|
|
|
def _stale_pending_filepath():
    """Return the marker file path; its existence means stale file removal is pending (handled on next launch)."""
    from os.path import join
    config_dir = _bpy.utils.user_resource('CONFIG')
    return join(config_dir, "stale-pending")
|
|
|
|
|
|
|
|
|
|
|
|
def _stale_pending_stage(debug):
    """Create the "stale-pending" marker so the next startup scans for stale files.

    NOTE: `debug` is currently unused, kept for signature consistency with related helpers.
    """
    import os

    marker = _stale_pending_filepath()
    if os.path.exists(marker):
        # Already staged, nothing to do.
        return

    try:
        os.makedirs(os.path.dirname(marker), exist_ok=True)
        # An empty file is sufficient, only its existence matters.
        with open(marker, "wb"):
            pass
    except Exception as ex:
        print("Unable to set stale files pending:", str(ex))
|
|
|
|
|
|
|
|
|
|
|
|
def _stale_file_directory_iter():
    """Yield every directory that may contain a stale-files state file."""
    import os

    # User-managed extension repositories (system repositories are excluded).
    for repo in _preferences.extensions.repos:
        if not repo.enabled:
            continue
        if repo.source == 'SYSTEM':
            continue
        yield repo.directory

    # Skip `addons_core` because these add-ons will never be uninstalled by the user.
    yield from paths()[1:]

    # The `local_dir`, for wheels.
    yield os.path.join(_bpy.utils.user_resource('EXTENSIONS'), ".local")

    # The `path_app_templates`, for user app-templates.
    yield _bpy.utils.user_resource(
        'SCRIPTS',
        path=os.path.join("startup", "bl_app_templates_user"),
        create=False,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def _stale_pending_check_and_remove_once():
    """
    Remove previously staged stale files (runs once, early on startup).

    Scans all stale-file directories when (and only when) the "stale-pending"
    marker exists, removing the marker once every staged file has been cleared.
    """
    # This runs on every startup, early exit if no stale data removal is staged.
    import os
    stale_pending_filepath = _stale_pending_filepath()
    if not os.path.exists(stale_pending_filepath):
        return

    # Some stale data needs to be removed, this is an exceptional case.
    # Allow for slower logic than is typically accepted on startup.
    from _bpy_internal.extensions.stale_file_manager import StaleFiles
    debug = _bpy.app.debug_python

    # Remove the pending file if all are removed.
    is_empty = True

    for dirpath in _stale_file_directory_iter():
        # Only directories containing a stale-state file need processing.
        if not os.path.exists(os.path.join(dirpath, _stale_filename)):
            continue

        try:
            stale_handle = StaleFiles(
                base_directory=dirpath,
                stale_filename=_stale_filename,
                debug=debug,
            )
            stale_handle.state_load(check_exists=True)
            if not stale_handle.is_empty():
                stale_handle.state_remove_all()
                # Some files could still not be removed, keep the pending marker.
                if not stale_handle.is_empty():
                    is_empty = False
            if stale_handle.is_modified():
                stale_handle.state_store(check_exists=False)
        except Exception as ex:
            # NOTE: fixed duplicated word in this message ("is is" -> "is").
            print("Unexpected error clearing stale data, this is a bug!", str(ex))

    if is_empty:
        try:
            os.remove(stale_pending_filepath)
        except Exception as ex:
            if debug:
                print("Failed to remove stale-pending file:", str(ex))
|
|
|
|
|
|
|
|
|
|
|
|
def stale_pending_stage_paths(path_base, paths):
    """
    Stage `paths` (which could not be deleted now) for removal on the next startup.

    - `path_base` must be a directory iterated over by `_stale_file_directory_iter`.
      Otherwise the stale files will never be removed.
    - `paths` must be absolute paths which could not be removed.
      They must be located within `path_base` otherwise they cannot be removed.
    """
    from _bpy_internal.extensions.stale_file_manager import StaleFiles

    debug = _bpy.app.debug_python

    stale_handle = StaleFiles(
        base_directory=path_base,
        stale_filename=_stale_filename,
        debug=debug,
    )
    # Already checked.
    if stale_handle.state_load_add_and_store(paths=paths):
        # Force clearing stale files on next restart.
        _stale_pending_stage(debug)
|
|
|
|
|
|
|
|
|
Fix #128175: Updating extension can remove the extension on WIN32
On Windows an entire directory may be locked when any files inside it
are opened by another process. This can cause operations that
recursively remove a directory (uninstalling & updating) to fail
with a partially removed extension.
The case of uninstalling was already handled, where failure to remove
a directory would stage the extension for later removal.
In the case of updating however, the user could be left with a broken
(partially removed) extension where some files were removed, as the
directory was locked, the update would fail to extract new files.
Address this issue by renaming the directory before recursive removal.
The following logic has been implemented:
- If any files in the directory are locked, renaming will fail.
So even though the operation fails the extension is left intact.
- If renaming succeeds, it's possible to apply the update.
While it's possible (albeit unlikely) recursive removal fails,
which could be caused by file-system permissions issues corruption or
a process could open a file between rename & removal.
In this case the renamed directory is staged for later removal.
Other changes:
- Resolve a related problem where the user could install an
extension previously staged for removal, now installing an extension
ensures it is not removed later.
This would occur if uninstalling failed, the user resolves
directory-lock, uninstalls again, then re-installs the extension.
- When an extension fails to be removed, don't attempt to remove
user configuration for that extension.
Prefer to keep the extension & its settings in their "current state"
if it can't be removed.
2024-10-03 12:15:27 +10:00
|
|
|
def stale_pending_remove_paths(path_base, paths):
    """Un-stage `paths` previously staged for removal (the reverse of `stale_pending_stage_paths`)."""
    from _bpy_internal.extensions.stale_file_manager import StaleFiles

    handle = StaleFiles(
        base_directory=path_base,
        stale_filename=_stale_filename,
        debug=_bpy.app.debug_python,
    )
    # Even when entries are removed, don't attempt to reverse the `_stale_pending_stage` call.
    # This is not trivial since other repositories may need to be cleared.
    # There will be a minor performance hit on restart but this is enough
    # of a corner case that it's not worth attempting to calculate if
    # removal of pending files is needed or not.
    handle.state_load_remove_and_store(paths=paths)
|
|
|
|
|
|
|
|
|
2024-07-01 15:08:14 +10:00
|
|
|
# -----------------------------------------------------------------------------
|
|
|
|
# Extension Pre-Flight Compatibility Check
|
|
|
|
#
|
|
|
|
# Check extension compatibility on startup so any extensions which are incompatible with Blender are marked as
|
|
|
|
# incompatible and won't be loaded. This cache avoids having to scan all extensions on *every* startup.
|
|
|
|
#
|
|
|
|
# Implementation:
|
|
|
|
#
|
|
|
|
# The emphasis for this cache is to have minimum overhead for the common case where:
|
|
|
|
# - The simple case where there are no extensions enabled (running tests, background tasks etc).
|
|
|
|
# - The more involved case where extensions are enabled and have not changed since last time Blender started.
|
|
|
|
# In this case do as little as possible since it runs on every startup, the following steps are unavoidable.
|
|
|
|
# - When reading compatibility cache, then run the following tests, regenerating when changes are detected.
|
|
|
|
# - Compare with previous blender version/platform.
|
|
|
|
# - Stat the manifests of all enabled extensions, testing that their modification-time and size are unchanged.
|
|
|
|
# - When any changes are detected,
|
|
|
|
# regenerate compatibility information which does more expensive operations
|
|
|
|
# (loading manifests, check version ranges etc).
|
|
|
|
#
|
|
|
|
# Other notes:
|
|
|
|
#
|
|
|
|
# - This internal format may change at any point, regenerating the cache should be reasonably fast
|
|
|
|
# but may introduce a small but noticeable pause on startup for user configurations that contain many extensions.
|
|
|
|
# - Failure to load will simply ignore the file and regenerate the file as needed.
|
|
|
|
#
|
|
|
|
# Format:
|
|
|
|
#
|
|
|
|
# - The cache is ZLIB compressed pickled Python dictionary.
|
|
|
|
# - The dictionary keys are as follows:
|
|
|
|
# `"blender": (bpy.app.version, platform.system(), platform.machine(), python_version, magic_number)`
|
|
|
|
# `"filesystem": [(repo_module, pkg_id, manifest_time, manifest_size), ...]`
|
|
|
|
# `"incompatible": {(repo_module, pkg_id): "Reason for being incompatible", ...}`
|
|
|
|
#
|
|
|
|
|
|
|
|
|
|
|
|
def _pickle_zlib_file_read(filepath):
    """Read and return a pickled object from the GZIP compressed file at `filepath`.

    NOTE: only ever used on Blender's own cache files
    (un-pickling untrusted data would be unsafe).
    """
    import pickle
    import gzip

    with gzip.GzipFile(filepath, "rb") as fh:
        return pickle.load(fh)
|
|
|
|
|
|
|
|
|
|
|
|
def _pickle_zlib_file_write(filepath, data) -> None:
    """Pickle `data` and write it GZIP compressed (maximum compression) to `filepath`."""
    import pickle
    import gzip

    with gzip.GzipFile(filepath, "wb", compresslevel=9) as fh:
        fh.write(pickle.dumps(data))
|
|
|
|
|
|
|
|
|
|
|
|
def _extension_repos_module_to_directory_map():
    """Map each enabled extension repository's module name to its directory."""
    result = {}
    for repo in _preferences.extensions.repos:
        if repo.enabled:
            result[repo.module] = repo.directory
    return result
|
|
|
|
|
|
|
|
|
|
|
|
def _extension_compat_cache_update_needed(
        cache_data,  # `dict[str, Any]`
        blender_id,  # `tuple[Any, ...]`
        extensions_enabled,  # `set[tuple[str, str]]`
        print_debug,  # `Callable[[Any], None] | None`
):  # `-> bool`
    """
    Return true when the compatibility cache must be regenerated:
    when Blender itself changed, the set of enabled extensions changed,
    or any enabled extension's manifest time/size changed on disk.
    """

    # Detect when Blender itself changes.
    if cache_data.get("blender") != blender_id:
        if print_debug is not None:
            print_debug("blender changed")
        return True

    # Detect when any of the extensions paths change.
    cache_filesystem = cache_data.get("filesystem", [])

    # Avoid touching the file-system if at all possible.
    # When the length is the same and all cached ID's are in this set, we can be sure they are a 1:1 match.
    if len(cache_filesystem) != len(extensions_enabled):
        if print_debug is not None:
            print_debug("length changes ({:d} -> {:d}).".format(len(cache_filesystem), len(extensions_enabled)))
        return True

    from os import stat
    from os.path import join
    repos_module_to_directory_map = _extension_repos_module_to_directory_map()

    for repo_module, pkg_id, cache_stat_time, cache_stat_size in cache_filesystem:
        if (repo_module, pkg_id) not in extensions_enabled:
            if print_debug is not None:
                print_debug("\"{:s}.{:s}\" no longer enabled.".format(repo_module, pkg_id))
            return True

        if repo_directory := repos_module_to_directory_map.get(repo_module, ""):
            pkg_manifest_filepath = join(repo_directory, pkg_id, _ext_manifest_filename_toml)
        else:
            pkg_manifest_filepath = ""

        # It's possible an extension has been set as an add-on but cannot find the repository it came from.
        # In this case behave as if the file can't be found (because it can't) instead of ignoring it.
        # This is done because it's important to match.
        if pkg_manifest_filepath:
            try:
                statinfo = stat(pkg_manifest_filepath)
            except Exception:
                statinfo = None
        else:
            statinfo = None

        # A missing manifest stats as zero time/size (matching the values cached for a missing file).
        if statinfo is None:
            test_time = 0
            test_size = 0
        else:
            test_time = statinfo.st_mtime
            test_size = statinfo.st_size

        # Detect changes to any files manifest.
        if cache_stat_time != test_time:
            if print_debug is not None:
                print_debug("\"{:s}.{:s}\" time changed ({:g} -> {:g}).".format(
                    repo_module, pkg_id, cache_stat_time, test_time,
                ))
            return True
        if cache_stat_size != test_size:
            if print_debug is not None:
                print_debug("\"{:s}.{:s}\" size changed ({:d} -> {:d}).".format(
                    repo_module, pkg_id, cache_stat_size, test_size,
                ))
            return True

    return False
|
|
|
|
|
|
|
|
|
|
|
|
# This function should not run every startup, so it can afford to be slower,
|
|
|
|
# although users should not have to wait for it either.
|
|
|
|
# This function should not run every startup, so it can afford to be slower,
# although users should not have to wait for it either.
def _extension_compat_cache_create(
        blender_id,  # `tuple[Any, ...]`
        extensions_enabled,  # `set[tuple[str, str]]`
        wheel_list,  # `list[tuple[str, list[str]]]`
        print_debug,  # `Callable[[Any], None] | None`
):  # `-> dict[str, Any]`
    """
    Build and return fresh compatibility cache data for all enabled extensions.

    NOTE(review): `wheel_list` is passed through to
    `bl_pkg.manifest_compatible_with_wheel_data_or_error` which presumably
    extends it with wheel data - confirm against that function.
    """
    import os
    from os.path import join

    filesystem = []
    incompatible = {}

    cache_data = {
        "blender": blender_id,
        "filesystem": filesystem,
        "incompatible": incompatible,
    }

    repos_module_to_directory_map = _extension_repos_module_to_directory_map()

    # Only import this module once (if at all).
    bl_pkg = None

    for repo_module, pkg_id in extensions_enabled:
        if repo_directory := repos_module_to_directory_map.get(repo_module, ""):
            pkg_manifest_filepath = join(repo_directory, pkg_id, _ext_manifest_filename_toml)
        else:
            pkg_manifest_filepath = ""
            if print_debug is not None:
                print_debug("directory for module \"{:s}\" not found!".format(repo_module))

        if pkg_manifest_filepath:
            try:
                statinfo = os.stat(pkg_manifest_filepath)
            except Exception:
                statinfo = None
                if print_debug is not None:
                    print_debug("unable to find \"{:s}\"".format(pkg_manifest_filepath))
        else:
            statinfo = None

        if statinfo is None:
            # Missing manifests are cached with zero time/size.
            test_time = 0.0
            test_size = 0
        else:
            test_time = statinfo.st_mtime
            test_size = statinfo.st_size
            # Store the reason for failure, to print when attempting to load.

            # Only load the module once.
            if bl_pkg is None:
                # Without `bl_pkg.__time__` this will detect as having been changed and
                # reload the module when loading the add-on.
                import bl_pkg
                if getattr(bl_pkg, "__time__", 0) == 0:
                    try:
                        bl_pkg.__time__ = os.path.getmtime(bl_pkg.__file__)
                    except Exception as ex:
                        if print_debug is not None:
                            print_debug(str(ex))

            if (error := bl_pkg.manifest_compatible_with_wheel_data_or_error(
                    pkg_manifest_filepath,
                    repo_module,
                    pkg_id,
                    repo_directory,
                    wheel_list,
            )) is not None:
                incompatible[(repo_module, pkg_id)] = error

        filesystem.append((repo_module, pkg_id, test_time, test_size))

    return cache_data
|
|
|
|
|
|
|
|
|
|
|
|
def _initialize_extensions_compat_ensure_up_to_date(extensions_directory, extensions_enabled, print_debug):
    """
    Ensure the extension compatibility cache matches this Blender build and
    the currently enabled extensions, regenerating the cache when out of date,
    and update the global `_extensions_incompatible` from it.

    :arg extensions_directory: Root extensions directory (the cache lives in its ".cache" sub-directory).
    :arg extensions_enabled: Set of ``(repo_module, pkg_id)`` pairs for enabled extensions.
    :arg print_debug: Optional callable for debug messages (None to disable).
    :return: ``(updated, wheel_list)`` - `updated` is true when the cache was regenerated,
       `wheel_list` holds wheel data collected during regeneration (empty otherwise).
    """
    import os
    import platform
    import sys

    global _extensions_incompatible

    updated = False
    wheel_list = []

    # Number to bump to change this format and force re-generation.
    magic_number = 0

    # Identifies this Blender build & Python version; any change invalidates the cache.
    blender_id = (_bpy.app.version, platform.system(), platform.machine(), sys.version_info[0:2], magic_number)

    filepath_compat = os.path.join(extensions_directory, ".cache", "compat.dat")

    # Cache data contains a dict of:
    # {
    #     "blender": (...)
    #     "paths": [path data to detect changes]
    #     "incompatible": {set of incompatible extensions}
    # }
    if os.path.exists(filepath_compat):
        try:
            cache_data = _pickle_zlib_file_read(filepath_compat)
        except Exception as ex:
            cache_data = None
            # While this should not happen continuously (that would point to writing invalid cache),
            # it is not a problem if there is some corruption with the cache and it needs to be re-generated.
            # Show a message since this should be a rare occurrence - if it happens often it's likely to be a bug.
            print("Extensions: reading cache failed ({:s}), creating...".format(str(ex)))
    else:
        cache_data = None
        if print_debug is not None:
            print_debug("doesn't exist, creating...")

    if cache_data is not None:
        # NOTE: the exception handling here is fairly paranoid and accounts for invalid values in the loaded cache.
        # An example would be values expected to be lists/dictionaries being other types (None or strings for example).
        # While this should not happen, some bad value should not prevent Blender from loading properly,
        # so report the error and regenerate cache.
        try:
            if _extension_compat_cache_update_needed(cache_data, blender_id, extensions_enabled, print_debug):
                cache_data = None
        except Exception:
            # Message fixed: previously contained a duplicated word & an
            # inconsistent "Extension:" prefix (other messages use "Extensions:").
            print("Extensions: unexpected error reading cache, this is a bug! (regenerating)")
            import traceback
            traceback.print_exc()
            cache_data = None

    if cache_data is None:
        cache_data = _extension_compat_cache_create(blender_id, extensions_enabled, wheel_list, print_debug)
        try:
            os.makedirs(os.path.dirname(filepath_compat), exist_ok=True)
            _pickle_zlib_file_write(filepath_compat, cache_data)
            if print_debug is not None:
                print_debug("update written to disk.")
        except Exception as ex:
            # Should be rare but should not cause this function to fail.
            print("Extensions: writing cache failed ({:s}).".format(str(ex)))

        # Set to true even when not written to disk as the run-time data *has* been updated,
        # cache will attempt to be generated next time this is called.
        updated = True
    else:
        if print_debug is not None:
            print_debug("up to date.")

    _extensions_incompatible = cache_data["incompatible"]

    return updated, wheel_list
|
|
|
|
|
|
|
|
|
2025-02-07 20:14:26 +11:00
|
|
|
def _initialize_extensions_compat_ensure_up_to_date_wheels(extensions_directory, wheel_list, debug, error_fn):
    """Synchronize wheel packages for the extensions under ``extensions_directory``."""
    import os

    # Wheels are installed into the repository's `.local` directory.
    wheels_local_dir = os.path.join(extensions_directory, ".local")
    _extension_sync_wheels(
        local_dir=wheels_local_dir,
        wheel_list=wheel_list,
        debug=debug,
        error_fn=error_fn,
    )
|
|
|
|
|
|
|
|
|
2024-08-16 18:01:48 +10:00
|
|
|
def _initialize_extensions_compat_data(
        extensions_directory,  # `str`
        *,
        ensure_wheels,  # `bool`
        addon_modules_pending,  # `Sequence[str] | None`
        use_startup_fastpath,  # `bool`
        error_fn,  # `Callable[[Exception], None] | None`
):
    # Refresh the global `_extensions_incompatible` map for the currently enabled
    # extensions and (optionally) synchronize their wheels.
    #
    # WARNING: this function must *never* raise an exception because it would interfere with low level initialization.
    # As the function deals with file IO, use what are typically over zealous exception checks so as to rule out
    # interfering with Blender loading properly in unexpected cases such as disk-full, read-only file-system
    # or any other rare but possible scenarios.

    _extensions_incompatible.clear()

    # Create a set of all extension ID's: `{(repo_module, pkg_id), ...}`.
    extensions_enabled = set()
    extensions_prefix_len = len(_ext_base_pkg_idname_with_dot)
    for addon in _preferences.addons:
        module_name = addon.module
        if check_extension(module_name):
            extensions_enabled.add(module_name[extensions_prefix_len:].partition(".")[0::2])

    # Include extensions that are about to be enabled (not yet in the preferences).
    if addon_modules_pending is not None:
        for module_name in addon_modules_pending:
            if check_extension(module_name):
                extensions_enabled.add(module_name[extensions_prefix_len:].partition(".")[0::2])

    debug = _bpy.app.debug_python
    print_debug = (lambda *args, **kwargs: print("Extension version cache:", *args, **kwargs)) if debug else None

    # Early exit, use for automated tests.
    # Avoid (relatively) expensive file-system scanning if at all possible.
    #
    # - On startup when there are no extensions enabled, scanning and synchronizing wheels
    #   adds unnecessary overhead. Especially considering this will run for automated tasks.
    # - When disabling an add-on from the UI, there may be no extensions enabled afterwards,
    #   however the extension that was disabled may have had wheels installed which must be removed,
    #   so in this case it's important not to skip synchronizing wheels, see: #125958.
    if use_startup_fastpath and (not extensions_enabled):
        if print_debug is not None:
            print_debug("no extensions, skipping cache data.")
        return

    # While this isn't expected to fail, any failure here is a bug
    # but it should not cause Blender's startup to fail.
    try:
        updated, wheel_list = _initialize_extensions_compat_ensure_up_to_date(
            extensions_directory,
            extensions_enabled,
            print_debug,
        )
    except Exception:
        # Fixed message (previously contained a duplicated "is is").
        print("Extension: unexpected error detecting cache, this is a bug!")
        import traceback
        traceback.print_exc()
        updated = False

    if ensure_wheels:
        if updated:
            # Provide a default error handler when the caller didn't supply one.
            if error_fn is None:
                def error_fn(ex):
                    print("Error:", str(ex))

            try:
                _initialize_extensions_compat_ensure_up_to_date_wheels(
                    extensions_directory,
                    wheel_list,
                    debug,
                    error_fn=error_fn,
                )
            except Exception:
                # Fixed message (previously contained a duplicated "is is").
                print("Extension: unexpected error updating wheels, this is a bug!")
                import traceback
                traceback.print_exc()
|
|
|
|
|
|
|
|
|
2024-01-25 13:56:36 +11:00
|
|
|
# -----------------------------------------------------------------------------
|
|
|
|
# Extension Utilities
|
|
|
|
|
2024-06-04 16:21:25 +02:00
|
|
|
def _version_int_left_digits(x):
    """Return the integer value of the leading digits of *x* (0 when there are none)."""
    end = len(x)
    for index, ch in enumerate(x):
        if not ch.isdigit():
            end = index
            break
    digits = x[:end]
    return int(digits) if digits else 0
|
|
|
|
|
|
|
|
|
2024-05-16 09:02:21 +10:00
|
|
|
def _bl_info_from_extension(mod_name, mod_path):
    # Extract the `bl_info` from an extensions manifest.
    # This is returned as a module which has a `bl_info` variable.
    # When support for non-extension add-ons is dropped (Blender v5.0 perhaps)
    # this can be updated not to use a fake module.
    #
    # Returns `(bl_info, filepath_toml)` on success, `(None, filepath_toml)` on any
    # failure (a message is printed, no exception is raised).
    import os
    import tomllib

    bl_info = _bl_info_basis()

    filepath_toml = os.path.join(os.path.dirname(mod_path), _ext_manifest_filename_toml)
    try:
        with open(filepath_toml, "rb") as fh:
            data = tomllib.load(fh)
    except FileNotFoundError:
        print("Warning: add-on missing manifest, this can cause poor performance!:", repr(filepath_toml))
        return None, filepath_toml
    except Exception as ex:
        print("Error:", str(ex), "in", filepath_toml)
        return None, filepath_toml

    # This isn't a full validation which happens on package install/update.
    if (value := data.get("name", None)) is None:
        print("Error: missing \"name\" in", filepath_toml)
        return None, filepath_toml
    if type(value) is not str:
        print("Error: \"name\" is not a string in", filepath_toml)
        return None, filepath_toml
    bl_info["name"] = value

    if (value := data.get("version", None)) is None:
        print("Error: missing \"version\" in", filepath_toml)
        return None, filepath_toml
    if type(value) is not str:
        print("Error: \"version\" is not a string in", filepath_toml)
        return None, filepath_toml
    # Major & minor must be plain integers, the third component may carry a
    # non-numeric suffix (e.g. "1.2.3-beta") which is truncated at the first non-digit.
    try:
        value = tuple(
            (int if i < 2 else _version_int_left_digits)(x)
            for i, x in enumerate(value.split(".", 2))
        )
    except Exception as ex:
        print("Error: \"version\" is not a semantic version (X.Y.Z) in ", filepath_toml, str(ex))
        return None, filepath_toml
    bl_info["version"] = value

    if (value := data.get("blender_version_min", None)) is None:
        print("Error: missing \"blender_version_min\" in", filepath_toml)
        return None, filepath_toml
    if type(value) is not str:
        print("Error: \"blender_version_min\" is not a string in", filepath_toml)
        return None, filepath_toml
    try:
        value = tuple(int(x) for x in value.split("."))
    except Exception as ex:
        print("Error:", str(ex), "in \"blender_version_min\"", filepath_toml)
        return None, filepath_toml
    bl_info["blender"] = value

    # Only print warnings since description is not a mandatory field.
    if (value := data.get("tagline", None)) is None:
        print("Warning: missing \"tagline\" in", filepath_toml)
    elif type(value) is not str:
        print("Warning: \"tagline\" is not a string", filepath_toml)
    else:
        bl_info["description"] = value

    if (value := data.get("maintainer", None)) is None:
        # Fixed message: the manifest field being reported is "maintainer"
        # (it maps onto `bl_info["author"]`, the old message named the wrong key).
        print("Error: missing \"maintainer\" in", filepath_toml)
        return None, filepath_toml
    if type(value) is not str:
        print("Error: \"maintainer\" is not a string", filepath_toml)
        return None, filepath_toml
    bl_info["author"] = value

    bl_info["category"] = "Development"  # Dummy, will be removed.

    return bl_info, filepath_toml
|
|
|
|
|
|
|
|
|
2024-05-16 09:02:21 +10:00
|
|
|
def _fake_module_from_extension(mod_name, mod_path):
    """Build a module-like object exposing ``bl_info`` for the extension at *mod_path*."""
    import os

    bl_info, filepath_toml = _bl_info_from_extension(mod_name, mod_path)
    if bl_info is None:
        return None

    # Create a `ModuleType` instance without importing `types`.
    fake_mod = type(os)(mod_name)
    fake_mod.bl_info = bl_info
    fake_mod.__file__ = mod_path
    fake_mod.__time__ = os.path.getmtime(mod_path)

    # NOTE(@ideasman42): Add non-standard manifest variables to the "fake" module,
    # this isn't ideal as it moves further away from the return value being minimal fake-module
    # (where `__name__` and `__file__` are typically used).
    # A custom type could be used, however this needs to be done carefully
    # as all users of `addon_utils.modules(..)` need to be updated.
    fake_mod.__file_manifest__ = filepath_toml
    fake_mod.__time_manifest__ = os.path.getmtime(filepath_toml)

    return fake_mod
|
|
|
|
|
|
|
|
|
2024-07-01 09:56:24 +10:00
|
|
|
def _extension_sync_wheels(
        *,
        local_dir,  # `str`
        wheel_list,  # `list[WheelSource]`
        debug,  # `bool`
        error_fn,  # `Callable[[Exception], None]`
):  # `-> None`
    """
    Apply wheel install/remove actions under *local_dir*, staging any files
    that could not be removed, then expose the site-packages dir on ``sys.path``.
    """
    import os
    import sys
    from _bpy_internal.extensions.wheel_manager import apply_action

    site_packages_dir = os.path.join(
        local_dir,
        "lib",
        "python{:d}.{:d}".format(sys.version_info.major, sys.version_info.minor),
        "site-packages",
    )

    # Files which could not be removed (e.g. locked on some platforms),
    # collected so they can be staged for deferred removal.
    stale_paths = []

    def _collect_remove_error(filepath: str, _ex: Exception) -> None:
        stale_paths.append(filepath)

    apply_action(
        local_dir=local_dir,
        local_dir_site_packages=site_packages_dir,
        wheel_list=wheel_list,
        error_fn=error_fn,
        remove_error_fn=_collect_remove_error,
        debug=debug,
    )

    if stale_paths:
        stale_pending_stage_paths(local_dir, stale_paths)

    # Make wheel modules importable (only when the directory exists).
    if os.path.exists(site_packages_dir) and (site_packages_dir not in sys.path):
        sys.path.append(site_packages_dir)
|
|
|
|
|
|
|
|
|
2023-08-09 20:15:34 +10:00
|
|
|
# -----------------------------------------------------------------------------
|
|
|
|
# Extensions
|
|
|
|
|
2024-01-24 13:25:27 +11:00
|
|
|
def _initialize_ensure_extensions_addon():
    """Ensure the extensions UI add-on (``bl_pkg``) is enabled, enabling it on demand."""
    addon_module_name = "bl_pkg"
    if addon_module_name in _preferences.addons:
        return
    enable(addon_module_name, default_set=True, persistent=True)
|
2024-01-24 13:25:27 +11:00
|
|
|
|
|
|
|
|
2023-08-09 20:15:34 +10:00
|
|
|
# Module-like class, store singletons.
|
|
|
|
class _ext_global:
    """Module-like class storing singleton state for extension repositories."""
    __slots__ = ()

    # Store a map of `preferences.extensions.repos` -> `module_id`.
    # Only needed to detect renaming between `bpy.app.handlers.extension_repos_update_{pre & post}` events.
    #
    # The first dictionary is for enabled repositories, the second for disabled repositories
    # which can be ignored in most cases and is only needed for a module rename.
    idmap_pair = {}, {}

    # The base package created by `JunctionModuleHandle`.
    module_handle = None
|
|
|
|
|
|
|
|
|
|
|
|
# The name (in `sys.modules`) keep this short because it's stored as part of add-on modules name.
_ext_base_pkg_idname = "bl_ext"
_ext_base_pkg_idname_with_dot = _ext_base_pkg_idname + "."
# Filename of an extension's manifest (within the extension's directory).
_ext_manifest_filename_toml = "blender_manifest.toml"


def _extension_module_name_decompose(package):
    """
    Return the repository module name and the extension ID from an
    extension's module name (its ``__package__``).

    :arg package: The extension's module name.
    :type package: str
    :return: (repo_module_name, extension_id)
    :rtype: tuple[str, str]
    :raises ValueError: When *package* does not name a valid extension module.
    """
    prefix = _ext_base_pkg_idname_with_dot
    if not package.startswith(prefix):
        raise ValueError("The \"package\" does not name an extension")

    repo_module, _sep, pkg_idname = package[len(prefix):].partition(".")
    if not repo_module or not pkg_idname:
        raise ValueError("The \"package\" is expected to be a module name containing 3 components")

    if "." in pkg_idname:
        raise ValueError("The \"package\" is expected to be a module name containing 3 components, found {:d}".format(
            pkg_idname.count(".") + 3
        ))

    # Unlikely but possible.
    if not (repo_module.isidentifier() and pkg_idname.isidentifier()):
        raise ValueError("The \"package\" contains non-identifier characters")

    return repo_module, pkg_idname
|
|
|
|
|
|
|
|
|
2023-08-09 20:15:34 +10:00
|
|
|
def _extension_preferences_idmap():
    """Return two maps of ``repo.as_pointer() -> repo.module``: (enabled, disabled)."""
    enabled_map = {}
    disabled_map = {}
    for repo in _preferences.extensions.repos:
        target = enabled_map if repo.enabled else disabled_map
        target[repo.as_pointer()] = repo.module
    return enabled_map, disabled_map
|
2023-08-09 20:15:34 +10:00
|
|
|
|
|
|
|
|
|
|
|
def _extension_dirpath_from_preferences():
    """Return a map of ``repo.module -> repo.directory`` for enabled repositories."""
    return {
        repo.module: repo.directory
        for repo in _preferences.extensions.repos
        if repo.enabled
    }
|
|
|
|
|
|
|
|
|
|
|
|
def _extension_dirpath_from_handle():
    """Return a map of ``module_id -> dirpath`` from the run-time module handle."""
    result = {}
    for module_id, module in _ext_global.module_handle.submodule_items():
        # Account for `__path__` being unset although this should never happen
        # unless script authors meddle with the modules.
        try:
            dirpath = module.__path__[0]
        except Exception:
            dirpath = ""
        result[module_id] = dirpath
    return result
|
|
|
|
|
2024-01-19 17:19:09 +11:00
|
|
|
|
|
|
|
# Ensure the add-ons follow changes to repositories, enabling, disabling and module renaming.
def _initialize_extension_repos_post_addons_prepare(
        module_handle,
        *,
        submodules_del,
        submodules_add,
        submodules_rename_module,
        submodules_del_disabled,
        submodules_rename_module_disabled,
):
    """
    Disable add-ons impacted by repository changes and collect the add-ons to
    re-enable after the underlying sub-modules have been manipulated.

    :arg module_handle: Handle providing ``submodule_items()`` for the run-time extension modules.
    :arg submodules_del: Repository module ID's being removed.
    :arg submodules_add: ``(module_id, dirpath)`` pairs being added.
    :arg submodules_rename_module: ``(module_id_prev, module_id_next)`` rename pairs.
    :arg submodules_del_disabled: As ``submodules_del`` for disabled repositories.
    :arg submodules_rename_module_disabled: As ``submodules_rename_module`` for disabled repositories.
    :return: ``(module_name_next, addon, default_set, persistent)`` tuples to re-enable.
    :rtype: list[tuple]
    """
    addons_to_enable = []
    # Nothing changed, no add-ons are impacted.
    if not (
            submodules_del or
            submodules_add or
            submodules_rename_module or
            submodules_del_disabled or
            submodules_rename_module_disabled
    ):
        return addons_to_enable

    # All preferences info.
    # Map: `repo_id -> {submodule_id -> addon, ...}`.
    addon_userdef_info = {}
    for addon in _preferences.addons:
        module = addon.module
        if not module.startswith(_ext_base_pkg_idname_with_dot):
            continue
        module_id, submodule_id = module[len(_ext_base_pkg_idname_with_dot):].partition(".")[0::2]
        try:
            addon_userdef_info[module_id][submodule_id] = addon
        except KeyError:
            addon_userdef_info[module_id] = {submodule_id: addon}

    # All run-time info.
    # Map: `module_id -> {submodule_id -> module, ...}`.
    addon_runtime_info = {}
    for module_id, repo_module in module_handle.submodule_items():
        extensions_info = {}
        for submodule_id in dir(repo_module):
            if submodule_id.startswith("_"):
                continue
            mod = getattr(repo_module, submodule_id)
            # Filter out non add-on, non-modules.
            if not hasattr(mod, "__addon_enabled__"):
                continue
            extensions_info[submodule_id] = mod
        addon_runtime_info[module_id] = extensions_info
        del extensions_info

    # Apply changes to add-ons.
    if submodules_add:
        # Re-enable add-ons that exist in the user preferences,
        # this lets the add-ons state be restored when toggling a repository.
        for module_id, _dirpath in submodules_add:
            repo_userdef = addon_userdef_info.get(module_id, {})
            repo_runtime = addon_runtime_info.get(module_id, {})

            for submodule_id, addon in repo_userdef.items():
                module_name_next = "{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, module_id, submodule_id)
                # Only default & persistent add-ons are kept for re-activation.
                default_set = True
                persistent = True
                addons_to_enable.append((module_name_next, addon, default_set, persistent))

    # Renamed repositories: disable under the old name, queue re-enabling under the new name.
    for module_id_prev, module_id_next in submodules_rename_module:
        repo_userdef = addon_userdef_info.get(module_id_prev, {})
        repo_runtime = addon_runtime_info.get(module_id_prev, {})
        for submodule_id, mod in repo_runtime.items():
            if not getattr(mod, "__addon_enabled__", False):
                continue
            module_name_prev = "{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, module_id_prev, submodule_id)
            module_name_next = "{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, module_id_next, submodule_id)
            disable(module_name_prev, default_set=False, refresh_handled=True)
            addon = repo_userdef.get(submodule_id)
            default_set = addon is not None
            persistent = getattr(mod, "__addon_persistent__", False)
            addons_to_enable.append((module_name_next, addon, default_set, persistent))

    for module_id_prev, module_id_next in submodules_rename_module_disabled:
        repo_userdef = addon_userdef_info.get(module_id_prev, {})
        repo_runtime = addon_runtime_info.get(module_id_prev, {})
        for submodule_id, addon in repo_userdef.items():
            mod = repo_runtime.get(submodule_id)
            if mod is not None and getattr(mod, "__addon_enabled__", False):
                continue
            # Either there is no run-time data or the module wasn't enabled.
            # Rename the add-on without enabling it so the next time it's enabled it's preferences are kept.
            module_name_next = "{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, module_id_next, submodule_id)
            addon.module = module_name_next

    if submodules_del:
        repo_module_map = {repo.module: repo for repo in _preferences.extensions.repos}
        for module_id in submodules_del:
            repo_userdef = addon_userdef_info.get(module_id, {})
            repo_runtime = addon_runtime_info.get(module_id, {})

            repo = repo_module_map.get(module_id)
            default_set = True
            if repo and not repo.enabled:
                # The repository exists but has been disabled, keep the add-on preferences
                # because the user may want to re-enable the repository temporarily.
                default_set = False

            for submodule_id, mod in repo_runtime.items():
                module_name_prev = "{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, module_id, submodule_id)
                disable(module_name_prev, default_set=default_set, refresh_handled=True)
            del repo
        del repo_module_map

    if submodules_del_disabled:
        for module_id_prev in submodules_del_disabled:
            repo_userdef = addon_userdef_info.get(module_id_prev, {})
            for submodule_id in repo_userdef.keys():
                module_name_prev = "{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, module_id_prev, submodule_id)
                disable(module_name_prev, default_set=True, refresh_handled=True)

    return addons_to_enable
|
|
|
|
|
|
|
|
|
|
|
|
# Enable add-ons after the modules have been manipulated.
def _initialize_extension_repos_post_addons_restore(addons_to_enable):
    """Enable the add-ons queued by ``_initialize_extension_repos_post_addons_prepare``."""
    if not addons_to_enable:
        return

    # Important to refresh wheels & compatibility data before enabling.
    pending_modules = [module_name for (module_name, _addon, _default_set, _persistent) in addons_to_enable]
    extensions_refresh(addon_modules_pending=pending_modules)

    enable_failed = False
    for module_name, addon, default_set, persistent in addons_to_enable:
        # Ensure the preferences are kept.
        if addon is not None:
            addon.module = module_name
        if enable(module_name, default_set=default_set, persistent=persistent) is None:
            enable_failed = True

    # Remove wheels for any add-ons that failed to enable.
    if enable_failed:
        extensions_refresh()

    # Needed for module rename.
    _is_first_reset()
|
2024-01-19 17:19:09 +11:00
|
|
|
|
|
|
|
|
2023-08-09 20:15:34 +10:00
|
|
|
# Use `bpy.app.handlers.extension_repos_update_{pre/post}` to track changes to extension repositories
|
|
|
|
# and sync the changes to the Python module.
|
|
|
|
|
|
|
|
|
|
|
|
@_bpy.app.handlers.persistent
def _initialize_extension_repos_pre(*_):
    # Snapshot the repository id-map before preferences change, so the matching
    # `_initialize_extension_repos_post` handler can detect renames/additions/removals.
    _ext_global.idmap_pair = _extension_preferences_idmap()
|
2023-08-09 20:15:34 +10:00
|
|
|
|
|
|
|
|
|
|
|
@_bpy.app.handlers.persistent
def _initialize_extension_repos_post(*_, is_first=False):
    """
    Sync the extension sub-modules (and add-ons using them) with the repositories
    in the preferences, after repository data has changed.

    Compares the state captured by ``_initialize_extension_repos_pre`` with the
    current preferences to detect added, removed & renamed repositories.
    """

    # When enabling extensions for the first time, ensure the add-on is enabled.
    _initialize_ensure_extensions_addon()

    # On first initialization there are no previously enabled add-ons to restore.
    do_addons = not is_first

    # Map `module_id` -> `dirpath`.
    repos_info_prev = _extension_dirpath_from_handle()
    repos_info_next = _extension_dirpath_from_preferences()

    # Map `repo.as_pointer()` -> `module_id`.
    repos_idmap_prev, repos_idmap_prev_disabled = _ext_global.idmap_pair
    repos_idmap_next, repos_idmap_next_disabled = _extension_preferences_idmap()

    # Map `module_id` -> `repo.as_pointer()`.
    repos_idmap_next_reverse = {value: key for key, value in repos_idmap_next.items()}

    # Mainly needed when the state of repositories changes at run-time:
    # factory settings then load preferences for example.
    #
    # Filter `repos_idmap_prev` so only items which were also in the `repos_info_prev` are included.
    # This is an awkward situation, they should be in sync, however when enabling the experimental option
    # means the preferences wont have changed, but the module will not be in sync with the preferences.
    # Support this by removing items in `repos_idmap_prev` which aren't also initialized in the managed package.
    #
    # The only situation this would be useful to keep is if we want to support renaming a package
    # that manipulates all add-ons using it, when those add-ons are in the preferences but have not had
    # their package loaded. It's possible we want to do this but is also reasonably obscure.
    for repo_id_prev, module_id_prev in list(repos_idmap_prev.items()):
        if module_id_prev not in repos_info_prev:
            del repos_idmap_prev[repo_id_prev]

    submodules_add = []  # List of module names to add: `(module_id, dirpath)`.
    submodules_del = []  # List of module names to remove: `module_id`.
    submodules_rename_module = []  # List of module names: `(module_id_src, module_id_dst)`.
    submodules_rename_dirpath = []  # List of module names: `(module_id, dirpath)`.

    renamed_prev = set()
    renamed_next = set()

    # Detect rename modules & module directories.
    for module_id_next, dirpath_next in repos_info_next.items():
        # Lookup never fails, as the "next" values use: `preferences.extensions.repos`.
        repo_id = repos_idmap_next_reverse[module_id_next]
        # Lookup may fail if this is a newly added module.
        # Don't attempt to setup `submodules_add` though as it's possible
        # the module name persists while the underlying `repo_id` changes.
        module_id_prev = repos_idmap_prev.get(repo_id)
        if module_id_prev is None:
            continue

        # Detect rename.
        if module_id_next != module_id_prev:
            submodules_rename_module.append((module_id_prev, module_id_next))
            renamed_prev.add(module_id_prev)
            renamed_next.add(module_id_next)

        # Detect `dirpath` change.
        if dirpath_next != repos_info_prev[module_id_prev]:
            submodules_rename_dirpath.append((module_id_next, dirpath_next))

    # Detect added modules.
    for module_id, dirpath in repos_info_next.items():
        if (module_id not in repos_info_prev) and (module_id not in renamed_next):
            submodules_add.append((module_id, dirpath))
    # Detect deleted modules.
    for module_id, _dirpath in repos_info_prev.items():
        if (module_id not in repos_info_next) and (module_id not in renamed_prev):
            submodules_del.append(module_id)

    if do_addons:
        submodules_del_disabled = []  # A version of `submodules_del` for disabled repositories.
        submodules_rename_module_disabled = []  # A version of `submodules_rename_module` for disabled repositories.

        # Detect deleted modules.
        for repo_id_prev, module_id_prev in repos_idmap_prev_disabled.items():
            if (
                    (repo_id_prev not in repos_idmap_next_disabled) and
                    (repo_id_prev not in repos_idmap_next)
            ):
                submodules_del_disabled.append(module_id_prev)

        # Detect rename of disabled modules.
        for repo_id_next, module_id_next in repos_idmap_next_disabled.items():
            module_id_prev = repos_idmap_prev_disabled.get(repo_id_next)
            if module_id_prev is None:
                continue
            # Detect rename.
            if module_id_next != module_id_prev:
                submodules_rename_module_disabled.append((module_id_prev, module_id_next))

        # Disable impacted add-ons & collect the ones to re-enable afterwards.
        addons_to_enable = _initialize_extension_repos_post_addons_prepare(
            _ext_global.module_handle,
            submodules_del=submodules_del,
            submodules_add=submodules_add,
            submodules_rename_module=submodules_rename_module,
            submodules_del_disabled=submodules_del_disabled,
            submodules_rename_module_disabled=submodules_rename_module_disabled,
        )
        del submodules_del_disabled, submodules_rename_module_disabled

    # Apply changes to the `_ext_base_pkg_idname` named module so it matches extension data from the preferences.
    module_handle = _ext_global.module_handle
    for module_id in submodules_del:
        module_handle.unregister_submodule(module_id)
    for module_id, dirpath in submodules_add:
        module_handle.register_submodule(module_id, dirpath)
    for module_id_prev, module_id_next in submodules_rename_module:
        module_handle.rename_submodule(module_id_prev, module_id_next)
    for module_id, dirpath in submodules_rename_dirpath:
        module_handle.rename_directory(module_id, dirpath)

    # The id-map snapshot is only valid between the pre & post handlers, clear it.
    _ext_global.idmap_pair[0].clear()
    _ext_global.idmap_pair[1].clear()

    if do_addons:
        _initialize_extension_repos_post_addons_restore(addons_to_enable)

    # Force refreshing if directory paths change.
    if submodules_del or submodules_add or submodules_rename_dirpath:
        _is_first_reset()
|
2023-08-09 20:15:34 +10:00
|
|
|
|
|
|
|
|
2024-07-01 15:08:14 +10:00
|
|
|
def _initialize_extensions_site_packages(*, extensions_directory, create=False):
    """
    Add the extensions site-packages directory to ``sys.path`` (if it exists),
    removing it when absent. Used for wheels.

    :return: The site-packages path when present, otherwise None.
    """
    import os
    import sys

    # NOTE: follow the structure of `~/.local/lib/python#.##/site-packages`
    # because some wheels contain paths pointing to parent directories,
    # referencing `../../../bin` for example - to install binaries into `~/.local/bin`,
    # so this can't simply be treated as a module directory unless those files would be excluded
    # which may interfere with the wheels functionality.
    site_packages = os.path.join(
        extensions_directory,
        ".local",
        "lib",
        "python{:d}.{:d}".format(sys.version_info.major, sys.version_info.minor),
        "site-packages",
    )

    if create:
        os.makedirs(site_packages, exist_ok=True)
        found = True
    else:
        found = os.path.exists(site_packages)

    if found:
        # Ensure the wheels `site-packages` are added before all other site-packages.
        # This is important for extensions modules get priority over system modules.
        # Without this, installing a module into the systems site-packages (`/usr/lib/python#.##/site-packages`)
        # could break an extension which already had a different version of this module installed locally.
        from site import getsitepackages
        insert_index = None
        builtin_site_packages = set(getsitepackages())
        if builtin_site_packages:
            for i, dirpath in enumerate(sys.path):
                if dirpath in builtin_site_packages:
                    insert_index = i
                    break
        if insert_index is None:
            sys.path.append(site_packages)
        else:
            sys.path.insert(insert_index, site_packages)
        return site_packages

    # Not found: ensure a previously added entry is removed.
    try:
        sys.path.remove(site_packages)
    except ValueError:
        pass
    return None
|
|
|
|
|
|
|
|
|
2023-08-09 20:15:34 +10:00
|
|
|
def _initialize_extensions_repos_once():
    from _bpy_internal.extensions.junction_module import JunctionModuleHandle

    # Create the top-level package (`_ext_base_pkg_idname`) which all
    # extensions are registered into as sub-modules.
    junction = JunctionModuleHandle(_ext_base_pkg_idname)
    junction.register_module()
    _ext_global.module_handle = junction

    extensions_directory = _bpy.utils.user_resource('EXTENSIONS')

    # Make wheels bundled with extensions importable (when found).
    _initialize_extensions_site_packages(extensions_directory=extensions_directory)

    # Load extension compatibility data & ensure it matches the manifests.
    _initialize_extensions_compat_data(
        extensions_directory,
        ensure_wheels=True,
        addon_modules_pending=None,
        use_startup_fastpath=True,
        # Runs on startup, fall back to printing.
        error_fn=None,
    )

    # First-time repository setup.
    # `_initialize_extension_repos_pre` is intentionally not called here:
    # as this is the first time, the previous state is not useful to read.
    _initialize_extension_repos_post(is_first=True)

    # Internal handlers intended for Blender's own handling of repositories.
    _bpy.app.handlers._extension_repos_update_pre.append(_initialize_extension_repos_pre)
    _bpy.app.handlers._extension_repos_update_post.append(_initialize_extension_repos_post)
|
|
|
|
|
|
# -----------------------------------------------------------------------------
|
|
|
|
# Extension Public API
|
|
|
|
|
2025-02-07 20:14:26 +11:00
|
|
|
def extensions_refresh(
        ensure_wheels=True,
        addon_modules_pending=None,
        handle_error=None,
):
    """
    Ensure data relating to extensions is up to date.
    This should be called after extensions on the file-system have changed.

    :arg ensure_wheels: When true, refresh installed wheels with wheels used by extensions.
    :type ensure_wheels: bool
    :arg addon_modules_pending: Refresh these add-ons by listing their package names, as if they are enabled.
       This is needed so wheels can be setup before the add-on is enabled.
    :type addon_modules_pending: Sequence[str] | None
    :arg handle_error: Called in the case of an error, taking an exception argument.
    :type handle_error: Callable[[Exception], None] | None
    """
    # Any change to extensions on the file-system must refresh
    # `_extensions_incompatible`.
    extensions_directory = _bpy.utils.user_resource('EXTENSIONS')
    _initialize_extensions_compat_data(
        extensions_directory,
        ensure_wheels=ensure_wheels,
        addon_modules_pending=addon_modules_pending,
        use_startup_fastpath=False,
        error_fn=handle_error,
    )
|
|
|
|
|
|
|
|
def _extensions_warnings_get():
    """
    Return the shared ``{addon_module_name: [warning, ...]}`` mapping,
    computing it on the first call (cached until `_is_first_reset` runs).
    """
    # Fast path: return the cached result.
    if _extensions_warnings_get._is_first is False:
        return _extensions_warnings

    # Calculate warnings which are shown in the UI but not calculated at load time
    # because this incurs some overhead.
    #
    # Currently this checks for scripts violating policies:
    # - Adding their directories or sub-directories to `sys.path`.
    # - Loading any bundled scripts as modules directly into `sys.modules`.
    #
    # These warnings are shown:
    # - In the add-on UI.
    # - In the extension UI.
    # - When listing extensions via `blender -c extension list`.

    import sys
    import os

    _extensions_warnings_get._is_first = False
    _extensions_warnings.clear()

    # This could be empty, it just avoid a lot of redundant lookups to skip known module paths.
    dirs_skip_expected = (
        os.path.normpath(os.path.join(os.path.dirname(_bpy.__file__), "..")) + os.sep,
        os.path.normpath(os.path.join(os.path.dirname(__import__("bl_ui").__file__), "..")) + os.sep,
        os.path.normpath(os.path.dirname(os.__file__)) + os.sep,
        # Legacy add-on paths.
        *(os.path.normpath(path) + os.sep for path in paths()),
    )

    # Extension top-level module name -> its directory (with trailing separator).
    extensions_directory_map = {}
    # All other (non-extension, non-skipped) modules as (name, file) pairs.
    modules_other = []

    for module_name, module in sys.modules.items():

        if module_name == "__main__":
            continue

        module_file = getattr(module, "__file__", None) or ""
        if not module_file:
            # In most cases these are PY-CAPI modules.
            continue

        module_file = os.path.normpath(module_file)

        if module_file.startswith(dirs_skip_expected):
            continue

        if module_name.startswith(_ext_base_pkg_idname_with_dot):
            # Check this is a sub-module (an extension).
            if module_name.find(".", len(_ext_base_pkg_idname_with_dot)) != -1:
                # Ignore extension sub-modules because there is no need to handle their directories.
                # The extensions directory accounts for any paths which may be found in the sub-modules path.
                if module_name.count(".") > 2:
                    continue
                extensions_directory_map[module_name] = os.path.dirname(module_file) + os.sep
        else:
            # Any non extension module.
            modules_other.append((module_name, module_file))

    dirs_extensions = tuple(path for path in extensions_directory_map.values())
    dirs_extensions_noslash = set(path.rstrip(os.sep) for path in dirs_extensions)
    if dirs_extensions:
        # Detect top level modules loaded from inside an extension's directory
        # (policy violation: bundled scripts loaded directly into `sys.modules`).
        for module_other_name, module_other_file in modules_other:
            if not module_other_file.startswith(dirs_extensions):
                continue

            # Need 2x lookups, not ideal but `str.startswith` doesn't let us know which argument matched.
            found = False
            for module_name, module_dirpath in extensions_directory_map.items():
                if not module_other_file.startswith(module_dirpath):
                    continue
                try:
                    warning_list = _extensions_warnings[module_name]
                except KeyError:
                    warning_list = _extensions_warnings[module_name] = []
                warning_list.append("Policy violation with top level module: {:s}".format(module_other_name))
                found = True
                break
            assert found

        # Detect `sys.path` entries pointing into an extension's directory
        # (policy violation: extending the module search path).
        for path in sys.path:
            path = os.path.normpath(path)
            if path.startswith(dirs_skip_expected):
                continue

            if not (path in dirs_extensions_noslash or path.startswith(dirs_extensions)):
                continue

            found = False
            for module_name, module_dirpath in extensions_directory_map.items():
                if not (path == module_dirpath.rstrip(os.sep) or path.startswith(module_dirpath)):
                    continue
                try:
                    warning_list = _extensions_warnings[module_name]
                except KeyError:
                    warning_list = _extensions_warnings[module_name] = []
                # Use an extension relative path as an absolute path may be too verbose for the UI.
                warning_list.append(
                    "Policy violation with sys.path: {:s}".format(
                        ".{:s}{:s}".format(os.sep, os.path.relpath(path, module_dirpath))
                    )
                )
                found = True
                break
            assert found

    return _extensions_warnings


# Cache guard: True until the first call to `_extensions_warnings_get`,
# set back to True by `_is_first_reset` to force recalculation.
_extensions_warnings_get._is_first = True
|
|
|
|
|
|
|
|
def _is_first_reset():
    """Invalidate all lazily initialized values so they are re-created on next access."""
    # Cached module listing (see `modules`).
    modules._is_first = True
    # Cached extension warnings (see `_extensions_warnings_get`).
    _extensions_warnings_get._is_first = True