Merge upstream changes from Marlin 2.1.2.2
@@ -9,7 +9,7 @@ if pioutil.is_pio_build():
     board = marlin.env.BoardConfig()

     def calculate_crc(contents, seed):
-        accumulating_xor_value = seed;
+        accumulating_xor_value = seed

         for i in range(0, len(contents), 4):
             value = struct.unpack('<I', contents[ i : i + 4])[0]
@@ -68,7 +68,7 @@ if pioutil.is_pio_build():
         uid_value = uuid.uuid4()
         file_key = int(uid_value.hex[0:8], 16)

-        xor_crc = 0xEF3D4323;
+        xor_crc = 0xEF3D4323

         # the input file is expected to be in chunks of 0x800
         # so round the size
@@ -123,4 +123,4 @@ if pioutil.is_pio_build():
         fwpath.unlink()

     marlin.relocate_firmware("0x08008800")
-    marlin.add_post_action(encrypt);
+    marlin.add_post_action(encrypt)
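
For reference, the calculate_crc() loop above reduces to a word-wise XOR over the input, so the trailing semicolons dropped in this file are purely cosmetic. A minimal standalone sketch (helper name invented; assumes the input length is a multiple of 4 bytes, which the 0x800-chunk rounding guarantees):

    import struct

    def xor_words(contents: bytes, seed: int) -> int:
        acc = seed
        for i in range(0, len(contents), 4):
            # Fold each little-endian 32-bit word into the accumulator
            acc ^= struct.unpack('<I', contents[i:i+4])[0]
        return acc & 0xFFFFFFFF

    assert xor_words(struct.pack('<2I', 0x12345678, 0x0000FFFF), seed=0) == 0x1234A987
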
@@ -33,7 +33,7 @@
 //
 #if ENABLED(SR_LCD_3W_NL)
   // Feature checks for SR_LCD_3W_NL
-#elif EITHER(LCD_I2C_TYPE_MCP23017, LCD_I2C_TYPE_MCP23008)
+#elif ANY(LCD_I2C_TYPE_MCP23017, LCD_I2C_TYPE_MCP23008)
   #define USES_LIQUIDTWI2
 #elif ENABLED(LCD_I2C_TYPE_PCA8574)
   #define USES_LIQUIDCRYSTAL_I2C
@@ -54,21 +54,21 @@
   #define HAS_MENU_BACKLASH
 #endif
 #if ENABLED(LCD_BED_TRAMMING)
-  #define HAS_MENU_BED_CORNERS
+  #define HAS_MENU_BED_TRAMMING
 #endif
 #if ENABLED(CANCEL_OBJECTS)
   #define HAS_MENU_CANCELOBJECT
 #endif
-#if EITHER(DELTA_CALIBRATION_MENU, DELTA_AUTO_CALIBRATION)
+#if ANY(DELTA_CALIBRATION_MENU, DELTA_AUTO_CALIBRATION)
   #define HAS_MENU_DELTA_CALIBRATE
 #endif
-#if EITHER(LED_CONTROL_MENU, CASE_LIGHT_MENU)
+#if ANY(LED_CONTROL_MENU, CASE_LIGHT_MENU)
   #define HAS_MENU_LED
 #endif
 #if ENABLED(ADVANCED_PAUSE_FEATURE)
   #define HAS_MENU_FILAMENT
 #endif
-#if ENABLED(SDSUPPORT)
+#if HAS_MEDIA
   #define HAS_MENU_MEDIA
 #endif
 #if ENABLED(MIXING_EXTRUDER)
@@ -99,7 +99,7 @@
   #define HAS_MENU_TOUCH_SCREEN
 #endif
 #if ENABLED(ASSISTED_TRAMMING_WIZARD)
-  #define HAS_MENU_TRAMMING
+  #define HAS_MENU_TRAMMING_WIZARD
 #endif
 #if ENABLED(AUTO_BED_LEVELING_UBL)
   #define HAS_MENU_UBL
@@ -5,7 +5,9 @@
 import pioutil
 if pioutil.is_pio_build():

-    import subprocess,os,re
+    import subprocess,os,re,fnmatch,glob
+    srcfilepattern = re.compile(r".*[.](cpp|c)$")
+    marlinbasedir = os.path.join(os.getcwd(), "Marlin/")
     Import("env")

     from platformio.package.meta import PackageSpec
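
The new srcfilepattern and marlinbasedir globals feed the source-filter rebuild added further down in this file. A quick sanity check of what the regex accepts (paths invented for illustration):

    import re
    srcfilepattern = re.compile(r".*[.](cpp|c)$")
    assert srcfilepattern.match("src/gcode/gcode.cpp")
    assert srcfilepattern.match("src/HAL/shared/backtrace/unwinder.c")
    assert not srcfilepattern.match("src/lcd/language/language_en.h")  # headers don't count as sources
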
@@ -53,10 +55,11 @@ if pioutil.is_pio_build():
         # Get a reference to the FEATURE_CONFIG under construction
         feat = FEATURE_CONFIG[feature]

-        # Split up passed lines on commas or newlines and iterate
-        # Add common options to the features config under construction
-        # For lib_deps replace a previous instance of the same library
-        atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
+        # Split up passed lines on commas or newlines and iterate.
+        # Take care to convert Windows '\' paths to Unix-style '/'.
+        # Add common options to the features config under construction.
+        # For lib_deps replace a previous instance of the same library.
+        atoms = re.sub(r',\s*', '\n', flines.replace('\\', '/')).strip().split('\n')
         for line in atoms:
             parts = line.split('=')
             name = parts.pop(0)
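
The added .replace('\\', '/') is the only functional change here: Windows-style paths in an INI value get normalized before the comma/newline split. A standalone check (sample INI value invented):

    import re
    flines = r"build_src_filter = +<src\lcd\menu>, extra_scripts = pre:buildroot/share/PlatformIO/scripts/common-dependencies.py"
    atoms = re.sub(r',\s*', '\n', flines.replace('\\', '/')).strip().split('\n')
    print(atoms)
    # -> ['build_src_filter = +<src/lcd/menu>',
    #     'extra_scripts = pre:buildroot/share/PlatformIO/scripts/common-dependencies.py']
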
@@ -91,7 +94,7 @@ if pioutil.is_pio_build():
                 val = None
             if val:
                 opt = mat[1].upper()
-                blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
+                blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ), 2)
                 add_to_feat_cnf(opt, val)

     def get_all_known_libs():
@@ -128,6 +131,7 @@ if pioutil.is_pio_build():
     def apply_features_config():
         load_features()
         blab("========== Apply enabled features...")
+        build_filters = ' '.join(env.GetProjectOption('build_src_filter'))
         for feature in FEATURE_CONFIG:
             if not env.MarlinHas(feature):
                 continue
@@ -174,23 +178,87 @@ if pioutil.is_pio_build():

             if 'build_src_filter' in feat:
                 blab("========== Adding build_src_filter for %s... " % feature, 2)
-                build_src_filter = ' '.join(env.GetProjectOption('build_src_filter'))
-                # first we need to remove the references to the same folder
-                my_srcs = re.findall(r'[+-](<.*?>)', feat['build_src_filter'])
-                cur_srcs = re.findall(r'[+-](<.*?>)', build_src_filter)
-                for d in my_srcs:
-                    if d in cur_srcs:
-                        build_src_filter = re.sub(r'[+-]' + d, '', build_src_filter)
-
-                build_src_filter = feat['build_src_filter'] + ' ' + build_src_filter
-                set_env_field('build_src_filter', [build_src_filter])
-                env.Replace(SRC_FILTER=build_src_filter)
+                build_filters = build_filters + ' ' + feat['build_src_filter']
+                # Just append the filter in the order that the build environment specifies.
+                # Important here is the order of entries in the "features.ini" file.

             if 'lib_ignore' in feat:
                 blab("========== Adding lib_ignore for %s... " % feature, 2)
                 lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
                 set_env_field('lib_ignore', lib_ignore)

+        build_src_filter = ""
+        if True:
+            # Build the actual equivalent build_src_filter list based on the inclusions by the features.
+            # PlatformIO doesn't do it this way, but maybe in the future....
+            cur_srcs = set()
+            # Remove the references to the same folder
+            my_srcs = re.findall(r'([+-]<.*?>)', build_filters)
+            for d in my_srcs:
+                # Assume normalized relative paths
+                plain = d[2:-1]
+                if d[0] == '+':
+                    def addentry(fullpath, info=None):
+                        relp = os.path.relpath(fullpath, marlinbasedir)
+                        if srcfilepattern.match(relp):
+                            if info:
+                                blab("Added src file %s (%s)" % (relp, str(info)), 3)
+                            else:
+                                blab("Added src file %s " % relp, 3)
+                            cur_srcs.add(relp)
+                    # Special rule: If a direct folder is specified add all files within.
+                    fullplain = os.path.join(marlinbasedir, plain)
+                    if os.path.isdir(fullplain):
+                        blab("Directory content addition for %s " % plain, 3)
+                        gpattern = os.path.join(fullplain, "**")
+                        for fname in glob.glob(gpattern, recursive=True):
+                            addentry(fname, "dca")
+                    else:
+                        # Add all the things from the pattern by GLOB.
+                        def srepl(matchi):
+                            g0 = matchi.group(0)
+                            return r"**" + g0[1:]
+                        gpattern = re.sub(r'[*]($|[^*])', srepl, plain)
+                        gpattern = os.path.join(marlinbasedir, gpattern)
+
+                        for fname in glob.glob(gpattern, recursive=True):
+                            addentry(fname)
+                else:
+                    # Special rule: If a direct folder is specified then remove all files within.
+                    def onremove(relp, info=None):
+                        if info:
+                            blab("Removed src file %s (%s)" % (relp, str(info)), 3)
+                        else:
+                            blab("Removed src file %s " % relp, 3)
+                    fullplain = os.path.join(marlinbasedir, plain)
+                    if os.path.isdir(fullplain):
+                        blab("Directory content removal for %s " % plain, 2)
+                        def filt(x):
+                            common = os.path.commonpath([plain, x])
+                            if not common == os.path.normpath(plain): return True
+                            onremove(x, "dcr")
+                            return False
+                        cur_srcs = set(filter(filt, cur_srcs))
+                    else:
+                        # Remove matching source entries.
+                        def filt(x):
+                            if not fnmatch.fnmatch(x, plain): return True
+                            onremove(x)
+                            return False
+                        cur_srcs = set(filter(filt, cur_srcs))
+            # Transform the resulting set into a string.
+            for x in cur_srcs:
+                if build_src_filter != "": build_src_filter += ' '
+                build_src_filter += "+<" + x + ">"

+            #blab("Final build_src_filter: " + build_src_filter, 3)
+        else:
+            build_src_filter = build_filters

+        # Update in PlatformIO
+        set_env_field('build_src_filter', [build_src_filter])
+        env.Replace(SRC_FILTER=build_src_filter)

     #
     # Use the compiler to get a list of all enabled features
     #
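
The srepl() helper above widens a single '*' into a recursive '**' so that PlatformIO-style filter entries match whole subtrees when handed to glob.glob(..., recursive=True). In isolation (patterns invented):

    import re

    def srepl(matchi):
        g0 = matchi.group(0)
        return r"**" + g0[1:]

    print(re.sub(r'[*]($|[^*])', srepl, 'src/lcd/*'))        # -> src/lcd/**
    print(re.sub(r'[*]($|[^*])', srepl, 'src/gcode/*.cpp'))  # -> src/gcode/**.cpp
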
@@ -213,7 +281,7 @@ if pioutil.is_pio_build():
     #
     def MarlinHas(env, feature):
         load_marlin_features()
-        r = re.compile('^' + feature + '$')
+        r = re.compile('^' + feature + '$', re.IGNORECASE)
         found = list(filter(r.match, env['MARLIN_FEATURES']))

         # Defines could still be 'false' or '0', so check
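
The practical effect of re.IGNORECASE is that feature names from features.ini no longer have to match the compiler's MARLIN_FEATURES list case-exactly. A sketch (feature list invented):

    import re
    features = ['SDSUPPORT', 'HAS_MEDIA', 'TFT_LVGL_UI']
    r = re.compile('^' + 'sdsupport' + '$', re.IGNORECASE)
    print(list(filter(r.match, features)))  # -> ['SDSUPPORT']
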
@@ -226,6 +294,8 @@ if pioutil.is_pio_build():
         elif val in env['MARLIN_FEATURES']:
             some_on = env.MarlinHas(val)

+        #blab("%s is %s" % (feature, str(some_on)), 2)
+
         return some_on

     validate_pio()
51  buildroot/share/PlatformIO/scripts/configuration.py  Normal file → Executable file
@@ -1,8 +1,9 @@
+#!/usr/bin/env python3
 #
 # configuration.py
 # Apply options from config.ini to the existing Configuration headers
 #
-import re, shutil, configparser
+import re, shutil, configparser, datetime
 from pathlib import Path

 verbose = 0
@@ -43,6 +44,7 @@ def apply_opt(name, val, conf=None):
        if val in ("on", "", None):
            newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
        elif val == "off":
+           # TODO: Comment more lines in a multi-line define with \ continuation
            newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
        else:
            # For options with values, enable and set the value
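
Those two substitutions are the whole enable/disable mechanism: one strips the leading // from a #define line, the other prepends it. A standalone check on a sample line:

    import re
    line = "//#define FILAMENT_RUNOUT_SENSOR"
    on  = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
    off = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', on)
    print(on)   # -> #define FILAMENT_RUNOUT_SENSOR
    print(off)  # -> //#define FILAMENT_RUNOUT_SENSOR
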
@@ -88,9 +90,38 @@ def apply_opt(name, val, conf=None):
            elif not isdef:
                break
        linenum += 1
-   lines.insert(linenum, f"{prefix}#define {added:30} // Added by config.ini\n")
+   currtime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+   lines.insert(linenum, f"{prefix}#define {added:30} // Added by config.ini {currtime}\n")
    fullpath.write_text(''.join(lines), encoding='utf-8')

+# Disable all (most) defined options in the configuration files.
+# Everything in the named sections. Section hint for exceptions may be added.
+def disable_all_options():
+   # Create a regex to match the option and capture parts of the line
+   regex = re.compile(r'^(\s*)(#define\s+)([A-Z0-9_]+\b)(\s?)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)
+
+   # Disable all enabled options in both Config files
+   for file in ("Configuration.h", "Configuration_adv.h"):
+       fullpath = config_path(file)
+       lines = fullpath.read_text(encoding='utf-8').split('\n')
+       found = False
+       for i in range(len(lines)):
+           line = lines[i]
+           match = regex.match(line)
+           if match:
+               name = match[3].upper()
+               if name in ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION'): continue
+               if name.startswith('_'): continue
+               found = True
+               # Comment out the define
+               # TODO: Comment more lines in a multi-line define with \ continuation
+               lines[i] = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
+               blab(f"Disable {name}")
+
+       # If the option was found, write the modified lines
+       if found:
+           fullpath.write_text('\n'.join(lines), encoding='utf-8')
+
 # Fetch configuration files from GitHub given the path.
 # Return True if any files were fetched.
 def fetch_example(url):
@@ -130,7 +161,7 @@ def fetch_example(url):
 def section_items(cp, sectkey):
    return cp.items(sectkey) if sectkey in cp.sections() else []

-# Apply all items from a config section
+# Apply all items from a config section. Ignore ini_ items outside of config:base and config:root.
 def apply_ini_by_name(cp, sect):
    iniok = True
    if sect in ('config:base', 'config:root'):
@@ -194,7 +225,7 @@ def apply_config_ini(cp):
            cp2 = configparser.ConfigParser()
            cp2.read(config_path(ckey))
            apply_sections(cp2, sect)
-           ckey = 'base';
+           ckey = 'base'

        # (Allow 'example/' as a shortcut for 'examples/')
        elif ckey.startswith('example/'):
@@ -206,7 +237,17 @@ def apply_config_ini(cp):
            fetch_example(ckey)
            ckey = 'base'

-       if ckey == 'all':
+       #
+       # [flatten] Write out Configuration.h and Configuration_adv.h files with
+       # just the enabled options and all other content removed.
+       #
+       #if ckey == '[flatten]':
+       #   write_flat_configs()
+
+       if ckey == '[disable]':
+           disable_all_options()
+
+       elif ckey == 'all':
            apply_sections(cp)

        else:
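
With [disable] handled before the named config: sections, a config.ini can ask for a clean slate and then layer options back on. A sketch of reading such a file (contents invented; Marlin's own parsing may differ in detail):

    import configparser
    cp = configparser.ConfigParser()
    cp.read_string("[config:base]\nini_use_config = [disable], example/Creality/Ender-3, base\n")
    actions = [ s.strip() for s in cp['config:base']['ini_use_config'].split(',') ]
    print(actions)  # -> ['[disable]', 'example/Creality/Ender-3', 'base']
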
@@ -14,7 +14,7 @@ if pioutil.is_pio_build():
    assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")

    def download_mks_assets():
-       print("Downloading MKS Assets")
+       print("Downloading MKS Assets for TFT_LVGL_UI")
        r = requests.get(url, stream=True)
        # the user may have a very clean workspace,
        # so create the PROJECT_LIBDEPS_DIR directory if not exists
@@ -25,7 +25,7 @@ if pioutil.is_pio_build():
                fd.write(chunk)

    def copy_mks_assets():
-       print("Copying MKS Assets")
+       print("Copying MKS Assets for TFT_LVGL_UI")
        output_path = Path(tempfile.mkdtemp())
        zip_obj = zipfile.ZipFile(zip_path, 'r')
        zip_obj.extractall(output_path)
@@ -23,7 +23,7 @@ if pioutil.is_pio_build():

    assert isfile(original_file) and isfile(src_file)
    shutil.copyfile(original_file, backup_file)
-   shutil.copyfile(src_file, original_file);
+   shutil.copyfile(src_file, original_file)

 def _touch(path):
    with open(path, "w") as fp:
@@ -5,7 +5,8 @@
 # the appropriate framework variants folder, so that its contents
 # will be picked up by PlatformIO just like any other variant.
 #
-import pioutil
+import pioutil, re
+marlin_variant_pattern = re.compile("marlin_.*")
 if pioutil.is_pio_build():
    import shutil,marlin
    from pathlib import Path
@@ -30,10 +31,11 @@ if pioutil.is_pio_build():
        }
        platform_name = framewords[platform.__class__.__name__]
    else:
-       platform_name = PackageSpec(platform_packages[0]).name
-
-   if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]:
-       platform_name = "framework-arduinoststm32"
+       spec = PackageSpec(platform_packages[0])
+       if spec.uri and '@' in spec.uri:
+           platform_name = re.sub(r'@.+', '', spec.uri)
+       else:
+           platform_name = spec.name

    FRAMEWORK_DIR = Path(platform.get_package_dir(platform_name))
    assert FRAMEWORK_DIR.is_dir()
@@ -44,15 +46,20 @@ if pioutil.is_pio_build():
    variant = board.get("build.variant")
    #series = mcu_type[:7].upper() + "xx"

-   # Prepare a new empty folder at the destination
-   variant_dir = FRAMEWORK_DIR / "variants" / variant
-   if variant_dir.is_dir():
-       shutil.rmtree(variant_dir)
-   if not variant_dir.is_dir():
-       variant_dir.mkdir()
+   # Only prepare a new variant if the PlatformIO configuration provides it (board_build.variant).
+   # This check is important to avoid deleting official board config variants.
+   if marlin_variant_pattern.match(str(variant).lower()):
+       # Prepare a new empty folder at the destination
+       variant_dir = FRAMEWORK_DIR / "variants" / variant
+       if variant_dir.is_dir():
+           shutil.rmtree(variant_dir)
+       if not variant_dir.is_dir():
+           variant_dir.mkdir()

-   # Source dir is a local variant sub-folder
-   source_dir = Path("buildroot/share/PlatformIO/variants", variant)
-   assert source_dir.is_dir()
+       # Source dir is a local variant sub-folder
+       source_dir = Path("buildroot/share/PlatformIO/variants", variant)
+       assert source_dir.is_dir()

-   marlin.copytree(source_dir, variant_dir)
+       print("Copying variant " + str(variant) + " to framework directory...")
+
+       marlin.copytree(source_dir, variant_dir)
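
The guard only fires for variants that the Marlin tree itself ships, so stock framework variants are never deleted. A quick check of the pattern (variant names invented):

    import re
    marlin_variant_pattern = re.compile("marlin_.*")
    print(bool(marlin_variant_pattern.match("marlin_STM32F407VGT6".lower())))  # True  -> replaced
    print(bool(marlin_variant_pattern.match("DISCO_F407VG".lower())))          # False -> left alone
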
@@ -32,4 +32,4 @@ if pioutil.is_pio_build():
        fw_path.rename(fws_path)

    import marlin
-   marlin.add_post_action(addboot);
+   marlin.add_post_action(addboot)
@@ -70,4 +70,4 @@ def encrypt_mks(source, target, env, new_name):
    fwpath.unlink()

 def add_post_action(action):
-   env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action);
+   env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action)
@@ -1,7 +1,7 @@
 #
 # offset_and_rename.py
 #
-# - If 'build.offset' is provided, either by JSON or by the environment...
+# - If 'board_build.offset' is provided, either by JSON or by the environment...
 #   - Set linker flag LD_FLASH_OFFSET and relocate the VTAB based on 'build.offset'.
 #   - Set linker flag LD_MAX_DATA_SIZE based on 'build.maximum_ram_size'.
 #   - Define STM32_FLASH_SIZE from 'upload.maximum_size' for use by Flash-based EEPROM emulation.
@@ -60,6 +60,10 @@ if pioutil.is_pio_build():

        def rename_target(source, target, env):
            from pathlib import Path
-           Path(target[0].path).replace(Path(target[0].dir.path, new_name))
+           from datetime import datetime
+           from os import path
+           _newpath = Path(target[0].dir.path, datetime.now().strftime(new_name.replace('{date}', '%Y%m%d').replace('{time}', '%H%M%S')))
+           Path(target[0].path).replace(_newpath)
+           env['PROGNAME'] = path.splitext(_newpath)[0]

        marlin.add_post_action(rename_target)
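
The {date} and {time} placeholders in a custom PROGNAME are translated into strftime fields before the rename, which is also what makes the separate random-bin.py script (deleted later in this commit) redundant. For example (name invented):

    from datetime import datetime
    new_name = "firmware-{date}-{time}.bin"
    fmt = new_name.replace('{date}', '%Y%m%d').replace('{time}', '%H%M%S')
    print(datetime.now().strftime(fmt))  # e.g. firmware-20240101-123456.bin
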
@@ -72,7 +72,7 @@ if pioutil.is_pio_build():
        result = check_envs("env:"+build_env, board_envs, config)

    if not result:
-       err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
+       err = "Error: Build environment '%s' is incompatible with %s. Use one of these environments: %s" % \
            ( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
        raise SystemExit(err)
@@ -90,7 +90,7 @@ if pioutil.is_pio_build():
    # Find the name.cpp.o or name.o and remove it
    #
    def rm_ofile(subdir, name):
-       build_dir = Path(env['PROJECT_BUILD_DIR'], build_env);
+       build_dir = Path(env['PROJECT_BUILD_DIR'], build_env)
        for outdir in (build_dir, build_dir / "debug"):
            for ext in (".cpp.o", ".o"):
                fpath = outdir / "src/src" / subdir / (name + ext)
@@ -1,9 +0,0 @@
-#
-# random-bin.py
-# Set a unique firmware name based on current date and time
-#
-import pioutil
-if pioutil.is_pio_build():
-   from datetime import datetime
-   Import("env")
-   env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")
@@ -2,8 +2,14 @@
 #
 # schema.py
 #
-# Used by signature.py via common-dependencies.py to generate a schema file during the PlatformIO build.
-# This script can also be run standalone from within the Marlin repo to generate all schema files.
+# Used by signature.py via common-dependencies.py to generate a schema file during the PlatformIO build
+# when CONFIG_EXPORT is defined in the configuration.
+#
+# This script can also be run standalone from within the Marlin repo to generate JSON and YAML schema files.
+#
+# This script is a companion to abm/js/schema.js in the MarlinFirmware/AutoBuildMarlin project, which has
+# been extended to evaluate conditions and can determine what options are actually enabled, not just which
+# options are uncommented. That will be migrated to this script for standalone migration.
 #
 import re,json
 from pathlib import Path
@@ -74,7 +80,26 @@ def load_boards():
    return ''

 #
-# Extract a schema from the current configuration files
+# Extract the current configuration files in the form of a structured schema.
+# Contains the full schema for the configuration files, not just the enabled options,
+# Contains the current values of the options, not just data structure, so "schema" is a slight misnomer.
+#
+# The returned object is a nested dictionary with the following indexing:
+#
+# - schema[filekey][section][define_name] = define_info
+#
+# Where the define_info contains the following keyed fields:
+# - section  = The @section the define is in
+# - name     = The name of the define
+# - enabled  = True if the define is enabled (not commented out)
+# - line     = The line number of the define
+# - sid      = A serial ID for the define
+# - value    = The value of the define, if it has one
+# - type     = The type of the define, if it has one
+# - requires = The conditions that must be met for the define to be enabled
+# - comment  = The comment for the define, if it has one
+# - units    = The units for the define, if it has one
+# - options  = The options for the define, if it has one
+#
 def extract():
    # Load board names from boards.h
@@ -85,7 +110,8 @@ def extract():
        NORMAL        = 0   # No condition yet
        BLOCK_COMMENT = 1   # Looking for the end of the block comment
        EOL_COMMENT   = 2   # EOL comment started, maybe add the next comment?
-       GET_SENSORS   = 3   # Gathering temperature sensor options
+       SLASH_COMMENT = 3   # Block-like comment, starting with aligned //
+       GET_SENSORS   = 4   # Gathering temperature sensor options
        ERROR         = 9   # Syntax error

    # List of files to process, with shorthand
@@ -94,6 +120,8 @@ def extract():
    sch_out = { 'basic':{}, 'advanced':{} }
    # Regex for #define NAME [VALUE] [COMMENT] with sanitized line
    defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
+   # Pattern to match a float value
+   flt = r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?'
    # Defines to ignore
    ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
    # Start with unknown state
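
The flt pattern recognizes C-style float literals, including an optional trailing 'f'. A quick check (values invented):

    import re
    flt = r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?'
    for v in ('4.0f', '-0.25', '1.5e-3', '42'):
        print(v, bool(re.fullmatch(flt, v)))
    # -> 4.0f True, -0.25 True, 1.5e-3 True, 42 False (no decimal point)
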
@@ -107,6 +135,7 @@ def extract():
        line_number = 0     # Counter for the line number of the file
        conditions = []     # Create a condition stack for the current file
        comment_buff = []   # A temporary buffer for comments
+       prev_comment = ''   # Copy before reset for an EOL comment
        options_json = ''   # A buffer for the most recent options JSON found
        eol_options = False # The options came from end of line, so only apply once
        join_line = False   # A flag that the line should be joined with the previous one
@@ -143,9 +172,13 @@ def extract():
            if not defmatch and the_line.startswith('//'):
                comment_buff.append(the_line[2:].strip())
            else:
-               last_added_ref['comment'] = ' '.join(comment_buff)
-               comment_buff = []
                state = Parse.NORMAL
+               cline = ' '.join(comment_buff)
+               comment_buff = []
+               if cline != '':
+                   # A (block or slash) comment was already added
+                   cfield = 'notes' if 'comment' in last_added_ref else 'comment'
+                   last_added_ref[cfield] = cline

        def use_comment(c, opt, sec, bufref):
            if c.startswith(':'): # If the comment starts with : then it has magic JSON
@@ -162,6 +195,15 @@ def extract():
                bufref.append(c)
            return opt, sec

+       # For slash comments, capture consecutive slash comments.
+       # The comment will be applied to the next #define.
+       if state == Parse.SLASH_COMMENT:
+           if not defmatch and the_line.startswith('//'):
+               use_comment(the_line[2:].strip(), options_json, section, comment_buff)
+               continue
+           else:
+               state = Parse.NORMAL
+
        # In a block comment, capture lines up to the end of the comment.
        # Assume nothing follows the comment closure.
        if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS):
@@ -178,19 +220,19 @@ def extract():
            state = Parse.NORMAL

            # Strip the leading '*' from block comments
-           if cline.startswith('*'): cline = cline[1:].strip()
+           cline = re.sub(r'^\* ?', '', cline)

            # Collect temperature sensors
            if state == Parse.GET_SENSORS:
                sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
                if sens:
                    s2 = sens[2].replace("'","''")
-                   options_json += f"{sens[1]}:'{s2}', "
+                   options_json += f"{sens[1]}:'{sens[1]} - {s2}', "

            elif state == Parse.BLOCK_COMMENT:

                # Look for temperature sensors
-               if cline == "Temperature sensors available:":
+               if re.match(r'temperature sensors.*:', cline, re.IGNORECASE):
                    state, cline = Parse.GET_SENSORS, "Temperature Sensors"

            options_json, section = use_comment(cline, options_json, section, comment_buff)
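
The reworked options string prefixes each sensor's number to its description, so a UI built from the schema can show both. In isolation (sensor line invented):

    import re
    cline = "1 : 100k thermistor - best choice for EPCOS 100k (4.7k pullup)"
    sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
    s2 = sens[2].replace("'", "''")
    print(f"{sens[1]}:'{sens[1]} - {s2}', ")
    # -> 1:'1 - 100k thermistor - best choice for EPCOS 100k (4.7k pullup)',
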
@@ -216,15 +258,19 @@ def extract():
            # Comment after a define may be continued on the following lines
            if defmatch != None and cpos > 10:
                state = Parse.EOL_COMMENT
+               prev_comment = '\n'.join(comment_buff)
                comment_buff = []
+           else:
+               state = Parse.SLASH_COMMENT

        # Process the start of a new comment
        if cpos != -1:
+           comment_buff = []
            cline, line = line[cpos+2:].strip(), line[:cpos].strip()

            if state == Parse.BLOCK_COMMENT:
                # Strip leading '*' from block comments
-               if cline.startswith('*'): cline = cline[1:].strip()
+               cline = re.sub(r'^\* ?', '', cline)
            else:
                # Expire end-of-line options after first use
                if cline.startswith(':'): eol_options = True
@@ -295,32 +341,33 @@ def extract():
                }

                # Type is based on the value
-               if val == '':
-                   value_type = 'switch'
-               elif re.match(r'^(true|false)$', val):
-                   value_type = 'bool'
-                   val = val == 'true'
-               elif re.match(r'^[-+]?\s*\d+$', val):
-                   value_type = 'int'
-                   val = int(val)
-               elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val):
-                   value_type = 'float'
-                   val = float(val.replace('f',''))
-               else:
-                   value_type = 'string' if val[0] == '"' \
-                       else 'char' if val[0] == "'" \
-                       else 'state' if re.match(r'^(LOW|HIGH)$', val) \
-                       else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
-                       else 'int[]' if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
-                       else 'float[]' if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
-                       else 'array' if val[0] == '{' \
-                       else ''
+               value_type = \
+                   'switch' if val == '' \
+                   else 'bool' if re.match(r'^(true|false)$', val) \
+                   else 'int' if re.match(r'^[-+]?\s*\d+$', val) \
+                   else 'ints' if re.match(r'^([-+]?\s*\d+)(\s*,\s*[-+]?\s*\d+)+$', val) \
+                   else 'floats' if re.match(rf'({flt}(\s*,\s*{flt})+)', val) \
+                   else 'float' if re.match(f'^({flt})$', val) \
+                   else 'string' if val[0] == '"' \
+                   else 'char' if val[0] == "'" \
+                   else 'state' if re.match(r'^(LOW|HIGH)$', val) \
+                   else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
+                   else 'int[]' if re.match(r'^{\s*[-+]?\s*\d+(\s*,\s*[-+]?\s*\d+)*\s*}$', val) \
+                   else 'float[]' if re.match(rf'^{{\s*{flt}(\s*,\s*{flt})*\s*}}$', val) \
+                   else 'array' if val[0] == '{' \
+                   else ''
+
+               val = (val == 'true') if value_type == 'bool' \
+                   else int(val) if value_type == 'int' \
+                   else val.replace('f','') if value_type == 'floats' \
+                   else float(val.replace('f','')) if value_type == 'float' \
+                   else val

                if val != '': define_info['value'] = val
                if value_type != '': define_info['type'] = value_type

                # Join up accumulated conditions with &&
-               if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))
+               if conditions: define_info['requires'] = '(' + ') && ('.join(sum(conditions, [])) + ')'

                # If the comment_buff is not empty, add the comment to the info
                if comment_buff:
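
The new 'ints' and 'floats' branches catch comma-separated scalars before the array checks. A condensed sketch of the same dispatch (using the flt pattern defined earlier):

    import re
    flt = r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?'

    def classify(val):
        return 'switch' if val == '' \
            else 'bool' if re.match(r'^(true|false)$', val) \
            else 'int' if re.match(r'^[-+]?\s*\d+$', val) \
            else 'ints' if re.match(r'^([-+]?\s*\d+)(\s*,\s*[-+]?\s*\d+)+$', val) \
            else 'floats' if re.match(rf'({flt}(\s*,\s*{flt})+)', val) \
            else 'float' if re.match(f'^({flt})$', val) \
            else 'other'

    for v in ('', 'true', '16', '8, 8', '4.0f, 4.0f', '0.3'):
        print(repr(v), '->', classify(v))
    # -> switch, bool, int, ints, floats, float
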
@@ -383,25 +430,35 @@ def main():

    if schema:

-       # Get the first command line argument
+       # Get the command line arguments after the script name
        import sys
-       if len(sys.argv) > 1:
-           arg = sys.argv[1]
-       else:
-           arg = 'some'
+       args = sys.argv[1:]
+       if len(args) == 0: args = ['some']

+       # Does the given array intersect at all with args?
+       def inargs(c): return len(set(args) & set(c)) > 0

+       # Help / Unknown option
+       unk = not inargs(['some','json','jsons','group','yml','yaml'])
+       if (unk): print(f"Unknown option: '{args[0]}'")
+       if inargs(['-h', '--help']) or unk:
+           print("Usage: schema.py [some|json|jsons|group|yml|yaml]...")
+           print("  some = json + yml")
+           print("  jsons = json + group")
+           return

        # JSON schema
-       if arg in ['some', 'json', 'jsons']:
+       if inargs(['some', 'json', 'jsons']):
            print("Generating JSON ...")
            dump_json(schema, Path('schema.json'))

        # JSON schema (wildcard names)
-       if arg in ['group', 'jsons']:
+       if inargs(['group', 'jsons']):
            group_options(schema)
            dump_json(schema, Path('schema_grouped.json'))

        # YAML
-       if arg in ['some', 'yml', 'yaml']:
+       if inargs(['some', 'yml', 'yaml']):
            try:
                import yaml
            except ImportError:
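
inargs() is a plain set intersection, which is what lets one invocation request several output formats at once. For example (hypothetical command line):

    args = ['json', 'yml']
    def inargs(c): return len(set(args) & set(c)) > 0
    print(inargs(['some', 'json', 'jsons']))  # True  -> schema.json is written
    print(inargs(['group', 'jsons']))         # False -> grouped JSON is skipped
    print(inargs(['some', 'yml', 'yaml']))    # True  -> schema.yml is written
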
416  buildroot/share/PlatformIO/scripts/signature.py  Normal file → Executable file
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 #
 # signature.py
 #
@@ -7,24 +8,54 @@ import subprocess,re,json,hashlib
 from datetime import datetime
 from pathlib import Path

-#
-# Return all macro names in a header as an array, so we can take
-# the intersection with the preprocessor output, giving a decent
-# reflection of all enabled options that (probably) came from the
-# configuration files. We end up with the actual configured state,
-# better than what the config files say. You can then use the
-# resulting config.ini to produce more exact configuration files.
-#
-def extract_defines(filepath):
+def enabled_defines(filepath):
+   '''
+   Return all enabled #define items from a given C header file in a dictionary.
+   A "#define" in a multi-line comment could produce a false positive if it's not
+   preceded by a non-space character (like * in a multi-line comment).
+
+   Output:
+   Each entry is a dictionary with a 'name' and a 'section' key. We end up with:
+       { MOTHERBOARD: { name: "MOTHERBOARD", section: "hardware" }, ... }
+
+   The 'name' key might get dropped as redundant, but it's useful for debugging.
+
+   Because the option names are the keys, only the last occurrence is retained.
+   Use the Schema class for a more complete list of options, soon with full parsing.
+
+   This list is used to filter what is actually a config-defined option versus
+   defines from elsewhere.
+
+   While the Schema class parses the configurations on its own, this script will
+   get the preprocessor output and get the intersection of the enabled options from
+   our crude scraping method and the actual compiler output.
+   We end up with the actual configured state,
+   better than what the config files say. You can then use the
+   resulting config.ini to produce more exact configuration files.
+   '''
+   outdict = {}
+   section = "user"
+   spatt = re.compile(r".*@section +([-a-zA-Z0-9_\s]+)$") # must match @section ...
+
    f = open(filepath, encoding="utf8").read().split("\n")

+   # Get the full contents of the file and remove all block comments.
+   # This will avoid false positives from #defines in comments
+   f = re.sub(r'/\*.*?\*/', '', '\n'.join(f), flags=re.DOTALL).split("\n")
-   a = []
    for line in f:
        sline = line.strip()
+       m = re.match(spatt, sline) # @section ...
+       if m:
+           section = m.group(1).strip()
+           continue
        if sline[:7] == "#define":
            # Extract the key here (we don't care about the value)
            kv = sline[8:].strip().split()
-           a.append(kv[0])
-   return a
+           outdict[kv[0]] = { 'name':kv[0], 'section': section }
+   return outdict

 # Compute the SHA256 hash of a file
 def get_file_sha256sum(filepath):
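
The DOTALL substitution in the new enabled_defines() is what keeps a '#define' inside a /* ... */ block from being scraped as enabled. A standalone check (header text invented):

    import re
    text = "/*\n#define NOT_REAL 1\n*/\n#define MOTHERBOARD BOARD_RAMPS_14_EFB\n"
    cleaned = re.sub(r'/\*.*?\*/', '', text, flags=re.DOTALL)
    found = [ l.strip() for l in cleaned.split('\n') if l.strip().startswith('#define') ]
    print(found)  # -> ['#define MOTHERBOARD BOARD_RAMPS_14_EFB']
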
@@ -43,160 +74,247 @@ def compress_file(filepath, storedname, outpath):
    with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
        zipf.write(filepath, arcname=storedname, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)

-#
-# Compute the build signature. The idea is to extract all defines in the configuration headers
-# to build a unique reversible signature from this build so it can be included in the binary
-# We can reverse the signature to get a 1:1 equivalent configuration file
-#
 def compute_build_signature(env):
-   if 'BUILD_SIGNATURE' in env:
-       return
-
-   # Definitions from these files will be kept
-   files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]
+   '''
+   Compute the build signature by extracting all configuration settings and
+   building a unique reversible signature that can be included in the binary.
+   The signature can be reversed to get a 1:1 equivalent configuration file.
+   '''
+   if 'BUILD_SIGNATURE' in env: return
+   env.Append(BUILD_SIGNATURE=1)

    build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])

-   # Check if we can skip processing
-   hashes = ''
-   for header in files_to_keep:
-       hashes += get_file_sha256sum(header)[0:10]
-
    marlin_json = build_path / 'marlin_config.json'
    marlin_zip = build_path / 'mc.zip'

-   # Read existing config file
+   # Definitions from these files will be kept
+   header_paths = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]
+
+   # Check if we can skip processing
+   hashes = ''
+   for header in header_paths:
+       hashes += get_file_sha256sum(header)[0:10]
+
+   # Read a previously exported JSON file
+   # Same configuration, skip recomputing the build signature
+   same_hash = False
    try:
        with marlin_json.open() as infile:
            conf = json.load(infile)
-           if conf['__INITIAL_HASH'] == hashes:
-               # Same configuration, skip recomputing the building signature
+           same_hash = conf['__INITIAL_HASH'] == hashes
+           if same_hash:
                compress_file(marlin_json, 'marlin_config.json', marlin_zip)
                return
    except:
        pass

-   # Get enabled config options based on preprocessor
-   from preprocessor import run_preprocessor
-   complete_cfg = run_preprocessor(env)
-
-   # Dumb #define extraction from the configuration files
+   # Extract "enabled" #define lines by scraping the configuration files.
+   # This data also contains the @section for each option.
    conf_defines = {}
-   all_defines = []
-   for header in files_to_keep:
-       defines = extract_defines(header)
-       # To filter only the define we want
-       all_defines += defines
-       # To remember from which file it cames from
-       conf_defines[header.split('/')[-1]] = defines
+   conf_names = []
+   for hpath in header_paths:
+       # Get defines in the form of { name: { name:..., section:... }, ... }
+       defines = enabled_defines(hpath)
+       # Get all unique define names into a flat array
+       conf_names += defines.keys()
+       # Remember which file these defines came from
+       conf_defines[hpath.split('/')[-1]] = defines
+
+   # Get enabled config options based on running GCC to preprocess the config files.
+   # The result is a list of line strings, each starting with '#define'.
+   from preprocessor import run_preprocessor
+   build_output = run_preprocessor(env)

    # Dumb regex to filter out some dumb macros
    r = re.compile(r"\(+(\s*-*\s*_.*)\)+")

-   # First step is to collect all valid macros
-   defines = {}
-   for line in complete_cfg:
-
-       # Split the define from the value
+   # Extract all the #define lines in the build output as key/value pairs
+   build_defines = {}
+   for line in build_output:
+       # Split the define from the value.
        key_val = line[8:].strip().decode().split(' ')
        key, value = key_val[0], ' '.join(key_val[1:])

        # Ignore values starting with two underscore, since it's low level
-       if len(key) > 2 and key[0:2] == "__" :
-           continue
-       # Ignore values containing a parenthesis (likely a function macro)
-       if '(' in key and ')' in key:
-           continue
-
+       if len(key) > 2 and key[0:2] == "__": continue
+       # Ignore values containing parentheses (likely a function macro)
+       if '(' in key and ')' in key: continue
        # Then filter dumb values
-       if r.match(value):
-           continue
+       if r.match(value): continue

-       defines[key] = value if len(value) else ""
+       build_defines[key] = value if len(value) else ""

    #
    # Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
    #
-   if not ('CONFIGURATION_EMBEDDING' in defines or 'CONFIG_EXPORT' in defines):
+   if not ('CONFIGURATION_EMBEDDING' in build_defines or 'CONFIG_EXPORT' in build_defines):
        return

-   # Second step is to filter useless macro
-   resolved_defines = {}
-   for key in defines:
+   # Filter out useless macros from the output
+   cleaned_build_defines = {}
+   for key in build_defines:
        # Remove all boards now
-       if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
-           continue
-       # Remove all keys ending by "_NAME" as it does not make a difference to the configuration
-       if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
-           continue
+       if key.startswith("BOARD_") and key != "BOARD_INFO_NAME": continue
        # Remove all keys ending by "_T_DECLARED" as it's a copy of extraneous system stuff
-       if key.endswith("_T_DECLARED"):
-           continue
+       if key.endswith("_T_DECLARED"): continue
        # Remove keys that are not in the #define list in the Configuration list
-       if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]:
-           continue
-
-       # Don't be that smart guy here
-       resolved_defines[key] = defines[key]
+       if key not in conf_names + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]: continue
+       # Add to a new dictionary for simplicity
+       cleaned_build_defines[key] = build_defines[key]

-   # Generate a build signature now
-   # We are making an object that's a bit more complex than a basic dictionary here
-   data = {}
-   data['__INITIAL_HASH'] = hashes
-   # First create a key for each header here
-   for header in conf_defines:
-       data[header] = {}
-
-   # Then populate the object where each key is going to (that's a O(N^2) algorithm here...)
-   for key in resolved_defines:
-       for header in conf_defines:
-           if key in conf_defines[header]:
-               data[header][key] = resolved_defines[key]
+   # And we only care about defines that (most likely) came from the config files
+   # Build a dictionary of dictionaries with keys: 'name', 'section', 'value'
+   # { 'file1': { 'option': { 'name':'option', 'section':..., 'value':... }, ... }, 'file2': { ... } }
+   real_config = {}
+   for header in conf_defines:
+       real_config[header] = {}
+       for key in cleaned_build_defines:
+           if key in conf_defines[header]:
+               if key[0:2] == '__': continue
+               val = cleaned_build_defines[key]
+               real_config[header][key] = { 'file':header, 'name': key, 'value': val, 'section': conf_defines[header][key]['section']}

    # Every python needs this toy
    def tryint(key):
-       try:
-           return int(defines[key])
-       except:
-           return 0
+       try: return int(build_defines[key])
+       except: return 0

+   # Get the CONFIG_EXPORT value and do an extended dump if > 100
+   # For example, CONFIG_EXPORT 102 will make a 'config.ini' with a [config:] group for each schema @section
    config_dump = tryint('CONFIG_EXPORT')
+   extended_dump = config_dump > 100
+   if extended_dump: config_dump -= 100

    #
    # Produce an INI file if CONFIG_EXPORT == 2
    #
    if config_dump == 2:
        print("Generating config.ini ...")
+
+       ini_fmt = '{0:40} = {1}'
+       ext_fmt = '{0:40} {1}'
+       ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
+
+       if extended_dump:
+           # Extended export will dump config options by section
+
+           # We'll use Schema class to get the sections
+           try:
+               conf_schema = schema.extract()
+           except Exception as exc:
+               print("Error: " + str(exc))
+               exit(1)
+
+           # Then group options by schema @section
+           sections = {}
+           for header in real_config:
+               for name in real_config[header]:
+                   #print(f"  name: {name}")
+                   if name not in ignore:
+                       ddict = real_config[header][name]
+                       #print(f"   real_config[{header}][{name}]:", ddict)
+                       sect = ddict['section']
+                       if sect not in sections: sections[sect] = {}
+                       sections[sect][name] = ddict

+           # Get all sections as a list of strings, with spaces and dashes replaced by underscores
+           long_list = [ re.sub(r'[- ]+', '_', x).lower() for x in sections.keys() ]
+           # Make comma-separated lists of sections with 64 characters or less
+           sec_lines = []
+           while len(long_list):
+               line = long_list.pop(0) + ', '
+               while len(long_list) and len(line) + len(long_list[0]) < 64 - 1:
+                   line += long_list.pop(0) + ', '
+               sec_lines.append(line.strip())
+           sec_lines[-1] = sec_lines[-1][:-1] # Remove the last comma

+       else:
+           sec_lines = ['all']

+       # Build the ini_use_config item
+       sec_list = ini_fmt.format('ini_use_config', sec_lines[0])
+       for line in sec_lines[1:]: sec_list += '\n' + ext_fmt.format('', line)

        config_ini = build_path / 'config.ini'
        with config_ini.open('w') as outfile:
-           ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
            filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
-           vers = defines["CONFIGURATION_H_VERSION"]
+           vers = build_defines["CONFIGURATION_H_VERSION"]
            dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
-           ini_fmt = '{0:40}{1}\n'

            outfile.write(
-               '#\n'
-               + '# Marlin Firmware\n'
-               + '# config.ini - Options to apply before the build\n'
-               + '#\n'
-               + f'# Generated by Marlin build on {dt_string}\n'
-               + '#\n'
-               + '\n'
-               + '[config:base]\n'
-               + ini_fmt.format('ini_use_config', ' = all')
-               + ini_fmt.format('ini_config_vers', f' = {vers}')
-           )
-           # Loop through the data array of arrays
-           for header in data:
-               if header.startswith('__'):
-                   continue
-               outfile.write('\n[' + filegrp[header] + ']\n')
-               for key in sorted(data[header]):
-                   if key not in ignore:
-                       val = 'on' if data[header][key] == '' else data[header][key]
-                       outfile.write(ini_fmt.format(key.lower(), ' = ' + val))
+               f'''#
+# Marlin Firmware
+# config.ini - Options to apply before the build
+#
+# Generated by Marlin build on {dt_string}
+#
+[config:base]
+#
+# ini_use_config - A comma-separated list of actions to apply to the Configuration files.
+#                  The actions will be applied in the listed order.
+#  - none
+#    Ignore this file and don't apply any configuration options
+#
+#  - base
+#    Just apply the options in config:base to the configuration
+#
+#  - minimal
+#    Just apply the options in config:minimal to the configuration
+#
+#  - all
+#    Apply all 'config:*' sections in this file to the configuration
+#
+#  - another.ini
+#    Load another INI file with a path relative to this config.ini file (i.e., within Marlin/)
+#
+#  - https://me.myserver.com/path/to/configs
+#    Fetch configurations from any URL.
+#
+#  - example/Creality/Ender-5 Plus @ bugfix-2.1.x
+#    Fetch example configuration files from the MarlinFirmware/Configurations repository
+#    https://raw.githubusercontent.com/MarlinFirmware/Configurations/bugfix-2.1.x/config/examples/Creality/Ender-5%20Plus/
+#
+#  - example/default @ release-2.0.9.7
+#    Fetch default configuration files from the MarlinFirmware/Configurations repository
+#    https://raw.githubusercontent.com/MarlinFirmware/Configurations/release-2.0.9.7/config/default/
+#
+#  - [disable]
+#    Comment out all #defines in both Configuration.h and Configuration_adv.h. This is useful
+#    to start with a clean slate before applying any config: options, so only the options explicitly
+#    set in config.ini will be enabled in the configuration.
+#
+#  - [flatten] (Not yet implemented)
+#    Produce a flattened set of Configuration.h and Configuration_adv.h files with only the enabled
+#    #defines and no comments. A clean look, but context-free.
+#
+{sec_list}
+{ini_fmt.format('ini_config_vers', vers)}
+''' )

+           if extended_dump:
+               # Loop through the sections
+               for skey in sorted(sections):
+                   #print(f"  skey: {skey}")
+                   sani = re.sub(r'[- ]+', '_', skey).lower()
+                   outfile.write(f"\n[config:{sani}]\n")
+                   opts = sections[skey]
+                   for name in sorted(opts):
+                       val = opts[name]['value']
+                       if val == '': val = 'on'
+                       #print(f"    {name} = {val}")
+                       outfile.write(ini_fmt.format(name.lower(), val) + '\n')

+           else:
+               # Standard export just dumps config:basic and config:advanced sections
+               for header in real_config:
+                   outfile.write(f'\n[{filegrp[header]}]\n')
+                   for name in sorted(real_config[header]):
+                       if name not in ignore:
+                           val = real_config[header][name]['value']
+                           if val == '': val = 'on'
+                           outfile.write(ini_fmt.format(name.lower(), val) + '\n')

    #
-   # Produce a schema.json file if CONFIG_EXPORT == 3
+   # CONFIG_EXPORT 3 = schema.json, 4 = schema.yml
    #
    if config_dump >= 3:
        try:
@@ -207,7 +325,7 @@ def compute_build_signature(env):

    if conf_schema:
        #
-       # Produce a schema.json file if CONFIG_EXPORT == 3
+       # 3 = schema.json
        #
        if config_dump in (3, 13):
            print("Generating schema.json ...")
@@ -217,7 +335,7 @@ def compute_build_signature(env):
            schema.dump_json(conf_schema, build_path / 'schema_grouped.json')

        #
-       # Produce a schema.yml file if CONFIG_EXPORT == 4
+       # 4 = schema.yml
        #
        elif config_dump == 4:
            print("Generating schema.yml ...")
@@ -231,33 +349,58 @@ def compute_build_signature(env):
            import yaml
            schema.dump_yaml(conf_schema, build_path / 'schema.yml')

-   # Append the source code version and date
-   data['VERSION'] = {}
-   data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
-   data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
-   try:
-       curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
-       data['VERSION']['GIT_REF'] = curver.decode()
-   except:
-       pass
-
    #
    # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
+   # Skip if an identical JSON file was already present.
    #
-   if config_dump == 1 or 'CONFIGURATION_EMBEDDING' in defines:
+   if not same_hash and (config_dump == 1 or 'CONFIGURATION_EMBEDDING' in build_defines):
        with marlin_json.open('w') as outfile:
-           json.dump(data, outfile, separators=(',', ':'))
+
+           json_data = {}
+           if extended_dump:
+               print("Extended dump ...")
+               for header in real_config:
+                   confs = real_config[header]
+                   json_data[header] = {}
+                   for name in confs:
+                       c = confs[name]
+                       s = c['section']
+                       if s not in json_data[header]: json_data[header][s] = {}
+                       json_data[header][s][name] = c['value']
+           else:
+               for header in real_config:
+                   conf = real_config[header]
+                   #print(f"real_config[{header}]", conf)
+                   for name in conf:
+                       json_data[name] = conf[name]['value']

+           json_data['__INITIAL_HASH'] = hashes

+           # Append the source code version and date
+           json_data['VERSION'] = {
+               'DETAILED_BUILD_VERSION': cleaned_build_defines['DETAILED_BUILD_VERSION'],
+               'STRING_DISTRIBUTION_DATE': cleaned_build_defines['STRING_DISTRIBUTION_DATE']
+           }
+           try:
+               curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
+               json_data['VERSION']['GIT_REF'] = curver.decode()
+           except:
+               pass

+           json.dump(json_data, outfile, separators=(',', ':'))

    #
    # The rest only applies to CONFIGURATION_EMBEDDING
    #
-   if not 'CONFIGURATION_EMBEDDING' in defines:
+   if not 'CONFIGURATION_EMBEDDING' in build_defines:
+       (build_path / 'mc.zip').unlink(missing_ok=True)
        return

    # Compress the JSON file as much as we can
-   compress_file(marlin_json, 'marlin_config.json', marlin_zip)
+   if not same_hash:
+       compress_file(marlin_json, 'marlin_config.json', marlin_zip)

-   # Generate a C source file for storing this array
+   # Generate a C source file containing the entire ZIP file as an array
    with open('Marlin/src/mczip.h','wb') as result_file:
        result_file.write(
            b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
@@ -269,8 +412,11 @@ def compute_build_signature(env):
    for b in (build_path / 'mc.zip').open('rb').read():
        result_file.write(b' 0x%02X,' % b)
        count += 1
-       if count % 16 == 0:
-           result_file.write(b'\n  ')
-   if count % 16:
-       result_file.write(b'\n')
+       if count % 16 == 0: result_file.write(b'\n  ')
+   if count % 16: result_file.write(b'\n')
    result_file.write(b'};\n')

+if __name__ == "__main__":
+   # Build required. From command line just explain usage.
+   print("Use schema.py to export JSON and YAML from the command-line.")
+   print("Build Marlin with CONFIG_EXPORT 2 to export 'config.ini'.")
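
The embedded mc.zip is written out as a C byte array, 16 bytes per row. The same formatting in isolation (payload invented):

    payload = bytes(range(20))  # stand-in for the mc.zip contents
    out, count = b'', 0
    for b in payload:
        out += b' 0x%02X,' % b
        count += 1
        if count % 16 == 0: out += b'\n  '
    if count % 16: out += b'\n'
    print(out.decode())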