Compare commits

..

7 Commits

Author SHA1 Message Date
Scott Lahteine
3b8ab730b2 🔖 Version 2.1.2.7 2026-02-03 23:49:37 -06:00
ellensp
3f4c9834bb 🔨 Preflight check for future config (#28320)
Co-authored-by: Scott Lahteine <thinkyhead@users.noreply.github.com>
2026-02-03 23:49:37 -06:00
Scott Lahteine
e2208ba58f 🔨 Build quality improvements 2026-01-22 21:40:56 -06:00
Andrivet
57906a8466 🐛 Fix TFT LVGL UI + Y-DIR conflict (#28291)
Fixes #22967
2026-01-22 10:44:31 -06:00
Scott Lahteine
c67f981197 🩹 Fix offset_xy build error 2026-01-22 10:44:31 -06:00
Scott Lahteine
cd4dcee8c1 🔨 Remove dead envs 2026-01-22 10:44:31 -06:00
Scott Lahteine
652a619549 🔨 Update build scripts 2026-01-22 10:44:31 -06:00
22 changed files with 494 additions and 261 deletions

View File

@@ -28,7 +28,7 @@
/**
* Marlin release version identifier
*/
//#define SHORT_BUILD_VERSION "2.1.2.6"
//#define SHORT_BUILD_VERSION "2.1.2.7"
/**
* Verbose version identifier which should contain a reference to the location
@@ -41,7 +41,7 @@
* here we define this default string as the date where the latest release
* version was tagged.
*/
//#define STRING_DISTRIBUTION_DATE "2025-12-24"
//#define STRING_DISTRIBUTION_DATE "2026-01-22"
/**
* The protocol for communication to the host. Protocol indicates communication

View File

@@ -39,12 +39,16 @@ static void spi_init(spi_t *obj, uint32_t speed, spi_mode_e mode, uint8_t msb, u
void MarlinSPI::setClockDivider(uint8_t _div) {
_speed = spi_getClkFreq(&_spi);// / _div;
_clockDivider = _div;
if (_clockDivider != _div) {
_clockDivider = _div;
_mustInit = true;
}
}
void MarlinSPI::begin(void) {
//TODO: only call spi_init if any parameter changed!!
if (!_mustInit) return;
spi_init(&_spi, _speed, _dataMode, _bitOrder, _dataSize);
_mustInit = false;
}
void MarlinSPI::setupDma(SPI_HandleTypeDef &_spiHandle, DMA_HandleTypeDef &_dmaHandle, uint32_t direction, bool minc) {

View File

@@ -76,15 +76,23 @@ public:
/* These methods are deprecated and kept for compatibility.
* Use SPISettings with SPI.beginTransaction() to configure SPI parameters.
*/
void setBitOrder(BitOrder _order) { _bitOrder = _order; }
void setBitOrder(BitOrder order) {
if (_bitOrder == order) return;
_bitOrder = order;
_mustInit = true;
}
void setDataMode(uint8_t _mode) {
switch (_mode) {
void setDataMode(uint8_t mode) {
auto previous_mode = _dataMode;
switch (mode) {
case SPI_MODE0: _dataMode = SPI_MODE_0; break;
case SPI_MODE1: _dataMode = SPI_MODE_1; break;
case SPI_MODE2: _dataMode = SPI_MODE_2; break;
case SPI_MODE3: _dataMode = SPI_MODE_3; break;
default: return;
}
if (previous_mode != _dataMode)
_mustInit = true;
}
void setClockDivider(uint8_t _div);
@@ -104,4 +112,5 @@ private:
pin_t _misoPin;
pin_t _sckPin;
pin_t _ssPin;
bool _mustInit = true;
};

View File

@@ -1,56 +1,62 @@
#!/usr/bin/env python
#
# STM32F1/build_flags.py
# Add build_flags for the base STM32F1_maple environment (stm32f1-maple.ini)
#
from __future__ import print_function
import sys
#dynamic build flags for generic compile options
# Dynamic build flags for generic compile options
if __name__ == "__main__":
args = " ".join([ "-std=gnu++14",
"-Os",
"-mcpu=cortex-m3",
"-mthumb",
"-fsigned-char",
"-fno-move-loop-invariants",
"-fno-strict-aliasing",
"-fsingle-precision-constant",
"--specs=nano.specs",
"--specs=nosys.specs",
"-IMarlin/src/HAL/STM32F1",
"-MMD",
"-MP",
"-DTARGET_STM32F1"
])
for i in range(1, len(sys.argv)):
args += " " + sys.argv[i]
print(args)
# extra script for linker options
else:
import pioutil
if pioutil.is_pio_build():
from SCons.Script import DefaultEnvironment
env = DefaultEnvironment()
env.Append(
ARFLAGS=["rcs"],
ASFLAGS=["-x", "assembler-with-cpp"],
CXXFLAGS=[
"-fabi-version=0",
"-fno-use-cxa-atexit",
"-fno-threadsafe-statics"
],
LINKFLAGS=[
# Print these plus the given args when running directly on the command-line
args = [
"-std=gnu++14",
"-Os",
"-mcpu=cortex-m3",
"-ffreestanding",
"-mthumb",
"-fsigned-char",
"-fno-move-loop-invariants",
"-fno-strict-aliasing",
"--specs=nano.specs",
"--specs=nosys.specs",
"-u_printf_float",
],
)
"-MMD", "-MP",
"-IMarlin/src/HAL/STM32F1",
"-DTARGET_STM32F1",
"-DARDUINO_ARCH_STM32",
"-DPLATFORM_M997_SUPPORT"
] + sys.argv[1:]
print(" ".join(args))
else:
# Extra script for stm32f1-maple.ini build_flags
import pioutil
if pioutil.is_pio_build():
pioutil.env.Append(
ARFLAGS=["rcs"],
ASFLAGS=["-x", "assembler-with-cpp"],
CXXFLAGS=[
"-fabi-version=0",
"-fno-use-cxa-atexit",
"-fno-threadsafe-statics"
],
LINKFLAGS=[
"-Os",
"-mcpu=cortex-m3",
"-ffreestanding",
"-mthumb",
"--specs=nano.specs",
"--specs=nosys.specs",
"-u_printf_float",
],
)

View File

@@ -802,3 +802,5 @@
#define _HAS_E_TEMP(N) || TEMP_SENSOR(N)
#define HAS_E_TEMP_SENSOR (0 REPEAT(EXTRUDERS, _HAS_E_TEMP))
#define TEMP_SENSOR_IS_MAX_TC(T) (TEMP_SENSOR(T) == -5 || TEMP_SENSOR(T) == -3 || TEMP_SENSOR(T) == -2)
#define DGUS_UI_IS(...) 0 // Dummy macro needed for future preflight checking

View File

@@ -1013,6 +1013,7 @@
#endif
#if ANY(HAS_DWIN_E3V2_BASIC, DWIN_CREALITY_LCD_JYERSUI)
#define HAS_DWIN_E3V2 1
#undef LCD_BED_LEVELING // allow for MESH_EDIT_MENU
#endif
// E3V2 extras

View File

@@ -1528,7 +1528,7 @@ static_assert(COUNT(arm) == LOGICAL_AXES, "AXIS_RELATIVE_MODES must contain " _L
* LCD_BED_LEVELING requirements
*/
#if ENABLED(LCD_BED_LEVELING)
#if !HAS_MARLINUI_MENU
#if NONE(HAS_MARLINUI_MENU, DWIN_LCD_PROUI)
#error "LCD_BED_LEVELING is not supported by the selected LCD controller."
#elif !(ENABLED(MESH_BED_LEVELING) || HAS_ABL_NOT_UBL)
#error "LCD_BED_LEVELING requires MESH_BED_LEVELING or AUTO_BED_LEVELING."

View File

@@ -25,7 +25,7 @@
* Release version. Leave the Marlin version or apply a custom scheme.
*/
#ifndef SHORT_BUILD_VERSION
#define SHORT_BUILD_VERSION "2.1.2.6"
#define SHORT_BUILD_VERSION "2.1.2.7"
#endif
/**
@@ -42,7 +42,7 @@
* version was tagged.
*/
#ifndef STRING_DISTRIBUTION_DATE
#define STRING_DISTRIBUTION_DATE "2025-12-24"
#define STRING_DISTRIBUTION_DATE "2026-01-22"
#endif
/**

View File

@@ -127,7 +127,7 @@ void DWIN_WriteToMem(uint8_t mem, uint16_t addr, uint16_t length, uint8_t *data)
DWIN_Word(i, addr + indx); // start address of the data block
++i;
for (uint8_t j = 0; j < i; ++j) { LCD_SERIAL.write(DWIN_SendBuf[j]); delayMicroseconds(1); } // Buf header
for (uint16_t j = indx; j <= indx + to_send - 1; j++) LCD_SERIAL.write(*(data + j)); delayMicroseconds(1); // write block of data
for (uint16_t j = indx; j <= indx + to_send - 1; j++) { LCD_SERIAL.write(*(data + j)); delayMicroseconds(1); } // write block of data
for (uint8_t j = 0; j < 4; ++j) { LCD_SERIAL.write(DWIN_BufTail[j]); delayMicroseconds(1); }
block++;
pending -= to_send;

View File

@@ -128,7 +128,7 @@ void onDrawMenuItem(MenuItemClass* menuitem, int8_t line) {
if (menuitem->icon) DWINUI::Draw_Icon(menuitem->icon, ICOX, MBASE(line) - 3);
if (menuitem->frameid)
DWIN_Frame_AreaCopy(menuitem->frameid, menuitem->frame.left, menuitem->frame.top, menuitem->frame.right, menuitem->frame.bottom, LBLX, MBASE(line));
else if (menuitem->caption)
else if (menuitem->caption[0])
DWINUI::Draw_String(LBLX, MBASE(line) - 1, menuitem->caption);
DWIN_Draw_HLine(HMI_data.SplitLine_Color, 16, MYPOS(line + 1), 240);
}

View File

@@ -109,6 +109,8 @@ xyz_pos_t Probe::offset; // Initialized by settings.load
#if HAS_PROBE_XY_OFFSET
const xy_pos_t &Probe::offset_xy = Probe::offset;
#else
constexpr xy_pos_t Probe::offset_xy;
#endif
#if ENABLED(SENSORLESS_PROBING)

View File

@@ -471,9 +471,9 @@
#elif MB(ULTRATRONICS_PRO)
#include "sam/pins_ULTRATRONICS_PRO.h" // SAM3X8E env:DUE
#elif MB(ARCHIM1)
#include "sam/pins_ARCHIM1.h" // SAM3X8E env:DUE_archim env:DUE_archim_debug
#include "sam/pins_ARCHIM1.h" // SAM3X8E env:DUE_archim
#elif MB(ARCHIM2)
#include "sam/pins_ARCHIM2.h" // SAM3X8E env:DUE_archim env:DUE_archim_debug
#include "sam/pins_ARCHIM2.h" // SAM3X8E env:DUE_archim
#elif MB(ALLIGATOR)
#include "sam/pins_ALLIGATOR_R2.h" // SAM3X8E env:DUE
#elif MB(CNCONTROLS_15D)

View File

@@ -4,15 +4,15 @@
#
import pioutil
if pioutil.is_pio_build():
import struct,uuid,marlin
import struct, uuid, marlin
board = marlin.env.BoardConfig()
board = pioutil.env.BoardConfig()
def calculate_crc(contents, seed):
accumulating_xor_value = seed
for i in range(0, len(contents), 4):
value = struct.unpack('<I', contents[ i : i + 4])[0]
value = struct.unpack("<I", contents[i : i + 4])[0]
accumulating_xor_value = accumulating_xor_value ^ value
return accumulating_xor_value
@@ -29,12 +29,12 @@ if pioutil.is_pio_build():
# This is the block counter
block_number = xor_seed * block_number
#load the xor key from the file
r7 = file_key
# load the xor key from the file
r7 = file_key
for loop_counter in range(0, block_size):
# meant to make sure different bits of the key are used.
xor_seed = int(loop_counter / key_length)
xor_seed = loop_counter // key_length
# IP is a scratch register / R12
ip = loop_counter - (key_length * xor_seed)
@@ -54,10 +54,10 @@ if pioutil.is_pio_build():
# and then with IP
xor_seed = xor_seed ^ ip
#Now store the byte back
# Now store the byte back
r1[loop_counter] = xor_seed & 0xFF
#increment the loop_counter
# increment the loop_counter
loop_counter = loop_counter + 1
def encrypt_file(input, output_file, file_length):
@@ -82,15 +82,15 @@ if pioutil.is_pio_build():
# write the file_key
output_file.write(struct.pack("<I", file_key))
#TODO - how to enforce that the firmware aligns to block boundaries?
block_count = int(len(input_file) / block_size)
print ("Block Count is ", block_count)
# TODO: - how to enforce that the firmware aligns to block boundaries?
block_count = len(input_file) // block_size
print("Block Count is ", block_count)
for block_number in range(0, block_count):
block_offset = (block_number * block_size)
block_offset = block_number * block_size
block_end = block_offset + block_size
block_array = bytearray(input_file[block_offset: block_end])
block_array = bytearray(input_file[block_offset:block_end])
xor_block(block_array, block_array, block_number, block_size, file_key)
for n in range (0, block_size):
for n in range(0, block_size):
input_file[block_offset + n] = block_array[n]
# update the expected CRC value.

View File

@@ -5,17 +5,19 @@
import pioutil
if pioutil.is_pio_build():
Import("env")
env = pioutil.env
cxxflags = [
# "-Wno-incompatible-pointer-types",
# "-Wno-unused-const-variable",
# "-Wno-maybe-uninitialized",
# "-Wno-sign-compare"
# "-Wno-sign-compare",
"-fno-sized-deallocation"
]
if "teensy" not in env["PIOENV"]:
if "teensy" not in env["PIOENV"] and "esp32" not in env["PIOENV"]:
cxxflags += ["-Wno-register"]
env.Append(CXXFLAGS=cxxflags)
env.Append(CFLAGS=["-Wno-implicit-function-declaration"])
#
# Add CPU frequency as a compile time constant instead of a runtime variable
@@ -27,8 +29,8 @@ if pioutil.is_pio_build():
# Useful for JTAG debugging
#
# It will separate release and debug build folders.
# It useful to keep two live versions: a debug version for debugging and another for
# release, for flashing when upload is not done automatically by jlink/stlink.
# This is useful to keep two live versions: a debug version and a release version,
# for flashing when upload is not done automatically by jlink/stlink.
# Without this, PIO needs to recompile everything twice for any small change.
if env.GetBuildType() == "debug" and env.get("UPLOAD_PROTOCOL") not in ["jlink", "stlink", "custom"]:
env["BUILD_DIR"] = "$PROJECT_BUILD_DIR/$PIOENV/debug"

View File

@@ -2,10 +2,11 @@
# pioutil.py
#
from SCons.Script import DefaultEnvironment
env = DefaultEnvironment()
# Make sure 'vscode init' is not the current command
def is_pio_build():
from SCons.Script import DefaultEnvironment
env = DefaultEnvironment()
if "IsCleanTarget" in dir(env) and env.IsCleanTarget(): return False
return not env.IsIntegrationDump()

View File

@@ -86,6 +86,16 @@ if pioutil.is_pio_build():
err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
raise SystemExit(err)
#
# Check for DGUS_UI_IS in Configuration.h
#
config_path = epath / "Marlin" / "Configuration.h"
if config_path.is_file():
with config_path.open() as f:
if "DGUS_UI_IS" in f.read():
err = "ERROR: Future Configurations (2.1.3 and up) are not supported for Marlin 2.1.2."
raise SystemExit(err)
#
# Find the name.cpp.o or name.o and remove it
#

View File

@@ -1,17 +1,35 @@
#!/usr/bin/env python3
#
# schema.py
#
# Used by signature.py via common-dependencies.py to generate a schema file during the PlatformIO build
# when CONFIG_EXPORT is defined in the configuration.
#
# This script can also be run standalone from within the Marlin repo to generate JSON and YAML schema files.
#
# This script is a companion to abm/js/schema.js in the MarlinFirmware/AutoBuildMarlin project, which has
# been extended to evaluate conditions and can determine what options are actually enabled, not just which
# options are uncommented. That will be migrated to this script for standalone migration.
#
import re,json
"""
schema.py
Extract firmware configuration into structured JSON or YAML schema format.
Used by signature.py via common-dependencies.py to generate a schema file during the
PlatformIO build when CONFIG_EXPORT is defined in the configuration.
This script can also be run standalone from within the Marlin repo, and is a companion to
abm/js/schema.js in the MarlinFirmware/AutoBuildMarlin project, which has been extended to
evaluate conditions and can determine what options are actually enabled, not just which
options are uncommented. That will be migrated to this script for standalone migration.
Usage: schema.py [-h] [some|json|jsons|group|yml|yaml]
Process Marlin firmware configuration files (Configuration.h and Configuration_adv.h)
to produce structured output suitable for documentation, tooling, or automated processing.
Positional arguments:
some Generate both JSON and YAML output (schema.json and schema.yml)
json Generate JSON output (schema.json)
jsons Generate grouped JSON output with wildcard options (schema.json and schema_grouped.json)
group Generate grouped JSON output only (schema_grouped.json)
yml Generate YAML output (schema.yml)
yaml Same as 'yml'
Optional arguments:
-h, --help Show this help message and exit
"""
import re, json
from pathlib import Path
def extend_dict(d:dict, k:tuple):
@@ -43,8 +61,7 @@ def find_grouping(gdict, filekey, sectkey, optkey, pindex):
optparts[pindex] = '*'
wildkey = '_'.join(optparts)
kkey = f'{filekey}|{sectkey}|{wildkey}'
if kkey not in gdict: gdict[kkey] = []
gdict[kkey].append((subkey, modkey))
gdict.setdefault(kkey, []).append((subkey, modkey))
# Build a list of potential groups. Only those with multiple items will be grouped.
def group_options(schema):
@@ -70,7 +87,7 @@ def group_options(schema):
def load_boards():
bpath = Path("Marlin/src/core/boards.h")
if bpath.is_file():
with bpath.open() as bfile:
with bpath.open(encoding='utf-8') as bfile:
boards = []
for line in bfile:
if line.startswith("#define BOARD_"):
@@ -80,7 +97,7 @@ def load_boards():
return ''
#
# Extract the current configuration files in the form of a structured schema.
# Extract the specified configuration files in the form of a structured schema.
# Contains the full schema for the configuration files, not just the enabled options,
# Contains the current values of the options, not just data structure, so "schema" is a slight misnomer.
#
@@ -99,9 +116,9 @@ def load_boards():
# - requires = The conditions that must be met for the define to be enabled
# - comment = The comment for the define, if it has one
# - units = The units for the define, if it has one
# - options = The options for the define, if it has one
# - options = The options for the define, if it has any
#
def extract():
def extract_files(filekey):
# Load board names from boards.h
boards = load_boards()
@@ -114,23 +131,21 @@ def extract():
GET_SENSORS = 4 # Gathering temperature sensor options
ERROR = 9 # Syntax error
# List of files to process, with shorthand
filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' }
# A JSON object to store the data
sch_out = { 'basic':{}, 'advanced':{} }
sch_out = { key:{} for key in filekey.values() }
# Regex for #define NAME [VALUE] [COMMENT] with sanitized line
defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
# Pattern to match a float value
flt = r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?'
# Defines to ignore
ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
flt = r'[-+]?\s*(?:\d+\.|\d*\.\d+)(?:[eE][-+]?\d+)?[fF]?'
# Pattern to match an integer expression
int_expr = r'(?:[-+]?\s*\d+(?:\s*[*\/+\-]\s*\d+)*)'
# Start with unknown state
state = Parse.NORMAL
# Serial ID
sid = 0
# Loop through files and parse them line by line
for fn, fk in filekey.items():
with Path("Marlin", fn).open() as fileobj:
with Path("Marlin", fn).open(encoding='utf-8') as fileobj:
section = 'none' # Current Settings section
line_number = 0 # Counter for the line number of the file
conditions = [] # Create a condition stack for the current file
@@ -140,7 +155,7 @@ def extract():
eol_options = False # The options came from end of line, so only apply once
join_line = False # A flag that the line should be joined with the previous one
line = '' # A line buffer to handle \ continuation
last_added_ref = None # Reference to the last added item
last_added_ref = {} # Reference to the last added item
# Loop through the lines in the file
for the_line in fileobj.readlines():
line_number += 1
@@ -180,26 +195,42 @@ def extract():
cfield = 'notes' if 'comment' in last_added_ref else 'comment'
last_added_ref[cfield] = cline
#
# Add the given comment line to the comment buffer, unless:
# - The line starts with ':' and JSON values to assign to 'opt'.
# - The line starts with '@section' so a new section needs to be returned.
# - The line starts with '======' so just skip it.
#
def use_comment(c, opt, sec, bufref):
if c.startswith(':'): # If the comment starts with : then it has magic JSON
d = c[1:].strip() # Strip the leading :
cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0
'''
c - The comment line to parse
opt - Options JSON string to return (if not updated)
sec - Section to return (if not updated)
bufref - The comment buffer to add to
'''
sc = c.strip() # Strip for special patterns
if sc.startswith(':'): # If the comment starts with : then it has magic JSON
d = sc[1:].strip() # Strip the leading : and spaces
# Look for a JSON container
cbr = sc.rindex('}') if d.startswith('{') else sc.rindex(']') if d.startswith('[') else 0
if cbr:
opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip()
opt, cmt = sc[1:cbr+1].strip(), sc[cbr+1:].strip()
if cmt != '': bufref.append(cmt)
else:
opt = c[1:].strip()
elif c.startswith('@section'): # Start a new section
sec = c[8:].strip()
elif not c.startswith('========'):
bufref.append(c)
opt = sc[1:].strip() # Some literal value not in a JSON container?
else:
m = re.match(r'@section\s*(.+)', sc) # Start a new section?
if m:
sec = m[1]
elif not sc.startswith('========'):
bufref.append(c) # Anything else is part of the comment
return opt, sec
# For slash comments, capture consecutive slash comments.
# The comment will be applied to the next #define.
if state == Parse.SLASH_COMMENT:
if not defmatch and the_line.startswith('//'):
use_comment(the_line[2:].strip(), options_json, section, comment_buff)
options_json, section = use_comment(the_line[2:].strip(), options_json, section, comment_buff)
continue
else:
state = Parse.NORMAL
@@ -216,17 +247,16 @@ def extract():
# Temperature sensors are done
if state == Parse.GET_SENSORS:
options_json = f'[ {options_json[:-2]} ]'
state = Parse.NORMAL
# Strip the leading '*' from block comments
# Strip the leading '* ' from block comments
cline = re.sub(r'^\* ?', '', cline)
# Collect temperature sensors
if state == Parse.GET_SENSORS:
sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
sens = re.match(r'^\s*(-?\d+)\s*:\s*(.+)$', cline)
if sens:
s2 = sens[2].replace("'","''")
s2 = sens[2].replace("'", "''")
options_json += f"{sens[1]}:'{sens[1]} - {s2}', "
elif state == Parse.BLOCK_COMMENT:
@@ -251,12 +281,11 @@ def extract():
comment_buff = []
state = Parse.BLOCK_COMMENT
eol_options = False
elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1):
cpos = cpos2
# Comment after a define may be continued on the following lines
if defmatch != None and cpos > 10:
if defmatch is not None and cpos > 10:
state = Parse.EOL_COMMENT
prev_comment = '\n'.join(comment_buff)
comment_buff = []
@@ -286,9 +315,11 @@ def extract():
# Parenthesize the given expression if needed
def atomize(s):
if s == '' \
or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \
or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s):
s = s.strip()
if not s or s.isidentifier() or (s.startswith('(') and s.endswith(')')):
return s
if re.match(r'^[A-Za-z0-9_]*(\([^)]+\))$', s) \
or re.match(r'^[A-Za-z0-9_]+\s*[=!<>]=?\s*.*$', s):
return s
return f'({s})'
@@ -323,10 +354,10 @@ def extract():
conditions.append([ f'!defined({line[7:].strip()})' ])
# Handle a complete #define line
elif defmatch != None:
elif defmatch is not None:
# Get the match groups into vars
enabled, define_name, val = defmatch[1] == None, defmatch[3], defmatch[4]
enabled, define_name, val = defmatch[1] is None, defmatch[3], defmatch[4]
# Increment the serial ID
sid += 1
@@ -341,27 +372,30 @@ def extract():
}
# Type is based on the value
value_type = \
'switch' if val == '' \
else 'bool' if re.match(r'^(true|false)$', val) \
else 'int' if re.match(r'^[-+]?\s*\d+$', val) \
else 'ints' if re.match(r'^([-+]?\s*\d+)(\s*,\s*[-+]?\s*\d+)+$', val) \
else 'floats' if re.match(rf'({flt}(\s*,\s*{flt})+)', val) \
else 'float' if re.match(f'^({flt})$', val) \
else 'string' if val[0] == '"' \
else 'char' if val[0] == "'" \
else 'state' if re.match(r'^(LOW|HIGH)$', val) \
else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
else 'int[]' if re.match(r'^{\s*[-+]?\s*\d+(\s*,\s*[-+]?\s*\d+)*\s*}$', val) \
else 'float[]' if re.match(r'^{{\s*{flt}(\s*,\s*{flt})*\s*}}$', val) \
else 'array' if val[0] == '{' \
else ''
value_type = (
'switch' if val == ''
else 'int' if re.match(r'^[-+]?\s*\d+$', val)
else 'ints' if re.match(r'^[-+]?\s*\d+(?:\s*,\s*[-+]?\s*\d+)+$', val)
else 'floats' if re.match(rf"^{flt}(?:\s*,\s*{flt})+$", val)
else 'float' if re.match(rf"^{flt}$", val)
else 'string' if val.startswith('"')
else 'char' if val.startswith("'")
else 'bool' if val in ('true', 'false')
else 'state' if val in ('HIGH', 'LOW')
else 'int[]' if re.match(rf"^\{{\s*{int_expr}(?:\s*,\s*{int_expr})*\s*\}}$", val)
else 'float[]' if re.match(rf"^\{{\s*{flt}(?:\s*,\s*{flt})*\s*\}}$", val)
else 'array' if val.startswith('{')
else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val)
else ''
)
val = (val == 'true') if value_type == 'bool' \
else int(val) if value_type == 'int' \
else val.replace('f','') if value_type == 'floats' \
else float(val.replace('f','')) if value_type == 'float' \
else val
val = (
(val == 'true') if value_type == 'bool'
else int(val) if value_type == 'int'
else val.replace('f','') if value_type == 'floats'
else float(val.replace('f','')) if value_type == 'float'
else val
)
if val != '': define_info['value'] = val
if value_type != '': define_info['type'] = value_type
@@ -371,7 +405,7 @@ def extract():
# If the comment_buff is not empty, add the comment to the info
if comment_buff:
full_comment = '\n'.join(comment_buff)
full_comment = '\n'.join(comment_buff).strip()
# An EOL comment will be added later
# The handling could go here instead of above
@@ -385,9 +419,17 @@ def extract():
units = re.match(r'^\(([^)]+)\)', full_comment)
if units:
units = units[1]
if units == 's' or units == 'sec': units = 'seconds'
if units in ('s', 'sec'): units = 'seconds'
define_info['units'] = units
if 'comment' not in define_info or define_info['comment'] == '':
if prev_comment:
define_info['comment'] = prev_comment
prev_comment = ''
if 'comment' in define_info and define_info['comment'] == '':
del define_info['comment']
# Set the options for the current #define
if define_name == "MOTHERBOARD" and boards != '':
define_info['options'] = boards
@@ -412,13 +454,31 @@ def extract():
return sch_out
#
# Extract the current configuration files in the form of a structured schema.
#
def extract():
# List of files to process, with shorthand
return extract_files({ 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' })
def dump_json(schema:dict, jpath:Path):
with jpath.open('w') as jfile:
with jpath.open('w', encoding='utf-8') as jfile:
json.dump(schema, jfile, ensure_ascii=False, indent=2)
def dump_yaml(schema:dict, ypath:Path):
import yaml
with ypath.open('w') as yfile:
# Custom representer for all multi-line strings
def str_literal_representer(dumper, data):
if '\n' in data: # Check for multi-line strings
# Add a newline to trigger '|+'
if not data.endswith('\n'): data += '\n'
return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
return dumper.represent_scalar('tag:yaml.org,2002:str', data)
yaml.add_representer(str, str_literal_representer)
with ypath.open('w', encoding='utf-8') as yfile:
yaml.dump(schema, yfile, default_flow_style=False, width=120, indent=2)
def main():
@@ -439,12 +499,17 @@ def main():
def inargs(c): return len(set(args) & set(c)) > 0
# Help / Unknown option
unk = not inargs(['some','json','jsons','group','yml','yaml'])
unk = not inargs(['some','json','jsons','group','yml','yaml', '-h', '--help'])
if (unk): print(f"Unknown option: '{args[0]}'")
if inargs(['-h', '--help']) or unk:
print("Usage: schema.py [some|json|jsons|group|yml|yaml]...")
print(" some = json + yml")
print(" jsons = json + group")
print("Extract firmware configuration into structured JSON or YAML schema format.")
print("Usage: schema.py [-h] [some|json|jsons|group|yml|yaml]")
print(" some Generate both JSON and YAML output (schema.json and schema.yml)")
print(" json Generate JSON output (schema.json)")
print(" jsons Generate grouped JSON output with wildcard options (schema.json and schema_grouped.json)")
print(" group Generate grouped JSON output only (schema_grouped.json)")
print(" yml Generate YAML output (schema.yml)")
print(" yaml Same as 'yml'")
return
# JSON schema

View File

@@ -2,11 +2,10 @@
#
# signature.py
#
import schema
import subprocess,re,json,hashlib
import schema, subprocess, re, json, hashlib
from datetime import datetime
from pathlib import Path
from functools import reduce
def enabled_defines(filepath):
'''
@@ -35,18 +34,29 @@ def enabled_defines(filepath):
'''
outdict = {}
section = "user"
spatt = re.compile(r".*@section +([-a-zA-Z0-9_\s]+)$") # must match @section ...
spatt = re.compile(r".*@section +([-a-zA-Z0-9_\s]+)$") # @section ...
if not Path(filepath).is_file(): return outdict
f = open(filepath, encoding="utf8").read().split("\n")
# Get the full contents of the file and remove all block comments.
# This will avoid false positives from #defines in comments
f = re.sub(r'/\*.*?\*/', '', '\n'.join(f), flags=re.DOTALL).split("\n")
incomment = False
for line in f:
sline = line.strip()
m = re.match(spatt, sline) # @section ...
if m: section = m.group(1).strip() ; continue
if incomment:
if '*/' in sline:
incomment = False
continue
else:
mpos, spos = sline.find('/*'), sline.find('//')
if mpos >= 0 and (spos < 0 or spos > mpos):
incomment = True
continue
if sline[:7] == "#define":
# Extract the key here (we don't care about the value)
kv = sline[8:].strip().split()
@@ -56,9 +66,10 @@ def enabled_defines(filepath):
# Compute the SHA256 hash of a file
def get_file_sha256sum(filepath):
sha256_hash = hashlib.sha256()
with open(filepath,"rb") as f:
if not Path(filepath).is_file(): return ""
with open(filepath, "rb") as f:
# Read and update hash string value in blocks of 4K
for byte_block in iter(lambda: f.read(4096),b""):
for byte_block in iter(lambda: f.read(4096), b""):
sha256_hash.update(byte_block)
return sha256_hash.hexdigest()
@@ -67,9 +78,14 @@ def get_file_sha256sum(filepath):
#
import zipfile
def compress_file(filepath, storedname, outpath):
with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
zipf.write(filepath, arcname=storedname, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_DEFLATED, allowZip64=False, compresslevel=9) as zipf:
zipf.write(filepath, arcname=storedname)
ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
#
# Compute a build signature and/or export the configuration
#
def compute_build_signature(env):
'''
Compute the build signature by extracting all configuration settings and
@@ -81,11 +97,17 @@ def compute_build_signature(env):
env.Append(BUILD_SIGNATURE=1)
build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
marlin_json = build_path / 'marlin_config.json'
json_name = 'marlin_config.json'
marlin_json = build_path / json_name
marlin_zip = build_path / 'mc.zip'
# ANSI colors
green = "\u001b[32m"
yellow = "\u001b[33m"
red = "\u001b[31m"
# Definitions from these files will be kept
header_paths = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]
header_paths = ('Marlin/Configuration.h', 'Marlin/Configuration_adv.h')
# Check if we can skip processing
hashes = ''
@@ -100,7 +122,7 @@ def compute_build_signature(env):
conf = json.load(infile)
same_hash = conf['__INITIAL_HASH'] == hashes
if same_hash:
compress_file(marlin_json, 'marlin_config.json', marlin_zip)
compress_file(marlin_json, json_name, marlin_zip)
except:
pass
@@ -142,7 +164,8 @@ def compute_build_signature(env):
#
# Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
#
if not ('CONFIGURATION_EMBEDDING' in build_defines or 'CONFIG_EXPORT' in build_defines):
is_embed = 'CONFIGURATION_EMBEDDING' in build_defines
if not (is_embed or 'CONFIG_EXPORT' in build_defines):
return
# Filter out useless macros from the output
@@ -175,29 +198,60 @@ def compute_build_signature(env):
# Get the CONFIG_EXPORT value and do an extended dump if > 100
# For example, CONFIG_EXPORT 102 will make a 'config.ini' with a [config:] group for each schema @section
config_dump = tryint('CONFIG_EXPORT')
config_dump = 1 if is_embed else tryint('CONFIG_EXPORT')
extended_dump = config_dump > 100
if extended_dump: config_dump -= 100
config_dump %= 100
# Get the schema class for exports that require it
if config_dump in (3, 4) or (extended_dump and config_dump in (2, 5)):
try:
conf_schema = schema.extract()
except Exception as exc:
print(red + "Error: " + str(exc))
conf_schema = None
optorder = ('MOTHERBOARD','SERIAL_PORT','BAUDRATE','USE_WATCHDOG','THERMAL_PROTECTION_HOTENDS','THERMAL_PROTECTION_HYSTERESIS','THERMAL_PROTECTION_PERIOD','BUFSIZE','BLOCK_BUFFER_SIZE','MAX_CMD_SIZE','EXTRUDERS','TEMP_SENSOR_0','TEMP_HYSTERESIS','HEATER_0_MINTEMP','HEATER_0_MAXTEMP','PREHEAT_1_TEMP_HOTEND','BANG_MAX','PIDTEMP','PID_K1','PID_MAX','PID_FUNCTIONAL_RANGE','DEFAULT_KP','DEFAULT_KI','DEFAULT_KD','X_DRIVER_TYPE','Y_DRIVER_TYPE','Z_DRIVER_TYPE','E0_DRIVER_TYPE','X_BED_SIZE','X_MIN_POS','X_MAX_POS','Y_BED_SIZE','Y_MIN_POS','Y_MAX_POS','Z_MIN_POS','Z_MAX_POS','X_HOME_DIR','Y_HOME_DIR','Z_HOME_DIR','X_MIN_ENDSTOP_HIT_STATE','Y_MIN_ENDSTOP_HIT_STATE','Z_MIN_ENDSTOP_HIT_STATE','DEFAULT_AXIS_STEPS_PER_UNIT','AXIS_RELATIVE_MODES','DEFAULT_MAX_FEEDRATE','DEFAULT_MAX_ACCELERATION','HOMING_FEEDRATE_MM_M','HOMING_BUMP_DIVISOR','X_ENABLE_ON','Y_ENABLE_ON','Z_ENABLE_ON','E_ENABLE_ON','INVERT_X_DIR','INVERT_Y_DIR','INVERT_Z_DIR','INVERT_E0_DIR','STEP_STATE_E','STEP_STATE_X','STEP_STATE_Y','STEP_STATE_Z','DISABLE_X','DISABLE_Y','DISABLE_Z','DISABLE_E','PROPORTIONAL_FONT_RATIO','DEFAULT_NOMINAL_FILAMENT_DIA','JUNCTION_DEVIATION_MM','DEFAULT_ACCELERATION','DEFAULT_TRAVEL_ACCELERATION','DEFAULT_RETRACT_ACCELERATION','DEFAULT_MINIMUMFEEDRATE','DEFAULT_MINTRAVELFEEDRATE','MINIMUM_PLANNER_SPEED','MIN_STEPS_PER_SEGMENT','DEFAULT_MINSEGMENTTIME','BED_OVERSHOOT','BUSY_WHILE_HEATING','DEFAULT_EJERK','DEFAULT_KEEPALIVE_INTERVAL','DEFAULT_LEVELING_FADE_HEIGHT','DISABLE_OTHER_EXTRUDERS','DISPLAY_CHARSET_HD44780','EEPROM_BOOT_SILENT','EEPROM_CHITCHAT','ENDSTOPPULLUPS','EXTRUDE_MAXLENGTH','EXTRUDE_MINTEMP','HOST_KEEPALIVE_FEATURE','HOTEND_OVERSHOOT','JD_HANDLE_SMALL_SEGMENTS','LCD_INFO_SCREEN_STYLE','LCD_LANGUAGE','MAX_BED_POWER','MESH_INSET','MIN_SOFTWARE_ENDSTOPS','MAX_SOFTWARE_ENDSTOPS','MIN_SOFTWARE_ENDSTOP_X','MIN_SOFTWARE_ENDSTOP_Y','MIN_SOFTWARE_ENDSTOP_Z','MAX_SOFTWARE_ENDSTOP_X','MAX_SOFTWARE_ENDSTOP_Y','MAX_SOFTWARE_ENDSTOP_Z','PREHEAT_1_FAN_SPEED','PREHEAT_1_LABEL','PREHEAT_1
_TEMP_BED','PREVENT_COLD_EXTRUSION','PREVENT_LENGTHY_EXTRUDE','PRINTJOB_TIMER_AUTOSTART','PROBING_MARGIN','SHOW_BOOTSCREEN','SOFT_PWM_SCALE','STRING_CONFIG_H_AUTHOR','TEMP_BED_HYSTERESIS','TEMP_BED_RESIDENCY_TIME','TEMP_BED_WINDOW','TEMP_RESIDENCY_TIME','TEMP_WINDOW','VALIDATE_HOMING_ENDSTOPS','XY_PROBE_FEEDRATE','Z_CLEARANCE_BETWEEN_PROBES','Z_CLEARANCE_DEPLOY_PROBE','Z_CLEARANCE_MULTI_PROBE','ARC_SUPPORT','AUTO_REPORT_TEMPERATURES','AUTOTEMP','AUTOTEMP_OLDWEIGHT','BED_CHECK_INTERVAL','DEFAULT_STEPPER_TIMEOUT_SEC','DEFAULT_VOLUMETRIC_EXTRUDER_LIMIT','DISABLE_IDLE_X','DISABLE_IDLE_Y','DISABLE_IDLE_Z','DISABLE_IDLE_E','E0_AUTO_FAN_PIN','ENCODER_100X_STEPS_PER_SEC','ENCODER_10X_STEPS_PER_SEC','ENCODER_RATE_MULTIPLIER','EXTENDED_CAPABILITIES_REPORT','EXTRUDER_AUTO_FAN_SPEED','EXTRUDER_AUTO_FAN_TEMPERATURE','FANMUX0_PIN','FANMUX1_PIN','FANMUX2_PIN','FASTER_GCODE_PARSER','HOMING_BUMP_MM','MAX_ARC_SEGMENT_MM','MIN_ARC_SEGMENT_MM','MIN_CIRCLE_SEGMENTS','N_ARC_CORRECTION','SERIAL_OVERRUN_PROTECTION','SLOWDOWN','SLOWDOWN_DIVISOR','TEMP_SENSOR_BED','THERMAL_PROTECTION_BED_HYSTERESIS','THERMOCOUPLE_MAX_ERRORS','TX_BUFFER_SIZE','WATCH_BED_TEMP_INCREASE','WATCH_BED_TEMP_PERIOD','WATCH_TEMP_INCREASE','WATCH_TEMP_PERIOD')
def optsort(x, optorder):
    """Sort key for config-option names.

    Returns the position of `x` in the `optorder` sequence when present,
    or +inf so that unlisted options sort (stably) after all listed ones.

    :param x:        Option name (string) being ranked.
    :param optorder: Sequence defining the preferred option ordering.
    :return:         int index, or float('inf') for unlisted names.
    """
    # Single scan: sequence.index() raises ValueError for an unknown name,
    # avoiding the original's double traversal ('in' check plus index()).
    try:
        return optorder.index(x)
    except ValueError:
        return float('inf')
#
# Produce an INI file if CONFIG_EXPORT == 2
# CONFIG_EXPORT 102 = config.ini, 105 = Config.h
# Get sections using the schema class
#
if extended_dump and config_dump in (2, 5):
if not conf_schema: exit(1)
# Start with a preferred @section ordering
preorder = ('test','custom','info','machine','eeprom','stepper drivers','multi stepper','idex','extruder','geometry','homing','kinematics','motion','motion control','endstops','filament runout sensors','probe type','probes','bltouch','leveling','temperature','hotend temp','mpctemp','pid temp','mpc temp','bed temp','chamber temp','fans','tool change','advanced pause','calibrate','calibration','media','lcd','lights','caselight','interface','custom main menu','custom config menu','custom buttons','develop','debug matrix','delta','scara','tpara','polar','polargraph','cnc','nozzle park','nozzle clean','gcode','serial','host','filament width','i2c encoders','i2cbus','joystick','multi-material','nanodlp','network','photo','power','psu control','reporting','safety','security','servos','stats','tmc/config','tmc/hybrid','tmc/serial','tmc/smart','tmc/spi','tmc/stallguard','tmc/status','tmc/stealthchop','tmc/tmc26x','units','volumetrics','extras')
sections = { key:{} for key in preorder }
# Group options by schema @section
for header in real_config:
for name in real_config[header]:
#print(f" name: {name}")
if name in ignore: continue
ddict = real_config[header][name]
#print(f" real_config[{header}][{name}]:", ddict)
sect = ddict['section']
if sect not in sections: sections[sect] = {}
sections[sect][name] = ddict
#
# CONFIG_EXPORT 2 or 102 = config.ini
#
if config_dump == 2:
print("Generating config.ini ...")
print(yellow + "Generating config.ini ...")
ini_fmt = '{0:40} = {1}'
ext_fmt = '{0:40} {1}'
ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
if extended_dump:
# Extended export will dump config options by section
# We'll use Schema class to get the sections
try:
conf_schema = schema.extract()
except Exception as exc:
print("Error: " + str(exc))
exit(1)
if not conf_schema: exit(1)
# Then group options by schema @section
sections = {}
@@ -230,7 +284,7 @@ def compute_build_signature(env):
for line in sec_lines[1:]: sec_list += '\n' + ext_fmt.format('', line)
config_ini = build_path / 'config.ini'
with config_ini.open('w') as outfile:
with config_ini.open('w', encoding='utf-8', newline='') as outfile:
filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
vers = build_defines["CONFIGURATION_H_VERSION"]
dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
@@ -293,7 +347,9 @@ f'''#
sani = re.sub(r'[- ]+', '_', skey).lower()
outfile.write(f"\n[config:{sani}]\n")
opts = sections[skey]
for name in sorted(opts):
opts_keys = sorted(opts.keys(), key=lambda x: optsort(x, optorder))
for name in opts_keys:
if name in ignore: continue
val = opts[name]['value']
if val == '': val = 'on'
#print(f" {name} = {val}")
@@ -304,28 +360,76 @@ f'''#
# Standard export just dumps config:basic and config:advanced sections
for header in real_config:
outfile.write(f'\n[{filegrp[header]}]\n')
for name in sorted(real_config[header]):
if name not in ignore:
val = real_config[header][name]['value']
if val == '': val = 'on'
outfile.write(ini_fmt.format(name.lower(), val) + '\n')
opts = real_config[header]
opts_keys = sorted(opts.keys(), key=lambda x: optsort(x, optorder))
for name in opts_keys:
if name in ignore: continue
val = opts[name]['value']
if val == '': val = 'on'
outfile.write(ini_fmt.format(name.lower(), val) + '\n')
#
# CONFIG_EXPORT 3 = schema.json, 4 = schema.yml
# CONFIG_EXPORT 5 or 105 = Config.h
#
if config_dump >= 3:
try:
conf_schema = schema.extract()
except Exception as exc:
print("Error: " + str(exc))
conf_schema = None
if config_dump == 5:
print(yellow + "Generating Config-export.h ...")
config_h = Path('Marlin', 'Config-export.h')
with config_h.open('w') as outfile:
filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
vers = build_defines["CONFIGURATION_H_VERSION"]
dt_string = datetime.utcnow().strftime("%Y-%m-%d at %H:%M:%S")
out_text = f'''/**
* Config.h - Marlin Firmware distilled configuration
* Usage: Place this file in the 'Marlin' folder with the name 'Config.h'.
*
* Exported by Marlin build on {dt_string}.
*/
'''
subs = (('Bltouch','BLTouch'),('hchop','hChop'),('Eeprom','EEPROM'),('Gcode','G-code'),('lguard','lGuard'),('Idex','IDEX'),('Lcd','LCD'),('Mpc','MPC'),('Pid','PID'),('Psu','PSU'),('Scara','SCARA'),('Spi','SPI'),('Tmc','TMC'),('Tpara','TPARA'))
define_fmt = '#define {0:40} {1}'
if extended_dump:
# Loop through the sections
for skey in sections:
#print(f" skey: {skey}")
opts = sections[skey]
headed = False
opts_keys = sorted(opts.keys(), key=lambda x: optsort(x, optorder))
for name in opts_keys:
if name in ignore: continue
val = opts[name]['value']
if not headed:
head = reduce(lambda s, r: s.replace(*r), subs, skey.title())
out_text += f"\n//\n// {head}\n//\n"
headed = True
out_text += define_fmt.format(name, val).strip() + '\n'
else:
# Dump config options in just two sections, by file
for header in real_config:
out_text += f'\n/**\n * Overrides for {header}\n */\n'
opts = real_config[header]
opts_keys = sorted(opts.keys(), key=lambda x: optsort(x, optorder))
for name in opts_keys:
if name in ignore: continue
val = opts[name]['value']
out_text += define_fmt.format(name, val).strip() + '\n'
outfile.write(out_text)
#
# CONFIG_EXPORT 3 = schema.json, 13 = schema_grouped.json, 4 = schema.yml
#
if config_dump in (3, 4, 13):
if conf_schema:
#
# 3 = schema.json
#
if config_dump in (3, 13):
print("Generating schema.json ...")
print(yellow + "Generating schema.json ...")
schema.dump_json(conf_schema, build_path / 'schema.json')
if config_dump == 13:
schema.group_options(conf_schema)
@@ -335,7 +439,7 @@ f'''#
# 4 = schema.yml
#
elif config_dump == 4:
print("Generating schema.yml ...")
print(yellow + "Generating schema.yml ...")
try:
import yaml
except ImportError:
@@ -347,19 +451,20 @@ f'''#
schema.dump_yaml(conf_schema, build_path / 'schema.yml')
#
# Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
# Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1 or 101
# Skip if an identical JSON file was already present.
#
if not same_hash and (config_dump == 1 or 'CONFIGURATION_EMBEDDING' in build_defines):
if not same_hash and config_dump == 1:
with marlin_json.open('w') as outfile:
json_data = {}
if extended_dump:
print("Extended dump ...")
print(yellow + "Extended dump ...")
for header in real_config:
confs = real_config[header]
json_data[header] = {}
for name in confs:
if name in ignore: continue
c = confs[name]
s = c['section']
if s not in json_data[header]: json_data[header][s] = {}
@@ -369,6 +474,7 @@ f'''#
conf = real_config[header]
#print(f"real_config[{header}]", conf)
for name in conf:
if name in ignore: continue
json_data[name] = conf[name]['value']
json_data['__INITIAL_HASH'] = hashes
@@ -389,13 +495,13 @@ f'''#
#
# The rest only applies to CONFIGURATION_EMBEDDING
#
if not 'CONFIGURATION_EMBEDDING' in build_defines:
if not is_embed:
(build_path / 'mc.zip').unlink(missing_ok=True)
return
# Compress the JSON file as much as we can
if not same_hash:
compress_file(marlin_json, 'marlin_config.json', marlin_zip)
compress_file(marlin_json, json_name, marlin_zip)
# Generate a C source file containing the entire ZIP file as an array
with open('Marlin/src/mczip.h','wb') as result_file:
@@ -415,5 +521,12 @@ f'''#
if __name__ == "__main__":
# Build required. From command line just explain usage.
print("Use schema.py to export JSON and YAML from the command-line.")
print("Build Marlin with CONFIG_EXPORT 2 to export 'config.ini'.")
print("*** THIS SCRIPT USED BY common-dependencies.py ***\n\n"
+ "Current options for config and schema export:\n"
+ " - marlin_config.json : Build Marlin with CONFIG_EXPORT 1 or 101. (Use CONFIGURATION_EMBEDDING for 'mc.zip')\n"
+ " - config.ini : Build Marlin with CONFIG_EXPORT 2 or 102.\n"
+ " - schema.json : Run 'schema.py json' (CONFIG_EXPORT 3).\n"
+ " - schema_grouped.json : Run 'schema.py group' (CONFIG_EXPORT 13).\n"
+ " - schema.yml : Run 'schema.py yml' (CONFIG_EXPORT 4).\n"
+ " - Config-export.h : Build Marlin with CONFIG_EXPORT 5 or 105.\n"
)

View File

@@ -1,3 +1,3 @@
# Where have all the configurations gone?
## https://github.com/MarlinFirmware/Configurations/archive/release-2.1.2.6.zip
## https://github.com/MarlinFirmware/Configurations/archive/release-2.1.2.7.zip

View File

@@ -30,10 +30,10 @@ extends = env:linux_native
extra_scripts = ${common.extra_scripts}
post:buildroot/share/PlatformIO/scripts/collect-code-tests.py
build_src_filter = ${env:linux_native.build_src_filter} +<tests>
lib_deps = throwtheswitch/Unity@^2.5.2
lib_deps = throwtheswitch/Unity@^2.6.0
test_build_src = true
build_unflags =
build_flags = ${env:linux_native.build_flags} -Werror
build_flags = ${env:linux_native.build_flags} -Werror -DNO_USER_FEEDBACK_WARNING
#
# Native Simulation
@@ -58,7 +58,7 @@ debug_build_flags = -fstack-protector-strong -g -g3 -ggdb
lib_compat_mode = off
build_src_filter = ${common.default_src_filter} +<src/HAL/NATIVE_SIM>
lib_deps = ${common.lib_deps}
MarlinSimUI=https://github.com/p3p/MarlinSimUI/archive/afe7c1c293.zip
MarlinSimUI=https://github.com/p3p/MarlinSimUI/archive/29c11d4f63.zip
Adafruit NeoPixel=https://github.com/p3p/Adafruit_NeoPixel/archive/c6b319f447.zip
LiquidCrystal=https://github.com/p3p/LiquidCrystal/archive/322fb5fc23.zip
extra_scripts = ${common.extra_scripts}
@@ -81,11 +81,15 @@ build_flags = ${simulator_linux.build_flags} ${simulator_linux.release_flags}
# Simulator for macOS (MacPorts)
#
#
# Use the script buildroot/bin/mac_gcc to prepare your environment.
#
# MacPorts:
# https://www.macports.org/install.php
#
# sudo port install gcc14 glm mesa libsdl2 libsdl2_net
#
# cd /opt/local/bin
# cd $(dirname "$(which port)")
# sudo rm gcc g++ cc ld
# sudo ln -s gcc-mp-14 gcc ; sudo ln -s g++-mp-14 g++ ; sudo ln -s g++ cc
# sudo ln -s ld-classic ld
@@ -99,7 +103,7 @@ build_flags = ${simulator_linux.build_flags} ${simulator_linux.release_flags}
#
# brew install gcc@14 glm mesa sdl2 sdl2_net
#
# cd /opt/homebrew/bin
# cd "$(brew --prefix)/bin"
# sudo rm -f gcc g++ cc
# sudo ln -s gcc-14 gcc ; sudo ln -s g++-14 g++ ; sudo ln -s g++ cc
# cd -
@@ -111,7 +115,7 @@ build_flags = -g2
-DHAS_LIBBSD
-I/opt/local/include
-I/opt/local/include/freetype2
-I/opt/local/include/SDL2/
-I/opt/local/include/SDL2
-L/opt/local/lib
-Wl,-framework,OpenGl
-Wl,-framework,CoreFoundation

View File

@@ -23,7 +23,7 @@
# HAL/STM32F1 Common Environment values
#
[STM32F1_maple]
platform = ststm32@~12.1
platform = ststm32@~15.4.1
board_build.core = maple
build_flags = !python Marlin/src/HAL/STM32F1/build_flags.py
${common.build_flags} -DARDUINO_ARCH_STM32 -DMAPLE_STM32F1 -DPLATFORM_M997_SUPPORT
@@ -31,13 +31,18 @@ build_unflags = -std=gnu11 -std=gnu++11
build_src_filter = ${common.default_src_filter} +<src/HAL/STM32F1> -<src/HAL/STM32F1/tft>
lib_ignore = SPI, FreeRTOS701, FreeRTOS821
lib_deps = ${common.lib_deps}
SoftwareSerialM
SoftwareSerialM
platform_packages = tool-stm32duino
toolchain-gccarmnoneeabi@1.120301.0 # Otherwise it's GCC 7.2.1
extra_scripts = ${common.extra_scripts}
pre:buildroot/share/PlatformIO/scripts/fix_framework_weakness.py
pre:buildroot/share/PlatformIO/scripts/stm32_serialbuffer.py
buildroot/share/PlatformIO/scripts/custom_board.py
buildroot/share/PlatformIO/scripts/offset_and_rename.py
custom_marlin.HAS_SPI_TFT = build_src_filter=+<src/HAL/STM32F1/tft/tft_spi.cpp>
custom_marlin.HAS_TFT_XPT2046 = build_src_filter=+<src/HAL/STM32F1/tft/xpt2046.cpp>
custom_marlin.HAS_FSMC_TFT = build_src_filter=+<src/HAL/STM32F1/tft/tft_fsmc.cpp>
custom_marlin.NEOPIXEL_LED = Adafruit NeoPixel=https://github.com/ccccmagicboy/Adafruit_NeoPixel#meeb_3dp_use
#
# Generic STM32F103RC environment
@@ -61,6 +66,8 @@ monitor_speed = 115200
[env:STM32F103RC_meeb_maple]
extends = env:STM32F103RC_maple
board = marlin_maple_MEEB_3DP
platform_packages = ${env:STM32F103RC_maple.platform_packages}
platformio/tool-dfuutil@~1.11.0
build_flags = ${env:STM32F103RC_maple.build_flags}
-DDEBUG_LEVEL=0
-DSS_TIMER=4
@@ -69,13 +76,13 @@ build_flags = ${env:STM32F103RC_maple.build_flags}
-DUSE_USB_COMPOSITE
-DVECT_TAB_OFFSET=0x2000
-DGENERIC_BOOTLOADER
-DNO_MAPLE_WARNING
board_build.ldscript = STM32F103RC_MEEB_3DP.ld
extra_scripts = ${env:STM32F103RC_maple.extra_scripts}
pre:buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py
buildroot/share/PlatformIO/scripts/STM32F103RC_MEEB_3DP.py
lib_deps = ${env:STM32F103RC_maple.lib_deps}
USBComposite for STM32F1@0.91
custom_marlin.NEOPIXEL_LED = Adafruit NeoPixel=https://github.com/ccccmagicboy/Adafruit_NeoPixel#meeb_3dp_use
USBComposite for STM32F1@1.0.9
debug_tool = stlink
upload_protocol = dfu
@@ -83,13 +90,13 @@ upload_protocol = dfu
# FYSETC STM32F103RC
#
[env:STM32F103RC_fysetc_maple]
extends = env:STM32F103RC_maple
extra_scripts = ${env:STM32F103RC_maple.extra_scripts}
buildroot/share/PlatformIO/scripts/STM32F103RC_fysetc.py
build_flags = ${env:STM32F103RC_maple.build_flags} -DDEBUG_LEVEL=0
lib_ldf_mode = chain
debug_tool = stlink
upload_protocol = serial
extends = env:STM32F103RC_maple
extra_scripts = ${env:STM32F103RC_maple.extra_scripts}
buildroot/share/PlatformIO/scripts/STM32F103RC_fysetc.py
build_flags = ${env:STM32F103RC_maple.build_flags} -DDEBUG_LEVEL=0
lib_ldf_mode = chain
debug_tool = stlink
upload_protocol = serial
#
# BigTreeTech SKR Mini V1.1 / SKR Mini E3 & MZ (STM32F103RCT6 ARM Cortex-M3)
@@ -108,7 +115,7 @@ monitor_speed = 115200
extends = env:STM32F103RC_btt_maple
build_flags = ${env:STM32F103RC_btt_maple.build_flags} -DUSE_USB_COMPOSITE
lib_deps = ${env:STM32F103RC_btt_maple.lib_deps}
USBComposite for STM32F1@0.91
USBComposite for STM32F1@1.0.9
#
# Creality 512K (STM32F103RET6)
@@ -160,7 +167,7 @@ upload_protocol = stlink
extends = env:STM32F103RE_btt_maple
build_flags = ${env:STM32F103RE_btt_maple.build_flags} -DUSE_USB_COMPOSITE
lib_deps = ${env:STM32F103RE_btt_maple.lib_deps}
USBComposite for STM32F1@0.91
USBComposite for STM32F1@1.0.9
#
# Geeetech GTM32 (STM32F103VET6)
@@ -185,7 +192,7 @@ board_build.address = 0x08010000
board_build.rename = project.bin
board_build.ldscript = STM32F103VE_longer.ld
build_flags = ${STM32F1_maple.build_flags}
-DMCU_STM32F103VE -DSTM32F1xx -USERIAL_USB -DU20 -DTS_V12
-DMCU_STM32F103VE -DSTM32F1xx -DSERIAL_USB -DU20 -DTS_V12
build_unflags = ${STM32F1_maple.build_unflags}
-DCONFIG_MAPLE_MINI_NO_DISABLE_DEBUG=1 -DERROR_LED_PORT=GPIOE -DERROR_LED_PIN=6
@@ -236,6 +243,7 @@ board_build.ldscript = mks_robin_pro.ld
#
[env:trigorilla_pro_maple]
extends = env:mks_robin_maple
build_flags = ${env:mks_robin_maple.build_flags} -DSTM32_FLASH_SIZE=512
#
# MKS Robin E3D (STM32F103RCT6) and
@@ -311,14 +319,15 @@ lib_ignore = ${STM32F1_maple.lib_ignore}
# Chitu boards like Tronxy X5s (STM32F103ZET6)
#
[env:chitu_f103_maple]
extends = STM32F1_maple
board = marlin_maple_CHITU_F103
extra_scripts = ${STM32F1_maple.extra_scripts}
pre:buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py
buildroot/share/PlatformIO/scripts/chitu_crypt.py
build_flags = ${STM32F1_maple.build_flags} -DSTM32F1xx -DSTM32_XL_DENSITY
build_unflags = ${STM32F1_maple.build_unflags}
-DCONFIG_MAPLE_MINI_NO_DISABLE_DEBUG= -DERROR_LED_PORT=GPIOE -DERROR_LED_PIN=6
extends = STM32F1_maple
board = marlin_maple_CHITU_F103
extra_scripts = ${STM32F1_maple.extra_scripts}
pre:buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py
buildroot/share/PlatformIO/scripts/chitu_crypt.py
build_flags = ${STM32F1_maple.build_flags} -DSTM32F1xx -DSTM32_XL_DENSITY -DSTM32_FLASH_SIZE=512
build_unflags = ${STM32F1_maple.build_unflags}
-DCONFIG_MAPLE_MINI_NO_DISABLE_DEBUG= -DERROR_LED_PORT=GPIOE -DERROR_LED_PIN=6
board_build.crypt_chitu = update.cbd
#
# Some Chitu V5 boards have a problem with GPIO init.
@@ -353,8 +362,8 @@ board_upload.maximum_size = 237568
build_flags = ${STM32F1_maple.build_flags}
-D__STM32F1__=1 -DDEBUG_LEVEL=0 -DSS_TIMER=4 -DSERIAL_USB
lib_deps = ${STM32F1_maple.lib_deps}
USBComposite for STM32F1@0.91
lib_ignore = Adafruit NeoPixel, SPI, SailfishLCD, SailfishRGB_LED, SlowSoftI2CMaster, TMCStepper
USBComposite for STM32F1@1.0.9
lib_ignore = Adafruit NeoPixel, SPI, SailfishLCD, SailfishRGB_LED, SlowSoftI2CMaster
[env:STM32F103RC_ZM3E2_USB_maple]
extends = ZONESTAR_ZM3E_maple
@@ -385,6 +394,7 @@ build_flags = ${STM32F1_maple.build_flags}
-DMCU_STM32F103VE -DARDUINO_GENERIC_STM32F103V -DARDUINO_ARCH_STM32F1
-DDEBUG_LEVEL=DEBUG_NONE -DCONFIG_MAPLE_MINI_NO_DISABLE_DEBUG=1
-DSS_TIMER=4
-DNO_MAPLE_WARNING
board_build.variant = MARLIN_F103Vx
board_build.ldscript = eryone_ery32_mini.ld
board_build.address = 0x08004000
@@ -395,7 +405,7 @@ build_unflags = ${STM32F1_maple.build_unflags}
#
[env:GD32F103RET6_sovol_maple]
extends = env:STM32F103RE_maple
build_flags = ${STM32F1_maple.build_flags} -DTEMP_TIMER_CHAN=4
build_flags = ${STM32F1_maple.build_flags} -DTEMP_TIMER_CHAN=4 -DNO_MAPLE_WARNING
board_build.address = 0x08007000
board_build.ldscript = sovol.ld
board_build.rename = firmware-{date}-{time}.bin

View File

@@ -63,15 +63,15 @@ build_flags = ${common_STM32F103RC_variant.build_flags}
board_build.offset = 0x7000
board_upload.offset_address = 0x08007000
[USBD_CDC_MSC]
build_flags = -DUSE_USB_FS -DUSBD_USE_CDC_MSC -DUSBD_IRQ_PRIO=5 -DUSBD_IRQ_SUBPRIO=6
build_unflags = -DUSBD_USE_CDC
[env:STM32F103RC_btt_USB]
extends = env:STM32F103RC_btt
platform_packages = ${stm_flash_drive.platform_packages}
build_flags = ${env:STM32F103RC_btt.build_flags}
-DUSE_USB_FS
-DUSBD_IRQ_PRIO=5
-DUSBD_IRQ_SUBPRIO=6
-DUSBD_USE_CDC_MSC
build_unflags = ${common_stm32.build_unflags} -DUSBD_USE_CDC
build_flags = ${env:STM32F103RC_btt.build_flags} ${USBD_CDC_MSC.build_flags}
build_unflags = ${env:STM32F103RC_btt.build_unflags} ${USBD_CDC_MSC.build_unflags}
#
# Panda Pi V2.9 - Standalone (STM32F103RC)
@@ -83,8 +83,8 @@ build_flags = ${common_STM32F103RC_variant.build_flags}
-DTIMER_SERVO=TIM1
board_build.offset = 0x5000
board_upload.offset_address = 0x08005000
lib_deps =
markyue/Panda_SoftMasterI2C@1.0.3
lib_deps = markyue/Panda_SoftMasterI2C@1.0.3
#
# MKS Robin (STM32F103ZET6)
# Uses HAL STM32 to support Marlin UI for TFT screen with optional touch panel
@@ -95,6 +95,7 @@ board = genericSTM32F103ZE
board_build.variant = MARLIN_F103Zx
board_build.encrypt_mks = Robin.bin
board_build.offset = 0x7000
board_build.offset_address = 0x08007000
build_flags = ${stm32_variant.build_flags}
-DENABLE_HWSERIAL3 -DTIMER_SERIAL=TIM5
build_unflags = ${stm32_variant.build_unflags}
@@ -121,8 +122,8 @@ debug_tool = stlink
extends = stm32_variant
board_build.variant = MARLIN_F103Rx
board_build.offset = 0x7000
board_build.rename = firmware-{date}-{time}.bin
board_upload.offset_address = 0x08007000
board_build.rename = firmware-{date}-{time}.bin
build_flags = ${stm32_variant.build_flags}
-DMCU_STM32F103RE -DHAL_SD_MODULE_ENABLED
-DSS_TIMER=4 -DTIMER_SERVO=TIM5
@@ -177,6 +178,7 @@ board_upload.offset_address = 0x08010000
extends = env:STM32F103RE_creality
board_build.offset = 0x8000
board_upload.offset_address = 0x08008000
board_build.rename = main_board_{date}_{time}.bin
#
# Creality 256K (STM32F103RC)
@@ -199,7 +201,7 @@ board = genericSTM32F103RC
extends = STM32F103Rx_creality
board = genericSTM32F103VE
board_build.variant = MARLIN_F103Vx
build_flags = ${stm32_variant.build_flags}
build_flags = ${STM32F103Rx_creality.build_flags}
-DSS_TIMER=4 -DTIMER_SERVO=TIM5
-DENABLE_HWSERIAL3 -DTRANSFER_CLOCK_DIV=8
#
@@ -225,10 +227,8 @@ upload_protocol = jlink
[env:STM32F103RE_btt_USB]
extends = env:STM32F103RE_btt
platform_packages = ${stm_flash_drive.platform_packages}
build_flags = ${env:STM32F103RE_btt.build_flags}
-DUSE_USB_FS -DUSBD_IRQ_PRIO=5
-DUSBD_IRQ_SUBPRIO=6 -DUSBD_USE_CDC_MSC
build_unflags = ${env:STM32F103RE_btt.build_unflags} -DUSBD_USE_CDC
build_flags = ${env:STM32F103RE_btt.build_flags} ${USBD_CDC_MSC.build_flags}
build_unflags = ${env:STM32F103RE_btt.build_unflags} ${USBD_CDC_MSC.build_unflags}
#
# Mingda MPX_ARM_MINI
@@ -238,6 +238,7 @@ extends = stm32_variant
board = genericSTM32F103ZE
board_build.variant = MARLIN_F103Zx
board_build.offset = 0x10000
board_build.offset_address = 0x08010000
build_flags = ${stm32_variant.build_flags}
-DENABLE_HWSERIAL3 -DTIMER_SERIAL=TIM5
build_unflags = ${stm32_variant.build_unflags}
@@ -252,7 +253,7 @@ board = malyanm200_f103cb
build_flags = ${common_stm32.build_flags}
-DHAL_PCD_MODULE_ENABLED -DDISABLE_GENERIC_SERIALUSB
-DHAL_UART_MODULE_ENABLED
build_src_filter = ${common.default_src_filter} +<src/HAL/STM32> -<src/HAL/STM32/tft>
build_src_filter = ${common_stm32.build_src_filter} +<src/HAL/STM32> -<src/HAL/STM32/tft>
#
# FLYmaker FLY Mini (STM32F103RCT6)
@@ -263,7 +264,7 @@ board = genericSTM32F103RC
board_build.variant = MARLIN_F103Rx
board_build.offset = 0x5000
board_upload.offset_address = 0x08005000
build_flags = ${stm32_variant.build_flags} -DSS_TIMER=4
build_flags = ${stm32_variant.build_flags} -DSS_TIMER=4 -DSTM32_FLASH_SIZE=256
#
# (STM32F103VE_robin)
@@ -361,6 +362,8 @@ board_build.offset = 0xA000
board_upload.offset_address = 0x0800A000
build_flags = ${stm32_variant.build_flags}
-DSTM32F1xx -DSTM32_XL_DENSITY
build_unflags = ${stm32_variant.build_unflags}
-DUSBCON -DUSBD_USE_CDC
extra_scripts = ${stm32_variant.extra_scripts}
buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py
@@ -402,7 +405,7 @@ extends = stm32_variant
board = genericSTM32F103ZE
board_build.variant = MARLIN_F103Zx
build_flags = ${stm32_variant.build_flags}
-DENABLE_HWSERIAL3 -DTIMER_SERIAL=TIM5
-DENABLE_HWSERIAL3 -DTIMER_SERIAL=TIM5 -DSTM32_FLASH_SIZE=512
build_unflags = ${stm32_variant.build_unflags}
-DUSBCON -DUSBD_USE_CDC
@@ -416,6 +419,7 @@ board = genericSTM32F103ZE
board_build.crypt_chitu = update.zw
board_build.variant = MARLIN_F103Zx
board_build.offset = 0x8800
board_build.offset_address = 0x08008800
build_flags = ${stm32_variant.build_flags}
-DENABLE_HWSERIAL3 -DTIMER_SERIAL=TIM5
build_unflags = ${stm32_variant.build_unflags}
@@ -433,8 +437,8 @@ board = genericSTM32F103ZE
board_build.crypt_chitu = update.cbd
board_build.variant = MARLIN_F103Zx
board_build.offset = 0x8800
build_flags = ${stm32_variant.build_flags}
-DSTM32F1xx
board_build.offset_address = 0x08008800
build_flags = ${stm32_variant.build_flags} -DSTM32F1xx -DSTM32_FLASH_SIZE=512
build_unflags = ${stm32_variant.build_unflags}
extra_scripts = ${stm32_variant.extra_scripts}
buildroot/share/PlatformIO/scripts/chitu_crypt.py
@@ -457,13 +461,13 @@ build_flags = ${env:chitu_f103.build_flags} -DCHITU_V5_Z_MIN_BUGFIX
[ZONESTAR_ZM3E]
extends = stm32_variant
platform_packages = ${stm_flash_drive.platform_packages}
board_upload.offset_address = 0x08005000
board_build.offset = 0x5000
board_upload.offset_address = 0x08005000
board_upload.maximum_size = 237568
extra_scripts = ${stm32_variant.extra_scripts}
build_flags = ${common_stm32.build_flags}
-DSS_TIMER=4 -DTIMER_SERVO=TIM5 -DUSE_USB_FS -DUSBD_IRQ_PRIO=5 -DUSBD_IRQ_SUBPRIO=6 -DUSBD_USE_CDC_MSC
build_unflags = ${stm32_variant.build_unflags} -DUSBD_USE_CDC
build_flags = ${stm32_variant.build_flags} ${USBD_CDC_MSC.build_flags}
-DSS_TIMER=4 -DTIMER_SERVO=TIM5
build_unflags = ${stm32_variant.build_unflags} ${USBD_CDC_MSC.build_unflags}
[env:STM32F103RC_ZM3E2_USB]
extends = ZONESTAR_ZM3E