🧑‍💻 Use spaces indent for Python

This commit is contained in:
parent d93c41a257
commit ff09ea13a4
@@ -14,6 +14,10 @@ end_of_line = lf
 indent_style = space
 indent_size = 2
 
-[{*.py,*.conf,*.sublime-project}]
+[{*.py}]
+indent_style = space
+indent_size = 4
+
+[{*.conf,*.sublime-project}]
 indent_style = tab
 indent_size = 4
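The hunk above splits the old shared EditorConfig section so Python files get 4-space indentation while .conf and Sublime project files keep tabs. A hypothetical check of which new section governs a file; fnmatch only approximates EditorConfig's brace-glob syntax, so the brace groups are expanded by hand here:

import fnmatch

sections = {
    "4-space indent": ["*.py"],
    "tab indent":     ["*.conf", "*.sublime-project"],
}

def style_for(filename):
    for style, globs in sections.items():
        if any(fnmatch.fnmatch(filename, g) for g in globs):
            return style
    return "default (2-space)"

print(style_for("common-dependencies.py"))  # -> 4-space indent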
@@ -9,127 +9,127 @@ from __future__ import print_function
import pioutil
if pioutil.is_pio_build():

    target_filename = "FIRMWARE.CUR"
    target_drive = "REARM"

    import platform

    current_OS = platform.system()
    Import("env")

    def print_error(e):
        print('\nUnable to find destination disk (%s)\n' \
              'Please select it in platformio.ini using the upload_port keyword ' \
              '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
              'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
              %(e, env.get('PIOENV')))

    def before_upload(source, target, env):
        try:
            from pathlib import Path
            #
            # Find a disk for upload
            #
            upload_disk = 'Disk not found'
            target_file_found = False
            target_drive_found = False
            if current_OS == 'Windows':
                #
                # platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
                # Windows doesn't care about the disk's name, only about the drive letter
                import subprocess,string
                from ctypes import windll
                from pathlib import PureWindowsPath

                # Get the list of drives
                # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
                drives = []
                bitmask = windll.kernel32.GetLogicalDrives()
                for letter in string.ascii_uppercase:
                    if bitmask & 1:
                        drives.append(letter)
                    bitmask >>= 1

                for drive in drives:
                    final_drive_name = drive + ':'
                    # print ('disc check: {}'.format(final_drive_name))
                    try:
                        volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
                    except Exception as e:
                        print ('error:{}'.format(e))
                        continue
                    else:
                        if target_drive in volume_info and not target_file_found: # Set upload if the target file wasn't found yet
                            target_drive_found = True
                            upload_disk = PureWindowsPath(final_drive_name)
                        if target_filename in volume_info:
                            if not target_file_found:
                                upload_disk = PureWindowsPath(final_drive_name)
                            target_file_found = True

            elif current_OS == 'Linux':
                #
                # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
                #
                import getpass
                user = getpass.getuser()
                mpath = Path('media', user)
                drives = [ x for x in mpath.iterdir() if x.is_dir() ]
                if target_drive in drives: # If the target drive is found, use it.
                    target_drive_found = True
                    upload_disk = mpath / target_drive
                else:
                    for drive in drives:
                        try:
                            fpath = mpath / drive
                            filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
                        except:
                            continue
                        else:
                            if target_filename in filenames:
                                upload_disk = mpath / drive
                                target_file_found = True
                                break
                #
                # Set upload_port to the drive if found
                #

                if target_file_found or target_drive_found:
                    env.Replace(
                        UPLOAD_FLAGS="-P$UPLOAD_PORT"
                    )

            elif current_OS == 'Darwin': # MAC
                #
                # platformio.ini will accept this for an OSX upload port designation: 'upload_port = /media/media_name/drive'
                #
                dpath = Path('/Volumes') # human readable names
                drives = [ x for x in dpath.iterdir() if x.is_dir() ]
                if target_drive in drives and not target_file_found: # Set upload if the target file wasn't found yet
                    target_drive_found = True
                    upload_disk = dpath / target_drive
                for drive in drives:
                    try:
                        fpath = dpath / drive # will get an error if the drive is protected
                        filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
                    except:
                        continue
                    else:
                        if target_filename in filenames:
                            upload_disk = dpath / drive
                            target_file_found = True
                            break

            #
            # Set upload_port to the drive if found
            #
            if target_file_found or target_drive_found:
                env.Replace(UPLOAD_PORT=str(upload_disk))
                print('\nUpload disk: ', upload_disk, '\n')
            else:
                print_error('Autodetect Error')

        except Exception as e:
            print_error(str(e))

    env.AddPreAction("upload", before_upload)
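The core pattern in the script above is registering a callback that runs just before PlatformIO's upload step. A minimal sketch of the same hook, assuming it is loaded via extra_scripts in platformio.ini; env.AddPreAction is standard PlatformIO/SCons API, and the port value here is purely illustrative:

Import("env")

def before_upload(source, target, env):
    # Decide on a port however you like, then override the project setting
    env.Replace(UPLOAD_PORT="/media/user/REARM")

env.AddPreAction("upload", before_upload)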
@@ -285,7 +285,7 @@ public:
      if (!menuPower) menuPower = cpwr_to_upwr(SPEED_POWER_STARTUP);
      power = upower_to_ocr(menuPower);
      apply_power(power);
    } else
      apply_power(0);
  }
@@ -4,17 +4,17 @@
#
import pioutil
if pioutil.is_pio_build():
    from os.path import join, isfile
    import shutil

    Import("env")

    mf = env["MARLIN_FEATURES"]
    rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
    txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"

    serialBuf = str(max(int(rxBuf), int(txBuf), 350))

    build_flags = env.get('BUILD_FLAGS')
    build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
    env.Replace(BUILD_FLAGS=build_flags)
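The sizing rule above in isolation: the generated define is the larger of the RX/TX buffer sizes, floored at 350 bytes. A short check with illustrative sample values:

mf = {"RX_BUFFER_SIZE": "128", "TX_BUFFER_SIZE": "256"}
rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"
serialBuf = str(max(int(rxBuf), int(txBuf), 350))
assert serialBuf == "350"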
@@ -4,123 +4,123 @@
#
import pioutil
if pioutil.is_pio_build():
    import struct,uuid,marlin

    board = marlin.env.BoardConfig()

    def calculate_crc(contents, seed):
        accumulating_xor_value = seed;

        for i in range(0, len(contents), 4):
            value = struct.unpack('<I', contents[ i : i + 4])[0]
            accumulating_xor_value = accumulating_xor_value ^ value
        return accumulating_xor_value

    def xor_block(r0, r1, block_number, block_size, file_key):
        # This is the loop counter
        loop_counter = 0x0

        # This is the key length
        key_length = 0x18

        # This is an initial seed
        xor_seed = 0x4BAD

        # This is the block counter
        block_number = xor_seed * block_number

        # Load the xor key from the file
        r7 = file_key

        for loop_counter in range(0, block_size):
            # meant to make sure different bits of the key are used.
            xor_seed = int(loop_counter / key_length)

            # IP is a scratch register / R12
            ip = loop_counter - (key_length * xor_seed)

            # xor_seed = (loop_counter * loop_counter) + block_number
            xor_seed = (loop_counter * loop_counter) + block_number

            # shift the xor_seed left by the bits in IP.
            xor_seed = xor_seed >> ip

            # load a byte into IP
            ip = r0[loop_counter]

            # XOR the seed with r7
            xor_seed = xor_seed ^ r7

            # and then with IP
            xor_seed = xor_seed ^ ip

            # Now store the byte back
            r1[loop_counter] = xor_seed & 0xFF

            # increment the loop_counter
            loop_counter = loop_counter + 1

    def encrypt_file(input, output_file, file_length):
        input_file = bytearray(input.read())
        block_size = 0x800
        key_length = 0x18

        uid_value = uuid.uuid4()
        file_key = int(uid_value.hex[0:8], 16)

        xor_crc = 0xEF3D4323;

        # the input file is expected to be in chunks of 0x800
        # so round the size
        while len(input_file) % block_size != 0:
            input_file.extend(b'0x0')

        # write the file header
        output_file.write(struct.pack(">I", 0x443D2D3F))
        # encrypt the contents using a known file header key

        # write the file_key
        output_file.write(struct.pack("<I", file_key))

        # TODO - how to enforce that the firmware aligns to block boundaries?
        block_count = int(len(input_file) / block_size)
        print ("Block Count is ", block_count)
        for block_number in range(0, block_count):
            block_offset = (block_number * block_size)
            block_end = block_offset + block_size
            block_array = bytearray(input_file[block_offset: block_end])
            xor_block(block_array, block_array, block_number, block_size, file_key)
            for n in range (0, block_size):
                input_file[block_offset + n] = block_array[n]

            # update the expected CRC value.
            xor_crc = calculate_crc(block_array, xor_crc)

        # write CRC
        output_file.write(struct.pack("<I", xor_crc))

        # finally, append the encrypted results.
        output_file.write(input_file)
        return

    # Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
    def encrypt(source, target, env):
        from pathlib import Path

        fwpath = Path(target[0].path)
        fwsize = fwpath.stat().st_size

        enname = board.get("build.crypt_chitu")
        enpath = Path(target[0].dir.path)

        fwfile = fwpath.open("rb")
        enfile = (enpath / enname).open("wb")

        print(f"Encrypting {fwpath} to {enname}")
        encrypt_file(fwfile, enfile, fwsize)
        fwfile.close()
        enfile.close()
        fwpath.unlink()

    marlin.relocate_firmware("0x08008800")
    marlin.add_post_action(encrypt);
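A property worth noting in the script above: the keystream in xor_block() depends only on the byte position, block_number, and file_key, never on the data itself, so applying the same call twice is the identity. A round-trip check, assuming xor_block from the script above is in scope (the key and payload are illustrative):

block = bytearray(b"hello world!" + bytes(0x800 - 12))
original = bytes(block)
xor_block(block, block, 1, 0x800, 0x12345678)  # "encrypt" in place
xor_block(block, block, 1, 0x800, 0x12345678)  # the same call decrypts
assert bytes(block) == original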
@@ -4,13 +4,13 @@
#
import pioutil
if pioutil.is_pio_build():
    Import("env", "projenv")

    def apply_board_build_flags():
        if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
            return
        projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())

    # We need to add the board build flags in a post script
    # so the platform build script doesn't overwrite the custom CCFLAGS
    apply_board_build_flags()
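The append-don't-replace idea above in a minimal form. As the script's own comment notes, it must be listed as a post: entry under extra_scripts in platformio.ini so it runs after the platform's build script; otherwise the custom CCFLAGS could be overwritten. The flag value here is illustrative:

Import("env", "projenv")
projenv.Append(CCFLAGS=["-DBOARD_EXTRA_FLAG"])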
@@ -5,247 +5,247 @@
import pioutil
if pioutil.is_pio_build():

    import subprocess,os,re
    Import("env")

    from platformio.package.meta import PackageSpec
    from platformio.project.config import ProjectConfig

    verbose = 0
    FEATURE_CONFIG = {}

    def validate_pio():
        PIO_VERSION_MIN = (6, 0, 1)
        try:
            from platformio import VERSION as PIO_VERSION
            weights = (1000, 100, 1)
            version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
            version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
            if version_cur < version_min:
                print()
                print("**************************************************")
                print("******      An update to PlatformIO is      ******")
                print("******  required to build Marlin Firmware.  ******")
                print("******                                      ******")
                print("******      Minimum version: ", PIO_VERSION_MIN, "    ******")
                print("******      Current Version: ", PIO_VERSION, " ******")
                print("******                                      ******")
                print("******   Update PlatformIO and try again.   ******")
                print("**************************************************")
                print()
                exit(1)
        except SystemExit:
            exit(1)
        except:
            print("Can't detect PlatformIO Version")

    def blab(str,level=1):
        if verbose >= level:
            print("[deps] %s" % str)

    def add_to_feat_cnf(feature, flines):

        try:
            feat = FEATURE_CONFIG[feature]
        except:
            FEATURE_CONFIG[feature] = {}

        # Get a reference to the FEATURE_CONFIG under construction
        feat = FEATURE_CONFIG[feature]

        # Split up passed lines on commas or newlines and iterate
        # Add common options to the features config under construction
        # For lib_deps replace a previous instance of the same library
        atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
        for line in atoms:
            parts = line.split('=')
            name = parts.pop(0)
            if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
                feat[name] = '='.join(parts)
                blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
            else:
                for dep in re.split(r',\s*', line):
                    lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
                    lib_re = re.compile('(?!^' + lib_name + '\\b)')
                    feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
                    blab("[%s] lib_deps = %s" % (feature, dep), 3)

    def load_features():
        blab("========== Gather [features] entries...")
        for key in ProjectConfig().items('features'):
            feature = key[0].upper()
            if not feature in FEATURE_CONFIG:
                FEATURE_CONFIG[feature] = { 'lib_deps': [] }
            add_to_feat_cnf(feature, key[1])

        # Add options matching custom_marlin.MY_OPTION to the pile
        blab("========== Gather custom_marlin entries...")
        for n in env.GetProjectOptions():
            key = n[0]
            mat = re.match(r'custom_marlin\.(.+)', key)
            if mat:
                try:
                    val = env.GetProjectOption(key)
                except:
                    val = None
                if val:
                    opt = mat[1].upper()
                    blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
                    add_to_feat_cnf(opt, val)

    def get_all_known_libs():
        known_libs = []
        for feature in FEATURE_CONFIG:
            feat = FEATURE_CONFIG[feature]
            if not 'lib_deps' in feat:
                continue
            for dep in feat['lib_deps']:
                known_libs.append(PackageSpec(dep).name)
        return known_libs

    def get_all_env_libs():
        env_libs = []
        lib_deps = env.GetProjectOption('lib_deps')
        for dep in lib_deps:
            env_libs.append(PackageSpec(dep).name)
        return env_libs

    def set_env_field(field, value):
        proj = env.GetProjectConfig()
        proj.set("env:" + env['PIOENV'], field, value)

    # All unused libs should be ignored so that if a library
    # exists in .pio/lib_deps it will not break compilation.
    def force_ignore_unused_libs():
        env_libs = get_all_env_libs()
        known_libs = get_all_known_libs()
        diff = (list(set(known_libs) - set(env_libs)))
        lib_ignore = env.GetProjectOption('lib_ignore') + diff
        blab("Ignore libraries: %s" % lib_ignore)
        set_env_field('lib_ignore', lib_ignore)

    def apply_features_config():
        load_features()
        blab("========== Apply enabled features...")
        for feature in FEATURE_CONFIG:
            if not env.MarlinHas(feature):
                continue

            feat = FEATURE_CONFIG[feature]

            if 'lib_deps' in feat and len(feat['lib_deps']):
                blab("========== Adding lib_deps for %s... " % feature, 2)

                # feat to add
                deps_to_add = {}
                for dep in feat['lib_deps']:
                    deps_to_add[PackageSpec(dep).name] = dep
                    blab("==================== %s... " % dep, 2)

                # Does the env already have the dependency?
                deps = env.GetProjectOption('lib_deps')
                for dep in deps:
                    name = PackageSpec(dep).name
                    if name in deps_to_add:
                        del deps_to_add[name]

                # Are there any libraries that should be ignored?
                lib_ignore = env.GetProjectOption('lib_ignore')
                for dep in deps:
                    name = PackageSpec(dep).name
                    if name in deps_to_add:
                        del deps_to_add[name]

                # Is there anything left?
                if len(deps_to_add) > 0:
                    # Only add the missing dependencies
                    set_env_field('lib_deps', deps + list(deps_to_add.values()))

            if 'build_flags' in feat:
                f = feat['build_flags']
                blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
                new_flags = env.GetProjectOption('build_flags') + [ f ]
                env.Replace(BUILD_FLAGS=new_flags)

            if 'extra_scripts' in feat:
                blab("Running extra_scripts for %s... " % feature, 2)
                env.SConscript(feat['extra_scripts'], exports="env")

            if 'src_filter' in feat:
                blab("========== Adding build_src_filter for %s... " % feature, 2)
                src_filter = ' '.join(env.GetProjectOption('src_filter'))
                # first we need to remove the references to the same folder
                my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
                cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
                for d in my_srcs:
                    if d in cur_srcs:
                        src_filter = re.sub(r'[+-]' + d, '', src_filter)

                src_filter = feat['src_filter'] + ' ' + src_filter
                set_env_field('build_src_filter', [src_filter])
                env.Replace(SRC_FILTER=src_filter)

            if 'lib_ignore' in feat:
                blab("========== Adding lib_ignore for %s... " % feature, 2)
                lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
                set_env_field('lib_ignore', lib_ignore)

    #
    # Use the compiler to get a list of all enabled features
    #
    def load_marlin_features():
        if 'MARLIN_FEATURES' in env:
            return

        # Process defines
        from preprocessor import run_preprocessor
        define_list = run_preprocessor(env)
        marlin_features = {}
        for define in define_list:
            feature = define[8:].strip().decode().split(' ')
            feature, definition = feature[0], ' '.join(feature[1:])
            marlin_features[feature] = definition
        env['MARLIN_FEATURES'] = marlin_features

    #
    # Return True if a matching feature is enabled
    #
    def MarlinHas(env, feature):
        load_marlin_features()
        r = re.compile('^' + feature + '$')
        found = list(filter(r.match, env['MARLIN_FEATURES']))

        # Defines could still be 'false' or '0', so check
        some_on = False
        if len(found):
            for f in found:
                val = env['MARLIN_FEATURES'][f]
                if val in [ '', '1', 'true' ]:
                    some_on = True
                elif val in env['MARLIN_FEATURES']:
                    some_on = env.MarlinHas(val)

        return some_on

    validate_pio()

    try:
        verbose = int(env.GetProjectOption('custom_verbose'))
    except:
        pass

    #
    # Add a method for other PIO scripts to query enabled features
    #
    env.AddMethod(MarlinHas)

    #
    # Add dependencies for enabled Marlin features
    #
    apply_features_config()
    force_ignore_unused_libs()

    #print(env.Dump())

    from signature import compute_build_signature
    compute_build_signature(env)
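The weighted-sum trick in validate_pio() above collapses a version tuple into a single comparable number. A simplified sketch of the idea; the script above additionally scrubs non-numeric suffixes from each part:

weights = (1000, 100, 1)
def collapse(version):
    # e.g. (6, 0, 1) -> 6*1000 + 0*100 + 1*1 = 6001
    return sum(w * int(p) for w, p in zip(weights, version))
assert collapse((6, 1, 5)) > collapse((6, 0, 1)) == 6001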
@@ -7,229 +7,229 @@ from pathlib import Path

verbose = 0
def blab(str,level=1):
    if verbose >= level: print(f"[config] {str}")

def config_path(cpath):
    return Path("Marlin", cpath)

# Apply a single name = on/off ; name = value ; etc.
# TODO: Limit to the given (optional) configuration
def apply_opt(name, val, conf=None):
    if name == "lcd": name, val = val, "on"

    # Create a regex to match the option and capture parts of the line
    regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)

    # Find and enable and/or update all matches
    for file in ("Configuration.h", "Configuration_adv.h"):
        fullpath = config_path(file)
        lines = fullpath.read_text().split('\n')
        found = False
        for i in range(len(lines)):
            line = lines[i]
            match = regex.match(line)
            if match and match[4].upper() == name.upper():
                found = True
                # For boolean options un/comment the define
                if val in ("on", "", None):
                    newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
                elif val == "off":
                    newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
                else:
                    # For options with values, enable and set the value
                    newline = match[1] + match[3] + match[4] + match[5] + val
                    if match[8]:
                        sp = match[7] if match[7] else ' '
                        newline += sp + match[8]
                lines[i] = newline
                blab(f"Set {name} to {val}")

        # If the option was found, write the modified lines
        if found:
            fullpath.write_text('\n'.join(lines))
            break

    # If the option didn't appear in either config file, add it
    if not found:
        # OFF options are added as disabled items so they appear
        # in config dumps. Useful for custom settings.
        prefix = ""
        if val == "off":
            prefix, val = "//", "" # Item doesn't appear in config dump
            #val = "false" # Item appears in config dump

        # Uppercase the option unless already mixed/uppercase
        added = name.upper() if name.islower() else name

        # Add the provided value after the name
        if val != "on" and val != "" and val is not None:
            added += " " + val

        # Prepend the new option after the first set of #define lines
        fullpath = config_path("Configuration.h")
        with fullpath.open() as f:
            lines = f.readlines()
            linenum = 0
            gotdef = False
            for line in lines:
                isdef = line.startswith("#define")
                if not gotdef:
                    gotdef = isdef
                elif not isdef:
                    break
                linenum += 1
            lines.insert(linenum, f"{prefix}#define {added} // Added by config.ini\n")
        fullpath.write_text('\n'.join(lines))

# Fetch configuration files from GitHub given the path.
# Return True if any files were fetched.
def fetch_example(url):
    if url.endswith("/"): url = url[:-1]
    if url.startswith('http'):
        url = url.replace("%", "%25").replace(" ", "%20")
    else:
        brch = "bugfix-2.1.x"
        if '@' in path: path, brch = map(str.strip, path.split('@'))
        url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}"

    # Find a suitable fetch command
    if shutil.which("curl") is not None:
        fetch = "curl -L -s -S -f -o"
    elif shutil.which("wget") is not None:
        fetch = "wget -q -O"
    else:
        blab("Couldn't find curl or wget", -1)
        return False

    import os

    # Reset configurations to default
    os.system("git reset --hard HEAD")

    # Try to fetch the remote files
    gotfile = False
    for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"):
        if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0:
            shutil.move('wgot', config_path(fn))
            gotfile = True

    if Path('wgot').exists(): shutil.rmtree('wgot')

    return gotfile

def section_items(cp, sectkey):
    return cp.items(sectkey) if sectkey in cp.sections() else []

# Apply all items from a config section
def apply_ini_by_name(cp, sect):
    iniok = True
    if sect in ('config:base', 'config:root'):
        iniok = False
        items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
    else:
        items = cp.items(sect)

    for item in items:
        if iniok or not item[0].startswith('ini_'):
            apply_opt(item[0], item[1])

# Apply all config sections from a parsed file
def apply_all_sections(cp):
    for sect in cp.sections():
        if sect.startswith('config:'):
            apply_ini_by_name(cp, sect)

# Apply certain config sections from a parsed file
def apply_sections(cp, ckey='all'):
    blab(f"Apply section key: {ckey}")
    if ckey == 'all':
        apply_all_sections(cp)
    else:
        # Apply the base/root config.ini settings after external files are done
        if ckey in ('base', 'root'):
            apply_ini_by_name(cp, 'config:base')

        # Apply historically 'Configuration.h' settings everywhere
        if ckey == 'basic':
            apply_ini_by_name(cp, 'config:basic')

        # Apply historically Configuration_adv.h settings everywhere
        # (Some of which rely on defines in 'Conditionals_LCD.h')
        elif ckey in ('adv', 'advanced'):
            apply_ini_by_name(cp, 'config:advanced')

        # Apply a specific config:<name> section directly
        elif ckey.startswith('config:'):
            apply_ini_by_name(cp, ckey)

# Apply settings from a top level config.ini
def apply_config_ini(cp):
    blab("=" * 20 + " Gather 'config.ini' entries...")

    # Pre-scan for ini_use_config to get config_keys
    base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
    config_keys = ['base']
    for ikey, ival in base_items:
        if ikey == 'ini_use_config':
            config_keys = map(str.strip, ival.split(','))

    # For each ini_use_config item perform an action
    for ckey in config_keys:
        addbase = False

        # For a key ending in .ini load and parse another .ini file
        if ckey.endswith('.ini'):
            sect = 'base'
            if '@' in ckey: sect, ckey = ckey.split('@')
            other_ini = configparser.ConfigParser()
            other_ini.read(config_path(ckey))
            apply_sections(other_ini, sect)

        # (Allow 'example/' as a shortcut for 'examples/')
        elif ckey.startswith('example/'):
            ckey = 'examples' + ckey[7:]

        # For 'examples/<path>' fetch an example set from GitHub.
        # For https?:// do a direct fetch of the URL.
        elif ckey.startswith('examples/') or ckey.startswith('http'):
            fetch_example(ckey)
            ckey = 'base'

        # Apply keyed sections after external files are done
        apply_sections(cp, 'config:' + ckey)

if __name__ == "__main__":
    #
    # From command line use the given file name
    #
    import sys
    args = sys.argv[1:]
    if len(args) > 0:
        if args[0].endswith('.ini'):
            ini_file = args[0]
        else:
            print("Usage: %s <.ini file>" % sys.argv[0])
    else:
        ini_file = config_path('config.ini')

    if ini_file:
        user_ini = configparser.ConfigParser()
        user_ini.read(ini_file)
        apply_config_ini(user_ini)

else:
    #
    # From within PlatformIO use the loaded INI file
    #
    import pioutil
    if pioutil.is_pio_build():

        Import("env")

        try:
            verbose = int(env.GetProjectOption('custom_verbose'))
        except:
            pass

        from platformio.project.config import ProjectConfig
        apply_config_ini(ProjectConfig())
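The option regex from apply_opt() above is the heart of this script: it matches enabled and commented #define lines alike and captures the pieces the function reassembles. A standalone check against an illustrative sample line:

import re
name = "MOTHERBOARD"
regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)
m = regex.match("  //#define MOTHERBOARD BOARD_RAMPS_14_EFB // comment")
assert m and m[4] == "MOTHERBOARD" and m[6] == "BOARD_RAMPS_14_EFB"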
@@ -6,13 +6,13 @@
#
import pioutil
if pioutil.is_pio_build():
    import marlin
    board = marlin.env.BoardConfig()

    address = board.get("build.address", "")
    if address:
        marlin.relocate_firmware(address)

    ldscript = board.get("build.ldscript", "")
    if ldscript:
        marlin.custom_ld_script(ldscript)
@@ -4,50 +4,50 @@
#
import pioutil
if pioutil.is_pio_build():
    Import("env")
    import requests,zipfile,tempfile,shutil
    from pathlib import Path

    url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
    deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR"))
    zip_path = deps_path / "mks-assets.zip"
    assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")

    def download_mks_assets():
        print("Downloading MKS Assets")
        r = requests.get(url, stream=True)
        # The user may have a very clean workspace,
        # so create the PROJECT_LIBDEPS_DIR directory if it doesn't exist
        if not deps_path.exists():
            deps_path.mkdir()
        with zip_path.open('wb') as fd:
            for chunk in r.iter_content(chunk_size=128):
                fd.write(chunk)

    def copy_mks_assets():
        print("Copying MKS Assets")
        output_path = Path(tempfile.mkdtemp())
        zip_obj = zipfile.ZipFile(zip_path, 'r')
        zip_obj.extractall(output_path)
        zip_obj.close()
        if assets_path.exists() and not assets_path.is_dir():
            assets_path.unlink()
        if not assets_path.exists():
            assets_path.mkdir()
        base_path = ''
        for filename in output_path.iterdir():
            base_path = filename
        fw_path = (output_path / base_path / 'Firmware')
        font_path = fw_path / 'mks_font'
        for filename in font_path.iterdir():
            shutil.copy(font_path / filename, assets_path)
        pic_path = fw_path / 'mks_pic'
        for filename in pic_path.iterdir():
            shutil.copy(pic_path / filename, assets_path)
        shutil.rmtree(output_path, ignore_errors=True)

    if not zip_path.exists():
        download_mks_assets()

    if not assets_path.exists():
        copy_mks_assets()
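The download above uses requests' streaming mode, which keeps memory flat for a large archive instead of buffering the whole response. The idiom in isolation (URL and filename are illustrative):

import requests
r = requests.get("https://example.com/assets.zip", stream=True)
with open("assets.zip", "wb") as fd:
    for chunk in r.iter_content(chunk_size=128):
        fd.write(chunk)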
@ -4,32 +4,32 @@
import pioutil
if pioutil.is_pio_build():

    import shutil
    from os.path import join, isfile
    from pprint import pprint

    Import("env")

    if env.MarlinHas("POSTMORTEM_DEBUGGING"):
        FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
        patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")

        # patch file only if we didn't do it before
        if not isfile(patchflag_path):
            print("Patching libmaple exception handlers")
            original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
            backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
            src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")

            assert isfile(original_file) and isfile(src_file)
            shutil.copyfile(original_file, backup_file)
            shutil.copyfile(src_file, original_file)

            def _touch(path):
                with open(path, "w") as fp:
                    fp.write("")

            env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
            print("Done patching exception handler")

        print("Libmaple modified and ready for post mortem debugging")
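The flag-file guard above is a general "patch once" idiom: perform an irreversible framework edit only if a marker file is absent, then create the marker so subsequent builds skip the step. A minimal sketch (the marker name here is invented for illustration):

    from os.path import isfile, join

    def patch_once(framework_dir, do_patch):
        flag = join(framework_dir, ".patching-done")    # hypothetical marker file
        if not isfile(flag):
            do_patch()
            with open(flag, "w") as fp:
                fp.write("")                            # an empty file serves as the flag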
@ -7,52 +7,52 @@
#
import pioutil
if pioutil.is_pio_build():
    import shutil,marlin
    from pathlib import Path

    #
    # Get the platform name from the 'platform_packages' option,
    # or look it up by the platform.class.name.
    #
    env = marlin.env
    platform = env.PioPlatform()

    from platformio.package.meta import PackageSpec
    platform_packages = env.GetProjectOption('platform_packages')

    # Remove all tool items from platform_packages
    platform_packages = [x for x in platform_packages if not x.startswith("platformio/tool-")]

    if len(platform_packages) == 0:
        framewords = {
            "Ststm32Platform": "framework-arduinoststm32",
            "AtmelavrPlatform": "framework-arduino-avr"
        }
        platform_name = framewords[platform.__class__.__name__]
    else:
        platform_name = PackageSpec(platform_packages[0]).name

    if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]:
        platform_name = "framework-arduinoststm32"

    FRAMEWORK_DIR = Path(platform.get_package_dir(platform_name))
    assert FRAMEWORK_DIR.is_dir()

    board = env.BoardConfig()

    #mcu_type = board.get("build.mcu")[:-2]
    variant = board.get("build.variant")
    #series = mcu_type[:7].upper() + "xx"

    # Prepare a new empty folder at the destination
    variant_dir = FRAMEWORK_DIR / "variants" / variant
    if variant_dir.is_dir():
        shutil.rmtree(variant_dir)
    if not variant_dir.is_dir():
        variant_dir.mkdir()

    # Source dir is a local variant sub-folder
    source_dir = Path("buildroot/share/PlatformIO/variants", variant)
    assert source_dir.is_dir()

    marlin.copytree(source_dir, variant_dir)
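The last few lines implement a replace-the-folder install: any stale variant is wiped before the local files are copied in. Isolated, under the assumption that plain stdlib shutil.copytree is acceptable (the script uses its own marlin.copytree so the pre-created destination can receive file-by-file copies):

    import shutil
    from pathlib import Path

    def install_variant(src: Path, dst: Path):
        if dst.is_dir():
            shutil.rmtree(dst)      # drop any stale copy first
        shutil.copytree(src, dst)   # stdlib copytree creates dst itself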
@ -5,31 +5,31 @@
import pioutil
if pioutil.is_pio_build():

    # Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
    def addboot(source, target, env):
        from pathlib import Path

        fw_path = Path(target[0].path)
        fwb_path = fw_path.parent / 'firmware_with_bootloader.bin'
        with fwb_path.open("wb") as fwb_file:
            bl_path = Path("buildroot/share/PlatformIO/scripts/jgaurora_bootloader.bin")
            bl_file = bl_path.open("rb")
            while True:
                b = bl_file.read(1)
                if b == b'': break
                else: fwb_file.write(b)

            with fw_path.open("rb") as fw_file:
                while True:
                    b = fw_file.read(1)
                    if b == b'': break
                    else: fwb_file.write(b)

        fws_path = Path(target[0].dir.path, 'firmware_for_sd_upload.bin')
        if fws_path.exists():
            fws_path.unlink()

        fw_path.rename(fws_path)

    import marlin
    marlin.add_post_action(addboot)
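Since the script simply streams bootloader bytes followed by firmware bytes into one output file, an equivalent (if less memory-frugal) version reads each file whole. A sketch with hypothetical paths:

    from pathlib import Path

    def append_bootloader(bl_path: Path, fw_path: Path, out_path: Path):
        # bootloader first, firmware immediately after
        out_path.write_bytes(bl_path.read_bytes() + fw_path.read_bytes())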
@ -7,41 +7,41 @@
#
import pioutil
if pioutil.is_pio_build():
    import os,marlin

    board = marlin.env.BoardConfig()

    def encryptByte(byte):
        byte = 0xFF & ((byte << 6) | (byte >> 2))
        i = 0x58 + byte
        j = 0x05 + byte + (i >> 8)
        byte = (0xF8 & i) | (0x07 & j)
        return byte

    def encrypt_file(input, output_file, file_length):
        input_file = bytearray(input.read())
        for i in range(len(input_file)):
            input_file[i] = encryptByte(input_file[i])
        output_file.write(input_file)

    # Encrypt ${PROGNAME}.bin and save it with the name given in build.crypt_lerdge
    def encrypt(source, target, env):
        fwpath = target[0].path
        enname = board.get("build.crypt_lerdge")
        print("Encrypting %s to %s" % (fwpath, enname))
        fwfile = open(fwpath, "rb")
        enfile = open(target[0].dir.path + "/" + enname, "wb")
        length = os.path.getsize(fwpath)

        encrypt_file(fwfile, enfile, length)

        fwfile.close()
        enfile.close()
        os.remove(fwpath)

    if 'crypt_lerdge' in board.get("build").keys():
        if board.get("build.crypt_lerdge") != "":
            marlin.add_post_action(encrypt)
    else:
        print("LERDGE builds require output file via board_build.crypt_lerdge = 'filename' parameter")
        exit(1)
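encryptByte is a fixed byte substitution: rotate the byte left by six bits (the low two bits wrap to the top), then fold in the constants 0x58 and 0x05, letting the carry out of the first sum nudge the second. One value worked through as a sanity check:

    def encryptByte(byte):
        byte = 0xFF & ((byte << 6) | (byte >> 2))   # rotate left 6 (= rotate right 2)
        i = 0x58 + byte
        j = 0x05 + byte + (i >> 8)                  # (i >> 8) is the carry out of i
        return (0xF8 & i) | (0x07 & j)

    # 0x00 rotates to 0x00, so i = 0x58, j = 0x05, result = 0x58 | 0x05
    assert encryptByte(0x00) == 0x5D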
@ -9,64 +9,64 @@ from SCons.Script import DefaultEnvironment
env = DefaultEnvironment()

def copytree(src, dst, symlinks=False, ignore=None):
    for item in src.iterdir():
        if item.is_dir():
            shutil.copytree(item, dst / item.name, symlinks, ignore)
        else:
            shutil.copy2(item, dst / item.name)

def replace_define(field, value):
    for define in env['CPPDEFINES']:
        if define[0] == field:
            env['CPPDEFINES'].remove(define)
    env['CPPDEFINES'].append((field, value))

# Relocate the firmware to a new address, such as "0x08005000"
def relocate_firmware(address):
    replace_define("VECT_TAB_ADDR", address)

# Relocate the vector table with a new offset
def relocate_vtab(address):
    replace_define("VECT_TAB_OFFSET", address)

# Replace the existing -Wl,-T with the given ldscript path
def custom_ld_script(ldname):
    apath = str(Path("buildroot/share/PlatformIO/ldscripts", ldname).resolve())
    for i, flag in enumerate(env["LINKFLAGS"]):
        if "-Wl,-T" in flag:
            env["LINKFLAGS"][i] = "-Wl,-T" + apath
        elif flag == "-T":
            env["LINKFLAGS"][i + 1] = apath

# Encrypt ${PROGNAME}.bin and save it with a new name. This applies (mostly) to MKS boards
# This PostAction is set up by offset_and_rename.py for envs with 'build.encrypt_mks'.
def encrypt_mks(source, target, env, new_name):
    import sys

    key = [0xA3, 0xBD, 0xAD, 0x0D, 0x41, 0x11, 0xBB, 0x8D, 0xDC, 0x80, 0x2D, 0xD0, 0xD2, 0xC4, 0x9B, 0x1E, 0x26, 0xEB, 0xE3, 0x33, 0x4A, 0x15, 0xE4, 0x0A, 0xB3, 0xB1, 0x3C, 0x93, 0xBB, 0xAF, 0xF7, 0x3E]

    # If FIRMWARE_BIN is defined by config, override all
    mf = env["MARLIN_FEATURES"]
    if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"]

    fwpath = Path(target[0].path)
    fwfile = fwpath.open("rb")
    enfile = Path(target[0].dir.path, new_name).open("wb")
    length = fwpath.stat().st_size
    position = 0
    try:
        while position < length:
            byte = fwfile.read(1)
            if 320 <= position < 31040:
                byte = chr(ord(byte) ^ key[position & 31])
                if sys.version_info[0] > 2:
                    byte = bytes(byte, 'latin1')
            enfile.write(byte)
            position += 1
    finally:
        fwfile.close()
        enfile.close()
        fwpath.unlink()

def add_post_action(action):
    env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action)
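Only the window from byte 320 up to (but not including) byte 31040 is obscured, and the 32-byte key repeats via 'position & 31'. The same transform over a whole buffer at once, as a sketch (helper name invented here):

    def mks_xor(data: bytes, key) -> bytes:
        out = bytearray(data)
        for pos in range(320, min(31040, len(out))):
            out[pos] ^= key[pos & 31]   # pos & 31 cycles through the 32-byte key
        return bytes(out)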
@ -11,59 +11,59 @@ opt_output = '--opt' in sys.argv
output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen'

try:
    with open('marlin_config.json', 'r') as infile:
        conf = json.load(infile)
        for key in conf:
            # We don't care about the hash when restoring here
            if key == '__INITIAL_HASH':
                continue
            if key == 'VERSION':
                for k, v in sorted(conf[key].items()):
                    print(k + ': ' + v)
                continue
            # The key is the file name, so let's build it now
            outfile = open('Marlin/' + key + output_suffix, 'w')
            for k, v in sorted(conf[key].items()):
                # Make define line now
                if opt_output:
                    if v != '':
                        if '"' in v:
                            v = "'%s'" % v
                        elif ' ' in v:
                            v = '"%s"' % v
                        define = 'opt_set ' + k + ' ' + v + '\n'
                    else:
                        define = 'opt_enable ' + k + '\n'
                else:
                    define = '#define ' + k + ' ' + v + '\n'
                outfile.write(define)
            outfile.close()

            # Try to apply changes to the actual configuration file (in order to keep useful comments)
            if output_suffix != '':
                # Move the existing configuration so it doesn't interfere
                shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
                infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
                outfile = open('Marlin/' + key, 'w')
                for line in infile_lines:
                    sline = line.strip(" \t\n\r")
                    if sline[:7] == "#define":
                        # Extract the key here (we don't care about the value)
                        kv = sline[8:].strip().split(' ')
                        if kv[0] in conf[key]:
                            outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
                            # Remove the key from the dict, so we can still write all missing keys at the end of the file
                            del conf[key][kv[0]]
                        else:
                            outfile.write(line + '\n')
                    else:
                        outfile.write(line + '\n')
                # Process any remaining defines here
                for k, v in sorted(conf[key].items()):
                    define = '#define ' + k + ' ' + v + '\n'
                    outfile.write(define)
                outfile.close()

            print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
except:
    print('No marlin_config.json found.')
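The value-quoting rule in the '--opt' branch, pulled out on its own (the helper name is invented here): a value containing a double quote is wrapped in single quotes, a value containing a space in double quotes, and anything else passes through unchanged.

    def opt_quote(v: str) -> str:
        if '"' in v: return "'%s'" % v
        if ' ' in v: return '"%s"' % v
        return v

    assert opt_quote('a b') == '"a b"'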
@ -2,59 +2,59 @@
# offset_and_rename.py
#
# - If 'build.offset' is provided, either by JSON or by the environment...
#   - Set linker flag LD_FLASH_OFFSET and relocate the VTAB based on 'build.offset'.
#   - Set linker flag LD_MAX_DATA_SIZE based on 'build.maximum_ram_size'.
#   - Define STM32_FLASH_SIZE from 'upload.maximum_size' for use by Flash-based EEPROM emulation.
#
# - For 'board_build.rename' add a post-action to rename the firmware file.
#
import pioutil
if pioutil.is_pio_build():
    import sys,marlin

    env = marlin.env
    board = env.BoardConfig()
    board_keys = board.get("build").keys()

    #
    # For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
    #
    if 'offset' in board_keys:
        LD_FLASH_OFFSET = board.get("build.offset")
        marlin.relocate_vtab(LD_FLASH_OFFSET)

        # Flash size
        maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
        marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)

        # Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
        maximum_ram_size = board.get("upload.maximum_ram_size")

        for i, flag in enumerate(env["LINKFLAGS"]):
            if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
                env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
            if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
                env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)

    #
    # For build.encrypt_mks rename and encode the firmware file.
    #
    if 'encrypt_mks' in board_keys:

        # Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt_mks
        def encrypt(source, target, env):
            marlin.encrypt_mks(source, target, env, board.get("build.encrypt_mks"))

        if board.get("build.encrypt_mks") != "":
            marlin.add_post_action(encrypt)

    #
    # For build.rename simply rename the firmware file.
    #
    if 'rename' in board_keys:

        def rename_target(source, target, env):
            from pathlib import Path
            Path(target[0].path).replace(Path(target[0].dir.path, board.get("build.rename")))

        marlin.add_post_action(rename_target)
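The LINKFLAGS rewrite above relies on each --defsym entry being a single list item of the form '-Wl,--defsym=NAME=VALUE'. Isolated as a sketch (helper name invented here):

    def set_defsym(linkflags, name, value):
        for i, flag in enumerate(linkflags):
            if "-Wl,--defsym=" + name in flag:
                linkflags[i] = "-Wl,--defsym=%s=%s" % (name, value)

    flags = ["-Wl,--defsym=LD_FLASH_OFFSET=0x0"]
    set_defsym(flags, "LD_FLASH_OFFSET", "0x8000")
    assert flags == ["-Wl,--defsym=LD_FLASH_OFFSET=0x8000"]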
@ -3,18 +3,18 @@
#
import pioutil
if pioutil.is_pio_build():
    import os,sys
    from os.path import join

    Import("env")

    board = env.BoardConfig()
    board_keys = board.get("build").keys()
    if 'encode' in board_keys:
        env.AddPostAction(
            join("$BUILD_DIR", "${PROGNAME}.bin"),
            env.VerboseAction(" ".join([
                "$OBJCOPY", "-O", "srec",
                "\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encode")) + "\""
            ]), "Building " + board.get("build.encode"))
        )
@ -4,10 +4,10 @@

# Make sure 'vscode init' is not the current command
def is_pio_build():
    from SCons.Script import DefaultEnvironment
    env = DefaultEnvironment()
    return not env.IsIntegrationDump()

def get_pio_version():
    from platformio import util
    return util.pioversion_to_intstr()
@ -5,123 +5,123 @@
import pioutil
if pioutil.is_pio_build():

    import os,re,sys
    from pathlib import Path
    Import("env")

    def get_envs_for_board(board):
        ppath = Path("Marlin/src/pins/pins.h")
        with ppath.open() as file:

            if sys.platform == 'win32':
                envregex = r"(?:env|win):"
            elif sys.platform == 'darwin':
                envregex = r"(?:env|mac|uni):"
            elif sys.platform == 'linux':
                envregex = r"(?:env|lin|uni):"
            else:
                envregex = r"(?:env):"

            r = re.compile(r"if\s+MB\((.+)\)")
            if board.startswith("BOARD_"):
                board = board[6:]

            for line in file:
                mbs = r.findall(line)
                if mbs and board in re.split(r",\s*", mbs[0]):
                    line = file.readline()
                    found_envs = re.match(r"\s*#include .+" + envregex, line)
                    if found_envs:
                        envlist = re.findall(envregex + r"(\w+)", line)
                        return [ "env:"+s for s in envlist ]
        return []

    def check_envs(build_env, board_envs, config):
        if build_env in board_envs:
            return True
        ext = config.get(build_env, 'extends', default=None)
        if ext:
            if isinstance(ext, str):
                return check_envs(ext, board_envs, config)
            elif isinstance(ext, list):
                for ext_env in ext:
                    if check_envs(ext_env, board_envs, config):
                        return True
        return False

    def sanity_check_target():
        # Sanity checks:
        if 'PIOENV' not in env:
            raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")

        # Require PlatformIO 6.1.1 or later
        vers = pioutil.get_pio_version()
        if vers < [6, 1, 1]:
            raise SystemExit("Error: Marlin requires PlatformIO >= 6.1.1. Use 'pio upgrade' to get a newer version.")

        if 'MARLIN_FEATURES' not in env:
            raise SystemExit("Error: this script should be used after common Marlin scripts")

        if 'MOTHERBOARD' not in env['MARLIN_FEATURES']:
            raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h")

        build_env = env['PIOENV']
        motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
        board_envs = get_envs_for_board(motherboard)
        config = env.GetProjectConfig()
        result = check_envs("env:"+build_env, board_envs, config)

        if not result:
            err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
                  ( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
            raise SystemExit(err)

        #
        # Check for Config files in two common incorrect places
        #
        epath = Path(env['PROJECT_DIR'])
        for p in [ epath, epath / "config" ]:
            for f in ("Configuration.h", "Configuration_adv.h"):
                if (p / f).is_file():
                    err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
                    raise SystemExit(err)

        #
        # Find the name.cpp.o or name.o and remove it
        #
        def rm_ofile(subdir, name):
            build_dir = Path(env['PROJECT_BUILD_DIR'], build_env)
            for outdir in (build_dir, build_dir / "debug"):
                for ext in (".cpp.o", ".o"):
                    fpath = outdir / "src/src" / subdir / (name + ext)
                    if fpath.exists():
                        fpath.unlink()

        #
        # Give warnings on every build
        #
        rm_ofile("inc", "Warnings")

        #
        # Rebuild 'settings.cpp' for EEPROM_INIT_NOW
        #
        if 'EEPROM_INIT_NOW' in env['MARLIN_FEATURES']:
            rm_ofile("module", "settings")

        #
        # Check for old files indicating an entangled Marlin (mixing old and new code)
        #
        mixedin = []
        p = Path(env['PROJECT_DIR'], "Marlin/src/lcd/dogm")
        for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
            if (p / f).is_file():
                mixedin += [ f ]
        p = Path(env['PROJECT_DIR'], "Marlin/src/feature/bedlevel/abl")
        for f in [ "abl.cpp", "abl.h" ]:
            if (p / f).is_file():
                mixedin += [ f ]
        if mixedin:
            err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
            raise SystemExit(err)

    sanity_check_target()
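check_envs() walks platformio.ini 'extends' chains so a derived environment counts as valid when any ancestor appears in the board's list. The same recursion over a plain dict, with toy names invented for illustration:

    def resolves_to(envname, targets, extends_map):
        if envname in targets: return True
        ext = extends_map.get(envname)
        if isinstance(ext, str): return resolves_to(ext, targets, extends_map)
        if isinstance(ext, list): return any(resolves_to(e, targets, extends_map) for e in ext)
        return False

    # 'env:child' extends 'env:base', so it satisfies a board that lists 'env:base'
    assert resolves_to('env:child', ['env:base'], {'env:child': 'env:base'})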
@ -7,8 +7,8 @@ nocache = 1
    verbose = 0

    def blab(str):
        if verbose:
            print(str)

    ################################################################################
    #
@ -16,36 +16,36 @@ def blab(str):
    #
    preprocessor_cache = {}
    def run_preprocessor(env, fn=None):
        filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h'
        if filename in preprocessor_cache:
            return preprocessor_cache[filename]

        # Process defines
        build_flags = env.get('BUILD_FLAGS')
        build_flags = env.ParseFlagsExtended(build_flags)

        cxx = search_compiler(env)
        cmd = ['"' + cxx + '"']

        # Build flags from board.json
        #if 'BOARD' in env:
        #   cmd += [env.BoardConfig().get("build.extra_flags")]
        for s in build_flags['CPPDEFINES']:
            if isinstance(s, tuple):
                cmd += ['-D' + s[0] + '=' + str(s[1])]
            else:
                cmd += ['-D' + s]

        cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++']
        depcmd = cmd + [ filename ]
        cmd = ' '.join(depcmd)
        blab(cmd)
        try:
            define_list = subprocess.check_output(cmd, shell=True).splitlines()
        except:
            define_list = {}
        preprocessor_cache[filename] = define_list
        return define_list

    ################################################################################
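The command this assembles asks the compiler to preprocess a header and dump every macro it ends up defining. Stripped to its essence (this sketch assumes a POSIX shell with g++ on the PATH):

    import subprocess
    defines = subprocess.check_output('g++ -dM -E -x c++ /dev/null', shell=True).splitlines()
    # each entry is a line like b'#define __GNUC__ 12'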
@ -54,41 +54,41 @@ def run_preprocessor(env, fn=None):
    #
    def search_compiler(env):

        from pathlib import Path, PurePath

        ENV_BUILD_PATH = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
        GCC_PATH_CACHE = ENV_BUILD_PATH / ".gcc_path"

        try:
            gccpath = env.GetProjectOption('custom_gcc')
            blab("Getting compiler from env")
            return gccpath
        except:
            pass

        # Warning: The cached .gcc_path will obscure a newly-installed toolkit
        if not nocache and GCC_PATH_CACHE.exists():
            blab("Getting g++ path from cache")
            return GCC_PATH_CACHE.read_text()

        # Use any item in $PATH corresponding to a platformio toolchain bin folder
        path_separator = ':'
        gcc_exe = '*g++'
        if env['PLATFORM'] == 'win32':
            path_separator = ';'
            gcc_exe += ".exe"

        # Search for the compiler in PATH
        for ppath in map(Path, env['ENV']['PATH'].split(path_separator)):
            if ppath.match(env['PROJECT_PACKAGES_DIR'] + "/**/bin"):
                for gpath in ppath.glob(gcc_exe):
                    gccpath = str(gpath.resolve())
                    # Cache the g++ path to avoid searching on every build
                    if not nocache and ENV_BUILD_PATH.exists():
                        blab("Caching g++ for current env")
                        GCC_PATH_CACHE.write_text(gccpath)
                    return gccpath

        gccpath = env.get('CXX')
        blab("Couldn't find a compiler! Fallback to %s" % gccpath)
        return gccpath
@ -4,6 +4,6 @@
#
import pioutil
if pioutil.is_pio_build():
    from datetime import datetime
    Import("env")
    env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")
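For reference, that strftime pattern yields names like the following (a fixed datetime is used here so the output is predictable):

    from datetime import datetime
    name = datetime(2022, 5, 1, 12, 30, 45).strftime("firmware-%Y%m%d-%H%M%S")
    assert name == "firmware-20220501-123045"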
@ -9,413 +9,413 @@ import re,json
from pathlib import Path

def extend_dict(d:dict, k:tuple):
    if len(k) >= 1 and k[0] not in d:
        d[k[0]] = {}
    if len(k) >= 2 and k[1] not in d[k[0]]:
        d[k[0]][k[1]] = {}
    if len(k) >= 3 and k[2] not in d[k[0]][k[1]]:
        d[k[0]][k[1]][k[2]] = {}

grouping_patterns = [
    re.compile(r'^([XYZIJKUVW]|[XYZ]2|Z[34]|E[0-7])$'),
    re.compile(r'^AXIS\d$'),
    re.compile(r'^(MIN|MAX)$'),
    re.compile(r'^[0-8]$'),
    re.compile(r'^HOTEND[0-7]$'),
    re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
    re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
]
# If the indexed part of the option name matches a pattern
# then add it to the dictionary.
def find_grouping(gdict, filekey, sectkey, optkey, pindex):
    optparts = optkey.split('_')
    if 1 < len(optparts) > pindex:
        for patt in grouping_patterns:
            if patt.match(optparts[pindex]):
                subkey = optparts[pindex]
                modkey = '_'.join(optparts)
                optparts[pindex] = '*'
                wildkey = '_'.join(optparts)
                kkey = f'{filekey}|{sectkey}|{wildkey}'
                if kkey not in gdict: gdict[kkey] = []
                gdict[kkey].append((subkey, modkey))

# Build a list of potential groups. Only those with multiple items will be grouped.
def group_options(schema):
    for pindex in range(10, -1, -1):
        found_groups = {}
        for filekey, f in schema.items():
            for sectkey, s in f.items():
                for optkey in s:
                    find_grouping(found_groups, filekey, sectkey, optkey, pindex)

        fkeys = [ k for k in found_groups.keys() ]
        for kkey in fkeys:
            items = found_groups[kkey]
            if len(items) > 1:
                f, s, w = kkey.split('|')
                extend_dict(schema, (f, s, w))                      # Add wildcard group to schema
                for subkey, optkey in items:                        # Add all items to wildcard group
                    schema[f][s][w][subkey] = schema[f][s][optkey]  # Move non-wildcard item to wildcard group
                    del schema[f][s][optkey]
            del found_groups[kkey]

# Extract all board names from boards.h
def load_boards():
    bpath = Path("Marlin/src/core/boards.h")
    if bpath.is_file():
        with bpath.open() as bfile:
            boards = []
            for line in bfile:
                if line.startswith("#define BOARD_"):
                    bname = line.split()[1]
                    if bname != "BOARD_UNKNOWN": boards.append(bname)
            return "['" + "','".join(boards) + "']"
    return ''

#
# Extract a schema from the current configuration files
#
def extract():
    # Load board names from boards.h
    boards = load_boards()

    # Parsing states
    class Parse:
        NORMAL = 0          # No condition yet
        BLOCK_COMMENT = 1   # Looking for the end of the block comment
        EOL_COMMENT = 2     # EOL comment started, maybe add the next comment?
        GET_SENSORS = 3     # Gathering temperature sensor options
        ERROR = 9           # Syntax error

    # List of files to process, with shorthand
    filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' }
    # A JSON object to store the data
    sch_out = { 'basic':{}, 'advanced':{} }
    # Regex for #define NAME [VALUE] [COMMENT] with sanitized line
    defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
    # Defines to ignore
    ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
    # Start with unknown state
    state = Parse.NORMAL
    # Serial ID
    sid = 0
    # Loop through files and parse them line by line
    for fn, fk in filekey.items():
        with Path("Marlin", fn).open() as fileobj:
            section = 'none'        # Current Settings section
            line_number = 0         # Counter for the line number of the file
            conditions = []         # Create a condition stack for the current file
            comment_buff = []       # A temporary buffer for comments
            options_json = ''       # A buffer for the most recent options JSON found
            eol_options = False     # The options came from end of line, so only apply once
            join_line = False       # A flag that the line should be joined with the previous one
            line = ''               # A line buffer to handle \ continuation
            last_added_ref = None   # Reference to the last added item
            # Loop through the lines in the file
            for the_line in fileobj.readlines():
                line_number += 1

                # Clean the line for easier parsing
                the_line = the_line.strip()

                if join_line:   # A previous line is being made longer
                    line += (' ' if line else '') + the_line
                else:           # Otherwise, start the line anew
                    line, line_start = the_line, line_number

                # If the resulting line ends with a \, don't process now.
                # Strip the end off. The next line will be joined with it.
                join_line = line.endswith("\\")
                if join_line:
                    line = line[:-1].strip()
                    continue
                else:
                    line_end = line_number

                defmatch = defgrep.match(line)

                # Special handling for EOL comments after a #define.
                # At this point the #define is already digested and inserted,
                # so we have to extend it
                if state == Parse.EOL_COMMENT:
                    # If the line is not a comment, we're done with the EOL comment
                    if not defmatch and the_line.startswith('//'):
                        comment_buff.append(the_line[2:].strip())
                    else:
                        last_added_ref['comment'] = ' '.join(comment_buff)
                        comment_buff = []
                        state = Parse.NORMAL

                def use_comment(c, opt, sec, bufref):
                    if c.startswith(':'):               # If the comment starts with : then it has magic JSON
                        d = c[1:].strip()               # Strip the leading :
                        cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0
                        if cbr:
                            opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip()
                            if cmt != '': bufref.append(cmt)
                        else:
                            opt = c[1:].strip()
                    elif c.startswith('@section'):      # Start a new section
                        sec = c[8:].strip()
                    elif not c.startswith('========'):
                        bufref.append(c)
                    return opt, sec

                # In a block comment, capture lines up to the end of the comment.
                # Assume nothing follows the comment closure.
                if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS):
                    endpos = line.find('*/')
                    if endpos < 0:
                        cline = line
                    else:
                        cline, line = line[:endpos].strip(), line[endpos+2:].strip()

                        # Temperature sensors are done
                        if state == Parse.GET_SENSORS:
                            options_json = f'[ {options_json[:-2]} ]'

                        state = Parse.NORMAL

                    # Strip the leading '*' from block comments
                    if cline.startswith('*'): cline = cline[1:].strip()

                    # Collect temperature sensors
                    if state == Parse.GET_SENSORS:
                        sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
                        if sens:
                            s2 = sens[2].replace("'","''")
                            options_json += f"{sens[1]}:'{s2}', "

                    elif state == Parse.BLOCK_COMMENT:

                        # Look for temperature sensors
                        if cline == "Temperature sensors available:":
                            state, cline = Parse.GET_SENSORS, "Temperature Sensors"

                        options_json, section = use_comment(cline, options_json, section, comment_buff)

                # For the normal state we're looking for any non-blank line
                elif state == Parse.NORMAL:
                    # Skip a commented define when evaluating comment opening
                    st = 2 if re.match(r'^//\s*#define', line) else 0
                    cpos1 = line.find('/*')     # Start a block comment on the line?
                    cpos2 = line.find('//', st) # Start an end of line comment on the line?

                    # Only the first comment starter gets evaluated
                    cpos = -1
                    if cpos1 != -1 and (cpos1 < cpos2 or cpos2 == -1):
                        cpos = cpos1
                        comment_buff = []
                        state = Parse.BLOCK_COMMENT
                        eol_options = False

                    elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1):
                        cpos = cpos2

                        # Comment after a define may be continued on the following lines
                        if defmatch != None and cpos > 10:
                            state = Parse.EOL_COMMENT
                            comment_buff = []

                    # Process the start of a new comment
                    if cpos != -1:
                        cline, line = line[cpos+2:].strip(), line[:cpos].strip()

                        if state == Parse.BLOCK_COMMENT:
                            # Strip leading '*' from block comments
                            if cline.startswith('*'): cline = cline[1:].strip()
                        else:
                            # Expire end-of-line options after first use
                            if cline.startswith(':'): eol_options = True

                        # Buffer a non-empty comment start
                        if cline != '':
                            options_json, section = use_comment(cline, options_json, section, comment_buff)

                    # If the line has nothing before the comment, go to the next line
                    if line == '':
                        options_json = ''
                        continue

                # Parenthesize the given expression if needed
                def atomize(s):
                    if s == '' \
                    or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \
                    or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s):
                        return s
                    return f'({s})'

                #
                # The conditions stack is an array containing condition-arrays.
                # Each condition-array lists the conditions for the current block.
                # IF/N/DEF adds a new condition-array to the stack.
                # ELSE/ELIF/ENDIF pop the condition-array.
                # ELSE/ELIF negate the last item in the popped condition-array.
                # ELIF adds a new condition to the end of the array.
                # ELSE/ELIF re-push the condition-array.
                #
                cparts = line.split()
                iselif, iselse = cparts[0] == '#elif', cparts[0] == '#else'
                if iselif or iselse or cparts[0] == '#endif':
                    if len(conditions) == 0:
                        raise Exception(f'no #if block at line {line_number}')

                    # Pop the last condition-array from the stack
                    prev = conditions.pop()

                    if iselif or iselse:
                        prev[-1] = '!' + prev[-1] # Invert the last condition
                        if iselif: prev.append(atomize(line[5:].strip()))
                        conditions.append(prev)

                elif cparts[0] == '#if':
                    conditions.append([ atomize(line[3:].strip()) ])
                elif cparts[0] == '#ifdef':
                    conditions.append([ f'defined({line[6:].strip()})' ])
                elif cparts[0] == '#ifndef':
                    conditions.append([ f'!defined({line[7:].strip()})' ])

                # Handle a complete #define line
                elif defmatch != None:

                    # Get the match groups into vars
                    enabled, define_name, val = defmatch[1] == None, defmatch[3], defmatch[4]

                    # Increment the serial ID
                    sid += 1

                    # Create a new dictionary for the current #define
                    define_info = {
                        'section': section,
                        'name': define_name,
                        'enabled': enabled,
'line': line_start,
|
'line': line_start,
|
||||||
'sid': sid
|
'sid': sid
|
||||||
}
|
}
|
||||||
|
|
||||||
# Type is based on the value
|
# Type is based on the value
|
||||||
if val == '':
|
if val == '':
|
||||||
value_type = 'switch'
|
value_type = 'switch'
|
||||||
elif re.match(r'^(true|false)$', val):
|
elif re.match(r'^(true|false)$', val):
|
||||||
value_type = 'bool'
|
value_type = 'bool'
|
||||||
val = val == 'true'
|
val = val == 'true'
|
||||||
elif re.match(r'^[-+]?\s*\d+$', val):
|
elif re.match(r'^[-+]?\s*\d+$', val):
|
||||||
value_type = 'int'
|
value_type = 'int'
|
||||||
val = int(val)
|
val = int(val)
|
||||||
elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val):
|
elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val):
|
||||||
value_type = 'float'
|
value_type = 'float'
|
||||||
val = float(val.replace('f',''))
|
val = float(val.replace('f',''))
|
||||||
else:
|
else:
|
||||||
value_type = 'string' if val[0] == '"' \
|
value_type = 'string' if val[0] == '"' \
|
||||||
else 'char' if val[0] == "'" \
|
else 'char' if val[0] == "'" \
|
||||||
else 'state' if re.match(r'^(LOW|HIGH)$', val) \
|
else 'state' if re.match(r'^(LOW|HIGH)$', val) \
|
||||||
else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
|
else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
|
||||||
else 'int[]' if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
|
else 'int[]' if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
|
||||||
else 'float[]' if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
|
else 'float[]' if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
|
||||||
else 'array' if val[0] == '{' \
|
else 'array' if val[0] == '{' \
|
||||||
else ''
|
else ''
|
||||||
|
|
||||||
if val != '': define_info['value'] = val
|
if val != '': define_info['value'] = val
|
||||||
if value_type != '': define_info['type'] = value_type
|
if value_type != '': define_info['type'] = value_type
|
||||||
|
|
||||||
# Join up accumulated conditions with &&
|
# Join up accumulated conditions with &&
|
||||||
if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))
|
if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))
|
||||||
|
|
||||||
# If the comment_buff is not empty, add the comment to the info
|
# If the comment_buff is not empty, add the comment to the info
|
||||||
if comment_buff:
|
if comment_buff:
|
||||||
full_comment = '\n'.join(comment_buff)
|
full_comment = '\n'.join(comment_buff)
|
||||||
|
|
||||||
# An EOL comment will be added later
|
# An EOL comment will be added later
|
||||||
# The handling could go here instead of above
|
# The handling could go here instead of above
|
||||||
if state == Parse.EOL_COMMENT:
|
if state == Parse.EOL_COMMENT:
|
||||||
define_info['comment'] = ''
|
define_info['comment'] = ''
|
||||||
else:
|
else:
|
||||||
define_info['comment'] = full_comment
|
define_info['comment'] = full_comment
|
||||||
comment_buff = []
|
comment_buff = []
|
||||||
|
|
||||||
# If the comment specifies units, add that to the info
|
# If the comment specifies units, add that to the info
|
||||||
units = re.match(r'^\(([^)]+)\)', full_comment)
|
units = re.match(r'^\(([^)]+)\)', full_comment)
|
||||||
if units:
|
if units:
|
||||||
units = units[1]
|
units = units[1]
|
||||||
if units == 's' or units == 'sec': units = 'seconds'
|
if units == 's' or units == 'sec': units = 'seconds'
|
||||||
define_info['units'] = units
|
define_info['units'] = units
|
||||||
|
|
||||||
# Set the options for the current #define
|
# Set the options for the current #define
|
||||||
if define_name == "MOTHERBOARD" and boards != '':
|
if define_name == "MOTHERBOARD" and boards != '':
|
||||||
define_info['options'] = boards
|
define_info['options'] = boards
|
||||||
elif options_json != '':
|
elif options_json != '':
|
||||||
define_info['options'] = options_json
|
define_info['options'] = options_json
|
||||||
if eol_options: options_json = ''
|
if eol_options: options_json = ''
|
||||||
|
|
||||||
# Create section dict if it doesn't exist yet
|
# Create section dict if it doesn't exist yet
|
||||||
if section not in sch_out[fk]: sch_out[fk][section] = {}
|
if section not in sch_out[fk]: sch_out[fk][section] = {}
|
||||||
|
|
||||||
# If define has already been seen...
|
# If define has already been seen...
|
||||||
if define_name in sch_out[fk][section]:
|
if define_name in sch_out[fk][section]:
|
||||||
info = sch_out[fk][section][define_name]
|
info = sch_out[fk][section][define_name]
|
||||||
if isinstance(info, dict): info = [ info ] # Convert a single dict into a list
|
if isinstance(info, dict): info = [ info ] # Convert a single dict into a list
|
||||||
info.append(define_info) # Add to the list
|
info.append(define_info) # Add to the list
|
||||||
else:
|
else:
|
||||||
# Add the define dict with name as key
|
# Add the define dict with name as key
|
||||||
sch_out[fk][section][define_name] = define_info
|
sch_out[fk][section][define_name] = define_info
|
||||||
|
|
||||||
if state == Parse.EOL_COMMENT:
|
if state == Parse.EOL_COMMENT:
|
||||||
last_added_ref = define_info
|
last_added_ref = define_info
|
||||||
|
|
||||||
return sch_out
|
return sch_out
|
||||||
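Aside: a minimal standalone sketch (not part of this commit) of how the conditions stack above evolves, using invented preprocessor input. An `#ifdef` pushes a fresh condition-array; a following `#elif` pops it, negates the last condition, appends the atomized new one, and re-pushes:

conditions = []
conditions.append([ 'defined(PIDTEMP)' ])      # '#ifdef PIDTEMP' pushes a new condition-array
prev = conditions.pop()                        # '#elif TEMP_SENSOR_BED != 0' pops it back off...
prev[-1] = '!' + prev[-1]                      # ...negates the last condition...
prev.append('(TEMP_SENSOR_BED != 0)')          # ...appends the new one, wrapped by atomize()...
conditions.append(prev)                        # ...and re-pushes the condition-array
print(' && '.join(sum(conditions, [])))        # -> !defined(PIDTEMP) && (TEMP_SENSOR_BED != 0)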

def dump_json(schema:dict, jpath:Path):
    with jpath.open('w') as jfile:
        json.dump(schema, jfile, ensure_ascii=False, indent=2)

def dump_yaml(schema:dict, ypath:Path):
    import yaml
    with ypath.open('w') as yfile:
        yaml.dump(schema, yfile, default_flow_style=False, width=120, indent=2)

def main():
    try:
        schema = extract()
    except Exception as exc:
        print("Error: " + str(exc))
        schema = None

    if schema:

        # Get the first command line argument
        import sys
        if len(sys.argv) > 1:
            arg = sys.argv[1]
        else:
            arg = 'some'

        # JSON schema
        if arg in ['some', 'json', 'jsons']:
            print("Generating JSON ...")
            dump_json(schema, Path('schema.json'))

        # JSON schema (wildcard names)
        if arg in ['group', 'jsons']:
            group_options(schema)
            dump_json(schema, Path('schema_grouped.json'))

        # YAML
        if arg in ['some', 'yml', 'yaml']:
            try:
                import yaml
            except ImportError:
                print("Installing YAML module ...")
                import subprocess
                try:
                    subprocess.run(['python3', '-m', 'pip', 'install', 'pyyaml'])
                    import yaml
                except:
                    print("Failed to install YAML module")
                    return

            print("Generating YML ...")
            dump_yaml(schema, Path('schema.yml'))

if __name__ == '__main__':
    main()
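Aside: the chained conditional in extract() above is dense, so here is a condensed standalone restatement (not part of this commit, and simplified: the char/state/int[]/float[] cases are omitted) of how raw #define values map to schema types:

import re

def classify(val):
    # Simplified restatement of the typing rules in extract() above
    if val == '': return 'switch'
    if re.match(r'^(true|false)$', val): return 'bool'
    if re.match(r'^[-+]?\s*\d+$', val): return 'int'
    if re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val): return 'float'
    return 'string' if val[0] == '"' \
      else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
      else 'array' if val[0] == '{' \
      else ''

for v in ('', 'true', '250000', '4.2f', '"Ender-3"', 'PID_DEBUG', '{ 10, 20 }'):
    print(repr(v), '->', classify(v))
# '' -> switch, 'true' -> bool, '250000' -> int, '4.2f' -> float,
# '"Ender-3"' -> string, 'PID_DEBUG' -> enum, '{ 10, 20 }' -> array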
@ -16,32 +16,32 @@ from pathlib import Path
# resulting config.ini to produce more exact configuration files.
#
def extract_defines(filepath):
    f = open(filepath, encoding="utf8").read().split("\n")
    a = []
    for line in f:
        sline = line.strip()
        if sline[:7] == "#define":
            # Extract the key here (we don't care about the value)
            kv = sline[8:].strip().split()
            a.append(kv[0])
    return a

# Compute the SHA256 hash of a file
def get_file_sha256sum(filepath):
    sha256_hash = hashlib.sha256()
    with open(filepath,"rb") as f:
        # Read and update hash string value in blocks of 4K
        for byte_block in iter(lambda: f.read(4096),b""):
            sha256_hash.update(byte_block)
    return sha256_hash.hexdigest()

#
# Compress a JSON file into a zip file
#
import zipfile
def compress_file(filepath, outpath):
    with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
        zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)

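Aside: a hypothetical smoke test of the three helpers above (not part of this commit), assuming it runs from the root of a Marlin checkout and that a marlin_config.json exists:

if __name__ == '__main__':
    defs = extract_defines('Marlin/Configuration.h')
    print(len(defs), 'defines, e.g.', defs[:3])
    digest = get_file_sha256sum('Marlin/Configuration.h')
    print('first 10 hash chars:', digest[0:10])      # the slice length used below
    compress_file('marlin_config.json', 'mc.zip')    # bzip2-compressed zip, as used below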
#
# Compute the build signature. The idea is to extract all defines in the configuration headers
@ -49,228 +49,228 @@ def compress_file(filepath, outpath):
# We can reverse the signature to get a 1:1 equivalent configuration file
#
def compute_build_signature(env):
    if 'BUILD_SIGNATURE' in env:
        return

    # Definitions from these files will be kept
    files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]

    build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])

    # Check if we can skip processing
    hashes = ''
    for header in files_to_keep:
        hashes += get_file_sha256sum(header)[0:10]

    marlin_json = build_path / 'marlin_config.json'
    marlin_zip = build_path / 'mc.zip'

    # Read existing config file
    try:
        with marlin_json.open() as infile:
            conf = json.load(infile)
            if conf['__INITIAL_HASH'] == hashes:
                # Same configuration, skip recomputing the build signature
                compress_file(marlin_json, marlin_zip)
                return
    except:
        pass

    # Get enabled config options based on preprocessor
    from preprocessor import run_preprocessor
    complete_cfg = run_preprocessor(env)

    # Dumb #define extraction from the configuration files
    conf_defines = {}
    all_defines = []
    for header in files_to_keep:
        defines = extract_defines(header)
        # To filter only the defines we want
        all_defines += defines
        # To remember which file each define came from
        conf_defines[header.split('/')[-1]] = defines

    r = re.compile(r"\(+(\s*-*\s*_.*)\)+")

    # First step is to collect all valid macros
    defines = {}
    for line in complete_cfg:

        # Split the define from the value
        key_val = line[8:].strip().decode().split(' ')
        key, value = key_val[0], ' '.join(key_val[1:])

        # Ignore values starting with two underscores, since they're low level
        if len(key) > 2 and key[0:2] == "__" :
            continue
        # Ignore values containing a parenthesis (likely a function macro)
        if '(' in key and ')' in key:
            continue

        # Then filter dumb values
        if r.match(value):
            continue

        defines[key] = value if len(value) else ""

    #
    # Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
    #
    if not ('CONFIGURATION_EMBEDDING' in defines or 'CONFIG_EXPORT' in defines):
        return

    # Second step is to filter out useless macros
    resolved_defines = {}
    for key in defines:
        # Remove all boards now
        if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
            continue
        # Remove all keys ending with "_NAME" as they make no difference to the configuration
        if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
            continue
        # Remove all keys ending with "_T_DECLARED" as it's a copy of extraneous system stuff
        if key.endswith("_T_DECLARED"):
            continue
        # Remove keys that are not in the #define list in the Configuration list
        if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]:
            continue

        # Don't be that smart guy here
        resolved_defines[key] = defines[key]

    # Generate a build signature now
    # We are making an object that's a bit more complex than a basic dictionary here
    data = {}
    data['__INITIAL_HASH'] = hashes
    # First create a key for each header here
    for header in conf_defines:
        data[header] = {}

    # Then populate the object where each key is going to (that's an O(N^2) algorithm here...)
    for key in resolved_defines:
        for header in conf_defines:
            if key in conf_defines[header]:
                data[header][key] = resolved_defines[key]

    # Every python needs this toy
    def tryint(key):
        try:
            return int(defines[key])
        except:
            return 0

    config_dump = tryint('CONFIG_EXPORT')

    #
    # Produce an INI file if CONFIG_EXPORT == 2
    #
    if config_dump == 2:
        print("Generating config.ini ...")
        config_ini = build_path / 'config.ini'
        with config_ini.open('w') as outfile:
            ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
            filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
            vers = defines["CONFIGURATION_H_VERSION"]
            dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
            ini_fmt = '{0:40}{1}\n'
            outfile.write(
                '#\n'
                + '# Marlin Firmware\n'
                + '# config.ini - Options to apply before the build\n'
                + '#\n'
                + f'# Generated by Marlin build on {dt_string}\n'
                + '#\n'
                + '\n'
                + '[config:base]\n'
                + ini_fmt.format('ini_use_config', ' = all')
                + ini_fmt.format('ini_config_vers', f' = {vers}')
            )
            # Loop through the data array of arrays
            for header in data:
                if header.startswith('__'):
                    continue
                outfile.write('\n[' + filegrp[header] + ']\n')
                for key in sorted(data[header]):
                    if key not in ignore:
                        val = 'on' if data[header][key] == '' else data[header][key]
                        outfile.write(ini_fmt.format(key.lower(), ' = ' + val))

    #
    # Produce a schema file if CONFIG_EXPORT >= 3
    #
    if config_dump >= 3:
        try:
            conf_schema = schema.extract()
        except Exception as exc:
            print("Error: " + str(exc))
            conf_schema = None

        if conf_schema:
            #
            # Produce a schema.json file if CONFIG_EXPORT == 3
            #
            if config_dump in (3, 13):
                print("Generating schema.json ...")
                schema.dump_json(conf_schema, build_path / 'schema.json')
                if config_dump == 13:
                    schema.group_options(conf_schema)
                    schema.dump_json(conf_schema, build_path / 'schema_grouped.json')

            #
            # Produce a schema.yml file if CONFIG_EXPORT == 4
            #
            elif config_dump == 4:
                print("Generating schema.yml ...")
                try:
                    import yaml
                except ImportError:
                    env.Execute(env.VerboseAction(
                        '$PYTHONEXE -m pip install "pyyaml"',
                        "Installing YAML for schema.yml export",
                    ))
                    import yaml
                schema.dump_yaml(conf_schema, build_path / 'schema.yml')

    # Append the source code version and date
    data['VERSION'] = {}
    data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
    data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
    try:
        curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
        data['VERSION']['GIT_REF'] = curver.decode()
    except:
        pass

    #
    # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
    #
    if config_dump == 1 or 'CONFIGURATION_EMBEDDING' in defines:
        with marlin_json.open('w') as outfile:
            json.dump(data, outfile, separators=(',', ':'))

    #
    # The rest only applies to CONFIGURATION_EMBEDDING
    #
    if not 'CONFIGURATION_EMBEDDING' in defines:
        return

    # Compress the JSON file as much as we can
    compress_file(marlin_json, marlin_zip)

    # Generate a C source file for storing this array
    with open('Marlin/src/mczip.h','wb') as result_file:
        result_file.write(
            b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
            + b'  #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n'
            + b'#endif\n'
            + b'const unsigned char mc_zip[] PROGMEM = {\n '
        )
        count = 0
        for b in (build_path / 'mc.zip').open('rb').read():
            result_file.write(b' 0x%02X,' % b)
            count += 1
            if count % 16 == 0:
                result_file.write(b'\n ')
        if count % 16:
            result_file.write(b'\n')
        result_file.write(b'};\n')
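Aside: a sketch (not part of this commit) of the config.ini layout the writer above produces, using the same 40-column ini_fmt; the option names and values here are invented:

ini_fmt = '{0:40}{1}\n'
sample  = '[config:base]\n'
sample += ini_fmt.format('ini_use_config', ' = all')
sample += '\n[config:basic]\n'
sample += ini_fmt.format('baudrate', ' = 250000')
sample += ini_fmt.format('pidtemp', ' = on')   # a valueless #define exports as 'on'
print(sample, end='')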
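And a minimal standalone rendering (illustrative input bytes) of the 16-values-per-row hex formatting used for mczip.h above:

count, out = 0, 'const unsigned char mc_zip[] PROGMEM = {\n '
for b in bytes(range(20)):                   # stand-in for the real mc.zip contents
    out += ' 0x%02X,' % b
    count += 1
    if count % 16 == 0: out += '\n '
print(out + ('\n' if count % 16 else '') + '};')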
@ -5,49 +5,49 @@

import pioutil
if pioutil.is_pio_build():
    # Get the environment thus far for the build
    Import("env")

    #print(env.Dump())

    #
    # Give the binary a distinctive name
    #

    env['PROGNAME'] = "MarlinSimulator"

    #
    # If Xcode is installed add the path to its Frameworks folder,
    # or if Mesa is installed try to use its GL/gl.h.
    #

    import sys
    if sys.platform == 'darwin':

        #
        # Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
        #
        env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]

        # Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
        xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
        mesa_path = "/opt/local/include/GL/gl.h"

        import os.path

        if os.path.exists(xcode_path):

            env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
            print("Using OpenGL framework headers from Xcode.app")

        elif os.path.exists(mesa_path):

            env['BUILD_FLAGS'] += [ '-D__MESA__' ]
            print("Using OpenGL header from", mesa_path)

        else:

            print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")

            # Break out of the PIO build immediately
            sys.exit(1)
@ -3,59 +3,59 @@
#
import pioutil
if pioutil.is_pio_build():
    Import("env")

    # Get a build flag's value or None
    def getBuildFlagValue(name):
        for flag in build_flags:
            if isinstance(flag, list) and flag[0] == name:
                return flag[1]

        return None

    # Get an overriding buffer size for RX or TX from the build flags
    def getInternalSize(side):
        return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
               getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
               getBuildFlagValue(f"USART_{side}_BUF_SIZE")

    # Get the largest defined buffer size for RX or TX
    def getBufferSize(side, default):
        # Get a build flag value or fall back to the given default
        internal = int(getInternalSize(side) or default)
        flag = side + "_BUFFER_SIZE"
        # Return the largest value
        return max(int(mf[flag]), internal) if flag in mf else internal

    # Add a build flag if it's not already defined
    def tryAddFlag(name, value):
        if getBuildFlagValue(name) is None:
            env.Append(BUILD_FLAGS=[f"-D{name}={value}"])

    # Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
    # configure buffer sizes for receiving \ transmitting serial data.
    # Stm32duino uses another set of defines for the same purpose, so this
    # script gets the values from the configuration and uses them to define
    # `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
    # flags so they are available for use by the platform.
    #
    # The script will set the value as the default one (64 bytes)
    # or the user-configured one, whichever is higher.
    #
    # Marlin's default buffer sizes are 128 for RX and 32 for TX.
    # The highest value is taken (128/64).
    #
    # If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
    # defined, the first of these values will be used as the minimum.
    build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
    mf = env["MARLIN_FEATURES"]

    # Get the largest defined buffer sizes for RX or TX, using defaults for undefined
    rxBuf = getBufferSize("RX", 128)
    txBuf = getBufferSize("TX", 64)

    # Provide serial buffer sizes to the stm32duino platform
    tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
    tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
    tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
    tryAddFlag("USART_TX_BUF_SIZE", txBuf)
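Aside: a standalone trace (invented values, not part of this commit) of the size resolution above, for the case where the user sets RX_BUFFER_SIZE 1024 in the configuration and no MF_/SERIAL_/USART_ build flag overrides it:

mf = { 'RX_BUFFER_SIZE': '1024' }        # from MARLIN_FEATURES (hypothetical)
internal = int(None or 128)              # getInternalSize('RX') found no flag -> default 128
flag = 'RX_BUFFER_SIZE'
rxBuf = max(int(mf[flag]), internal) if flag in mf else internal
print(rxBuf)                             # -> 1024, so -DSERIAL_RX_BUFFER_SIZE=1024 gets appended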
@ -25,320 +25,320 @@ import MarlinBinaryProtocol
|
|||||||
#-----------------#
|
#-----------------#
|
||||||
def Upload(source, target, env):
|
def Upload(source, target, env):
|
||||||
|
|
||||||
#-------#
|
#-------#
|
||||||
# Debug #
|
# Debug #
|
||||||
#-------#
|
#-------#
|
||||||
Debug = False # Set to True to enable script debug
|
Debug = False # Set to True to enable script debug
|
||||||
def debugPrint(data):
|
def debugPrint(data):
|
||||||
if Debug: print(f"[Debug]: {data}")
|
if Debug: print(f"[Debug]: {data}")
|
||||||
|
|
||||||
#------------------#
|
#------------------#
|
||||||
# Marlin functions #
|
# Marlin functions #
|
||||||
#------------------#
|
#------------------#
|
||||||
def _GetMarlinEnv(marlinEnv, feature):
|
def _GetMarlinEnv(marlinEnv, feature):
|
||||||
if not marlinEnv: return None
|
if not marlinEnv: return None
|
||||||
return marlinEnv[feature] if feature in marlinEnv else None
|
return marlinEnv[feature] if feature in marlinEnv else None
|
||||||
|
|
||||||
#----------------#
|
#----------------#
|
||||||
# Port functions #
|
# Port functions #
|
||||||
#----------------#
|
#----------------#
|
||||||
def _GetUploadPort(env):
|
def _GetUploadPort(env):
|
||||||
debugPrint('Autodetecting upload port...')
|
debugPrint('Autodetecting upload port...')
|
||||||
env.AutodetectUploadPort(env)
|
env.AutodetectUploadPort(env)
|
||||||
portName = env.subst('$UPLOAD_PORT')
|
portName = env.subst('$UPLOAD_PORT')
|
||||||
if not portName:
|
if not portName:
|
||||||
raise Exception('Error detecting the upload port.')
|
raise Exception('Error detecting the upload port.')
|
||||||
debugPrint('OK')
|
debugPrint('OK')
|
||||||
return portName
|
return portName
|
||||||
|
|
||||||
#-------------------------#
|
#-------------------------#
|
||||||
# Simple serial functions #
|
# Simple serial functions #
|
||||||
#-------------------------#
|
#-------------------------#
|
||||||
def _OpenPort():
|
def _OpenPort():
|
||||||
# Open serial port
|
# Open serial port
|
||||||
if port.is_open: return
|
if port.is_open: return
|
||||||
debugPrint('Opening upload port...')
|
debugPrint('Opening upload port...')
|
||||||
port.open()
|
port.open()
|
||||||
port.reset_input_buffer()
|
port.reset_input_buffer()
|
||||||
debugPrint('OK')
|
debugPrint('OK')
|
||||||
|
|
||||||
def _ClosePort():
|
def _ClosePort():
|
||||||
# Open serial port
|
# Open serial port
|
||||||
if port is None: return
|
if port is None: return
|
||||||
if not port.is_open: return
|
if not port.is_open: return
|
||||||
debugPrint('Closing upload port...')
|
debugPrint('Closing upload port...')
|
||||||
port.close()
|
port.close()
|
||||||
debugPrint('OK')
|
debugPrint('OK')
|
||||||
|
|
||||||
def _Send(data):
|
def _Send(data):
|
||||||
debugPrint(f'>> {data}')
|
debugPrint(f'>> {data}')
|
||||||
strdata = bytearray(data, 'utf8') + b'\n'
|
strdata = bytearray(data, 'utf8') + b'\n'
|
||||||
port.write(strdata)
|
port.write(strdata)
|
||||||
time.sleep(0.010)
|
time.sleep(0.010)
|
||||||
|
|
||||||
def _Recv():
|
def _Recv():
|
||||||
clean_responses = []
|
clean_responses = []
|
||||||
responses = port.readlines()
|
responses = port.readlines()
|
||||||
for Resp in responses:
|
for Resp in responses:
|
||||||
# Suppress invalid chars (coming from debug info)
|
# Suppress invalid chars (coming from debug info)
|
||||||
try:
|
try:
|
||||||
clean_response = Resp.decode('utf8').rstrip().lstrip()
|
clean_response = Resp.decode('utf8').rstrip().lstrip()
|
||||||
clean_responses.append(clean_response)
|
clean_responses.append(clean_response)
|
||||||
debugPrint(f'<< {clean_response}')
|
debugPrint(f'<< {clean_response}')
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
return clean_responses
|
return clean_responses
|
||||||
|
|
||||||
#------------------#
|
#------------------#
|
||||||
# SDCard functions #
|
# SDCard functions #
|
||||||
#------------------#
|
#------------------#
|
||||||
def _CheckSDCard():
|
def _CheckSDCard():
|
||||||
debugPrint('Checking SD card...')
|
debugPrint('Checking SD card...')
|
||||||
_Send('M21')
|
_Send('M21')
|
||||||
Responses = _Recv()
|
Responses = _Recv()
|
||||||
if len(Responses) < 1 or not any('SD card ok' in r for r in Responses):
|
if len(Responses) < 1 or not any('SD card ok' in r for r in Responses):
|
||||||
raise Exception('Error accessing SD card')
|
raise Exception('Error accessing SD card')
|
||||||
debugPrint('SD Card OK')
|
debugPrint('SD Card OK')
|
||||||
return True
|
return True
|
||||||
|
|
||||||
#----------------#
|
#----------------#
|
||||||
# File functions #
|
# File functions #
|
||||||
#----------------#
|
#----------------#
|
||||||
def _GetFirmwareFiles(UseLongFilenames):
|
def _GetFirmwareFiles(UseLongFilenames):
|
||||||
debugPrint('Get firmware files...')
|
debugPrint('Get firmware files...')
|
||||||
_Send(f"M20 F{'L' if UseLongFilenames else ''}")
|
_Send(f"M20 F{'L' if UseLongFilenames else ''}")
|
||||||
Responses = _Recv()
|
Responses = _Recv()
|
||||||
if len(Responses) < 3 or not any('file list' in r for r in Responses):
|
if len(Responses) < 3 or not any('file list' in r for r in Responses):
|
||||||
raise Exception('Error getting firmware files')
|
raise Exception('Error getting firmware files')
|
||||||
debugPrint('OK')
|
debugPrint('OK')
|
||||||
return Responses
|
return Responses
|
||||||
|
|
||||||
def _FilterFirmwareFiles(FirmwareList, UseLongFilenames):
|
def _FilterFirmwareFiles(FirmwareList, UseLongFilenames):
|
||||||
Firmwares = []
|
Firmwares = []
|
||||||
for FWFile in FirmwareList:
|
for FWFile in FirmwareList:
|
||||||
# For long filenames take the 3rd column of the firmwares list
|
# For long filenames take the 3rd column of the firmwares list
|
||||||
if UseLongFilenames:
|
if UseLongFilenames:
|
||||||
Space = 0
|
Space = 0
|
||||||
Space = FWFile.find(' ')
|
Space = FWFile.find(' ')
|
||||||
if Space >= 0: Space = FWFile.find(' ', Space + 1)
|
if Space >= 0: Space = FWFile.find(' ', Space + 1)
|
||||||
if Space >= 0: FWFile = FWFile[Space + 1:]
|
if Space >= 0: FWFile = FWFile[Space + 1:]
|
||||||
if not '/' in FWFile and '.BIN' in FWFile.upper():
|
if not '/' in FWFile and '.BIN' in FWFile.upper():
|
||||||
Firmwares.append(FWFile[:FWFile.upper().index('.BIN') + 4])
|
Firmwares.append(FWFile[:FWFile.upper().index('.BIN') + 4])
|
||||||
return Firmwares
|
return Firmwares
|
||||||
|
|
||||||
def _RemoveFirmwareFile(FirmwareFile):
|
def _RemoveFirmwareFile(FirmwareFile):
|
||||||
_Send(f'M30 /{FirmwareFile}')
|
_Send(f'M30 /{FirmwareFile}')
|
||||||
Responses = _Recv()
|
Responses = _Recv()
|
||||||
Removed = len(Responses) >= 1 and any('File deleted' in r for r in Responses)
|
Removed = len(Responses) >= 1 and any('File deleted' in r for r in Responses)
|
||||||
if not Removed:
|
if not Removed:
|
||||||
raise Exception(f"Firmware file '{FirmwareFile}' not removed")
|
raise Exception(f"Firmware file '{FirmwareFile}' not removed")
|
||||||
return Removed
|
return Removed
|
||||||
|
|
||||||
def _RollbackUpload(FirmwareFile):
|
def _RollbackUpload(FirmwareFile):
|
||||||
if not rollback: return
|
if not rollback: return
|
||||||
print(f"Rollback: trying to delete firmware '{FirmwareFile}'...")
|
print(f"Rollback: trying to delete firmware '{FirmwareFile}'...")
|
||||||
_OpenPort()
|
_OpenPort()
|
||||||
# Wait for SD card release
|
# Wait for SD card release
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
# Remount SD card
|
# Remount SD card
|
||||||
_CheckSDCard()
|
_CheckSDCard()
|
||||||
print(' OK' if _RemoveFirmwareFile(FirmwareFile) else ' Error!')
|
print(' OK' if _RemoveFirmwareFile(FirmwareFile) else ' Error!')
|
||||||
_ClosePort()
|
_ClosePort()
|
||||||
|
|
||||||
|
|
||||||
#---------------------#
|
#---------------------#
|
||||||
# Callback Entrypoint #
|
# Callback Entrypoint #
|
||||||
#---------------------#
|
#---------------------#
|
||||||
port = None
|
port = None
|
||||||
protocol = None
|
protocol = None
|
||||||
filetransfer = None
|
filetransfer = None
|
||||||
rollback = False
|
rollback = False
|
||||||
|
|
||||||
# Get Marlin evironment vars
|
# Get Marlin evironment vars
|
||||||
MarlinEnv = env['MARLIN_FEATURES']
|
MarlinEnv = env['MARLIN_FEATURES']
|
||||||
marlin_pioenv = _GetMarlinEnv(MarlinEnv, 'PIOENV')
|
marlin_pioenv = _GetMarlinEnv(MarlinEnv, 'PIOENV')
|
||||||
marlin_motherboard = _GetMarlinEnv(MarlinEnv, 'MOTHERBOARD')
|
marlin_motherboard = _GetMarlinEnv(MarlinEnv, 'MOTHERBOARD')
|
||||||
marlin_board_info_name = _GetMarlinEnv(MarlinEnv, 'BOARD_INFO_NAME')
|
marlin_board_info_name = _GetMarlinEnv(MarlinEnv, 'BOARD_INFO_NAME')
|
||||||
marlin_board_custom_build_flags = _GetMarlinEnv(MarlinEnv, 'BOARD_CUSTOM_BUILD_FLAGS')
|
marlin_board_custom_build_flags = _GetMarlinEnv(MarlinEnv, 'BOARD_CUSTOM_BUILD_FLAGS')
|
||||||
marlin_firmware_bin = _GetMarlinEnv(MarlinEnv, 'FIRMWARE_BIN')
|
marlin_firmware_bin = _GetMarlinEnv(MarlinEnv, 'FIRMWARE_BIN')
|
||||||
marlin_long_filename_host_support = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_HOST_SUPPORT') is not None
|
marlin_long_filename_host_support = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_HOST_SUPPORT') is not None
|
||||||
marlin_longname_write = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_WRITE_SUPPORT') is not None
|
marlin_longname_write = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_WRITE_SUPPORT') is not None
|
||||||
marlin_custom_firmware_upload = _GetMarlinEnv(MarlinEnv, 'CUSTOM_FIRMWARE_UPLOAD') is not None
|
marlin_custom_firmware_upload = _GetMarlinEnv(MarlinEnv, 'CUSTOM_FIRMWARE_UPLOAD') is not None
|
||||||
marlin_short_build_version = _GetMarlinEnv(MarlinEnv, 'SHORT_BUILD_VERSION')
|
marlin_short_build_version = _GetMarlinEnv(MarlinEnv, 'SHORT_BUILD_VERSION')
|
||||||
marlin_string_config_h_author = _GetMarlinEnv(MarlinEnv, 'STRING_CONFIG_H_AUTHOR')
|
marlin_string_config_h_author = _GetMarlinEnv(MarlinEnv, 'STRING_CONFIG_H_AUTHOR')
|
||||||
|
|
||||||
# Get firmware upload params
|
# Get firmware upload params
|
||||||
upload_firmware_source_name = str(source[0]) # Source firmware filename
|
upload_firmware_source_name = str(source[0]) # Source firmware filename
|
||||||
upload_speed = env['UPLOAD_SPEED'] if 'UPLOAD_SPEED' in env else 115200
|
upload_speed = env['UPLOAD_SPEED'] if 'UPLOAD_SPEED' in env else 115200
|
||||||
# baud rate of serial connection
|
# baud rate of serial connection
|
||||||
upload_port = _GetUploadPort(env) # Serial port to use
|
upload_port = _GetUploadPort(env) # Serial port to use
|
||||||
|
|
||||||
# Set local upload params
|
# Set local upload params
|
||||||
upload_firmware_target_name = os.path.basename(upload_firmware_source_name)
|
upload_firmware_target_name = os.path.basename(upload_firmware_source_name)
|
||||||
# Target firmware filename
|
# Target firmware filename
|
||||||
upload_timeout = 1000 # Communication timout, lossy/slow connections need higher values
|
upload_timeout = 1000 # Communication timout, lossy/slow connections need higher values
|
||||||
upload_blocksize = 512 # Transfer block size. 512 = Autodetect
|
upload_blocksize = 512 # Transfer block size. 512 = Autodetect
|
||||||
upload_compression = True # Enable compression
|
upload_compression = True # Enable compression
|
||||||
upload_error_ratio = 0 # Simulated corruption ratio
|
upload_error_ratio = 0 # Simulated corruption ratio
|
||||||
upload_test = False # Benchmark the serial link without storing the file
|
upload_test = False # Benchmark the serial link without storing the file
|
||||||
upload_reset = True # Trigger a soft reset for firmware update after the upload
|
upload_reset = True # Trigger a soft reset for firmware update after the upload
|
||||||
|
|
||||||
# Set local upload params based on board type to change script behavior
|
# Set local upload params based on board type to change script behavior
|
||||||
# "upload_delete_old_bins": delete all *.bin files in the root of SD Card
|
# "upload_delete_old_bins": delete all *.bin files in the root of SD Card
|
||||||
upload_delete_old_bins = marlin_motherboard in ['BOARD_CREALITY_V4', 'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
|
upload_delete_old_bins = marlin_motherboard in ['BOARD_CREALITY_V4', 'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
|
||||||
'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
|
'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
|
||||||
'BOARD_CREALITY_V24S1']
|
'BOARD_CREALITY_V24S1']
|
||||||
# "upload_random_name": generate a random 8.3 firmware filename to upload
|
# "upload_random_name": generate a random 8.3 firmware filename to upload
|
||||||
upload_random_filename = marlin_motherboard in ['BOARD_CREALITY_V4', 'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
|
upload_random_filename = marlin_motherboard in ['BOARD_CREALITY_V4', 'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
|
||||||
'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
|
'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
|
||||||
'BOARD_CREALITY_V24S1'] and not marlin_long_filename_host_support
|
'BOARD_CREALITY_V24S1'] and not marlin_long_filename_host_support
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
||||||
# Start upload job
|
# Start upload job
|
||||||
print(f"Uploading firmware '{os.path.basename(upload_firmware_target_name)}' to '{marlin_motherboard}' via '{upload_port}'")
|
print(f"Uploading firmware '{os.path.basename(upload_firmware_target_name)}' to '{marlin_motherboard}' via '{upload_port}'")
|
||||||
|
|
||||||
# Dump some debug info
|
# Dump some debug info
|
||||||
if Debug:
|
if Debug:
|
||||||
print('Upload using:')
|
print('Upload using:')
|
||||||
print('---- Marlin -----------------------------------')
|
print('---- Marlin -----------------------------------')
|
||||||
print(f' PIOENV : {marlin_pioenv}')
|
print(f' PIOENV : {marlin_pioenv}')
|
||||||
print(f' SHORT_BUILD_VERSION : {marlin_short_build_version}')
|
print(f' SHORT_BUILD_VERSION : {marlin_short_build_version}')
|
||||||
print(f' STRING_CONFIG_H_AUTHOR : {marlin_string_config_h_author}')
|
print(f' STRING_CONFIG_H_AUTHOR : {marlin_string_config_h_author}')
|
||||||
print(f' MOTHERBOARD : {marlin_motherboard}')
|
print(f' MOTHERBOARD : {marlin_motherboard}')
|
||||||
print(f' BOARD_INFO_NAME : {marlin_board_info_name}')
|
print(f' BOARD_INFO_NAME : {marlin_board_info_name}')
|
||||||
print(f' CUSTOM_BUILD_FLAGS : {marlin_board_custom_build_flags}')
|
print(f' CUSTOM_BUILD_FLAGS : {marlin_board_custom_build_flags}')
|
||||||
print(f' FIRMWARE_BIN : {marlin_firmware_bin}')
|
print(f' FIRMWARE_BIN : {marlin_firmware_bin}')
|
||||||
print(f' LONG_FILENAME_HOST_SUPPORT : {marlin_long_filename_host_support}')
|
print(f' LONG_FILENAME_HOST_SUPPORT : {marlin_long_filename_host_support}')
|
||||||
print(f' LONG_FILENAME_WRITE_SUPPORT : {marlin_longname_write}')
|
print(f' LONG_FILENAME_WRITE_SUPPORT : {marlin_longname_write}')
|
||||||
print(f' CUSTOM_FIRMWARE_UPLOAD : {marlin_custom_firmware_upload}')
|
print(f' CUSTOM_FIRMWARE_UPLOAD : {marlin_custom_firmware_upload}')
|
||||||
print('---- Upload parameters ------------------------')
|
print('---- Upload parameters ------------------------')
|
||||||
print(f' Source : {upload_firmware_source_name}')
|
print(f' Source : {upload_firmware_source_name}')
|
||||||
print(f' Target : {upload_firmware_target_name}')
|
print(f' Target : {upload_firmware_target_name}')
|
||||||
print(f' Port : {upload_port} @ {upload_speed} baudrate')
|
print(f' Port : {upload_port} @ {upload_speed} baudrate')
|
||||||
print(f' Timeout : {upload_timeout}')
|
print(f' Timeout : {upload_timeout}')
|
||||||
print(f' Block size : {upload_blocksize}')
|
print(f' Block size : {upload_blocksize}')
|
||||||
print(f' Compression : {upload_compression}')
|
print(f' Compression : {upload_compression}')
|
||||||
print(f' Error ratio : {upload_error_ratio}')
|
print(f' Error ratio : {upload_error_ratio}')
|
||||||
print(f' Test : {upload_test}')
|
print(f' Test : {upload_test}')
|
||||||
print(f' Reset : {upload_reset}')
|
print(f' Reset : {upload_reset}')
|
||||||
print('-----------------------------------------------')
|
print('-----------------------------------------------')
|
||||||
|
|
||||||
# Custom implementations based on board parameters
|
# Custom implementations based on board parameters
|
||||||
# Generate a new 8.3 random filename
|
# Generate a new 8.3 random filename
|
||||||
if upload_random_filename:
|
if upload_random_filename:
|
||||||
upload_firmware_target_name = f"fw-{''.join(random.choices('ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', k=5))}.BIN"
|
upload_firmware_target_name = f"fw-{''.join(random.choices('ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', k=5))}.BIN"
|
||||||
print(f"Board {marlin_motherboard}: Overriding firmware filename to '{upload_firmware_target_name}'")
|
print(f"Board {marlin_motherboard}: Overriding firmware filename to '{upload_firmware_target_name}'")
|
||||||
|
|
||||||
# Delete all *.bin files on the root of SD Card (if flagged)
|
# Delete all *.bin files on the root of SD Card (if flagged)
|
||||||
if upload_delete_old_bins:
|
if upload_delete_old_bins:
|
||||||
# CUSTOM_FIRMWARE_UPLOAD is needed for this feature
|
# CUSTOM_FIRMWARE_UPLOAD is needed for this feature
|
||||||
if not marlin_custom_firmware_upload:
|
if not marlin_custom_firmware_upload:
|
||||||
raise Exception(f"CUSTOM_FIRMWARE_UPLOAD must be enabled in 'Configuration_adv.h' for '{marlin_motherboard}'")
|
raise Exception(f"CUSTOM_FIRMWARE_UPLOAD must be enabled in 'Configuration_adv.h' for '{marlin_motherboard}'")
|
||||||
|
|
||||||
# Init & Open serial port
|
# Init & Open serial port
|
||||||
port = serial.Serial(upload_port, baudrate = upload_speed, write_timeout = 0, timeout = 0.1)
|
port = serial.Serial(upload_port, baudrate = upload_speed, write_timeout = 0, timeout = 0.1)
|
||||||
_OpenPort()
|
_OpenPort()
|
||||||
|
|
||||||
# Check SD card status
|
# Check SD card status
|
||||||
_CheckSDCard()
|
_CheckSDCard()
|
||||||
|
|
||||||
# Get firmware files
|
# Get firmware files
|
||||||
FirmwareFiles = _GetFirmwareFiles(marlin_long_filename_host_support)
|
FirmwareFiles = _GetFirmwareFiles(marlin_long_filename_host_support)
|
||||||
if Debug:
|
if Debug:
|
||||||
for FirmwareFile in FirmwareFiles:
|
for FirmwareFile in FirmwareFiles:
|
||||||
print(f'Found: {FirmwareFile}')
|
print(f'Found: {FirmwareFile}')
|
||||||
|
|
||||||

            # Get all 1st-level firmware files (to remove)
            OldFirmwareFiles = _FilterFirmwareFiles(FirmwareFiles[1:len(FirmwareFiles)-2], marlin_long_filename_host_support) # Skip the header and footer of the list
            if len(OldFirmwareFiles) == 0:
                print('No old firmware files to delete')
            else:
                print(f"Remove {len(OldFirmwareFiles)} old firmware file{'s' if len(OldFirmwareFiles) != 1 else ''}:")
                for OldFirmwareFile in OldFirmwareFiles:
                    print(f" -Removing- '{OldFirmwareFile}'...")
                    print(' OK' if _RemoveFirmwareFile(OldFirmwareFile) else ' Error!')
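
            # With the 5-line example listing above, FirmwareFiles[1:len(FirmwareFiles)-2]
            # is FirmwareFiles[1:3], i.e. just the two real file entries: the
            # slice drops the 'Begin file list' header plus the 'End file list'
            # and 'ok' footer lines.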

            # Close serial
            _ClosePort()

            # Cleanup completed
            debugPrint('Cleanup completed')

        # WARNING! The serial port must be closed here because the serial transfer that follows needs it!

        # Upload firmware file
        debugPrint(f"Copy '{upload_firmware_source_name}' --> '{upload_firmware_target_name}'")
        protocol = MarlinBinaryProtocol.Protocol(upload_port, upload_speed, upload_blocksize, float(upload_error_ratio), int(upload_timeout))
        #echologger = MarlinBinaryProtocol.EchoProtocol(protocol)
        protocol.connect()
        # Mark the rollback (delete broken transfer) from this point on
        rollback = True
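        # From here on, an aborted or failed transfer can leave a partial file
        # on the SD card, so the exception handlers below call _RollbackUpload()
        # to delete the incomplete target file.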
        filetransfer = MarlinBinaryProtocol.FileTransferProtocol(protocol)
        transferOK = filetransfer.copy(upload_firmware_source_name, upload_firmware_target_name, upload_compression, upload_test)
        protocol.disconnect()

        # Notify upload completed
        protocol.send_ascii('M117 Firmware uploaded' if transferOK else 'M117 Firmware upload failed')

        # Remount SD card
        print('Wait for SD card release...')
        time.sleep(1)
        print('Remount SD card')
        protocol.send_ascii('M21')

        # Transfer failed?
        if not transferOK:
            protocol.shutdown()
            _RollbackUpload(upload_firmware_target_name)
        else:
            # Trigger firmware update
            if upload_reset:
                print('Trigger firmware update...')
                protocol.send_ascii('M997', True)
            protocol.shutdown()
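
        # The G-codes sent above are standard Marlin commands:
        #   M117 <msg> - show a status message on the LCD
        #   M21        - (re)mount the SD card released for the transfer
        #   M997       - reboot the board to apply the newly copied firmware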

        print('Firmware update completed' if transferOK else 'Firmware update failed')
        return 0 if transferOK else -1

    except KeyboardInterrupt:
        print('Aborted by user')
        if filetransfer: filetransfer.abort()
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)
        _ClosePort()
        raise

    except serial.SerialException as se:
        # This exception is raised only for send_ascii data (not for binary transfer)
        print(f'Serial exception: {se}, transfer aborted')
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)
        _ClosePort()
        raise Exception(se)

    except MarlinBinaryProtocol.FatalError:
        print('Too many retries, transfer aborted')
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)
        _ClosePort()
        raise

    except Exception as ex:
        print(f"\nException: {ex}, transfer aborted")
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)
        _ClosePort()
        print('Firmware not updated')
        raise

# Attach custom upload callback
env.Replace(UPLOADCMD=Upload)
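
env.Replace(UPLOADCMD=...) is PlatformIO's hook for swapping in a custom
uploader: SCons calls the given Python function with source, target, and env,
and a nonzero return value marks the upload step as failed, which is why
Upload() returns 0 on success and -1 otherwise. A minimal extra-script sketch
of the same hook (the file and function names are placeholders, not part of
Marlin):

    # extra_script.py -- enabled in platformio.ini with:
    #   extra_scripts = extra_script.py
    Import("env")

    def custom_upload(source, target, env):
        firmware = str(source[0])    # path of the built firmware image
        print(f'Uploading {firmware} ...')
        # ... transfer the file to the device here ...
        return 0                     # 0 = success, nonzero = failure

    env.Replace(UPLOADCMD=custom_upload)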
@ -6,7 +6,7 @@ import yaml
with open('.github/workflows/test-builds.yml') as f:
    github_configuration = yaml.safe_load(f)
test_platforms = github_configuration\
    ['jobs']['test_builds']['strategy']['matrix']['test-platform']
print(' '.join(test_platforms))
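
This prints the CI test matrix as one space-separated line so a shell script
can iterate over it. The lookup path assumes test-builds.yml is shaped like
the stand-in below (a runnable, self-contained sketch; the platform names are
examples, not the real matrix):

    import yaml

    # Minimal stand-in for .github/workflows/test-builds.yml
    sample = """
    jobs:
      test_builds:
        strategy:
          matrix:
            test-platform: [linux_native, DUE, mega2560]
    """
    config = yaml.safe_load(sample)
    platforms = config['jobs']['test_builds']['strategy']['matrix']['test-platform']
    print(' '.join(platforms))   # -> linux_native DUE mega2560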