🔨 Fix IntelliSense / PIO conflicts (#23058)
Co-authored-by: Scott Lahteine <github@thinkyhead.com>
parent f97635de36 · commit fc2020c6ec
@@ -4,15 +4,16 @@
# Windows: bossac.exe
# Other: leave unchanged
#
import pioutil
if pioutil.is_pio_build():

    import platform
    current_OS = platform.system()

    if current_OS == 'Windows':

        Import("env")

        # Use bossac.exe on Windows
        env.Replace(
            UPLOADCMD="bossac --info --unlock --write --verify --reset --erase -U false --boot $SOURCE"
        )
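Every script touched by this commit now opens with `import pioutil` and an `if pioutil.is_pio_build():` guard, so the build customizations are skipped when PlatformIO is only probing the project for IDE metadata (the IntelliSense conflict named in the title). The helper itself is not part of this diff; what follows is only a minimal sketch of what such a guard might look like, assuming it checks the SCons command-line targets for an IDE-data dump. The real pioutil.py in the Marlin tree may be implemented differently.

# pioutil.py -- hypothetical sketch, not the file shipped with this commit
def is_pio_build():
    # Assumption: an IDE metadata dump requests an 'idedata' target,
    # so a normal firmware build is anything that does not ask for it.
    from SCons.Script import COMMAND_LINE_TARGETS
    return "idedata" not in COMMAND_LINE_TARGETS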
@@ -1,123 +1,127 @@
#
# upload_extra_script.py
# set the output_port
#  if target_filename is found then that drive is used
#  else if target_drive is found then that drive is used
#
import pioutil
if pioutil.is_pio_build():

    target_filename = "FIRMWARE.CUR"
    target_drive = "REARM"

    import os,getpass,platform

    current_OS = platform.system()
    Import("env")

    def print_error(e):
        print('\nUnable to find destination disk (%s)\n' \
              'Please select it in platformio.ini using the upload_port keyword ' \
              '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
              'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
              %(e, env.get('PIOENV')))

    def before_upload(source, target, env):
        try:
            #
            # Find a disk for upload
            #
            upload_disk = 'Disk not found'
            target_file_found = False
            target_drive_found = False
            if current_OS == 'Windows':
                #
                # platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
                #   Windows - doesn't care about the disk's name, only cares about the drive letter
                import subprocess,string
                from ctypes import windll

                # getting list of drives
                # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
                drives = []
                bitmask = windll.kernel32.GetLogicalDrives()
                for letter in string.ascii_uppercase:
                    if bitmask & 1:
                        drives.append(letter)
                    bitmask >>= 1

                for drive in drives:
                    final_drive_name = drive + ':\\'
                    # print ('disc check: {}'.format(final_drive_name))
                    try:
                        volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
                    except Exception as e:
                        print ('error:{}'.format(e))
                        continue
                    else:
                        if target_drive in volume_info and not target_file_found:  # set upload if not found target file yet
                            target_drive_found = True
                            upload_disk = final_drive_name
                        if target_filename in volume_info:
                            if not target_file_found:
                                upload_disk = final_drive_name
                            target_file_found = True

            elif current_OS == 'Linux':
                #
                # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
                #
                drives = os.listdir(os.path.join(os.sep, 'media', getpass.getuser()))
                if target_drive in drives:  # If target drive is found, use it.
                    target_drive_found = True
                    upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), target_drive) + os.sep
                else:
                    for drive in drives:
                        try:
                            files = os.listdir(os.path.join(os.sep, 'media', getpass.getuser(), drive))
                        except:
                            continue
                        else:
                            if target_filename in files:
                                upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), drive) + os.sep
                                target_file_found = True
                                break
                #
                # set upload_port to drive if found
                #
                if target_file_found or target_drive_found:
                    env.Replace(
                        UPLOAD_FLAGS="-P$UPLOAD_PORT"
                    )

            elif current_OS == 'Darwin':  # MAC
                #
                # platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive'
                #
                drives = os.listdir('/Volumes')  # human readable names
                if target_drive in drives and not target_file_found:  # set upload if not found target file yet
                    target_drive_found = True
                    upload_disk = '/Volumes/' + target_drive + '/'
                for drive in drives:
                    try:
                        filenames = os.listdir('/Volumes/' + drive + '/')  # will get an error if the drive is protected
                    except:
                        continue
                    else:
                        if target_filename in filenames:
                            if not target_file_found:
                                upload_disk = '/Volumes/' + drive + '/'
                            target_file_found = True

            #
            # Set upload_port to drive if found
            #
            if target_file_found or target_drive_found:
                env.Replace(UPLOAD_PORT=upload_disk)
                print('\nUpload disk: ', upload_disk, '\n')
            else:
                print_error('Autodetect Error')

        except Exception as e:
            print_error(str(e))

    env.AddPreAction("upload", before_upload)
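The heart of before_upload is a scan of candidate mount points for either the FIRMWARE.CUR marker file or a volume named REARM, with the marker file taking priority. Below is a stripped-down, platform-neutral sketch of that selection rule; the helper name and the list-of-mounts argument are illustrative only and do not appear in the script above.

import os

def pick_upload_disk(mounts, target_filename="FIRMWARE.CUR", target_drive="REARM"):
    """Return the first mount containing target_filename, else one named target_drive, else None."""
    by_name = None
    for mount in mounts:
        try:
            entries = os.listdir(mount)
        except OSError:
            continue  # unreadable or protected volume, like the bare 'except: continue' above
        if target_filename in entries:
            return mount
        if os.path.basename(os.path.normpath(mount)) == target_drive and by_name is None:
            by_name = mount
    return by_name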
@@ -30,25 +30,27 @@ if __name__ == "__main__":

# extra script for linker options
else:
    import pioutil
    if pioutil.is_pio_build():
        from SCons.Script import DefaultEnvironment
        env = DefaultEnvironment()
        env.Append(
            ARFLAGS=["rcs"],

            ASFLAGS=["-x", "assembler-with-cpp"],

            CXXFLAGS=[
                "-fabi-version=0",
                "-fno-use-cxa-atexit",
                "-fno-threadsafe-statics"
            ],

            LINKFLAGS=[
                "-Os",
                "-mcpu=cortex-m3",
                "-ffreestanding",
                "-mthumb",
                "--specs=nano.specs",
                "--specs=nosys.specs",
                "-u_printf_float",
            ],
        )
@@ -2,18 +2,20 @@
# SAMD51_grandcentral_m4.py
# Customizations for env:SAMD51_grandcentral_m4
#
import pioutil
if pioutil.is_pio_build():
    from os.path import join, isfile
    import shutil
    from pprint import pprint

    Import("env")

    mf = env["MARLIN_FEATURES"]
    rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
    txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"

    serialBuf = str(max(int(rxBuf), int(txBuf), 350))

    build_flags = env.get('BUILD_FLAGS')
    build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
    env.Replace(BUILD_FLAGS=build_flags)
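The value passed via -DSERIAL_BUFFER_SIZE is simply the larger of the configured RX and TX buffer sizes, with a floor of 350. A quick illustrative check (the buffer values below are made up, not taken from any board config):

rxBuf, txBuf = "128", "64"
assert str(max(int(rxBuf), int(txBuf), 350)) == "350"   # floor wins

rxBuf, txBuf = "1024", "256"
assert str(max(int(rxBuf), int(txBuf), 350)) == "1024"  # larger configured buffer wins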
@@ -1,40 +1,43 @@
#
# STM32F103RC_MEEB_3DP.py
#
import pioutil
if pioutil.is_pio_build():

    try:
        import configparser
    except ImportError:
        import ConfigParser as configparser

    import os
    Import("env", "projenv")

    config = configparser.ConfigParser()
    config.read("platformio.ini")

    #
    # Upload actions
    #
    def before_upload(source, target, env):
        env.Execute("pwd")

    def after_upload(source, target, env):
        env.Execute("pwd")

    env.AddPreAction("upload", before_upload)
    env.AddPostAction("upload", after_upload)

    flash_size = 0
    vect_tab_addr = 0

    for define in env['CPPDEFINES']:
        if define[0] == "VECT_TAB_ADDR":
            vect_tab_addr = define[1]
        if define[0] == "STM32_FLASH_SIZE":
            flash_size = define[1]

    print('Use the {0:s} address as the marlin app entry point.'.format(vect_tab_addr))
    print('Use the {0:d}KB flash version of stm32f103rct6 chip.'.format(flash_size))

    import marlin
    marlin.custom_ld_script("STM32F103RC_MEEB_3DP.ld")
@@ -1,25 +1,28 @@
#
# STM32F103RC_fysetc.py
#
import pioutil
if pioutil.is_pio_build():
    import os
    from os.path import join
    from os.path import expandvars
    Import("env")

    # Custom HEX from ELF
    env.AddPostAction(
        join("$BUILD_DIR", "${PROGNAME}.elf"),
        env.VerboseAction(" ".join([
            "$OBJCOPY", "-O ihex", "$TARGET",
            "\"" + join("$BUILD_DIR", "${PROGNAME}.hex") + "\"",  # Note: $BUILD_DIR is a full path
        ]), "Building $TARGET"))

    # In-line command with arguments
    UPLOAD_TOOL="stm32flash"
    platform = env.PioPlatform()
    if platform.get_package_dir("tool-stm32duino") != None:
        UPLOAD_TOOL=expandvars("\"" + join(platform.get_package_dir("tool-stm32duino"),"stm32flash","stm32flash") + "\"")

    env.Replace(
        UPLOADER=UPLOAD_TOOL,
        UPLOADCMD=expandvars(UPLOAD_TOOL + " -v -i rts,-dtr,dtr -R -b 115200 -g 0x8000000 -w \"" + join("$BUILD_DIR","${PROGNAME}.hex")+"\"" + " $UPLOAD_PORT")
    )
@@ -1,30 +1,32 @@
#
# STM32F1_create_variant.py
#
import pioutil
if pioutil.is_pio_build():
    import os,shutil,marlin
    from SCons.Script import DefaultEnvironment
    from platformio import util

    env = DefaultEnvironment()
    platform = env.PioPlatform()
    board = env.BoardConfig()

    FRAMEWORK_DIR = platform.get_package_dir("framework-arduinoststm32-maple")
    assert os.path.isdir(FRAMEWORK_DIR)

    source_root = os.path.join("buildroot", "share", "PlatformIO", "variants")
    assert os.path.isdir(source_root)

    variant = board.get("build.variant")
    variant_dir = os.path.join(FRAMEWORK_DIR, "STM32F1", "variants", variant)

    source_dir = os.path.join(source_root, variant)
    assert os.path.isdir(source_dir)

    if os.path.isdir(variant_dir):
        shutil.rmtree(variant_dir)

    if not os.path.isdir(variant_dir):
        os.mkdir(variant_dir)

    marlin.copytree(source_dir, variant_dir)
@@ -2,4 +2,5 @@
# add_nanolib.py
#
Import("env")

env.Append(LINKFLAGS=["--specs=nano.specs"])
@@ -1,116 +1,117 @@
#
# chitu_crypt.py
# Customizations for Chitu boards
#
import pioutil
if pioutil.is_pio_build():
    import os,random,struct,uuid,marlin

    # Relocate firmware from 0x08000000 to 0x08008800
    marlin.relocate_firmware("0x08008800")

    def calculate_crc(contents, seed):
        accumulating_xor_value = seed;

        for i in range(0, len(contents), 4):
            value = struct.unpack('<I', contents[ i : i + 4])[0]
            accumulating_xor_value = accumulating_xor_value ^ value
        return accumulating_xor_value

    def xor_block(r0, r1, block_number, block_size, file_key):
        # This is the loop counter
        loop_counter = 0x0

        # This is the key length
        key_length = 0x18

        # This is an initial seed
        xor_seed = 0x4BAD

        # This is the block counter
        block_number = xor_seed * block_number

        #load the xor key from the file
        r7 = file_key

        for loop_counter in range(0, block_size):
            # meant to make sure different bits of the key are used.
            xor_seed = int(loop_counter / key_length)

            # IP is a scratch register / R12
            ip = loop_counter - (key_length * xor_seed)

            # xor_seed = (loop_counter * loop_counter) + block_number
            xor_seed = (loop_counter * loop_counter) + block_number

            # shift the xor_seed left by the bits in IP.
            xor_seed = xor_seed >> ip

            # load a byte into IP
            ip = r0[loop_counter]

            # XOR the seed with r7
            xor_seed = xor_seed ^ r7

            # and then with IP
            xor_seed = xor_seed ^ ip

            #Now store the byte back
            r1[loop_counter] = xor_seed & 0xFF

            #increment the loop_counter
            loop_counter = loop_counter + 1

    def encrypt_file(input, output_file, file_length):
        input_file = bytearray(input.read())
        block_size = 0x800
        key_length = 0x18

        uid_value = uuid.uuid4()
        file_key = int(uid_value.hex[0:8], 16)

        xor_crc = 0xEF3D4323;

        # the input file is exepcted to be in chunks of 0x800
        # so round the size
        while len(input_file) % block_size != 0:
            input_file.extend(b'0x0')

        # write the file header
        output_file.write(struct.pack(">I", 0x443D2D3F))
        # encrypt the contents using a known file header key

        # write the file_key
        output_file.write(struct.pack("<I", file_key))

        #TODO - how to enforce that the firmware aligns to block boundaries?
        block_count = int(len(input_file) / block_size)
        print ("Block Count is ", block_count)
        for block_number in range(0, block_count):
            block_offset = (block_number * block_size)
            block_end = block_offset + block_size
            block_array = bytearray(input_file[block_offset: block_end])
            xor_block(block_array, block_array, block_number, block_size, file_key)
            for n in range (0, block_size):
                input_file[block_offset + n] = block_array[n]

            # update the expected CRC value.
            xor_crc = calculate_crc(block_array, xor_crc)

        # write CRC
        output_file.write(struct.pack("<I", xor_crc))

        # finally, append the encrypted results.
        output_file.write(input_file)
        return

    # Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
    def encrypt(source, target, env):
        firmware = open(target[0].path, "rb")
        update = open(target[0].dir.path + '/update.cbd', "wb")
        length = os.path.getsize(target[0].path)

        encrypt_file(firmware, update, length)

        firmware.close()
        update.close()

    marlin.add_post_action(encrypt);
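calculate_crc simply XOR-folds the payload as little-endian 32-bit words into a running seed, so its behavior can be checked in isolation. A small illustrative call, assuming calculate_crc is in scope (this example is not part of the script):

import struct

# Two words, 0x00000001 and 0x00000002, folded into the seed 0xEF3D4323:
payload = struct.pack("<II", 1, 2)
# 0xEF3D4323 ^ 1 ^ 2 == 0xEF3D4320
assert calculate_crc(payload, 0xEF3D4323) == 0xEF3D4320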
@@ -2,36 +2,38 @@
# common-cxxflags.py
# Convenience script to apply customizations to CPP flags
#
import pioutil
if pioutil.is_pio_build():
    Import("env")

    cxxflags = [
        #"-Wno-incompatible-pointer-types",
        #"-Wno-unused-const-variable",
        #"-Wno-maybe-uninitialized",
        #"-Wno-sign-compare"
    ]
    if "teensy" not in env['PIOENV']:
        cxxflags += ["-Wno-register"]
    env.Append(CXXFLAGS=cxxflags)

    #
    # Add CPU frequency as a compile time constant instead of a runtime variable
    #
    def add_cpu_freq():
        if 'BOARD_F_CPU' in env:
            env['BUILD_FLAGS'].append('-DBOARD_F_CPU=' + env['BOARD_F_CPU'])

    # Useful for JTAG debugging
    #
    # It will separate release and debug build folders.
    # It useful to keep two live versions: a debug version for debugging and another for
    # release, for flashing when upload is not done automatically by jlink/stlink.
    # Without this, PIO needs to recompile everything twice for any small change.
    if env.GetBuildType() == "debug" and env.get('UPLOAD_PROTOCOL') not in ['jlink', 'stlink']:
        env['BUILD_DIR'] = '$PROJECT_BUILD_DIR/$PIOENV/debug'

    # On some platform, F_CPU is a runtime variable. Since it's used to convert from ns
    # to CPU cycles, this adds overhead preventing small delay (in the order of less than
    # 30 cycles) to be generated correctly. By using a compile time constant instead
    # the compiler will perform the computation and this overhead will be avoided
    add_cpu_freq()
@@ -1,16 +1,16 @@
#
# post:common-dependencies-post.py
# Convenience script to add build flags for Marlin Enabled Features
#
import pioutil
if pioutil.is_pio_build():

    Import("env")
    Import("projenv")

    def apply_board_build_flags():
        if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
            return
        projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())

    # We need to add the board build flags in a post script
    # so the platform build script doesn't overwrite the custom CCFLAGS
    apply_board_build_flags()
@@ -2,320 +2,317 @@
# common-dependencies.py
# Convenience script to check dependencies and add libs and sources for Marlin Enabled Features
#
import pioutil
if pioutil.is_pio_build():

    import subprocess,os,re
    Import("env")

    from platformio.package.meta import PackageSpec
    from platformio.project.config import ProjectConfig

    #print(env.Dump())

    def validate_pio():
        PIO_VERSION_MIN = (5, 0, 3)
        try:
            from platformio import VERSION as PIO_VERSION
            weights = (1000, 100, 1)
            version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
            version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
            if version_cur < version_min:
                print()
                print("**************************************************")
                print("****** An update to PlatformIO is ******")
                print("****** required to build Marlin Firmware. ******")
                print("****** ******")
                print("****** Minimum version: ", PIO_VERSION_MIN, " ******")
                print("****** Current Version: ", PIO_VERSION, " ******")
                print("****** ******")
                print("****** Update PlatformIO and try again. ******")
                print("**************************************************")
                print()
                exit(1)
        except SystemExit:
            exit(1)
        except:
            print("Can't detect PlatformIO Version")

    verbose = 0
    FEATURE_CONFIG = {}

    def blab(str,level=1):
        if verbose >= level:
            print("[deps] %s" % str)

    def add_to_feat_cnf(feature, flines):

        try:
            feat = FEATURE_CONFIG[feature]
        except:
            FEATURE_CONFIG[feature] = {}

        # Get a reference to the FEATURE_CONFIG under construction
        feat = FEATURE_CONFIG[feature]

        # Split up passed lines on commas or newlines and iterate
        # Add common options to the features config under construction
        # For lib_deps replace a previous instance of the same library
        atoms = re.sub(r',\\s*', '\n', flines).strip().split('\n')
        for line in atoms:
            parts = line.split('=')
            name = parts.pop(0)
            if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
                feat[name] = '='.join(parts)
                blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
            else:
                for dep in re.split(r",\s*", line):
                    lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
                    lib_re = re.compile('(?!^' + lib_name + '\\b)')
                    feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
                    blab("[%s] lib_deps = %s" % (feature, dep), 3)

    def load_config():
        blab("========== Gather [features] entries...")
        items = ProjectConfig().items('features')
        for key in items:
            feature = key[0].upper()
            if not feature in FEATURE_CONFIG:
                FEATURE_CONFIG[feature] = { 'lib_deps': [] }
            add_to_feat_cnf(feature, key[1])

        # Add options matching custom_marlin.MY_OPTION to the pile
        blab("========== Gather custom_marlin entries...")
        all_opts = env.GetProjectOptions()
        for n in all_opts:
            key = n[0]
            mat = re.match(r'custom_marlin\.(.+)', key)
            if mat:
                try:
                    val = env.GetProjectOption(key)
                except:
                    val = None
                if val:
                    opt = mat.group(1).upper()
                    blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
                    add_to_feat_cnf(opt, val)

    def get_all_known_libs():
        known_libs = []
        for feature in FEATURE_CONFIG:
            feat = FEATURE_CONFIG[feature]
            if not 'lib_deps' in feat:
                continue
            for dep in feat['lib_deps']:
                known_libs.append(PackageSpec(dep).name)
        return known_libs

    def get_all_env_libs():
        env_libs = []
        lib_deps = env.GetProjectOption('lib_deps')
        for dep in lib_deps:
            env_libs.append(PackageSpec(dep).name)
        return env_libs

    def set_env_field(field, value):
        proj = env.GetProjectConfig()
        proj.set("env:" + env['PIOENV'], field, value)

    # All unused libs should be ignored so that if a library
    # exists in .pio/lib_deps it will not break compilation.
    def force_ignore_unused_libs():
        env_libs = get_all_env_libs()
        known_libs = get_all_known_libs()
        diff = (list(set(known_libs) - set(env_libs)))
        lib_ignore = env.GetProjectOption('lib_ignore') + diff
        blab("Ignore libraries: %s" % lib_ignore)
        set_env_field('lib_ignore', lib_ignore)

    def apply_features_config():
        load_config()
        blab("========== Apply enabled features...")
        for feature in FEATURE_CONFIG:
            if not env.MarlinFeatureIsEnabled(feature):
                continue

            feat = FEATURE_CONFIG[feature]

            if 'lib_deps' in feat and len(feat['lib_deps']):
                blab("========== Adding lib_deps for %s... " % feature, 2)

                # feat to add
                deps_to_add = {}
                for dep in feat['lib_deps']:
                    deps_to_add[PackageSpec(dep).name] = dep
                    blab("==================== %s... " % dep, 2)

                # Does the env already have the dependency?
                deps = env.GetProjectOption('lib_deps')
                for dep in deps:
                    name = PackageSpec(dep).name
                    if name in deps_to_add:
                        del deps_to_add[name]

                # Are there any libraries that should be ignored?
                lib_ignore = env.GetProjectOption('lib_ignore')
                for dep in deps:
                    name = PackageSpec(dep).name
                    if name in deps_to_add:
                        del deps_to_add[name]

                # Is there anything left?
                if len(deps_to_add) > 0:
                    # Only add the missing dependencies
                    set_env_field('lib_deps', deps + list(deps_to_add.values()))

            if 'build_flags' in feat:
                f = feat['build_flags']
                blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
                new_flags = env.GetProjectOption('build_flags') + [ f ]
                env.Replace(BUILD_FLAGS=new_flags)

            if 'extra_scripts' in feat:
                blab("Running extra_scripts for %s... " % feature, 2)
                env.SConscript(feat['extra_scripts'], exports="env")

            if 'src_filter' in feat:
                blab("========== Adding src_filter for %s... " % feature, 2)
                src_filter = ' '.join(env.GetProjectOption('src_filter'))
                # first we need to remove the references to the same folder
                my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
                cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
                for d in my_srcs:
                    if d in cur_srcs:
                        src_filter = re.sub(r'[+-]' + d, '', src_filter)

                src_filter = feat['src_filter'] + ' ' + src_filter
                set_env_field('src_filter', [src_filter])
                env.Replace(SRC_FILTER=src_filter)

            if 'lib_ignore' in feat:
                blab("========== Adding lib_ignore for %s... " % feature, 2)
                lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
                set_env_field('lib_ignore', lib_ignore)

    #
    # Find a compiler, considering the OS
    #
    ENV_BUILD_PATH = os.path.join(env.Dictionary('PROJECT_BUILD_DIR'), env['PIOENV'])
    GCC_PATH_CACHE = os.path.join(ENV_BUILD_PATH, ".gcc_path")
    def search_compiler():
        try:
            filepath = env.GetProjectOption('custom_gcc')
            blab("Getting compiler from env")
            return filepath
        except:
            pass

        if os.path.exists(GCC_PATH_CACHE):
            with open(GCC_PATH_CACHE, 'r') as f:
                return f.read()

        # Find the current platform compiler by searching the $PATH
        # which will be in a platformio toolchain bin folder
        path_regex = re.escape(env['PROJECT_PACKAGES_DIR'])

        # See if the environment provides a default compiler
        try:
            gcc = env.GetProjectOption('custom_deps_gcc')
        except:
            gcc = "g++"

        if env['PLATFORM'] == 'win32':
            path_separator = ';'
            path_regex += r'.*\\bin'
            gcc += ".exe"
        else:
            path_separator = ':'
            path_regex += r'/.+/bin'

        # Search for the compiler
        for pathdir in env['ENV']['PATH'].split(path_separator):
            if not re.search(path_regex, pathdir, re.IGNORECASE):
                continue
            for filepath in os.listdir(pathdir):
                if not filepath.endswith(gcc):
                    continue
                # Use entire path to not rely on env PATH
                filepath = os.path.sep.join([pathdir, filepath])
                # Cache the g++ path to no search always
                if os.path.exists(ENV_BUILD_PATH):
                    with open(GCC_PATH_CACHE, 'w+') as f:
                        f.write(filepath)

                return filepath

        filepath = env.get('CXX')
        if filepath == 'CC':
            filepath = gcc
        blab("Couldn't find a compiler! Fallback to %s" % filepath)
        return filepath

    #
    # Use the compiler to get a list of all enabled features
    #
    def load_marlin_features():
        if 'MARLIN_FEATURES' in env:
            return

        # Process defines
        build_flags = env.get('BUILD_FLAGS')
        build_flags = env.ParseFlagsExtended(build_flags)

        cxx = search_compiler()
        cmd = ['"' + cxx + '"']

        # Build flags from board.json
        #if 'BOARD' in env:
        #    cmd += [env.BoardConfig().get("build.extra_flags")]
        for s in build_flags['CPPDEFINES']:
            if isinstance(s, tuple):
                cmd += ['-D' + s[0] + '=' + str(s[1])]
            else:
                cmd += ['-D' + s]

        cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++ buildroot/share/PlatformIO/scripts/common-dependencies.h']
        cmd = ' '.join(cmd)
        blab(cmd, 4)
        define_list = subprocess.check_output(cmd, shell=True).splitlines()
        marlin_features = {}
        for define in define_list:
            feature = define[8:].strip().decode().split(' ')
            feature, definition = feature[0], ' '.join(feature[1:])
            marlin_features[feature] = definition
        env['MARLIN_FEATURES'] = marlin_features

    #
    # Return True if a matching feature is enabled
    #
    def MarlinFeatureIsEnabled(env, feature):
        load_marlin_features()
        r = re.compile('^' + feature + '$')
        found = list(filter(r.match, env['MARLIN_FEATURES']))

        # Defines could still be 'false' or '0', so check
        some_on = False
        if len(found):
            for f in found:
                val = env['MARLIN_FEATURES'][f]
                if val in [ '', '1', 'true' ]:
                    some_on = True
                elif val in env['MARLIN_FEATURES']:
                    some_on = env.MarlinFeatureIsEnabled(val)

        return some_on

    validate_pio()

    try:
        verbose = int(env.GetProjectOption('custom_verbose'))
    except:
        pass

    # Add a method for other PIO scripts to query enabled features
    env.AddMethod(MarlinFeatureIsEnabled)

    # Add dependencies for enabled Marlin features
    apply_features_config()
    force_ignore_unused_libs()
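Because this script installs MarlinFeatureIsEnabled with env.AddMethod, any later extra_script can branch on a Marlin feature the same way fix_framework_weakness.py does below for POSTMORTEM_DEBUGGING. A minimal illustrative consumer, not a file from this commit, might look like this (HYPOTHETICAL_FEATURE and MY_EXTRA_DEFINE are placeholder names):

import pioutil
if pioutil.is_pio_build():
    Import("env")
    # Query the feature map built by common-dependencies.py
    if env.MarlinFeatureIsEnabled("HYPOTHETICAL_FEATURE"):
        env.Append(CPPDEFINES=["MY_EXTRA_DEFINE"])  # illustrative flag only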
@@ -1,16 +1,18 @@
#
# custom_board.py
#
# - For build.address replace VECT_TAB_ADDR to relocate the firmware
# - For build.ldscript use one of the linker scripts in buildroot/share/PlatformIO/ldscripts
#
import pioutil
if pioutil.is_pio_build():
    import marlin
    board = marlin.env.BoardConfig()

    address = board.get("build.address", "")
    if address:
        marlin.relocate_firmware(address)

    ldscript = board.get("build.ldscript", "")
    if ldscript:
        marlin.custom_ld_script(ldscript)
@ -1,51 +1,49 @@
|
||||
#
|
||||
# buildroot/share/PlatformIO/scripts/download_mks_assets.py
|
||||
# download_mks_assets.py
|
||||
# Added by HAS_TFT_LVGL_UI to download assets from Makerbase repo
|
||||
#
|
||||
Import("env")
|
||||
import os,requests,zipfile,tempfile,shutil,pioutil
|
||||
import pioutil
|
||||
if pioutil.is_pio_build():
|
||||
Import("env")
|
||||
import os,requests,zipfile,tempfile,shutil
|
||||
|
||||
# Detect that 'vscode init' is running
|
||||
if pioutil.is_vscode_init():
env.Exit(0)
url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
deps_path = env.Dictionary("PROJECT_LIBDEPS_DIR")
zip_path = os.path.join(deps_path, "mks-assets.zip")
assets_path = os.path.join(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")

url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
deps_path = env.Dictionary("PROJECT_LIBDEPS_DIR")
zip_path = os.path.join(deps_path, "mks-assets.zip")
assets_path = os.path.join(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")
def download_mks_assets():
print("Downloading MKS Assets")
r = requests.get(url, stream=True)
# the user may have a very clean workspace,
# so create the PROJECT_LIBDEPS_DIR directory if not exits
if os.path.exists(deps_path) == False:
os.mkdir(deps_path)
with open(zip_path, 'wb') as fd:
for chunk in r.iter_content(chunk_size=128):
fd.write(chunk)

def download_mks_assets():
print("Downloading MKS Assets")
r = requests.get(url, stream=True)
# the user may have a very clean workspace,
# so create the PROJECT_LIBDEPS_DIR directory if not exits
if os.path.exists(deps_path) == False:
os.mkdir(deps_path)
with open(zip_path, 'wb') as fd:
for chunk in r.iter_content(chunk_size=128):
fd.write(chunk)
def copy_mks_assets():
print("Copying MKS Assets")
output_path = tempfile.mkdtemp()
zip_obj = zipfile.ZipFile(zip_path, 'r')
zip_obj.extractall(output_path)
zip_obj.close()
if os.path.exists(assets_path) == True and os.path.isdir(assets_path) == False:
os.unlink(assets_path)
if os.path.exists(assets_path) == False:
os.mkdir(assets_path)
base_path = ''
for filename in os.listdir(output_path):
base_path = filename
for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_font')):
shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_font', filename), assets_path)
for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_pic')):
shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_pic', filename), assets_path)
shutil.rmtree(output_path, ignore_errors=True)

if os.path.exists(zip_path) == False:
download_mks_assets()

def copy_mks_assets():
print("Copying MKS Assets")
output_path = tempfile.mkdtemp()
zip_obj = zipfile.ZipFile(zip_path, 'r')
zip_obj.extractall(output_path)
zip_obj.close()
if os.path.exists(assets_path) == True and os.path.isdir(assets_path) == False:
os.unlink(assets_path)
if os.path.exists(assets_path) == False:
os.mkdir(assets_path)
base_path = ''
for filename in os.listdir(output_path):
base_path = filename
for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_font')):
shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_font', filename), assets_path)
for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_pic')):
shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_pic', filename), assets_path)
shutil.rmtree(output_path, ignore_errors=True)

if os.path.exists(zip_path) == False:
download_mks_assets()

if os.path.exists(assets_path) == False:
copy_mks_assets()
copy_mks_assets()
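The block above streams the MKS assets archive to disk with requests and unpacks it with zipfile. A minimal standalone sketch of that flow, runnable outside the SCons environment; the URL, paths and function name below are placeholders, not part of the commit, and requests must be installed:

import os, shutil, tempfile, zipfile, requests

def fetch_and_extract(url, zip_path, dest_dir):
    # Stream the archive to disk in small chunks, as download_mks_assets() does
    r = requests.get(url, stream=True)
    with open(zip_path, 'wb') as fd:
        for chunk in r.iter_content(chunk_size=128):
            fd.write(chunk)
    # Unpack into a temporary folder, copy the files out, then clean up
    tmp = tempfile.mkdtemp()
    with zipfile.ZipFile(zip_path, 'r') as z:
        z.extractall(tmp)
    os.makedirs(dest_dir, exist_ok=True)
    for root, _, files in os.walk(tmp):
        for f in files:
            shutil.copy(os.path.join(root, f), dest_dir)
    shutil.rmtree(tmp, ignore_errors=True)

# fetch_and_extract("https://example.com/assets.zip", "assets.zip", "assets")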
@ -1,32 +1,35 @@
#
# fix_framework_weakness.py
#
from os.path import join, isfile
import shutil
from pprint import pprint
import pioutil
if pioutil.is_pio_build():

Import("env")
import shutil
from os.path import join, isfile
from pprint import pprint

if env.MarlinFeatureIsEnabled("POSTMORTEM_DEBUGGING"):
FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")
Import("env")

# patch file only if we didn't do it before
if not isfile(patchflag_path):
print("Patching libmaple exception handlers")
original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")
if env.MarlinFeatureIsEnabled("POSTMORTEM_DEBUGGING"):
FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")

assert isfile(original_file) and isfile(src_file)
shutil.copyfile(original_file, backup_file)
shutil.copyfile(src_file, original_file);
# patch file only if we didn't do it before
if not isfile(patchflag_path):
print("Patching libmaple exception handlers")
original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")

def _touch(path):
with open(path, "w") as fp:
fp.write("")
assert isfile(original_file) and isfile(src_file)
shutil.copyfile(original_file, backup_file)
shutil.copyfile(src_file, original_file);

env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
print("Done patching exception handler")
def _touch(path):
with open(path, "w") as fp:
fp.write("")

print("Libmaple modified and ready for post mortem debugging")
env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
print("Done patching exception handler")

print("Libmaple modified and ready for post mortem debugging")
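The hunk above only wraps the libmaple patching in the new pioutil guard; the underlying patch-once pattern (back up the original, copy in the replacement, drop a marker file so later builds skip the step) can be sketched on its own. The function and file names below are illustrative, not the script's API:

import shutil
from os.path import isfile

def patch_once(original_file, replacement_file, patchflag_path):
    # Skip the patch if a previous build already left the marker behind
    if isfile(patchflag_path):
        return False
    shutil.copyfile(original_file, original_file + ".bak")   # keep a backup
    shutil.copyfile(replacement_file, original_file)         # install the patched copy
    with open(patchflag_path, "w") as fp:                    # leave the "done" marker
        fp.write("")
    return True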
@ -5,50 +5,52 @@
# the appropriate framework variants folder, so that its contents
# will be picked up by PlatformIO just like any other variant.
#
import os,shutil,marlin
from SCons.Script import DefaultEnvironment
from platformio import util
import pioutil
if pioutil.is_pio_build():
import os,shutil,marlin
from SCons.Script import DefaultEnvironment
from platformio import util

env = DefaultEnvironment()
env = DefaultEnvironment()

#
# Get the platform name from the 'platform_packages' option,
# or look it up by the platform.class.name.
#
platform = env.PioPlatform()
#
# Get the platform name from the 'platform_packages' option,
# or look it up by the platform.class.name.
#
platform = env.PioPlatform()

from platformio.package.meta import PackageSpec
platform_packages = env.GetProjectOption('platform_packages')
if len(platform_packages) == 0:
framewords = {
"Ststm32Platform": "framework-arduinoststm32",
"AtmelavrPlatform": "framework-arduino-avr"
}
platform_name = framewords[platform.__class__.__name__]
else:
platform_name = PackageSpec(platform_packages[0]).name
from platformio.package.meta import PackageSpec
platform_packages = env.GetProjectOption('platform_packages')
if len(platform_packages) == 0:
framewords = {
"Ststm32Platform": "framework-arduinoststm32",
"AtmelavrPlatform": "framework-arduino-avr"
}
platform_name = framewords[platform.__class__.__name__]
else:
platform_name = PackageSpec(platform_packages[0]).name

if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino" ]:
platform_name = "framework-arduinoststm32"
if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino" ]:
platform_name = "framework-arduinoststm32"

FRAMEWORK_DIR = platform.get_package_dir(platform_name)
assert os.path.isdir(FRAMEWORK_DIR)
FRAMEWORK_DIR = platform.get_package_dir(platform_name)
assert os.path.isdir(FRAMEWORK_DIR)

board = env.BoardConfig()
board = env.BoardConfig()

#mcu_type = board.get("build.mcu")[:-2]
variant = board.get("build.variant")
#series = mcu_type[:7].upper() + "xx"
#mcu_type = board.get("build.mcu")[:-2]
variant = board.get("build.variant")
#series = mcu_type[:7].upper() + "xx"

# Prepare a new empty folder at the destination
variant_dir = os.path.join(FRAMEWORK_DIR, "variants", variant)
if os.path.isdir(variant_dir):
shutil.rmtree(variant_dir)
if not os.path.isdir(variant_dir):
os.mkdir(variant_dir)
# Prepare a new empty folder at the destination
variant_dir = os.path.join(FRAMEWORK_DIR, "variants", variant)
if os.path.isdir(variant_dir):
shutil.rmtree(variant_dir)
if not os.path.isdir(variant_dir):
os.mkdir(variant_dir)

# Source dir is a local variant sub-folder
source_dir = os.path.join("buildroot/share/PlatformIO/variants", variant)
assert os.path.isdir(source_dir)
# Source dir is a local variant sub-folder
source_dir = os.path.join("buildroot/share/PlatformIO/variants", variant)
assert os.path.isdir(source_dir)

marlin.copytree(source_dir, variant_dir)
marlin.copytree(source_dir, variant_dir)
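The package lookup above either maps the platform class name to its framework package or takes the first platform_packages entry, then folds a few repackaged USB-host names back onto the ST framework. A plain-Python sketch of that decision with sample data in place of the PlatformIO objects; the split on '@' is only a rough stand-in for PackageSpec(...).name:

FRAMEWORKS = {
    "Ststm32Platform": "framework-arduinoststm32",
    "AtmelavrPlatform": "framework-arduino-avr",
}
ALIASES = [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2",
            "usb-host-msc-cdc-msc-3", "tool-stm32duino" ]

def resolve_platform_name(platform_class_name, platform_packages):
    if platform_packages:
        name = platform_packages[0].split('@')[0]     # crude stand-in for PackageSpec(...).name
    else:
        name = FRAMEWORKS[platform_class_name]
    # Several repackaged framework names all resolve to the ST core
    return "framework-arduinoststm32" if name in ALIASES else name

print(resolve_platform_name("Ststm32Platform", []))                       # framework-arduinoststm32
print(resolve_platform_name("Ststm32Platform", ["tool-stm32duino@1.0"]))  # framework-arduinoststm32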
@ -1,39 +1,40 @@
#
# buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py
# jgaurora_a5s_a1_with_bootloader.py
# Customizations for env:jgaurora_a5s_a1
#
import os,marlin
import pioutil
if pioutil.is_pio_build():
import os,marlin
# Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
def addboot(source, target, env):
firmware = open(target[0].path, "rb")
lengthfirmware = os.path.getsize(target[0].path)
bootloader_bin = "buildroot/share/PlatformIO/scripts/" + "jgaurora_bootloader.bin"
bootloader = open(bootloader_bin, "rb")
lengthbootloader = os.path.getsize(bootloader_bin)

# Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
def addboot(source, target, env):
firmware = open(target[0].path, "rb")
lengthfirmware = os.path.getsize(target[0].path)
bootloader_bin = "buildroot/share/PlatformIO/scripts/" + "jgaurora_bootloader.bin"
bootloader = open(bootloader_bin, "rb")
lengthbootloader = os.path.getsize(bootloader_bin)
firmware_with_boothloader_bin = target[0].dir.path + '/firmware_with_bootloader.bin'
if os.path.exists(firmware_with_boothloader_bin):
os.remove(firmware_with_boothloader_bin)
firmwareimage = open(firmware_with_boothloader_bin, "wb")
position = 0
while position < lengthbootloader:
byte = bootloader.read(1)
firmwareimage.write(byte)
position += 1
position = 0
while position < lengthfirmware:
byte = firmware.read(1)
firmwareimage.write(byte)
position += 1
bootloader.close()
firmware.close()
firmwareimage.close()

firmware_with_boothloader_bin = target[0].dir.path + '/firmware_with_bootloader.bin'
if os.path.exists(firmware_with_boothloader_bin):
os.remove(firmware_with_boothloader_bin)
firmwareimage = open(firmware_with_boothloader_bin, "wb")
position = 0
while position < lengthbootloader:
byte = bootloader.read(1)
firmwareimage.write(byte)
position += 1
position = 0
while position < lengthfirmware:
byte = firmware.read(1)
firmwareimage.write(byte)
position += 1
bootloader.close()
firmware.close()
firmwareimage.close()
firmware_without_bootloader_bin = target[0].dir.path + '/firmware_for_sd_upload.bin'
if os.path.exists(firmware_without_bootloader_bin):
os.remove(firmware_without_bootloader_bin)
os.rename(target[0].path, firmware_without_bootloader_bin)
#os.rename(target[0].dir.path+'/firmware_with_bootloader.bin', target[0].dir.path+'/firmware.bin')

firmware_without_bootloader_bin = target[0].dir.path + '/firmware_for_sd_upload.bin'
if os.path.exists(firmware_without_bootloader_bin):
os.remove(firmware_without_bootloader_bin)
os.rename(target[0].path, firmware_without_bootloader_bin)
#os.rename(target[0].dir.path+'/firmware_with_bootloader.bin', target[0].dir.path+'/firmware.bin')

marlin.add_post_action(addboot);
marlin.add_post_action(addboot);
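addboot() above copies the bootloader and then the firmware into one image a byte at a time. A compact equivalent of that concatenation step, with placeholder file names:

def add_bootloader(bootloader_path, firmware_path, combined_path):
    # Write the bootloader first, then the firmware, into a single image
    with open(combined_path, "wb") as out:
        for part in (bootloader_path, firmware_path):
            with open(part, "rb") as src:
                out.write(src.read())

# add_bootloader("jgaurora_bootloader.bin", "firmware.bin", "firmware_with_bootloader.bin")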
@ -1,47 +1,49 @@
#
# buildroot/share/PlatformIO/scripts/lerdge.py
# lerdge.py
# Customizations for Lerdge build environments:
# env:LERDGEX env:LERDGEX_usb_flash_drive
# env:LERDGES env:LERDGES_usb_flash_drive
# env:LERDGEK env:LERDGEK_usb_flash_drive
#
import os,marlin
Import("env")
import pioutil
if pioutil.is_pio_build():
import os,marlin
Import("env")

from SCons.Script import DefaultEnvironment
board = DefaultEnvironment().BoardConfig()
from SCons.Script import DefaultEnvironment
board = DefaultEnvironment().BoardConfig()

def encryptByte(byte):
byte = 0xFF & ((byte << 6) | (byte >> 2))
i = 0x58 + byte
j = 0x05 + byte + (i >> 8)
byte = (0xF8 & i) | (0x07 & j)
return byte
def encryptByte(byte):
byte = 0xFF & ((byte << 6) | (byte >> 2))
i = 0x58 + byte
j = 0x05 + byte + (i >> 8)
byte = (0xF8 & i) | (0x07 & j)
return byte

def encrypt_file(input, output_file, file_length):
input_file = bytearray(input.read())
for i in range(len(input_file)):
input_file[i] = encryptByte(input_file[i])
output_file.write(input_file)
def encrypt_file(input, output_file, file_length):
input_file = bytearray(input.read())
for i in range(len(input_file)):
input_file[i] = encryptByte(input_file[i])
output_file.write(input_file)

# Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt
def encrypt(source, target, env):
fwpath = target[0].path
enname = board.get("build.encrypt")
print("Encrypting %s to %s" % (fwpath, enname))
fwfile = open(fwpath, "rb")
enfile = open(target[0].dir.path + "/" + enname, "wb")
length = os.path.getsize(fwpath)
# Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt
def encrypt(source, target, env):
fwpath = target[0].path
enname = board.get("build.encrypt")
print("Encrypting %s to %s" % (fwpath, enname))
fwfile = open(fwpath, "rb")
enfile = open(target[0].dir.path + "/" + enname, "wb")
length = os.path.getsize(fwpath)

encrypt_file(fwfile, enfile, length)
encrypt_file(fwfile, enfile, length)

fwfile.close()
enfile.close()
os.remove(fwpath)
fwfile.close()
enfile.close()
os.remove(fwpath)

if 'encrypt' in board.get("build").keys():
if board.get("build.encrypt") != "":
marlin.add_post_action(encrypt)
else:
print("LERDGE builds require output file via board_build.encrypt = 'filename' parameter")
exit(1)
if 'encrypt' in board.get("build").keys():
if board.get("build.encrypt") != "":
marlin.add_post_action(encrypt)
else:
print("LERDGE builds require output file via board_build.encrypt = 'filename' parameter")
exit(1)
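encryptByte() above rotates each byte and mixes in two constants, and encrypt_file() simply applies it over the whole image. A small self-contained run of the same transformation on a sample buffer:

def encrypt_byte(byte):
    # Same arithmetic as encryptByte() in the script above
    byte = 0xFF & ((byte << 6) | (byte >> 2))
    i = 0x58 + byte
    j = 0x05 + byte + (i >> 8)
    return (0xF8 & i) | (0x07 & j)

sample = bytearray(b"MARLIN")
encoded = bytearray(encrypt_byte(b) for b in sample)
print(encoded.hex())   # the obfuscated bytes that would be written to the output file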
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/marlin.py
# marlin.py
# Helper module with some commonly-used functions
#
import os,shutil
@ -10,13 +10,13 @@ env = DefaultEnvironment()
from os.path import join

def copytree(src, dst, symlinks=False, ignore=None):
for item in os.listdir(src):
s = join(src, item)
d = join(dst, item)
if os.path.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
for item in os.listdir(src):
s = join(src, item)
d = join(dst, item)
if os.path.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)

def replace_define(field, value):
for define in env['CPPDEFINES']:
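marlin.copytree() above copies the contents of src into a destination folder that already exists, which a default shutil.copytree() call refuses to do. A self-contained demo using temporary directories (the file name is arbitrary):

import os, shutil, tempfile
from os.path import join

def copytree(src, dst, symlinks=False, ignore=None):
    # Merge each entry of src into the existing dst folder
    for item in os.listdir(src):
        s, d = join(src, item), join(dst, item)
        if os.path.isdir(s):
            shutil.copytree(s, d, symlinks, ignore)
        else:
            shutil.copy2(s, d)

src, dst = tempfile.mkdtemp(), tempfile.mkdtemp()   # dst already exists
open(join(src, "pins.h"), "w").close()
copytree(src, dst)
print(os.listdir(dst))   # ['pins.h']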
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin.py
# mks_robin.py
#
import robin
robin.prepare("0x08007000", "mks_robin.ld", "Robin.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_e3.py
# mks_robin_e3.py
#
import robin
robin.prepare("0x08005000", "mks_robin_e3.ld", "Robin_e3.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_e3p.py
# mks_robin_e3p.py
#
import robin
robin.prepare("0x08007000", "mks_robin_e3p.ld", "Robin_e3p.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_lite.py
# mks_robin_lite.py
#
import robin
robin.prepare("0x08005000", "mks_robin_lite.ld", "mksLite.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_lite3.py
# mks_robin_lite3.py
#
import robin
robin.prepare("0x08005000", "mks_robin_lite.ld", "mksLite3.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_mini.py
# mks_robin_mini.py
#
import robin
robin.prepare("0x08007000", "mks_robin_mini.ld", "Robin_mini.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_nano.py
# mks_robin_nano.py
#
import robin
robin.prepare("0x08007000", "mks_robin_nano.ld", "Robin_nano.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_nano35.py
# mks_robin_nano35.py
#
import robin
robin.prepare("0x08007000", "mks_robin_nano.ld", "Robin_nano35.bin")
@ -1,5 +1,5 @@
#
# buildroot/share/PlatformIO/scripts/mks_robin_pro.py
# mks_robin_pro.py
#
import robin
robin.prepare("0x08007000", "mks_robin_pro.ld", "Robin_pro.bin")
@ -8,54 +8,56 @@
#
# - For 'board_build.rename' add a post-action to rename the firmware file.
#
import os,sys,marlin
Import("env")
import pioutil
if pioutil.is_pio_build():
import os,sys,marlin
Import("env")

from SCons.Script import DefaultEnvironment
board = DefaultEnvironment().BoardConfig()
from SCons.Script import DefaultEnvironment
board = DefaultEnvironment().BoardConfig()

board_keys = board.get("build").keys()
board_keys = board.get("build").keys()

#
# For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
#
if 'offset' in board_keys:
LD_FLASH_OFFSET = board.get("build.offset")
marlin.relocate_vtab(LD_FLASH_OFFSET)
#
# For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
#
if 'offset' in board_keys:
LD_FLASH_OFFSET = board.get("build.offset")
marlin.relocate_vtab(LD_FLASH_OFFSET)

# Flash size
maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)
# Flash size
maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)

# Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
maximum_ram_size = board.get("upload.maximum_ram_size")
# Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
maximum_ram_size = board.get("upload.maximum_ram_size")

for i, flag in enumerate(env["LINKFLAGS"]):
if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)
for i, flag in enumerate(env["LINKFLAGS"]):
if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)

#
# For build.encrypt rename and encode the firmware file.
#
if 'encrypt' in board_keys:
#
# For build.encrypt rename and encode the firmware file.
#
if 'encrypt' in board_keys:

# Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt
def encrypt(source, target, env):
marlin.encrypt_mks(source, target, env, board.get("build.encrypt"))
# Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt
def encrypt(source, target, env):
marlin.encrypt_mks(source, target, env, board.get("build.encrypt"))

if board.get("build.encrypt") != "":
marlin.add_post_action(encrypt)
if board.get("build.encrypt") != "":
marlin.add_post_action(encrypt)

#
# For build.rename simply rename the firmware file.
#
if 'rename' in board_keys:
#
# For build.rename simply rename the firmware file.
#
if 'rename' in board_keys:

def rename_target(source, target, env):
firmware = os.path.join(target[0].dir.path, board.get("build.rename"))
import shutil
shutil.copy(target[0].path, firmware)
def rename_target(source, target, env):
firmware = os.path.join(target[0].dir.path, board.get("build.rename"))
import shutil
shutil.copy(target[0].path, firmware)

marlin.add_post_action(rename_target)
marlin.add_post_action(rename_target)
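The loop above rewrites the LD_FLASH_OFFSET and LD_MAX_DATA_SIZE --defsym entries already present in LINKFLAGS. The same rewrite on a plain list, with made-up values standing in for a real board configuration:

linkflags = [
    "-Wl,--defsym=LD_FLASH_OFFSET=0x0",
    "-Wl,--defsym=LD_MAX_DATA_SIZE=20480",
    "-Wl,-Map,firmware.map",
]
LD_FLASH_OFFSET = "0x7000"    # sample build.offset
maximum_ram_size = 65536      # sample upload.maximum_ram_size

for i, flag in enumerate(linkflags):
    if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
        linkflags[i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
    if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
        linkflags[i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)

print(linkflags)   # the two defsym entries are updated, unrelated flags are untouched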
@ -1,18 +1,20 @@
#
# Convert the ELF to an SREC file suitable for some bootloaders
#
import os,sys
from os.path import join
import pioutil
if pioutil.is_pio_build():
import os,sys
from os.path import join

Import("env")
Import("env")

board = env.BoardConfig()
board_keys = board.get("build").keys()
if 'encrypt' in board_keys:
env.AddPostAction(
join("$BUILD_DIR", "${PROGNAME}.bin"),
env.VerboseAction(" ".join([
"$OBJCOPY", "-O", "srec",
"\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encrypt")) + "\""
]), "Building $TARGET")
)
board = env.BoardConfig()
board_keys = board.get("build").keys()
if 'encrypt' in board_keys:
env.AddPostAction(
join("$BUILD_DIR", "${PROGNAME}.bin"),
env.VerboseAction(" ".join([
"$OBJCOPY", "-O", "srec",
"\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encrypt")) + "\""
]), "Building $TARGET")
)
@ -1,8 +1,8 @@
#
# buildroot/share/PlatformIO/scripts/pioutil.py
# pioutil.py
#

# Detect that 'vscode init' is running
def is_vscode_init():
# Make sure 'vscode init' is not the current command
def is_pio_build():
from SCons.Script import COMMAND_LINE_TARGETS
return "idedata" in COMMAND_LINE_TARGETS or "_idedata" in COMMAND_LINE_TARGETS
return "idedata" not in COMMAND_LINE_TARGETS and "_idedata" not in COMMAND_LINE_TARGETS
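The rewritten pioutil.is_pio_build() returns True for a normal build and False when PlatformIO is only generating IDE metadata (the 'idedata' / '_idedata' targets used for IntelliSense), which is why every script in this commit wraps its body in that guard. The same check with a plain list standing in for SCons' COMMAND_LINE_TARGETS:

def is_pio_build(command_line_targets):
    # True only when no IDE-metadata target was requested
    return "idedata" not in command_line_targets and "_idedata" not in command_line_targets

print(is_pio_build([]))                       # True  - a real PlatformIO build
print(is_pio_build(["_idedata", "idedata"]))  # False - IntelliSense run, the scripts do nothing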
@ -2,100 +2,99 @@
# preflight-checks.py
# Check for common issues prior to compiling
#
import os,re,sys,pioutil
Import("env")
import pioutil
if pioutil.is_pio_build():

# Detect that 'vscode init' is running
if pioutil.is_vscode_init():
env.Exit(0)
import os,re,sys
Import("env")

def get_envs_for_board(board):
with open(os.path.join("Marlin", "src", "pins", "pins.h"), "r") as file:
def get_envs_for_board(board):
with open(os.path.join("Marlin", "src", "pins", "pins.h"), "r") as file:

if sys.platform == 'win32':
envregex = r"(?:env|win):"
elif sys.platform == 'darwin':
envregex = r"(?:env|mac|uni):"
elif sys.platform == 'linux':
envregex = r"(?:env|lin|uni):"
else:
envregex = r"(?:env):"
if sys.platform == 'win32':
envregex = r"(?:env|win):"
elif sys.platform == 'darwin':
envregex = r"(?:env|mac|uni):"
elif sys.platform == 'linux':
envregex = r"(?:env|lin|uni):"
else:
envregex = r"(?:env):"

r = re.compile(r"if\s+MB\((.+)\)")
if board.startswith("BOARD_"):
board = board[6:]
r = re.compile(r"if\s+MB\((.+)\)")
if board.startswith("BOARD_"):
board = board[6:]

for line in file:
mbs = r.findall(line)
if mbs and board in re.split(r",\s*", mbs[0]):
line = file.readline()
found_envs = re.match(r"\s*#include .+" + envregex, line)
if found_envs:
envlist = re.findall(envregex + r"(\w+)", line)
return [ "env:"+s for s in envlist ]
return []
for line in file:
mbs = r.findall(line)
if mbs and board in re.split(r",\s*", mbs[0]):
line = file.readline()
found_envs = re.match(r"\s*#include .+" + envregex, line)
if found_envs:
envlist = re.findall(envregex + r"(\w+)", line)
return [ "env:"+s for s in envlist ]
return []

def check_envs(build_env, board_envs, config):
if build_env in board_envs:
return True
ext = config.get(build_env, 'extends', default=None)
if ext:
if isinstance(ext, str):
return check_envs(ext, board_envs, config)
elif isinstance(ext, list):
for ext_env in ext:
if check_envs(ext_env, board_envs, config):
return True
return False
def check_envs(build_env, board_envs, config):
if build_env in board_envs:
return True
ext = config.get(build_env, 'extends', default=None)
if ext:
if isinstance(ext, str):
return check_envs(ext, board_envs, config)
elif isinstance(ext, list):
for ext_env in ext:
if check_envs(ext_env, board_envs, config):
return True
return False

def sanity_check_target():
# Sanity checks:
if 'PIOENV' not in env:
raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")
def sanity_check_target():
# Sanity checks:
if 'PIOENV' not in env:
raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")

if 'MARLIN_FEATURES' not in env:
raise SystemExit("Error: this script should be used after common Marlin scripts")
if 'MARLIN_FEATURES' not in env:
raise SystemExit("Error: this script should be used after common Marlin scripts")

if 'MOTHERBOARD' not in env['MARLIN_FEATURES']:
raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h")
if 'MOTHERBOARD' not in env['MARLIN_FEATURES']:
raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h")

build_env = env['PIOENV']
motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
board_envs = get_envs_for_board(motherboard)
config = env.GetProjectConfig()
result = check_envs("env:"+build_env, board_envs, config)
build_env = env['PIOENV']
motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
board_envs = get_envs_for_board(motherboard)
config = env.GetProjectConfig()
result = check_envs("env:"+build_env, board_envs, config)

if not result:
err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
raise SystemExit(err)
if not result:
err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
raise SystemExit(err)

#
# Check for Config files in two common incorrect places
#
for p in [ env['PROJECT_DIR'], os.path.join(env['PROJECT_DIR'], "config") ]:
for f in [ "Configuration.h", "Configuration_adv.h" ]:
#
# Check for Config files in two common incorrect places
#
for p in [ env['PROJECT_DIR'], os.path.join(env['PROJECT_DIR'], "config") ]:
for f in [ "Configuration.h", "Configuration_adv.h" ]:
if os.path.isfile(os.path.join(p, f)):
err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
raise SystemExit(err)

#
# Give warnings on every build
#
warnfile = os.path.join(env['PROJECT_BUILD_DIR'], build_env, "src", "src", "inc", "Warnings.cpp.o")
if os.path.exists(warnfile):
os.remove(warnfile)

#
# Check for old files indicating an entangled Marlin (mixing old and new code)
#
mixedin = []
p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "lcd", "dogm")
for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
if os.path.isfile(os.path.join(p, f)):
err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
raise SystemExit(err)
mixedin += [ f ]
if mixedin:
err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
raise SystemExit(err)

#
# Give warnings on every build
#
warnfile = os.path.join(env['PROJECT_BUILD_DIR'], build_env, "src", "src", "inc", "Warnings.cpp.o")
if os.path.exists(warnfile):
os.remove(warnfile)

#
# Check for old files indicating an entangled Marlin (mixing old and new code)
#
mixedin = []
p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "lcd", "dogm")
for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
if os.path.isfile(os.path.join(p, f)):
mixedin += [ f ]
if mixedin:
err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
raise SystemExit(err)

sanity_check_target()
sanity_check_target()
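check_envs() above accepts a build environment either directly or through any chain of 'extends' entries in platformio.ini. A self-contained rerun of that recursion with a tiny stand-in for the project config object; the environment names are invented:

class FakeConfig:
    # Only models the single config lookup that check_envs() performs
    def __init__(self, extends_map):
        self.extends_map = extends_map
    def get(self, section, option, default=None):
        return self.extends_map.get(section, default)

def check_envs(build_env, board_envs, config):
    if build_env in board_envs:
        return True
    ext = config.get(build_env, 'extends', default=None)
    if ext:
        if isinstance(ext, str):
            return check_envs(ext, board_envs, config)
        elif isinstance(ext, list):
            return any(check_envs(e, board_envs, config) for e in ext)
    return False

config = FakeConfig({ "env:my_custom_env": "env:mega2560" })
print(check_envs("env:my_custom_env", [ "env:mega2560" ], config))  # True, via 'extends'
print(check_envs("env:unrelated_env", [ "env:mega2560" ], config))  # False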
@ -2,8 +2,8 @@
# random-bin.py
# Set a unique firmware name based on current date and time
#
Import("env")

from datetime import datetime

env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")
import pioutil
if pioutil.is_pio_build():
from datetime import datetime
Import("env")
env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")
@ -1,12 +1,14 @@
#
# buildroot/share/PlatformIO/scripts/robin.py
# robin.py
#
import marlin

# Apply customizations for a MKS Robin
def prepare(address, ldname, fwname):
def encrypt(source, target, env):
marlin.encrypt_mks(source, target, env, fwname)
marlin.relocate_firmware(address)
marlin.custom_ld_script(ldname)
marlin.add_post_action(encrypt);
import pioutil
if pioutil.is_pio_build():
import marlin
def encrypt(source, target, env):
marlin.encrypt_mks(source, target, env, fwname)
marlin.relocate_firmware(address)
marlin.custom_ld_script(ldname)
marlin.add_post_action(encrypt);
@ -1,52 +1,54 @@
#
# simulator.py
# PlatformIO pre: script for simulator builds
#
import pioutil
if pioutil.is_pio_build():
# Get the environment thus far for the build
Import("env")

# Get the environment thus far for the build
Import("env")
#print(env.Dump())

#print(env.Dump())
#
# Give the binary a distinctive name
#

#
# Give the binary a distinctive name
#
env['PROGNAME'] = "MarlinSimulator"

env['PROGNAME'] = "MarlinSimulator"
#
# If Xcode is installed add the path to its Frameworks folder,
# or if Mesa is installed try to use its GL/gl.h.
#

#
# If Xcode is installed add the path to its Frameworks folder,
# or if Mesa is installed try to use its GL/gl.h.
#
import sys
if sys.platform == 'darwin':

import sys
if sys.platform == 'darwin':
#
# Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
#
env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]

#
# Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
#
env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]
# Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
mesa_path = "/opt/local/include/GL/gl.h"

# Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
mesa_path = "/opt/local/include/GL/gl.h"
import os.path

import os.path
if os.path.exists(xcode_path):

if os.path.exists(xcode_path):
env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
print("Using OpenGL framework headers from Xcode.app")

env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
print("Using OpenGL framework headers from Xcode.app")
elif os.path.exists(mesa_path):

elif os.path.exists(mesa_path):
env['BUILD_FLAGS'] += [ '-D__MESA__' ]
print("Using OpenGL header from", mesa_path)

env['BUILD_FLAGS'] += [ '-D__MESA__' ]
print("Using OpenGL header from", mesa_path)
else:

else:
print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")

print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")
# Break out of the PIO build immediately
sys.exit(1)

# Break out of the PIO build immediately
sys.exit(1)

env.AddCustomTarget("upload", "$BUILD_DIR/${PROGNAME}", "$BUILD_DIR/${PROGNAME}")
env.AddCustomTarget("upload", "$BUILD_DIR/${PROGNAME}", "$BUILD_DIR/${PROGNAME}")
@ -1,59 +1,61 @@
#
# stm32_serialbuffer.py
#
Import("env")
import pioutil
if pioutil.is_pio_build():
Import("env")

# Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
# configure buffer sizes for receiving \ transmitting serial data.
# Stm32duino uses another set of defines for the same purpose, so this
# script gets the values from the configuration and uses them to define
# `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
# flags so they are available for use by the platform.
#
# The script will set the value as the default one (64 bytes)
# or the user-configured one, whichever is higher.
#
# Marlin's default buffer sizes are 128 for RX and 32 for TX.
# The highest value is taken (128/64).
#
# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
# defined, the first of these values will be used as the minimum.
build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
mf = env["MARLIN_FEATURES"]
# Get a build flag's value or None
def getBuildFlagValue(name):
for flag in build_flags:
if isinstance(flag, list) and flag[0] == name:
return flag[1]

# Get a build flag's value or None
def getBuildFlagValue(name):
for flag in build_flags:
if isinstance(flag, list) and flag[0] == name:
return flag[1]
return None

return None
# Get an overriding buffer size for RX or TX from the build flags
def getInternalSize(side):
return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
getBuildFlagValue(f"USART_{side}_BUF_SIZE")

# Get an overriding buffer size for RX or TX from the build flags
def getInternalSize(side):
return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
getBuildFlagValue(f"USART_{side}_BUF_SIZE")
# Get the largest defined buffer size for RX or TX
def getBufferSize(side, default):
# Get a build flag value or fall back to the given default
internal = int(getInternalSize(side) or default)
flag = side + "_BUFFER_SIZE"
# Return the largest value
return max(int(mf[flag]), internal) if flag in mf else internal

# Get the largest defined buffer size for RX or TX
def getBufferSize(side, default):
# Get a build flag value or fall back to the given default
internal = int(getInternalSize(side) or default)
flag = side + "_BUFFER_SIZE"
# Return the largest value
return max(int(mf[flag]), internal) if flag in mf else internal
# Add a build flag if it's not already defined
def tryAddFlag(name, value):
if getBuildFlagValue(name) is None:
env.Append(BUILD_FLAGS=[f"-D{name}={value}"])

# Add a build flag if it's not already defined
def tryAddFlag(name, value):
if getBuildFlagValue(name) is None:
env.Append(BUILD_FLAGS=[f"-D{name}={value}"])
# Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
# configure buffer sizes for receiving \ transmitting serial data.
# Stm32duino uses another set of defines for the same purpose, so this
# script gets the values from the configuration and uses them to define
# `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
# flags so they are available for use by the platform.
#
# The script will set the value as the default one (64 bytes)
# or the user-configured one, whichever is higher.
#
# Marlin's default buffer sizes are 128 for RX and 32 for TX.
# The highest value is taken (128/64).
#
# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
# defined, the first of these values will be used as the minimum.
build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
mf = env["MARLIN_FEATURES"]

# Get the largest defined buffer sizes for RX or TX, using defaults for undefined
rxBuf = getBufferSize("RX", 128)
txBuf = getBufferSize("TX", 64)
# Get the largest defined buffer sizes for RX or TX, using defaults for undefined
rxBuf = getBufferSize("RX", 128)
txBuf = getBufferSize("TX", 64)

# Provide serial buffer sizes to the stm32duino platform
tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
tryAddFlag("USART_TX_BUF_SIZE", txBuf)
# Provide serial buffer sizes to the stm32duino platform
tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
tryAddFlag("USART_TX_BUF_SIZE", txBuf)
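getBufferSize() above takes the larger of the size Marlin was configured with and any size already forced through the build flags. A worked example with plain dictionaries in place of the parsed CPPDEFINES and MARLIN_FEATURES; only the SERIAL_*_BUFFER_SIZE override is modelled here and the values are made up:

build_flag_defines = { "SERIAL_RX_BUFFER_SIZE": "256" }                  # e.g. user build_flags
marlin_features    = { "RX_BUFFER_SIZE": "128", "TX_BUFFER_SIZE": "32" }

def buffer_size(side, default):
    # Prefer an explicit platform-level flag, then fall back to the given default
    internal = int(build_flag_defines.get(f"SERIAL_{side}_BUFFER_SIZE", default))
    flag = side + "_BUFFER_SIZE"
    # Take whichever of the Marlin setting and the platform value is larger
    return max(int(marlin_features[flag]), internal) if flag in marlin_features else internal

print(buffer_size("RX", 128))   # 256 - the explicit platform flag wins over Marlin's 128
print(buffer_size("TX", 64))    # 64  - the stm32duino default beats Marlin's TX size of 32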
@ -326,7 +326,6 @@ platform = ${common_stm32f1.platform}
extends = common_stm32f1
board = marlin_CHITU_F103
extra_scripts = ${common_stm32f1.extra_scripts}
pre:buildroot/share/PlatformIO/scripts/common-dependencies.py
pre:buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py
buildroot/share/PlatformIO/scripts/chitu_crypt.py
build_flags = ${common_stm32f1.build_flags}