initial commit

This commit is contained in:
2026-02-12 00:45:31 -08:00
commit 5f168f370b
3024 changed files with 804889 additions and 0 deletions

View File

@@ -0,0 +1,114 @@
#!/usr/bin/env python3
# pyright: reportUnknownMemberType=false, reportMissingParameterType=false
"""
Script to check for includes after namespace declarations in C++ files.
This is used in CI/CD to prevent bad code patterns.
"""
import os
import re
import sys
from pathlib import Path
from typing import Any, Dict, List
def find_includes_after_namespace(file_path: Path) -> List[int]:
    """
    Check if a C++ file has #include directives after namespace declarations.

    Args:
        file_path (Path): Path to the C++ file to check

    Returns:
        List[int]: List of line numbers where includes appear after namespaces
    """
    # Basic patterns: a one-line `namespace name {` opener and any #include.
    namespace_re = re.compile(r"^\s*namespace\s+\w+\s*\{")
    include_re = re.compile(r"^\s*#\s*include")
    try:
        with open(file_path, "r", encoding="utf-8") as handle:
            raw_lines = handle.readlines()
    except (UnicodeDecodeError, IOError):
        # Skip files that can't be read.
        return []
    offending: List[int] = []
    inside_namespace = False
    for lineno, raw in enumerate(raw_lines, 1):
        stripped = raw.strip()
        # Skip empty lines and lines that begin a comment.
        if not stripped or stripped.startswith(("//", "/*")):
            continue
        if namespace_re.match(stripped):
            inside_namespace = True
        # Any include once a namespace has been opened is a violation.
        if inside_namespace and include_re.match(stripped):
            offending.append(lineno)
    return offending
def scan_cpp_files(directory: str = ".") -> Dict[str, Any]:
    """
    Scan all C++ files in a directory for includes after namespace declarations.

    Args:
        directory (str): Directory to scan for C++ files

    Returns:
        Dict[str, Any]: Dictionary mapping file paths to lists of violation line numbers
    """
    # str.endswith accepts a tuple, so one call covers every C++ extension.
    cpp_extensions = (
        ".cpp",
        ".cc",
        ".cxx",
        ".c++",
        ".hpp",
        ".h",
        ".hh",
        ".hxx",
        ".h++",
    )
    results: Dict[str, Any] = {}
    for root, _dirs, filenames in os.walk(directory):
        for name in filenames:
            if not name.endswith(cpp_extensions):
                continue
            full_path = os.path.join(root, name)
            offending: List[int] = find_includes_after_namespace(Path(full_path))
            if offending:
                results[full_path] = offending
    return results
def main() -> None:
    """Scan the current tree and exit 1 if any violations are found, else 0."""
    results: Dict[str, Any] = scan_cpp_files()
    if not results:
        print("No violations found.")
        sys.exit(0)
    print("Found #include directives after namespace declarations:")
    for path, line_numbers in results.items():
        print(f"\n{path}:")
        for number in line_numbers:
            print(f" Line {number}")
    failing: List[str] = list(results.keys())
    print(f"\nFailing files: {failing}")
    sys.exit(1)
# Run the checker when executed as a script.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python3
# pyright: reportUnknownMemberType=false, reportMissingParameterType=false
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from typing import List
from ci.util.paths import PROJECT_ROOT
# Root of the C++ sources scanned by the test below.
SRC_ROOT = PROJECT_ROOT / "src"
# Platform-specific sources. NOTE(review): appears unused in this file's
# visible code — confirm before removing.
PLATFORMS_DIR = os.path.join(SRC_ROOT, "platforms")
# Thread-pool width: serial when NO_PARALLEL is set, otherwise 4x CPU count.
NUM_WORKERS = 1 if os.environ.get("NO_PARALLEL") else (os.cpu_count() or 1) * 4
class NoUsingNamespaceFlInHeaderTester(unittest.TestCase):
def check_file(self, file_path: str) -> List[str]:
if "FastLED.h" in file_path:
return []
failings: List[str] = []
with open(file_path, "r", encoding="utf-8") as f:
for line_number, line in enumerate(f, 1):
if line.startswith("//"):
continue
if "using namespace fl;" in line:
failings.append(f"{file_path}:{line_number}: {line.strip()}")
return failings
def test_no_using_namespace(self) -> None:
"""Searches through the program files to check for banned headers, excluding src/platforms."""
files_to_check: List[str] = []
for root, _, files in os.walk(SRC_ROOT):
for file in files:
if file.endswith(
(".h", ".hpp")
): # Add or remove file extensions as needed
file_path = os.path.join(root, file)
files_to_check.append(file_path)
all_failings: List[str] = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
if all_failings:
msg = (
f'Found {len(all_failings)} header file(s) "using namespace fl": \n'
+ "\n".join(all_failings)
)
for failing in all_failings:
print(failing)
self.fail(msg)
else:
print("No using namespace fl; found in headers.")
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,74 @@
import subprocess
import unittest
import warnings
from pathlib import Path
from ci.util.bin_2_elf import bin_to_elf
from ci.util.elf import dump_symbol_sizes
from ci.util.paths import PROJECT_ROOT
from ci.util.tools import Tools, load_tools
# Directory containing this test file.
HERE = Path(__file__).resolve().parent.absolute()
# Fixture locations for the Uno firmware artifacts and conversion output.
UNO = HERE / "uno"
OUTPUT = HERE / "output"
# Primary and fallback locations of the PlatformIO build metadata.
BUILD_INFO_PATH = PROJECT_ROOT / ".build" / "examples" / "uno" / "build_info.json"
BUILD_INFO_PATH2 = (
    PROJECT_ROOT / ".build" / "fled" / "examples" / "uno" / "build_info.json"
)
# Master switch: when True the whole test class is a no-op.
DISABLED = True
class TestBinToElf(unittest.TestCase):
    """Exercises hex->ELF conversion for the Uno example firmware.

    NOTE(review): DISABLED is True at module level, so setUpClass and the test
    body both return immediately; the test is additionally @unittest.skip'd.
    """

    @classmethod
    def setUpClass(cls):
        # Build the Uno example once for the whole class, if artifacts are absent.
        if DISABLED:
            return
        uno_build = PROJECT_ROOT / ".build" / "uno"
        print(f"Checking for Uno build in: {uno_build}")
        if not uno_build.exists():
            print("Uno build not found. Running compilation...")
            try:
                subprocess.run(
                    "uv run python -m ci.ci-compile uno --examples Blink",
                    shell=True,
                    check=True,
                )
                print("Compilation completed successfully.")
            except subprocess.CalledProcessError as e:
                print(f"Error during compilation: {e}")
                raise

    @unittest.skip("Skip bin to elf conversion test")
    def test_bin_to_elf_conversion(self) -> None:
        if DISABLED:
            return
        tools: Tools
        try:
            tools = load_tools(BUILD_INFO_PATH)
        except FileNotFoundError as e:
            # Fall back to the alternate build-info location.
            warnings.warn(f"Error while loading tools: {e}")
            tools = load_tools(BUILD_INFO_PATH2)
        bin_file = UNO / "firmware.hex"
        map_file = UNO / "firmware.map"
        output_elf = OUTPUT / "output.elf"
        try:
            bin_to_elf(
                bin_file,
                map_file,
                tools.as_path,
                tools.ld_path,
                tools.objcopy_path,
                output_elf,
            )
            stdout = dump_symbol_sizes(tools.nm_path, tools.cpp_filt_path, output_elf)
            print(stdout)
        except Exception as e:
            # Best-effort: conversion problems are reported but do not fail the suite.
            warnings.warn(f"Error while converting binary to ELF: {e}")
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,53 @@
import unittest
from ci.boards import Board
class TestBoardToPlatformioIni(unittest.TestCase):
"""Tests for Board.to_platformio_ini()."""
def _ini_to_set(self, ini: str) -> set[str]:
"""Return a set with each non-empty, stripped line of the ini snippet."""
return {line.strip() for line in ini.splitlines() if line.strip()}
def test_basic_fields(self):
board = Board(board_name="uno", platform="atmelavr", framework="arduino")
ini = board.to_platformio_ini()
lines = self._ini_to_set(ini)
expected = {
"[env:uno]",
"board = uno",
"platform = atmelavr",
"framework = arduino",
}
self.assertTrue(expected.issubset(lines))
# Should not reference internal attributes
self.assertNotIn("platform_needs_install", ini)
def test_real_board_name(self):
board = Board(
board_name="esp32c3",
real_board_name="esp32-c3-devkitm-1",
platform="espressif32",
)
ini = board.to_platformio_ini()
lines = self._ini_to_set(ini)
self.assertIn("[env:esp32c3]", lines)
self.assertIn("board = esp32-c3-devkitm-1", lines)
def test_flags_and_unflags(self):
board = Board(
board_name="custom",
defines=["FASTLED_TEST=1"],
build_flags=["-O2"],
)
ini = board.to_platformio_ini()
lines = self._ini_to_set(ini)
# The build_flags are in multi-line format - check that both flags are present as separate lines
self.assertIn("build_flags =", lines)
self.assertIn("-DFASTLED_TEST=1", lines)
self.assertIn("-O2", lines)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,357 @@
#!/usr/bin/env python3
"""
Unit tests for BuildFlags TOML parsing and serialization functionality.
Tests the BuildFlags and BuildTools classes for parsing TOML build configuration
files and serializing back to TOML format.
"""
import tempfile
import unittest
from pathlib import Path
from ci.compiler.clang_compiler import ArchiveOptions, BuildFlags, BuildTools
class TestBuildFlagsToml(unittest.TestCase):
    """Test BuildFlags TOML parsing and serialization"""

    def setUp(self) -> None:
        """Set up test fixtures"""
        # Each test gets a fresh temp dir; tearDown removes it.
        self.temp_dir = Path(tempfile.mkdtemp())

    def tearDown(self) -> None:
        """Clean up test fixtures"""
        # Clean up any test files
        for file in self.temp_dir.glob("*.toml"):
            file.unlink()
        self.temp_dir.rmdir()

    def create_test_toml(self, content: str) -> Path:
        """Create a test TOML file with the given content"""
        test_file = self.temp_dir / "test_build_flags.toml"
        with open(test_file, "w", encoding="utf-8") as f:
            f.write(content)
        return test_file

    def test_build_tools_defaults(self) -> None:
        """Test BuildTools default values"""
        tools = BuildTools(
            cpp_compiler=[],
            archiver=[],
            linker=[],
            c_compiler=[],
            objcopy=[],
            nm=[],
            strip=[],
            ranlib=[],
        )
        # Test modern command-based fields (should be empty by default)
        self.assertEqual(tools.cpp_compiler, [])
        self.assertEqual(tools.archiver, [])
        self.assertEqual(tools.linker, [])
        # Test other important fields
        self.assertEqual(tools.c_compiler, [])
        self.assertEqual(tools.objcopy, [])
        self.assertEqual(tools.nm, [])
        self.assertEqual(tools.strip, [])
        self.assertEqual(tools.ranlib, [])

    def test_parse_minimal_toml(self) -> None:
        """Test parsing minimal TOML file with required tools section"""
        toml_content = """
[all]
defines = ["-DTEST=1"]
compiler_flags = ["-Wall"]
include_flags = ["-I."]
[tools]
cpp_compiler = ["uv", "run", "python", "-m", "ziglang", "c++"]
linker = ["uv", "run", "python", "-m", "ziglang", "c++"]
c_compiler = ["clang"]
objcopy = ["uv", "run", "python", "-m", "ziglang", "objcopy"]
nm = ["uv", "run", "python", "-m", "ziglang", "nm"]
strip = ["uv", "run", "python", "-m", "ziglang", "strip"]
ranlib = ["uv", "run", "python", "-m", "ziglang", "ranlib"]
archiver = ["uv", "run", "python", "-m", "ziglang", "ar"]
[archive]
flags = "rcsD"
[linking.base]
flags = ["-pthread"]
[strict_mode]
flags = ["-Werror"]
"""
        test_file = self.create_test_toml(toml_content)
        flags = BuildFlags.parse(test_file, quick_build=False, strict_mode=False)
        # Check basic flags
        self.assertEqual(flags.defines, ["-DTEST=1"])
        self.assertEqual(flags.compiler_flags, ["-Wall"])
        self.assertEqual(flags.include_flags, ["-I."])
        self.assertEqual(flags.link_flags, ["-pthread"])
        self.assertEqual(flags.strict_mode_flags, ["-Werror"])
        # Check tools (from [tools] section) - modern command-based approach
        self.assertEqual(flags.tools.c_compiler, ["clang"])
        self.assertEqual(
            flags.tools.linker, ["uv", "run", "python", "-m", "ziglang", "c++"]
        )
        self.assertEqual(
            flags.tools.cpp_compiler,
            ["uv", "run", "python", "-m", "ziglang", "c++"],
        )
        self.assertEqual(
            flags.tools.objcopy, ["uv", "run", "python", "-m", "ziglang", "objcopy"]
        )
        self.assertEqual(flags.tools.nm, ["uv", "run", "python", "-m", "ziglang", "nm"])
        self.assertEqual(
            flags.tools.strip, ["uv", "run", "python", "-m", "ziglang", "strip"]
        )
        self.assertEqual(
            flags.tools.ranlib, ["uv", "run", "python", "-m", "ziglang", "ranlib"]
        )
        self.assertEqual(
            flags.tools.archiver, ["uv", "run", "python", "-m", "ziglang", "ar"]
        )

    def test_parse_toml_with_tools(self) -> None:
        """Test parsing TOML file with [tools] section"""
        toml_content = """
[all]
defines = ["-DTEST=1"]
compiler_flags = ["-Wall"]
include_flags = ["-I."]
[tools]
cpp_compiler = ["g++"]
archiver = ["gcc-ar"]
linker = ["ld.gold"]
c_compiler = ["gcc"]
objcopy = ["arm-objcopy"]
nm = ["arm-nm"]
strip = ["arm-strip"]
ranlib = ["arm-ranlib"]
[archive]
flags = "rcsD"
[linking.base]
flags = ["-pthread"]
"""
        test_file = self.create_test_toml(toml_content)
        flags = BuildFlags.parse(test_file, quick_build=False, strict_mode=False)
        # Check that tools were parsed correctly - modern command-based approach
        self.assertEqual(flags.tools.cpp_compiler, ["g++"])
        self.assertEqual(flags.tools.archiver, ["gcc-ar"])
        self.assertEqual(flags.tools.linker, ["ld.gold"])
        self.assertEqual(flags.tools.c_compiler, ["gcc"])
        self.assertEqual(flags.tools.objcopy, ["arm-objcopy"])
        self.assertEqual(flags.tools.nm, ["arm-nm"])
        self.assertEqual(flags.tools.strip, ["arm-strip"])
        self.assertEqual(flags.tools.ranlib, ["arm-ranlib"])

    def test_parse_partial_tools_section(self) -> None:
        """Test parsing TOML with all required [tools] section fields"""
        toml_content = """
[all]
defines = ["-DTEST=1"]
[tools]
cpp_compiler = ["custom-clang++"]
archiver = ["custom-ar"]
linker = ["custom-linker"]
c_compiler = ["clang"]
objcopy = ["custom-objcopy"]
nm = ["custom-nm"]
strip = ["custom-strip"]
ranlib = ["custom-ranlib"]
# All tools must be provided - no defaults allowed
[archive]
flags = "rcsD"
"""
        test_file = self.create_test_toml(toml_content)
        flags = BuildFlags.parse(test_file, quick_build=False, strict_mode=False)
        # Check all tools are set as specified - strict validation
        self.assertEqual(flags.tools.cpp_compiler, ["custom-clang++"])
        self.assertEqual(flags.tools.archiver, ["custom-ar"])
        self.assertEqual(flags.tools.linker, ["custom-linker"])
        self.assertEqual(flags.tools.c_compiler, ["clang"])
        self.assertEqual(flags.tools.objcopy, ["custom-objcopy"])
        self.assertEqual(flags.tools.nm, ["custom-nm"])
        self.assertEqual(flags.tools.strip, ["custom-strip"])
        self.assertEqual(flags.tools.ranlib, ["custom-ranlib"])

    def test_serialize_build_flags_with_tools(self) -> None:
        """Test serializing BuildFlags with tools to TOML"""
        # Create BuildFlags with custom tools
        custom_tools = BuildTools(
            linker=["arm-none-eabi-ld"],
            c_compiler=["arm-none-eabi-gcc"],
            objcopy=["arm-none-eabi-objcopy"],
            nm=["arm-none-eabi-nm"],
            strip=["arm-none-eabi-strip"],
            ranlib=["arm-none-eabi-ranlib"],
            cpp_compiler=["arm-none-eabi-g++"],
            archiver=["arm-none-eabi-ar"],
        )
        flags = BuildFlags(
            defines=["-DARM_BUILD=1"],
            compiler_flags=["-mcpu=cortex-m4", "-mthumb"],
            include_flags=["-I.", "-Iarm"],
            link_flags=["-nostdlib"],
            strict_mode_flags=["-Werror"],
            tools=custom_tools,
            archive=ArchiveOptions(flags="rcsD"),
        )
        # Serialize to TOML
        toml_output = flags.serialize()
        # Check that tools section is present with new field names
        self.assertIn("[tools]", toml_output)
        self.assertIn("cpp_compiler = ['arm-none-eabi-g++']", toml_output)
        self.assertIn("archiver = ['arm-none-eabi-ar']", toml_output)
        self.assertIn("linker = ['arm-none-eabi-ld']", toml_output)
        self.assertIn("c_compiler = ['arm-none-eabi-gcc']", toml_output)
        self.assertIn("objcopy = ['arm-none-eabi-objcopy']", toml_output)
        self.assertIn("nm = ['arm-none-eabi-nm']", toml_output)
        self.assertIn("strip = ['arm-none-eabi-strip']", toml_output)
        self.assertIn("ranlib = ['arm-none-eabi-ranlib']", toml_output)

    def test_serialize_with_none_linker(self) -> None:
        """Test serializing BuildFlags when linker is None"""
        flags = BuildFlags(
            defines=["-DTEST=1"],
            compiler_flags=[],
            include_flags=[],
            link_flags=[],
            strict_mode_flags=[],
            tools=BuildTools(
                linker=[],  # Empty list instead of None
                cpp_compiler=["clang++"],
                c_compiler=["clang"],
                archiver=[],
                objcopy=[],
                nm=[],
                strip=[],
                ranlib=[],
            ),
            archive=ArchiveOptions(flags="rcsD"),
        )
        toml_output = flags.serialize()
        # Check that tools section is present but linker is omitted
        self.assertIn("[tools]", toml_output)
        self.assertIn("cpp_compiler = ['clang++']", toml_output)
        self.assertNotIn("linker =", toml_output)  # Should be omitted when empty

    def test_round_trip_toml_parsing(self) -> None:
        """Test that parse -> serialize -> parse maintains data integrity"""
        # Create original flags
        original_tools = BuildTools(
            linker=["test-ld"],
            c_compiler=["test-gcc"],
            cpp_compiler=["test-compiler"],
            archiver=["test-ar"],
            objcopy=["test-objcopy"],
            nm=["test-nm"],
            strip=["test-strip"],
            ranlib=["test-ranlib"],
        )
        original_flags = BuildFlags(
            defines=["-DROUND_TRIP=1"],
            compiler_flags=["-Wall", "-O2"],
            include_flags=["-I.", "-Itest"],
            link_flags=["-pthread"],
            strict_mode_flags=["-Werror"],
            tools=original_tools,
            archive=ArchiveOptions(flags="rcsD"),
        )
        # Serialize to TOML
        # NOTE(review): the serialize() result is unused; to_toml_file below
        # writes the file directly. Kept as a smoke check that serialize() runs.
        toml_content = original_flags.serialize()
        # Write to temporary file
        temp_file = self.temp_dir / "roundtrip.toml"
        original_flags.to_toml_file(temp_file)
        # Parse back from file
        parsed_flags = BuildFlags.parse(temp_file, quick_build=False, strict_mode=False)
        # Check that all data is preserved
        self.assertEqual(parsed_flags.defines, original_flags.defines)
        self.assertEqual(parsed_flags.compiler_flags, original_flags.compiler_flags)
        self.assertEqual(parsed_flags.include_flags, original_flags.include_flags)
        self.assertEqual(parsed_flags.link_flags, original_flags.link_flags)
        self.assertEqual(
            parsed_flags.strict_mode_flags, original_flags.strict_mode_flags
        )
        # Check tools - modern command-based approach
        self.assertEqual(
            parsed_flags.tools.cpp_compiler, original_flags.tools.cpp_compiler
        )
        self.assertEqual(parsed_flags.tools.archiver, original_flags.tools.archiver)
        self.assertEqual(parsed_flags.tools.linker, original_flags.tools.linker)
        self.assertEqual(parsed_flags.tools.c_compiler, original_flags.tools.c_compiler)

    def test_parse_missing_file(self) -> None:
        """Test parsing non-existent TOML file raises FileNotFoundError"""
        nonexistent_file = self.temp_dir / "does_not_exist.toml"
        # Should raise FileNotFoundError when file is missing
        with self.assertRaises(FileNotFoundError) as context:
            BuildFlags.parse(nonexistent_file, quick_build=False, strict_mode=False)
        self.assertIn("Required build_flags.toml not found", str(context.exception))

    def test_from_toml_file_alias(self) -> None:
        """Test that from_toml_file() is an alias for parse()"""
        toml_content = """
[all]
defines = ["-DALIAS_TEST=1"]
[tools]
cpp_compiler = ["alias-compiler"]
linker = ["alias-linker"]
c_compiler = ["clang"]
objcopy = ["alias-objcopy"]
nm = ["alias-nm"]
strip = ["alias-strip"]
ranlib = ["alias-ranlib"]
archiver = ["alias-archiver"]
[archive]
flags = "rcsD"
"""
        test_file = self.create_test_toml(toml_content)
        # Parse using both methods
        flags_parse = BuildFlags.parse(test_file, quick_build=True, strict_mode=False)
        flags_alias = BuildFlags.from_toml_file(
            test_file, quick_build=True, strict_mode=False
        )
        # Should be identical
        self.assertEqual(flags_parse.defines, flags_alias.defines)
        self.assertEqual(flags_parse.tools.cpp_compiler, flags_alias.tools.cpp_compiler)
        self.assertEqual(flags_parse.tools.cpp_compiler, ["alias-compiler"])
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env python3
"""
Tests for BuildFlags TOML parsing in ci/ directory.
This test discovers all top-level *.toml files under the project ci/ folder
and verifies they can be parsed by BuildFlags.parse without throwing.
"""
import unittest
from pathlib import Path
from typing import List
from ci.compiler.clang_compiler import BuildFlags
def find_ci_toml_files(ci_dir: Path) -> List[Path]:
    """Return a list of top-level *.toml files in ci/.

    Args:
        ci_dir: Absolute path to the ci/ directory

    Returns:
        List of absolute paths to .toml files directly under ci/.
    """
    # Non-recursive by design: only direct children of ci/ are considered.
    return [
        child.resolve()
        for child in ci_dir.iterdir()
        if child.is_file() and child.suffix == ".toml"
    ]
class TestBuildFlagsParsing(unittest.TestCase):
    """Every top-level ci/*.toml must parse cleanly via BuildFlags.parse."""

    def test_parse_all_ci_tomls(self) -> None:
        """Discover ci/*.toml files and parse each, failing on any exception."""
        root: Path = Path(__file__).resolve().parent.parent.parent
        ci_dir: Path = root / "ci"
        self.assertTrue(ci_dir.exists(), f"ci directory not found at {ci_dir}")
        discovered: List[Path] = find_ci_toml_files(ci_dir)
        self.assertTrue(len(discovered) > 0, "No .toml files found in ci/")
        for toml_path in discovered:
            with self.subTest(toml=str(toml_path)):
                # Ensure each ci/*.toml parses without throwing
                _ = BuildFlags.parse(toml_path, quick_build=True, strict_mode=False)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,65 @@
import subprocess
import unittest
import warnings
from pathlib import Path
from ci.util.elf import dump_symbol_sizes
from ci.util.paths import PROJECT_ROOT
from ci.util.tools import Tools, load_tools
# Directory containing this test file.
HERE = Path(__file__).resolve().parent.absolute()
# Fixture locations for the Uno firmware artifacts.
UNO = HERE / "uno"
OUTPUT = HERE / "output"
ELF_FILE = UNO / "firmware.elf"
# Primary and fallback locations of the PlatformIO build metadata.
BUILD_INFO_PATH = PROJECT_ROOT / ".build" / "uno" / "build_info.json"
BUILD_INFO_PATH2 = (
    PROJECT_ROOT / ".build" / "fled" / "examples" / "uno" / "build_info.json"
)
# PlatformIO home and the AVR toolchain package it installs.
PLATFORMIO_PATH = Path.home() / ".platformio"
PLATFORMIO_PACKAGES_PATH = PLATFORMIO_PATH / "packages"
TOOLCHAIN_AVR = PLATFORMIO_PACKAGES_PATH / "toolchain-atmelavr"
def init() -> None:
    """Compile the Uno Blink example if its build artifacts are missing.

    Raises:
        subprocess.CalledProcessError: if the compile command fails.
    """
    uno_build = PROJECT_ROOT / ".build" / "uno"
    print(f"Checking for Uno build in: {uno_build}")
    # Both the build metadata and the AVR toolchain must be present to skip.
    if BUILD_INFO_PATH.exists() and TOOLCHAIN_AVR.exists():
        return
    print("Uno build not found. Running compilation...")
    try:
        subprocess.run(
            "uv run python -m ci.ci-compile uno --examples Blink",
            shell=True,
            check=True,
            cwd=str(PROJECT_ROOT),
        )
    except subprocess.CalledProcessError as e:
        print(f"Error during compilation: {e}")
        raise
    print("Compilation completed successfully.")
class TestBinToElf(unittest.TestCase):
    """Dumps symbol sizes from the prebuilt Uno firmware ELF."""

    def test_bin_to_elf_conversion(self) -> None:
        # Skip test if required UNO build directory is missing
        uno_build_dir = PROJECT_ROOT / ".build" / "uno"
        if not uno_build_dir.exists():
            warnings.warn(
                "Skipping TestBinToElf::test_bin_to_elf_conversion because .build/uno does not exist. "
                "Run 'uv run ci/ci-compile.py uno --examples Blink' to generate it."
            )
            self.skipTest(".build/uno missing; skipping ELF conversion test")
        tools: Tools
        try:
            tools = load_tools(BUILD_INFO_PATH)
        except Exception as e:
            # Fall back to the alternate build-info location.
            warnings.warn(f"Error while loading tools: {e}")
            tools = load_tools(BUILD_INFO_PATH2)
        # Print the demangled symbol-size report for the fixture ELF.
        msg = dump_symbol_sizes(tools.nm_path, tools.cpp_filt_path, ELF_FILE)
        print(msg)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,450 @@
"""
Comprehensive test suite for fingerprint cache functionality.
Tests cover:
- Core functionality (cache hits, misses, content changes)
- Edge cases (touched files, copied files, corruption)
- Performance requirements
- Integration scenarios
"""
import json
import os
import shutil
import tempfile
import time
from pathlib import Path
from typing import List, Optional
from unittest import TestCase
# Import the fingerprint cache module
from ci.ci.fingerprint_cache import CacheEntry, FingerprintCache, has_changed
class TestFingerprintCache(TestCase):
"""Test suite for fingerprint cache functionality."""
def setUp(self) -> None:
    """Create an isolated temp tree with cache/ and temp/ subdirectories."""
    self.test_dir = Path(tempfile.mkdtemp())
    self.cache_dir = self.test_dir / "cache"
    self.temp_dir = self.test_dir / "temp"
    for subdir in (self.cache_dir, self.temp_dir):
        subdir.mkdir(exist_ok=True)
def tearDown(self) -> None:
    """Remove the whole temp tree; errors (e.g. races) are ignored."""
    root = self.test_dir
    shutil.rmtree(root, ignore_errors=True)
def create_test_file(
    self, path: Path, content: str, modtime: Optional[float] = None
) -> Path:
    """Create a test file with specific content and optional modification time."""
    with open(path, "w") as handle:
        handle.write(content)
    if modtime:
        # Pin both atime and mtime so mtime-based checks are deterministic.
        os.utime(path, (modtime, modtime))
    return path
def touch_file(self, path: Path) -> float:
    """Touch a file to update its modification time without changing content."""
    # Small delay so the filesystem records a strictly newer mtime.
    time.sleep(0.01)
    path.touch()
    return path.stat().st_mtime
def modify_file_content(self, path: Path, new_content: str) -> float:
    """Overwrite file content and return the new modification time."""
    time.sleep(0.01)  # ensure the mtime differs from the previous write
    with open(path, "w") as handle:
        handle.write(new_content)
    return path.stat().st_mtime
def test_cache_hit_modtime_unchanged(self) -> None:
    """Test that identical modtime returns False immediately."""
    cache_file = self.cache_dir / "test_cache.json"
    cache = FingerprintCache(cache_file)
    # Create test file
    test_file = self.temp_dir / "test.txt"
    self.create_test_file(test_file, "original content")
    original_modtime = os.path.getmtime(test_file)
    # Same modtime should return False (no change detected)
    result1 = cache.has_changed(test_file, original_modtime)
    self.assertFalse(result1, "Same modtime should return False")
    # Verify fast performance (cache hit)
    start_time = time.time()
    result2 = cache.has_changed(test_file, original_modtime)
    elapsed = time.time() - start_time
    self.assertFalse(result2, "Same modtime should return False")
    # NOTE(review): wall-clock bound — may be flaky on heavily loaded machines.
    self.assertLess(
        elapsed, 0.002, f"Cache hit should be <2ms, took {elapsed * 1000:.2f}ms"
    )
def test_cache_hit_with_existing_cache(self) -> None:
    """Test cache hit when file exists in cache with current modtime."""
    cache_file = self.cache_dir / "test_cache.json"
    cache = FingerprintCache(cache_file)
    # Create and cache a file
    test_file = self.temp_dir / "cached.txt"
    self.create_test_file(test_file, "cached content")
    current_modtime = os.path.getmtime(test_file)
    # Prime the cache
    cache.has_changed(
        test_file, current_modtime - 1
    )  # Different modtime to force caching
    # Test cache hit
    result = cache.has_changed(test_file, current_modtime)
    self.assertFalse(
        result, "File with current modtime in cache should return False"
    )
def test_modtime_changed_content_same(self) -> None:
    """Critical test: modtime changes but content identical should return False."""
    cache_file = self.cache_dir / "test_cache.json"
    cache = FingerprintCache(cache_file)
    # Create test file
    test_file = self.temp_dir / "touched.txt"
    original_content = "unchanged content"
    self.create_test_file(test_file, original_content)
    original_modtime = os.path.getmtime(test_file)
    # Prime cache with original state
    cache.has_changed(test_file, original_modtime - 1)  # Force caching
    # Touch file (change modtime but not content)
    new_modtime = self.touch_file(test_file)
    self.assertNotEqual(
        new_modtime, original_modtime, "Touch should change modtime"
    )
    # Verify content is still the same
    with open(test_file, "r") as f:
        current_content = f.read()
    self.assertEqual(
        current_content, original_content, "Content should be unchanged"
    )
    # Test the critical behavior: should return False despite modtime change
    result = cache.has_changed(test_file, original_modtime)
    self.assertFalse(
        result, "File with changed modtime but same content should return False"
    )
def test_file_copy_different_timestamp(self) -> None:
    """Test same file with different timestamps but identical content."""
    cache_file = self.cache_dir / "test_cache.json"
    cache = FingerprintCache(cache_file)
    # Create original file
    test_file = self.temp_dir / "timestamp_test.txt"
    content = "file content for timestamp testing"
    self.create_test_file(test_file, content)
    original_modtime = os.path.getmtime(test_file)
    # Prime cache with original state
    cache.has_changed(test_file, original_modtime - 1)
    # Touch file to change timestamp but keep content same
    time.sleep(0.01)
    new_modtime = time.time()
    os.utime(test_file, (new_modtime, new_modtime))
    # Verify content is still the same
    with open(test_file, "r") as f:
        current_content = f.read()
    self.assertEqual(current_content, content, "Content should be unchanged")
    # Test: different modtime, same content should return False
    result = cache.has_changed(test_file, original_modtime)
    self.assertFalse(
        result, "File with different timestamp but same content should return False"
    )
def test_content_actually_changed(self) -> None:
    """Test that actual content changes are detected."""
    cache_file = self.cache_dir / "test_cache.json"
    cache = FingerprintCache(cache_file)
    # Create test file
    test_file = self.temp_dir / "modified.txt"
    original_content = "original content"
    self.create_test_file(test_file, original_content)
    original_modtime = os.path.getmtime(test_file)
    # Prime cache
    cache.has_changed(test_file, original_modtime - 1)
    # Actually modify content
    new_content = "modified content"
    # NOTE(review): new_modtime is unused below; the check is made against
    # original_modtime deliberately. Kept for readability/debugging.
    new_modtime = self.modify_file_content(test_file, new_content)
    # Should detect change
    result = cache.has_changed(test_file, original_modtime)
    self.assertTrue(result, "File with changed content should return True")
def test_incremental_changes(self) -> None:
    """Test detecting changes at different points in time."""
    cache_file = self.cache_dir / "test_cache.json"
    cache = FingerprintCache(cache_file)
    test_file = self.temp_dir / "incremental.txt"
    # Version 1 - baseline
    content_v1 = "version 1"
    self.create_test_file(test_file, content_v1)
    modtime_v1 = os.path.getmtime(test_file)
    # Test v1 against earlier time (should be changed)
    result = cache.has_changed(test_file, modtime_v1 - 1)
    self.assertTrue(result, "File should be detected as changed from earlier time")
    # Test v1 against same time (should be unchanged)
    result = cache.has_changed(test_file, modtime_v1)
    self.assertFalse(result, "File should be unchanged from same time")
    # Version 2 - modify content
    content_v2 = "version 2"
    modtime_v2 = self.modify_file_content(test_file, content_v2)
    # Test v2 against v1 time (should be changed)
    result = cache.has_changed(test_file, modtime_v1)
    self.assertTrue(result, "File should be detected as changed from v1 time")
    # Test v2 against v2 time (should be unchanged)
    result = cache.has_changed(test_file, modtime_v2)
    self.assertFalse(result, "File should be unchanged from v2 time")
def test_cache_persistence(self) -> None:
    """Test that cache persists across FingerprintCache instances."""
    cache_file = self.cache_dir / "persistent.json"
    # First cache instance
    cache1 = FingerprintCache(cache_file)
    test_file = self.temp_dir / "persistent.txt"
    self.create_test_file(test_file, "persistent content")
    modtime = os.path.getmtime(test_file)
    # Prime cache
    cache1.has_changed(test_file, modtime - 1)
    # Second cache instance (reload from disk)
    cache2 = FingerprintCache(cache_file)
    # Should use cached data
    result = cache2.has_changed(test_file, modtime)
    self.assertFalse(result, "New cache instance should load existing cache")
def test_cache_corruption_recovery(self) -> None:
    """Test graceful handling of corrupted cache files."""
    cache_file = self.cache_dir / "corrupted.json"
    # Create corrupted cache file (invalid JSON on purpose)
    with open(cache_file, "w") as f:
        f.write("{ invalid json content")
    # Should handle corruption gracefully
    cache = FingerprintCache(cache_file)
    test_file = self.temp_dir / "recovery.txt"
    self.create_test_file(test_file, "recovery content")
    modtime = os.path.getmtime(test_file)
    # Should work despite corrupted cache
    result = cache.has_changed(test_file, modtime - 1)
    self.assertTrue(result, "Should work with corrupted cache")
def test_file_not_found_error(self) -> None:
    """Test FileNotFoundError for non-existent files."""
    cache_file = self.cache_dir / "test_cache.json"
    cache = FingerprintCache(cache_file)
    non_existent_file = self.temp_dir / "does_not_exist.txt"
    with self.assertRaises(FileNotFoundError):
        cache.has_changed(non_existent_file, time.time())
def test_performance_cache_hit(self) -> None:
    """Benchmark cache hit performance."""
    cache_file = self.cache_dir / "perf.json"
    cache = FingerprintCache(cache_file)
    # Create test files
    test_files: List[Path] = []
    for i in range(10):  # Reduced for faster test execution
        test_file = self.temp_dir / f"perf_{i}.txt"
        self.create_test_file(test_file, f"content {i}")
        test_files.append(test_file)
    # Measure cache hit performance
    start_time = time.time()
    for test_file in test_files:
        modtime = os.path.getmtime(test_file)
        cache.has_changed(test_file, modtime)  # Cache hit
    elapsed = time.time() - start_time
    avg_time = elapsed / len(test_files) * 1000  # ms per file
    # NOTE(review): wall-clock bound — may be flaky on heavily loaded machines.
    self.assertLess(
        avg_time, 1.0, f"Cache hit average {avg_time:.2f}ms should be <1.0ms"
    )
def test_performance_large_files(self) -> None:
    """Test performance with moderately large files."""
    cache_file = self.cache_dir / "large.json"
    cache = FingerprintCache(cache_file)
    # Create moderately large test file (100KB)
    large_file = self.temp_dir / "large.txt"
    large_content = "x" * (100 * 1024)
    self.create_test_file(large_file, large_content)
    modtime = os.path.getmtime(large_file)
    # Measure MD5 computation time
    start_time = time.time()
    result = cache.has_changed(large_file, modtime - 1)  # Force MD5 computation
    elapsed = time.time() - start_time
    self.assertTrue(result, "Large file should be detected as changed")
    self.assertLess(
        elapsed,
        0.5,
        f"Large file processing {elapsed * 1000:.2f}ms should be <500ms",
    )
def test_convenience_function(self) -> None:
    """Test the convenience has_changed function."""
    cache_file = self.cache_dir / "convenience.json"
    target = self.temp_dir / "convenience.txt"
    self.create_test_file(target, "convenience test")
    modtime = os.path.getmtime(target)
    # The free function should report "unchanged" for an up-to-date modtime.
    self.assertFalse(
        has_changed(target, modtime, cache_file),
        "Convenience function should work correctly",
    )
def test_cache_stats(self) -> None:
    """Test cache statistics functionality."""
    cache = FingerprintCache(self.cache_dir / "stats.json")
    # A fresh cache reports zero entries and no backing file on disk.
    stats = cache.get_cache_stats()
    self.assertEqual(stats["total_entries"], 0)
    self.assertFalse(stats["cache_file_exists"])
    # Populate the cache via a single lookup.
    tracked = self.temp_dir / "stats.txt"
    self.create_test_file(tracked, "stats content")
    cache.has_changed(tracked, time.time() - 1)
    # Stats should now reflect the persisted entry.
    stats = cache.get_cache_stats()
    self.assertEqual(stats["total_entries"], 1)
    self.assertTrue(stats["cache_file_exists"])
    self.assertGreater(stats["cache_file_size_bytes"], 0)
def test_clear_cache(self) -> None:
    """Test cache clearing functionality."""
    cache_file = self.cache_dir / "clear.json"
    cache = FingerprintCache(cache_file)
    # Seed the cache with one entry so clearing has something to remove.
    seeded = self.temp_dir / "clear.txt"
    self.create_test_file(seeded, "clear content")
    cache.has_changed(seeded, time.time() - 1)
    self.assertEqual(len(cache.cache), 1)
    self.assertTrue(cache_file.exists())
    # Clearing must drop in-memory entries and delete the backing file.
    cache.clear_cache()
    self.assertEqual(len(cache.cache), 0)
    self.assertFalse(cache_file.exists())
def test_build_system_workflow(self) -> None:
    """Test complete build system workflow.

    Simulates three successive builds: an initial build where every file is
    new, a rebuild with nothing changed, and a rebuild after modifying one
    source file.
    """
    cache_file = self.cache_dir / "build.json"
    cache = FingerprintCache(cache_file)
    # Simulate source files
    source_files = [
        self.temp_dir / "main.cpp",
        self.temp_dir / "utils.cpp",
        self.temp_dir / "config.h",
    ]
    # Create initial files
    for i, src_file in enumerate(source_files):
        self.create_test_file(src_file, f"source content {i}")
    # First build - all files should be "changed" (new).
    # Annotate the variable once; re-annotating the same name on later
    # assignments (as before) is a redefinition error under type checkers.
    changed_files: List[Path] = []
    baseline_time = time.time() - 3600  # 1 hour ago
    for src_file in source_files:
        if cache.has_changed(src_file, baseline_time):
            changed_files.append(src_file)
    self.assertEqual(
        len(changed_files), 3, "All new files should be detected as changed"
    )
    # Second build - no changes
    current_modtimes = [os.path.getmtime(f) for f in source_files]
    changed_files = []
    for src_file, modtime in zip(source_files, current_modtimes):
        if cache.has_changed(src_file, modtime):
            changed_files.append(src_file)
    self.assertEqual(
        len(changed_files), 0, "Unchanged files should not be detected as changed"
    )
    # Third build - modify one file
    self.modify_file_content(source_files[0], "modified main.cpp")
    changed_files = []
    for src_file, modtime in zip(source_files, current_modtimes):
        if cache.has_changed(src_file, modtime):
            changed_files.append(src_file)
    self.assertEqual(len(changed_files), 1, "Only modified file should be detected")
    self.assertEqual(
        changed_files[0], source_files[0], "Should detect the correct modified file"
    )
def run_all_tests() -> bool:
    """Run the full TestFingerprintCache suite; return True when all pass."""
    import unittest

    # Build and execute the suite with verbose per-test output.
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromTestCase(TestFingerprintCache)
    outcome = unittest.TextTestRunner(verbosity=2).run(suite)
    if not outcome.wasSuccessful():
        print("❌ Some tests failed!")
        return False
    print("✅ All fingerprint cache tests passed!")
    return True
if __name__ == "__main__":
run_all_tests()

View File

@@ -0,0 +1,276 @@
# pyright: reportUnknownMemberType=false
import os
import unittest
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
import yaml
from ci.util.paths import PROJECT_ROOT
@dataclass
class WorkflowStep:
    """A single step of a GitHub Actions job (only the fields these tests use)."""

    # Display name of the step, if given.
    name: Optional[str] = None
    # Action reference, e.g. "actions/checkout@...".
    uses: Optional[str] = None
    # The step's `with:` input mapping.
    with_config: Optional[Dict[str, Any]] = None
    # Inline command for `run:` steps.
    run: Optional[str] = None
@dataclass
class WorkflowJob:
    """One entry of a workflow's `jobs:` mapping (subset of fields used here)."""

    # `runs-on` value: a single label or a list of labels.
    runs_on: Optional[Union[str, List[str]]] = None
    # Parsed steps; normalized to [] in __post_init__ so callers can iterate.
    steps: Optional[List[WorkflowStep]] = None
    # Job-level `permissions:` mapping, if present.
    permissions: Optional[Dict[str, str]] = None

    def __post_init__(self) -> None:
        # Normalize a missing steps list to an empty one.
        if self.steps is None:
            self.steps = []
@dataclass
class WorkflowTrigger:
    """Branch/path filters of a trigger.

    NOTE(review): not referenced anywhere in this module — looks like dead
    code; confirm before removing.
    """

    branches: Optional[List[str]] = None
    paths: Optional[List[str]] = None
@dataclass
class GitHubWorkflow:
    """A parsed workflow file (only the fields these security tests inspect)."""

    name: Optional[str] = None
    # The `on:` trigger config (normalized from YAML's boolean-True key by
    # _load_workflow below).
    on_config: Optional[Dict[str, Any]] = None
    # Mapping of job name -> parsed WorkflowJob.
    jobs: Optional[Dict[str, WorkflowJob]] = None
    # Workflow-level `permissions:` mapping, if present.
    permissions: Optional[Dict[str, str]] = None

    def __post_init__(self) -> None:
        # Normalize missing mappings so downstream checks can iterate safely.
        if self.jobs is None:
            self.jobs = {}
        if self.on_config is None:
            self.on_config = {}
class TestGitHubActionsSecurityTest(unittest.TestCase):
    """
    Security tests for GitHub Actions workflows to prevent known vulnerabilities.

    This test ensures that workflows using pull_request_target have proper
    permissions restrictions to prevent the "pwn request" vulnerability:
    https://securitylab.github.com/resources/github-actions-preventing-pwn-requests/
    """

    def setUp(self):
        # Collect every workflow definition (.yml and .yaml) in the repo.
        self.workflows_dir = PROJECT_ROOT / ".github" / "workflows"
        self.workflow_files = list(self.workflows_dir.glob("*.yml")) + list(
            self.workflows_dir.glob("*.yaml")
        )

    def _load_workflow(self, workflow_path: Path) -> GitHubWorkflow:
        """Load and parse a GitHub Actions workflow file into a dataclass."""
        try:
            with open(workflow_path, "r", encoding="utf-8") as f:
                raw_content: Any = yaml.safe_load(f)
            content: Dict[str, Any] = raw_content or {}
            # Handle the case where 'on' is parsed as boolean True instead of string 'on'
            # This happens because 'on' is a YAML boolean keyword
            if True in content and "on" not in content:
                on_data = content.pop(True)  # pyright: ignore[reportArgumentType]
                content["on"] = on_data
            # Parse jobs into dataclass
            jobs_dict: Dict[str, WorkflowJob] = {}
            raw_jobs: Dict[str, Any] = content.get("jobs", {})
            for job_name, job_data in raw_jobs.items():
                # Malformed (non-mapping) job entries are skipped silently.
                if not isinstance(job_data, dict):
                    continue
                # Parse steps
                steps: List[WorkflowStep] = []
                raw_steps: List[Any] = job_data.get("steps", [])  # pyright: ignore[reportUnknownVariableType]
                for step_data in raw_steps:  # pyright: ignore[reportUnknownVariableType]
                    if isinstance(step_data, dict):
                        step_dict: Dict[str, Any] = step_data  # pyright: ignore[reportUnknownVariableType]
                        step = WorkflowStep(
                            name=step_dict.get("name"),
                            uses=step_dict.get("uses"),
                            with_config=step_dict.get("with"),
                            run=step_dict.get("run"),
                        )
                        steps.append(step)
                job_dict: Dict[str, Any] = job_data  # pyright: ignore[reportUnknownVariableType]
                job = WorkflowJob(
                    runs_on=job_dict.get("runs-on"),
                    steps=steps,
                    permissions=job_dict.get("permissions"),
                )
                jobs_dict[job_name] = job
            return GitHubWorkflow(
                name=content.get("name"),
                on_config=content.get("on", {}),
                jobs=jobs_dict,
                permissions=content.get("permissions"),
            )
        except Exception as e:
            # Any parse failure fails the test: workflows must stay parseable.
            self.fail(f"Failed to parse workflow {workflow_path}: {e}")

    def _has_pull_request_target(self, workflow: GitHubWorkflow) -> bool:
        """Check if workflow uses pull_request_target trigger."""
        # `on:` may be a list of trigger names or a mapping of trigger -> config.
        if isinstance(workflow.on_config, list):
            return "pull_request_target" in workflow.on_config
        elif isinstance(workflow.on_config, dict):
            return "pull_request_target" in workflow.on_config
        return False

    def _has_untrusted_code_checkout(self, workflow: GitHubWorkflow) -> bool:
        """Check if workflow checks out PR code that could be untrusted."""
        if workflow.jobs is None:
            return False
        for job in workflow.jobs.values():
            if job.steps is None:
                continue
            for step in job.steps:
                # Check for actions/checkout with PR head reference
                if step.uses and step.uses.startswith("actions/checkout"):
                    if step.with_config:
                        ref = step.with_config.get("ref", "")
                        if "pull_request.head" in ref:
                            return True
        return False

    def _has_explicit_permissions(self, workflow: GitHubWorkflow) -> bool:
        """Check if workflow has explicit permissions set."""
        # Check workflow-level permissions
        if workflow.permissions:
            return True
        # Check job-level permissions
        if workflow.jobs is not None:
            for job in workflow.jobs.values():
                if job.permissions:
                    return True
        return False

    def _get_permissions(self, workflow: GitHubWorkflow) -> Dict[str, Any]:
        """Get the permissions configuration from workflow.

        Merges workflow-level and job-level mappings; when several jobs set
        the same permission, later jobs overwrite earlier ones.
        """
        permissions: Dict[str, Any] = {}
        # Workflow-level permissions
        if workflow.permissions:
            permissions.update(workflow.permissions)
        # Job-level permissions (overwrites workflow-level)
        if workflow.jobs is not None:
            for job in workflow.jobs.values():
                if job.permissions:
                    permissions.update(job.permissions)
        return permissions

    def _is_safe_permissions(self, permissions: Dict[str, Any]) -> bool:
        """Check if permissions are safe (no dangerous write access)."""
        # List of dangerous write permissions
        dangerous_write_permissions = [
            "contents",  # Can modify repository contents
            "metadata",  # Can modify repository metadata
            "packages",  # Can publish packages
            "pages",  # Can deploy to GitHub Pages
            "deployments",  # Can create deployments
            "security-events",  # Can create security events
        ]
        for perm, value in permissions.items():
            if perm in dangerous_write_permissions and value == "write":
                return False
        return True

    def test_pull_request_target_workflows_have_safe_permissions(self) -> None:
        """
        Test that all workflows using pull_request_target have explicit
        safe permissions to prevent pwn request vulnerabilities.
        """
        vulnerable_workflows: List[str] = []
        unsafe_permission_workflows: List[str] = []
        for workflow_path in self.workflow_files:
            workflow = self._load_workflow(workflow_path)
            # Only pull_request_target workflows are at risk here.
            if not self._has_pull_request_target(workflow):
                continue
            workflow_name = workflow_path.name
            # Check if it has untrusted code checkout (potential vulnerability)
            if self._has_untrusted_code_checkout(workflow):
                if not self._has_explicit_permissions(workflow):
                    vulnerable_workflows.append(workflow_name)
                else:
                    permissions = self._get_permissions(workflow)
                    if not self._is_safe_permissions(permissions):
                        unsafe_permission_workflows.append(
                            f"{workflow_name}: {permissions}"
                        )
        # Report findings
        error_messages: List[str] = []
        if vulnerable_workflows:
            error_messages.append(
                f"CRITICAL: Found {len(vulnerable_workflows)} workflows with pull_request_target "
                f"that checkout untrusted code without explicit permissions:\n"
                + "\n".join(f" - {w}" for w in vulnerable_workflows)
                + "\n\nThis is a critical security vulnerability! These workflows can be exploited "
                "to gain repository write access through malicious PRs."
            )
        if unsafe_permission_workflows:
            error_messages.append(
                f"UNSAFE: Found {len(unsafe_permission_workflows)} workflows with pull_request_target "
                f"that have dangerous write permissions:\n"
                + "\n".join(f" - {w}" for w in unsafe_permission_workflows)
                + "\n\nThese workflows should use minimal read-only permissions."
            )
        if error_messages:
            self.fail(
                "\n\n".join(error_messages)
                + "\n\nRecommended fix: Add explicit minimal permissions to these workflows:\n"
                "permissions:\n"
                " contents: read\n"
                " actions: read\n"
                " id-token: write\n"
                " pull-requests: read"
            )

    def test_no_workflow_uses_excessive_permissions(self) -> None:
        """
        Test that no workflow uses overly broad permissions like 'write-all'.
        """
        excessive_permission_workflows: List[str] = []
        for workflow_path in self.workflow_files:
            workflow = self._load_workflow(workflow_path)
            permissions = self._get_permissions(workflow)
            # Check for dangerous permission patterns
            for perm, value in permissions.items():
                if value in ["write-all", "admin"]:
                    excessive_permission_workflows.append(
                        f"{workflow_path.name}: {perm}={value}"
                    )
        if excessive_permission_workflows:
            self.fail(
                f"Found workflows with excessive permissions:\n"
                + "\n".join(f" - {w}" for w in excessive_permission_workflows)
                + "\n\nUse minimal required permissions instead."
            )
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,25 @@
import unittest
from pathlib import Path
from ci.util.map_dump import map_dump
HERE = Path(__file__).resolve().parent.absolute()
UNO = HERE / "uno"
class TestMapParser(unittest.TestCase):
    """Smoke test for the firmware .map file parser."""

    def test_map_parser(self):
        """Run map_dump on the bundled 'uno' fixture, skipping when absent."""
        if UNO.exists():
            map_dump(UNO / "firmware.map")
            return
        # If the UNO fixture directory is missing, warn loudly and skip so a
        # lost fixture does not pass silently in CI.
        import warnings

        warnings.warn(
            "Skipping TestMapParser::test_map_parser because ci/tests/uno is missing."
        )
        self.skipTest("ci/tests/uno missing; skipping map parser test")
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,117 @@
# pyright: reportUnknownMemberType=false
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from typing import List
from ci.util.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"
# Serialize when NO_PARALLEL is set; otherwise oversubscribe with 4 threads
# per CPU (the per-file work is I/O-bound file reads).
NUM_WORKERS = 1 if os.environ.get("NO_PARALLEL") else (os.cpu_count() or 1) * 4

# Files that are allowed to not have #pragma once
EXCLUDED_FILES: List[str] = [
    # Add any exceptions here
]

# Directory names (relative to SRC_ROOT) that are skipped entirely.
EXCLUDED_DIRS = [
    "third_party",
    "platforms",
]
class TestMissingPragmaOnce(unittest.TestCase):
def check_file(self, file_path: str) -> list[str]:
"""Check if a header file has #pragma once directive or if a cpp file incorrectly has it."""
failings: list[str] = []
with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
content = f.read()
if file_path.endswith(".h"):
# For header files, check if #pragma once is missing
if "#pragma once" not in content:
failings.append(f"Missing #pragma once in {file_path}")
elif file_path.endswith(".cpp"):
# For cpp files, check if #pragma once is incorrectly present
if "#pragma once" in content:
failings.append(f"Incorrect #pragma once in cpp file: {file_path}")
return failings
def test_pragma_once_usage(self) -> None:
"""
Searches through files to:
1. Check for missing #pragma once in header files
2. Check for incorrect #pragma once in cpp files
"""
files_to_check: List[str] = []
current_dir = None
# Collect files to check
for root, dirs, files in os.walk(SRC_ROOT):
# Log when we enter a new directory
rel_path = os.path.relpath(root, SRC_ROOT)
if current_dir != rel_path:
current_dir = rel_path
if rel_path in EXCLUDED_DIRS:
dirs[:] = [] # Skip this directory and its subdirectories
continue
# Check if this directory should be excluded
# if any(os.path.normpath(root).startswith(os.path.normpath(excluded_dir))
# for excluded_dir in EXCLUDED_DIRS):
# print(f" Skipping excluded directory: {rel_path}")
# continue
for excluded_dir in EXCLUDED_DIRS:
npath = os.path.normpath(root)
npath_excluded = os.path.normpath(excluded_dir)
if npath.startswith(npath_excluded):
break
for file in files:
if file.endswith((".h", ".cpp")): # Check both header and cpp files
file_path = os.path.join(root, file)
# Check if file is excluded
# if any(file_path.endswith(excluded) for excluded in EXCLUDED_FILES):
# print(f" Skipping excluded file: {file}")
# continue
for excluded in EXCLUDED_FILES:
# print(f"Checking {file_path} against excluded {excluded}")
if file_path.endswith(excluded):
print(f" Skipping excluded file: {file}")
break
files_to_check.append(file_path)
print(f"Found {len(files_to_check)} files to check")
# Process files in parallel
all_failings: List[str] = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
# Report results
if all_failings:
msg = f"Found {len(all_failings)} pragma once issues: \n" + "\n".join(
all_failings
)
for failing in all_failings:
print(failing)
self.fail(msg)
else:
print("All files have proper pragma once usage.")
print(f"Pragma once check completed. Processed {len(files_to_check)} files.")
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,207 @@
# pyright: reportUnknownMemberType=false
import os
import unittest
from typing import Callable, List
from ci.util.check_files import (
EXCLUDED_FILES,
FileContent,
FileContentChecker,
MultiCheckerFileProcessor,
collect_files_to_check,
)
from ci.util.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"

# When enabled, also ban the ESP Arduino HAL header in core sources.
ENABLE_PARANOID_GNU_HEADER_INSPECTION = False
if ENABLE_PARANOID_GNU_HEADER_INSPECTION:
    BANNED_HEADERS_ESP = ["esp32-hal.h"]
else:
    BANNED_HEADERS_ESP = []

# Standard C/C++ headers that must not be included directly in the checked
# directories (the project provides its own alternatives / shims).
BANNED_HEADERS_COMMON = [
    "pthread.h",
    "assert.h",
    "iostream",
    "stdio.h",
    "cstdio",
    "cstdlib",
    "vector",
    "list",
    "map",
    "set",
    "queue",
    "deque",
    "algorithm",
    "memory",
    "thread",
    "mutex",
    "chrono",
    "fstream",
    "sstream",
    "iomanip",
    "exception",
    "stdexcept",
    "typeinfo",
    "ctime",
    "cmath",
    "complex",
    "valarray",
    "cfloat",
    "cassert",
    "cerrno",
    "cctype",
    "cwctype",
    "cstring",
    "cwchar",
    "cuchar",
    "cstdint",
    "stdint.h",
    "stddef.h",
    "cstddef",  # this certainly fails
    "type_traits",  # this certainly fails
]
# Core sources additionally ban Arduino.h (and the ESP list when enabled).
BANNED_HEADERS_CORE = BANNED_HEADERS_COMMON + BANNED_HEADERS_ESP + ["Arduino.h"]
# Banned headers for platforms directory - specifically checking for Arduino.h
BANNED_HEADERS_PLATFORMS = ["Arduino.h"]
class BannedHeadersChecker(FileContentChecker):
    """Checker that flags #include lines referencing banned headers."""

    def __init__(self, banned_headers_list: List[str]):
        """Store the list of banned header names to look for."""
        self.banned_headers_list = banned_headers_list

    def should_process_file(self, file_path: str) -> bool:
        """Return True when *file_path* is a C++ source that should be inspected."""
        if not file_path.endswith((".cpp", ".h", ".hpp", ".ino")):
            return False
        # Files on the exclusion list are never inspected.
        return not any(file_path.endswith(excluded) for excluded in EXCLUDED_FILES)

    def check_file_content(self, file_content: FileContent) -> List[str]:
        """Return one failure message for every banned include found."""
        if not self.banned_headers_list:
            return []
        failings: List[str] = []
        for line_number, line in enumerate(file_content.lines, 1):
            # A full-line comment cannot be a real include.
            if line.strip().startswith("//"):
                continue
            for header in self.banned_headers_list:
                includes_header = (
                    f"#include <{header}>" in line or f'#include "{header}"' in line
                )
                # The trailing marker comment silences the check per line.
                if includes_header and "// ok include" not in line:
                    failings.append(
                        f"Found banned header '{header}' in {file_content.path}:{line_number}"
                    )
        return failings
def _test_no_banned_headers(
    test_directories: List[str],
    banned_headers_list: List[str],
    on_fail: Callable[[str], None],
) -> None:
    """Scan *test_directories* for banned headers; call *on_fail* with a
    summary message when any are found."""
    # Collect candidate files, then run the single checker over all of them.
    files_to_check = collect_files_to_check(test_directories)
    checker = BannedHeadersChecker(banned_headers_list)
    processor = MultiCheckerFileProcessor()
    results = processor.process_files_with_checkers(files_to_check, [checker])
    # Results are keyed by checker class name; missing/None means no findings.
    all_failings = results.get("BannedHeadersChecker", []) or []
    if not all_failings:
        print("No banned headers found.")
        return
    for failing in all_failings:
        print(failing)
    on_fail(
        f"Found {len(all_failings)} banned header(s): \n" + "\n".join(all_failings)
    )
class TestNoBannedHeaders(unittest.TestCase):
    """Banned-header checks for core sources, examples, and platforms."""

    def _fail_with_hint(self, msg: str) -> None:
        """Fail the test, appending the standard per-line silencing hint."""
        self.fail(
            msg + "\n"
            "You can add '// ok include' at the end of the line to silence this error for specific inclusions."
        )

    def test_no_banned_headers_src(self) -> None:
        """Searches through the program files to check for banned headers."""
        _test_no_banned_headers(
            test_directories=[
                os.path.join(SRC_ROOT, "fl"),
                os.path.join(SRC_ROOT, "fx"),
                os.path.join(SRC_ROOT, "sensors"),
            ],
            banned_headers_list=BANNED_HEADERS_CORE,
            on_fail=self._fail_with_hint,
        )

    def test_no_banned_headers_examples(self) -> None:
        """Searches through the program files to check for banned headers."""
        _test_no_banned_headers(
            test_directories=["examples"],
            banned_headers_list=BANNED_HEADERS_COMMON,
            on_fail=self._fail_with_hint,
        )

    def test_no_banned_headers_platforms(self) -> None:
        """Searches through the platforms directory to check for Arduino.h usage."""
        _test_no_banned_headers(
            test_directories=[os.path.join(SRC_ROOT, "platforms")],
            banned_headers_list=BANNED_HEADERS_PLATFORMS,
            on_fail=self._fail_with_hint,
        )
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,190 @@
import os
import re
import unittest
from typing import Dict, List, Tuple
from ci.util.paths import PROJECT_ROOT
# Directory roots scanned by the tests below.
SRC_ROOT = PROJECT_ROOT / "src"
EXAMPLES_ROOT = PROJECT_ROOT / "examples"
TESTS_ROOT = PROJECT_ROOT / "tests"

# Skip patterns for directories that contain third-party code or build artifacts
# (matched as substrings of the walked directory path).
SKIP_PATTERNS = [
    ".venv",
    "node_modules",
    "build",
    ".build",
    "third_party",
    "ziglang",
    "greenlet",
    ".git",
]
# Regex patterns as triple-quoted constants
# Regex patterns as verbose (commented) constants.
NAMESPACE_PATTERN = re.compile(
    r"""
    ^\s* # Start of line with optional whitespace
    ( # Capture group for namespace patterns
    namespace\s+\w+ # namespace followed by identifier
    | # OR
    namespace\s*\{ # namespace followed by optional whitespace and {
    )
    """,
    re.VERBOSE,
)
INCLUDE_PATTERN = re.compile(
    r"""
    ^\s* # Start of line with optional whitespace
    \#\s* # Hash with optional whitespace
    include\s* # include with optional whitespace
    [<"] # Opening bracket or quote
    .* # Anything in between
    [>"] # Closing bracket or quote
    """,
    re.VERBOSE,
)
ALLOW_DIRECTIVE_PATTERN = re.compile(r"//\s*allow-include-after-namespace")


def find_includes_after_namespace(file_path: str) -> List[Tuple[int, str]]:
    """
    Check if a C++ file has #include directives after namespace declarations.

    Args:
        file_path (str): Path to the C++ file to check

    Returns:
        List[Tuple[int, str]]: (line number, line content) pairs for every
            include that appears after a namespace was opened. Empty when
            there are no violations, when the file opts out via the allow
            directive, or when the file is not valid UTF-8.
    """
    try:
        with open(file_path, "r", encoding="utf-8") as handle:
            source_lines = handle.readlines()
    except UnicodeDecodeError:
        # Not UTF-8 text: treat as out of scope rather than erroring.
        return []

    # A per-file opt-out comment disables the check entirely.
    if any(ALLOW_DIRECTIVE_PATTERN.search(text) for text in source_lines):
        return []

    offenders: List[Tuple[int, str]] = []
    inside_namespace = False
    for lineno, text in enumerate(source_lines, 1):
        if NAMESPACE_PATTERN.match(text):
            # From here on, any include is a violation.
            inside_namespace = True
        elif inside_namespace and INCLUDE_PATTERN.match(text):
            offenders.append((lineno, text.rstrip("\n")))
    return offenders
def scan_cpp_files(directory: str = ".") -> Dict[str, List[Tuple[int, str]]]:
    """
    Scan all C++ files in a directory for includes after namespace declarations.

    Args:
        directory (str): Directory to scan for C++ files

    Returns:
        Dict[str, List[Tuple[int, str]]]: Mapping of file path -> list of
            (line number, line content) violations for that file.
    """
    cpp_extensions = (".cpp", ".h", ".hpp", ".cc", ".ino")
    violations: Dict[str, List[Tuple[int, str]]] = {}
    for root, _dirs, files in os.walk(directory):
        # Skip directories with third-party code / build output.
        if any(pattern in root for pattern in SKIP_PATTERNS):
            continue
        for filename in files:
            if not filename.endswith(cpp_extensions):
                continue
            file_path = os.path.join(root, filename)
            try:
                line_info = find_includes_after_namespace(file_path)
            except Exception as e:
                # Best-effort scan: report and keep going on unreadable files.
                print(f"Error processing {file_path}: {e}")
                continue
            if line_info:
                violations[file_path] = line_info
    return violations
class TestNoIncludeAfterNamespace(unittest.TestCase):
    """Ensure no C++ file places #include directives after a namespace opens."""

    def _check_tree(self, directory: str, label: str) -> None:
        """Scan *directory*; fail with a detailed report when violations exist."""
        violations = scan_cpp_files(directory)
        if not violations:
            print(
                f"No violations found in {label}! All includes are properly placed before namespace declarations."
            )
            return
        msg = f"Found includes after namespace declarations in {label}:\n"
        for file_path, line_info in violations.items():
            msg += f" {file_path}:\n"
            for line_num, line_content in line_info:
                msg += f" Line {line_num}: {line_content}\n"
        self.fail(
            msg
            + "\nPlease fix these issues by moving includes to the top of the file.\n"
            "See TEST_NAMESPACE_INCLUDES.md for more information."
        )

    def test_no_includes_after_namespace_in_src(self) -> None:
        """Check that src/ directory doesn't have includes after namespace declarations."""
        self._check_tree(str(SRC_ROOT), "src/")

    def test_no_includes_after_namespace_in_examples(self) -> None:
        """Check that examples/ directory doesn't have includes after namespace declarations."""
        self._check_tree(str(EXAMPLES_ROOT), "examples/")

    def test_no_includes_after_namespace_in_tests(self) -> None:
        """Check that tests/ directory doesn't have includes after namespace declarations."""
        self._check_tree(str(TESTS_ROOT), "tests/")
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,220 @@
"""
Comprehensive tests for enhanced Arduino package index implementation with Pydantic
Tests all Pydantic models, validation rules, parsing functionality, and error handling.
"""
import json
from pathlib import Path
from typing import Any, Dict
import pytest
from pydantic import ValidationError
# Import the enhanced implementation
from ci.compiler.packages import (
Board,
Help,
Package,
PackageIndex,
PackageIndexParser,
PackageManagerConfig,
PackageParsingError,
Platform,
SystemDownload,
Tool,
ToolDependency,
format_size,
)
class TestHelp:
    """Validation tests for the Help model."""

    def test_valid_help(self):
        """A well-formed URL is accepted and preserved."""
        url = "https://github.com/espressif/arduino-esp32"
        help_obj = Help(**{"online": url})  # type: ignore
        assert str(help_obj.online) == url

    def test_invalid_url(self):
        """A malformed URL must be rejected by validation."""
        with pytest.raises(ValidationError):
            Help(online="not-a-valid-url")  # type: ignore
class TestBoard:
    """Validation tests for the Board model."""

    def test_valid_board(self):
        """Name and properties round-trip through the model unchanged."""
        props = {
            "upload.tool": "esptool_py",
            "upload.maximum_size": "1310720",
        }
        board = Board(**{"name": "ESP32 Dev Module", "properties": props})  # type: ignore
        assert board.name == "ESP32 Dev Module"
        assert board.properties["upload.tool"] == "esptool_py"

    def test_empty_name_validation(self):
        """An empty board name is rejected."""
        with pytest.raises(ValidationError):
            Board(name="", properties={})

    def test_name_trimming(self):
        """Surrounding whitespace is stripped from the name."""
        padded = Board(name=" ESP32 Dev Module ", properties={})
        assert padded.name == "ESP32 Dev Module"
class TestSystemDownload:
    """Test SystemDownload model validation"""

    def test_size_conversion_from_string(self):
        """Test size conversion from string bytes to MB"""
        system = SystemDownload(
            host="test-host",
            url="https://example.com/file.tar.gz",  # type: ignore
            archiveFileName="file.tar.gz",
            checksum="SHA-256:1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
            size="52428800",  # type: ignore - 50 MB in bytes, validator converts
        )
        # 52428800 bytes == 50 * 1024 * 1024; allow float tolerance.
        assert abs(system.size_mb - 50.0) < 0.1  # Should be ~50 MB

    def test_invalid_checksum_format(self):
        """Test invalid checksum format validation"""
        # A checksum not matching the "ALGO:hex" convention must be rejected.
        with pytest.raises(ValidationError):
            SystemDownload(
                host="test-host",
                url="https://example.com/file.tar.gz",  # type: ignore
                archiveFileName="file.tar.gz",
                checksum="invalid-checksum",
                size=50.0,  # type: ignore
            )
class TestPlatform:
    """Test Platform model validation"""

    def test_valid_platform(self):
        """Test valid platform creation"""
        platform_data: Dict[str, Any] = {
            "name": "ESP32 Arduino",
            "architecture": "esp32",
            "version": "2.0.5",
            "category": "ESP32",
            "url": "https://github.com/espressif/arduino-esp32/releases/download/2.0.5/esp32-2.0.5.zip",
            "archiveFileName": "esp32-2.0.5.zip",
            "checksum": "SHA-256:1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
            "size": "50000000",
            "boards": [],
            "toolsDependencies": [],
            "help": {"online": "https://github.com/espressif/arduino-esp32"},
        }
        platform = Platform(**platform_data)  # type: ignore
        assert platform.name == "ESP32 Arduino"
        assert platform.architecture == "esp32"
        # size is given in bytes; size_mb divides by 1024*1024.
        assert platform.size_mb == 50000000 / (1024 * 1024)

    def test_invalid_archive_extension(self):
        """Test invalid archive extension validation"""
        # .txt is not a valid archive format for a platform download.
        with pytest.raises(ValidationError):
            Platform(
                name="Test Platform",
                architecture="test",
                version="1.0.0",
                category="Test",
                url="https://example.com/file.txt",  # type: ignore
                archiveFileName="file.txt",  # Invalid extension
                checksum="SHA-256:1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
                size=50.0,  # type: ignore
                boards=[],
                toolsDependencies=[],
                help=Help(online="https://example.com"),  # type: ignore
            )
class TestPackageIndexParser:
    """Behavioral tests for PackageIndexParser."""

    def test_parse_valid_json(self):
        """A minimal valid index yields exactly one parsed package."""
        index_payload: Dict[str, Any] = {
            "packages": [
                {
                    "name": "test",
                    "maintainer": "Test Maintainer",
                    "websiteURL": "https://example.com",
                    "email": "test@example.com",
                    "help": {"online": "https://example.com"},
                    "platforms": [],
                    "tools": [],
                }
            ]
        }
        parsed = PackageIndexParser().parse_package_index(json.dumps(index_payload))
        assert len(parsed.packages) == 1
        assert parsed.packages[0].name == "test"

    def test_parse_invalid_json(self):
        """Garbage input raises PackageParsingError."""
        with pytest.raises(PackageParsingError):
            PackageIndexParser().parse_package_index("invalid json")
class TestUtilityFunctions:
    """Tests for standalone helper functions."""

    def test_format_size(self):
        """format_size renders KB/MB/GB with one decimal place."""
        # (megabytes in, expected human-readable string out)
        cases = [
            (0.5, "512.0 KB"),
            (1.5, "1.5 MB"),
            (512.0, "512.0 MB"),
            (1536.0, "1.5 GB"),
            (2048.0, "2.0 GB"),
        ]
        for megabytes, expected in cases:
            assert format_size(megabytes) == expected
class TestRealDataParsing:
    """Test with real ESP32 package index data (if network available)"""

    def test_esp32_package_parsing(self):
        """Test parsing real ESP32 package index; skipped when offline."""
        ESP32_URL = "https://espressif.github.io/arduino-esp32/package_esp32_index.json"
        try:
            parser = PackageIndexParser(timeout=10)
            package_index = parser.parse_from_url(ESP32_URL)
            # Basic validation
            assert len(package_index.packages) > 0
            esp32_package = package_index.packages[0]
            assert esp32_package.name == "esp32"
            assert len(esp32_package.platforms) > 0
            assert len(esp32_package.tools) > 0
            # Fixed: this line was an f-string with no placeholders (ruff F541).
            print("✅ Successfully parsed ESP32 package index:")
            print(f" 📦 Packages: {len(package_index.packages)}")
            print(f" 🛠️ Platforms: {len(esp32_package.platforms)}")
            print(f" 🔧 Tools: {len(esp32_package.tools)}")
        except Exception as e:
            # Skip if network not available
            pytest.skip(f"Network test skipped: {e}")
if __name__ == "__main__":
# Run tests
pytest.main([__file__, "-v"])

View File

@@ -0,0 +1,266 @@
#!/usr/bin/env python3
"""
Test the PlatformIOIni class functionality.
"""
import tempfile
import unittest
from pathlib import Path
from ci.compiler.platformio_ini import PlatformIOIni
# Test data constants
# Well-formed project: one [platformio] section plus two complete envs.
BASIC_INI_CONTENT = """[platformio]
src_dir = src
[env:esp32dev]
board = esp32dev
platform = https://github.com/pioarduino/platform-espressif32/releases/download/55.03.30-2/platform-espressif32.zip
framework = arduino
[env:uno]
board = uno
platform = atmelavr
framework = arduino
"""
# Deliberately broken envs: one lacks `board`, the other lacks `platform`
# (exercised by test_validation).
INVALID_INI_CONTENT = """[platformio]
src_dir = src
[env:missing_board]
platform = atmelavr
framework = arduino
[env:missing_platform]
board = uno
framework = arduino
"""
# Minimal single-env fixture for the to_dict/from_dict round-trip test.
DICT_TEST_INI_CONTENT = """[platformio]
src_dir = src
[env:test]
board = esp32dev
platform = espressif32
framework = arduino
"""
# Fixture for the __str__ test (no framework line on purpose).
STRING_TEST_INI_CONTENT = """[platformio]
src_dir = src
[env:test]
board = esp32dev
platform = espressif32
"""
# Fixture shared by the parseFile/parseString factory-method tests.
PARSE_TEST_INI_CONTENT = """[platformio]
src_dir = src
[env:test]
board = esp32dev
platform = espressif32
framework = arduino
"""
# Remote platform archive referenced by BASIC_INI_CONTENT, and the local
# replacement URL used to test replace_url().
ESP32_PLATFORM_URL = "https://github.com/pioarduino/platform-espressif32/releases/download/55.03.30-2/platform-espressif32.zip"
NEW_CACHED_URL = "file:///path/to/cached/platform"
class TestPlatformIOIni(unittest.TestCase):
    """Test the PlatformIOIni class.

    Covers parse/dump round-trips, structural validation, dict and string
    conversion, the static factory methods, and URL optimization/caching.
    """

    def setUp(self):
        """Set up test environment: a fresh temp dir with a platformio.ini path."""
        self.temp_dir = Path(tempfile.mkdtemp())
        self.test_ini = self.temp_dir / "platformio.ini"

    def tearDown(self):
        """Clean up test environment."""
        import shutil

        if self.temp_dir.exists():
            shutil.rmtree(self.temp_dir)

    def test_parse_and_dump(self):
        """Test basic parse and dump functionality."""
        # Create a test platformio.ini file
        self.test_ini.write_text(BASIC_INI_CONTENT)
        # Parse the file
        pio_ini = PlatformIOIni.parseFile(self.test_ini)
        # Test basic functionality
        self.assertIn("env:esp32dev", pio_ini.get_sections())
        self.assertIn("env:uno", pio_ini.get_sections())
        self.assertEqual(len(pio_ini.get_env_sections()), 2)
        # Test option access
        self.assertEqual(pio_ini.get_option("env:esp32dev", "board"), "esp32dev")
        self.assertEqual(pio_ini.get_option("env:uno", "platform"), "atmelavr")
        # Test URL extraction
        platform_urls = pio_ini.get_platform_urls()
        self.assertEqual(len(platform_urls), 2)
        # Find the ESP32 platform URL
        esp32_url = None
        for section, option, url in platform_urls:
            if section == "env:esp32dev":
                esp32_url = url
                break
        self.assertEqual(esp32_url, ESP32_PLATFORM_URL)
        # Test URL replacement
        self.assertIsNotNone(esp32_url)  # Ensure esp32_url is not None
        result = pio_ini.replace_url(
            "env:esp32dev", "platform", esp32_url or "", NEW_CACHED_URL
        )
        self.assertTrue(result)
        self.assertEqual(pio_ini.get_option("env:esp32dev", "platform"), NEW_CACHED_URL)
        # Test dump
        output_file = self.temp_dir / "output.ini"
        pio_ini.dump(output_file)
        # Verify the file was written
        self.assertTrue(output_file.exists())
        # Parse the output and verify the change survived the round-trip
        pio_ini2 = PlatformIOIni.parseFile(output_file)
        self.assertEqual(
            pio_ini2.get_option("env:esp32dev", "platform"), NEW_CACHED_URL
        )
        self.assertEqual(pio_ini2.get_option("env:uno", "platform"), "atmelavr")

    def test_validation(self):
        """Test validation functionality."""
        # Create an invalid platformio.ini (one env missing board, one missing platform)
        self.test_ini.write_text(INVALID_INI_CONTENT)
        pio_ini = PlatformIOIni.parseFile(self.test_ini)
        issues = pio_ini.validate_structure()
        self.assertEqual(len(issues), 2)
        self.assertIn("missing 'board' option", issues[0])
        self.assertIn("missing required 'platform' option", issues[1])

    def test_dict_conversion(self):
        """Test to_dict and from_dict functionality."""
        self.test_ini.write_text(DICT_TEST_INI_CONTENT)
        pio_ini = PlatformIOIni.parseFile(self.test_ini)
        # Convert to dict
        config_dict = pio_ini.to_dict()
        self.assertIn("env:test", config_dict)
        self.assertEqual(config_dict["env:test"]["board"], "esp32dev")
        # Create new instance from dict
        pio_ini2 = PlatformIOIni.create()
        pio_ini2.from_dict(config_dict)
        self.assertEqual(pio_ini2.get_option("env:test", "board"), "esp32dev")
        self.assertEqual(pio_ini2.get_option("env:test", "platform"), "espressif32")

    def test_string_representation(self):
        """Test string representation."""
        self.test_ini.write_text(STRING_TEST_INI_CONTENT)
        pio_ini = PlatformIOIni.parseFile(self.test_ini)
        str_repr = str(pio_ini)
        self.assertIn("[platformio]", str_repr)
        self.assertIn("src_dir = src", str_repr)
        self.assertIn("[env:test]", str_repr)

    def test_static_parse_method(self):
        """Test static parse factory method."""
        self.test_ini.write_text(PARSE_TEST_INI_CONTENT)
        # Test static parse method
        pio_ini = PlatformIOIni.parseFile(self.test_ini)
        # Verify it worked correctly
        self.assertEqual(pio_ini.get_option("env:test", "board"), "esp32dev")
        self.assertEqual(pio_ini.get_option("env:test", "platform"), "espressif32")
        self.assertEqual(pio_ini.get_option("platformio", "src_dir"), "src")
        # Verify file_path is set correctly
        self.assertEqual(pio_ini.file_path, self.test_ini)

    def test_parse_string_method(self):
        """Test static parseString factory method."""
        # Test static parseString method
        pio_ini = PlatformIOIni.parseString(PARSE_TEST_INI_CONTENT)
        # Verify it worked correctly
        self.assertEqual(pio_ini.get_option("env:test", "board"), "esp32dev")
        self.assertEqual(pio_ini.get_option("env:test", "platform"), "espressif32")
        self.assertEqual(pio_ini.get_option("platformio", "src_dir"), "src")
        # Verify file_path is None since parsed from string
        self.assertIsNone(pio_ini.file_path)

    def test_optimize_method(self):
        """Test the optimize() method that downloads and caches packages."""
        # Create content with a real zip URL (shared by both envs so we can
        # verify they resolve to the same cached artifact)
        test_content = """[platformio]
src_dir = src
[env:esp32dev]
board = esp32dev
platform = https://github.com/pioarduino/platform-espressif32/releases/download/55.03.30-2/platform-espressif32.zip
framework = arduino
[env:esp32c3]
board = esp32-c3-devkitm-1
platform = https://github.com/pioarduino/platform-espressif32/releases/download/55.03.30-2/platform-espressif32.zip
framework = arduino
"""
        # Parse from string
        pio_ini = PlatformIOIni.parseString(test_content)
        # Verify original URL is present
        original_str = str(pio_ini)
        self.assertIn("https://github.com/pioarduino", original_str)
        # Run optimize with test cache instance.
        # NOTE: `Path` is already imported at module scope; the previous
        # redundant local `from pathlib import Path` was removed, and
        # `platform` is now imported once for both OS checks below.
        import platform

        from ci.compiler.platformio_cache import PlatformIOCache

        cache_dir = Path(".cache/tests/test_platformio_ini_optimize")
        cache = PlatformIOCache(cache_dir)
        pio_ini.optimize(cache)
        # Verify URLs were replaced
        optimized_str = str(pio_ini)
        self.assertNotIn("https://github.com/pioarduino", optimized_str)
        # On Windows, paths are returned directly, not as file:// URLs
        if platform.system() == "Windows":
            self.assertIn(
                "extracted", optimized_str
            )  # Check for the extracted directory
        else:
            self.assertIn("file:///", optimized_str)
        # Verify cache directory was created
        self.assertTrue(cache_dir.exists())
        # Verify both environments have the same local path
        esp32dev_platform = pio_ini.get_option("env:esp32dev", "platform")
        esp32c3_platform = pio_ini.get_option("env:esp32c3", "platform")
        self.assertEqual(esp32dev_platform, esp32c3_platform)
        # On Windows, paths are returned directly, not as file:// URLs
        if platform.system() == "Windows":
            self.assertTrue(esp32dev_platform.endswith("extracted"))
        else:
            self.assertTrue(esp32dev_platform.startswith("file:///"))
if __name__ == "__main__":
    # Allow running this test module directly with the stdlib runner.
    unittest.main()

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,176 @@
#!/usr/bin/env python3
"""
Real test for PlatformIO cache functionality - no mocks.
Downloads actual ESP32 platform, caches it, and verifies transformation.
"""
import shutil
import unittest
from pathlib import Path
import pytest
from ci.compiler.platformio_cache import PlatformIOCache
from ci.compiler.platformio_ini import PlatformIOIni
# Test constants
# Real (network-fetched) ESP32 platform archive used by the no-mock test.
ESP32_PLATFORM_URL = "https://github.com/pioarduino/platform-espressif32/releases/download/55.03.30-2/platform-espressif32.zip"
# Two-env project template; {platform_url} is substituted per test so both
# environments reference the same platform archive.
PLATFORMIO_INI_TEMPLATE = """[platformio]
src_dir = src
[env:esp32dev]
board = esp32dev
platform = {platform_url}
framework = arduino
build_flags = -DCORE_DEBUG_LEVEL=4
[env:esp32c3]
board = esp32-c3-devkitm-1
platform = {platform_url}
framework = arduino
"""
class TestRealPlatformIOCache(unittest.TestCase):
    """Real test for PlatformIO cache functionality without mocks."""

    def setUp(self):
        """Set up test environment."""
        # Cache lives under .cache/tests so repeated runs reuse the download.
        self.cache_dir = Path(".cache/tests/test_real_platformio_cache")
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.platformio_ini = self.cache_dir / "platformio.ini"

    def tearDown(self):
        """Clean up test environment."""
        # Only clean up the platformio.ini file, keep cache for reuse
        if self.platformio_ini.exists():
            self.platformio_ini.unlink()

    def test_real_esp32_platform_download_and_transform(self):
        """Test with real ESP32 platform download - no mocks."""
        # Create simple platformio.ini with real ESP32 platform URL
        platformio_ini_content = PLATFORMIO_INI_TEMPLATE.format(
            platform_url=ESP32_PLATFORM_URL
        )
        # Write the platformio.ini file
        self.platformio_ini.write_text(platformio_ini_content)
        # Verify original content has the URL
        original_content = self.platformio_ini.read_text()
        self.assertIn(ESP32_PLATFORM_URL, original_content)
        url_count_before = original_content.count(ESP32_PLATFORM_URL)
        self.assertEqual(url_count_before, 2)  # Should appear in both environments
        print(f"🌐 Downloading real ESP32 platform from: {ESP32_PLATFORM_URL}")
        # Process the platformio.ini file through the cache system
        # This will make real HTTP requests and download the actual platform
        try:
            pio_ini = PlatformIOIni.parseFile(self.platformio_ini)
            cache = PlatformIOCache(self.cache_dir)
            pio_ini.optimize(cache)
            pio_ini.dump(self.platformio_ini)
        except ValueError as e:
            # Upstream may republish the archive with a new checksum; that is
            # not a cache bug, so skip instead of failing.
            if "checksum verification" in str(e):
                pytest.skip(f"Skipping test due to upstream file checksum change: {e}")
            else:
                raise
        print("✅ Download and processing completed!")
        # Read the transformed platformio.ini
        final_content = self.platformio_ini.read_text()
        print(final_content)
        # Verify original URL was replaced
        self.assertNotIn(ESP32_PLATFORM_URL, final_content)
        print(f"✅ Original URL removed from platformio.ini")
        # Verify file URLs or paths are present (should be 2 occurrences)
        import platform

        if platform.system() == "Windows":
            # On Windows, paths are returned directly
            path_count = final_content.count("extracted")
            self.assertEqual(path_count, 2)
            print(f"✅ Found {path_count} local paths as expected")
        else:
            # On Unix systems, file:// URLs are used
            file_url_count = final_content.count("file:///")
            self.assertEqual(file_url_count, 2)
            print(f"✅ Found {file_url_count} file:// URLs as expected")
        # Verify the file URLs point to the cache
        self.assertIn("extracted", final_content)
        print("✅ URLs point to extracted cache directory")
        # Verify other content is preserved
        self.assertIn("board = esp32dev", final_content)
        self.assertIn("board = esp32-c3-devkitm-1", final_content)
        # Framework might be resolved from "arduino" to its URL, which is expected behavior
        self.assertTrue(
            "framework = arduino" in final_content
            or "framework = http://arduino.cc" in final_content,
            f"Expected framework to be 'arduino' or 'http://arduino.cc', but got: {final_content}",
        )
        self.assertIn("build_flags = -DCORE_DEBUG_LEVEL=4", final_content)
        self.assertIn("[env:esp32dev]", final_content)
        self.assertIn("[env:esp32c3]", final_content)
        print("✅ All other platformio.ini content preserved")
        # Verify cache directories and files were created
        self.assertTrue(self.cache_dir.exists())
        # Verify actual files were cached in the new structure
        # Each artifact has its own directory under cache_dir/
        artifact_dirs = [d for d in self.cache_dir.iterdir() if d.is_dir()]
        self.assertGreater(len(artifact_dirs), 0)
        # Check the artifact directory contains both zip and extracted folder
        for artifact_dir in artifact_dirs:
            # Look for the specific artifact.zip file
            artifact_zip = artifact_dir / "artifact.zip"
            self.assertTrue(artifact_zip.exists())
            print(f"✅ Platform zip cached: artifact.zip")
            # Check extracted directory exists
            extracted_dir = artifact_dir / "extracted"
            self.assertTrue(extracted_dir.exists())
            print(f"✅ Platform extracted alongside zip in: {artifact_dir.name}")
            # Verify platform.json exists in extracted content
            platform_json_files = list(extracted_dir.rglob("platform.json"))
            self.assertGreater(len(platform_json_files), 0)
            print(f"✅ Platform extracted with valid platform.json")
        # Checksums were removed from the system
        print("✅ No checksum verification (removed by design)")
        # Verify lock files were created alongside the zip
        # NOTE(review): `artifact_dir` is the loop variable from above, so only
        # the *last* artifact directory is checked for lock files — confirm intent.
        lock_files = list(artifact_dir.glob("*.lock"))
        self.assertGreaterEqual(
            len(lock_files), 0
        )  # Lock files may not exist after completion
        print(
            f"✅ Lock mechanism used (lock files may be cleaned up after completion)"
        )
        print("\n📋 Final transformed platformio.ini:")
        for line in final_content.split("\n"):
            if "platform =" in line:
                print(f"   {line}")
        print(f"\n📊 Cache statistics:")
        print(f"   Cache directory: {self.cache_dir}")
        # Get the first artifact directory
        first_artifact_dir = artifact_dirs[0]
        zip_file = list(first_artifact_dir.glob("*.zip"))[0]
        print(f"   Cached zip size: {zip_file.stat().st_size // 1024} KB")
        print(f"   Extracted platform: {first_artifact_dir.name}")
if __name__ == "__main__":
    # Allow running this test module directly with the stdlib runner.
    unittest.main()

View File

@@ -0,0 +1,206 @@
"""Basic unittest for ci.util.running_process.RunningProcess.
This test executes a trivial Python command via `uv run python -c` and verifies:
- The process exits successfully (return code 0)
- The streamed output contains the expected line
"""
from __future__ import annotations
import time
import unittest
from pathlib import Path
from typing import List, Union
from ci.util.running_process import EndOfStream, RunningProcess
class TestRunningProcess(unittest.TestCase):
    # Exercises RunningProcess end-to-end by spawning real `uv run python -c`
    # subprocesses and checking streamed output plus exit codes.

    def test_sanity(self: "TestRunningProcess") -> None:
        """Run a trivial command and validate output streaming and exit code.
        Uses `uv run python -c "print('hello')"` to ensure we respect the
        repository rule that all Python execution goes through `uv run python`.
        """
        command: list[str] = [
            "uv",
            "run",
            "python",
            "-c",
            "print('hello')",
        ]
        rp: RunningProcess = RunningProcess(
            command=command,
            cwd=Path(".").absolute(),
            check=False,
            auto_run=True,
            timeout=30,
            enable_stack_trace=True,
            on_complete=None,
            output_formatter=None,
        )
        captured_lines: List[str] = []
        # Poll the non-blocking reader: EndOfStream terminates, a str is a
        # line of output, anything else means "nothing yet" -> brief sleep.
        while True:
            out: Union[str, EndOfStream, None] = rp.get_next_line_non_blocking()
            if isinstance(out, EndOfStream):
                break
            if isinstance(out, str):
                captured_lines.append(out)
            else:
                time.sleep(0.01)
        rc: int = rp.wait()
        self.assertEqual(rc, 0)
        combined: str = "\n".join(captured_lines).strip()
        self.assertIn("hello", combined)

    def test_line_iter_basic(self: "TestRunningProcess") -> None:
        """Validate context-managed line iteration yields only strings and completes."""
        command: list[str] = [
            "uv",
            "run",
            "python",
            "-c",
            "print('a'); print('b'); print('c')",
        ]
        rp: RunningProcess = RunningProcess(
            command=command,
            cwd=Path(".").absolute(),
            check=False,
            auto_run=True,
            timeout=10,
            enable_stack_trace=False,
            on_complete=None,
            output_formatter=None,
        )
        iter_lines: List[str] = []
        with rp.line_iter(timeout=5) as it:
            for ln in it:
                # Should always be a string, never None
                self.assertIsInstance(ln, str)
                iter_lines.append(ln)
        # Process should have finished; ensure exit success
        rc: int = rp.wait()
        self.assertEqual(rc, 0)
        self.assertEqual(iter_lines, ["a", "b", "c"])
class _UpperFormatter:
"""Simple OutputFormatter that records begin/end calls and uppercases lines."""
def __init__(self) -> None:
self.begin_called: bool = False
self.end_called: bool = False
def begin(self) -> None:
self.begin_called = True
def transform(self, line: str) -> str:
return line.upper()
def end(self) -> None:
self.end_called = True
class TestRunningProcessAdditional(unittest.TestCase):
    # Covers the timeout/kill path and the output-formatter hooks.

    def test_timeout_and_kill(self: "TestRunningProcessAdditional") -> None:
        """Process exceeding timeout should be killed and raise TimeoutError."""
        command: list[str] = [
            "uv",
            "run",
            "python",
            "-c",
            "import time; time.sleep(999)",
        ]
        rp: RunningProcess = RunningProcess(
            command=command,
            cwd=Path(".").absolute(),
            check=False,
            auto_run=True,
            timeout=1,
            enable_stack_trace=True,
            on_complete=None,
            output_formatter=None,
        )
        # Do not block on output; wait should time out quickly
        with self.assertRaises(TimeoutError):
            _ = rp.wait()
        # After timeout, process should be finished
        self.assertTrue(rp.finished)
        # EndOfStream should be delivered shortly after; poll with a 2s budget.
        end_seen: bool = False
        deadline: float = time.time() + 2.0
        while time.time() < deadline:
            nxt: Union[str, EndOfStream, None] = rp.get_next_line_non_blocking()
            if isinstance(nxt, EndOfStream):
                end_seen = True
                break
            time.sleep(0.01)
        self.assertTrue(end_seen)

    def test_output_formatter(self: "TestRunningProcessAdditional") -> None:
        """Output formatter hooks are invoked and transform is applied; blanks ignored."""
        formatter = _UpperFormatter()
        command: list[str] = [
            "uv",
            "run",
            "python",
            "-c",
            "print(); print('hello'); print('world')",
        ]
        rp: RunningProcess = RunningProcess(
            command=command,
            cwd=Path(".").absolute(),
            check=False,
            auto_run=True,
            timeout=10,
            enable_stack_trace=False,
            on_complete=None,
            output_formatter=formatter,
        )
        # Drain output (optional; accumulated_output records lines regardless)
        while True:
            line: Union[str, EndOfStream, None] = rp.get_next_line_non_blocking()
            if isinstance(line, EndOfStream):
                break
            if line is None:
                time.sleep(0.005)
                continue
        rc: int = rp.wait()
        self.assertEqual(rc, 0)
        # Verify formatter begin/end were called
        self.assertTrue(formatter.begin_called)
        self.assertTrue(formatter.end_called)
        # Verify transformed, non-empty lines only
        output_text: str = rp.stdout.strip()
        # Should contain HELLO and WORLD, but not an empty line
        self.assertIn("HELLO", output_text)
        self.assertIn("WORLD", output_text)
        # Ensure no blank-only lines exist in accumulated output
        for ln in output_text.split("\n"):
            self.assertTrue(len(ln.strip()) > 0)
if __name__ == "__main__":
    # Allow running this test module directly with the stdlib runner.
    unittest.main()

View File

@@ -0,0 +1,426 @@
import json
import subprocess
import threading
import time
import unittest
from pathlib import Path
import pytest
# Gate for the expensive symbol-analysis tests below; set to False to skip
# them (avoids the costly uno build and heavy imports during a test run).
_ENABLED = True
from ci.util.paths import PROJECT_ROOT
from ci.util.symbol_analysis import (
SymbolInfo,
analyze_map_file,
analyze_symbols,
build_reverse_call_graph,
find_board_build_info,
generate_report,
)
from ci.util.tools import Tools, load_tools
# Paths anchored to this test module's directory.
HERE = Path(__file__).resolve().parent.absolute()
UNO = HERE / "uno"
OUTPUT = HERE / "output"
# Pre-built fixture ELF used by the basic symbol-analysis test.
ELF_FILE = UNO / "firmware.elf"
# build_info.json produced by the CI uno/Blink compilation (see init()).
BUILD_INFO_PATH = (
    PROJECT_ROOT / ".build" / "fled" / "examples" / "uno" / "build_info.json"
)
# PlatformIO home, where the AVR toolchain package is installed.
PLATFORMIO_PATH = Path.home() / ".platformio"
PLATFORMIO_PACKAGES_PATH = PLATFORMIO_PATH / "packages"
TOOLCHAIN_AVR = PLATFORMIO_PACKAGES_PATH / "toolchain-atmelavr"
# Global lock to prevent multiple threads from running compilation simultaneously
_compilation_lock = threading.Lock()
_compilation_done = False  # set True once the uno build is verified or produced
def init() -> None:
    """Ensure the uno/Blink example build exists, compiling it once if needed.

    Thread-safe: a module-level lock serializes callers, and the
    ``_compilation_done`` flag makes the (expensive) compilation run at most
    once per process. Raises ``subprocess.CalledProcessError`` if the
    compilation command fails.
    """
    global _compilation_done
    # Use lock to ensure only one thread runs compilation
    with _compilation_lock:
        if _compilation_done:
            print("Compilation already completed by another thread, skipping.")
            return
        uno_build = PROJECT_ROOT / ".build" / "uno"
        print(
            f"Thread {threading.current_thread().ident}: Checking for Uno build in: {uno_build}"
        )
        print(f"BUILD_INFO_PATH: {BUILD_INFO_PATH}")
        print(f"TOOLCHAIN_AVR: {TOOLCHAIN_AVR}")
        print(f"BUILD_INFO_PATH exists: {BUILD_INFO_PATH.exists()}")
        print(f"TOOLCHAIN_AVR exists: {TOOLCHAIN_AVR.exists()}")
        if not BUILD_INFO_PATH.exists() or not TOOLCHAIN_AVR.exists():
            print("Uno build not found. Running compilation...")
            print(f"Working directory: {PROJECT_ROOT}")
            try:
                # Fixed log message to match the actual command executed below
                # (previously printed "python-m", missing the space).
                print(
                    "Starting compilation command: uv run python -m ci.ci-compile uno --examples Blink"
                )
                start_time = time.time()
                result = subprocess.run(
                    "uv run python -m ci.ci-compile uno --examples Blink",
                    shell=True,
                    check=True,
                    cwd=str(PROJECT_ROOT),
                    capture_output=True,
                    text=True,
                )
                end_time = time.time()
                print(
                    f"Compilation completed successfully in {end_time - start_time:.2f} seconds."
                )
                print(f"STDOUT: {result.stdout}")
                if result.stderr:
                    print(f"STDERR: {result.stderr}")
                _compilation_done = True
            except subprocess.CalledProcessError as e:
                # Surface the captured output before re-raising so CI logs
                # show why the build failed.
                print(f"Error during compilation (returncode: {e.returncode}): {e}")
                if e.stdout:
                    print(f"STDOUT: {e.stdout}")
                if e.stderr:
                    print(f"STDERR: {e.stderr}")
                raise
        else:
            print("Uno build found, skipping compilation.")
            _compilation_done = True
@pytest.mark.full
class TestSymbolAnalysis(unittest.TestCase):
    """Integration tests for ci.util.symbol_analysis against a real uno build."""

    @classmethod
    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def setUpClass(cls):
        """Set up test fixtures before running tests."""
        if not _ENABLED:
            return
        init()
        # Import Tools dynamically to avoid import errors when disabled
        from ci.util.tools import Tools, load_tools

        cls.tools: Tools = load_tools(BUILD_INFO_PATH)
        # Load build info for testing
        with open(BUILD_INFO_PATH) as f:
            cls.build_info = json.load(f)
        # Get the board key (should be 'uno' for UNO board)
        cls.board_key = "uno"
        if cls.board_key not in cls.build_info:
            # Fallback to first available key
            cls.board_key = list(cls.build_info.keys())[0]
        cls.board_info = cls.build_info[cls.board_key]

    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def test_analyze_symbols_basic(self) -> None:
        """Test basic symbol analysis functionality."""
        print("Testing basic symbol analysis...")
        # Test with the test ELF file
        symbols = analyze_symbols(
            str(ELF_FILE), str(self.tools.nm_path), str(self.tools.cpp_filt_path)
        )
        # Verify we got some symbols
        self.assertGreater(len(symbols), 0, "Should find some symbols in ELF file")
        # Verify symbol structure
        symbols_with_size = 0
        for symbol in symbols:
            self.assertIsInstance(symbol.address, str)
            self.assertIsInstance(symbol.size, int)
            self.assertIsInstance(symbol.type, str)
            self.assertIsInstance(symbol.name, str)
            self.assertIsInstance(symbol.demangled_name, str)
            self.assertGreaterEqual(
                symbol.size, 0, "Symbol size should be non-negative"
            )
            if symbol.size > 0:
                symbols_with_size += 1
        # Ensure we have at least some symbols with actual size (not all zero-size)
        self.assertGreater(
            symbols_with_size, 0, "Should have at least some symbols with positive size"
        )
        print(f"Found {len(symbols)} symbols")
        # Check that we have some common symbols we'd expect in a compiled program
        symbol_names = [s.demangled_name for s in symbols]
        # Should have main function
        main_symbols = [name for name in symbol_names if "main" in name.lower()]
        self.assertGreater(len(main_symbols), 0, "Should find main function")
        print(f"Sample symbols: {symbol_names[:5]}")

    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def test_analyze_symbols_from_build_info(self) -> None:
        """Test symbol analysis using actual build info paths."""
        print("Testing symbol analysis with build_info.json paths...")
        # Get paths from build info
        nm_path = self.board_info["aliases"]["nm"]
        cppfilt_path = self.board_info["aliases"]["c++filt"]
        elf_file = self.board_info["prog_path"]
        # If the ELF file from build_info doesn't exist, try the actual build location
        if not Path(elf_file).exists():
            # Fallback to the actual ELF file location
            actual_elf_file = (
                PROJECT_ROOT
                / ".build"
                / "fled"
                / "examples"
                / "uno"
                / "Blink"
                / ".pio"
                / "build"
                / "uno"
                / "firmware.elf"
            )
            if actual_elf_file.exists():
                elf_file = str(actual_elf_file)
                print(f"Using actual ELF file location: {elf_file}")
        # Verify the ELF file exists
        self.assertTrue(Path(elf_file).exists(), f"ELF file should exist: {elf_file}")
        # Run analysis
        symbols = analyze_symbols(elf_file, nm_path, cppfilt_path)
        # Verify results
        self.assertGreater(len(symbols), 0, "Should find symbols")
        # Check that we have symbols with reasonable sizes
        sizes = [s.size for s in symbols]
        symbols_with_size = sum(1 for size in sizes if size > 0)
        self.assertGreater(
            symbols_with_size, 0, "Should have at least some symbols with positive size"
        )
        # All sizes should be non-negative (zero-size symbols are valid for undefined symbols, labels, etc.)
        self.assertTrue(
            all(size >= 0 for size in sizes),
            "All symbols should have non-negative sizes",
        )
        print(f"Analyzed {len(symbols)} symbols from build ELF: {elf_file}")

    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def test_generate_report_basic(self) -> None:
        """Test report generation functionality."""
        print("Testing basic report generation...")
        # Create some test symbols using SymbolInfo dataclass
        test_symbols = [
            SymbolInfo(
                address="0x1000",
                size=1000,
                type="T",
                name="test_function_1",
                demangled_name="test_function_1()",
                source="test",
            ),
            SymbolInfo(
                address="0x2000",
                size=500,
                type="T",
                name="_Z12test_func_2v",
                demangled_name="test_function_2()",
                source="test",
            ),
            SymbolInfo(
                address="0x3000",
                size=200,
                type="D",
                name="test_data",
                demangled_name="test_data",
                source="test",
            ),
        ]
        test_dependencies = {"test_module.o": ["test_function_1", "test_data"]}
        # Generate report
        report = generate_report("TEST_BOARD", test_symbols, test_dependencies)
        # Verify report structure - use dataclass field access
        self.assertIsInstance(report.board, str)
        self.assertIsInstance(report.total_symbols, int)
        self.assertIsInstance(report.total_size, int)
        self.assertIsInstance(report.largest_symbols, list)
        self.assertIsInstance(report.type_breakdown, list)
        self.assertIsInstance(report.dependencies, dict)
        # Verify values
        self.assertEqual(report.board, "TEST_BOARD")
        self.assertEqual(report.total_symbols, 3)
        self.assertEqual(report.total_size, 1700)  # 1000 + 500 + 200
        # Verify type breakdown - it's now a list of TypeBreakdown dataclasses
        type_breakdown_dict = {tb.type: tb for tb in report.type_breakdown}
        self.assertIn("T", type_breakdown_dict)
        self.assertIn("D", type_breakdown_dict)
        self.assertEqual(type_breakdown_dict["T"].count, 2)
        self.assertEqual(type_breakdown_dict["D"].count, 1)
        print("Report generation test passed")

    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def test_find_board_build_info(self) -> None:
        """Test the board build info detection functionality."""
        print("Testing board build info detection...")
        # Test finding UNO board specifically
        # NOTE: find_board_build_info exits the process when no build exists,
        # hence the SystemExit handlers below.
        try:
            build_info_path, board_name = find_board_build_info("uno")
            self.assertEqual(board_name, "uno")
            self.assertTrue(build_info_path.exists())
            self.assertEqual(build_info_path.name, "build_info.json")
            print(f"Found UNO build info: {build_info_path}")
        except SystemExit:
            self.skipTest("UNO build not available for testing")
        # Test auto-detection (should find any available board)
        try:
            build_info_path, board_name = find_board_build_info(None)
            self.assertTrue(build_info_path.exists())
            self.assertEqual(build_info_path.name, "build_info.json")
            print(f"Auto-detected board: {board_name} at {build_info_path}")
        except SystemExit:
            self.skipTest("No builds available for auto-detection testing")

    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def test_analyze_map_file(self) -> None:
        """Test map file analysis if available."""
        print("Testing map file analysis...")
        # Try to find a map file
        elf_file_path = Path(self.board_info["prog_path"])
        map_file = elf_file_path.with_suffix(".map")
        if not map_file.exists():
            print(f"Map file not found at {map_file}, skipping map analysis test")
            return
        # Analyze the map file
        dependencies = analyze_map_file(map_file)
        # Verify result structure
        self.assertIsInstance(dependencies, dict)
        if dependencies:
            print(f"Found {len(dependencies)} modules in map file")
            # Verify structure of dependencies
            for module, symbols in dependencies.items():
                self.assertIsInstance(module, str)
                self.assertIsInstance(symbols, list)
            # Print a sample for debugging
            sample_modules = list(dependencies.keys())[:3]
            for module in sample_modules:
                print(f"  {module}: {len(dependencies[module])} symbols")
        else:
            print("No dependencies found in map file (this may be normal)")

    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def test_build_reverse_call_graph(self) -> None:
        """Test reverse call graph building."""
        print("Testing reverse call graph building...")
        # Create test call graph (caller -> callees)
        test_call_graph = {
            "function_a": ["function_b", "function_c"],
            "function_b": ["function_c", "function_d"],
            "function_c": ["function_d"],
        }
        # Build reverse call graph
        reverse_graph = build_reverse_call_graph(test_call_graph)
        # Verify structure (callee -> callers)
        expected_reverse = {
            "function_b": ["function_a"],
            "function_c": ["function_a", "function_b"],
            "function_d": ["function_b", "function_c"],
        }
        self.assertEqual(reverse_graph, expected_reverse)
        print("Reverse call graph building test passed")

    @unittest.skipUnless(_ENABLED, "Tests disabled - set _ENABLED = True to run")
    def test_full_symbol_analysis_workflow(self) -> None:
        """Test the complete symbol analysis workflow."""
        print("Testing complete symbol analysis workflow...")
        # Run full analysis on actual build
        elf_file = self.board_info["prog_path"]
        # If the ELF file from build_info doesn't exist, try the actual build location
        if not Path(elf_file).exists():
            # Fallback to the actual ELF file location
            actual_elf_file = (
                PROJECT_ROOT
                / ".build"
                / "fled"
                / "examples"
                / "uno"
                / "Blink"
                / ".pio"
                / "build"
                / "uno"
                / "firmware.elf"
            )
            if actual_elf_file.exists():
                elf_file = str(actual_elf_file)
                print(f"Using actual ELF file location: {elf_file}")
        nm_path = self.board_info["aliases"]["nm"]
        cppfilt_path = self.board_info["aliases"]["c++filt"]
        # Analyze symbols
        symbols = analyze_symbols(elf_file, nm_path, cppfilt_path)
        # Analyze map file if available
        map_file = Path(elf_file).with_suffix(".map")
        dependencies = analyze_map_file(map_file)
        # Generate report
        report = generate_report("UNO", symbols, dependencies)
        # Verify the complete workflow produced valid results - use dataclass field access
        self.assertGreater(report.total_symbols, 0)
        self.assertGreater(report.total_size, 0)
        self.assertGreater(len(report.largest_symbols), 0)
        self.assertGreater(len(report.type_breakdown), 0)
        # Print summary for verification
        print("Complete analysis results:")
        print(f"  Board: {report.board}")
        print(f"  Total symbols: {report.total_symbols}")
        print(
            f"  Total size: {report.total_size} bytes ({report.total_size / 1024:.1f} KB)"
        )
        print(
            f"  Largest symbol: {report.largest_symbols[0].demangled_name} ({report.largest_symbols[0].size} bytes)"
        )
        # Verify we have expected symbol types for a typical embedded program
        type_breakdown_dict = {tb.type: tb for tb in report.type_breakdown}
        self.assertIn("T", type_breakdown_dict, "Should have text/code symbols")
        print("Full workflow test completed successfully")
if __name__ == "__main__":
    # Allow running this test module directly with the stdlib runner.
    unittest.main()

View File

@@ -0,0 +1,171 @@
"""
Unit tests for unity file generation behavior in the Python compiler.
Validates that:
- Unity content matches expected includes order
- Unity file writes are idempotent (no rewrite when content unchanged)
- Unity file is rewritten when source list changes
"""
import os
import tempfile
import time
import unittest
from pathlib import Path
from typing import List
from unittest import TestCase
from ci.compiler.clang_compiler import BuildFlags, Compiler, CompilerOptions
class TestUnityGeneration(TestCase):
    def setUp(self) -> None:
        """Create a temp source tree with two minimal .cpp files and a Compiler."""
        self.temp_dir = Path(tempfile.mkdtemp(prefix="unity_test_")).resolve()
        self.unity_dir = self.temp_dir / "unity"
        self.unity_dir.mkdir(parents=True, exist_ok=True)
        self.src_dir = self.temp_dir / "src"
        self.src_dir.mkdir(parents=True, exist_ok=True)
        # Create minimal compilable .cpp files
        self.cpp1 = self.src_dir / "a.cpp"
        self.cpp2 = self.src_dir / "b.cpp"
        with open(self.cpp1, "w", encoding="utf-8") as f1:
            f1.write(
                """
static int func_a() { return 1; }
int var_a = func_a();
""".lstrip()
            )
        with open(self.cpp2, "w", encoding="utf-8") as f2:
            f2.write(
                """
static int func_b() { return 2; }
int var_b = func_b();
""".lstrip()
            )
        # Load build flags and create a compiler instance
        # NOTE(review): assumes tests run from the repo root so that
        # ci/build_unit.toml resolves — confirm against the test runner.
        project_root = Path.cwd()
        toml_path = project_root / "ci" / "build_unit.toml"
        build_flags: BuildFlags = BuildFlags.parse(
            toml_path, quick_build=True, strict_mode=False
        )
        # Flatten compiler binary + flags + include dirs into one arg list.
        compiler_args: List[str] = []
        for item in build_flags.tools.cpp_compiler:
            compiler_args.append(item)
        for item in build_flags.compiler_flags:
            compiler_args.append(item)
        for item in build_flags.include_flags:
            compiler_args.append(item)
        settings = CompilerOptions(
            include_path=str(project_root / "src"),
            defines=[],
            compiler_args=compiler_args,
        )
        self.compiler = Compiler(settings, build_flags)
def tearDown(self) -> None:
# Best effort cleanup of temp directory
try:
for root, dirs, files in os.walk(self.temp_dir, topdown=False):
for name in files:
try:
(Path(root) / name).unlink(missing_ok=True)
except Exception:
pass
for name in dirs:
try:
(Path(root) / name).rmdir()
except Exception:
pass
self.temp_dir.rmdir()
except Exception:
# Ignore cleanup issues on Windows
pass
def test_unity_write_idempotent_and_updates_on_change(self) -> None:
# First run: generate and compile one chunk unity
options = CompilerOptions(
include_path=self.compiler.settings.include_path,
defines=[],
compiler_args=self.compiler.settings.compiler_args,
use_pch=False,
additional_flags=["-c"],
)
cpp_list: List[str] = [str(self.cpp1), str(self.cpp2)]
result = self.compiler._compile_unity_chunks_sync( # Internal, synchronous for testing
options,
cpp_list,
chunks=1,
unity_dir=self.unity_dir,
no_parallel=True,
)
self.assertTrue(result.success, "Initial unity compilation should succeed")
unity_cpp = self.unity_dir / "unity1.cpp"
self.assertTrue(unity_cpp.exists(), "unity1.cpp should be created")
first_mtime = unity_cpp.stat().st_mtime_ns
# Second run with identical inputs should not rewrite the file
time.sleep(0.01) # Ensure detectable time difference if rewritten
result2 = self.compiler._compile_unity_chunks_sync(
options,
cpp_list,
chunks=1,
unity_dir=self.unity_dir,
no_parallel=True,
)
self.assertTrue(result2.success, "Second unity compilation should succeed")
second_mtime = unity_cpp.stat().st_mtime_ns
self.assertEqual(
first_mtime,
second_mtime,
"unity1.cpp should not be rewritten when content is unchanged",
)
# Change the input set (add a new file) to force unity content change and rewrite
cpp3 = self.src_dir / "c.cpp"
with open(cpp3, "w", encoding="utf-8") as f3:
f3.write(
"""
static int func_c() { return 3; }
int var_c = func_c();
""".lstrip()
)
# Update list to include the new file
cpp_list.append(str(cpp3))
time.sleep(0.01)
result3 = self.compiler._compile_unity_chunks_sync(
options,
cpp_list,
chunks=1,
unity_dir=self.unity_dir,
no_parallel=True,
)
self.assertTrue(
result3.success, "Unity compilation after input set change should succeed"
)
third_mtime = unity_cpp.stat().st_mtime_ns
self.assertGreater(
third_mtime,
second_mtime,
"unity1.cpp should be rewritten when the included file list changes",
)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,81 @@
# pyright: reportUnknownMemberType=false
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from typing import List
from ci.util.paths import PROJECT_ROOT
# Worker-thread count: serial when NO_PARALLEL is set, otherwise 4x the CPU
# count (the per-file work is I/O-bound reading, so oversubscription helps).
NUM_WORKERS = 1 if os.environ.get("NO_PARALLEL") else (os.cpu_count() or 1) * 4
# Root of the WASM platform sources scanned by this test.
WASM_ROOT = PROJECT_ROOT / "src" / "platforms" / "wasm"
class TestMissingPragmaOnce(unittest.TestCase):
def check_file(self, file_path: str) -> list[str]:
"""Check if a header file has #pragma once directive or if a cpp file incorrectly has it."""
failings: list[str] = []
with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
if file_path.endswith(".h") or file_path.endswith(".cpp"):
content = f.read()
# For header files, check if #pragma once is missing
if "EM_ASM_" in content and "// clang-format off\n" not in content:
if "clang-format off" not in content:
failings.append(f"Missing clang-format off in {file_path}")
else:
failings.append(f"clang-format off is malformed in {file_path}")
return failings
def test_esm_asm_and_clang_format(self) -> None:
files_to_check: List[str] = []
current_dir = None
# Collect files to check
for root, _, files in os.walk(WASM_ROOT):
# Log when we enter a new directory
rel_path = os.path.relpath(root, WASM_ROOT)
if current_dir != rel_path:
current_dir = rel_path
print(f"Traversing directory: {rel_path}")
for file in files:
if file.endswith((".h", ".cpp")): # Check both header and cpp files
file_path = os.path.join(root, file)
files_to_check.append(file_path)
print(f"Found {len(files_to_check)} files to check")
# Process files in parallel
all_failings: List[str] = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
# Report results
if all_failings:
msg = (
f"Found {len(all_failings)} clang format issues in wasm: \n"
+ "\n".join(all_failings)
)
for failing in all_failings:
print(failing)
print(
"Please be aware you need // then one space then clang-format off then a new line exactly"
)
self.fail(msg)
else:
print("All files passed the check.")
print(f"Clange format check completed. Processed {len(files_to_check)} files.")
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,64 @@
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from typing import List
from ci.util.paths import PROJECT_ROOT
# Root of the C++ sources scanned by this test.
SRC_ROOT = PROJECT_ROOT / "src"
# PLATFORMS_DIR = os.path.join(SRC_ROOT, "platforms")
# Worker-thread count: serial when NO_PARALLEL is set, otherwise 4x the CPU
# count (the per-file work is I/O-bound reading, so oversubscription helps).
NUM_WORKERS = 1 if os.environ.get("NO_PARALLEL") else (os.cpu_count() or 1) * 4
WRONG_DEFINES: dict[str, str] = {
"#if ESP32": "Use #ifdef ESP32 instead of #if ESP32",
"#if defined(FASTLED_RMT5)": "Use #ifdef FASTLED_RMT5 instead of #if defined(FASTLED_RMT5)",
"#if defined(FASTLED_ESP_HAS_CLOCKLESS_SPI)": "Use #ifdef FASTLED_ESP_HAS_CLOCKLESS_SPI instead of #if defined(FASTLED_ESP_HAS_CLOCKLESS_SPI)",
}
class TestWrongDefines(unittest.TestCase):
def check_file(self, file_path: str) -> List[str]:
failings: List[str] = []
with open(file_path, "r", encoding="utf-8") as f:
for line_number, line in enumerate(f, 1):
line = line.strip()
if line.startswith("//"):
continue
for needle, message in WRONG_DEFINES.items():
if needle in line:
failings.append(f"{file_path}:{line_number}: {message}")
return failings
def test_no_bad_defines(self) -> None:
"""Searches through the program files to check for banned headers, excluding src/platforms."""
files_to_check: List[str] = []
for root, _, files in os.walk(SRC_ROOT):
for file in files:
if file.endswith(
(".cpp", ".h", ".hpp")
): # Add or remove file extensions as needed
file_path = os.path.join(root, file)
files_to_check.append(file_path)
all_failings: List[str] = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
if all_failings:
msg = f"Found {len(all_failings)} bad defines: \n" + "\n".join(all_failings)
for failing in all_failings:
print(failing)
self.fail("Please fix the defines: \n" + msg + "\n")
else:
print("No bad defines found.")
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()

Binary file not shown.

View File

@@ -0,0 +1,237 @@
:100000000C9435000C945D000C945D000C945D0024
:100010000C945D000C945D000C945D000C945D00EC
:100020000C945D000C945D000C945D000C945D00DC
:100030000C945D000C945D000C945D000C945D00CC
:100040000C948D050C945D000C945D000C945D0087
:100050000C945D000C945D000C945D000C945D00AC
:100060000C945D000C945D00BB0611241FBECFEF05
:10007000D8E0DEBFCDBF11E0A0E0B1E0EEE8FEE0E9
:1000800002C005900D92AE32B107D9F721E0AEE281
:10009000B1E001C01D92A637B207E1F710E0C5E359
:1000A000D0E004C02197FE010E943A07C433D10773
:1000B000C9F70E94D7050C9445070C9400003FB780
:1000C000F8948091550190915601A0915701B091FB
:1000D000580126B5A89B05C02F3F19F00196A11D18
:1000E000B11D3FBFBA2FA92F982F8827BC01CD0182
:1000F000620F711D811D911D42E0660F771F881FE1
:10010000991F4A95D1F708952F923F924F925F928F
:100110006F927F928F929F92AF92BF92CF92DF9217
:10012000EF92FF920F931F93CF93DF93CDB7DEB77C
:10013000C358D1090FB6F894DEBF0FBECDBF782EDD
:100140008091660190916701A0916801B091690169
:10015000892B8A2B8B2BD9F00E945F0000917201B2
:10016000109173012091740130917501601B710B26
:10017000820B930B00916601109167012091680139
:10018000309169016017710782079307C8F20E94D6
:100190005F00609372017093730180937401909378
:1001A0007501E0916E01F0916F01309759F0409127
:1001B0006A0150916B0160916C0170916D01872D06
:1001C0000995782EFE01319680E8DF011D928A950F
:1001D000E9F7E0907001F09071016F015E01B1E804
:1001E000AB0EB11C4F01472C512CE114F10409F462
:1001F0005CC08A149B0409F458C0D701ED91FC91AE
:100200000484F585E02DC7010995F401819391934C
:100210004F0180916401909165018436910518F435
:10022000D7011C961C92D701ED91FC91228033805E
:10023000F501108211821282772019F187010A5F7D
:100240001F4FF0E0E0E0D8012D918D012223A9F0AD
:1002500012966C90662089F030E02F5F3F4FD201FC
:100260000E9405079B01AC01A62DB0E011960E94EB
:100270001B07D501AE0FBF1F8C933196E330F105FC
:1002800011F7F501008111812281D7011D964D9151
:100290005C911E9712966D917C91C701F1010995B1
:1002A000D7011496ED90FC90A0CF009170011091B1
:1002B00071010115110599F0CA14DB0481F0F601F2
:1002C000619171916F01D801ED91FC910684F785E0
:1002D000E02DC8010995F80104811581EACF8091CC
:1002E0006101909162019C012F5F3F4F3093620149
:1002F0002093610149970CF44AC08FB7F89420917C
:10030000590130915A0140915B0150915C018FBFBE
:1003100080915D0190915E01A0915F01B0916001BB
:10032000281B390B4A0B5B0B21F421E030E040E045
:1003300050E0E0916101F091620188EE93E0E89F66
:10034000B001E99F700DF89F700D1124072E000C6D
:10035000880B990B0E94E30630936501209364019A
:1003600010926201109261012FB7F89480915901A7
:1003700090915A01A0915B01B0915C012FBF8093D5
:100380005D0190935E01A0935F01B0936001CD5732
:10039000DF4F0FB6F894DEBF0FBECDBFDF91CF9118
:1003A0001F910F91FF90EF90DF90CF90BF90AF9093
:1003B0009F908F907F906F905F904F903F902F9085
:1003C00008958F929F92AF92BF92CF92DF92EF9259
:1003D000FF920E945F004B015C0184EFC82EDD2478
:1003E000D394E12CF12C0E945F00681979098A09E5
:1003F0009B09683E734081059105A8F321E0C21A6C
:10040000D108E108F10888EE880E83E0981EA11C4F
:10041000B11CC114D104E104F10429F7FF90EF905D
:10042000DF90CF90BF90AF909F908F90089580E91C
:1004300091E008950F931F93CF93DF9320912F01A5
:100440002F5F322F377030932F0120FF2BC020E811
:1004500031FD2064347009F02062205FFC01EC0162
:10046000239600E011E06485662329F070E0C8015E
:100470000E94CF066F5F6187822F869F080E80E003
:10048000811D1124811110C01682662311F0615064
:1004900061873196EC17FD0731F7DF91CF911F91FE
:1004A0000F91089520E0D4CF81508683EECF4F92F4
:1004B0005F927F928F929F92AF92BF92CF92DF9284
:1004C000EF92FF920F931F93CF93DF932C01EB01D9
:1004D0000E945F00F20125893689621B730B6A3026
:1004E0007105B0F3F8948A819B81181619060CF0F7
:1004F000CDC1E881F9816BB1862E689483F83BB158
:10050000377F3BB9DA848F812D2D281B822F2F83D3
:100510004F85042E000C550BAA81BB817D85FC8480
:10052000EE847F5FF394E3949E819884B984AB84D6
:100530001181C12C6C2D0C2D2C2D2181112788941B
:100540002111280F08F42FEF8195889470FD120F68
:100550001795889471FD120F1795889472FD120FEC
:100560001795889473FD120F1795889474FD120FD8
:100570001795889475FD120F1795889476FD120FC4
:100580001795889477FD120F17958894622F711133
:10059000612F8D0D162F002C8BB800C017FF3BB9B3
:1005A00020816627889400C000C0002C3BB921112F
:1005B000290F00C0002C8BB800C016FF3BB908F40F
:1005C0002FEF9195889400C000C0002C3BB9F0FC3F
:1005D000620F00C0002C8BB800C015FF3BB96795B7
:1005E0008894F1FC620F00C000C0002C3BB96795F5
:1005F000889400C0002C8BB800C014FF3BB9F2FCFB
:10060000620F6795889400C000C0002C3BB9F3FCD2
:10061000620F00C0002C8BB800C013FF3BB9679578
:100620008894F4FC620F00C000C0002C3BB96795B1
:10063000889400C0002C8BB800C012FF3BB9F5FCB9
:10064000620F6795889400C000C0002C3BB9F6FC8F
:10065000620F00C0002C8BB800C011FF3BB967953A
:100660008894F7FC620F00C000C0002C3BB967956E
:10067000889400C0002C8BB800C010FF3BB9122F2B
:10068000F110162F9B0D00C000C0002C3BB900C01C
:1006900000C0002C8BB800C017FF3BB92281662731
:1006A000889400C000C0002C3BB92111290D00C066
:1006B000002C8BB800C016FF3BB908F42FEFE40FF5
:1006C000F51F00C000C0002C3BB9E0FC620F00C069
:1006D000002C8BB800C015FF3BB967958894E1FCEE
:1006E000620F00C000C0002C3BB96795889400C021
:1006F000002C8BB800C014FF3BB9E2FC620F679579
:10070000889400C000C0002C3BB9E3FC620F00C01D
:10071000002C8BB800C013FF3BB967958894E4FCAC
:10072000620F00C000C0002C3BB96795889400C0E0
:10073000002C8BB800C012FF3BB9E5FC620F679537
:10074000889400C000C0002C3BB9E6FC620F00C0DA
:10075000002C8BB800C011FF3BB967958894E7FC6B
:10076000620F00C000C0002C3BB96795889400C0A0
:10077000002C8BB800C010FF3BB9122FE110162FD0
:10078000919400C000C0002C3BB99A0C00C000C07E
:100790008BB800C017FF3BB921816627889400C041
:1007A00000C0002C3BB92111280F00C0002C8BB8D1
:1007B00000C016FF3BB908F42FEF8195889400C064
:1007C00000C0002C3BB970FD620F00C0002C8BB83C
:1007D00000C015FF3BB96795889471FD620F00C09A
:1007E00000C0002C3BB96795889400C0002C8BB8E2
:1007F00000C014FF3BB972FD620F6795889400C07A
:1008000000C0002C3BB973FD620F00C0002C8BB8F8
:1008100000C013FF3BB96795889474FD620F00C058
:1008200000C0002C3BB96795889400C0002C8BB8A1
:1008300000C012FF3BB975FD620F6795889400C038
:1008400000C0002C3BB976FD620F00C0002C8BB8B5
:1008500000C011FF3BB96795889477FD620F00C017
:1008600000C0002C3BB96795889400C0002C8BB861
:1008700000C010FF3BB9122F7111162F8D0D00C053
:1008800000C0002C3BB9119709F086CE4A815B81EC
:1008900020EE31E0DA010E941407DC01CB01F4E024
:1008A000B695A79597958795FA95D1F730E020E012
:1008B000B901EAE94E9F040E611D5E9F600D711D36
:1008C0001124650F711D860F971FA11DB11D893E53
:1008D00043E09407A105B10508F434C0885E934055
:1008E000A109B10942E0B695A795979587954A95D4
:1008F000D1F747E0849F080E211D949F200D311DE4
:100900001124290F311D60912E0170E0860F971F71
:10091000820F931F4091590150915A0160915B01E0
:1009200070915C01292F3327420F531F611D711DE8
:100930004093590150935A0160935B0170935C019D
:1009400080932E0178940E945F00F201768B658B74
:10095000DF91CF911F910F91FF90EF90DF90CF909B
:10096000BF90AF909F908F907F905F904F90089531
:1009700081E090E00895539A08956F927F928F924C
:10098000CF92DF92EF92FF920F931F93CF93DF935B
:10099000CDB7DEB762970FB6F894DEBF0FBECDBFFE
:1009A0006C017A013801822EDC011C962C91CA015F
:1009B00057FF04C088279927841B950B7A83698386
:1009C0009C838B839E838D836D867E868F8621306C
:1009D00049F5CE0101960E941A0283E0888B1A8A9B
:1009E000198AF7FE02C08DEF888BD601ED91FC913C
:1009F0000288F389E02DBE016F5F7F4FC601099524
:100A000062960FB6F894DEBF0FBECDBFDF91CF91D7
:100A10001F910F91FF90EF90DF90CF908F907F907C
:100A20006F9008951C861B861A86198618861F8269
:100A3000D4CFEF92FF920F931F93CF93DF93CDB755
:100A4000DEB762970FB6F894DEBF0FBECDBF7C0154
:100A5000DC011C968C917A8369835C834B835E8373
:100A60004D830D871E872F878130F9F4CE010196C3
:100A70000E941A02188A1A8A198AD701ED91FC91EC
:100A80000288F389E02DBE016F5F7F4FC701099592
:100A900062960FB6F894DEBF0FBECDBFDF91CF9147
:100AA0001F910F91FF90EF9008951C861B861A8668
:100AB000198618861F82DECF90E080E00895FC0141
:100AC00064870895FC01848590E00895FC01858584
:100AD000968508950F931F93CF93DF9300D01F92B5
:100AE000CDB7DEB7AB0119821A821B82DC01ED9112
:100AF000FC910190F081E02D00E010E020E0BE01CB
:100B00006F5F7F4F09950F900F900F90DF91CF91FE
:100B10001F910F9108950E9440071F920F920FB6E8
:100B20000F9211242F933F938F939F93AF93BF9373
:100B30008091590190915A01A0915B01B0915C01A3
:100B40003091540123E0230F2D3758F50196A11D54
:100B5000B11D209354018093590190935A01A093A1
:100B60005B01B0935C018091550190915601A09179
:100B70005701B09158010196A11DB11D80935501F7
:100B800090935601A0935701B0935801BF91AF9134
:100B90009F918F913F912F910F900FBE0F901F90BB
:100BA000189526E8230F0296A11DB11DD2CF789487
:100BB00084B5826084BD84B5816084BD85B5826062
:100BC00085BD85B5816085BD80916E008160809313
:100BD0006E0010928100809181008260809381007C
:100BE000809181008160809381008091800081608C
:100BF000809380008091B10084608093B1008091E7
:100C0000B00081608093B00080917A00846080930E
:100C10007A0080917A00826080937A0080917A00D5
:100C2000816080937A0080917A00806880937A0056
:100C30001092C10080914901811155C01092350177
:100C4000109234018FEF80933801809339018093A3
:100C50003A0180933B0180933C0180933D0181E008
:100C600080933E011092400110923F0180E797E18E
:100C7000909342018093410183E090E0909344017E
:100C80008093430110924601109245011092370162
:100C9000109236018091700190917101892B31F48D
:100CA00082E391E09093710180937001E0913001B3
:100CB000F0913101309721F082E391E095838483B4
:100CC00082E391E0909331018093300110924801CA
:100CD000109247018AE191E09093330180933201B1
:100CE00081E080934901539A81E591E09093350129
:100CF0008093340181E090E09093400180933F0124
:100D00008091660190916701A0916801B09169019D
:100D1000843C29E09207A105B10520F484EC99E018
:100D2000A0E0B0E08093660190936701A093680112
:100D3000B0936901CFEF00E010E0C0935101109231
:100D4000520110925301809163010E9484000E941D
:100D5000E1011092510110925201109253018091C1
:100D600063010E9484000E94E1010115110529F32D
:100D70000E940000E2CFE3E6F1E08FEF8083128271
:100D80001182148613868FEF9FEFDC018783908793
:100D9000A187B2871382148215821682089597FB69
:100DA000072E16F4009407D077FD09D00E9426077D
:100DB00007FC05D03EF4909581959F4F089570955E
:100DC00061957F4F0895A1E21A2EAA1BBB1BFD015E
:100DD0000DC0AA1FBB1FEE1FFF1FA217B307E4071A
:100DE000F50720F0A21BB30BE40BF50B661F771F72
:100DF000881F991F1A9469F7609570958095909552
:100E00009B01AC01BD01CF010895A29FB001B39F2A
:100E1000C001A39F700D811D1124911DB29F700D03
:100E2000811D1124911D08950E940507B7FF0895A3
:100E3000821B930B08950E940507A59F900DB49FF8
:100E4000900DA49F800D911D11240895AA1BBB1B1A
:100E500051E107C0AA1FBB1FA617B70710F0A61BBA
:100E6000B70B881F991F5A95A9F780959095BC01DB
:100E7000CD010895EE0FFF1F0590F491E02D099428
:0E0E800081E090E0F8940C944507F894FFCFC1
:100E8E00000000008B058B058B056A056605B8040E
:100E9E0062055F055C05000000001905BD04BB047A
:0E0EAE006A056605B80462055F051702570263
:00000001FF

View File

@@ -0,0 +1,789 @@
Archive member included to satisfy reference by file (symbol)
.pio\build\uno\lib6ec\libsrc.a(FastLED.cpp.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (_ZN14CLEDController7m_pHeadE)
.pio\build\uno\lib6ec\libsrc.a(crgb.cpp.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (_ZN4CRGB17computeAdjustmentEhRKS_S1_)
.pio\build\uno\lib6ec\libsrc.a(lib8tion.cpp.o)
FastLED.cpp.o (symbol from plugin) (memset8)
.pio\build\uno\libFrameworkArduino.a(abi.cpp.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (__cxa_pure_virtual)
.pio\build\uno\libFrameworkArduino.a(hooks.c.o)
FastLED.cpp.o (symbol from plugin) (yield)
.pio\build\uno\libFrameworkArduino.a(main.cpp.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o (main)
.pio\build\uno\libFrameworkArduino.a(wiring.c.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (timer0_millis)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o (exit)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__divmodhi4)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__udivmodsi4)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__do_copy_data)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__do_clear_bss)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__do_global_ctors)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__umulhisi3)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__usmulhisi3)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__muluhisi3)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o) (__udivmodhi4)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o) (__tablejump2__)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (abort)
Discarded input sections
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.text 0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController4sizeEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController5lanesEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13beginShowLedsEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController11endShowLedsEPv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZNK14CLEDController17getMaxRefreshRateEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE5lanesEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZNK19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE17getMaxRefreshRateEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDControllerC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV14CLEDController
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13getAdjustmentEh
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController9showColorERK4CRGBih
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController9clearLedsEi
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CFastLED4showEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN22WS2812Controller800KhzILh3EL6EOrder66EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV22WS2812Controller800KhzILh3EL6EOrder66EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8NEOPIXELILh3EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV8NEOPIXELILh3EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CFastLED7addLedsI8NEOPIXELLh3EEER14CLEDControllerP4CRGBii
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZGVZN8CFastLED7addLedsI8NEOPIXELLh3EEER14CLEDControllerP4CRGBiiE1c
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZZN8CFastLED7addLedsI8NEOPIXELLh3EEER14CLEDControllerP4CRGBiiE1c
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN7_AVRPINILh3ELh8E18__gen_struct_PORTD17__gen_struct_DDRD17__gen_struct_PINDE9setOutputEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE4initEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CMinWaitILi10EE4waitEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE15showRGBInternalER15PixelControllerILS0_66ELi1ELm4294967295EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CMinWaitILi10EE4markEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE10showPixelsER15PixelControllerILS0_66ELi1ELm4294967295EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE11initOffsetsEi
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE21init_binary_ditheringEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE21init_binary_ditheringEvE1R
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE16enable_ditheringEh
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EEC5EPK4CRGBiRS2_h
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE4showEPK4CRGBiS2_
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EEC5ERK4CRGBiRS2_h
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE9showColorERK4CRGBiS2_
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.data 0x00000000 0x0 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
.text 0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController12clearLedDataEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13beginShowLedsEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController11endShowLedsEPv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13getAdjustmentEh
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController8showLedsEh
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController9showColorERK4CRGBh
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZNK14CLEDController17getMaxRefreshRateEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CFastLED4showEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.text 0x00000000 0x0 crgb.cpp.o (symbol from plugin)
.text 0x00000000 0x0 lib8tion.cpp.o (symbol from plugin)
.text 0x00000000 0x0 abi.cpp.o (symbol from plugin)
.text 0x00000000 0x0 hooks.c.o (symbol from plugin)
.text 0x00000000 0x0 main.cpp.o (symbol from plugin)
.text 0x00000000 0x0 wiring.c.o (symbol from plugin)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
Memory Configuration
Name Origin Length Attributes
text 0x00000000 0x00020000 xr
data 0x00800060 0x0000ffa0 rw !x
eeprom 0x00810000 0x00010000 rw !x
fuse 0x00820000 0x00000003 rw !x
lock 0x00830000 0x00000400 rw !x
signature 0x00840000 0x00000400 rw !x
user_signatures 0x00850000 0x00000400 rw !x
*default* 0x00000000 0xffffffff
Linker script and memory map
Address of section .data set to 0x800100
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
LOAD .pio\build\uno\src\Blink.ino.cpp.o
LOAD C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
START GROUP
LOAD .pio\build\uno\liba19\libSoftwareSerial.a
LOAD .pio\build\uno\lib8b0\libSPI.a
LOAD .pio\build\uno\lib6ec\libsrc.a
LOAD .pio\build\uno\libFrameworkArduinoVariant.a
LOAD .pio\build\uno\libFrameworkArduino.a
END GROUP
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libm.a
START GROUP
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libm.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libatmega328p.a
END GROUP
0x00020000 __TEXT_REGION_LENGTH__ = DEFINED (__TEXT_REGION_LENGTH__)?__TEXT_REGION_LENGTH__:0x20000
0x0000ffa0 __DATA_REGION_LENGTH__ = DEFINED (__DATA_REGION_LENGTH__)?__DATA_REGION_LENGTH__:0xffa0
0x00010000 __EEPROM_REGION_LENGTH__ = DEFINED (__EEPROM_REGION_LENGTH__)?__EEPROM_REGION_LENGTH__:0x10000
[0x00000003] __FUSE_REGION_LENGTH__ = DEFINED (__FUSE_REGION_LENGTH__)?__FUSE_REGION_LENGTH__:0x400
0x00000400 __LOCK_REGION_LENGTH__ = DEFINED (__LOCK_REGION_LENGTH__)?__LOCK_REGION_LENGTH__:0x400
0x00000400 __SIGNATURE_REGION_LENGTH__ = DEFINED (__SIGNATURE_REGION_LENGTH__)?__SIGNATURE_REGION_LENGTH__:0x400
0x00000400 __USER_SIGNATURE_REGION_LENGTH__ = DEFINED (__USER_SIGNATURE_REGION_LENGTH__)?__USER_SIGNATURE_REGION_LENGTH__:0x400
.hash
*(.hash)
.dynsym
*(.dynsym)
.dynstr
*(.dynstr)
.gnu.version
*(.gnu.version)
.gnu.version_d
*(.gnu.version_d)
.gnu.version_r
*(.gnu.version_r)
.rel.init
*(.rel.init)
.rela.init
*(.rela.init)
.rel.text
*(.rel.text)
*(.rel.text.*)
*(.rel.gnu.linkonce.t*)
.rela.text
*(.rela.text)
*(.rela.text.*)
*(.rela.gnu.linkonce.t*)
.rel.fini
*(.rel.fini)
.rela.fini
*(.rela.fini)
.rel.rodata
*(.rel.rodata)
*(.rel.rodata.*)
*(.rel.gnu.linkonce.r*)
.rela.rodata
*(.rela.rodata)
*(.rela.rodata.*)
*(.rela.gnu.linkonce.r*)
.rel.data
*(.rel.data)
*(.rel.data.*)
*(.rel.gnu.linkonce.d*)
.rela.data
*(.rela.data)
*(.rela.data.*)
*(.rela.gnu.linkonce.d*)
.rel.ctors
*(.rel.ctors)
.rela.ctors
*(.rela.ctors)
.rel.dtors
*(.rel.dtors)
.rela.dtors
*(.rela.dtors)
.rel.got
*(.rel.got)
.rela.got
*(.rela.got)
.rel.bss
*(.rel.bss)
.rela.bss
*(.rela.bss)
.rel.plt
*(.rel.plt)
.rela.plt
*(.rela.plt)
.text 0x00000000 0xe8e
*(.vectors)
.vectors 0x00000000 0x68 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
0x00000000 __vector_default
0x00000000 __vectors
*(.vectors)
*(.progmem.gcc*)
0x00000068 . = ALIGN (0x2)
0x00000068 __trampolines_start = .
*(.trampolines)
.trampolines 0x00000068 0x0 linker stubs
*(.trampolines*)
0x00000068 __trampolines_end = .
*libprintf_flt.a:*(.progmem.data)
*libc.a:*(.progmem.data)
*(.progmem*)
0x00000068 . = ALIGN (0x2)
*(.jumptables)
*(.jumptables*)
*(.lowtext)
*(.lowtext*)
0x00000068 __ctors_start = .
*(.ctors)
.ctors 0x00000068 0x2 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x0000006a __ctors_end = .
0x0000006a __dtors_start = .
*(.dtors)
0x0000006a __dtors_end = .
SORT(*)(.ctors)
SORT(*)(.dtors)
*(.init0)
.init0 0x0000006a 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
0x0000006a __init
*(.init0)
*(.init1)
*(.init1)
*(.init2)
.init2 0x0000006a 0xc c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
*(.init2)
*(.init3)
*(.init3)
*(.init4)
.init4 0x00000076 0x16 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
0x00000076 __do_copy_data
.init4 0x0000008c 0x10 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
0x0000008c __do_clear_bss
*(.init4)
*(.init5)
*(.init5)
*(.init6)
.init6 0x0000009c 0x16 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
0x0000009c __do_global_ctors
*(.init6)
*(.init7)
*(.init7)
*(.init8)
*(.init8)
*(.init9)
.init9 0x000000b2 0x8 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
*(.init9)
*(.text)
.text 0x000000ba 0x4 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
0x000000ba __vector_22
0x000000ba __vector_1
0x000000ba __vector_24
0x000000ba __vector_12
0x000000ba __bad_interrupt
0x000000ba __vector_6
0x000000ba __vector_3
0x000000ba __vector_23
0x000000ba __vector_25
0x000000ba __vector_11
0x000000ba __vector_13
0x000000ba __vector_17
0x000000ba __vector_19
0x000000ba __vector_7
0x000000ba __vector_5
0x000000ba __vector_4
0x000000ba __vector_9
0x000000ba __vector_2
0x000000ba __vector_21
0x000000ba __vector_15
0x000000ba __vector_8
0x000000ba __vector_14
0x000000ba __vector_10
0x000000ba __vector_18
0x000000ba __vector_20
.text 0x000000be 0xaf0 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x00000b1a __vector_16
0x00000bae . = ALIGN (0x2)
*(.text.*)
.text.startup 0x00000bae 0x1f0 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x00000bae main
.text.libgcc.div
0x00000d9e 0x28 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
0x00000d9e _div
0x00000d9e __divmodhi4
.text.libgcc.div
0x00000dc6 0x44 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
0x00000dc6 __udivmodsi4
.text.libgcc.mul
0x00000e0a 0x1e c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
0x00000e0a __umulhisi3
.text.libgcc.mul
0x00000e28 0xe c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
0x00000e28 __usmulhisi3
0x00000e2c __usmulhisi3_tail
.text.libgcc.mul
0x00000e36 0x16 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
0x00000e36 __muluhisi3
.text.libgcc.div
0x00000e4c 0x28 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
0x00000e4c __udivmodhi4
.text.libgcc 0x00000e74 0xc c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
0x00000e74 __tablejump2__
.text.avr-libc
0x00000e80 0xa c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
0x00000e80 abort
0x00000e8a . = ALIGN (0x2)
*(.fini9)
.fini9 0x00000e8a 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
0x00000e8a _exit
0x00000e8a exit
*(.fini9)
*(.fini8)
*(.fini8)
*(.fini7)
*(.fini7)
*(.fini6)
*(.fini6)
*(.fini5)
*(.fini5)
*(.fini4)
*(.fini4)
*(.fini3)
*(.fini3)
*(.fini2)
*(.fini2)
*(.fini1)
*(.fini1)
*(.fini0)
.fini0 0x00000e8a 0x4 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
*(.fini0)
0x00000e8e _etext = .
.data 0x00800100 0x2e load address 0x00000e8e
0x00800100 PROVIDE (__data_start, .)
*(.data)
*(.data*)
*(.gnu.linkonce.d*)
*(.rodata)
.rodata 0x00800100 0x2e C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
*(.rodata*)
*(.gnu.linkonce.r*)
0x0080012e . = ALIGN (0x2)
0x0080012e _edata = .
0x0080012e PROVIDE (__data_end, .)
.bss 0x0080012e 0x48
0x0080012e PROVIDE (__bss_start, .)
*(.bss)
.bss 0x0080012e 0x48 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
*(.bss*)
*(COMMON)
0x00800176 PROVIDE (__bss_end, .)
0x00000e8e __data_load_start = LOADADDR (.data)
0x00000ebc __data_load_end = (__data_load_start + SIZEOF (.data))
.noinit 0x00800176 0x0
[!provide] PROVIDE (__noinit_start, .)
*(.noinit*)
[!provide] PROVIDE (__noinit_end, .)
0x00800176 _end = .
[!provide] PROVIDE (__heap_start, .)
.eeprom 0x00810000 0x0
*(.eeprom*)
0x00810000 __eeprom_end = .
.fuse
*(.fuse)
*(.lfuse)
*(.hfuse)
*(.efuse)
.lock
*(.lock*)
.signature
*(.signature*)
.user_signatures
*(.user_signatures*)
.stab
*(.stab)
.stabstr
*(.stabstr)
.stab.excl
*(.stab.excl)
.stab.exclstr
*(.stab.exclstr)
.stab.index
*(.stab.index)
.stab.indexstr
*(.stab.indexstr)
.comment 0x00000000 0x11
*(.comment)
.comment 0x00000000 0x11 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x12 (size before relaxing)
.note.gnu.avr.deviceinfo
0x00000000 0x40
.note.gnu.avr.deviceinfo
0x00000000 0x40 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.note.gnu.build-id
*(.note.gnu.build-id)
.debug
*(.debug)
.line
*(.line)
.debug_srcinfo
*(.debug_srcinfo)
.debug_sfnames
*(.debug_sfnames)
.debug_aranges 0x00000000 0x160
*(.debug_aranges)
.debug_aranges
0x00000000 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_aranges
0x00000020 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_aranges
0x00000040 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_aranges
0x00000060 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_aranges
0x00000080 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_aranges
0x000000a0 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_aranges
0x000000c0 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_aranges
0x000000e0 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_aranges
0x00000100 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_aranges
0x00000120 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_aranges
0x00000140 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_pubnames
*(.debug_pubnames)
.debug_info 0x00000000 0xdfd
*(.debug_info .gnu.linkonce.wi.*)
.debug_info 0x00000000 0x5f4 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_info 0x000005f4 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_info 0x000006af 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_info 0x0000076a 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_info 0x00000825 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_info 0x000008e0 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_info 0x0000099b 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_info 0x00000a56 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_info 0x00000b11 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_info 0x00000bcc 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_info 0x00000c87 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_info 0x00000d42 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_abbrev 0x00000000 0x67e
*(.debug_abbrev)
.debug_abbrev 0x00000000 0x5a2 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_abbrev 0x000005a2 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_abbrev 0x000005b6 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_abbrev 0x000005ca 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_abbrev 0x000005de 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_abbrev 0x000005f2 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_abbrev 0x00000606 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_abbrev 0x0000061a 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_abbrev 0x0000062e 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_abbrev 0x00000642 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_abbrev 0x00000656 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_abbrev 0x0000066a 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_line 0x00000000 0x71a
*(.debug_line .debug_line.* .debug_line_end)
.debug_line 0x00000000 0x1a c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_line 0x0000001a 0x62 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_line 0x0000007c 0xc8 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_line 0x00000144 0x122 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_line 0x00000266 0x98 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_line 0x000002fe 0x86 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_line 0x00000384 0x92 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_line 0x00000416 0xb0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_line 0x000004c6 0x7a c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_line 0x00000540 0x92 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_line 0x000005d2 0xce c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_line 0x000006a0 0x7a c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_frame
*(.debug_frame)
.debug_str 0x00000000 0x208
*(.debug_str)
.debug_str 0x00000000 0x208 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_loc
*(.debug_loc)
.debug_macinfo
*(.debug_macinfo)
.debug_weaknames
*(.debug_weaknames)
.debug_funcnames
*(.debug_funcnames)
.debug_typenames
*(.debug_typenames)
.debug_varnames
*(.debug_varnames)
.debug_pubtypes
*(.debug_pubtypes)
.debug_ranges
*(.debug_ranges)
.debug_macro
*(.debug_macro)
OUTPUT(.pio\build\uno\firmware.elf elf32-avr)
LOAD linker stubs