Revert "[nrf fromlist] twister: Account for board & SoC extensions"
This reverts commit b2af352.

To be replaced with a new version.

Signed-off-by: Grzegorz Swiderski <[email protected]>
57300 committed Feb 26, 2025
1 parent 1bf500b commit 01a4ae2
Showing 2 changed files with 73 additions and 89 deletions.
38 changes: 26 additions & 12 deletions scripts/pylib/twister/twisterlib/platform.py
@@ -83,6 +83,7 @@ def __init__(self):
self.filter_data = dict()
self.uart = ""
self.resc = ""
self.qualifier = None

def load(self, board, target, aliases, data):
"""Load the platform data from the board data and target data
@@ -94,16 +95,26 @@ def load(self, board, target, aliases, data):
self.name = target
self.aliases = aliases

# Get data for various targets and use the main board data as a
# default. Individual variant information will replace the default data
# provided in the main twister configuration for this board.
variants = data.get("variants", {})
variant_data = {}
for alias in aliases:
variant_data = variants.get(alias, {})
if variant_data:
break

self.normalized_name = self.name.replace("/", "_")
self.sysbuild = data.get("sysbuild", self.sysbuild)
self.twister = data.get("twister", self.twister)
self.sysbuild = variant_data.get("sysbuild", data.get("sysbuild", self.sysbuild))
self.twister = variant_data.get("twister", data.get("twister", self.twister))

# if no RAM size is specified by the board, take a default of 128K
self.ram = data.get("ram", self.ram)
self.ram = variant_data.get("ram", data.get("ram", self.ram))
# if no flash size is specified by the board, take a default of 512K
self.flash = data.get("flash", self.flash)
self.flash = variant_data.get("flash", data.get("flash", self.flash))

testing = data.get("testing", {})
testing = variant_data.get("testing", data.get("testing", {}))
self.timeout_multiplier = testing.get("timeout_multiplier", self.timeout_multiplier)
self.ignore_tags = testing.get("ignore_tags", self.ignore_tags)
self.only_tags = testing.get("only_tags", self.only_tags)
@@ -113,23 +124,26 @@ def load(self, board, target, aliases, data):
self.uart = renode.get("uart", "")
self.resc = renode.get("resc", "")
self.supported = set()
for supp_feature in data.get("supported", []):
for supp_feature in variant_data.get("supported", data.get("supported", [])):
for item in supp_feature.split(":"):
self.supported.add(item)

self.arch = data.get('arch', self.arch)
self.arch = variant_data.get('arch', data.get('arch', self.arch))
self.vendor = board.vendor
self.tier = data.get("tier", self.tier)
self.type = data.get('type', self.type)
self.tier = variant_data.get("tier", data.get("tier", self.tier))
self.type = variant_data.get('type', data.get('type', self.type))

self.simulators = [
Simulator(data) for data in data.get('simulation', self.simulators)
Simulator(data) for data in variant_data.get(
'simulation',
data.get('simulation', self.simulators)
)
]
default_sim = self.simulator_by_name(None)
if default_sim:
self.simulation = default_sim.name

self.supported_toolchains = data.get("toolchain", [])
self.supported_toolchains = variant_data.get("toolchain", data.get("toolchain", []))
if self.supported_toolchains is None:
self.supported_toolchains = []

@@ -156,7 +170,7 @@ def load(self, board, target, aliases, data):
if toolchain not in self.supported_toolchains:
self.supported_toolchains.append(toolchain)

self.env = data.get("env", [])
self.env = variant_data.get("env", data.get("env", []))
self.env_satisfied = True
for env in self.env:
if not os.environ.get(env, None):
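
For context on the behaviour restored above in Platform.load(): the top-level keys of a board's twister.yaml act as defaults, and an entry under variants: whose key matches one of the target's aliases overrides them field by field. A minimal sketch of that overlay in plain Python, with made-up board data and target names (not taken from any real board):

# Sketch of the variant-overlay lookup restored in Platform.load().
# The dictionary below stands in for parsed twister.yaml data; names are illustrative.
data = {
    "ram": 128,
    "flash": 512,
    "supported": ["gpio", "i2c"],
    "variants": {
        "exampleboard/examplesoc/cpusmall": {"ram": 32, "supported": ["gpio"]},
    },
}
aliases = ["exampleboard/examplesoc/cpusmall", "exampleboard"]

# The first alias with an entry under "variants" wins; otherwise variant_data stays empty.
variants = data.get("variants", {})
variant_data = {}
for alias in aliases:
    variant_data = variants.get(alias, {})
    if variant_data:
        break

# Each field prefers the variant value, then the board-level value, then a fallback default.
ram = variant_data.get("ram", data.get("ram", 128))                   # -> 32
flash = variant_data.get("flash", data.get("flash", 512))             # -> 512
supported = variant_data.get("supported", data.get("supported", []))  # -> ["gpio"]
print(ram, flash, supported)
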
124 changes: 47 additions & 77 deletions scripts/pylib/twister/twisterlib/testplan.py
@@ -7,6 +7,7 @@
# SPDX-License-Identifier: Apache-2.0
import collections
import copy
import glob
import itertools
import json
import logging
@@ -436,6 +437,12 @@ def info(what):
sys.stdout.write(what + "\n")
sys.stdout.flush()

def find_twister_data(self, board_data_list, board_aliases):
"""Find the twister data for a board in the list of board data based on the aliases"""
for board_data in board_data_list:
if board_data.get('identifier') in board_aliases:
return board_data

def add_configurations(self):
# Create a list of board roots as defined by the build system in general
# Note, internally in twister a board root includes the `boards` folder
@@ -445,42 +452,52 @@ def add_configurations(self):
board_roots=board_roots, board=None, board_dir=None)

known_boards = list_boards.find_v2_boards(lb_args)
bdirs = {}
platform_config = self.test_config.get('platforms', {})

alias2target = {}
target2board = {}
target2data = {}
dir2data = {}
legacy_files = []

# helper function to initialize and add platforms
def init_and_add_platforms(data, board, target, aliases, src_dir):
def init_and_add_platforms(data, board, target, qualifier, aliases):
platform = Platform()
if not new_config_found:
data = self.find_twister_data(bdirs[board.dir], aliases)
if not data:
return
platform.load(board, target, aliases, data)
platform.qualifier = qualifier
if platform.name in [p.name for p in self.platforms]:
logger.error(f"Duplicate platform {platform.name} in {src_dir}")
logger.error(f"Duplicate platform {platform.name} in {board.dir}")
raise Exception(f"Duplicate platform identifier {platform.name} found")
if not platform.twister:
return
self.platforms.append(platform)

for board in known_boards.values():
for board_dir in board.directories:
if board_dir in dir2data:
# don't load the same board data twice
continue
legacy_files.extend(
file for file in board_dir.glob("*.yaml") if file.name != "twister.yaml"
)
data = None
file = board_dir / "twister.yaml"
if file.is_file():
new_config_found = False
# don't load the same board data twice
if not bdirs.get(board.dir):
datas = []
for file in glob.glob(os.path.join(board.dir, "*.yaml")):
if os.path.basename(file) == "twister.yaml":
continue
try:
data = scl.yaml_load_verify(file, Platform.platform_schema)
scp = TwisterConfigParser(file, Platform.platform_schema)
sdata = scp.load()
datas.append(sdata)
except Exception as e:
logger.error(f"Error loading {file}: {e!r}")
self.load_errors += 1
dir2data[board_dir] = data
continue
bdirs[board.dir] = datas
data = {}
if os.path.exists(board.dir / 'twister.yaml'):
try:
scp = TwisterConfigParser(board.dir / 'twister.yaml', Platform.platform_schema)
data = scp.load()
except Exception as e:
logger.error(f"Error loading {board.dir / 'twister.yaml'}: {e!r}")
self.load_errors += 1
continue
new_config_found = True



@@ -490,74 +507,27 @@ def init_and_add_platforms(data, board, target, aliases, src_dir):
for rev in board.revisions:
if rev.name:
target = f"{board.name}@{rev.name}/{qual}"
alias2target[target] = target
aliases = [target]
if rev.name == board.revision_default:
alias2target[f"{board.name}/{qual}"] = target
aliases.append(f"{board.name}/{qual}")
if '/' not in qual and len(board.socs) == 1:
if rev.name == board.revision_default:
alias2target[f"{board.name}"] = target
alias2target[f"{board.name}@{rev.name}"] = target
aliases.append(f"{board.name}")
aliases.append(f"{board.name}@{rev.name}")
else:
target = f"{board.name}/{qual}"
alias2target[target] = target
aliases = [target]
if '/' not in qual and len(board.socs) == 1 \
and rev.name == board.revision_default:
alias2target[f"{board.name}"] = target
aliases.append(f"{board.name}")

target2board[target] = board
init_and_add_platforms(data, board, target, qual, aliases)
else:
target = f"{board.name}/{qual}"
alias2target[target] = target
aliases = [target]
if '/' not in qual and len(board.socs) == 1:
alias2target[board.name] = target
target2board[target] = board

for board_dir, data in dir2data.items():
if data is None:
continue
# Separate the default and variant information in the loaded board data.
# The default (top-level) data can be shared by multiple board targets;
# it will be overlaid by the variant data (if present) for each target.
variant_data = data.pop("variants", {})
for variant in variant_data:
target = alias2target.get(variant)
if target is None:
continue
if target in target2data:
logger.error(f"Duplicate platform {target} in {board_dir}")
raise Exception(f"Duplicate platform identifier {target} found")
target2data[target] = variant_data[variant]

# note: this inverse mapping will only be used for loading legacy files
target2aliases = {}

for target, aliases in itertools.groupby(alias2target, alias2target.get):
aliases = list(aliases)
board = target2board[target]

# Default board data always comes from the primary 'board.dir'.
# Other 'board.directories' can only supply variant data.
data = dir2data[board.dir]
if data is not None:
if target in target2data:
data = copy.deepcopy(data)
data.update(target2data[target])
init_and_add_platforms(data, board, target, aliases, board.dir)

target2aliases[target] = aliases

for file in legacy_files:
try:
data = scl.yaml_load_verify(file, Platform.platform_schema)
except Exception as e:
logger.error(f"Error loading {file}: {e!r}")
self.load_errors += 1
continue
target = alias2target.get(data.get("identifier"))
if target is not None:
init_and_add_platforms(
data, target2board[target], target, target2aliases[target], file.parent
)
aliases.append(board.name)
init_and_add_platforms(data, board, target, qual, aliases)

for platform in self.platforms:
if not platform_config.get('override_default_platforms', False):
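
To recap the lookup order restored above in add_configurations(): every *.yaml other than twister.yaml in a board directory is loaded once and cached per directory, and when no twister.yaml exists for that board, find_twister_data() selects the document whose identifier matches one of the target's aliases. A minimal sketch of that matching step, with made-up identifiers:

# Sketch of the restored find_twister_data() lookup; identifiers are illustrative only.
def find_twister_data(board_data_list, board_aliases):
    """Return the first loaded YAML document whose 'identifier' matches a target alias."""
    for board_data in board_data_list:
        if board_data.get("identifier") in board_aliases:
            return board_data
    return None  # the original returns None implicitly when nothing matches

loaded_yaml = [
    {"identifier": "exampleboard/socone", "ram": 64},
    {"identifier": "exampleboard/soctwo", "ram": 256},
]
aliases = ["exampleboard/soctwo", "exampleboard"]
assert find_twister_data(loaded_yaml, aliases)["ram"] == 256
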
