Skip to content

Commit 6cd5501

Browse files
Merge branch 'development' into automated-plugin-docs-update
2 parents 32b0777 + 00e30b7 commit 6cd5501

9 files changed

Lines changed: 878 additions & 88 deletions

File tree

nodescraper/connection/redfish/redfish_connection.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -183,6 +183,18 @@ def run_get(self, path: Union[str, RedfishPath]) -> RedfishGetResult:
183183
status_code=None,
184184
)
185185

186+
def copy(self) -> "RedfishConnection":
    """Build an independent clone of this connection.

    The clone carries the same configuration but owns its own HTTP
    session, so it can be used safely from another worker thread.
    """
    settings = {
        "base_url": self.base_url,
        "username": self.username,
        "password": self.password,
        "timeout": self.timeout,
        "use_session_auth": self.use_session_auth,
        "verify_ssl": self.verify_ssl,
        "api_root": self.api_root,
    }
    return RedfishConnection(**settings)
197+
186198
def get_service_root(self) -> dict[str, Any]:
187199
"""GET service root (e.g. /redfish/v1/)."""
188200
return self.get(RedfishPath(self.api_root))

nodescraper/models/event.py

Lines changed: 22 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,12 +28,28 @@
2828
import re
2929
import uuid
3030
from enum import Enum
31-
from typing import Optional, Union
31+
from typing import Any, Optional, Union
3232

3333
from pydantic import BaseModel, Field, field_serializer, field_validator
3434

3535
from nodescraper.enums import EventPriority
3636

37+
38+
def _data_to_json_safe(obj: Any) -> Any:
39+
"""Recursively convert event data to JSON-serializable form (e.g. exceptions -> str)."""
40+
if isinstance(obj, BaseException):
41+
return str(obj)
42+
if isinstance(obj, dict):
43+
return {k: _data_to_json_safe(v) for k, v in obj.items()}
44+
if isinstance(obj, (list, tuple)):
45+
return [_data_to_json_safe(v) for v in obj]
46+
if isinstance(obj, (str, int, float, bool, type(None))):
47+
return obj
48+
if isinstance(obj, (Enum, datetime.datetime, uuid.UUID)):
49+
return str(obj)
50+
return str(obj)
51+
52+
3753
LOG_LEVEL_MAP = {
3854
logging.INFO: EventPriority.INFO,
3955
logging.WARNING: EventPriority.WARNING,
@@ -127,6 +143,11 @@ def serialize_priority(self, priority: EventPriority, _info) -> str:
127143
"""
128144
return priority.name
129145

146+
@field_serializer("data")
def serialize_data(self, data: dict, _info) -> dict:
    """Make the event data payload JSON-safe during serialization.

    Nested values that json cannot handle (e.g. exceptions) are
    converted to strings via ``_data_to_json_safe``.
    """
    safe_data = _data_to_json_safe(data)
    return safe_data
150+
130151
@field_validator("data")
131152
@classmethod
132153
def validate_data(cls, data: dict) -> dict:

nodescraper/plugins/ooband/redfish_endpoint/collector_args.py

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,34 @@
2727

2828

2929
class RedfishEndpointCollectorArgs(BaseModel):
30-
"""Collection args: uris to GET."""
30+
"""Collection args: uris to GET (or discover from tree), optional concurrency and tree discovery."""
3131

32-
uris: list[str] = Field(default_factory=list)
32+
uris: list[str] = Field(
33+
default_factory=list,
34+
description="Redfish URIs to GET. Ignored when discover_tree is True.",
35+
)
36+
discover_tree: bool = Field(
37+
default=False,
38+
description="If True, discover endpoints from the BMC Redfish tree (service root and links) instead of using uris.",
39+
)
40+
tree_max_depth: int = Field(
41+
default=2,
42+
ge=1,
43+
le=10,
44+
description="When discover_tree is True: max traversal depth (1=service root only, 2=root + collections, 3=+ members).",
45+
)
46+
tree_max_endpoints: int = Field(
47+
default=0,
48+
ge=0,
49+
le=10_000,
50+
description="When discover_tree is True: max endpoints to discover (0=no limit).",
51+
)
52+
max_workers: int = Field(
53+
default=1,
54+
ge=1,
55+
le=32,
56+
description="Max concurrent GETs (1=sequential). Use >1 for async endpoint fetches.",
57+
)
3358

3459
@field_validator("uris", mode="before")
3560
@classmethod

nodescraper/plugins/ooband/redfish_endpoint/endpoint_analyzer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -129,10 +129,10 @@ def analyze_data(
129129
)
130130

131131
if failed:
132-
details = "; ".join(f"{f['uri']} {f['path']}: {f['reason']}" for f in failed)
132+
description = f"Redfish endpoint checks failed: {len(failed)} failure(s)"
133133
self._log_event(
134134
category=EventCategory.TELEMETRY,
135-
description=f"Redfish endpoint checks failed: {len(failed)} failure(s) — {details}",
135+
description=description,
136136
data={"failures": failed},
137137
priority=EventPriority.WARNING,
138138
console_log=True,

nodescraper/plugins/ooband/redfish_endpoint/endpoint_collector.py

Lines changed: 187 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -23,15 +23,99 @@
2323
# SOFTWARE.
2424
#
2525
###############################################################################
26-
from typing import Optional
26+
from collections import deque
27+
from concurrent.futures import ThreadPoolExecutor, as_completed
28+
from typing import Any, Optional
29+
from urllib.parse import urlparse
2730

2831
from nodescraper.base import RedfishDataCollector
32+
from nodescraper.connection.redfish import RedfishConnection, RedfishGetResult
2933
from nodescraper.enums import EventCategory, EventPriority, ExecutionStatus
3034
from nodescraper.models import TaskResult
3135

3236
from .collector_args import RedfishEndpointCollectorArgs
3337
from .endpoint_data import RedfishEndpointDataModel
3438

39+
ODATA_ID = "@odata.id"
40+
MEMBERS = "Members"
41+
42+
43+
def _normalize_path(odata_id: str, api_root: str) -> str:
44+
"""Convert @odata.id value (URL or path) to a normalized path under api_root."""
45+
if not odata_id or not isinstance(odata_id, str):
46+
return ""
47+
s = odata_id.strip()
48+
if s.startswith(("http://", "https://")):
49+
parsed = urlparse(s)
50+
s = parsed.path or "/"
51+
if not s.startswith("/"):
52+
s = "/" + s
53+
s = s.rstrip("/") or "/"
54+
api_root_norm = api_root.strip("/")
55+
if api_root_norm and not s.startswith("/" + api_root_norm):
56+
return ""
57+
return s
58+
59+
60+
def _extract_odata_ids(obj: Any) -> list[str]:
61+
"""Recursively extract all @odata.id values from a Redfish JSON body."""
62+
out: list[str] = []
63+
if isinstance(obj, dict):
64+
if ODATA_ID in obj and isinstance(obj[ODATA_ID], str):
65+
out.append(obj[ODATA_ID])
66+
for k, v in obj.items():
67+
if k == MEMBERS and isinstance(v, list):
68+
for item in v:
69+
if (
70+
isinstance(item, dict)
71+
and ODATA_ID in item
72+
and isinstance(item[ODATA_ID], str)
73+
):
74+
out.append(item[ODATA_ID])
75+
elif isinstance(v, dict):
76+
out.extend(_extract_odata_ids(v))
77+
elif isinstance(v, list):
78+
for item in v:
79+
if isinstance(item, dict):
80+
out.extend(_extract_odata_ids(item))
81+
return out
82+
83+
84+
def _discover_tree(
    connection: RedfishConnection,
    api_root: str,
    max_depth: int,
    max_endpoints: int,
) -> tuple[list[str], dict[str, dict], list[RedfishGetResult]]:
    """Breadth-first walk of the Redfish resource tree from the service root.

    Depth semantics match collection_args.tree_max_depth: the root is depth 0
    and child links are enqueued only while ``depth + 1 < max_depth`` (so
    1 = service root only, 2 = root plus one link level).

    Returns (sorted visited paths, path -> JSON body for successful GETs,
    all raw GET results).
    """
    start = _normalize_path(api_root, api_root) or ("/" + api_root.strip("/"))
    visited: set[str] = set()
    queue: deque[tuple[str, int]] = deque([(start, 0)])
    bodies: dict[str, dict] = {}
    raw_results: list[RedfishGetResult] = []
    while queue:
        # Stop once the endpoint budget is exhausted (0 means unlimited).
        if max_endpoints and len(visited) >= max_endpoints:
            break
        path, depth = queue.popleft()
        if path in visited or depth > max_depth:
            continue
        visited.add(path)
        result = connection.run_get(path)
        raw_results.append(result)
        if not (result.success and result.data is not None):
            continue
        bodies[path] = result.data
        next_depth = depth + 1
        if next_depth >= max_depth:
            # Children would exceed the allowed depth; don't enqueue them.
            continue
        for odata_id in _extract_odata_ids(result.data):
            child = _normalize_path(odata_id, api_root)
            if child and child not in visited:
                queue.append((child, next_depth))
    return sorted(visited), bodies, raw_results
118+
35119

36120
def _uris_from_args(args: Optional[RedfishEndpointCollectorArgs]) -> list[str]:
37121
"""Return list of URIs from collector args.uris."""
@@ -40,6 +124,18 @@ def _uris_from_args(args: Optional[RedfishEndpointCollectorArgs]) -> list[str]:
40124
return list(args.uris) if args.uris else []
41125

42126

127+
def _discover_tree_enabled(args: Optional[RedfishEndpointCollectorArgs]) -> bool:
    """Whether tree discovery was explicitly requested.

    Compares with ``is True`` so truthy junk (non-empty strings, etc.)
    never silently enables discovery.
    """
    return args is not None and getattr(args, "discover_tree", False) is True
132+
133+
134+
def _fetch_one(connection_copy: RedfishConnection, path: str) -> RedfishGetResult:
    """GET ``path`` on the given per-thread connection copy.

    Each worker thread receives its own connection so HTTP sessions are
    never shared across threads.
    """
    return connection_copy.run_get(path)
137+
138+
43139
class RedfishEndpointCollector(
44140
RedfishDataCollector[RedfishEndpointDataModel, RedfishEndpointCollectorArgs]
45141
):
@@ -50,30 +146,106 @@ class RedfishEndpointCollector(
50146
def collect_data(
51147
self, args: Optional[RedfishEndpointCollectorArgs] = None
52148
) -> tuple[TaskResult, Optional[RedfishEndpointDataModel]]:
53-
"""GET each configured Redfish URI via _run_redfish_get() and store the JSON response."""
149+
"""Collect via tree discovery, or via explicit URIs, or skip if neither is configured."""
54150
uris = _uris_from_args(args)
151+
use_tree = _discover_tree_enabled(args)
152+
153+
# 1) Tree discovery: only when discover_tree is explicitly true
154+
if use_tree:
155+
api_root = getattr(self.connection, "api_root", "redfish/v1")
156+
max_depth = getattr(args, "tree_max_depth", 2) if args else 2
157+
max_endpoints = (getattr(args, "tree_max_endpoints", 0) or 0) if args else 0
158+
_paths, responses, results = _discover_tree(
159+
self.connection,
160+
api_root,
161+
max_depth=max_depth,
162+
max_endpoints=max_endpoints,
163+
)
164+
for res in results:
165+
self.result.artifacts.append(res)
166+
if not res.success and res.error:
167+
self._log_event(
168+
category=EventCategory.RUNTIME,
169+
description=f"Redfish GET failed during tree discovery for {res.path}: {res.error}",
170+
priority=EventPriority.WARNING,
171+
console_log=True,
172+
)
173+
if not responses:
174+
self.result.message = "No Redfish endpoints discovered from tree"
175+
self.result.status = ExecutionStatus.ERROR
176+
return self.result, None
177+
data = RedfishEndpointDataModel(responses=responses)
178+
self.result.message = f"Collected {len(responses)} Redfish endpoint(s) from tree"
179+
self.result.status = ExecutionStatus.OK
180+
return self.result, data
181+
182+
# 2) URI list: when discover_tree is false/absent and uris are provided
55183
if not uris:
56-
self.result.message = "No Redfish URIs configured"
184+
self.result.message = (
185+
"No collection mode configured: set collection_args.discover_tree to true "
186+
"or provide collection_args.uris"
187+
)
57188
self.result.status = ExecutionStatus.NOT_RAN
58189
return self.result, None
59190

60-
responses: dict[str, dict] = {}
191+
paths = []
61192
for uri in uris:
62-
path = uri
193+
path = uri.strip() if uri else ""
63194
if not path:
64195
continue
65196
if not path.startswith("/"):
66197
path = "/" + path
67-
res = self._run_redfish_get(path, log_artifact=True)
68-
if res.success and res.data is not None:
69-
responses[res.path] = res.data
70-
else:
71-
self._log_event(
72-
category=EventCategory.RUNTIME,
73-
description=f"Redfish GET failed for {path}: {res.error or 'unknown'}",
74-
priority=EventPriority.WARNING,
75-
console_log=True,
76-
)
198+
paths.append(path)
199+
200+
max_workers = getattr(args, "max_workers", 1) if args else 1
201+
max_workers = min(max_workers, len(paths))
202+
203+
if max_workers <= 1:
204+
# Sequential
205+
responses = {}
206+
for path in paths:
207+
res = self._run_redfish_get(path, log_artifact=True)
208+
if res.success and res.data is not None:
209+
responses[res.path] = res.data
210+
else:
211+
self._log_event(
212+
category=EventCategory.RUNTIME,
213+
description=f"Redfish GET failed for {path}: {res.error or 'unknown'}",
214+
priority=EventPriority.WARNING,
215+
console_log=True,
216+
)
217+
else:
218+
# Concurrent: one connection copy per worker, collect results in main thread
219+
responses = {}
220+
results = []
221+
with ThreadPoolExecutor(max_workers=max_workers) as executor:
222+
futures = {}
223+
for path in paths:
224+
conn = self.connection.copy()
225+
futures[executor.submit(_fetch_one, conn, path)] = path
226+
for future in as_completed(futures):
227+
path = futures[future]
228+
try:
229+
res = future.result()
230+
results.append(res)
231+
if res.success and res.data is not None:
232+
responses[res.path] = res.data
233+
else:
234+
self._log_event(
235+
category=EventCategory.RUNTIME,
236+
description=f"Redfish GET failed for {path}: {res.error or 'unknown'}",
237+
priority=EventPriority.WARNING,
238+
console_log=True,
239+
)
240+
except Exception as e:
241+
self._log_event(
242+
category=EventCategory.RUNTIME,
243+
description=f"Redfish GET failed for {path}: {e!s}",
244+
priority=EventPriority.WARNING,
245+
console_log=True,
246+
)
247+
for res in results:
248+
self.result.artifacts.append(res)
77249

78250
if not responses:
79251
self.result.message = "No Redfish endpoints could be read"

test/functional/conftest.py

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,24 @@
2828
import subprocess
2929
import sys
3030
from typing import List
31+
from unittest.mock import MagicMock
3132

3233
import pytest
3334

35+
from nodescraper.models.systeminfo import OSFamily, SystemInfo
36+
37+
38+
@pytest.fixture
def system_info():
    """Minimal SystemInfo stub, shaped like the one in test/unit/conftest."""
    return SystemInfo(
        name="test_host",
        platform="X",
        os_family=OSFamily.LINUX,
        sku="TEST",
    )
42+
43+
44+
@pytest.fixture
def redfish_conn_mock():
    """Bare MagicMock standing in for a Redfish connection in plugin tests."""
    mock_conn = MagicMock()
    return mock_conn
48+
3449

3550
@pytest.fixture
3651
def run_cli_command():

0 commit comments

Comments
 (0)