Merge pull request 'Refactor: Config.load() boilerplate reduction (#199)' (#215) from refactor/issue-199-config-load into main
This commit was merged in pull request #215.
This commit is contained in:
@@ -105,6 +105,7 @@
|
||||
| #99 | Document multi-machine deployment guide | Phase 12 | `COMPLETED` | Markdown | [issue-099.md](issue-099.md) |
|
||||
| #191 | Refactor: add audit logging to Tool Broker service | — | `COMPLETED` | Rust | [issue-191.md](issue-191.md) |
|
||||
| #192 | Refactor: extract _format_memory and result builders to shared utils | — | `COMPLETED` | Python | [issue-192.md](issue-192.md) |
|
||||
| #199 | Refactor: Config.load() boilerplate reduction | — | `COMPLETED` | Python | [issue-199.md](issue-199.md) |
|
||||
| #200 | Refactor: consolidate duplicated _AGENT_TYPE_NAMES mappings | — | `COMPLETED` | Python | [issue-200.md](issue-200.md) |
|
||||
| #201 | Refactor: extract duplicated _CONFIDENCE_MAP and _JSON_BLOCK_RE | — | `COMPLETED` | Python | [issue-201.md](issue-201.md) |
|
||||
| #202 | Bug: Docker network missing internal:true and edge separation | — | `COMPLETED` | Docker / YAML | [issue-202.md](issue-202.md) |
|
||||
|
||||
43 implementation-plans/issue-199.md (Normal file)
@@ -0,0 +1,43 @@
|
||||
# Implementation Plan — Issue #199: Config.load() boilerplate reduction
|
||||
|
||||
## Metadata
|
||||
|
||||
| Field | Value |
|
||||
|---|---|
|
||||
| Issue | [#199](https://git.shahondin1624.de/llm-multiverse/llm-multiverse/issues/199) |
|
||||
| Title | Refactor: Config.load() boilerplate — replace manual field-by-field parsing |
|
||||
| Milestone | — (tech debt) |
|
||||
| Labels | `type:refactor`, `priority:medium`, `service:orchestrator` |
|
||||
| Status | `COMPLETED` |
|
||||
| Language | Python |
|
||||
| Related Plans | — |
|
||||
| Blocked by | — |
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- [x] Generic `_load_dataclass(cls, data)` helper using `dataclasses.fields()` introspection
|
||||
- [x] Config.load() reduced from ~180 lines to ~30 lines
|
||||
- [x] All 9 sub-config sections use the generic helper
|
||||
- [x] `ConfidenceConfig.__post_init__` validation still works
|
||||
- [x] All existing tests pass
|
||||
- [x] Lint clean
|
||||
|
||||
## Implementation Steps
|
||||
|
||||
### 1. Add `_load_dataclass` helper
|
||||
Generic function that iterates `dataclasses.fields(cls)`, picks values from `data` dict for present keys, and constructs the dataclass with defaults for missing keys.
|
||||
|
||||
### 2. Replace Config.load() boilerplate
|
||||
Replace 9 manual section-loading blocks with one-liner calls to `_load_dataclass`.
|
||||
|
||||
## Files to Create/Modify
|
||||
|
||||
| File | Action | Purpose |
|
||||
|---|---|---|
|
||||
| `services/orchestrator/src/orchestrator/config.py` | Modify | Add helper, simplify load() |
|
||||
|
||||
## Deviation Log
|
||||
|
||||
| Deviation | Reason |
|
||||
|---|---|
|
||||
| None | — |
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import os
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
@@ -121,152 +122,6 @@ class Config:
|
||||
with open(config_path) as f:
|
||||
data = yaml.safe_load(f) or {}
|
||||
|
||||
researcher_data = data.get("researcher", {})
|
||||
researcher = AgentConfig(
|
||||
max_iterations=researcher_data.get(
|
||||
"max_iterations", AgentConfig.max_iterations
|
||||
),
|
||||
timeout_seconds=researcher_data.get(
|
||||
"timeout_seconds", AgentConfig.timeout_seconds
|
||||
),
|
||||
max_tokens=researcher_data.get("max_tokens", AgentConfig.max_tokens),
|
||||
)
|
||||
|
||||
coder_data = data.get("coder", {})
|
||||
coder = AgentConfig(
|
||||
max_iterations=coder_data.get(
|
||||
"max_iterations", AgentConfig.max_iterations
|
||||
),
|
||||
timeout_seconds=coder_data.get(
|
||||
"timeout_seconds", AgentConfig.timeout_seconds
|
||||
),
|
||||
max_tokens=coder_data.get("max_tokens", AgentConfig.max_tokens),
|
||||
)
|
||||
|
||||
assistant_data = data.get("assistant", {})
|
||||
assistant = AgentConfig(
|
||||
max_iterations=assistant_data.get(
|
||||
"max_iterations", AgentConfig.max_iterations
|
||||
),
|
||||
timeout_seconds=assistant_data.get(
|
||||
"timeout_seconds", AgentConfig.timeout_seconds
|
||||
),
|
||||
max_tokens=assistant_data.get("max_tokens", AgentConfig.max_tokens),
|
||||
)
|
||||
|
||||
sysadmin_data = data.get("sysadmin", {})
|
||||
sysadmin = AgentConfig(
|
||||
max_iterations=sysadmin_data.get(
|
||||
"max_iterations", AgentConfig.max_iterations
|
||||
),
|
||||
timeout_seconds=sysadmin_data.get(
|
||||
"timeout_seconds", AgentConfig.timeout_seconds
|
||||
),
|
||||
max_tokens=sysadmin_data.get("max_tokens", AgentConfig.max_tokens),
|
||||
)
|
||||
|
||||
decomposer_data = data.get("decomposer", {})
|
||||
decomposer = DecomposerConfig(
|
||||
max_tokens=decomposer_data.get(
|
||||
"max_tokens", DecomposerConfig.max_tokens
|
||||
),
|
||||
max_subtasks=decomposer_data.get(
|
||||
"max_subtasks", DecomposerConfig.max_subtasks
|
||||
),
|
||||
)
|
||||
|
||||
dispatcher_data = data.get("dispatcher", {})
|
||||
dispatcher = DispatcherConfig(
|
||||
subtask_timeout_seconds=dispatcher_data.get(
|
||||
"subtask_timeout_seconds",
|
||||
DispatcherConfig.subtask_timeout_seconds,
|
||||
),
|
||||
overall_timeout_seconds=dispatcher_data.get(
|
||||
"overall_timeout_seconds",
|
||||
DispatcherConfig.overall_timeout_seconds,
|
||||
),
|
||||
max_concurrent_subtasks=dispatcher_data.get(
|
||||
"max_concurrent_subtasks",
|
||||
DispatcherConfig.max_concurrent_subtasks,
|
||||
),
|
||||
)
|
||||
|
||||
compaction_data = data.get("compaction", {})
|
||||
compaction = CompactionConfig(
|
||||
threshold_ratio=compaction_data.get(
|
||||
"threshold_ratio",
|
||||
CompactionConfig.threshold_ratio,
|
||||
),
|
||||
summary_max_tokens=compaction_data.get(
|
||||
"summary_max_tokens",
|
||||
CompactionConfig.summary_max_tokens,
|
||||
),
|
||||
llm_enabled=compaction_data.get(
|
||||
"llm_enabled",
|
||||
CompactionConfig.llm_enabled,
|
||||
),
|
||||
)
|
||||
|
||||
gating_data = data.get("memory_gating", {})
|
||||
memory_gating = MemoryGatingConfig(
|
||||
confidence_threshold=gating_data.get(
|
||||
"confidence_threshold",
|
||||
MemoryGatingConfig.confidence_threshold,
|
||||
),
|
||||
min_content_length=gating_data.get(
|
||||
"min_content_length",
|
||||
MemoryGatingConfig.min_content_length,
|
||||
),
|
||||
max_content_length=gating_data.get(
|
||||
"max_content_length",
|
||||
MemoryGatingConfig.max_content_length,
|
||||
),
|
||||
require_structured_content=gating_data.get(
|
||||
"require_structured_content",
|
||||
MemoryGatingConfig.require_structured_content,
|
||||
),
|
||||
quality_gate_uncertain=gating_data.get(
|
||||
"quality_gate_uncertain",
|
||||
MemoryGatingConfig.quality_gate_uncertain,
|
||||
),
|
||||
enabled=gating_data.get(
|
||||
"enabled",
|
||||
MemoryGatingConfig.enabled,
|
||||
),
|
||||
)
|
||||
|
||||
confidence_data = data.get("confidence", {})
|
||||
confidence = ConfidenceConfig(
|
||||
replan_threshold=confidence_data.get(
|
||||
"replan_threshold",
|
||||
ConfidenceConfig.replan_threshold,
|
||||
),
|
||||
warning_threshold=confidence_data.get(
|
||||
"warning_threshold",
|
||||
ConfidenceConfig.warning_threshold,
|
||||
),
|
||||
max_replan_attempts=confidence_data.get(
|
||||
"max_replan_attempts",
|
||||
ConfidenceConfig.max_replan_attempts,
|
||||
),
|
||||
report_per_subtask=confidence_data.get(
|
||||
"report_per_subtask",
|
||||
ConfidenceConfig.report_per_subtask,
|
||||
),
|
||||
memory_confidence_weight=confidence_data.get(
|
||||
"memory_confidence_weight",
|
||||
ConfidenceConfig.memory_confidence_weight,
|
||||
),
|
||||
aggregation_strategy=confidence_data.get(
|
||||
"aggregation_strategy",
|
||||
ConfidenceConfig.aggregation_strategy,
|
||||
),
|
||||
enabled=confidence_data.get(
|
||||
"enabled",
|
||||
ConfidenceConfig.enabled,
|
||||
),
|
||||
)
|
||||
|
||||
return cls(
|
||||
host=data.get("host", cls.host),
|
||||
port=data.get("port", cls.port),
|
||||
@@ -277,13 +132,22 @@ class Config:
|
||||
memory_addr=data.get("memory_addr", cls.memory_addr),
|
||||
search_addr=data.get("search_addr", cls.search_addr),
|
||||
audit_addr=data.get("audit_addr"),
|
||||
researcher=researcher,
|
||||
coder=coder,
|
||||
assistant=assistant,
|
||||
sysadmin=sysadmin,
|
||||
decomposer=decomposer,
|
||||
dispatcher=dispatcher,
|
||||
compaction=compaction,
|
||||
memory_gating=memory_gating,
|
||||
confidence=confidence,
|
||||
researcher=_load_dataclass(AgentConfig, data.get("researcher", {})),
|
||||
coder=_load_dataclass(AgentConfig, data.get("coder", {})),
|
||||
assistant=_load_dataclass(AgentConfig, data.get("assistant", {})),
|
||||
sysadmin=_load_dataclass(AgentConfig, data.get("sysadmin", {})),
|
||||
decomposer=_load_dataclass(DecomposerConfig, data.get("decomposer", {})),
|
||||
dispatcher=_load_dataclass(DispatcherConfig, data.get("dispatcher", {})),
|
||||
compaction=_load_dataclass(CompactionConfig, data.get("compaction", {})),
|
||||
memory_gating=_load_dataclass(MemoryGatingConfig, data.get("memory_gating", {})),
|
||||
confidence=_load_dataclass(ConfidenceConfig, data.get("confidence", {})),
|
||||
)
|
||||
|
||||
|
||||
def _load_dataclass(cls, data: dict):
|
||||
"""Load a dataclass from a dict, using field defaults for missing keys."""
|
||||
kwargs = {}
|
||||
for f in dataclasses.fields(cls):
|
||||
if f.name in data:
|
||||
kwargs[f.name] = data[f.name]
|
||||
return cls(**kwargs)
|
||||
|
||||
Reference in New Issue
Block a user