Update mcp tool universe

Add rag utility
This commit is contained in:
shahondin1624
2026-02-14 17:59:18 +01:00
parent 3ced9db623
commit a0e36a7fe2
19 changed files with 970 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target/

69
pom.xml Normal file
View File

@@ -0,0 +1,69 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>de.shahondin1624</groupId>
    <artifactId>rag</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>21</maven.compiler.source>
        <maven.compiler.target>21</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <langchain4j.version>0.35.0</langchain4j.version>
        <slf4j.version>2.0.9</slf4j.version>
    </properties>

    <dependencyManagement>
        <dependencies>
            <!-- BOM keeps all langchain4j artifact versions aligned. -->
            <dependency>
                <groupId>dev.langchain4j</groupId>
                <artifactId>langchain4j-bom</artifactId>
                <version>${langchain4j.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <dependencies>
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j</artifactId>
        </dependency>
        <!-- Local ONNX embedding model (all-MiniLM-L6-v2), no external service needed. -->
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-embeddings-all-minilm-l6-v2</artifactId>
        </dependency>
        <!-- MCP server library. Declared exactly once: the original file declared
             this artifact twice, which triggers Maven duplicate-dependency warnings. -->
        <dependency>
            <groupId>io.github.mcp-java</groupId>
            <artifactId>mcp-server-lib</artifactId>
            <version>1.0.0</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-simple</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter</artifactId>
            <version>5.10.0</version>
            <scope>test</scope>
        </dependency>
    </dependencies>
</project>

View File

@@ -0,0 +1,105 @@
package de.shahondin1624.rag;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Facade for the RAG service, coordinating embedding, indexing, and searching.
 *
 * <p>Accessed as a process-wide singleton via {@link #getInstance()}. The first
 * call constructs the service, loads the bundled embedding store (if present)
 * and ensures an in-memory store exists.</p>
 */
public final class LocalRagService {
    private static final Logger log = LoggerFactory.getLogger(LocalRagService.class);

    // Guarded by the class lock in getInstance().
    private static LocalRagService instance;

    private final RagStoreManager storeManager;
    private final RagIndexer indexer;
    private final RagDocumentSplitter splitter;

    /** Returns the process-wide singleton, creating and initializing it on first use. */
    public static synchronized LocalRagService getInstance() {
        if (instance == null) {
            instance = new LocalRagService();
        }
        return instance;
    }

    private LocalRagService() {
        this.storeManager = new RagStoreManager();
        this.splitter = new RagDocumentSplitter();
        this.indexer = new RagIndexer(storeManager, splitter);
        // Call the private helper rather than the public initialize() so the
        // constructor never dispatches through a public entry point.
        doInitialize();
    }

    /**
     * (Re-)initializes the service: loads the bundled store from the classpath
     * and falls back to a fresh in-memory store if none was found.
     */
    public void initialize() {
        doInitialize();
    }

    private void doInitialize() {
        log.info("Initializing LocalRagService...");
        storeManager.loadBundledStore("rag-index.json");
        storeManager.ensureStore();
        log.info("LocalRagService initialized successfully.");
    }

    /** Indexes a directory without extra metadata; see {@link #indexDirectory(Path, Map)}. */
    public void indexDirectory(Path path) {
        indexDirectory(path, null);
    }

    /**
     * Indexes all supported files under {@code path} and starts a background
     * watcher that keeps the index in sync with file changes.
     *
     * <p>NOTE(review): each call starts a new {@link RagFileWatcher} for the
     * directory; calling this repeatedly for the same path accumulates watcher
     * threads — confirm callers only invoke this once per directory.</p>
     *
     * @param path     directory to index and watch
     * @param metadata optional metadata attached to every segment, may be null
     */
    public void indexDirectory(Path path, Map<String, String> metadata) {
        indexer.indexDirectory(path, metadata);
        new RagFileWatcher(path, p -> indexer.indexFile(p, metadata), indexer::removeFile, indexer::isSupportedFile).start();
    }

    /** Indexes a single file without extra metadata. */
    public void indexFile(Path path) {
        indexFile(path, null);
    }

    /** Indexes (or re-indexes) a single file with optional metadata. */
    public void indexFile(Path path, Map<String, String> metadata) {
        indexer.indexFile(path, metadata);
    }

    /** Removes a previously indexed file's segments from the store. */
    public void removeFile(Path path) {
        indexer.removeFile(path);
    }

    /** Indexes raw document strings without extra metadata. */
    public void indexDocuments(List<String> docs) {
        indexDocuments(docs, null);
    }

    /**
     * Splits, embeds, and stores each document string.
     *
     * @param docs     document texts to index; blank documents yield no segments
     * @param metadata optional metadata attached to every segment, may be null
     */
    public void indexDocuments(List<String> docs, Map<String, String> metadata) {
        log.info("Indexing {} documents...", docs.size());
        for (String docText : docs) {
            List<TextSegment> segments = splitter.split(docText, metadata);
            if (!segments.isEmpty()) {
                var embeddings = storeManager.getEmbeddingModel().embedAll(segments).content();
                storeManager.getEmbeddingStore().addAll(embeddings, segments);
            }
        }
        log.info("Indexing completed.");
    }

    /**
     * Runs a vector search and returns the top matching segment texts,
     * joined by a "---" separator.
     *
     * @param query natural-language search query
     * @return concatenated matching segments; empty string when nothing matches
     */
    public String search(String query) {
        log.info("Searching for: {}", query);
        var queryEmbedding = storeManager.getEmbeddingModel().embed(query).content();
        // NOTE(review): findRelevant is deprecated in newer langchain4j versions in
        // favor of search(EmbeddingSearchRequest) — consider migrating on upgrade.
        List<EmbeddingMatch<TextSegment>> matches = storeManager.getEmbeddingStore().findRelevant(queryEmbedding, 5);
        String result = matches.stream()
                .map(m -> m.embedded().text())
                .collect(Collectors.joining("\n\n---\n\n"));
        log.info("Search completed with {} matches.", matches.size());
        return result;
    }

    /** Exposes the underlying embedding store (mainly for tests and persistence). */
    public EmbeddingStore<TextSegment> getEmbeddingStore() {
        return storeManager.getEmbeddingStore();
    }

    /** Serializes the current store to the given file as JSON. */
    public void saveStore(Path path) {
        storeManager.saveStore(path);
    }
}

View File

@@ -0,0 +1,48 @@
package de.shahondin1624.rag;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Handles splitting of documents into segments for embedding.
 *
 * <p>Documents are first split along Markdown "## " headers and DSL-like
 * "name {" block openers; if that yields no segments, a recursive character
 * splitter is used as a fallback.</p>
 */
public class RagDocumentSplitter {

    // Compiled once instead of on every split() call. Splits before Markdown H2
    // headers ("## ") or before DSL-style block openers such as "Service {".
    private static final java.util.regex.Pattern BLOCK_BOUNDARY =
            java.util.regex.Pattern.compile("\n(?=## )|\n(?=\\w+\\s*\\{)");

    /**
     * Splits the given document text into segments.
     *
     * @param docText  the text to split; must not be null
     * @param metadata the metadata to associate with each segment, or null for none
     * @return a list of text segments; empty when the input is blank
     */
    public List<TextSegment> split(String docText, Map<String, String> metadata) {
        List<TextSegment> segments = new ArrayList<>();
        Metadata lcMetadata = metadata != null ? Metadata.from(metadata) : new Metadata();
        // Try to split by DSL-like blocks or Markdown headers first.
        String[] blocks = BLOCK_BOUNDARY.split(docText);
        for (String block : blocks) {
            if (!block.isBlank()) {
                segments.add(TextSegment.from(block.trim(), lcMetadata));
            }
        }
        if (segments.isEmpty()) {
            // Fallback: recursive splitter (500-char segments, 50-char overlap)
            // for plain prose with no structural blocks.
            DocumentSplitter splitter = DocumentSplitters.recursive(500, 50);
            List<TextSegment> splitSegments = splitter.split(Document.from(docText));
            for (TextSegment segment : splitSegments) {
                // Re-wrap so the caller-supplied metadata is attached to every segment.
                segments.add(TextSegment.from(segment.text(), lcMetadata));
            }
        }
        return segments;
    }
}

View File

@@ -0,0 +1,60 @@
package de.shahondin1624.rag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.*;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import java.util.function.Predicate;
/**
 * Monitors a directory for changes and triggers callbacks.
 *
 * <p>The watch loop runs on a dedicated, named daemon thread. Previously it ran
 * via {@code CompletableFuture.runAsync}, which blocks a worker of the shared
 * {@code ForkJoinPool.commonPool()} indefinitely on {@code WatchService.take()}
 * and can starve other common-pool users. A daemon thread also lets the JVM
 * exit without an explicit shutdown.</p>
 */
public class RagFileWatcher {
    private static final Logger log = LoggerFactory.getLogger(RagFileWatcher.class);

    private final Path dir;
    private final Consumer<Path> onFileChanged;
    private final Consumer<Path> onFileDeleted;
    private final Predicate<Path> fileFilter;
    // The watcher thread; null until start() is called. Volatile so stop()
    // sees the reference published by start().
    private volatile Thread watcherThread;

    /**
     * @param dir           directory to watch (non-recursive)
     * @param onFileChanged invoked for created/modified regular files passing the filter
     * @param onFileDeleted invoked for deleted entries (no filter — the file is gone)
     * @param fileFilter    decides which files are of interest
     */
    public RagFileWatcher(Path dir, Consumer<Path> onFileChanged, Consumer<Path> onFileDeleted, Predicate<Path> fileFilter) {
        this.dir = dir;
        this.onFileChanged = onFileChanged;
        this.onFileDeleted = onFileDeleted;
        this.fileFilter = fileFilter;
    }

    /** Starts the background watch loop on a daemon thread. */
    public void start() {
        Thread t = new Thread(this::watchLoop, "rag-file-watcher-" + dir.getFileName());
        t.setDaemon(true); // must not keep the JVM alive on its own
        this.watcherThread = t;
        t.start();
    }

    /** Stops the watcher thread, if running. Safe to call multiple times. */
    public void stop() {
        Thread t = watcherThread;
        if (t != null) {
            t.interrupt();
        }
    }

    // Blocking watch loop: dispatches create/modify/delete events to the callbacks
    // until interrupted or the watch key becomes invalid.
    private void watchLoop() {
        try (WatchService watcher = FileSystems.getDefault().newWatchService()) {
            dir.register(watcher, StandardWatchEventKinds.ENTRY_CREATE,
                    StandardWatchEventKinds.ENTRY_DELETE,
                    StandardWatchEventKinds.ENTRY_MODIFY);
            log.info("Started watching directory for changes: {}", dir);
            while (!Thread.currentThread().isInterrupted()) {
                WatchKey key = watcher.take();
                for (WatchEvent<?> event : key.pollEvents()) {
                    WatchEvent.Kind<?> kind = event.kind();
                    Path fileName = (Path) event.context();
                    Path filePath = dir.resolve(fileName);
                    if (kind == StandardWatchEventKinds.ENTRY_CREATE || kind == StandardWatchEventKinds.ENTRY_MODIFY) {
                        if (Files.isRegularFile(filePath) && fileFilter.test(filePath)) {
                            onFileChanged.accept(filePath);
                        }
                    } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) {
                        onFileDeleted.accept(filePath);
                    }
                }
                if (!key.reset()) {
                    // Directory no longer accessible — stop watching.
                    break;
                }
            }
        } catch (InterruptedException e) {
            // Restore interrupt status so callers/owners of this thread can observe it.
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            log.error("Error in file watcher", e);
        }
    }
}

View File

@@ -0,0 +1,95 @@
package de.shahondin1624.rag;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Stream;
/**
 * Handles indexing of files and directories into the vector store.
 *
 * <p>Tracks which embedding-store ids belong to each file so that re-indexing
 * or deleting a file first removes its previous segments.</p>
 */
public class RagIndexer {
    private static final Logger log = LoggerFactory.getLogger(RagIndexer.class);

    private final RagStoreManager storeManager;
    private final RagDocumentSplitter splitter;
    // Maps each indexed file to the embedding-store ids of its segments.
    // Concurrent because the file watcher may index/remove on a background thread.
    private final Map<Path, List<String>> fileToIds = new ConcurrentHashMap<>();

    public RagIndexer(RagStoreManager storeManager, RagDocumentSplitter splitter) {
        this.storeManager = storeManager;
        this.splitter = splitter;
    }

    /** Indexes every supported file under {@code path} (recursively), without extra metadata. */
    public void indexDirectory(Path path) {
        indexDirectory(path, null);
    }

    /**
     * Recursively indexes all supported regular files under the given directory.
     *
     * @param path     root directory to walk
     * @param metadata optional metadata attached to every segment, may be null
     */
    public void indexDirectory(Path path, Map<String, String> metadata) {
        log.info("Indexing directory: {}", path);
        try (Stream<Path> paths = Files.walk(path)) {
            paths.filter(Files::isRegularFile)
                    .filter(this::isSupportedFile)
                    .forEach(p -> indexFile(p, metadata));
        } catch (IOException e) {
            log.error("Failed to index directory: {}", path, e);
        }
    }

    /** Indexes a single file without extra metadata. */
    public void indexFile(Path path) {
        indexFile(path, null);
    }

    /**
     * Indexes (or re-indexes) a single file: removes any previous segments for the
     * file, splits its content, embeds the segments, and records the store ids.
     *
     * @param path          file to index; blank files are skipped
     * @param extraMetadata optional metadata merged on top of the "file_path" entry
     */
    public void indexFile(Path path, Map<String, String> extraMetadata) {
        // Re-indexing: drop segments from any previous version of this file first.
        removeFile(path);
        try {
            String content = Files.readString(path);
            if (content.isBlank()) {
                return; // nothing to embed
            }
            Map<String, String> metadata = new HashMap<>();
            metadata.put("file_path", path.toString());
            if (extraMetadata != null) {
                metadata.putAll(extraMetadata);
            }
            List<TextSegment> segments = splitter.split(content, metadata);
            if (!segments.isEmpty()) {
                List<Embedding> embeddings = storeManager.getEmbeddingModel().embedAll(segments).content();
                List<String> ids = storeManager.getEmbeddingStore().addAll(embeddings, segments);
                fileToIds.put(path, ids);
                log.info("Indexed file: {} ({} segments)", path, segments.size());
            }
        } catch (IOException e) {
            // Also raised for files that are not valid UTF-8 text.
            log.error("Failed to read file: {}", path, e);
        }
    }

    /** Removes a previously indexed file's segments from the store; no-op if unknown. */
    public void removeFile(Path path) {
        List<String> ids = fileToIds.remove(path);
        if (ids != null) {
            for (String id : ids) {
                storeManager.getEmbeddingStore().remove(id);
            }
            log.info("Removed file from index: {}", path);
        }
    }

    /**
     * Returns whether the file has one of the supported text extensions.
     * The comparison is case-insensitive, so e.g. {@code README.MD} is accepted too.
     */
    public boolean isSupportedFile(Path path) {
        String s = path.toString().toLowerCase(java.util.Locale.ROOT);
        return s.endsWith(".md") || s.endsWith(".dsl") || s.endsWith(".txt");
    }

    /**
     * Exposes the live file-to-ids map for inspection.
     * NOTE(review): this returns the internal mutable map; callers must not modify it.
     */
    public Map<Path, List<String>> getFileToIds() {
        return fileToIds;
    }
}

View File

@@ -0,0 +1,65 @@
package de.shahondin1624.rag;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.embedding.onnx.allminilml6v2.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
/**
 * Manages the embedding model and the vector store.
 *
 * <p>The embedding model is created eagerly; the store is either deserialized
 * from a bundled JSON resource or created empty via {@link #ensureStore()}.</p>
 */
public class RagStoreManager {
    private static final Logger log = LoggerFactory.getLogger(RagStoreManager.class);

    private final EmbeddingModel embeddingModel;
    // Null until loadBundledStore()/ensureStore() has run.
    private InMemoryEmbeddingStore<TextSegment> embeddingStore;

    public RagStoreManager() {
        // Local ONNX model — no network access required for embedding.
        this.embeddingModel = new AllMiniLmL6V2EmbeddingModel();
    }

    /**
     * Loads a pre-built embedding store from a classpath resource, if present.
     * Leaves the store unset when the resource is missing or unreadable;
     * callers should follow up with {@link #ensureStore()}.
     *
     * @param resourcePath classpath location of the serialized store JSON
     */
    public void loadBundledStore(String resourcePath) {
        try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
            if (is != null) {
                String json = new String(is.readAllBytes(), StandardCharsets.UTF_8);
                this.embeddingStore = InMemoryEmbeddingStore.fromJson(json);
                log.info("Loaded bundled embedding store from {}", resourcePath);
            } else {
                // Previously silent: make the missing bundled index visible instead
                // of quietly starting with an empty store.
                log.warn("Bundled embedding store '{}' not found on classpath; an empty store will be used", resourcePath);
            }
        } catch (IOException e) {
            log.error("Failed to load bundled embedding store", e);
        }
    }

    /** Creates an empty in-memory store if none has been loaded yet. */
    public void ensureStore() {
        if (this.embeddingStore == null) {
            this.embeddingStore = new InMemoryEmbeddingStore<>();
        }
    }

    /**
     * Serializes the current store to the given file as JSON.
     * No-op when no store exists yet.
     */
    public void saveStore(Path path) {
        if (embeddingStore == null) return;
        try {
            String json = embeddingStore.serializeToJson();
            Files.writeString(path, json);
            log.info("Saved embedding store to {}", path);
        } catch (IOException e) {
            log.error("Failed to save embedding store", e);
        }
    }

    /** Returns the (always non-null) embedding model. */
    public EmbeddingModel getEmbeddingModel() {
        return embeddingModel;
    }

    /** Returns the current store; null before loadBundledStore()/ensureStore(). */
    public InMemoryEmbeddingStore<TextSegment> getEmbeddingStore() {
        return embeddingStore;
    }
}

View File

@@ -0,0 +1,99 @@
package de.shahondin1624.rag;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
/**
 * Generates a small sample corpus (Markdown docs plus DSL example files) used
 * for exercising the RAG indexing pipeline.
 *
 * <p>The output root defaults to {@code test-data} in the working directory and
 * can be overridden by passing a path as the first command-line argument.</p>
 */
public class TestDataGenerator {
    public static void main(String[] args) {
        // Generalized: allow the output root to be overridden via args[0]
        // (defaults to the original hard-coded "test-data" directory).
        Path root = args != null && args.length > 0 ? Paths.get(args[0]) : Paths.get("test-data");
        try {
            // 1. Create Directories
            Files.createDirectories(root.resolve("docs"));
            Files.createDirectories(root.resolve("examples"));
            // 2. Generate Documentation (Markdown)
            createFile(root.resolve("docs/syntax_guide.md"), """
                    # DSL Syntax Guide: DiagnosticService
                    To define a diagnostic service in the OTX/ODX abstraction layer, use the `DiagnosticService` keyword.
                    ## Syntax
                    ```dsl
                    DiagnosticService <ServiceName> {
                        request: <HexCode>
                        response: <HexCode>
                        timeout: <Integer> ms
                    }
                    ```
                    ## Rules
                    * The service name must be UpperCamelCase.
                    * Timeouts are strictly defined in milliseconds.
                    * Use the `extends` keyword to inherit properties from a base service.
                    """);
            createFile(root.resolve("docs/units_manual.md"), """
                    # Working with Units
                    The DSL strictly types physical units to prevent conversion errors between OTX and ODX.
                    ## Defining a Unit
                    Use the `Unit` keyword followed by the display symbol.
                    Example:
                    `Unit Speed [km/h]`
                    ## Common Errors
                    Never use string literals for units in a `DiagnosticService`. Always reference the typed Unit definition.
                    """);
            // 3. Generate Code Examples (.dsl)
            createFile(root.resolve("examples/powertrain.dsl"), """
                    // Example: Reading Engine RPM
                    package com.vehicle.powertrain
                    Unit Rpm [rotations/m]
                    DiagnosticService ReadEngineSpeed {
                        request: 0x22F40C
                        response: 0x62F40C
                        timeout: 500 ms
                        return: Rpm
                    }
                    """);
            createFile(root.resolve("examples/body_control.dsl"), """
                    // Example: Door Lock Control
                    package com.vehicle.body
                    DiagnosticService LockDoors {
                        request: 0x2E0101
                        response: 0x6E0101
                        timeout: 2000 ms
                    }
                    DiagnosticService UnlockDoors extends LockDoors {
                        request: 0x2E0100
                    }
                    """);
            System.out.println("✅ Test data generated successfully in: " + root.toAbsolutePath());
        } catch (IOException e) {
            System.err.println("Failed to generate test data: " + e.getMessage());
            e.printStackTrace();
        }
    }

    // Writes (creating or truncating) a single file and logs its path.
    private static void createFile(Path path, String content) throws IOException {
        Files.writeString(path, content, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
        System.out.println("Created: " + path);
    }
}

View File

@@ -0,0 +1,82 @@
package de.shahondin1624.rag.tooling;
import de.shahondin1624.rag.LocalRagService;
import io.modelcontextprotocol.spec.McpSchema;
import mcp.tools.DefaultMcpTool;
import mcp.tools.McpValidatedTool;
import mcp.tools.helper.SchemaBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Tool for embedding new information into the RAG service.
 *
 * <p>Accepts a mandatory "content" string plus optional "metadata" key-value
 * pairs, and forwards them to {@link LocalRagService} for indexing.</p>
 */
@DefaultMcpTool()
public class RagEmbedTool extends McpValidatedTool {
    private static final Logger logger = LoggerFactory.getLogger(RagEmbedTool.class);

    @Override
    public String name() {
        return "embed_information";
    }

    @Override
    public String title() {
        return "Embed Information";
    }

    @Override
    public String description() {
        return "Embeds new information into the local vector store to make it available for future searches.";
    }

    @Override
    public McpSchema.JsonSchema inputSchema() {
        return new SchemaBuilder()
                .addProperty("content", "string", "The information to embed")
                .addProperty("metadata", "object", "Optional metadata to associate with the information (key-value pairs)")
                .required("content")
                .build();
    }

    @Override
    public Map<String, Object> outputSchema() {
        return new SchemaBuilder()
                .returns("string", "A success message")
                .buildMap();
    }

    @Override
    protected boolean isReadOnly() {
        // Embedding writes into the vector store.
        return false;
    }

    @Override
    public McpSchema.CallToolResult callValidated(McpSchema.CallToolRequest request, Map<String, Object> arguments) {
        String content = (String) arguments.get("content");
        @SuppressWarnings("unchecked")
        Map<String, Object> rawMetadata = (Map<String, Object>) arguments.get("metadata");
        Map<String, String> metadata = toStringMap(rawMetadata);
        logger.debug("RagEmbedTool called with content length: {}, metadata: {}", content.length(), metadata);
        try {
            LocalRagService.getInstance().indexDocuments(List.of(content), metadata);
            return successResult("Information successfully embedded and indexed.");
        } catch (Exception e) {
            logger.error("Error during information embedding", e);
            return error("Error during information embedding: " + e.getMessage());
        }
    }

    // Converts the loosely-typed metadata object into string-to-string pairs;
    // returns null when no metadata was supplied (matching the service contract).
    private static Map<String, String> toStringMap(Map<String, Object> raw) {
        if (raw == null) {
            return null;
        }
        Map<String, String> converted = new HashMap<>();
        raw.forEach((key, value) -> converted.put(key, String.valueOf(value)));
        return converted;
    }
}

View File

@@ -0,0 +1,63 @@
package de.shahondin1624.rag.tooling;
import de.shahondin1624.rag.LocalRagService;
import io.modelcontextprotocol.spec.McpSchema;
import mcp.tools.DefaultMcpTool;
import mcp.tools.McpValidatedTool;
import mcp.tools.helper.SchemaBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
 * Tool for searching indexed documents using RAG.
 *
 * <p>Takes a single "query" string and returns the concatenated text of the
 * most relevant indexed segments.</p>
 */
@DefaultMcpTool()
public class RagSearchTool extends McpValidatedTool {
    private static final Logger logger = LoggerFactory.getLogger(RagSearchTool.class);

    @Override
    public String name() {
        return "search_dsl_documentation";
    }

    @Override
    public String title() {
        return "Search DSL Documentation";
    }

    @Override
    public String description() {
        return "Searches locally indexed documents and documentation for relevant information using vector search.";
    }

    @Override
    public McpSchema.JsonSchema inputSchema() {
        SchemaBuilder schema = new SchemaBuilder();
        schema.addProperty("query", "string", "The search query");
        schema.required("query");
        return schema.build();
    }

    @Override
    public Map<String, Object> outputSchema() {
        SchemaBuilder schema = new SchemaBuilder();
        schema.returns("string", "The retrieved text segments");
        return schema.buildMap();
    }

    @Override
    public McpSchema.CallToolResult callValidated(McpSchema.CallToolRequest request, Map<String, Object> arguments) {
        String searchQuery = (String) arguments.get("query");
        logger.debug("RagSearchTool called with query: {}", searchQuery);
        try {
            // Delegate the actual vector search to the shared RAG service singleton.
            String retrieved = LocalRagService.getInstance().search(searchQuery);
            return successResult(retrieved);
        } catch (Exception e) {
            logger.error("Error during RAG search", e);
            return error("Error during RAG search: " + e.getMessage());
        }
    }
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,54 @@
package de.shahondin1624.rag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.TimeUnit;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Integration test for incremental indexing: verifies that the background file
 * watcher re-indexes modified files and removes deleted ones.
 */
public class LocalRagServiceIncrementalTest {

    @TempDir
    Path tempDir;

    @Test
    public void testIncrementalIndexingAndWatching() throws IOException, InterruptedException {
        LocalRagService service = LocalRagService.getInstance();
        // Use a temporary directory for testing
        Path docFile = tempDir.resolve("test-doc.md");
        Files.writeString(docFile, "## Test Service\nThis is a test service description.");
        service.indexDirectory(tempDir);
        // Verify initial index
        String result = service.search("test service");
        assertTrue(result.contains("Test Service"));
        // Modify file — the background watcher should pick this up and re-index.
        Files.writeString(docFile, "## Test Service\nUpdated description for the test service.");
        // Poll instead of a fixed sleep: watcher latency varies between platforms
        // (WatchService is polling-based on some OSes). Polling finishes as soon
        // as the change lands while tolerating slow file systems up to the timeout.
        awaitTrue(() -> service.search("Updated description").contains("Updated description"), 10);
        result = service.search("Updated description");
        assertTrue(result.contains("Updated description"), "Index should be updated with new content");
        // Delete file — the watcher should drop its segments from the index.
        Files.delete(docFile);
        awaitTrue(() -> !service.search("test service").contains("Updated description"), 10);
        result = service.search("test service");
        // Search returns the top matches; after deleting the only source of the
        // updated text, that text must no longer appear in any match.
        assertFalse(result.contains("Updated description"), "Index should no longer contain deleted content");
    }

    // Polls the condition every 100 ms until it holds or the timeout elapses.
    // Returning without the condition holding is fine: the following assert
    // reports the actual failure with a meaningful message.
    private static void awaitTrue(java.util.function.BooleanSupplier condition, long timeoutSeconds) throws InterruptedException {
        long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSeconds);
        while (System.nanoTime() < deadline && !condition.getAsBoolean()) {
            TimeUnit.MILLISECONDS.sleep(100);
        }
    }
}

View File

@@ -0,0 +1,49 @@
package de.shahondin1624.rag;
import dev.langchain4j.data.segment.TextSegment;
import org.junit.jupiter.api.Test;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.*;
public class LocalRagServiceTest {
@Test
public void testRagService() {
LocalRagService service = LocalRagService.getInstance();
assertNotNull(service);
service.indexDirectory(Paths.get("/home/shahondin1624/Projects/rag/docs"));
String result = service.search("What is DiagnosticService?");
assertNotNull(result);
assertTrue(result.contains("DiagnosticService"), "Result should contain 'DiagnosticService'");
assertTrue(result.contains("core component"), "Result should contain 'core component'");
}
@Test
public void testSplitterWithMetadata() {
RagDocumentSplitter splitter = new RagDocumentSplitter();
Map<String, String> metadata = Map.of("author", "tester", "version", "1.0");
List<TextSegment> segments = splitter.split("Some content", metadata);
assertFalse(segments.isEmpty());
TextSegment segment = segments.get(0);
assertEquals("Some content", segment.text());
assertEquals("tester", segment.metadata().get("author"));
assertEquals("1.0", segment.metadata().get("version"));
}
@Test
public void testIndexerWithMetadata() {
LocalRagService service = LocalRagService.getInstance();
Map<String, String> metadata = Map.of("test_key", "test_value");
service.indexDocuments(List.of("Content for indexer test"), metadata);
// Verification via search
String result = service.search("Content for indexer test");
assertTrue(result.contains("Content for indexer test"));
}
}

View File

@@ -0,0 +1,85 @@
package de.shahondin1624.rag.tooling;
import io.modelcontextprotocol.spec.McpSchema;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Tests for the MCP tooling layer: verifies the search tool returns results and
 * that information embedded via the embed tool becomes searchable.
 */
public class RagToolingTest {

    @BeforeAll
    public static void setup() {
        // Ensure RAG service is initialized
    }

    // Builds a request for the given tool and invokes it the same way the MCP
    // runtime would (request plus its own argument map).
    private static McpSchema.CallToolResult invoke(Object tool, String toolName, Map<String, Object> args) {
        McpSchema.CallToolRequest request = new McpSchema.CallToolRequest(toolName, args);
        if (tool instanceof RagSearchTool searchTool) {
            return searchTool.call(request, request.arguments());
        }
        return ((RagEmbedTool) tool).call(request, request.arguments());
    }

    // Asserts the common success shape: non-null, not an error, structured
    // content present under the "result" key.
    private static void assertSuccessShape(McpSchema.CallToolResult result) {
        assertNotNull(result);
        assertFalse(result.isError());
        assertNotNull(result.structuredContent());
        assertTrue(result.structuredContent().containsKey("result"));
    }

    private static String firstText(McpSchema.CallToolResult result) {
        return ((McpSchema.TextContent) result.content().get(0)).text();
    }

    @Test
    public void testRagSearchTool() {
        RagSearchTool tool = new RagSearchTool();
        assertEquals("search_dsl_documentation", tool.name());
        McpSchema.CallToolResult result =
                invoke(tool, "search_dsl_documentation", Map.of("query", "DiagnosticService"));
        assertNotNull(result);
        assertFalse(result.isError());
        assertFalse(result.content().isEmpty());
        // Check structured content
        assertNotNull(result.structuredContent());
        assertTrue(result.structuredContent().containsKey("result"));
    }

    @Test
    public void testRagEmbedTool() {
        RagEmbedTool embedTool = new RagEmbedTool();
        assertEquals("embed_information", embedTool.name());
        String secretInfo = "The secret password for the DSL is 'Blueberry'.";
        McpSchema.CallToolResult embedResult =
                invoke(embedTool, "embed_information", Map.of("content", secretInfo));
        assertSuccessShape(embedResult);
        // Round-trip: searching must now surface the freshly embedded text.
        McpSchema.CallToolResult searchResult = invoke(new RagSearchTool(),
                "search_dsl_documentation", Map.of("query", "secret password for the DSL"));
        assertNotNull(searchResult);
        assertFalse(searchResult.isError());
        String content = firstText(searchResult);
        assertTrue(content.contains("Blueberry"), "Search result should contain the embedded information. Got: " + content);
    }

    @Test
    public void testRagEmbedToolWithMetadata() {
        RagEmbedTool embedTool = new RagEmbedTool();
        String info = "This info has metadata.";
        Map<String, Object> metadata = Map.of("source", "test-case", "priority", "high");
        McpSchema.CallToolResult embedResult = invoke(embedTool, "embed_information",
                Map.of("content", info, "metadata", metadata));
        assertSuccessShape(embedResult);
        // Verify search still works after embedding with metadata attached.
        McpSchema.CallToolResult searchResult = invoke(new RagSearchTool(),
                "search_dsl_documentation", Map.of("query", "info has metadata"));
        assertNotNull(searchResult);
        assertFalse(searchResult.isError());
        assertTrue(firstText(searchResult).contains("This info has metadata."));
    }
}

View File

@@ -0,0 +1,40 @@
# DSL Documentation Guide
## DiagnosticService
The `DiagnosticService` is the core component for managing diagnostics in the system.
It allows you to define diagnostic rules and actions.
Example:
```dsl
DiagnosticService {
rule "CheckEngineLight" {
condition: engine.status == ERROR
action: notify("Engine error detected")
}
}
```
## NotificationService
The `NotificationService` handles sending notifications to various channels.
Example:
```dsl
NotificationService {
channel "Email" {
address: "admin@example.com"
}
}
```
## DataProcessor
The `DataProcessor` is used for processing incoming data streams.
It supports various transformation functions.
Example:
```dsl
DataProcessor {
input: "sensor_data"
transform: map(value -> value * 2)
output: "processed_data"
}
```

View File

@@ -0,0 +1,17 @@
# DSL Syntax Guide: DiagnosticService
To define a diagnostic service in the OTX/ODX abstraction layer, use the `DiagnosticService` keyword.
## Syntax
```dsl
DiagnosticService <ServiceName> {
request: <HexCode>
response: <HexCode>
timeout: <Integer> ms
}
```
## Rules
* The service name must be UpperCamelCase.
* Timeouts are strictly defined in milliseconds.
* Use the `extends` keyword to inherit properties from a base service.

View File

@@ -0,0 +1,12 @@
# Working with Units
The DSL strictly types physical units to prevent conversion errors between OTX and ODX.
## Defining a Unit
Use the `Unit` keyword followed by the display symbol.
Example:
`Unit Speed [km/h]`
## Common Errors
Never use string literals for units in a `DiagnosticService`. Always reference the typed Unit definition.

View File

@@ -0,0 +1,13 @@
// Example: Door Lock Control
package com.vehicle.body
DiagnosticService LockDoors {
request: 0x2E0101
response: 0x6E0101
timeout: 2000 ms
}
DiagnosticService UnlockDoors extends LockDoors {
request: 0x2E0100
}

View File

@@ -0,0 +1,12 @@
// Example: Reading Engine RPM
package com.vehicle.powertrain
Unit Rpm [rotations/m]
DiagnosticService ReadEngineSpeed {
request: 0x22F40C
response: 0x62F40C
timeout: 500 ms
return: Rpm
}