diff --git a/docs/source/docset.yml b/docs/source/docset.yml
index 7a8c022..57ac05b 100644
--- a/docs/source/docset.yml
+++ b/docs/source/docset.yml
@@ -3,14 +3,30 @@ exclude:
- '_*.md'
toc:
- file: index.md
- - file: config.md
- - file: search.md
+ - folder: markup
+ - folder: elastic
children:
- - file: search-part2.md
- - folder: search
- - folder: my-folder1
- - folder: my-folder2
+ - file: index.md
+ - folder: observability
+ - folder: reference
+ - folder: semantic-search
+ - folder: search-labs
+ children:
+ - file: index.md
+ - file: chat.md
+ children:
+ - file: chat/req.md
+ - file: chat/rag.md
+ - file: search.md
+ children:
+ - file: search/req.md
+ - file: search/setup.md
+ - file: install.md
+ children:
+ - file: install/cloud.md
+ - file: install/docker.md
+ - folder: nested
children:
- - file: subpath/file.md
- - file: file.md
- - folder: sub/folder
\ No newline at end of file
+ - folder: content
+ - file: index.md
+ - folder: versioning
\ No newline at end of file
diff --git a/docs/source/elastic/semantic-search/snippets/index-mapping.md b/docs/source/elastic/semantic-search/_snippets/index-mapping.md
similarity index 100%
rename from docs/source/elastic/semantic-search/snippets/index-mapping.md
rename to docs/source/elastic/semantic-search/_snippets/index-mapping.md
diff --git a/docs/source/elastic/semantic-search/snippets/inference-endpoint.md b/docs/source/elastic/semantic-search/_snippets/inference-endpoint.md
similarity index 100%
rename from docs/source/elastic/semantic-search/snippets/inference-endpoint.md
rename to docs/source/elastic/semantic-search/_snippets/inference-endpoint.md
diff --git a/docs/source/elastic/semantic-search/amazon-bedrock.md b/docs/source/elastic/semantic-search/amazon-bedrock.md
index a10fb83..969501a 100644
--- a/docs/source/elastic/semantic-search/amazon-bedrock.md
+++ b/docs/source/elastic/semantic-search/amazon-bedrock.md
@@ -38,7 +38,7 @@ The examples in this tutorial use the `amazon.titan-embed-text-v1` model from th
## Create an inference endpoint
-```{include} snippets/inference-endpoint.md
+```{include} _snippets/inference-endpoint.md
```
```{code-block} bash
@@ -67,7 +67,7 @@ PUT _inference/text_embedding/amazon_bedrock_embeddings
## Create the index mapping
-```{include} snippets/index-mapping.md
+```{include} _snippets/index-mapping.md
```
```{code-block} bash
diff --git a/docs/source/elastic/semantic-search/azure-ai-studio.md b/docs/source/elastic/semantic-search/azure-ai-studio.md
index 29d2e61..bb3e3db 100644
--- a/docs/source/elastic/semantic-search/azure-ai-studio.md
+++ b/docs/source/elastic/semantic-search/azure-ai-studio.md
@@ -39,7 +39,7 @@ The examples in this tutorial use models available through [Azure AI Studio](htt
## Create an inference endpoint
-```{include} snippets/inference-endpoint.md
+```{include} _snippets/inference-endpoint.md
```
```{code-block} bash
@@ -67,7 +67,7 @@ PUT _inference/text_embedding/azure_ai_studio_embeddings
## Create the index mapping
-```{include} snippets/index-mapping.md
+```{include} _snippets/index-mapping.md
```
```{code-block} bash
diff --git a/docs/source/elastic/semantic-search/azure-openai.md b/docs/source/elastic/semantic-search/azure-openai.md
index af1742b..4033acd 100644
--- a/docs/source/elastic/semantic-search/azure-openai.md
+++ b/docs/source/elastic/semantic-search/azure-openai.md
@@ -39,7 +39,7 @@ The examples in this tutorial use models available through [Azure OpenAI](https:
## Create an inference endpoint
-```{include} snippets/inference-endpoint.md
+```{include} _snippets/inference-endpoint.md
```
```{code-block} bash
@@ -67,7 +67,7 @@ PUT _inference/text_embedding/azure_openai_embeddings
## Create the index mapping
-```{include} snippets/index-mapping.md
+```{include} _snippets/index-mapping.md
```
```{code-block} bash
diff --git a/docs/source/elastic/semantic-search/cohere.md b/docs/source/elastic/semantic-search/cohere.md
index 60e8259..3287f3b 100644
--- a/docs/source/elastic/semantic-search/cohere.md
+++ b/docs/source/elastic/semantic-search/cohere.md
@@ -37,7 +37,7 @@ A [Cohere account](https://cohere.com/) is required to use the inference API wit
## Create an inference endpoint
-```{include} snippets/inference-endpoint.md
+```{include} _snippets/inference-endpoint.md
```
```{code-block} bash
@@ -65,7 +65,7 @@ When using this model the recommended similarity measure to use in the dense_vec
## Create the index mapping
-```{include} snippets/index-mapping.md
+```{include} _snippets/index-mapping.md
```
```{code-block} bash
diff --git a/docs/source/elastic/semantic-search/elser.md b/docs/source/elastic/semantic-search/elser.md
index 2a5a809..3a38e5f 100644
--- a/docs/source/elastic/semantic-search/elser.md
+++ b/docs/source/elastic/semantic-search/elser.md
@@ -34,7 +34,7 @@ ELSER is a model trained by Elastic. If you have an Elasticsearch deployment, th
## Create an inference endpoint
-```{include} snippets/inference-endpoint.md
+```{include} _snippets/inference-endpoint.md
```
```{code-block} bash
@@ -57,7 +57,7 @@ You don’t need to download and deploy the ELSER model upfront, the API request
## Create the index mapping
-```{include} snippets/index-mapping.md
+```{include} _snippets/index-mapping.md
```
```{code-block} bash
diff --git a/docs/source/markup/snippets/my_snippet.md b/docs/source/markup/_snippets/my_snippet.md
similarity index 100%
rename from docs/source/markup/snippets/my_snippet.md
rename to docs/source/markup/_snippets/my_snippet.md
diff --git a/docs/source/markup/code.md b/docs/source/markup/code.md
index 338c2a5..d3cf2e5 100644
--- a/docs/source/markup/code.md
+++ b/docs/source/markup/code.md
@@ -42,14 +42,6 @@ project:
subject: MyST Markdown
```
-```{sidebar}
-This code is very helpful.
-
-It does lots of things.
-
-But it does not sing.
-```
-
```{code-block} python
:caption: Code blocks can also have sidebars.
:linenos:
diff --git a/docs/source/markup/diagrams.md b/docs/source/markup/diagrams.md
deleted file mode 100644
index cf5aa1e..0000000
--- a/docs/source/markup/diagrams.md
+++ /dev/null
@@ -1,36 +0,0 @@
----
-title: Diagrams
----
-
-You can add Mermaid diagrams, using `mermaid` extension.
-
-## Flowchart
-
-```{mermaid}
-flowchart LR
- A[Jupyter Notebook] --> C
- B[MyST Markdown] --> C
- C(mystmd) --> D{AST}
- D <--> E[LaTeX]
- E --> F[PDF]
- D --> G[Word]
- D --> H[React]
- D --> I[HTML]
- D <--> J[JATS]
-```
-
-## Sequence Diagram
-
-```{mermaid}
-sequenceDiagram
- participant Alice
- participant Bob
- Alice->John: Hello John, how are you?
- loop Healthcheck
- John->John: Fight against hypochondria
- end
-    Note right of John: Rational thoughts prevail...
- John-->Alice: Great!
- John->Bob: How about you?
- Bob-->John: Jolly good!
-```
diff --git a/docs/source/markup/file_inclusion.md b/docs/source/markup/file_inclusion.md
index f5bb3d5..5eb4ebb 100644
--- a/docs/source/markup/file_inclusion.md
+++ b/docs/source/markup/file_inclusion.md
@@ -11,5 +11,5 @@ The rest of this page is from a snippet and "{{page_title}}" below is taken from
## Snippet
-```{include} snippets/my_snippet.md
+```{include} _snippets/my_snippet.md
```
diff --git a/src/Elastic.Markdown/Elastic.Markdown.csproj b/src/Elastic.Markdown/Elastic.Markdown.csproj
index bc3e753..50d6e2a 100644
--- a/src/Elastic.Markdown/Elastic.Markdown.csproj
+++ b/src/Elastic.Markdown/Elastic.Markdown.csproj
@@ -15,6 +15,7 @@
+
diff --git a/src/Elastic.Markdown/IO/ConfigurationFile.cs b/src/Elastic.Markdown/IO/ConfigurationFile.cs
index 245fef9..4a05e18 100644
--- a/src/Elastic.Markdown/IO/ConfigurationFile.cs
+++ b/src/Elastic.Markdown/IO/ConfigurationFile.cs
@@ -2,11 +2,9 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
-using System.Data.SqlTypes;
using System.IO.Abstractions;
-using Cysharp.IO;
+using DotNet.Globbing;
using Elastic.Markdown.Diagnostics;
-using Markdig.Helpers;
using YamlDotNet.Core;
using YamlDotNet.RepresentationModel;
@@ -15,16 +13,22 @@ namespace Elastic.Markdown.IO;
public class ConfigurationFile : DocumentationFile
{
private readonly IFileInfo _sourceFile;
+ private readonly IDirectoryInfo _rootPath;
private readonly BuildContext _context;
public string? Project { get; }
- public string[] Exclude { get; } = [];
+ public Glob[] Exclude { get; } = [];
public IReadOnlyCollection<ITocItem> TableOfContents { get; } = [];
+ public HashSet<string> Files { get; } = new(StringComparer.OrdinalIgnoreCase);
+ public HashSet<string> Folders { get; } = new(StringComparer.OrdinalIgnoreCase);
+ public Glob[] Globs { get; } = [];
+
public ConfigurationFile(IFileInfo sourceFile, IDirectoryInfo rootPath, BuildContext context)
: base(sourceFile, rootPath)
{
_sourceFile = sourceFile;
+ _rootPath = rootPath;
_context = context;
if (!sourceFile.Exists)
{
@@ -54,10 +58,12 @@ public ConfigurationFile(IFileInfo sourceFile, IDirectoryInfo rootPath, BuildCon
Project = ReadString(entry);
break;
case "exclude":
- Exclude = ReadStringArray(entry) ?? [];
+ Exclude = ReadStringArray(entry)
+ .Select(Glob.Parse)
+ .ToArray();
break;
case "toc":
- var entries = ReadChildren(entry);
+ var entries = ReadChildren(entry, string.Empty);
TableOfContents = entries;
break;
@@ -66,10 +72,10 @@ public ConfigurationFile(IFileInfo sourceFile, IDirectoryInfo rootPath, BuildCon
break;
}
}
-
+ Globs = Folders.Select(f=> Glob.Parse($"{f}/*.md")).ToArray();
}
- private List<ITocItem> ReadChildren(KeyValuePair<YamlNode, YamlNode> entry)
+ private List<ITocItem> ReadChildren(KeyValuePair<YamlNode, YamlNode> entry, string parentPath)
{
var entries = new List<ITocItem>();
if (entry.Value is not YamlSequenceNode sequence)
@@ -81,7 +87,7 @@ private List<ITocItem> ReadChildren(KeyValuePair<YamlNode, YamlNode> entry)
foreach (var tocEntry in sequence.Children.OfType<YamlMappingNode>())
{
- var tocItem = ReadChild(tocEntry);
+ var tocItem = ReadChild(tocEntry, parentPath);
if (tocItem is not null)
entries.Add(tocItem);
}
@@ -89,10 +95,11 @@ private List<ITocItem> ReadChildren(KeyValuePair<YamlNode, YamlNode> entry)
return entries;
}
- private ITocItem? ReadChild(YamlMappingNode tocEntry)
+ private ITocItem? ReadChild(YamlMappingNode tocEntry, string parentPath)
{
string? file = null;
string? folder = null;
+ var found = false;
IReadOnlyCollection<ITocItem>? children = null;
foreach (var entry in tocEntry.Children)
{
@@ -100,25 +107,46 @@ private List<ITocItem> ReadChildren(KeyValuePair<YamlNode, YamlNode> entry)
switch (key)
{
case "file":
- file = ReadString(entry);
+ file = ReadFile(entry, parentPath);
break;
case "folder":
folder = ReadString(entry);
+ parentPath += $"/{folder}";
break;
case "children":
- children = ReadChildren(entry);
+ children = ReadChildren(entry, parentPath);
break;
}
}
if (file is not null)
- return new TocFile(file, children);
+ return new TocFile(file, found, children ?? []);
+
if (folder is not null)
- return new TocFile(folder, children);
+ {
+ if (children is null)
+ Folders.Add(parentPath.TrimStart('/'));
+
+ return new TocFolder(folder, children ?? []);
+ }
return null;
}
+ private string? ReadFile(KeyValuePair<YamlNode, YamlNode> entry, string parentPath)
+ {
+ var file = ReadString(entry);
+ if (file is not null)
+ {
+ var path = Path.Combine(_rootPath.FullName, parentPath.TrimStart('/'), file);
+ if (!_context.ReadFileSystem.FileInfo.New(path).Exists)
+ EmitError($"File '{path}' does not exist", entry.Key);
+ }
+
+ Files.Add((parentPath + "/" + file).TrimStart('/'));
+ return file;
+ }
+
private string? ReadString(KeyValuePair<YamlNode, YamlNode> entry)
{
if (entry.Value is YamlScalarNode scalar)
@@ -129,27 +157,31 @@ private List<ITocItem> ReadChildren(KeyValuePair<YamlNode, YamlNode> entry)
EmitError($"'{key}' is not a string", entry.Key);
return null;
}
- private string[]? ReadStringArray(KeyValuePair<YamlNode, YamlNode> entry)
+
+ private string[] ReadStringArray(KeyValuePair<YamlNode, YamlNode> entry)
{
var values = new List<string>();
- if (entry.Value is YamlSequenceNode sequence)
- {
- foreach (var entryValue in sequence.Children.OfType<YamlScalarNode>())
- {
- if (entryValue.Value is not null)
- values.Add(entryValue.Value);
- }
+ if (entry.Value is not YamlSequenceNode sequence)
+ return values.ToArray();
+ foreach (var entryValue in sequence.Children.OfType<YamlScalarNode>())
+ {
+ if (entryValue.Value is not null)
+ values.Add(entryValue.Value);
}
+
return values.ToArray();
}
- public void EmitError(string message, YamlNode? node) => EmitError(message, node?.Start, node?.End);
+ private void EmitError(string message, YamlNode? node) =>
+ EmitError(message, node?.Start, node?.End, (node as YamlScalarNode)?.Value?.Length);
- public void EmitWarning(string message, YamlNode? node) => EmitError(message, node?.Start, node?.End);
+ private void EmitWarning(string message, YamlNode? node) =>
+ EmitWarning(message, node?.Start, node?.End, (node as YamlScalarNode)?.Value?.Length);
- public void EmitError(string message, Mark? start = null, Mark? end = null)
+ private void EmitError(string message, Mark? start = null, Mark? end = null, int? length = null)
{
+ length ??= start.HasValue && end.HasValue ? (int)start.Value.Column - (int)end.Value.Column : null;
var d = new Diagnostic
{
Severity = Severity.Error,
@@ -157,13 +189,14 @@ public void EmitError(string message, Mark? start = null, Mark? end = null)
Message = message,
Line = start.HasValue ? (int)start.Value.Line : null,
Column = start.HasValue ? (int)start.Value.Column : null,
- Length = start.HasValue && end.HasValue ? (int)start.Value.Column - (int)end.Value.Column : null
+ Length = length
};
_context.Collector.Channel.Write(d);
}
- public void EmitWarning(string message, Mark? start = null, Mark? end = null)
+ private void EmitWarning(string message, Mark? start = null, Mark? end = null, int? length = null)
{
+ length ??= start.HasValue && end.HasValue ? (int)start.Value.Column - (int)end.Value.Column : null;
var d = new Diagnostic
{
Severity = Severity.Warning,
@@ -171,7 +204,7 @@ public void EmitWarning(string message, Mark? start = null, Mark? end = null)
Message = message,
Line = start.HasValue ? (int)start.Value.Line : null,
Column = start.HasValue ? (int)start.Value.Column : null,
- Length = start.HasValue && end.HasValue ? (int)start.Value.Column - (int)end.Value.Column : null
+ Length = length
};
_context.Collector.Channel.Write(d);
}
@@ -179,9 +212,9 @@ public void EmitWarning(string message, Mark? start = null, Mark? end = null)
public interface ITocItem;
-public record TocFile(string Path, IReadOnlyCollection<ITocItem>? Children) : ITocItem;
+public record TocFile(string Path, bool Found, IReadOnlyCollection<ITocItem> Children) : ITocItem;
-public record TocFolder(string Path, IReadOnlyCollection<ITocItem>? Children) : ITocItem;
+public record TocFolder(string Path, IReadOnlyCollection<ITocItem> Children) : ITocItem;
/*
diff --git a/src/Elastic.Markdown/IO/DocumentationFile.cs b/src/Elastic.Markdown/IO/DocumentationFile.cs
index 8a339cc..7486bb9 100644
--- a/src/Elastic.Markdown/IO/DocumentationFile.cs
+++ b/src/Elastic.Markdown/IO/DocumentationFile.cs
@@ -22,3 +22,6 @@ public class ImageFile(IFileInfo sourceFile, IDirectoryInfo rootPath, string mim
public class StaticFile(IFileInfo sourceFile, IDirectoryInfo rootPath)
: DocumentationFile(sourceFile, rootPath);
+
+public class ExcludedFile(IFileInfo sourceFile, IDirectoryInfo rootPath)
+ : DocumentationFile(sourceFile, rootPath);
diff --git a/src/Elastic.Markdown/IO/DocumentationSet.cs b/src/Elastic.Markdown/IO/DocumentationSet.cs
index b3422fd..4f66ed2 100644
--- a/src/Elastic.Markdown/IO/DocumentationSet.cs
+++ b/src/Elastic.Markdown/IO/DocumentationSet.cs
@@ -34,17 +34,20 @@ public DocumentationSet(IDirectoryInfo? sourcePath, IDirectoryInfo? outputPath,
var configurationFile = context.ReadFileSystem.FileInfo.New(Path.Combine(SourcePath.FullName, "docset.yml"));
Configuration = new ConfigurationFile(configurationFile, SourcePath, context);
- Files = context.ReadFileSystem.Directory.EnumerateFiles(SourcePath.FullName, "*.*", SearchOption.AllDirectories)
+ Files = context.ReadFileSystem.Directory
+ .EnumerateFiles(SourcePath.FullName, "*.*", SearchOption.AllDirectories)
.Select(f => context.ReadFileSystem.FileInfo.New(f))
.Select(file => file.Extension switch
{
".svg" => new ImageFile(file, SourcePath, "image/svg+xml"),
".png" => new ImageFile(file, SourcePath),
- ".md" => new MarkdownFile(file, SourcePath, MarkdownParser, context),
+ ".md" => CreateMarkDownFile(file, context),
_ => new StaticFile(file, SourcePath)
})
+
.ToList();
+
LastWrite = Files.Max(f => f.SourceFile.LastWriteTimeUtc);
FlatMappedFiles = Files.ToDictionary(file => file.RelativePath, file => file);
@@ -61,6 +64,25 @@ public DocumentationSet(IDirectoryInfo? sourcePath, IDirectoryInfo? outputPath,
Tree = new DocumentationFolder(markdownFiles, 0, "");
}
+ private DocumentationFile CreateMarkDownFile(IFileInfo file, BuildContext context)
+ {
+ if (Configuration.Exclude.Any(g => g.IsMatch(file.Name)))
+ return new ExcludedFile(file, SourcePath);
+
+ var relativePath = Path.GetRelativePath(SourcePath.FullName, file.FullName);
+ if (Configuration.Files.Contains(relativePath))
+ return new MarkdownFile(file, SourcePath, MarkdownParser, context);
+
+ if (Configuration.Globs.Any(g => g.IsMatch(relativePath)))
+ return new MarkdownFile(file, SourcePath, MarkdownParser, context);
+
+ if (relativePath.IndexOf("/_", StringComparison.Ordinal) > 0 || relativePath.StartsWith("_"))
+ return new ExcludedFile(file, SourcePath);
+
+ context.EmitError(Configuration.SourceFile, $"Not linked in toc: {relativePath}");
+ return new ExcludedFile(file, SourcePath);
+ }
+
public DocumentationFolder Tree { get; }
public List<DocumentationFile> Files { get; }
diff --git a/src/docs-builder/Diagnostics/ErrorCollector.cs b/src/docs-builder/Diagnostics/ErrorCollector.cs
index d10a13a..a4b4f5e 100644
--- a/src/docs-builder/Diagnostics/ErrorCollector.cs
+++ b/src/docs-builder/Diagnostics/ErrorCollector.cs
@@ -57,23 +57,26 @@ public class ConsoleDiagnosticsCollector(ILoggerFactory loggerFactory, ICoreServ
public override async Task StopAsync(Cancel ctx)
{
- _logger.LogError("Stopping...");
- // Create a new report
var report = new Report(new FileSourceRepository());
foreach (var item in _items)
{
var d = item.Severity switch
{
- Severity.Error =>
- Errata.Diagnostic.Error(item.Message)
- .WithLabel(new Label(item.File, new Location(item.Line ?? 0, item.Column ?? 0), "bad substitution")
- .WithLength(item.Length ?? 3)
- .WithPriority(1)
- .WithColor(Color.Red)),
- Severity.Warning =>
- Errata.Diagnostic.Warning(item.Message),
+ Severity.Error => Errata.Diagnostic.Error(item.Message),
+ Severity.Warning => Errata.Diagnostic.Warning(item.Message),
_ => Errata.Diagnostic.Info(item.Message)
};
+ if (item is { Line: not null, Column: not null })
+ {
+ var location = new Location(item.Line ?? 0, item.Column ?? 0);
+ d = d.WithLabel(new Label(item.File, location, "")
+ .WithLength(item.Length == null ? 1 : Math.Clamp(item.Length.Value, 1, item.Length.Value + 3))
+ .WithPriority(1)
+ .WithColor(item.Severity == Severity.Error ? Color.Red : Color.Blue));
+ }
+ else
+ d = d.WithNote(item.File);
+
report.AddDiagnostic(d);
}
diff --git a/tests/Elastic.Markdown.Tests/FileInclusion/IncludeTests.cs b/tests/Elastic.Markdown.Tests/FileInclusion/IncludeTests.cs
index cb160c6..0c46850 100644
--- a/tests/Elastic.Markdown.Tests/FileInclusion/IncludeTests.cs
+++ b/tests/Elastic.Markdown.Tests/FileInclusion/IncludeTests.cs
@@ -11,7 +11,7 @@ namespace Elastic.Markdown.Tests.FileInclusion;
public class IncludeTests(ITestOutputHelper output) : DirectiveTest(output,
"""
-```{include} snippets/test.md
+```{include} _snippets/test.md
```
"""
)
@@ -20,7 +20,7 @@ public override Task InitializeAsync()
{
// language=markdown
var inclusion = "*Hello world*";
- FileSystem.AddFile(@"docs/source/snippets/test.md", inclusion);
+ FileSystem.AddFile(@"docs/source/_snippets/test.md", inclusion);
return base.InitializeAsync();
}
@@ -43,7 +43,7 @@ public class IncludeSubstitutionTests(ITestOutputHelper output) : DirectiveTest<
sub:
foo: "bar"
---
-```{include} snippets/test.md
+```{include} _snippets/test.md
```
"""
)
@@ -52,7 +52,7 @@ public override Task InitializeAsync()
{
// language=markdown
var inclusion = "*Hello {{foo}}*";
- FileSystem.AddFile(@"docs/source/snippets/test.md", inclusion);
+ FileSystem.AddFile(@"docs/source/_snippets/test.md", inclusion);
return base.InitializeAsync();
}
diff --git a/tests/Elastic.Markdown.Tests/FileInclusion/LiteralIncludeTests.cs b/tests/Elastic.Markdown.Tests/FileInclusion/LiteralIncludeTests.cs
index af3f2f9..816ba79 100644
--- a/tests/Elastic.Markdown.Tests/FileInclusion/LiteralIncludeTests.cs
+++ b/tests/Elastic.Markdown.Tests/FileInclusion/LiteralIncludeTests.cs
@@ -11,7 +11,7 @@ namespace Elastic.Markdown.Tests.FileInclusion;
public class LiteralIncludeUsingPropertyTests(ITestOutputHelper output) : DirectiveTest(output,
"""
-```{include} snippets/test.txt
+```{include} _snippets/test.txt
:literal: true
```
"""
@@ -21,7 +21,7 @@ public override Task InitializeAsync()
{
// language=markdown
var inclusion = "*Hello world*";
- FileSystem.AddFile(@"docs/source/snippets/test.txt", inclusion);
+ FileSystem.AddFile(@"docs/source/_snippets/test.txt", inclusion);
return base.InitializeAsync();
}
@@ -38,7 +38,7 @@ public void IncludesInclusionHtml() =>
public class LiteralIncludeTests(ITestOutputHelper output) : DirectiveTest(output,
"""
-```{literalinclude} snippets/test.md
+```{literalinclude} _snippets/test.md
```
"""
)
@@ -47,7 +47,7 @@ public override Task InitializeAsync()
{
// language=markdown
var inclusion = "*Hello world*";
- FileSystem.AddFile(@"docs/source/snippets/test.md", inclusion);
+ FileSystem.AddFile(@"docs/source/_snippets/test.md", inclusion);
return base.InitializeAsync();
}
diff --git a/tests/Elastic.Markdown.Tests/SiteMap/NavigationTests.cs b/tests/Elastic.Markdown.Tests/SiteMap/NavigationTests.cs
index 67972e1..fe30d3a 100644
--- a/tests/Elastic.Markdown.Tests/SiteMap/NavigationTests.cs
+++ b/tests/Elastic.Markdown.Tests/SiteMap/NavigationTests.cs
@@ -1,45 +1,29 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
-using System.IO.Abstractions;
-using System.IO.Abstractions.TestingHelpers;
-using Elastic.Markdown.Diagnostics;
-using Elastic.Markdown.IO;
+
using FluentAssertions;
-using Microsoft.Extensions.Logging.Abstractions;
using Xunit.Abstractions;
namespace Elastic.Markdown.Tests.SiteMap;
-public class NavigationTests(ITestOutputHelper output)
+public class NavigationTests(ITestOutputHelper output) : NavigationTestsBase(output)
{
[Fact]
- public async Task CreatesDefaultOutputDirectory()
- {
- var logger = new TestLoggerFactory(output);
- var readFs = new FileSystem(); //use real IO to read docs.
- var writeFs = new MockFileSystem(new MockFileSystemOptions //use in memory mock fs to test generation
- {
- CurrentDirectory = Paths.Root.FullName
- });
- var context = new BuildContext
- {
- Force = false,
- UrlPathPrefix = null,
- ReadFileSystem = readFs,
- WriteFileSystem = writeFs,
- Collector = new DiagnosticsCollector(logger, [])
- };
-
- var set = new DocumentationSet(context);
-
- set.Files.Should().HaveCountGreaterThan(10);
- var generator = new DocumentationGenerator(set, context, logger);
-
- await generator.GenerateAll(default);
-
- var configuration = generator.DocumentationSet.Configuration;
- configuration.TableOfContents.Should().NotBeNullOrEmpty();
+ public void ParsesATableOfContents() =>
+ Configuration.TableOfContents.Should().NotBeNullOrEmpty();
+ [Fact]
+ public void ParsesNestedFoldersAndPrefixesPaths()
+ {
+ Configuration.Folders.Should().NotBeNullOrEmpty();
+ Configuration.Folders.Should()
+ .Contain("markup")
+ .And.Contain("elastic/observability");
}
+ [Fact]
+ public void ParsesFilesAndPrefixesPaths() =>
+ Configuration.Files.Should()
+ .Contain("index.md")
+ .And.Contain("elastic/search-labs/search/req.md");
}
diff --git a/tests/Elastic.Markdown.Tests/SiteMap/NavigationTestsBase.cs b/tests/Elastic.Markdown.Tests/SiteMap/NavigationTestsBase.cs
new file mode 100644
index 0000000..e7bf366
--- /dev/null
+++ b/tests/Elastic.Markdown.Tests/SiteMap/NavigationTestsBase.cs
@@ -0,0 +1,51 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.IO.Abstractions;
+using System.IO.Abstractions.TestingHelpers;
+using Elastic.Markdown.Diagnostics;
+using Elastic.Markdown.IO;
+using FluentAssertions;
+using Xunit.Abstractions;
+
+namespace Elastic.Markdown.Tests.SiteMap;
+
+public class NavigationTestsBase : IAsyncLifetime
+{
+ protected NavigationTestsBase(ITestOutputHelper output)
+ {
+ var logger = new TestLoggerFactory(output);
+ var readFs = new FileSystem(); //use real IO to read docs.
+ var writeFs = new MockFileSystem(new MockFileSystemOptions //use in memory mock fs to test generation
+ {
+ CurrentDirectory = Paths.Root.FullName
+ });
+ var context = new BuildContext
+ {
+ Force = false,
+ UrlPathPrefix = null,
+ ReadFileSystem = readFs,
+ WriteFileSystem = writeFs,
+ Collector = new DiagnosticsCollector(logger, [])
+ };
+
+ var set = new DocumentationSet(context);
+
+ set.Files.Should().HaveCountGreaterThan(10);
+ Generator = new DocumentationGenerator(set, context, logger);
+
+ }
+
+ public DocumentationGenerator Generator { get; }
+
+ public ConfigurationFile Configuration { get; set; } = default!;
+
+ public async Task InitializeAsync()
+ {
+ await Generator.GenerateAll(default);
+ Configuration = Generator.DocumentationSet.Configuration;
+ }
+
+ public Task DisposeAsync() => Task.CompletedTask;
+}