6 changes: 5 additions & 1 deletion .gitignore
@@ -1 +1,5 @@
.obsidian/
.obsidian/
.venv/
site/
__pycache__/

26 changes: 26 additions & 0 deletions README.md
@@ -34,6 +34,32 @@ An `interactive_graph.js` example can be downloaded from [here](https://raw.githu

Beginning with version `0.3.0`, the default graph inside the sidebar is minimized to show only edges related to the current page. The previous behavior can be restored by setting `global` to `true` in the call `draw_graph_sidebar(myChart, global=false)` at the top of the JavaScript file.

# Development
## Testing
To contribute to the project, please ensure all tests pass. We use `pytest` for unit and integration testing.

1. **Install Dependencies:**
Install the package in editable mode along with testing requirements:
```bash
pip install -e .
pip install pytest pytest-benchmark
```

2. **Run Tests:**
Run the full suite from the root directory:
```bash
pytest
```

You can also run specific categories of tests:
```bash
# Run only integration tests (full build cycle)
pytest tests/test_integration.py

# Benchmark performance
pytest tests/test_performance.py
```
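
`pytest-benchmark` can also persist benchmark results for comparison across runs; a hedged example using its standard flags:
```bash
# Run only the benchmarks and save results for later comparison
pytest tests/test_performance.py --benchmark-only --benchmark-autosave
```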

# Docker
Adapt the `.env` and `mkdocs.yml` files to your needs. `DEV=ON` rebuilds the `mkdocs-obsidian-interactive-graph-plugin` from local files; if `DEV != ON`, the upstream packages from PyPI are used instead. Build and start the Docker container via `docker compose up --build [-d]`.
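
A minimal `.env` sketch; `DEV` is the only variable documented above, so anything beyond it would be hypothetical:
```bash
# .env: DEV=ON rebuilds the plugin from local sources; any other value pulls from PyPI
DEV=ON
```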

166 changes: 166 additions & 0 deletions tests/test_integration.py
@@ -0,0 +1,166 @@
# Standard libraries
import json
import textwrap
from pathlib import Path
from typing import Any, Dict

# Third party libraries
import pytest
from mkdocs.commands.build import build
from mkdocs.config import load_config


def validate_graph_match(
generated_data: Dict[str, Any], reference_data: Dict[str, Any]
):
"""
Compare the generated graph artifact against a strict reference dataset.

This helper asserts that:
1. Node counts match.
    2. Node properties (symbolSize, URL value) match exactly.
    3. The topology (links) matches, keyed by node names for stability instead of internal IDs.

Args:
generated_data: The actual content parsed from graph.json.
reference_data: Reference dict using node names for link definitions.
"""
gen_nodes, ref_nodes = (
generated_data.get("nodes", []),
reference_data.get("nodes", []),
)
gen_links, ref_links = (
generated_data.get("links", []),
reference_data.get("links", []),
)

assert len(gen_nodes) == len(ref_nodes), (
f"Mismatch in # of nodes. Expected {len(ref_nodes)}, got {len(gen_nodes)}"
)
assert len(gen_links) == len(ref_links), (
f"Mismatch in # of links. Expected {len(ref_links)}, got {len(gen_links)}"
)

    # Create a lookup map for the generated nodes to simplify validation
gen_name_to_node = {n["name"]: n for n in gen_nodes}

# Validate Node Properties
for ref_node in ref_nodes:
name = ref_node["name"]
assert name in gen_name_to_node, f"Missing expected node: {name}"

gen_node = gen_name_to_node[name]

assert gen_node["symbolSize"] == ref_node["symbolSize"], (
f"Node '{name}' symbolSize mismatch. "
f"Expected {ref_node['symbolSize']}, got {gen_node['symbolSize']}"
)

assert gen_node["value"] == ref_node["value"], (
f"Node '{name}' value (URL) mismatch. "
f"Expected {ref_node['value']}, got {gen_node['value']}"
)

# Validate Topology
# We map the generated ID-based links to a set of tuples for O(1) lookups.
actual_connections = set()
for link in gen_links:
actual_connections.add((link["source"], link["target"]))

for ref_link in ref_links:
ref_src_name = ref_link["source"]
ref_tgt_name = ref_link["target"]

# Ensure reference names exist in the generated map before proceeding
if ref_src_name not in gen_name_to_node or ref_tgt_name not in gen_name_to_node:
pytest.fail(
f"Reference link uses unknown nodes: {ref_src_name} -> {ref_tgt_name}"
)

src_id = gen_name_to_node[ref_src_name]["id"]
tgt_id = gen_name_to_node[ref_tgt_name]["id"]

if (src_id, tgt_id) not in actual_connections:
pytest.fail(f"Missing expected link: {ref_src_name} -> {ref_tgt_name}")


def test_build_reproduces_demo_topology(tmp_path: Path):
"""
Build the full demo site and assert the output matches our reference topology.

This ensures the plugin correctly calculates symbol sizes (based on connection density),
resolves internal wikilinks, and generates the valid JSON structure expected by the frontend.
"""
# 1. Scaffold the demo file structure
docs_dir = tmp_path / "docs"
docs_dir.mkdir()
(docs_dir / "Usage").mkdir()

(docs_dir / "index.md").write_text(
textwrap.dedent("""
# Welcome
- [[Concept]]
- [[Docker]]
- [[Installation]]
- [[References]]
- [[Usage/ECharts]]
- [[Setup]]
""")
)
(docs_dir / "Concept.md").write_text("# Concept\nSee [[Usage/ECharts]]")
(docs_dir / "Docker.md").write_text("# Docker")
(docs_dir / "Installation.md").write_text("# Installation")
(docs_dir / "References.md").write_text("# References")
(docs_dir / "Usage/ECharts.md").write_text("# ECharts")
(docs_dir / "Usage/Setup.md").write_text("# Setup")

# 2. Configure MkDocs
# Note: We enforce a specific site_url to make 'value' properties deterministic.
config_file = tmp_path / "mkdocs.yml"
config_content = textwrap.dedent(
f"""
site_name: Interactive Graph Demo
site_url: https://example.com/
docs_dir: {docs_dir}
site_dir: {tmp_path / "site"}
plugins:
- obsidian-interactive-graph
"""
)
config_file.write_text(config_content)

# 3. Execution
cfg = load_config(str(config_file))
build(cfg)

# 4. Verification
json_path = tmp_path / "site/assets/javascripts/graph.json"
assert json_path.exists()

with open(json_path) as f:
actual_graph = json.load(f)

# Reference data mirroring the expected "Interactive Graph Demo" structure.
    # symbolSize = number of incoming links + number of outgoing links.
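    #   e.g. "Welcome" has 6 outgoing and 0 incoming links -> symbolSize 6;
    #        "Concept" has 1 incoming (Welcome) + 1 outgoing (ECharts) -> symbolSize 2.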
reference_graph = {
"nodes": [
{"name": "Welcome", "symbolSize": 6, "value": "/"},
{"name": "Concept", "symbolSize": 2, "value": "/Concept/"},
{"name": "Docker", "symbolSize": 1, "value": "/Docker/"},
{"name": "Installation", "symbolSize": 1, "value": "/Installation/"},
{"name": "References", "symbolSize": 1, "value": "/References/"},
{"name": "ECharts", "symbolSize": 2, "value": "/Usage/ECharts/"},
{"name": "Setup", "symbolSize": 1, "value": "/Usage/Setup/"},
],
"links": [
{"source": "Welcome", "target": "Concept"},
{"source": "Welcome", "target": "Docker"},
{"source": "Welcome", "target": "Installation"},
{"source": "Welcome", "target": "References"},
{"source": "Welcome", "target": "ECharts"},
{"source": "Welcome", "target": "Setup"},
{"source": "Concept", "target": "ECharts"},
],
}

validate_graph_match(generated_data=actual_graph, reference_data=reference_graph)
49 changes: 49 additions & 0 deletions tests/test_performance.py
@@ -0,0 +1,49 @@
# Standard libraries
import textwrap
from pathlib import Path

# Third party libraries
from mkdocs.commands.build import build
from mkdocs.config import load_config
from pytest_benchmark.fixture import BenchmarkFixture


def test_build_performance(benchmark: BenchmarkFixture, tmp_path: Path) -> None:
"""
Measure build latency for a synthetic site with dense circular linking.

This test constructs a 100-page vault where every page links to the next,
forcing the plugin to perform path resolution and regex parsing at scale.
It serves as a regression test for file I/O and graph construction performance.

Args:
benchmark: Pytest-benchmark fixture to record execution stats.
tmp_path: Pytest fixture providing an isolated temporary directory.
"""
docs_dir = tmp_path / "docs"
docs_dir.mkdir()

# Generate 100 pages with circular dependencies (0 -> 1 ... 99 -> 0)
for i in range(100):
target = i + 1 if i < 99 else 0
(docs_dir / f"page_{i}.md").write_text(
f"# Page {i}\n\nLink to [[page_{target}]]"
)

config_file = tmp_path / "mkdocs.yml"
config_content = textwrap.dedent(
f"""
site_name: PerfTest
docs_dir: {docs_dir}
site_dir: {tmp_path / "site"}
plugins:
- obsidian-interactive-graph
"""
)
config_file.write_text(config_content)

def run_build():
cfg = load_config(str(config_file))
build(cfg)

benchmark(run_build)