#!/usr/bin/env bash
|
|
#
|
|
# Outline Sync — Automated Test Suite
|
|
# Phase 1: TEST-1.1 through TEST-1.11
|
|
#
|
|
# Usage:
|
|
# ./sync_tests.sh Run all Phase 1 tests
|
|
# ./sync_tests.sh --phase 1 Explicit phase selection
|
|
# ./sync_tests.sh --keep Keep test vault on failure (for debugging)
|
|
# ./sync_tests.sh -v Verbose — show sync.sh output
|
|
#
|
|
# Requires: git, docker, jq, python3 (for local JSON parsing)
|
|
#
|
|
# The test creates a dedicated collection in Outline (named _sync_test_<timestamp>),
|
|
# runs sync init into a temp vault, checks all assertions, then cleans up.
|
|
#
|
|
|
|
set -uo pipefail # No -e: we capture failures ourselves

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SETTINGS="$SCRIPT_DIR/settings.json"

# ── Test state ────────────────────────────────────────────────────────────────

PASS=0
FAIL=0
TOTAL=0
FAILED_TESTS=()

TEST_TS="$(date +%Y%m%d_%H%M%S)"
TEST_VAULT="/tmp/outline-sync-test-$$"
TEST_COLLECTION_NAME="_sync_test_${TEST_TS}"

# IDs populated by setup_test_data()
TEST_COLLECTION_ID=""
TEST_DOC_ROOT_ID=""        # "RootDoc One" — leaf at collection root
TEST_DOC_PARENT_ID=""      # "Parent Doc"  — has children
TEST_DOC_CHILD1_ID=""      # "Child One"   — has grandchild
TEST_DOC_CHILD2_ID=""      # "Child Two"   — leaf under Parent Doc
TEST_DOC_GRANDCHILD_ID=""  # "Grandchild"  — leaf under Child One
# ^ BUG FIX: the original had no space before '#', so the variable was
#   actually assigned the string '#' and the rest of the line ran as a
#   (nonexistent) command.

# ── CLI flags ─────────────────────────────────────────────────────────────────

PHASE_FILTER=""
KEEP_ON_FAIL=0
VERBOSE=0

while [[ $# -gt 0 ]]; do
  case "$1" in
    # ${2:?…} aborts with a clear message if --phase is given without a
    # value (plain "$2" would die with a cryptic unbound-variable error
    # under set -u).
    --phase) PHASE_FILTER="${2:?--phase requires an argument}"; shift 2 ;;
    --keep) KEEP_ON_FAIL=1; shift ;;
    -v|--verbose) VERBOSE=1; shift ;;
    *) shift ;; # unknown arguments are silently ignored
  esac
done

# ── Colours ───────────────────────────────────────────────────────────────────

GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
|
|
|
|
# ── Assertion helpers ─────────────────────────────────────────────────────────
|
|
|
|
_assert() {
  # Record one test result and print it.
  #   $1 - test name, $2 - "pass" or anything else (= fail),
  #   $3 - optional failure detail shown under the test name.
  # Updates the global PASS / FAIL / TOTAL counters and FAILED_TESTS list.
  local name="$1" outcome="$2" detail="${3:-}"
  TOTAL=$(( TOTAL + 1 ))
  case "$outcome" in
    pass)
      PASS=$(( PASS + 1 ))
      echo -e " ${GREEN}✓${NC} $name"
      ;;
    *)
      FAIL=$(( FAIL + 1 ))
      echo -e " ${RED}✗${NC} $name"
      if [[ -n "$detail" ]]; then
        echo -e " ${RED}↳ $detail${NC}"
      fi
      FAILED_TESTS+=("$name")
      ;;
  esac
}
|
|
|
|
assert_dir() {
  # assert_dir <name> <path> — pass iff <path> is an existing directory.
  # Uses explicit if/else rather than `cond && pass || fail` (SC2015):
  # with the short-circuit form, a non-zero return from the pass branch
  # would wrongly run the fail branch as well.
  local name="$1" path="$2"
  if [[ -d "$path" ]]; then
    _assert "$name" "pass"
  else
    _assert "$name" "fail" "Directory not found: $path"
  fi
}
|
|
|
|
assert_file() {
  # assert_file <name> <path> — pass iff <path> is an existing regular file.
  # Explicit if/else avoids the `cond && pass || fail` pitfall (SC2015).
  local name="$1" path="$2"
  if [[ -f "$path" ]]; then
    _assert "$name" "pass"
  else
    _assert "$name" "fail" "File not found: $path"
  fi
}
|
|
|
|
assert_contains() {
  # assert_contains <name> <path> <pattern> — pass iff <path> exists and
  # contains a line matching basic-regex <pattern>.
  # `--` stops grep from treating a pattern starting with '-' as an option;
  # stderr is silenced so a missing file reads as a plain failure.
  local name="$1" path="$2" pattern="$3"
  if grep -q -- "$pattern" "$path" 2>/dev/null; then
    _assert "$name" "pass"
  else
    _assert "$name" "fail" "Pattern '$pattern' not found in $path"
  fi
}
|
|
|
|
assert_eq() {
  # assert_eq <name> <got> <want> — pass iff the two strings are identical.
  # Explicit if/else avoids the `cond && pass || fail` pitfall (SC2015).
  local name="$1" got="$2" want="$3"
  if [[ "$got" == "$want" ]]; then
    _assert "$name" "pass"
  else
    _assert "$name" "fail" "Expected '$want', got '$got'"
  fi
}
|
|
|
|
assert_nonzero_exit() {
  # assert_nonzero_exit <name> <exit_code> — pass iff <exit_code> != 0.
  # Explicit if/else avoids the `cond && pass || fail` pitfall (SC2015).
  local name="$1" code="$2"
  if [[ "$code" -ne 0 ]]; then
    _assert "$name" "pass"
  else
    _assert "$name" "fail" "Expected non-zero exit code, got 0"
  fi
}
|
|
|
|
# ── Docker API helper ─────────────────────────────────────────────────────────

# Read API credentials once, at script load time. Errors are deliberately
# silenced: if settings.json is missing or unreadable these end up empty
# (main() verifies the file exists before any API call is attempted).
# NOTE(review): `jq -r` yields the literal string "null" when the key is
# absent from the file — confirm settings.json always defines .source.url
# and .source.token.
_API_URL="$(jq -r '.source.url' "$SETTINGS" 2>/dev/null)"
_API_TOKEN="$(jq -r '.source.token' "$SETTINGS" 2>/dev/null)"
|
|
|
|
api_py() {
  # api_py [-v vault_path] <python_snippet>
  # Runs the snippet in a throwaway python:3.11-slim container attached to
  # the `domnet` Docker network, with OUTLINE_URL/OUTLINE_TOKEN exported and
  # a stdlib-only `api(endpoint, data)` POST helper pre-defined for it.
  # Optional -v mounts a vault directory read-only at /vault.
  local -a mount_args=()
  if [[ "${1:-}" == "-v" ]]; then
    # Build the mount option as an array so a vault path containing spaces
    # survives intact (the old string + unquoted expansion word-split it).
    mount_args=(-v "$2:/vault:ro")
    shift 2
  fi
  local code="$1"

  # ${arr[@]+...} makes the possibly-empty array expansion safe under
  # `set -u` on bash < 4.4.
  docker run --rm \
    --network domnet \
    ${mount_args[@]+"${mount_args[@]}"} \
    -e OUTLINE_URL="$_API_URL" \
    -e OUTLINE_TOKEN="$_API_TOKEN" \
    python:3.11-slim \
    python3 - <<PYEOF
import os, sys, json, glob
import urllib.request

url = os.environ["OUTLINE_URL"].rstrip("/")
token = os.environ["OUTLINE_TOKEN"]
headers = {
    "Authorization": f"Bearer {token}",
    "Content-Type": "application/json",
}

def api(endpoint, data=None):
    # POST JSON and decode the response. Uses stdlib urllib because
    # python:3.11-slim does NOT ship the third-party `requests` package —
    # the previous requests-based helper failed with ModuleNotFoundError.
    # urlopen raises urllib.error.HTTPError on a non-2xx status, matching
    # the old raise_for_status() behaviour.
    req = urllib.request.Request(
        f"{url}{endpoint}",
        data=json.dumps(data or {}).encode("utf-8"),
        headers=headers,
        method="POST",
    )
    with urllib.request.urlopen(req, timeout=30) as resp:
        return json.load(resp)

$code
PYEOF
}
|
|
|
|
# ── Setup: create test data in Outline ────────────────────────────────────────
|
|
|
|
setup_test_data() {
  # Create a throwaway fixture collection in Outline with a five-document
  # hierarchy:
  #   RootDoc One (leaf at root)
  #   Parent Doc → Child One → Grandchild
  #              → Child Two
  # Populates the TEST_*_ID globals from the JSON the helper prints, and
  # aborts the whole suite if the API round-trip did not yield valid JSON.
  echo
  echo -e "${BLUE}Creating test data in Outline...${NC}"

  local result
  result="$(api_py "
coll_name = '${TEST_COLLECTION_NAME}'

# Collection
coll = api('/api/collections.create', {
    'name': coll_name,
    'permission': 'read_write',
})
coll_id = coll['data']['id']

# Root leaf document
doc_root = api('/api/documents.create', {
    'collectionId': coll_id,
    'title': 'RootDoc One',
    'text': 'Root document one content.',
    'publish': True,
})
doc_root_id = doc_root['data']['id']

# Parent document (will have children)
doc_parent = api('/api/documents.create', {
    'collectionId': coll_id,
    'title': 'Parent Doc',
    'text': 'Parent document content.',
    'publish': True,
})
doc_parent_id = doc_parent['data']['id']

# Child One (will have grandchild)
doc_child1 = api('/api/documents.create', {
    'collectionId': coll_id,
    'title': 'Child One',
    'text': 'Child one content.',
    'publish': True,
    'parentDocumentId': doc_parent_id,
})
doc_child1_id = doc_child1['data']['id']

# Child Two (leaf)
doc_child2 = api('/api/documents.create', {
    'collectionId': coll_id,
    'title': 'Child Two',
    'text': 'Child two content.',
    'publish': True,
    'parentDocumentId': doc_parent_id,
})
doc_child2_id = doc_child2['data']['id']

# Grandchild (leaf under Child One)
doc_gc = api('/api/documents.create', {
    'collectionId': coll_id,
    'title': 'Grandchild',
    'text': 'Grandchild content.',
    'publish': True,
    'parentDocumentId': doc_child1_id,
})
doc_gc_id = doc_gc['data']['id']

print(json.dumps({
    'collection_id': coll_id,
    'doc_root_id': doc_root_id,
    'doc_parent_id': doc_parent_id,
    'doc_child1_id': doc_child1_id,
    'doc_child2_id': doc_child2_id,
    'doc_gc_id': doc_gc_id,
}))
")"

  # Fail fast if the container run produced nothing parseable — otherwise
  # every later test would fail with confusing empty-ID errors.
  if [[ -z "$result" ]] || ! jq -e . <<<"$result" >/dev/null 2>&1; then
    echo -e "${RED}✗ Failed to create test data in Outline (no valid JSON returned)${NC}" >&2
    exit 1
  fi

  # Parse the six IDs locally with jq (already a hard dependency of this
  # script) instead of spawning a python3 process per field.
  TEST_COLLECTION_ID="$(jq -r '.collection_id' <<<"$result")"
  TEST_DOC_ROOT_ID="$(jq -r '.doc_root_id' <<<"$result")"
  TEST_DOC_PARENT_ID="$(jq -r '.doc_parent_id' <<<"$result")"
  TEST_DOC_CHILD1_ID="$(jq -r '.doc_child1_id' <<<"$result")"
  TEST_DOC_CHILD2_ID="$(jq -r '.doc_child2_id' <<<"$result")"
  TEST_DOC_GRANDCHILD_ID="$(jq -r '.doc_gc_id' <<<"$result")"

  echo -e " ${GREEN}✓${NC} Test collection: $TEST_COLLECTION_NAME"
  echo -e " ${GREEN}✓${NC} 5 documents created (hierarchy: root, parent→child1→grandchild, child2)"
}
|
|
|
|
# ── Teardown ──────────────────────────────────────────────────────────────────
|
|
|
|
teardown() {
  # Delete the fixture collection from Outline and remove the temp vault.
  # Registered via `trap teardown EXIT`, so it runs on every exit path
  # (success, failure, Ctrl-C). On failure with --keep, the vault is
  # preserved for inspection.
  echo
  echo -e "${BLUE}Cleaning up...${NC}"

  if [[ -n "$TEST_COLLECTION_ID" ]]; then
    # if/else rather than `cmd && ok || warn` (SC2015): the warning must
    # only fire when the API call itself fails.
    if api_py "api('/api/collections.delete', {'id': '$TEST_COLLECTION_ID'})" &>/dev/null; then
      echo -e " ${GREEN}✓${NC} Test collection deleted from Outline"
    else
      echo -e " ${YELLOW}⚠${NC} Could not delete collection $TEST_COLLECTION_ID — delete manually"
    fi
  fi

  if [[ -d "$TEST_VAULT" ]]; then
    if [[ $FAIL -gt 0 && $KEEP_ON_FAIL -eq 1 ]]; then
      echo -e " ${YELLOW}⚠${NC} Keeping test vault for inspection: $TEST_VAULT"
    else
      # ${VAR:?} guard: aborts instead of ever expanding to `rm -rf ""`,
      # and `--` protects against a path starting with '-'.
      rm -rf -- "${TEST_VAULT:?}"
      echo -e " ${GREEN}✓${NC} Test vault removed"
    fi
  fi
}
|
|
|
|
# ── Run sync init ─────────────────────────────────────────────────────────────
|
|
|
|
run_init() {
  # Run `sync.sh init` against the temp vault. Aborts the whole suite on
  # failure, since every Phase 1 assertion depends on an initialized vault.
  echo
  echo -e "${BLUE}Running sync init...${NC}"
  echo

  # Build argv as an array — no unquoted-variable word-splitting hack,
  # no SC2086 suppression needed.
  local -a init_args=(init --vault "$TEST_VAULT" --settings "$SETTINGS")
  if [[ $VERBOSE -eq 1 ]]; then
    init_args+=(-v)
  fi

  if ! "$SCRIPT_DIR/sync.sh" "${init_args[@]}"; then
    echo -e "${RED}✗ sync init failed — cannot run tests${NC}"
    exit 1
  fi
}
|
|
|
|
# ── Phase 1 tests ─────────────────────────────────────────────────────────────
|
|
|
|
run_phase_1_tests() {
  # Phase 1 assertions (TEST-1.1 … TEST-1.11): verify that `sync.sh init`
  # produced a correctly structured vault from the fixture collection.
  # Every check records its result through the _assert counters; the
  # function never aborts early, so one failure cannot mask the rest.
  local COLL_DIR="$TEST_VAULT/$TEST_COLLECTION_NAME"

  echo
  echo -e "${BLUE}Phase 1 tests${NC}"
  echo

  # ── TEST-1.1: vault directory created ─────────────────────────────────────
  assert_dir "TEST-1.1 vault directory created" "$TEST_VAULT"

  # ── TEST-1.2: git repo with outline and main branches ─────────────────────
  local branches
  branches="$(git -C "$TEST_VAULT" branch 2>/dev/null || true)"

  if git -C "$TEST_VAULT" rev-parse --git-dir &>/dev/null; then
    _assert "TEST-1.2 git repo initialized" "pass"
  else
    _assert "TEST-1.2 git repo initialized" "fail" "No .git directory found"
  fi

  # `git branch` prefixes the checked-out branch with "* "; the regex
  # accepts the branch name with or without that marker.
  echo "$branches" | grep -qE "^\*?\s+outline$" \
    && _assert "TEST-1.2 'outline' branch exists" "pass" \
    || _assert "TEST-1.2 'outline' branch exists" "fail" "outline branch not found in: $branches"

  echo "$branches" | grep -qE "^\*?\s+main$" \
    && _assert "TEST-1.2 'main' branch exists" "pass" \
    || _assert "TEST-1.2 'main' branch exists" "fail" "main branch not found in: $branches"

  # ── TEST-1.3: test collection folder created ───────────────────────────────
  assert_dir "TEST-1.3 test collection folder exists" "$COLL_DIR"

  # ── TEST-1.4: every .md file has frontmatter ──────────────────────────────
  # NUL-delimited find/read loop: safe for file names containing spaces.
  local md_count=0 missing_fm=0
  while IFS= read -r -d '' f; do
    md_count=$(( md_count + 1 ))
    head -1 "$f" | grep -q "^---$" || missing_fm=$(( missing_fm + 1 ))
  done < <(find "$TEST_VAULT" -name "*.md" -print0 2>/dev/null)

  # md_count must be > 0: an empty vault would otherwise vacuously pass.
  if [[ $md_count -gt 0 && $missing_fm -eq 0 ]]; then
    _assert "TEST-1.4 all .md files have frontmatter (checked $md_count files)" "pass"
  else
    _assert "TEST-1.4 all .md files have frontmatter" "fail" \
      "$missing_fm / $md_count files missing frontmatter"
  fi

  # ── TEST-1.5: frontmatter has required fields ──────────────────────────────
  # Counts missing (file, field) occurrences across every markdown file.
  local missing_fields=0
  while IFS= read -r -d '' f; do
    for field in outline_id outline_collection_id outline_updated_at; do
      grep -q "^${field}: " "$f" || {
        missing_fields=$(( missing_fields + 1 ))
        [[ $VERBOSE -eq 1 ]] && echo " missing '$field' in $f"
      }
    done
  done < <(find "$TEST_VAULT" -name "*.md" -print0 2>/dev/null)

  [[ $missing_fields -eq 0 ]] \
    && _assert "TEST-1.5 required frontmatter fields present in all files" "pass" \
    || _assert "TEST-1.5 required frontmatter fields present in all files" "fail" \
         "$missing_fields missing field occurrences across all files"

  # ── TEST-1.6: outline_id matches actual Outline API ───────────────────────
  # Mounts the vault read-only into the helper container; the snippet parses
  # each file's frontmatter by hand and round-trips every outline_id through
  # /api/documents.info. Prints a single "PASS" or a "FAIL:…" diagnostic.
  local api_result
  api_result="$(api_py -v "$TEST_VAULT" "
import glob, os

results = []
for fpath in glob.glob('/vault/**/*.md', recursive=True):
    with open(fpath) as fh:
        content = fh.read()

    if not content.startswith('---\n'):
        continue
    end = content.find('\n---\n', 4)
    if end == -1:
        continue

    fm_text = content[4:end]
    fm = dict(
        line.split(': ', 1)
        for line in fm_text.splitlines()
        if ': ' in line
    )
    doc_id = fm.get('outline_id', '').strip()
    if not doc_id:
        results.append(f'MISSING_ID:{fpath}')
        continue

    try:
        r = api('/api/documents.info', {'id': doc_id})
        returned_id = r['data']['id']
        if returned_id != doc_id:
            results.append(f'MISMATCH:{fpath} has {doc_id} but API returned {returned_id}')
    except Exception as e:
        results.append(f'NOTFOUND:{fpath} id={doc_id} err={e}')

print('PASS' if not results else 'FAIL:' + ';'.join(results))
" 2>/dev/null)"

  [[ "$api_result" == "PASS" ]] \
    && _assert "TEST-1.6 outline_id matches Outline API for all files" "pass" \
    || _assert "TEST-1.6 outline_id matches Outline API for all files" "fail" "$api_result"

  # ── TEST-1.7: folder hierarchy matches Outline document tree ──────────────
  #
  # Expected layout for our test data:
  #   $COLL_DIR/
  #     RootDoc One.md            ← leaf at root
  #     Parent Doc/
  #       Parent Doc.md           ← parent (has children) → inside own folder
  #       Child One/
  #         Child One.md          ← child1 (has grandchild) → inside own folder
  #         Grandchild.md         ← leaf grandchild
  #       Child Two.md            ← leaf child2 (no children) → flat file

  assert_file "TEST-1.7 leaf root doc is flat file" \
    "$COLL_DIR/RootDoc One.md"

  assert_dir "TEST-1.7 parent-with-children gets its own subfolder" \
    "$COLL_DIR/Parent Doc"

  assert_file "TEST-1.7 parent doc file lives inside its subfolder" \
    "$COLL_DIR/Parent Doc/Parent Doc.md"

  assert_dir "TEST-1.7 child-with-grandchild gets its own subfolder" \
    "$COLL_DIR/Parent Doc/Child One"

  assert_file "TEST-1.7 child doc file lives inside its subfolder" \
    "$COLL_DIR/Parent Doc/Child One/Child One.md"

  assert_file "TEST-1.7 grandchild (leaf) is flat file in parent's folder" \
    "$COLL_DIR/Parent Doc/Child One/Grandchild.md"

  assert_file "TEST-1.7 leaf child sibling is flat file in parent's folder" \
    "$COLL_DIR/Parent Doc/Child Two.md"

  # ── TEST-1.8: settings.json in .gitignore ─────────────────────────────────
  assert_contains "TEST-1.8 settings.json is gitignored" \
    "$TEST_VAULT/.gitignore" "settings.json"

  # ── TEST-1.9: .obsidian/ in .gitignore ────────────────────────────────────
  assert_contains "TEST-1.9 .obsidian/ is gitignored" \
    "$TEST_VAULT/.gitignore" ".obsidian/"

  # ── TEST-1.10: outline and main branches at same commit ───────────────────
  # `|| true` keeps the SHAs empty (rather than erroring) when a branch is
  # missing; TEST-1.2 above already reports that case.
  local outline_sha main_sha
  outline_sha="$(git -C "$TEST_VAULT" rev-parse outline 2>/dev/null || true)"
  main_sha="$(git -C "$TEST_VAULT" rev-parse main 2>/dev/null || true)"
  assert_eq "TEST-1.10 outline and main branches point to same commit" \
    "$outline_sha" "$main_sha"

  # ── TEST-1.11: re-running init aborts with non-zero exit ──────────────────
  # Capture the exit code without tripping anything: `|| reinit_exit=$?`
  # leaves reinit_exit at 0 on (unexpected) success.
  local reinit_exit=0
  "$SCRIPT_DIR/sync.sh" init \
    --vault "$TEST_VAULT" \
    --settings "$SETTINGS" \
    &>/dev/null \
    || reinit_exit=$?
  assert_nonzero_exit "TEST-1.11 re-running init on existing vault exits non-zero" \
    "$reinit_exit"
}
|
|
|
|
# ── Summary ───────────────────────────────────────────────────────────────────
|
|
|
|
print_summary() {
  # Render the final pass/fail tally and, when anything failed, list the
  # names of the failed tests. Reads the global counters only.
  local bar="════════════════════════════════════════════════════════════"
  echo
  echo "$bar"
  echo " TEST SUMMARY"
  echo "$bar"
  echo -e " Passed : ${GREEN}$PASS${NC}"
  echo -e " Failed : ${RED}$FAIL${NC}"
  echo -e " Total : $TOTAL"

  if (( ${#FAILED_TESTS[@]} > 0 )); then
    echo
    echo " Failed tests:"
    local t
    for t in "${FAILED_TESTS[@]}"; do
      echo -e " ${RED}✗${NC} $t"
    done
  fi
  echo "$bar"
  echo
}
|
|
|
|
# ── Main ──────────────────────────────────────────────────────────────────────
|
|
|
|
main() {
  # Entry point: banner → dependency/settings checks → create remote
  # fixtures → run sync init → Phase 1 assertions → summary.
  # Exit status is 0 iff every assertion passed.
  echo
  echo -e "${BLUE}════════════════════════════════════════════════════════════${NC}"
  echo -e "${BLUE} OUTLINE SYNC — Test Suite (Phase 1)${NC}"
  echo -e "${BLUE}════════════════════════════════════════════════════════════${NC}"
  echo
  echo -e " Settings : $SETTINGS"
  echo -e " Vault : ${YELLOW}$TEST_VAULT${NC} (temporary)"
  echo -e " Collection: ${YELLOW}$TEST_COLLECTION_NAME${NC} (temporary)"
  echo

  # Dependency checks — one loop instead of four near-identical lines.
  local dep
  for dep in git docker jq python3; do
    command -v "$dep" &>/dev/null \
      || { echo -e "${RED}✗ $dep is required${NC}"; exit 1; }
  done

  [[ -f "$SETTINGS" ]] || {
    echo -e "${RED}✗ settings.json not found at $SETTINGS${NC}"
    exit 1
  }

  # Register cleanup so it always runs, even on Ctrl-C or early exit.
  trap teardown EXIT

  setup_test_data
  run_init

  if [[ -z "$PHASE_FILTER" || "$PHASE_FILTER" == "1" ]]; then
    run_phase_1_tests
  fi

  print_summary

  # Exit with failure code if any test failed
  [[ $FAIL -eq 0 ]]
}

main
|