Compare commits

...

6 Commits

Author    SHA1          Message                                            Date
perf3ct   53f31f9c78    feat(docs): try to also sync docs like this       2025-10-21 15:04:55 -07:00
perf3ct   22c5651eeb    feat(docs): try to get images to load at least?   2025-09-03 22:53:07 +00:00
perf3ct   68a10a9813    feat(docs): hopefully fix images                   2025-09-03 22:39:38 +00:00
perf3ct   b204964e57    feat(docs): also update the Home.md                2025-09-03 21:33:52 +00:00
perf3ct   b2c869d7ab    fix(docs): have this ci use explicit token         2025-09-03 18:29:42 +00:00
perf3ct   307e17f9c8    feat(docs): push to wiki when docs change          2025-09-03 17:58:04 +00:00
                        (commit body: asdf / asdf)
3 changed files with 957 additions and 0 deletions


@@ -0,0 +1,453 @@
#!/usr/bin/env tsx

import * as fs from 'fs/promises';
import * as path from 'path';
import { exec } from 'child_process';
import { promisify } from 'util';
import { Dirent } from 'fs';

const execAsync = promisify(exec);

// Configuration
const FILE_EXTENSIONS = ['.md', '.png', '.jpg', '.jpeg', '.gif', '.svg'] as const;
const README_PATTERN = /^README(?:[-.](.+))?\.md$/;

interface SyncConfig {
  mainRepoPath: string;
  wikiPath: string;
  docsPath: string;
}

/**
 * Convert markdown to GitHub Wiki format
 * - Images: ![](image.png) → [[image.png]]
 * - Links: [text](page.md) → [[text|page]]
 */
async function convertToWikiFormat(wikiDir: string): Promise<void> {
  console.log('Converting to GitHub Wiki format...');
  const mdFiles = await findFiles(wikiDir, ['.md']);
  let convertedCount = 0;

  for (const file of mdFiles) {
    let content = await fs.readFile(file, 'utf-8');
    const originalContent = content;

    // Convert image references to wiki format
    // ![alt](image.png) → [[image.png]]
    content = content.replace(/!\[([^\]]*)\]\(([^)]+)\)/g, (match, alt, src) => {
      // Skip external URLs
      if (src.startsWith('http://') || src.startsWith('https://')) {
        return match;
      }

      // Decode URL encoding
      let imagePath = src;
      if (src.includes('%')) {
        try {
          imagePath = decodeURIComponent(src);
        } catch {
          imagePath = src;
        }
      }

      // Extract just the filename for wiki syntax
      const filename = path.basename(imagePath);

      // Use wiki syntax for images
      // If alt text exists, add it after pipe
      if (alt && alt.trim()) {
        return `[[${filename}|alt=${alt}]]`;
      } else {
        return `[[${filename}]]`;
      }
    });

    // Convert internal markdown links to wiki format
    // [text](../path/to/Page.md) → [[text|Page]]
    content = content.replace(/\[([^\]]+)\]\(([^)]+)\)/g, (match, text, href) => {
      // Skip external URLs, anchors, and images
      if (href.startsWith('http://') ||
          href.startsWith('https://') ||
          href.startsWith('#') ||
          href.match(/\.(png|jpg|jpeg|gif|svg)$/i)) {
        return match;
      }

      // Check if it's a markdown file link
      if (href.endsWith('.md') || href.includes('.md#')) {
        // Decode URL encoding
        let decodedHref = href;
        if (href.includes('%')) {
          try {
            decodedHref = decodeURIComponent(href);
          } catch {
            decodedHref = href;
          }
        }

        // Extract page name without extension and path
        let pageName = decodedHref
          .replace(/\.md(#.*)?$/, '') // Remove .md and anchor
          .split('/')                 // Split by path
          .pop() || '';               // Get last part (filename)

        // Convert spaces to hyphens (GitHub wiki convention)
        pageName = pageName.replace(/ /g, '-');

        // Use wiki link syntax
        if (text === pageName || text === pageName.replace(/-/g, ' ')) {
          return `[[${pageName}]]`;
        } else {
          return `[[${text}|${pageName}]]`;
        }
      }

      // For other internal links, just decode URL encoding
      if (href.includes('%') && !href.startsWith('http')) {
        try {
          const decodedHref = decodeURIComponent(href);
          return `[${text}](${decodedHref})`;
        } catch {
          return match;
        }
      }

      return match;
    });

    // Save if modified
    if (content !== originalContent) {
      await fs.writeFile(file, content, 'utf-8');
      const relativePath = path.relative(wikiDir, file);
      console.log(` Converted: ${relativePath}`);
      convertedCount++;
    }
  }

  if (convertedCount > 0) {
    console.log(`Converted ${convertedCount} files to wiki format`);
  } else {
    console.log('No files needed conversion');
  }
}

/**
 * Recursively find all files matching the given extensions
 */
async function findFiles(dir: string, extensions: readonly string[]): Promise<string[]> {
  const files: string[] = [];

  async function walk(currentDir: string): Promise<void> {
    const entries: Dirent[] = await fs.readdir(currentDir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(currentDir, entry.name);
      if (entry.isDirectory()) {
        await walk(fullPath);
      } else if (entry.isFile()) {
        const ext = path.extname(entry.name).toLowerCase();
        if (extensions.includes(ext)) {
          files.push(fullPath);
        }
      }
    }
  }

  await walk(dir);
  return files;
}

/**
 * Get all files in a directory recursively
 */
async function getAllFiles(dir: string): Promise<Set<string>> {
  const files = new Set<string>();

  async function walk(currentDir: string): Promise<void> {
    try {
      const entries = await fs.readdir(currentDir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(currentDir, entry.name);
        const relativePath = path.relative(dir, fullPath);
        // Skip .git directory
        if (entry.name === '.git' || relativePath.startsWith('.git')) continue;
        if (entry.isDirectory()) {
          await walk(fullPath);
        } else if (entry.isFile()) {
          files.add(relativePath);
        }
      }
    } catch (error) {
      // Directory might not exist yet
      if ((error as any).code !== 'ENOENT') {
        throw error;
      }
    }
  }

  await walk(dir);
  return files;
}

/**
 * Flatten directory structure - move all files to root
 * GitHub Wiki prefers flat structure
 */
async function flattenStructure(wikiDir: string): Promise<void> {
  console.log('Flattening directory structure for wiki...');
  const allFiles = await getAllFiles(wikiDir);
  let movedCount = 0;

  for (const file of allFiles) {
    // Skip if already at root
    if (!file.includes('/')) continue;

    const oldPath = path.join(wikiDir, file);
    const basename = path.basename(file);

    // Create unique name if file already exists at root
    let newName = basename;
    let counter = 1;
    while (await fileExists(path.join(wikiDir, newName))) {
      const ext = path.extname(basename);
      const nameWithoutExt = basename.slice(0, -ext.length);
      newName = `${nameWithoutExt}-${counter}${ext}`;
      counter++;
    }

    const newPath = path.join(wikiDir, newName);

    // Move file to root
    await fs.rename(oldPath, newPath);
    console.log(` Moved: ${file} → ${newName}`);
    movedCount++;
  }

  if (movedCount > 0) {
    console.log(`Moved ${movedCount} files to root`);
    // Clean up empty directories
    await cleanEmptyDirectories(wikiDir);
  }
}

async function fileExists(path: string): Promise<boolean> {
  try {
    await fs.access(path);
    return true;
  } catch {
    return false;
  }
}

/**
 * Remove empty directories recursively
 */
async function cleanEmptyDirectories(dir: string): Promise<void> {
  const allDirs = await getAllDirectories(dir);
  for (const subDir of allDirs) {
    try {
      const entries = await fs.readdir(subDir);
      if (entries.length === 0 || (entries.length === 1 && entries[0] === '.git')) {
        await fs.rmdir(subDir);
      }
    } catch {
      // Ignore errors
    }
  }
}

/**
 * Get all directories recursively
 */
async function getAllDirectories(dir: string): Promise<string[]> {
  const dirs: string[] = [];

  async function walk(currentDir: string): Promise<void> {
    try {
      const entries = await fs.readdir(currentDir, { withFileTypes: true });
      for (const entry of entries) {
        if (entry.isDirectory() && entry.name !== '.git') {
          const fullPath = path.join(currentDir, entry.name);
          dirs.push(fullPath);
          await walk(fullPath);
        }
      }
    } catch {
      // Ignore errors
    }
  }

  await walk(dir);
  return dirs.sort((a, b) => b.length - a.length); // Sort longest first for cleanup
}

/**
 * Sync files from source to wiki
 */
async function syncFiles(sourceDir: string, wikiDir: string): Promise<void> {
  console.log('Syncing files to wiki...');

  // Get all valid source files
  const sourceFiles = await findFiles(sourceDir, FILE_EXTENSIONS);
  const sourceRelativePaths = new Set<string>();

  // Copy all source files
  console.log(`Found ${sourceFiles.length} files to sync`);
  for (const file of sourceFiles) {
    const relativePath = path.relative(sourceDir, file);
    sourceRelativePaths.add(relativePath);

    const targetPath = path.join(wikiDir, relativePath);
    const targetDir = path.dirname(targetPath);

    // Create directory structure
    await fs.mkdir(targetDir, { recursive: true });

    // Copy file
    await fs.copyFile(file, targetPath);
  }

  // Remove orphaned files
  const wikiFiles = await getAllFiles(wikiDir);
  for (const wikiFile of wikiFiles) {
    if (!sourceRelativePaths.has(wikiFile) && !wikiFile.startsWith('Home')) {
      const fullPath = path.join(wikiDir, wikiFile);
      await fs.unlink(fullPath);
    }
  }
}

/**
 * Copy root README.md to wiki as Home.md if it exists
 */
async function copyRootReadme(mainRepoPath: string, wikiPath: string): Promise<void> {
  const rootReadmePath = path.join(mainRepoPath, 'README.md');
  const wikiHomePath = path.join(wikiPath, 'Home.md');

  try {
    await fs.access(rootReadmePath);
    await fs.copyFile(rootReadmePath, wikiHomePath);
    console.log(' Copied root README.md as Home.md');
  } catch (error) {
    console.log(' No root README.md found to use as Home page');
  }
}

/**
 * Rename README files to wiki-compatible names
 */
async function renameReadmeFiles(wikiDir: string): Promise<void> {
  console.log('Converting README files for wiki compatibility...');
  const files = await fs.readdir(wikiDir);

  for (const file of files) {
    const match = file.match(README_PATTERN);
    if (match) {
      const oldPath = path.join(wikiDir, file);
      let newName: string;

      if (match[1]) {
        // Language-specific README
        newName = `Home-${match[1]}.md`;
      } else {
        // Main README
        newName = 'Home.md';
      }

      const newPath = path.join(wikiDir, newName);
      await fs.rename(oldPath, newPath);
      console.log(` Renamed: ${file} → ${newName}`);
    }
  }
}

/**
 * Check if there are any changes in the wiki
 */
async function hasChanges(wikiDir: string): Promise<boolean> {
  try {
    const { stdout } = await execAsync('git status --porcelain', { cwd: wikiDir });
    return stdout.trim().length > 0;
  } catch (error) {
    console.error('Error checking git status:', error);
    return false;
  }
}

/**
 * Get configuration from environment variables
 */
function getConfig(): SyncConfig {
  const mainRepoPath = process.env.MAIN_REPO_PATH || 'main-repo';
  const wikiPath = process.env.WIKI_PATH || 'wiki';
  const docsPath = path.join(mainRepoPath, 'docs');
  return { mainRepoPath, wikiPath, docsPath };
}

/**
 * Main sync function
 */
async function syncDocsToWiki(): Promise<void> {
  const config = getConfig();
  const flattenWiki = process.env.FLATTEN_WIKI === 'true';

  console.log('Starting documentation sync to wiki...');
  console.log(`Source: ${config.docsPath}`);
  console.log(`Target: ${config.wikiPath}`);
  console.log(`Flatten structure: ${flattenWiki}`);

  try {
    // Verify paths exist
    await fs.access(config.docsPath);
    await fs.access(config.wikiPath);

    // Sync files
    await syncFiles(config.docsPath, config.wikiPath);

    // Copy root README.md as Home.md
    await copyRootReadme(config.mainRepoPath, config.wikiPath);

    // Convert to wiki format
    await convertToWikiFormat(config.wikiPath);

    // Optionally flatten directory structure
    if (flattenWiki) {
      await flattenStructure(config.wikiPath);
    }

    // Rename README files to wiki-compatible names
    await renameReadmeFiles(config.wikiPath);

    // Check for changes
    const changed = await hasChanges(config.wikiPath);
    if (changed) {
      console.log('\nChanges detected in wiki');
      process.stdout.write('::set-output name=changes::true\n');
    } else {
      console.log('\nNo changes detected in wiki');
      process.stdout.write('::set-output name=changes::false\n');
    }

    console.log('Sync completed successfully!');
  } catch (error) {
    console.error('Error during sync:', error);
    process.exit(1);
  }
}

// Run if called directly
if (require.main === module) {
  syncDocsToWiki();
}

export { syncDocsToWiki };

.github/scripts/sync-docs-to-wiki.ts (vendored executable file, 437 lines added)

@@ -0,0 +1,437 @@
#!/usr/bin/env tsx

import * as fs from 'fs/promises';
import * as path from 'path';
import { exec } from 'child_process';
import { promisify } from 'util';
import { Dirent } from 'fs';

const execAsync = promisify(exec);

// Configuration
const FILE_EXTENSIONS = ['.md', '.png', '.jpg', '.jpeg', '.gif', '.svg'] as const;
const README_PATTERN = /^README(?:[-.](.+))?\.md$/;

interface SyncConfig {
  mainRepoPath: string;
  wikiPath: string;
  docsPath: string;
}

/**
 * Recursively find all files matching the given extensions
 */
async function findFiles(dir: string, extensions: readonly string[]): Promise<string[]> {
  const files: string[] = [];

  async function walk(currentDir: string): Promise<void> {
    const entries: Dirent[] = await fs.readdir(currentDir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(currentDir, entry.name);
      if (entry.isDirectory()) {
        await walk(fullPath);
      } else if (entry.isFile()) {
        const ext = path.extname(entry.name).toLowerCase();
        if (extensions.includes(ext)) {
          files.push(fullPath);
        }
      }
    }
  }

  await walk(dir);
  return files;
}

/**
 * Get all files in a directory recursively
 */
async function getAllFiles(dir: string): Promise<Set<string>> {
  const files = new Set<string>();

  async function walk(currentDir: string): Promise<void> {
    try {
      const entries = await fs.readdir(currentDir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(currentDir, entry.name);
        const relativePath = path.relative(dir, fullPath);
        // Skip .git directory
        if (entry.name === '.git' || relativePath.startsWith('.git')) continue;
        if (entry.isDirectory()) {
          await walk(fullPath);
        } else if (entry.isFile()) {
          files.add(relativePath);
        }
      }
    } catch (error) {
      // Directory might not exist yet
      if ((error as any).code !== 'ENOENT') {
        throw error;
      }
    }
  }

  await walk(dir);
  return files;
}

/**
 * Sync files from source to wiki, preserving directory structure and removing orphaned files
 */
async function syncFiles(sourceDir: string, wikiDir: string): Promise<void> {
  console.log('Analyzing files to sync...');

  // Get all valid source files
  const sourceFiles = await findFiles(sourceDir, FILE_EXTENSIONS);
  const sourceRelativePaths = new Set<string>();

  // Copy all source files and track their paths
  console.log(`Found ${sourceFiles.length} files to sync`);
  let copiedCount = 0;
  let skippedCount = 0;

  for (const file of sourceFiles) {
    const relativePath = path.relative(sourceDir, file);
    sourceRelativePaths.add(relativePath);

    const targetPath = path.join(wikiDir, relativePath);
    const targetDir = path.dirname(targetPath);

    // Create directory structure
    await fs.mkdir(targetDir, { recursive: true });

    // Check if file needs updating (compare modification times)
    let needsCopy = true;
    try {
      const sourceStat = await fs.stat(file);
      const targetStat = await fs.stat(targetPath);
      // Only copy if source is newer or sizes differ
      needsCopy = sourceStat.mtime > targetStat.mtime || sourceStat.size !== targetStat.size;
    } catch {
      // Target doesn't exist, needs copy
      needsCopy = true;
    }

    if (needsCopy) {
      await fs.copyFile(file, targetPath);
      console.log(` Updated: ${relativePath}`);
      copiedCount++;
    } else {
      skippedCount++;
    }
  }

  console.log(`Updated ${copiedCount} files, ${skippedCount} unchanged`);

  // Find and remove files that don't exist in source
  console.log('Checking for orphaned files in wiki...');
  const wikiFiles = await getAllFiles(wikiDir);
  let removedCount = 0;

  for (const wikiFile of wikiFiles) {
    // Check if this file should exist (either as-is or will be renamed)
    let shouldExist = sourceRelativePaths.has(wikiFile);

    // Special handling for Home files that will be created from READMEs
    if (wikiFile.startsWith('Home')) {
      const readmeVariant1 = wikiFile.replace(/^Home(-.*)?\.md$/, 'README$1.md');
      const readmeVariant2 = wikiFile.replace(/^Home-(.+)\.md$/, 'README.$1.md');
      shouldExist = sourceRelativePaths.has(readmeVariant1) || sourceRelativePaths.has(readmeVariant2) || sourceRelativePaths.has('README.md');
    }

    if (!shouldExist) {
      const fullPath = path.join(wikiDir, wikiFile);
      await fs.unlink(fullPath);
      console.log(` Removed: ${wikiFile}`);
      removedCount++;
    }
  }

  if (removedCount > 0) {
    console.log(`Removed ${removedCount} orphaned files`);
    // Clean up empty directories
    await cleanEmptyDirectories(wikiDir);
  }
}

/**
 * Remove empty directories recursively
 */
async function cleanEmptyDirectories(dir: string): Promise<void> {
  async function removeEmptyDirs(currentDir: string): Promise<boolean> {
    if (currentDir === dir) return false; // Don't remove root
    try {
      const entries = await fs.readdir(currentDir, { withFileTypes: true });

      // Skip .git directory
      const filteredEntries = entries.filter(e => e.name !== '.git');

      if (filteredEntries.length === 0) {
        await fs.rmdir(currentDir);
        return true;
      }

      // Check subdirectories
      for (const entry of filteredEntries) {
        if (entry.isDirectory()) {
          const subDir = path.join(currentDir, entry.name);
          await removeEmptyDirs(subDir);
        }
      }

      // Check again after cleaning subdirectories
      const remainingEntries = await fs.readdir(currentDir);
      const filteredRemaining = remainingEntries.filter(e => e !== '.git');
      if (filteredRemaining.length === 0 && currentDir !== dir) {
        await fs.rmdir(currentDir);
        return true;
      }

      return false;
    } catch (error) {
      return false;
    }
  }

  // Get all directories and process them
  const allDirs = await getAllDirectories(dir);
  for (const subDir of allDirs) {
    await removeEmptyDirs(subDir);
  }
}

/**
 * Get all directories recursively
 */
async function getAllDirectories(dir: string): Promise<string[]> {
  const dirs: string[] = [];

  async function walk(currentDir: string): Promise<void> {
    try {
      const entries = await fs.readdir(currentDir, { withFileTypes: true });
      for (const entry of entries) {
        if (entry.isDirectory() && entry.name !== '.git') {
          const fullPath = path.join(currentDir, entry.name);
          dirs.push(fullPath);
          await walk(fullPath);
        }
      }
    } catch {
      // Ignore errors
    }
  }

  await walk(dir);
  return dirs.sort((a, b) => b.length - a.length); // Sort longest first for cleanup
}

/**
 * Fix references in markdown files for wiki compatibility
 *
 * Issues fixed:
 * 1. URL-encoded image references (spaces as %20) need to match actual filenames
 * 2. Internal markdown links need to be converted to wiki syntax
 * 3. Images can optionally use wiki syntax [[image.png]] for better compatibility
 */
async function fixImageReferences(wikiDir: string): Promise<void> {
  console.log('Fixing references for GitHub Wiki compatibility...');
  const mdFiles = await findFiles(wikiDir, ['.md']);
  let fixedCount = 0;

  for (const file of mdFiles) {
    let content = await fs.readFile(file, 'utf-8');
    let modified = false;
    const originalContent = content;

    // Step 1: Fix URL-encoded image references
    // Convert ![](Name%20With%20Spaces.png) to ![](Name With Spaces.png)
    content = content.replace(/!\[([^\]]*)\]\(([^)]+)\)/g, (match, alt, src) => {
      // Skip external URLs
      if (src.startsWith('http://') || src.startsWith('https://')) {
        return match;
      }

      // Decode URL encoding if present
      if (src.includes('%')) {
        try {
          const decodedSrc = decodeURIComponent(src);
          return `![${alt}](${decodedSrc})`;
        } catch {
          return match;
        }
      }

      return match;
    });

    // Step 2: Fix internal links - decode URL encoding but keep standard markdown format
    // GitHub Wiki actually supports standard markdown links with relative paths
    content = content.replace(/\[([^\]]+)\]\(([^)]+)\)/g, (match, text, href) => {
      // Skip external URLs, anchors, and images
      if (href.startsWith('http://') ||
          href.startsWith('https://') ||
          href.startsWith('#') ||
          href.match(/\.(png|jpg|jpeg|gif|svg)$/i)) {
        return match;
      }

      // Decode URL encoding for all internal links
      if (href.includes('%')) {
        try {
          const decodedHref = decodeURIComponent(href);
          return `[${text}](${decodedHref})`;
        } catch {
          return match;
        }
      }

      return match;
    });

    // Check if content was modified
    if (content !== originalContent) {
      modified = true;
      await fs.writeFile(file, content, 'utf-8');
      const relativePath = path.relative(wikiDir, file);
      console.log(` Fixed references in: ${relativePath}`);
      fixedCount++;
    }
  }

  if (fixedCount > 0) {
    console.log(`Fixed references in ${fixedCount} files`);
  } else {
    console.log('No references needed fixing');
  }
}

/**
 * Rename README files to wiki-compatible names
 */
async function renameReadmeFiles(wikiDir: string): Promise<void> {
  console.log('Converting README files for wiki compatibility...');
  const files = await fs.readdir(wikiDir);

  for (const file of files) {
    const match = file.match(README_PATTERN);
    if (match) {
      const oldPath = path.join(wikiDir, file);
      let newName: string;

      if (match[1]) {
        // Language-specific README (e.g., README-ZH_CN.md or README.es.md)
        newName = `Home-${match[1]}.md`;
      } else {
        // Main README
        newName = 'Home.md';
      }

      const newPath = path.join(wikiDir, newName);
      await fs.rename(oldPath, newPath);
      console.log(` Renamed: ${file} → ${newName}`);
    }
  }
}

/**
 * Check if there are any changes in the wiki
 */
async function hasChanges(wikiDir: string): Promise<boolean> {
  try {
    const { stdout } = await execAsync('git status --porcelain', { cwd: wikiDir });
    return stdout.trim().length > 0;
  } catch (error) {
    console.error('Error checking git status:', error);
    return false;
  }
}

/**
 * Copy root README.md to wiki as Home.md if it exists
 */
async function copyRootReadme(mainRepoPath: string, wikiPath: string): Promise<void> {
  const rootReadmePath = path.join(mainRepoPath, 'README.md');
  const wikiHomePath = path.join(wikiPath, 'Home.md');

  try {
    await fs.access(rootReadmePath);
    await fs.copyFile(rootReadmePath, wikiHomePath);
    console.log(' Copied root README.md as Home.md');
  } catch (error) {
    // Root README doesn't exist or can't be accessed
    console.log(' No root README.md found to use as Home page');
  }
}

/**
 * Get configuration from environment variables
 */
function getConfig(): SyncConfig {
  const mainRepoPath = process.env.MAIN_REPO_PATH || 'main-repo';
  const wikiPath = process.env.WIKI_PATH || 'wiki';
  const docsPath = path.join(mainRepoPath, 'docs');
  return { mainRepoPath, wikiPath, docsPath };
}

/**
 * Main sync function
 */
async function syncDocsToWiki(): Promise<void> {
  const config = getConfig();

  console.log('Starting documentation sync to wiki...');
  console.log(`Source: ${config.docsPath}`);
  console.log(`Target: ${config.wikiPath}`);

  try {
    // Verify paths exist
    await fs.access(config.docsPath);
    await fs.access(config.wikiPath);

    // Sync files (copy new/updated, remove orphaned)
    await syncFiles(config.docsPath, config.wikiPath);

    // Copy root README.md as Home.md
    await copyRootReadme(config.mainRepoPath, config.wikiPath);

    // Fix image and link references for wiki compatibility
    await fixImageReferences(config.wikiPath);

    // Rename README files to wiki-compatible names
    await renameReadmeFiles(config.wikiPath);

    // Check for changes
    const changed = await hasChanges(config.wikiPath);
    if (changed) {
      console.log('\nChanges detected in wiki');
      // GitHub Actions output format
      process.stdout.write('::set-output name=changes::true\n');
    } else {
      console.log('\nNo changes detected in wiki');
      process.stdout.write('::set-output name=changes::false\n');
    }

    console.log('Sync completed successfully!');
  } catch (error) {
    console.error('Error during sync:', error);
    process.exit(1);
  }
}

// Run if called directly
if (require.main === module) {
  syncDocsToWiki();
}

export { syncDocsToWiki };

.github/workflows/sync-docs-to-wiki.yml (vendored file, 67 lines added)

@@ -0,0 +1,67 @@
name: Sync Docs to Wiki

on:
  push:
    branches:
      - main
    paths:
      - 'docs/**'
  workflow_dispatch: # Allow manual triggering

permissions:
  contents: read # Read access to repository contents
  # Note: Writing to wiki requires a PAT or GITHUB_TOKEN with additional permissions
  # The default GITHUB_TOKEN cannot write to wikis, so we need to:
  # 1. Create a Personal Access Token (PAT) with 'repo' scope
  # 2. Add it as a repository secret named WIKI_TOKEN

jobs:
  sync-wiki:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout main repository
        uses: actions/checkout@v4
        with:
          path: main-repo

      - name: Checkout wiki repository
        uses: actions/checkout@v4
        with:
          repository: TriliumNext/Trilium.wiki
          path: wiki
          token: ${{ secrets.WIKI_TOKEN }}

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install tsx for TypeScript execution
        run: npm install -g tsx

      - name: Setup Git
        run: |
          git config --global user.email "action@github.com"
          git config --global user.name "GitHub Action"

      - name: Sync documentation to wiki
        id: sync
        run: |
          tsx main-repo/.github/scripts/sync-docs-to-wiki.ts
        env:
          MAIN_REPO_PATH: main-repo
          WIKI_PATH: wiki

      - name: Commit and push changes
        if: contains(steps.sync.outputs.changes, 'true')
        run: |
          cd wiki
          git add .
          git commit -m "Sync documentation from main repository
          Source commit: ${{ github.sha }}
          Triggered by: ${{ github.event.head_commit.message }}"
          git push
        env:
          GITHUB_TOKEN: ${{ secrets.WIKI_TOKEN }}