--- a/.github/workflows/sync-wiki.yml
+++ b/.github/workflows/sync-wiki.yml
-name: Sync README files to Wiki
+name: Sync Wiki
 
 on:
   push:
-    branches: [ main ]
+    branches:
+      - main
     paths:
-      - '**/README.md'
-      - '**/readme.md'
+      - 'benchmarks/*/readme.md'
+      - '.github/workflows/sync-wiki.yml'
+  workflow_dispatch:
 
-permissions:
-  contents: write
-
 jobs:
   sync-wiki:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
-
-      - name: Configure Git for Wiki
-        run: |
-          git config --global user.name "GitHub Actions"
-          git config --global user.email "actions@github.com"
-
-      - name: Clone Wiki Repository
-        run: git clone https://${{ github.actor }}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.wiki.git ./wiki
-
-      - name: Copy README files to Wiki
-        run: |
-          # Special mappings - add specific README files to specific wiki pages
-          declare -A special_mappings
-          special_mappings["benchmarks/rotatingDrum/readme.md"]="Performance-of-phasicFlow.md"
+      - name: Checkout Repository
+        uses: actions/checkout@v3
+        with:
+          path: repo
 
-          # Create an images directory in the wiki if it doesn't exist
-          mkdir -p ./wiki/images
+      - name: Checkout Wiki
+        uses: actions/checkout@v3
+        with:
+          repository: ${{ github.repository }}.wiki
+          path: wiki
 
-          # Process mapped files
-          for rel_path in "${!special_mappings[@]}"; do
-            if [ -f "./$rel_path" ]; then
-              wiki_page="${special_mappings[$rel_path]}"
-              echo "Processing special mapping: $rel_path -> $wiki_page"
-
-              # Get the base directory of the readme file
-              base_dir=$(dirname "./$rel_path")
-
-              # Read content of the README file
-              content=$(cat "./$rel_path")
-
-              # Use grep to identify and process image paths instead of regex
-              echo "Processing Markdown image references..."
-              for img_ref in $(grep -o '!\[.*\](.*[^)]*)' "./$rel_path" | sed -E 's/!\[.*\]\((.*)\)/\1/'); do
-                # Skip URLs
-                if [[ $img_ref == http* ]]; then
-                  continue
-                fi
-
-                # Process markdown image as before
-                if [[ $img_ref == /* ]]; then
-                  # Absolute path within repository
-                  abs_img_path="./$img_ref"
-                else
-                  # Relative path to the README
-                  abs_img_path="$base_dir/$img_ref"
-                fi
-
-                # Extract just the filename
-                img_filename=$(basename "$img_ref")
-                wiki_img_path="images/$img_filename"
-
-                # Copy the image to wiki repository if it exists
-                if [ -f "$abs_img_path" ]; then
-                  echo "Copying image: $abs_img_path -> ./wiki/$wiki_img_path"
-                  cp -v "$abs_img_path" "./wiki/$wiki_img_path" || echo "Error copying image"
-
-                  # Escape special characters in the path for sed
-                  escaped_img_path=$(echo "$img_ref" | sed 's/[\/&]/\\&/g')
-
-                  # Replace the image reference in content - simpler approach with sed
-                  content=$(echo "$content" | sed "s|!\\[.*\\]($escaped_img_path)||g")
-                  echo "Replaced image reference: $img_ref → $wiki_img_path"
-                else
-                  echo "Warning: Image file not found: $abs_img_path"
-                  # Add more debug info
-                  echo "Current directory: $(pwd)"
-                  echo "Files in $base_dir:"
-                  ls -la "$base_dir"
-                fi
-              done
-
-              # Process HTML img tags by finding all images and copying them
-              echo "Processing HTML image references..."
-
-              # First, find and copy all images referenced in HTML tags
-              img_tags_file=$(mktemp)
-              # Capture complete HTML img tags with all attributes into a file
-              grep -o '<img[^>]*>' "./$rel_path" > "$img_tags_file" || true
-
-              # Create a file to store all image source paths
-              img_src_file=$(mktemp)
-
-              # Extract src attributes from img tags
-              while IFS= read -r img_tag; do
-                img_src=$(echo "$img_tag" | grep -o 'src="[^"]*"' | sed 's/src="//;s/"$//')
-                if [ -n "$img_src" ] && [[ $img_src != http* ]]; then
-                  echo "$img_src" >> "$img_src_file"
-                fi
-              done < "$img_tags_file"
-
-              # Process each unique image source
-              if [ -s "$img_src_file" ]; then
-                sort -u "$img_src_file" | while read -r img_src; do
-                  # Skip empty lines
-                  if [ -z "$img_src" ]; then
-                    continue
-                  fi
-
-                  # Determine image path
-                  if [[ $img_src == /* ]]; then
-                    abs_img_path="./$img_src"
-                  else
-                    abs_img_path="$base_dir/$img_src"
-                  fi
-
-                  # Extract filename
-                  img_filename=$(basename "$img_src")
-                  wiki_img_path="images/$img_filename"
-
-                  # Copy image to wiki
-                  if [ -f "$abs_img_path" ]; then
-                    echo "Copying HTML image: $abs_img_path -> ./wiki/$wiki_img_path"
-                    cp -v "$abs_img_path" "./wiki/$wiki_img_path" || echo "Error copying image"
-
-                    # Prepare for replacement
-                    escaped_img_src=$(echo "$img_src" | sed 's/[\/&]/\\&/g')
-                    escaped_wiki_path=$(echo "$wiki_img_path" | sed 's/[\/&]/\\&/g')
-
-                    # Update src path while preserving ALL other attributes
-                    content=$(echo "$content" | sed "s|src=\"$escaped_img_src\"|src=\"$escaped_wiki_path\"|g")
-                  else
-                    echo "Warning: HTML image file not found: $abs_img_path"
-                  fi
-                done
-              fi
-
-              # Clean up temporary files
-              rm -f "$img_tags_file" "$img_src_file"
-
-              # Debug output
-              echo "Wiki page content preview (first 100 chars): ${content:0:100}"
-
-              # Replace the wiki page with the updated content rather than appending
-              mkdir -p "$(dirname "./wiki/$wiki_page")"  # Ensure directory exists
-              echo -e "# $(basename "$wiki_page" .md)\n\n$content" > "./wiki/$wiki_page"
-              echo "Updated wiki page: $wiki_page"
-            fi
-          done
-
-      - name: Commit and Push to Wiki
-        working-directory: ./wiki
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.x'
+
+      - name: Install dependencies
+        run: pip install beautifulsoup4
+
+      - name: Sync specific README files to Wiki
+        run: |
+          python $GITHUB_WORKSPACE/repo/.github/scripts/sync-wiki.py
+        env:
+          GITHUB_REPOSITORY: ${{ github.repository }}
+
+      - name: Push changes to wiki
         run: |
-          echo "Files changed in wiki repository:"
-          git status
+          cd wiki
+          git config user.name "${{ github.actor }}"
+          git config user.email "${{ github.actor }}@users.noreply.github.com"
           git add .
-          git diff-index --quiet HEAD || git commit -m "Sync README files from main repository"
-          git push || { echo "Push failed, retrying with more details..."; git push --verbose; }
+          git diff --quiet && git diff --staged --quiet || (git commit -m "Auto sync wiki from main repository" && git push)
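The copy-and-rewrite logic that used to live in the inline shell step now lives in `.github/scripts/sync-wiki.py`, which is not included in this diff. Below is a minimal sketch of what such a script could look like, assuming the `repo`/`wiki` checkout layout configured above and the same README-to-wiki-page mapping as the removed shell version; the `PAGE_MAP`, `copy_image`, and `rewrite_content` names are hypothetical, not taken from the actual script.

```python
# Hypothetical sketch of .github/scripts/sync-wiki.py -- the real script is not shown in this diff.
# Assumes the working directory is $GITHUB_WORKSPACE, with the repository checked out in ./repo
# and the wiki in ./wiki, as the workflow above configures.

import os
import re
import shutil
from pathlib import Path

from bs4 import BeautifulSoup  # installed by the "pip install beautifulsoup4" step

REPO_DIR = Path("repo")
WIKI_DIR = Path("wiki")
IMAGES_DIR = WIKI_DIR / "images"

# Assumed mapping (mirrors the removed shell version): source README -> wiki page.
PAGE_MAP = {
    "benchmarks/rotatingDrum/readme.md": "Performance-of-phasicFlow.md",
}

MD_IMAGE = re.compile(r"!\[([^\]]*)\]\(([^)]+)\)")


def copy_image(src_ref, base_dir):
    """Copy a referenced image into wiki/images and return its new relative path, or None."""
    if not src_ref or src_ref.startswith("http"):
        return None  # leave external URLs (and empty refs) untouched
    src = REPO_DIR / src_ref.lstrip("/") if src_ref.startswith("/") else base_dir / src_ref
    if not src.is_file():
        print(f"warning: image not found: {src}")
        return None
    IMAGES_DIR.mkdir(parents=True, exist_ok=True)
    shutil.copy2(src, IMAGES_DIR / src.name)
    return f"images/{src.name}"


def rewrite_content(text, base_dir):
    # Markdown image references: ![alt](path)
    def md_repl(match):
        new_ref = copy_image(match.group(2), base_dir)
        return f"![{match.group(1)}]({new_ref})" if new_ref else match.group(0)

    text = MD_IMAGE.sub(md_repl, text)

    # HTML <img> tags: update src while keeping every other attribute.
    soup = BeautifulSoup(text, "html.parser")
    for img in soup.find_all("img"):
        new_ref = copy_image(img.get("src"), base_dir)
        if new_ref:
            img["src"] = new_ref
    return str(soup)


def main():
    print(f"Syncing wiki for {os.environ.get('GITHUB_REPOSITORY', '<unknown>')}")
    for rel_path, wiki_page in PAGE_MAP.items():
        readme = REPO_DIR / rel_path
        if not readme.is_file():
            continue
        content = rewrite_content(readme.read_text(encoding="utf-8"), readme.parent)
        (WIKI_DIR / wiki_page).write_text(content, encoding="utf-8")
        print(f"updated {wiki_page}")


if __name__ == "__main__":
    main()
```

Handling the HTML `<img>` tags through BeautifulSoup (which is why the workflow installs beautifulsoup4) keeps all other tag attributes intact without the sed escaping the old shell step needed.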