
Commit e1f29ce
sync thicket tutorial notebooks onto thicket docs
Thicket Tutorial Sync Bot committed Oct 13, 2023
1 parent 9adab52 commit e1f29ce
Showing 1 changed file with 79 additions and 15 deletions.
94 changes: 79 additions & 15 deletions .github/workflows/notebook-sync.yaml
@@ -3,14 +3,21 @@ name: Sync thicket docs with executed notebook from thicket-tutorial
 on:
   push:
     branches: [ develop ]
+  # TODO: remove before merge
+  pull_request:
+    branches: [ develop ]
+  ######
+
+env:
+  GH_TOKEN: ${{ secrets.THICKET_GITHUB_TOKEN }}

 jobs:
-  parse_file_tracker:
+  sync_notebooks:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - name: Parse config file
-        id: parse
+      - name: Detect Changes
+        id: detect
         run: |
           jq_out=$(cat .github/workflows/tutorials_sync.json | jq -r 'to_entries|map("\(.key) \(.value.thicket_fname) \(.value.tutorial_fname)")|.[]')
           IFS=$'\n' rows=( $( echo "$jq_out" | while read -r; do echo "$REPLY"; done ) )
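The config file this step parses, .github/workflows/tutorials_sync.json, is not part of this diff. A minimal sketch of the shape the jq filter expects, with hypothetical notebook names and paths:

    # Hypothetical tutorials_sync.json -- the real file and its keys/paths may differ.
    cat > tutorials_sync.json << 'EOF'
    {
      "getting_started": {
        "thicket_fname": "docs/examples/getting_started.ipynb",
        "tutorial_fname": "notebooks/getting_started.ipynb"
      }
    }
    EOF
    # The workflow's jq filter emits one "<key> <thicket_fname> <tutorial_fname>" row per entry:
    jq -r 'to_entries|map("\(.key) \(.value.thicket_fname) \(.value.tutorial_fname)")|.[]' tutorials_sync.json
    # -> getting_started docs/examples/getting_started.ipynb notebooks/getting_started.ipynb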
@@ -23,24 +30,81 @@ jobs:
           done
           out_list=${out_list%?}
           out_list="$out_list]"
-          echo "tracked_files=$out_list" >> "$GITHUB_OUTPUT"
-          echo "change_track_filter=$change_track_filter" >> "$GITHUB_OUTPUT"
-  detect_changes:
-    needs: parse_file_tracker
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
+          echo "change_track_filter=$change_track_filter" >> $GITHUB_ENV
+          echo "tracked_files=$out_list" >> $GITHUB_ENV
       - name: Get changed files
         uses: dorny/paths-filter@v2
         id: changes
         with:
           list-files: json
           filters: |
-            ${{ needs.outputs.change_track_filter }}
+            changed:
+              - $change_track_filter
       - name: Get number of changed files
         id: calc_num
-        env:
-          changes: ${{ toJSON(changes.changes) }}
         run: |
+          changes=${{ steps.changes.outputs.changed_files }}
           num_changes=$(echo $changes | jq '. | length')
-          echo "num_changes=$num_changes" >> "$GITHUB_OUTPUT"
+          echo "num_changes=$num_changes" >> $GITHUB_ENV
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+      - name: Install Jupyter + dependencies
+        run: |
+          python3 -m pip install jupyter "nbconvert>=6.0.0"
+          python3 -m pip install papermill
+          python3 -m pip install -r requirements.txt
+      - name: Prints GitHub CLI Info
+        env:
+          GH_TOKEN: ${{ secrets.THICKET_GITHUB_TOKEN }}
+        run: |
+          echo "GitHub CLI Version: $(gh --version)"
+          #echo "GitHub CLI Username: $(gh api user | jq -r '.login')"
+          gh api user
+      - name: Sync Files
+        run: |
+          git config user.name 'Thicket Tutorial Sync Bot'
+          git config user.email '<>'
+          sha=$(git rev-parse --short HEAD)
+          gh repo clone LLNL/thicket thicket_src
+          cd thicket_src
+          branch_name="autosync-$(date +%s)"
+          git checkout -b $branch_name
+          #username=$(gh api user | jq -r '.login')
+          #num_prs=$(gh pr list -A "$username" | wc -l)
+          #if test $num_prs -gt 1; then
+          #echo "ERROR: multiple PRs from user $username exist!"
+          #exit 1
+          #elif test $num_prs -eq 1; then
+          #pr_num=$(gh pr list --json number -A "$username" | jq -r '.[0].number')
+          #gh pr close $pr_num -c "Closing for new sync PR" -d
+          #fi
+          thicket_dir=$(pwd)
+          cd ..
+          for elem in $(echo ${{ needs.parse_file_tracker.outputs.tracked_files }} | jq -r '.[]'); do
+            th_file=$(echo $elem | jq -r '.thicket_file')
+            tut_file=$(echo $elem | jq -r '.tutorials_file')
+            tut_abs_file=$(realpath "./${tut_file}")
+            if test ! -f $tut_abs_file; then
+              echo "ERROR: tutorial file $tut_abs_file does not exist!"
+              exit 1
+            fi
+            th_abs_file=$(realpath "${thicket_dir}/${th_file}")
+            if test -f $th_abs_file; then
+              rm $th_abs_file
+            fi
+            # change to running notebook with papermil, creates a new notebook,
+            # add that to docs
+            papermill $tut_abs_file $th_abs_file
+          done
+          cd $thicket_dir
+          git add --all
+          commit_time="$(date +"%m/%d/%Y - %H:%M:%S")"
+          git commit -m "Auto sync notebooks from thicket-tutorial"
+          git push -u origin $branch_name
+          gh pr create -B develop -b \
+            "This is an autogenerated PR for syncing the tutorial notebooks onto Thicket docs\nthicket-tutorial commit: LLNL/thicket-tutorial@$sha" \
+            -t "Autosync Notebooks from thicket-tutorial"
+        env:
+          GH_TOKEN: ${{ secrets.THICKET_GITHUB_TOKEN }}
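The main structural change in this commit is collapsing the old parse_file_tracker / detect_changes job pair into a single sync_notebooks job, so values that previously crossed job boundaries via $GITHUB_OUTPUT and needs.<job>.outputs are now shared between steps of the same job through $GITHUB_ENV. A minimal sketch of that pattern, with illustrative step contents and variable names:

    # Step A's run block: append name=value to the job-wide environment file.
    echo "num_changes=3" >> $GITHUB_ENV
    # Step B's run block (any later step in the same job): the value is available
    # as an ordinary environment variable.
    echo "There are $num_changes changed notebooks"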
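For each tracked entry, the Sync Files step runs the tutorial notebook with papermill and drops the executed copy at the path the thicket docs expect. Roughly equivalent to the following, with hypothetical paths standing in for $tut_abs_file and $th_abs_file:

    # Execute the tutorial notebook top to bottom and save the executed notebook,
    # including cell outputs, into the docs tree of the cloned thicket repo.
    papermill notebooks/getting_started.ipynb docs/examples/getting_started.ipynb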
