# FVUtils Weekly Report #28
---
name: FVUtils Weekly Report

on:
  schedule:
    # Every Monday at 09:00 UTC.
    - cron: '0 9 * * 1'
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

permissions:
  contents: read
  pull-requests: read
  discussions: write
  models: read

env:
  # Model name and endpoint for the GitHub Models inference API.
  MODEL: gpt-4o
  MODELS_ENDPOINT: https://models.inference.ai.azure.com/chat/completions
jobs:
  generate-news:
    runs-on: ubuntu-latest
    steps:
      # ── Step 1: Identify active repos ────────────────────────────────────────
      # Find fvutils repos that have had pushes in the past 7 days. The window
      # bounds are exported via GITHUB_ENV so later steps can reuse them.
      - name: Identify active repos
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          END_DATE=$(date -u +%Y-%m-%dT%H:%M:%SZ)
          START_DATE=$(date -u -d '7 days ago' +%Y-%m-%dT%H:%M:%SZ)
          echo "START_DATE=$START_DATE" >> "$GITHUB_ENV"
          echo "END_DATE=$END_DATE" >> "$GITHUB_ENV"
          echo "Scanning fvutils org for repos pushed since $START_DATE ..."
          # ISO-8601 timestamps compare correctly as strings in jq.
          gh repo list fvutils --limit 100 --json name,pushedAt \
            | jq -r --arg start "$START_DATE" \
                '.[] | select(.pushedAt >= $start) | .name' \
            > /tmp/active_repos.txt
          echo "Active repos:"
          cat /tmp/active_repos.txt
# ── Step 2: Collect details from active repos ────────────────────────────
# For each active repo, gather commits, merged PRs, and updated issues.
- name: Collect details from active repos
env:
GH_TOKEN: ${{ github.token }}
run: |
: > /tmp/activity.md # start fresh
while IFS= read -r repo; do
echo "## fvutils/$repo" >> /tmp/activity.md
echo "" >> /tmp/activity.md
# Commits
COMMITS=$(gh api \
"repos/fvutils/$repo/commits?since=${START_DATE}&until=${END_DATE}&per_page=50" \
--jq '.[] | "- \(.commit.message | split("\n")[0]) [\(.sha[0:7])]"' \
2>/dev/null || true)
if [ -n "$COMMITS" ]; then
echo "### Commits" >> /tmp/activity.md
echo "$COMMITS" >> /tmp/activity.md
echo "" >> /tmp/activity.md
fi
# Pull requests (merged or opened this week)
PRS=$(gh pr list --repo "fvutils/$repo" --state all \
--search "updated:>=$(echo $START_DATE | cut -c1-10)" \
--json number,title,state,author \
--jq '.[] | "- #\(.number) \(.title) [\(.state)] (@\(.author.login))"' \
2>/dev/null || true)
if [ -n "$PRS" ]; then
echo "### Pull Requests" >> /tmp/activity.md
echo "$PRS" >> /tmp/activity.md
echo "" >> /tmp/activity.md
fi
# Issues opened or updated this week
ISSUES=$(gh issue list --repo "fvutils/$repo" --state all \
--search "updated:>=$(echo $START_DATE | cut -c1-10)" \
--json number,title,state,author \
--jq '.[] | "- #\(.number) \(.title) [\(.state)] (@\(.author.login))"' \
2>/dev/null || true)
if [ -n "$ISSUES" ]; then
echo "### Issues" >> /tmp/activity.md
echo "$ISSUES" >> /tmp/activity.md
echo "" >> /tmp/activity.md
fi
done < /tmp/active_repos.txt
echo "--- collected activity ---"
cat /tmp/activity.md
# ── Step 3: Generate news item via gh api /models/$MODEL ─────────────────
# Pass the raw activity to gpt-5-mini-high and ask for a polished news item.
- name: Generate news item
env:
GH_TOKEN: ${{ github.token }}
run: |
WEEK_END=$(date -u '+%d %b %Y')
# Build the JSON payload with Python to avoid shell quoting issues.
# All heredoc lines are indented 10 spaces so YAML includes them in the
# block scalar; YAML strips the 10-space prefix before passing to bash.
export WEEK_END MODEL
python3 - << 'PYEOF'
import json, os
activity = open('/tmp/activity.md').read()
week_end = os.environ['WEEK_END']
model = os.environ['MODEL']
prompt = (
"You are a technical writer for an open-source verification tools project.\n\n"
f"Below is raw weekly activity (commits, PRs, issues) from the fvutils GitHub org "
f"for the week ending {week_end}.\n\n"
+ activity
+ "\n\nWrite a concise weekly news item suitable for the project website. "
"Include: a short overall summary paragraph, then a per-repo highlights section "
"using bullet points, and a brief \"What's next\" closing line. "
"Be factual, avoid hype, use past tense for completed work."
)
payload = {"model": model, "messages": [{"role": "user", "content": prompt}],
"max_tokens": 600, "temperature": 0.4}
with open('/tmp/request.json', 'w') as f:
json.dump(payload, f)
print("Request JSON written.")
PYEOF
curl -s \
-H "Authorization: Bearer $GH_TOKEN" \
-H "Content-Type: application/json" \
--data @/tmp/request.json \
"$MODELS_ENDPOINT" \
> /tmp/api_response.json 2>/tmp/api_error.txt
echo "--- API response ---"
cat /tmp/api_response.json
python3 << 'PYEOF'
import sys, json
data = json.load(open('/tmp/api_response.json'))
if 'choices' in data:
with open('/tmp/news_item.md', 'w') as f:
f.write(data['choices'][0]['message']['content'])
else:
print('ERROR:', data, file=sys.stderr)
sys.exit(1)
PYEOF
if [ $? -ne 0 ]; then
echo "WARNING: GitHub Models API call failed:"
cat /tmp/api_error.txt
echo "Generating fallback news item..."
printf "## fvutils Weekly Highlights — week ending %s\n\n" "$WEEK_END" > /tmp/news_item.md
cat /tmp/activity.md >> /tmp/news_item.md
echo "" >> /tmp/news_item.md
echo "*Auto-generated from commit/PR/issue metadata — AI summarization unavailable.*" >> /tmp/news_item.md
fi
echo "=== Generated news item ==="
cat /tmp/news_item.md
# ── Publish: post to fvutils/.github Discussions ─────────────────────────
- name: Post to Discussions
env:
GH_TOKEN: ${{ github.token }}
run: |
WEEK_NUM=$(date -u +%V)
YEAR=$(date -u +%Y)
TITLE="Weekly Highlights — Week $WEEK_NUM, $YEAR"
BODY=$(cat /tmp/news_item.md)
BODY="$BODY"$'\n\n---\n*Generated automatically by [fvutils-weekly-report](../actions/workflows/fvutils-weekly-report.yaml) using '"$MODEL"'.*'
gh api graphql -f query='
mutation($repositoryId: ID!, $categoryId: ID!, $title: String!, $body: String!) {
createDiscussion(input: {
repositoryId: $repositoryId
categoryId: $categoryId
title: $title
body: $body
}) {
discussion { id url title }
}
}
' \
-f repositoryId="R_kgDOHMTiUw" \
-f categoryId="DIC_kwDOHMTiU84C2BrA" \
-f title="$TITLE" \
-f body="$BODY" \
| jq -r '.data.createDiscussion.discussion | "Created: \(.title)\nURL: \(.url)"'