diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index c54e62b0e..8e6776587 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -11,12 +11,11 @@ permissions:
   pages: write
   id-token: write
 
-concurrency:
-  group: "pages"
-  cancel-in-progress: false
-
 jobs:
   deploy:
+    concurrency:
+      group: "pages"
+      cancel-in-progress: false
     environment:
       name: github-pages
       url: ${{ steps.deployment.outputs.page_url }}
@@ -55,3 +54,41 @@ jobs:
       - name: Deploy to GitHub Pages
         id: deployment
         uses: actions/deploy-pages@v2
+
+  # NB: We use a separate job so we don't hog the single spot dedicated to building/deploying
+  # (since we set `concurrency` on that job). As more things merge, they can start building/deploying
+  # while this spins.
+  reindex:
+    runs-on: ubuntu-22.04
+    needs: deploy
+    steps:
+      - name: Wait for deployment to propagate
+        run: |
+          TIMEOUT_S=300
+          SLEEP_S=5
+
+          while [ $TIMEOUT_S -gt 0 ]; do
+            if curl -s http://www.pantsbuild.org | grep -q "$GITHUB_SHA"; then
+              echo "Found ref! Continuing on."
+              break
+            fi
+
+            echo "Ref not found yet, sleeping for $SLEEP_S seconds"
+            sleep $SLEEP_S
+            TIMEOUT_S=$((TIMEOUT_S-SLEEP_S))
+          done
+
+          if [ $TIMEOUT_S -le 0 ]; then
+            echo "TIMEOUT_S reached, failing!"
+            echo "::error::Timeout waiting for deploy"
+            exit 1
+          fi
+
+      # See https://www.algolia.com/doc/rest-api/crawler/#reindex-with-a-crawler
+      - name: Kick off a crawl
+        run: |
+          curl \
+            -H "Content-Type: application/json" \
+            -X POST \
+            --user ${{ secrets.THEJCANNON_ALGOLIA_CRAWLER_USER_ID }}:${{ secrets.THEJCANNON_ALGOLIA_CRAWLER_API_KEY }} \
+            https://crawler.algolia.com/api/1/crawlers/7ae90af1-f627-4806-a2cc-89e7157daa44/reindex
diff --git a/docusaurus.config.js b/docusaurus.config.js
index c97487147..006ef054a 100644
--- a/docusaurus.config.js
+++ b/docusaurus.config.js
@@ -40,6 +40,8 @@ const formatCopyright = () => {
   // Only set by CI, so fallback to just `local` for local dev
   const docsCommit = process.env.GITHUB_SHA;
 
+  // NB: The full SHA is grepped by our deployment script to know when the site has been updated "live"
+  // so it can trigger a reindex of the crawler.
   const commitLink = docsCommit
     ? makeLink(`${repoUrl}/commit/${docsCommit}`, docsCommit.slice(0, 6))
     : "local";
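To poke at the propagation check without pushing a commit, the same polling loop can be run standalone. This is a sketch, not part of the change above: `SITE_URL` and `WANTED_SHA` are hypothetical stand-ins for the hard-coded site URL and the `$GITHUB_SHA` the workflow gets from its environment, and `-L` is added so an http-to-https redirect doesn't hide the page body from `grep`.

```bash
#!/usr/bin/env bash
# Local sketch of the "Wait for deployment to propagate" step above.
# SITE_URL and WANTED_SHA are placeholder names, not values from the PR.
set -euo pipefail

SITE_URL="${SITE_URL:-http://www.pantsbuild.org}"
WANTED_SHA="${WANTED_SHA:?set WANTED_SHA to the full commit SHA to look for}"

TIMEOUT_S=300
SLEEP_S=5

while [ "$TIMEOUT_S" -gt 0 ]; do
  # -s silences progress output; -L follows redirects so grep sees the real body.
  if curl -sL "$SITE_URL" | grep -q "$WANTED_SHA"; then
    echo "Found $WANTED_SHA in the live page."
    exit 0
  fi
  echo "Not live yet; sleeping ${SLEEP_S}s (${TIMEOUT_S}s left)"
  sleep "$SLEEP_S"
  TIMEOUT_S=$((TIMEOUT_S - SLEEP_S))
done

echo "Timed out waiting for $WANTED_SHA" >&2
exit 1
```

Run it as `WANTED_SHA=<full-sha> ./wait-for-deploy.sh`; it exits 0 once the SHA shows up in the page, mirroring the workflow's pass/fail behavior.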
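Likewise, the reindex request can be fired by hand when debugging crawler behavior (see the Algolia Crawler REST API docs linked in the workflow). A minimal sketch, assuming the credentials are exported locally; `ALGOLIA_CRAWLER_USER_ID` and `ALGOLIA_CRAWLER_API_KEY` are placeholder names for the two GitHub secrets, and the crawler ID is the one from the diff above:

```bash
# Manually trigger the same crawler reindex the workflow triggers in CI.
# The env var names here are stand-ins for the repo's GitHub secrets.
curl \
  -H "Content-Type: application/json" \
  -X POST \
  --user "${ALGOLIA_CRAWLER_USER_ID}:${ALGOLIA_CRAWLER_API_KEY}" \
  "https://crawler.algolia.com/api/1/crawlers/7ae90af1-f627-4806-a2cc-89e7157daa44/reindex"
```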