From d92382f4d0e1329a931ecab0aff7435ea6c1a97d Mon Sep 17 00:00:00 2001 From: ModeSevenIndustrialSolutions <93649628+ModeSevenIndustrialSolutions@users.noreply.github.com> Date: Fri, 21 Jun 2024 13:16:49 +0000 Subject: [PATCH] Chore: Update DevOps tooling from central repository [skip ci] Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .coveragerc | 28 ++ .devops-exclusions | 2 + .flake8 | 3 + .github/dependabot.yml | 17 +- .github/workflows/bootstrap.yaml | 342 ++++++++++++++++++++++++ .github/workflows/builds.yaml | 88 ++++++ .github/workflows/dependencies.yaml | 48 ++++ .github/workflows/documentation.yaml | 60 +++++ .github/workflows/release.yaml | 173 ++++++++++++ .github/workflows/security.yaml | 60 +++++ .github/workflows/test-release.yaml | 152 +++++++++++ .github/workflows/testing.yaml | 61 +++++ .gitignore | 185 +++++++++++++ .markdownlint.yaml | 10 + .pre-commit-config.yaml | 109 +++++++- .prettierignore | 4 + .readthedocs.yml | 27 ++ AUTHORS.rst | 5 + CHANGELOG.rst | 10 + CONTRIBUTING.rst | 353 +++++++++++++++++++++++++ README.md | 61 ----- README.rst | 49 ++++ docs/Makefile | 29 ++ docs/_static/.gitignore | 1 + docs/authors.rst | 2 + docs/changelog.rst | 2 + docs/conf.py | 286 ++++++++++++++++++++ docs/contributing.rst | 1 + docs/index.rst | 61 +++++ docs/license.rst | 7 + docs/readme.rst | 2 + docs/requirements.txt | 5 + pyproject.toml | 39 +++ pyscaffold.cfg | 67 ----- scripts/bootstrap.sh | 136 ++++++++++ scripts/dev-versioning.sh | 24 ++ scripts/linting.sh | 6 + scripts/purge-dev-tags.sh | 8 + scripts/release-versioning.sh | 16 ++ scripts/rename-tests.sh | 24 ++ scripts/template-to-repo.sh | 138 ++++++++++ scripts/tomllint.sh | 104 ++++++++ src/osc_physrisk_financial/__init__.py | 16 ++ src/osc_physrisk_financial/skeleton.py | 149 +++++++++++ test.sh | 141 ---------- tests/conftest.py | 10 + tests/test_skeleton.py | 25 ++ tox.ini | 93 +++++++ 48 files changed, 2957 insertions(+), 282 deletions(-) create 
mode 100644 .coveragerc create mode 100644 .devops-exclusions create mode 100644 .flake8 create mode 100644 .github/workflows/bootstrap.yaml create mode 100644 .github/workflows/builds.yaml create mode 100644 .github/workflows/dependencies.yaml create mode 100644 .github/workflows/documentation.yaml create mode 100644 .github/workflows/release.yaml create mode 100644 .github/workflows/security.yaml create mode 100644 .github/workflows/test-release.yaml create mode 100644 .github/workflows/testing.yaml create mode 100644 .gitignore create mode 100644 .markdownlint.yaml create mode 100644 .prettierignore create mode 100644 .readthedocs.yml create mode 100644 AUTHORS.rst create mode 100644 CHANGELOG.rst create mode 100644 CONTRIBUTING.rst delete mode 100644 README.md create mode 100644 README.rst create mode 100644 docs/Makefile create mode 100644 docs/_static/.gitignore create mode 100644 docs/authors.rst create mode 100644 docs/changelog.rst create mode 100644 docs/conf.py create mode 100644 docs/contributing.rst create mode 100644 docs/index.rst create mode 100644 docs/license.rst create mode 100644 docs/readme.rst create mode 100644 docs/requirements.txt create mode 100644 pyproject.toml delete mode 100644 pyscaffold.cfg create mode 100755 scripts/bootstrap.sh create mode 100755 scripts/dev-versioning.sh create mode 100755 scripts/linting.sh create mode 100755 scripts/purge-dev-tags.sh create mode 100755 scripts/release-versioning.sh create mode 100755 scripts/rename-tests.sh create mode 100755 scripts/template-to-repo.sh create mode 100755 scripts/tomllint.sh create mode 100644 src/osc_physrisk_financial/__init__.py create mode 100644 src/osc_physrisk_financial/skeleton.py delete mode 100755 test.sh create mode 100644 tests/conftest.py create mode 100644 tests/test_skeleton.py create mode 100644 tox.ini diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..43d93a7 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,28 @@ +# .coveragerc to control 
coverage.py +[run] +branch = True +source = osc_physrisk_financial +# omit = bad_file.py + +[paths] +source = + src/ + */site-packages/ + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: diff --git a/.devops-exclusions b/.devops-exclusions new file mode 100644 index 0000000..436ef0b --- /dev/null +++ b/.devops-exclusions @@ -0,0 +1,2 @@ +.github/workflows/linting.yaml +.github/workflows/notebooks.yaml diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..cb23f32 --- /dev/null +++ b/.flake8 @@ -0,0 +1,3 @@ +[flake8] +max-line-length = 160 +extend-ignore = E203, E501 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index aa91a54..012dc41 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,14 +1,25 @@ --- -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2024 The Linux Foundation +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: # prettier-ignore + - package-ecosystem: "pip" # See documentation for possible values + # prettier-ignore + directory: "/" # Location of package manifests + commit-message: + prefix: "[dependabot] Chore:" + open-pull-requests-limit: 3 + schedule: + interval: "weekly" + - package-ecosystem: "github-actions" directory: "/" commit-message: prefix: "[dependabot] Chore:" - open-pull-requests-limit: 1 + open-pull-requests-limit: 3 schedule: interval: "weekly" diff --git a/.github/workflows/bootstrap.yaml b/.github/workflows/bootstrap.yaml new file mode 100644 index 0000000..3e1df48 --- /dev/null +++ b/.github/workflows/bootstrap.yaml @@ -0,0 +1,342 @@ +--- +name: "♻️ Update shared DevOps tooling" + +# yamllint disable-line rule:truthy +on: + workflow_dispatch: + schedule: + - cron: "0 8 * * MON" + +jobs: + update-actions: + name: "Update DevOps tooling" + runs-on: ubuntu-latest + permissions: + # IMPORTANT: mandatory to create or update content/actions/pr + contents: write + actions: write + pull-requests: write + + steps: + - name: "Checkout primary repository" + uses: actions/checkout@v4 + + - name: "Pull devops content from repository" + uses: actions/checkout@v4 + with: + repository: "os-climate/devops-toolkit" + path: ".devops" + + - name: "Update repository workflows and create PR" + id: update-repository + env: + GH_TOKEN: ${{ github.token }} + # yamllint disable rule:line-length + run: | + #SHELLCODESTART + + set -euo pipefail + # set -x + + # Define variables + + DEVOPS_DIR=".devops" + AUTOMATION_BRANCH="update-devops-tooling" + REPO_DIR=$(git rev-parse --show-toplevel) + GIT_ORIGIN=$(git config --get remote.origin.url) + REPO_NAME=$(basename -s .git "$GIT_ORIGIN") + EXCLUDE_FILE=".devops-exclusions" + DEVOPS_REPO='git@github.com:os-climate/devops-toolkit.git' + 
HEAD_BRANCH=$(git rev-parse --abbrev-ref HEAD) + + # Content folder defines the files and folders to update + FILES="$DEVOPS_DIR/content/files.txt" + FOLDERS="$DEVOPS_DIR/content/folders.txt" + + # Define functions + + perform_folder_operation() { + FS_PATH="$1" + if [ -d "$DEVOPS_DIR"/"$FS_PATH" ]; then + echo "Scanning target folder content at: $FS_PATH" + return 0 + else + echo "Upstream folder NOT found: $FS_PATH [skipping]" + return 1 + fi + } + + # Allows for selective opt-out components on a per-path basis + perform_operation() { + FS_PATH="$1" + if [ ! -f "$DEVOPS_DIR"/"$FS_PATH" ]; then + echo "Skipping missing upstream file at: $FS_PATH" + return 1 + fi + # Elements excluded from processing return exit status 1 + if [ ! -f "$EXCLUDE_FILE" ]; then + return 0 + elif [ "$FS_PATH" = "$EXCLUDE_FILE" ]; then + # The exclusion file itself is never updated by automation + return 1 + elif (grep -Fxq "$FS_PATH" "$EXCLUDE_FILE" > /dev/null); then + # Element listed; exclude from processing + return 1 + else + # Element not found in exclusion file; process it + return 0 + fi + } + + # Only updates file if it has changed + selective_file_copy() { + # Receives a single file path as argument + # SHA_SRC=$(sha1sum "$DEVOPS_DIR/$1" | awk '{print $1}') + # SHA_DST=$(sha1sum "$1" 2>/dev/null | awk '{print $1}' || :) + # if [ "$SHA_SRC" != "$SHA_DST" ]; then + if ! 
(cmp "$DEVOPS_DIR/$1" "$1"); then + echo "Copying: $1" + cp "$DEVOPS_DIR/$1" "$1" + git add "$1" + fi + } + + check_pr_for_author() { + AUTHOR="$1" + printf "Checking for pull requests by: %s" "$AUTHOR" + # Capture the existing PR number + PR_NUM=$(gh pr list --state open -L 1 \ + --author "$AUTHOR" --json number | \ + grep "number" | sed "s/:/ /g" | awk '{print $2}' | \ + sed "s/}//g" | sed "s/]//g") + if [ -z "$PR_NUM" ]; then + echo " [none]" + return 1 + else + echo " [$PR_NUM]" + echo "Running: gh pr checkout $PR_NUM" + if (gh pr checkout "$PR_NUM"); then + return 0 + else + echo "Failed to checkout GitHub pull request" + echo "Check errors/output for the cause" + return 2 + fi + fi + } + + check_prs() { + # Define users to check for pre-existing pull requests + AUTOMATION_USER="github-actions[bot]" + if [[ -n ${GH_TOKEN+x} ]]; then + GITHUB_USERS="$AUTOMATION_USER" + else + GITHUB_USERS=$(gh api user | jq -r '.login') + # Check local user account first, if enumerated + GITHUB_USERS+=" $AUTOMATION_USER" + fi + + # Check for existing pull requests opened by this automation + for USER in $GITHUB_USERS; do + if (check_pr_for_author "$USER"); then + return 0 + else + STATUS="$?" 
+ fi + if [ "$STATUS" -eq 1 ]; then + continue + elif [ "$STATUS" -eq 2 ]; then + echo "Failed to checkout pull request"; exit 1 + fi + done + return 1 + } + + # Check if script is running in GHA workflow + in_github() { + if [ -z ${GITHUB_RUN_ID+x} ]; then + echo "Script is NOT running in GitHub" + return 1 + else + echo "Script is running in GitHub" + return 0 + fi + } + + # Check if user is logged into GitHub + logged_in_github() { + if (gh auth status); then + echo "Logged in and authenticated to GitHb" + return 0 + else + echo "Not logged into GitHub, some script operations unavailable" + return 1 + fi + } + + # Main script entry point + + echo "Repository name and HEAD branch: $REPO_NAME [$HEAD_BRANCH]" + + # Ensure working from top-level of GIT repository + CURRENT_DIR=$(pwd) + if [ "$REPO_DIR" != "$CURRENT_DIR" ]; then + echo "Changing directory to: $REPO_DIR" + if ! (cd "$REPO_DIR"); then + echo "Error: unable to change directory"; exit 1 + fi + fi + + # Stashing only used during development/testing + # Check if there are unstaged changes + # if ! (git diff --exit-code --quiet); then + # echo "Stashing unstaged changes in current repository" + # git stash -q + # fi + + # Configure GIT environment only if NOT already configured + # i.e. when running in a GitHub Actions workflow + TEST=$(git config -l > /dev/null 2>&1) + if [ -n "$TEST" ]; then + git config user.name "github-actions[bot]" + git config user.email \ + "41898282+github-actions[bot]@users.noreply.github.com" + fi + + + if ! (check_prs); then + # No existing open pull requests found for this repository + + # Remove remote branch if it exists + git push origin --delete "$AUTOMATION_BRANCH" > /dev/null 2>&1 || : + git branch -D "$AUTOMATION_BRANCH" || : + git checkout -b "$AUTOMATION_BRANCH" + else + # The -B flag swaps branch and creates it if NOT present + git checkout -B "$AUTOMATION_BRANCH" + fi + + # Only if NOT running in GitHub + # (checkout is otherwise performed by earlier steps) + if ! 
(in_github); then + # Remove any stale local copy of the upstream repository + if [ -d "$DEVOPS_DIR" ]; then + rm -Rf "$DEVOPS_DIR" + fi + printf "Cloning DevOps repository into: %s" "$DEVOPS_DIR" + if (git clone "$DEVOPS_REPO" "$DEVOPS_DIR" > /dev/null 2>&1); then + echo " [success]" + else + echo " [failed]"; exit 1 + fi + fi + + # Process upstream DevOps repository content and update + + LOCATIONS="" + # Populate list of files to be updated/sourced + while read -ra LINE; + do + for FILE in "${LINE[@]}"; + do + LOCATIONS+="$FILE " + done + done < "$FILES" + + # Gather files from specified folders and append to locations list + while read -ra LINE; + do + for FOLDER in "${LINE[@]}"; + do + # Check to see if this folder should be skipped + if (perform_folder_operation "$FOLDER"); then + # If necessary, create target folder + if [ ! -d "$FOLDER" ]; then + echo "Creating target folder: $FOLDER" + mkdir "$FOLDER" + fi + # Add folder contents to list of file LOCATIONS + FILES=$(cd "$DEVOPS_DIR/$FOLDER"; find . 
-maxdepth 1 -type f -exec basename {} \;) + for LOCATION in $FILES; do + # Also check if individual files in the folder are excluded + if (perform_operation "$FOLDER/$LOCATION"); then + LOCATIONS+=" $FOLDER/$LOCATION" + fi + done + else + echo "Opted out of folder: $FOLDER" + continue + fi + done; + done < "$FOLDERS" + + # Copy specified files into repository root + for LOCATION in ${LOCATIONS}; do + if (perform_operation "$LOCATION"); then + selective_file_copy "$LOCATION" + else + echo "Not updating: $LOCATION" + fi + done + + # If no changes required, do not throw an error + if [ -z "$(git status --porcelain)" ]; then + echo "No updates/changes to commit"; exit 0 + fi + + # Temporarily disable exit on unbound variable + set +eu +o pipefail + + # Next step is only performed if running as GitHub Action + if [[ -n ${GH_TOKEN+x} ]]; then + # Script is running in a GitHub actions workflow + # Set outputs for use by the next actions/steps + # shellcheck disable=SC2129 + echo "changed=true" >> "$GITHUB_OUTPUT" + echo "branchname=$AUTOMATION_BRANCH" >> "$GITHUB_OUTPUT" + echo "headbranch=$HEAD_BRANCH" >> "$GITHUB_OUTPUT" + # Move to the next workflow step to raise the PR + git push --set-upstream origin "$AUTOMATION_BRANCH" + exit 0 + fi + + # If running shell code locally, continue to raise the PR + + # Reinstate exit on unbound variables + set -euo pipefail + + git status + if ! 
(git commit -as -S -m "Chore: Update DevOps tooling from central repository [skip ci]" \ + -m "This commit created by automation/scripting" --no-verify); then + echo "Commit failed; aborting"; exit 1 + else + # Push branch to remote repository + git push --set-upstream origin "$AUTOMATION_BRANCH" + # Create PR request + gh pr create \ + --title "Chore: Pull DevOps tooling from upstream repository" \ + --body 'Automated by a GitHub workflow: bootstrap.yaml' + fi + # echo "Unstashing unstaged changes, if any exist" + # git stash pop -q || : + #SHELLCODEEND + + - name: Create Pull Request + if: steps.update-repository.outputs.changed == 'true' + uses: peter-evans/create-pull-request@v6 + # env: + # GITHUB_TOKEN: ${{ github.token }} + with: + # Note: Requires a specific/defined Personal Access Token + token: ${{ secrets.ACTIONS_WORKFLOW }} + commit-message: "Chore: Update DevOps tooling from central repository [skip ci]" + signoff: "true" + base: ${{ steps.update-repository.outputs.headbranch }} + branch: ${{ steps.update-repository.outputs.branchname }} + delete-branch: true + title: "Chore: Update DevOps tooling from central repository [skip ci]" + body: | + Update repository with content from upstream: os-climate/devops-toolkit + labels: | + automated pr + draft: false diff --git a/.github/workflows/builds.yaml b/.github/workflows/builds.yaml new file mode 100644 index 0000000..1c7f171 --- /dev/null +++ b/.github/workflows/builds.yaml @@ -0,0 +1,88 @@ +--- +name: "🧱 Builds (Matrix)" + +# yamllint disable-line rule:truthy +on: + workflow_dispatch: + pull_request: + types: [opened, reopened, edited, synchronize] + branches: + - "*" + - "!update-devops-tooling" + +jobs: + parse-project-metadata: + name: "Determine Python versions" + # yamllint disable-line rule:line-length + uses: os-climate/devops-reusable-workflows/.github/workflows/pyproject-toml-fetch-matrix.yaml@main + + test-builds: + name: "Build: Python" + needs: [parse-project-metadata] + runs-on: 
"ubuntu-latest" + continue-on-error: true + # Don't run when pull request is merged + if: github.event.pull_request.merged == false + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.parse-project-metadata.outputs.matrix) }} + + steps: + - name: "Populate environment variables" + id: setenv + run: | + echo "Action triggered by user: ${GITHUB_TRIGGERING_ACTOR}" + set -x + datetime=$(date +'%Y%m%d%H%M') + export datetime + echo "datetime=${datetime}" >> "$GITHUB_OUTPUT" + vernum="${{ matrix.python-version }}.${datetime}" + echo "vernum=${vernum}" >> "$GITHUB_OUTPUT" + + - name: "Checkout repository" + uses: actions/checkout@v4 + + - name: "Set up Python ${{ matrix.python-version }}" + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: "Setup PDM for build commands" + uses: pdm-project/setup-pdm@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: "Tag for test release" + # Delete all local tags, then create a synthetic tag for testing + # Use the date/time to avoid conflicts uploading to Test PyPI + run: | + scripts/dev-versioning.sh "${{ steps.setenv.outputs.vernum }}" + git tag | xargs -L 1 | xargs git tag --delete + git tag "v${{ steps.setenv.outputs.vernum }}" + git checkout "tags/v${{ steps.setenv.outputs.vernum }}" + grep version pyproject.toml + + - name: "Performing build" + run: | + python -m pip install --upgrade pip + if [ -f tox.ini ]; then + pip install tox tox-gh-actions + echo "Found file: tox.ini" + echo "Building with command: tox -e build" + tox -e build + elif [ -f pyproject.toml ]; then + echo "Found file: pyproject.toml" + echo "Building with command: pdm build" + pdm build + else + echo "Neither file found: tox.ini/pyproject.toml" + pip install --upgrade build + echo "Attempting build with: python -m build" + python -m build + fi + + - name: "Validating Artefacts with Twine" + run: | + echo "Validating artefacts with: twine check dist/*" + pip install --upgrade twine + twine 
check dist/* diff --git a/.github/workflows/dependencies.yaml b/.github/workflows/dependencies.yaml new file mode 100644 index 0000000..8edd981 --- /dev/null +++ b/.github/workflows/dependencies.yaml @@ -0,0 +1,48 @@ +--- +name: "♻️ Update dependencies" + +# yamllint disable-line rule:truthy +on: + workflow_dispatch: + schedule: + - cron: "0 8 1 * *" + +jobs: + update-dependencies: + name: "Update dependencies" + runs-on: ubuntu-latest + permissions: + # IMPORTANT: mandatory to raise the PR + id-token: write + pull-requests: write + repository-projects: write + contents: write + + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + steps: + - name: "Checkout repository" + uses: actions/checkout@v4 + + - name: "Set up Python" + uses: actions/setup-python@v5 + + - name: "Update Python dependencies" + uses: pdm-project/update-deps-action@v1 + with: + sign-off-commit: "true" + token: ${{ secrets.GH_TOKEN }} + commit-message: "Chore: Update dependencies and pdm.lock [skip ci]" + pr-title: "Update Python module dependencies" + update-strategy: eager + # Whether to install PDM plugins before update + install-plugins: "false" + + - name: "Export dependencies" + run: | + pdm export --without-hashes -o requirements.txt + + # Ideally, we should export requirements.txt then amend the earlier PR + # update-deps-action could be modified to export PR number as as output + # Or we add the option to export the requirements.txt in that action diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml new file mode 100644 index 0000000..09a0ffa --- /dev/null +++ b/.github/workflows/documentation.yaml @@ -0,0 +1,60 @@ +--- +name: "📘 Documentation build/publish" + +# yamllint disable-line rule:truthy +on: + workflow_dispatch: + pull_request: + types: [closed] + branches: + - "*" + - "!update-devops-tooling" + +jobs: + build_and_deploy: + # Only run when pull request is merged + if: github.event.pull_request.merged == true + name: "Rebuild documentation" + 
runs-on: ubuntu-latest + continue-on-error: true + + permissions: + # IMPORTANT: mandatory for documentation updates; used in final step + id-token: write + pull-requests: write + contents: write + repository-projects: write + + steps: + - name: "Checkout repository" + uses: actions/checkout@v4 + + - name: "Set up Python" + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: "Setup PDM for build commands" + uses: pdm-project/setup-pdm@v4 + + - name: "Install dependencies" + run: | + python -m pip install --upgrade pip + pdm lock + pdm export -o requirements.txt + if [ -f docs/requirements.txt ]; then + pip install -r docs/requirements.txt; fi + + - name: "Build documentation: (tox/sphinx)" + run: | + pip install --upgrade tox + tox -e docs + + - name: "Publish documentation" + if: success() + uses: peaceiris/actions-gh-pages@v4 + with: + publish_branch: gh-pages + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: docs/_build/html/ + keep_files: true diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..ac254c4 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,173 @@ +--- +name: "🐍📦 Production build and release" + +# GitHub/PyPI trusted publisher documentation: +# https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ + +# yamllint disable-line rule:truthy +on: + # workflow_dispatch: + push: + # Only invoked on release tag pushes + branches: + - 'main' + - 'master' + tags: + - 'v*.*.*' + +env: + python-version: "3.10" + +### BUILD ### + +jobs: + + build: + name: "🐍 Build packages" + # Only publish on tag pushes + if: startsWith(github.ref, 'refs/tags/') + runs-on: ubuntu-latest + permissions: + # IMPORTANT: mandatory for Sigstore + id-token: write + steps: + ### BUILDING ### + + - name: "Checkout repository" + uses: actions/checkout@v4 + + - name: "Setup Python" + uses: actions/setup-python@v5 + 
with: + python-version: ${{ env.python-version }} + + - name: "Setup PDM for build commands" + uses: pdm-project/setup-pdm@v4 + + - name: "Update version from tags for production release" + run: | + echo "Github versioning: ${{ github.ref_name }}" + scripts/release-versioning.sh + + - name: "Build with PDM backend" + run: | + pdm build + + ### SIGNING ### + + - name: "Sign packages with Sigstore" + uses: sigstore/gh-action-sigstore-python@v2 + with: + inputs: >- + ./dist/*.tar.gz + ./dist/*.whl + + - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: ${{ github.ref_name }} + path: dist/ + + ### PUBLISH GITHUB ### + + github: + name: "📦 Publish to GitHub" + # Only publish on tag pushes + if: startsWith(github.ref, 'refs/tags/') + needs: + - build + runs-on: ubuntu-latest + permissions: + # IMPORTANT: mandatory to publish artefacts + contents: write + steps: + - name: "⬇ Download build artefacts" + uses: actions/download-artifact@v4 + with: + name: ${{ github.ref_name }} + path: dist/ + + - name: "📦 Publish artefacts to GitHub" + # https://github.com/softprops/action-gh-release + uses: softprops/action-gh-release@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + prerelease: false + tag_name: ${{ github.ref_name }} + name: "Test/Development Build \ + ${{ github.ref_name }}" + # body_path: ${{ github.workspace }}/CHANGELOG.rst + files: | + dist/*.tar.gz + dist/*.whl + dist/*.sigstore + + ### PUBLISH PYPI TEST ### + + testpypi: + name: "📦 Publish to PyPi Test" + # Only publish on tag pushes + if: startsWith(github.ref, 'refs/tags/') + needs: + - build + runs-on: ubuntu-latest + environment: + name: testpypi + permissions: + # IMPORTANT: mandatory for trusted publishing + id-token: write + steps: + - name: "⬇ Download build artefacts" + uses: actions/download-artifact@v4 + with: + name: ${{ github.ref_name }} + path: dist/ + + - name: "Remove files unsupported by PyPi" + run: | + if [ -f dist/buildvars.txt ]; then + rm 
dist/buildvars.txt + fi + rm dist/*.sigstore + + - name: Publish distribution to Test PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + verbose: true + + ### PUBLISH PYPI ### + + pypi: + name: "📦 Publish to PyPi" + # Only publish on tag pushes + if: startsWith(github.ref, 'refs/tags/') + needs: + - testpypi + runs-on: ubuntu-latest + environment: + name: pypi + permissions: + # IMPORTANT: mandatory for trusted publishing + id-token: write + steps: + - name: "⬇ Download build artefacts" + uses: actions/download-artifact@v4 + with: + name: ${{ github.ref_name }} + path: dist/ + + - name: "Remove files unsupported by PyPi" + run: | + if [ -f dist/buildvars.txt ]; then + rm dist/buildvars.txt + fi + rm dist/*.sigstore + + - name: "Setup PDM for build commands" + uses: pdm-project/setup-pdm@v4 + + - name: "Publish release to PyPI" + uses: pypa/gh-action-pypi-publish@release/v1 + with: + verbose: true diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml new file mode 100644 index 0000000..26251da --- /dev/null +++ b/.github/workflows/security.yaml @@ -0,0 +1,60 @@ +--- +# This workflow will install Python dependencies +# run tests and lint with a variety of Python versions +# For more information see: +# https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: "⛔️ Security auditing (Matrix)" + +# yamllint disable-line rule:truthy +on: + workflow_dispatch: + pull_request: + types: [opened, reopened, edited, synchronize] + branches: + - "*" + - "!update-devops-tooling" + +jobs: + + parse-project-metadata: + name: "Determine Python versions" + # yamllint disable-line rule:line-length + uses: os-climate/devops-reusable-workflows/.github/workflows/pyproject-toml-fetch-matrix.yaml@main + + build: + name: "Audit Python dependencies" + needs: [parse-project-metadata] + runs-on: ubuntu-latest + # Don't run when pull request is merged + if: 
github.event.pull_request.merged == false + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.parse-project-metadata.outputs.matrix) }} + + steps: + - name: "Checkout repository" + uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: "Setup PDM for build commands" + uses: pdm-project/setup-pdm@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: "Install dependencies" + run: | + pip install --upgrade pip + pdm lock + pdm export -o requirements.txt + python -m pip install -r requirements.txt + python -m pip install . + pip install --upgrade setuptools + pdm list --graph + + - name: "Run: pip-audit" + uses: pypa/gh-action-pip-audit@v1.0.8 diff --git a/.github/workflows/test-release.yaml b/.github/workflows/test-release.yaml new file mode 100644 index 0000000..bafecb4 --- /dev/null +++ b/.github/workflows/test-release.yaml @@ -0,0 +1,152 @@ +--- +name: "🐍📦 Test build and release" + +# GitHub/PyPI trusted publisher documentation: +# https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ + +# yamllint disable-line rule:truthy +on: + workflow_dispatch: + +env: + python-version: "3.10" + +### BUILD ### + +jobs: + build: + name: "🐍 Build packages" + runs-on: ubuntu-latest + permissions: + # IMPORTANT: mandatory for Sigstore + id-token: write + steps: + ### BUILDING ### + + - name: "Checkout repository" + uses: actions/checkout@v4 + + - name: "Setup Python 3.10" + uses: actions/setup-python@v5 + with: + python-version: ${{ env.python-version }} + + - name: "Setup PDM for build commands" + uses: pdm-project/setup-pdm@v4 + with: + python-version: ${{ env.python-version }} + + - name: "Populate environment variables" + id: setenv + run: | + vernum="${{ env.python-version }}.$(date +'%Y%m%d%H%M')" + echo "vernum=${vernum}" >> "$GITHUB_OUTPUT" + echo 
"vernum=${vernum}" >> buildvars.txt + + - name: "Tag for test release" + # Delete all local tags, then create a synthetic tag for testing + # Use the date/time to avoid conflicts uploading to Test PyPI + run: | + scripts/dev-versioning.sh "${{ steps.setenv.outputs.vernum }}" + git tag | xargs -L 1 | xargs git tag --delete + git tag "v${{ steps.setenv.outputs.vernum }}" + git checkout "tags/v${{ steps.setenv.outputs.vernum }}" + grep version pyproject.toml + + - name: "Build with PDM backend" + run: | + pdm build + # Need to save the build environment for subsequent steps + mv buildvars.txt dist/buildvars.txt + + ### SIGNING ### + + - name: "Sign packages with Sigstore" + uses: sigstore/gh-action-sigstore-python@v2 + + with: + inputs: >- + ./dist/*.tar.gz + ./dist/*.whl + + - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: Development + path: dist/ + + ### PUBLISH GITHUB ### + + github: + name: "📦 Test publish to GitHub" + needs: + - build + runs-on: ubuntu-latest + permissions: + # IMPORTANT: mandatory to publish artefacts + contents: write + steps: + - name: "⬇ Download build artefacts" + uses: actions/download-artifact@v4 + with: + name: Development + path: dist/ + + - name: "Source environment variables" + id: setenv + run: | + if [ -f dist/buildvars.txt ]; then + source dist/buildvars.txt + echo "vernum=${vernum}" >> "$GITHUB_OUTPUT" + else + echo "Build environment variables could not be sourced" + fi + echo "tarball=$(ls dist/*.tgz)" >> "$GITHUB_OUTPUT" + echo "wheel=$(ls dist/*.whl)" >> "$GITHUB_OUTPUT" + + - name: "📦 Publish artefacts to GitHub" + # https://github.com/softprops/action-gh-release + uses: softprops/action-gh-release@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + prerelease: true + tag_name: ${{ steps.setenv.outputs.vernum }} + name: "Test/Development Build \ + ${{ steps.setenv.outputs.vernum }}" + # body_path: ${{ github.workspace }}/CHANGELOG.rst + files: | + dist/*.tar.gz + dist/*.whl + 
dist/*.sigstore + + ### PUBLISH TEST PYPI ### + + testpypi: + name: "📦 Test publish to PyPi" + needs: + - build + runs-on: ubuntu-latest + environment: + name: testpypi + permissions: + # IMPORTANT: mandatory for trusted publishing + id-token: write + steps: + - name: "⬇ Download build artefacts" + uses: actions/download-artifact@v4 + with: + name: Development + path: dist/ + + - name: "Remove files unsupported by PyPi" + run: | + if [ -f dist/buildvars.txt ]; then + rm dist/buildvars.txt + fi + rm dist/*.sigstore + + - name: Publish distribution to Test PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + verbose: true + repository-url: https://test.pypi.org/legacy/ diff --git a/.github/workflows/testing.yaml b/.github/workflows/testing.yaml new file mode 100644 index 0000000..aebd8a9 --- /dev/null +++ b/.github/workflows/testing.yaml @@ -0,0 +1,61 @@ +--- +name: "🧪 Unit tests (Matrix)" + +# yamllint disable-line rule:truthy +on: + workflow_dispatch: + pull_request: + types: [opened, reopened, edited, synchronize] + branches: + - "*" + - "!update-devops-tooling" + +jobs: + + parse-project-metadata: + name: "Determine Python versions" + # yamllint disable-line rule:line-length + uses: os-climate/devops-reusable-workflows/.github/workflows/pyproject-toml-fetch-matrix.yaml@main + + testing: + name: "Run unit tests" + needs: [parse-project-metadata] + runs-on: ubuntu-latest + # Don't run when pull request is merged + if: github.event.pull_request.merged == false + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.parse-project-metadata.outputs.matrix) }} + + steps: + - name: "Checkout repository" + uses: actions/checkout@v4 + + - name: "Setup Python ${{ matrix.python-version }}" + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: "Setup PDM for build commands" + uses: pdm-project/setup-pdm@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: "Install dependencies" + run: | + python -m pip 
install --upgrade pip + pdm export -o requirements.txt + pip install -r requirements.txt + pip install --upgrade pytest pytest-cov + pip install . + + - name: "Run unit tests: pytest" + run: | + if [ -d test ]; then + python -m pytest test + elif [ -d tests ]; then + python -m pytest tests + else + echo "No test/tests directory could be found" + echo "Aborting testing without error"; exit 0 + fi diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..60040b6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,185 @@ +# Temporary devops repo +/.devops + +# Twine temporary files +package-lock.json +package.json + +# Output files from co2budget.ipynb (ITR-Examples) +OECM-images +TPI-images + +# Local node cache +node_modules + +# Credentials / Secrets +credentials.env +config.toml + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +.idea/* +*.iml + +# C extensions +*.so + +# Distribution / packaging +.Python +.pdm-python +.pdm-build +.pdm.toml +.tox +build/ +develop-eggs/ +dist/** +!app/static/frontend/dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +poetry.lock + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Excel stuff: +~$*.xlsx + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +/.pybuilder/ +/target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +examples/itr_ui/ +itr_env/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# The actual launch configuration for AWS +docker-compose_aws.yml +.idea/workspace.xml +*.exe +%USERPROFILE%/* + +# vscode +.vscode + +# Misc +.noseids +test/.DS_Store +.DS_Store + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 0000000..34efb59 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,10 @@ +--- +# Markdownlint configuration file + +# Default state for all rules +default: true + +# Path to configuration file to extend +extends: null + +MD013: false diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 12b5242..f5312c9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,27 +1,62 @@ --- -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2024 The Linux Foundation - ci: autofix_commit_msg: "Chore: pre-commit autoupdate" + skip: + # pre-commit.ci cannot install WGET, so tomlint must be disabled + - tomllint + +exclude: | + (?x)^( + docs\/conf.py| + dco-signoffs/$ + )$ repos: + + - repo: local + hooks: + - id: tomllint + name: "Script: scripts/tomllint.sh" + language: script + # pass_filenames: false + files: \^*.toml + types: [file] + entry: scripts/tomllint.sh . 
+ - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: + - id: check-added-large-files + - id: check-ast - id: check-case-conflict - id: check-executables-have-shebangs + - id: check-json - id: check-merge-conflict - id: check-shebang-scripts-are-executable - id: check-symlinks + - id: check-toml + # - id: detect-aws-credentials + - id: check-xml - id: check-yaml + - id: debug-statements + - id: detect-private-key - id: end-of-file-fixer - id: mixed-line-ending args: ["--fix=lf"] + - id: name-tests-test + args: ["--pytest-test-first"] - id: no-commit-to-branch # - id: pretty-format-json + - id: requirements-txt-fixer - id: trailing-whitespace + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v4.0.0-alpha.8 + hooks: + - id: prettier + args: + ['--no-error-on-unmatched-pattern', '--ignore-unknown'] + - repo: https://github.com/igorshubovych/markdownlint-cli rev: v0.41.0 hooks: @@ -37,24 +72,78 @@ repos: rev: 2.1.1 hooks: - id: bashate + args: ["--ignore=E006,E011"] - repo: https://github.com/shellcheck-py/shellcheck-py rev: v0.10.0.1 hooks: - id: shellcheck + - repo: https://github.com/pycqa/pydocstyle.git + rev: 6.3.0 + hooks: + - id: pydocstyle + additional_dependencies: ["tomli"] + + - repo: https://github.com/Mateusz-Grzelinski/actionlint-py + rev: v1.7.1.15 + hooks: + - id: actionlint + + - repo: https://github.com/pycqa/flake8 + rev: "7.1.0" + hooks: + - id: flake8 + additional_dependencies: + - pep8-naming + - repo: https://github.com/adrienverge/yamllint.git rev: v1.35.1 hooks: - id: yamllint + args: [ "-d", "{rules: {line-length: {max: 120}}, ignore-from-file: [.gitignore],}", ] - - repo: https://github.com/fsfe/reuse-tool - rev: v3.0.2 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.4.9 hooks: - - id: reuse + - id: ruff + files: ^(scripts|tests|custom_components)/.+\.py$ + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format + files: ^(scripts|tests|custom_components)/.+\.py$ - # Check for 
misspellings in documentation files - - repo: https://github.com/codespell-project/codespell - rev: v2.2.2 + - repo: local + hooks: + - id: mypy-cache + name: "create mypy cache" + language: system + pass_filenames: false + entry: bash -c 'if [ ! -d .mypy_cache ]; then /bin/mkdir .mypy_cache; fi; exit 0' + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.10.0" hooks: - - id: codespell + - id: mypy + verbose: true + args: ["--show-error-codes", "--install-types", "--non-interactive"] + additional_dependencies: ["pytest", "types-requests"] + + # Check for misspellings in documentation files + # - repo: https://github.com/codespell-project/codespell + # rev: v2.2.2 + # hooks: + # - id: codespell + + # To embrace black styles, even in docs + # - repo: https://github.com/asottile/blacken-docs + # rev: v1.13.0 + # hooks: + # - id: blacken-docs + # additional_dependencies: [black] + + # Automatically upgrade Python syntax for newer versions + # - repo: https://github.com/asottile/pyupgrade + # rev: v3.15.0 + # hooks: + # - id: pyupgrade + # args: ['--py37-plus'] diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..6491e42 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,4 @@ +**/.pre-commit-config.yaml +**/*.yaml +**/*.yml +**/.git/** diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..a2bcab3 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,27 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +# Build documentation with MkDocs +#mkdocs: +# configuration: mkdocs.yml + +# Optionally build your docs in additional formats such as PDF +formats: + - pdf + +build: + os: ubuntu-22.04 + tools: + python: "3.11" + +python: + install: + - requirements: docs/requirements.txt + - {path: ., method: pip} diff --git 
a/AUTHORS.rst b/AUTHORS.rst new file mode 100644 index 0000000..5281c92 --- /dev/null +++ b/AUTHORS.rst @@ -0,0 +1,5 @@ +============ +Contributors +============ + +* github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> diff --git a/CHANGELOG.rst b/CHANGELOG.rst new file mode 100644 index 0000000..226e6f5 --- /dev/null +++ b/CHANGELOG.rst @@ -0,0 +1,10 @@ +========= +Changelog +========= + +Version 0.1 +=========== + +- Feature A added +- FIX: nasty bug #1729 fixed +- add your changes here! diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000..a4d1ae7 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,353 @@ +.. todo:: THIS IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS! + + The document assumes you are using a source repository service that promotes a + contribution model similar to `GitHub's fork and pull request workflow`_. + While this is true for the majority of services (like GitHub, GitLab, + BitBucket), it might not be the case for private repositories (e.g., when + using Gerrit). + + Also notice that the code examples might refer to GitHub URLs or the text + might use GitHub specific terminology (e.g., *Pull Request* instead of *Merge + Request*). + + Please make sure to check the document having these assumptions in mind + and update things accordingly. + +.. todo:: Provide the correct links/replacements at the bottom of the document. + +.. todo:: You might want to have a look on `PyScaffold's contributor's guide`_, + + especially if your project is open source. The text should be very similar to + this template, but there are a few extra contents that you might decide to + also include, like mentioning labels of your issue tracker or automated + releases. + + +============ +Contributing +============ + +Welcome to ``osc-physrisk-financial`` contributor's guide. 
+ +This document focuses on getting any potential contributor familiarized +with the development processes, but `other kinds of contributions`_ are also +appreciated. + +If you are new to using git_ or have never collaborated in a project previously, +please have a look at `contribution-guide.org`_. Other resources are also +listed in the excellent `guide created by FreeCodeCamp`_ [#contrib1]_. + +Please notice, all users and contributors are expected to be **open, +considerate, reasonable, and respectful**. When in doubt, `Python Software +Foundation's Code of Conduct`_ is a good reference in terms of behavior +guidelines. + + +Issue Reports +============= + +If you experience bugs or general issues with ``osc-physrisk-financial``, please have a look +on the `issue tracker`_. If you don't see anything useful there, please feel +free to fire an issue report. + +.. tip:: + Please don't forget to include the closed issues in your search. + Sometimes a solution was already reported, and the problem is considered + **solved**. + +New issue reports should include information about your programming environment +(e.g., operating system, Python version) and steps to reproduce the problem. +Please try also to simplify the reproduction steps to a very minimal example +that still illustrates the problem you are facing. By removing other factors, +you help us to identify the root cause of the issue. + + +Documentation Improvements +========================== + +You can help improve ``osc-physrisk-financial`` docs by making them more readable and coherent, or +by adding missing information and correcting mistakes. + +``osc-physrisk-financial`` documentation uses Sphinx_ as its main documentation compiler. +This means that the docs are kept in the same repository as the project code, and +that any documentation update is done in the same way was a code contribution. + +.. todo:: Don't forget to mention which markup language you are using. 
+ + e.g., reStructuredText_ or CommonMark_ with MyST_ extensions. + +.. todo:: If your project is hosted on GitHub, you can also mention the following tip: + + .. tip:: + Please notice that the `GitHub web interface`_ provides a quick way of + propose changes in ``osc-physrisk-financial``'s files. While this mechanism can + be tricky for normal code contributions, it works perfectly fine for + contributing to the docs, and can be quite handy. + + If you are interested in trying this method out, please navigate to + the ``docs`` folder in the source repository_, find which file you + would like to propose changes and click in the little pencil icon at the + top, to open `GitHub's code editor`_. Once you finish editing the file, + please write a message in the form at the bottom of the page describing + which changes have you made and what are the motivations behind them and + submit your proposal. + +When working on documentation changes in your local machine, you can +compile them using |tox|_:: + + tox -e docs + +and use Python's built-in web server for a preview in your web browser +(``http://localhost:8000``):: + + python3 -m http.server --directory 'docs/_build/html' + + +Code Contributions +================== + +.. todo:: Please include a reference or explanation about the internals of the project. + + An architecture description, design principles or at least a summary of the + main concepts will make it easy for potential contributors to get started + quickly. + +Submit an issue +--------------- + +Before you work on any non-trivial code contribution it's best to first create +a report in the `issue tracker`_ to start a discussion on the subject. +This often provides additional considerations and avoids unnecessary work. + +Create an environment +--------------------- + +Before you start coding, we recommend creating an isolated `virtual +environment`_ to avoid any problems with your installed Python packages. 
+This can easily be done via either |virtualenv|_:: + + virtualenv + source /bin/activate + +or Miniconda_:: + + conda create -n osc-physrisk-financial python=3 six virtualenv pytest pytest-cov + conda activate osc-physrisk-financial + +Clone the repository +-------------------- + +#. Create an user account on |the repository service| if you do not already have one. +#. Fork the project repository_: click on the *Fork* button near the top of the + page. This creates a copy of the code under your account on |the repository service|. +#. Clone this copy to your local disk:: + + git clone git@github.com:YourLogin/osc-physrisk-financial.git + cd osc-physrisk-financial + +#. You should run:: + + pip install -U pip setuptools -e . + + to be able to import the package under development in the Python REPL. + + .. todo:: if you are not using pre-commit, please remove the following item: + +#. Install |pre-commit|_:: + + pip install pre-commit + pre-commit install + + ``osc-physrisk-financial`` comes with a lot of hooks configured to automatically help the + developer to check the code being written. + +Implement your changes +---------------------- + +#. Create a branch to hold your changes:: + + git checkout -b my-feature + + and start making changes. Never work on the main branch! + +#. Start your work on this branch. Don't forget to add docstrings_ to new + functions, modules and classes, especially if they are part of public APIs. + +#. Add yourself to the list of contributors in ``AUTHORS.rst``. + +#. When you’re done editing, do:: + + git add + git commit + + to record your changes in git_. + + .. todo:: if you are not using pre-commit, please remove the following item: + + Please make sure to see the validation messages from |pre-commit|_ and fix + any eventual issues. + This should automatically use flake8_/black_ to check/fix the code style + in a way that is compatible with the project. + + .. 
important:: Don't forget to add unit tests and documentation in case your + contribution adds an additional feature and is not just a bugfix. + + Moreover, writing a `descriptive commit message`_ is highly recommended. + In case of doubt, you can check the commit history with:: + + git log --graph --decorate --pretty=oneline --abbrev-commit --all + + to look for recurring communication patterns. + +#. Please check that your changes don't break any unit tests with:: + + tox + + (after having installed |tox|_ with ``pip install tox`` or ``pipx``). + + You can also use |tox|_ to run several other pre-configured tasks in the + repository. Try ``tox -av`` to see a list of the available checks. + +Submit your contribution +------------------------ + +#. If everything works fine, push your local branch to |the repository service| with:: + + git push -u origin my-feature + +#. Go to the web page of your fork and click |contribute button| + to send your changes for review. + + .. todo:: if you are using GitHub, you can uncomment the following paragraph + + Find more detailed information in `creating a PR`_. You might also want to open + the PR as a draft first and mark it as ready for review after the feedbacks + from the continuous integration (CI) system or any required fixes. + + +Troubleshooting +--------------- + +The following tips can be used when facing problems to build or test the +package: + +#. Make sure to fetch all the tags from the upstream repository_. + The command ``git describe --abbrev=0 --tags`` should return the version you + are expecting. If you are trying to run CI scripts in a fork repository, + make sure to push all the tags. + You can also try to remove all the egg files or the complete egg folder, i.e., + ``.eggs``, as well as the ``*.egg-info`` folders in the ``src`` folder or + potentially in the root of your project. + +#. Sometimes |tox|_ misses out when new dependencies are added, especially to + ``setup.cfg`` and ``docs/requirements.txt``. 
If you find any problems with + missing dependencies when running a command with |tox|_, try to recreate the + ``tox`` environment using the ``-r`` flag. For example, instead of:: + + tox -e docs + + Try running:: + + tox -r -e docs + +#. Make sure to have a reliable |tox|_ installation that uses the correct + Python version (e.g., 3.7+). When in doubt you can run:: + + tox --version + # OR + which tox + + If you have trouble and are seeing weird errors upon running |tox|_, you can + also try to create a dedicated `virtual environment`_ with a |tox|_ binary + freshly installed. For example:: + + virtualenv .venv + source .venv/bin/activate + .venv/bin/pip install tox + .venv/bin/tox -e all + +#. `Pytest can drop you`_ in an interactive session in the case an error occurs. + In order to do that you need to pass a ``--pdb`` option (for example by + running ``tox -- -k --pdb``). + You can also setup breakpoints manually instead of using the ``--pdb`` option. + + +Maintainer tasks +================ + +Releases +-------- + +.. todo:: This section assumes you are using PyPI to publicly release your package. + + If instead you are using a different/private package index, please update + the instructions accordingly. + +If you are part of the group of maintainers and have correct user permissions +on PyPI_, the following steps can be used to release a new version for +``osc-physrisk-financial``: + +#. Make sure all unit tests are successful. +#. Tag the current commit on the main branch with a release tag, e.g., ``v1.2.3``. +#. Push the new tag to the upstream repository_, e.g., ``git push upstream v1.2.3`` +#. Clean up the ``dist`` and ``build`` folders with ``tox -e clean`` + (or ``rm -rf dist build``) + to avoid confusion with old builds and Sphinx docs. +#. Run ``tox -e build`` and check that the files in ``dist`` have + the correct version (no ``.dirty`` or git_ hash) according to the git_ tag. 
+ Also check the sizes of the distributions, if they are too big (e.g., > + 500KB), unwanted clutter may have been accidentally included. +#. Run ``tox -e publish -- --repository pypi`` and check that everything was + uploaded to PyPI_ correctly. + + + +.. [#contrib1] Even though, these resources focus on open source projects and + communities, the general ideas behind collaborating with other developers + to collectively create software are general and can be applied to all sorts + of environments, including private companies and proprietary code bases. + + +.. <-- start --> +.. todo:: Please review and change the following definitions: + +.. |the repository service| replace:: GitHub +.. |contribute button| replace:: "Create pull request" + +.. _repository: https://github.com//osc-physrisk-financial +.. _issue tracker: https://github.com//osc-physrisk-financial/issues +.. <-- end --> + + +.. |virtualenv| replace:: ``virtualenv`` +.. |pre-commit| replace:: ``pre-commit`` +.. |tox| replace:: ``tox`` + + +.. _black: https://pypi.org/project/black/ +.. _CommonMark: https://commonmark.org/ +.. _contribution-guide.org: https://www.contribution-guide.org/ +.. _creating a PR: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request +.. _descriptive commit message: https://chris.beams.io/posts/git-commit +.. _docstrings: https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html +.. _first-contributions tutorial: https://github.com/firstcontributions/first-contributions +.. _flake8: https://flake8.pycqa.org/en/stable/ +.. _git: https://git-scm.com +.. _GitHub's fork and pull request workflow: https://guides.github.com/activities/forking/ +.. _guide created by FreeCodeCamp: https://github.com/FreeCodeCamp/how-to-contribute-to-open-source +.. _Miniconda: https://docs.conda.io/en/latest/miniconda.html +.. 
_MyST: https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html +.. _other kinds of contributions: https://opensource.guide/how-to-contribute +.. _pre-commit: https://pre-commit.com/ +.. _PyPI: https://pypi.org/ +.. _PyScaffold's contributor's guide: https://pyscaffold.org/en/stable/contributing.html +.. _Pytest can drop you: https://docs.pytest.org/en/stable/how-to/failures.html#using-python-library-pdb-with-pytest +.. _Python Software Foundation's Code of Conduct: https://www.python.org/psf/conduct/ +.. _reStructuredText: https://www.sphinx-doc.org/en/master/usage/restructuredtext/ +.. _Sphinx: https://www.sphinx-doc.org/en/master/ +.. _tox: https://tox.wiki/en/stable/ +.. _virtual environment: https://realpython.com/python-virtual-environments-a-primer/ +.. _virtualenv: https://virtualenv.pypa.io/en/stable/ + +.. _GitHub web interface: https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files +.. _GitHub's code editor: https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files diff --git a/README.md b/README.md deleted file mode 100644 index 34e09fe..0000000 --- a/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Python Template Repository - -This repository hosts the setup scripts and metadata for new OS-Climate Python projects - -## Bootstrap Scripts, Templating and Skeleton Files - -Raise GitHub issues here if/when you need a new OS-Climate GitHub repository creating - -## Description - -Repository and CLI tool naming should reflect and match installable package names. -Repository names are prefixed with "osc-" to help avoid wider name-space conflicts. -Repository names use dashes, while module names and some folders may use underscores. 
- -Package names should be generic - -**Note:** _this ensures consistency if/when packages are made available through PyPI_ - -- We use the following tool to bootstrap new projects: [Pyscaffold](https://pyscaffold.org/en/stable/) -- Initial linting and GitHub workflows are imported from: [devops-toolkit](https://github.com/os-climate/devops-toolkit/) - -The setup script does the following: - -- Invokes pyscaffold to create a folder hierarchy (based on modern PEP standards) -- Creates default linting, TOX and project metadata files -- Performs some post-installation customisation to Pyscaffold (specific to OS-Climate) -- Imports an enhance linting setup from a central OS-Climate reposiutory -- Imports a bunch of shared GitHub actions workflow for common Python project needs - -## Modern PEP Standards Compliance - -We aim to ensure our projects start with the latest PEP standards compliance in mind - -To this end, we do NOT use the following: - -- Setuptools (for builds) -- requirements.txt (for describing module dependencies) - -Instead we are using the following: - -- PDM project (build/dependency management tool) -- pyproject.toml (project metadata description) - -### PDM Project - -For further details on using PDM for managing dependencies and builds, see: - -- [PDM Project](https://pdm-project.org/en/latest/) -- [PDM Project on GitHub](https://github.com/pdm-project/pdm) -- [PDM/Setup Github Action](https://github.com/pdm-project/setup-pdm) - -### Information on pyproject.toml - -- [Guide to writing pyproject.toml](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/) -- [File Specification](https://packaging.python.org/en/latest/specifications/pyproject-toml/) -- [Syntax/Cheat Sheet](https://betterprogramming.pub/a-pyproject-toml-developers-cheat-sheet-5782801fb3ed) - - diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..8276d42 --- /dev/null +++ b/README.rst @@ -0,0 +1,49 @@ +.. 
These are examples of badges you might want to add to your README: + please update the URLs accordingly + + .. image:: https://api.cirrus-ci.com/github//osc-physrisk-financial.svg?branch=main + :alt: Built Status + :target: https://cirrus-ci.com/github//osc-physrisk-financial + .. image:: https://readthedocs.org/projects/osc-physrisk-financial/badge/?version=latest + :alt: ReadTheDocs + :target: https://osc-physrisk-financial.readthedocs.io/en/stable/ + .. image:: https://img.shields.io/coveralls/github//osc-physrisk-financial/main.svg + :alt: Coveralls + :target: https://coveralls.io/r//osc-physrisk-financial + .. image:: https://img.shields.io/pypi/v/osc-physrisk-financial.svg + :alt: PyPI-Server + :target: https://pypi.org/project/osc-physrisk-financial/ + .. image:: https://img.shields.io/conda/vn/conda-forge/osc-physrisk-financial.svg + :alt: Conda-Forge + :target: https://anaconda.org/conda-forge/osc-physrisk-financial + .. image:: https://pepy.tech/badge/osc-physrisk-financial/month + :alt: Monthly Downloads + :target: https://pepy.tech/project/osc-physrisk-financial + .. image:: https://img.shields.io/twitter/url/http/shields.io.svg?style=social&label=Twitter + :alt: Twitter + :target: https://twitter.com/osc-physrisk-financial + +.. image:: https://img.shields.io/badge/-PyScaffold-005CA0?logo=pyscaffold + :alt: Project generated with PyScaffold + :target: https://pyscaffold.org/ + +| + +====================== +osc-physrisk-financial +====================== + + + OS-Climate Python Project + + +A longer description of your project goes here... + + +.. _pyscaffold-notes: + +Note +==== + +This project has been set up using PyScaffold 4.5. For details and usage +information on PyScaffold see https://pyscaffold.org/. 
diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..31655dd --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,29 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build +AUTODOCDIR = api + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1) +$(error "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://sphinx-doc.org/") +endif + +.PHONY: help clean Makefile + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +clean: + rm -rf $(BUILDDIR)/* $(AUTODOCDIR) + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_static/.gitignore b/docs/_static/.gitignore new file mode 100644 index 0000000..3c96363 --- /dev/null +++ b/docs/_static/.gitignore @@ -0,0 +1 @@ +# Empty directory diff --git a/docs/authors.rst b/docs/authors.rst new file mode 100644 index 0000000..cd8e091 --- /dev/null +++ b/docs/authors.rst @@ -0,0 +1,2 @@ +.. _authors: +.. include:: ../AUTHORS.rst diff --git a/docs/changelog.rst b/docs/changelog.rst new file mode 100644 index 0000000..871950d --- /dev/null +++ b/docs/changelog.rst @@ -0,0 +1,2 @@ +.. _changes: +.. 
include:: ../CHANGELOG.rst diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..a798c7b --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,286 @@ +# This file is execfile()d with the current directory set to its containing dir. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import sys +import shutil + +# -- Path setup -------------------------------------------------------------- + +__location__ = os.path.dirname(__file__) + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.join(__location__, "../src")) + +# -- Run sphinx-apidoc ------------------------------------------------------- +# This hack is necessary since RTD does not issue `sphinx-apidoc` before running +# `sphinx-build -b html . _build/html`. See Issue: +# https://github.com/readthedocs/readthedocs.org/issues/1139 +# DON'T FORGET: Check the box "Install your project inside a virtualenv using +# setup.py install" in the RTD Advanced Settings. 
+# Additionally it helps us to avoid running apidoc manually + +try: # for Sphinx >= 1.7 + from sphinx.ext import apidoc +except ImportError: + from sphinx import apidoc + +output_dir = os.path.join(__location__, "api") +module_dir = os.path.join(__location__, "../src/osc_physrisk_financial") +try: + shutil.rmtree(output_dir) +except FileNotFoundError: + pass + +try: + import sphinx + + cmd_line = f"sphinx-apidoc --implicit-namespaces -f -o {output_dir} {module_dir}" + + args = cmd_line.split(" ") + if tuple(sphinx.__version__.split(".")) >= ("1", "7"): + # This is a rudimentary parse_version to avoid external dependencies + args = args[1:] + + apidoc.main(args) +except Exception as e: + print("Running `sphinx-apidoc` failed!\n{}".format(e)) + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.autosummary", + "sphinx.ext.viewcode", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.ifconfig", + "sphinx.ext.mathjax", + "sphinx.ext.napoleon", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix of source filenames. +source_suffix = ".rst" + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = "osc-physrisk-financial" +copyright = "2024, github-actions[bot]" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# version: The short X.Y version. 
+# release: The full version, including alpha/beta/rc tags. +# If you don’t need the separation provided between version and release, +# just set them both to the same value. +try: + from osc_physrisk_financial import __version__ as version +except ImportError: + version = "" + +if not version or version.lower() == "unknown": + version = os.getenv("READTHEDOCS_VERSION", "unknown") # automatically set by RTD + +release = version + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"] + +# The reST default role (used for this markup: `text`) to use for all documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If this is True, todo emits a warning for each TODO entries. The default is False. 
+todo_emit_warnings = True + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "sidebar_width": "300px", + "page_width": "1200px" +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = "" + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. 
+# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = "osc-physrisk-financial-doc" + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ("letterpaper" or "a4paper"). + # "papersize": "letterpaper", + # The font size ("10pt", "11pt" or "12pt"). + # "pointsize": "10pt", + # Additional stuff for the LaTeX preamble. + # "preamble": "", +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ("index", "user_guide.tex", "osc-physrisk-financial Documentation", "github-actions[bot]", "manual") +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = "" + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + +# -- External mapping -------------------------------------------------------- +python_version = ".".join(map(str, sys.version_info[0:2])) +intersphinx_mapping = { + "sphinx": ("https://www.sphinx-doc.org/en/master", None), + "python": ("https://docs.python.org/" + python_version, None), + "matplotlib": ("https://matplotlib.org", None), + "numpy": ("https://numpy.org/doc/stable", None), + "sklearn": ("https://scikit-learn.org/stable", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None), + "scipy": ("https://docs.scipy.org/doc/scipy/reference", None), + "setuptools": ("https://setuptools.pypa.io/en/stable/", None), + "pyscaffold": ("https://pyscaffold.org/en/stable", None), +} + +print(f"loading configurations for {project} {version} ...", file=sys.stderr) diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 0000000..e582053 --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1 @@ +.. include:: ../CONTRIBUTING.rst diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..521f362 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,61 @@ +====================== +osc-physrisk-financial +====================== + +This is the documentation of **osc-physrisk-financial**. + +.. note:: + + This is the main page of your project's `Sphinx`_ documentation. + It is formatted in `reStructuredText`_. Add additional pages + by creating rst-files in ``docs`` and adding them to the `toctree`_ below. + Use then `references`_ in order to link them from this page, e.g. + :ref:`authors` and :ref:`changes`. + + It is also possible to refer to the documentation of other Python packages + with the `Python domain syntax`_. 
By default you can reference the + documentation of `Sphinx`_, `Python`_, `NumPy`_, `SciPy`_, `matplotlib`_, + `Pandas`_, `Scikit-Learn`_. You can add more by extending the + ``intersphinx_mapping`` in your Sphinx's ``conf.py``. + + The pretty useful extension `autodoc`_ is activated by default and lets + you include documentation from docstrings. Docstrings can be written in + `Google style`_ (recommended!), `NumPy style`_ and `classical style`_. + + +Contents +======== + +.. toctree:: + :maxdepth: 2 + + Overview + Contributions & Help + License + Authors + Changelog + Module Reference + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + +.. _toctree: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html +.. _reStructuredText: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html +.. _references: https://www.sphinx-doc.org/en/stable/markup/inline.html +.. _Python domain syntax: https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#the-python-domain +.. _Sphinx: https://www.sphinx-doc.org/ +.. _Python: https://docs.python.org/ +.. _Numpy: https://numpy.org/doc/stable +.. _SciPy: https://docs.scipy.org/doc/scipy/reference/ +.. _matplotlib: https://matplotlib.org/contents.html# +.. _Pandas: https://pandas.pydata.org/pandas-docs/stable +.. _Scikit-Learn: https://scikit-learn.org/stable +.. _autodoc: https://www.sphinx-doc.org/en/master/ext/autodoc.html +.. _Google style: https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings +.. _NumPy style: https://numpydoc.readthedocs.io/en/latest/format.html +.. _classical style: https://www.sphinx-doc.org/en/master/domains.html#info-field-lists diff --git a/docs/license.rst b/docs/license.rst new file mode 100644 index 0000000..3989c51 --- /dev/null +++ b/docs/license.rst @@ -0,0 +1,7 @@ +.. _license: + +======= +License +======= + +.. 
include:: ../LICENSE.txt diff --git a/docs/readme.rst b/docs/readme.rst new file mode 100644 index 0000000..81995ef --- /dev/null +++ b/docs/readme.rst @@ -0,0 +1,2 @@ +.. _readme: +.. include:: ../README.rst diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..2ddf98a --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,5 @@ +# Requirements file for ReadTheDocs, check .readthedocs.yml. +# To build the module reference correctly, make sure every external package +# under `install_requires` in `setup.cfg` is also listed here! +sphinx>=3.2.1 +# sphinx_rtd_theme diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..b4025d7 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,39 @@ +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.setuptools_scm] +# For smarter version schemes and other configuration options, +# check out https://github.com/pypa/setuptools_scm +version_scheme = "no-guess-dev" + +[tool.pdm] +package-dir = "src" + +[project] +name = "osc-physrisk-financial" +description = "OS-Climate Python Project" +readme = "README.rst" +authors = [ + {name = "github-actions[bot]", email = "41898282+github-actions[bot]@users.noreply.github.com"}, +] +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", +] +dependencies = [ + "importlib-metadata; python_version<\"3.8\"", +] +license = {text = "Apache-2.0"} +requires-python = ">=3.10" + +[project.urls] +Homepage = "https://github.com/pyscaffold/pyscaffold/" +Documentation = "https://pyscaffold.org/" + +[project.optional-dependencies] +testing = [ + "pytest", + "pytest-cov", + "setuptools", +] diff --git a/pyscaffold.cfg b/pyscaffold.cfg deleted file mode 100644 index 21ce9f3..0000000 --- a/pyscaffold.cfg +++ /dev/null @@ -1,67 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2024 The Linux Foundation - -# We want to see all the project metadata in: pyproject.toml -# 
https://peps.python.org/pep-0621/ -# This is NOT currently supported by PyScaffold - -[pyscaffold] - -[metadata] -authors = [ { name = "Matthew Watkins", email = "93649628+ModeSevenIndustrialSolutions@users.noreply.github.com" } ] -description = OS-Climate Python Project -license = { text = "Apache-2.0" } -long_description = file: README.rst -platforms = any -requires-python = ">=3.9" -keywords = ["OS-Climate", "Climate", "Climate Change"] - -[project.urls] -Homepage = "https://github.com/os-climate/python-template-repository" -Repository = "https://github.com/os-climate/python-template-repository" -Downloads = "https://github.com/os-climate/python-template-repository/releases" -"Bug Tracker" = "https://github.com/os-climate/python-template-repository/issues" -Documentation = "https://github.com/os-climate/python-template-repository/tree/main/docs" -"Source Code" = "https://github.com/os-climate/python-template-repository" - -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - -[tool.pdm.scripts] -pre_release = "scripts/dev-versioning.sh" -release = "scripts/release-versioning.sh" -test = "pytest" -tox = "tox" -docs = { shell = "cd docs && mkdocs serve", help = "Start the dev server for doc preview" } -lint = "pre-commit run --all-files" -complete = { call = "tasks.complete:main", help = "Create autocomplete files for bash and fish" } - -[tool.pdm.dev-dependencies] -test = ["pdm[pytest]", "pytest-cov"] -tox = ["tox", "tox-pdm>=0.5"] -docs = ["sphinx>=7.2.6", "sphinx-copybutton>=0.5.2"] -dev = ["tox>=4.11.3", "tox-pdm>=0.7.0"] -lint = ["pre-commit"] - -[tool.coverage.run] -source = ["src"] -omit = ["test/*"] -# relative_files = true - -[tool.pytest.ini_options] -testpaths = [ "test/" ] -addopts = "--cov --cov-report html --cov-report term-missing --cov-fail-under 70" - -[tool.black] -line-length = 120 - -[tool.isort] -profile = "black" - -[tool.flake8] -max-line-length = "120" -extend-ignore = [ "E501" ] - -[tool.mypy] -ignore_missing_imports 
= true diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh new file mode 100755 index 0000000..ac070c2 --- /dev/null +++ b/scripts/bootstrap.sh @@ -0,0 +1,136 @@ +#!/usr/bin/env bash + +### Script to bootstrap the OS-Climate DevOps environment ### + +set -eu -o pipefail +# set -xv + +### Variables ### + +SOURCE_FILE="bootstrap.yaml" +WGET_URL="https://raw.githubusercontent.com/os-climate/devops-toolkit/main/.github/workflows/$SOURCE_FILE" +AUTOMATION_BRANCH="update-devops-tooling" +DEVOPS_DIR=".devops" +FETCH_MODE="wget" + +### Checks ### + +GIT_CMD=$(which git) +if [ ! -x "$GIT_CMD" ]; then + echo "GIT command was NOT found in PATH"; exit 1 +fi + +WGET_CMD=$(which wget) +if [ ! -x "$WGET_CMD" ]; then + echo "WGET command was NOT found in PATH; using CURL" + FETCH_MODE="curl" +fi + +MKTEMP_CMD=$(which mktemp) +if [ ! -x "$MKTEMP_CMD" ]; then + echo "MKTEMP command was NOT found in PATH"; exit 1 +fi + + + +SHELL_SCRIPT=$(mktemp -t script-XXXXXXXX.sh) + +### Functions ### + +change_dir_error() { + echo "Could not change directory"; exit 1 +} + +check_for_local_branch() { + BRANCH="$1" + git show-ref --quiet refs/heads/"$BRANCH" + return $? +} + +check_for_remote_branch() { + BRANCH="$1" + git ls-remote --exit-code --heads origin "$BRANCH" + return $? 
+} + +cleanup_on_exit() { + # Remove PR branch, if it exists + echo "Cleaning up on exit: bootstrap.sh" + echo "Swapping from temporary branch to: $HEAD_BRANCH" + git checkout main > /dev/null 2>&1 + if (check_for_local_branch "$AUTOMATION_BRANCH"); then + echo "Removing temporary local branch: $AUTOMATION_BRANCH" + git branch -d "$AUTOMATION_BRANCH" > /dev/null 2>&1 + fi + if [ -f "$SHELL_SCRIPT" ]; then + echo "Removing temporary shell code" + rm "$SHELL_SCRIPT" + fi + if [ -d "$DEVOPS_DIR" ]; then + echo "Removed local copy of devops repository" + rm -Rf "$DEVOPS_DIR" + fi +} +trap cleanup_on_exit EXIT + +### Main script entry point + +# Get organisation and repository name +# git config --get remote.origin.url +# git@github.com:ModeSevenIndustrialSolutions/test-bootstrap.git +URL=$(git config --get remote.origin.url) + +# Take the above and store it converted as ORG_AND_REPO +# e.g. ModeSevenIndustrialSolutions/test-bootstrap +ORG_AND_REPO=${URL/%.git} +ORG_AND_REPO=${ORG_AND_REPO//:/ } +ORG_AND_REPO=$(echo "$ORG_AND_REPO" | awk '{ print $2 }') +HEAD_BRANCH=$("$GIT_CMD" rev-parse --abbrev-ref HEAD) +REPO_DIR=$(git rev-parse --show-toplevel) +# Change to top-level of GIT repository +CURRENT_DIR=$(pwd) +if [ "$REPO_DIR" != "$CURRENT_DIR" ]; then + echo "Changing directory to: $REPO_DIR" + cd "$REPO_DIR" || change_dir_error +fi + +# Get latest copy of bootstrap workflow +if [ -f "$SOURCE_FILE" ]; then + echo "Removing existing copy of: $SOURCE_FILE" + rm "$SOURCE_FILE" +fi +echo "Pulling latest DevOps bootstrap YAML from:" +echo " $WGET_URL" +if [ "$FETCH_MODE" = "wget" ]; then + "$WGET_CMD" -q "$WGET_URL" > /dev/null 2>&1 +fi +if [ ! 
-f "$SOURCE_FILE" ]; then + echo "Attempting to retrieve YAML file with CURL" + curl "$WGET_URL" > "$SOURCE_FILE" +fi + +# The section below extracts shell code from the YAML file +echo "Extracting shell code from: $SOURCE_FILE" +EXTRACT="false" +while read -r LINE; do + if [ "$LINE" = "#SHELLCODESTART" ]; then + EXTRACT="true" + SHELL_SCRIPT=$(mktemp -t script-XXXXXXXX.sh) + touch "$SHELL_SCRIPT" + chmod a+x "$SHELL_SCRIPT" + echo "Creating shell script: $SHELL_SCRIPT" + echo "#!/bin/sh" > "$SHELL_SCRIPT" + fi + if [ "$EXTRACT" = "true" ]; then + echo "$LINE" >> "$SHELL_SCRIPT" + if [ "$LINE" = "#SHELLCODEEND" ]; then + break + fi + fi +done < "$SOURCE_FILE" + +echo "Running extracted shell script code" +# https://www.shellcheck.net/wiki/SC1090 +# Shell code executed is temporary and cannot be checked by linting +# shellcheck disable=SC1090 +. "$SHELL_SCRIPT" diff --git a/scripts/dev-versioning.sh b/scripts/dev-versioning.sh new file mode 100755 index 0000000..d752268 --- /dev/null +++ b/scripts/dev-versioning.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +#set -x + +FILEPATH="pyproject.toml" + +if [ $# -ne 1 ] && [ $# -ne 0 ]; then + echo "Usage: $0 [version-string]" + echo "Substitutes the version string in pyproject.toml"; exit 1 +elif [ $# -eq 1 ]; then + VERSION=$1 + echo "Received version string: $VERSION" +else + datetime=$(date +'%Y%m%d%H%M') + pyver=$(python --version | awk '{print $2}') + VERSION="${pyver}.${datetime}" + echo "Defined version string: $VERSION" +fi + +echo "Performing string substitution on: $FILEPATH" +sed -i "s/.*version =.*/version = \"$VERSION\"/" "$FILEPATH" +echo "Versioning set to:" +grep version "$FILEPATH" +echo "Script completed!"; exit 0 diff --git a/scripts/linting.sh b/scripts/linting.sh new file mode 100755 index 0000000..db9c60a --- /dev/null +++ b/scripts/linting.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +npm install eslint @babel/core @babel/eslint-parser --save-dev +echo "Run with: eslint --ext .toml ." 
#!/bin/bash

# Purge local development tags: deletes every local tag whose name
# contains "202" (the date-stamped dev tags produced by dev-versioning.sh,
# e.g. 3.10.202406211316).

#set -x

# NOTE: "git tag -l" output is already unique; sort/uniq kept for safety
for TAG in $(git tag -l | grep 202 | sort | uniq); do
    # Delete the local tag (the original contained a fused duplicate
    # command "git tag -d "${TAG}"git tag -d "$TAG"", which is invalid)
    git tag -d "${TAG}"
done
echo "Script completed!"; exit 0
# Renames files/folders whose name contains the template name, in either
# its dashed (osc-python-template) or underscored (osc_python_template) form.
rename_object() {
    if [ $# -ne 1 ]; then
        echo "Function requires an argument: rename_object [filesystem object]"; exit 1
    else
        FS_OBJECT="$1"
    fi
    OBJECT_PATH=$(dirname "$FS_OBJECT")
    OBJECT_NAME=$(basename "$FS_OBJECT")

    # Try the dashed form first, then the underscored form; only return
    # early when NEITHER matches. (The original returned early when the
    # dashed name matched but the underscored one did not, so objects
    # containing only the dashed template name were never renamed.)
    if [[ "$OBJECT_NAME" == *"$TEMPLATE_NAME"* ]]; then
        NEW_NAME="${OBJECT_NAME//$TEMPLATE_NAME/$REPO_NAME}"
    elif [[ "$OBJECT_NAME" == *"$ALT_TEMPLATE_NAME"* ]]; then
        NEW_NAME="${OBJECT_NAME//$ALT_TEMPLATE_NAME/$ALT_REPO_NAME}"
    else
        # Nothing to do; abort early
        return
    fi

    # Perform the renaming operation
    if [ -d "$FS_OBJECT" ]; then
        echo "Renaming folder: $FS_OBJECT"
    elif [ -f "$FS_OBJECT" ]; then
        echo "Renaming file: $FS_OBJECT"
    elif [ -L "$FS_OBJECT" ]; then
        echo "Renaming symlink: $FS_OBJECT"
    fi
    git mv "$OBJECT_PATH/$OBJECT_NAME" "$OBJECT_PATH/$NEW_NAME"
}

# Checks file content for the template name (both forms) and replaces
# matching strings in place with sed, reporting how many substitutions
# were required.
file_content_substitution() {
    if [ $# -ne 1 ]; then
        echo "Function requires an argument: file_content_substitution [filename]"; exit 1
    else
        FILENAME="$1"
    fi

    # Do not modify self!
    BASE_FILENAME=$(basename "$FILENAME")
    if [ "$BASE_FILENAME" = "$THIS_SCRIPT" ]; then
        echo "Skipping self: $THIS_SCRIPT"
        return
    fi

    COUNT=0
    if (grep "$TEMPLATE_NAME" "$FILENAME" > /dev/null 2>&1); then
        MATCHES=$(grep -c "$TEMPLATE_NAME" "$FILENAME")
        if [ "$MATCHES" -eq 1 ]; then
            echo "1 content substitution required: $FILENAME (dashes)"
        else
            echo "$MATCHES content substitutions required: $FILENAME (dashes)"
        fi
        # The original used COUNT=$((COUNT++)), which evaluates to the old
        # value and leaves COUNT at 0; add the match count directly instead.
        COUNT=$((COUNT+MATCHES))
        sed -i "s/$TEMPLATE_NAME/$REPO_NAME/g" "$FILENAME"
    fi
    if (grep "$ALT_TEMPLATE_NAME" "$FILENAME" > /dev/null 2>&1); then
        MATCHES=$(grep -c "$ALT_TEMPLATE_NAME" "$FILENAME")
        if [ "$MATCHES" -eq 1 ]; then
            echo "1 content substitution required: $FILENAME (underscores)"
        else
            echo "$MATCHES content substitutions required: $FILENAME (underscores)"
        fi
        COUNT=$((COUNT+MATCHES))
        sed -i "s/$ALT_TEMPLATE_NAME/$ALT_REPO_NAME/g" "$FILENAME"
    fi
    if [ "$COUNT" -eq 1 ]; then
        echo "$COUNT substitution made in file: $FILENAME"
    elif [ "$COUNT" -gt 1 ]; then
        echo "$COUNT substitutions made in file: $FILENAME"
    fi
}
(git rev-parse --show-toplevel > /dev/null); then + echo "Error: this folder is not part of a GIT repository"; exit 1 +fi + +REPO_DIR=$(git rev-parse --show-toplevel) +REPO_NAME=$(basename "$REPO_DIR") +ALT_REPO_NAME="${REPO_NAME//-/_}" + +if [ "$TEMPLATE_NAME" == "$REPO_NAME" ]; then + echo "WARNING: template name matches repository name" +else + echo "Template name: $TEMPLATE_NAME" + echo "Alternate name: $ALT_TEMPLATE_NAME" + echo "Repository name: $REPO_NAME" + echo "Alternate name: $ALT_REPO_NAME" +fi + +# Change to top-level of GIT repository +CURRENT_DIR=$(pwd) +if [ "$REPO_DIR" != "$CURRENT_DIR" ]; then + echo "Changing directory to: $REPO_DIR" + if ! (cd "$REPO_DIR"); then + echo "Could not change directory!"; exit 1 + fi +fi + +echo "Processing repository contents..." + +# Rename directories first, as they affect file paths afterwards +for FS_OBJECT in $(find -- * -type d | xargs -0); do + rename_object "$FS_OBJECT" + if [ -f "$FS_OBJECT" ]; then + file_content_substitution "$FS_OBJECT" + fi +done + +for FS_OBJECT in $(find -- * -type f | xargs -0); do + rename_object "$FS_OBJECT" + if [ -f "$FS_OBJECT" ]; then + file_content_substitution "$FS_OBJECT" + fi +done diff --git a/scripts/tomllint.sh b/scripts/tomllint.sh new file mode 100755 index 0000000..7e46a03 --- /dev/null +++ b/scripts/tomllint.sh @@ -0,0 +1,104 @@ +#!/bin/bash + +# set -x + +status_code="0" +TAPLO_URL=https://github.com/tamasfe/taplo/releases/download/0.8.1 + +# Process commmand-line arguments +if [ $# -eq 0 ]; then + TARGET=$(pwd) +elif [ $# -eq 1 ]; then + TARGET="$1" +fi + +check_platform() { + # Enumerate platform and set binary name appropriately + PLATFORM=$(uname -a) + if (echo "${PLATFORM}" | grep Darwin | grep arm64); then + TAPLO_BIN="taplo-darwin-aarch64" + elif (echo "${PLATFORM}" | grep Darwin | grep x86_64); then + TAPLO_BIN="taplo-darwin-x86_64" + elif (echo "${PLATFORM}" | grep Linux | grep aarch64); then + TAPLO_BIN="taplo-full-linux-aarch64" + elif (echo "${PLATFORM}" 
| grep Linux | grep x86_64); then + TAPLO_BIN="taplo-full-linux-x86_64" + else + echo "Unsupported platform!"; exit 1 + fi + TAPLO_GZIP="$TAPLO_BIN.gz" + +} + +check_file() { + local file_path="$1" + cp "$file_path" "$file_path.original" + /tmp/"${TAPLO_BIN}" format "$file_path" >/dev/null + diff "$file_path" "$file_path.original" + local exit_code=$? + if [ $exit_code -ne 0 ]; then + status_code=$exit_code + echo "::error file={$file_path},line={line},col={col}::{TOML unformatted}" + elif [ -f "$file_path.original" ]; then + rm "$file_path.original" + fi +} + +check_all() { + if [ -d "${TARGET}" ]; then + echo "Scanning all the TOML files at folder: ${TARGET}" + fi + while IFS= read -r current_file; do + echo "Check file $current_file" + check_file "$current_file" + done < <(find . -name '*.toml' -type f -not -path '*/.*') +} + +download_taplo() { + if [ ! -f /tmp/"${TAPLO_GZIP}" ]; then + "${WGET_BIN}" -q -e robots=off -P /tmp "${TAPLO_URL}"/"${TAPLO_GZIP}" + fi + TAPLO_PATH="/tmp/${TAPLO_BIN}" + if [ ! -x "${TAPLO_PATH}" ]; then + gzip -d "/tmp/${TAPLO_GZIP}" + chmod +x "/tmp/${TAPLO_BIN}" + fi + TAPLO_BIN="/tmp/${TAPLO_BIN}" +} + +cleanup_tmp() { + # Only clean the temp directory if it was used + if [ -f /tmp/"${TAPLO_BIN}" ] || [ -f /tmp/"${TAPLO_GZIP}" ]; then + echo "Cleaning up..." + rm /tmp/"${TAPLO_BIN}"* + fi +} + +check_wget() { + # Pre-flight binary checks and download + WGET_BIN=$(which wget) + if [ ! -x "${WGET_BIN}" ]; then + echo "WGET command not found" + sudo apt update; sudo apt-get install -y wget + fi + WGET_BIN=$(which wget) + if [ ! -x "${WGET_BIN}" ]; then + echo "WGET could not be installed"; exit 1 + fi +} + +TAPLO_BIN=$(which taplo) +if [ ! -x "${TAPLO_BIN}" ]; then + check_wget && check_platform && download_taplo +fi + +if [ ! 
-x "${TAPLO_BIN}" ]; then + echo "Download failed: TOML linting binary not found [taplo]" + status_code="1" +else + # To avoid execution when sourcing this script for testing + [ "$0" = "${BASH_SOURCE[0]}" ] && check_all "$@" +fi + +cleanup_tmp +exit $status_code diff --git a/src/osc_physrisk_financial/__init__.py b/src/osc_physrisk_financial/__init__.py new file mode 100644 index 0000000..a65eb6f --- /dev/null +++ b/src/osc_physrisk_financial/__init__.py @@ -0,0 +1,16 @@ +import sys + +if sys.version_info[:2] >= (3, 8): + # TODO: Import directly (no need for conditional) when `python_requires = >= 3.8` + from importlib.metadata import PackageNotFoundError, version # pragma: no cover +else: + from importlib_metadata import PackageNotFoundError, version # pragma: no cover + +try: + # Change here if project is renamed and does not equal the package name + dist_name = "osc-physrisk-financial" + __version__ = version(dist_name) +except PackageNotFoundError: # pragma: no cover + __version__ = "unknown" +finally: + del version, PackageNotFoundError diff --git a/src/osc_physrisk_financial/skeleton.py b/src/osc_physrisk_financial/skeleton.py new file mode 100644 index 0000000..46239d8 --- /dev/null +++ b/src/osc_physrisk_financial/skeleton.py @@ -0,0 +1,149 @@ +""" +This is a skeleton file that can serve as a starting point for a Python +console script. To run this script uncomment the following lines in the +``[options.entry_points]`` section in ``setup.cfg``:: + + console_scripts = + fibonacci = osc_physrisk_financial.skeleton:run + +Then run ``pip install .`` (or ``pip install -e .`` for editable mode) +which will install the command ``fibonacci`` inside your current environment. + +Besides console scripts, the header (i.e. until ``_logger``...) of this file can +also be used as template for Python modules. + +Note: + This file can be renamed depending on your needs or safely removed if not needed. 
def fib(n):
    """Return the n-th Fibonacci number (1-indexed).

    ``fib(1)`` and ``fib(2)`` are both 1; each later term is the sum of
    the previous two.

    Args:
        n (int): position in the sequence; must be positive

    Returns:
        int: n-th Fibonacci number
    """
    assert n > 0
    previous, current = 1, 1
    remaining = n - 1
    while remaining:
        previous, current = current, previous + current
        remaining -= 1
    return previous
+ + Returns: + :obj:`argparse.Namespace`: command line parameters namespace + """ + parser = argparse.ArgumentParser(description="Just a Fibonacci demonstration") + parser.add_argument( + "--version", + action="version", + version=f"osc-physrisk-financial {__version__}", + ) + parser.add_argument(dest="n", help="n-th Fibonacci number", type=int, metavar="INT") + parser.add_argument( + "-v", + "--verbose", + dest="loglevel", + help="set loglevel to INFO", + action="store_const", + const=logging.INFO, + ) + parser.add_argument( + "-vv", + "--very-verbose", + dest="loglevel", + help="set loglevel to DEBUG", + action="store_const", + const=logging.DEBUG, + ) + return parser.parse_args(args) + + +def setup_logging(loglevel): + """Setup basic logging + + Args: + loglevel (int): minimum loglevel for emitting messages + """ + logformat = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s" + logging.basicConfig( + level=loglevel, stream=sys.stdout, format=logformat, datefmt="%Y-%m-%d %H:%M:%S" + ) + + +def main(args): + """Wrapper allowing :func:`fib` to be called with string arguments in a CLI fashion + + Instead of returning the value from :func:`fib`, it prints the result to the + ``stdout`` in a nicely formatted message. + + Args: + args (List[str]): command line parameters as list of strings + (for example ``["--verbose", "42"]``). + """ + args = parse_args(args) + setup_logging(args.loglevel) + _logger.debug("Starting crazy calculations...") + print(f"The {args.n}-th Fibonacci number is {fib(args.n)}") + _logger.info("Script ends here") + + +def run(): + """Calls :func:`main` passing the CLI arguments extracted from :obj:`sys.argv` + + This function can be used as entry point to create console scripts with setuptools. + """ + main(sys.argv[1:]) + + +if __name__ == "__main__": + # ^ This is a guard statement that will prevent the following code from + # being executed in the case someone imports this file instead of + # executing it as a script. 
+ # https://docs.python.org/3/library/__main__.html + + # After installing your project with pip, users can also run your Python + # modules as scripts via the ``-m`` flag, as defined in PEP 338:: + # + # python -m osc_physrisk_financial.skeleton 42 + # + run() diff --git a/test.sh b/test.sh deleted file mode 100755 index 93db8e2..0000000 --- a/test.sh +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env bash - -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2024 The Linux Foundation - -# Thin wrapper script to test workflow YAML code directly from a shell - -set -eu -o pipefail -DEBUG="false" -export DEBUG - -# Check script arguments - -if [ $# -ne 1 ]; then - # Provide a Github action/workflow YAML file as argument - echo "Usage: $0 [workflow YAML file]"; exit 1 -else - SOURCE_FILE="$1" - if [ ! -f "$SOURCE_FILE" ]; then - echo "Specified file could not be read: $SOURCE_FILE"; exit 1 - fi - SETUP_FILE="setup.txt" - if [ -f "$SETUP_FILE" ]; then - echo "Sourcing script actions/variables from: $SETUP_FILE" - # shellcheck disable=SC1090 - source "$SETUP_FILE" - else - echo "No file found specifying inputs: $SETUP_FILE" - fi -fi - -# Check for required binaries - -GIT_CMD=$(which git) -if [ ! -x "$GIT_CMD" ]; then - echo "GIT command was NOT found in PATH"; exit 1 -fi - -MKTEMP_CMD=$(which mktemp) -if [ ! -x "$MKTEMP_CMD" ]; then - echo "MKTEMP command was NOT found in PATH"; exit 1 -fi - -# Script debugging options - -if [ $DEBUG = "true" ]; then - # set -xv - SHELL_SCRIPT="extracted.sh" - PATH=".:$PATH" - if [ -f "$SHELL_SCRIPT" ]; then - # Remove any previously extracted code on subsequent runs - rm "$SHELL_SCRIPT" - fi -else - SHELL_SCRIPT=$(mktemp -t script-XXXXXXXX.sh) -fi - -# Functions - -change_dir_error() { - echo "Could not change directory"; exit 1 -} - -check_for_local_branch() { - BRANCH="$1" - git show-ref --quiet refs/heads/"$BRANCH" - return $? 
-} - -check_for_remote_branch() { - BRANCH="$1" - git ls-remote --exit-code --heads origin "$BRANCH" - return $? -} - -cleanup_on_exit() { - if [ -f "$SHELL_SCRIPT" ]; then - echo "Removing temporary shell code" - rm "$SHELL_SCRIPT" - fi -} - -# Main script entry point - -# Get organisation and repository name -URL=$(git config --get remote.origin.url) - -# Take the above and store it converted as ORG_AND_REPO -# e.g. ModeSevenIndustrialSolutions/test-bootstrap -ORG_AND_REPO=${URL/%.git} -ORG_AND_REPO=${ORG_AND_REPO//:/ } -ORG_AND_REPO=$(echo "$ORG_AND_REPO" | awk '{ print $2 }') -# Variable below is currently unused -# HEAD_BRANCH=$("$GIT_CMD" rev-parse --abbrev-ref HEAD) -REPO_DIR=$(git rev-parse --show-toplevel) - -# Change to top-level of GIT repository -CURRENT_DIR=$(pwd) -if [ "$REPO_DIR" != "$CURRENT_DIR" ]; then - echo "Changing directory to: $REPO_DIR" - cd "$REPO_DIR" || change_dir_error -fi - -# The section below extracts shell code from the YAML file -echo "Attempting to parse shell code from: $SOURCE_FILE" -EXTRACT="false" -while read -r LINE; do - if [[ "$LINE" = *"#SHELLCODEEND"* ]]; then - EXTRACT="complete" - break - - elif [[ "$LINE" = *"#SHELLCODESTART"* ]]; then - EXTRACT="true" - touch "$SHELL_SCRIPT" - chmod a+x "$SHELL_SCRIPT" - continue - - elif [ "$EXTRACT" = "true" ]; then - echo "$LINE" >> "$SHELL_SCRIPT" - fi -done < "$SOURCE_FILE" - -# Only remove temporary files when NOT debugging -if [ "$DEBUG" != "true" ]; then - trap cleanup_on_exit EXIT -fi - -if [ -f "$SHELL_SCRIPT" ] && [ "$DEBUG" = "true" ]; then - echo "Extracted code to file: $SHELL_SCRIPT" -fi - -if [ "$EXTRACT" = "complete" ]; then - echo "Executing extracted shell script/code..." 
import pytest

from osc_physrisk_financial.skeleton import fib, main

__author__ = "github-actions[bot]"
__copyright__ = "github-actions[bot]"
__license__ = "Apache-2.0"


def test_fib():
    """API Tests"""
    # Table-driven check of known Fibonacci values.
    for argument, expected in ((1, 1), (2, 1), (7, 13)):
        assert fib(argument) == expected
    # Non-positive input must trip the assert-based guard.
    with pytest.raises(AssertionError):
        fib(-10)


def test_main(capsys):
    """CLI Tests"""
    # capsys is a pytest fixture that allows asserts against stdout/stderr
    # https://docs.pytest.org/en/stable/capture.html
    main(["7"])
    out = capsys.readouterr().out
    assert "The 7-th Fibonacci number is 13" in out
+ +[tox] +minversion = 3.24 +envlist = default +isolated_build = True + + +[testenv] +description = Invoke pytest to run automated tests +setenv = + TOXINIDIR = {toxinidir} +passenv = + HOME + SETUPTOOLS_* +extras = + testing +commands = + pytest {posargs} + + +# # To run `tox -e lint` you need to make sure you have a +# # `.pre-commit-config.yaml` file. See https://pre-commit.com +# [testenv:lint] +# description = Perform static analysis and style checks +# skip_install = True +# deps = pre-commit +# passenv = +# HOMEPATH +# PROGRAMDATA +# SETUPTOOLS_* +# commands = +# pre-commit run --all-files {posargs:--show-diff-on-failure} + + +[testenv:{build,clean}] +description = + build: Build the package in isolation according to PEP517, see https://github.com/pypa/build + clean: Remove old distribution files and temporary build artifacts (./build and ./dist) +# https://setuptools.pypa.io/en/stable/build_meta.html#how-to-use-it +skip_install = True +changedir = {toxinidir} +deps = + build: build[virtualenv] +passenv = + SETUPTOOLS_* +commands = + clean: python -c 'import shutil; [shutil.rmtree(p, True) for p in ("build", "dist", "docs/_build")]' + clean: python -c 'import pathlib, shutil; [shutil.rmtree(p, True) for p in pathlib.Path("src").glob("*.egg-info")]' + build: python -m build {posargs} +# By default, both `sdist` and `wheel` are built. 
If your sdist is too big or you don't want +# to make it available, consider running: `tox -e build -- --wheel` + + +[testenv:{docs,doctests,linkcheck}] +description = + docs: Invoke sphinx-build to build the docs + doctests: Invoke sphinx-build to run doctests + linkcheck: Check for broken links in the documentation +passenv = + SETUPTOOLS_* +setenv = + DOCSDIR = {toxinidir}/docs + BUILDDIR = {toxinidir}/docs/_build + docs: BUILD = html + doctests: BUILD = doctest + linkcheck: BUILD = linkcheck +deps = + -r {toxinidir}/docs/requirements.txt + # ^ requirements.txt shared with Read The Docs +commands = + sphinx-build --color -b {env:BUILD} -d "{env:BUILDDIR}/doctrees" "{env:DOCSDIR}" "{env:BUILDDIR}/{env:BUILD}" {posargs} + + +[testenv:publish] +description = + Publish the package you have been developing to a package index server. + By default, it uses testpypi. If you really want to publish your package + to be publicly accessible in PyPI, use the `-- --repository pypi` option. +skip_install = True +changedir = {toxinidir} +passenv = + # See: https://twine.readthedocs.io/en/latest/ + TWINE_USERNAME + TWINE_PASSWORD + TWINE_REPOSITORY + TWINE_REPOSITORY_URL +deps = twine +commands = + python -m twine check dist/* + python -m twine upload {posargs:--repository {env:TWINE_REPOSITORY:testpypi}} dist/*