diff --git a/.credo.exs b/.credo.exs new file mode 100644 index 0000000..dbdab20 --- /dev/null +++ b/.credo.exs @@ -0,0 +1,217 @@ +# This file contains the configuration for Credo and you are probably reading +# this after creating it with `mix credo.gen.config`. +# +# If you find anything wrong or unclear in this file, please report an +# issue on GitHub: https://github.com/rrrene/credo/issues +# +%{ + # + # You can have as many configs as you like in the `configs:` field. + configs: [ + %{ + # + # Run any config using `mix credo -C `. If no config name is given + # "default" is used. + # + name: "default", + # + # These are the files included in the analysis: + files: %{ + # + # You can give explicit globs or simply directories. + # In the latter case `**/*.{ex,exs}` will be used. + # + included: [ + "lib/", + "src/", + "test/", + "web/", + "apps/*/lib/", + "apps/*/src/", + "apps/*/test/", + "apps/*/web/" + ], + excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] + }, + # + # Load and configure plugins here: + # + plugins: [], + # + # If you create your own checks, you must specify the source files for + # them here, so they can be loaded by Credo before running the analysis. + # + requires: [], + # + # If you want to enforce a style guide and need a more traditional linting + # experience, you can change `strict` to `true` below: + # + strict: false, + # + # To modify the timeout for parsing files, change this value: + # + parse_timeout: 5000, + # + # If you want to use uncolored output by default, you can change `color` + # to `false` below: + # + color: true, + # + # You can customize the parameters of any check by adding a second element + # to the tuple. 
+ # + # To disable a check put `false` as second element: + # + # {Credo.Check.Design.DuplicatedCode, false} + # + checks: %{ + enabled: [ + # + ## Consistency Checks + # + {Credo.Check.Consistency.ExceptionNames, []}, + {Credo.Check.Consistency.LineEndings, []}, + {Credo.Check.Consistency.ParameterPatternMatching, []}, + {Credo.Check.Consistency.SpaceAroundOperators, []}, + {Credo.Check.Consistency.SpaceInParentheses, []}, + {Credo.Check.Consistency.TabsOrSpaces, []}, + + # + ## Design Checks + # + # You can customize the priority of any check + # Priority values are: `low, normal, high, higher` + # + {Credo.Check.Design.AliasUsage, + [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]}, + {Credo.Check.Design.TagFIXME, []}, + # You can also customize the exit_status of each check. + # If you don't want TODO comments to cause `mix credo` to fail, just + # set this value to 0 (zero). + # + {Credo.Check.Design.TagTODO, [exit_status: 2]}, + + # + ## Readability Checks + # + {Credo.Check.Readability.AliasOrder, []}, + {Credo.Check.Readability.FunctionNames, []}, + {Credo.Check.Readability.LargeNumbers, []}, + {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]}, + {Credo.Check.Readability.ModuleAttributeNames, []}, + {Credo.Check.Readability.ModuleDoc, []}, + {Credo.Check.Readability.ModuleNames, []}, + {Credo.Check.Readability.ParenthesesInCondition, []}, + {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []}, + {Credo.Check.Readability.PipeIntoAnonymousFunctions, []}, + {Credo.Check.Readability.PredicateFunctionNames, []}, + {Credo.Check.Readability.PreferImplicitTry, []}, + {Credo.Check.Readability.RedundantBlankLines, []}, + {Credo.Check.Readability.Semicolons, []}, + {Credo.Check.Readability.SpaceAfterCommas, []}, + {Credo.Check.Readability.Specs, []}, + {Credo.Check.Readability.StringSigils, []}, + {Credo.Check.Readability.TrailingBlankLine, []}, + {Credo.Check.Readability.TrailingWhiteSpace, []}, + 
{Credo.Check.Readability.UnnecessaryAliasExpansion, []}, + {Credo.Check.Readability.VariableNames, []}, + {Credo.Check.Readability.WithSingleClause, []}, + + # + ## Refactoring Opportunities + # + {Credo.Check.Refactor.Apply, []}, + {Credo.Check.Refactor.CondStatements, []}, + {Credo.Check.Refactor.CyclomaticComplexity, []}, + {Credo.Check.Refactor.FilterCount, []}, + {Credo.Check.Refactor.FilterFilter, []}, + {Credo.Check.Refactor.FunctionArity, []}, + {Credo.Check.Refactor.LongQuoteBlocks, []}, + {Credo.Check.Refactor.MapJoin, []}, + {Credo.Check.Refactor.MatchInCondition, []}, + {Credo.Check.Refactor.NegatedConditionsInUnless, []}, + {Credo.Check.Refactor.NegatedConditionsWithElse, []}, + {Credo.Check.Refactor.Nesting, []}, + {Credo.Check.Refactor.RedundantWithClauseResult, []}, + {Credo.Check.Refactor.RejectReject, []}, + {Credo.Check.Refactor.UnlessWithElse, []}, + {Credo.Check.Refactor.WithClauses, []}, + + # + ## Warnings + # + {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []}, + {Credo.Check.Warning.BoolOperationOnSameValues, []}, + {Credo.Check.Warning.Dbg, []}, + {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, + {Credo.Check.Warning.IExPry, []}, + {Credo.Check.Warning.IoInspect, []}, + {Credo.Check.Warning.MissedMetadataKeyInLoggerConfig, []}, + {Credo.Check.Warning.OperationOnSameValues, []}, + {Credo.Check.Warning.OperationWithConstantResult, []}, + {Credo.Check.Warning.RaiseInsideRescue, []}, + {Credo.Check.Warning.SpecWithStruct, []}, + {Credo.Check.Warning.UnsafeExec, []}, + {Credo.Check.Warning.UnusedEnumOperation, []}, + {Credo.Check.Warning.UnusedFileOperation, []}, + {Credo.Check.Warning.UnusedKeywordOperation, []}, + {Credo.Check.Warning.UnusedListOperation, []}, + {Credo.Check.Warning.UnusedPathOperation, []}, + {Credo.Check.Warning.UnusedRegexOperation, []}, + {Credo.Check.Warning.UnusedStringOperation, []}, + {Credo.Check.Warning.UnusedTupleOperation, []}, + {Credo.Check.Warning.WrongTestFileExtension, []} + ], + disabled: [ + 
# + # Checks scheduled for next check update (opt-in for now) + {Credo.Check.Refactor.UtcNowTruncate, []}, + + # + # Controversial and experimental checks (opt-in, just move the check to `:enabled` + # and be sure to use `mix credo --strict` to see low priority checks) + # + {Credo.Check.Consistency.MultiAliasImportRequireUse, []}, + {Credo.Check.Consistency.UnusedVariableNames, []}, + {Credo.Check.Design.DuplicatedCode, []}, + {Credo.Check.Design.SkipTestWithoutComment, []}, + {Credo.Check.Readability.AliasAs, []}, + {Credo.Check.Readability.BlockPipe, []}, + {Credo.Check.Readability.ImplTrue, []}, + {Credo.Check.Readability.MultiAlias, []}, + {Credo.Check.Readability.NestedFunctionCalls, []}, + {Credo.Check.Readability.OneArityFunctionInPipe, []}, + {Credo.Check.Readability.OnePipePerLine, []}, + {Credo.Check.Readability.SeparateAliasRequire, []}, + {Credo.Check.Readability.SingleFunctionToBlockPipe, []}, + {Credo.Check.Readability.SinglePipe, []}, + {Credo.Check.Readability.StrictModuleLayout, []}, + {Credo.Check.Readability.WithCustomTaggedTuple, []}, + {Credo.Check.Refactor.ABCSize, []}, + {Credo.Check.Refactor.AppendSingleItem, []}, + {Credo.Check.Refactor.DoubleBooleanNegation, []}, + {Credo.Check.Refactor.FilterReject, []}, + {Credo.Check.Refactor.IoPuts, []}, + {Credo.Check.Refactor.MapMap, []}, + {Credo.Check.Refactor.ModuleDependencies, []}, + {Credo.Check.Refactor.NegatedIsNil, []}, + {Credo.Check.Refactor.PassAsyncInTestCases, []}, + {Credo.Check.Refactor.PipeChainStart, []}, + {Credo.Check.Refactor.RejectFilter, []}, + {Credo.Check.Refactor.VariableRebinding, []}, + {Credo.Check.Warning.LazyLogging, []}, + {Credo.Check.Warning.LeakyEnvironment, []}, + {Credo.Check.Warning.MapGetUnsafePass, []}, + {Credo.Check.Warning.MixEnv, []}, + {Credo.Check.Warning.UnsafeToAtom, []} + + # {Credo.Check.Refactor.MapInto, []}, + + # + # Custom checks can be created using `mix credo.gen.check`. 
+ # + ] + } + } + ] +} diff --git a/.github/actions/elixir_setup/action.yaml b/.github/actions/elixir_setup/action.yaml new file mode 100644 index 0000000..c4d0156 --- /dev/null +++ b/.github/actions/elixir_setup/action.yaml @@ -0,0 +1,30 @@ +# Install Elixir +# +# I install the Elixir and OTP versions specified in the inputs. +# +# I take two parameters: +# - elixir-version: The version of Elixir to install. +# - otp-version: The version of OTP to install. + +name: Install Elixir +description: Install Elixir +inputs: + elixir-version: + description: "elixir version" + required: true + otp-version: + description: "otp version" + required: true +runs: + using: "composite" + steps: + # setup the beam + - name: configure beam + uses: erlef/setup-beam@v1 + with: + otp-version: ${{ inputs.otp-version }} + elixir-version: ${{ inputs.elixir-version }} + # install protobuf package + - name: Install Protobuf Elixir dependencies + shell: bash + run: mix escript.install hex protobuf --force diff --git a/.github/actions/os_setup/action.yaml b/.github/actions/os_setup/action.yaml new file mode 100644 index 0000000..61520b1 --- /dev/null +++ b/.github/actions/os_setup/action.yaml @@ -0,0 +1,18 @@ +# OS Setup +# +# I install the necessary packages at the OS level. +name: OS Setup +description: OS Setup +runs: + using: "composite" + steps: + - name: install apt packages + shell: bash + run: sudo apt-get install -y libsodium-dev protobuf-compiler + + - name: install new protobuf + shell: bash + run: | + curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v29.0/protoc-29.0-linux-x86_64.zip && \ + unzip protoc-29.0-linux-x86_64.zip -d $HOME/.local + echo "$HOME/.local" >> $GITHUB_PATH diff --git a/.github/workflows/build_release.yaml b/.github/workflows/build_release.yaml new file mode 100644 index 0000000..8dd0a9b --- /dev/null +++ b/.github/workflows/build_release.yaml @@ -0,0 +1,68 @@ +# Build Release +# +# I build a release of the project on GitHub. 
+# +# I take three parameters: +# - mix-env: The environment to build the release for. +# - elixir-version: The version of Elixir to use. +# - otp-version: The version of OTP to use. +name: Build Release +on: + workflow_call: + inputs: + mix-env: + required: true + type: string + elixir-version: + required: true + type: string + otp-version: + required: true + type: string + release_name: + required: true + type: string +jobs: + build_release: + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + + - name: setup deps and _build cache + uses: actions/cache@v4 + with: + path: | + ${{ github.workspace }}/deps + ${{ github.workspace }}/_build + key: ${{ runner.os }}-build-${{ inputs.mix-env }}-${{ hashFiles('mix.lock') }} + + - name: setup elixir + uses: ./.github/actions/elixir_setup + with: + elixir-version: ${{ inputs.elixir-version }} + otp-version: ${{ inputs.otp-version }} + + - name: install apt packages + uses: ./.github/actions/os_setup + + #--------------------------------------------------------------------------- + # Create binaries + # + # This part of the action generates all the binaries that are necessary + # for a release. + # If these need to be compiled for different elixir versions, the matrix + # should be put in the on_release.yml file. 
+ + # - name: create the client binary + # continue-on-error: false + # shell: bash + # run: | + # MIX_ENV=prod mix do --app anoma_client escript.build + # mv "apps/anoma_client/anoma_client" "apps/anoma_client/anoma_client-elixir-${{ inputs.elixir-version }}-otp-${{ inputs.otp-version }}" + + # - name: create artifact of the client binary + # uses: actions/upload-artifact@v4 + # with: + # name: anoma_client-elixir-${{ inputs.elixir-version }}-otp-${{ inputs.otp-version }} + # path: "apps/anoma_client/anoma_client-elixir-${{ inputs.elixir-version }}-otp-${{ inputs.otp-version }}" diff --git a/.github/workflows/compile.yaml b/.github/workflows/compile.yaml new file mode 100644 index 0000000..7d0300a --- /dev/null +++ b/.github/workflows/compile.yaml @@ -0,0 +1,54 @@ +# Compile +# +# I compile the Elixir project. +# +# I take three parameters: +# - mix-env: The environment to compile the project for. +# - elixir-version: The version of Elixir to use. +# - otp-version: The version of OTP to use. 
+name: Compile +on: + workflow_call: + inputs: + mix-env: + required: true + type: string + elixir-version: + required: true + type: string + otp-version: + required: true + type: string +jobs: + compile: + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + + - name: setup deps and _build cache + uses: actions/cache@v4 + with: + path: | + ${{ github.workspace }}/deps + ${{ github.workspace }}/_build + key: ${{ runner.os }}-build-${{ inputs.mix-env }}-${{ hashFiles('mix.lock') }} + + - name: setup elixir + uses: ./.github/actions/elixir_setup + with: + elixir-version: ${{ inputs.elixir-version }} + otp-version: ${{ inputs.otp-version }} + + - name: install apt packages + uses: ./.github/actions/os_setup + + - name: fetch elixir dependencies + run: MIX_ENV=${{ inputs.mix-env }} mix deps.get + + - name: print protobuf information + shell: bash + run: protoc --version + + - name: compile elixir project + run: MIX_ENV=${{ inputs.mix-env }} mix compile diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml new file mode 100644 index 0000000..e69abda --- /dev/null +++ b/.github/workflows/docs.yaml @@ -0,0 +1,92 @@ +# Generate Docs +# +# I generate the documentation for the project. +# I compile the documentation and publish it on GitHub pages. +# +# I take three parameters: +# - mix-env: The environment to build the release for. +# - elixir-version: The version of Elixir to use. +# - otp-version: The version of OTP to use. 
+name: Generate Docs +on: + workflow_call: + inputs: + mix-env: + required: true + type: string + elixir-version: + required: true + type: string + otp-version: + required: true + type: string +jobs: + compile-docs: + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + + - name: setup deps and _build cache + uses: actions/cache@v4 + with: + path: | + ${{ github.workspace }}/deps + ${{ github.workspace }}/_build + key: ${{ runner.os }}-build-${{ inputs.mix-env }}-${{ hashFiles('mix.lock') }} + + - name: setup elixir + uses: ./.github/actions/elixir_setup + with: + elixir-version: ${{ inputs.elixir-version }} + otp-version: ${{ inputs.otp-version }} + + - name: install apt packages + uses: ./.github/actions/os_setup + + # - name: compile docs + # shell: bash + # run: make docs-release + - name: generate docs + run: MIX_ENV=${{ inputs.mix-env }} mix docs + + publish-docs: + environment: + name: github-pages + needs: compile-docs + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + + - name: setup deps and _build cache + uses: actions/cache@v4 + with: + path: | + ${{ github.workspace }}/deps + ${{ github.workspace }}/_build + key: ${{ runner.os }}-build-${{ inputs.mix-env }}-${{ hashFiles('mix.lock') }} + + - name: setup elixir + uses: ./.github/actions/elixir_setup + with: + elixir-version: ${{ inputs.elixir-version }} + otp-version: ${{ inputs.otp-version }} + + - name: install apt packages + uses: ./.github/actions/os_setup + + - name: generate docs + run: MIX_ENV=${{ inputs.mix-env }} mix docs + + - name: setup github pages + uses: actions/configure-pages@v5 + + - name: upload docs to github pages + uses: actions/upload-pages-artifact@v3 + with: + path: "./doc" + + - name: deploy github pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml new file mode 100644 index 0000000..633253a --- /dev/null +++ 
b/.github/workflows/lint.yaml @@ -0,0 +1,80 @@ +# Lint +# +# I lint the codebase. +# +# I take three parameters: +# - mix-env: The environment to build the release for. +# - elixir-version: The version of Elixir to use. +# - otp-version: The version of OTP to use. +# +# Linting means the following. +# - Check if the code if formatted +# - Check if the code has trailing whitespaces +# - Check if the code has any issues reported by credo +# - Check if the code has any issues reported by dialyzer +name: Lint +on: + workflow_call: + inputs: + mix-env: + required: true + type: string + elixir-version: + required: true + type: string + otp-version: + required: true + type: string +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + + - name: setup deps and _build cache + uses: actions/cache@v4 + with: + path: | + ${{ github.workspace }}/deps + ${{ github.workspace }}/_build + key: ${{ runner.os }}-build-${{ inputs.mix-env }}-${{ hashFiles('mix.lock') }} + + - name: setup plt cache + uses: actions/cache@v4 + with: + path: ${{ github.workspace }}/plts + key: ${{ runner.os }}-plt-${{ inputs.mix-env }}-${{ hashFiles('**/*.ex') }} + + - name: setup elixir + uses: ./.github/actions/elixir_setup + with: + elixir-version: ${{ inputs.elixir-version }} + otp-version: ${{ inputs.otp-version }} + + - name: install apt packages + uses: ./.github/actions/os_setup + + # strict credo may fail + - name: mix credo + shell: bash + continue-on-error: true + run: MIX_ENV=${{inputs.mix-env}} mix credo --strict + + # non strict credo should be blocking + - name: mix credo + shell: bash + continue-on-error: false + run: MIX_ENV=${{inputs.mix-env}} mix credo + + - name: mix format + shell: bash + run: MIX_ENV=${{inputs.mix-env}} mix format --check-formatted + + - name: mix dialyzer + shell: bash + run: MIX_ENV=${{inputs.mix-env}} mix dialyzer --format github + + - name: trailing whitespaces + shell: bash + run: git diff-tree --check 
4b825dc642cb6eb9a060e54bf8d69288fbee4904 HEAD diff --git a/.github/workflows/on_main_or_next_or_base.yaml b/.github/workflows/on_main_or_next_or_base.yaml new file mode 100644 index 0000000..bfd650e --- /dev/null +++ b/.github/workflows/on_main_or_next_or_base.yaml @@ -0,0 +1,42 @@ +# Push On Main/Next/Base +# +# This is the pipeline that will run for every push on main or next or base. +# +# The pipeline does the following. +# 1. Compile the source code for all environments (dev, test, prod) +# 2. Run the linter on the source code. +# 3. Run the entire test suite + +name: Push On Main/Next/Base +run-name: Push on `${{ github.ref_name }}` + +on: + push: + branches: ["main", "next", "base"] + +jobs: + compile: + strategy: + matrix: + target: [dev, test, prod] + uses: ./.github/workflows/compile.yaml + with: + mix-env: ${{ matrix.target }} + elixir-version: "1.17" + otp-version: "27.1" + + lint: + needs: compile + uses: ./.github/workflows/lint.yaml + with: + mix-env: "dev" + elixir-version: "1.17" + otp-version: "27.1" + + test: + needs: compile + uses: ./.github/workflows/test.yaml + with: + mix-env: "test" + elixir-version: "1.17" + otp-version: "27.1" diff --git a/.github/workflows/on_pull_request.yaml b/.github/workflows/on_pull_request.yaml new file mode 100644 index 0000000..26dbbac --- /dev/null +++ b/.github/workflows/on_pull_request.yaml @@ -0,0 +1,47 @@ +# Pull Request +# +# This is the pipeline that will run for every pull request that is created against the base branch. +# +# The pipeline does the following. +# 0. Try to merge this release with next. +# 1. Compile the source code for all environments (dev, test, prod) +# 2. Run the linter on the source code. +# 3. 
Run the entire test suite + +name: Pull Request +run-name: Pull request `${{ github.head_ref }}` into `${{ github.base_ref }}` +on: + pull_request: + branches: ["base"] + +jobs: + try_merge: + uses: ./.github/workflows/try_merge.yaml + with: + merge_with: "main" + + compile: + strategy: + matrix: + target: [dev, test, prod] + uses: ./.github/workflows/compile.yaml + with: + mix-env: ${{ matrix.target }} + elixir-version: "1.17" + otp-version: "27.1" + + lint: + needs: compile + uses: ./.github/workflows/lint.yaml + with: + mix-env: "dev" + elixir-version: "1.17" + otp-version: "27.1" + + test: + needs: compile + uses: ./.github/workflows/test.yaml + with: + mix-env: "test" + elixir-version: "1.17" + otp-version: "27.1" diff --git a/.github/workflows/on_push.yml b/.github/workflows/on_push.yml new file mode 100644 index 0000000..35a3326 --- /dev/null +++ b/.github/workflows/on_push.yml @@ -0,0 +1,35 @@ +# Push +# +# This is the pipeline that will run for every pushed commit, except for main and next. +# +# The pipeline does the following. +# 1. Compile the source code for all environments (dev, test, prod) +# 2. Run the linter on the source code. + +name: Push +run-name: Push on `${{ github.ref_name }}` +on: + push: + branches-ignore: + - 'main' + - 'next' + - 'base' + +jobs: + compile: + strategy: + matrix: + target: [dev, test, prod] + uses: ./.github/workflows/compile.yaml + with: + mix-env: ${{ matrix.target }} + elixir-version: "1.17" + otp-version: "27.1" + + lint: + needs: compile + uses: ./.github/workflows/lint.yaml + with: + mix-env: "dev" + elixir-version: "1.17" + otp-version: "27.1" diff --git a/.github/workflows/on_release.yaml b/.github/workflows/on_release.yaml new file mode 100644 index 0000000..7e76bba --- /dev/null +++ b/.github/workflows/on_release.yaml @@ -0,0 +1,88 @@ +# Release +# +# This is the pipeline that will run for every release. +# A release is a tag that starts with a "v" followed by a version number. 
+# In brief, pushing a tag will run this pipeline. +# +# The pipeline does the following. +# 0. Try to merge this release with next. +# 1. Compile the source code for all environments (dev, test, prod) +# 2. Run the linter on the source code. +# 3. Run the entire test suite +# 4. Compile the documentation +# 5. Make a release on GitHub + +name: Release +run-name: Release `${{ github.ref_name }}` + +on: + push: + tags: + - v* + +jobs: + compile: + strategy: + matrix: + mix_env: [dev, test, prod] + uses: ./.github/workflows/compile.yaml + with: + mix-env: ${{ matrix.mix_env }} + elixir-version: "1.17" + otp-version: "27.1" + + lint: + needs: compile + uses: ./.github/workflows/lint.yaml + with: + mix-env: "dev" + elixir-version: "1.17" + otp-version: "27.1" + + test: + needs: compile + uses: ./.github/workflows/test.yaml + with: + mix-env: "test" + elixir-version: "1.17" + otp-version: "27.1" + + docs: + needs: compile + permissions: + contents: write + id-token: write + pages: write + uses: ./.github/workflows/docs.yaml + with: + mix-env: "dev" + elixir-version: "1.17" + otp-version: "27.1" + + make_release: + needs: [compile, lint, test, docs] + strategy: + matrix: + beam_versions: + - otp: "27.1" + elixir: "1.17" + # - otp: "26.2.5.5" + # elixir: "1.17" + permissions: + contents: write + id-token: write + pages: write + uses: ./.github/workflows/build_release.yaml + with: + mix-env: "prod" + elixir-version: "${{ matrix.beam_versions.elixir }}" + otp-version: "${{ matrix.beam_versions.otp }}" + release_name: ${{ github.ref_name }} + + publish_release: + needs: [make_release] + permissions: + contents: write + id-token: write + pages: write + uses: ./.github/workflows/publish_release.yaml diff --git a/.github/workflows/publish_release.yaml b/.github/workflows/publish_release.yaml new file mode 100644 index 0000000..1436445 --- /dev/null +++ b/.github/workflows/publish_release.yaml @@ -0,0 +1,57 @@ +# Build Release +# +# I publish a release on GitHub +name: Publish 
Release +on: + workflow_call: + +jobs: + build_release: + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + + #--------------------------------------------------------------------------- + # Create a GitHub release + + - name: ensure that the release does not exist yet + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + export RELEASE=$(gh release list --json name | jq '.[] | select(.name == "${{ github.ref_name }}") | .name') ; \ + test -z "${RELEASE}" || echo "release ${RELEASE} already exists" + + - name: create the release on github + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + gh release create ${{ github.ref_name }} \ + --title ${{ github.ref_name }} \ + --repo ${{ github.repository }} + + #--------------------------------------------------------------------------- + # Add the client binary to the release + + - name: Download all artifacts + uses: actions/download-artifact@v4 + with: + path: artifacts + merge-multiple: true + + - name: Display structure of downloaded files + run: ls -la artifacts + + #--------------------------------------------------------------------------- + # Push artifacts for this release + + - name: publish all release binaries + shell: bash + env: + GH_TOKEN: ${{ secrets.github_token }} + run: | + for f in artifacts/*; do + gh release upload ${{ github.ref_name }} --clobber $f + done \ No newline at end of file diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..ca440df --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,49 @@ +# Test +# +# I run the testsuite on the code base. +# If any tests fail, I fail. +# +# I take three parameters: +# - mix-env: The environment to build the release for. +# - elixir-version: The version of Elixir to use. +# - otp-version: The version of OTP to use. 
+name: Test +on: + workflow_call: + inputs: + mix-env: + required: true + type: string + elixir-version: + required: true + type: string + otp-version: + required: true + type: string +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + + - name: setup deps and _build cache + uses: actions/cache@v4 + with: + path: | + ${{ github.workspace }}/deps + ${{ github.workspace }}/_build + key: ${{ runner.os }}-build-${{ inputs.mix-env }}-${{ hashFiles('mix.lock') }} + + - name: setup elixir + uses: ./.github/actions/elixir_setup + with: + elixir-version: ${{ inputs.elixir-version }} + otp-version: ${{ inputs.otp-version }} + + - name: install apt packages + uses: ./.github/actions/os_setup + + - name: mix test + shell: bash + run: MIX_ENV=${{inputs.mix-env}} mix test diff --git a/.github/workflows/try_merge.yaml b/.github/workflows/try_merge.yaml new file mode 100644 index 0000000..15255e7 --- /dev/null +++ b/.github/workflows/try_merge.yaml @@ -0,0 +1,39 @@ +# Try Merge +# +# I will try to merge the current branch onto the target branch. +# If the merge fails, I error. +# +# I take one parameter: +# - merge_with: The branch to merge the current branch with. 
+ +name: Try Merge +on: + workflow_call: + inputs: + merge_with: + required: true + type: string +jobs: + try-merge: + runs-on: ubuntu-latest + steps: + - name: checkout the repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 # fetch all branches + + # get the current commit hash and store in the environment under `$SHA` + - name: get the current commit hash + id: commit_hash + shell: bash + run: echo "SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV + + # checkout the target branch + - name: checkout the target branch + shell: bash + run: git checkout ${{ inputs.merge_with }} + + # try and merge the commit into the target branch + - name: merge the commit into the checked out branch + shell: bash + run: git -c user.name="Dummy User" -c user.email="example@example.com" merge $SHA diff --git a/README.md b/README.md index b83b7a3..ae72134 100644 --- a/README.md +++ b/README.md @@ -1,21 +1,82 @@ # ExExample -**TODO: Add description** +`ExExample` aims to provide an example-driven test framework for Elixir applications. + +As opposed to regular unit tests, examples are supposed to be executed from within the REPL. + +Examples serve both as a unit test, but also as a tool to discover, learn, and interact with a live +system such as Elixir applications. ## Installation -If [available in Hex](https://hex.pm/docs/publish), the package can be installed -by adding `ex_example` to your list of dependencies in `mix.exs`: +There is no package available on hex yet, so add it as a Git dependency: ```elixir def deps do [ - {:ex_example, "~> 0.1.0"} + {:ex_example, git: "https://github.com/anoma/ex_example"} ] end ``` -Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) -and published on [HexDocs](https://hexdocs.pm). Once published, the docs can -be found at . +## Your First Example + +To get started, create a new module in the `lib/` folder of your Elixir application and add an example. 
+ +```elixir +defmodule MyExamples do + use ExExample + + defexample read_data() do + 1..1000 |> Enum.shuffle() |> Enum.take(10) + end + + @spec copy(any()) :: Stack.t() + def copy(stack) do + %Stack{elements: stack.elements} + end + + @spec rerun?(any()) :: boolean() + def rerun?(_), do: false +end +``` + +In a running REPL with your application loaded, you can execute this example using `MyExamples.read_data()`. +The example will be executed once, and the cached result will be returned the next time around. + +The optional callbacks `copy/1` and `rerun?/1` are used to change the caching behavior. +These functions are called whenever an example within the module they're defined in are executed. +The `copy/1` function takes in the previous result value if there was one, and allows you to define custom logic on how to copy a value. +This is especially useful if you return values that are mutable (e.g., process ids). +For example, if you want to create a copy of a supervision tree, you define the logic to clone that supervision tree in the `copy/1` function. +This is useful if you have examples that change that value, while other examples do not expect their inputs to be changed. + +The `rerun?/1` function takes in the result of an already run example, and determines based on its output if it should be recomputed anyway. +This is useful to circumvent the caching mechanism in case you do not want cached values in examples. + +## Caching + +In a REPL session it's not uncommon to recompile your code (e.g., using `recompile()`). This changes +the semantics of your examples. + +To avoid working with stale outputs, `ExExample` only returns the cached version of your example +if the code it depends on, or the example itself, have not been changed. + +When the code changes, the example is executed again. + +## Tests + +The examples are created to work with the code base, but they can also serve as a unit test. 
+ + To let ExUnit use the examples in your codebase as tests, add a test file in the `test/` folder, and +import the `ExExample.Tests` module. + + To run the examples from above, add a file `my_examples_test.exs` to your `test/` folder and include the following. + + ```elixir + defmodule MyExamplesTest do + use ExUnit.Case + use ExExample.Tests, for: MyExamples + end + ``` \ No newline at end of file diff --git a/lib/ex_example.ex b/lib/ex_example.ex index ee90b41..8caa4c6 100644 --- a/lib/ex_example.ex +++ b/lib/ex_example.ex @@ -1,17 +1,154 @@ defmodule ExExample do @moduledoc """ - I am the ExExample Application Module + Documentation for `ExExample`. + """ + alias ExExample.Analyze + alias ExExample.Cache + alias ExExample.Executor + + ############################################################ + # Types # + ############################################################ + + @typedoc """ + A dependency is a function that will be called by an example. + The format of a dependency is `{{module, function}, arity}` + """ + @type dependency :: {{atom(), atom()}, non_neg_integer()} + + @typedoc """ + """ + @type example :: {atom(), list(dependency)} + + ############################################################ + # Helpers # + ############################################################ + + @doc """ + I return the hidden name of an example. + The hidden name is the example body without modification. + """ + @spec hidden_name({atom(), atom()}) :: {atom(), atom()} + def hidden_name({module, func}) do + {module, String.to_atom("__#{func}__")} + end + + @doc """ + I determine if a module/function pair is an example or not. + + A function is an example if it is defined in a module that has the `__examples__/0` function + implemented, and when the `__examples__()` output lists that function name as being an example. 
+ """ + @spec example?(dependency()) :: boolean() + def example?({{module, func}, _arity}) do + example_module?(module) and Keyword.has_key?(module.__examples__(), func) + end + + @doc """ + I return true if the given module contains examples. + """ + @spec example_module?(atom()) :: boolean + def example_module?(module) do + {:__examples__, 0} in module.__info__(:functions) + end + + @doc """ + I return a list of all dependencies for this example. + Note: this does includes other called modules too (e.g., Enum). + """ + @spec all_dependencies({atom(), atom()}) :: [dependency()] + def all_dependencies({module, func}) do + module.__examples__() + |> Keyword.get(func, []) + end - I startup the ExExample system as an OTP application. Moreover Ι - provide all the API necessary for the user of the system. I contain - all public functionality + @doc """ + I return a list of example dependencies for this example. + Note: this does not include other called modules. + """ + @spec example_dependencies({atom(), atom()}) :: [dependency()] + def example_dependencies({module, func}) do + all_dependencies({module, func}) + |> Enum.filter(&example?/1) + end - ### Public API + @doc """ + I return a list of examples in the order they should be + executed in. + + I do this by topologically sorting their execution order. 
""" + @spec execution_order(atom()) :: [{atom(), atom()}] + def execution_order(module) do + module.__examples__() + |> Enum.reduce(Graph.new(), fn + {function, []}, g -> + Graph.add_vertex(g, {__MODULE__, function}) + + {function, dependencies}, g -> + dependencies + # filter out all non-example dependencies + |> Enum.filter(&example?/1) + |> Enum.reduce(g, fn {{module, func}, _arity}, g -> + Graph.add_edge(g, {module, func}, {module, function}) + end) + end) + |> Graph.topsort() + end + + ############################################################ + # Macros # + ############################################################ + + defmacro __using__(_options) do + quote do + import unquote(__MODULE__) + + @behaviour ExExample.Behaviour + + # module attribute that holds all the examples + Module.register_attribute(__MODULE__, :examples, accumulate: true) + + @before_compile unquote(__MODULE__) + end + end + + defmacro __before_compile__(_env) do + quote do + @spec __examples__ :: [ExExample.example()] + def __examples__, do: @examples + end + end + + defmacro example({example_name, context, args} = name, do: body) do + called_functions = Analyze.extract_function_calls(body, __CALLER__) + + # example_name is the name of the function that is being tested + # e.g., `example_name` + + # hidden_func_name is the name of the hidden function that is being tested + # this will contain the actual body of the example + # __example_name__ + hidden_example_name = String.to_atom("__#{example_name}__") + + quote do + def unquote({hidden_example_name, context, args}) do + unquote(body) + end + + @examples {unquote(example_name), unquote(called_functions)} + def unquote(name) do + case Executor.attempt_example({__MODULE__, unquote(example_name)}, []) do + %{result: %Cache.Result{success: :success} = result} -> + result.result - use Application + %{result: %Cache.Result{success: :failed} = result} -> + raise result.result - def start(_type, args \\ []) do - 
ExExample.Supervisor.start_link(args) + %{result: %Cache.Result{success: :skipped} = result} -> + :skipped + end + end + end end end diff --git a/lib/ex_example/analyzer/analyze.ex b/lib/ex_example/analyzer/analyze.ex new file mode 100644 index 0000000..85380fb --- /dev/null +++ b/lib/ex_example/analyzer/analyze.ex @@ -0,0 +1,168 @@ +defmodule ExExample.Analyze do + @moduledoc """ + I contain functionality to analyze ASTs. + + I have functionality to extract modules on which an AST depends, + function calls it makes, and definitions from a module AST. + """ + require Logger + + defmodule State do + @moduledoc """ + I implement the state for analyzing an AST. + """ + defstruct called_functions: [], env: nil, functions: [] + + @spec put_call(map(), {atom(), atom()}, non_neg_integer()) :: map() + def put_call(state, mod, arg) do + %{state | called_functions: [{mod, arg} | state.called_functions]} + end + + @spec put_def(map(), atom(), non_neg_integer()) :: map() + def put_def(state, func, arity) do + %{state | functions: [{func, arity} | state.functions]} + end + end + + # ---------------------------------------------------------------------------- + # Exctract function calls from ast + + @spec extract_function_calls(tuple(), Macro.Env.t()) :: [{{atom(), atom()}, non_neg_integer()}] + def extract_function_calls(ast, env) do + state = %State{env: env} + # IO.inspect(env) + {_, state} = Macro.prewalk(ast, state, &extract_function_calls_logged/2) + state.called_functions + end + + defp extract_function_calls_logged(ast, state) do + # IO.puts("------------------------------------------- ") + # IO.inspect(ast) + + extract_function_call(ast, state) + end + + # qualified function call + # e.g., Foo.bar() + + defp extract_function_call( + {{:., _, [{:__aliases__, _, aliases}, func_name]}, _, args} = ast, + state + ) do + case Macro.Env.expand_alias(state.env, [], aliases) do + :error -> + arg_count = Enum.count(args) + module = Module.concat(aliases) + state = 
State.put_call(state, {module, func_name}, arg_count) + {ast, state} + + {:alias, resolved} -> + arg_count = Enum.count(args) + state = State.put_call(state, {resolved, func_name}, arg_count) + {ast, state} + end + end + + defp extract_function_call({{:., _, _args}, _, _} = ast, state) do + {ast, state} + end + + # variable in binding + # e.g. `x` in `x = 1` + defp extract_function_call({_func, _, nil} = ast, state) do + {ast, state} + end + + @special_forms Kernel.SpecialForms.__info__(:macros) + defp extract_function_call({func, _, args} = ast, state) do + arg_count = Enum.count(args) + + state = + case Macro.Env.lookup_import(state.env, {func, arg_count}) do + # imported call + [{:function, module}] -> + State.put_call(state, {module, func}, arg_count) + + [{:macro, _module}] -> + state + + # local def + [] -> + if {func, arg_count} in @special_forms or func in [:__block__, :&, :__aliases__] do + state + else + State.put_call(state, {state.env.module, func}, arg_count) + end + end + + {ast, state} + end + + defp extract_function_call(ast, state) do + {ast, state} + end + + # ---------------------------------------------------------------------------- + # Exctract function definitions from module + + @doc """ + Given the path of a source file, I extract the definitions of the functions. 
+ """ + @spec extract_defs(String.t(), Macro.Env.t()) :: [{atom(), non_neg_integer()}] + def extract_defs(file, env) do + source = File.read!(file) + {:ok, ast} = Code.string_to_quoted(source) + extract_defs_from_source(ast, env) + end + + defp extract_defs_from_source(ast, env) do + # create the initial state + state = %State{env: env} + + # walk the ast and extract the function definitions + {_, state} = Macro.prewalk(ast, state, &extract_def_logged/2) + + state.functions + end + + defp extract_def_logged(ast, state) do + # IO.puts("------------------------------------------- ") + # IO.inspect(ast) + + extract_def(ast, state) + end + + defp extract_def({:example, _, [{fun, _, args}, _body]} = ast, state) do + state = + args + |> count_args() + |> Enum.reduce(state, fn i, state -> + State.put_def(state, fun, i) + end) + + {ast, state} + end + + defp extract_def(ast, state) do + {ast, state} + end + + # @doc """ + # I count the arguments in an argument list. + # I return the number of required arguments followed by the number of optional arguments. 
+ # """ + @spec count_args([any()]) :: any() + defp count_args(args) do + {req, opt} = + args + |> Enum.reduce({0, 0}, fn + {:\\, _, [{_arg, _, _}, _]}, {req, opt} -> + {req, opt + 1} + + _, {req, opt} -> + {req + 1, opt} + end) + + req..(req + opt) + end +end diff --git a/lib/ex_example/application.ex b/lib/ex_example/application.ex new file mode 100644 index 0000000..31a253f --- /dev/null +++ b/lib/ex_example/application.ex @@ -0,0 +1,13 @@ +defmodule ExExample.Application do + @moduledoc false + + use Application + + @impl true + def start(_type, _args) do + children = [{Cachex, [ExExample.Cache]}] + + opts = [strategy: :one_for_one, name: ExExample.Supervisor] + Supervisor.start_link(children, opts) + end +end diff --git a/lib/ex_example/behaviour.ex b/lib/ex_example/behaviour.ex index 4e1cbcb..28ca9b1 100644 --- a/lib/ex_example/behaviour.ex +++ b/lib/ex_example/behaviour.ex @@ -7,6 +7,6 @@ defmodule ExExample.Behaviour do use macro for ExExample """ - @callback rerun?(ExExample.CacheResult.t()) :: boolean() - @callback copy(ExExample.CacheResult.t()) :: any() + @callback rerun?(any()) :: boolean() + @callback copy(any()) :: any() end diff --git a/lib/ex_example/cache/cache.ex b/lib/ex_example/cache/cache.ex new file mode 100644 index 0000000..849aeb6 --- /dev/null +++ b/lib/ex_example/cache/cache.ex @@ -0,0 +1,62 @@ +defmodule ExExample.Cache do + @moduledoc """ + I define logic to store and retrieve results from the cache. + """ + + alias ExExample.Cache.Key + alias ExExample.Cache.Result + + require Logger + + @cache_name __MODULE__ + + @doc """ + I clear the entire cache. + """ + @spec clear() :: :ok + def clear do + Cachex.clear!(@cache_name) + :ok + end + + @doc """ + I store a result in cache for a given key. + """ + @spec put_result(Result.t(), Key.t()) :: {atom(), boolean()} + def put_result(%Result{} = result, %Key{} = key) do + Cachex.put(@cache_name, key, result) + end + + @doc """ + I fetch a previous Result from the cache if it exists. 
+ If it does not exist, I return `{:error, :not_found}`. + """ + @spec get_result(Key.t()) :: {:ok, Result.t()} | {:error, :no_result} + def get_result(%Key{} = key) do + case Cachex.get(@cache_name, key) do + {:ok, nil} -> + {:error, :no_result} + + {:ok, result} -> + {:ok, result} + end + end + + @doc """ + I return the state of the last execution of an example. + """ + @spec state(Key.t() | {atom(), atom()}) :: :succeeded | :failed | :skipped + def state({module, function}) do + state(%Key{module: module, function: function}) + end + + def state(%Key{} = key) do + case Cachex.get(@cache_name, key) do + {:ok, nil} -> + nil + + {:ok, result} -> + result.success + end + end +end diff --git a/lib/ex_example/cache/key.ex b/lib/ex_example/cache/key.ex new file mode 100644 index 0000000..bb6d390 --- /dev/null +++ b/lib/ex_example/cache/key.ex @@ -0,0 +1,18 @@ +defmodule ExExample.Cache.Key do + @moduledoc """ + I represent the key for an example invocation. + + I identify an invocation by means of its module, name, arity, and list of arguments. + """ + use TypedStruct + + typedstruct enforce: true do + @typedoc """ + I represent the key for an example invocation. + """ + field(:module, atom()) + field(:function, atom()) + field(:arguments, [term()], default: []) + field(:deps_hash, any(), default: nil) + end +end diff --git a/lib/ex_example/cache/result.ex b/lib/ex_example/cache/result.ex new file mode 100644 index 0000000..4d71b60 --- /dev/null +++ b/lib/ex_example/cache/result.ex @@ -0,0 +1,20 @@ +defmodule ExExample.Cache.Result do + @moduledoc """ + I represent the result of an example execution. + + I contain the key for the example I am the result of, the status of the execution, and the result of the execution. 
+ """ + use TypedStruct + + alias ExExample.Cache.Key + + typedstruct enforce: false do + @typedoc """ + I represent the result of a completed Example Computation + """ + field(:key, Key.t()) + field(:success, :failed | :success | :skipped) + field(:result, term()) + field(:cached, boolean(), default: true) + end +end diff --git a/lib/ex_example/cache_result.ex b/lib/ex_example/cache_result.ex deleted file mode 100644 index 3793f47..0000000 --- a/lib/ex_example/cache_result.ex +++ /dev/null @@ -1,19 +0,0 @@ -defmodule ExExample.CacheResult do - @moduledoc """ - I represent the cached result of a ran Example - """ - - use TypedStruct - - typedstruct enforce: true do - @typedoc """ - I represent the result of a completed Example Computation - """ - - field(:arguments, Macro.input() | nil, default: nil) - field(:source, Macro.input()) - field(:source_name, {module(), atom(), non_neg_integer()}) - field(:result, term()) - field(:pure, boolean()) - end -end diff --git a/lib/ex_example/examples/e_cache_result.ex b/lib/ex_example/examples/e_cache_result.ex deleted file mode 100644 index c962705..0000000 --- a/lib/ex_example/examples/e_cache_result.ex +++ /dev/null @@ -1,16 +0,0 @@ -defmodule ExExample.Examples.ECacheResult do - alias ExExample.CacheResult - - def trivial_definition() do - 5 - end - - def trivial_cached_result do - %CacheResult{ - source: [do: 5], - pure: true, - result: 5, - source_name: {__MODULE__, :trivial_definition, 0} - } - end -end diff --git a/lib/ex_example/executor.ex b/lib/ex_example/executor.ex new file mode 100644 index 0000000..b2ed6cd --- /dev/null +++ b/lib/ex_example/executor.ex @@ -0,0 +1,279 @@ +defmodule ExExample.Executor do + @moduledoc """ + I contain functionality to execute examples. + + I contain logic to determine if a cachd result should be used, computation should be done again, + or if an example should be skipped. 
+ """ + + require Logger + + alias ExExample.Cache + alias ExExample.Run + + ############################################################ + # API # + ############################################################ + + @spec print_dependencies(ExExample.Run.t()) :: binary() + def print_dependencies(run) do + output = + if run.success != [] do + run.success + |> Enum.map_join( + ", ", + fn {{mod, func}, _arity} -> " 🟢 #{inspect(mod)}.#{Atom.to_string(func)}" end + ) + else + "" + end + + output = + if run.failed != [] do + run.success + |> Enum.map_join( + ", ", + fn {{mod, func}, _arity} -> + " 🔴 #{inspect(mod)}.#{Atom.to_string(func)}" + end + ) + |> Kernel.<>(output) + else + output + end + + output = + if run.no_cache != [] do + run.success + |> Enum.map_join( + ", ", + fn {{mod, func}, _arity} -> + " ⚪️ #{inspect(mod)}.#{Atom.to_string(func)}" + end + ) + |> Kernel.<>(output) + else + output + end + + output = + if run.skipped != [] do + run.success + |> Enum.map_join( + ", ", + fn {{mod, func}, _arity} -> + " ⚪️ #{inspect(mod)}.#{Atom.to_string(func)}" + end + ) + |> Kernel.<>(output) + else + output + end + + if output == "", do: "", else: "\n" <> output + end + + @spec print_run(ExExample.Run.t()) :: :ok + def print_run(%Run{result: %Cache.Result{success: :success} = result} = run) do + cached = if result.cached, do: "(cached) ", else: "" + + IO.puts(""" + 🟢 #{cached}#{inspect(run.key.module)}.#{Atom.to_string(run.key.function)}\ + #{print_dependencies(run)}\ + """) + + :ok + end + + def print_run(%Run{result: %Cache.Result{success: :skipped} = result} = run) do + cached = if result.cached, do: "(cached) ", else: "" + + IO.puts(""" + ⚪️ #{cached}#{inspect(run.key.module)}.#{Atom.to_string(run.key.function)}\ + #{print_dependencies(run)}\ + """) + + :ok + end + + def print_run(%Run{result: %Cache.Result{success: :failed} = result} = run) do + cached = if result.cached, do: "(cached) ", else: "" + + IO.puts(""" + 🔴 
#{cached}#{inspect(run.key.module)}.#{Atom.to_string(run.key.function)}\ + #{print_dependencies(run)}\ + """) + + :ok + end + + @spec pretty_run(atom()) :: :ok + def pretty_run(module) do + module + |> ExExample.execution_order() + |> Enum.map(&attempt_example(&1, [])) + |> Enum.each(&print_run/1) + + :ok + end + + @doc """ + I return the last known result of an example invocation. + If the example has not been run yet I return an error. + """ + @spec last_result(ExExample.dependency()) :: :success | :skipped | :failed | :no_cache + def last_result({{module, func}, _arity}) do + deps_hash = dependency_hash({module, func}) + + key = %Cache.Key{module: module, function: func, arguments: [], deps_hash: deps_hash} + + case Cache.get_result(key) do + {:ok, result} -> + result.success + + {:error, :no_result} -> + :no_cache + end + end + + @doc """ + Given an example, I return a map of all its dependencies + that failed, succeeded, were skipped, or have not run yet. + """ + @spec dependency_results({atom(), atom()}) :: %{ + success: [ExExample.dependency()], + skipped: [ExExample.dependency()], + failed: [ExExample.dependency()], + no_cache: [ExExample.dependency()] + } + def dependency_results({module, func}) do + results = + {module, func} + |> ExExample.example_dependencies() + |> Enum.group_by(&last_result/1) + + Map.merge(%{success: [], skipped: [], failed: [], no_cache: []}, results) + end + + @doc """ + Given an example, I return a hash of all its dependencies. + This hash can be used to determine of an example was run with + an older version of a dependency. + """ + @spec dependency_hash({atom(), atom()}) :: non_neg_integer() + def dependency_hash({module, func}) do + {module, func} + |> ExExample.all_dependencies() + |> Enum.map(fn {{module, _func}, _arity} -> + {module, module.__info__(:attributes)[:vsn]} + end) + |> Enum.uniq() + |> :erlang.phash2() + end + + @doc """ + I run all the examples in the given module. + I use the cache for each invocation. 
+ """ + @spec run_all_examples(atom()) :: [Run.t()] + def run_all_examples(module) do + module + |> ExExample.execution_order() + |> Enum.map(&attempt_example(&1, [])) + end + + @doc """ + I attempt to run an example. + + I return a struct that holds the result, the key, and a list of all + the dependencies and their previous result. + """ + @spec attempt_example({atom(), atom()}, [any()]) :: Run.t() + def attempt_example({module, func}, arguments) do + deps_hash = dependency_hash({module, func}) + key = %Cache.Key{module: module, function: func, arguments: arguments, deps_hash: deps_hash} + + case dependency_results({module, func}) do + # no failures, only no cache or success + %{failed: [], skipped: [], no_cache: no_cache, success: success} -> + result = run_example_with_cache({module, func}, arguments) + %Run{key: key, result: result, no_cache: no_cache, success: success} + + # failures and/or skipped + %{failed: failed, skipped: skipped, no_cache: no_cache, success: success} -> + result = %Cache.Result{key: key, success: :skipped, result: nil, cached: false} + Cache.put_result(result, key) + + %Run{ + key: key, + result: result, + no_cache: no_cache, + success: success, + failed: failed, + skipped: skipped + } + end + end + + @doc """ + I run an example with the cached results. + If there is cached result, I return that. + If there is no result in the cache I run the example. 
+ """ + @spec run_example_with_cache({atom(), atom()}, [any()]) :: Cache.Result.t() + def run_example_with_cache({module, func}, arguments) do + deps_hash = dependency_hash({module, func}) + key = %Cache.Key{module: module, function: func, arguments: arguments, deps_hash: deps_hash} + + case Cache.get_result(key) do + {:ok, result} -> + if module.rerun?(result.result) do + run_example({module, func}, arguments) + else + %{result | result: module.copy(result.result)} + end + + {:error, :no_result} -> + run_example({module, func}, arguments) + end + end + + @doc """ + I run an example directly. I do not consult the cache for a previous result. + I return a result of this execution and put it in the cache. + """ + @spec run_example({atom(), atom()}, [any()]) :: Cache.Result.t() + def run_example({module, func}, arguments) do + deps_hash = dependency_hash({module, func}) + key = %Cache.Key{module: module, function: func, arguments: arguments, deps_hash: deps_hash} + + result = + try do + {module, func} = ExExample.hidden_name({module, func}) + result = apply(module, func, arguments) + %Cache.Result{key: key, success: :success, result: result} + rescue + e -> + %Cache.Result{key: key, success: :failed, result: e} + end + + # store the result in the cache + Cache.put_result(result, key) + + %{result | cached: false} + end + + @doc """ + Given an example, I return a hash of all its dependencies. + This hash can be used to determine of an example was run with + an older version of a dependency. 
+ """ + @spec deps_hash(list(ExExample.dependency())) :: non_neg_integer() + def deps_hash(dependencies) do + dependencies + |> Enum.map(fn {{module, _func}, _arity} -> + module.__info__(:attributes)[:vsn] + end) + |> :erlang.phash2() + end +end diff --git a/lib/ex_example/run.ex b/lib/ex_example/run.ex new file mode 100644 index 0000000..a329a4f --- /dev/null +++ b/lib/ex_example/run.ex @@ -0,0 +1,23 @@ +defmodule ExExample.Run do + @moduledoc """ + I am the result of running an example. + + I contain meta-data about this particular invocation such as + whether the example was found in cache, the state of its dependencies, + and the key. + """ + alias ExExample.Cache.Key + alias ExExample.Cache.Result + + use TypedStruct + + typedstruct do + field(:cached, boolean(), default: true) + field(:key, Key.t()) + field(:result, Result.t()) + field(:skipped, [ExExample.dependency()], default: []) + field(:failed, [ExExample.dependency()], default: []) + field(:no_cache, [ExExample.dependency()], default: []) + field(:success, [ExExample.dependency()], default: []) + end +end diff --git a/lib/ex_example/supervisor.ex b/lib/ex_example/supervisor.ex deleted file mode 100644 index 9f3100d..0000000 --- a/lib/ex_example/supervisor.ex +++ /dev/null @@ -1,24 +0,0 @@ -defmodule ExExample.Supervisor do - @moduledoc """ - I am the ExUnit Supervisor for Caching. 
- """ - - use Supervisor - - @type startup_options :: {:name, atom()} - - @spec start_link(list(startup_options())) :: GenServer.on_start() - def start_link(args \\ []) do - {:ok, keys} = - args - |> Keyword.validate(name: __MODULE__) - - Supervisor.start_link(__MODULE__, keys, name: keys[:name]) - end - - @impl true - def init(_args) do - children = [{Cachex, [:ex_examples]}] - Supervisor.init(children, strategy: :one_for_one) - end -end diff --git a/lib/ex_example/tests.ex b/lib/ex_example/tests.ex new file mode 100644 index 0000000..4b5214e --- /dev/null +++ b/lib/ex_example/tests.ex @@ -0,0 +1,25 @@ +defmodule ExExample.Tests do + @moduledoc """ + I generate a test for the given module that runs all of its examples. + """ + defmacro __using__(for: module) do + alias ExExample.Cache + + module_to_test = Macro.expand(module, __CALLER__) + examples = ExExample.execution_order(module_to_test) + + for {mod, func} <- examples do + quote do + test "#{inspect(unquote(mod))}.#{Atom.to_string(unquote(func))}" do + case ExExample.Executor.attempt_example({unquote(mod), unquote(func)}, []) do + %{result: %Cache.Result{success: :failed} = result} -> + raise result.result + + _ -> + :ok + end + end + end + end + end +end diff --git a/lib/examples/stack.ex b/lib/examples/stack.ex new file mode 100644 index 0000000..a837d64 --- /dev/null +++ b/lib/examples/stack.ex @@ -0,0 +1,46 @@ +defmodule Stack do + @moduledoc """ + I am an example implementation of a Stack. I am used to show example.. examples. + """ + use TypedStruct + + typedstruct enforce: true do + @typedoc """ + I represent the key for an example invocation. 
+ """ + field(:elements, [any()], default: []) + end + + @spec create([any()]) :: {:ok, t()} + def create(xs \\ []) do + {:ok, %Stack{elements: xs}} + end + + # yesyesyes + @spec empty?(t()) :: boolean + def empty?(%Stack{elements: []}), do: true + def empty?(%Stack{elements: _}), do: false + + @spec push(t(), any()) :: {:ok, t()} + def push(%Stack{elements: xs}, x) do + {:ok, %Stack{elements: [x | xs]}} + end + + @spec pop(t()) :: {:ok, t(), any()} | {:error, :empty} + def pop(%Stack{elements: []}) do + {:error, :empty} + end + + def pop(%Stack{elements: [x | xs]}) do + {:ok, %Stack{elements: xs}, x} + end + + @spec peek(t()) :: {:ok, t(), any()} | {:error, :empty} + def peek(%Stack{elements: []}) do + {:error, :empty} + end + + def peek(%Stack{elements: [x | xs]}) do + {:ok, %Stack{elements: [x | xs]}, x} + end +end diff --git a/lib/examples/stack_examples.ex b/lib/examples/stack_examples.ex new file mode 100644 index 0000000..b0c1945 --- /dev/null +++ b/lib/examples/stack_examples.ex @@ -0,0 +1,42 @@ +defmodule Examples.Stack do + # all exmaples must return a Stack + @moduledoc """ + I contain examples that test the `Stack` implementation. 
+ """ + use ExExample + + import ExUnit.Assertions + + example new_stack do + {:ok, stack} = Stack.create([]) + assert stack == %Stack{elements: []} + stack + end + + example empty_stack_should_be_empty do + stack = new_stack() + assert Stack.empty?(stack) + stack + end + + example push_stack do + stack = new_stack() + {:ok, stack} = Stack.push(stack, 1) + assert stack == %Stack{elements: [1]} + stack + end + + example pop_stack do + stack = push_stack() + {:ok, stack, 1} = Stack.pop(stack) + stack + end + + @spec copy(any()) :: Stack.t() + def copy(stack) do + %Stack{elements: stack.elements} + end + + @spec rerun?(any()) :: boolean() + def rerun?(_), do: false +end diff --git a/mix.exs b/mix.exs index 76ad48d..4037108 100644 --- a/mix.exs +++ b/mix.exs @@ -7,15 +7,20 @@ defmodule ExExample.MixProject do version: "0.1.0", elixir: "~> 1.17", start_permanent: Mix.env() == :prod, - deps: deps() + deps: deps(), + dialyzer: [ + plt_add_deps: :apps_direct, + plt_add_apps: [:wx, :ex_unit], + plt_ignore_apps: [:mnesia] + ] ] end # Run "mix help compile.app" to learn about applications. 
def application do [ - mod: {ExExample, []}, - extra_applications: [:logger, :observer, :wx] + extra_applications: [:logger, :observer, :wx], + mod: {ExExample.Application, []} ] end @@ -29,7 +34,8 @@ defmodule ExExample.MixProject do # non-runtime dependencies below {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, {:dialyxir, "~> 1.3", only: [:dev], runtime: false}, - {:ex_doc, "~> 0.31", only: [:dev], runtime: false} + {:ex_doc, "~> 0.31", only: [:dev], runtime: false}, + {:libgraph, "~> 0.16.0"} ] end end diff --git a/mix.lock b/mix.lock index cbb4abf..6ff4b68 100644 --- a/mix.lock +++ b/mix.lock @@ -1,19 +1,19 @@ %{ "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, - "cachex": {:hex, :cachex, "4.0.2", "120f9c27b0a453c7cb3319d9dc6c61c050a480e5299fc1f8bded1e2e334992ab", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:ex_hash_ring, "~> 6.0", [hex: :ex_hash_ring, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "4f4890122bddd979f6c217d5e300d0c0d3eb858a976cbe1f65a94e6322bc5825"}, - "credo": {:hex, :credo, "1.7.8", "9722ba1681e973025908d542ec3d95db5f9c549251ba5b028e251ad8c24ab8c5", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cb9e87cc64f152f3ed1c6e325e7b894dea8f5ef2e41123bd864e3cd5ceb44968"}, - "dialyxir": {:hex, :dialyxir, "1.4.4", "fb3ce8741edeaea59c9ae84d5cec75da00fa89fe401c72d6e047d11a61f65f70", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], 
"hexpm", "cd6111e8017ccd563e65621a4d9a4a1c5cd333df30cebc7face8029cacb4eff6"}, + "cachex": {:hex, :cachex, "4.0.3", "95e88c3ef4d37990948eaecccefe40b4ce4a778e0d7ade29081e6b7a89309ee2", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:ex_hash_ring, "~> 6.0", [hex: :ex_hash_ring, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "d5d632da7f162f8a190f1c39b712c0ebc9cf0007c4e2029d44eddc8041b52d55"}, + "credo": {:hex, :credo, "1.7.10", "6e64fe59be8da5e30a1b96273b247b5cf1cc9e336b5fd66302a64b25749ad44d", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "71fbc9a6b8be21d993deca85bf151df023a3097b01e09a2809d460348561d8cd"}, + "dialyxir": {:hex, :dialyxir, "1.4.5", "ca1571ac18e0f88d4ab245f0b60fa31ff1b12cbae2b11bd25d207f865e8ae78a", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b0fb08bb8107c750db5c0b324fa2df5ceaa0f9307690ee3c1f6ba5b9eb5d35c3"}, "earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", "a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"}, "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, "eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"}, - "ex_doc": {:hex, :ex_doc, "0.34.2", 
"13eedf3844ccdce25cfd837b99bea9ad92c4e511233199440488d217c92571e8", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "5ce5f16b41208a50106afed3de6a2ed34f4acfd65715b82a0b84b49d995f95c1"}, + "ex_doc": {:hex, :ex_doc, "0.35.1", "de804c590d3df2d9d5b8aec77d758b00c814b356119b3d4455e4b8a8687aecaf", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "2121c6402c8d44b05622677b761371a759143b958c6c19f6558ff64d0aed40df"}, "ex_hash_ring": {:hex, :ex_hash_ring, "6.0.4", "bef9d2d796afbbe25ab5b5a7ed746e06b99c76604f558113c273466d52fa6d6b", [:mix], [], "hexpm", "89adabf31f7d3dfaa36802ce598ce918e9b5b33bae8909ac1a4d052e1e567d18"}, - "excache": {:hex, :excache, "0.1.0", "cb47ccc8372a4490d7738d2353b3b743d0975e820b5251b6881821b02e062770", [:mix], [], "hexpm", "b5bbefc2b8a82bc92b848fd1da5b32cfb856fe4ff4ffd615aa77e16de772500f"}, "file_system": {:hex, :file_system, "1.0.1", "79e8ceaddb0416f8b8cd02a0127bdbababe7bf4a23d2a395b983c1f8b3f73edd", [:mix], [], "hexpm", "4414d1f38863ddf9120720cd976fce5bdde8e91d8283353f0e31850fa89feb9e"}, "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: 
true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, "jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"}, - "makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"}, - "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"}, + "libgraph": {:hex, :libgraph, "0.16.0", "3936f3eca6ef826e08880230f806bfea13193e49bf153f93edcf0239d4fd1d07", [:mix], [], "hexpm", "41ca92240e8a4138c30a7e06466acc709b0cbb795c643e9e17174a178982d6bf"}, + "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, + "makeup_elixir": {:hex, :makeup_elixir, "1.0.0", "74bb8348c9b3a51d5c589bf5aebb0466a84b33274150e3b6ece1da45584afc82", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "49159b7d7d999e836bedaf09dcf35ca18b312230cf901b725a64f3f42e407983"}, "makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", 
"8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"}, "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, "sleeplocks": {:hex, :sleeplocks, "1.1.3", "96a86460cc33b435c7310dbd27ec82ca2c1f24ae38e34f8edde97f756503441a", [:rebar3], [], "hexpm", "d3b3958552e6eb16f463921e70ae7c767519ef8f5be46d7696cc1ed649421321"}, diff --git a/test/ex_example_test.exs b/test/ex_example_test.exs deleted file mode 100644 index 0cf82cc..0000000 --- a/test/ex_example_test.exs +++ /dev/null @@ -1,8 +0,0 @@ -defmodule ExExampleTest do - use ExUnit.Case - doctest ExExample - - test "greets the world" do - assert 1 == 1 - end -end diff --git a/test/stack_test.exs b/test/stack_test.exs new file mode 100644 index 0000000..aabe1bd --- /dev/null +++ b/test/stack_test.exs @@ -0,0 +1,5 @@ +defmodule StackTest do + use ExUnit.Case + doctest ExExample + use ExExample.Tests, for: Examples.Stack +end