diff --git a/.github/get_version.py b/.github/get_version.py new file mode 100644 index 0000000..da76ee8 --- /dev/null +++ b/.github/get_version.py @@ -0,0 +1,20 @@ +""" +Simple script used to tag our releases with major and minor git tags. + +This lets users use the action with @v1 or @v1.1 references, and not have +to use the complete tag (with patch version specified). +""" + +import sys + +from packaging import version + +if __name__ == '__main__': + ref = sys.argv[1] # ref will usually look like refs/tags/v1.0.1 + major = sys.argv[2] == 'major' + version = version.parse(ref.split('refs/tags/v')[1]) + + if major: + print(f'v{version.major}') + else: + print(f'v{version.major}.{version.minor}') diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml new file mode 100644 index 0000000..4a7bf3b --- /dev/null +++ b/.github/workflows/tag.yml @@ -0,0 +1,20 @@ +# TODO: When upgrading to v1 and above, implement this: +# https://github.com/snok/install-poetry/blob/main/.github/workflows/tag_release.yml +name: Tag releases with minor versions + +on: + release: + types: [published] + +jobs: + tag-v1: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Update tag + run: | + minor_tag="$(python .github/get_version.py "${GITHUB_REF}" minor)" + git tag $minor_tag + git push origin HEAD:refs/heads/master --tags --force + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..0b6eaf7 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,80 @@ +name: Test + +on: + pull_request: + push: + branches: + - main + +jobs: + linting: + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.10.0-rc.1 + + - name: Load pre-commit cache + uses: actions/cache@v2 + with: + path: | + ~/.cache/pip + ~/.cache/pre-commit + key: ${{ runner.os }}-pip-2 + restore-keys: | + ${{ runner.os }}-pip- + ${{ runner.os }}- + + - name: Install pre-commit + run: python -m pip install pre-commit + + - name: Run pre-commit + run: pre-commit run --all-files + + test: + needs: linting + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.10.0-rc.1 + + - name: Load cached Poetry installation + uses: actions/cache@v2 + with: + path: ~/.local + key: poetry-cache-0 + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.2.0a2 + virtualenvs-in-project: true + + - name: Load cached venv + uses: actions/cache@v2 + id: cache-venv + with: + path: .venv + key: ${{ hashFiles('**/poetry.lock') }}-0 + + - name: Install dependencies + run: poetry install --no-interaction --no-root + if: steps.cache-venv.outputs.cache-hit != 'true' + + - name: Run tests + run: source $VENV && pytest main_tests.py --cov-report=xml + + - uses: codecov/codecov-action@v2 + with: + file: ./coverage.xml + fail_ci_if_error: true + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1d2b3aa --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +.env +.venv +.idea/ +.mypy_cache/ +__pycache__/ +.coverage diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..d1cb996 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,47 @@ +repos: + - repo: https://github.com/ambv/black + rev: 21.7b0 + 
hooks: + - id: black + args: ['--quiet'] + - repo: https://github.com/pycqa/isort + rev: 5.9.3 + hooks: + - id: isort + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: check-ast + - id: check-added-large-files + - id: check-merge-conflict + - id: check-case-conflict + - id: check-docstring-first + - id: check-json + - id: check-yaml + - id: double-quote-string-fixer + - id: end-of-file-fixer + - id: trailing-whitespace + - id: mixed-line-ending + - repo: https://github.com/asottile/pyupgrade + rev: v2.23.3 + hooks: + - id: pyupgrade + args: ['--py36-plus', '--py37-plus', '--keep-runtime-typing'] + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 + additional_dependencies: [ + 'flake8-bugbear', + 'flake8-comprehensions', + 'flake8-deprecated', + 'flake8-use-fstring', + 'flake8-docstrings', + 'flake8-type-checking', + ] + - repo: https://github.com/pre-commit/mirrors-mypy + rev: 'v0.910' + hooks: + - id: mypy + additional_dependencies: [types-dateparser] diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..ea63281 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,9 @@ +FROM python:3.10.0rc1-alpine + +RUN apk add build-base + +RUN pip install httpx dateparser + +COPY main.py /main.py + +ENTRYPOINT ["python", "main.py"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..eb83171 --- /dev/null +++ b/README.md @@ -0,0 +1,160 @@ +[![release](https://img.shields.io/github/release/sondrelg/container-retention-policy.svg)](https://github.com/sondrelg/container-retention-policy/releases/latest) +[![coverage](https://codecov.io/gh/sondrelg/container-retention-policy/branch/master/graph/badge.svg)](https://codecov.io/gh/sondrelg/container-retention-policy) + +# 📘 GHCR Container Retention Policy + +A Github Action for deleting old image versions from the Github container registry. + +Storage isn't free and registries can often get bloated with unused images. Having a retention policy to prevent clutter +makes sense in most cases. + +Supports both organizational and personal accounts.
+ + # Content + + - [Usage](#usage) + - [Examples](#examples) + - [Parameters](#parameters) + - [Contributing](#contributing) + + # Usage + + To use the action, add it to your Github workflow like this: + + ```yaml + - uses: sondrelg/container-retention-policy@v0.1 + with: + image-names: dev, web, test + cut-off: two hours ago UTC+2 + timestamp-to-use: updated_at + account-type: org + org-name: google + token: ${{ secrets.PAT }} + ``` + + You could run this as a [scheduled event](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#schedule), or as part of an existing workflow, but it might also make sense to trigger it with: + + - [workflow_dispatch](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#workflow_dispatch): trigger it manually in the Github repo UI when needed + - [workflow_run](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#workflow_run): have it run as clean-up after another key workflow completes + - [repository_dispatch](https://docs.github.com/en/actions/reference/events-that-trigger-workflows#repository_dispatch): trigger it with a webhook from an external system + + # Examples + + For an organization, a full example might look something like this: + + ```yaml + name: Delete old container images + + on: + schedule: + - cron: '0 0 * * *' # every day at midnight + + jobs: + delete-old-container-images: + runs-on: ubuntu-latest + steps: + - name: Delete 'dev' containers older than a week + uses: sondrelg/container-retention-policy@v0.1 + with: + image-names: app1/dev, app2/dev + cut-off: A week ago UTC + account-type: org + org-name: my-org + token: ${{ secrets.PAT }} + + - name: Delete 'test' containers older than a month + uses: sondrelg/container-retention-policy@v0.1 + with: + image-names: app1/test, app2/test + cut-off: One month ago UTC + account-type: org + org-name: my-org + token: ${{ secrets.PAT }} + ``` + + While for a personal account, something like this might do: + + ```yaml + name: Delete old container images + + on: + schedule: + - cron: '0 0 1 * *' # the first day of the month + + jobs: + delete-old-container-images: + runs-on: ubuntu-latest + steps: + - name: Delete old images + uses: sondrelg/container-retention-policy@v0.1 + with: + image-names: dev + cut-off: One month ago UTC + account-type: personal + token: ${{ secrets.PAT }} + ``` + + # Parameters + + ## image-names + + * **Required**: `Yes` + * **Example**: `image-names: image1,image2,image3` + + The names of the container images you want to delete old versions for. Takes one or several container image names as a comma-separated list. + + ## cut-off + + * **Required**: `Yes` + * **Example**: `cut-off: 1 week ago UTC` + + Container versions older than this timezone-aware datetime will be deleted. + + We use [dateparser](https://dateparser.readthedocs.io/en/latest/) to parse the cut-off, so you can specify it in relative, human-readable terms like `Two hours ago UTC`, or as a normal timestamp. + + The parsed datetime **must** contain a timezone. + + ## timestamp-to-use + + * **Required**: `Yes` + * **Example**: `timestamp-to-use: created_at` + * **Default**: `updated_at` + * **Valid choices**: `updated_at` or `created_at` + + Which timestamp to compare against the cut-off; this determines which container versions are considered old enough to delete.
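
To make the relationship between `cut-off` and `timestamp-to-use` concrete, here is a minimal sketch of what `main.py` effectively does with these two inputs (`validate_inputs` parses the cut-off, `get_and_delete_old_versions` compares it against each version). It assumes `dateparser` is installed; the version dict is illustrative, not a real API response:

```python
from dateparser import parse

# The cut-off must parse to a timezone-aware datetime, or the action fails validation.
cut_off = parse('1 week ago UTC')
assert cut_off is not None and cut_off.tzinfo is not None

# One package version, shaped like the Github packages API response (illustrative values).
version = {'id': 1234567, 'created_at': '2021-05-26T14:03:03Z', 'updated_at': '2021-05-26T14:03:03Z'}

# timestamp-to-use decides which field is compared against the cut-off.
timestamp = parse(version['updated_at'])

if timestamp and timestamp < cut_off:
    print(f'Version {version["id"]} would be deleted')
```

Versions whose chosen timestamp is older than the cut-off are deleted; newer versions are left untouched.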
+ + ## account-type + + * **Required**: `Yes` + * **Example**: `account-type: personal` + * **Valid choices**: `org` or `personal` + + The account type of the account running the action. This determines which Github API endpoints the action uses. + + ## org-name + + * **Required**: `Only if account type is org` + * **Example**: `org-name: google` + + The name of your organization. + + ## token + + * **Required**: `Yes` + * **Example**: `token: ${{ secrets.PAT }}` + + For the token, you need to pass a [personal access token](https://docs.github.com/en/github/authenticating-to-github/keeping-your-account-and-data-secure/creating-a-personal-access-token) +with access to the container registry. Specifically, you need to grant +it the following scopes: + + - `read:packages`, and + - `delete:packages` + + # Contributing + + Please do 👏 diff --git a/action.yml b/action.yml new file mode 100644 index 0000000..c6ae4a6 --- /dev/null +++ b/action.yml @@ -0,0 +1,37 @@ +name: 'Container Retention Policy' +description: 'Lets you create a retention policy for GHCR hosted container images' +branding: + icon: "book" + color: "blue" +inputs: + account-type: + description: "The type of account. Can be either 'org' or 'personal'." + required: true + org-name: + description: "The name of the organization. Only required if the account type is 'org'." + default: '' + required: false + image-names: + description: 'Image name to delete. Supports passing several names as a comma-separated list.' + required: true + timestamp-to-use: + description: 'Whether to use updated_at or created_at timestamps. Defaults to updated_at.' + required: true + default: 'updated_at' + cut-off: + description: "Delete image versions older than this cut-off. For example '2 days ago UTC'. A timezone is required." + required: true + token: + description: 'Personal access token with read and delete scopes.' + required: true + +runs: + using: 'docker' + image: 'Dockerfile' + args: + - ${{ inputs.account-type }} + - ${{ inputs.org-name }} + - ${{ inputs.image-names }} + - ${{ inputs.timestamp-to-use }} + - ${{ inputs.cut-off }} + - ${{ inputs.token }} diff --git a/main.py b/main.py new file mode 100644 index 0000000..6522868 --- /dev/null +++ b/main.py @@ -0,0 +1,215 @@ +from __future__ import annotations + +import asyncio +from collections import namedtuple +from dataclasses import dataclass +from enum import Enum +from functools import partial +from sys import argv +from typing import TYPE_CHECKING +from urllib.parse import quote_from_bytes + +from dateparser import parse +from httpx import AsyncClient + +if TYPE_CHECKING: + from datetime import datetime + from typing import Any, Callable, Coroutine, Optional + +BASE_URL = 'https://api.github.com' + +ImageName = namedtuple('ImageName', ['value', 'encoded']) + + +class TimestampType(Enum): + UPDATED_AT = 'updated_at' + CREATED_AT = 'created_at' + + +class AccountType(Enum): + ORG = 'org' + PERSONAL = 'personal' + + +async def list_org_package_versions(org_name: str, image_name: ImageName, http_client: AsyncClient) -> list[dict]: + """ + List image versions for an organization. + + :param org_name: The name of the organization. + :param image_name: The name of the container image. + :param http_client: HTTP client. + :return: List of image objects.
+ """ + response = await http_client.get(f'{BASE_URL}/orgs/{org_name}/packages/container/{image_name.encoded}/versions') + response.raise_for_status() + return response.json() + + +async def list_package_versions(image_name: ImageName, http_client: AsyncClient) -> list[dict]: + """ + List image versions for a personal account. + + :param image_name: The name of the container image. + :param http_client: HTTP client. + :return: List of image objects. + """ + response = await http_client.get(f'{BASE_URL}/user/packages/container/{image_name.encoded}/versions') + response.raise_for_status() + return response.json() + + +async def delete_org_package_versions(org_name: str, image_name: ImageName, version_id: int, http_client: AsyncClient) -> None: + """ + Delete an image version for an organization. + + :param org_name: The name of the org. + :param image_name: The name of the container image. + :param version_id: The ID of the image version we're deleting. + :param http_client: HTTP client. + :return: Nothing - the API returns a 204. + """ + url = f'{BASE_URL}/orgs/{org_name}/packages/container/{image_name.encoded}/versions/{version_id}' + response = await http_client.delete(url) + response.raise_for_status() + print(f'Deleted old image: {image_name.value}:{version_id}') + + +async def delete_package_versions(image_name: ImageName, version_id: int, http_client: AsyncClient) -> None: + """ + Delete an image version for a personal account. + + :param image_name: The name of the container image. + :param version_id: The ID of the image version we're deleting. + :param http_client: HTTP client. + :return: Nothing - the API returns a 204. + """ + url = f'{BASE_URL}/user/packages/container/{image_name.encoded}/versions/{version_id}' + response = await http_client.delete(url) + response.raise_for_status() + print(f'Deleted old image: {image_name.value}:{version_id}') + + +@dataclass +class Inputs: + """ + Class holds validated inputs, and unifies the API for org- and personal functions. + """ + + parsed_cutoff: datetime + timestamp_type: TimestampType + account_type: AccountType + org_name: Optional[str] = None + + @property + def is_org(self) -> bool: + """ + Whether the account type is an org or not. + """ + return self.account_type == AccountType.ORG + + @property + def list_package_versions(self) -> Callable[[ImageName, Any], Coroutine[Any, Any, list[dict]]]: + """ + Unify the API for package version list functions. + """ + if self.is_org: + return partial(list_org_package_versions, self.org_name) + else: + return list_package_versions + + @property + def delete_package(self) -> Callable[[ImageName, int, Any], Coroutine[Any, Any, None]]: + """ + Unify the API for package deletion functions. + """ + if self.is_org: + return partial(delete_org_package_versions, self.org_name) + else: + return delete_package_versions + + +async def get_and_delete_old_versions(image_name: ImageName, inputs: Inputs, http_client: AsyncClient) -> None: + """ + Delete old package versions for an image name. + """ + versions = await inputs.list_package_versions(image_name, http_client) + + tasks = [] + + for version in versions: + updated_or_created_at = parse(version[inputs.timestamp_type.value]) + + if not updated_or_created_at: + print(f'Skipping image version {version["id"]}. 
Unable to parse timestamps.') + continue + + if updated_or_created_at < inputs.parsed_cutoff: + tasks.append(asyncio.create_task(inputs.delete_package(image_name, version['id'], http_client))) + + if not tasks: + print(f'No more versions to delete for {image_name.value}') + + await asyncio.gather(*tasks) + + +def validate_inputs(account_type: str, org_name: str, timestamp_type: str, cut_off: str) -> Inputs: + """ + Perform basic validation on the incoming parameters and return an Inputs instance. + """ + # For date parsing we use `dateparser`. If you're having issues getting this to work, + # check out https://dateparser.readthedocs.io/en/latest/. + if not (parsed_cutoff := parse(cut_off)): + raise ValueError(f"Unable to parse '{cut_off}'") + elif parsed_cutoff.tzinfo is None or parsed_cutoff.tzinfo.utcoffset(parsed_cutoff) is None: + raise ValueError('Timezone is required for the cut-off') + + if account_type == 'org' and not org_name: + raise ValueError('org-name is required when account-type is org') + + return Inputs( + parsed_cutoff=parsed_cutoff, + timestamp_type=TimestampType(timestamp_type), + account_type=AccountType(account_type), + org_name=org_name if account_type == 'org' else None, + ) + + +def parse_image_names(image_names: str) -> list[ImageName]: + """ + Return an ImageName for each images name received. + + The image_name can be one or multiple image names, and should be comma-separated. + For images with special characters in the name (e.g., `/`), we must url-encode + the image names before passing them to the Github API, so we save both the url- + encoded and raw value to a named tuple. + """ + return [ImageName(img_name.strip(), quote_from_bytes(img_name.strip().encode('utf-8'), safe='')) for img_name in image_names.split(',')] + + +async def main(account_type: str, org_name: str, image_names: str, timestamp_type: str, cut_off: str, token: str) -> None: + """ + Delete old image versions. + + See action.yml for additional descriptions of each parameter. + + :param account_type: Account type, must be 'org' or 'personal'. + :param org_name: The name of the org. Required if account type is 'org'. + :param image_names: The image names to delete versions for. + Can be a single image name, or multiple comma-separated image names. + :param timestamp_type: Which timestamp to base our cut-off on. Can be 'updated_at' or 'created_at'. + :param cut_off: Can be a human readable relative time like '2 days ago UTC', or a timestamp. + Must contain a reference to the timezone. + :param token: The personal access token to authenticate with. 
+ """ + parsed_image_names: list[ImageName] = parse_image_names(image_names) + inputs: Inputs = validate_inputs(account_type, org_name, timestamp_type, cut_off) + headers = {'accept': 'application/vnd.github.v3+json', 'Authorization': f'Bearer {token}'} + + async with AsyncClient(headers=headers) as http_client: + await asyncio.gather( + *(asyncio.create_task(get_and_delete_old_versions(image_name, inputs, http_client)) for image_name in parsed_image_names) + ) + + +if __name__ == '__main__': + asyncio.run(main(*argv[1:])) diff --git a/main_tests.py b/main_tests.py new file mode 100644 index 0000000..2589abf --- /dev/null +++ b/main_tests.py @@ -0,0 +1,191 @@ +from datetime import datetime, timedelta, timezone +from functools import partial +from pathlib import Path +from unittest.mock import AsyncMock, Mock + +import pytest as pytest +from dateparser import parse +from httpx import AsyncClient + +import main +from main import ( + AccountType, + ImageName, + Inputs, + TimestampType, + delete_org_package_versions, + delete_package_versions, + get_and_delete_old_versions, + list_org_package_versions, + list_package_versions, +) +from main import main as main_ +from main import parse_image_names, validate_inputs + +mock_response = Mock() +mock_response.json.return_value = [] +mock_http_client = AsyncMock() +mock_http_client.get.return_value = mock_response +mock_http_client.delete.return_value = mock_response + + +@pytest.mark.asyncio +async def test_list_org_package_version(): + await list_org_package_versions(org_name='test', image_name=ImageName('test', 'test'), http_client=mock_http_client) + + +@pytest.mark.asyncio +async def test_list_package_version(): + await list_package_versions(image_name=ImageName('test', 'test'), http_client=mock_http_client) + + +@pytest.mark.asyncio +async def test_delete_org_package_version(): + await delete_org_package_versions(org_name='test', image_name=ImageName('test', 'test'), http_client=mock_http_client, version_id=123) + + +@pytest.mark.asyncio +async def test_delete_package_version(): + await delete_package_versions(image_name=ImageName('test', 'test'), http_client=mock_http_client, version_id=123) + + +def test_inputs_dataclass(): + personal = Inputs( + parsed_cutoff=parse('an hour ago utc'), timestamp_type=TimestampType('created_at'), account_type=AccountType('personal') + ) + assert personal.is_org is False + assert personal.list_package_versions == list_package_versions + assert personal.delete_package == delete_package_versions + + org = Inputs( + parsed_cutoff=parse('an hour ago utc'), + timestamp_type=TimestampType('created_at'), + account_type=AccountType('org'), + org_name='abcorp', + ) + assert org.is_org is True + assert isinstance(org.list_package_versions, partial) + assert isinstance(org.delete_package, partial) + + +class TestGetAndDeleteOldVersions: + @staticmethod + async def mock_list_package_versions(data, *args): + return data + + @pytest.mark.asyncio + async def test_get_and_delete_old_versions_delete_package_scenario(self, capsys): + data = [ + { + 'created_at': '2021-05-26T14:03:03Z', + 'html_url': 'https://github.com/orgs/org-name/packages/container/image-name/1234567', + 'id': 1234567, + 'metadata': {'container': {'tags': []}, 'package_type': 'container'}, + 'name': 'sha256:3c6891187412bd31fa04c63b4f06c47417eb599b1b659462632285531aa99c19', + 'package_html_url': 'https://github.com/orgs/org-name/packages/container/package/image-name', + 'updated_at': '2021-05-26T14:03:03Z', + 'url': 
'https://api.github.com/orgs/org-name/packages/container/image-name/versions/1234567', + } + ] + Inputs.list_package_versions = partial(self.mock_list_package_versions, data) + inputs = Inputs( + parsed_cutoff=parse('an hour ago utc'), timestamp_type=TimestampType('created_at'), account_type=AccountType('personal') + ) + + await get_and_delete_old_versions(image_name=ImageName('a', 'a'), inputs=inputs, http_client=AsyncMock()) + captured = capsys.readouterr() + assert captured.out == 'Deleted old image: a:1234567\n' + + @pytest.mark.asyncio + async def test_get_and_delete_old_versions_not_old_enough_scenario(self, capsys): + Inputs.list_package_versions = partial( + self.mock_list_package_versions, + [ + { + 'created_at': str(datetime.now(timezone(timedelta(hours=1)))), + 'id': 1234567, + } + ], + ) + inputs = Inputs( + parsed_cutoff=parse('2 days ago utc'), timestamp_type=TimestampType('created_at'), account_type=AccountType('personal') + ) + + await get_and_delete_old_versions(image_name=ImageName('a', 'a'), inputs=inputs, http_client=AsyncMock()) + captured = capsys.readouterr() + assert captured.out == 'No more versions to delete for a\n' + + @pytest.mark.asyncio + async def test_get_and_delete_old_versions_skip_package_scenario(self, capsys): + Inputs.list_package_versions = partial(self.mock_list_package_versions, [{'created_at': '', 'id': 1234567}]) + inputs = Inputs( + parsed_cutoff=parse('an hour ago utc'), timestamp_type=TimestampType('created_at'), account_type=AccountType('personal') + ) + + await get_and_delete_old_versions(image_name=ImageName('a', 'a'), inputs=inputs, http_client=AsyncMock()) + captured = capsys.readouterr() + assert captured.out == 'Skipping image version 1234567. Unable to parse timestamps.\nNo more versions to delete for a\n' + + @pytest.mark.asyncio + async def test_get_and_delete_old_versions_no_packages_scenario(self, capsys): + Inputs.list_package_versions = partial(self.mock_list_package_versions, []) + inputs = Inputs( + parsed_cutoff=parse('an hour ago utc'), timestamp_type=TimestampType('created_at'), account_type=AccountType('personal') + ) + + await get_and_delete_old_versions(image_name=ImageName('a', 'a'), inputs=inputs, http_client=AsyncMock()) + captured = capsys.readouterr() + assert captured.out == 'No more versions to delete for a\n' + + +def test_inputs_bad_account_type(): + defaults = {'account_type': 'org', 'org_name': 'test', 'timestamp_type': 'updated_at', 'cut_off': '2 hours ago UTC'} + + # Account type + validate_inputs(**defaults | {'account_type': 'personal'}) + validate_inputs(**defaults | {'account_type': 'org'}) + with pytest.raises(ValueError, match="'' is not a valid AccountType"): + validate_inputs(**defaults | {'account_type': ''}) + + # Org name + validate_inputs(**defaults | {'org_name': '', 'account_type': 'personal'}) + with pytest.raises(ValueError, match='org-name is required when account-type is org'): + validate_inputs(**defaults | {'org_name': ''}) + + # Timestamp type + validate_inputs(**defaults | {'timestamp_type': 'updated_at'}) + validate_inputs(**defaults | {'timestamp_type': 'created_at'}) + with pytest.raises(ValueError, match="'wat' is not a valid TimestampType"): + validate_inputs(**defaults | {'timestamp_type': 'wat'}) + + # Cut-off + validate_inputs(**defaults | {'cut_off': '21 July 2013 10:15 pm +0500'}) + validate_inputs(**defaults | {'cut_off': '12/12/12 PM EST'}) + with pytest.raises(ValueError, match='Timezone is required for the cut-off'): + validate_inputs(**defaults | {'cut_off': '12/12/12'}) + 
with pytest.raises(ValueError, match="Unable to parse 'lolol'"): + validate_inputs(**defaults | {'cut_off': 'lolol'}) + + +def test_parse_image_names(): + assert parse_image_names('a') == [ImageName('a', 'a')] + assert parse_image_names('a,b') == [ImageName('a', 'a'), ImageName('b', 'b')] + assert parse_image_names(' a , b ') == [ImageName('a', 'a'), ImageName('b', 'b')] + assert parse_image_names('a/a') == [ImageName('a/a', 'a%2Fa')] + + +@pytest.mark.asyncio +async def test_main(mocker): + mocker.patch.object(AsyncClient, 'get', return_value=mock_response) + mocker.patch.object(AsyncClient, 'delete', return_value=mock_response) + mocker.patch.object(main, 'get_and_delete_old_versions', AsyncMock()) + await main_( + **{ + 'account_type': 'org', + 'org_name': 'test', + 'image_names': 'a,b,c', + 'timestamp_type': 'updated_at', + 'cut_off': '2 hours ago UTC', + 'token': 'abc', + } + ) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..c4911a3 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,531 @@ +[[package]] +name = "anyio" +version = "3.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.2.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] + +[[package]] +name = "certifi" +version = "2021.5.30" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "coverage" +version = "5.5" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "dateparser" +version = "1.0.0" +description = "Date parsing library designed to parse dates from HTML pages" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +python-dateutil = "*" +pytz = "*" +regex = "!=2019.02.19" +tzlocal = "*" + +[package.extras] +calendars = ["convertdate", "hijri-converter", "convertdate"] + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "httpcore" +version = "0.13.6" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +anyio = ">=3.0.0,<4.0.0" +h11 = ">=0.11,<0.13" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] + +[[package]] +name = "httpx" +version = "0.18.2" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +certifi = "*" +httpcore = ">=0.13.3,<0.14.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotlicffi (>=1.0.0,<2.0.0)"] +http2 = ["h2 (>=3.0.0,<4.0.0)"] + +[[package]] +name = "idna" +version = "3.2" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.0" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2" + +[[package]] +name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +dev = ["pre-commit", "tox"] + +[[package]] +name = "py" +version = "1.10.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "pytest" +version = "6.2.4" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<1.0.0a1" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.15.1" +description = 
"Pytest support for asyncio." +category = "dev" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +pytest = ">=5.4.0" + +[package.extras] +testing = ["coverage", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cov" +version = "2.12.1" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +coverage = ">=5.2.1" +pytest = ">=4.6" +toml = "*" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-mock" +version = "3.6.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "tox", "pytest-asyncio"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2021.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "regex" +version = "2021.8.3" +description = "Alternative regular expression module, to replace re." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "types-dateparser" +version = "1.0.0" +description = "Typing stubs for dateparser" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "tzlocal" +version = "2.1" +description = "tzinfo object for the local timezone" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pytz = "*" + +[metadata] +lock-version = "1.1" +python-versions = "^3.9" +content-hash = "47dbaf03cfc5bc48fded928f3fc0e0ff0720faa54e39fa17ca7d21eb9a6115d9" + +[metadata.files] +anyio = [ + {file = "anyio-3.3.0-py3-none-any.whl", hash = "sha256:929a6852074397afe1d989002aa96d457e3e1e5441357c60d03e7eea0e65e1b0"}, + {file = "anyio-3.3.0.tar.gz", hash = "sha256:ae57a67583e5ff8b4af47666ff5651c3732d45fd26c929253748e796af860374"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = 
"attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +certifi = [ + {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, + {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = 
"sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] +dateparser = [ + {file = "dateparser-1.0.0-py2.py3-none-any.whl", hash = "sha256:17202df32c7a36e773136ff353aa3767e987f8b3e27374c39fd21a30a803d6f8"}, + {file = "dateparser-1.0.0.tar.gz", hash = "sha256:159cc4e01a593706a15cd4e269a0b3345edf3aef8bf9278a57dac8adf5bf1e4a"}, +] +h11 = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] +httpcore = [ + {file = "httpcore-0.13.6-py3-none-any.whl", hash = "sha256:db4c0dcb8323494d01b8c6d812d80091a31e520033e7b0120883d6f52da649ff"}, + {file = "httpcore-0.13.6.tar.gz", hash = "sha256:b0d16f0012ec88d8cc848f5a55f8a03158405f4bca02ee49bc4ca2c1fda49f3e"}, +] +httpx = [ + {file = "httpx-0.18.2-py3-none-any.whl", hash = "sha256:979afafecb7d22a1d10340bafb403cf2cb75aff214426ff206521fc79d26408c"}, + {file = "httpx-0.18.2.tar.gz", hash = "sha256:9f99c15d33642d38bce8405df088c1c4cfd940284b4290cacbfb02e64f4877c6"}, +] +idna = [ + {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, + {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +packaging = [ + {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, + {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, +] +pluggy = [ + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, +] +py = [ + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, +] +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = 
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pytest = [ + {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, + {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, +] +pytest-asyncio = [ + {file = "pytest-asyncio-0.15.1.tar.gz", hash = "sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f"}, + {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, +] +pytest-cov = [ + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, +] +pytest-mock = [ + {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, + {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +pytz = [ + {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, + {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, +] +regex = [ + {file = "regex-2021.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8764a78c5464ac6bde91a8c87dd718c27c1cabb7ed2b4beaf36d3e8e390567f9"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4551728b767f35f86b8e5ec19a363df87450c7376d7419c3cac5b9ceb4bce576"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:577737ec3d4c195c4aef01b757905779a9e9aee608fa1cf0aec16b5576c893d3"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c856ec9b42e5af4fe2d8e75970fcc3a2c15925cbcc6e7a9bcb44583b10b95e80"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3835de96524a7b6869a6c710b26c90e94558c31006e96ca3cf6af6751b27dca1"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cea56288eeda8b7511d507bbe7790d89ae7049daa5f51ae31a35ae3c05408531"}, + {file = "regex-2021.8.3-cp36-cp36m-win32.whl", hash = "sha256:a4eddbe2a715b2dd3849afbdeacf1cc283160b24e09baf64fa5675f51940419d"}, + {file = "regex-2021.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:57fece29f7cc55d882fe282d9de52f2f522bb85290555b49394102f3621751ee"}, + {file = "regex-2021.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a5c6dbe09aff091adfa8c7cfc1a0e83fdb8021ddb2c183512775a14f1435fe16"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff4a8ad9638b7ca52313d8732f37ecd5fd3c8e3aff10a8ccb93176fd5b3812f6"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b63e3571b24a7959017573b6455e05b675050bbbea69408f35f3cb984ec54363"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fbc20975eee093efa2071de80df7f972b7b35e560b213aafabcec7c0bd00bd8c"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14caacd1853e40103f59571f169704367e79fb78fac3d6d09ac84d9197cadd16"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb350eb1060591d8e89d6bac4713d41006cd4d479f5e11db334a48ff8999512f"}, + {file = "regex-2021.8.3-cp37-cp37m-win32.whl", hash = "sha256:18fdc51458abc0a974822333bd3a932d4e06ba2a3243e9a1da305668bd62ec6d"}, + {file = "regex-2021.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:026beb631097a4a3def7299aa5825e05e057de3c6d72b139c37813bfa351274b"}, + {file = "regex-2021.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:16d9eaa8c7e91537516c20da37db975f09ac2e7772a0694b245076c6d68f85da"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3905c86cc4ab6d71635d6419a6f8d972cab7c634539bba6053c47354fd04452c"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937b20955806381e08e54bd9d71f83276d1f883264808521b70b33d98e4dec5d"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28e8af338240b6f39713a34e337c3813047896ace09d51593d6907c66c0708ba"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c09d88a07483231119f5017904db8f60ad67906efac3f1baa31b9b7f7cca281"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:85f568892422a0e96235eb8ea6c5a41c8ccbf55576a2260c0160800dbd7c4f20"}, + {file = "regex-2021.8.3-cp38-cp38-win32.whl", hash = "sha256:bf6d987edd4a44dd2fa2723fca2790f9442ae4de2c8438e53fcb1befdf5d823a"}, + {file = "regex-2021.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:8fe58d9f6e3d1abf690174fd75800fda9bdc23d2a287e77758dc0e8567e38ce6"}, + {file = "regex-2021.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7976d410e42be9ae7458c1816a416218364e06e162b82e42f7060737e711d9ce"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9569da9e78f0947b249370cb8fadf1015a193c359e7e442ac9ecc585d937f08d"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bbe342c5b2dec5c5223e7c363f291558bc27982ef39ffd6569e8c082bdc83"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f421e3cdd3a273bace013751c345f4ebeef08f05e8c10757533ada360b51a39"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea212df6e5d3f60341aef46401d32fcfded85593af1d82b8b4a7a68cd67fdd6b"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a3b73390511edd2db2d34ff09aa0b2c08be974c71b4c0505b4a048d5dc128c2b"}, + {file = "regex-2021.8.3-cp39-cp39-win32.whl", hash = "sha256:f35567470ee6dbfb946f069ed5f5615b40edcbb5f1e6e1d3d2b114468d505fc6"}, + {file = 
"regex-2021.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:bfa6a679410b394600eafd16336b2ce8de43e9b13f7fb9247d84ef5ad2b45e91"}, + {file = "regex-2021.8.3.tar.gz", hash = "sha256:8935937dad2c9b369c3d932b0edbc52a62647c2afb2fafc0c280f14a8bf56a6a"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +types-dateparser = [ + {file = "types-dateparser-1.0.0.tar.gz", hash = "sha256:2ab180bddf23349ade7861d67c9abc9e22b9de8a7ae5981ed9e40e441750c51f"}, + {file = "types_dateparser-1.0.0-py3-none-any.whl", hash = "sha256:1a14ad04159eada9e57caa53abdadb3a1524227a3e8627e47f165a7ddb60cd20"}, +] +tzlocal = [ + {file = "tzlocal-2.1-py2.py3-none-any.whl", hash = "sha256:e2cb6c6b5b604af38597403e9852872d7f534962ae2954c7f35efcb1ccacf4a4"}, + {file = "tzlocal-2.1.tar.gz", hash = "sha256:643c97c5294aedc737780a49d9df30889321cbe1204eac2c2ec6134035a92e44"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ebe87bc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,42 @@ +[tool.poetry] +name = "container-retention-policy" +version = "1.0.0" # This version doesn't matter - only using Poetry for dependencies +description = "Lets you create a retention policy for GHCR hosted container images" +authors = ["Sondre Lillebø Gundersen "] +license = "BSD-3" + +[tool.poetry.dependencies] +python = "^3.9" +httpx = "^0.18.2" +dateparser = "^1.0.0" + +[tool.poetry.dev-dependencies] +types-dateparser = "^1.0.0" +pytest = "^6.2.4" +pytest-mock = "^3.6.1" +pytest-asyncio = "^0.15.1" +pytest-cov = "^2.12.1" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 140 +skip-string-normalization = true + +[tool.isort] +profile = "black" +line_length = 140 + +[tool.coverage.run] +omit = [] +branch = true + +[tool.coverage.report] +show_missing = true +skip_covered = true +exclude_lines = [ + 'if TYPE_CHECKING:', + "if __name__ == '__main__':", +] diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..75069b3 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,27 @@ +[tool:pytest] +addopts = --cov=main --cov-report term-missing + +[flake8] +exclude = main_tests.py +max-line-length = 140 +ignore = E203, D100, D101, D200 +enable-extensions = TC, TC1 +pytest-mark-no-parentheses=true +pytest-fixture-no-parentheses=true +pytest-parametrize-names-type=csv + +[mypy] +python_version = 3.9 +show_error_codes = True +warn_unused_ignores = True +strict_optional = True +incremental = True +ignore_missing_imports = True +warn_redundant_casts = True +warn_unused_configs = True 
+warn_no_return = False +disallow_untyped_defs = True +local_partial_types = True + +[mypy-main_tests.*] +ignore_errors = True
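
Closing note on how the pieces fit together: the Docker entrypoint runs `python main.py` with the six action inputs as positional arguments, and `main.py` hands them to `main()` via `asyncio.run(main(*argv[1:]))`. A minimal local smoke-test sketch, assuming the repository is checked out with `httpx` and `dateparser` installed, and using a placeholder token you would replace with a real PAT (this mirrors `test_main` in `main_tests.py`):

```python
import asyncio

from main import main

asyncio.run(
    main(
        account_type='personal',
        org_name='',  # only used when account_type is 'org'
        image_names='dev, web',
        timestamp_type='updated_at',
        cut_off='2 hours ago UTC',
        token='<personal-access-token>',  # placeholder; do not hard-code a real PAT
    )
)
```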