[pre-commit.ci] pre-commit autoupdate #2230

Open · wants to merge 2 commits into base: master

Changes from all commits
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -9,7 +9,7 @@ ci:
- shfmt-docker
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
-rev: v4.6.0
+rev: v5.0.0
hooks:
- id: trailing-whitespace
exclude: static/js/lib/ckeditor/plugins/MarkdownListSyntax.test.ts
@@ -24,7 +24,7 @@ repos:
- id: check-toml
- id: debug-statements
- repo: https://github.com/scop/pre-commit-shfmt
-rev: v3.8.0-1
+rev: v3.10.0-2
hooks:
- id: shfmt-docker
- repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt
@@ -72,7 +72,7 @@ repos:
- --exclude-files
- ".*/generated/"
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.4.10"
rev: "v0.9.2"
hooks:
- id: ruff-format
- id: ruff
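pre-commit.ci produces these bumps by running `pre-commit autoupdate`, which rewrites each `rev:` to the latest tag of the hook repository. The ruff bump (v0.4.10 → v0.9.2) appears to drive every Python change in this diff: ruff 0.9 formats f-string internals and drops some rules, so its hooks rewrite the files below accordingly. As a quick local sanity check of what the updated config pins, a minimal sketch (assuming PyYAML is available and the script runs from the repo root):

```python
import yaml  # assumption: PyYAML is installed in the dev environment

with open(".pre-commit-config.yaml") as f:
    config = yaml.safe_load(f)

# Print each hook repo and the tag it is pinned to; repos such as
# `local` or `meta` carry no rev, hence the fallback.
for repo in config["repos"]:
    print(repo["repo"], repo.get("rev", "<no rev>"))
```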
16 changes: 8 additions & 8 deletions conftest.py
@@ -13,12 +13,12 @@


@pytest.fixture(autouse=True)
-def default_settings(settings): # noqa: PT004
+def default_settings(settings):
"""Set default settings for all tests"""
settings.DISABLE_WEBPACK_LOADER_STATS = True


-@pytest.fixture()
+@pytest.fixture
def mocked_celery(mocker):
"""Mock object that patches certain celery functions"""
exception_class = TabError
@@ -36,22 +36,22 @@ def mocked_celery(mocker):
)


-@pytest.fixture()
-@pytest.mark.django_db()
+@pytest.fixture
+@pytest.mark.django_db
def course_starter(settings):
"""Returns the 'course'-type WebsiteStarter that is seeded in a data migration""" # noqa: D401
return WebsiteStarter.objects.get(slug=settings.OCW_COURSE_STARTER_SLUG)


-@pytest.fixture()
-@pytest.mark.django_db()
+@pytest.fixture
+@pytest.mark.django_db
def omnibus_starter():
"""Returns the omnibus WebsiteStarter that is seeded in a data migration""" # noqa: D401
return WebsiteStarter.objects.get(slug=OMNIBUS_STARTER_SLUG)


-@pytest.fixture()
-@pytest.mark.django_db()
+@pytest.fixture
+@pytest.mark.django_db
def omnibus_config(settings):
"""Returns the omnibus site config""" # noqa: D401
with open( # noqa: PTH123
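The conftest churn tracks two changes in the updated hooks: flake8-pytest-style's PT001 now prefers `@pytest.fixture` without parentheses (PT023 applies the same to marks like `@pytest.mark.django_db`), and PT004, which the deleted `# noqa` comments suppressed, was removed from ruff in 0.9, leaving those directives unused. A minimal sketch of the enforced style (fixture bodies are illustrative; `settings` is the pytest-django fixture):

```python
import pytest


@pytest.fixture  # PT001: no parentheses when the decorator takes no arguments
def sample_payload():
    return {"site": "example"}


@pytest.fixture(autouse=True)  # parentheses stay once arguments are passed
def default_settings(settings):  # pytest-django's `settings` fixture
    settings.DISABLE_WEBPACK_LOADER_STATS = True
```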
2 changes: 1 addition & 1 deletion content_sync/api_test.py
@@ -16,7 +16,7 @@
# pylint:disable=redefined-outer-name


-@pytest.fixture()
+@pytest.fixture
def mock_api_funcs(settings, mocker):
"""Mock functions used in publish_websites"""
settings.CONTENT_SYNC_BACKEND = "content_sync.backends.TestBackend"
14 changes: 7 additions & 7 deletions content_sync/apis/github_test.py
@@ -77,7 +77,7 @@


@pytest.fixture(autouse=True)
-def mock_github_integration(mocker): # noqa: PT004
+def mock_github_integration(mocker):
"""Mock the github app request"""
mock_get = mocker.patch("content_sync.apis.github.requests.get")
mock_get.return_value.status_code = 200
@@ -86,7 +86,7 @@ def mock_github_integration(mocker): # noqa: PT004
mock_integration.return_value.get_access_token.return_value.token = "gh_token" # noqa: S105


-@pytest.fixture()
+@pytest.fixture
def db_data():
"""Fixture that seeds the database with data needed for this test suite"""
users = UserFactory.create_batch(2)
@@ -106,7 +106,7 @@ def db_data():
)


-@pytest.fixture()
+@pytest.fixture
def mock_rsa_key():
"""Generate a test key"""
private_key = rsa.generate_private_key(
@@ -119,7 +119,7 @@
)


-@pytest.fixture()
+@pytest.fixture
def mock_api_wrapper(settings, mocker, db_data):
"""Create a GithubApiWrapper with a mock Github object"""
settings.GIT_TOKEN = "faketoken" # noqa: S105
@@ -132,13 +132,13 @@ def mock_api_wrapper(settings, mocker, db_data):
)


-@pytest.fixture()
+@pytest.fixture
def mock_github(mocker):
"""Return a mock Github class"""
return mocker.patch("content_sync.apis.github.Github")


-@pytest.fixture()
+@pytest.fixture
def patched_file_serialize(mocker):
"""Patches function that serializes website content to file contents"""
return mocker.patch(
@@ -151,7 +151,7 @@ def fake_destination_filepath(website_content: WebsiteContent, *args) -> str:
return f"path/to/{website_content.filename}.md"


-@pytest.fixture()
+@pytest.fixture
def patched_destination_filepath(mocker):
"""Patches the get_destination_filepath API function"""
return mocker.patch(
2 changes: 1 addition & 1 deletion content_sync/backends/base_test.py
@@ -97,7 +97,7 @@ def test_sync_content_to_backend_delete(mocker):
mock_delete_content_in_backend.assert_called_once_with(state)


-@pytest.mark.django_db()
+@pytest.mark.django_db
def test_sync_all_content_to_backend(mocker):
"""Verify that sync_all_content_to_backend calls sync_content_to_backend for each piece of content"""
mock_sync_content_to_backend = mocker.patch.object(
4 changes: 2 additions & 2 deletions content_sync/backends/github_test.py
@@ -19,7 +19,7 @@
# pylint:disable=redefined-outer-name


-@pytest.fixture()
+@pytest.fixture
def github(settings, mocker, mock_branches):
"""Create a github backend for a website"""
settings.GIT_TOKEN = "faketoken" # noqa: S105
@@ -40,7 +40,7 @@ def github(settings, mocker, mock_branches):
)


-@pytest.fixture()
+@pytest.fixture
def patched_file_deserialize(mocker):
"""Patches function that deserializes file contents to website content"""
return mocker.patch(
8 changes: 4 additions & 4 deletions content_sync/conftest.py
@@ -19,20 +19,20 @@ def pytest_keyboard_interrupt(excinfo): # noqa: ARG001


@pytest.fixture(params=["dev", "not_dev"])
-def mock_environments(settings, request): # noqa: PT004
+def mock_environments(settings, request):
"""Fixture that tests with dev vs non-dev environment"""
settings.OCW_STUDIO_ENVIRONMENT = request.param
settings.ENV_NAME = request.param
settings.ENVIRONMENT = request.param


@pytest.fixture(params=[True, False])
-def mock_concourse_hard_purge(settings, request): # noqa: PT004
+def mock_concourse_hard_purge(settings, request):
"""Fixture that tests with True and False for settings.CONCOURSE_HARD_PURGE"""
settings.CONCOURSE_HARD_PURGE = request.param


-@pytest.fixture()
+@pytest.fixture
def mock_branches(settings, mocker):
"""Return mock github branches with names"""
mocked_branches = []
@@ -47,7 +47,7 @@ def mock_branches(settings, mocker):
return mocked_branches


-@pytest.fixture()
+@pytest.fixture
def github_content_file(mocker):
"""Fixture that returns a mocked Github ContentFile object with some related properties""" # noqa: E501
content_str = "my file content"
10 changes: 5 additions & 5 deletions content_sync/pipelines/concourse_test.py
@@ -105,14 +105,14 @@
]


-@pytest.fixture()
-def mock_auth(mocker): # noqa: PT004
+@pytest.fixture
+def mock_auth(mocker):
"""Mock the concourse api auth method"""
mocker.patch("content_sync.pipelines.concourse.PipelineApi.auth")


@pytest.fixture(params=["test", "dev"])
-def pipeline_settings(settings, request): # noqa: PT004
+def pipeline_settings(settings, request):
"""Default settings for pipelines""" # noqa: D401
env = request.param
settings.ENVIRONMENT = env
@@ -267,7 +267,7 @@ def test_delete_pipelines(settings, mocker, mock_auth, names):
for idx, site_pipeline in enumerate(PIPELINES_LIST):
if idx < 4 or names is None:
pipe_name = site_pipeline["name"]
-pipe_vars = f'?vars={quote(json.dumps(site_pipeline["instance_vars"]))}'
+pipe_vars = f"?vars={quote(json.dumps(site_pipeline['instance_vars']))}"
mock_api_delete.assert_any_call(
f"/api/v1/teams/team1/pipelines/{pipe_name}{pipe_vars}"
)
@@ -384,7 +384,7 @@ def test_upsert_website_pipelines( # noqa: PLR0913, PLR0915
expected_noindex = "true"
else:
expected_noindex = "false"
-expected_instance_vars = f'?vars={quote(json.dumps({"site": website.name}))}'
+expected_instance_vars = f"?vars={quote(json.dumps({'site': website.name}))}"
starter_slug = starter.slug
base_hugo_args = {"--themesDir": f"../{OCW_HUGO_THEMES_GIT_IDENTIFIER}/"}
base_online_args = base_hugo_args.copy()
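The quote swap here is ruff 0.9's f-string formatting: the formatter standardizes on double quotes for the outer literal, so the inner subscripts flip to single quotes (reusing the same quote type inside a single-line f-string is only legal on Python 3.12+, so this inversion keeps the code valid everywhere). A runnable sketch with made-up sample data:

```python
import json
from urllib.parse import quote

site_pipeline = {"instance_vars": {"site": "ocw-www"}}  # hypothetical sample

# Before (ruff 0.4.x kept outer single quotes to dodge the inner doubles):
#   f'?vars={quote(json.dumps(site_pipeline["instance_vars"]))}'
# After (ruff 0.9.x): double quotes outside, single quotes inside.
pipe_vars = f"?vars={quote(json.dumps(site_pipeline['instance_vars']))}"
print(pipe_vars)  # ?vars=%7B%22site%22%3A%20%22ocw-www%22%7D
```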
@@ -91,7 +91,7 @@ class ErrorHandlingStep(TryStep):
Extends TryStep and sets error handling steps
"""

-def __init__( # noqa: PLR0913
+def __init__(
self,
pipeline_name: str,
status: str,
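The dropped `# noqa: PLR0913` here (and in the theme-assets definition further down) looks like ruff's unused-suppression cleanup: when the updated rule set no longer reports too-many-arguments at a definition, `ruff check --fix` deletes the stale directive via RUF100. A sketch with a trimmed, hypothetical signature standing in for the real one:

```python
class ErrorHandlingStep:
    # Trimmed stand-in; the real class extends TryStep and takes more
    # parameters. Under ruff 0.4.x the `# noqa: PLR0913` was needed; with
    # 0.9.x the suppression no longer matched a diagnostic, so --fix
    # removed it as unused.
    def __init__(self, pipeline_name: str, status: str) -> None:
        self.pipeline_name = pipeline_name
        self.status = status
```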
@@ -331,7 +331,7 @@ def __init__(self, config: MassBuildSitesPipelineDefinitionConfig, **kwargs):
if batch_number > 1:
tasks.append(
GetStep(
get=f"{MASS_BUILD_SITES_BATCH_GATE_IDENTIFIER}-{batch_number -1}", # noqa: E501
get=f"{MASS_BUILD_SITES_BATCH_GATE_IDENTIFIER}-{batch_number - 1}", # noqa: E501
passed=[
f"{MASS_BUILD_SITES_JOB_IDENTIFIER}-batch-{batch_number - 1}" # noqa: E501
],
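Same root cause as the quote changes: ruff 0.9's formatter now formats the expressions inside f-string replacement fields, which older versions passed through verbatim, so `batch_number -1` gains conventional operator spacing. A tiny sketch (the identifier's value is made up):

```python
MASS_BUILD_SITES_BATCH_GATE_IDENTIFIER = "mass-build-sites-batch-gate"  # hypothetical value
batch_number = 2

# The braces' contents are formatted like any other expression, yielding
# "batch_number - 1" rather than "batch_number -1".
gate = f"{MASS_BUILD_SITES_BATCH_GATE_IDENTIFIER}-{batch_number - 1}"
print(gate)  # mass-build-sites-batch-gate-1
```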
22 changes: 11 additions & 11 deletions content_sync/pipelines/definitions/concourse/site_pipeline.py
@@ -525,8 +525,8 @@ def __init__( # noqa: PLR0913
"-exc",
f"""
cp ../{WEBPACK_MANIFEST_S3_IDENTIFIER}/webpack.json ../{OCW_HUGO_THEMES_GIT_IDENTIFIER}/base-theme/data
-hugo {pipeline_vars['hugo_args_online']}
-cp -r -n ../{STATIC_RESOURCES_S3_IDENTIFIER}/. ./output-online{pipeline_vars['static_resources_subdirectory']}
+hugo {pipeline_vars["hugo_args_online"]}
+cp -r -n ../{STATIC_RESOURCES_S3_IDENTIFIER}/. ./output-online{pipeline_vars["static_resources_subdirectory"]}
""", # noqa: E501
],
),
@@ -548,9 +548,9 @@ def __init__( # noqa: PLR0913
online_sync_command = f"""
aws configure set default.s3.max_concurrent_requests $AWS_MAX_CONCURRENT_CONNECTIONS
if [ $IS_ROOT_WEBSITE = 1 ] ; then
-aws s3{get_cli_endpoint_url()} cp {SITE_CONTENT_GIT_IDENTIFIER}/output-online s3://{pipeline_vars['web_bucket']}/{pipeline_vars['prefix']}{pipeline_vars['base_url']} --recursive --metadata site-id={pipeline_vars['site_name']}{pipeline_vars['delete_flag']}
+aws s3{get_cli_endpoint_url()} cp {SITE_CONTENT_GIT_IDENTIFIER}/output-online s3://{pipeline_vars["web_bucket"]}/{pipeline_vars["prefix"]}{pipeline_vars["base_url"]} --recursive --metadata site-id={pipeline_vars["site_name"]}{pipeline_vars["delete_flag"]}
else
-aws s3{get_cli_endpoint_url()} sync {SITE_CONTENT_GIT_IDENTIFIER}/output-online s3://{pipeline_vars['web_bucket']}/{pipeline_vars['prefix']}{pipeline_vars['base_url']} --exclude='{pipeline_vars['short_id']}.zip' --exclude='{pipeline_vars['short_id']}-video.zip' --metadata site-id={pipeline_vars['site_name']}{delete_flag}
+aws s3{get_cli_endpoint_url()} sync {SITE_CONTENT_GIT_IDENTIFIER}/output-online s3://{pipeline_vars["web_bucket"]}/{pipeline_vars["prefix"]}{pipeline_vars["base_url"]} --exclude='{pipeline_vars["short_id"]}.zip' --exclude='{pipeline_vars["short_id"]}-video.zip' --metadata site-id={pipeline_vars["site_name"]}{delete_flag}
fi
""" # noqa: E501
upload_online_build_step = add_error_handling(
@@ -660,19 +660,19 @@ def __init__(self, pipeline_vars: dict, fastly_var: str, pipeline_name: str):
fi
touch ./content/static_resources/_index.md
cp -r ../{WEBPACK_ARTIFACTS_IDENTIFIER}/static_shared/. ./static/static_shared/
-hugo {pipeline_vars['hugo_args_offline']}
+hugo {pipeline_vars["hugo_args_offline"]}
if [ $IS_ROOT_WEBSITE = 0 ] ; then
cd output-offline
-zip -r ../../{BUILD_OFFLINE_SITE_IDENTIFIER}/{pipeline_vars['short_id']}.zip ./
+zip -r ../../{BUILD_OFFLINE_SITE_IDENTIFIER}/{pipeline_vars["short_id"]}.zip ./
rm -rf ./*
cd ..
if [ $MP4_COUNT != 0 ];
then
mv ../videos/* ./content/static_resources
fi
-hugo {pipeline_vars['hugo_args_offline']}
+hugo {pipeline_vars["hugo_args_offline"]}
cd output-offline
-zip -r ../../{BUILD_OFFLINE_SITE_IDENTIFIER}/{pipeline_vars['short_id']}-video.zip ./
+zip -r ../../{BUILD_OFFLINE_SITE_IDENTIFIER}/{pipeline_vars["short_id"]}-video.zip ./
fi
""" # noqa: E501
build_offline_site_step = add_error_handling(
@@ -734,12 +734,12 @@ def __init__(self, pipeline_vars: dict, fastly_var: str, pipeline_name: str):
offline_sync_command = f"""
aws configure set default.s3.max_concurrent_requests $AWS_MAX_CONCURRENT_CONNECTIONS
if [ $IS_ROOT_WEBSITE = 1 ] ; then
-aws s3{get_cli_endpoint_url()} cp {SITE_CONTENT_GIT_IDENTIFIER}/output-offline/ s3://{pipeline_vars['offline_bucket']}/{pipeline_vars['prefix']}{pipeline_vars['base_url']} --recursive --metadata site-id={pipeline_vars['site_name']}{pipeline_vars['delete_flag']}
+aws s3{get_cli_endpoint_url()} cp {SITE_CONTENT_GIT_IDENTIFIER}/output-offline/ s3://{pipeline_vars["offline_bucket"]}/{pipeline_vars["prefix"]}{pipeline_vars["base_url"]} --recursive --metadata site-id={pipeline_vars["site_name"]}{pipeline_vars["delete_flag"]}
else
-aws s3{get_cli_endpoint_url()} sync {SITE_CONTENT_GIT_IDENTIFIER}/output-offline/ s3://{pipeline_vars['offline_bucket']}/{pipeline_vars['prefix']}{pipeline_vars['base_url']} --metadata site-id={pipeline_vars['site_name']}{pipeline_vars['delete_flag']}
+aws s3{get_cli_endpoint_url()} sync {SITE_CONTENT_GIT_IDENTIFIER}/output-offline/ s3://{pipeline_vars["offline_bucket"]}/{pipeline_vars["prefix"]}{pipeline_vars["base_url"]} --metadata site-id={pipeline_vars["site_name"]}{pipeline_vars["delete_flag"]}
fi
if [ $IS_ROOT_WEBSITE = 0 ] ; then
-aws s3{get_cli_endpoint_url()} sync {BUILD_OFFLINE_SITE_IDENTIFIER}/ s3://{pipeline_vars['web_bucket']}/{pipeline_vars['prefix']}{pipeline_vars['base_url']} --exclude='*' --include='{pipeline_vars['short_id']}.zip' --include='{pipeline_vars['short_id']}-video.zip' --metadata site-id={pipeline_vars['site_name']}
+aws s3{get_cli_endpoint_url()} sync {BUILD_OFFLINE_SITE_IDENTIFIER}/ s3://{pipeline_vars["web_bucket"]}/{pipeline_vars["prefix"]}{pipeline_vars["base_url"]} --exclude='*' --include='{pipeline_vars["short_id"]}.zip' --include='{pipeline_vars["short_id"]}-video.zip' --metadata site-id={pipeline_vars["site_name"]}
fi
""" # noqa: E501
upload_offline_build_step = add_error_handling(
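Inside these triple-quoted shell templates the rewrite runs the other way: inner subscripts move from single to double quotes. A lone `"` can never terminate a `"""`-delimited literal, so the double-quote preference is safe there on any supported Python version. Sketch with made-up values:

```python
pipeline_vars = {"short_id": "18.06-spring-2023"}  # hypothetical sample

# Double quotes inside a triple-double-quoted f-string are always legal,
# so the formatter applies the project's preferred quote style directly.
cmd = f"""
zip -r ../build/{pipeline_vars["short_id"]}.zip ./
"""
print(cmd)
```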
@@ -61,7 +61,7 @@ class ThemeAssetsPipelineDefinition(Pipeline):
_clear_live_cdn_cache_identifier = Identifier("clear-live-cdn-cache-task").root
_slack_resource = SlackAlertResource()

-def __init__( # noqa: PLR0913
+def __init__(
self,
artifacts_bucket: str,
preview_bucket: str,