
Commit

feat: added webhdfs test without knox
SteBaum committed Jan 14, 2025
1 parent 4c0533b commit 50bcc37
Showing 2 changed files with 100 additions and 1 deletion.
5 changes: 5 additions & 0 deletions tests/conftest.py
@@ -193,6 +193,11 @@ def knox_gateway(host: Host, user: str) -> Dict[str, str]:
    }


@pytest.fixture(scope="session")
def webhdfs_gateway(host: Host, user: str) -> str:
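    # 9871 is the NameNode HTTPS port: WebHDFS is queried directly, without going through Knox.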
    return f"https://{host.backend.get_hosts('hdfs_nn')[0]}:9871"


@pytest.fixture(scope="session")
def hbase_rest(host: Host) -> str:
    return f"https://{host.backend.get_hosts('hbase_rest')[0]}:8080"
96 changes: 95 additions & 1 deletion tests/test_hdfs.py
@@ -1,8 +1,14 @@
import json
import os
from typing import Dict
from typing import Callable, Dict, List

import pytest

from testinfra import host

from .conftest import USERS, retry


testinfra_hosts = ["edge"]


@@ -55,3 +61,91 @@ def test_create_temporary_file_in_user_directory(
    )
    assert "No such file or directory" in hdfs_cmd.stderr, hdfs_cmd
    assert hdfs_cmd.stdout == "", hdfs_cmd


@pytest.fixture(scope="module")
def webhdfs_ranger_policy(
    ranger_policy: Callable[[str, str, dict, List[dict]], dict],
):
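    # Ranger policy granting the test users read/write/execute on every HDFS path ("/*").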
    resources = {
        "path": {"values": ["/*"], "isExcludes": False},
    }
    policyItems = [
        {
            "users": USERS,
            "accesses": [
                {"isAllowed": True, "type": "read"},
                {"isAllowed": True, "type": "write"},
                {"isAllowed": True, "type": "execute"},
            ],
        }
    ]
    ranger_policy("webhdfs_test", "hdfs-tdp", resources, policyItems)


def test_create_webhdfs_temporary_file_in_user_directory(
    host: host.Host,
    user: str,
    realm: str,
    user_file: Dict[str, str],
    webhdfs_gateway: str,
    webhdfs_ranger_policy: None,
    curl: Callable,
):
    distant_file = user_file["distant_file"]
    distant_hdfs_path = user_file["distant_hdfs_path"]
    file_content = user_file["file_content"]
    webhdfs_gateway_url = f"{webhdfs_gateway}/webhdfs/v1"

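    # Get a Kerberos ticket so curl can authenticate with SPNEGO (--negotiate).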
    host.run(f"kinit -kt /home/{user}/{user}.keytab {user}@{realm}")
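    # WebHDFS CREATE: the NameNode answers with a redirect to a DataNode (followed via -L),
    # which writes the file and returns 201 Created.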
    curl_result = retry(
        lambda: curl(
            f"-L -T {distant_file} --negotiate -u : -X PUT '{webhdfs_gateway_url}/user/{user}/{distant_hdfs_path}?op=CREATE'"
        )
    )()
    assert curl_result["http_status"] == 201, curl_result

    try:
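        # Read the file back (op=OPEN) and check its content.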
        curl_result = curl(
            f"-L -k --negotiate -u : -X GET '{webhdfs_gateway_url}/user/{user}/{distant_hdfs_path}?op=OPEN'"
        )
        assert file_content in curl_result["command"].stdout, curl_result

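        # op=LISTSTATUS on a file path returns a single FileStatus entry owned by the user.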
        curl_result = curl(
            f"-L -k --negotiate -u : -X GET '{webhdfs_gateway_url}/user/{user}/{distant_hdfs_path}?op=LISTSTATUS'"
        )
        assert curl_result["http_status"] == 200, curl_result

        liststatus = json.loads(curl_result["command"].stdout)
        assert "FileStatuses" in liststatus, curl_result
        assert "FileStatus" in liststatus["FileStatuses"], curl_result
        assert len(liststatus["FileStatuses"]["FileStatus"]) == 1, curl_result

        filestatus = liststatus["FileStatuses"]["FileStatus"][0]
        assert filestatus["group"] == user, curl_result
        assert filestatus["owner"] == user, curl_result
        assert filestatus["type"] == "FILE", curl_result
    finally:
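        # Clean up: delete the file and verify it is gone.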
        curl_result = curl(
            f"-L -k --negotiate -u : -X DELETE '{webhdfs_gateway_url}/user/{user}/{distant_hdfs_path}?op=DELETE'"
        )
        assert curl_result["http_status"] == 200, curl_result

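        # Listing the deleted path should now fail with a FileNotFoundException
        # wrapped in a WebHDFS RemoteException.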
        curl_result = curl(
            f"-L -k --negotiate -u : -X GET '{webhdfs_gateway_url}/user/{user}/{distant_hdfs_path}?op=LISTSTATUS'",
            check_status_code=False,
        )
        liststatus = json.loads(curl_result["command"].stdout)
        assert "RemoteException" in liststatus, curl_result

        assert "exception" in liststatus["RemoteException"], curl_result
        assert (
            "FileNotFoundException" in liststatus["RemoteException"]["exception"]
        ), curl_result

        assert "message" in liststatus["RemoteException"], curl_result
        assert (
            f"File /user/{user}/{distant_hdfs_path} does not exist."
            in liststatus["RemoteException"]["message"]
        ), curl_result
