Skip to content

Commit

Permalink
several code improvements (#176)
Browse files Browse the repository at this point in the history
* several code improvements
  • Loading branch information
rsanchez87 authored Jul 8, 2024
1 parent 466c1b4 commit c0d8ecc
Show file tree
Hide file tree
Showing 5 changed files with 44 additions and 37 deletions.
1 change: 0 additions & 1 deletion .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,6 @@ disable=raw-checker-failed,
broad-exception-raised,
too-few-public-methods,
consider-using-generator,
no-self-use,
too-many-arguments

# Enable the message, report, category or checker with the given id(s). You can
Expand Down
36 changes: 16 additions & 20 deletions fluster/decoders/gstreamer.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,9 @@ def gst_element_exists(element: str) -> bool:
return True
except (subprocess.CalledProcessError, FileNotFoundError):
return False
return False
except Exception as error: # pylint: disable=broad-except
print(f"An unexpected error occurred: {error}")
return False


def output_format_to_gst(output_format: OutputFormat) -> str:
Expand Down Expand Up @@ -111,28 +113,22 @@ def gen_pipeline(
self.cmd, input_filepath, self.decoder_bin, self.caps, self.sink, output
)

def parse_videocodectestsink_md5sum(self, data: List[str]) -> str:
@staticmethod
def parse_videocodectestsink_md5sum(data: List[str]) -> str:
"""Parse the MD5 sum out of commandline output produced when using
videocodectestsink."""
pattern = "conformance/checksum, checksum-type=(string)MD5, checksum=(string)"
# Iterate over each line in the data
for line in data:
pattern = (
"conformance/checksum, checksum-type=(string)MD5, checksum=(string)"
)
sum_start = line.find(pattern)
# pylint: disable=no-else-continue
if sum_start <= 0:
# Skip to the next iteration if sum_start is less than or equal to 0
continue
else:
sum_start += len(pattern)
sum_end = line[sum_start:].find(";")
# pylint: disable=no-else-continue
if sum_end <= 0:
# Skip to the next iteration if sum_end is less than or equal to 0
continue
else:
sum_end += sum_start
return line[sum_start:sum_end]
# Partition the line into three parts: before the pattern, the pattern, and after the pattern
_, match, after = line.partition(pattern)
# If the pattern is found (match is not empty)
if match:
# Partition the remaining part to find the checksum up to the first ';'
checksum, _, _ = after.partition(";")
# If a valid checksum is found
if checksum:
return checksum

raise Exception("No MD5 found in the program trace.")

Expand Down
11 changes: 6 additions & 5 deletions fluster/fluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,9 +227,8 @@ def list_test_suites(
if len(self.test_suites) == 0:
print(f' No test suites found in "{self.test_suites_dir}"')

def _get_matches(
self, in_list: List[str], check_list: List[Any], name: str
) -> List[Any]:
@staticmethod
def _get_matches(in_list: List[str], check_list: List[Any], name: str) -> List[Any]:
if in_list:
in_list_names = {x.lower() for x in in_list}
check_list_names = {x.name.lower() for x in check_list}
Expand Down Expand Up @@ -350,8 +349,9 @@ def _show_summary_if_needed(
else:
self._generate_md_summary(ctx, results)

@staticmethod
def _generate_junit_summary(
self, ctx: Context, results: Dict[str, List[Tuple[Decoder, TestSuite]]]
ctx: Context, results: Dict[str, List[Tuple[Decoder, TestSuite]]]
) -> None:
# pylint: disable=import-outside-toplevel

Expand Down Expand Up @@ -426,8 +426,9 @@ def _parse_suite_results(
with open(ctx.summary_output, "w+", encoding="utf-8") as summary_file:
xml.write(summary_file.name, pretty=True)

@staticmethod
def _generate_csv_summary(
self, ctx: Context, results: Dict[str, List[Tuple[Decoder, TestSuite]]]
ctx: Context, results: Dict[str, List[Tuple[Decoder, TestSuite]]]
) -> None:
# pylint: disable=too-many-locals
result_map = {
Expand Down
21 changes: 14 additions & 7 deletions fluster/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,8 @@ def run(self) -> None:
else:
self.parser.print_help()

def _validate_args(self, args: Any) -> None:
@staticmethod
def _validate_args(args: Any) -> None:
if hasattr(args, "format"):
if (
args.format in [SummaryFormat.JUNITXML.value, SummaryFormat.CSV.value]
Expand All @@ -97,15 +98,17 @@ def _validate_args(self, args: Any) -> None:
"error: please specify XML/CSV file path with -so/--summary-output option."
)

def _validate_deps(self, args: Any) -> None:
@staticmethod
def _validate_deps(args: Any) -> None:
if hasattr(args, "format"):
junit_spec = util.find_spec("junitparser")
if args.format == SummaryFormat.JUNITXML.value and junit_spec is None:
sys.exit(
"error: junitparser required to use JUnit format. Please install with pip install junitparser."
)

def _get_installed_dirs(self) -> Tuple[str, str]:
@staticmethod
def _get_installed_dirs() -> Tuple[str, str]:
site_data_dirs = utils.site_data_dirs(APPNAME)
user_data_dir = utils.user_data_dir(APPNAME)

Expand Down Expand Up @@ -361,13 +364,15 @@ def _add_download_cmd(self, subparsers: Any) -> None:
)
subparser.set_defaults(func=self._download_cmd)

@staticmethod
def _list_cmd(args: Any, fluster: Fluster) -> None:
    """List the available test suites and decoders.

    Args:
        args: parsed command-line namespace (reads ``testvectors``,
            ``testsuites``, ``check`` and ``verbose``).
        fluster: the Fluster instance that performs the listing.
    """
    fluster.list_test_suites(
        show_test_vectors=args.testvectors, test_suites=args.testsuites
    )
    fluster.list_decoders(check=args.check, verbose=args.verbose)

def _run_cmd(self, args: Any, fluster: Fluster) -> None:
@staticmethod
def _run_cmd(args: Any, fluster: Fluster) -> None:
args.jobs = args.jobs if args.jobs > 0 else multiprocessing.cpu_count()
context = Context(
jobs=args.jobs,
Expand All @@ -391,7 +396,8 @@ def _run_cmd(self, args: Any, fluster: Fluster) -> None:
except SystemExit as exception:
sys.exit(exception.code)

def _reference_cmd(self, args: Any, fluster: Fluster) -> None:
@staticmethod
def _reference_cmd(args: Any, fluster: Fluster) -> None:
context = Context(
jobs=args.jobs,
timeout=args.timeout,
Expand All @@ -408,7 +414,8 @@ def _reference_cmd(self, args: Any, fluster: Fluster) -> None:
except SystemExit as exception:
sys.exit(exception.code)

def _download_cmd(self, args: Any, fluster: Fluster) -> None:
@staticmethod
def _download_cmd(args: Any, fluster: Fluster) -> None:
args.jobs = args.jobs if args.jobs > 0 else multiprocessing.cpu_count()
fluster.download_test_suites(
test_suites=args.testsuites,
Expand Down
12 changes: 8 additions & 4 deletions fluster/test_suite.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,8 @@ def to_json_file(self, filename: str) -> None:
]
json.dump(data, json_file, indent=4)

def _download_worker(self, ctx: DownloadWork) -> None:
@staticmethod
def _download_worker(ctx: DownloadWork) -> None:
"""Download and extract a test vector"""
test_vector = ctx.test_vector
dest_dir = os.path.join(ctx.out_dir, ctx.test_suite_name, test_vector.name)
Expand Down Expand Up @@ -270,12 +271,14 @@ def _callback_error(err: Any) -> None:

print("All downloads finished")

@staticmethod
def _rename_test(test: Test, module: str, qualname: str) -> None:
    """Overwrite the ``__module__`` and ``__qualname__`` of *test*'s class.

    NOTE(review): this mutates the test's *class*, not the instance, so
    every instance of that class is affected.

    Args:
        test: the test whose class is renamed.
        module: new value for ``__module__``.
        qualname: new value for ``__qualname__``.
    """
    test_cls = type(test)
    test_cls.__module__ = module
    test_cls.__qualname__ = qualname

def _collect_results(self, test_result: TestResult) -> None:
@staticmethod
def _collect_results(test_result: TestResult) -> None:
"""Collect all TestResults with error to add them into the test vectors"""
for res in test_result.failures:
test_vector = cast(Test, res[0]).test_vector
Expand Down Expand Up @@ -303,7 +306,8 @@ def _run_worker(self, test: Test) -> TestVector:

return test.test_vector

def _get_max_length_list_name(self, _list: List[str], name: str) -> int:
@staticmethod
def _get_max_length_list_name(_list: List[str], name: str) -> int:
max_length = len(name)
for elem in _list:
length = len(elem)
Expand Down

0 comments on commit c0d8ecc

Please sign in to comment.