
Commit

remove pdb
scarlehoff committed Mar 4, 2024
1 parent 9cfdb2f commit c7aa960
Showing 1 changed file with 36 additions and 39 deletions.
n3fit/src/n3fit/tests/test_fit.py (75 changes: 36 additions & 39 deletions)
@@ -94,45 +94,42 @@ def check_fit_results(
     equal_checks = ["stop_epoch", "pos_state"]
     approx_checks = ["erf_tr", "erf_vl", "chi2", "best_epoch", "best_epoch"]
     relaxed_checks = ["arc_lengths", "integrability"]
-    try:
-        for key, value in new_json.items():
-            reference = old_json[key]
-            err_msg = f"error for .json: {key}"
-            if key in equal_checks:
-                assert_equal(value, reference, err_msg=err_msg)
-            elif key in approx_checks:
-                assert_allclose(value, reference, err_msg=err_msg, rtol=rel_error)
-            elif key in relaxed_checks:
-                assert_allclose(value, reference, err_msg=err_msg, rtol=rel_error * 10)
-            elif key == "preprocessing":
-                for ref, cur in zip(reference, value):
-                    err_msg += f" - {ref['fl']}"
-                    assert_allclose(ref["smallx"], cur["smallx"], err_msg=err_msg, rtol=rel_error)
-                    assert_allclose(ref["largex"], cur["largex"], err_msg=err_msg, rtol=rel_error)
-
-        # check that the times didnt grow in a weird manner
-        if timing:
-            # Better to catch up errors even when they happen to grow larger by chance
-            times = new_json["timing"]
-            fitting_time = times["walltime"]["replica_set_to_replica_fitted"]
-            assert fitting_time < EXPECTED_MAX_FITTIME
-
-        # For safety, check also the version
-        assert new_json["version"]["nnpdf"] == n3fit.__version__
-
-        new_expgrid = _load_exportgrid(new_expgrid_file)
-        old_expgrid = _load_exportgrid(old_expgrid_file)
-
-        # Now compare the exportgrids
-        for key, value in new_expgrid.items():
-            reference = old_expgrid[key]
-            err_msg = f"error for .exportgrid: {key}"
-            if key == "pdfgrid":
-                assert_allclose(value, reference, rtol=rel_error, atol=1e-6, err_msg=err_msg)
-            else:
-                assert_equal(value, reference, err_msg=err_msg)
-    except:
-        import ipdb; ipdb.set_trace()
+    for key, value in new_json.items():
+        reference = old_json[key]
+        err_msg = f"error for .json: {key}"
+        if key in equal_checks:
+            assert_equal(value, reference, err_msg=err_msg)
+        elif key in approx_checks:
+            assert_allclose(value, reference, err_msg=err_msg, rtol=rel_error)
+        elif key in relaxed_checks:
+            assert_allclose(value, reference, err_msg=err_msg, rtol=rel_error * 10)
+        elif key == "preprocessing":
+            for ref, cur in zip(reference, value):
+                err_msg += f" - {ref['fl']}"
+                assert_allclose(ref["smallx"], cur["smallx"], err_msg=err_msg, rtol=rel_error)
+                assert_allclose(ref["largex"], cur["largex"], err_msg=err_msg, rtol=rel_error)
+
+    # check that the times didnt grow in a weird manner
+    if timing:
+        # Better to catch up errors even when they happen to grow larger by chance
+        times = new_json["timing"]
+        fitting_time = times["walltime"]["replica_set_to_replica_fitted"]
+        assert fitting_time < EXPECTED_MAX_FITTIME
+
+    # For safety, check also the version
+    assert new_json["version"]["nnpdf"] == n3fit.__version__
+
+    new_expgrid = _load_exportgrid(new_expgrid_file)
+    old_expgrid = _load_exportgrid(old_expgrid_file)
+
+    # Now compare the exportgrids
+    for key, value in new_expgrid.items():
+        reference = old_expgrid[key]
+        err_msg = f"error for .exportgrid: {key}"
+        if key == "pdfgrid":
+            assert_allclose(value, reference, rtol=rel_error, atol=1e-6, err_msg=err_msg)
+        else:
+            assert_equal(value, reference, err_msg=err_msg)
 
 
 def _auxiliary_performfit(tmp_path, runcard=QUICKNAME, replica=1, timing=True, rel_error=2e-3):
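For context, the pattern this commit removes is a common ad-hoc debugging trick: wrap a block of test assertions in try/except and drop into an interactive debugger on failure. A minimal sketch of the idea, with hypothetical names and assuming the ipdb package is installed:

    from numpy.testing import assert_allclose

    def compare_results(new_json, old_json, rel_error=2e-3):
        try:
            for key, value in new_json.items():
                assert_allclose(value, old_json[key], rtol=rel_error)
        except Exception:
            # On any failure, open the debugger to inspect `key`, `value`,
            # and `old_json[key]` interactively; the exception is swallowed.
            import ipdb
            ipdb.set_trace()

Handy during development, but a bare except wrapping test assertions must not be committed: on CI the set_trace call can block waiting for input or mask the assertion failure entirely, which is why this commit strips the wrapper and restores the plain assertions.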
