Commit bd86a4e: Correcting Statistical Tests (#869)
oualib authored Nov 15, 2023
1 parent eeaad93 commit bd86a4e
Showing 8 changed files with 23 additions and 20 deletions.
4 changes: 2 additions & 2 deletions verticapy/core/vdataframe/_corr.py
@@ -1988,15 +1988,15 @@ def pacf(
         _executeSQL(query, print_time_sql=False)
         vdf = create_new_vdf(tmp_view_name)
         drop(tmp_lr0_name, method="model")
-        model = vml.LinearRegression(name=tmp_lr0_name, solver="Newton")
+        model = vml.LinearRegression(name=tmp_lr0_name, solver="newton")
         model.fit(
             input_relation=tmp_view_name,
             X=[f"lag_{i}_{gen_name([column])}" for i in range(1, p)],
             y=column,
         )
         model.predict(vdf, name="prediction_0")
         drop(tmp_lr1_name, method="model")
-        model = vml.LinearRegression(name=tmp_lr1_name, solver="Newton")
+        model = vml.LinearRegression(name=tmp_lr1_name, solver="newton")
         model.fit(
             input_relation=tmp_view_name,
             X=[f"lag_{i}_{gen_name([column])}" for i in range(1, p)],
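
The only change in pacf() is the casing of the solver string, which this commit standardizes to lowercase ("newton", "bfgs", "cgd"). The snippet below is a minimal sketch of the same call pattern used outside the library, assuming an open Vertica connection and a view lag_view that already holds the target column value and its lags lag_1 to lag_3 (all names here are illustrative, not from the commit):

```python
# Minimal sketch, not the library's internal code: assumes an open Vertica
# connection and a view "lag_view" with columns value, lag_1, lag_2, lag_3.
import verticapy.machine_learning.vertica as vml

model = vml.LinearRegression(name="pacf_demo_lr", solver="newton")  # lowercase solver name
model.fit(
    input_relation="lag_view",
    X=["lag_1", "lag_2", "lag_3"],
    y="value",
)
print(model.get_vertica_attributes("details"))  # fitted coefficient table
model.drop()  # remove the in-database model when done
```
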
@@ -221,15 +221,18 @@ def adfuller(
                {ts_str} AS ts
            FROM {vdf}"""
     _executeSQL(query, print_time_sql=False)
-    model = LinearRegression(name, solver="Newton", max_iter=1000)
+    model = LinearRegression(name, solver="newton", max_iter=1000)
     predictors = ["lag1"] + [f"delta{i}" for i in range(1, p + 1)]
     if with_trend:
         predictors += ["ts"]
     try:
         model.fit(relation_name, predictors, "delta")
-        coef = model.get_vertica_attributes("details")
     except QueryError:
         model.set_params({"solver": "bfgs"})
+        model.fit(relation_name, predictors, "delta")
+    finally:
+        drop(relation_name, method="view")
+    coef = model.get_vertica_attributes("details")
     model.drop()
     if regresults:
         return coef
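
The corrected adfuller() block follows a solver-fallback pattern: try Newton first, refit with BFGS if Vertica rejects the fit, always drop the temporary view, and only then read the coefficients. Below is a standalone, hedged sketch of that pattern; the view name, its columns, and the import paths for QueryError and drop are assumptions rather than part of this commit:

```python
# Hedged sketch of the Newton -> BFGS fallback used in adfuller(); assumes an
# open Vertica connection and a view "adf_view" with columns delta, lag1, delta1.
from verticapy import drop
from verticapy.errors import QueryError
from verticapy.machine_learning.vertica import LinearRegression

model = LinearRegression("adf_demo_lr", solver="newton", max_iter=1000)
predictors = ["lag1", "delta1"]
try:
    model.fit("adf_view", predictors, "delta")
except QueryError:
    # Newton was rejected or failed to converge: switch solver and refit.
    model.set_params({"solver": "bfgs"})
    model.fit("adf_view", predictors, "delta")
finally:
    drop("adf_view", method="view")  # clean up the temporary view either way
coef = model.get_vertica_attributes("details")  # coefficient table of the final fit
model.drop()
```
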
@@ -890,7 +890,7 @@ def test_vDF_score(self, titanic_vd):
             tol=1e-4,
             C=1.0,
             max_iter=100,
-            solver="CGD",
+            solver="cgd",
             penalty="ENet",
             l1_ratio=0.5,
         )
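
For context, Vertica's coordinate gradient descent (cgd) optimizer is the one paired with L1/elastic-net regularization, which is why this test combines solver="cgd" with penalty="ENet". A hedged sketch of a comparable constructor call follows; the class is assumed to be LogisticRegression and the model name is illustrative:

```python
# Hedged sketch: an elastic-net logistic regression mirroring the parameters in
# the test above; an open Vertica connection is needed to actually fit anything.
from verticapy.machine_learning.vertica import LogisticRegression

model = LogisticRegression(
    "enet_lr_demo",
    tol=1e-4,
    C=1.0,
    max_iter=100,
    solver="cgd",  # cgd handles L1 / elastic-net penalties
    penalty="ENet",
    l1_ratio=0.5,
)
```
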
4 changes: 2 additions & 2 deletions verticapy/tests/vDataFrame/test_vDF_plot_plotly.py
@@ -313,7 +313,7 @@ def champion_challenger_plot_result(load_plotly, titanic_vd):
 @pytest.fixture(scope="class")
 def stepwise_plot_result(load_plotly, titanic_vd):
     model = LogisticRegression(
-        name="test_LR_titanic", tol=1e-4, max_iter=100, solver="Newton"
+        name="test_LR_titanic", tol=1e-4, max_iter=100, solver="newton"
     )
     stepwise_result = stepwise(
         model,
@@ -2645,7 +2645,7 @@ def test_additional_options_custom_height(self, load_plotly, titanic_vd):
         custom_height = 650
         custom_width = 700
         model = LogisticRegression(
-            name="test_LR_titanic", tol=1e-4, max_iter=100, solver="Newton"
+            name="test_LR_titanic", tol=1e-4, max_iter=100, solver="newton"
         )
         # Act
         stepwise_result = stepwise(
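
Both fixtures above hand the logistic model to stepwise(). A hedged, standalone sketch of that call on the titanic dataset follows; the import path for stepwise, the keyword names, and the column choice are assumptions, and an open Vertica connection is required:

```python
# Hedged sketch of a stepwise() call similar to the fixtures above; import path
# and predictor columns are assumptions, not taken from this commit.
from verticapy.datasets import load_titanic
from verticapy.machine_learning.model_selection import stepwise
from verticapy.machine_learning.vertica import LogisticRegression

titanic = load_titanic()
model = LogisticRegression(name="stepwise_demo_lr", tol=1e-4, max_iter=100, solver="newton")
stepwise_result = stepwise(
    model,
    input_relation=titanic,
    X=["age", "fare", "pclass"],
    y="survived",
)
```
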
2 changes: 1 addition & 1 deletion verticapy/tests_new/machine_learning/vertica/conftest.py
@@ -105,7 +105,7 @@ def _get_vpy_model(model_class, X=None, y=None, **kwargs):
     if model_class in ["Lasso", "ElasticNet"]:
         solver = "cgd"
     else:
-        solver = "Newton"
+        solver = "newton"
 
     if model_class in ["RandomForestRegressor", "RandomForestClassifier"]:
         model = getattr(vpy_tree, model_class)(
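
The helper above picks the solver from the model class: coordinate gradient descent for the L1-penalized Lasso/ElasticNet models, Newton otherwise. A self-contained sketch of just that selection branch (the function name is illustrative):

```python
# Hedged sketch of the solver-selection branch in _get_vpy_model():
# L1-penalized linear models use "cgd", everything else defaults to "newton".
def pick_solver(model_class: str) -> str:
    if model_class in ["Lasso", "ElasticNet"]:
        return "cgd"
    return "newton"

assert pick_solver("ElasticNet") == "cgd"
assert pick_solver("LinearRegression") == "newton"
```
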
8 changes: 4 additions & 4 deletions verticapy/tests_new/mlops/test_model_tracking.py
@@ -146,7 +146,7 @@ def test_load_best_model(self, experiment, best_model_name, metric):
 
 @pytest.fixture(scope="module")
 def reg_model1(winequality_vpy):
-    model = LinearRegression("reg_m1", solver="BFGS", max_iter=1)
+    model = LinearRegression("reg_m1", solver="bfgs", max_iter=1)
     model.drop()
 
     model.fit(
@@ -160,7 +160,7 @@ def reg_model1(winequality_vpy):
 
 @pytest.fixture(scope="module")
 def reg_model2(winequality_vpy):
-    model = LinearRegression("reg_m2", solver="BFGS", max_iter=3)
+    model = LinearRegression("reg_m2", solver="bfgs", max_iter=3)
     model.drop()
 
     model.fit(
@@ -278,7 +278,7 @@ def test_load_best_model(self, regressor_experiment):
 
 @pytest.fixture(scope="module")
 def bin_model1(winequality_vpy):
-    model = LogisticRegression("bin_m1", solver="Newton", max_iter=5, penalty=None)
+    model = LogisticRegression("bin_m1", solver="newton", max_iter=5, penalty=None)
     model.drop()
 
     model.fit(
@@ -292,7 +292,7 @@ def bin_model1(winequality_vpy):
 
 @pytest.fixture(scope="module")
 def bin_model2(winequality_vpy):
-    model = LogisticRegression("bin_m2", solver="BFGS", max_iter=5, penalty=None)
+    model = LogisticRegression("bin_m2", solver="bfgs", max_iter=5, penalty=None)
     model.drop()
 
     model.fit(
12 changes: 6 additions & 6 deletions verticapy/tests_new/mlops/test_model_versioning.py
@@ -32,7 +32,7 @@
 
 @pytest.fixture(scope="module")
 def reg_model1(winequality_vpy):
-    model = LinearRegression("reg_m1", solver="Newton", max_iter=2)
+    model = LinearRegression("reg_m1", solver="newton", max_iter=2)
     model.drop()
 
     model.fit(
@@ -46,7 +46,7 @@ def reg_model1(winequality_vpy):
 
 @pytest.fixture(scope="module")
 def reg_model2(winequality_vpy):
-    model = LinearRegression("reg_m2", solver="Newton", max_iter=2)
+    model = LinearRegression("reg_m2", solver="newton", max_iter=2)
     model.drop()
 
     model.fit(
@@ -60,7 +60,7 @@ def reg_model2(winequality_vpy):
 
 @pytest.fixture(scope="module")
 def reg_model3(winequality_vpy):
-    model = LinearRegression("reg_m3", solver="Newton", max_iter=2)
+    model = LinearRegression("reg_m3", solver="newton", max_iter=2)
     model.drop()
 
     model.fit(
@@ -74,7 +74,7 @@ def reg_model3(winequality_vpy):
 
 @pytest.fixture(scope="module")
 def bin_model1(winequality_vpy):
-    model = LogisticRegression("bin_m1", solver="Newton", max_iter=2, penalty=None)
+    model = LogisticRegression("bin_m1", solver="newton", max_iter=2, penalty=None)
     model.drop()
 
     model.fit(
@@ -88,7 +88,7 @@ def bin_model1(winequality_vpy):
 
 @pytest.fixture(scope="module")
 def bin_model2(winequality_vpy):
-    model = LogisticRegression("bin_m2", solver="Newton", max_iter=2, penalty=None)
+    model = LogisticRegression("bin_m2", solver="newton", max_iter=2, penalty=None)
     model.drop()
 
     model.fit(
@@ -110,7 +110,7 @@ class TestModelVersioning:
     def test_register_models(self, reg_model1):
         reg_model1.register("regression_app1", raise_error=True)
 
-        new_model = LinearRegression("new_m", solver="Newton", max_iter=5)
+        new_model = LinearRegression("new_m", solver="newton", max_iter=5)
         with pytest.raises(RuntimeError, match="Failed to register the model"):
             new_model.register("new_app", raise_error=True)
 
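
The versioning fixtures train small models on the winequality dataset and then call register() on them. A hedged end-to-end sketch of that flow follows; the dataset loader, column choice, and names are assumptions, and registering requires a trained model plus the appropriate Vertica privileges:

```python
# Hedged sketch of the train-then-register flow used by the fixtures above;
# the application name, model name, and predictor columns are illustrative.
from verticapy.datasets import load_winequality
from verticapy.machine_learning.vertica import LinearRegression

winequality = load_winequality()
model = LinearRegression("reg_demo", solver="newton", max_iter=2)
model.drop()  # remove any leftover model with the same name
model.fit(winequality, ["alcohol", "residual_sugar"], "quality")
model.register("regression_demo_app", raise_error=True)  # raise if registration fails
```
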
4 changes: 2 additions & 2 deletions verticapy/tests_new/plotting/base_test_files.py
@@ -2130,7 +2130,7 @@ def plot_result(self, schema_loader, titanic_vd):
             name=f"{schema_loader}.test_LR_titanic",
             tol=1e-4,
             max_iter=100,
-            solver="Newton",
+            solver="newton",
         )
         stepwise_result = stepwise(
             model,
@@ -2190,7 +2190,7 @@ def test_additional_options_custom_height(self, titanic_vd):
         custom_height = 60
         custom_width = 70
         model = LogisticRegression(
-            name="test_LR_titanic", tol=1e-4, max_iter=100, solver="Newton"
+            name="test_LR_titanic", tol=1e-4, max_iter=100, solver="newton"
         )
         # Act
         stepwise_result = stepwise(
