Skip to content

Commit

Permalink
fixed flake8 errors
Browse files Browse the repository at this point in the history
  • Loading branch information
tihom committed Aug 24, 2024
1 parent 23af715 commit 1812f22
Show file tree
Hide file tree
Showing 6 changed files with 8 additions and 13 deletions.
3 changes: 1 addition & 2 deletions mango/domain/domain_space.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import math
import numpy as np
from collections.abc import Callable
import warnings
from itertools import compress
from functools import cached_property
import warnings
Expand Down Expand Up @@ -272,7 +271,7 @@ def classify_parameters(param_dict: dict) -> (set, set, set, set):
try:
# this check takes care of numpy ints as well
all_int = all(
x == int(x) and type(x) != bool for x in param_dict[par]
x == int(x) and not isinstance(x, bool) for x in param_dict[par]
)
except (ValueError, TypeError):
all_int = False
Expand Down
2 changes: 0 additions & 2 deletions mango/domain/parameter_sampler.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from abc import ABCMeta, abstractmethod

import warnings

import numpy as np
Expand Down
2 changes: 0 additions & 2 deletions mango/metatuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@
from tqdm.auto import tqdm
import random


## setting warnings to ignore for now
import warnings

warnings.filterwarnings("ignore")
Expand Down
10 changes: 5 additions & 5 deletions mango/optimizer/bayesian_learning.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,8 @@ def Get_Upper_Confidence_Bound(self, X):
return Value

"""
Returns the most optimal x along with mean value from the domain of x and making sure it is not a Duplicate (depending on closeness)
Returns the most optimal x along with mean value from the domain of x and
making sure it is not a Duplicate (depending on closeness)
used in batch setting: As mean is also returned
"""

Expand All @@ -128,7 +129,7 @@ def remove_duplicates(self, X, X_Sample, mu, Value):
# check if x_optimal is in X_Sample
check_closeness = self.closeness(x_optimal, X_Sample)

if check_closeness == False: # No close element to x_optimal in X_Sample
if check_closeness is False: # No close element to x_optimal in X_Sample
break

# we will look for next optimal value to try
Expand Down Expand Up @@ -159,7 +160,7 @@ def remove_duplicates_serial(self, X, X_Sample, Value):
# check if x_optimal is in X_Sample
check_closeness = self.closeness(x_optimal, X_Sample)

if check_closeness == False: # No close element to x_optimal in X_Sample
if check_closeness is False: # No close element to x_optimal in X_Sample
break

# we will look for next optimal value to try
Expand Down Expand Up @@ -287,7 +288,7 @@ def remove_duplicates_MetaTuner(self, X, X_Sample, mu, Value, Value_ext):
# check if x_optimal is in X_Sample
check_closeness = self.closeness(x_optimal, X_Sample)

if check_closeness == False: # No close element to x_optimal in X_Sample
if check_closeness is False: # No close element to x_optimal in X_Sample
break

# we will look for next optimal value to try
Expand Down Expand Up @@ -386,7 +387,6 @@ def get_next_batch_MetaTuner(

try:
self.surrogate.fit(X_temp, Y_temp)

except:
print("*" * 100)
print(X_temp)
Expand Down
1 change: 0 additions & 1 deletion mango/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ def wrapper(params_batch):


def celery(n_jobs, timeout=None):
import celery
from celery import exceptions

def decorator(func):
Expand Down
3 changes: 2 additions & 1 deletion mango/tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,8 @@ def runBayesianOptimizer(self):

if len(Y_next_list) == 0:
# no values returned
# this is problematic if domain is small and same value is tried again in the next iteration as the optimizer would be stuck
# this is problematic if domain is small and same value is tried again
# in the next iteration as the optimizer would be stuck
continue

Y_next_batch = Y_next_list.reshape(len(Y_next_list), 1)
Expand Down

0 comments on commit 1812f22

Please sign in to comment.