
Commit

Fix errors
pobonomo committed Jan 2, 2025
1 parent 8893a6e commit 2e03399
Showing 8 changed files with 31 additions and 48 deletions.
1 change: 0 additions & 1 deletion docs/examples/example2_student_admission.py
@@ -82,7 +82,6 @@
"""

import gurobipy as gp
import gurobipy_pandas as gppd
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression
25 changes: 14 additions & 11 deletions notebooks/adversarial/adversarial_logistic_regression.ipynb
@@ -34,8 +34,7 @@
"\n",
"from sklearn.datasets import fetch_openml\n",
"from sklearn.linear_model import LogisticRegression\n",
"from sklearn.pipeline import make_pipeline\n",
"from sklearn.preprocessing import StandardScaler"
"from sklearn.pipeline import make_pipeline"
]
},
{
@@ -66,7 +65,8 @@
"outputs": [],
"source": [
"import sys\n",
"sys.path.append('../../src')"
"\n",
"sys.path.append(\"../../src\")"
]
},
{
@@ -77,8 +77,7 @@
},
"outputs": [],
"source": [
"from gurobi_ml import add_predictor_constr\n",
"from gurobi_ml.sklearn import logistic_regression"
"from gurobi_ml import add_predictor_constr"
]
},
{
@@ -98,7 +97,9 @@
"# Flatten and scale the data\n",
"\n",
"X = X.astype(np.float64) / 255.0\n",
"clf = LogisticRegression(C=50.0 / 500, penalty=\"l1\", solver=\"saga\", tol=0.1, random_state=4)\n",
"clf = LogisticRegression(\n",
" C=50.0 / 500, penalty=\"l1\", solver=\"saga\", tol=0.1, random_state=4\n",
")\n",
"pipeline = make_pipeline(clf)\n",
"pipeline.fit(X, y)"
]
@@ -122,7 +123,7 @@
},
"outputs": [],
"source": [
"ex_prob = clf.predict_proba(X[imageno:imageno+1, :])\n",
"ex_prob = clf.predict_proba(X[imageno : imageno + 1, :])\n",
"sorted_labels = np.argsort(ex_prob)[0]\n",
"right_label = sorted_labels[-1]\n",
"wrong_label = sorted_labels[-2]"
@@ -138,7 +139,7 @@
"source": [
"image = X[imageno, :]\n",
"plt.imshow(image.reshape((28, 28)), cmap=\"gray\")\n",
"label = clf.predict(image.reshape(1,-1))\n",
"label = clf.predict(image.reshape(1, -1))\n",
"print(f\"Solution is classified as {label}\")"
]
},
@@ -165,7 +166,7 @@
"delta = 10\n",
"\n",
"x = m.addMVar(image.shape, lb=0.0, ub=1.0, name=\"x\")\n",
"y = m.addMVar(ex_prob.shape, lb=-float('inf'), name=\"y\")\n",
"y = m.addMVar(ex_prob.shape, lb=-float(\"inf\"), name=\"y\")\n",
"\n",
"abs_diff = m.addMVar(image.shape, lb=0, ub=1, name=\"abs_diff\")\n",
"\n",
@@ -177,7 +178,9 @@
"m.addConstr(abs_diff >= -x + image)\n",
"m.addConstr(abs_diff.sum() <= delta)\n",
"\n",
"pred_constr = add_predictor_constr(m, pipeline.steps[-1][1], x, y, epsilon=0, predict_function='predict_proba')\n",
"pred_constr = add_predictor_constr(\n",
" m, pipeline.steps[-1][1], x, y, epsilon=0, predict_function=\"predict_proba\"\n",
")\n",
"\n",
"pred_constr.print_stats()"
]
@@ -214,7 +217,7 @@
"outputs": [],
"source": [
"plt.imshow(x.X.reshape((28, 28)), cmap=\"gray\")\n",
"label = pipeline.predict(x.X.reshape(1,-1))\n",
"label = pipeline.predict(x.X.reshape(1, -1))\n",
"print(f\"Solution is classified as {label}\")"
]
},
22 changes: 10 additions & 12 deletions notebooks/adversarial/adversarial_softmax.ipynb
@@ -33,8 +33,7 @@
"\n",
"from sklearn.datasets import fetch_openml\n",
"from sklearn.neural_network import MLPClassifier\n",
"from sklearn.pipeline import make_pipeline\n",
"from sklearn.preprocessing import StandardScaler"
"from sklearn.pipeline import make_pipeline"
]
},
{
@@ -65,7 +64,8 @@
"outputs": [],
"source": [
"import sys\n",
"sys.path.append('../../src')"
"\n",
"sys.path.append(\"../../src\")"
]
},
{
@@ -76,9 +76,7 @@
},
"outputs": [],
"source": [
"from gurobi_ml import add_predictor_constr\n",
"from gurobi_ml.sklearn import mlpregressor\n",
"from gurobi_ml.modeling import softmax"
"from gurobi_ml import add_predictor_constr"
]
},
{
@@ -112,7 +110,7 @@
"outputs": [],
"source": [
"X = X.astype(np.float64) / 255.0\n",
"clf = MLPClassifier([20,20])\n",
"clf = MLPClassifier([20, 20])\n",
"pipeline = make_pipeline(clf)\n",
"pipeline.fit(X, Y)"
]
@@ -136,7 +134,7 @@
},
"outputs": [],
"source": [
"ex_prob = clf.predict_proba(X[imageno:imageno+1, :])\n",
"ex_prob = clf.predict_proba(X[imageno : imageno + 1, :])\n",
"sorted_labels = np.argsort(ex_prob)[0]\n",
"right_label = sorted_labels[-1]\n",
"wrong_label = sorted_labels[-2]"
@@ -171,7 +169,7 @@
"outputs": [],
"source": [
"plt.imshow(image.reshape((28, 28)), cmap=\"gray\")\n",
"label = clf.predict(image.reshape(1,-1))\n",
"label = clf.predict(image.reshape(1, -1))\n",
"print(f\"Solution is classified as {label}\")"
]
},
@@ -198,7 +196,7 @@
"m.addConstr(abs_diff.sum() <= delta)\n",
"\n",
"# Gurobi ML magic\n",
"pred_constr = add_predictor_constr(m, clf, x, y, out_activation='softmax')\n",
"pred_constr = add_predictor_constr(m, clf, x, y, out_activation=\"softmax\")\n",
"\n",
"pred_constr.print_stats()"
]
@@ -211,7 +209,7 @@
},
"outputs": [],
"source": [
"m.Params.OBBT=3\n",
"m.Params.OBBT = 3\n",
"m.optimize()"
]
},
@@ -224,7 +222,7 @@
"outputs": [],
"source": [
"plt.imshow(x.X.reshape((28, 28)), cmap=\"gray\")\n",
"label = pipeline.predict(x.X.reshape(1,-1))\n",
"label = pipeline.predict(x.X.reshape(1, -1))\n",
"print(f\"Solution is classified as {label}\")"
]
},
4 changes: 2 additions & 2 deletions src/gurobi_ml/keras/keras.py
@@ -112,7 +112,7 @@ def _mip_model(self, **kwargs):
pass
elif isinstance(step, (keras.layers.ReLU, keras.layers.Softmax)):
layer = self._add_activation_layer(
_input, self.act_dict["relu"](), output, name=f"relu{i}", **kwargs
_input, self.act_dict["relu"], output, name=f"relu{i}", **kwargs
)
_input = layer.output
else:
@@ -127,7 +127,7 @@ def _mip_model(self, **kwargs):
_input,
weights,
bias,
self.act_dict[activation](),
self.act_dict[activation],
output,
name=f"dense{i}",
**kwargs,
6 changes: 1 addition & 5 deletions src/gurobi_ml/modeling/softmax.py
@@ -165,10 +165,6 @@ def softmax(
"""Add the prediction constraints to Gurobi."""
gp_model: gp.Model = predictor_model.gp_model
output: gp.MVar = predictor_model.output
try:
predict_function: str = predictor_model.predict_function
except AttributeError:
predict_function = "predict_proba"

if "epsilon" in kwargs:
epsilon = kwargs["epsilon"]
@@ -190,7 +186,7 @@

# Voila!
gp_model.addConstr(
output == exponentials / denominator[:, np.newaxis], name=f"multlog"
output == exponentials / denominator[:, np.newaxis], name="multlog"
)
else:
            # Oh boy, this is tedious; you really want to use Gurobi 12!
5 changes: 3 additions & 2 deletions src/gurobi_ml/sklearn/mlpregressor.py
@@ -16,6 +16,7 @@
"""Module for formulating a :external+sklearn:py:class:`sklearn.neural_network.MLPRegressor` in a
:external+gurobi:py:class:`Model`.
"""

from ..exceptions import NoModel
from ..modeling.neuralnet import BaseNNConstr
from .skgetter import SKClassifier, SKRegressor
@@ -149,7 +150,7 @@ def _mip_model(self, **kwargs):
neural_net,
f"No implementation for activation function {neural_net.activation}",
)
activation = self.act_dict[neural_net.activation]()
activation = self.act_dict[neural_net.activation]

input_vars = self._input
output = None
@@ -160,7 +161,7 @@

# For last layer change activation
if i == neural_net.n_layers_ - 2:
activation = self.act_dict[neural_net.out_activation_]()
activation = self.act_dict[neural_net.out_activation_]
output = self._output
if neural_net.out_activation_ in ("softmax", "logistic"):
kwargs["predict_function"] = self.predict_function
2 changes: 1 addition & 1 deletion src/gurobi_ml/torch/sequential.py
@@ -127,7 +127,7 @@ def _mip_model(self, **kwargs):
activation = self.activations[type(step)]
layer = self._add_activation_layer(
_input,
self.act_dict[activation](),
self.act_dict[activation],
output,
name=f"{activation}_{i}",
**kwargs,
14 changes: 0 additions & 14 deletions tests/test_sklearn/test_sklearn_formulations.py
@@ -70,11 +70,7 @@ def test_iris_proba(self):
data = datasets.load_iris()

X = data.data
y = data.target

# Make it a simple classification
X = X[y != 2]
y = y[y != 2]
cases = IrisBinaryCases()

for regressor in cases:
@@ -92,8 +88,6 @@ def test_iris_multi(self):
data = datasets.load_iris()

X = data.data
y = data.target

# Make it a simple classification
cases = IrisMultiCases()

@@ -106,11 +100,7 @@ def test_iris_clf(self):
data = datasets.load_iris()

X = data.data
y = data.target

# Make it a simple classification
X = X[y != 2]
y = y[y != 2]
cases = IrisBinaryCases()

for regressor in cases:
@@ -124,11 +114,7 @@ def test_iris_pwl_args(self):
data = datasets.load_iris()

X = data.data
y = data.target

# Make it a simple classification
X = X[y != 2]
y = y[y != 2]
cases = IrisBinaryCases()

for regressor in cases:
