Skip to content

Commit dd5c6c4

Browse files
authored
Better lint (#132)
* Better lint * fix one unit test * fix wrong change * update cp * warning * mac * update mac
1 parent 3a1f14b commit dd5c6c4

File tree

83 files changed

+246
-217
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

83 files changed

+246
-217
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@ _doc/examples/plot_*.onnx
5151
_doc/examples/plot_*.xlsx
5252
_doc/_static/require.js
5353
_doc/_static/viz.js
54+
_doc/sg_execution_times.rst
5455
_unittests/ut__main/*.png
5556
_unittests/test_constants.h
5657
mlinsights/_config.py

CHANGELOGS.rst

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,12 @@
1-
21
===========
32
Change Logs
43
===========
54

5+
0.5.1
6+
=====
7+
8+
* :pr:`132` builds against scikit-learn==1.5.0, python 3.12
9+
610
0.5.0
711
=====
812

_doc/examples/plot_constraint_kmeans.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@
6262

6363
colors = "brgy"
6464
fig, ax = plt.subplots(1, 1, figsize=(4, 4))
65-
for i in range(0, max(cl) + 1):
65+
for i in range(max(cl) + 1):
6666
ax.plot(X[cl == i, 0], X[cl == i, 1], colors[i] + ".", label="cl%d" % i)
6767
x = [km.cluster_centers_[i, 0], km.cluster_centers_[i, 0]]
6868
y = [km.cluster_centers_[i, 1], km.cluster_centers_[i, 1]]
@@ -97,7 +97,7 @@
9797
#
9898

9999
fig, ax = plt.subplots(1, 2, figsize=(10, 4))
100-
for i in range(0, max(cl1) + 1):
100+
for i in range(max(cl1) + 1):
101101
ax[0].plot(X[cl1 == i, 0], X[cl1 == i, 1], colors[i] + ".", label="cl%d" % i)
102102
ax[1].plot(X[cl2 == i, 0], X[cl2 == i, 1], colors[i] + ".", label="cl%d" % i)
103103
x = [km1.cluster_centers_[i, 0], km1.cluster_centers_[i, 0]]
@@ -135,7 +135,7 @@ def plot_delaunay(ax, edges, points):
135135

136136

137137
fig, ax = plt.subplots(1, 2, figsize=(10, 4))
138-
for i in range(0, max(cl) + 1):
138+
for i in range(max(cl) + 1):
139139
ax[0].plot(X[cl == i, 0], X[cl == i, 1], colors[i] + ".", label="cl%d" % i)
140140
x = [km.cluster_centers_[i, 0], km.cluster_centers_[i, 0]]
141141
y = [km.cluster_centers_[i, 1], km.cluster_centers_[i, 1]]
@@ -145,7 +145,7 @@ def plot_delaunay(ax, edges, points):
145145

146146
cls = km.cluster_centers_iter_
147147
ax[1].plot(X[:, 0], X[:, 1], ".", label="X", color="#AAAAAA", ms=3)
148-
for i in range(0, max(cl) + 1):
148+
for i in range(max(cl) + 1):
149149
ms = numpy.arange(cls.shape[-1]).astype(numpy.float64) / cls.shape[-1] * 50 + 1
150150
ax[1].scatter(cls[i, 0, :], cls[i, 1, :], color=colors[i], s=ms, label="cl%d" % i)
151151
plot_delaunay(ax[1], edges, km.cluster_centers_)

_doc/examples/plot_logistic_regression_clustering.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,8 @@
2323
Xs = []
2424
Ys = []
2525
n = 20
26-
for i in range(0, 5):
27-
for j in range(0, 4):
26+
for i in range(5):
27+
for j in range(4):
2828
x1 = numpy.random.rand(n) + i * 1.1
2929
x2 = numpy.random.rand(n) + j * 1.1
3030
Xs.append(numpy.vstack([x1, x2]).T)
@@ -41,7 +41,7 @@
4141
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
4242
for i in set(Y):
4343
ax.plot(
44-
X[Y == i, 0], X[Y == i, 1], "o", label="cl%d" % i, color=plt.cm.tab20.colors[i]
44+
X[i == Y, 0], X[i == Y, 1], "o", label="cl%d" % i, color=plt.cm.tab20.colors[i]
4545
)
4646
ax.legend()
4747
ax.set_title("Classification not convex")

_doc/examples/plot_piecewise_linear_regression_criterion.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
"""
22
Custom DecisionTreeRegressor adapted to a linear regression
33
===========================================================
4-
4+
55
A :class:`sklearn.tree.DecisionTreeRegressor`
6-
can be trained with a couple of possible criterions but it is possible
6+
can be trained with a couple of possible criterions but it is possible
77
to implement a custom one (see `hellinger_distance_criterion
88
<https://github.com/EvgeniDubov/hellinger-distance-criterion/blob/master/hellinger_distance_criterion.pyx>`_).
99
See also tutorial

_doc/examples/plot_search_images_torch.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@
147147
imgs = datasets.ImageFolder("simages", trans)
148148
dataloader = DataLoader(imgs, batch_size=1, shuffle=True, num_workers=1)
149149
img_seq = iter(dataloader)
150-
imgs = list(img[0] for i, img in zip(range(2), img_seq))
150+
imgs = [img[0] for i, img in zip(range(2), img_seq)]
151151
#######################################
152152
#
153153

@@ -182,7 +182,7 @@
182182
ConcatDataset([imgs1, imgs2]), batch_size=1, shuffle=True, num_workers=1
183183
)
184184
img_seq = iter(dataloader)
185-
imgs = list(img[0] for i, img in zip(range(10), img_seq))
185+
imgs = [img[0] for i, img in zip(range(10), img_seq)]
186186
#######################################
187187
#
188188

@@ -259,7 +259,7 @@
259259
imgs = datasets.ImageFolder("simages", trans)
260260
dataloader = DataLoader(imgs, batch_size=1, shuffle=False, num_workers=1)
261261
img_seq = iter(dataloader)
262-
imgs = list(img[0] for img in img_seq)
262+
imgs = [img[0] for img in img_seq]
263263

264264
all_outputs = [model.forward(img).detach().numpy().ravel() for img in imgs]
265265

_doc/examples/plot_sklearn_transformed_target.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,7 @@ def evaluation():
202202
rnd = []
203203
perf_reg = []
204204
perf_clr = []
205-
for rs in range(0, 200):
205+
for rs in range(200):
206206
rnd.append(rs)
207207
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=rs)
208208
reg = LinearRegression()
@@ -247,7 +247,7 @@ def evaluation2():
247247
perf_clr = []
248248
acc_reg = []
249249
acc_clr = []
250-
for rs in range(0, 50):
250+
for rs in range(50):
251251
rnd.append(rs)
252252
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=rs)
253253
reg = LinearRegression()
@@ -368,7 +368,7 @@ def permute(y):
368368

369369

370370
rows = []
371-
for i in range(0, 10):
371+
for _i in range(10):
372372
regpt = TransformedTargetRegressor2(LinearRegression(), transformer="permute")
373373
regpt.fit(X_train, y_train)
374374
logpt = TransformedTargetClassifier2(

_doc/examples/plot_visualize_pipeline.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -238,6 +238,6 @@
238238
# Every piece behaves the same way.
239239

240240

241-
for coor, model, vars in enumerate_pipeline_models(model):
241+
for coor, m, _vars in enumerate_pipeline_models(model):
242242
print(coor)
243-
print(model._debug)
243+
print(m._debug)

_unittests/ut_helpers/test_debug.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
# -*- coding: utf-8 -*-
21
import unittest
32
import numpy.random
43
from sklearn.linear_model import LinearRegression, LogisticRegression

_unittests/ut_helpers/test_parameters.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
# -*- coding: utf-8 -*-
21
import unittest
32
from mlinsights.ext_test_case import ExtTestCase
43
from mlinsights.helpers.parameters import format_value

0 commit comments

Comments (0)