From c21ac431478fbd87fb68ecd4d405ccc4f8a9e6e0 Mon Sep 17 00:00:00 2001
From: Avasam
Date: Sat, 15 Mar 2025 12:49:37 -0400
Subject: [PATCH 1/3] Apply flake8+pycodestyle autofixes

---
 pyproject.toml | 23 +++++++++++--
 stubs/matplotlib/legend_handler.pyi | 2 +-
 stubs/matplotlib/pyplot.pyi | 3 +-
 stubs/matplotlib/widgets.pyi | 3 --
 .../algorithms/approximation/kcomponents.pyi | 1 -
 .../algorithms/community/__init__.pyi | 33 +++++++------------
 .../algorithms/connectivity/connectivity.pyi | 3 +-
 .../networkx/algorithms/connectivity/cuts.pyi | 3 +-
 .../connectivity/disjoint_paths.pyi | 7 ++--
 stubs/networkx/algorithms/tree/branchings.pyi | 1 -
 .../algorithms/tree/decomposition.pyi | 1 -
 stubs/networkx/algorithms/tree/mst.pyi | 1 -
 stubs/networkx/classes/graph.pyi | 1 -
 stubs/networkx/classes/graphviews.pyi | 1 -
 stubs/networkx/classes/reportviews.pyi | 1 -
 stubs/networkx/generators/classic.pyi | 1 -
 stubs/networkx/generators/triads.pyi | 1 -
 stubs/skimage/__init__.pyi | 6 ++--
 stubs/skimage/future/graph/rag.pyi | 1 -
 stubs/skimage/measure/_regionprops.pyi | 1 -
 stubs/sklearn/base.pyi | 14 ++++----
 stubs/sklearn/calibration.pyi | 6 ++--
 .../sklearn/cluster/_affinity_propagation.pyi | 6 ++--
 stubs/sklearn/cluster/_agglomerative.pyi | 6 ++--
 stubs/sklearn/cluster/_bicluster.pyi | 3 +-
 stubs/sklearn/cluster/_birch.pyi | 6 ++--
 stubs/sklearn/cluster/_bisect_k_means.pyi | 8 ++---
 stubs/sklearn/cluster/_dbscan.pyi | 6 ++--
 stubs/sklearn/cluster/_kmeans.pyi | 8 ++---
 stubs/sklearn/cluster/_mean_shift.pyi | 6 ++--
 stubs/sklearn/cluster/_optics.pyi | 6 ++--
 stubs/sklearn/cluster/_spectral.pyi | 6 ++--
 stubs/sklearn/compose/_column_transformer.pyi | 3 +-
 stubs/sklearn/compose/_target.pyi | 6 ++--
 .../sklearn/covariance/_elliptic_envelope.pyi | 3 +-
 .../covariance/_empirical_covariance.pyi | 5 ++-
 stubs/sklearn/covariance/_graph_lasso.pyi | 12 +++----
 .../sklearn/covariance/_robust_covariance.pyi | 6 ++--
 .../sklearn/covariance/_shrunk_covariance.pyi | 5 ++-
 stubs/sklearn/cross_decomposition/_pls.pyi | 6 ++--
 stubs/sklearn/datasets/_samples_generator.pyi | 2 +-
 stubs/sklearn/decomposition/_base.pyi | 3 +-
 .../sklearn/decomposition/_dict_learning.pyi | 12 +++----
 .../decomposition/_factor_analysis.pyi | 6 ++--
 stubs/sklearn/decomposition/_fastica.pyi | 6 ++--
 .../decomposition/_incremental_pca.pyi | 3 +-
 stubs/sklearn/decomposition/_kernel_pca.pyi | 3 +-
 stubs/sklearn/decomposition/_lda.pyi | 5 ++-
 stubs/sklearn/decomposition/_nmf.pyi | 12 +++----
 stubs/sklearn/decomposition/_pca.pyi | 3 +-
 stubs/sklearn/decomposition/_sparse_pca.pyi | 3 +-
 .../sklearn/decomposition/_truncated_svd.pyi | 5 ++-
 stubs/sklearn/discriminant_analysis.pyi | 8 ++---
 stubs/sklearn/dummy.pyi | 8 ++---
 stubs/sklearn/ensemble/_bagging.pyi | 8 ++---
 stubs/sklearn/ensemble/_forest.pyi | 6 ++--
 stubs/sklearn/ensemble/_gb.pyi | 6 ++--
 .../_hist_gradient_boosting/binning.pyi | 3 +-
 .../gradient_boosting.pyi | 11 ++-----
 stubs/sklearn/ensemble/_iforest.pyi | 6 ++--
 stubs/sklearn/ensemble/_stacking.pyi | 5 ++-
 stubs/sklearn/ensemble/_voting.pyi | 3 +-
 stubs/sklearn/ensemble/_weight_boosting.pyi | 6 ++--
 stubs/sklearn/feature_extraction/__init__.pyi | 2 +-
 .../feature_extraction/_dict_vectorizer.pyi | 7 ++--
 stubs/sklearn/feature_extraction/_hash.pyi | 5 ++-
 stubs/sklearn/feature_extraction/image.pyi | 3 +-
 stubs/sklearn/feature_extraction/text.pyi | 16 ++++-----
 .../sklearn/feature_selection/_from_model.pyi | 3 +-
 stubs/sklearn/feature_selection/_rfe.pyi | 3 +-
 .../sklearn/feature_selection/_sequential.pyi | 6 ++--
 .../_univariate_selection.pyi | 6 ++--
 .../feature_selection/_variance_threshold.pyi | 3 +-
 stubs/sklearn/gaussian_process/_gpc.pyi | 5 ++-
 stubs/sklearn/gaussian_process/_gpr.pyi | 8 ++---
 stubs/sklearn/gaussian_process/kernels.pyi | 12 +++----
 stubs/sklearn/impute/_base.pyi | 10 +++---
 stubs/sklearn/impute/_iterative.pyi | 6 ++--
 stubs/sklearn/impute/_knn.pyi | 3 +-
 stubs/sklearn/isotonic.pyi | 8 ++---
 stubs/sklearn/kernel_approximation.pyi | 8 ++---
 stubs/sklearn/kernel_ridge.pyi | 3 +-
 stubs/sklearn/linear_model/_base.pyi | 10 +++---
 stubs/sklearn/linear_model/_bayes.pyi | 3 +-
 .../linear_model/_coordinate_descent.pyi | 10 +++---
 stubs/sklearn/linear_model/_glm/glm.pyi | 5 ++-
 stubs/sklearn/linear_model/_huber.pyi | 3 +-
 stubs/sklearn/linear_model/_least_angle.pyi | 8 ++---
 stubs/sklearn/linear_model/_logistic.pyi | 8 ++---
 stubs/sklearn/linear_model/_omp.pyi | 6 ++--
 stubs/sklearn/linear_model/_quantile.pyi | 5 ++-
 stubs/sklearn/linear_model/_ransac.pyi | 6 ++--
 stubs/sklearn/linear_model/_ridge.pyi | 8 ++---
 .../linear_model/_stochastic_gradient.pyi | 6 ++--
 stubs/sklearn/linear_model/_theil_sen.pyi | 6 ++--
 stubs/sklearn/manifold/_isomap.pyi | 5 ++-
 stubs/sklearn/manifold/_locally_linear.pyi | 3 +-
 stubs/sklearn/manifold/_mds.pyi | 6 ++--
 .../sklearn/manifold/_spectral_embedding.pyi | 6 ++--
 .../metrics/_plot/confusion_matrix.pyi | 1 -
 stubs/sklearn/mixture/_base.pyi | 6 ++--
 stubs/sklearn/model_selection/_search.pyi | 14 ++++----
 .../_search_successive_halving.pyi | 3 +-
 stubs/sklearn/model_selection/_split.pyi | 2 +-
 stubs/sklearn/multiclass.pyi | 12 +++----
 stubs/sklearn/multioutput.pyi | 5 ++-
 stubs/sklearn/naive_bayes.pyi | 7 ++--
 stubs/sklearn/neighbors/_classification.pyi | 6 ++--
 stubs/sklearn/neighbors/_kde.pyi | 5 ++-
 stubs/sklearn/neighbors/_lof.pyi | 6 ++--
 stubs/sklearn/neighbors/_nca.pyi | 8 ++---
 stubs/sklearn/neighbors/_nearest_centroid.pyi | 6 ++--
 stubs/sklearn/neighbors/_regression.pyi | 6 ++--
 .../neural_network/_multilayer_perceptron.pyi | 10 +++---
 stubs/sklearn/neural_network/_rbm.pyi | 8 ++---
 stubs/sklearn/pipeline.pyi | 3 +-
 stubs/sklearn/preprocessing/_data.pyi | 6 ++--
 .../sklearn/preprocessing/_discretization.pyi | 6 ++--
 stubs/sklearn/preprocessing/_encoders.pyi | 9 ++---
 .../preprocessing/_function_transformer.pyi | 6 ++--
 stubs/sklearn/preprocessing/_label.pyi | 12 +++----
 stubs/sklearn/preprocessing/_polynomial.pyi | 6 ++--
 stubs/sklearn/random_projection.pyi | 8 ++---
 .../semi_supervised/_label_propagation.pyi | 8 ++---
 .../semi_supervised/_self_training.pyi | 6 ++--
 stubs/sklearn/svm/_base.pyi | 8 ++---
 stubs/sklearn/svm/_classes.pyi | 3 +-
 stubs/sklearn/tree/_classes.pyi | 10 +++---
 stubs/sklearn/utils/__init__.pyi | 2 +-
 stubs/sklearn/utils/_mocking.pyi | 3 +-
 stubs/sklearn/utils/_testing.pyi | 2 +-
 stubs/sklearn/utils/estimator_checks.pyi | 1 -
 stubs/sympy-stubs/matrices/immutable.pyi | 2 --
 stubs/vispy/app/canvas.pyi | 2 --
 stubs/vispy/geometry/generation.pyi | 1 -
 stubs/vispy/gloo/buffer.pyi | 1 -
 stubs/vispy/gloo/context.pyi | 1 -
 stubs/vispy/gloo/framebuffer.pyi | 1 -
 stubs/vispy/gloo/program.pyi | 1 -
 stubs/vispy/gloo/texture.pyi | 2 +-
 stubs/vispy/io/wavefront.pyi | 1 -
 stubs/vispy/plot/plotwidget.pyi | 1 -
 stubs/vispy/scene/canvas.pyi | 2 --
 stubs/vispy/scene/node.pyi | 2 --
 stubs/vispy/scene/visuals.pyi | 1 -
 stubs/vispy/scene/widgets/console.pyi | 1 -
 stubs/vispy/util/fonts/__init__.pyi | 4 +--
 stubs/vispy/util/logs.pyi | 1 -
 stubs/vispy/util/transforms.pyi | 1 -
 stubs/vispy/visuals/_scalable_textures.pyi | 1 -
 .../vispy/visuals/collections/collection.pyi | 1 -
 stubs/vispy/visuals/filters/mesh.pyi | 1 -
 stubs/vispy/visuals/gridlines.pyi | 1 -
 stubs/vispy/visuals/isosurface.pyi | 1 -
 stubs/vispy/visuals/mesh.pyi | 2 --
 stubs/vispy/visuals/rectangle.pyi | 1 -
 stubs/vispy/visuals/tube.pyi | 1 -
 tests/run_hygiene.py | 18 +++++-----
 utils/validate_stubs.py | 3 +-
 159 files changed, 308 insertions(+), 526 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 42e32ed6..d061aa63 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,16 +27,19 @@ line-length = 130
 target-version = "py39"
 
 [tool.ruff.lint]
-# TODO: Use extend-select instead to get base E and F rules that don't conflict with the formatter
-select = [
+extend-select = [
     "FA", # flake8-future-annotations
     "I", # isort
     "PYI", # flake8-pyi
+    "W", # pycodestyle Warning
 ]
 ignore = [
     ###
     # Rules we don't want or don't agree with
     ###
+    # Used for direct, non-subclass type comparison, for example: `type(val) is str`
+    # see https://github.com/astral-sh/ruff/issues/6465
+    "E721", # Do not compare types, use `isinstance()`
     # Typeshed doesn't want complex or non-literal defaults, or long strings, for maintenance and testing reasons.
     # This doesn't affect us, let's have more complete stubs.
     "PYI011",
@@ -46,6 +49,11 @@ ignore = [
     # TODO: Handle in its own PR
     "PYI021",
     # https://github.com/microsoft/python-type-stubs/pull/343
+    # TODO: Fixing these would change which symbols are even visible for Pylance.
+    # Which may negatively affect users, especially if the symbol wasn't meant to be re-exported.
+    # Manually evaluate each violation.
+    "F401",
+
     # TODO: Investigate and fix or configure
     "PYI001",
     "PYI002",
@@ -56,6 +64,17 @@ ignore = [
     "PYI051",
     # Request for autofix: https://github.com/astral-sh/ruff/issues/14185
     "PYI052",
 ]
+[tool.ruff.lint.per-file-ignores]
+"*.pyi" = [
+    ###
+    # Rules that are out of the control of stub authors:
+    ###
+    "E743", # Ambiguous function name; stubs must follow implementation
+    "F403", # `from . import *` used; unable to detect undefined names
+    # Stubs can sometimes re-export entire modules.
+    # Issues with using a star-imported name will be caught by type-checkers.
+    "F405", # may be undefined, or defined from star imports
+]
 [tool.ruff.lint.isort]
 combine-as-imports = true
diff --git a/stubs/matplotlib/legend_handler.pyi b/stubs/matplotlib/legend_handler.pyi
index d3349acb..227e0d4e 100644
--- a/stubs/matplotlib/legend_handler.pyi
+++ b/stubs/matplotlib/legend_handler.pyi
@@ -1,5 +1,5 @@
 from collections.abc import Sequence
-from typing import Callable, Sequence
+from typing import Callable
 
 from .artist import Artist
 from .container import BarContainer
diff --git a/stubs/matplotlib/pyplot.pyi b/stubs/matplotlib/pyplot.pyi
index c9718094..8aea22a9 100644
--- a/stubs/matplotlib/pyplot.pyi
+++ b/stubs/matplotlib/pyplot.pyi
@@ -9,7 +9,6 @@ import numpy as np
 from matplotlib import rcParams as rcParams, style as style
 from matplotlib.contour import QuadContourSet
 
-from . import rcParams
 from ._typing import *
 from .artist import Artist
 from .axes import Axes as Axes
@@ -778,7 +777,7 @@ def tick_params(axis: Literal["x", "y", "both"] = ..., **kwargs): ...
def ticklabel_format( *, axis: Literal["x", "y", "both"] = ..., - style: Literal["sci", "scientific", "plain"] = ..., + style: Literal["sci", "scientific", "plain"] = ..., # noqa: F811 scilimits=..., useOffset: bool | float = ..., useLocale: bool = ..., diff --git a/stubs/matplotlib/widgets.pyi b/stubs/matplotlib/widgets.pyi index 91a6ed17..8b60a60f 100644 --- a/stubs/matplotlib/widgets.pyi +++ b/stubs/matplotlib/widgets.pyi @@ -8,9 +8,6 @@ import numpy as np from matplotlib.axes._axes import Axes from matplotlib.backend_bases import DrawEvent, Event, FigureCanvasBase, KeyEvent, MouseButton, MouseEvent from matplotlib.figure import Figure -from matplotlib.lines import Line2D -from matplotlib.patches import Ellipse, Rectangle -from matplotlib.transforms import Affine2D from numpy import float64, ndarray from numpy.typing import ArrayLike from PIL.Image import Image diff --git a/stubs/networkx/algorithms/approximation/kcomponents.pyi b/stubs/networkx/algorithms/approximation/kcomponents.pyi index 87a521fa..c0e0a1e2 100644 --- a/stubs/networkx/algorithms/approximation/kcomponents.pyi +++ b/stubs/networkx/algorithms/approximation/kcomponents.pyi @@ -2,7 +2,6 @@ import itertools from collections import defaultdict from collections.abc import Mapping from functools import cached_property -from typing import Mapping from ...classes.graph import Graph from ...exception import NetworkXError diff --git a/stubs/networkx/algorithms/community/__init__.pyi b/stubs/networkx/algorithms/community/__init__.pyi index f728ea7b..fd1d0c64 100644 --- a/stubs/networkx/algorithms/community/__init__.pyi +++ b/stubs/networkx/algorithms/community/__init__.pyi @@ -1,22 +1,11 @@ -from .asyn_fluid import asyn_fluidc as asyn_fluidc -from .centrality import girvan_newman as girvan_newman -from .community_utils import is_partition as is_partition -from .kclique import k_clique_communities as k_clique_communities -from .kernighan_lin import kernighan_lin_bisection as kernighan_lin_bisection -from .label_propagation import ( - asyn_lpa_communities as asyn_lpa_communities, - asyn_lpa_communities as label_propagation_communities, - label_propagation_communities as label_propagation_communities, -) -from .louvain import louvain_communities as louvain_communities, louvain_partitions as louvain_partitions -from .lukes import lukes_partitioning as lukes_partitioning -from .modularity_max import ( - greedy_modularity_communities as greedy_modularity_communities, - naive_greedy_modularity_communities as naive_greedy_modularity_communities, -) -from .quality import ( - coverage as coverage, - modularity as modularity, - partition_quality as partition_quality, - performance as performance, -) +from networkx.algorithms.community.asyn_fluid import * +from networkx.algorithms.community.centrality import * +from networkx.algorithms.community.community_utils import * +from networkx.algorithms.community.divisive import * +from networkx.algorithms.community.kclique import * +from networkx.algorithms.community.kernighan_lin import * +from networkx.algorithms.community.label_propagation import * +from networkx.algorithms.community.louvain import * +from networkx.algorithms.community.lukes import * +from networkx.algorithms.community.modularity_max import * +from networkx.algorithms.community.quality import * diff --git a/stubs/networkx/algorithms/connectivity/connectivity.pyi b/stubs/networkx/algorithms/connectivity/connectivity.pyi index 11cc0532..77069377 100644 --- a/stubs/networkx/algorithms/connectivity/connectivity.pyi +++ 
b/stubs/networkx/algorithms/connectivity/connectivity.pyi @@ -6,11 +6,10 @@ from typing import Mapping # connectivity algorithms. from ...algorithms.flow import boykov_kolmogorov, build_residual_network, dinitz, edmonds_karp, shortest_augmenting_path from ...classes.graph import Graph +from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity default_flow_func = ... -from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity - __all__ = [ "average_node_connectivity", "local_node_connectivity", diff --git a/stubs/networkx/algorithms/connectivity/cuts.pyi b/stubs/networkx/algorithms/connectivity/cuts.pyi index 6f150cca..1bf3dde7 100644 --- a/stubs/networkx/algorithms/connectivity/cuts.pyi +++ b/stubs/networkx/algorithms/connectivity/cuts.pyi @@ -4,11 +4,10 @@ import itertools # cut algorithms. from ...algorithms.flow import build_residual_network, edmonds_karp from ...classes.graph import Graph +from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity default_flow_func = ... -from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity - __all__ = [ "minimum_st_node_cut", "minimum_node_cut", diff --git a/stubs/networkx/algorithms/connectivity/disjoint_paths.pyi b/stubs/networkx/algorithms/connectivity/disjoint_paths.pyi index 09704684..e51a3ce6 100644 --- a/stubs/networkx/algorithms/connectivity/disjoint_paths.pyi +++ b/stubs/networkx/algorithms/connectivity/disjoint_paths.pyi @@ -1,15 +1,16 @@ # Define the default maximum flow function to use for the undelying # maximum flow computations +from itertools import filterfalse as _filterfalse + from ...algorithms.flow import edmonds_karp, preflow_push, shortest_augmenting_path from ...classes.graph import Graph from ...exception import NetworkXNoPath -default_flow_func = ... -from itertools import filterfalse as _filterfalse - # Functions to build auxiliary data structures. from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity +default_flow_func = ... 
+ __all__ = ["edge_disjoint_paths", "node_disjoint_paths"] def edge_disjoint_paths( diff --git a/stubs/networkx/algorithms/tree/branchings.pyi b/stubs/networkx/algorithms/tree/branchings.pyi index a801b93d..a6b6470e 100644 --- a/stubs/networkx/algorithms/tree/branchings.pyi +++ b/stubs/networkx/algorithms/tree/branchings.pyi @@ -24,7 +24,6 @@ from operator import itemgetter from queue import PriorityQueue from typing import Literal -from ...algorithms.tree.branchings import ArborescenceIterator from ...classes.digraph import DiGraph from ...classes.graph import Graph from ...classes.multidigraph import MultiDiGraph diff --git a/stubs/networkx/algorithms/tree/decomposition.pyi b/stubs/networkx/algorithms/tree/decomposition.pyi index 474c4353..fd5b62a0 100644 --- a/stubs/networkx/algorithms/tree/decomposition.pyi +++ b/stubs/networkx/algorithms/tree/decomposition.pyi @@ -3,7 +3,6 @@ from itertools import combinations from networkx import Graph from ...algorithms import chordal_graph_cliques, complete_to_chordal_graph, moral -from ...classes.graph import Graph from ...utils import not_implemented_for __all__ = ["junction_tree"] diff --git a/stubs/networkx/algorithms/tree/mst.pyi b/stubs/networkx/algorithms/tree/mst.pyi index efbedf07..e9ad1104 100644 --- a/stubs/networkx/algorithms/tree/mst.pyi +++ b/stubs/networkx/algorithms/tree/mst.pyi @@ -6,7 +6,6 @@ from math import isnan from operator import itemgetter from queue import PriorityQueue -from ...algorithms.tree.mst import SpanningTreeIterator from ...classes.graph import Graph from ...utils import UnionFind, not_implemented_for, py_random_state diff --git a/stubs/networkx/classes/graph.pyi b/stubs/networkx/classes/graph.pyi index 87a5a855..a02ced9a 100644 --- a/stubs/networkx/classes/graph.pyi +++ b/stubs/networkx/classes/graph.pyi @@ -8,7 +8,6 @@ from numpy.typing import ArrayLike from ..classes.coreviews import AdjacencyView from ..classes.digraph import DiGraph -from ..classes.graph import Graph from ..classes.reportviews import DegreeView, EdgeView, NodeView from ..exception import NetworkXError diff --git a/stubs/networkx/classes/graphviews.pyi b/stubs/networkx/classes/graphviews.pyi index 922b61bf..ffd62a6f 100644 --- a/stubs/networkx/classes/graphviews.pyi +++ b/stubs/networkx/classes/graphviews.pyi @@ -4,7 +4,6 @@ from networkx import DiGraph, Graph from ..classes.coreviews import FilterAdjacency, FilterAtlas, FilterMultiAdjacency, UnionAdjacency, UnionMultiAdjacency from ..classes.filters import no_filter -from ..classes.graph import Graph from ..exception import NetworkXError from ..utils import not_implemented_for diff --git a/stubs/networkx/classes/reportviews.pyi b/stubs/networkx/classes/reportviews.pyi index d115c2c3..9c1843b9 100644 --- a/stubs/networkx/classes/reportviews.pyi +++ b/stubs/networkx/classes/reportviews.pyi @@ -2,7 +2,6 @@ from collections.abc import Mapping, Set as AbstractSet from typing import Any from ..classes.graph import Graph -from ..classes.reportviews import NodeDataView __all__ = [ "NodeView", diff --git a/stubs/networkx/generators/classic.pyi b/stubs/networkx/generators/classic.pyi index d4bc7451..e0e7c52a 100644 --- a/stubs/networkx/generators/classic.pyi +++ b/stubs/networkx/generators/classic.pyi @@ -3,7 +3,6 @@ import numbers from collections.abc import Iterable from ..classes import Graph -from ..classes.graph import Graph from ..exception import NetworkXError from ..utils import nodes_or_number, pairwise diff --git a/stubs/networkx/generators/triads.pyi b/stubs/networkx/generators/triads.pyi 
index 14a9c79b..f781863f 100644 --- a/stubs/networkx/generators/triads.pyi +++ b/stubs/networkx/generators/triads.pyi @@ -4,7 +4,6 @@ from networkx import DiGraph # Copyright 2011 Reya Group # Copyright 2011 Alex Levenson # Copyright 2011 Diederik van Liere -from ..classes import DiGraph __all__ = ["triad_graph"] diff --git a/stubs/skimage/__init__.pyi b/stubs/skimage/__init__.pyi index bd1130e1..41b3ebfa 100644 --- a/stubs/skimage/__init__.pyi +++ b/stubs/skimage/__init__.pyi @@ -1,11 +1,9 @@ -__version__: str = ... - -submodules: list = ... - from ._shared import lazy as lazy from ._shared.tester import PytestTester as PytestTester # noqa from ._shared.version_requirements import ensure_python_version as ensure_python_version +__version__: str = ... +submodules: list = ... __getattr__ = ... __lazy_dir__ = ... diff --git a/stubs/skimage/future/graph/rag.pyi b/stubs/skimage/future/graph/rag.pyi index f5f0dfa9..3132a748 100644 --- a/stubs/skimage/future/graph/rag.pyi +++ b/stubs/skimage/future/graph/rag.pyi @@ -7,7 +7,6 @@ from numpy.lib.stride_tricks import as_strided from scipy import ndimage as ndi, sparse from ..._shared.version_requirements import require -from .rag import RAG def _edge_generator_from_csr(csr_matrix): ... def min_weight(graph: RAG, src: int, dst: int, n: int) -> Mapping: ... diff --git a/stubs/skimage/measure/_regionprops.pyi b/stubs/skimage/measure/_regionprops.pyi index 7a0b2397..c0577c4c 100644 --- a/stubs/skimage/measure/_regionprops.pyi +++ b/stubs/skimage/measure/_regionprops.pyi @@ -12,7 +12,6 @@ from scipy.spatial.distance import pdist from . import _moments from ._find_contours import find_contours from ._marching_cubes_lewiner import marching_cubes -from ._regionprops import RegionProperties from ._regionprops_utils import euler_number, perimeter, perimeter_crofton __all__ = ["regionprops", "euler_number", "perimeter", "perimeter_crofton"] diff --git a/stubs/sklearn/base.pyi b/stubs/sklearn/base.pyi index 9fbe1c41..3ac1419c 100644 --- a/stubs/sklearn/base.pyi +++ b/stubs/sklearn/base.pyi @@ -1,6 +1,12 @@ +import copy +import inspect +import platform +import re +import warnings from collections import defaultdict as defaultdict from typing import Any, ClassVar, Iterable, TypeVar +import numpy as np from numpy import ndarray from ._config import get_config as get_config @@ -16,14 +22,6 @@ BaseEstimator_Self = TypeVar("BaseEstimator_Self", bound=BaseEstimator) # Author: Gael Varoquaux # License: BSD 3 clause -import copy -import inspect -import platform -import re -import warnings - -import numpy as np - def clone(estimator: BaseEstimator | Iterable[BaseEstimator], *, safe: bool = True) -> Any: ... 
class BaseEstimator: diff --git a/stubs/sklearn/calibration.pyi b/stubs/sklearn/calibration.pyi index 07e49b44..5cc3127f 100644 --- a/stubs/sklearn/calibration.pyi +++ b/stubs/sklearn/calibration.pyi @@ -1,9 +1,11 @@ +import warnings from functools import partial as partial from inspect import signature as signature from math import log as log from numbers import Integral as Integral from typing import ClassVar, Iterable, Literal, TypeVar +import numpy as np from matplotlib.artist import Artist from matplotlib.axes import Axes from matplotlib.figure import Figure @@ -33,10 +35,6 @@ from .utils.validation import check_consistent_length as check_consistent_length CalibratedClassifierCV_Self = TypeVar("CalibratedClassifierCV_Self", bound=CalibratedClassifierCV) _SigmoidCalibration_Self = TypeVar("_SigmoidCalibration_Self", bound=_SigmoidCalibration) -import warnings - -import numpy as np - class CalibratedClassifierCV(ClassifierMixin, MetaEstimatorMixin, BaseEstimator): calibrated_classifiers_: list = ... feature_names_in_: ndarray = ... diff --git a/stubs/sklearn/cluster/_affinity_propagation.pyi b/stubs/sklearn/cluster/_affinity_propagation.pyi index ea4f77fa..d13689e0 100644 --- a/stubs/sklearn/cluster/_affinity_propagation.pyi +++ b/stubs/sklearn/cluster/_affinity_propagation.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -15,10 +17,6 @@ from ..utils.validation import check_is_fitted as check_is_fitted AffinityPropagation_Self = TypeVar("AffinityPropagation_Self", bound=AffinityPropagation) -import warnings - -import numpy as np - ############################################################################### # Public API diff --git a/stubs/sklearn/cluster/_agglomerative.pyi b/stubs/sklearn/cluster/_agglomerative.pyi index f895fa88..a0526353 100644 --- a/stubs/sklearn/cluster/_agglomerative.pyi +++ b/stubs/sklearn/cluster/_agglomerative.pyi @@ -1,7 +1,9 @@ +import warnings from heapq import heapify as heapify, heappop as heappop, heappush as heappush, heappushpop as heappushpop from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, Set, TypeVar +import numpy as np from joblib import Memory from numpy import ndarray from scipy import sparse as sparse @@ -22,10 +24,6 @@ from ._feature_agglomeration import AgglomerationTransform FeatureAgglomeration_Self = TypeVar("FeatureAgglomeration_Self", bound=FeatureAgglomeration) AgglomerativeClustering_Self = TypeVar("AgglomerativeClustering_Self", bound=AgglomerativeClustering) -import warnings - -import numpy as np - ############################################################################### # Hierarchical tree building functions diff --git a/stubs/sklearn/cluster/_bicluster.pyi b/stubs/sklearn/cluster/_bicluster.pyi index de7b3580..bed6096f 100644 --- a/stubs/sklearn/cluster/_bicluster.pyi +++ b/stubs/sklearn/cluster/_bicluster.pyi @@ -2,6 +2,7 @@ from abc import ABCMeta, abstractmethod from numbers import Integral as Integral from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.linalg import norm as norm @@ -22,8 +23,6 @@ from . 
import KMeans as KMeans, MiniBatchKMeans as MiniBatchKMeans BaseSpectral_Self = TypeVar("BaseSpectral_Self", bound=BaseSpectral) -import numpy as np - __all__ = ["SpectralCoclustering", "SpectralBiclustering"] class BaseSpectral(BiclusterMixin, BaseEstimator, metaclass=ABCMeta): diff --git a/stubs/sklearn/cluster/_birch.pyi b/stubs/sklearn/cluster/_birch.pyi index 6ffe9714..9e1801e9 100644 --- a/stubs/sklearn/cluster/_birch.pyi +++ b/stubs/sklearn/cluster/_birch.pyi @@ -1,7 +1,9 @@ +import warnings from math import sqrt as sqrt from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from scipy import sparse as sparse from scipy.sparse import spmatrix @@ -24,10 +26,6 @@ Birch_Self = TypeVar("Birch_Self", bound=Birch) # Joel Nothman # License: BSD 3 clause -import warnings - -import numpy as np - class _CFNode: squared_norm_: ndarray = ... centroids_: ndarray = ... diff --git a/stubs/sklearn/cluster/_bisect_k_means.pyi b/stubs/sklearn/cluster/_bisect_k_means.pyi index 2304cc74..4036599c 100644 --- a/stubs/sklearn/cluster/_bisect_k_means.pyi +++ b/stubs/sklearn/cluster/_bisect_k_means.pyi @@ -1,5 +1,8 @@ +import warnings from typing import Any, Callable, ClassVar, Iterator, Literal, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState @@ -13,11 +16,6 @@ BisectingKMeans_Self = TypeVar("BisectingKMeans_Self", bound=BisectingKMeans) # Author: Michal Krawczyk -import warnings - -import numpy as np -import scipy.sparse as sp - class _BisectingTree: def __init__(self, center: ndarray, indices: ndarray, score: Float) -> None: ... def split(self, labels: ndarray, centers: ndarray, scores: ndarray) -> None: ... 
diff --git a/stubs/sklearn/cluster/_dbscan.pyi b/stubs/sklearn/cluster/_dbscan.pyi index 2140b07a..5c43bb36 100644 --- a/stubs/sklearn/cluster/_dbscan.pyi +++ b/stubs/sklearn/cluster/_dbscan.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from scipy import sparse as sparse @@ -18,10 +20,6 @@ DBSCAN_Self = TypeVar("DBSCAN_Self", bound=DBSCAN) # # License: BSD 3 clause -import warnings - -import numpy as np - def dbscan( X: MatrixLike, eps: Float = 0.5, diff --git a/stubs/sklearn/cluster/_kmeans.pyi b/stubs/sklearn/cluster/_kmeans.pyi index a12fd731..3fcd8fec 100644 --- a/stubs/sklearn/cluster/_kmeans.pyi +++ b/stubs/sklearn/cluster/_kmeans.pyi @@ -1,7 +1,10 @@ +import warnings from abc import ABC, abstractmethod as abstractmethod from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState @@ -36,11 +39,6 @@ from ._k_means_lloyd import ( KMeans_Self = TypeVar("KMeans_Self", bound=KMeans) MiniBatchKMeans_Self = TypeVar("MiniBatchKMeans_Self", bound=MiniBatchKMeans) -import warnings - -import numpy as np -import scipy.sparse as sp - ############################################################################### # Initialization heuristic diff --git a/stubs/sklearn/cluster/_mean_shift.pyi b/stubs/sklearn/cluster/_mean_shift.pyi index 6e264589..2095530d 100644 --- a/stubs/sklearn/cluster/_mean_shift.pyi +++ b/stubs/sklearn/cluster/_mean_shift.pyi @@ -1,7 +1,9 @@ +import warnings from collections import defaultdict as defaultdict from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -22,10 +24,6 @@ MeanShift_Self = TypeVar("MeanShift_Self", bound=MeanShift) # Gael Varoquaux # Martino Sorbaro -import warnings - -import numpy as np - def estimate_bandwidth( X: MatrixLike, *, diff --git a/stubs/sklearn/cluster/_optics.pyi b/stubs/sklearn/cluster/_optics.pyi index f33567b2..0d3ab172 100644 --- a/stubs/sklearn/cluster/_optics.pyi +++ b/stubs/sklearn/cluster/_optics.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from joblib import Memory from numpy import ndarray from scipy.sparse import SparseEfficiencyWarning as SparseEfficiencyWarning, issparse as issparse @@ -17,10 +19,6 @@ from ..utils.validation import check_memory as check_memory OPTICS_Self = TypeVar("OPTICS_Self", bound=OPTICS) -import warnings - -import numpy as np - class OPTICS(ClusterMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... 
diff --git a/stubs/sklearn/cluster/_spectral.pyi b/stubs/sklearn/cluster/_spectral.pyi index fb7efb52..f5d26f99 100644 --- a/stubs/sklearn/cluster/_spectral.pyi +++ b/stubs/sklearn/cluster/_spectral.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, Mapping, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.linalg import LinAlgError as LinAlgError, qr as qr, svd as svd @@ -18,10 +20,6 @@ from ._kmeans import k_means as k_means SpectralClustering_Self = TypeVar("SpectralClustering_Self", bound=SpectralClustering) -import warnings - -import numpy as np - def cluster_qr(vectors: MatrixLike) -> ndarray: ... def discretize( vectors: MatrixLike, diff --git a/stubs/sklearn/compose/_column_transformer.pyi b/stubs/sklearn/compose/_column_transformer.pyi index 77a5df85..2a60766e 100644 --- a/stubs/sklearn/compose/_column_transformer.pyi +++ b/stubs/sklearn/compose/_column_transformer.pyi @@ -2,6 +2,7 @@ from collections import Counter as Counter from itertools import chain as chain from typing import ClassVar, Literal, Sequence, TypeVar +import numpy as np from numpy import dtype, ndarray from scipy import sparse as sparse from scipy.sparse import spmatrix @@ -17,8 +18,6 @@ from ..utils.validation import check_array as check_array, check_is_fitted as ch ColumnTransformer_Self = TypeVar("ColumnTransformer_Self", bound=ColumnTransformer) -import numpy as np - __all__ = ["ColumnTransformer", "make_column_transformer", "make_column_selector"] _ERR_MSG_1DCOLUMN: str = ... diff --git a/stubs/sklearn/compose/_target.pyi b/stubs/sklearn/compose/_target.pyi index ffa384b1..fae67d6c 100644 --- a/stubs/sklearn/compose/_target.pyi +++ b/stubs/sklearn/compose/_target.pyi @@ -1,5 +1,7 @@ +import warnings from typing import Any, Callable, ClassVar, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, MatrixLike @@ -17,10 +19,6 @@ TransformedTargetRegressor_Self = TypeVar("TransformedTargetRegressor_Self", bou # Guillaume Lemaitre # License: BSD 3 clause -import warnings - -import numpy as np - __all__ = ["TransformedTargetRegressor"] class TransformedTargetRegressor(RegressorMixin, BaseEstimator): diff --git a/stubs/sklearn/covariance/_elliptic_envelope.pyi b/stubs/sklearn/covariance/_elliptic_envelope.pyi index 4d8dc6a2..d8f71373 100644 --- a/stubs/sklearn/covariance/_elliptic_envelope.pyi +++ b/stubs/sklearn/covariance/_elliptic_envelope.pyi @@ -1,6 +1,7 @@ from numbers import Real as Real from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -17,8 +18,6 @@ EllipticEnvelope_Self = TypeVar("EllipticEnvelope_Self", bound=EllipticEnvelope) # # License: BSD 3 clause -import numpy as np - class EllipticEnvelope(OutlierMixin, MinCovDet): feature_names_in_: ndarray = ... n_features_in_: int = ... 
diff --git a/stubs/sklearn/covariance/_empirical_covariance.pyi b/stubs/sklearn/covariance/_empirical_covariance.pyi index 656dc9ad..f7bee504 100644 --- a/stubs/sklearn/covariance/_empirical_covariance.pyi +++ b/stubs/sklearn/covariance/_empirical_covariance.pyi @@ -1,5 +1,7 @@ +import warnings from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from scipy import linalg as linalg @@ -19,9 +21,6 @@ EmpiricalCovariance_Self = TypeVar("EmpiricalCovariance_Self", bound=EmpiricalCo # License: BSD 3 clause # avoid division truncation -import warnings - -import numpy as np def log_likelihood(emp_cov: MatrixLike, precision: MatrixLike) -> Float: ... def empirical_covariance(X: ArrayLike, *, assume_centered: bool = False) -> ndarray: ... diff --git a/stubs/sklearn/covariance/_graph_lasso.pyi b/stubs/sklearn/covariance/_graph_lasso.pyi index 4f864d76..a5f0e52b 100644 --- a/stubs/sklearn/covariance/_graph_lasso.pyi +++ b/stubs/sklearn/covariance/_graph_lasso.pyi @@ -1,6 +1,11 @@ +import operator +import sys +import time +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Iterable, Literal, TypeVar +import numpy as np from numpy import ndarray from scipy import linalg as linalg @@ -16,16 +21,9 @@ from . import EmpiricalCovariance, empirical_covariance as empirical_covariance, GraphicalLassoCV_Self = TypeVar("GraphicalLassoCV_Self", bound=GraphicalLassoCV) GraphicalLasso_Self = TypeVar("GraphicalLasso_Self", bound=GraphicalLasso) -import operator -import sys -import time - # Author: Gael Varoquaux # License: BSD 3 clause # Copyright: INRIA -import warnings - -import numpy as np def alpha_max(emp_cov: MatrixLike) -> Float: ... diff --git a/stubs/sklearn/covariance/_robust_covariance.pyi b/stubs/sklearn/covariance/_robust_covariance.pyi index 34a07084..e31b78ca 100644 --- a/stubs/sklearn/covariance/_robust_covariance.pyi +++ b/stubs/sklearn/covariance/_robust_covariance.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import linalg as linalg @@ -18,10 +20,6 @@ MinCovDet_Self = TypeVar("MinCovDet_Self", bound=MinCovDet) # # License: BSD 3 clause -import warnings - -import numpy as np - # Minimum Covariance Determinant # Implementing of an algorithm by Rousseeuw & Van Driessen described in # (A Fast Algorithm for the Minimum Covariance Determinant Estimator, diff --git a/stubs/sklearn/covariance/_shrunk_covariance.pyi b/stubs/sklearn/covariance/_shrunk_covariance.pyi index e6f05ede..d2a0a961 100644 --- a/stubs/sklearn/covariance/_shrunk_covariance.pyi +++ b/stubs/sklearn/covariance/_shrunk_covariance.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from .._config import config_context as config_context @@ -20,9 +22,6 @@ OAS_Self = TypeVar("OAS_Self", bound=OAS) # License: BSD 3 clause # avoid division truncation -import warnings - -import numpy as np # ShrunkCovariance estimator diff --git a/stubs/sklearn/cross_decomposition/_pls.pyi b/stubs/sklearn/cross_decomposition/_pls.pyi index 61536b83..97212d78 100644 --- a/stubs/sklearn/cross_decomposition/_pls.pyi +++ b/stubs/sklearn/cross_decomposition/_pls.pyi @@ -1,7 +1,9 @@ +import warnings from abc import ABCMeta, abstractmethod from numbers 
import Integral as Integral, Real as Real from typing import ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Float, Int, MatrixLike @@ -16,10 +18,6 @@ PLSSVD_Self = TypeVar("PLSSVD_Self", bound=PLSSVD) PLSRegression_Self = TypeVar("PLSRegression_Self", bound=PLSRegression) _PLS_Self = TypeVar("_PLS_Self", bound=_PLS) -import warnings - -import numpy as np - __all__ = ["PLSCanonical", "PLSRegression", "PLSSVD"] class _PLS( diff --git a/stubs/sklearn/datasets/_samples_generator.pyi b/stubs/sklearn/datasets/_samples_generator.pyi index 23578f55..bc4abeb2 100644 --- a/stubs/sklearn/datasets/_samples_generator.pyi +++ b/stubs/sklearn/datasets/_samples_generator.pyi @@ -2,7 +2,7 @@ import array import numbers import warnings from collections.abc import Iterable -from typing import Iterable, Literal, Sequence +from typing import Literal, Sequence import numpy as np import scipy.sparse as sp diff --git a/stubs/sklearn/decomposition/_base.pyi b/stubs/sklearn/decomposition/_base.pyi index f327b120..df6e38c0 100644 --- a/stubs/sklearn/decomposition/_base.pyi +++ b/stubs/sklearn/decomposition/_base.pyi @@ -1,6 +1,7 @@ from abc import ABCMeta, abstractmethod from typing import TypeVar +import numpy as np from numpy import ndarray from scipy import linalg as linalg @@ -18,8 +19,6 @@ _BasePCA_Self = TypeVar("_BasePCA_Self", bound=_BasePCA) # # License: BSD 3 clause -import numpy as np - class _BasePCA(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator, metaclass=ABCMeta): def get_covariance(self) -> ndarray: ... def get_precision(self) -> ndarray: ... diff --git a/stubs/sklearn/decomposition/_dict_learning.pyi b/stubs/sklearn/decomposition/_dict_learning.pyi index e8bec454..4e94948a 100644 --- a/stubs/sklearn/decomposition/_dict_learning.pyi +++ b/stubs/sklearn/decomposition/_dict_learning.pyi @@ -1,7 +1,12 @@ +import itertools +import sys +import time +import warnings from math import ceil as ceil from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from joblib import effective_n_jobs as effective_n_jobs from numpy import ndarray from numpy.random import RandomState @@ -29,13 +34,6 @@ MiniBatchDictionaryLearning_Self = TypeVar("MiniBatchDictionaryLearning_Self", b # Author: Vlad Niculae, Gael Varoquaux, Alexandre Gramfort # License: BSD 3 clause -import itertools -import sys -import time -import warnings - -import numpy as np - # XXX : could be moved to the linear_model module def sparse_encode( X: ArrayLike, diff --git a/stubs/sklearn/decomposition/_factor_analysis.pyi b/stubs/sklearn/decomposition/_factor_analysis.pyi index e7c7f0c1..2c271c6b 100644 --- a/stubs/sklearn/decomposition/_factor_analysis.pyi +++ b/stubs/sklearn/decomposition/_factor_analysis.pyi @@ -1,7 +1,9 @@ +import warnings from math import log as log, sqrt as sqrt from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import linalg as linalg @@ -22,10 +24,6 @@ FactorAnalysis_Self = TypeVar("FactorAnalysis_Self", bound=FactorAnalysis) # License: BSD3 -import warnings - -import numpy as np - class FactorAnalysis(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... 
diff --git a/stubs/sklearn/decomposition/_fastica.pyi b/stubs/sklearn/decomposition/_fastica.pyi index 67f2d713..de356873 100644 --- a/stubs/sklearn/decomposition/_fastica.pyi +++ b/stubs/sklearn/decomposition/_fastica.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import linalg as linalg @@ -18,10 +20,6 @@ FastICA_Self = TypeVar("FastICA_Self", bound=FastICA) # Bertrand Thirion, Alexandre Gramfort, Denis A. Engemann # License: BSD 3 clause -import warnings - -import numpy as np - __all__ = ["fastica", "FastICA"] def fastica( diff --git a/stubs/sklearn/decomposition/_incremental_pca.pyi b/stubs/sklearn/decomposition/_incremental_pca.pyi index 9cce800f..a9dd9e10 100644 --- a/stubs/sklearn/decomposition/_incremental_pca.pyi +++ b/stubs/sklearn/decomposition/_incremental_pca.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from scipy import linalg as linalg, sparse as sparse @@ -12,8 +13,6 @@ from ._base import _BasePCA IncrementalPCA_Self = TypeVar("IncrementalPCA_Self", bound=IncrementalPCA) -import numpy as np - class IncrementalPCA(_BasePCA): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/decomposition/_kernel_pca.pyi b/stubs/sklearn/decomposition/_kernel_pca.pyi index 1703e0a6..6ff5435c 100644 --- a/stubs/sklearn/decomposition/_kernel_pca.pyi +++ b/stubs/sklearn/decomposition/_kernel_pca.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import linalg as linalg @@ -21,8 +22,6 @@ KernelPCA_Self = TypeVar("KernelPCA_Self", bound=KernelPCA) # Sylvain Marie # License: BSD 3 clause -import numpy as np - class KernelPCA(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/decomposition/_lda.pyi b/stubs/sklearn/decomposition/_lda.pyi index 6a512a47..957aa8be 100644 --- a/stubs/sklearn/decomposition/_lda.pyi +++ b/stubs/sklearn/decomposition/_lda.pyi @@ -1,6 +1,8 @@ from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np +import scipy.sparse as sp from joblib import effective_n_jobs as effective_n_jobs from numpy import ndarray from numpy.random import RandomState @@ -16,9 +18,6 @@ from ._online_lda_fast import mean_change as cy_mean_change LatentDirichletAllocation_Self = TypeVar("LatentDirichletAllocation_Self", bound=LatentDirichletAllocation) -import numpy as np -import scipy.sparse as sp - EPS = ... 
class LatentDirichletAllocation(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): diff --git a/stubs/sklearn/decomposition/_nmf.pyi b/stubs/sklearn/decomposition/_nmf.pyi index 4bc8948d..24569c2f 100644 --- a/stubs/sklearn/decomposition/_nmf.pyi +++ b/stubs/sklearn/decomposition/_nmf.pyi @@ -1,8 +1,13 @@ +import itertools +import time +import warnings from abc import ABC from math import sqrt as sqrt from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState from scipy import linalg as linalg @@ -20,13 +25,6 @@ from ..utils.validation import check_is_fitted as check_is_fitted, check_non_neg MiniBatchNMF_Self = TypeVar("MiniBatchNMF_Self", bound=MiniBatchNMF) _BaseNMF_Self = TypeVar("_BaseNMF_Self", bound=_BaseNMF) -import itertools -import time -import warnings - -import numpy as np -import scipy.sparse as sp - EPSILON = ... def norm(x: ArrayLike) -> float: ... diff --git a/stubs/sklearn/decomposition/_pca.pyi b/stubs/sklearn/decomposition/_pca.pyi index 57f571d5..77a04534 100644 --- a/stubs/sklearn/decomposition/_pca.pyi +++ b/stubs/sklearn/decomposition/_pca.pyi @@ -2,6 +2,7 @@ from math import log as log, sqrt as sqrt from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import linalg as linalg @@ -24,8 +25,6 @@ from ._base import _BasePCA PCA_Self = TypeVar("PCA_Self", bound=PCA) -import numpy as np - class PCA(_BasePCA): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/decomposition/_sparse_pca.pyi b/stubs/sklearn/decomposition/_sparse_pca.pyi index 8e242a6d..6ec58496 100644 --- a/stubs/sklearn/decomposition/_sparse_pca.pyi +++ b/stubs/sklearn/decomposition/_sparse_pca.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -15,8 +16,6 @@ from ._dict_learning import MiniBatchDictionaryLearning as MiniBatchDictionaryLe _BaseSparsePCA_Self = TypeVar("_BaseSparsePCA_Self", bound=_BaseSparsePCA) -import numpy as np - class _BaseSparsePCA(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): _parameter_constraints: ClassVar[dict] = ... 
diff --git a/stubs/sklearn/decomposition/_truncated_svd.pyi b/stubs/sklearn/decomposition/_truncated_svd.pyi index e9b718d2..71da6568 100644 --- a/stubs/sklearn/decomposition/_truncated_svd.pyi +++ b/stubs/sklearn/decomposition/_truncated_svd.pyi @@ -1,6 +1,8 @@ from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState from scipy.sparse.linalg import svds as svds @@ -15,9 +17,6 @@ from ..utils.validation import check_is_fitted as check_is_fitted TruncatedSVD_Self = TypeVar("TruncatedSVD_Self", bound=TruncatedSVD) -import numpy as np -import scipy.sparse as sp - __all__ = ["TruncatedSVD"] class TruncatedSVD(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): diff --git a/stubs/sklearn/discriminant_analysis.pyi b/stubs/sklearn/discriminant_analysis.pyi index 738b7320..0a25b0c5 100644 --- a/stubs/sklearn/discriminant_analysis.pyi +++ b/stubs/sklearn/discriminant_analysis.pyi @@ -1,6 +1,9 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, TypeVar +import numpy as np +import scipy.linalg from numpy import ndarray from scipy import linalg @@ -30,11 +33,6 @@ QuadraticDiscriminantAnalysis_Self = TypeVar("QuadraticDiscriminantAnalysis_Self # License: BSD 3-Clause -import warnings - -import numpy as np -import scipy.linalg - __all__ = ["LinearDiscriminantAnalysis", "QuadraticDiscriminantAnalysis"] class LinearDiscriminantAnalysis( diff --git a/stubs/sklearn/dummy.pyi b/stubs/sklearn/dummy.pyi index 0fa798d8..bd286108 100644 --- a/stubs/sklearn/dummy.pyi +++ b/stubs/sklearn/dummy.pyi @@ -1,6 +1,9 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState @@ -23,11 +26,6 @@ DummyClassifier_Self = TypeVar("DummyClassifier_Self", bound=DummyClassifier) # Maheshakya Wijewardena # License: BSD 3 clause -import warnings - -import numpy as np -import scipy.sparse as sp - class DummyClassifier(MultiOutputMixin, ClassifierMixin, BaseEstimator): sparse_output_: bool = ... n_outputs_: int = ... diff --git a/stubs/sklearn/ensemble/_bagging.pyi b/stubs/sklearn/ensemble/_bagging.pyi index 167c13bf..1acad171 100644 --- a/stubs/sklearn/ensemble/_bagging.pyi +++ b/stubs/sklearn/ensemble/_bagging.pyi @@ -1,9 +1,12 @@ +import itertools +import numbers from abc import ABCMeta, abstractmethod from functools import partial as partial from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, TypeVar from warnings import warn as warn +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -25,11 +28,6 @@ BaseBagging_Self = TypeVar("BaseBagging_Self", bound=BaseBagging) # Author: Gilles Louppe # License: BSD 3 clause -import itertools -import numbers - -import numpy as np - __all__ = ["BaggingClassifier", "BaggingRegressor"] MAX_INT = ... 
diff --git a/stubs/sklearn/ensemble/_forest.pyi b/stubs/sklearn/ensemble/_forest.pyi index 8f604de2..1c961151 100644 --- a/stubs/sklearn/ensemble/_forest.pyi +++ b/stubs/sklearn/ensemble/_forest.pyi @@ -1,8 +1,10 @@ +import threading from abc import ABCMeta, abstractmethod from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, Mapping, Sequence, TypeVar from warnings import catch_warnings as catch_warnings, simplefilter as simplefilter, warn as warn +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.sparse import hstack as sparse_hstack, issparse as issparse, spmatrix @@ -30,10 +32,6 @@ from ._base import BaseEnsemble BaseForest_Self = TypeVar("BaseForest_Self", bound=BaseForest) RandomTreesEmbedding_Self = TypeVar("RandomTreesEmbedding_Self", bound=RandomTreesEmbedding) -import threading - -import numpy as np - __all__ = [ "RandomForestClassifier", "RandomForestRegressor", diff --git a/stubs/sklearn/ensemble/_gb.pyi b/stubs/sklearn/ensemble/_gb.pyi index 2592e96e..5eb1fcd9 100644 --- a/stubs/sklearn/ensemble/_gb.pyi +++ b/stubs/sklearn/ensemble/_gb.pyi @@ -1,8 +1,10 @@ +import warnings from abc import ABCMeta, abstractmethod from numbers import Integral as Integral, Real as Real from time import time as time from typing import Callable, ClassVar, Iterator, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.sparse import csc_matrix as csc_matrix, csr_matrix as csr_matrix, issparse as issparse @@ -23,10 +25,6 @@ from ._gradient_boosting import predict_stage as predict_stage, predict_stages a BaseGradientBoosting_Self = TypeVar("BaseGradientBoosting_Self", bound=BaseGradientBoosting) -import warnings - -import numpy as np - class VerboseReporter: def __init__(self, verbose: Int) -> None: ... def init(self, est: BaseEstimator, begin_at_stage: Int = 0): ... diff --git a/stubs/sklearn/ensemble/_hist_gradient_boosting/binning.pyi b/stubs/sklearn/ensemble/_hist_gradient_boosting/binning.pyi index 09dddb5d..f385ee82 100644 --- a/stubs/sklearn/ensemble/_hist_gradient_boosting/binning.pyi +++ b/stubs/sklearn/ensemble/_hist_gradient_boosting/binning.pyi @@ -1,5 +1,6 @@ from typing import Sequence, TypeVar +import numpy as np from numpy import ndarray, uint8 from numpy.random import RandomState @@ -20,8 +21,6 @@ _BinMapper_Self = TypeVar("_BinMapper_Self", bound=_BinMapper) # Author: Nicolas Hug -import numpy as np - class _BinMapper(TransformerMixin, BaseEstimator): missing_values_bin_idx_: uint8 = ... is_categorical_: ndarray = ... 
diff --git a/stubs/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.pyi b/stubs/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.pyi index f75a3513..65ee5104 100644 --- a/stubs/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.pyi +++ b/stubs/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.pyi @@ -1,14 +1,16 @@ +import itertools +import warnings from abc import ABC, abstractmethod from functools import partial as partial from numbers import Integral as Integral, Real as Real from timeit import default_timer as time from typing import Callable, ClassVar, Literal, Mapping, Sequence, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from ..._loss.loss import ( - _LOSSES, BaseLoss as BaseLoss, HalfBinomialLoss as HalfBinomialLoss, HalfMultinomialLoss as HalfMultinomialLoss, @@ -33,13 +35,6 @@ from .grower import TreeGrower as TreeGrower BaseHistGradientBoosting_Self = TypeVar("BaseHistGradientBoosting_Self", bound=BaseHistGradientBoosting) -import itertools -import warnings - -import numpy as np - -_LOSSES = ... - class BaseHistGradientBoosting(BaseEstimator, ABC): _parameter_constraints: ClassVar[dict] = ... diff --git a/stubs/sklearn/ensemble/_iforest.pyi b/stubs/sklearn/ensemble/_iforest.pyi index faf9a041..65e33456 100644 --- a/stubs/sklearn/ensemble/_iforest.pyi +++ b/stubs/sklearn/ensemble/_iforest.pyi @@ -1,7 +1,9 @@ +import numbers from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar from warnings import warn as warn +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.sparse import issparse as issparse @@ -26,10 +28,6 @@ IsolationForest_Self = TypeVar("IsolationForest_Self", bound=IsolationForest) # Alexandre Gramfort # License: BSD 3 clause -import numbers - -import numpy as np - __all__ = ["IsolationForest"] class IsolationForest(OutlierMixin, BaseBagging): diff --git a/stubs/sklearn/ensemble/_stacking.pyi b/stubs/sklearn/ensemble/_stacking.pyi index 34f50c51..c6dca527 100644 --- a/stubs/sklearn/ensemble/_stacking.pyi +++ b/stubs/sklearn/ensemble/_stacking.pyi @@ -3,6 +3,8 @@ from copy import deepcopy as deepcopy from numbers import Integral as Integral from typing import ClassVar, Iterable, Literal, Sequence, TypeVar +import numpy as np +import scipy.sparse as sparse from numpy import ndarray from .._typing import ArrayLike, Int, MatrixLike @@ -33,9 +35,6 @@ _BaseStacking_Self = TypeVar("_BaseStacking_Self", bound=_BaseStacking) StackingRegressor_Self = TypeVar("StackingRegressor_Self", bound=StackingRegressor) StackingClassifier_Self = TypeVar("StackingClassifier_Self", bound=StackingClassifier) -import numpy as np -import scipy.sparse as sparse - class _BaseStacking(TransformerMixin, _BaseHeterogeneousEnsemble, metaclass=ABCMeta): _parameter_constraints: ClassVar[dict] = ... 
diff --git a/stubs/sklearn/ensemble/_voting.pyi b/stubs/sklearn/ensemble/_voting.pyi index 903dc22d..4cfadb7f 100644 --- a/stubs/sklearn/ensemble/_voting.pyi +++ b/stubs/sklearn/ensemble/_voting.pyi @@ -2,6 +2,7 @@ from abc import abstractmethod from numbers import Integral as Integral from typing import ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Int, MatrixLike @@ -19,8 +20,6 @@ from ._base import _BaseHeterogeneousEnsemble VotingClassifier_Self = TypeVar("VotingClassifier_Self", bound=VotingClassifier) VotingRegressor_Self = TypeVar("VotingRegressor_Self", bound=VotingRegressor) -import numpy as np - class _BaseVoting(TransformerMixin, _BaseHeterogeneousEnsemble): _parameter_constraints: ClassVar[dict] = ... diff --git a/stubs/sklearn/ensemble/_weight_boosting.pyi b/stubs/sklearn/ensemble/_weight_boosting.pyi index 873f60c7..6934332d 100644 --- a/stubs/sklearn/ensemble/_weight_boosting.pyi +++ b/stubs/sklearn/ensemble/_weight_boosting.pyi @@ -1,7 +1,9 @@ +import warnings from abc import ABCMeta, abstractmethod from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Iterator, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.special import xlogy as xlogy @@ -18,10 +20,6 @@ from ._base import BaseEnsemble BaseWeightBoosting_Self = TypeVar("BaseWeightBoosting_Self", bound=BaseWeightBoosting) -import warnings - -import numpy as np - __all__ = [ "AdaBoostClassifier", "AdaBoostRegressor", diff --git a/stubs/sklearn/feature_extraction/__init__.pyi b/stubs/sklearn/feature_extraction/__init__.pyi index 239cc2b0..b8cc717a 100644 --- a/stubs/sklearn/feature_extraction/__init__.pyi +++ b/stubs/sklearn/feature_extraction/__init__.pyi @@ -1,4 +1,4 @@ -from . import text as text +from . import image as image, text as text from ._dict_vectorizer import DictVectorizer as DictVectorizer from ._hash import FeatureHasher as FeatureHasher from .image import grid_to_graph as grid_to_graph, img_to_graph as img_to_graph diff --git a/stubs/sklearn/feature_extraction/_dict_vectorizer.pyi b/stubs/sklearn/feature_extraction/_dict_vectorizer.pyi index e48fcede..296d61d9 100644 --- a/stubs/sklearn/feature_extraction/_dict_vectorizer.pyi +++ b/stubs/sklearn/feature_extraction/_dict_vectorizer.pyi @@ -2,8 +2,10 @@ from array import array as array from collections.abc import Iterable, Mapping from numbers import Number as Number from operator import itemgetter as itemgetter -from typing import Any, ClassVar, Iterable, Iterator, Mapping, TypeVar +from typing import Any, ClassVar, Iterator, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from scipy.sparse import spmatrix @@ -17,9 +19,6 @@ DictVectorizer_Self = TypeVar("DictVectorizer_Self", bound=DictVectorizer) # Dan Blanchard # License: BSD 3 clause -import numpy as np -import scipy.sparse as sp - class DictVectorizer(TransformerMixin, BaseEstimator): feature_names_: list = ... vocabulary_: dict = ... 
diff --git a/stubs/sklearn/feature_extraction/_hash.pyi b/stubs/sklearn/feature_extraction/_hash.pyi index 4294bf78..64013748 100644 --- a/stubs/sklearn/feature_extraction/_hash.pyi +++ b/stubs/sklearn/feature_extraction/_hash.pyi @@ -2,6 +2,8 @@ from itertools import chain as chain from numbers import Integral as Integral from typing import Any, ClassVar, Iterator, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import dtype from scipy.sparse import spmatrix @@ -15,9 +17,6 @@ FeatureHasher_Self = TypeVar("FeatureHasher_Self", bound=FeatureHasher) # Author: Lars Buitinck # License: BSD 3 clause -import numpy as np -import scipy.sparse as sp - class FeatureHasher(TransformerMixin, BaseEstimator): _parameter_constraints: ClassVar[dict] = ... diff --git a/stubs/sklearn/feature_extraction/image.pyi b/stubs/sklearn/feature_extraction/image.pyi index f7b43b8f..c9eb59e6 100644 --- a/stubs/sklearn/feature_extraction/image.pyi +++ b/stubs/sklearn/feature_extraction/image.pyi @@ -2,6 +2,7 @@ from itertools import product as product from numbers import Integral as Integral, Number as Number, Real as Real from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from numpy.lib.stride_tricks import as_strided as as_strided from numpy.random import RandomState @@ -16,8 +17,6 @@ from ..utils._param_validation import Interval as Interval PatchExtractor_Self = TypeVar("PatchExtractor_Self", bound=PatchExtractor) -import numpy as np - __all__ = [ "PatchExtractor", "extract_patches_2d", diff --git a/stubs/sklearn/feature_extraction/text.pyi b/stubs/sklearn/feature_extraction/text.pyi index 2f8afb97..cb489174 100644 --- a/stubs/sklearn/feature_extraction/text.pyi +++ b/stubs/sklearn/feature_extraction/text.pyi @@ -1,10 +1,16 @@ +import array +import re +import unicodedata +import warnings from collections import defaultdict as defaultdict from collections.abc import Mapping from functools import partial from numbers import Integral as Integral, Real as Real from operator import itemgetter as itemgetter -from typing import Any, Callable, ClassVar, Iterable, Literal, Mapping, TypeVar +from typing import Any, Callable, ClassVar, Iterable, Literal, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from scipy.sparse import spmatrix @@ -31,14 +37,6 @@ TfidfTransformer_Self = TypeVar("TfidfTransformer_Self", bound=TfidfTransformer) # # License: BSD 3 clause -import array -import re -import unicodedata -import warnings - -import numpy as np -import scipy.sparse as sp - __all__ = [ "HashingVectorizer", "CountVectorizer", diff --git a/stubs/sklearn/feature_selection/_from_model.pyi b/stubs/sklearn/feature_selection/_from_model.pyi index 3924d850..75c943c1 100644 --- a/stubs/sklearn/feature_selection/_from_model.pyi +++ b/stubs/sklearn/feature_selection/_from_model.pyi @@ -2,6 +2,7 @@ from copy import deepcopy as deepcopy from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, MatrixLike @@ -17,8 +18,6 @@ SelectFromModel_Self = TypeVar("SelectFromModel_Self", bound=SelectFromModel) # Authors: Gilles Louppe, Mathieu Blondel, Maheshakya Wijewardena # License: BSD 3 clause -import numpy as np - class SelectFromModel(MetaEstimatorMixin, SelectorMixin, BaseEstimator): feature_names_in_: ndarray = ... max_features_: int = ... 
diff --git a/stubs/sklearn/feature_selection/_rfe.pyi b/stubs/sklearn/feature_selection/_rfe.pyi index e12230b2..c167ad0d 100644 --- a/stubs/sklearn/feature_selection/_rfe.pyi +++ b/stubs/sklearn/feature_selection/_rfe.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, Iterable, TypeVar +import numpy as np from joblib import effective_n_jobs as effective_n_jobs from numpy import ndarray @@ -24,8 +25,6 @@ RFE_Self = TypeVar("RFE_Self", bound=RFE) # # License: BSD 3 clause -import numpy as np - class RFE(SelectorMixin, MetaEstimatorMixin, BaseEstimator): support_: ndarray = ... ranking_: ndarray = ... diff --git a/stubs/sklearn/feature_selection/_sequential.pyi b/stubs/sklearn/feature_selection/_sequential.pyi index 84cbcd84..cea445a7 100644 --- a/stubs/sklearn/feature_selection/_sequential.pyi +++ b/stubs/sklearn/feature_selection/_sequential.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, Iterable, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Float, Int, MatrixLike @@ -13,10 +15,6 @@ from ._base import SelectorMixin SequentialFeatureSelector_Self = TypeVar("SequentialFeatureSelector_Self", bound=SequentialFeatureSelector) -import warnings - -import numpy as np - class SequentialFeatureSelector(SelectorMixin, MetaEstimatorMixin, BaseEstimator): support_: ndarray = ... n_features_to_select_: int = ... diff --git a/stubs/sklearn/feature_selection/_univariate_selection.pyi b/stubs/sklearn/feature_selection/_univariate_selection.pyi index f8a18705..66bf446f 100644 --- a/stubs/sklearn/feature_selection/_univariate_selection.pyi +++ b/stubs/sklearn/feature_selection/_univariate_selection.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, Literal, TypeVar +import numpy as np from joblib.memory import MemorizedFunc from numpy import ndarray from scipy import special as special, stats @@ -27,10 +29,6 @@ _BaseFilter_Self = TypeVar("_BaseFilter_Self", bound=_BaseFilter) # L. Buitinck, A. Joly # License: BSD 3 clause -import warnings - -import numpy as np - ###################################################################### # Scoring functions diff --git a/stubs/sklearn/feature_selection/_variance_threshold.pyi b/stubs/sklearn/feature_selection/_variance_threshold.pyi index 001ef127..74eb25b7 100644 --- a/stubs/sklearn/feature_selection/_variance_threshold.pyi +++ b/stubs/sklearn/feature_selection/_variance_threshold.pyi @@ -1,6 +1,7 @@ from numbers import Real as Real from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from .._typing import Float, MatrixLike @@ -15,8 +16,6 @@ VarianceThreshold_Self = TypeVar("VarianceThreshold_Self", bound=VarianceThresho # Author: Lars Buitinck # License: 3-clause BSD -import numpy as np - class VarianceThreshold(SelectorMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... 
diff --git a/stubs/sklearn/gaussian_process/_gpc.pyi b/stubs/sklearn/gaussian_process/_gpc.pyi index 1b05382d..ce1056da 100644 --- a/stubs/sklearn/gaussian_process/_gpc.pyi +++ b/stubs/sklearn/gaussian_process/_gpc.pyi @@ -2,6 +2,8 @@ from numbers import Integral as Integral from operator import itemgetter as itemgetter from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np +import scipy.optimize from numpy import ndarray from numpy.random import RandomState from scipy.linalg import cho_solve as cho_solve, cholesky as cholesky, solve as solve @@ -22,9 +24,6 @@ _BinaryGaussianProcessClassifierLaplace_Self = TypeVar( ) GaussianProcessClassifier_Self = TypeVar("GaussianProcessClassifier_Self", bound=GaussianProcessClassifier) -import numpy as np -import scipy.optimize - # Values required for approximating the logistic sigmoid by # error functions. coefs are obtained via: # x = np.array([0, 0.6, 2, 3.5, 4.5, np.inf]) diff --git a/stubs/sklearn/gaussian_process/_gpr.pyi b/stubs/sklearn/gaussian_process/_gpr.pyi index 127fe5c0..74368339 100644 --- a/stubs/sklearn/gaussian_process/_gpr.pyi +++ b/stubs/sklearn/gaussian_process/_gpr.pyi @@ -1,7 +1,10 @@ +import warnings from numbers import Integral as Integral, Real as Real from operator import itemgetter as itemgetter from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np +import scipy.optimize from numpy import ndarray from numpy.random import RandomState from scipy.linalg import cho_solve as cho_solve, cholesky as cholesky, solve_triangular as solve_triangular @@ -18,11 +21,6 @@ GaussianProcessRegressor_Self = TypeVar("GaussianProcessRegressor_Self", bound=G # Modified by: Pete Green # License: BSD 3 clause -import warnings - -import numpy as np -import scipy.optimize - GPR_CHOLESKY_LOWER: bool = ... class GaussianProcessRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator): diff --git a/stubs/sklearn/gaussian_process/kernels.pyi b/stubs/sklearn/gaussian_process/kernels.pyi index 0e4e3e7f..279a1ba5 100644 --- a/stubs/sklearn/gaussian_process/kernels.pyi +++ b/stubs/sklearn/gaussian_process/kernels.pyi @@ -1,11 +1,15 @@ +import math +import warnings from abc import ABCMeta, abstractmethod from collections import namedtuple from inspect import signature as signature from typing import Callable, Literal, Sequence, TypeVar +from typing_extensions import Self +import numpy as np from numpy import ndarray from scipy.spatial.distance import cdist as cdist, pdist as pdist, squareform as squareform -from scipy.special import gamma, kv as kv +from scipy.special import kv as kv from .._typing import ArrayLike, Float, MatrixLike from ..base import clone as clone @@ -15,12 +19,6 @@ from ..metrics.pairwise import pairwise_kernels as pairwise_kernels Kernel_Self = TypeVar("Kernel_Self", bound=Kernel) Hyperparameter_Self = TypeVar("Hyperparameter_Self", bound=Hyperparameter) -import math -import warnings -from typing_extensions import Self - -import numpy as np - class Hyperparameter(namedtuple("Hyperparameter", ("name", "value_type", "bounds", "n_elements", "fixed"))): fixed: bool = ... n_elements: int = ... 
diff --git a/stubs/sklearn/impute/_base.pyi b/stubs/sklearn/impute/_base.pyi index 9a6113d5..1cc6b872 100644 --- a/stubs/sklearn/impute/_base.pyi +++ b/stubs/sklearn/impute/_base.pyi @@ -1,6 +1,10 @@ +import numbers +import warnings from collections import Counter as Counter from typing import Any, ClassVar, Literal, TypeVar +import numpy as np +import numpy.ma as ma from numpy import ndarray from scipy import sparse as sp from scipy.sparse import spmatrix @@ -18,12 +22,6 @@ MissingIndicator_Self = TypeVar("MissingIndicator_Self", bound=MissingIndicator) # Sergey Feldman # License: BSD 3 clause -import numbers -import warnings - -import numpy as np -import numpy.ma as ma - class _BaseImputer(TransformerMixin, BaseEstimator): _parameter_constraints: ClassVar[dict] = ... diff --git a/stubs/sklearn/impute/_iterative.pyi b/stubs/sklearn/impute/_iterative.pyi index bc4982ed..cef3e9d3 100644 --- a/stubs/sklearn/impute/_iterative.pyi +++ b/stubs/sklearn/impute/_iterative.pyi @@ -1,8 +1,10 @@ +import warnings from collections import namedtuple as namedtuple from numbers import Integral as Integral, Real as Real from time import time as time from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import stats as stats @@ -19,10 +21,6 @@ from ._base import MissingIndicator, SimpleImputer, _BaseImputer IterativeImputer_Self = TypeVar("IterativeImputer_Self", bound=IterativeImputer) -import warnings - -import numpy as np - _ImputerTriplet = ... class IterativeImputer(_BaseImputer): diff --git a/stubs/sklearn/impute/_knn.pyi b/stubs/sklearn/impute/_knn.pyi index a83f5a5b..4d1d5f14 100644 --- a/stubs/sklearn/impute/_knn.pyi +++ b/stubs/sklearn/impute/_knn.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Int, MatrixLike @@ -16,8 +17,6 @@ KNNImputer_Self = TypeVar("KNNImputer_Self", bound=KNNImputer) # Thomas J Fan # License: BSD 3 clause -import numpy as np - class KNNImputer(_BaseImputer): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/isotonic.pyi b/stubs/sklearn/isotonic.pyi index f94f7e73..b527c8ba 100644 --- a/stubs/sklearn/isotonic.pyi +++ b/stubs/sklearn/isotonic.pyi @@ -1,6 +1,9 @@ +import math +import warnings from numbers import Real as Real from typing import Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from scipy import interpolate as interpolate from scipy.stats import spearmanr as spearmanr @@ -17,11 +20,6 @@ IsotonicRegression_Self = TypeVar("IsotonicRegression_Self", bound=IsotonicRegre # Nelle Varoquaux # License: BSD 3 clause -import math -import warnings - -import numpy as np - __all__ = ["check_increasing", "isotonic_regression", "IsotonicRegression"] def check_increasing(x: ArrayLike, y: ArrayLike) -> bool: ... 
diff --git a/stubs/sklearn/kernel_approximation.pyi b/stubs/sklearn/kernel_approximation.pyi index 18274a53..f68aebf8 100644 --- a/stubs/sklearn/kernel_approximation.pyi +++ b/stubs/sklearn/kernel_approximation.pyi @@ -1,6 +1,9 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState from scipy.fftpack import fft as fft, ifft as ifft @@ -25,11 +28,6 @@ AdditiveChi2Sampler_Self = TypeVar("AdditiveChi2Sampler_Self", bound=AdditiveChi RBFSampler_Self = TypeVar("RBFSampler_Self", bound=RBFSampler) Nystroem_Self = TypeVar("Nystroem_Self", bound=Nystroem) -import warnings - -import numpy as np -import scipy.sparse as sp - class PolynomialCountSketch(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/kernel_ridge.pyi b/stubs/sklearn/kernel_ridge.pyi index acbbbdc7..316d9895 100644 --- a/stubs/sklearn/kernel_ridge.pyi +++ b/stubs/sklearn/kernel_ridge.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, TypeVar +import numpy as np from numpy import ndarray from scipy.sparse import spmatrix @@ -12,8 +13,6 @@ from .utils.validation import check_is_fitted as check_is_fitted KernelRidge_Self = TypeVar("KernelRidge_Self", bound=KernelRidge) -import numpy as np - class KernelRidge(MultiOutputMixin, RegressorMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/linear_model/_base.pyi b/stubs/sklearn/linear_model/_base.pyi index 1fb50ebe..08967532 100644 --- a/stubs/sklearn/linear_model/_base.pyi +++ b/stubs/sklearn/linear_model/_base.pyi @@ -1,7 +1,11 @@ +import numbers +import warnings from abc import ABCMeta, abstractmethod from numbers import Integral as Integral from typing import ClassVar, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random.mtrand import RandomState from scipy import linalg as linalg, optimize as optimize, sparse @@ -22,12 +26,6 @@ from ._stochastic_gradient import SGDClassifier LinearRegression_Self = TypeVar("LinearRegression_Self", bound=LinearRegression) SparseCoefMixin_Self = TypeVar("SparseCoefMixin_Self", bound=SparseCoefMixin) -import numbers -import warnings - -import numpy as np -import scipy.sparse as sp - # TODO: bayesian_ridge_regression and bayesian_regression_ard # should be squashed into its respective objects. 
diff --git a/stubs/sklearn/linear_model/_bayes.pyi b/stubs/sklearn/linear_model/_bayes.pyi index c1e32ed9..03378950 100644 --- a/stubs/sklearn/linear_model/_bayes.pyi +++ b/stubs/sklearn/linear_model/_bayes.pyi @@ -2,6 +2,7 @@ from math import log as log from numbers import Integral as Integral, Real as Real from typing import ClassVar, TypeVar +import numpy as np from numpy import ndarray from scipy import linalg as linalg from scipy.linalg import pinvh as pinvh @@ -15,8 +16,6 @@ from ._base import LinearModel BayesianRidge_Self = TypeVar("BayesianRidge_Self", bound=BayesianRidge) ARDRegression_Self = TypeVar("ARDRegression_Self", bound=ARDRegression) -import numpy as np - ############################################################################### # BayesianRidge regression diff --git a/stubs/sklearn/linear_model/_coordinate_descent.pyi b/stubs/sklearn/linear_model/_coordinate_descent.pyi index 2ecc942b..2b969bde 100644 --- a/stubs/sklearn/linear_model/_coordinate_descent.pyi +++ b/stubs/sklearn/linear_model/_coordinate_descent.pyi @@ -1,8 +1,12 @@ +import numbers +import sys +import warnings from abc import ABC, abstractmethod from functools import partial as partial from numbers import Integral as Integral, Real as Real from typing import ClassVar, Iterable, Literal, Sequence, TypeVar +import numpy as np from joblib import effective_n_jobs as effective_n_jobs from numpy import ndarray from numpy.random import RandomState @@ -38,12 +42,6 @@ MultiTaskElasticNet_Self = TypeVar("MultiTaskElasticNet_Self", bound=MultiTaskEl # # License: BSD 3 clause -import numbers -import sys -import warnings - -import numpy as np - def lasso_path( X: MatrixLike | ArrayLike, y: MatrixLike | ArrayLike, diff --git a/stubs/sklearn/linear_model/_glm/glm.pyi b/stubs/sklearn/linear_model/_glm/glm.pyi index 11d454b3..fa7b223b 100644 --- a/stubs/sklearn/linear_model/_glm/glm.pyi +++ b/stubs/sklearn/linear_model/_glm/glm.pyi @@ -1,6 +1,8 @@ from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, TypeVar +import numpy as np +import scipy.optimize from numpy import ndarray from ..._loss.glm_distribution import TweedieDistribution as TweedieDistribution @@ -22,9 +24,6 @@ from ._newton_solver import NewtonCholeskySolver as NewtonCholeskySolver, Newton _GeneralizedLinearRegressor_Self = TypeVar("_GeneralizedLinearRegressor_Self", bound=_GeneralizedLinearRegressor) -import numpy as np -import scipy.optimize - class _GeneralizedLinearRegressor(RegressorMixin, BaseEstimator): _base_loss: BaseLoss = ... n_iter_: int = ... diff --git a/stubs/sklearn/linear_model/_huber.pyi b/stubs/sklearn/linear_model/_huber.pyi index 7a24c481..bbdc6c4f 100644 --- a/stubs/sklearn/linear_model/_huber.pyi +++ b/stubs/sklearn/linear_model/_huber.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral, Real as Real from typing import ClassVar, TypeVar +import numpy as np from numpy import ndarray from scipy import optimize as optimize @@ -16,8 +17,6 @@ HuberRegressor_Self = TypeVar("HuberRegressor_Self", bound=HuberRegressor) # Authors: Manoj Kumar mks542@nyu.edu # License: BSD 3 clause -import numpy as np - class HuberRegressor(LinearModel, RegressorMixin, BaseEstimator): outliers_: ndarray = ... n_iter_: int = ... 
diff --git a/stubs/sklearn/linear_model/_least_angle.pyi b/stubs/sklearn/linear_model/_least_angle.pyi index 7dfc185f..50e81c00 100644 --- a/stubs/sklearn/linear_model/_least_angle.pyi +++ b/stubs/sklearn/linear_model/_least_angle.pyi @@ -1,7 +1,10 @@ +import sys +import warnings from math import log as log from numbers import Integral as Integral, Real as Real from typing import ClassVar, Iterable, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import interpolate as interpolate, linalg as linalg @@ -20,11 +23,6 @@ LassoLarsIC_Self = TypeVar("LassoLarsIC_Self", bound=LassoLarsIC) Lars_Self = TypeVar("Lars_Self", bound=Lars) LarsCV_Self = TypeVar("LarsCV_Self", bound=LarsCV) -import sys -import warnings - -import numpy as np - SOLVE_TRIANGULAR_ARGS: dict = ... def lars_path( diff --git a/stubs/sklearn/linear_model/_logistic.pyi b/stubs/sklearn/linear_model/_logistic.pyi index 902df9d3..407c768a 100644 --- a/stubs/sklearn/linear_model/_logistic.pyi +++ b/stubs/sklearn/linear_model/_logistic.pyi @@ -1,6 +1,9 @@ +import numbers +import warnings from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, Literal, Mapping, Sequence, TypeVar +import numpy as np from joblib import effective_n_jobs as effective_n_jobs from numpy import ndarray from numpy.random import RandomState @@ -38,11 +41,6 @@ LogisticRegression_Self = TypeVar("LogisticRegression_Self", bound=LogisticRegre # Simon Wu # Arthur Mensch # Christian Lorentzen # License: BSD 3 clause -import warnings - -import numpy as np class QuantileRegressor(LinearModel, RegressorMixin, BaseEstimator): n_iter_: int = ... diff --git a/stubs/sklearn/linear_model/_ransac.pyi b/stubs/sklearn/linear_model/_ransac.pyi index 77e4969b..85384ca2 100644 --- a/stubs/sklearn/linear_model/_ransac.pyi +++ b/stubs/sklearn/linear_model/_ransac.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -25,10 +27,6 @@ RANSACRegressor_Self = TypeVar("RANSACRegressor_Self", bound=RANSACRegressor) # # License: BSD 3 clause -import warnings - -import numpy as np - _EPSILON = ... 
class RANSACRegressor(MetaEstimatorMixin, RegressorMixin, MultiOutputMixin, BaseEstimator): diff --git a/stubs/sklearn/linear_model/_ridge.pyi b/stubs/sklearn/linear_model/_ridge.pyi index 9faeb23f..cf9b8bf9 100644 --- a/stubs/sklearn/linear_model/_ridge.pyi +++ b/stubs/sklearn/linear_model/_ridge.pyi @@ -1,8 +1,11 @@ +import numbers +import warnings from abc import ABCMeta, abstractmethod from functools import partial as partial from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, Iterable, Literal, Mapping, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from pandas.core.frame import DataFrame @@ -40,11 +43,6 @@ RidgeClassifierCV_Self = TypeVar("RidgeClassifierCV_Self", bound=RidgeClassifier RidgeCV_Self = TypeVar("RidgeCV_Self", bound=RidgeCV) Ridge_Self = TypeVar("Ridge_Self", bound=Ridge) -import numbers -import warnings - -import numpy as np - def ridge_regression( X: MatrixLike | LinearOperator, y: MatrixLike | ArrayLike, diff --git a/stubs/sklearn/linear_model/_stochastic_gradient.pyi b/stubs/sklearn/linear_model/_stochastic_gradient.pyi index 4ae45e4c..3422b363 100644 --- a/stubs/sklearn/linear_model/_stochastic_gradient.pyi +++ b/stubs/sklearn/linear_model/_stochastic_gradient.pyi @@ -1,7 +1,9 @@ +import warnings from abc import ABCMeta, abstractmethod from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, Mapping, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.sparse._csr import csr_matrix @@ -38,10 +40,6 @@ BaseSGDRegressor_Self = TypeVar("BaseSGDRegressor_Self", bound=BaseSGDRegressor) # # License: BSD 3 clause -import warnings - -import numpy as np - LEARNING_RATE_TYPES: dict = ... PENALTY_TYPES: dict = ... diff --git a/stubs/sklearn/linear_model/_theil_sen.pyi b/stubs/sklearn/linear_model/_theil_sen.pyi index f7bd9802..1792ea0c 100644 --- a/stubs/sklearn/linear_model/_theil_sen.pyi +++ b/stubs/sklearn/linear_model/_theil_sen.pyi @@ -1,7 +1,9 @@ +import warnings from itertools import combinations as combinations from numbers import Integral as Integral, Real as Real from typing import ClassVar, TypeVar +import numpy as np from joblib import effective_n_jobs as effective_n_jobs from numpy import ndarray from numpy.random import RandomState @@ -23,10 +25,6 @@ TheilSenRegressor_Self = TypeVar("TheilSenRegressor_Self", bound=TheilSenRegress # # License: BSD 3 clause -import warnings - -import numpy as np - _EPSILON = ... class TheilSenRegressor(RegressorMixin, LinearModel): diff --git a/stubs/sklearn/manifold/_isomap.pyi b/stubs/sklearn/manifold/_isomap.pyi index eecf040f..199b0f4e 100644 --- a/stubs/sklearn/manifold/_isomap.pyi +++ b/stubs/sklearn/manifold/_isomap.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from scipy.sparse import issparse as issparse from scipy.sparse._csr import csr_matrix @@ -20,9 +22,6 @@ Isomap_Self = TypeVar("Isomap_Self", bound=Isomap) # Author: Jake Vanderplas -- # License: BSD 3 clause (C) 2011 -import warnings - -import numpy as np class Isomap(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... 
diff --git a/stubs/sklearn/manifold/_locally_linear.pyi b/stubs/sklearn/manifold/_locally_linear.pyi index 74cf0271..dc82a96e 100644 --- a/stubs/sklearn/manifold/_locally_linear.pyi +++ b/stubs/sklearn/manifold/_locally_linear.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.linalg import qr as qr, solve as solve, svd as svd @@ -18,8 +19,6 @@ from ..utils.validation import FLOAT_DTYPES as FLOAT_DTYPES, check_is_fitted as LocallyLinearEmbedding_Self = TypeVar("LocallyLinearEmbedding_Self", bound=LocallyLinearEmbedding) -import numpy as np - def barycenter_weights(X: MatrixLike, Y: MatrixLike, indices: MatrixLike, reg: Float = 1e-3) -> ndarray: ... def barycenter_kneighbors_graph( X: ArrayLike | NearestNeighbors, diff --git a/stubs/sklearn/manifold/_mds.pyi b/stubs/sklearn/manifold/_mds.pyi index 83e99520..3b825741 100644 --- a/stubs/sklearn/manifold/_mds.pyi +++ b/stubs/sklearn/manifold/_mds.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from joblib import effective_n_jobs as effective_n_jobs from numpy import ndarray from numpy.random import RandomState @@ -15,10 +17,6 @@ from ..utils.parallel import Parallel as Parallel, delayed as delayed MDS_Self = TypeVar("MDS_Self", bound=MDS) -import warnings - -import numpy as np - def smacof( dissimilarities: MatrixLike, *, diff --git a/stubs/sklearn/manifold/_spectral_embedding.pyi b/stubs/sklearn/manifold/_spectral_embedding.pyi index 892b1485..e6fd0e94 100644 --- a/stubs/sklearn/manifold/_spectral_embedding.pyi +++ b/stubs/sklearn/manifold/_spectral_embedding.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy import sparse as sparse @@ -18,10 +20,6 @@ from ..utils._param_validation import Interval as Interval, StrOptions as StrOpt SpectralEmbedding_Self = TypeVar("SpectralEmbedding_Self", bound=SpectralEmbedding) -import warnings - -import numpy as np - def spectral_embedding( adjacency: coo_matrix | MatrixLike, *, diff --git a/stubs/sklearn/metrics/_plot/confusion_matrix.pyi b/stubs/sklearn/metrics/_plot/confusion_matrix.pyi index 536ae1c1..7dbe380b 100644 --- a/stubs/sklearn/metrics/_plot/confusion_matrix.pyi +++ b/stubs/sklearn/metrics/_plot/confusion_matrix.pyi @@ -12,7 +12,6 @@ from ..._typing import ArrayLike, MatrixLike from ...base import BaseEstimator, is_classifier as is_classifier from ...utils import check_matplotlib_support as check_matplotlib_support from ...utils.multiclass import unique_labels as unique_labels -from .. import confusion_matrix class ConfusionMatrixDisplay: figure_: Figure = ... 
diff --git a/stubs/sklearn/mixture/_base.pyi b/stubs/sklearn/mixture/_base.pyi index 632950e5..c5c57bc0 100644 --- a/stubs/sklearn/mixture/_base.pyi +++ b/stubs/sklearn/mixture/_base.pyi @@ -1,8 +1,10 @@ +import warnings from abc import ABCMeta, abstractmethod as abstractmethod from numbers import Integral as Integral, Real as Real from time import time as time from typing import Any, ClassVar, TypeVar +import numpy as np from numpy import ndarray from numpy.random.mtrand import RandomState from scipy.special import logsumexp as logsumexp @@ -22,10 +24,6 @@ BaseMixture_Self = TypeVar("BaseMixture_Self", bound=BaseMixture) # Modified by Thierry Guillemot # License: BSD 3 clause -import warnings - -import numpy as np - class BaseMixture(DensityMixin, BaseEstimator, metaclass=ABCMeta): _parameter_constraints: ClassVar[dict] = ... diff --git a/stubs/sklearn/model_selection/_search.pyi b/stubs/sklearn/model_selection/_search.pyi index f82bd72e..9fd0b920 100644 --- a/stubs/sklearn/model_selection/_search.pyi +++ b/stubs/sklearn/model_selection/_search.pyi @@ -1,10 +1,15 @@ +import numbers +import operator +import time +import warnings from abc import ABCMeta, abstractmethod from collections import defaultdict as defaultdict from collections.abc import Iterable, Mapping, Sequence from functools import partial as partial, reduce as reduce from itertools import product as product -from typing import Any, Callable, ClassVar, Generic, Iterable, Iterator, Mapping, Sequence, TypeVar +from typing import Any, Callable, ClassVar, Generic, Iterator, TypeVar +import numpy as np from numpy import ndarray from numpy.ma import MaskedArray as MaskedArray from numpy.random import RandomState @@ -26,13 +31,6 @@ from ._split import check_cv as check_cv BaseSearchCV_Self = TypeVar("BaseSearchCV_Self", bound=BaseSearchCV) BaseEstimatorT = TypeVar("BaseEstimatorT", bound=BaseEstimator, default=BaseEstimator, covariant=True) -import numbers -import operator -import time -import warnings - -import numpy as np - __all__ = ["GridSearchCV", "ParameterGrid", "ParameterSampler", "RandomizedSearchCV"] class ParameterGrid: diff --git a/stubs/sklearn/model_selection/_search_successive_halving.pyi b/stubs/sklearn/model_selection/_search_successive_halving.pyi index 435d8f3c..39cc86a6 100644 --- a/stubs/sklearn/model_selection/_search_successive_halving.pyi +++ b/stubs/sklearn/model_selection/_search_successive_halving.pyi @@ -4,6 +4,7 @@ from math import ceil as ceil, floor as floor, log as log from numbers import Integral as Integral from typing import Callable, ClassVar, Iterable, Iterator, Literal, Mapping, Sequence, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -18,8 +19,6 @@ from ._split import check_cv as check_cv BaseSuccessiveHalving_Self = TypeVar("BaseSuccessiveHalving_Self", bound=BaseSuccessiveHalving) -import numpy as np - __all__ = ["HalvingGridSearchCV", "HalvingRandomSearchCV"] class _SubsampleMetaSplitter: diff --git a/stubs/sklearn/model_selection/_split.pyi b/stubs/sklearn/model_selection/_split.pyi index 82764188..6295f473 100644 --- a/stubs/sklearn/model_selection/_split.pyi +++ b/stubs/sklearn/model_selection/_split.pyi @@ -6,7 +6,7 @@ from collections.abc import Iterable from inspect import signature as signature from itertools import chain as chain, combinations as combinations from math import ceil as ceil, floor as floor -from typing import Any, Callable, Iterable, Iterator +from typing import Any, Callable, Iterator import numpy as np from numpy import 
ndarray diff --git a/stubs/sklearn/multiclass.pyi b/stubs/sklearn/multiclass.pyi index 4e04edfd..bece5985 100644 --- a/stubs/sklearn/multiclass.pyi +++ b/stubs/sklearn/multiclass.pyi @@ -1,6 +1,11 @@ +import array +import itertools +import warnings from numbers import Integral as Integral, Real as Real from typing import ClassVar, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState @@ -32,13 +37,6 @@ OutputCodeClassifier_Self = TypeVar("OutputCodeClassifier_Self", bound=OutputCod # # License: BSD 3 clause -import array -import itertools -import warnings - -import numpy as np -import scipy.sparse as sp - __all__ = [ "OneVsRestClassifier", "OneVsOneClassifier", diff --git a/stubs/sklearn/multioutput.pyi b/stubs/sklearn/multioutput.pyi index 9bd4ad61..36373f84 100644 --- a/stubs/sklearn/multioutput.pyi +++ b/stubs/sklearn/multioutput.pyi @@ -2,6 +2,8 @@ from abc import ABCMeta, abstractmethod from numbers import Integral as Integral from typing import ClassVar, Sequence, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from scipy.sparse import spmatrix @@ -31,9 +33,6 @@ _MultiOutputEstimator_Self = TypeVar("_MultiOutputEstimator_Self", bound=_MultiO MultiOutputRegressor_Self = TypeVar("MultiOutputRegressor_Self", bound=MultiOutputRegressor) ClassifierChain_Self = TypeVar("ClassifierChain_Self", bound=ClassifierChain) -import numpy as np -import scipy.sparse as sp - __all__ = [ "MultiOutputRegressor", "MultiOutputClassifier", diff --git a/stubs/sklearn/naive_bayes.pyi b/stubs/sklearn/naive_bayes.pyi index b4017100..3264fca5 100644 --- a/stubs/sklearn/naive_bayes.pyi +++ b/stubs/sklearn/naive_bayes.pyi @@ -1,13 +1,15 @@ +import warnings from abc import ABCMeta, abstractmethod as abstractmethod from numbers import Integral as Integral, Real as Real from typing import ClassVar, TypeVar +import numpy as np from numpy import ndarray from scipy.special import logsumexp as logsumexp from ._typing import ArrayLike, Float, Int, MatrixLike from .base import BaseEstimator, ClassifierMixin -from .preprocessing import LabelBinarizer as LabelBinarizer, binarize, label_binarize as label_binarize +from .preprocessing import LabelBinarizer as LabelBinarizer, label_binarize as label_binarize from .utils._param_validation import Hidden as Hidden, Interval as Interval, StrOptions as StrOptions from .utils.extmath import safe_sparse_dot as safe_sparse_dot from .utils.validation import check_is_fitted as check_is_fitted, check_non_negative as check_non_negative @@ -25,9 +27,6 @@ CategoricalNB_Self = TypeVar("CategoricalNB_Self", bound=CategoricalNB) # (parts based on earlier work by Mathieu Blondel) # # License: BSD 3 clause -import warnings - -import numpy as np __all__ = [ "BernoulliNB", diff --git a/stubs/sklearn/neighbors/_classification.pyi b/stubs/sklearn/neighbors/_classification.pyi index 32cc0cfd..b63b12bc 100644 --- a/stubs/sklearn/neighbors/_classification.pyi +++ b/stubs/sklearn/neighbors/_classification.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral from typing import Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Float, Int, MatrixLike @@ -12,10 +14,6 @@ from ._base import KNeighborsMixin, NeighborsBase, RadiusNeighborsMixin RadiusNeighborsClassifier_Self = TypeVar("RadiusNeighborsClassifier_Self", bound=RadiusNeighborsClassifier) KNeighborsClassifier_Self = TypeVar("KNeighborsClassifier_Self", 
bound=KNeighborsClassifier) -import warnings - -import numpy as np - class KNeighborsClassifier(KNeighborsMixin, ClassifierMixin, NeighborsBase): outputs_2d_: bool = ... n_samples_fit_: int = ... diff --git a/stubs/sklearn/neighbors/_kde.pyi b/stubs/sklearn/neighbors/_kde.pyi index 6662c311..35b7ea6f 100644 --- a/stubs/sklearn/neighbors/_kde.pyi +++ b/stubs/sklearn/neighbors/_kde.pyi @@ -1,6 +1,8 @@ +import itertools from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.special import gammainc as gammainc @@ -19,9 +21,6 @@ from ._kd_tree import KDTree as KDTree KernelDensity_Self = TypeVar("KernelDensity_Self", bound=KernelDensity) # Author: Jake Vanderplas -import itertools - -import numpy as np VALID_KERNELS: list = ... diff --git a/stubs/sklearn/neighbors/_lof.pyi b/stubs/sklearn/neighbors/_lof.pyi index 97dbf13e..0f7546a5 100644 --- a/stubs/sklearn/neighbors/_lof.pyi +++ b/stubs/sklearn/neighbors/_lof.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Real as Real from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Int, MatrixLike @@ -17,10 +19,6 @@ LocalOutlierFactor_Self = TypeVar("LocalOutlierFactor_Self", bound=LocalOutlierF # Alexandre Gramfort # License: BSD 3 clause -import warnings - -import numpy as np - __all__ = ["LocalOutlierFactor"] class LocalOutlierFactor(KNeighborsMixin, OutlierMixin, NeighborsBase): diff --git a/stubs/sklearn/neighbors/_nca.pyi b/stubs/sklearn/neighbors/_nca.pyi index 75c2ba73..5c17b7a9 100644 --- a/stubs/sklearn/neighbors/_nca.pyi +++ b/stubs/sklearn/neighbors/_nca.pyi @@ -1,7 +1,10 @@ +import sys +import time from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, Literal, TypeVar from warnings import warn as warn +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.optimize import minimize as minimize @@ -20,11 +23,6 @@ from ..utils.validation import check_array as check_array, check_is_fitted as ch NeighborhoodComponentsAnalysis_Self = TypeVar("NeighborhoodComponentsAnalysis_Self", bound=NeighborhoodComponentsAnalysis) -import sys -import time - -import numpy as np - class NeighborhoodComponentsAnalysis(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... random_state_: RandomState = ... diff --git a/stubs/sklearn/neighbors/_nearest_centroid.pyi b/stubs/sklearn/neighbors/_nearest_centroid.pyi index 4d498873..0b76adb1 100644 --- a/stubs/sklearn/neighbors/_nearest_centroid.pyi +++ b/stubs/sklearn/neighbors/_nearest_centroid.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Real as Real from typing import Callable, ClassVar, TypeVar +import numpy as np from numpy import ndarray from scipy import sparse as sp @@ -20,10 +22,6 @@ NearestCentroid_Self = TypeVar("NearestCentroid_Self", bound=NearestCentroid) # # License: BSD 3 clause -import warnings - -import numpy as np - class NearestCentroid(ClassifierMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... 
diff --git a/stubs/sklearn/neighbors/_regression.pyi b/stubs/sklearn/neighbors/_regression.pyi index 5b07fe1e..c614f623 100644 --- a/stubs/sklearn/neighbors/_regression.pyi +++ b/stubs/sklearn/neighbors/_regression.pyi @@ -1,5 +1,7 @@ +import warnings from typing import Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Float, Int, MatrixLike @@ -20,10 +22,6 @@ RadiusNeighborsRegressor_Self = TypeVar("RadiusNeighborsRegressor_Self", bound=R # License: BSD 3 clause (C) INRIA, University of Amsterdam, # University of Copenhagen -import warnings - -import numpy as np - class KNeighborsRegressor(KNeighborsMixin, RegressorMixin, NeighborsBase): n_samples_fit_: int = ... feature_names_in_: ndarray = ... diff --git a/stubs/sklearn/neural_network/_multilayer_perceptron.pyi b/stubs/sklearn/neural_network/_multilayer_perceptron.pyi index a130bde7..45bc641a 100644 --- a/stubs/sklearn/neural_network/_multilayer_perceptron.pyi +++ b/stubs/sklearn/neural_network/_multilayer_perceptron.pyi @@ -1,8 +1,11 @@ +import warnings from abc import ABCMeta, abstractmethod from itertools import chain as chain from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, TypeVar +import numpy as np +import scipy.optimize from numpy import ndarray from numpy.random import RandomState @@ -12,7 +15,7 @@ from ..exceptions import ConvergenceWarning as ConvergenceWarning from ..metrics import accuracy_score as accuracy_score, r2_score as r2_score from ..model_selection import train_test_split as train_test_split from ..preprocessing import LabelBinarizer as LabelBinarizer -from ..utils import check_random_state as check_random_state, column_or_1d as column_or_1d, gen_batches as gen_batches, shuffle +from ..utils import check_random_state as check_random_state, column_or_1d as column_or_1d, gen_batches as gen_batches from ..utils._param_validation import Interval as Interval, Options as Options, StrOptions as StrOptions from ..utils.extmath import safe_sparse_dot as safe_sparse_dot from ..utils.metaestimators import available_if as available_if @@ -25,11 +28,6 @@ BaseMultilayerPerceptron_Self = TypeVar("BaseMultilayerPerceptron_Self", bound=B MLPRegressor_Self = TypeVar("MLPRegressor_Self", bound=MLPRegressor) MLPClassifier_Self = TypeVar("MLPClassifier_Self", bound=MLPClassifier) -import warnings - -import numpy as np -import scipy.optimize - _STOCHASTIC_SOLVERS: list = ... class BaseMultilayerPerceptron(BaseEstimator, metaclass=ABCMeta): diff --git a/stubs/sklearn/neural_network/_rbm.pyi b/stubs/sklearn/neural_network/_rbm.pyi index 9120b892..53336bb6 100644 --- a/stubs/sklearn/neural_network/_rbm.pyi +++ b/stubs/sklearn/neural_network/_rbm.pyi @@ -1,6 +1,9 @@ +import time from numbers import Integral as Integral, Real as Real from typing import ClassVar, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState from scipy.special import expit as expit @@ -20,11 +23,6 @@ BernoulliRBM_Self = TypeVar("BernoulliRBM_Self", bound=BernoulliRBM) # Lars Buitinck # License: BSD 3 clause -import time - -import numpy as np -import scipy.sparse as sp - class BernoulliRBM(ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... 
diff --git a/stubs/sklearn/pipeline.pyi b/stubs/sklearn/pipeline.pyi index 486e1eeb..5e4af5e7 100644 --- a/stubs/sklearn/pipeline.pyi +++ b/stubs/sklearn/pipeline.pyi @@ -2,6 +2,7 @@ from collections import defaultdict as defaultdict from itertools import islice as islice from typing import Any, ClassVar, Iterable, Literal, Sequence, TypeVar +import numpy as np from joblib import Memory from numpy import ndarray from pandas.core.frame import DataFrame @@ -22,8 +23,6 @@ from .utils.validation import check_is_fitted as check_is_fitted, check_memory a FeatureUnion_Self = TypeVar("FeatureUnion_Self", bound=FeatureUnion) Pipeline_Self = TypeVar("Pipeline_Self", bound=Pipeline) -import numpy as np - __all__ = ["Pipeline", "FeatureUnion", "make_pipeline", "make_union"] class Pipeline(_BaseComposition): diff --git a/stubs/sklearn/preprocessing/_data.pyi b/stubs/sklearn/preprocessing/_data.pyi index bd8f5d46..3f589d70 100644 --- a/stubs/sklearn/preprocessing/_data.pyi +++ b/stubs/sklearn/preprocessing/_data.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, TypeVar, overload +import numpy as np from numpy import ndarray from numpy.random import RandomState from pandas.core.series import Series @@ -50,10 +52,6 @@ PowerTransformer_Self = TypeVar("PowerTransformer_Self", bound=PowerTransformer) # Eric Chang # License: BSD 3 clause -import warnings - -import numpy as np - BOUNDS_THRESHOLD: float = ... __all__ = [ diff --git a/stubs/sklearn/preprocessing/_discretization.pyi b/stubs/sklearn/preprocessing/_discretization.pyi index bc912dfe..d73ca39d 100644 --- a/stubs/sklearn/preprocessing/_discretization.pyi +++ b/stubs/sklearn/preprocessing/_discretization.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral from typing import ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from pandas.core.series import Series @@ -24,10 +26,6 @@ KBinsDiscretizer_Self = TypeVar("KBinsDiscretizer_Self", bound=KBinsDiscretizer) # License: BSD -import warnings - -import numpy as np - class KBinsDiscretizer(TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/preprocessing/_encoders.pyi b/stubs/sklearn/preprocessing/_encoders.pyi index 75ab7ac0..5cdcceb4 100644 --- a/stubs/sklearn/preprocessing/_encoders.pyi +++ b/stubs/sklearn/preprocessing/_encoders.pyi @@ -1,9 +1,11 @@ +import numbers +import warnings from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, Sequence, TypeVar +import numpy as np from numpy import ndarray from pandas.core.series import Series -from scipy import sparse from scipy.sparse import spmatrix from .._typing import ArrayLike, Int, MatrixLike @@ -19,11 +21,6 @@ OneHotEncoder_Self = TypeVar("OneHotEncoder_Self", bound=OneHotEncoder) # Joris Van den Bossche # License: BSD 3 clause -import numbers -import warnings - -import numpy as np - __all__ = ["OneHotEncoder", "OrdinalEncoder"] class _BaseEncoder(TransformerMixin, BaseEstimator): ... 
diff --git a/stubs/sklearn/preprocessing/_function_transformer.pyi b/stubs/sklearn/preprocessing/_function_transformer.pyi index 656fc840..51080f02 100644 --- a/stubs/sklearn/preprocessing/_function_transformer.pyi +++ b/stubs/sklearn/preprocessing/_function_transformer.pyi @@ -1,5 +1,7 @@ +import warnings from typing import Any, Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray, ufunc from pandas.core.frame import DataFrame @@ -11,10 +13,6 @@ from ..utils.validation import check_array as check_array FunctionTransformer_Self = TypeVar("FunctionTransformer_Self", bound=FunctionTransformer) -import warnings - -import numpy as np - class FunctionTransformer(TransformerMixin, BaseEstimator): feature_names_in_: ndarray = ... n_features_in_: int = ... diff --git a/stubs/sklearn/preprocessing/_label.pyi b/stubs/sklearn/preprocessing/_label.pyi index ff45f62f..44a3404f 100644 --- a/stubs/sklearn/preprocessing/_label.pyi +++ b/stubs/sklearn/preprocessing/_label.pyi @@ -1,7 +1,12 @@ +import array +import itertools +import warnings from collections import defaultdict as defaultdict from numbers import Integral as Integral from typing import Any, ClassVar, Iterable, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from pandas.core.frame import DataFrame from scipy.sparse import csr_matrix, spmatrix @@ -25,13 +30,6 @@ LabelBinarizer_Self = TypeVar("LabelBinarizer_Self", bound=LabelBinarizer) # Hamzeh Alsalhi # License: BSD 3 clause -import array -import itertools -import warnings - -import numpy as np -import scipy.sparse as sp - __all__ = [ "label_binarize", "LabelBinarizer", diff --git a/stubs/sklearn/preprocessing/_polynomial.pyi b/stubs/sklearn/preprocessing/_polynomial.pyi index 47685cd9..60979cbf 100644 --- a/stubs/sklearn/preprocessing/_polynomial.pyi +++ b/stubs/sklearn/preprocessing/_polynomial.pyi @@ -1,7 +1,9 @@ +import collections from itertools import chain as chain, combinations as combinations, combinations_with_replacement as combinations_w_r from numbers import Integral as Integral from typing import Any, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from pandas.core.series import Series from scipy import sparse @@ -18,10 +20,6 @@ from ..utils.validation import FLOAT_DTYPES as FLOAT_DTYPES, check_is_fitted as SplineTransformer_Self = TypeVar("SplineTransformer_Self", bound=SplineTransformer) PolynomialFeatures_Self = TypeVar("PolynomialFeatures_Self", bound=PolynomialFeatures) -import collections - -import numpy as np - __all__ = [ "PolynomialFeatures", "SplineTransformer", diff --git a/stubs/sklearn/random_projection.pyi b/stubs/sklearn/random_projection.pyi index 4e33cd03..eda2f35f 100644 --- a/stubs/sklearn/random_projection.pyi +++ b/stubs/sklearn/random_projection.pyi @@ -1,7 +1,10 @@ +import warnings from abc import ABCMeta, abstractmethod from numbers import Integral as Integral, Real as Real from typing import Any, ClassVar, Literal, Sequence, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random import RandomState from scipy import linalg as linalg @@ -22,11 +25,6 @@ BaseRandomProjection_Self = TypeVar("BaseRandomProjection_Self", bound=BaseRando # Arnaud Joly # License: BSD 3 clause -import warnings - -import numpy as np -import scipy.sparse as sp - __all__ = [ "SparseRandomProjection", "GaussianRandomProjection", diff --git a/stubs/sklearn/semi_supervised/_label_propagation.pyi b/stubs/sklearn/semi_supervised/_label_propagation.pyi index 
ad3f74dd..0b591bf9 100644 --- a/stubs/sklearn/semi_supervised/_label_propagation.pyi +++ b/stubs/sklearn/semi_supervised/_label_propagation.pyi @@ -1,7 +1,9 @@ +import warnings from abc import ABCMeta, abstractmethod as abstractmethod from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from scipy import sparse as sparse from scipy.sparse import csgraph as csgraph @@ -19,12 +21,6 @@ from ..utils.validation import check_is_fitted as check_is_fitted BaseLabelPropagation_Self = TypeVar("BaseLabelPropagation_Self", bound=BaseLabelPropagation) LabelPropagation_Self = TypeVar("LabelPropagation_Self", bound=LabelPropagation) -# coding=utf8 - -import warnings - -import numpy as np - class BaseLabelPropagation(ClassifierMixin, BaseEstimator, metaclass=ABCMeta): _parameter_constraints: ClassVar[dict] = ... diff --git a/stubs/sklearn/semi_supervised/_self_training.pyi b/stubs/sklearn/semi_supervised/_self_training.pyi index 52eef913..ed6ba56e 100644 --- a/stubs/sklearn/semi_supervised/_self_training.pyi +++ b/stubs/sklearn/semi_supervised/_self_training.pyi @@ -1,6 +1,8 @@ +import warnings from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Float, Int, MatrixLike @@ -12,10 +14,6 @@ from ..utils.validation import check_is_fitted as check_is_fitted SelfTrainingClassifier_Self = TypeVar("SelfTrainingClassifier_Self", bound=SelfTrainingClassifier) -import warnings - -import numpy as np - __all__ = ["SelfTrainingClassifier"] class SelfTrainingClassifier(MetaEstimatorMixin, BaseEstimator): diff --git a/stubs/sklearn/svm/_base.pyi b/stubs/sklearn/svm/_base.pyi index 9455bf82..6e60841b 100644 --- a/stubs/sklearn/svm/_base.pyi +++ b/stubs/sklearn/svm/_base.pyi @@ -1,7 +1,10 @@ +import warnings from abc import ABCMeta, abstractmethod from numbers import Integral as Integral, Real as Real from typing import Callable, ClassVar, TypeVar +import numpy as np +import scipy.sparse as sp from numpy import ndarray from numpy.random.mtrand import RandomState @@ -23,11 +26,6 @@ from ..utils.validation import check_consistent_length as check_consistent_lengt BaseLibSVM_Self = TypeVar("BaseLibSVM_Self", bound=BaseLibSVM) -import warnings - -import numpy as np -import scipy.sparse as sp - LIBSVM_IMPL: list = ... class BaseLibSVM(BaseEstimator, metaclass=ABCMeta): diff --git a/stubs/sklearn/svm/_classes.pyi b/stubs/sklearn/svm/_classes.pyi index df733dd1..83d384b0 100644 --- a/stubs/sklearn/svm/_classes.pyi +++ b/stubs/sklearn/svm/_classes.pyi @@ -1,6 +1,7 @@ from numbers import Integral as Integral, Real as Real from typing import Any, Callable, ClassVar, Literal, Mapping, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState @@ -16,8 +17,6 @@ OneClassSVM_Self = TypeVar("OneClassSVM_Self", bound=OneClassSVM) LinearSVC_Self = TypeVar("LinearSVC_Self", bound=LinearSVC) LinearSVR_Self = TypeVar("LinearSVR_Self", bound=LinearSVR) -import numpy as np - class LinearSVC(LinearClassifierMixin, SparseCoefMixin, BaseEstimator): n_iter_: int = ... feature_names_in_: ndarray = ... 
diff --git a/stubs/sklearn/tree/_classes.pyi b/stubs/sklearn/tree/_classes.pyi index fc227592..6ea0398b 100644 --- a/stubs/sklearn/tree/_classes.pyi +++ b/stubs/sklearn/tree/_classes.pyi @@ -1,8 +1,12 @@ +import copy +import numbers +import warnings from abc import ABCMeta, abstractmethod from math import ceil as ceil from numbers import Integral as Integral, Real as Real from typing import ClassVar, Literal, Mapping, Sequence, TypeVar +import numpy as np from numpy import ndarray from numpy.random import RandomState from scipy.sparse import issparse as issparse, spmatrix @@ -43,12 +47,6 @@ DecisionTreeRegressor_Self = TypeVar("DecisionTreeRegressor_Self", bound=Decisio # # License: BSD 3 clause -import copy -import numbers -import warnings - -import numpy as np - __all__ = [ "DecisionTreeClassifier", "DecisionTreeRegressor", diff --git a/stubs/sklearn/utils/__init__.pyi b/stubs/sklearn/utils/__init__.pyi index 204f4303..f3b7a3a8 100644 --- a/stubs/sklearn/utils/__init__.pyi +++ b/stubs/sklearn/utils/__init__.pyi @@ -7,7 +7,7 @@ import warnings as warnings from collections.abc import Sequence as Sequence from contextlib import contextmanager as contextmanager, suppress as suppress from itertools import compress as compress, islice as islice -from typing import Any, Iterable, Iterator, Sequence, SupportsIndex +from typing import Any, Iterable, Iterator, SupportsIndex import numpy as np from numpy import ndarray diff --git a/stubs/sklearn/utils/_mocking.pyi b/stubs/sklearn/utils/_mocking.pyi index fe84f239..49cb0b31 100644 --- a/stubs/sklearn/utils/_mocking.pyi +++ b/stubs/sklearn/utils/_mocking.pyi @@ -1,5 +1,6 @@ from typing import Callable, Literal, Sequence, TypeVar +import numpy as np from numpy import ndarray from .._typing import ArrayLike, Int, MatrixLike @@ -8,8 +9,6 @@ from .validation import check_array as check_array, check_is_fitted as check_is_ CheckingClassifier_Self = TypeVar("CheckingClassifier_Self", bound=CheckingClassifier) -import numpy as np - class ArraySlicingWrapper: def __init__(self, array) -> None: ... def __getitem__(self, aslice): ... diff --git a/stubs/sklearn/utils/_testing.pyi b/stubs/sklearn/utils/_testing.pyi index 98ed7b7d..546c40e1 100644 --- a/stubs/sklearn/utils/_testing.pyi +++ b/stubs/sklearn/utils/_testing.pyi @@ -30,7 +30,7 @@ from subprocess import ( TimeoutExpired as TimeoutExpired, check_output as check_output, ) -from typing import Any, Callable, ClassVar, Sequence +from typing import Any, Callable, ClassVar from unittest import TestCase as TestCase import joblib diff --git a/stubs/sklearn/utils/estimator_checks.pyi b/stubs/sklearn/utils/estimator_checks.pyi index 650c8d32..aefe48ec 100644 --- a/stubs/sklearn/utils/estimator_checks.pyi +++ b/stubs/sklearn/utils/estimator_checks.pyi @@ -1,6 +1,5 @@ import pickle import re -import types import warnings from collections.abc import Generator from copy import deepcopy as deepcopy diff --git a/stubs/sympy-stubs/matrices/immutable.pyi b/stubs/sympy-stubs/matrices/immutable.pyi index b2c80ffe..475e63a5 100644 --- a/stubs/sympy-stubs/matrices/immutable.pyi +++ b/stubs/sympy-stubs/matrices/immutable.pyi @@ -24,8 +24,6 @@ class ImmutableRepMatrix(RepMatrix, MatrixExpr): def __setitem__(self, *args): ... def is_diagonalizable(self, reals_only=..., **kwargs) -> bool: ... - is_diagonalizable = ... - class ImmutableDenseMatrix(DenseMatrix, ImmutableRepMatrix): _iterable = ... _class_priority = ... 
diff --git a/stubs/vispy/app/canvas.pyi b/stubs/vispy/app/canvas.pyi
index a6eb7980..faea5de4 100644
--- a/stubs/vispy/app/canvas.pyi
+++ b/stubs/vispy/app/canvas.pyi
@@ -18,9 +18,7 @@ from ..util import config as util_config, logger
 from ..util.dpi import get_dpi
 from ..util.event import EmitterGroup, Event, WarningEmitter
 from ..util.keys import Key
-from ..util.ptime import time
 from . import Application, use_app
-from .application import Application
 
 # todo: add functions for asking about current mouse/keyboard state
 # todo: add hover enter/exit events
diff --git a/stubs/vispy/geometry/generation.pyi b/stubs/vispy/geometry/generation.pyi
index 07930653..8ad6ae4e 100644
--- a/stubs/vispy/geometry/generation.pyi
+++ b/stubs/vispy/geometry/generation.pyi
@@ -1,7 +1,6 @@
 import numpy as np
 from numpy.typing import ArrayLike, NDArray
 
-from ..geometry.meshdata import MeshData
 from .meshdata import MeshData
 
 # -*- coding: utf-8 -*-
diff --git a/stubs/vispy/gloo/buffer.pyi b/stubs/vispy/gloo/buffer.pyi
index 9412aa68..6b98c8ed 100644
--- a/stubs/vispy/gloo/buffer.pyi
+++ b/stubs/vispy/gloo/buffer.pyi
@@ -5,7 +5,6 @@ from traceback import extract_stack, format_list
 import numpy as np
 from numpy.typing import NDArray
 
-from ..gloo.buffer import DataBuffer
 from ..util import logger
 from .globject import GLObject
 
diff --git a/stubs/vispy/gloo/context.pyi b/stubs/vispy/gloo/context.pyi
index 5d988330..6a7527cb 100644
--- a/stubs/vispy/gloo/context.pyi
+++ b/stubs/vispy/gloo/context.pyi
@@ -2,7 +2,6 @@ import weakref
 from copy import deepcopy
 from typing import Any, Mapping
 
-from .. import config
 from ..util.event import Event
 from .glir import BaseGlirParser, GlirParser, GlirQueue, glir_logger
 from .wrappers import BaseGlooFunctions
diff --git a/stubs/vispy/gloo/framebuffer.pyi b/stubs/vispy/gloo/framebuffer.pyi
index c57ec827..efeda73d 100644
--- a/stubs/vispy/gloo/framebuffer.pyi
+++ b/stubs/vispy/gloo/framebuffer.pyi
@@ -2,7 +2,6 @@ from typing import Literal
 
 from numpy.typing import ArrayLike
 
-from ..gloo.framebuffer import RenderBuffer
 from .context import get_current_canvas
 from .globject import GLObject
 from .texture import Texture2D
diff --git a/stubs/vispy/gloo/program.pyi b/stubs/vispy/gloo/program.pyi
index 25dba9a6..b844fff9 100644
--- a/stubs/vispy/gloo/program.pyi
+++ b/stubs/vispy/gloo/program.pyi
@@ -3,7 +3,6 @@ import re
 import numpy as np
 from numpy.typing import ArrayLike
 
-from ..gloo.buffer import VertexBuffer
 from ..util import logger
 from .buffer import DataBuffer, IndexBuffer, VertexBuffer
 from .context import get_current_canvas
diff --git a/stubs/vispy/gloo/texture.pyi b/stubs/vispy/gloo/texture.pyi
index 56a9f312..8aeca098 100644
--- a/stubs/vispy/gloo/texture.pyi
+++ b/stubs/vispy/gloo/texture.pyi
@@ -2,7 +2,7 @@ import math
 import warnings
 
 import numpy as np
-from numpy import dtype, ndarray
+from numpy import ndarray
 from numpy.typing import NDArray
 
 from .globject import GLObject
diff --git a/stubs/vispy/io/wavefront.pyi b/stubs/vispy/io/wavefront.pyi
index 457988a2..ce0730b2 100644
--- a/stubs/vispy/io/wavefront.pyi
+++ b/stubs/vispy/io/wavefront.pyi
@@ -5,7 +5,6 @@ from os import path as op
 import numpy as np
 from numpy.typing import NDArray
 
-from ..geometry import _calculate_normals
 from ..util import logger
 
 # -*- coding: utf-8 -*-
diff --git a/stubs/vispy/plot/plotwidget.pyi b/stubs/vispy/plot/plotwidget.pyi
index acbe17d8..bb9fb86f 100644
--- a/stubs/vispy/plot/plotwidget.pyi
+++ b/stubs/vispy/plot/plotwidget.pyi
@@ -5,7 +5,6 @@ from numpy.typing import ArrayLike, NDArray
 from .. import scene
 from ..color import Color, Colormap
 from ..geometry import MeshData
-from ..geometry.meshdata import MeshData
 from ..io import read_mesh
 from ..scene.visuals import Image, LinePlot, Mesh, Polygon, Spectrogram, Volume
 from ..scene.widgets.colorbar import ColorBarWidget
diff --git a/stubs/vispy/scene/canvas.pyi b/stubs/vispy/scene/canvas.pyi
index 91d72f53..30c98564 100644
--- a/stubs/vispy/scene/canvas.pyi
+++ b/stubs/vispy/scene/canvas.pyi
@@ -11,11 +11,9 @@ from ..color import Color
 from ..gloo.context import GLContext
 from ..gloo.framebuffer import FrameBuffer
 from ..scene.node import Node
-from ..scene.widgets.widget import Widget
 from ..util import Frozen, logger
 from ..util.event import Event
 from ..util.profiler import Profiler
-from ..util.svg.color import Color
 from ..visuals.transforms import TransformSystem
 from ..visuals.visual import Visual
 from .events import SceneMouseEvent
diff --git a/stubs/vispy/scene/node.pyi b/stubs/vispy/scene/node.pyi
index 065ee566..083e8bbd 100644
--- a/stubs/vispy/scene/node.pyi
+++ b/stubs/vispy/scene/node.pyi
@@ -4,8 +4,6 @@ from numpy.typing import ArrayLike
 
 from ..util.event import EmitterGroup, Event
 from ..visuals.transforms import BaseTransform, ChainTransform, NullTransform, TransformSystem, create_transform
-from ..visuals.transforms.chain import ChainTransform
-from ..visuals.transforms.transform_system import TransformSystem
 
 # -*- coding: utf-8 -*-
 # Copyright (c) Vispy Development Team. All Rights Reserved.
diff --git a/stubs/vispy/scene/visuals.pyi b/stubs/vispy/scene/visuals.pyi
index f021d8af..26b4353b 100644
--- a/stubs/vispy/scene/visuals.pyi
+++ b/stubs/vispy/scene/visuals.pyi
@@ -6,7 +6,6 @@ import weakref
 from typing import ClassVar, Type, TypeVar
 
 from .. import visuals
-from ..scene.node import Node
 from ..visuals.filters import Alpha, PickingFilter
 from .node import Node
 
diff --git a/stubs/vispy/scene/widgets/console.pyi b/stubs/vispy/scene/widgets/console.pyi
index da28f8dc..422d8178 100644
--- a/stubs/vispy/scene/widgets/console.pyi
+++ b/stubs/vispy/scene/widgets/console.pyi
@@ -3,7 +3,6 @@ import numpy as np
 from ...color import Color
 from ...gloo import VertexBuffer
 from ...util.event import Event
-from ...util.svg.color import Color
 from ...visuals import Visual
 from .widget import Widget
 
diff --git a/stubs/vispy/util/fonts/__init__.pyi b/stubs/vispy/util/fonts/__init__.pyi
index e44a52c9..79bac94b 100644
--- a/stubs/vispy/util/fonts/__init__.pyi
+++ b/stubs/vispy/util/fonts/__init__.pyi
@@ -6,5 +6,5 @@
 
 __all__ = ["list_fonts"]
 
-from ._triage import list_fonts as list_fonts  # noqa, analysis:ignore
-from ._vispy_fonts import _vispy_fonts as _vispy_fonts  # noqa, analysis:ignore
+from ._triage import list_fonts as list_fonts  # analysis:ignore
+from ._vispy_fonts import _vispy_fonts as _vispy_fonts  # analysis:ignore
diff --git a/stubs/vispy/util/logs.pyi b/stubs/vispy/util/logs.pyi
index b87c874a..196cd630 100644
--- a/stubs/vispy/util/logs.pyi
+++ b/stubs/vispy/util/logs.pyi
@@ -4,7 +4,6 @@ import json
 import logging
 import re
 import sys
-import traceback
 from functools import partial
 
 import numpy as np
diff --git a/stubs/vispy/util/transforms.pyi b/stubs/vispy/util/transforms.pyi
index 75eed8fd..f5045766 100644
--- a/stubs/vispy/util/transforms.pyi
+++ b/stubs/vispy/util/transforms.pyi
@@ -1,7 +1,6 @@
 import math
 
 import numpy as np
-from numpy import dtype
 from numpy.typing import ArrayLike, NDArray
 
 #!/usr/bin/env python
diff --git a/stubs/vispy/visuals/_scalable_textures.pyi b/stubs/vispy/visuals/_scalable_textures.pyi
index 1c3776cd..f88d532c 100644
--- a/stubs/vispy/visuals/_scalable_textures.pyi
+++ b/stubs/vispy/visuals/_scalable_textures.pyi
@@ -4,7 +4,6 @@
 import warnings
 
 import numpy as np
-from numpy import dtype
 from numpy.typing import NDArray
 
 from ..gloo import Texture2D, Texture3D
diff --git a/stubs/vispy/visuals/collections/collection.pyi b/stubs/vispy/visuals/collections/collection.pyi
index edfb447f..b8db6ffc 100644
--- a/stubs/vispy/visuals/collections/collection.pyi
+++ b/stubs/vispy/visuals/collections/collection.pyi
@@ -1,5 +1,4 @@
 import numpy as np
-from numpy import dtype
 from numpy.typing import ArrayLike
 
 from ... import gloo
diff --git a/stubs/vispy/visuals/filters/mesh.pyi b/stubs/vispy/visuals/filters/mesh.pyi
index 8069f88a..5a703f4c 100644
--- a/stubs/vispy/visuals/filters/mesh.pyi
+++ b/stubs/vispy/visuals/filters/mesh.pyi
@@ -7,7 +7,6 @@ import numpy as np
 from numpy.typing import ArrayLike
 
 from ...color import Color
-from ...util.svg.color import Color
 from . import Filter
 
 class TextureFilter(Filter):
diff --git a/stubs/vispy/visuals/gridlines.pyi b/stubs/vispy/visuals/gridlines.pyi
index 7798987d..4e26d7bb 100644
--- a/stubs/vispy/visuals/gridlines.pyi
+++ b/stubs/vispy/visuals/gridlines.pyi
@@ -1,5 +1,4 @@
 from ..color import Color
-from ..util.svg.color import Color
 from .image import ImageVisual
 from .shaders import Function
 
diff --git a/stubs/vispy/visuals/isosurface.pyi b/stubs/vispy/visuals/isosurface.pyi
index 395ccb93..3af8e885 100644
--- a/stubs/vispy/visuals/isosurface.pyi
+++ b/stubs/vispy/visuals/isosurface.pyi
@@ -4,7 +4,6 @@ from numpy.typing import ArrayLike, NDArray
 
 from ..color import Color
 from ..geometry.isosurface import isosurface
-from ..util.svg.color import Color
 from .mesh import MeshVisual
 
 # -*- coding: utf-8 -*-
diff --git a/stubs/vispy/visuals/mesh.pyi b/stubs/vispy/visuals/mesh.pyi
index 89e3ab38..335b34df 100644
--- a/stubs/vispy/visuals/mesh.pyi
+++ b/stubs/vispy/visuals/mesh.pyi
@@ -8,10 +8,8 @@ from numpy.typing import ArrayLike
 from ..color import Color, get_colormap
 from ..color.colormap import CubeHelixColormap
 from ..geometry import MeshData
-from ..geometry.meshdata import MeshData
 from ..gloo import VertexBuffer
 from ..util.event import Event
-from ..util.svg.color import Color
 from .shaders import Function, FunctionChain
 from .visual import Visual
 
diff --git a/stubs/vispy/visuals/rectangle.pyi b/stubs/vispy/visuals/rectangle.pyi
index 5673eb09..d2ebfc4a 100644
--- a/stubs/vispy/visuals/rectangle.pyi
+++ b/stubs/vispy/visuals/rectangle.pyi
@@ -2,7 +2,6 @@ import numpy as np
 from numpy.typing import ArrayLike
 
 from ..color import Color
-from ..util.svg.color import Color
 from .polygon import PolygonVisual
 
 # -*- coding: utf-8 -*-
diff --git a/stubs/vispy/visuals/tube.pyi b/stubs/vispy/visuals/tube.pyi
index ff29dbcc..90a4f140 100644
--- a/stubs/vispy/visuals/tube.pyi
+++ b/stubs/vispy/visuals/tube.pyi
@@ -5,7 +5,6 @@ from numpy.linalg import norm
 from numpy.typing import NDArray
 
 from ..color import ColorArray
-from ..color.color_array import ColorArray
 from ..util.svg.color import Color
 from ..util.transforms import rotate
 from .mesh import MeshVisual
diff --git a/tests/run_hygiene.py b/tests/run_hygiene.py
index b8c64010..309237e6 100644
--- a/tests/run_hygiene.py
+++ b/tests/run_hygiene.py
@@ -6,17 +6,17 @@
 
 def install_requirements():
     print("\nInstalling requirements...")
-    return subprocess.run((sys.executable, "-m", "pip", "install", "--upgrade", "isort", "black"))
+    return subprocess.run((sys.executable, "-m", "pip", "install", "--upgrade", "ruff"))
 
 
-def run_isort():
-    print("\nRunning isort...")
-    return subprocess.run((sys.executable, "-m", "isort", "."))
+def run_ruff_fix():
+    print("\nRunning Ruff check --fix...")
+    return subprocess.run((sys.executable, "-m", "ruff", "--fix"))
 
 
-def run_black():
-    print("\nRunning Black...")
-    return subprocess.run((sys.executable, "-m", "black", "."))
+def run_ruff_format():
+    print("\nRunning Ruff format...")
+    return subprocess.run((sys.executable, "-m", "ruff", "format"))
 
 
 def main():
@@ -26,8 +26,8 @@ def main():
     install_requirements().check_returncode()
 
     results = (
-        run_isort(),
-        run_black(),
+        run_ruff_fix(),
+        run_ruff_format(),
     )
     if sum([result.returncode for result in results]) > 0:
         print("\nOne or more tests failed. See above for details.")
diff --git a/utils/validate_stubs.py b/utils/validate_stubs.py
index a01b225e..26f088cb 100644
--- a/utils/validate_stubs.py
+++ b/utils/validate_stubs.py
@@ -1,4 +1,5 @@
 #!/bin/python
+# ruff: noqa: F841 # TODO: plenty of unused variables. Validate what was the intent.
 from __future__ import annotations
 
 __doc__ = """Validate Stubs.
@@ -147,7 +148,7 @@ def isfrommodule(v: object, module: str, default: bool = True) -> bool:
     try:
         # Make sure it came from this module
         return v.__dict__["__module__"] == module
-    except:
+    except Exception:
         return default
 
 

From ffcd81fdb2bfd78ee37b440c6774dbaf5e831c7f Mon Sep 17 00:00:00 2001
From: Erik De Bonte
Date: Tue, 25 Mar 2025 16:53:10 -0700
Subject: [PATCH 2/3] Remove duplicate [tool.ruff.lint.per-file-ignores] header

---
 pyproject.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 830b8665..381df0be 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,7 +76,6 @@ ignore = [
     "F405", # may be undefined, or defined from star imports
 ]
 
-[tool.ruff.lint.per-file-ignores]
 # We keep docstrings in sklearn
 "stubs/sklearn/**" = ["PYI021"]
 

From 9de1af1b63e3bc0fbe03d2be11be3011739fa52e Mon Sep 17 00:00:00 2001
From: Erik De Bonte
Date: Tue, 25 Mar 2025 17:04:09 -0700
Subject: [PATCH 3/3] Remove redundant Sequence import

---
 stubs/sklearn/utils/__init__.pyi | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/stubs/sklearn/utils/__init__.pyi b/stubs/sklearn/utils/__init__.pyi
index 839651d6..6a75f13f 100644
--- a/stubs/sklearn/utils/__init__.pyi
+++ b/stubs/sklearn/utils/__init__.pyi
@@ -4,7 +4,7 @@ import platform as platform
 import struct as struct
 import timeit as timeit
 import warnings as warnings
-from collections.abc import Iterable, Iterator, Sequence, Sequence as Sequence
+from collections.abc import Iterable, Iterator, Sequence
 from contextlib import contextmanager as contextmanager, suppress as suppress
 from itertools import compress as compress, islice as islice
 from typing import Any, SupportsIndex