diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 9648f66d840d4..52ce1c4906765 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,7 +7,7 @@ on: - pull_request_target concurrency: - group: ${{ github.workflow }}-${{ github.ref }} + group: ${{ github.workflow }}-${{ github.head_ref }} cancel-in-progress: true jobs: @@ -62,7 +62,7 @@ jobs: comment: needs: lint - if: always() + if: ${{ !cancelled() }} runs-on: ubuntu-latest # We need these permissions to be able to post / update comments diff --git a/README.rst b/README.rst index 5ea84a2e9ccd0..ea3e088e5f180 100644 --- a/README.rst +++ b/README.rst @@ -80,7 +80,7 @@ scikit-learn 1.0 and later require Python 3.7 or newer. scikit-learn 1.1 and later require Python 3.8 or newer. Scikit-learn plotting capabilities (i.e., functions start with ``plot_`` and -classes end with "Display") require Matplotlib (>= |MatplotlibMinVersion|). +classes end with ``Display``) require Matplotlib (>= |MatplotlibMinVersion|). For running the examples Matplotlib >= |MatplotlibMinVersion| is required. 
A few examples require scikit-image >= |Scikit-ImageMinVersion|, a few examples require pandas >= |PandasMinVersion|, some examples require seaborn >= diff --git a/benchmarks/bench_hist_gradient_boosting_higgsboson.py b/benchmarks/bench_hist_gradient_boosting_higgsboson.py index 65be02ec0c4b9..f719f9450278b 100644 --- a/benchmarks/bench_hist_gradient_boosting_higgsboson.py +++ b/benchmarks/bench_hist_gradient_boosting_higgsboson.py @@ -25,6 +25,7 @@ parser.add_argument("--no-predict", action="store_true", default=False) parser.add_argument("--cache-loc", type=str, default="/tmp") parser.add_argument("--no-interactions", type=bool, default=False) +parser.add_argument("--max-features", type=float, default=1) args = parser.parse_args() HERE = os.path.dirname(__file__) @@ -36,6 +37,7 @@ subsample = args.subsample lr = args.learning_rate max_bins = args.max_bins +max_features = args.max_features @m.cache @@ -104,6 +106,7 @@ def predict(est, data_test, target_test): random_state=0, verbose=1, interaction_cst=interaction_cst, + max_features=max_features, ) fit(est, data_train, target_train, "sklearn") predict(est, data_test, target_test) diff --git a/build_tools/azure/py38_conda_defaults_openblas_linux-64_conda.lock b/build_tools/azure/py38_conda_defaults_openblas_linux-64_conda.lock index 0f1eab7e067c8..00770a9c68641 100644 --- a/build_tools/azure/py38_conda_defaults_openblas_linux-64_conda.lock +++ b/build_tools/azure/py38_conda_defaults_openblas_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 59b748d4b41a3e69462c0c657961aebaa5b15bc3caad670dff038296fa151c6e +# input_hash: 8b8c2bedfa05e30194cd2410b5d2ff2d99cfb73ae610329657de8c0e0d51ec04 @EXPLICIT https://repo.anaconda.com/pkgs/main/linux-64/_libgcc_mutex-0.1-main.conda#c3473ff8bdb3d124ed5ff11ec380d6f9 https://repo.anaconda.com/pkgs/main/linux-64/blas-1.0-openblas.conda#9ddfcaef10d79366c90128f5dc444be8 diff --git a/build_tools/azure/py38_conda_forge_mkl_win-64_conda.lock b/build_tools/azure/py38_conda_forge_mkl_win-64_conda.lock index 73378dc54835e..514e7ed1dd082 100644 --- a/build_tools/azure/py38_conda_forge_mkl_win-64_conda.lock +++ b/build_tools/azure/py38_conda_forge_mkl_win-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: win-64 -# input_hash: 4ac1abe3eccdd48c0d50af8de11dd3c144459b84f500eae8f575232e0be3a07d +# input_hash: bea34b6b198d6e378e140c30b6362cfd4a721a94eb9883a66cf2a3fbf426f590 @EXPLICIT https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2023.7.22-h56e8100_0.conda#b1c2327b36f1a25d96f2039b0d3e3739 https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2023.2.0-h57928b3_50497.conda#a401f3cae152deb75bbed766a90a6312 @@ -14,7 +14,7 @@ https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.36.32532-hdcecf7f_ https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2#4289d80fb4d272f1f3b56cfe87ac90bd https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h64f974e_17.conda#67ff6791f235bb606659bf2a5c169191 https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.36.32532-h05e6639_17.conda#4618046c39f7c81861e53ded842e738a -https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h8ffe710_4.tar.bz2#7c03c66026944073040cb19a4f3ec3c9 +https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-hcfcfb64_5.conda#26eb8ca6ea332b675e11704cce84a3be https://conda.anaconda.org/conda-forge/win-64/icu-73.2-h63175ca_0.conda#0f47d9e3192d9e09ae300da0d28e0f56 
https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2#1900cb3cab5055833cfddb0ba233b074 https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.1.0-hcfcfb64_1.conda#f77f319fb82980166569e1280d5b2864 @@ -57,7 +57,7 @@ https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#3427 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.5-py38hb1fd069_1.conda#19a5ecd89c16b22db1d1830e93392aab https://conda.anaconda.org/conda-forge/win-64/libclang-15.0.7-default_h77d9078_3.conda#71c8b6249c9e9e18b3aec705e95c1040 -https://conda.anaconda.org/conda-forge/win-64/libglib-2.78.0-he8f3873_0.conda#25f5b3502a82ac425c72c3bc0efbecb5 +https://conda.anaconda.org/conda-forge/win-64/libglib-2.78.1-he8f3873_0.conda#7022abdf53daa6566caebbe1c2d328ae https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.9.3-default_haede6df_1009.conda#87da045f6d26ce9fe20ad76a18f6a18a https://conda.anaconda.org/conda-forge/win-64/libtiff-4.6.0-h6e2ebb7_2.conda#08d653b74ee2dec0131ad4259ffbb126 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 @@ -82,35 +82,34 @@ https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/win-64/brotli-1.1.0-hcfcfb64_1.conda#f47f6db2528e38321fb00ae31674c133 https://conda.anaconda.org/conda-forge/win-64/coverage-7.3.2-py38h91455d4_0.conda#6d4fd016918358448d9055caa59cb616 -https://conda.anaconda.org/conda-forge/win-64/glib-tools-2.78.0-h12be248_0.conda#466538fb59949a3c015b55671dc7e52c -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda#48b0d98e0c0ec810d3ccc2a0926c8c0e 
+https://conda.anaconda.org/conda-forge/win-64/glib-tools-2.78.1-h12be248_0.conda#7d9280579328b01cddbb1c4e91ca2df1 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/win-64/lcms2-2.15-h67d730c_3.conda#f92e86636451e3f6cea03e395346fa90 https://conda.anaconda.org/conda-forge/win-64/libxcb-1.15-hcd874cb_0.conda#090d91b69396f14afef450c285f9758c https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.0-h3d672ee_3.conda#45a9628a04efb6fc326fff0a8f47b799 https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2#56cd9fe388baac0e90c7149cfac95b60 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/win-64/sip-6.7.12-py38hd3f51b4_0.conda#8234c36685a08c47f11865ffc7ed36a9 https://conda.anaconda.org/conda-forge/win-64/tbb-2021.10.0-h91493d7_2.conda#5b8c97cf8f0e81d6c22c0bda9978790d -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/win-64/fonttools-4.44.0-py38h91455d4_0.conda#c9d185ddf7b36fbc3b95649dcde7c0cc -https://conda.anaconda.org/conda-forge/win-64/glib-2.78.0-h12be248_0.conda#1ed98e4da48693079f2fe83298c5b0ac -https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 
+https://conda.anaconda.org/conda-forge/win-64/fonttools-4.44.1-py38h91455d4_0.conda#bf67e8137f9e6985f14d65ccad3fd01a +https://conda.anaconda.org/conda-forge/win-64/glib-2.78.1-h12be248_0.conda#55a9176ace9721d0b89cbf99f78e6b04 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 https://conda.anaconda.org/conda-forge/win-64/mkl-2023.2.0-h6a75c08_50496.conda#03da367d935ecf4d3e4005cf705d0e21 https://conda.anaconda.org/conda-forge/win-64/pillow-10.1.0-py38hc375fad_0.conda#d671ae9247896e544d8b2df9feaf1f89 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 https://conda.anaconda.org/conda-forge/win-64/pyqt5-sip-12.12.2-py38hd3f51b4_5.conda#32974507018705cbe32a392473cd6ec1 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda#270e71c14d37074b1d066ee21cf0c4a6 -https://conda.anaconda.org/conda-forge/win-64/gstreamer-1.22.6-hb4038d2_2.conda#e6d2009457a1e5d9653fd06873a7a367 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/win-64/gstreamer-1.22.7-hb4038d2_0.conda#9b2f6622276ed34d20eb36e6a4ce2f50 https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-19_win64_mkl.conda#4f8a1a63cfbf74bc7b2813d9c6c205be https://conda.anaconda.org/conda-forge/win-64/mkl-devel-2023.2.0-h57928b3_50496.conda#381330681b4506191e1a71699ea9e6fc https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b 
-https://conda.anaconda.org/conda-forge/win-64/gst-plugins-base-1.22.6-h001b923_2.conda#20e57b894392cb792cdf5c501b35a8f6 +https://conda.anaconda.org/conda-forge/win-64/gst-plugins-base-1.22.7-h001b923_0.conda#e4b56ad6c21e861456f32bfc79b43c4b https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-19_win64_mkl.conda#1b9ede5cff953aa1a5f4d9f8ec644972 https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-19_win64_mkl.conda#574e6e8bcc85df2885eb2a87d31ae005 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.0-pyhd8ed1ab_0.conda#134b2b57b7865d2316a7cce1915a51ed diff --git a/build_tools/azure/py38_conda_forge_openblas_ubuntu_2204_linux-64_conda.lock b/build_tools/azure/py38_conda_forge_openblas_ubuntu_2204_linux-64_conda.lock index 950c663feb0a9..a9230278c5d38 100644 --- a/build_tools/azure/py38_conda_forge_openblas_ubuntu_2204_linux-64_conda.lock +++ b/build_tools/azure/py38_conda_forge_openblas_ubuntu_2204_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: c5e4221207552c628ae5d6fd5d1a639c5fa48e17df39e521800953029c61bb2a +# input_hash: ee5e87aee633c2d399766755db0f456f4dc56276e6418fe55fb281b6a8083318 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,15 +9,15 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 
-https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-4_cp38.conda#ea6b353536f42246cd130c7fef1285cf https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2#562b26ba2e19059551a811e72ab7f793 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff @@ -28,7 +28,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d 
https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 @@ -60,7 +60,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b @@ -77,14 +77,14 @@ https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b8 
https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-hebfc3b9_0.conda#ddd09e8904fde46b85f41896621803e6 https://conda.anaconda.org/conda-forge/linux-64/libhiredis-1.0.2-h2cc385e_0.tar.bz2#b34907d3a81a3cd8095ee83d174c074a https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.4-h4dfa4b3_0.conda#c560d4ecf0d3536108aa4de0222942d3 +https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.5-h4dfa4b3_0.conda#799291c22ec87a0c86c0a4fc0e22b1c5 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.8.18-hd12c33a_0_cpython.conda#334cb629e10d209f1c17630f653168b1 @@ -105,7 +105,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#e https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_0.conda#5b4fe75a68cbb95350f47bb9a707b53b https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py38h7f3f72f_1.conda#b66dcd4f710628fc5563ad56f02ca89b @@ -113,7 +113,7 @@ https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e966 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 
https://conda.anaconda.org/conda-forge/linux-64/openblas-0.3.24-pthreads_h7a3da1a_0.conda#ebe8e905b06dfc5b4b40642d34b1d2f3 @@ -140,33 +140,32 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.co https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.0-py38h01eb140_0.conda#5139d0f119ea132235a122cbf3f0cc1a -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda#48b0d98e0c0ec810d3ccc2a0926c8c0e +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.1-py38h01eb140_0.conda#ad18269c2701e49925eacf98d457067f +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_0.conda#43c633c015a361610ee4db2e95f8a517 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_openblas.conda#9f100edf65436e3eabc2a51fc00b2c37 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py38ha43c96d_0.conda#67ca17c651f86159a3b8ed1132d97c12 
+https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py38h17151c0_0.conda#ae2edf79b63f97071aea203b22a6774a -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda#270e71c14d37074b1d066ee21cf0c4a6 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c -https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 https://conda.anaconda.org/conda-forge/linux-64/liblapacke-3.9.0-19_linux64_openblas.conda#685e99d3214f5ac9d1ec6b37983985a6 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.4-py38h59b608b_0.conda#8c3e050afeeb2b32575bdb8955cc67b2 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 
https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py38h17151c0_5.conda#3d66f5c4a0af2713f60ec11bf1230136 https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/linux-64/blas-devel-3.9.0-19_linux64_openblas.conda#96bca12f1b7c48298dd1abf3e11121af https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.1.1-py38h7f3f72f_1.conda#18ae206b2d413e5cc8d2bb8ab48aa165 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.3-py38h01efb38_1.conda#01a2b6144e65631e2fe24e569d0738ee https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.0-pyhd8ed1ab_0.conda#134b2b57b7865d2316a7cce1915a51ed https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e diff --git a/build_tools/azure/pylatest_conda_forge_mkl_linux-64_conda.lock b/build_tools/azure/pylatest_conda_forge_mkl_linux-64_conda.lock index 5a03652d6ac33..201ff0a7248e0 100644 --- a/build_tools/azure/pylatest_conda_forge_mkl_linux-64_conda.lock +++ b/build_tools/azure/pylatest_conda_forge_mkl_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 7aa55d66dfbd0f6267a9aff8c750d1e9f42cd339726c8f9c4d1299341b064849 +# input_hash: 06a1abd91fe199d0e020e5ac38efba4bc3d4a7752e01cf91e4b046c5d0ba8a93 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,17 +9,17 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda#d786502c97404c94d7d58d258a445a65 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2#562b26ba2e19059551a811e72ab7f793 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f 
https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.0-hd590300_0.conda#71b89db63b5b504e7afc8ad901172e1e -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.21.0-hd590300_0.conda#c06fa0440048270817b9e3142cc661bf https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2#cddaf2c63ea4a5901cf09524c490ecdc @@ -35,7 +35,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 @@ -78,9 +78,9 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.55.1-h47da74e_0.conda#a802251d1eaeeae041c867faf0f94fa8 +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-3.21.12-hfc55251_2.conda#e3a7d4ba09b8dc939b98fef55f539220 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b @@ -101,8 +101,8 @@ https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.32-he9a53bd_1.cond https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 
-https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-hebfc3b9_0.conda#ddd09e8904fde46b85f41896621803e6 https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.54.3-hb20ce57_0.conda#7af7c59ab24db007dfd82e0a3a343f66 https://conda.anaconda.org/conda-forge/linux-64/libhiredis-1.0.2-h2cc385e_0.tar.bz2#b34907d3a81a3cd8095ee83d174c074a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.9.3-default_h554bfaf_1009.conda#f36ddc11ca46958197a45effdd286e45 @@ -110,7 +110,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.cond https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.18.1-h8fd135c_2.conda#bbf65f7688512872f063810623b755dc https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.4-h4dfa4b3_0.conda#c560d4ecf0d3536108aa4de0222942d3 +https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.5-h4dfa4b3_0.conda#799291c22ec87a0c86c0a4fc0e22b1c5 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/orc-1.9.0-h2f23424_1.conda#9571eb3eb0f7fe8b59956a7786babbcd @@ -133,14 +133,14 @@ https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#e https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 
https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_0.conda#5b4fe75a68cbb95350f47bb9a707b53b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.4.0-hca28451_0.conda#1158ac1d2613b28685644931f11ee807 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 @@ -167,8 +167,8 @@ https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.3-h28f7589_1.cond https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.9.3-hb447be9_1.conda#c520669eb0be9269a5f0d8ef62531882 
https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.0-py311h459d7ec_0.conda#f12f4d7361178f94df1052d6b63fd868 -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.1-py311h459d7ec_0.conda#6d9f40aa7c1fcd9cef01fddb5caed228 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_0.conda#43c633c015a361610ee4db2e95f8a517 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.12.0-hac9eb74_1.conda#0dee716254497604762957076ac76540 @@ -181,14 +181,14 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py311hb755f60_0.conda#02336abab4cb5dd794010ef53c54bd09 https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.14-hf3aad02_1.conda#a968ffa7e9fe0c257628033d393e512f https://conda.anaconda.org/conda-forge/linux-64/blas-1.0-mkl.tar.bz2#349aef876b1d8c9dccae01de20d5b385 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_mkl.tar.bz2#85f61af03fd291dae33150ffe89dc09a https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py311hb755f60_5.conda#e4d262cc3600e70b505a6761d29f6207 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.21.0-hb942446_5.conda#07d92ed5403ad7b5c66ffd7d5b8f7e57 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_mkl.tar.bz2#361bf757b95488de76c4f123805742d3 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_mkl.tar.bz2#a2f166748917d6d6e4707841ca1f519e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e @@ -197,8 +197,8 @@ https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py311h64a7726_0.con https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 https://conda.anaconda.org/conda-forge/linux-64/libarrow-12.0.1-hb87d912_8_cpu.conda#3f3b11398fe79b578e3c44dd00a44e4a -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.2-py311h320fe9a_0.conda#c36a53056129665b34db419b6af3d230 -https://conda.anaconda.org/conda-forge/linux-64/polars-0.19.12-py311hf926cbc_0.conda#a0ae30db02c59baae219ffa8f55d154a +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py311h320fe9a_0.conda#3ea3486e16d559dfcb539070ed330a1e 
+https://conda.anaconda.org/conda-forge/linux-64/polars-0.19.13-py311hf926cbc_0.conda#d0b679975a62d186f00425c175980e84 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py311hf0fb5b6_5.conda#ec7e45bc76d9d0b69a74a2075932b8e8 https://conda.anaconda.org/conda-forge/linux-64/pytorch-1.13.1-cpu_py311h410fd25_1.conda#ddd2fadddf89e3dc3d541a2537fce010 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py311h64a7726_1.conda#e4b4d3b764e2d029477d0db88248a8b5 diff --git a/build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock b/build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock index 9d62ddba73b30..a46091b214377 100644 --- a/build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock +++ b/build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 223cf367742008b437f38ff4642c0e70494f665cf9434d4da5c6483c757397fd +# input_hash: 66cbc7b263fbf4db3cc89cc53f522739390cbf324ab81cff43bff8bd3630c49d @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -9,17 +9,17 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/mkl-include-2023.2.0-h84fe81f_50496.conda#7af9fd0b2d7219f4a4200a34561340f6 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-4_cp312.conda#dccc2d142812964fcc6abdc97b672dff https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2#562b26ba2e19059551a811e72ab7f793 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff @@ -30,7 +30,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1 
https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 @@ -62,7 +62,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25c https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b @@ -79,14 +79,14 @@ 
https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b8 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-hebfc3b9_0.conda#ddd09e8904fde46b85f41896621803e6 https://conda.anaconda.org/conda-forge/linux-64/libhiredis-1.0.2-h2cc385e_0.tar.bz2#b34907d3a81a3cd8095ee83d174c074a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.9.3-default_h554bfaf_1009.conda#f36ddc11ca46958197a45effdd286e45 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.4-h4dfa4b3_0.conda#c560d4ecf0d3536108aa4de0222942d3 +https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.5-h4dfa4b3_0.conda#799291c22ec87a0c86c0a4fc0e22b1c5 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 
https://conda.anaconda.org/conda-forge/linux-64/python-3.12.0-hab00c5b_0_cpython.conda#7f97faab5bebcc2580f4f299285323da @@ -105,13 +105,13 @@ https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#e https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.3-pyhd8ed1ab_0.conda#e6518222753f519e911e83136d2158d9 https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_0.conda#5b4fe75a68cbb95350f47bb9a707b53b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py312h8572e83_1.conda#c1e71f2bc05d8e8e033aefac2c490d05 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-h488ebb8_3.conda#128c25b7fe6a25286a48f3a6a9b5b6f3 @@ -134,8 +134,8 @@ 
https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.40-hd590300_0 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.0-py312h98912ed_0.conda#285a46f34e2b5b357e5999b1e699714f -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.1-py312h98912ed_0.conda#0d427102d9a93752d2f4e4516137c88c +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_0.conda#43c633c015a361610ee4db2e95f8a517 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_h7634d5b_3.conda#0922208521c0463e690bbaebba7eb551 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.conda#d8edd0e29db6fb6b6988e1a28d35d994 @@ -145,13 +145,13 @@ https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_ https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py312h30efb56_0.conda#32633871002ee9902f747d2236e0d122 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c 
+https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_mkl.conda#ec166f71f3d4c92ef1a714717b9b22eb https://conda.anaconda.org/conda-forge/linux-64/mkl-devel-2023.2.0-ha770c72_50496.conda#3b4c50e31ff098b18a450e4f5f860adf https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py312h30efb56_5.conda#8a2a122dc4fe14d8cff38f1cf426381f https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_mkl.conda#2468764de45bdcd1b2baf35a93312ca8 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-19_linux64_mkl.conda#aaa1703ee4c30735dbfeabc8287ce81e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e @@ -160,7 +160,7 @@ https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py312heda63a1_0.con https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h82b777d_17.conda#4f01e33dbb406085a16a2813ab067e95 https://conda.anaconda.org/conda-forge/linux-64/blas-devel-3.9.0-19_linux64_mkl.conda#7a04ef5f2294b05fcece16e4a1f04d7a https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py312h8572e83_0.conda#b6249daaaf4577e6f72d95fc4ab767c6 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.2-py312hfb8ada1_0.conda#53c435c54c12d902c6e0c7979711fac4 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py312hfb8ada1_0.conda#ef74af58f348d62a35c58e82aef5f868 
https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.9-py312h949fe66_5.conda#f6548a564e2d01b2a42020259503945b https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py312heda63a1_1.conda#c89108be4deb842ced096623aa932fd0 https://conda.anaconda.org/conda-forge/linux-64/blas-2.119-mkl.conda#923c56d369fe68c2da0d554d3e5edc2c diff --git a/build_tools/azure/pylatest_conda_forge_mkl_osx-64_conda.lock b/build_tools/azure/pylatest_conda_forge_mkl_osx-64_conda.lock index 16120296b7f6f..9755ba3479ed4 100644 --- a/build_tools/azure/pylatest_conda_forge_mkl_osx-64_conda.lock +++ b/build_tools/azure/pylatest_conda_forge_mkl_osx-64_conda.lock @@ -1,8 +1,8 @@ # Generated by conda-lock. # platform: osx-64 -# input_hash: 02abef27514db5e5119c3cdc253e84a06374c1b308495298b46bdb14dcc52ae9 +# input_hash: 1c061d421872c406aaefcd63aa475f5decae7806dd07d710dc5d742da72de61a @EXPLICIT -https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h0d85af4_4.tar.bz2#37edc4e6304ca87316e160f5ca0bd1b5 +https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h10d778d_5.conda#6097a6ca9ada32699b5fc4312dd6ef18 https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2023.7.22-h8857fd0_0.conda#bf2c54c18997bf3542af074c10191771 https://conda.anaconda.org/conda-forge/osx-64/icu-73.2-hf5e326d_0.conda#5cc301d759ec03f28328428e28f65591 https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.1.0-h0dc2134_1.conda#9e6c31441c9aa24e41ace40d6151aab6 @@ -15,7 +15,7 @@ https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.17-hac89ed1_0.tar.bz2#6 https://conda.anaconda.org/conda-forge/osx-64/libjpeg-turbo-3.0.0-h0dc2134_1.conda#72507f8e3961bc968af17435060b6dd6 https://conda.anaconda.org/conda-forge/osx-64/libwebp-base-1.3.2-h0dc2134_0.conda#4e7e9d244e87d66c18d36894fd6a8ae5 https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.2.13-h8a1eda9_5.conda#4a3ad23f6e16f99c04e166767193d700 -https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-17.0.4-hb6ac08f_0.conda#31391b68245bc68504169e98ffaf2c44 
+https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-17.0.5-hb6ac08f_0.conda#8ca3784280b7cb54163a46e8a918fb43 https://conda.anaconda.org/conda-forge/osx-64/mkl-include-2023.2.0-h6bab518_50500.conda#835abb8ded5e26f23ea6996259c7972e https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2#addd19059de62181cd11ae8f4ef26084 https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda#87201ac4314b911b74197e588cca3639 @@ -24,7 +24,7 @@ https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h0dc2134_0.conda#9566b4c29274125b0266d0177b5eb97b https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2#86ac76d6bf1cbb9621943eb3bd9ae36e https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2#a72f9d4ea13d55d745ff1ed594747f10 -https://conda.anaconda.org/conda-forge/osx-64/gmp-6.2.1-h2e338ed_0.tar.bz2#dedc96914428dae572a39e69ee2a392f +https://conda.anaconda.org/conda-forge/osx-64/gmp-6.3.0-h93d8f39_0.conda#a4ffd4bfd88659cbecbd36b61594bf0d https://conda.anaconda.org/conda-forge/osx-64/isl-0.25-hb486fe8_0.tar.bz2#45a9a46c78c0ea5c275b535f7923bde3 https://conda.anaconda.org/conda-forge/osx-64/lerc-4.0.0-hb486fe8_0.tar.bz2#f9d6a4c82889d5ecedec1d90eb673c55 https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.1.0-h0dc2134_1.conda#9ee0bab91b2ca579e10353738be36063 @@ -96,7 +96,7 @@ https://conda.anaconda.org/conda-forge/osx-64/cctools-973.0.1-hd9ad811_15.conda# https://conda.anaconda.org/conda-forge/osx-64/clang-15.0.7-h694c41f_3.conda#8a48d466e519b8db7dda7c5d27cc1d31 https://conda.anaconda.org/conda-forge/osx-64/contourpy-1.2.0-py312hbf0bb39_0.conda#74190e06053cda7139a0cb71f3e618fd https://conda.anaconda.org/conda-forge/osx-64/coverage-7.3.2-py312h104f124_0.conda#1e98139a6dc6e29569dff47a1895a40c 
-https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.44.0-py312h41838bb_0.conda#1e62e0d32f5e39d36d328f28738905e9 +https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.44.1-py312h41838bb_0.conda#2ad934add81c345cf93ffec5bad918b5 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 @@ -104,7 +104,7 @@ https://conda.anaconda.org/conda-forge/osx-64/scipy-1.11.3-py312h2c2f0bb_1.conda https://conda.anaconda.org/conda-forge/osx-64/blas-2.119-mkl.conda#eabf5c1e4f1e9e2eee06e533618a09b0 https://conda.anaconda.org/conda-forge/osx-64/clangxx-15.0.7-default_hdb78580_3.conda#58df9ff86fefc7684670be729b41412f https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.8.1-py312h1fe5000_0.conda#f384d9472c380eea6a14ab5dc6cb77c4 -https://conda.anaconda.org/conda-forge/osx-64/pandas-2.1.2-py312haf8ecfc_0.conda#33f2969aade3bd3b4547213076407ca2 +https://conda.anaconda.org/conda-forge/osx-64/pandas-2.1.3-py312haf8ecfc_0.conda#d96a4b2b3dc4ae11f7fc8b736a12c3fb https://conda.anaconda.org/conda-forge/osx-64/pyamg-5.0.1-py312h674694f_1.conda#e5b9c0f8b5c367467425ff34353ef761 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 diff --git a/build_tools/azure/pylatest_conda_mkl_no_openmp_osx-64_conda.lock b/build_tools/azure/pylatest_conda_mkl_no_openmp_osx-64_conda.lock index ad4a5b30ba6ef..b6f40225a2556 100644 --- a/build_tools/azure/pylatest_conda_mkl_no_openmp_osx-64_conda.lock +++ b/build_tools/azure/pylatest_conda_mkl_no_openmp_osx-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: osx-64 -# input_hash: 03f7604aefb9752d2367c457bdf4e4923158be96db35ac0dd1d5dc60a9981cd1 +# input_hash: c8fdd08f1a9a3d91ec09f211e4444ef33921a111f684fa63428591be5ca1eb68 @EXPLICIT https://repo.anaconda.com/pkgs/main/osx-64/blas-1.0-mkl.conda#cb2c87e85ac8e0ceae776d26d4214c8a https://repo.anaconda.com/pkgs/main/osx-64/bzip2-1.0.8-h1de35cc_0.conda#19fcb113b170fe2a0be96b47801fed7d @@ -18,7 +18,7 @@ https://repo.anaconda.com/pkgs/main/noarch/tzdata-2023c-h04d1e81_0.conda#29db02a https://repo.anaconda.com/pkgs/main/osx-64/xz-5.4.2-h6c40b1e_0.conda#5e546d3c9765b4441e511804d58f6e3f https://repo.anaconda.com/pkgs/main/osx-64/zlib-1.2.13-h4dc903c_0.conda#d0202dd912bfb45d3422786531717882 https://repo.anaconda.com/pkgs/main/osx-64/ccache-3.7.9-hf120daa_0.conda#a01515a32e721c51d631283f991bc8ea -https://repo.anaconda.com/pkgs/main/osx-64/intel-openmp-2023.1.0-ha357a0b_43547.conda#aa6031369dd8c8cc6b2f393a0b2d9f0c +https://repo.anaconda.com/pkgs/main/osx-64/intel-openmp-2023.1.0-ha357a0b_43548.conda#ba8a89ffe593eb88e4c01334753c40c3 https://repo.anaconda.com/pkgs/main/osx-64/lerc-3.0-he9d5cce_0.conda#aec2c3dbef836849c9260f05be04f3db https://repo.anaconda.com/pkgs/main/osx-64/libbrotlidec-1.0.9-hca72f7f_7.conda#b85983951745cc666d9a1b42894210b2 https://repo.anaconda.com/pkgs/main/osx-64/libbrotlienc-1.0.9-hca72f7f_7.conda#e306d7a1599202a7c95762443f110832 @@ -32,7 +32,7 @@ https://repo.anaconda.com/pkgs/main/osx-64/tk-8.6.12-h5d9f67b_0.conda#047f0af548 https://repo.anaconda.com/pkgs/main/osx-64/brotli-bin-1.0.9-hca72f7f_7.conda#110bdca1a20710820e61f7fa3047f737 https://repo.anaconda.com/pkgs/main/osx-64/freetype-2.12.1-hd8bbffd_0.conda#1f276af321375ee7fe8056843044fa76 https://repo.anaconda.com/pkgs/main/osx-64/libgfortran-5.0.0-11_3_0_hecd8cb5_28.conda#2eb13b680803f1064e53873ae0aaafb3 -https://repo.anaconda.com/pkgs/main/osx-64/mkl-2023.1.0-h8e150cf_43559.conda#f5a09d45a003f817d5c43935e20ca0c8 
+https://repo.anaconda.com/pkgs/main/osx-64/mkl-2023.1.0-h8e150cf_43560.conda#85d0f3431dd5c6ae44f8725fdd3d3e59 https://repo.anaconda.com/pkgs/main/osx-64/sqlite-3.41.2-h6c40b1e_0.conda#6947a501943529c7536b7e4ba53802c1 https://repo.anaconda.com/pkgs/main/osx-64/zstd-1.5.5-hc035e20_0.conda#5e0b7ddb1b7dc6b630e1f9a03499c19c https://repo.anaconda.com/pkgs/main/osx-64/brotli-1.0.9-hca72f7f_7.conda#68e54d12ec67591deb2ffd70348fb00f diff --git a/build_tools/azure/pylatest_pip_openblas_pandas_linux-64_conda.lock b/build_tools/azure/pylatest_pip_openblas_pandas_linux-64_conda.lock index 28afddfcd8897..5e7b1f0e64461 100644 --- a/build_tools/azure/pylatest_pip_openblas_pandas_linux-64_conda.lock +++ b/build_tools/azure/pylatest_pip_openblas_pandas_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: d01d23bd27bcd50d2b3643492f966c8e390822d72b69f31bf66c2fe98a265a4c +# input_hash: 51f374bd6034467b82c190398f401712163436d283f9536c2e5a1d07e9f7b1e2 @EXPLICIT https://repo.anaconda.com/pkgs/main/linux-64/_libgcc_mutex-0.1-main.conda#c3473ff8bdb3d124ed5ff11ec380d6f9 https://repo.anaconda.com/pkgs/main/linux-64/ca-certificates-2023.08.22-h06a4308_0.conda#243d5065a09a3e85ab888c05f5b6445a @@ -32,7 +32,7 @@ https://repo.anaconda.com/pkgs/main/linux-64/pip-23.3-py39h06a4308_0.conda#25664 # pip docutils @ https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl#sha256=96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 # pip exceptiongroup @ https://files.pythonhosted.org/packages/ad/83/b71e58666f156a39fb29417e4c8ca4bc7400c0dd4ed9e8842ab54dc8c344/exceptiongroup-1.1.3-py3-none-any.whl#sha256=343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3 # pip execnet @ 
https://files.pythonhosted.org/packages/e8/9c/a079946da30fac4924d92dbc617e5367d454954494cf1e71567bcc4e00ee/execnet-2.0.2-py3-none-any.whl#sha256=88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41 -# pip fonttools @ https://files.pythonhosted.org/packages/19/5e/6bc27a56dacc884a4a9af2f2ee6b8df6cd7ac34d15b930ba410a9c47a329/fonttools-4.44.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=46c79af80a835410874683b5779b6c1ec1d5a285e11c45b5193e79dd691eb111 +# pip fonttools @ https://files.pythonhosted.org/packages/0c/f8/0af505fec8344c0f7d5bb58ef7279baa6cdee1f07569da4bf60932380bce/fonttools-4.44.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=caf014bcc24673b681e7f768180f063691b301e2eccd9a53c43b5eebfb448bd8 # pip idna @ https://files.pythonhosted.org/packages/fc/34/3030de6f1370931b9dbb4dad48f6ab1015ab1d32447850b9fc94e60097be/idna-3.4-py3-none-any.whl#sha256=90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # pip imagesize @ https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl#sha256=0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b # pip iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 @@ -41,9 +41,9 @@ https://repo.anaconda.com/pkgs/main/linux-64/pip-23.3-py39h06a4308_0.conda#25664 # pip lazy-loader @ https://files.pythonhosted.org/packages/a1/c3/65b3814e155836acacf720e5be3b5757130346670ac454fee29d3eda1381/lazy_loader-0.3-py3-none-any.whl#sha256=1e9e76ee8631e264c62ce10006718e80b2cfc74340d17d1031e0f84af7478554 # pip markupsafe @ 
https://files.pythonhosted.org/packages/de/63/cb7e71984e9159ec5f45b5e81e896c8bdd0e45fe3fc6ce02ab497f0d790e/MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e # pip networkx @ https://files.pythonhosted.org/packages/d5/f0/8fbc882ca80cf077f1b246c0e3c3465f7f415439bdea6b899f6b19f61f70/networkx-3.2.1-py3-none-any.whl#sha256=f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2 -# pip numpy @ https://files.pythonhosted.org/packages/89/ac/53100546dcd9aa400a73c7770b13cad9a3b18bf83433499e36b5efe9850f/numpy-1.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=a5b411040beead47a228bde3b2241100454a6abde9df139ed087bd73fc0a4908 +# pip numpy @ https://files.pythonhosted.org/packages/2f/75/f007cc0e6a373207818bef17f463d3305e9dd380a70db0e523e7660bf21f/numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818 # pip packaging @ https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl#sha256=8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 -# pip pillow @ https://files.pythonhosted.org/packages/9f/3a/ada56d489446dbb7679d242bfd7bb159cee8a7989c34dd34045103d5280d/Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219 +# pip pillow @ https://files.pythonhosted.org/packages/5c/dc/acccca38a87272cb2eed372f112595439418dfb6119770b04dc06d3b78bd/Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl#sha256=b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd # pip pluggy @ https://files.pythonhosted.org/packages/05/b8/42ed91898d4784546c5f06c60506400548db3f7a4b3fb441cba4e5c17952/pluggy-1.3.0-py3-none-any.whl#sha256=d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7 # pip py @ 
https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl#sha256=607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 # pip pygments @ https://files.pythonhosted.org/packages/43/88/29adf0b44ba6ac85045e63734ae0997d3c58d8b1a91c914d240828d0d73d/Pygments-2.16.1-py3-none-any.whl#sha256=13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 @@ -56,22 +56,22 @@ https://repo.anaconda.com/pkgs/main/linux-64/pip-23.3-py39h06a4308_0.conda#25664 # pip threadpoolctl @ https://files.pythonhosted.org/packages/81/12/fd4dea011af9d69e1cad05c75f3f7202cdcbeac9b712eea58ca779a72865/threadpoolctl-3.2.0-py3-none-any.whl#sha256=2b7818516e423bdaebb97c723f86a7c6b0a83d3f3b0970328d66f4d9104dc032 # pip tomli @ https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl#sha256=939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc # pip tzdata @ https://files.pythonhosted.org/packages/d5/fb/a79efcab32b8a1f1ddca7f35109a50e4a80d42ac1c9187ab46522b2407d7/tzdata-2023.3-py2.py3-none-any.whl#sha256=7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda -# pip urllib3 @ https://files.pythonhosted.org/packages/d2/b2/b157855192a68541a91ba7b2bbcb91f1b4faa51f8bae38d8005c034be524/urllib3-2.0.7-py3-none-any.whl#sha256=fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +# pip urllib3 @ https://files.pythonhosted.org/packages/96/94/c31f58c7a7f470d5665935262ebd7455c7e4c7782eb525658d3dbf4b9403/urllib3-2.1.0-py3-none-any.whl#sha256=55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 # pip zipp @ https://files.pythonhosted.org/packages/d9/66/48866fc6b158c81cc2bfecc04c480f105c6040e8b077bc54c634b4a67926/zipp-3.17.0-py3-none-any.whl#sha256=0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 # pip contourpy @ 
https://files.pythonhosted.org/packages/a9/ba/d8fd1380876f1e9114157606302e3644c85f6d116aeba354c212ee13edc7/contourpy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=11f8d2554e52f459918f7b8e6aa20ec2a3bce35ce95c1f0ef4ba36fbda306df5 # pip coverage @ https://files.pythonhosted.org/packages/f1/e7/6d778d717d178c8c73103e2c467f3c8d8ebc9cacb825ebe3f3cf05e7c6df/coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9 # pip imageio @ https://files.pythonhosted.org/packages/f6/37/e21e6f38b93878ba80302e95b8ccd4718d80f0c53055ccae343e606b1e2d/imageio-2.31.5-py3-none-any.whl#sha256=97f68e12ba676f2f4b541684ed81f7f3370dc347e8321bc68ee34d37b2dbac9f # pip importlib-metadata @ https://files.pythonhosted.org/packages/cc/37/db7ba97e676af155f5fcb1a35466f446eadc9104e25b83366e8088c9c926/importlib_metadata-6.8.0-py3-none-any.whl#sha256=3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb -# pip importlib-resources @ https://files.pythonhosted.org/packages/65/6e/09d8816b5cb7a4006ef8ad1717a2703ad9f331dae9717d9f22488a2d6469/importlib_resources-6.1.0-py3-none-any.whl#sha256=aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83 +# pip importlib-resources @ https://files.pythonhosted.org/packages/93/e8/facde510585869b5ec694e8e0363ffe4eba067cb357a8398a55f6a1f8023/importlib_resources-6.1.1-py3-none-any.whl#sha256=e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6 # pip jinja2 @ https://files.pythonhosted.org/packages/bc/c3/f068337a370801f372f2f8f6bad74a5c140f6fda3d9de154052708dd3c65/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # pip pytest @ https://files.pythonhosted.org/packages/f3/8c/f16efd81ca8e293b2cc78f111190a79ee539d0d5d36ccd49975cb3beac60/pytest-7.4.3-py3-none-any.whl#sha256=0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac # pip 
python-dateutil @ https://files.pythonhosted.org/packages/36/7a/87837f39d0296e723bb9b62bbb257d0355c7f6128853c78955f57342a56d/python_dateutil-2.8.2-py2.py3-none-any.whl#sha256=961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # pip requests @ https://files.pythonhosted.org/packages/70/8e/0e2d847013cb52cd35b38c009bb167a1a26b2ce6cd6965bf26b47bc0bf44/requests-2.31.0-py3-none-any.whl#sha256=58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f # pip scipy @ https://files.pythonhosted.org/packages/88/8c/9d1f74196c296046af1f20e6d3fc7fbb27387282315e1643f450bba14329/scipy-1.11.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=c77da50c9a91e23beb63c2a711ef9e9ca9a2060442757dffee34ea41847d8156 # pip tifffile @ https://files.pythonhosted.org/packages/f5/72/68ea763b5f3e3d9871492683059ed4724fd700dbe54aa03cdda7a9692129/tifffile-2023.9.26-py3-none-any.whl#sha256=1de47fa945fddaade256e25ad4f375ae65547f3c1354063aded881c32a64cf89 -# pip lightgbm @ https://files.pythonhosted.org/packages/98/a9/01f50aee85949ba713733b69a3f0b42d39719a414a0e29bdf2a9f05ecc53/lightgbm-4.1.0.tar.gz#sha256=bee59dd269a93b093f2c610d4a6683a7ea87c63d3ea35c622123ce2c020b2abc +# pip lightgbm @ https://files.pythonhosted.org/packages/b8/9d/1ce80cee7c5ef60f2fcc7e9fa97f29f7a8de3dc5a08922b3b2f1e9106481/lightgbm-4.1.0-py3-none-manylinux_2_28_x86_64.whl#sha256=47578cff4bc8116b62adc02437bf2b49dcc7ad4e8e3dd8dad3fe88e694d74d93 # pip matplotlib @ https://files.pythonhosted.org/packages/c6/31/dfb69305bddf68428e92381093aa7d5f2658a069ccb4c77e9b45b92e124e/matplotlib-3.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=c54c55457c7f5ea4dfdba0020004fc7667f5c10c8d9b8010d735345acc06c9b8 -# pip pandas @ https://files.pythonhosted.org/packages/e0/4a/3356fb787b67d2adebc91a6a8b134826248790f0cf947fe2e2da20babe86/pandas-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=25c9976c17311388fcd953cb3d0697999b2205333f4e11e669d90ff8d830d429 +# pip pandas @ 
https://files.pythonhosted.org/packages/4e/7b/6c251522fd21ad2a51f26df677582ed917650cb8dff286e17625e7a6531b/pandas-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=1329dbe93a880a3d7893149979caa82d6ba64a25e471682637f846d9dbc10dd2 # pip pyamg @ https://files.pythonhosted.org/packages/35/1c/8b2aa6fbb2bae258ab6cdb35b09635bf50865ac2bcdaf220db3d972cc0d8/pyamg-5.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=1332acec6d5ede9440c8ced0ef20952f5b766387116f254b79880ce29fdecee7 # pip pytest-cov @ https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl#sha256=6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a # pip pytest-forked @ https://files.pythonhosted.org/packages/f4/af/9c0bda43e486a3c9bf1e0f876d0f241bc3f229d7d65d09331a0868db9629/pytest_forked-1.6.0-py3-none-any.whl#sha256=810958f66a91afb1a1e2ae83089d8dc1cd2437ac96b12963042fbb9fb4d16af0 diff --git a/build_tools/azure/pylatest_pip_scipy_dev_linux-64_conda.lock b/build_tools/azure/pylatest_pip_scipy_dev_linux-64_conda.lock index b51bf0167a264..bd442611aa19d 100644 --- a/build_tools/azure/pylatest_pip_scipy_dev_linux-64_conda.lock +++ b/build_tools/azure/pylatest_pip_scipy_dev_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 28ec764eefc982520846833c9ea571cf6ea5a0593dee76d7a7560b34e341e35b +# input_hash: fd04fc403b53efa56f36da3adef140e1911a2ebfdc4c4bc856cb169c729fae87 @EXPLICIT https://repo.anaconda.com/pkgs/main/linux-64/_libgcc_mutex-0.1-main.conda#c3473ff8bdb3d124ed5ff11ec380d6f9 https://repo.anaconda.com/pkgs/main/linux-64/ca-certificates-2023.08.22-h06a4308_0.conda#243d5065a09a3e85ab888c05f5b6445a @@ -37,7 +37,7 @@ https://repo.anaconda.com/pkgs/main/linux-64/pip-23.3-py311h06a4308_0.conda#36ce # pip iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl#sha256=b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # pip markupsafe @ https://files.pythonhosted.org/packages/fe/21/2eff1de472ca6c99ec3993eab11308787b9879af9ca8bbceb4868cf4f2ca/MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 # pip packaging @ https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl#sha256=8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 -# pip platformdirs @ https://files.pythonhosted.org/packages/56/29/3ec311dc18804409ecf0d2b09caa976f3ae6215559306b5b530004e11156/platformdirs-3.11.0-py3-none-any.whl#sha256=e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +# pip platformdirs @ https://files.pythonhosted.org/packages/31/16/70be3b725073035aa5fc3229321d06e22e73e3e09f6af78dcfdf16c7636c/platformdirs-4.0.0-py3-none-any.whl#sha256=118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b # pip pluggy @ https://files.pythonhosted.org/packages/05/b8/42ed91898d4784546c5f06c60506400548db3f7a4b3fb441cba4e5c17952/pluggy-1.3.0-py3-none-any.whl#sha256=d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7 # pip py @ 
https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl#sha256=607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 # pip pygments @ https://files.pythonhosted.org/packages/43/88/29adf0b44ba6ac85045e63734ae0997d3c58d8b1a91c914d240828d0d73d/Pygments-2.16.1-py3-none-any.whl#sha256=13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 @@ -46,7 +46,7 @@ https://repo.anaconda.com/pkgs/main/linux-64/pip-23.3-py311h06a4308_0.conda#36ce # pip sphinxcontrib-jsmath @ https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl#sha256=2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 # pip tabulate @ https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl#sha256=024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f # pip threadpoolctl @ https://files.pythonhosted.org/packages/81/12/fd4dea011af9d69e1cad05c75f3f7202cdcbeac9b712eea58ca779a72865/threadpoolctl-3.2.0-py3-none-any.whl#sha256=2b7818516e423bdaebb97c723f86a7c6b0a83d3f3b0970328d66f4d9104dc032 -# pip urllib3 @ https://files.pythonhosted.org/packages/d2/b2/b157855192a68541a91ba7b2bbcb91f1b4faa51f8bae38d8005c034be524/urllib3-2.0.7-py3-none-any.whl#sha256=fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +# pip urllib3 @ https://files.pythonhosted.org/packages/96/94/c31f58c7a7f470d5665935262ebd7455c7e4c7782eb525658d3dbf4b9403/urllib3-2.1.0-py3-none-any.whl#sha256=55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 # pip jinja2 @ https://files.pythonhosted.org/packages/bc/c3/f068337a370801f372f2f8f6bad74a5c140f6fda3d9de154052708dd3c65/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # pip pytest @ 
https://files.pythonhosted.org/packages/f3/8c/f16efd81ca8e293b2cc78f111190a79ee539d0d5d36ccd49975cb3beac60/pytest-7.4.3-py3-none-any.whl#sha256=0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac # pip python-dateutil @ https://files.pythonhosted.org/packages/36/7a/87837f39d0296e723bb9b62bbb257d0355c7f6128853c78955f57342a56d/python_dateutil-2.8.2-py2.py3-none-any.whl#sha256=961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 diff --git a/build_tools/azure/pypy3_linux-64_conda.lock b/build_tools/azure/pypy3_linux-64_conda.lock index 1a6ed721b9a4e..1623310c84f47 100644 --- a/build_tools/azure/pypy3_linux-64_conda.lock +++ b/build_tools/azure/pypy3_linux-64_conda.lock @@ -1,21 +1,21 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 296e0e62aa19cfbc6aa6d615c86db2d06be56b4b5f76bf148152aff936fcddf5 +# input_hash: 0cadd6b75cd1961c429451dc50b538478233272c1b3b10a1e1c9d9542071260b @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-4_pypy39_pp73.conda#c1b2f29111681a4036ed21eaa3f44620 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2#562b26ba2e19059551a811e72ab7f793 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 
+https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad @@ -31,7 +31,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c @@ -45,7 +45,7 @@ https://conda.anaconda.org/conda-forge/linux-64/gdbm-1.18-h0a1914f_2.tar.bz2#b77 https://conda.anaconda.org/conda-forge/linux-64/libhiredis-1.0.2-h2cc385e_0.tar.bz2#b34907d3a81a3cd8095ee83d174c074a https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.4-h4dfa4b3_0.conda#c560d4ecf0d3536108aa4de0222942d3 +https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.5-h4dfa4b3_0.conda#799291c22ec87a0c86c0a4fc0e22b1c5 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.0-h2c6b66d_0.conda#df56c636df4a98990462d66ac7be2330 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f @@ -84,14 +84,14 @@ https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hf860d4a https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/linux-64/blas-devel-3.9.0-19_linux64_openblas.conda#96bca12f1b7c48298dd1abf3e11121af https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39ha90811c_0.conda#f3b2afc64bf0cbe901a9b00d44611c61 
-https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.0-py39hf860d4a_0.conda#1904d7a02a3c4e554dc9f86e4ce7db09 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda#48b0d98e0c0ec810d3ccc2a0926c8c0e +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.1-py39hf860d4a_0.conda#f13dff06d70692e3b80bf629499fbf07 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.3-py39h6dedee3_1.conda#595610a3cd404ad02ce81308b6b344ba https://conda.anaconda.org/conda-forge/linux-64/blas-2.119-openblas.conda#f536a14a54da8b2aedd5a967d1e407c9 -https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 https://conda.anaconda.org/conda-forge/linux-64/pyamg-5.0.1-py39h5fd064f_1.conda#e364cfb3ffb590ccef24b5a92389e751 https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.1-py39h4e7d633_0.conda#7acc65059ae7d9f74bc567bc495f036f diff --git a/build_tools/circle/doc_linux-64_conda.lock b/build_tools/circle/doc_linux-64_conda.lock index 27d1aaf0448bf..99e6a4704bc66 100644 --- a/build_tools/circle/doc_linux-64_conda.lock +++ b/build_tools/circle/doc_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 74e9e451b651d0b84d1c066a106b93d1a0f711e6aa6c5a48d2169af2e01f4d90 +# input_hash: ce7f8616ac61bef14e3b70bfe4a5c72f05d5a44811f499dd0b26c570dc74ac9f @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -10,24 +10,24 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77 https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_16.conda#7ca122655873935e02c91279c5b03c8c https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-devel_linux-64-12.3.0-h8bca6fd_2.conda#ed613582de7b8569fdc53ca141be176a -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-devel_linux-64-12.3.0-h8bca6fd_2.conda#7268a17e56eb099d1b8869bbbf46de4c -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-12.3.0-h8bca6fd_103.conda#1d7f6d1825bd6bf21ee04336ec87a777 +https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-12.3.0-h8bca6fd_103.conda#3f784d2c059e960156d1ab3858cbf200 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-4_cp39.conda#bfe4b3259a8ac6cdf0037752904da6a7 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 
-https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_16.conda#071ea8dceff4d30ac511f4a2f8437cd1 https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-hf600244_0.conda#33084421a8c0af6aef1b439707f7662a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/binutils-2.40-hdd6e379_0.conda#ccc940fddbc3fcd3d79cd4c654c4b5c4 https://conda.anaconda.org/conda-forge/linux-64/binutils_linux-64-2.40-hbdbef99_2.conda#adfebae9fdc63a598495dfe3b006973a https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2#562b26ba2e19059551a811e72ab7f793 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 -https://conda.anaconda.org/conda-forge/linux-64/aom-3.6.1-h59595ed_0.conda#8457db6d1175ee86c8e077f6ac60ff55 +https://conda.anaconda.org/conda-forge/linux-64/aom-3.7.0-h59595ed_0.conda#fe83288cf960ca82e60d5b5472809917 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 https://conda.anaconda.org/conda-forge/linux-64/charls-2.4.2-h59595ed_0.conda#4336bd67920dd504cd8c6761d6a99645 
https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 @@ -43,13 +43,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_2.conda#78fdab09d9138851dde2b5fe2a11019e +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-12.3.0-h0f45ef3_2.conda#4655db64eca78a6fcc4fb654fc1f8d57 +https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-12.3.0-h0f45ef3_3.conda#eda05ab0db8f8490945fd99244183e3a https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_0.conda#30de3fd9b3b602f7473f30e684eeea8c 
https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad @@ -76,15 +76,15 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161 https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.0-h59595ed_4.conda#9cfbafab420f42b572f3c032ad59da85 https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.0.7-h0b41bf4_0.conda#49e8329110001f04923fe7e864990b0c https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-12.3.0-he2b93b0_2.conda#2f4d8677dc7dd87f93e9abfb2ce86808 -https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.0.1-h87da1f6_2.conda#0281e5f0887a512d7cc2a843173ca243 +https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-12.3.0-he2b93b0_3.conda#71c68ea75afe6ac7a9c62c08f5d67a5a +https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.0.1-hed45d22_3.conda#dac2d54e40baebee47eb4a0f8911a264 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda#25cb5999faa414e5ccb2c1388f62d3d5 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_2.conda#e75a75a6eaf6f318dae2631158c46575 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc 
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.47-h71f35ed_0.conda#c2097d0b46367996f09b4e8e4920384a https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.0-h2797004_0.conda#b58e6816d137f3aabf77d341dd5d732b @@ -100,20 +100,20 @@ https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba -https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.11.1-hb4ffafa_0.conda#c898b02b6ed79db692e6a85f7bdba176 +https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.11.2-hb4ffafa_0.conda#aa776e4716e54633d1279cf7599c3711 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/gcc-12.3.0-h8d2909c_2.conda#e2f2f81f367e14ca1f77a870bda2fe59 https://conda.anaconda.org/conda-forge/linux-64/gcc_linux-64-12.3.0-h76fc315_2.conda#11517e7b5c910c5b5d6985c0c7eb7f50 -https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-12.3.0-hfcedea8_2.conda#09d48cadff6669068c3bf7ae7dc8ea4a -https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-12.3.0-he2b93b0_2.conda#f89b9916afc36fc5562fbfc11330a8a2 +https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-12.3.0-hfcedea8_3.conda#929fbb7d28a3727e96170e613253d2f4 +https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-12.3.0-he2b93b0_3.conda#b6ce9868fc6c65a18c22fd983e2d7e6f https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 
-https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-hebfc3b9_0.conda#ddd09e8904fde46b85f41896621803e6 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda#6e4ef6ca28655124dcde9bd500e44c32 https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.4-h4dfa4b3_0.conda#c560d4ecf0d3536108aa4de0222942d3 +https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.5-h4dfa4b3_0.conda#799291c22ec87a0c86c0a4fc0e22b1c5 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda#3ede353bc605068d9677e700b1847382 @@ -138,7 +138,7 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#6 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/gfortran-12.3.0-h499e0f7_2.conda#0558a8c44eb7a18e6682bd3a8ae6dcab https://conda.anaconda.org/conda-forge/linux-64/gfortran_linux-64-12.3.0-h7fe76b4_2.conda#3a749210487c0358b6f135a648cbbf60 
-https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_0.conda#5b4fe75a68cbb95350f47bb9a707b53b https://conda.anaconda.org/conda-forge/linux-64/gxx-12.3.0-h8d2909c_2.conda#673bac341be6b90ef9e8abae7e52ca46 https://conda.anaconda.org/conda-forge/linux-64/gxx_linux-64-12.3.0-h8a814eb_2.conda#f517b1525e9783849bd56a5dc45a9960 https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed @@ -150,7 +150,7 @@ https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e966 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-19_linux64_openblas.conda#420f4e9be59d0dc9133a0f43f7bab3f3 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py39hd1e30aa_1.conda#ee2b4665b852ec6ff2758f3c1b91233d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 @@ -188,11 +188,11 @@ https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3c https://conda.anaconda.org/conda-forge/linux-64/brunsli-0.1-h9c3ff4c_0.tar.bz2#c1ac6229d0bfd14f8354ff9ad2a26cad https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e 
https://conda.anaconda.org/conda-forge/linux-64/cxx-compiler-1.6.0-h00ab1b0_0.conda#364c6ae36c4e36fcbd4d273cf4db78af -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.0-py39hd1e30aa_0.conda#bbfd1e3ef6f66b07e025fe872e854b0d +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.44.1-py39hd1e30aa_0.conda#1b23514fcf79468b2fdf1cc9a083ce0a https://conda.anaconda.org/conda-forge/linux-64/fortran-compiler-1.6.0-heb67821_0.conda#b65c49dda97ae497abcbdf3a8ba0018f -https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_0.conda#43c633c015a361610ee4db2e95f8a517 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda#48b0d98e0c0ec810d3ccc2a0926c8c0e +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-19_linux64_openblas.conda#d12374af44575413fbbd4a217d46ea33 @@ -202,29 +202,28 @@ https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.6.0-h5d7e998_0.co https://conda.anaconda.org/conda-forge/noarch/memory_profiler-0.61.0-pyhd8ed1ab_0.tar.bz2#8b45f9f2b2f7a98b0ec179c8991a4a9b https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py39had0adad_0.conda#eeaa413fddccecb2ab7f747bdb55b07f https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac 
https://conda.anaconda.org/conda-forge/noarch/plotly-5.18.0-pyhd8ed1ab_0.conda#9f6a8664f1fe752f79473eeb9bf33a60 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py39h3d6467e_0.conda#e667a3ab0df62c54e60e1843d2e6defb -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda#270e71c14d37074b1d066ee21cf0c4a6 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a https://conda.anaconda.org/conda-forge/linux-64/compilers-1.6.0-ha770c72_0.conda#e2259de4640a51a28c21931ae98e4975 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c -https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 https://conda.anaconda.org/conda-forge/linux-64/liblapacke-3.9.0-19_linux64_openblas.conda#685e99d3214f5ac9d1ec6b37983985a6 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.0-py39h474f0d3_0.conda#62f1d2e05327bf62728afa448f2a9261 
-https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py39h3d6467e_5.conda#93aff412f3e49fdb43361c0215cbd72d https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/linux-64/blas-devel-3.9.0-19_linux64_openblas.conda#96bca12f1b7c48298dd1abf3e11121af https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py39h7633fee_0.conda#ed71ad3e30eb03da363fb797419cce98 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2023.9.18-py39hf9b8f0e_2.conda#38f576a701ea508ed210087c711a06ee https://conda.anaconda.org/conda-forge/noarch/imageio-2.31.5-pyh8c1a49c_0.conda#6820ccf6a3a27df348f18c85dd89014a -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.2-py39hddac248_0.conda#e21e23161a1627475021844a887ecd4f +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.3-py39hddac248_0.conda#961b398d8c421a3752e26f01f2dcbdac https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.0-pyhd8ed1ab_0.conda#134b2b57b7865d2316a7cce1915a51ed https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-2.5.0-pyhd8ed1ab_0.tar.bz2#1fdd1f3baccf0deb647385c677a1a48e https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h44dd56e_1.conda#d037c20e3da2e85f03ebd20ad480c359 @@ -255,7 +254,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxext-opengraph-0.9.0-pyhd8ed1 # pip attrs @ 
https://files.pythonhosted.org/packages/f0/eb/fcb708c7bf5056045e9e98f62b93bd7467eb718b0202e7698eb11d66416c/attrs-23.1.0-py3-none-any.whl#sha256=1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 # pip cloudpickle @ https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl#sha256=246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7 # pip defusedxml @ https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl#sha256=a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 -# pip fastjsonschema @ https://files.pythonhosted.org/packages/7f/1a/8aad366cf1779351741e5c791ae76dc8b293f72e9448c689cc2e730f06cb/fastjsonschema-2.18.1-py3-none-any.whl#sha256=aec6a19e9f66e9810ab371cc913ad5f4e9e479b63a7072a2cd060a9369e329a8 +# pip fastjsonschema @ https://files.pythonhosted.org/packages/63/e9/d3dca06ea6b8e58e65716973bc7d9bee9bc39ce233595aa04d04e89a1089/fastjsonschema-2.19.0-py3-none-any.whl#sha256=b9fd1a2dd6971dbc7fee280a95bd199ae0dd9ce22beb91cc75e9c1c528a5170e # pip fqdn @ https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl#sha256=3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014 # pip json5 @ https://files.pythonhosted.org/packages/70/ba/fa37123a86ae8287d6678535a944f9c3377d8165e536310ed6f6cb0f0c0e/json5-0.9.14-py2.py3-none-any.whl#sha256=740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f # pip jsonpointer @ https://files.pythonhosted.org/packages/12/f6/0232cc0c617e195f06f810534d00b74d2f348fe71b2118009ad8ad31f878/jsonpointer-2.4-py2.py3-none-any.whl#sha256=15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a @@ -287,24 +286,24 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxext-opengraph-0.9.0-pyhd8ed1 # pip cffi @ 
https://files.pythonhosted.org/packages/ea/ac/e9e77bc385729035143e54cc8c4785bd480eaca9df17565963556b0b7a93/cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 # pip doit @ https://files.pythonhosted.org/packages/44/83/a2960d2c975836daa629a73995134fd86520c101412578c57da3d2aa71ee/doit-0.36.0-py3-none-any.whl#sha256=ebc285f6666871b5300091c26eafdff3de968a6bd60ea35dd1e3fc6f2e32479a # pip jupyter-core @ https://files.pythonhosted.org/packages/ab/ea/af6508f71d2bcbf4db538940120cc3d3f10287f62105e756bd315aa345b5/jupyter_core-5.5.0-py3-none-any.whl#sha256=e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805 -# pip referencing @ https://files.pythonhosted.org/packages/be/8e/56d6f1e2d591f4d6cbcba446cac4a1b0dc4f584537e2071d9bcee8eeab6b/referencing-0.30.2-py3-none-any.whl#sha256=449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf +# pip referencing @ https://files.pythonhosted.org/packages/29/c1/69342fbc8efd1aac5cda853cea771763b95d92325c4f8f83b499c07bc698/referencing-0.31.0-py3-none-any.whl#sha256=381b11e53dd93babb55696c71cf42aef2d36b8a150c49bf0bc301e36d536c882 # pip rfc3339-validator @ https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl#sha256=24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa -# pip terminado @ https://files.pythonhosted.org/packages/84/a7/c7628d79651b8c8c775d27b374315a825141b5783512e82026fb210dd639/terminado-0.17.1-py3-none-any.whl#sha256=8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae +# pip terminado @ https://files.pythonhosted.org/packages/69/df/deebc9fb14a49062a3330f673e80b100e665b54d998163b3f62620b6240c/terminado-0.18.0-py3-none-any.whl#sha256=87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e # pip tinycss2 @ 
https://files.pythonhosted.org/packages/da/99/fd23634d6962c2791fb8cb6ccae1f05dcbfc39bce36bba8b1c9a8d92eae8/tinycss2-1.2.1-py3-none-any.whl#sha256=2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847 # pip argon2-cffi-bindings @ https://files.pythonhosted.org/packages/ec/f7/378254e6dd7ae6f31fe40c8649eea7d4832a42243acaf0f1fff9083b2bed/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae # pip isoduration @ https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl#sha256=b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042 -# pip jsonschema-specifications @ https://files.pythonhosted.org/packages/1c/24/83349ac2189cc2435e84da3f69ba3c97314d3c0622628e55171c6798ed80/jsonschema_specifications-2023.7.1-py3-none-any.whl#sha256=05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1 +# pip jsonschema-specifications @ https://files.pythonhosted.org/packages/20/a9/384ec45013ab883d7c2bf120f2988682986fdead973decf0bae28a4523e7/jsonschema_specifications-2023.11.1-py3-none-any.whl#sha256=f596778ab612b3fd29f72ea0d990393d0540a5aab18bf0407a46632eab540779 # pip jupyter-server-terminals @ https://files.pythonhosted.org/packages/ea/7f/36db12bdb90f5237766dcbf59892198daab7260acbcf03fc75e2a2a82672/jupyter_server_terminals-0.4.4-py3-none-any.whl#sha256=75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36 # pip jupyterlite-core @ https://files.pythonhosted.org/packages/b4/5d/9708684e65d244493ff4c970ea882b508da8d46e59a4cc99076991c16732/jupyterlite_core-0.2.0-py3-none-any.whl#sha256=255e8272941d0e950d05cfcfc28bde244c0404d2d5990da1b8b3485c44fe1718 # pip pyzmq @ 
https://files.pythonhosted.org/packages/a2/e0/08605421a2ede5d87adbde9685599fa7e6af1df700c657759a1892ced942/pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl#sha256=d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae # pip argon2-cffi @ https://files.pythonhosted.org/packages/a4/6a/e8a041599e78b6b3752da48000b14c8d1e8a04ded09c88c714ba047f34f5/argon2_cffi-23.1.0-py3-none-any.whl#sha256=c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea # pip jsonschema @ https://files.pythonhosted.org/packages/ce/aa/d1bd0b5ec568a903cc3ebcb6b096ab65c1d971c8a01ca3bf3cf788c3c646/jsonschema-4.19.2-py3-none-any.whl#sha256=eee9e502c788e89cb166d4d37f43084e3b64ab405c795c03d343a4dbc2c810fc -# pip jupyter-client @ https://files.pythonhosted.org/packages/ab/1f/d93fd1d2bf75233134a4aa1f56186b3c1975932fbfb58322e8de2906ea3d/jupyter_client-8.5.0-py3-none-any.whl#sha256=c3877aac7257ec68d79b5c622ce986bd2a992ca42f6ddc9b4dd1da50e89f7028 +# pip jupyter-client @ https://files.pythonhosted.org/packages/43/ae/5f4f72980765e2e5e02b260f9c53bcc706cefa7ac9c8d7240225c55788d4/jupyter_client-8.6.0-py3-none-any.whl#sha256=909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99 # pip jupyterlite-pyodide-kernel @ https://files.pythonhosted.org/packages/1c/a8/d4c30081747f4c5d3d75c1e77251ef64f2c3b927023f2796168a83aa65e2/jupyterlite_pyodide_kernel-0.2.0-py3-none-any.whl#sha256=17d713f0eeb3f778c4d51129834096d364c16f05ac06e10292383c43c0eb5bd9 -# pip jupyter-events @ https://files.pythonhosted.org/packages/47/47/cd46c2d3e409bed27338aec1610dfa13da67f64c671f739b7eff0954c14d/jupyter_events-0.8.0-py3-none-any.whl#sha256=81f07375c7673ff298bfb9302b4a981864ec64edaed75ca0fe6f850b9b045525 +# pip jupyter-events @ https://files.pythonhosted.org/packages/e3/55/0c1aa72f4317e826a471dc4adc3036acd11d496ded68c4bbac2a88551519/jupyter_events-0.9.0-py3-none-any.whl#sha256=d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf # pip nbformat @ 
https://files.pythonhosted.org/packages/f4/e7/ef30a90b70eba39e675689b9eaaa92530a71d7435ab8f9cae520814e0caf/nbformat-5.9.2-py3-none-any.whl#sha256=1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9 -# pip nbclient @ https://files.pythonhosted.org/packages/ac/5a/d670ca51e6c3d98574b9647599821590efcd811d71f58e9c89fc59a17685/nbclient-0.8.0-py3-none-any.whl#sha256=25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548 -# pip nbconvert @ https://files.pythonhosted.org/packages/18/71/0f58015055be21f99af33d25f3e565eb43058e84da5a4d9da65df854fcb1/nbconvert-7.10.0-py3-none-any.whl#sha256=8cf1d95e569730f136feb85e4bba25bdcf3a63fefb122d854ddff6771c0ac933 -# pip jupyter-server @ https://files.pythonhosted.org/packages/b0/0b/4b115b09d0182f335bb0bf085bb598bc2aa94600ffd5d38bbd20a4ef6a29/jupyter_server-2.9.1-py3-none-any.whl#sha256=21ad1a3d455d5a79ce4bef5201925cd17510c17898cf9d54e3ccfb6b12734948 -# pip jupyterlab-server @ https://files.pythonhosted.org/packages/96/cd/cdabe44549d60e0967904f0bdd9e3756b521112317612a3997eb2fda9181/jupyterlab_server-2.25.0-py3-none-any.whl#sha256=c9f67a98b295c5dee87f41551b0558374e45d449f3edca153dd722140630dcb2 -# pip jupyterlite-sphinx @ https://files.pythonhosted.org/packages/38/c9/5f1142c005cf8d75830b10029e53f074324bc85cfca1f1d0f22a207b771c/jupyterlite_sphinx-0.9.3-py3-none-any.whl#sha256=be6332d16490ea2fa90b78187a2c5e1c357195966a25741d60b1790346571041 +# pip nbclient @ https://files.pythonhosted.org/packages/6b/3a/607149974149f847125c38a62b9ea2b8267eb74823bbf8d8c54ae0212a00/nbclient-0.9.0-py3-none-any.whl#sha256=a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15 +# pip nbconvert @ https://files.pythonhosted.org/packages/84/61/460af4b68b3c681d1f82d48646cf2acb8f6d29edf9a8366dc37ae69e902a/nbconvert-7.11.0-py3-none-any.whl#sha256=d1d417b7f34a4e38887f8da5bdfd12372adf3b80f995d57556cb0972c68909fe +# pip jupyter-server @ 
https://files.pythonhosted.org/packages/e6/42/d200e2aaed1ce8f755b499aea7f83632760654ab75dae07f9038594c7418/jupyter_server-2.10.0-py3-none-any.whl#sha256=dde56c9bc3cb52d7b72cc0f696d15d7163603526f1a758eb4a27405b73eab2a5 +# pip jupyterlab-server @ https://files.pythonhosted.org/packages/de/86/9484f35ea09efc3f8b90e58398e9b9783b9e7e9ee5b24fbb41f5e465d7a2/jupyterlab_server-2.25.1-py3-none-any.whl#sha256=dce9714d91fb3e53d2b37d0e0619fa26ed223c8e7b8c81cca112926de19b53a4 +# pip jupyterlite-sphinx @ https://files.pythonhosted.org/packages/fa/f9/ad6d7164eca7ab9d523fc9b8c8a4a5508b424ee051f44a01797be224aeaa/jupyterlite_sphinx-0.10.0-py3-none-any.whl#sha256=72f332bf2748902802b719fbce598234e27facfcdc9aec020bf8cf025b12ba62 diff --git a/build_tools/circle/doc_min_dependencies_linux-64_conda.lock b/build_tools/circle/doc_min_dependencies_linux-64_conda.lock index 841bce0900737..917ac3d8f20c4 100644 --- a/build_tools/circle/doc_min_dependencies_linux-64_conda.lock +++ b/build_tools/circle/doc_min_dependencies_linux-64_conda.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 2e88b7ec27e9f65850d83419eb4f91734b98473ddf6dd7499ef25079a9cd0337 +# input_hash: ea14ba8479445faa5f5e66ad57539cb2796432d11145057b0b0181e52dfdf0e4 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda#a73ecd2988327ad4c8f2c331482917f2 @@ -13,21 +13,21 @@ https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9 https://conda.anaconda.org/conda-forge/linux-64/libgcc-devel_linux-64-7.5.0-hda03d7c_20.tar.bz2#2146b25eb2a762a44fab709338a7b6d9 https://conda.anaconda.org/conda-forge/linux-64/libgfortran4-7.5.0-h14aa051_20.tar.bz2#a072eab836c3a9578ce72b5640ce592d https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-devel_linux-64-7.5.0-hb016644_20.tar.bz2#31d5500f621954679ee41d7f5d1089fb -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_2.conda#9172c297304f2a20134fc56c97fbe229 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-4_cp38.conda#ea6b353536f42246cd130c7fef1285cf https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-7.5.0-h14aa051_20.tar.bz2#c3b2ad091c043c08689e64b10741484b -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_2.conda#e2042154faafe61969556f28bade94b9 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_16.conda#071ea8dceff4d30ac511f4a2f8437cd1 https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.36.1-h193b22a_2.tar.bz2#32aae4265554a47ea77f7c09f86aeb3b 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/binutils-2.36.1-hdd6e379_2.tar.bz2#3111f86041b5b6863545ca49130cca95 https://conda.anaconda.org/conda-forge/linux-64/binutils_linux-64-2.36-hf3e587d_33.tar.bz2#72b245322c589284f1b92a5c971e5cb6 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2#562b26ba2e19059551a811e72ab7f793 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_2.conda#c28003b0be0494f9a7664389146716ff +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.10-hd590300_0.conda#75dae9a4201732aa78a530b826ee5fe0 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-7.5.0-habd7529_20.tar.bz2#42140612518a7ce78f571d64b6a50ba3 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 @@ -89,12 +89,12 @@ https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda https://conda.anaconda.org/conda-forge/linux-64/gfortran_linux-64-7.5.0-h78c8a43_33.tar.bz2#b2879010fb369f4012040f7a27657cd8 https://conda.anaconda.org/conda-forge/linux-64/gxx_linux-64-7.5.0-h555fc39_33.tar.bz2#5cf979793d2c5130a012cb6480867adc https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 
-https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.0-hebfc3b9_0.conda#e618003da3547216310088478e475945 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.2-hd590300_0.conda#3d7d5e5cebf8af5aadb040732860f1b6 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-hebfc3b9_0.conda#ddd09e8904fde46b85f41896621803e6 https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-h5cf9203_3.conda#9efe82d44b76a7529a1d702e5a37752e https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-ha9c0a0a_2.conda#55ed21669b2015f77c180feb1dd41930 -https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.4-h4dfa4b3_0.conda#c560d4ecf0d3536108aa4de0222942d3 +https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-17.0.5-h4dfa4b3_0.conda#799291c22ec87a0c86c0a4fc0e22b1c5 https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.33-hca2cd23_6.conda#e87530d1b12dd7f4e0f856dc07358d60 https://conda.anaconda.org/conda-forge/linux-64/nss-3.94-h1d7d5a4_0.conda#7caef74bbfa730e014b20f0852068509 https://conda.anaconda.org/conda-forge/linux-64/python-3.8.18-hd12c33a_0_cpython.conda#334cb629e10d209f1c17630f653168b1 @@ -120,7 +120,7 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#6 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/fortran-compiler-1.1.1-he991be0_0.tar.bz2#e38ac82cc517b9e245c1ae99f9f140da https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.10.0-pyhca7485f_0.conda#5b86cf1ceaaa9be2ec4627377e538db1 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.0-hfc55251_0.conda#e10134de3558dd95abda6987b5548f4f 
+https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.78.1-hfc55251_0.conda#5b4fe75a68cbb95350f47bb9a707b53b https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 @@ -128,7 +128,7 @@ https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py38h7f3f72f_1. https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hb7c19ff_3.conda#e96637dd92c5f340215c753a5c9a22d7 https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h9986a30_3.conda#1720df000b48e31842500323cb7be18c https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.0-hfc447b1_1.conda#e4a9a5ba40123477db33e02a78dffb01 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.1-hfc447b1_0.conda#2b7f1893cf40b4ccdc0230bcd94d5ed9 https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-254-h3516f8a_0.conda#df4b1cd0c91b4234fb02b5701a4cdddc https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py38h01eb140_1.conda#2dabf287937cd631e292096cc6d0867e @@ -171,7 +171,7 @@ https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3c https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e https://conda.anaconda.org/conda-forge/linux-64/compilers-1.1.1-0.tar.bz2#1ba267e19dbaf3db9dd0404e6fb9cdb9 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py38h01eb140_1.conda#56222b99bdd044e52c364c4fbee28a7a 
-https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.0-hfc55251_0.conda#2f55a36b549f51a7e0c2b1e3c3f0ccd4 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.78.1-hfc55251_0.conda#43c633c015a361610ee4db2e95f8a517 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.8.0-pyha770c72_0.conda#4e9f59a060c3be52bc4ddc46ee9b6946 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc @@ -182,24 +182,23 @@ https://conda.anaconda.org/conda-forge/noarch/memory_profiler-0.61.0-pyhd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py38ha43c96d_0.conda#67ca17c651f86159a3b8ed1132d97c12 https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.0.0-pyhd8ed1ab_0.conda#6bb4ee32cd435deaeac72776c001e7ac https://conda.anaconda.org/conda-forge/noarch/plotly-5.14.0-pyhd8ed1ab_0.conda#6a7bcc42ef58dd6cf3da9333ea102433 https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda#ac902ff3c1c6d750dd0dfc93a974ab74 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.12-py38h17151c0_0.conda#ae2edf79b63f97071aea203b22a6774a -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.8.0-hd8ed1ab_0.conda#384462e63262a527bda564fa2d9126c0 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.7-pyhd8ed1ab_0.conda#270e71c14d37074b1d066ee21cf0c4a6 
-https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.6-h98fc4e7_2.conda#1c95f7c612f9121353c4ef764678113e -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.2.1-h3d44ed6_0.conda#98db5f8813f45e2b29766aff0e4a499c +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.7-h98fc4e7_0.conda#6c919bafe5e03428a8e2ef319d7ef990 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.8.0-hd8ed1ab_0.conda#b279b07ce18058034e5b3606ba103a8b https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.8.0-20_mkl.tar.bz2#14b25490fdcc44e879ac6c10fe764f68 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.8.0-20_mkl.tar.bz2#52c0ae3606eeae7e1d493f37f336f4f5 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.11.0-pyhd8ed1ab_0.conda#8f567c0a74aa44cf732f15773b4083b0 https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.12.2-py38h17151c0_5.conda#3d66f5c4a0af2713f60ec11bf1230136 https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.5.0-pyhd8ed1ab_0.conda#03ed2d040648a5ba1063bf1cb0d87b78 -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.6-h8e1006c_2.conda#3d8e98279bad55287f2ef9047996f33c +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.7-h8e1006c_0.conda#065e2c1d49afa3fdc1a01f1dacd6ab09 https://conda.anaconda.org/conda-forge/linux-64/liblapacke-3.8.0-20_mkl.tar.bz2#8274dc30518af9df1de47f5d9e73165c https://conda.anaconda.org/conda-forge/linux-64/numpy-1.17.3-py38h95a1406_0.tar.bz2#bc0cbf611fe2f86eab29b98e51404f5e 
https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.0-pyhd8ed1ab_0.conda#134b2b57b7865d2316a7cce1915a51ed diff --git a/build_tools/cirrus/py39_conda_forge_linux-aarch64_conda.lock b/build_tools/cirrus/py39_conda_forge_linux-aarch64_conda.lock index a36cc4e025749..1b58b4f66ac0f 100644 --- a/build_tools/cirrus/py39_conda_forge_linux-aarch64_conda.lock +++ b/build_tools/cirrus/py39_conda_forge_linux-aarch64_conda.lock @@ -1,20 +1,20 @@ # Generated by conda-lock. # platform: linux-aarch64 -# input_hash: 26cb8d771d4d1ecc00c0fc477f3a4b364e4bd7558f3d18ecd50c0d1b440ffe7f +# input_hash: 0d85e0709820b70a9669d6117bfc25716ef5e50278f2788b8deb7af027b80e73 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-aarch64/ca-certificates-2023.7.22-hcefe29a_0.conda#95d7f998087114466fa91e7c2887fa2f https://conda.anaconda.org/conda-forge/linux-aarch64/ld_impl_linux-aarch64-2.40-h2d8c526_0.conda#16246d69e945d0b1969a6099e7c5d457 -https://conda.anaconda.org/conda-forge/linux-aarch64/libstdcxx-ng-13.2.0-h9a76618_2.conda#921c652898c8602bf2697d015f3efc77 +https://conda.anaconda.org/conda-forge/linux-aarch64/libstdcxx-ng-13.2.0-h9a76618_3.conda#7ad2164936c4975d94ca883d34809c0f https://conda.anaconda.org/conda-forge/linux-aarch64/python_abi-3.9-4_cp39.conda#c191905a08694e4a5cb1238e90233878 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/linux-aarch64/_openmp_mutex-4.5-2_kmp_llvm.tar.bz2#98a1185182fec3c434069fa74e6473d6 -https://conda.anaconda.org/conda-forge/linux-aarch64/libgcc-ng-13.2.0-hf8544c7_2.conda#f4dfb3bad7c8b38c3f8ed7f15a91a1ed -https://conda.anaconda.org/conda-forge/linux-aarch64/bzip2-1.0.8-hf897c2e_4.tar.bz2#2d787570a729e273a4e75775ddf3348a +https://conda.anaconda.org/conda-forge/linux-aarch64/libgcc-ng-13.2.0-hf8544c7_3.conda#00f021ee1a24c798ae53c87ee79597f1 +https://conda.anaconda.org/conda-forge/linux-aarch64/bzip2-1.0.8-h31becfc_5.conda#a64e35f01e0b7a2a152eca87d33b9c87 
https://conda.anaconda.org/conda-forge/linux-aarch64/lerc-4.0.0-h4de3ea5_0.tar.bz2#1a0ffc65e03ce81559dbcb0695ad1476 https://conda.anaconda.org/conda-forge/linux-aarch64/libbrotlicommon-1.1.0-h31becfc_1.conda#1b219fd801eddb7a94df5bd001053ad9 https://conda.anaconda.org/conda-forge/linux-aarch64/libdeflate-1.19-h31becfc_0.conda#014e57e35f2dc95c9a12f63d4378e093 https://conda.anaconda.org/conda-forge/linux-aarch64/libffi-3.4.2-h3557bc0_5.tar.bz2#dddd85f4d52121fab0a8b099c5e06501 -https://conda.anaconda.org/conda-forge/linux-aarch64/libgfortran5-13.2.0-h582850c_2.conda#1be4fb84d6b6617a844933ca406c6bd5 +https://conda.anaconda.org/conda-forge/linux-aarch64/libgfortran5-13.2.0-h582850c_3.conda#d81dcb787465447542ad9c4cf0bab65e https://conda.anaconda.org/conda-forge/linux-aarch64/libjpeg-turbo-3.0.0-h31becfc_1.conda#ed24e702928be089d9ba3f05618515c6 https://conda.anaconda.org/conda-forge/linux-aarch64/libnsl-2.0.1-h31becfc_0.conda#c14f32510f694e3185704d89967ec422 https://conda.anaconda.org/conda-forge/linux-aarch64/libuuid-2.38.1-hb4cce97_0.conda#000e30b09db0b7c775b21695dff30969 @@ -28,7 +28,7 @@ https://conda.anaconda.org/conda-forge/linux-aarch64/xorg-libxdmcp-1.1.3-h3557bc https://conda.anaconda.org/conda-forge/linux-aarch64/xz-5.2.6-h9cdd2b7_0.tar.bz2#83baad393a31d59c20b63ba4da6592df https://conda.anaconda.org/conda-forge/linux-aarch64/libbrotlidec-1.1.0-h31becfc_1.conda#8db7cff89510bec0b863a0a8ee6a7bce https://conda.anaconda.org/conda-forge/linux-aarch64/libbrotlienc-1.1.0-h31becfc_1.conda#ad3d3a826b5848d99936e4466ebbaa26 -https://conda.anaconda.org/conda-forge/linux-aarch64/libgfortran-ng-13.2.0-he9431aa_2.conda#720092257480c53e80f32cc819821fea +https://conda.anaconda.org/conda-forge/linux-aarch64/libgfortran-ng-13.2.0-he9431aa_3.conda#6c292066bb9876d7ba35c590868baaeb https://conda.anaconda.org/conda-forge/linux-aarch64/libpng-1.6.39-hf9034f9_0.conda#5ec9052384a6ac85e9111e9ac7c5ec4c 
https://conda.anaconda.org/conda-forge/linux-aarch64/libsqlite-3.44.0-h194ca79_0.conda#6d33a45e15846407c1a9a7388dda5436 https://conda.anaconda.org/conda-forge/linux-aarch64/libxcb-1.15-h2a766a3_0.conda#eb3d8c8170e3d03f2564ed2024aa00c8 @@ -40,7 +40,7 @@ https://conda.anaconda.org/conda-forge/linux-aarch64/freetype-2.12.1-hf0a5ef3_2. https://conda.anaconda.org/conda-forge/linux-aarch64/libhiredis-1.0.2-h05efe27_0.tar.bz2#a87f068744fd20334cd41489eb163bee https://conda.anaconda.org/conda-forge/linux-aarch64/libopenblas-0.3.24-pthreads_h5a5ec62_0.conda#22555a102c05b77dc45ff22a21255935 https://conda.anaconda.org/conda-forge/linux-aarch64/libtiff-4.6.0-h1708d11_2.conda#d5638e110e7f22e2602a8edd20656720 -https://conda.anaconda.org/conda-forge/linux-aarch64/llvm-openmp-17.0.4-h8b0cb96_0.conda#bb6d15b337f6e6f2d43213eaf028c30a +https://conda.anaconda.org/conda-forge/linux-aarch64/llvm-openmp-17.0.5-h8b0cb96_0.conda#07056470540d494e46d432e8468d9c24 https://conda.anaconda.org/conda-forge/linux-aarch64/python-3.9.18-h4ac3b42_0_cpython.conda#4d36e157278470ac06508579c6d36555 https://conda.anaconda.org/conda-forge/linux-aarch64/brotli-1.1.0-h31becfc_1.conda#e41f5862ac746428407f3fd44d2ed01f https://conda.anaconda.org/conda-forge/linux-aarch64/ccache-4.8.1-h6552966_0.conda#5b436a19e818f05fe0c9ab4f5ac61233 @@ -69,8 +69,8 @@ https://conda.anaconda.org/conda-forge/linux-aarch64/tornado-6.3.3-py39h7cc1d5f_ https://conda.anaconda.org/conda-forge/linux-aarch64/unicodedata2-15.1.0-py39h898b7ef_0.conda#8c072c9329aeea97a46005625267a851 https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.3-pyhd8ed1ab_0.conda#3fc026b9c87d091c4b34a6c997324ae8 https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a -https://conda.anaconda.org/conda-forge/linux-aarch64/fonttools-4.44.0-py39h898b7ef_0.conda#b2aeffb88f60588a1d0c7aff1b08fa39 
-https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.0-pyhd8ed1ab_0.conda#48b0d98e0c0ec810d3ccc2a0926c8c0e +https://conda.anaconda.org/conda-forge/linux-aarch64/fonttools-4.44.1-py39h898b7ef_0.conda#86c966c702fb5937a2c57ef5f236ed36 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc https://conda.anaconda.org/conda-forge/linux-aarch64/libcblas-3.9.0-19_linuxaarch64_openblas.conda#8d52c7095a072dde1990717b5f0ab267 https://conda.anaconda.org/conda-forge/linux-aarch64/liblapack-3.9.0-19_linuxaarch64_openblas.conda#c2a01118ea07574a10c19f7e9997f73b @@ -78,7 +78,7 @@ https://conda.anaconda.org/conda-forge/linux-aarch64/pillow-10.1.0-py39h8ce38d7_ https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.0-pyhd8ed1ab_0.conda#6a62c2cc25376a0d050b3d1d221c3ee9 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.1.1-pyhd8ed1ab_0.conda#d04bd1b5bed9177dd7c3cef15e2b6710 https://conda.anaconda.org/conda-forge/linux-aarch64/liblapacke-3.9.0-19_linuxaarch64_openblas.conda#61d8dfefa1b44482f1f2ea08f3cb88b2 https://conda.anaconda.org/conda-forge/linux-aarch64/numpy-1.26.0-py39h91c28bb_0.conda#cb45bbda25d8486609cab8ecf2c957e1 https://conda.anaconda.org/conda-forge/noarch/pytest-forked-1.6.0-pyhd8ed1ab_0.conda#a46947638b6e005b63d2d6271da529b0 diff --git a/doc/about.rst b/doc/about.rst index e2242d28f73e7..e462963135b58 100644 --- a/doc/about.rst +++ b/doc/about.rst @@ -161,13 +161,17 @@ The project would like to thank the 
following funders. The `Members `_ of the `Scikit-Learn Consortium at Inria Foundation -`_ fund Olivier -Grisel, Guillaume Lemaitre, and Jérémie du Boisberranger. +`_ fund Arturo Amor, +François Goupil, Guillaume Lemaitre, Jérémie du Boisberranger, and Olivier Grisel. .. raw:: html +.. |chanel| image:: images/chanel.png + :width: 70pt + :target: https://www.chanel.com + .. |axa| image:: images/axa.png :width: 50pt :target: https://www.axa.fr/ @@ -200,6 +204,10 @@ Grisel, Guillaume Lemaitre, and Jérémie du Boisberranger. .. table:: :class: sk-sponsor-table align-default + +----------+-----------+ + | |chanel| | + +----------+-----------+ + | | +----------+-----------+ | |axa| | |bnp| | +----------+-----------+ diff --git a/doc/conf.py b/doc/conf.py index 288aba5404ae6..9a0c09438d865 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -309,7 +309,7 @@ html_additional_pages[old_link] = "redirects.html" # Not showing the search summary makes the search page load faster. -html_show_search_summary = False +html_show_search_summary = True # The "summary-anchor" IDs will be overwritten via JavaScript to be unique. diff --git a/doc/developers/plotting.rst b/doc/developers/plotting.rst index 90b45ff4ef6b3..9acc3ef4a5061 100644 --- a/doc/developers/plotting.rst +++ b/doc/developers/plotting.rst @@ -8,7 +8,7 @@ Scikit-learn defines a simple API for creating visualizations for machine learning. The key features of this API is to run calculations once and to have the flexibility to adjust the visualizations after the fact. This section is intended for developers who wish to develop or maintain plotting tools. For -usage, users should refer to the :ref`User Guide `. +usage, users should refer to the :ref:`User Guide `. 
Plotting API Overview --------------------- diff --git a/doc/images/chanel-small.png b/doc/images/chanel-small.png new file mode 100644 index 0000000000000..b1965b714a42f Binary files /dev/null and b/doc/images/chanel-small.png differ diff --git a/doc/images/chanel.png b/doc/images/chanel.png new file mode 100644 index 0000000000000..1b2d39fd4facf Binary files /dev/null and b/doc/images/chanel.png differ diff --git a/doc/images/microsoft-small.png b/doc/images/microsoft-small.png new file mode 100644 index 0000000000000..56f3334e96c20 Binary files /dev/null and b/doc/images/microsoft-small.png differ diff --git a/doc/modules/array_api.rst b/doc/modules/array_api.rst index 777f0d1e2f17c..ebc157fb169d1 100644 --- a/doc/modules/array_api.rst +++ b/doc/modules/array_api.rst @@ -99,6 +99,7 @@ Estimators - :class:`preprocessing.KernelCenterer` - :class:`preprocessing.MaxAbsScaler` - :class:`preprocessing.MinMaxScaler` +- :class:`preprocessing.Normalizer` Metrics ------- diff --git a/doc/modules/calibration.rst b/doc/modules/calibration.rst index 081b3e9a0a883..e5f5c9ed391ab 100644 --- a/doc/modules/calibration.rst +++ b/doc/modules/calibration.rst @@ -241,7 +241,7 @@ there is enough data (greater than ~ 1000 samples) to avoid overfitting [3]_. `method="isotonic"` since isotonic regression introduces ties in the predicted probabilities. This can be seen as within the uncertainty of the model predictions. In case, you strictly want to keep the ranking and thus AUC scores, use - `method="logistic"` which is a strictly monotonic transformation and thus keeps + `method="sigmoid"` which is a strictly monotonic transformation and thus keeps the ranking. Multiclass support diff --git a/doc/modules/cross_validation.rst b/doc/modules/cross_validation.rst index fd3d5f170056f..53206bce28c8f 100644 --- a/doc/modules/cross_validation.rst +++ b/doc/modules/cross_validation.rst @@ -521,8 +521,8 @@ the proportion of samples on each side of the train / test split. .. 
_stratification: -Cross-validation iterators with stratification based on class labels. ---------------------------------------------------------------------- +Cross-validation iterators with stratification based on class labels +-------------------------------------------------------------------- Some classification problems can exhibit a large imbalance in the distribution of the target classes: for instance there could be several times more negative @@ -810,7 +810,7 @@ generated by :class:`LeavePGroupsOut`. .. _predefined_split: -Predefined Fold-Splits / Validation-Sets +Predefined fold-splits / Validation-sets ---------------------------------------- For some datasets, a pre-defined split of the data into training- and diff --git a/doc/modules/kernel_approximation.rst b/doc/modules/kernel_approximation.rst index 2166227daf247..30c5a71b1417d 100644 --- a/doc/modules/kernel_approximation.rst +++ b/doc/modules/kernel_approximation.rst @@ -35,13 +35,65 @@ is advisable to compare results against exact kernel methods when possible. Nystroem Method for Kernel Approximation ---------------------------------------- -The Nystroem method, as implemented in :class:`Nystroem` is a general method -for low-rank approximations of kernels. It achieves this by essentially subsampling -the data on which the kernel is evaluated. -By default :class:`Nystroem` uses the ``rbf`` kernel, but it can use any -kernel function or a precomputed kernel matrix. -The number of samples used - which is also the dimensionality of the features computed - -is given by the parameter ``n_components``. +The Nystroem method, as implemented in :class:`Nystroem` is a general method for +reduced rank approximations of kernels. It achieves this by subsampling without +replacement rows/columns of the data on which the kernel is evaluated. 
While the +computational complexity of the exact method is +:math:`\mathcal{O}(n^3_{\text{samples}})`, the complexity of the approximation +is :math:`\mathcal{O}(n^2_{\text{components}} \cdot n_{\text{samples}})`, where +one can set :math:`n_{\text{components}} \ll n_{\text{samples}}` without a +significant decrease in performance [WS2001]_. + +We can construct the eigendecomposition of the kernel matrix :math:`K`, based +on the features of the data, and then split it into sampled and unsampled data +points. + +.. math:: + + K = U \Lambda U^T + = \begin{bmatrix} U_1 \\ U_2\end{bmatrix} \Lambda \begin{bmatrix} U_1 \\ U_2 \end{bmatrix}^T + = \begin{bmatrix} U_1 \Lambda U_1^T & U_1 \Lambda U_2^T \\ U_2 \Lambda U_1^T & U_2 \Lambda U_2^T \end{bmatrix} + \equiv \begin{bmatrix} K_{11} & K_{12} \\ K_{21} & K_{22} \end{bmatrix} + +where: + + * :math:`U` is orthonormal + * :math:`\Lambda` is the diagonal matrix of eigenvalues + * :math:`U_1` is the orthonormal matrix of samples that were chosen + * :math:`U_2` is the orthonormal matrix of samples that were not chosen + +Given that :math:`U_1 \Lambda U_1^T` can be obtained by orthonormalization of +the matrix :math:`K_{11}`, and :math:`U_2 \Lambda U_1^T` can be evaluated (as +well as its transpose), the only remaining term to elucidate is +:math:`U_2 \Lambda U_2^T`. To do this we can express it in terms of the already +evaluated matrices: + +.. math:: + + \begin{align} U_2 \Lambda U_2^T &= \left(K_{21} U_1 \Lambda^{-1}\right) \Lambda \left(K_{21} U_1 \Lambda^{-1}\right)^T + \\&= K_{21} U_1 (\Lambda^{-1} \Lambda) \Lambda^{-1} U_1^T K_{21}^T + \\&= K_{21} U_1 \Lambda^{-1} U_1^T K_{21}^T + \\&= K_{21} K_{11}^{-1} K_{21}^T + \\&= \left( K_{21} K_{11}^{-\frac12} \right) \left( K_{21} K_{11}^{-\frac12} \right)^T + .\end{align} + +During ``fit``, the class :class:`Nystroem` evaluates the basis :math:`U_1`, and +computes the normalization constant, :math:`K_{11}^{-\frac12}`.
Later, during +``transform``, the kernel matrix is determined between the basis (given by the +`components_` attribute) and the new data points, ``X``. This matrix is then +multiplied by the ``normalization_`` matrix for the final result. + +By default :class:`Nystroem` uses the ``rbf`` kernel, but it can use any kernel +function or a precomputed kernel matrix. The number of samples used - which is +also the dimensionality of the features computed - is given by the parameter +``n_components``. + +.. topic:: Examples: + + * See the example entitled + :ref:`sphx_glr_auto_examples_applications_plot_cyclical_feature_engineering.py`, + that shows an efficient machine learning pipeline that uses a + :class:`Nystroem` kernel. .. _rbf_kernel_approx: @@ -233,6 +285,9 @@ or store training examples. .. topic:: References: + .. [WS2001] `"Using the Nyström method to speed up kernel machines" + `_ + Williams, C.K.I.; Seeger, M. - 2001. .. [RR2007] `"Random features for large-scale kernel machines" `_ Rahimi, A. and Recht, B. - Advances in neural information processing 2007, diff --git a/doc/templates/index.html b/doc/templates/index.html index 5a46c43d9af26..a20da900bafcb 100644 --- a/doc/templates/index.html +++ b/doc/templates/index.html @@ -251,8 +251,11 @@

Who uses scikit-learn?

scikit-learn development and maintenance are financially supported by

+ + + diff --git a/doc/themes/scikit-learn-modern/layout.html b/doc/themes/scikit-learn-modern/layout.html index c9d232db884b7..779435323be20 100644 --- a/doc/themes/scikit-learn-modern/layout.html +++ b/doc/themes/scikit-learn-modern/layout.html @@ -10,8 +10,8 @@ - - + + {{ metatags }} @@ -35,7 +35,7 @@ {%- endif %} {%- endfor %} - + {%- block extrahead %} {% endblock %} diff --git a/doc/whats_new/v1.4.rst b/doc/whats_new/v1.4.rst index 38f7a33ab9651..507b22cdf510a 100644 --- a/doc/whats_new/v1.4.rst +++ b/doc/whats_new/v1.4.rst @@ -169,7 +169,7 @@ and classes are impacted: - :class:`feature_extraction.text.TfidfTransformer` in :pr:`27219` by :user:`Yao Xiao `; - :class:`cluster.Isomap` in :pr:`27250` by :user:`Yao Xiao `; -- :func:`manifold.SpectralEmbedding` in :pr:`27240` by :user:`Yao Xiao `; +- :class:`manifold.SpectralEmbedding` in :pr:`27240` by :user:`Yao Xiao `; - :class:`manifold.TSNE` in :pr:`27250` by :user:`Yao Xiao `; - :class:`impute.SimpleImputer` in :pr:`27277` by :user:`Yao Xiao `; - :class:`impute.IterativeImputer` in :pr:`27277` by :user:`Yao Xiao `; @@ -180,6 +180,10 @@ and classes are impacted: :user:`Yao Xiao `; - :class:`preprocessing.PolynomialFeatures` in :pr:`27166` by :user:`Mohit Joshi `. +- :class:`random_projection.GaussianRandomProjection` in :pr:`27314` by + :user:`Stefanie Senger `. +- :class:`random_projection.SparseRandomProjection`in :pr:`27314` by + :user:`Stefanie Senger `. Changelog --------- @@ -272,7 +276,7 @@ Changelog - |Enhancement| :class:`decomposition.PCA` now supports the Array API for the `full` and `randomized` solvers (with QR power iterations). See :ref:`array_api` for more details. - :pr:`26315` and :pr:`27098` by :user:`Mateusz Sokół `, + :pr:`26315`, :pr:`27098` and :pr:`27431` by :user:`Mateusz Sokół `, :user:`Olivier Grisel ` and :user:`Edoardo Abati `. 
- |Feature| :class:`decomposition.PCA` now supports :class:`scipy.sparse.sparray` @@ -291,6 +295,12 @@ Changelog for classification or `squared_error`, `friedman_mse`, or `poisson` for regression. :pr:`26391` by `Thomas Fan`_. +- |MajorFeature| :class:`ensemble.HistGradientBoostingClassifier` and + :class:`ensemble.HistGradientBoostingRegressor` supports + `categorical_features="from_dtype"`, which treats columns with Pandas Categorical + dtype as categories in the algorithm. `categorical_features="from_dtype"` will become + the default in v1.6. :pr:`26411` by `Thomas Fan`_. + - |Feature| :class:`ensemble.RandomForestClassifier`, :class:`ensemble.RandomForestRegressor`, :class:`ensemble.ExtraTreesClassifier` and :class:`ensemble.ExtraTreesRegressor` now support monotonic constraints, @@ -299,6 +309,12 @@ Changelog :pr:`13649` by :user:`Samuel Ronsin `, initiated by :user:`Patrick O'Reilly `. +- |Feature| :class:`ensemble.HistGradientBoostingClassifier` and + :class:`ensemble.HistGradientBoostingRegressor` got the new parameter + `max_features` to specify the proportion of randomly chosen features considered + in each split. + :pr:`27139` by :user:`Christian Lorentzen `. + - |Efficiency| :class:`ensemble.GradientBoostingClassifier` is faster, for binary and in particular for multiclass problems thanks to the private loss function module. @@ -429,6 +445,10 @@ Changelog - |Enhancement| :func:`sklearn.model_selection.train_test_split` now supports Array API compatible inputs. :pr:`26855` by `Tim Head`_. +- |Enhancement| :func:`model_selection.learning_curve` raises a warning when + every cross validation fold fails. + :pr:`26299` by :user:`Rahil Parikh `. + - |Fix| :class:`model_selection.GridSearchCV`, :class:`model_selection.RandomizedSearchCV`, and :class:`model_selection.HalvingGridSearchCV` now don't change the given @@ -454,17 +474,23 @@ Changelog when `radius` is large and `algorithm="brute"` with non-Euclidean metrics. :pr:`26828` by :user:`Omar Salman `. 
+- |Fix| Improve error message for :class:`neighbors.LocalOutlierFactor` + when it is invoked with `n_samples = n_neighbors`. + :pr:`23317` by :user:`Bharat Raghunathan `. + :mod:`sklearn.preprocessing` ............................ -- |MajorFeature| :class:`preprocessing.MinMaxScaler`, :class:`preprocessing.MaxAbsScaler` - and :class:`preprocessing.KernelCenterer` now - support the `Array API `_. Array API +- |MajorFeature| The following classes now support the + `Array API `_. Array API support is considered experimental and might evolve without being subject to our usual rolling deprecation cycle policy. See :ref:`array_api` for more details. - :pr:`26243` by `Tim Head`_, :pr:`27110` by :user:`Edoardo Abati ` and - :pr:`27556` by :user:`Edoardo Abati `. + + - :class:`preprocessing.MinMaxScaler` :pr:`26243` by `Tim Head`_ + - :class:`preprocessing.MaxAbsScaler` :pr:`27110` by :user:`Edoardo Abati ` + - :class:`preprocessing.KernelCenterer` :pr:`27556` by :user:`Edoardo Abati ` + - :class:`preprocessing.Normalizer` :pr:`27558` by :user:`Edoardo Abati ` - |Efficiency| :class:`preprocessing.OrdinalEncoder` avoids calculating missing indices twice to improve efficiency. @@ -517,6 +543,10 @@ Changelog accept the same sparse input formats for SciPy sparse matrices and arrays. :pr:`27372` by :user:`Guillaume Lemaitre `. +- |Enhancement| :func:`~utils.multiclass.is_multilabel` now supports the Array API + compatible inputs. + :pr:`27601` by :user:`Yaroslav Korobko `. + - |Fix| :func:`sklearn.utils.check_array` should accept both matrix and array from the sparse SciPy module. The previous implementation would fail if `copy=True` by calling specific NumPy `np.may_share_memory` that does not work with SciPy sparse @@ -534,6 +564,10 @@ Changelog misdetects the CPU architecture. :pr:`27614` by :user:`Olivier Grisel `. 
+- |Fix| Error message in :func:`~utils.check_array` when a sparse matrix was + passed but `accept_sparse` is `False` now suggests to use `.toarray()` and not + `X.toarray()`. :pr:`27757` by :user:`Lucy Liu `. + Code and Documentation Contributors ----------------------------------- diff --git a/examples/applications/plot_cyclical_feature_engineering.py b/examples/applications/plot_cyclical_feature_engineering.py index 6a62def94fa31..b23a3bc5d4782 100644 --- a/examples/applications/plot_cyclical_feature_engineering.py +++ b/examples/applications/plot_cyclical_feature_engineering.py @@ -172,14 +172,11 @@ # # Here, we use the modern # :class:`~sklearn.ensemble.HistGradientBoostingRegressor` with native support -# for categorical features. Therefore, we only do minimal ordinal encoding for -# the categorical variables and then -# let the model know that it should treat those as categorical variables by -# using a dedicated tree splitting rule. Since we use an ordinal encoder, we -# pass the list of categorical values explicitly to use a logical order when -# encoding the categories as integers instead of the lexicographical order. -# This also has the added benefit of preventing any issue with unknown -# categories when using cross-validation. +# for categorical features. Therefore, we only need to set +# `categorical_features="from_dtype"` such that features with categorical dtype +# are considered categorical features. For reference, we extract the categorical +# features from the dataframe based on the dtype. The internal trees use a dedicated +# tree splitting rule for these features. 
# # The numerical variables need no preprocessing and, for the sake of simplicity, # we only try the default hyper-parameters for this model: @@ -187,42 +184,10 @@ from sklearn.ensemble import HistGradientBoostingRegressor from sklearn.model_selection import cross_validate from sklearn.pipeline import make_pipeline -from sklearn.preprocessing import OrdinalEncoder -categorical_columns = [ - "weather", - "season", - "holiday", - "workingday", -] -categories = [ - ["clear", "misty", "rain"], - ["spring", "summer", "fall", "winter"], - ["False", "True"], - ["False", "True"], -] -ordinal_encoder = OrdinalEncoder(categories=categories) - - -gbrt_pipeline = make_pipeline( - ColumnTransformer( - transformers=[ - ("categorical", ordinal_encoder, categorical_columns), - ], - remainder="passthrough", - # Use short feature names to make it easier to specify the categorical - # variables in the HistGradientBoostingRegressor in the next - # step of the pipeline. - verbose_feature_names_out=False, - ), - HistGradientBoostingRegressor( - max_iter=300, - early_stopping=True, - validation_fraction=0.1, - categorical_features=categorical_columns, - random_state=42, - ), -).set_output(transform="pandas") +gbrt = HistGradientBoostingRegressor(categorical_features="from_dtype", random_state=42) +categorical_columns = X.columns[X.dtypes == "category"] +print("Categorical features:", categorical_columns.tolist()) # %% # @@ -256,14 +221,7 @@ def evaluate(model, X, y, cv, model_prop=None, model_step=None): ) -evaluate( - gbrt_pipeline, - X, - y, - cv=ts_cv, - model_prop="n_iter_", - model_step="histgradientboostingregressor", -) +evaluate(gbrt, X, y, cv=ts_cv, model_prop="n_iter_") # %% # We see that we set `max_iter` large enough such that early stopping took place. 
@@ -735,8 +693,8 @@ def periodic_spline_transformer(period, n_splines=None, degree=3): # Let us now have a qualitative look at the predictions of the kernel models # and of the gradient boosted trees that should be able to better model # non-linear interactions between features: -gbrt_pipeline.fit(X.iloc[train_0], y.iloc[train_0]) -gbrt_predictions = gbrt_pipeline.predict(X.iloc[test_0]) +gbrt.fit(X.iloc[train_0], y.iloc[train_0]) +gbrt_predictions = gbrt.predict(X.iloc[test_0]) one_hot_poly_pipeline.fit(X.iloc[train_0], y.iloc[train_0]) one_hot_poly_predictions = one_hot_poly_pipeline.predict(X.iloc[test_0]) diff --git a/examples/ensemble/plot_gradient_boosting_categorical.py b/examples/ensemble/plot_gradient_boosting_categorical.py index 0dd0a84243b4d..32fa66ae6e979 100644 --- a/examples/ensemble/plot_gradient_boosting_categorical.py +++ b/examples/ensemble/plot_gradient_boosting_categorical.py @@ -138,26 +138,17 @@ # ----------------------------------------------------------- # We now create a :class:`~ensemble.HistGradientBoostingRegressor` estimator # that will natively handle categorical features. This estimator will not treat -# categorical features as ordered quantities. +# categorical features as ordered quantities. We set +# `categorical_features="from_dtype"` such that features with categorical dtype +# are considered categorical features. # -# Since the :class:`~ensemble.HistGradientBoostingRegressor` requires category -# values to be encoded in `[0, n_unique_categories - 1]`, we still rely on an -# :class:`~preprocessing.OrdinalEncoder` to pre-process the data. -# -# The main difference between this pipeline and the previous one is that in +# The main difference between this estimator and the previous one is that in # this one, we let the :class:`~ensemble.HistGradientBoostingRegressor` know # which features are categorical. 
-# The ordinal encoder will first output the categorical features, and then the -# continuous (passed-through) features - -hist_native = make_pipeline( - ordinal_encoder, - HistGradientBoostingRegressor( - random_state=42, - categorical_features=categorical_columns, - ), -).set_output(transform="pandas") +hist_native = HistGradientBoostingRegressor( + random_state=42, categorical_features="from_dtype" +) # %% # Model comparison @@ -256,10 +247,15 @@ def plot_results(figure_title): # of trees and the depth of each tree. for pipe in (hist_dropped, hist_one_hot, hist_ordinal, hist_native): - pipe.set_params( - histgradientboostingregressor__max_depth=3, - histgradientboostingregressor__max_iter=15, - ) + if pipe is hist_native: + # The native model does not use a pipeline so, we can set the parameters + # directly. + pipe.set_params(max_depth=3, max_iter=15) + else: + pipe.set_params( + histgradientboostingregressor__max_depth=3, + histgradientboostingregressor__max_iter=15, + ) dropped_result = cross_validate(hist_dropped, X, y, cv=n_cv_folds, scoring=scoring) one_hot_result = cross_validate(hist_one_hot, X, y, cv=n_cv_folds, scoring=scoring) diff --git a/sklearn/_min_dependencies.py b/sklearn/_min_dependencies.py index 7c0b8be6c1295..f91ca6594e987 100644 --- a/sklearn/_min_dependencies.py +++ b/sklearn/_min_dependencies.py @@ -53,7 +53,7 @@ "plotly": ("5.14.0", "docs, examples"), # XXX: Pin conda-lock to the latest released version (needs manual update # from time to time) - "conda-lock": ("2.1.1", "maintenance"), + "conda-lock": ("2.4.2", "maintenance"), } diff --git a/sklearn/calibration.py b/sklearn/calibration.py index 9490918729ac9..54c1b6827ba78 100644 --- a/sklearn/calibration.py +++ b/sklearn/calibration.py @@ -75,8 +75,9 @@ class CalibratedClassifierCV(ClassifierMixin, MetaEstimatorMixin, BaseEstimator) `ensemble=False`, cross-validation is used to obtain unbiased predictions, via :func:`~sklearn.model_selection.cross_val_predict`, which are then used for 
calibration. For prediction, the base estimator, trained using all - the data, is used. This is the method implemented when `probabilities=True` - for :mod:`sklearn.svm` estimators. + the data, is used. This is the prediction method implemented when + `probabilities=True` for :class:`~sklearn.svm.SVC` and :class:`~sklearn.svm.NuSVC` + estimators (see :ref:`User Guide ` for details). Already fitted classifiers can be calibrated via the parameter `cv="prefit"`. In this case, no cross-validation is used and all provided diff --git a/sklearn/cluster/_hdbscan/hdbscan.py b/sklearn/cluster/_hdbscan/hdbscan.py index 52e99027b61c5..161c64dbab053 100644 --- a/sklearn/cluster/_hdbscan/hdbscan.py +++ b/sklearn/cluster/_hdbscan/hdbscan.py @@ -124,7 +124,7 @@ def _brute_mst(mutual_reachability, min_samples): # Compute the minimum spanning tree for the sparse graph sparse_min_spanning_tree = csgraph.minimum_spanning_tree(mutual_reachability) rows, cols = sparse_min_spanning_tree.nonzero() - mst = np.core.records.fromarrays( + mst = np.rec.fromarrays( [rows, cols, sparse_min_spanning_tree.data], dtype=MST_edge_dtype, ) diff --git a/sklearn/cluster/tests/test_affinity_propagation.py b/sklearn/cluster/tests/test_affinity_propagation.py index 9f82957d2067a..319385635376e 100644 --- a/sklearn/cluster/tests/test_affinity_propagation.py +++ b/sklearn/cluster/tests/test_affinity_propagation.py @@ -106,7 +106,7 @@ def test_affinity_propagation_affinity_shape(): @pytest.mark.parametrize("csr_container", CSR_CONTAINERS) def test_affinity_propagation_precomputed_with_sparse_input(csr_container): - err_msg = "A sparse matrix was passed, but dense data is required" + err_msg = "Sparse data was passed for X, but dense data is required" with pytest.raises(TypeError, match=err_msg): AffinityPropagation(affinity="precomputed").fit(csr_container((3, 3))) diff --git a/sklearn/covariance/_elliptic_envelope.py b/sklearn/covariance/_elliptic_envelope.py index fe109dddd5303..ed99a38c0ee56 100644 --- 
a/sklearn/covariance/_elliptic_envelope.py +++ b/sklearn/covariance/_elliptic_envelope.py @@ -35,7 +35,7 @@ class EllipticEnvelope(OutlierMixin, MinCovDet): support_fraction : float, default=None The proportion of points to be included in the support of the raw MCD estimate. If None, the minimum value of support_fraction will - be used within the algorithm: `[n_sample + n_features + 1] / 2`. + be used within the algorithm: `(n_samples + n_features + 1) / 2 * n_samples`. Range is (0, 1). contamination : float, default=0.1 diff --git a/sklearn/covariance/_robust_covariance.py b/sklearn/covariance/_robust_covariance.py index a6b32e50a6c1f..c90e855ca6768 100644 --- a/sklearn/covariance/_robust_covariance.py +++ b/sklearn/covariance/_robust_covariance.py @@ -373,8 +373,8 @@ def fast_mcd( The proportion of points to be included in the support of the raw MCD estimate. Default is `None`, which implies that the minimum value of `support_fraction` will be used within the algorithm: - `(n_sample + n_features + 1) / 2`. This parameter must be in the - range (0, 1). + `(n_samples + n_features + 1) / 2 * n_samples`. This parameter must be + in the range (0, 1). cov_computation_method : callable, \ default=:func:`sklearn.covariance.empirical_covariance` @@ -607,8 +607,8 @@ class MinCovDet(EmpiricalCovariance): The proportion of points to be included in the support of the raw MCD estimate. Default is None, which implies that the minimum value of support_fraction will be used within the algorithm: - `(n_sample + n_features + 1) / 2`. The parameter must be in the range - (0, 1]. + `(n_samples + n_features + 1) / 2 * n_samples`. The parameter must be + in the range (0, 1]. random_state : int, RandomState instance or None, default=None Determines the pseudo random number generator for shuffling the data. 
diff --git a/sklearn/datasets/_twenty_newsgroups.py b/sklearn/datasets/_twenty_newsgroups.py index 5973e998c34b9..22ac716871cc2 100644 --- a/sklearn/datasets/_twenty_newsgroups.py +++ b/sklearn/datasets/_twenty_newsgroups.py @@ -31,6 +31,7 @@ import re import shutil import tarfile +from contextlib import suppress import joblib import numpy as np @@ -69,15 +70,16 @@ def _download_20newsgroups(target_dir, cache_path): train_path = os.path.join(target_dir, TRAIN_FOLDER) test_path = os.path.join(target_dir, TEST_FOLDER) - if not os.path.exists(target_dir): - os.makedirs(target_dir) + os.makedirs(target_dir, exist_ok=True) logger.info("Downloading dataset from %s (14 MB)", ARCHIVE.url) archive_path = _fetch_remote(ARCHIVE, dirname=target_dir) logger.debug("Decompressing %s", archive_path) tarfile.open(archive_path, "r:gz").extractall(path=target_dir) - os.remove(archive_path) + + with suppress(FileNotFoundError): + os.remove(archive_path) # Store a zipped pickle cache = dict( diff --git a/sklearn/decomposition/_pca.py b/sklearn/decomposition/_pca.py index 046d121ac1934..238454f237895 100644 --- a/sklearn/decomposition/_pca.py +++ b/sklearn/decomposition/_pca.py @@ -22,7 +22,7 @@ from ..base import _fit_context from ..utils import check_random_state from ..utils._arpack import _init_arpack_v0 -from ..utils._array_api import get_namespace +from ..utils._array_api import _convert_to_numpy, get_namespace from ..utils._param_validation import Interval, RealNotInt, StrOptions from ..utils.deprecation import deprecated from ..utils.extmath import fast_logdet, randomized_svd, stable_cumsum, svd_flip @@ -60,6 +60,7 @@ def _assess_dimension(spectrum, rank, n_samples): Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604 `_ """ + xp, _ = get_namespace(spectrum) n_features = spectrum.shape[0] if not 1 <= rank < n_features: @@ -73,29 +74,29 @@ def _assess_dimension(spectrum, rank, n_samples): # small and won't be the max anyway. 
Also, it can lead to numerical # issues below when computing pa, in particular in log((spectrum[i] - # spectrum[j]) because this will take the log of something very small. - return -np.inf + return -xp.inf pu = -rank * log(2.0) for i in range(1, rank + 1): pu += ( gammaln((n_features - i + 1) / 2.0) - - log(np.pi) * (n_features - i + 1) / 2.0 + - log(xp.pi) * (n_features - i + 1) / 2.0 ) - pl = np.sum(np.log(spectrum[:rank])) + pl = xp.sum(xp.log(spectrum[:rank])) pl = -pl * n_samples / 2.0 - v = max(eps, np.sum(spectrum[rank:]) / (n_features - rank)) - pv = -np.log(v) * n_samples * (n_features - rank) / 2.0 + v = max(eps, xp.sum(spectrum[rank:]) / (n_features - rank)) + pv = -log(v) * n_samples * (n_features - rank) / 2.0 m = n_features * rank - rank * (rank + 1.0) / 2.0 - pp = log(2.0 * np.pi) * (m + rank) / 2.0 + pp = log(2.0 * xp.pi) * (m + rank) / 2.0 pa = 0.0 - spectrum_ = spectrum.copy() + spectrum_ = xp.asarray(spectrum, copy=True) spectrum_[rank:n_features] = v for i in range(rank): - for j in range(i + 1, len(spectrum)): + for j in range(i + 1, spectrum.shape[0]): pa += log( (spectrum[i] - spectrum[j]) * (1.0 / spectrum_[j] - 1.0 / spectrum_[i]) ) + log(n_samples) @@ -116,7 +117,7 @@ def _infer_dimension(spectrum, n_samples): ll[0] = -xp.inf # we don't want to return n_components = 0 for rank in range(1, spectrum.shape[0]): ll[rank] = _assess_dimension(spectrum, rank, n_samples) - return ll.argmax() + return xp.argmax(ll) class PCA(_BasePCA): @@ -578,8 +579,24 @@ def _fit_full(self, X, n_components): # side='right' ensures that number of features selected # their variance is always greater than n_components float # passed. 
More discussion in issue: #15669 - ratio_cumsum = stable_cumsum(explained_variance_ratio_) - n_components = xp.searchsorted(ratio_cumsum, n_components, side="right") + 1 + if is_array_api_compliant: + # Convert to numpy as xp.cumsum and xp.searchsorted are not + # part of the Array API standard yet: + # + # https://github.com/data-apis/array-api/issues/597 + # https://github.com/data-apis/array-api/issues/688 + # + # Furthermore, it's not always safe to call them for namespaces + # that already implement them: for instance as + # cupy.searchsorted does not accept a float as second argument. + explained_variance_ratio_np = _convert_to_numpy( + explained_variance_ratio_, xp=xp + ) + else: + explained_variance_ratio_np = explained_variance_ratio_ + ratio_cumsum = stable_cumsum(explained_variance_ratio_np) + n_components = np.searchsorted(ratio_cumsum, n_components, side="right") + 1 + # Compute noise covariance using Probabilistic PCA model # The sigma2 maximum likelihood (cf. eq. 12.46) if n_components < min(n_features, n_samples): diff --git a/sklearn/decomposition/tests/test_pca.py b/sklearn/decomposition/tests/test_pca.py index 9cbd8936dc1dd..4df4124d5a765 100644 --- a/sklearn/decomposition/tests/test_pca.py +++ b/sklearn/decomposition/tests/test_pca.py @@ -19,6 +19,7 @@ from sklearn.utils._testing import _array_api_for_tests, assert_allclose from sklearn.utils.estimator_checks import ( _get_check_estimator_ids, + check_array_api_input, check_array_api_input_and_values, ) from sklearn.utils.fixes import CSC_CONTAINERS, CSR_CONTAINERS @@ -859,7 +860,7 @@ def check_array_api_get_precision(name, estimator, array_namespace, device, dtyp "estimator", [ PCA(n_components=2, svd_solver="full"), - PCA(n_components=2, svd_solver="full", whiten=True), + PCA(n_components=0.1, svd_solver="full", whiten=True), PCA( n_components=2, svd_solver="randomized", @@ -874,6 +875,28 @@ def test_pca_array_api_compliance(estimator, check, array_namespace, device, dty check(name, estimator, 
array_namespace, device=device, dtype=dtype) +@pytest.mark.parametrize( + "array_namespace, device, dtype", yield_namespace_device_dtype_combinations() +) +@pytest.mark.parametrize( + "check", + [check_array_api_input, check_array_api_get_precision], + ids=_get_check_estimator_ids, +) +@pytest.mark.parametrize( + "estimator", + [ + # PCA with mle cannot use check_array_api_input_and_values because of + # rounding errors in the noisy (low variance) components. + PCA(n_components="mle", svd_solver="full"), + ], + ids=_get_check_estimator_ids, +) +def test_pca_mle_array_api_compliance(estimator, check, array_namespace, device, dtype): + name = estimator.__class__.__name__ + check(name, estimator, array_namespace, device=device, dtype=dtype) + + def test_array_api_error_and_warnings_on_unsupported_params(): pytest.importorskip("array_api_compat") xp = pytest.importorskip("numpy.array_api") diff --git a/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.py b/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.py index 61bb4189293a4..eb5e9dd0a2c58 100644 --- a/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.py +++ b/sklearn/ensemble/_hist_gradient_boosting/gradient_boosting.py @@ -1,11 +1,14 @@ """Fast Gradient Boosting decision trees for classification and regression.""" + # Author: Nicolas Hug import itertools +import warnings from abc import ABC, abstractmethod +from contextlib import suppress from functools import partial from numbers import Integral, Real -from timeit import default_timer as time +from time import time import numpy as np @@ -25,16 +28,21 @@ _fit_context, is_classifier, ) +from ...compose import ColumnTransformer from ...metrics import check_scoring from ...model_selection import train_test_split -from ...preprocessing import LabelEncoder -from ...utils import check_random_state, compute_sample_weight, resample +from ...preprocessing import FunctionTransformer, LabelEncoder, OrdinalEncoder +from ...utils import 
_safe_indexing, check_random_state, compute_sample_weight, resample +from ...utils._encode import _unique from ...utils._openmp_helpers import _openmp_effective_n_threads -from ...utils._param_validation import Interval, RealNotInt, StrOptions +from ...utils._param_validation import Hidden, Interval, RealNotInt, StrOptions from ...utils.multiclass import check_classification_targets from ...utils.validation import ( _check_monotonic_cst, _check_sample_weight, + _check_y, + _is_pandas_df, + check_array, check_consistent_length, check_is_fitted, ) @@ -105,6 +113,7 @@ class BaseHistGradientBoosting(BaseEstimator, ABC): "max_depth": [Interval(Integral, 1, None, closed="left"), None], "min_samples_leaf": [Interval(Integral, 1, None, closed="left")], "l2_regularization": [Interval(Real, 0, None, closed="left")], + "max_features": [Interval(RealNotInt, 0, 1, closed="right")], "monotonic_cst": ["array-like", dict, None], "interaction_cst": [ list, @@ -120,7 +129,12 @@ class BaseHistGradientBoosting(BaseEstimator, ABC): ], "tol": [Interval(Real, 0, None, closed="left")], "max_bins": [Interval(Integral, 2, 255, closed="both")], - "categorical_features": ["array-like", None], + "categorical_features": [ + "array-like", + StrOptions({"from_dtype"}), + Hidden(StrOptions({"warn"})), + None, + ], "warm_start": ["boolean"], "early_stopping": [StrOptions({"auto"}), "boolean"], "scoring": [str, callable, None], @@ -139,6 +153,7 @@ def __init__( max_depth, min_samples_leaf, l2_regularization, + max_features, max_bins, categorical_features, monotonic_cst, @@ -159,6 +174,7 @@ def __init__( self.max_depth = max_depth self.min_samples_leaf = min_samples_leaf self.l2_regularization = l2_regularization + self.max_features = max_features self.max_bins = max_bins self.monotonic_cst = monotonic_cst self.interaction_cst = interaction_cst @@ -190,9 +206,93 @@ class weights. """ return sample_weight + def _preprocess_X(self, X, *, reset): + """Preprocess and validate X. 
# At this point, reset is True, i.e. this runs during `fit`.
ColumnTransformer's output places the categorical features at the + # beginning + categorical_remapped = np.zeros(n_features, dtype=bool) + n_categorical = self.is_categorical_.sum() + categorical_remapped[:n_categorical] = True + + self._is_categorical_remapped = categorical_remapped + + # OrdinalEncoder will map categories to [0,..., cardinality - 1] + renamed_categories = [np.arange(len(c), dtype=X_DTYPE) for c in categories_] + + n_numerical = n_features - n_categorical + known_categories = renamed_categories + [None] * n_numerical + return X, known_categories + def _check_categories(self, X): """Check and validate categorical features in X + Parameters + ---------- + X : {array-like, pandas DataFrame} of shape (n_samples, n_features) + Input data. + Return ------ is_categorical : ndarray of shape (n_features,) or None, dtype=bool @@ -204,10 +304,42 @@ def _check_categories(self, X): - None if the feature is not categorical None if no feature is categorical. """ - if self.categorical_features is None: + X_is_dataframe = _is_pandas_df(X) + + # TODO(1.6): Remove warning and change default to "from_dtype" in v1.6 + if ( + isinstance(self.categorical_features, str) + and self.categorical_features == "warn" + ): + if X_is_dataframe and (X.dtypes == "category").any(): + warnings.warn( + ( + "The categorical_features parameter will change to 'from_dtype'" + " in v1.6. The 'from_dtype' option automatically treats" + " categorical dtypes in a DataFrame as categorical features." 
# At this point `_validate_data` was not called yet because the
- negative_categories = categories < 0 - if negative_categories.any(): - categories = categories[~negative_categories] - - if hasattr(self, "feature_names_in_"): - feature_name = f"'{self.feature_names_in_[f_idx]}'" + if use_pandas_categorical: + # pandas categories do not include missing values so there is + # no need to filter them out. + categories = X.iloc[:, f_idx].unique().dropna().to_numpy() + # OrdinalEncoder requires categories backed by numerical values + # to be sorted + if categories.dtype.kind not in "OUS": + categories = np.sort(categories) + else: + categories = _unique(_safe_indexing(X, f_idx, axis=1)) + missing = np.isnan(categories) + if missing.any(): + categories = categories[~missing] + + # Treat negative values for categorical features as missing values. + negative_categories = categories < 0 + if negative_categories.any(): + categories = categories[~negative_categories] + + if feature_names_in_ is not None: + feature_name = f"'{feature_names_in_[f_idx]}'" else: feature_name = f"at index {f_idx}" @@ -300,7 +445,7 @@ def _check_categories(self, X): f"has a cardinality of {categories.size}." 
) - if (categories >= self.max_bins).any(): + if not use_pandas_categorical and (categories >= self.max_bins).any(): raise ValueError( f"Categorical feature {feature_name} is expected to " f"be encoded with values < {self.max_bins} but the " @@ -378,7 +523,8 @@ def fit(self, X, y, sample_weight=None): acc_compute_hist_time = 0.0 # time spent computing histograms # time spent predicting X for gradient and hessians update acc_prediction_time = 0.0 - X, y = self._validate_data(X, y, dtype=[X_DTYPE], force_all_finite=False) + X, known_categories = self._preprocess_X(X, reset=True) + y = _check_y(y, estimator=self) y = self._encode_y(y) check_consistent_length(X, y) # Do not create unit sample weights by default to later skip some @@ -393,10 +539,12 @@ def fit(self, X, y, sample_weight=None): rng = check_random_state(self.random_state) # When warm starting, we want to reuse the same seed that was used - # the first time fit was called (e.g. for subsampling or for the - # train/val split). - if not (self.warm_start and self._is_fitted()): + # the first time fit was called (e.g. train/val split). + # For feature subsampling, we want to continue with the rng we started with. + if not self.warm_start or not self._is_fitted(): self._random_seed = rng.randint(np.iinfo(np.uint32).max, dtype="u8") + feature_subsample_seed = rng.randint(np.iinfo(np.uint32).max, dtype="u8") + self._feature_subsample_rng = np.random.default_rng(feature_subsample_seed) self._validate_parameters() monotonic_cst = _check_monotonic_cst(self, self.monotonic_cst) @@ -404,8 +552,6 @@ def fit(self, X, y, sample_weight=None): # used for validation in predict n_samples, self._n_features = X.shape - self.is_categorical_, known_categories = self._check_categories(X) - # Encode constraints into a list of sets of features indices (integers). 
interaction_cst = self._check_interaction_cst(self._n_features) @@ -486,7 +632,7 @@ def fit(self, X, y, sample_weight=None): n_bins = self.max_bins + 1 # + 1 for missing values self._bin_mapper = _BinMapper( n_bins=n_bins, - is_categorical=self.is_categorical_, + is_categorical=self._is_categorical_remapped, known_categories=known_categories, random_state=self._random_seed, n_threads=n_threads, @@ -693,13 +839,15 @@ def fit(self, X, y, sample_weight=None): n_bins=n_bins, n_bins_non_missing=self._bin_mapper.n_bins_non_missing_, has_missing_values=has_missing_values, - is_categorical=self.is_categorical_, + is_categorical=self._is_categorical_remapped, monotonic_cst=monotonic_cst, interaction_cst=interaction_cst, max_leaf_nodes=self.max_leaf_nodes, max_depth=self.max_depth, min_samples_leaf=self.min_samples_leaf, l2_regularization=self.l2_regularization, + feature_fraction_per_split=self.max_features, + rng=self._feature_subsample_rng, shrinkage=self.learning_rate, n_threads=n_threads, ) @@ -1036,17 +1184,11 @@ def _raw_predict(self, X, n_threads=None): raw_predictions : array, shape (n_samples, n_trees_per_iteration) The raw predicted values. """ + check_is_fitted(self) is_binned = getattr(self, "_in_fit", False) if not is_binned: - X = self._validate_data( - X, dtype=X_DTYPE, force_all_finite=False, reset=False - ) - check_is_fitted(self) - if X.shape[1] != self._n_features: - raise ValueError( - "X has {} features but this estimator was trained with " - "{} features.".format(X.shape[1], self._n_features) - ) + X = self._preprocess_X(X, reset=False) + n_samples = X.shape[0] raw_predictions = np.zeros( shape=(n_samples, self.n_trees_per_iteration_), @@ -1107,8 +1249,8 @@ def _staged_raw_predict(self, X): The raw predictions of the input samples. The order of the classes corresponds to that in the attribute :term:`classes_`. 
""" - X = self._validate_data(X, dtype=X_DTYPE, force_all_finite=False, reset=False) check_is_fitted(self) + X = self._preprocess_X(X, reset=False) if X.shape[1] != self._n_features: raise ValueError( "X has {} features but this estimator was trained with " @@ -1261,8 +1403,16 @@ class HistGradientBoostingRegressor(RegressorMixin, BaseHistGradientBoosting): than a few hundred samples, it is recommended to lower this value since only very shallow trees would be built. l2_regularization : float, default=0 - The L2 regularization parameter. Use ``0`` for no regularization - (default). + The L2 regularization parameter. Use ``0`` for no regularization (default). + max_features : float, default=1.0 + Proportion of randomly chosen features in each and every node split. + This is a form of regularization, smaller values make the trees weaker + learners and might prevent overfitting. + If interaction constraints from `interaction_cst` are present, only allowed + features are taken into account for the subsampling. + + .. versionadded:: 1.4 + max_bins : int, default=255 The maximum number of bins to use for non-missing values. Before training, each feature of the input array `X` is binned into @@ -1280,6 +1430,8 @@ class HistGradientBoostingRegressor(RegressorMixin, BaseHistGradientBoosting): features. - str array-like: names of categorical features (assuming the training data has feature names). + - `"from_dtype"`: Pandas categorical dtypes are considered categorical. + The input must be a pandas DataFrame to use this feature. For each categorical feature, there must be at most `max_bins` unique categories, and each categorical value must be less then `max_bins - 1`. @@ -1295,6 +1447,10 @@ class HistGradientBoostingRegressor(RegressorMixin, BaseHistGradientBoosting): .. versionchanged:: 1.2 Added support for feature names. + .. versionchanged:: 1.4 + Added `"from_dtype"` option. The default will change to `"from_dtype"` in + v1.6. 
+ monotonic_cst : array-like of int of shape (n_features) or dict, default=None Monotonic constraint to enforce on each feature are specified using the following integer values: @@ -1463,8 +1619,9 @@ def __init__( max_depth=None, min_samples_leaf=20, l2_regularization=0.0, + max_features=1.0, max_bins=255, - categorical_features=None, + categorical_features="warn", monotonic_cst=None, interaction_cst=None, warm_start=False, @@ -1484,6 +1641,7 @@ def __init__( max_depth=max_depth, min_samples_leaf=min_samples_leaf, l2_regularization=l2_regularization, + max_features=max_features, max_bins=max_bins, monotonic_cst=monotonic_cst, interaction_cst=interaction_cst, @@ -1620,7 +1778,16 @@ class HistGradientBoostingClassifier(ClassifierMixin, BaseHistGradientBoosting): than a few hundred samples, it is recommended to lower this value since only very shallow trees would be built. l2_regularization : float, default=0 - The L2 regularization parameter. Use 0 for no regularization. + The L2 regularization parameter. Use ``0`` for no regularization (default). + max_features : float, default=1.0 + Proportion of randomly chosen features in each and every node split. + This is a form of regularization, smaller values make the trees weaker + learners and might prevent overfitting. + If interaction constraints from `interaction_cst` are present, only allowed + features are taken into account for the subsampling. + + .. versionadded:: 1.4 + max_bins : int, default=255 The maximum number of bins to use for non-missing values. Before training, each feature of the input array `X` is binned into @@ -1638,6 +1805,8 @@ class HistGradientBoostingClassifier(ClassifierMixin, BaseHistGradientBoosting): features. - str array-like: names of categorical features (assuming the training data has feature names). + - `"from_dtype"`: Pandas categorical dtypes are considered categorical. + The input must be a pandas DataFrame to use this feature. 
For each categorical feature, there must be at most `max_bins` unique categories, and each categorical value must be less then `max_bins - 1`. @@ -1653,6 +1822,10 @@ class HistGradientBoostingClassifier(ClassifierMixin, BaseHistGradientBoosting): .. versionchanged:: 1.2 Added support for feature names. + .. versionchanged:: 1.4 + Added `"from_dtype"` option. The default will change to `"from_dtype"` in + v1.6. + monotonic_cst : array-like of int of shape (n_features) or dict, default=None Monotonic constraint to enforce on each feature are specified using the following integer values: @@ -1823,8 +1996,9 @@ def __init__( max_depth=None, min_samples_leaf=20, l2_regularization=0.0, + max_features=1.0, max_bins=255, - categorical_features=None, + categorical_features="warn", monotonic_cst=None, interaction_cst=None, warm_start=False, @@ -1845,6 +2019,7 @@ def __init__( max_depth=max_depth, min_samples_leaf=min_samples_leaf, l2_regularization=l2_regularization, + max_features=max_features, max_bins=max_bins, categorical_features=categorical_features, monotonic_cst=monotonic_cst, diff --git a/sklearn/ensemble/_hist_gradient_boosting/grower.py b/sklearn/ensemble/_hist_gradient_boosting/grower.py index 5d9d131de6b07..44392362fd60c 100644 --- a/sklearn/ensemble/_hist_gradient_boosting/grower.py +++ b/sklearn/ensemble/_hist_gradient_boosting/grower.py @@ -164,6 +164,10 @@ class TreeGrower: min_gain_to_split : float, default=0. The minimum gain needed to split a node. Splits with lower gain will be ignored. + min_hessian_to_split : float, default=1e-3 + The minimum sum of hessians needed in each node. Splits that result in + at least one child having a sum of hessians less than + ``min_hessian_to_split`` are discarded. n_bins : int, default=256 The total number of bins, including the bin for missing values. Used to define the shape of the histograms. @@ -189,10 +193,12 @@ class TreeGrower: List of interaction constraints. l2_regularization : float, default=0. 
The L2 regularization parameter. - min_hessian_to_split : float, default=1e-3 - The minimum sum of hessians needed in each node. Splits that result in - at least one child having a sum of hessians less than - ``min_hessian_to_split`` are discarded. + feature_fraction_per_split : float, default=1 + Proportion of randomly chosen features in each and every node split. + This is a form of regularization, smaller values make the trees weaker + learners and might prevent overfitting. + rng : Generator + Numpy random Generator used for feature subsampling. shrinkage : float, default=1. The shrinkage parameter to apply to the leaves values, also known as learning rate. @@ -234,6 +240,7 @@ def __init__( max_depth=None, min_samples_leaf=20, min_gain_to_split=0.0, + min_hessian_to_split=1e-3, n_bins=256, n_bins_non_missing=None, has_missing_values=False, @@ -241,7 +248,8 @@ def __init__( monotonic_cst=None, interaction_cst=None, l2_regularization=0.0, - min_hessian_to_split=1e-3, + feature_fraction_per_split=1.0, + rng=np.random.default_rng(), shrinkage=1.0, n_threads=None, ): @@ -297,33 +305,35 @@ def __init__( ) missing_values_bin_idx = n_bins - 1 self.splitter = Splitter( - X_binned, - n_bins_non_missing, - missing_values_bin_idx, - has_missing_values, - is_categorical, - monotonic_cst, - l2_regularization, - min_hessian_to_split, - min_samples_leaf, - min_gain_to_split, - hessians_are_constant, - n_threads, + X_binned=X_binned, + n_bins_non_missing=n_bins_non_missing, + missing_values_bin_idx=missing_values_bin_idx, + has_missing_values=has_missing_values, + is_categorical=is_categorical, + monotonic_cst=monotonic_cst, + l2_regularization=l2_regularization, + min_hessian_to_split=min_hessian_to_split, + min_samples_leaf=min_samples_leaf, + min_gain_to_split=min_gain_to_split, + hessians_are_constant=hessians_are_constant, + feature_fraction_per_split=feature_fraction_per_split, + rng=rng, + n_threads=n_threads, ) + self.X_binned = X_binned + self.max_leaf_nodes = 
max_leaf_nodes + self.max_depth = max_depth + self.min_samples_leaf = min_samples_leaf + self.min_gain_to_split = min_gain_to_split self.n_bins_non_missing = n_bins_non_missing self.missing_values_bin_idx = missing_values_bin_idx - self.max_leaf_nodes = max_leaf_nodes self.has_missing_values = has_missing_values + self.is_categorical = is_categorical self.monotonic_cst = monotonic_cst self.interaction_cst = interaction_cst - self.is_categorical = is_categorical self.l2_regularization = l2_regularization - self.n_features = X_binned.shape[1] - self.max_depth = max_depth - self.min_samples_leaf = min_samples_leaf - self.X_binned = X_binned - self.min_gain_to_split = min_gain_to_split self.shrinkage = shrinkage + self.n_features = X_binned.shape[1] self.n_threads = n_threads self.splittable_nodes = [] self.finalized_leaves = [] diff --git a/sklearn/ensemble/_hist_gradient_boosting/splitting.pyx b/sklearn/ensemble/_hist_gradient_boosting/splitting.pyx index 17f5769dfaf14..c310169da9fad 100644 --- a/sklearn/ensemble/_hist_gradient_boosting/splitting.pyx +++ b/sklearn/ensemble/_hist_gradient_boosting/splitting.pyx @@ -9,8 +9,9 @@ cimport cython from cython.parallel import prange +cimport numpy as cnp import numpy as np -from libc.math cimport INFINITY +from libc.math cimport INFINITY, ceil from libc.stdlib cimport malloc, free, qsort from libc.string cimport memcpy @@ -24,6 +25,8 @@ from ._bitset cimport init_bitset from ._bitset cimport set_bitset from ._bitset cimport in_bitset +cnp.import_array() + cdef struct split_info_struct: # Same as the SplitInfo class, but we need a C struct to use it in the @@ -155,6 +158,11 @@ cdef class Splitter: be ignored. hessians_are_constant: bool, default is False Whether hessians are constant. + feature_fraction_per_split : float, default=1 + Proportion of randomly chosen features in each and every node split. + This is a form of regularization, smaller values make the trees weaker + learners and might prevent overfitting. 
+ rng : Generator n_threads : int, default=1 Number of OpenMP threads to use. """ @@ -171,6 +179,8 @@ cdef class Splitter: Y_DTYPE_C min_hessian_to_split unsigned int min_samples_leaf Y_DTYPE_C min_gain_to_split + Y_DTYPE_C feature_fraction_per_split + rng unsigned int [::1] partition unsigned int [::1] left_indices_buffer @@ -189,6 +199,8 @@ cdef class Splitter: unsigned int min_samples_leaf=20, Y_DTYPE_C min_gain_to_split=0., unsigned char hessians_are_constant=False, + Y_DTYPE_C feature_fraction_per_split=1.0, + rng=np.random.RandomState(), unsigned int n_threads=1): self.X_binned = X_binned @@ -196,13 +208,15 @@ cdef class Splitter: self.n_bins_non_missing = n_bins_non_missing self.missing_values_bin_idx = missing_values_bin_idx self.has_missing_values = has_missing_values - self.monotonic_cst = monotonic_cst self.is_categorical = is_categorical + self.monotonic_cst = monotonic_cst self.l2_regularization = l2_regularization self.min_hessian_to_split = min_hessian_to_split self.min_samples_leaf = min_samples_leaf self.min_gain_to_split = min_gain_to_split self.hessians_are_constant = hessians_are_constant + self.feature_fraction_per_split = feature_fraction_per_split + self.rng = rng self.n_threads = n_threads # The partition array maps each sample index into the leaves of the @@ -475,6 +489,9 @@ cdef class Splitter: const signed char [::1] monotonic_cst = self.monotonic_cst int n_threads = self.n_threads bint has_interaction_cst = False + Y_DTYPE_C feature_fraction_per_split = self.feature_fraction_per_split + cnp.npy_bool [:] subsample_mask + int n_subsampled_features has_interaction_cst = allowed_features is not None if has_interaction_cst: @@ -482,13 +499,26 @@ cdef class Splitter: else: n_allowed_features = self.n_features + if feature_fraction_per_split < 1.0: + # We do all random sampling before the nogil and make sure that we sample + # exactly n_subsampled_features >= 1 features. 
+ n_subsampled_features = max( + 1, + int(ceil(feature_fraction_per_split * n_allowed_features)), + ) + subsample_mask_arr = np.full(n_allowed_features, False) + subsample_mask_arr[:n_subsampled_features] = True + self.rng.shuffle(subsample_mask_arr) + # https://github.com/numpy/numpy/issues/18273 + subsample_mask = subsample_mask_arr + with nogil: split_infos = malloc( n_allowed_features * sizeof(split_info_struct)) - # split_info_idx is index of split_infos of size n_features_allowed - # features_idx is the index of the feature column in X + # split_info_idx is index of split_infos of size n_allowed_features. + # features_idx is the index of the feature column in X. for split_info_idx in prange(n_allowed_features, schedule='static', num_threads=n_threads): if has_interaction_cst: @@ -506,6 +536,13 @@ cdef class Splitter: split_infos[split_info_idx].gain = -1 split_infos[split_info_idx].is_categorical = is_categorical[feature_idx] + # Note that subsample_mask is indexed by split_info_idx and not by + # feature_idx because we only need to exclude the same features again + # and again. We do NOT need to access the features directly by using + # allowed_features. 
+ if feature_fraction_per_split < 1.0 and not subsample_mask[split_info_idx]: + continue + if is_categorical[feature_idx]: self._find_best_bin_to_split_category( feature_idx, has_missing_values[feature_idx], diff --git a/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py b/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py index 4851c8e129203..fa54e30fe0264 100644 --- a/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py +++ b/sklearn/ensemble/_hist_gradient_boosting/tests/test_gradient_boosting.py @@ -1019,6 +1019,7 @@ def test_categorical_encoding_strategies(): clf_cat = HistGradientBoostingClassifier( max_iter=1, max_depth=1, categorical_features=native_cat_spec ) + clf_cat.fit(X, y) # Using native categorical encoding, we get perfect predictions with just # one split @@ -1193,7 +1194,7 @@ def test_categorical_bad_encoding_errors(Est, use_pandas, feature_name): msg = ( f"Categorical feature {feature_name} is expected to be encoded " "with values < 2 but the largest value for the encoded categories " - "is 2.0." + "is 2." 
) with pytest.raises(ValueError, match=msg): gb.fit(X, y) @@ -1387,3 +1388,120 @@ def test_unknown_category_that_are_negative(): X_test_nan = np.asarray([[1, np.nan], [3, np.nan]]) assert_allclose(hist.predict(X_test_neg), hist.predict(X_test_nan)) + + +@pytest.mark.parametrize( + "HistGradientBoosting", + [HistGradientBoostingClassifier, HistGradientBoostingRegressor], +) +def test_pandas_categorical_results_same_as_ndarray(HistGradientBoosting): + """Check that pandas categorical give the same results as ndarray.""" + pd = pytest.importorskip("pandas") + + rng = np.random.RandomState(42) + n_samples = 5_000 + n_cardinality = 50 + max_bins = 100 + f_num = rng.rand(n_samples) + f_cat = rng.randint(n_cardinality, size=n_samples) + + # Make f_cat an informative feature + y = (f_cat % 3 == 0) & (f_num > 0.2) + + X = np.c_[f_num, f_cat] + X_df = pd.DataFrame( + {"f_num": f_num, "f_cat": pd.Series(f_cat, dtype="category")}, + columns=["f_num", "f_cat"], + ) + + X_train, X_test, X_train_df, X_test_df, y_train, y_test = train_test_split( + X, X_df, y, random_state=0 + ) + + hist_kwargs = dict(max_iter=10, max_bins=max_bins, random_state=0) + hist_np = HistGradientBoosting(categorical_features=[False, True], **hist_kwargs) + hist_np.fit(X_train, y_train) + + hist_pd = HistGradientBoosting(categorical_features="from_dtype", **hist_kwargs) + hist_pd.fit(X_train_df, y_train) + + # Check categories are correct and sorted + categories = hist_pd._preprocessor.named_transformers_["encoder"].categories[0] + assert_array_equal(categories, np.unique(f_cat)) + + assert len(hist_np._predictors) == len(hist_pd._predictors) + for predictor_1, predictor_2 in zip(hist_np._predictors, hist_pd._predictors): + assert len(predictor_1[0].nodes) == len(predictor_2[0].nodes) + + score_np = hist_np.score(X_test, y_test) + score_pd = hist_pd.score(X_test_df, y_test) + assert score_np == pytest.approx(score_pd) + assert_allclose(hist_np.predict(X_test), hist_pd.predict(X_test_df)) + + 
+@pytest.mark.parametrize( + "HistGradientBoosting", + [HistGradientBoostingClassifier, HistGradientBoostingRegressor], +) +def test_pandas_categorical_errors(HistGradientBoosting): + """Check error cases for pandas categorical feature.""" + pd = pytest.importorskip("pandas") + + msg = "Categorical feature 'f_cat' is expected to have a cardinality <= 16" + hist = HistGradientBoosting(categorical_features="from_dtype", max_bins=16) + + rng = np.random.RandomState(42) + f_cat = rng.randint(0, high=100, size=100) + X_df = pd.DataFrame({"f_cat": pd.Series(f_cat, dtype="category")}) + y = rng.randint(0, high=2, size=100) + + with pytest.raises(ValueError, match=msg): + hist.fit(X_df, y) + + +def test_categorical_different_order_same_model(): + """Check that the order of the categorical gives same model.""" + pd = pytest.importorskip("pandas") + rng = np.random.RandomState(42) + n_samples = 1_000 + f_ints = rng.randint(low=0, high=2, size=n_samples) + + # Construct a target with some noise + y = f_ints.copy() + flipped = rng.choice([True, False], size=n_samples, p=[0.1, 0.9]) + y[flipped] = 1 - y[flipped] + + # Construct categorical where 0 -> A and 1 -> B and 1 -> A and 0 -> B + f_cat = pd.Categorical(f_ints) + f_cat_a_b = f_cat.rename_categories({0: "A", 1: "B"}) + f_cat_b_a = f_cat.rename_categories({0: "B", 1: "A"}) + + df_a_b = pd.DataFrame({"f_cat": f_cat_a_b}) + df_b_a = pd.DataFrame({"f_cat": f_cat_b_a}) + + hist_a_b = HistGradientBoostingClassifier( + categorical_features="from_dtype", random_state=0 + ) + hist_b_a = HistGradientBoostingClassifier( + categorical_features="from_dtype", random_state=0 + ) + + hist_a_b.fit(df_a_b, y) + hist_b_a.fit(df_b_a, y) + + assert len(hist_a_b._predictors) == len(hist_b_a._predictors) + for predictor_1, predictor_2 in zip(hist_a_b._predictors, hist_b_a._predictors): + assert len(predictor_1[0].nodes) == len(predictor_2[0].nodes) + + +# TODO(1.6): Remove warning and change default in 1.6 +def test_categorical_features_warn(): 
+ """Raise warning when there are categorical features in the input DataFrame.""" + pd = pytest.importorskip("pandas") + X = pd.DataFrame({"a": pd.Series([1, 2, 3], dtype="category"), "b": [4, 5, 6]}) + y = [0, 1, 0] + hist = HistGradientBoostingClassifier(random_state=0) + + msg = "The categorical_features parameter will change to 'from_dtype' in v1.6" + with pytest.warns(FutureWarning, match=msg): + hist.fit(X, y) diff --git a/sklearn/ensemble/_hist_gradient_boosting/tests/test_splitting.py b/sklearn/ensemble/_hist_gradient_boosting/tests/test_splitting.py index f862273beadf5..388697340e08b 100644 --- a/sklearn/ensemble/_hist_gradient_boosting/tests/test_splitting.py +++ b/sklearn/ensemble/_hist_gradient_boosting/tests/test_splitting.py @@ -959,3 +959,112 @@ def test_split_interaction_constraints(): # make sure feature 0 and feature 3 are split on in the constraint setting assert set(allowed_features) == set(split_features) + + +@pytest.mark.parametrize("forbidden_features", [set(), {1, 3}]) +def test_split_feature_fraction_per_split(forbidden_features): + """Check that feature_fraction_per_split is respected. + + Because we set `n_features = 4` and `feature_fraction_per_split = 0.25`, it means + that calling `splitter.find_node_split` will be allowed to select a split for a + single completely random feature at each call. So if we iterate enough, we should + cover all the allowed features, irrespective of the values of the gradients and + Hessians of the objective. 
+ """ + n_features = 4 + allowed_features = np.array( + list(set(range(n_features)) - forbidden_features), dtype=np.uint32 + ) + n_bins = 5 + n_samples = 40 + l2_regularization = 0.0 + min_hessian_to_split = 1e-3 + min_samples_leaf = 1 + min_gain_to_split = 0.0 + rng = np.random.default_rng(42) + + sample_indices = np.arange(n_samples, dtype=np.uint32) + all_gradients = rng.uniform(low=0.5, high=1, size=n_samples).astype(G_H_DTYPE) + sum_gradients = all_gradients.sum() + all_hessians = np.ones(1, dtype=G_H_DTYPE) + sum_hessians = n_samples + hessians_are_constant = True + + X_binned = np.asfortranarray( + rng.integers(low=0, high=n_bins - 1, size=(n_samples, n_features)), + dtype=X_BINNED_DTYPE, + ) + X_binned = np.asfortranarray(X_binned, dtype=X_BINNED_DTYPE) + builder = HistogramBuilder( + X_binned, + n_bins, + all_gradients, + all_hessians, + hessians_are_constant, + n_threads, + ) + histograms = builder.compute_histograms_brute(sample_indices) + value = compute_node_value( + sum_gradients, sum_hessians, -np.inf, np.inf, l2_regularization + ) + n_bins_non_missing = np.array([n_bins] * X_binned.shape[1], dtype=np.uint32) + has_missing_values = np.array([False] * X_binned.shape[1], dtype=np.uint8) + monotonic_cst = np.array( + [MonotonicConstraint.NO_CST] * X_binned.shape[1], dtype=np.int8 + ) + is_categorical = np.zeros_like(monotonic_cst, dtype=np.uint8) + missing_values_bin_idx = n_bins - 1 + + params = dict( + X_binned=X_binned, + n_bins_non_missing=n_bins_non_missing, + missing_values_bin_idx=missing_values_bin_idx, + has_missing_values=has_missing_values, + is_categorical=is_categorical, + monotonic_cst=monotonic_cst, + l2_regularization=l2_regularization, + min_hessian_to_split=min_hessian_to_split, + min_samples_leaf=min_samples_leaf, + min_gain_to_split=min_gain_to_split, + hessians_are_constant=hessians_are_constant, + rng=rng, + ) + splitter_subsample = Splitter( + feature_fraction_per_split=0.25, # THIS is the important setting here. 
+ **params, + ) + splitter_all_features = Splitter(feature_fraction_per_split=1.0, **params) + + assert np.all(sample_indices == splitter_subsample.partition) + + split_features_subsample = [] + split_features_all = [] + # The loop is to ensure that we split at least once on each feature. + # This is tracked by split_features and checked at the end. + for i in range(20): + si_root = splitter_subsample.find_node_split( + n_samples, + histograms, + sum_gradients, + sum_hessians, + value, + allowed_features=allowed_features, + ) + split_features_subsample.append(si_root.feature_idx) + + # This second splitter is our "counterfactual". + si_root = splitter_all_features.find_node_split( + n_samples, + histograms, + sum_gradients, + sum_hessians, + value, + allowed_features=allowed_features, + ) + split_features_all.append(si_root.feature_idx) + + # Make sure all features are split on. + assert set(split_features_subsample) == set(allowed_features) + + # Make sure, our counterfactual always splits on same feature. + assert len(set(split_features_all)) == 1 diff --git a/sklearn/gaussian_process/kernels.py b/sklearn/gaussian_process/kernels.py index 9e52b9546ddc5..a498903e1a245 100644 --- a/sklearn/gaussian_process/kernels.py +++ b/sklearn/gaussian_process/kernels.py @@ -1949,7 +1949,7 @@ class ExpSineSquared(StationaryKernelMixin, NormalizedKernelMixin, Kernel): \frac{ 2\sin^2(\pi d(x_i, x_j)/p) }{ l^ 2} \right) where :math:`l` is the length scale of the kernel, :math:`p` the - periodicity of the kernel and :math:`d(\\cdot,\\cdot)` is the + periodicity of the kernel and :math:`d(\cdot,\cdot)` is the Euclidean distance. Read more in the :ref:`User Guide `. diff --git a/sklearn/linear_model/_logistic.py b/sklearn/linear_model/_logistic.py index e6ac6ff087945..03e44672d760d 100644 --- a/sklearn/linear_model/_logistic.py +++ b/sklearn/linear_model/_logistic.py @@ -624,34 +624,32 @@ def _log_reg_scoring_path( test : list of indices The indices of the test set. 
- pos_class : int, default=None + pos_class : int The class with respect to which we perform a one-vs-all fit. If None, then it is assumed that the given problem is binary. - Cs : int or list of floats, default=10 + Cs : int or list of floats Each of the values in Cs describes the inverse of regularization strength. If Cs is as an int, then a grid of Cs values are chosen in a logarithmic scale between 1e-4 and 1e4. - If not provided, then a fixed set of values for Cs are used. - scoring : callable, default=None + scoring : callable A string (see model evaluation documentation) or a scorer callable object / function with signature ``scorer(estimator, X, y)``. For a list of scoring functions - that can be used, look at :mod:`sklearn.metrics`. The - default scoring option used is accuracy_score. + that can be used, look at :mod:`sklearn.metrics`. - fit_intercept : bool, default=False + fit_intercept : bool If False, then the bias term is set to zero. Else the last term of each coef_ gives us the intercept. - max_iter : int, default=100 + max_iter : int Maximum number of iterations for the solver. - tol : float, default=1e-4 + tol : float Tolerance for stopping criteria. - class_weight : dict or 'balanced', default=None + class_weight : dict or 'balanced' Weights associated with classes in the form ``{class_label: weight}``. If not given, all classes are supposed to have weight one. @@ -662,25 +660,24 @@ def _log_reg_scoring_path( Note that these weights will be multiplied with sample_weight (passed through the fit method) if sample_weight is specified. - verbose : int, default=0 + verbose : int For the liblinear and lbfgs solvers set verbose to any positive number for verbosity. - solver : {'lbfgs', 'liblinear', 'newton-cg', 'newton-cholesky', 'sag', 'saga'}, \ - default='lbfgs' + solver : {'lbfgs', 'liblinear', 'newton-cg', 'newton-cholesky', 'sag', 'saga'} Decides which solver to use. 
- penalty : {'l1', 'l2', 'elasticnet'}, default='l2' + penalty : {'l1', 'l2', 'elasticnet'} Used to specify the norm used in the penalization. The 'newton-cg', 'sag' and 'lbfgs' solvers support only l2 penalties. 'elasticnet' is only supported by the 'saga' solver. - dual : bool, default=False + dual : bool Dual or primal formulation. Dual formulation is only implemented for l2 penalty with liblinear solver. Prefer dual=False when n_samples > n_features. - intercept_scaling : float, default=1. + intercept_scaling : float Useful only when the solver 'liblinear' is used and self.fit_intercept is set to True. In this case, x becomes [x, self.intercept_scaling], @@ -692,26 +689,26 @@ def _log_reg_scoring_path( To lessen the effect of regularization on synthetic feature weight (and therefore on the intercept) intercept_scaling has to be increased. - multi_class : {'auto', 'ovr', 'multinomial'}, default='auto' + multi_class : {'auto', 'ovr', 'multinomial'} If the option chosen is 'ovr', then a binary problem is fit for each label. For 'multinomial' the loss minimised is the multinomial loss fit across the entire probability distribution, *even when the data is binary*. 'multinomial' is unavailable when solver='liblinear'. - random_state : int, RandomState instance, default=None + random_state : int, RandomState instance Used when ``solver`` == 'sag', 'saga' or 'liblinear' to shuffle the data. See :term:`Glossary ` for details. - max_squared_sum : float, default=None + max_squared_sum : float Maximum squared sum of X over samples. Used only in SAG solver. If None, it will be computed, going through all the samples. The value should be precomputed to speed up cross validation. - sample_weight : array-like of shape(n_samples,), default=None + sample_weight : array-like of shape(n_samples,) Array of weights that are assigned to individual samples. If not provided, then each sample is given unit weight. 
- l1_ratio : float, default=None + l1_ratio : float The Elastic-Net mixing parameter, with ``0 <= l1_ratio <= 1``. Only used if ``penalty='elasticnet'``. Setting ``l1_ratio=0`` is equivalent to using ``penalty='l2'``, while setting ``l1_ratio=1`` is equivalent diff --git a/sklearn/linear_model/tests/test_base.py b/sklearn/linear_model/tests/test_base.py index 87b44ff9b4320..71f516dd76ed8 100644 --- a/sklearn/linear_model/tests/test_base.py +++ b/sklearn/linear_model/tests/test_base.py @@ -100,7 +100,7 @@ def test_linear_regression_sample_weights( def test_raises_value_error_if_positive_and_sparse(): - error_msg = "A sparse matrix was passed, but dense data is required." + error_msg = "Sparse data was passed for X, but dense data is required." # X must not be sparse if positive == True X = sparse.eye(10) y = np.ones(10) diff --git a/sklearn/metrics/cluster/_supervised.py b/sklearn/metrics/cluster/_supervised.py index 454da95c6678a..b8c80e6292b31 100644 --- a/sklearn/metrics/cluster/_supervised.py +++ b/sklearn/metrics/cluster/_supervised.py @@ -139,6 +139,16 @@ def contingency_matrix( otherwise with the ``dtype`` argument. If ``eps`` is given, the dtype will be float. Will be a ``sklearn.sparse.csr_matrix`` if ``sparse=True``. 
+ + Examples + -------- + >>> from sklearn.metrics.cluster import contingency_matrix + >>> labels_true = [0, 0, 1, 1, 2, 2] + >>> labels_pred = [1, 0, 2, 1, 0, 2] + >>> contingency_matrix(labels_true, labels_pred) + array([[1, 1, 0], + [0, 1, 1], + [1, 0, 1]]) """ if eps is not None and sparse: diff --git a/sklearn/metrics/tests/test_dist_metrics.py b/sklearn/metrics/tests/test_dist_metrics.py index f4f9c136cf98e..b7b2e04b11396 100644 --- a/sklearn/metrics/tests/test_dist_metrics.py +++ b/sklearn/metrics/tests/test_dist_metrics.py @@ -368,7 +368,7 @@ def test_readonly_kwargs(): ( csr_container([1, 1.5, 1]), TypeError, - "A sparse matrix was passed, but dense data is required", + "Sparse data was passed for w, but dense data is required", ) for csr_container in CSR_CONTAINERS ], diff --git a/sklearn/model_selection/_split.py b/sklearn/model_selection/_split.py index 959a8c361d879..11398ec2cf0e2 100644 --- a/sklearn/model_selection/_split.py +++ b/sklearn/model_selection/_split.py @@ -68,7 +68,7 @@ class GroupsConsumerMixin(_MetadataRequester): class BaseCrossValidator(_MetadataRequester, metaclass=ABCMeta): - """Base class for all cross-validators + """Base class for all cross-validators. Implementations must define `_iter_test_masks` or `_iter_test_indices`. """ @@ -128,14 +128,14 @@ def _iter_test_indices(self, X=None, y=None, groups=None): @abstractmethod def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator""" + """Returns the number of splitting iterations in the cross-validator.""" def __repr__(self): return _build_repr(self) class LeaveOneOut(BaseCrossValidator): - """Leave-One-Out cross-validator + """Leave-One-Out cross-validator. Provides train/test indices to split data in train/test sets. 
Each sample is used once as a test set (singleton) while the remaining @@ -189,7 +189,7 @@ def _iter_test_indices(self, X, y=None, groups=None): return range(n_samples) def get_n_splits(self, X, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -214,7 +214,7 @@ def get_n_splits(self, X, y=None, groups=None): class LeavePOut(BaseCrossValidator): - """Leave-P-Out cross-validator + """Leave-P-Out cross-validator. Provides train/test indices to split data in train/test sets. This results in testing on all distinct samples of size p, while the remaining n - p @@ -286,7 +286,7 @@ def _iter_test_indices(self, X, y=None, groups=None): yield np.array(combination) def get_n_splits(self, X, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -306,7 +306,7 @@ def get_n_splits(self, X, y=None, groups=None): class _BaseKFold(BaseCrossValidator, metaclass=ABCMeta): - """Base class for KFold, GroupKFold, and StratifiedKFold""" + """Base class for K-Fold cross-validators and TimeSeriesSplit.""" @abstractmethod def __init__(self, n_splits, *, shuffle, random_state): @@ -378,7 +378,7 @@ def split(self, X, y=None, groups=None): yield train, test def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -400,7 +400,7 @@ def get_n_splits(self, X=None, y=None, groups=None): class KFold(_BaseKFold): - """K-Folds cross-validator + """K-Fold cross-validator. Provides train/test indices to split data in train/test sets. Split dataset into k consecutive folds (without shuffling by default). 
@@ -624,7 +624,7 @@ def split(self, X, y=None, groups=None): class StratifiedKFold(_BaseKFold): - """Stratified K-Folds cross-validator. + """Stratified K-Fold cross-validator. Provides train/test indices to split data in train/test sets. @@ -810,7 +810,7 @@ def split(self, X, y, groups=None): class StratifiedGroupKFold(GroupsConsumerMixin, _BaseKFold): - """Stratified K-Folds iterator variant with non-overlapping groups. + """Stratified K-Fold iterator variant with non-overlapping groups. This cross-validation object is a variation of StratifiedKFold attempts to return stratified folds with non-overlapping groups. The folds are made by @@ -1016,7 +1016,7 @@ def _find_best_fold(self, y_counts_per_fold, y_cnt, group_y_counts): class TimeSeriesSplit(_BaseKFold): - """Time Series cross-validator + """Time Series cross-validator. Provides train/test indices to split time series data samples that are observed at fixed time intervals, in train/test sets. @@ -1199,7 +1199,7 @@ def split(self, X, y=None, groups=None): class LeaveOneGroupOut(GroupsConsumerMixin, BaseCrossValidator): - """Leave One Group Out cross-validator + """Leave One Group Out cross-validator. Provides train/test indices to split data such that each training set is comprised of all samples except ones belonging to one specific group. @@ -1264,7 +1264,7 @@ def _iter_test_masks(self, X, y, groups): yield groups == i def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -1318,7 +1318,7 @@ def split(self, X, y=None, groups=None): class LeavePGroupsOut(GroupsConsumerMixin, BaseCrossValidator): - """Leave P Group(s) Out cross-validator + """Leave P Group(s) Out cross-validator. Provides train/test indices to split data according to a third-party provided group. 
This group information can be used to encode arbitrary @@ -1397,7 +1397,7 @@ def _iter_test_masks(self, X, y, groups): yield test_index def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -1528,7 +1528,7 @@ def split(self, X, y=None, groups=None): yield train_index, test_index def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -1695,7 +1695,7 @@ def __init__(self, *, n_splits=5, n_repeats=10, random_state=None): class BaseShuffleSplit(_MetadataRequester, metaclass=ABCMeta): - """Base class for ShuffleSplit and StratifiedShuffleSplit""" + """Base class for ShuffleSplit and StratifiedShuffleSplit.""" # This indicates that by default CV splitters don't have a "groups" kwarg, # unless indicated by inheriting from ``GroupsConsumerMixin``. @@ -1751,7 +1751,7 @@ def _iter_indices(self, X, y=None, groups=None): """Generate (train, test) indices""" def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -1776,7 +1776,7 @@ def __repr__(self): class ShuffleSplit(BaseShuffleSplit): - """Random permutation cross-validator + """Random permutation cross-validator. Yields indices to split data into training and test sets. @@ -1897,7 +1897,7 @@ def _iter_indices(self, X, y=None, groups=None): class GroupShuffleSplit(GroupsConsumerMixin, ShuffleSplit): - """Shuffle-Group(s)-Out cross-validation iterator + """Shuffle-Group(s)-Out cross-validation iterator. Provides randomized train/test indices to split data according to a third-party provided group. 
This group information can be used to encode @@ -2040,7 +2040,7 @@ def split(self, X, y=None, groups=None): class StratifiedShuffleSplit(BaseShuffleSplit): - """Stratified ShuffleSplit cross-validator + """Stratified ShuffleSplit cross-validator. Provides train/test indices to split data in train/test sets. @@ -2312,7 +2312,7 @@ def _validate_shuffle_split(n_samples, test_size, train_size, default_test_size= class PredefinedSplit(BaseCrossValidator): - """Predefined split cross-validator + """Predefined split cross-validator. Provides train/test indices to split data into train/test sets using a predefined scheme specified by the user with the ``test_fold`` parameter. @@ -2396,7 +2396,7 @@ def _iter_test_masks(self): yield test_mask def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. Parameters ---------- @@ -2424,7 +2424,7 @@ def __init__(self, cv): self.cv = list(cv) def get_n_splits(self, X=None, y=None, groups=None): - """Returns the number of splitting iterations in the cross-validator + """Returns the number of splitting iterations in the cross-validator. 
Parameters ---------- diff --git a/sklearn/model_selection/_validation.py b/sklearn/model_selection/_validation.py index f3c8735043408..d3110cb847b4c 100644 --- a/sklearn/model_selection/_validation.py +++ b/sklearn/model_selection/_validation.py @@ -1952,6 +1952,7 @@ def learning_curve( ) for train, test in train_test_proportions ) + _warn_or_raise_about_fit_failures(results, error_score) results = _aggregate_score_dicts(results) train_scores = results["train_scores"].reshape(-1, n_unique_ticks).T test_scores = results["test_scores"].reshape(-1, n_unique_ticks).T diff --git a/sklearn/model_selection/tests/test_validation.py b/sklearn/model_selection/tests/test_validation.py index ff4615f42c79e..acf4d27e0180e 100644 --- a/sklearn/model_selection/tests/test_validation.py +++ b/sklearn/model_selection/tests/test_validation.py @@ -2417,6 +2417,39 @@ def test_learning_curve_partial_fit_regressors(): learning_curve(MLPRegressor(), X, y, exploit_incremental_learning=True, cv=2) +def test_learning_curve_some_failing_fits_warning(global_random_seed): + """Checks for fit failures in `learning_curve` and raises the required warning""" + + X, y = make_classification( + n_samples=30, + n_classes=3, + n_informative=6, + shuffle=False, + random_state=global_random_seed, + ) + # sorting the target to trigger SVC error on the 2 first splits because a single + # class is present + sorted_idx = np.argsort(y) + X, y = X[sorted_idx], y[sorted_idx] + + svc = SVC() + warning_message = "10 fits failed out of a total of 25" + + with pytest.warns(FitFailedWarning, match=warning_message): + _, train_score, test_score, *_ = learning_curve( + svc, X, y, cv=5, error_score=np.nan + ) + + # the first 2 splits should lead to warnings and thus np.nan scores + for idx in range(2): + assert np.isnan(train_score[idx]).all() + assert np.isnan(test_score[idx]).all() + + for idx in range(2, train_score.shape[0]): + assert not np.isnan(train_score[idx]).any() + assert not np.isnan(test_score[idx]).any() 
+ + def test_cross_validate_return_indices(global_random_seed): """Check the behaviour of `return_indices` in `cross_validate`.""" X, y = load_iris(return_X_y=True) diff --git a/sklearn/neighbors/_base.py b/sklearn/neighbors/_base.py index 519db9bead3d3..848c8b7c9dc5a 100644 --- a/sklearn/neighbors/_base.py +++ b/sklearn/neighbors/_base.py @@ -813,9 +813,15 @@ class from an array representing our data set and ask who's n_samples_fit = self.n_samples_fit_ if n_neighbors > n_samples_fit: + if query_is_train: + n_neighbors -= 1 # ok to modify inplace because an error is raised + inequality_str = "n_neighbors < n_samples_fit" + else: + inequality_str = "n_neighbors <= n_samples_fit" raise ValueError( - "Expected n_neighbors <= n_samples, " - " but n_samples = %d, n_neighbors = %d" % (n_samples_fit, n_neighbors) + f"Expected {inequality_str}, but " + f"n_neighbors = {n_neighbors}, n_samples_fit = {n_samples_fit}, " + f"n_samples = {X.shape[0]}" # include n_samples for common tests ) n_jobs = effective_n_jobs(self.n_jobs) diff --git a/sklearn/neighbors/tests/test_lof.py b/sklearn/neighbors/tests/test_lof.py index 221d78243915f..7233beddafe9c 100644 --- a/sklearn/neighbors/tests/test_lof.py +++ b/sklearn/neighbors/tests/test_lof.py @@ -255,6 +255,50 @@ def test_sparse(csr_container): lof.fit_predict(X) +def test_lof_error_n_neighbors_too_large(): + """Check that we raise a proper error message when n_neighbors == n_samples. 
+ + Non-regression test for: + https://github.com/scikit-learn/scikit-learn/issues/17207 + """ + X = np.ones((7, 7)) + + msg = ( + "Expected n_neighbors < n_samples_fit, but n_neighbors = 1, " + "n_samples_fit = 1, n_samples = 1" + ) + with pytest.raises(ValueError, match=msg): + lof = neighbors.LocalOutlierFactor(n_neighbors=1).fit(X[:1]) + + lof = neighbors.LocalOutlierFactor(n_neighbors=2).fit(X[:2]) + assert lof.n_samples_fit_ == 2 + + msg = ( + "Expected n_neighbors < n_samples_fit, but n_neighbors = 2, " + "n_samples_fit = 2, n_samples = 2" + ) + with pytest.raises(ValueError, match=msg): + lof.kneighbors(None, n_neighbors=2) + + distances, indices = lof.kneighbors(None, n_neighbors=1) + assert distances.shape == (2, 1) + assert indices.shape == (2, 1) + + msg = ( + "Expected n_neighbors <= n_samples_fit, but n_neighbors = 3, " + "n_samples_fit = 2, n_samples = 7" + ) + with pytest.raises(ValueError, match=msg): + lof.kneighbors(X, n_neighbors=3) + + ( + distances, + indices, + ) = lof.kneighbors(X, n_neighbors=2) + assert distances.shape == (7, 2) + assert indices.shape == (7, 2) + + @pytest.mark.parametrize("algorithm", ["auto", "ball_tree", "kd_tree", "brute"]) @pytest.mark.parametrize("novelty", [True, False]) @pytest.mark.parametrize("contamination", [0.5, "auto"]) diff --git a/sklearn/preprocessing/_data.py b/sklearn/preprocessing/_data.py index 486574f50e991..cadd2737465d2 100644 --- a/sklearn/preprocessing/_data.py +++ b/sklearn/preprocessing/_data.py @@ -1858,12 +1858,14 @@ def normalize(X, norm="l2", *, axis=1, copy=True, return_norm=False): else: # axis == 1: sparse_format = "csr" + xp, _ = get_namespace(X) + X = check_array( X, accept_sparse=sparse_format, copy=copy, estimator="the normalize function", - dtype=FLOAT_DTYPES, + dtype=_array_api.supported_float_dtypes(xp), ) if axis == 0: X = X.T @@ -1887,13 +1889,13 @@ def normalize(X, norm="l2", *, axis=1, copy=True, return_norm=False): X.data[mask] /= norms_elementwise[mask] else: if norm == "l1": 
- norms = np.abs(X).sum(axis=1) + norms = xp.sum(xp.abs(X), axis=1) elif norm == "l2": norms = row_norms(X) elif norm == "max": - norms = np.max(abs(X), axis=1) + norms = xp.max(xp.abs(X), axis=1) norms = _handle_zeros_in_scale(norms, copy=False) - X /= norms[:, np.newaxis] + X /= norms[:, None] if axis == 0: X = X.T @@ -2031,7 +2033,7 @@ def transform(self, X, copy=None): return normalize(X, norm=self.norm, axis=1, copy=copy) def _more_tags(self): - return {"stateless": True} + return {"stateless": True, "array_api_support": True} @validate_params( diff --git a/sklearn/preprocessing/tests/test_data.py b/sklearn/preprocessing/tests/test_data.py index 70c48d5da91b0..5a70c3091a83d 100644 --- a/sklearn/preprocessing/tests/test_data.py +++ b/sklearn/preprocessing/tests/test_data.py @@ -691,7 +691,14 @@ def test_standard_check_array_of_inverse_transform(): ) @pytest.mark.parametrize( "estimator", - [MaxAbsScaler(), MinMaxScaler(), KernelCenterer()], + [ + MaxAbsScaler(), + MinMaxScaler(), + KernelCenterer(), + Normalizer(norm="l1"), + Normalizer(norm="l2"), + Normalizer(norm="max"), + ], ids=_get_check_estimator_ids, ) def test_scaler_array_api_compliance(estimator, check, array_namespace, device, dtype): diff --git a/sklearn/preprocessing/tests/test_encoders.py b/sklearn/preprocessing/tests/test_encoders.py index 1cb29863b1732..36badb6d33f04 100644 --- a/sklearn/preprocessing/tests/test_encoders.py +++ b/sklearn/preprocessing/tests/test_encoders.py @@ -1790,7 +1790,7 @@ def test_ordinal_encoder_sparse(csr_container): encoder = OrdinalEncoder() - err_msg = "A sparse matrix was passed, but dense data is required" + err_msg = "Sparse data was passed, but dense data is required" with pytest.raises(TypeError, match=err_msg): encoder.fit(X_sparse) with pytest.raises(TypeError, match=err_msg): diff --git a/sklearn/svm/tests/test_sparse.py b/sklearn/svm/tests/test_sparse.py index b8a9ef651221e..a7e517fdce893 100644 --- a/sklearn/svm/tests/test_sparse.py +++ 
b/sklearn/svm/tests/test_sparse.py @@ -1,18 +1,27 @@ import numpy as np import pytest -from numpy.testing import assert_array_almost_equal, assert_array_equal from scipy import sparse from sklearn import base, datasets, linear_model, svm from sklearn.datasets import load_digits, make_blobs, make_classification from sklearn.exceptions import ConvergenceWarning from sklearn.svm.tests import test_svm -from sklearn.utils._testing import ignore_warnings, skip_if_32bit +from sklearn.utils._testing import ( + assert_allclose, + assert_array_almost_equal, + assert_array_equal, + ignore_warnings, + skip_if_32bit, +) from sklearn.utils.extmath import safe_sparse_dot +from sklearn.utils.fixes import ( + CSR_CONTAINERS, + DOK_CONTAINERS, + LIL_CONTAINERS, +) # test sample 1 X = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]) -X_sp = sparse.lil_matrix(X) Y = [1, 1, 1, 2, 2, 2] T = np.array([[-1, -1], [2, 2], [3, 2]]) true_result = [1, 2, 2] @@ -27,23 +36,24 @@ [3, 3, 3], ] ) -X2_sp = sparse.dok_matrix(X2) Y2 = [1, 2, 2, 2, 3] T2 = np.array([[-1, -1, -1], [1, 1, 1], [2, 2, 2]]) true_result2 = [1, 2, 3] - iris = datasets.load_iris() -# permute rng = np.random.RandomState(0) perm = rng.permutation(iris.target.size) iris.data = iris.data[perm] iris.target = iris.target[perm] -# sparsify -iris.data = sparse.csr_matrix(iris.data) +X_blobs, y_blobs = make_blobs(n_samples=100, centers=10, random_state=0) + + +def check_svm_model_equal(dense_svm, X_train, y_train, X_test): + # Use the original svm model for dense fit and clone an exactly same + # svm model for sparse fit + sparse_svm = base.clone(dense_svm) -def check_svm_model_equal(dense_svm, sparse_svm, X_train, y_train, X_test): dense_svm.fit(X_train.toarray(), y_train) if sparse.issparse(X_test): X_test_dense = X_test.toarray() @@ -52,17 +62,14 @@ def check_svm_model_equal(dense_svm, sparse_svm, X_train, y_train, X_test): sparse_svm.fit(X_train, y_train) assert sparse.issparse(sparse_svm.support_vectors_) assert 
sparse.issparse(sparse_svm.dual_coef_) - assert_array_almost_equal( - dense_svm.support_vectors_, sparse_svm.support_vectors_.toarray() - ) - assert_array_almost_equal(dense_svm.dual_coef_, sparse_svm.dual_coef_.toarray()) + assert_allclose(dense_svm.support_vectors_, sparse_svm.support_vectors_.toarray()) + assert_allclose(dense_svm.dual_coef_, sparse_svm.dual_coef_.toarray()) if dense_svm.kernel == "linear": assert sparse.issparse(sparse_svm.coef_) assert_array_almost_equal(dense_svm.coef_, sparse_svm.coef_.toarray()) - assert_array_almost_equal(dense_svm.support_, sparse_svm.support_) - assert_array_almost_equal( - dense_svm.predict(X_test_dense), sparse_svm.predict(X_test) - ) + assert_allclose(dense_svm.support_, sparse_svm.support_) + assert_allclose(dense_svm.predict(X_test_dense), sparse_svm.predict(X_test)) + assert_array_almost_equal( dense_svm.decision_function(X_test_dense), sparse_svm.decision_function(X_test) ) @@ -74,7 +81,9 @@ def check_svm_model_equal(dense_svm, sparse_svm, X_train, y_train, X_test): msg = "cannot use sparse input in 'OneClassSVM' trained on dense data" else: assert_array_almost_equal( - dense_svm.predict_proba(X_test_dense), sparse_svm.predict_proba(X_test), 4 + dense_svm.predict_proba(X_test_dense), + sparse_svm.predict_proba(X_test), + decimal=4, ) msg = "cannot use sparse input in 'SVC' trained on dense data" if sparse.issparse(X_test): @@ -83,47 +92,41 @@ def check_svm_model_equal(dense_svm, sparse_svm, X_train, y_train, X_test): @skip_if_32bit -def test_svc(): - """Check that sparse SVC gives the same result as SVC""" - # many class dataset: - X_blobs, y_blobs = make_blobs(n_samples=100, centers=10, random_state=0) - X_blobs = sparse.csr_matrix(X_blobs) - - datasets = [ - [X_sp, Y, T], - [X2_sp, Y2, T2], +@pytest.mark.parametrize( + "X_train, y_train, X_test", + [ + [X, Y, T], + [X2, Y2, T2], [X_blobs[:80], y_blobs[:80], X_blobs[80:]], [iris.data, iris.target, iris.data], - ] - kernels = ["linear", "poly", "rbf", "sigmoid"] - 
for dataset in datasets: - for kernel in kernels: - clf = svm.SVC( - gamma=1, - kernel=kernel, - probability=True, - random_state=0, - decision_function_shape="ovo", - ) - sp_clf = svm.SVC( - gamma=1, - kernel=kernel, - probability=True, - random_state=0, - decision_function_shape="ovo", - ) - check_svm_model_equal(clf, sp_clf, *dataset) - - -def test_unsorted_indices(): + ], +) +@pytest.mark.parametrize("kernel", ["linear", "poly", "rbf", "sigmoid"]) +@pytest.mark.parametrize("sparse_container", CSR_CONTAINERS + LIL_CONTAINERS) +def test_svc(X_train, y_train, X_test, kernel, sparse_container): + """Check that sparse SVC gives the same result as SVC.""" + X_train = sparse_container(X_train) + + clf = svm.SVC( + gamma=1, + kernel=kernel, + probability=True, + random_state=0, + decision_function_shape="ovo", + ) + check_svm_model_equal(clf, X_train, y_train, X_test) + + +@pytest.mark.parametrize("csr_container", CSR_CONTAINERS) +def test_unsorted_indices(csr_container): # test that the result with sorted and unsorted indices in csr is the same # we use a subset of digits as iris, blobs or make_classification didn't # show the problem X, y = load_digits(return_X_y=True) - X_test = sparse.csr_matrix(X[50:100]) + X_test = csr_container(X[50:100]) X, y = X[:50], y[:50] - X_sparse = sparse.csr_matrix(X) + X_sparse = csr_container(X) coef_dense = ( svm.SVC(kernel="linear", probability=True, random_state=0).fit(X, y).coef_ ) @@ -132,7 +135,7 @@ def test_unsorted_indices(): ) coef_sorted = sparse_svc.coef_ # make sure dense and sparse SVM give the same result - assert_array_almost_equal(coef_dense, coef_sorted.toarray()) + assert_allclose(coef_dense, coef_sorted.toarray()) # reverse each row's indices def scramble_indices(X): @@ -142,7 +145,7 @@ def scramble_indices(X): row_slice = slice(*X.indptr[i - 1 : i + 1]) new_data.extend(X.data[row_slice][::-1]) new_indices.extend(X.indices[row_slice][::-1]) - return sparse.csr_matrix((new_data, new_indices, X.indptr), shape=X.shape) 
+ return csr_container((new_data, new_indices, X.indptr), shape=X.shape) X_sparse_unsorted = scramble_indices(X_sparse) X_test_unsorted = scramble_indices(X_test) @@ -155,68 +158,73 @@ def scramble_indices(X): ) coef_unsorted = unsorted_svc.coef_ # make sure unsorted indices give same result - assert_array_almost_equal(coef_unsorted.toarray(), coef_sorted.toarray()) - assert_array_almost_equal( + assert_allclose(coef_unsorted.toarray(), coef_sorted.toarray()) + assert_allclose( sparse_svc.predict_proba(X_test_unsorted), sparse_svc.predict_proba(X_test) ) -def test_svc_with_custom_kernel(): +@pytest.mark.parametrize("lil_container", LIL_CONTAINERS) +def test_svc_with_custom_kernel(lil_container): def kfunc(x, y): return safe_sparse_dot(x, y.T) + X_sp = lil_container(X) clf_lin = svm.SVC(kernel="linear").fit(X_sp, Y) clf_mylin = svm.SVC(kernel=kfunc).fit(X_sp, Y) assert_array_equal(clf_lin.predict(X_sp), clf_mylin.predict(X_sp)) @skip_if_32bit -def test_svc_iris(): +@pytest.mark.parametrize("csr_container", CSR_CONTAINERS) +@pytest.mark.parametrize("kernel", ["linear", "poly", "rbf"]) +def test_svc_iris(csr_container, kernel): # Test the sparse SVC with the iris dataset - for k in ("linear", "poly", "rbf"): - sp_clf = svm.SVC(kernel=k).fit(iris.data, iris.target) - clf = svm.SVC(kernel=k).fit(iris.data.toarray(), iris.target) + iris_data_sp = csr_container(iris.data) - assert_array_almost_equal( - clf.support_vectors_, sp_clf.support_vectors_.toarray() - ) - assert_array_almost_equal(clf.dual_coef_, sp_clf.dual_coef_.toarray()) - assert_array_almost_equal( - clf.predict(iris.data.toarray()), sp_clf.predict(iris.data) - ) - if k == "linear": - assert_array_almost_equal(clf.coef_, sp_clf.coef_.toarray()) + sp_clf = svm.SVC(kernel=kernel).fit(iris_data_sp, iris.target) + clf = svm.SVC(kernel=kernel).fit(iris.data, iris.target) + assert_allclose(clf.support_vectors_, sp_clf.support_vectors_.toarray()) + assert_allclose(clf.dual_coef_, sp_clf.dual_coef_.toarray()) + 
assert_allclose(clf.predict(iris.data), sp_clf.predict(iris_data_sp)) + if kernel == "linear": + assert_allclose(clf.coef_, sp_clf.coef_.toarray()) -def test_sparse_decision_function(): + +@pytest.mark.parametrize("csr_container", CSR_CONTAINERS) +def test_sparse_decision_function(csr_container): # Test decision_function # Sanity check, test that decision_function implemented in python # returns the same as the one in libsvm # multi class: + iris_data_sp = csr_container(iris.data) svc = svm.SVC(kernel="linear", C=0.1, decision_function_shape="ovo") - clf = svc.fit(iris.data, iris.target) + clf = svc.fit(iris_data_sp, iris.target) - dec = safe_sparse_dot(iris.data, clf.coef_.T) + clf.intercept_ + dec = safe_sparse_dot(iris_data_sp, clf.coef_.T) + clf.intercept_ - assert_array_almost_equal(dec, clf.decision_function(iris.data)) + assert_allclose(dec, clf.decision_function(iris_data_sp)) # binary: clf.fit(X, Y) dec = np.dot(X, clf.coef_.T) + clf.intercept_ prediction = clf.predict(X) - assert_array_almost_equal(dec.ravel(), clf.decision_function(X)) - assert_array_almost_equal( + assert_allclose(dec.ravel(), clf.decision_function(X)) + assert_allclose( prediction, clf.classes_[(clf.decision_function(X) > 0).astype(int).ravel()] ) expected = np.array([-1.0, -0.66, -1.0, 0.66, 1.0, 1.0]) - assert_array_almost_equal(clf.decision_function(X), expected, 2) + assert_array_almost_equal(clf.decision_function(X), expected, decimal=2) -def test_error(): +@pytest.mark.parametrize("lil_container", LIL_CONTAINERS) +def test_error(lil_container): # Test that it gives proper exception on deficient input clf = svm.SVC() + X_sp = lil_container(X) Y2 = Y[:-1] # wrong dimensions for labels with pytest.raises(ValueError): @@ -226,8 +234,14 @@ def test_error(): assert_array_equal(clf.predict(T), true_result) -def test_linearsvc(): +@pytest.mark.parametrize( + "lil_container, dok_container", zip(LIL_CONTAINERS, DOK_CONTAINERS) +) +def test_linearsvc(lil_container, dok_container): # Similar 
to test_SVC + X_sp = lil_container(X) + X2_sp = dok_container(X2) + clf = svm.LinearSVC(dual="auto", random_state=0).fit(X, Y) sp_clf = svm.LinearSVC(dual="auto", random_state=0).fit(X_sp, Y) @@ -236,7 +250,7 @@ def test_linearsvc(): assert_array_almost_equal(clf.coef_, sp_clf.coef_, decimal=4) assert_array_almost_equal(clf.intercept_, sp_clf.intercept_, decimal=4) - assert_array_almost_equal(clf.predict(X), sp_clf.predict(X_sp)) + assert_allclose(clf.predict(X), sp_clf.predict(X_sp)) clf.fit(X2, Y2) sp_clf.fit(X2_sp, Y2) @@ -245,41 +259,40 @@ def test_linearsvc(): assert_array_almost_equal(clf.intercept_, sp_clf.intercept_, decimal=4) -def test_linearsvc_iris(): +@pytest.mark.parametrize("csr_container", CSR_CONTAINERS) +def test_linearsvc_iris(csr_container): # Test the sparse LinearSVC with the iris dataset + iris_data_sp = csr_container(iris.data) - sp_clf = svm.LinearSVC(dual="auto", random_state=0).fit(iris.data, iris.target) - clf = svm.LinearSVC(dual="auto", random_state=0).fit( - iris.data.toarray(), iris.target - ) + sp_clf = svm.LinearSVC(dual="auto", random_state=0).fit(iris_data_sp, iris.target) + clf = svm.LinearSVC(dual="auto", random_state=0).fit(iris.data, iris.target) assert clf.fit_intercept == sp_clf.fit_intercept assert_array_almost_equal(clf.coef_, sp_clf.coef_, decimal=1) assert_array_almost_equal(clf.intercept_, sp_clf.intercept_, decimal=1) - assert_array_almost_equal( - clf.predict(iris.data.toarray()), sp_clf.predict(iris.data) - ) + assert_allclose(clf.predict(iris.data), sp_clf.predict(iris_data_sp)) # check decision_function - pred = np.argmax(sp_clf.decision_function(iris.data), 1) - assert_array_almost_equal(pred, clf.predict(iris.data.toarray())) + pred = np.argmax(sp_clf.decision_function(iris_data_sp), axis=1) + assert_allclose(pred, clf.predict(iris.data)) # sparsify the coefficients on both models and check that they still # produce the same results clf.sparsify() - assert_array_equal(pred, clf.predict(iris.data)) + 
assert_array_equal(pred, clf.predict(iris_data_sp)) sp_clf.sparsify() - assert_array_equal(pred, sp_clf.predict(iris.data)) + assert_array_equal(pred, sp_clf.predict(iris_data_sp)) -def test_weight(): +@pytest.mark.parametrize("csr_container", CSR_CONTAINERS) +def test_weight(csr_container): # Test class weights X_, y_ = make_classification( n_samples=200, n_features=100, weights=[0.833, 0.167], random_state=0 ) - X_ = sparse.csr_matrix(X_) + X_ = csr_container(X_) for clf in ( linear_model.LogisticRegression(), svm.LinearSVC(dual="auto", random_state=0), @@ -291,8 +304,11 @@ def test_weight(): assert np.sum(y_pred == y_[180:]) >= 11 -def test_sample_weights(): +@pytest.mark.parametrize("lil_container", LIL_CONTAINERS) +def test_sample_weights(lil_container): # Test weights on individual samples + X_sp = lil_container(X) + clf = svm.SVC() clf.fit(X_sp, Y) assert_array_equal(clf.predict([X[2]]), [1.0]) @@ -307,119 +323,41 @@ def test_sparse_liblinear_intercept_handling(): test_svm.test_dense_liblinear_intercept_handling(svm.LinearSVC) -@pytest.mark.parametrize("datasets_index", range(4)) +@pytest.mark.parametrize( + "X_train, y_train, X_test", + [ + [X, None, T], + [X2, None, T2], + [X_blobs[:80], None, X_blobs[80:]], + [iris.data, None, iris.data], + ], +) @pytest.mark.parametrize("kernel", ["linear", "poly", "rbf", "sigmoid"]) +@pytest.mark.parametrize("sparse_container", CSR_CONTAINERS + LIL_CONTAINERS) @skip_if_32bit -def test_sparse_oneclasssvm(datasets_index, kernel): +def test_sparse_oneclasssvm(X_train, y_train, X_test, kernel, sparse_container): # Check that sparse OneClassSVM gives the same result as dense OneClassSVM - # many class dataset: - X_blobs, _ = make_blobs(n_samples=100, centers=10, random_state=0) - X_blobs = sparse.csr_matrix(X_blobs) - datasets = [ - [X_sp, None, T], - [X2_sp, None, T2], - [X_blobs[:80], None, X_blobs[80:]], - [iris.data, None, iris.data], - ] - dataset = datasets[datasets_index] + X_train = sparse_container(X_train) + clf = 
svm.OneClassSVM(gamma=1, kernel=kernel) - sp_clf = svm.OneClassSVM(gamma=1, kernel=kernel) - check_svm_model_equal(clf, sp_clf, *dataset) + check_svm_model_equal(clf, X_train, y_train, X_test) -def test_sparse_realdata(): +@pytest.mark.parametrize("csr_container", CSR_CONTAINERS) +def test_sparse_realdata(csr_container): # Test on a subset from the 20newsgroups dataset. # This catches some bugs if input is not correctly converted into # sparse format or weights are not correctly initialized. - data = np.array([0.03771744, 0.1003567, 0.01174647, 0.027069]) - indices = np.array([6, 5, 35, 31]) - indptr = np.array( - [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 1, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 2, - 4, - 4, - 4, - ] - ) - X = sparse.csr_matrix((data, indices, indptr)) + + # SVC does not support large sparse, so we specify int32 indices + # In this case, `csr_matrix` automatically uses int32 regardless of the dtypes of + # `indices` and `indptr` but `csr_array` may or may not use the same dtype as + # `indices` and `indptr`, which would be int64 if not specified + indices = np.array([6, 5, 35, 31], dtype=np.int32) + indptr = np.array([0] * 8 + [1] * 32 + [2] * 38 + [4] * 3, dtype=np.int32) + + X = csr_container((data, indices, indptr)) y = np.array( [ 1.0, @@ -506,18 +444,20 @@ def test_sparse_realdata(): ) clf = svm.SVC(kernel="linear").fit(X.toarray(), y) - sp_clf = svm.SVC(kernel="linear").fit(sparse.coo_matrix(X), y) + sp_clf = svm.SVC(kernel="linear").fit(X.tocoo(), y) assert_array_equal(clf.support_vectors_, sp_clf.support_vectors_.toarray()) assert_array_equal(clf.dual_coef_, sp_clf.dual_coef_.toarray()) -def test_sparse_svc_clone_with_callable_kernel(): 
+@pytest.mark.parametrize("lil_container", LIL_CONTAINERS) +def test_sparse_svc_clone_with_callable_kernel(lil_container): # Test that the "dense_fit" is called even though we use sparse input # meaning that everything works fine. - a = svm.SVC(C=1, kernel=lambda x, y: x * y.T, probability=True, random_state=0) + a = svm.SVC(C=1, kernel=lambda x, y: x @ y.T, probability=True, random_state=0) b = base.clone(a) + X_sp = lil_container(X) b.fit(X_sp, Y) pred = b.predict(X_sp) b.predict_proba(X_sp) @@ -530,16 +470,17 @@ def test_sparse_svc_clone_with_callable_kernel(): # b.decision_function(X_sp) # XXX : should be supported -def test_timeout(): +@pytest.mark.parametrize("lil_container", LIL_CONTAINERS) +def test_timeout(lil_container): sp = svm.SVC( - C=1, kernel=lambda x, y: x * y.T, probability=True, random_state=0, max_iter=1 + C=1, kernel=lambda x, y: x @ y.T, probability=True, random_state=0, max_iter=1 ) warning_msg = ( r"Solver terminated early \(max_iter=1\). Consider pre-processing " r"your data with StandardScaler or MinMaxScaler." 
) with pytest.warns(ConvergenceWarning, match=warning_msg): - sp.fit(X_sp, Y) + sp.fit(lil_container(X), Y) def test_consistent_proba(): @@ -549,4 +490,4 @@ def test_consistent_proba(): a = svm.SVC(probability=True, max_iter=1, random_state=0) with ignore_warnings(category=ConvergenceWarning): proba_2 = a.fit(X, Y).predict_proba(X) - assert_array_almost_equal(proba_1, proba_2) + assert_allclose(proba_1, proba_2) diff --git a/sklearn/tests/test_multiclass.py b/sklearn/tests/test_multiclass.py index e27c2705406c1..caf7f5ae2fb49 100644 --- a/sklearn/tests/test_multiclass.py +++ b/sklearn/tests/test_multiclass.py @@ -741,11 +741,11 @@ def test_ecoc_delegate_sparse_base_estimator(csc_container): ) ecoc = OutputCodeClassifier(base_estimator, random_state=0) - with pytest.raises(TypeError, match="A sparse matrix was passed"): + with pytest.raises(TypeError, match="Sparse data was passed"): ecoc.fit(X_sp, y) ecoc.fit(X, y) - with pytest.raises(TypeError, match="A sparse matrix was passed"): + with pytest.raises(TypeError, match="Sparse data was passed"): ecoc.predict(X_sp) # smoke test to check when sparse input should be supported diff --git a/sklearn/tests/test_random_projection.py b/sklearn/tests/test_random_projection.py index c91833590a591..b279ab75ec8d9 100644 --- a/sklearn/tests/test_random_projection.py +++ b/sklearn/tests/test_random_projection.py @@ -6,7 +6,7 @@ import pytest import scipy.sparse as sp -from sklearn.exceptions import DataDimensionalityWarning +from sklearn.exceptions import DataDimensionalityWarning, NotFittedError from sklearn.metrics import euclidean_distances from sklearn.random_projection import ( GaussianRandomProjection, @@ -22,6 +22,7 @@ assert_array_almost_equal, assert_array_equal, ) +from sklearn.utils.fixes import COO_CONTAINERS all_sparse_random_matrix: List[Any] = [_sparse_random_matrix] all_dense_random_matrix: List[Any] = [_gaussian_random_matrix] @@ -32,11 +33,20 @@ all_RandomProjection = all_SparseRandomProjection + 
all_DenseRandomProjection -# Make some random data with uniformly located non zero entries with -# Gaussian distributed values -def make_sparse_random_data(n_samples, n_features, n_nonzeros, random_state=0): +def make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=None, + sparse_format="csr", +): + """Make some random data with uniformly located non zero entries with + Gaussian distributed values; `sparse_format` can be `"csr"` (default) or + `None` (in which case a dense array is returned). + """ rng = np.random.RandomState(random_state) - data_coo = sp.coo_matrix( + data_coo = coo_container( ( rng.randn(n_nonzeros), ( @@ -46,7 +56,10 @@ def make_sparse_random_data(n_samples, n_features, n_nonzeros, random_state=0): ), shape=(n_samples, n_features), ) - return data_coo.toarray(), data_coo.tocsr() + if sparse_format is not None: + return data_coo.asformat(sparse_format) + else: + return data_coo.toarray() def densify(matrix): @@ -58,7 +71,6 @@ def densify(matrix): n_samples, n_features = (10, 1000) n_nonzeros = int(n_samples * n_features / 100.0) -data, data_csr = make_sparse_random_data(n_samples, n_features, n_nonzeros) ############################################################################### @@ -221,14 +233,31 @@ def test_random_projection_transformer_invalid_input(): RandomProjection(n_components=n_components).fit(fit_data) -def test_try_to_transform_before_fit(): +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) +def test_try_to_transform_before_fit(coo_container, global_random_seed): + data = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=global_random_seed, + sparse_format=None, + ) for RandomProjection in all_RandomProjection: - with pytest.raises(ValueError): + with pytest.raises(NotFittedError): RandomProjection(n_components="auto").transform(data) -def test_too_many_samples_to_find_a_safe_embedding(): - data, _ = make_sparse_random_data(1000, 
100, 1000) +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) +def test_too_many_samples_to_find_a_safe_embedding(coo_container, global_random_seed): + data = make_sparse_random_data( + coo_container, + n_samples=1000, + n_features=100, + n_nonzeros=1000, + random_state=global_random_seed, + sparse_format=None, + ) for RandomProjection in all_RandomProjection: rp = RandomProjection(n_components="auto", eps=0.1) @@ -241,8 +270,16 @@ def test_too_many_samples_to_find_a_safe_embedding(): rp.fit(data) -def test_random_projection_embedding_quality(): - data, _ = make_sparse_random_data(8, 5000, 15000) +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) +def test_random_projection_embedding_quality(coo_container): + data = make_sparse_random_data( + coo_container, + n_samples=8, + n_features=5000, + n_nonzeros=15000, + random_state=0, + sparse_format=None, + ) eps = 0.2 original_distances = euclidean_distances(data, squared=True) @@ -271,28 +308,54 @@ def test_random_projection_embedding_quality(): assert 1 - eps < distances_ratio.min() -def test_SparseRandomProj_output_representation(): +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) +def test_SparseRandomProj_output_representation(coo_container): + dense_data = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=0, + sparse_format=None, + ) + sparse_data = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=0, + sparse_format="csr", + ) for SparseRandomProj in all_SparseRandomProjection: # when using sparse input, the projected data can be forced to be a # dense numpy array rp = SparseRandomProj(n_components=10, dense_output=True, random_state=0) - rp.fit(data) - assert isinstance(rp.transform(data), np.ndarray) - - sparse_data = sp.csr_matrix(data) + rp.fit(dense_data) + assert isinstance(rp.transform(dense_data), np.ndarray) assert isinstance(rp.transform(sparse_data), np.ndarray) # the output can be 
left to a sparse matrix instead rp = SparseRandomProj(n_components=10, dense_output=False, random_state=0) - rp = rp.fit(data) + rp = rp.fit(dense_data) # output for dense input will stay dense: - assert isinstance(rp.transform(data), np.ndarray) + assert isinstance(rp.transform(dense_data), np.ndarray) # output for sparse output will be sparse: assert sp.issparse(rp.transform(sparse_data)) -def test_correct_RandomProjection_dimensions_embedding(): +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) +def test_correct_RandomProjection_dimensions_embedding( + coo_container, global_random_seed +): + data = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=global_random_seed, + sparse_format=None, + ) for RandomProjection in all_RandomProjection: rp = RandomProjection(n_components="auto", random_state=0, eps=0.5).fit(data) @@ -334,24 +397,52 @@ def test_correct_RandomProjection_dimensions_embedding(): assert 85 < rp.components_.nnz # close to 1% density -def test_warning_n_components_greater_than_n_features(): +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) +def test_warning_n_components_greater_than_n_features( + coo_container, global_random_seed +): n_features = 20 - data, _ = make_sparse_random_data(5, n_features, int(n_features / 4)) + n_samples = 5 + n_nonzeros = int(n_features / 4) + data = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=global_random_seed, + sparse_format=None, + ) for RandomProjection in all_RandomProjection: with pytest.warns(DataDimensionalityWarning): RandomProjection(n_components=n_features + 1).fit(data) -def test_works_with_sparse_data(): +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) +def test_works_with_sparse_data(coo_container, global_random_seed): n_features = 20 - data, _ = make_sparse_random_data(5, n_features, int(n_features / 4)) + n_samples = 5 + n_nonzeros = int(n_features / 4) + dense_data = 
make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=global_random_seed, + sparse_format=None, + ) + sparse_data = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=global_random_seed, + sparse_format="csr", + ) for RandomProjection in all_RandomProjection: - rp_dense = RandomProjection(n_components=3, random_state=1).fit(data) - rp_sparse = RandomProjection(n_components=3, random_state=1).fit( - sp.csr_matrix(data) - ) + rp_dense = RandomProjection(n_components=3, random_state=1).fit(dense_data) + rp_sparse = RandomProjection(n_components=3, random_state=1).fit(sparse_data) assert_array_almost_equal( densify(rp_dense.components_), densify(rp_sparse.components_) ) @@ -365,8 +456,19 @@ def test_johnson_lindenstrauss_min_dim(): assert johnson_lindenstrauss_min_dim(100, eps=1e-5) == 368416070986 +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) @pytest.mark.parametrize("random_projection_cls", all_RandomProjection) -def test_random_projection_feature_names_out(random_projection_cls): +def test_random_projection_feature_names_out( + coo_container, random_projection_cls, global_random_seed +): + data = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros, + random_state=global_random_seed, + sparse_format=None, + ) random_projection = random_projection_cls(n_components=2) random_projection.fit(data) names_out = random_projection.get_feature_names_out() @@ -379,11 +481,13 @@ def test_random_projection_feature_names_out(random_projection_cls): assert_array_equal(names_out, expected_names_out) +@pytest.mark.parametrize("coo_container", COO_CONTAINERS) @pytest.mark.parametrize("n_samples", (2, 9, 10, 11, 1000)) @pytest.mark.parametrize("n_features", (2, 9, 10, 11, 1000)) @pytest.mark.parametrize("random_projection_cls", all_RandomProjection) @pytest.mark.parametrize("compute_inverse_components", [True, False]) def test_inverse_transform( + 
coo_container, n_samples, n_features, random_projection_cls, @@ -398,11 +502,21 @@ def test_inverse_transform( random_state=global_random_seed, ) - X_dense, X_csr = make_sparse_random_data( + X_dense = make_sparse_random_data( + coo_container, + n_samples, + n_features, + n_nonzeros=n_samples * n_features // 100 + 1, + random_state=global_random_seed, + sparse_format=None, + ) + X_csr = make_sparse_random_data( + coo_container, n_samples, n_features, - n_samples * n_features // 100 + 1, + n_nonzeros=n_samples * n_features // 100 + 1, random_state=global_random_seed, + sparse_format="csr", ) for X in [X_dense, X_csr]: diff --git a/sklearn/utils/_estimator_html_repr.py b/sklearn/utils/_estimator_html_repr.py index 2c85211bcf4ca..d259016504685 100644 --- a/sklearn/utils/_estimator_html_repr.py +++ b/sklearn/utils/_estimator_html_repr.py @@ -21,8 +21,13 @@ def get_id(self): return f"{self.prefix}-{self.count}" +def _get_css_style(): + return Path(__file__).with_suffix(".css").read_text(encoding="utf-8") + + _CONTAINER_ID_COUNTER = _IDCounter("sk-container-id") _ESTIMATOR_ID_COUNTER = _IDCounter("sk-estimator-id") +_CSS_STYLE = _get_css_style() class _VisualBlock: @@ -309,11 +314,6 @@ def _write_estimator_html( ) -with open(Path(__file__).with_suffix(".css"), "r") as style_file: - # use the style defined in the css file - _STYLE = style_file.read() - - def estimator_html_repr(estimator): """Build a HTML representation of an estimator. @@ -350,7 +350,7 @@ def estimator_html_repr(estimator): ) with closing(StringIO()) as out: container_id = _CONTAINER_ID_COUNTER.get_id() - style_template = Template(_STYLE) + style_template = Template(_CSS_STYLE) style_with_id = style_template.substitute(id=container_id) estimator_str = str(estimator) diff --git a/sklearn/utils/_random.pxd b/sklearn/utils/_random.pxd index b5199fc506f4e..0ebcc1de0cce6 100644 --- a/sklearn/utils/_random.pxd +++ b/sklearn/utils/_random.pxd @@ -16,10 +16,6 @@ cdef enum: # 32-bit signed integers (i.e. 
2^31 - 1). RAND_R_MAX = 2147483647 -cpdef sample_without_replacement(cnp.int_t n_population, - cnp.int_t n_samples, - method=*, - random_state=*) # rand_r replacement using a 32bit XorShift generator # See http://www.jstatsoft.org/v08/i14/paper for details diff --git a/sklearn/utils/_random.pyx b/sklearn/utils/_random.pyx index 277474f15d0db..a3efa16fa6b63 100644 --- a/sklearn/utils/_random.pyx +++ b/sklearn/utils/_random.pyx @@ -21,8 +21,17 @@ from sklearn.utils import check_random_state cdef UINT32_t DEFAULT_SEED = 1 -cpdef _sample_without_replacement_check_input(cnp.int_t n_population, - cnp.int_t n_samples): +# Compatibility type to always accept the default int type used by NumPy, both +# before and after NumPy 2. On Windows, `long` does not always match `cnp.inp_t`. +# See the comments in the `sample_without_replacement` Python function for more +# details. +ctypedef fused default_int: + cnp.intp_t + long + + +cpdef _sample_without_replacement_check_input(default_int n_population, + default_int n_samples): """ Check that input are consistent for sample_without_replacement""" if n_population < 0: raise ValueError('n_population should be greater than 0, got %s.' @@ -35,8 +44,8 @@ cpdef _sample_without_replacement_check_input(cnp.int_t n_population, cpdef _sample_without_replacement_with_tracking_selection( - cnp.int_t n_population, - cnp.int_t n_samples, + default_int n_population, + default_int n_samples, random_state=None): r"""Sample integers without replacement. 
@@ -78,9 +87,9 @@ cpdef _sample_without_replacement_with_tracking_selection( """ _sample_without_replacement_check_input(n_population, n_samples) - cdef cnp.int_t i - cdef cnp.int_t j - cdef cnp.int_t[::1] out = np.empty((n_samples, ), dtype=int) + cdef default_int i + cdef default_int j + cdef default_int[::1] out = np.empty((n_samples, ), dtype=int) rng = check_random_state(random_state) rng_randint = rng.randint @@ -99,8 +108,8 @@ cpdef _sample_without_replacement_with_tracking_selection( return np.asarray(out) -cpdef _sample_without_replacement_with_pool(cnp.int_t n_population, - cnp.int_t n_samples, +cpdef _sample_without_replacement_with_pool(default_int n_population, + default_int n_samples, random_state=None): """Sample integers without replacement. @@ -133,10 +142,10 @@ cpdef _sample_without_replacement_with_pool(cnp.int_t n_population, """ _sample_without_replacement_check_input(n_population, n_samples) - cdef cnp.int_t i - cdef cnp.int_t j - cdef cnp.int_t[::1] out = np.empty((n_samples,), dtype=int) - cdef cnp.int_t[::1] pool = np.empty((n_population,), dtype=int) + cdef default_int i + cdef default_int j + cdef default_int[::1] out = np.empty((n_samples,), dtype=int) + cdef default_int[::1] pool = np.empty((n_population,), dtype=int) rng = check_random_state(random_state) rng_randint = rng.randint @@ -156,8 +165,8 @@ cpdef _sample_without_replacement_with_pool(cnp.int_t n_population, cpdef _sample_without_replacement_with_reservoir_sampling( - cnp.int_t n_population, - cnp.int_t n_samples, + default_int n_population, + default_int n_samples, random_state=None ): """Sample integers without replacement. 
@@ -193,9 +202,9 @@ cpdef _sample_without_replacement_with_reservoir_sampling( """ _sample_without_replacement_check_input(n_population, n_samples) - cdef cnp.int_t i - cdef cnp.int_t j - cdef cnp.int_t[::1] out = np.empty((n_samples, ), dtype=int) + cdef default_int i + cdef default_int j + cdef default_int[::1] out = np.empty((n_samples, ), dtype=int) rng = check_random_state(random_state) rng_randint = rng.randint @@ -215,8 +224,8 @@ cpdef _sample_without_replacement_with_reservoir_sampling( return np.asarray(out) -cpdef sample_without_replacement(cnp.int_t n_population, - cnp.int_t n_samples, +cdef _sample_without_replacement(default_int n_population, + default_int n_samples, method="auto", random_state=None): """Sample integers without replacement. @@ -305,6 +314,32 @@ cpdef sample_without_replacement(cnp.int_t n_population, % (all_methods, method)) +def sample_without_replacement( + object n_population, object n_samples, method="auto", random_state=None): + cdef: + cnp.intp_t n_pop_intp, n_samples_intp + long n_pop_long, n_samples_long + + # On most platforms `np.int_ is np.intp`. However, before NumPy 2 the + # default integer `np.int_` was a long which is 32bit on 64bit windows + # while `intp` is 64bit on 64bit platforms and 32bit on 32bit ones. + if np.int_ is np.intp: + # Branch always taken on NumPy >=2 (or when not on 64bit windows). + # Cython has different rules for conversion of values to integers. + # For NumPy <1.26.2 AND Cython 3, this first branch requires `int()` + # called explicitly to allow e.g. floats. 
+ n_pop_intp = int(n_population) + n_samples_intp = int(n_samples) + return _sample_without_replacement( + n_pop_intp, n_samples_intp, method, random_state) + else: + # Branch taken on 64bit windows with Numpy<2.0 where `long` is 32bit + n_pop_long = n_population + n_samples_long = n_samples + return _sample_without_replacement( + n_pop_long, n_samples_long, method, random_state) + + def _our_rand_r_py(seed): """Python utils to test the our_rand_r function""" cdef UINT32_t my_seed = seed diff --git a/sklearn/utils/arrayfuncs.pyx b/sklearn/utils/arrayfuncs.pyx index 30eaa0b92129e..d060c7bada92a 100644 --- a/sklearn/utils/arrayfuncs.pyx +++ b/sklearn/utils/arrayfuncs.pyx @@ -30,7 +30,7 @@ def min_pos(const floating[:] X): # n = rows # # TODO: put transpose as an option -def cholesky_delete(const floating[:, :] L, int go_out): +def cholesky_delete(floating[:, :] L, int go_out): cdef: int n = L.shape[0] int m = L.strides[0] diff --git a/sklearn/utils/estimator_checks.py b/sklearn/utils/estimator_checks.py index 807002e261261..82e40cec42641 100644 --- a/sklearn/utils/estimator_checks.py +++ b/sklearn/utils/estimator_checks.py @@ -3470,7 +3470,7 @@ def param_filter(p): type, } # Any numpy numeric such as np.int32. 
- allowed_types.update(np.core.numerictypes.allTypes.values()) + allowed_types.update(np.sctypeDict.values()) allowed_value = ( type(init_param.default) in allowed_types diff --git a/sklearn/utils/extmath.py b/sklearn/utils/extmath.py index f96d327b42d54..c256639997319 100644 --- a/sklearn/utils/extmath.py +++ b/sklearn/utils/extmath.py @@ -78,11 +78,18 @@ def row_norms(X, squared=False): if sparse.issparse(X): X = X.tocsr() norms = csr_row_norms(X) + if not squared: + norms = np.sqrt(norms) else: - norms = np.einsum("ij,ij->i", X, X) - - if not squared: - np.sqrt(norms, norms) + xp, _ = get_namespace(X) + if _is_numpy_namespace(xp): + X = np.asarray(X) + norms = np.einsum("ij,ij->i", X, X) + norms = xp.asarray(norms) + else: + norms = xp.sum(xp.multiply(X, X), axis=1) + if not squared: + norms = xp.sqrt(norms) return norms @@ -1204,14 +1211,10 @@ def stable_cumsum(arr, axis=None, rtol=1e-05, atol=1e-08): out : ndarray Array with the cumulative sums along the chosen axis. """ - xp, _ = get_namespace(arr) - - out = xp.cumsum(arr, axis=axis, dtype=np.float64) - expected = xp.sum(arr, axis=axis, dtype=np.float64) - if not xp.all( - xp.isclose( - out.take(-1, axis=axis), expected, rtol=rtol, atol=atol, equal_nan=True - ) + out = np.cumsum(arr, axis=axis, dtype=np.float64) + expected = np.sum(arr, axis=axis, dtype=np.float64) + if not np.allclose( + out.take(-1, axis=axis), expected, rtol=rtol, atol=atol, equal_nan=True ): warnings.warn( ( diff --git a/sklearn/utils/multiclass.py b/sklearn/utils/multiclass.py index 18e8cf444c156..a4b23427e5b70 100644 --- a/sklearn/utils/multiclass.py +++ b/sklearn/utils/multiclass.py @@ -119,7 +119,10 @@ def unique_labels(*ys): def _is_integral_float(y): - return y.dtype.kind == "f" and np.all(y.astype(int) == y) + xp, is_array_api_compliant = get_namespace(y) + return xp.isdtype(y.dtype, "real floating") and bool( + xp.all(xp.astype((xp.astype(y, xp.int64)), y.dtype) == y) + ) def is_multilabel(y): @@ -189,8 +192,9 @@ def 
is_multilabel(y): else: labels = xp.unique_values(y) - return len(labels) < 3 and ( - y.dtype.kind in "biu" or _is_integral_float(labels) # bool, int, uint + return labels.shape[0] < 3 and ( + xp.isdtype(y.dtype, ("bool", "signed integer", "unsigned integer")) + or _is_integral_float(labels) ) diff --git a/sklearn/utils/tests/test_estimator_html_repr.py b/sklearn/utils/tests/test_estimator_html_repr.py index a360f059d0564..d3054155b9bda 100644 --- a/sklearn/utils/tests/test_estimator_html_repr.py +++ b/sklearn/utils/tests/test_estimator_html_repr.py @@ -1,4 +1,5 @@ import html +import locale import re from contextlib import closing from io import StringIO @@ -26,6 +27,7 @@ from sklearn.svm import LinearSVC, LinearSVR from sklearn.tree import DecisionTreeClassifier from sklearn.utils._estimator_html_repr import ( + _get_css_style, _get_visual_block, _HTMLDocumentationLinkMixin, _write_label_html, @@ -464,3 +466,34 @@ def url_param_generator(estimator): mixin._doc_link_url_param_generator = url_param_generator assert mixin._get_doc_link() == "https://website.com/value_1.value_2.html" + + +@pytest.fixture +def set_non_utf8_locale(): + """Pytest fixture to set non utf-8 locale during the test. + + The locale is set to the original one after the test has run. + """ + try: + locale.setlocale(locale.LC_CTYPE, "C") + except locale.Error: + pytest.skip("'C' locale is not available on this OS") + + yield + + # Resets the locale to the original one. Python calls setlocale(LC_CTYPE, "") + # at startup according to + # https://docs.python.org/3/library/locale.html#background-details-hints-tips-and-caveats. + # This assumes that no other locale changes have been made. 
For some reason, + # on some platforms, trying to restore locale with something like + # locale.setlocale(locale.LC_CTYPE, locale.getlocale()) raises a + # locale.Error: unsupported locale setting + locale.setlocale(locale.LC_CTYPE, "") + + +def test_non_utf8_locale(set_non_utf8_locale): + """Checks that utf8 encoding is used when reading the CSS file. + + Non-regression test for https://github.com/scikit-learn/scikit-learn/issues/27725 + """ + _get_css_style() diff --git a/sklearn/utils/tests/test_mocking.py b/sklearn/utils/tests/test_mocking.py index 93a07e3d7fab7..9c66d1345bb6d 100644 --- a/sklearn/utils/tests/test_mocking.py +++ b/sklearn/utils/tests/test_mocking.py @@ -136,7 +136,7 @@ def test_checking_classifier_with_params(iris, csr_container): check_X=check_array, check_X_params={"accept_sparse": False} ) clf.fit(X, y) - with pytest.raises(TypeError, match="A sparse matrix was passed"): + with pytest.raises(TypeError, match="Sparse data was passed"): clf.fit(X_sparse, y) diff --git a/sklearn/utils/tests/test_multiclass.py b/sklearn/utils/tests/test_multiclass.py index 2f2a134751856..3ff477c037043 100644 --- a/sklearn/utils/tests/test_multiclass.py +++ b/sklearn/utils/tests/test_multiclass.py @@ -4,10 +4,12 @@ import pytest from scipy.sparse import issparse -from sklearn import datasets +from sklearn import config_context, datasets from sklearn.model_selection import ShuffleSplit from sklearn.svm import SVC +from sklearn.utils._array_api import yield_namespace_device_dtype_combinations from sklearn.utils._testing import ( + _array_api_for_tests, assert_allclose, assert_array_almost_equal, assert_array_equal, @@ -172,6 +174,75 @@ def _generate_sparse( ], } +ARRAY_API_EXAMPLES = { + "multilabel-indicator": [ + np.random.RandomState(42).randint(2, size=(10, 10)), + [[0, 1], [1, 0]], + [[0, 1]], + multilabel_explicit_zero, + [[0, 0], [0, 0]], + [[-1, 1], [1, -1]], + np.array([[-1, 1], [1, -1]]), + np.array([[-3, 3], [3, -3]]), + _NotAnArray(np.array([[-3, 3], 
[3, -3]])), + ], + "multiclass": [ + [1, 0, 2, 2, 1, 4, 2, 4, 4, 4], + np.array([1, 0, 2]), + np.array([1, 0, 2], dtype=np.int8), + np.array([1, 0, 2], dtype=np.uint8), + np.array([1, 0, 2], dtype=float), + np.array([1, 0, 2], dtype=np.float32), + np.array([[1], [0], [2]]), + _NotAnArray(np.array([1, 0, 2])), + [0, 1, 2], + ], + "multiclass-multioutput": [ + [[1, 0, 2, 2], [1, 4, 2, 4]], + np.array([[1, 0, 2, 2], [1, 4, 2, 4]]), + np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.int8), + np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.uint8), + np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=float), + np.array([[1, 0, 2, 2], [1, 4, 2, 4]], dtype=np.float32), + np.array([[1, 0, 2]]), + _NotAnArray(np.array([[1, 0, 2]])), + ], + "binary": [ + [0, 1], + [1, 1], + [], + [0], + np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1]), + np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=bool), + np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.int8), + np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.uint8), + np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=float), + np.array([0, 1, 1, 1, 0, 0, 0, 1, 1, 1], dtype=np.float32), + np.array([[0], [1]]), + _NotAnArray(np.array([[0], [1]])), + [1, -1], + [3, 5], + ], + "continuous": [ + [1e-5], + [0, 0.5], + np.array([[0], [0.5]]), + np.array([[0], [0.5]], dtype=np.float32), + ], + "continuous-multioutput": [ + np.array([[0, 0.5], [0.5, 0]]), + np.array([[0, 0.5], [0.5, 0]], dtype=np.float32), + np.array([[0, 0.5]]), + ], + "unknown": [ + [[]], + [()], + np.array(0), + np.array([[[0, 1], [2, 3]], [[4, 5], [6, 7]]]), + ], +} + + NON_ARRAY_LIKE_EXAMPLES = [ {1, 2, 3}, {0: "a", 1: "b"}, @@ -270,12 +341,12 @@ def test_unique_labels_mixed_types(): def test_is_multilabel(): for group, group_examples in EXAMPLES.items(): - dense_exp = group in ["multilabel-indicator"] + dense_exp = group == "multilabel-indicator" for example in group_examples: # Only mark explicitly defined sparse examples as valid sparse # multilabel-indicators - sparse_exp = group == 
"multilabel-indicator" and issparse(example) + sparse_exp = dense_exp and issparse(example) if issparse(example) or ( hasattr(example, "__array__") @@ -296,7 +367,7 @@ def test_is_multilabel(): for exmpl_sparse in examples_sparse: assert sparse_exp == is_multilabel( exmpl_sparse - ), "is_multilabel(%r) should be %s" % (exmpl_sparse, sparse_exp) + ), f"is_multilabel({exmpl_sparse!r}) should be {sparse_exp}" # Densify sparse examples before testing if issparse(example): @@ -304,7 +375,29 @@ def test_is_multilabel(): assert dense_exp == is_multilabel( example - ), "is_multilabel(%r) should be %s" % (example, dense_exp) + ), f"is_multilabel({example!r}) should be {dense_exp}" + + +@pytest.mark.parametrize( + "array_namespace, device, dtype", + yield_namespace_device_dtype_combinations(), +) +def test_is_multilabel_array_api_compliance(array_namespace, device, dtype): + xp, device, dtype = _array_api_for_tests(array_namespace, device, dtype) + + for group, group_examples in ARRAY_API_EXAMPLES.items(): + dense_exp = group == "multilabel-indicator" + for example in group_examples: + if np.asarray(example).dtype.kind == "f": + example = np.asarray(example, dtype=dtype) + else: + example = np.asarray(example) + example = xp.asarray(example, device=device) + + with config_context(array_api_dispatch=True): + assert dense_exp == is_multilabel( + example + ), f"is_multilabel({example!r}) should be {dense_exp}" def test_check_classification_targets(): diff --git a/sklearn/utils/tests/test_utils.py b/sklearn/utils/tests/test_utils.py index 0f0c9c898b17a..9d4df09573857 100644 --- a/sklearn/utils/tests/test_utils.py +++ b/sklearn/utils/tests/test_utils.py @@ -168,7 +168,7 @@ def test_resample_stratify_sparse_error(csr_container): X = rng.normal(size=(n_samples, 2)) y = rng.randint(0, 2, size=n_samples) stratify = csr_container(y) - with pytest.raises(TypeError, match="A sparse matrix was passed"): + with pytest.raises(TypeError, match="Sparse data was passed"): X, y = resample(X, 
y, n_samples=50, random_state=rng, stratify=stratify) diff --git a/sklearn/utils/tests/test_validation.py b/sklearn/utils/tests/test_validation.py index 69fc7b6cac4d7..1c2a7cd3f855a 100644 --- a/sklearn/utils/tests/test_validation.py +++ b/sklearn/utils/tests/test_validation.py @@ -599,8 +599,8 @@ def test_check_array_accept_sparse_type_exception(): invalid_type = SVR() msg = ( - "A sparse matrix was passed, but dense data is required. " - r"Use X.toarray\(\) to convert to a dense numpy array." + "Sparse data was passed, but dense data is required. " + r"Use '.toarray\(\)' to convert to a dense numpy array." ) with pytest.raises(TypeError, match=msg): check_array(X_csr, accept_sparse=False) diff --git a/sklearn/utils/validation.py b/sklearn/utils/validation.py index a5b4a8555de63..8a3f07a5c867f 100644 --- a/sklearn/utils/validation.py +++ b/sklearn/utils/validation.py @@ -534,9 +534,10 @@ def _ensure_sparse_format( _check_large_sparse(sparse_container, accept_large_sparse) if accept_sparse is False: + padded_input = " for " + input_name if input_name else "" raise TypeError( - "A sparse matrix was passed, but dense data is required. Use X.toarray() " - "to convert to a dense numpy array." + f"Sparse data was passed{padded_input}, but dense data is required. " + "Use '.toarray()' to convert to a dense numpy array." ) elif isinstance(accept_sparse, (list, tuple)): if len(accept_sparse) == 0: