From 4ce41f303c88dbb711f3ba015a36a5a31c19c73e Mon Sep 17 00:00:00 2001
From: Jayaram Kancherla
Date: Fri, 20 Dec 2024 12:00:59 -0800
Subject: [PATCH] chore: remove Python 3.8 support (#15)

---
 .github/workflows/pypi-publish.yml    | 75 ++++++++++---------
 .github/workflows/pypi-test.yml       | 47 +++++-------
 .pre-commit-config.yaml               | 25 ++++---
 CHANGELOG.md                          |  5 ++
 docs/conf.py                          |  9 +++
 pyproject.toml                        |  4 +
 setup.cfg                             |  2 +-
 .../Hdf5CompressedSparseMatrixSeed.py | 53 +++----------
 src/hdf5array/Hdf5DenseArraySeed.py   |  4 +-
 9 files changed, 103 insertions(+), 121 deletions(-)

diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml
index 7b591a2..29657bb 100644
--- a/.github/workflows/pypi-publish.yml
+++ b/.github/workflows/pypi-publish.yml
@@ -9,43 +9,44 @@ on:
 
 jobs:
   build:
-
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python 3.9
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.9
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install flake8 pytest tox
-    # - name: Lint with flake8
-    #   run: |
-    #     # stop the build if there are Python syntax errors or undefined names
-    #     flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-    #     # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-    #     # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
-    - name: Test with tox
-      run: |
-        tox
-    - name: Build docs
-      run: |
-        tox -e docs
-    - run: touch ./docs/_build/html/.nojekyll
-    - name: GH Pages Deployment
-      uses: JamesIves/github-pages-deploy-action@4.1.3
-      with:
-        branch: gh-pages # The branch the action should deploy to.
-        folder: ./docs/_build/html
-        clean: true # Automatically remove deleted files from the deploy branch
-    - name: Build Project and Publish
-      run: |
-        python -m tox -e clean,build
-    - name: Publish package
-      uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
-      with:
-        user: __token__
-        password: ${{ secrets.PYPI_PASSWORD }}
+      - uses: actions/checkout@v4
+
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.11
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox
+
+      - name: Test with tox
+        run: |
+          tox
+
+      - name: Build docs
+        run: |
+          tox -e docs
+
+      - run: touch ./docs/_build/html/.nojekyll
+
+      - name: GH Pages Deployment
+        uses: JamesIves/github-pages-deploy-action@v4
+        with:
+          branch: gh-pages # The branch the action should deploy to.
+          folder: ./docs/_build/html
+          clean: true # Automatically remove deleted files from the deploy branch
+
+      - name: Build Project and Publish
+        run: |
+          python -m tox -e clean,build
+
+      - name: Publish package
+        uses: pypa/gh-action-pypi-publish@v1.12.2
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_PASSWORD }}
 
diff --git a/.github/workflows/pypi-test.yml b/.github/workflows/pypi-test.yml
index 9dc019a..90aa16a 100644
--- a/.github/workflows/pypi-test.yml
+++ b/.github/workflows/pypi-test.yml
@@ -1,40 +1,33 @@
-# This workflow will install Python dependencies, run tests and lint with a single version of Python
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-name: Test the library
+name: Run tests
 
 on:
   push:
-    branches: [ master ]
+    branches: [master]
   pull_request:
-    branches: [ master ]
+    branches: [master]
 
 jobs:
   build:
-
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
 
     name: Python ${{ matrix.python-version }}
     steps:
-    - uses: actions/checkout@v2
-    - name: Setup Python
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-        cache: 'pip'
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install flake8 pytest tox
-    # - name: Lint with flake8
-    #   run: |
-    #     # stop the build if there are Python syntax errors or undefined names
-    #     flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-    #     # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-    #     # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
-    - name: Test with tox
-      run: |
-        tox
+      - uses: actions/checkout@v4
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+          cache: "pip"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox
+
+      - name: Test with tox
+        run: |
+          tox
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3c9601c..e60a5f4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -17,19 +17,19 @@ repos:
   - id: mixed-line-ending
     args: ['--fix=auto'] # replace 'auto' with 'lf' to enforce Linux/Mac line endings or 'crlf' for Windows
 
-- repo: https://github.com/PyCQA/docformatter
-  rev: v1.7.5
-  hooks:
-  - id: docformatter
-    additional_dependencies: [tomli]
-    args: [--in-place, --wrap-descriptions=120, --wrap-summaries=120]
-    # --config, ./pyproject.toml
+# - repo: https://github.com/PyCQA/docformatter
+#   rev: master
+#   hooks:
+#   - id: docformatter
+#     additional_dependencies: [tomli]
+#     args: [--in-place, --wrap-descriptions=120, --wrap-summaries=120]
+#     # --config, ./pyproject.toml
 
-- repo: https://github.com/psf/black
-  rev: 24.8.0
-  hooks:
-  - id: black
-    language_version: python3
+# - repo: https://github.com/psf/black
+#   rev: 24.8.0
+#   hooks:
+#   - id: black
+#     language_version: python3
 
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
@@ -37,6 +37,7 @@ repos:
   hooks:
   - id: ruff
     args: [--fix, --exit-non-zero-on-fix]
+  - id: ruff-format
 
 ## If like to embrace black styles even in the docs:
 # - repo: https://github.com/asottile/blacken-docs
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f6fcb14..c22f9da 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,10 @@
 # Changelog
 
+## Version 0.3.0
+
+- chore: Remove Python 3.8 (EOL)
+- precommit: Replace docformatter with ruff's formatter
+
 ## Version 0.2.0
 
 - Compatibility with NumPy 2.0
diff --git a/docs/conf.py b/docs/conf.py
index 86d64bd..a0debfe 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -170,6 +170,15 @@ def setup(app):
 # If this is True, todo emits a warning for each TODO entries. The default is False.
 todo_emit_warnings = True
 
+autodoc_default_options = {
+    'special-members': True,
+    'undoc-members': False,
+    'exclude-members': '__weakref__, __dict__, __str__, __module__, __init__'
+}
+
+autosummary_generate = True
+autosummary_imported_members = True
+
 
 # -- Options for HTML output -------------------------------------------------
 
diff --git a/pyproject.toml b/pyproject.toml
index 0514df9..45716dd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,6 +17,10 @@ extend-ignore = ["F821"]
 [tool.ruff.pydocstyle]
 convention = "google"
 
+[tool.ruff.format]
+docstring-code-format = true
+docstring-code-line-length = 20
+
 [tool.ruff.per-file-ignores]
 "__init__.py" = ["E402", "F401"]
 
diff --git a/setup.cfg b/setup.cfg
index 574bef0..5fc5d3f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -41,7 +41,7 @@ package_dir =
     =src
 
 # Require a min/specific Python version (comma-separated conditions)
-python_requires = >=3.8
+python_requires = >=3.9
 
 # Add here dependencies of your project (line-separated), e.g. requests>=2.2,<3.0.
 # Version specifiers like >=2.2,<3.0 avoid problems due to API changes in
diff --git a/src/hdf5array/Hdf5CompressedSparseMatrixSeed.py b/src/hdf5array/Hdf5CompressedSparseMatrixSeed.py
index e5d5b79..ba331cd 100644
--- a/src/hdf5array/Hdf5CompressedSparseMatrixSeed.py
+++ b/src/hdf5array/Hdf5CompressedSparseMatrixSeed.py
@@ -98,22 +98,14 @@ def __init__(
 
         with File(self._path, "r") as handle:
             self._indptr = handle[self._indptr_name][:]
-            if len(self._indptr.shape) != 1 or not issubdtype(
-                self._indptr.dtype, integer
-            ):
-                raise ValueError(
-                    "'indptr' dataset should be 1-dimensional and contain integers"
-                )
+            if len(self._indptr.shape) != 1 or not issubdtype(self._indptr.dtype, integer):
+                raise ValueError("'indptr' dataset should be 1-dimensional and contain integers")
             if by_column:
                 if len(self._indptr) != shape[1] + 1:
-                    raise ValueError(
-                        "'indptr' dataset should have length equal to the number of columns + 1"
-                    )
+                    raise ValueError("'indptr' dataset should have length equal to the number of columns + 1")
             else:
                 if len(self._indptr) != shape[0] + 1:
-                    raise ValueError(
-                        "'indptr' dataset should have length equal to the number of columns + 1"
-                    )
+                    raise ValueError("'indptr' dataset should have length equal to the number of columns + 1")
             if self._indptr[0] != 0:
                 raise ValueError("first entry of 'indptr' dataset should be zero")
             for i in range(1, len(self._indptr)):
@@ -122,9 +114,7 @@ def __init__(
 
             ddset = handle[self._data_name]
             if len(ddset.shape) != 1 or ddset.shape[0] != self._indptr[-1]:
-                raise ValueError(
-                    "'data' dataset should have length equal to the number of non-zero elements"
-                )
+                raise ValueError("'data' dataset should have length equal to the number of non-zero elements")
             self._modify_dtype = dtype is not None and dtype != ddset.dtype
             if not self._modify_dtype:
                 dtype = ddset.dtype
@@ -133,14 +123,10 @@ def __init__(
             # Not going to check for consistency of the indices themselves.
             idset = handle[self._indices_name]
             if len(idset.shape) != 1 or idset.shape[0] != self._indptr[-1]:
-                raise ValueError(
-                    "'indices' dataset should have length equal to the number of non-zero elements"
-                )
+                raise ValueError("'indices' dataset should have length equal to the number of non-zero elements")
             if not issubdtype(idset.dtype, integer):
                 raise ValueError("'indices' dataset should contain integers")
-            self._modify_index_dtype = (
-                index_dtype is not None and index_dtype != idset.dtype
-            )
+            self._modify_index_dtype = index_dtype is not None and index_dtype != idset.dtype
             if not self._modify_index_dtype:
                 index_dtype = idset.dtype
             self._index_dtype = index_dtype
@@ -269,9 +255,7 @@ def _extract_array(
                 start_idx = bisect_left(curindices, secondary_start)
             end_idx = len(curindices)
             if search_end:
-                end_idx = bisect_left(
-                    curindices, secondary_end, lo=start_idx, hi=end_idx
-                )
+                end_idx = bisect_left(curindices, secondary_end, lo=start_idx, hi=end_idx)
 
             if is_consecutive:
                 mod_indices = curindices[start_idx:end_idx]
@@ -406,14 +390,7 @@ def _consecutive(r, cols, values):
 class Hdf5CompressedSparseMatrix(DelayedArray):
     """Compressed sparse matrix in a HDF5 file as a ``DelayedArray``."""
 
-    def __init__(
-        self,
-        path: str,
-        group_name: Optional[str],
-        shape: Tuple[int, int],
-        by_column: bool,
-        **kwargs
-    ):
+    def __init__(self, path: str, group_name: Optional[str], shape: Tuple[int, int], by_column: bool, **kwargs):
         """To construct a ``Hdf5CompressedSparseMatrix`` from an existing
         :py:class:`~Hdf5CompressedSparseMatrixSeed`, use :py:meth:`~delayedarray.wrap.wrap` instead.
 
@@ -439,9 +416,7 @@ def __init__(
         if isinstance(path, Hdf5CompressedSparseMatrixSeed):
             seed = path
         else:
-            seed = Hdf5CompressedSparseMatrixSeed(
-                path, group_name, shape, by_column, **kwargs
-            )
+            seed = Hdf5CompressedSparseMatrixSeed(path, group_name, shape, by_column, **kwargs)
         super(Hdf5CompressedSparseMatrix, self).__init__(seed)
 
     @property
@@ -530,13 +505,9 @@ def to_scipy_sparse_matrix_from_Hdf5CompressedSparseMatrix(
         _indptr = handle[x.indptr_name][:]
 
     if x.by_column:
-        _matrix = scipy.sparse.csc_matrix(
-            (_data, _indices, _indptr), shape=x.shape, dtype=x.dtype
-        )
+        _matrix = scipy.sparse.csc_matrix((_data, _indices, _indptr), shape=x.shape, dtype=x.dtype)
     else:
-        _matrix = scipy.sparse.csr_matrix(
-            (_data, _indices, _indptr), shape=x.shape, dtype=x.dtype
-        )
+        _matrix = scipy.sparse.csr_matrix((_data, _indices, _indptr), shape=x.shape, dtype=x.dtype)
 
     if format == "csc":
         return _matrix.tocsc()
diff --git a/src/hdf5array/Hdf5DenseArraySeed.py b/src/hdf5array/Hdf5DenseArraySeed.py
index 855b1ea..22a3c46 100644
--- a/src/hdf5array/Hdf5DenseArraySeed.py
+++ b/src/hdf5array/Hdf5DenseArraySeed.py
@@ -122,9 +122,7 @@ def chunk_grid_Hdf5DenseArraySeed(x: Hdf5DenseArraySeed):
 
 
 @extract_dense_array.register
-def extract_dense_array_Hdf5DenseArraySeed(
-    x: Hdf5DenseArraySeed, subset: Tuple[Sequence[int], ...]
-) -> numpy.ndarray:
+def extract_dense_array_Hdf5DenseArraySeed(x: Hdf5DenseArraySeed, subset: Tuple[Sequence[int], ...]) -> numpy.ndarray:
     """See :py:meth:`~delayedarray.extract_dense_array.extract_dense_array`."""
     converted = []
     num_lists = 0
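
Illustrative usage sketch (not part of the patch itself): the Hdf5CompressedSparseMatrix constructor touched above takes a path, a group name, the matrix shape and a by_column flag. The group name, the dataset names ("data", "indices", "indptr") and the package-root import below are assumptions made for this example, not details confirmed by the diff.

    # Hypothetical example; group/dataset names are assumed, not taken from the patch.
    import h5py
    import numpy
    from hdf5array import Hdf5CompressedSparseMatrix

    # Write a tiny 3x2 compressed-sparse-column matrix: indptr holds one entry per
    # column plus one, while data/indices hold the non-zero values and their row ids.
    with h5py.File("example.h5", "w") as handle:
        grp = handle.create_group("matrix")
        grp.create_dataset("data", data=numpy.array([1.0, 2.0, 3.0]))
        grp.create_dataset("indices", data=numpy.array([0, 2, 1], dtype="uint32"))
        grp.create_dataset("indptr", data=numpy.array([0, 2, 3], dtype="uint64"))

    # Open it through the class whose code is reformatted in this patch.
    mat = Hdf5CompressedSparseMatrix("example.h5", "matrix", shape=(3, 2), by_column=True)
    print(mat.shape, mat.dtype)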