diff --git a/doc/source/development/maintaining.rst b/doc/source/development/maintaining.rst
index 29cc256f35a4e..948b545d42f0d 100644
--- a/doc/source/development/maintaining.rst
+++ b/doc/source/development/maintaining.rst
@@ -449,9 +449,13 @@ which will be triggered when the tag is pushed.

       git tag -a v1.5.0.dev0 -m "DEV: Start 1.5.0"
       git push upstream main --follow-tags

-3. Build the source distribution (git must be in the tag commit)::
+3. Download the source distribution and wheels from the `wheel staging area <https://anaconda.org/multibuild-wheels-staging/pandas/files>`_.
+   Be careful to make sure that no wheels are missing (e.g. due to failed builds).

-      ./setup.py sdist --formats=gztar --quiet
+   Running scripts/download_wheels.sh with the version that you want to download wheels/the sdist for should do the trick.
+   This script will make a ``dist`` folder inside your clone of pandas and put the downloaded wheels and sdist there::
+
+      scripts/download_wheels.sh <VERSION>

 4. Create a `new GitHub release <https://github.com/pandas-dev/pandas/releases/new>`_:
@@ -463,23 +467,20 @@ which will be triggered when the tag is pushed.
    - Set as the latest release: Leave checked, unless releasing a patch release for an older
      version (e.g. releasing 1.4.5 after 1.5 has been released)

-5. The GitHub release will after some hours trigger an
+5. Upload wheels to PyPI::
+
+      twine upload pandas/dist/pandas-*.{whl,tar.gz} --skip-existing
+
+6. The GitHub release will after some hours trigger an
    `automated conda-forge PR <https://github.com/conda-forge/pandas-feedstock/pulls>`_.
+   (If you don't want to wait, you can open an issue titled ``@conda-forge-admin, please update version``
+   to trigger the bot.)
    Merge it once the CI is green, and it will generate the conda-forge packages.
+
    In case a manual PR needs to be done, the version, sha256 and build fields are the
    ones that usually need to be changed. If anything else in the recipe has changed since
    the last release, those changes should be available in ``ci/meta.yaml``.

-6. Packages for supported versions in PyPI are built automatically from our CI.
-   Once all packages are build download all wheels from the
-   `Anaconda repository <https://anaconda.org/multibuild-wheels-staging/pandas/files/>`_
-   where our CI published them to the ``dist/`` directory in your local pandas copy.
-   You can use the script ``scripts/download_wheels.sh`` to download all wheels at once.
-
-7. Upload wheels to PyPI::
-
-      twine upload pandas/dist/pandas-*.{whl,tar.gz} --skip-existing
-
 Post-Release
 ````````````
diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py
index ac4d2976593a2..e41f9c1dc37db 100644
--- a/pandas/core/indexes/base.py
+++ b/pandas/core/indexes/base.py
@@ -5254,7 +5254,6 @@ def _validate_fill_value(self, value):
         """
         dtype = self.dtype
         if isinstance(dtype, np.dtype) and dtype.kind not in "mM":
-            # return np_can_hold_element(dtype, value)
             try:
                 return np_can_hold_element(dtype, value)
             except LossySetitemError as err:
@@ -6917,6 +6916,9 @@ def insert(self, loc: int, item) -> Index:
             dtype = self._find_common_type_compat(item)
             return self.astype(dtype).insert(loc, item)

+        print(item)
+        print(type(item))
+
         if arr.dtype != object or not isinstance(
             item, (tuple, np.datetime64, np.timedelta64)
         ):
diff --git a/scripts/download_wheels.sh b/scripts/download_wheels.sh
index 0b92e83113f5f..84279ac7a04d1 100755
--- a/scripts/download_wheels.sh
+++ b/scripts/download_wheels.sh
@@ -11,6 +11,7 @@
 # one by one to the dist/ directory where they would be generated.

 VERSION=$1
+mkdir -p $(dirname -- $0)/../dist
 DIST_DIR="$(realpath $(dirname -- $0)/../dist)"

 if [ -z $VERSION ]; then
@@ -20,7 +21,7 @@ fi

 curl "https://anaconda.org/multibuild-wheels-staging/pandas/files?version=${VERSION}" | \
     grep "href=\"/multibuild-wheels-staging/pandas/${VERSION}" | \
-    sed -r 's/.*<a href="(.*)">.*/\1/g' | \
+    sed -r 's/.*<a href="(.*\.(whl|tar\.gz))">.*/\1/g' | \
     awk '{print "https://anaconda.org" $0 }' | \
     xargs wget -P $DIST_DIR
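For context, the release steps touched by the maintaining.rst hunk above chain together roughly as follows. This is a minimal sketch, not part of the patch: it assumes you run it from the root of a pandas clone with twine installed and PyPI credentials configured, and the intermediate twine check / artifact count steps are extra sanity checks that the guide itself does not mandate.

#!/usr/bin/env bash
# Sketch of the download-then-upload flow described in the docs hunk above.
# Assumptions: run from the root of a pandas clone, twine is installed,
# and PyPI credentials are configured. The version below is a hypothetical example.
set -euo pipefail

VERSION="1.5.0"

# Fetch the sdist and all wheels from the Anaconda staging area into ./dist
scripts/download_wheels.sh "${VERSION}"

# Optional sanity checks (not required by the guide): validate artifact metadata
# and eyeball the count against the expected number of wheels plus one sdist.
twine check dist/pandas-*
ls dist/ | wc -l

# Publish to PyPI, skipping any files that were already uploaded.
twine upload dist/pandas-*.{whl,tar.gz} --skip-existing

Note that the documentation's twine command uses pandas/dist/ because it assumes you are one directory above the clone; the sketch runs from inside the clone, so dist/ points at the same folder the download script populates.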