Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Release v1.27.0 #2975

Draft
wants to merge 4 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
214 changes: 214 additions & 0 deletions .github/workflows/daily_modin_precommit_py311_py312.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,214 @@
# This is copied from original daily_precommit.yml with one change: only run Snowpark pandas tests

name: Daily Snowpark pandas API test with Py3.11 and Py3.12
on:
  schedule:
    # 10 AM UTC
    - cron: "0 10 * * *"
  workflow_dispatch:
    inputs:
      logLevel:
        default: warning
        description: "Log level"
        required: true
      tags:
        description: "Test scenario tags"

jobs:
  lint:
    name: Check linting
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'
      - name: Display Python version
        run: python -c "import sys; import os; print(\"\n\".join(os.environ[\"PATH\"].split(os.pathsep))); print(sys.version); print(sys.executable);"
      - name: Upgrade setuptools and pip
        run: python -m pip install -U setuptools pip
      - name: Install protoc
        shell: bash
        run: .github/scripts/install_protoc.sh
      - name: Install tox
        run: python -m pip install tox
      - name: Run fix_lint
        run: python -m tox -e fix_lint

  build:
    needs: lint
    name: Build Wheel File
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML doesn't parse the version as the float 3.9;
          # matches the quoting style used by the other jobs in this file.
          python-version: '3.9'
      - name: Install protoc
        shell: bash
        run: .github/scripts/install_protoc.sh
      - name: Upgrade setuptools and pip
        run: python -m pip install -U setuptools pip wheel
      - name: Generate wheel
        run: python -m pip wheel -v -w dist --no-deps .
      - name: Show wheels generated
        run: ls -lh dist
      - uses: actions/upload-artifact@v4
        with:
          include-hidden-files: true
          name: wheel
          path: dist/

  test:
    name: Test modin-${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }}
    needs: build
    runs-on: ${{ matrix.os.image_name }}
    strategy:
      fail-fast: false
      matrix:
        os:
          - image_name: ubuntu-latest-64-cores
            download_name: linux
          - image_name: macos-latest
            download_name: macos
          - image_name: windows-latest-64-cores
            download_name: windows
        # Versions quoted: unquoted 3.12 would be read as the float 3.12
        # and "3.10" would collapse to 3.1.
        python-version: ["3.11", "3.12"]
        cloud-provider: [aws, azure, gcp]
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"
      - name: Decrypt parameters.py
        shell: bash
        run: .github/scripts/decrypt_parameters.sh
        env:
          PARAMETER_PASSWORD: ${{ secrets.PARAMETER_PASSWORD }}
          CLOUD_PROVIDER: ${{ matrix.cloud-provider }}
      - name: Install protoc
        shell: bash
        run: .github/scripts/install_protoc.sh
      - name: Add protoc to Windows path
        if: runner.os == 'Windows'
        run: |
          echo "$HOME/local/bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
      - name: Download wheel(s)
        uses: actions/download-artifact@v4
        with:
          name: wheel
          path: dist
      - name: Show wheels downloaded
        run: ls -lh dist
        shell: bash
      - name: Upgrade setuptools, pip and wheel
        run: python -m pip install -U setuptools pip wheel
      - name: Install tox
        run: python -m pip install tox
      # NOTE(review): contains('macos', download_name) reads backwards — it asks
      # whether the literal 'macos' contains the runner name. It happens to be
      # true only for the macOS runner with the current matrix values, but
      # `matrix.os.download_name == 'macos'` would state the intent directly.
      - if: ${{ contains('macos', matrix.os.download_name) }}
        name: Run Snowpark pandas API doctests
        # FIX: strip the dot from the version (3.11 -> 311) with bash
        # ${VAR/pattern/} expansion so the tox env name (py311-...) matches the
        # naming used by the other two test steps below; the original
        # py${PYTHON_VERSION}-... expanded to py3.11-..., which is inconsistent
        # with the sibling steps' env selection.
        run: python -m tox -e "py${PYTHON_VERSION/\./}-doctest-snowparkpandasdoctest-modin-ci"
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
          cloud_provider: ${{ matrix.cloud-provider }}
          PYTEST_ADDOPTS: --color=yes --tb=short
          TOX_PARALLEL_NO_SPINNER: 1
        # Specify SNOWFLAKE_IS_PYTHON_RUNTIME_TEST: 1 when adding >= python3.11 with no server-side support
        # For example, see https://github.com/snowflakedb/snowpark-python/pull/681
        shell: bash
      - if: ${{ contains('macos', matrix.os.download_name) }}
        name: Run Snowpark pandas API tests (excluding doctests)
        run: python -m tox -e "py${PYTHON_VERSION/\./}-snowparkpandasdailynotdoctest-modin-ci"
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
          cloud_provider: ${{ matrix.cloud-provider }}
          PYTEST_ADDOPTS: --color=yes --tb=short
          TOX_PARALLEL_NO_SPINNER: 1
        shell: bash
      - if: ${{ !contains('macos', matrix.os.download_name) }}
        name: Run Snowpark pandas API tests (excluding doctests)
        # Use regular github setup for large windows and linux instance
        run: python -m tox -e "py${PYTHON_VERSION/\./}-snowparkpandasnotdoctest-modin-ci"
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
          cloud_provider: ${{ matrix.cloud-provider }}
          PYTEST_ADDOPTS: --color=yes --tb=short
          TOX_PARALLEL_NO_SPINNER: 1
        shell: bash
      - name: Combine coverages
        run: python -m tox -e coverage --skip-missing-interpreters false
        shell: bash
        env:
          SNOWFLAKE_IS_PYTHON_RUNTIME_TEST: 1
      - uses: actions/upload-artifact@v4
        with:
          include-hidden-files: true
          name: coverage_${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }}
          path: |
            .tox/.coverage
            .tox/coverage.xml

  combine-coverage:
    # Run even when the test job failed so partial coverage is still combined.
    if: ${{ success() || failure() }}
    name: Combine coverage
    needs: test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/download-artifact@v4
        with:
          path: artifacts
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"
      - name: Install protoc
        shell: bash
        run: .github/scripts/install_protoc.sh
      - name: Upgrade setuptools and pip
        run: python -m pip install -U setuptools pip
      - name: Install tox
        run: python -m pip install tox
      - name: Collect all coverages to one dir
        # Copies artifacts/coverage_<suffix>/.coverage files into .tox/ as
        # .coverage.<suffix> ([9:] strips the "coverage_" prefix) so tox's
        # coverage env can combine them.
        run: |
          python -c '
          from pathlib import Path
          import shutil

          src_dir = Path("artifacts")
          dst_dir = Path(".") / ".tox"
          dst_dir.mkdir()
          for src_file in src_dir.glob("*/.coverage"):
              dst_file = dst_dir / ".coverage.{}".format(src_file.parent.name[9:])
              print("{} copy to {}".format(src_file, dst_file))
              shutil.copy(str(src_file), str(dst_file))'
      - name: Combine coverages
        run: python -m tox -e coverage
      - name: Publish html coverage
        uses: actions/upload-artifact@v4
        with:
          include-hidden-files: true
          name: overall_cov_html
          path: .tox/htmlcov
      - name: Publish xml coverage
        uses: actions/upload-artifact@v4
        with:
          include-hidden-files: true
          name: overall_cov_xml
          path: .tox/coverage.xml
      # NOTE(review): codecov-action@v1 is long-deprecated; consider upgrading
      # to a current major version (requires checking token requirements).
      - uses: codecov/codecov-action@v1
        with:
          file: .tox/coverage.xml
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
# This is copied from original daily_precommit.yml with one change: only run Snowpark pandas tests

name: Daily Snowpark pandas API test
name: Daily Snowpark pandas API test with Py3.9 and Py3.10
on:
schedule:
# 8 AM UTC
- cron: "0 8 * * *"
# 9 AM UTC
- cron: "0 9 * * *"
workflow_dispatch:
inputs:
logLevel:
Expand Down Expand Up @@ -124,7 +124,7 @@ jobs:
download_name: macos
- image_name: windows-latest-64-cores
download_name: windows
python-version: ["3.9", "3.10", "3.11"]
python-version: ["3.9", "3.10"]
cloud-provider: [aws, azure, gcp]
steps:
- name: Checkout Code
Expand Down Expand Up @@ -278,7 +278,7 @@ jobs:
os:
- image_name: windows-latest-64-cores
download_name: windows
python-version: ["3.11"]
python-version: ["3.10"]
cloud-provider: [azure]
steps:
- name: Checkout Code
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/daily_precommit.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
name: Daily precommit test
on:
schedule:
# 9 AM UTC
- cron: "0 9 * * *"
# 8 AM UTC
- cron: "0 8 * * *"

workflow_dispatch:
inputs:
Expand Down
28 changes: 12 additions & 16 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Release History

## 1.27.0 (TBD)
## 1.27.0 (2025-02-03)

### Snowpark Python API Updates

Expand Down Expand Up @@ -58,34 +58,35 @@
- Added support for `DataFrameWriter.insert_into/insertInto`. This method also supports local testing mode.
- Added support for `DataFrame.create_temp_view` to create a temporary view. It will fail if the view already exists.
- Added support for multiple columns in the functions `map_cat` and `map_concat`.
- Added an option `keep_column_order` for keeping original column order in `DataFrame.with_column` and `DataFrame.with_columns`.
- Added options to column casts that allow renaming or adding fields in StructType columns.
- Added support for `contains_null` parameter to ArrayType.
- Added support for creating a temporary view via `DataFrame.create_or_replace_temp_view` from a DataFrame created by reading a file from a stage.
- Added support for `value_contains_null` parameter to MapType.
- Added `interactive` to telemetry that indicates whether the current environment is an interactive one.
- Allow `session.file.get` in a Native App to read file paths starting with `/` from the current version
- Added support for multiple aggregation functions after `DataFrame.pivot`.

#### Experimental Features

- Added `Catalog` class to manage snowflake objects. It can be accessed via `Session.catalog`.
- `snowflake.core` is a dependency required for this feature.
- Allow user input schema when reading JSON file on stage.
- Added support for specifying a schema string (including implicit struct syntax) when calling `DataFrame.create_dataframe`.
- `snowflake.core` is a dependency required for this feature.

#### Improvements

- Updated README.md to include instructions on how to verify package signatures using `cosign`.
- Added an option `keep_column_order` for keeping original column order in `DataFrame.with_column` and `DataFrame.with_columns`.
- Added options to column casts that allow renaming or adding fields in StructType columns.
- Added support for `contains_null` parameter to ArrayType.
- Added support for creating a temporary view via `DataFrame.create_or_replace_temp_view` from a DataFrame created by reading a file from a stage.
- Added support for `value_contains_null` parameter to MapType.
- Added `interactive` to telemetry that indicates whether the current environment is an interactive one.
- Allow `session.file.get` in a Native App to read file paths starting with `/` from the current version
- Added support for multiple aggregation functions after `DataFrame.pivot`.

#### Bug Fixes

- Fixed a bug in local testing mode that caused a column to contain None when it should contain 0
- Fixed a bug in local testing mode that caused a column to contain None when it should contain 0.
- Fixed a bug in `StructField.from_json` that prevented TimestampTypes with `tzinfo` from being parsed correctly.
- Fixed a bug in function `date_format` that caused an error when the input column was date type or timestamp type.
- Fixed a bug in dataframe that null value can be inserted in a non-nullable column.
- Fixed a bug in `replace` and `lit` which raised type hint assertion error when passing `Column` expression objects.
- Fixed a bug in `pandas_udf` and `pandas_udtf` where `session` parameter was erroneously ignored.
- Fixed a bug that raised incorrect type conversion error for system function called through `session.call`.

### Snowpark pandas API Updates

Expand Down Expand Up @@ -120,13 +121,8 @@
`"skew"`, `"std"`, `np.std` `"var"`, and `np.var` in
`pd.pivot_table()`, `DataFrame.pivot_table()`, and `pd.crosstab()`.

#### Bug Fixes

- Fixed a bug that system function called through `session.call` have incorrect type conversion.

#### Improvements
- Improve performance of `DataFrame.map`, `Series.apply` and `Series.map` methods by mapping numpy functions to snowpark functions if possible.
- Updated integration testing for `session.lineage.trace` to exclude deleted objects
- Added documentation for `DataFrame.map`.
- Improve performance of `DataFrame.apply` by mapping numpy functions to snowpark functions if possible.
- Added documentation on the extent of Snowpark pandas interoperability with scikit-learn.
Expand Down
2 changes: 1 addition & 1 deletion recipe/meta.yaml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{% set name = "snowflake-snowpark-python" %}
{% set version = "1.26.0" %}
{% set version = "1.27.0" %}
{% set noarch_build = (os.environ.get('SNOWFLAKE_SNOWPARK_PYTHON_NOARCH_BUILD', 'false')) == 'true' %}

package:
Expand Down
2 changes: 1 addition & 1 deletion src/snowflake/snowpark/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@


# Update this for the versions
VERSION = (1, 26, 0)
VERSION = (1, 27, 0)
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.agg.test
Original file line number Diff line number Diff line change
Expand Up @@ -522,5 +522,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.collect.test
Original file line number Diff line number Diff line change
Expand Up @@ -342,5 +342,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.count.test
Original file line number Diff line number Diff line change
Expand Up @@ -175,5 +175,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.count2.test
Original file line number Diff line number Diff line change
Expand Up @@ -204,5 +204,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.create_or_replace.test
Original file line number Diff line number Diff line change
Expand Up @@ -630,5 +630,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.cross_join.lsuffix.test
Original file line number Diff line number Diff line change
Expand Up @@ -181,5 +181,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.cross_join.rsuffix.test
Original file line number Diff line number Diff line change
Expand Up @@ -181,5 +181,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
2 changes: 1 addition & 1 deletion tests/ast/data/DataFrame.cross_join.suffix.test
Original file line number Diff line number Diff line change
Expand Up @@ -184,5 +184,5 @@ client_language {
}
client_version {
major: 1
minor: 26
minor: 27
}
Loading
Loading