python3Packages.pyiceberg: 0.9.1 -> 0.10.0

Diff: https://github.com/apache/iceberg-python/compare/pyiceberg-0.9.1...pyiceberg-0.10.0

Changelog: https://github.com/apache/iceberg-python/releases/tag/pyiceberg-0.10.0
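For anyone reproducing the bump locally, here is a minimal, untested sketch of pulling the updated package into a Python environment together with two of the optional-dependency groups introduced in this release (group names are taken from the diff below; ./. is assumed to be a nixpkgs checkout that already contains this change):

  # Minimal sketch: build python3 with pyiceberg 0.10.0 plus the new
  # "bigquery" and "hf" extras. Assumes the working directory is a
  # nixpkgs checkout that already includes this update.
  with import ./. { };

  python3.withPackages (
    ps:
    [ ps.pyiceberg ]
    ++ ps.pyiceberg.optional-dependencies.bigquery
    ++ ps.pyiceberg.optional-dependencies.hf
  )

The base derivation itself can be built from the same checkout with nix-build -A python3Packages.pyiceberg.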

Changed files (+61 -17):
pkgs/development/python-modules/pyiceberg/default.nix
···
cachetools,
click,
fsspec,
+ google-auth,
mmh3,
pydantic,
pyparsing,
+ pyroaring,
ray,
requests,
rich,
···
# optional-dependencies
adlfs,
- # getdaft,
+ google-cloud-bigquery,
+ # bodo,
+ # daft,
duckdb,
pyarrow,
+ pyiceberg-core,
boto3,
+ huggingface-hub,
gcsfs,
- mypy-boto3-glue,
thrift,
+ kerberos,
+ # thrift-sasl,
pandas,
+ # pyiceberg-core,
s3fs,
python-snappy,
psycopg2-binary,
···
buildPythonPackage rec {
pname = "iceberg-python";
- version = "0.9.1";
+ version = "0.10.0";
pyproject = true;
src = fetchFromGitHub {
owner = "apache";
repo = "iceberg-python";
tag = "pyiceberg-${version}";
- hash = "sha256-OUj8z/UOIcK0S4tf6Id52YHweNDfYnX6P4nChXrOxqY=";
+ hash = "sha256-uR8nmKVjYjiArcNaf/Af2kGh14p59VV9g2mKPKmiJnc=";
};
patches = [
···
cachetools
click
fsspec
+ google-auth
mmh3
pydantic
pyparsing
+ pyroaring
ray
requests
rich
···
adlfs = [
adlfs
];
+ bigquery = [
+ google-cloud-bigquery
+ ];
+ bodo = [
+ # bodo
+ ];
daft = [
- # getdaft
+ # daft
];
duckdb = [
duckdb
···
dynamodb = [
boto3
];
+ hf = [
+ huggingface-hub
+ ];
gcsfs = [
gcsfs
];
glue = [
boto3
- mypy-boto3-glue
];
hive = [
thrift
];
+ hive-kerberos = [
+ kerberos
+ thrift
+ # thrift-sasl
+ ];
pandas = [
pandas
pyarrow
];
pyarrow = [
pyarrow
+ pyiceberg-core
];
ray = [
pandas
···
datafusion
fastavro
moto
- mypy-boto3-glue
- pandas
- pyarrow
pyspark
pytest-lazy-fixture
pytest-mock
pytest-timeout
pytestCheckHook
requests-mock
- s3fs
- sqlalchemy
- thrift
]
+ ++ optional-dependencies.bigquery
+ ++ optional-dependencies.hive
+ ++ optional-dependencies.pandas
+ ++ optional-dependencies.pyarrow
+ ++ optional-dependencies.s3fs
+ ++ optional-dependencies.sql-sqlite
++ moto.optional-dependencies.server;
pytestFlags = [
···
];
disabledTests = [
+ # KeyError: 'authorization'
+ "test_token_200"
+ "test_token_200_without_optional_fields"
+ "test_token_with_default_scope"
+ "test_token_with_optional_oauth_params"
+ "test_token_with_custom_scope"
+
+ # AttributeError: 'SessionContext' object has no attribute 'register_table_provider'
+ "test_datafusion_register_pyiceberg_tabl"
+
# ModuleNotFoundError: No module named 'puresasl'
"test_create_hive_client_with_kerberos"
"test_create_hive_client_with_kerberos_using_context_manager"
- # Require unpackaged pyiceberg_core
- "test_bucket_pyarrow_transforms"
- "test_transform_consistency_with_pyarrow_transform"
- "test_truncate_pyarrow_transforms"
-
# botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL
"test_checking_if_a_file_exists"
"test_closing_a_file"
···
"test_fsspec_pickle_roundtrip_gcs"
# Timeout (network access)
+ "test_config_200"
"test_fsspec_converting_an_outputfile_to_an_inputfile_adls"
"test_fsspec_new_abfss_output_file_adls"
"test_fsspec_new_input_file_adls"
···
"test_partitioned_write"
"test_token_200_w_oauth2_server_uri"
+ # azure.core.exceptions.ServiceRequestError (network access)
+ "test_converting_an_outputfile_to_an_inputfile_adls"
+ "test_file_tell_adls"
+ "test_getting_length_of_file_adls"
+ "test_new_input_file_adls"
+ "test_new_output_file_adls"
+ "test_raise_on_opening_file_not_found_adls"
+ "test_read_specified_bytes_for_file_adls"
+ "test_write_and_read_file_adls"
+ # Hangs forever (from tests/io/test_pyarrow.py)
"test_getting_length_of_file_gcs"
]
++ lib.optionals stdenv.hostPlatform.isDarwin [
# ImportError: The pyarrow installation is not built with support for 'GcsFileSystem'
"test_converting_an_outputfile_to_an_inputfile_gcs"
+ "test_create_table_with_database_location"
+ "test_drop_table_with_database_location"
"test_new_input_file_gcs"
"test_new_output_file_gc"
···
# '/tmp/iceberg/warehouse/default.db/test_projection_partitions/metadata/00000-6c1c61a1-495f-45d3-903d-a2643431be91.metadata.json'
"test_identity_transform_column_projection"
"test_identity_transform_columns_projection"
+ "test_in_memory_catalog_context_manager"
+ "test_inspect_partition_for_nested_field"
]
++ lib.optionals (pythonAtLeast "3.13") [
# AssertionError: