···
+ google-cloud-bigquery,
···
pname = "iceberg-python";
tag = "pyiceberg-${version}";
- hash = "sha256-OUj8z/UOIcK0S4tf6Id52YHweNDfYnX6P4nChXrOxqY=";
+ hash = "sha256-uR8nmKVjYjiArcNaf/Af2kGh14p59VV9g2mKPKmiJnc=";
···
+ google-cloud-bigquery
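This added line presumably lands inside the package's extras set; a minimal sketch of the assumed context (the attribute layout and sibling extras are inferred from the check-input hunk further down, not confirmed by this diff):

  optional-dependencies = {
    bigquery = [ google-cloud-bigquery ];
    # hive, pandas, pyarrow, s3fs, sql-sqlite ... (elided)
  };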
···
+ ++ optional-dependencies.bigquery
+ ++ optional-dependencies.hive
+ ++ optional-dependencies.pandas
+ ++ optional-dependencies.pyarrow
+ ++ optional-dependencies.s3fs
+ ++ optional-dependencies.sql-sqlite
++ moto.optional-dependencies.server;
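These extras feed the test environment. For comparison, a downstream user would pull the same extras the usual way; a minimal sketch, assuming the package is exposed as python3Packages.pyiceberg:

  python3.withPackages (ps:
    [ ps.pyiceberg ]
    ++ ps.pyiceberg.optional-dependencies.bigquery
    ++ ps.pyiceberg.optional-dependencies.pyarrow)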
···
+ # KeyError: 'authorization'
+ "test_token_200_without_optional_fields"
+ "test_token_with_default_scope"
+ "test_token_with_optional_oauth_params"
+ "test_token_with_custom_scope"
+ # AttributeError: 'SessionContext' object has no attribute 'register_table_provider'
+ "test_datafusion_register_pyiceberg_tabl"
# ModuleNotFoundError: No module named 'puresasl'
"test_create_hive_client_with_kerberos"
"test_create_hive_client_with_kerberos_using_context_manager"
- # Require unpackaged pyiceberg_core
- "test_bucket_pyarrow_transforms"
- "test_transform_consistency_with_pyarrow_transform"
- "test_truncate_pyarrow_transforms"
# botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL
"test_checking_if_a_file_exists"
···
"test_fsspec_pickle_roundtrip_gcs"
# Timeout (network access)
"test_fsspec_converting_an_outputfile_to_an_inputfile_adls"
"test_fsspec_new_abfss_output_file_adls"
"test_fsspec_new_input_file_adls"
···
"test_token_200_w_oauth2_server_uri"
+ # azure.core.exceptions.ServiceRequestError (network access)
+ "test_converting_an_outputfile_to_an_inputfile_adls"
+ "test_file_tell_adls"
+ "test_getting_length_of_file_adls"
+ "test_new_input_file_adls"
+ "test_new_output_file_adls"
+ "test_raise_on_opening_file_not_found_adls"
+ "test_read_specified_bytes_for_file_adls"
+ "test_write_and_read_file_adls"
# Hangs forever (from tests/io/test_pyarrow.py)
"test_getting_length_of_file_gcs"
++ lib.optionals stdenv.hostPlatform.isDarwin [
# ImportError: The pyarrow installation is not built with support for 'GcsFileSystem'
"test_converting_an_outputfile_to_an_inputfile_gcs"
+ "test_create_table_with_database_location"
+ "test_drop_table_with_database_location"
"test_new_input_file_gcs"
"test_new_output_file_gc"
···
# '/tmp/iceberg/warehouse/default.db/test_projection_partitions/metadata/00000-6c1c61a1-495f-45d3-903d-a2643431be91.metadata.json'
"test_identity_transform_column_projection"
"test_identity_transform_columns_projection"
+ "test_in_memory_catalog_context_manager"
+ "test_inspect_partition_for_nested_field"
++ lib.optionals (pythonAtLeast "3.13") [