# Nix package definition for sqlframe (nixpkgs python-modules style).
{
  lib,
  buildPythonPackage,
  fetchFromGitHub,

  # build-system
  setuptools-scm,

  # dependencies
  prettytable,
  sqlglot,
  typing-extensions,

  # tests
  databricks-sql-connector,
  duckdb,
  findspark,
  google-cloud-bigquery,
  pyspark,
  pytest-postgresql,
  pytest-xdist,
  pytestCheckHook,
}:

buildPythonPackage rec {
  pname = "sqlframe";
  version = "3.38.2";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "eakmanrq";
    repo = "sqlframe";
    tag = "v${version}";
    hash = "sha256-ekDt9vsHdHhUNaQghG3EaM82FRZYdw+gaxENcurSayk=";
  };

  build-system = [
    setuptools-scm
  ];

  dependencies = [
    prettytable
    sqlglot
    typing-extensions
  ];

  pythonImportsCheck = [ "sqlframe" ];

  nativeCheckInputs = [
    databricks-sql-connector
    duckdb
    findspark
    google-cloud-bigquery
    pyspark
    pytest-postgresql
    pytest-xdist
    pytestCheckHook
  ];

  disabledTests = [
    # Requires google-cloud credentials
    # google.auth.exceptions.DefaultCredentialsError
    "test_activate_bigquery_default_dataset"
  ];

  disabledTestPaths = [
    # duckdb.duckdb.CatalogException: Catalog Error: Table Function with name "dsdgen" is not in the catalog, but it exists in the tpcds extension.
    # "tests/integration/test_int_dataframe.py"
    "tests/integration/"
  ];

  meta = {
    description = "Turning PySpark Into a Universal DataFrame API";
    homepage = "https://github.com/eakmanrq/sqlframe";
    changelog = "https://github.com/eakmanrq/sqlframe/releases/tag/${src.tag}";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ GaetanLepage ];
  };
}