@@ -2251,13 +2251,19 @@ def test_branch_py_write_spark_read(session_catalog: Catalog, spark: SparkSessio
@pytest.mark.integration
def test_nanosecond_support_on_catalog(session_catalog: Catalog) -> None:
    """Verify nanosecond-timestamp schemas against V3 table creation.

    Creating a V3 table through ``_create_table`` with a nanosecond-timestamp
    schema must succeed on the session catalog, while ``create_table`` on an
    in-memory catalog is expected to raise ``NotImplementedError`` because
    writing format-version 3 is not yet supported there.
    """
    identifier = "default.test_nanosecond_support_on_catalog"

    # Separate in-memory catalog so the NotImplementedError checks below
    # do not touch the shared session catalog.
    catalog = load_catalog("default", type="in-memory")
    catalog.create_namespace("ns")

    # One schema with a naive ns timestamp, one with a zoned ns timestamp.
    table = pa.Table.from_arrays([pa.array([datetime.now()], type=pa.timestamp("ns"))], names=["timestamps"])
    table2 = pa.Table.from_arrays(
        [pa.array([datetime.now()], type=pa.timestamp("ns", tz="America/New_York"))], names=["timestamps"]
    )

    # Creating the V3 table via the project helper should succeed.
    _create_table(session_catalog, identifier, {"format-version": "3"}, schema=table.schema)

    # The in-memory catalog does not yet support writing V3 tables.
    with pytest.raises(NotImplementedError, match="Writing V3 is not yet supported"):
        catalog.create_table("ns.table1", schema=table.schema, properties={"format-version": "3"})

    with pytest.raises(NotImplementedError, match="Writing V3 is not yet supported"):
        catalog.create_table("ns.table2", schema=table2.schema, properties={"format-version": "3"})
0 commit comments