@@ -969,13 +969,11 @@ def test_read_pandas_json_index(session, write_engine):
 @pytest.mark.parametrize(
     ("write_engine"),
     [
-        pytest.param("default"),
         pytest.param("bigquery_load"),
         pytest.param("bigquery_streaming"),
-        pytest.param("bigquery_inline", marks=pytest.mark.xfail(raises=ValueError)),
     ],
 )
-def test_read_pandas_w_nested_json(session, write_engine):
+def test_read_pandas_w_nested_json_fails(session, write_engine):
     data = [
         [{"json_field": "1"}],
         [{"json_field": None}],
@@ -995,16 +993,44 @@ def test_read_pandas_w_nested_json(session, write_engine):
         session.read_pandas(pd_s, write_engine=write_engine)


+@utils.skip_legacy_pandas
 @pytest.mark.parametrize(
     ("write_engine"),
     [
         pytest.param("default"),
+        pytest.param("bigquery_inline"),
+    ],
+)
+def test_read_pandas_inline_w_nested_json(session, write_engine):
+    data = [
+        [{"json_field": "1"}],
+        [{"json_field": None}],
+        [{"json_field": '["1","3","5"]'}],
+        [{"json_field": '{"a":1,"b":["x","y"],"c":{"x":[],"z":false}}'}],
+    ]
+    pa_array = pa.array(data, type=pa.list_(pa.struct([("name", pa.string())])))
+    pd_s = pd.Series(
+        arrays.ArrowExtensionArray(pa_array),  # type: ignore
+        dtype=pd.ArrowDtype(
+            pa.list_(pa.struct([("name", bigframes.dtypes.JSON_ARROW_TYPE)]))
+        ),
+    )
+    bq_s = (
+        session.read_pandas(pd_s, write_engine=write_engine)
+        .to_pandas()
+        .reset_index(drop=True)
+    )
+    pd.testing.assert_series_equal(bq_s, pd_s)
+
+
+@pytest.mark.parametrize(
+    ("write_engine"),
+    [
         pytest.param("bigquery_load"),
         pytest.param("bigquery_streaming"),
-        pytest.param("bigquery_inline", marks=pytest.mark.xfail(raises=ValueError)),
     ],
 )
-def test_read_pandas_w_nested_json_index(session, write_engine):
+def test_read_pandas_inline_w_nested_json_index_fails(session, write_engine):
     data = [
         [{"json_field": "1"}],
         [{"json_field": None}],
@@ -1026,6 +1052,32 @@ def test_read_pandas_w_nested_json_index(session, write_engine):
         session.read_pandas(pd_idx, write_engine=write_engine)


+@utils.skip_legacy_pandas
+@pytest.mark.parametrize(
+    ("write_engine"),
+    [
+        pytest.param("default"),
+        pytest.param("bigquery_inline"),
+    ],
+)
+def test_read_pandas_w_nested_json_index(session, write_engine):
+    data = [
+        [{"json_field": "1"}],
+        [{"json_field": None}],
+        [{"json_field": '["1","3","5"]'}],
+        [{"json_field": '{"a":1,"b":["x","y"],"c":{"x":[],"z":false}}'}],
+    ]
+    pa_array = pa.array(data, type=pa.list_(pa.struct([("name", pa.string())])))
+    pd_idx: pd.Index = pd.Index(
+        arrays.ArrowExtensionArray(pa_array),  # type: ignore
+        dtype=pd.ArrowDtype(
+            pa.list_(pa.struct([("name", bigframes.dtypes.JSON_ARROW_TYPE)]))
+        ),
+    )
+    bq_idx = session.read_pandas(pd_idx, write_engine=write_engine).to_pandas()
+    pd.testing.assert_index_equal(bq_idx, pd_idx)
+
+
 @utils.skip_legacy_pandas
 @pytest.mark.parametrize(
     ("write_engine",)