
Commit 28dd34d

artem1205, clnoll, and lazebnyi authored
Source Azure Blob Storage: add integration tests (#36542)
Signed-off-by: Artem Inzhyyants <[email protected]>
Co-authored-by: Catherine Noll <[email protected]>
Co-authored-by: Serhii Lazebnyi <[email protected]>
1 parent 33d6c50 commit 28dd34d

23 files changed: +1,223 −186 lines changed

@@ -0,0 +1,3 @@
+[run]
+omit =
+    source_azure_blob_storage/run.py
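
The [run] omit entry above excludes source_azure_blob_storage/run.py (the CLI entrypoint) from coverage measurement. A minimal sketch of how coverage.py picks that setting up, assuming the new file is the connector's .coveragerc and coverage is driven through the library API (pytest-cov reads the same file):

import coverage

# Passing the rc file explicitly; when named .coveragerc it is found automatically.
cov = coverage.Coverage(config_file=".coveragerc")
cov.start()
# ... import and exercise source_azure_blob_storage code under test ...
cov.stop()
cov.save()
cov.report()  # source_azure_blob_storage/run.py is omitted from this report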

@@ -0,0 +1,23 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from dagger import Container
+
+
+async def pre_connector_install(base_image_container: Container) -> Container:
+    """
+    Docker compose is required to run the integration tests so we install Docker on top of the base image.
+    """
+    return (
+        base_image_container.with_exec(["sh", "-c", "apt-get update && apt-get install -y curl jq"])
+        # Download install-docker.sh script
+        .with_exec(["curl", "-fsSL", "https://get.docker.com", "-o", "/tmp/install-docker.sh"])
+        # Run the install-docker.sh script with a pinned Docker version
+        .with_exec(["sh", "/tmp/install-docker.sh", "--version", "25.0"])
+        # Remove the install-docker.sh script
+        .with_exec(["rm", "/tmp/install-docker.sh"])
+    )
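
This async hook follows airbyte-ci's build-customization convention: it layers curl, jq, and a pinned Docker install onto the connector's base image so the integration tests can use docker compose, as the docstring notes. Below is a rough, hypothetical sketch of exercising the hook directly with the dagger Python SDK; airbyte-ci normally calls it for you during the connector build, and the module name and base image reference here are assumptions for illustration only.

import anyio
import dagger

# Assumed module name, per airbyte-ci's build-customization convention.
from build_customization import pre_connector_install

async def main() -> None:
    async with dagger.Connection(dagger.Config()) as client:
        # Assumed base image reference; the real tag comes from the connector's metadata.
        base = client.container().from_("docker.io/airbyte/python-connector-base:1.2.0")
        customized = await pre_connector_install(base)
        # Check that the Docker CLI landed in the image (no daemon needed for --version).
        print(await customized.with_exec(["docker", "--version"]).stdout())

anyio.run(main)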

@@ -0,0 +1,35 @@
+{
+  "azure_blob_storage_endpoint": "http://localhost:10000/account1",
+  "azure_blob_storage_account_name": "account1",
+  "azure_blob_storage_account_key": "key1",
+  "azure_blob_storage_container_name": "testcontainer",
+  "streams": [
+    {
+      "name": "users",
+      "file_type": "avro",
+      "globs": ["**/test_avro_users*.avro"],
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "avro"
+      }
+    },
+    {
+      "name": "purchases",
+      "file_type": "avro",
+      "globs": ["**/test_avro_purchases*.avro"],
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "avro"
+      }
+    },
+    {
+      "name": "products",
+      "file_type": "avro",
+      "globs": ["**/test_avro_products*.avro"],
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "avro"
+      }
+    }
+  ]
+}
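
This config, like the ones that follow, points the connector at a local blob-storage emulator (Azurite listens on port 10000 by default) rather than a real storage account. As a sketch of how such a setup could be seeded, here is one way to upload a fixture the users stream's glob would match, using the azure-storage-blob SDK; the blob name and the freshly created container are illustrative assumptions, not part of this commit.

from azure.storage.blob import BlobServiceClient

# Mirrors the endpoint/account/key values in the config above.
connection_string = (
    "DefaultEndpointsProtocol=http;"
    "AccountName=account1;"
    "AccountKey=key1;"
    "BlobEndpoint=http://localhost:10000/account1;"
)
service = BlobServiceClient.from_connection_string(connection_string)
container = service.get_container_client("testcontainer")
container.create_container()  # raises ResourceExistsError if it already exists
with open("test_avro_users_1.avro", "rb") as data:  # illustrative local fixture
    container.upload_blob("test_avro_users_1.avro", data)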

@@ -0,0 +1,128 @@
+{
+  "azure_blob_storage_endpoint": "http://localhost:10000/account1",
+  "azure_blob_storage_account_name": "account1",
+  "azure_blob_storage_account_key": "key1",
+  "azure_blob_storage_container_name": "testcontainer",
+  "streams": [
+    {
+      "name": "users",
+      "file_type": "csv",
+      "globs": ["**/test_csv_users*.csv"],
+      "legacy_prefix": "",
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "csv",
+        "delimiter": ",",
+        "quote_char": "\"",
+        "double_quote": true,
+        "null_values": [
+          "",
+          "#N/A",
+          "#N/A N/A",
+          "#NA",
+          "-1.#IND",
+          "-1.#QNAN",
+          "-NaN",
+          "-nan",
+          "1.#IND",
+          "1.#QNAN",
+          "N/A",
+          "NA",
+          "NULL",
+          "NaN",
+          "n/a",
+          "nan",
+          "null"
+        ],
+        "true_values": ["1", "True", "TRUE", "true"],
+        "false_values": ["0", "False", "FALSE", "false"],
+        "inference_type": "Primitive Types Only",
+        "strings_can_be_null": false,
+        "encoding": "utf8",
+        "header_definition": {
+          "header_definition_type": "From CSV"
+        }
+      }
+    },
+    {
+      "name": "purchases",
+      "file_type": "csv",
+      "globs": ["**/test_csv_purchases*.csv"],
+      "legacy_prefix": "",
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "csv",
+        "delimiter": ",",
+        "quote_char": "\"",
+        "double_quote": true,
+        "null_values": [
+          "",
+          "#N/A",
+          "#N/A N/A",
+          "#NA",
+          "-1.#IND",
+          "-1.#QNAN",
+          "-NaN",
+          "-nan",
+          "1.#IND",
+          "1.#QNAN",
+          "N/A",
+          "NA",
+          "NULL",
+          "NaN",
+          "n/a",
+          "nan",
+          "null"
+        ],
+        "true_values": ["1", "True", "TRUE", "true"],
+        "false_values": ["0", "False", "FALSE", "false"],
+        "inference_type": "Primitive Types Only",
+        "strings_can_be_null": false,
+        "encoding": "utf8",
+        "header_definition": {
+          "header_definition_type": "From CSV"
+        }
+      }
+    },
+    {
+      "name": "products",
+      "file_type": "csv",
+      "globs": ["**/test_csv_products*.csv"],
+      "legacy_prefix": "",
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "csv",
+        "delimiter": ",",
+        "quote_char": "\"",
+        "double_quote": true,
+        "null_values": [
+          "",
+          "#N/A",
+          "#N/A N/A",
+          "#NA",
+          "-1.#IND",
+          "-1.#QNAN",
+          "-NaN",
+          "-nan",
+          "1.#IND",
+          "1.#QNAN",
+          "N/A",
+          "NA",
+          "NULL",
+          "NaN",
+          "n/a",
+          "nan",
+          "null"
+        ],
+        "true_values": ["1", "True", "TRUE", "true"],
+        "false_values": ["0", "False", "FALSE", "false"],
+        "inference_type": "Primitive Types Only",
+        "strings_can_be_null": false,
+        "encoding": "utf8",
+        "header_definition": {
+          "header_definition_type": "From CSV"
+        }
+      }
+    }
+  ]
+}
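
Most of this file is CSV parsing options: null_values, true_values and false_values control how raw cell text is coerced before type inference. A small stand-alone illustration of that intent in plain Python (the real coercion happens inside the file-based CDK's CSV parser, not in code from this commit):

# Abbreviated copies of the lists configured above.
NULL_VALUES = {"", "#N/A", "N/A", "NA", "NULL", "NaN", "n/a", "nan", "null"}
TRUE_VALUES = {"1", "True", "TRUE", "true"}
FALSE_VALUES = {"0", "False", "FALSE", "false"}

def coerce(cell: str):
    """Map raw CSV text to None/bool, leaving every other value untouched."""
    if cell in NULL_VALUES:
        return None
    if cell in TRUE_VALUES:
        return True
    if cell in FALSE_VALUES:
        return False
    return cell

print([coerce(c) for c in ["42", "N/A", "true", "FALSE"]])  # ['42', None, True, False]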

@@ -0,0 +1,41 @@
+{
+  "azure_blob_storage_endpoint": "http://localhost:10000/account1",
+  "azure_blob_storage_account_name": "account1",
+  "azure_blob_storage_account_key": "key1",
+  "azure_blob_storage_container_name": "testcontainer",
+  "streams": [
+    {
+      "name": "users",
+      "file_type": "jsonl",
+      "globs": ["**/test_jsonl_users*.jsonl"],
+      "legacy_prefix": "",
+      "newlines_in_values": true,
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "jsonl"
+      }
+    },
+    {
+      "name": "purchases",
+      "file_type": "jsonl",
+      "globs": ["**/test_jsonl_purchases*.jsonl"],
+      "legacy_prefix": "",
+      "newlines_in_values": true,
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "jsonl"
+      }
+    },
+    {
+      "name": "products",
+      "file_type": "jsonl",
+      "globs": ["**/test_jsonl_products*.jsonl"],
+      "legacy_prefix": "",
+      "newlines_in_values": true,
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "jsonl"
+      }
+    }
+  ]
+}
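
A JSONL fixture only needs to land in testcontainer with a name matching one of the globs above, e.g. **/test_jsonl_users*.jsonl for the users stream. A sketch of generating such a file; the record fields are made up for illustration and are not the stream's actual schema:

import json

rows = [
    {"id": 1, "name": "alice"},  # illustrative columns only
    {"id": 2, "name": "bob"},
]
with open("test_jsonl_users_1.jsonl", "w") as handle:
    for row in rows:
        handle.write(json.dumps(row) + "\n")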

@@ -0,0 +1,35 @@
+{
+  "azure_blob_storage_endpoint": "http://localhost:10000/account1",
+  "azure_blob_storage_account_name": "account1",
+  "azure_blob_storage_account_key": "key1",
+  "azure_blob_storage_container_name": "testcontainer",
+  "streams": [
+    {
+      "name": "users",
+      "file_type": "jsonl",
+      "globs": ["**/test_parquet_users*.parquet"],
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "parquet"
+      }
+    },
+    {
+      "name": "purchases",
+      "file_type": "jsonl",
+      "globs": ["**/test_parquet_purchases*.parquet"],
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "parquet"
+      }
+    },
+    {
+      "name": "products",
+      "file_type": "jsonl",
+      "globs": ["**/test_parquet_products*.parquet"],
+      "validation_policy": "Emit Record",
+      "format": {
+        "filetype": "parquet"
+      }
+    }
+  ]
+}
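
Note that these streams keep "file_type": "jsonl" while their globs and format block target parquet files; the format block is what determines the parser used. A sketch of producing a parquet fixture matched by the products glob, using pyarrow with made-up columns:

import pyarrow as pa
import pyarrow.parquet as pq

# Illustrative columns; the streams' real schema comes from the test data, which is not shown here.
table = pa.Table.from_pydict({"id": [1, 2], "title": ["widget", "gadget"]})
pq.write_table(table, "test_parquet_products_1.parquet")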
