Skip to content

Commit 100b3ac

Browse files
author
Anton Karpets
authored
🐛Source Zendesk Support: fix record filter for ticket_metrics stream (#38310)
1 parent b119353 commit 100b3ac

File tree

5 files changed

+52
-33
lines changed

5 files changed

+52
-33
lines changed

airbyte-integrations/connectors/source-zendesk-support/metadata.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ data:
1111
connectorSubtype: api
1212
connectorType: source
1313
definitionId: 79c1aa37-dae3-42ae-b333-d1c105477715
14-
dockerImageTag: 2.6.3
14+
dockerImageTag: 2.6.4
1515
dockerRepository: airbyte/source-zendesk-support
1616
documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-support
1717
githubIssueLabel: source-zendesk-support

airbyte-integrations/connectors/source-zendesk-support/pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",]
33
build-backend = "poetry.core.masonry.api"
44

55
[tool.poetry]
6-
version = "2.6.3"
6+
version = "2.6.4"
77
name = "source-zendesk-support"
88
description = "Source implementation for Zendesk Support."
99
authors = [ "Airbyte <[email protected]>",]

airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py

+18-13
Original file line numberDiff line numberDiff line change
@@ -380,9 +380,9 @@ class SourceZendeskSupportTicketEventsExportStream(SourceZendeskIncrementalExpor
380380
https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/#incremental-ticket-event-export
381381
382382
@ param response_list_name: the main nested entity to look at inside of response, default = "ticket_events"
383-
@ param response_target_entity: nested property inside of `response_list_name`, default = "child_events"
383+
@ param response_target_entity: nested property inside `response_list_name`, default = "child_events"
384384
@ param list_entities_from_event : the list of nested child_events entities to include from parent record
385-
@ param event_type : specific event_type to check ["Audit", "Change", "Comment", etc]
385+
@ param event_type : specific event_type to check ["Audit", "Change", "Comment", etc.]
386386
@ param sideload_param : parameter variable to include various information to response
387387
"""
388388

@@ -507,15 +507,17 @@ def stream_slices(
507507
) -> Iterable[Optional[Mapping[str, Any]]]:
508508
parent_stream_state = None
509509
if stream_state:
510-
cursor_value = pendulum.parse(stream_state.get(self.cursor_field)).int_timestamp
511-
parent_stream_state = {self.parent.cursor_field: cursor_value}
510+
cursor_value = stream_state.get(self.cursor_field)
511+
parent_stream_state = {self.parent.cursor_field: pendulum.parse(cursor_value).int_timestamp}
512+
else:
513+
cursor_value = self._start_date
512514

513515
parent_records = self.parent.read_records(
514516
sync_mode=SyncMode.incremental, cursor_field=cursor_field, stream_slice=None, stream_state=parent_stream_state
515517
)
516518

517519
for record in parent_records:
518-
yield {"ticket_id": record["id"]}
520+
yield {"ticket_id": record["id"], self.cursor_field: cursor_value}
519521

520522
def should_retry(self, response: requests.Response) -> bool:
521523
if response.status_code == 404:
@@ -527,7 +529,7 @@ def should_retry(self, response: requests.Response) -> bool:
527529

528530
class TicketComments(SourceZendeskSupportTicketEventsExportStream):
529531
"""
530-
Fetch the TicketComments incrementaly from TicketEvents Export stream
532+
Fetch the TicketComments incrementally from TicketEvents Export stream
531533
"""
532534

533535
list_entities_from_event = ["via_reference_id", "ticket_id", "timestamp"]
@@ -603,7 +605,13 @@ def path(
603605
) -> str:
604606
return f"tickets/{stream_slice['ticket_id']}/metrics"
605607

606-
def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]:
608+
def parse_response(
609+
self,
610+
response: requests.Response,
611+
stream_state: Mapping[str, Any],
612+
stream_slice: Optional[Mapping[str, Any]] = None,
613+
**kwargs,
614+
) -> Iterable[Mapping]:
607615
"""try to select relevant data only"""
608616

609617
try:
@@ -613,13 +621,10 @@ def parse_response(self, response: requests.Response, stream_state: Mapping[str,
613621

614622
# no data in case of http errors
615623
if data:
616-
if not self.cursor_field:
624+
cursor_date = (stream_slice or {}).get(self.cursor_field)
625+
updated = data[self.cursor_field]
626+
if not cursor_date or updated >= cursor_date:
617627
yield data
618-
else:
619-
cursor_date = (stream_state or {}).get(self.cursor_field)
620-
updated = data[self.cursor_field]
621-
if not cursor_date or updated >= cursor_date:
622-
yield data
623628

624629

625630
class TicketSkips(CursorPaginationZendeskSupportStream):

airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py

+30-17
Original file line numberDiff line numberDiff line change
@@ -1047,8 +1047,19 @@ class TestTicketSubstream:
10471047
@pytest.mark.parametrize(
10481048
"stream_state, response, expected_slices",
10491049
[
1050-
({}, {"tickets": [{"id": "13"}, {"id": "80"}]}, [{"ticket_id": "13"}, {"ticket_id": "80"}]),
1051-
({"updated_at": "2024-04-17T19:34:06Z"}, {"tickets": [{"id": "80"}]}, [{"ticket_id": "80"}]),
1050+
(
1051+
{},
1052+
{"tickets": [{"id": "13"}, {"id": "80"}]},
1053+
[
1054+
{"ticket_id": "13", "updated_at": STREAM_ARGS["start_date"]},
1055+
{"ticket_id": "80", "updated_at": STREAM_ARGS["start_date"]},
1056+
],
1057+
),
1058+
(
1059+
{"updated_at": "2024-04-17T19:34:06Z"},
1060+
{"tickets": [{"id": "80"}]},
1061+
[{"ticket_id": "80", "updated_at": "2024-04-17T19:34:06Z"}],
1062+
),
10521063
({"updated_at": "2224-04-17T19:34:06Z"}, {"tickets": []}, []),
10531064
],
10541065
ids=[
@@ -1063,30 +1074,32 @@ def test_stream_slices(self, requests_mock, stream_state, response, expected_sli
10631074
assert list(stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state=stream_state)) == expected_slices
10641075

10651076
@pytest.mark.parametrize(
1066-
"stream_state, response, expected_records",
1077+
"stream_slice, response, expected_records",
10671078
[
1068-
({}, {"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}, [{"id": "test id", "updated_at": "2024-04-17T19:34:06Z"}]),
1069-
({}, {"updated_at": "1979-04-17T19:34:06Z", "id": "test id"}, []),
1079+
({"updated_at": "2024-05-17T19:34:06Z"}, {"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}, []),
10701080
(
1071-
{"updated_at": "2024-04-17T19:34:06Z"},
1072-
{"updated_at": "2024-04-18T19:34:06Z", "id": "test id"},
1073-
[{"updated_at": "2024-04-18T19:34:06Z", "id": "test id"}],
1081+
{"updated_at": "2024-03-17T19:34:06Z"},
1082+
{"updated_at": "2024-04-17T19:34:06Z", "id": "test id"},
1083+
[{"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}],
1084+
),
1085+
(
1086+
{},
1087+
{"updated_at": "1979-04-17T19:34:06Z", "id": "test id"},
1088+
[{"updated_at": "1979-04-17T19:34:06Z", "id": "test id"}],
10741089
),
1075-
({"updated_at": "2024-04-17T19:34:06Z"}, {"updated_at": "1979-04-18T19:34:06Z", "id": "test id"}, []),
10761090
],
10771091
ids=[
1078-
"read_without_state",
1079-
"read_without_state_cursor_older_then_start_date",
1080-
"read_with_state",
1081-
"read_with_state_cursor_older_then_state_value",
1092+
"read_with_slice_cursor_greater_than_record_cursor",
1093+
"read_with_slice_cursor_less_than_record_cursor",
1094+
"read_without_slice_cursor",
10821095
],
10831096
)
1084-
def test_ticket_metrics_parse_response(self, stream_state, response, expected_records):
1097+
def test_ticket_metrics_parse_response(self, stream_slice, response, expected_records):
10851098
stream = get_stream_instance(TicketMetrics, STREAM_ARGS)
10861099
mocked_response = Mock()
1087-
mocked_response.json.return_value = {"ticket_metric": {"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}}
1088-
records = list(stream.parse_response(mocked_response, stream_state=stream_state))
1089-
assert records == [{"id": "test id", "updated_at": "2024-04-17T19:34:06Z"}]
1100+
mocked_response.json.return_value = {"ticket_metric": response}
1101+
records = list(stream.parse_response(mocked_response, stream_state={}, stream_slice=stream_slice))
1102+
assert records == expected_records
10901103

10911104
def test_read_ticket_metrics_with_error(self, requests_mock):
10921105
stream = get_stream_instance(TicketMetrics, STREAM_ARGS)

docs/integrations/sources/zendesk-support.md

+2-1
Original file line numberDiff line numberDiff line change
@@ -163,7 +163,8 @@ The Zendesk connector ideally should not run into Zendesk API limitations under
163163
## Changelog
164164

165165
| Version | Date | Pull Request | Subject |
166-
| :------ | :--------- | :------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
166+
|:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
167+
| 2.6.4 | 2024-05-20 | [38310](https://github.com/airbytehq/airbyte/pull/38310) | Fix record filter for `Ticket Metrics` stream |
167168
| 2.6.3 | 2024-05-02 | [36669](https://github.com/airbytehq/airbyte/pull/36669) | Schema descriptions |
168169
| 2.6.2 | 2024-02-05 | [37761](https://github.com/airbytehq/airbyte/pull/37761) | Add stop condition for `Ticket Audits` when received old records; Ignore 403 and 404 status codes. |
169170
| 2.6.1 | 2024-04-30 | [37723](https://github.com/airbytehq/airbyte/pull/37723) | Add %Y-%m-%dT%H:%M:%S%z to cursor_datetime_formats |

0 commit comments

Comments (0)