
Commit 8add419

tdstein and Claude authored
feat: improve task polling with exponential backoff (#408)
Implements an improved task polling mechanism with configurable exponential backoff to reduce the number of API calls for long-running tasks. This helps to minimize resource consumption on the Connect server while still providing responsive feedback.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-authored-by: Claude <[email protected]>
1 parent 392214c commit 8add419
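
A minimal usage sketch of the new polling behavior (illustrative only; the server URL, API key, and task ID below are placeholders taken from the test fixtures, not values this commit prescribes):

from posit import connect

client = connect.Client("https://connect.example", "12345")  # placeholder server and API key
task = client.tasks.get("jXhOhdm5OOSkGhJw")                   # placeholder task ID

# Poll until the task finishes, waiting 1s, 1.5s, 2.25s, ... between requests, capped at 10s.
task.wait_for(initial_wait=1, max_wait=10, backoff=1.5)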

File tree

2 files changed: +104 -1 lines changed


src/posit/connect/tasks.py (+26 -1)
@@ -2,6 +2,8 @@
 
 from __future__ import annotations
 
+import time
+
 from typing_extensions import overload
 
 from . import resources
@@ -95,17 +97,40 @@ def update(self, *args, **kwargs) -> None:
         result = response.json()
         super().update(**result)
 
-    def wait_for(self) -> None:
+    def wait_for(self, *, initial_wait: int = 1, max_wait: int = 10, backoff: float = 1.5) -> None:
         """Wait for the task to finish.
 
+        Parameters
+        ----------
+        initial_wait : int, default 1
+            Initial wait time in seconds. First API request will use this as the wait parameter.
+        max_wait : int, default 10
+            Maximum wait time in seconds between polling requests.
+        backoff : float, default 1.5
+            Backoff multiplier for increasing wait times.
+
         Examples
         --------
         >>> task.wait_for()
         None
+
+        Notes
+        -----
+        This method implements an exponential backoff strategy to reduce the number of API calls
+        while waiting for long-running tasks. The first request uses the initial_wait value,
+        and subsequent requests increase the wait time by the backoff factor, up to max_wait.
+        To disable exponential backoff, set backoff to 1.0.
        """
+        wait_time = initial_wait
+
         while not self.is_finished:
             self.update()
 
+            # Wait client-side
+            time.sleep(wait_time)
+
+            # Calculate next wait time with backoff
+            wait_time = min(wait_time * backoff, max_wait)
+
 
 class Tasks(resources.Resources):
     @overload
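
The Notes section above describes the schedule informally; as a standalone sketch (not part of the commit), the wait times produced by the same min(wait_time * backoff, max_wait) update with the default parameters are:

# Reproduce the backoff schedule used by wait_for() with its default arguments.
initial_wait, max_wait, backoff = 1, 10, 1.5

wait_time = initial_wait
schedule = []
for _ in range(8):  # first eight polling intervals
    schedule.append(wait_time)
    wait_time = min(wait_time * backoff, max_wait)

print(schedule)
# [1, 1.5, 2.25, 3.375, 5.0625, 7.59375, 10, 10]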

tests/posit/connect/test_tasks.py (+78)
@@ -134,6 +134,84 @@ def test(self):
         assert mock_tasks_get[0].call_count == 1
         assert mock_tasks_get[1].call_count == 1
 
+    @responses.activate
+    @mock.patch("time.sleep", autospec=True)
+    def test_exponential_backoff(self, mock_sleep):
+        uid = "jXhOhdm5OOSkGhJw"
+
+        # behavior
+        mock_tasks_get = [
+            responses.get(
+                f"https://connect.example/__api__/v1/tasks/{uid}",
+                json={**load_mock_dict(f"v1/tasks/{uid}.json"), "finished": False},
+            ),
+            responses.get(
+                f"https://connect.example/__api__/v1/tasks/{uid}",
+                json={**load_mock_dict(f"v1/tasks/{uid}.json"), "finished": False},
+            ),
+            responses.get(
+                f"https://connect.example/__api__/v1/tasks/{uid}",
+                json={**load_mock_dict(f"v1/tasks/{uid}.json"), "finished": False},
+            ),
+            responses.get(
+                f"https://connect.example/__api__/v1/tasks/{uid}",
+                json={**load_mock_dict(f"v1/tasks/{uid}.json"), "finished": True},
+            ),
+        ]
+
+        # setup
+        c = connect.Client("https://connect.example", "12345")
+        task = c.tasks.get(uid)
+        assert not task.is_finished
+
+        # invoke
+        task.wait_for(initial_wait=1, max_wait=5, backoff=2.0)
+
+        # assert
+        assert task.is_finished
+        assert mock_tasks_get[0].call_count == 1
+        assert mock_tasks_get[1].call_count == 1
+
+        # Verify sleep calls
+        mock_sleep.assert_has_calls([mock.call(1), mock.call(2), mock.call(4)], any_order=False)
+
+    @responses.activate
+    @mock.patch("time.sleep", autospec=True)
+    def test_no_backoff(self, mock_sleep):
+        uid = "jXhOhdm5OOSkGhJw"
+
+        # behavior
+        mock_tasks_get = [
+            responses.get(
+                f"https://connect.example/__api__/v1/tasks/{uid}",
+                json={**load_mock_dict(f"v1/tasks/{uid}.json"), "finished": False},
+            ),
+            responses.get(
+                f"https://connect.example/__api__/v1/tasks/{uid}",
+                json={**load_mock_dict(f"v1/tasks/{uid}.json"), "finished": False},
+            ),
+            responses.get(
+                f"https://connect.example/__api__/v1/tasks/{uid}",
+                json={**load_mock_dict(f"v1/tasks/{uid}.json"), "finished": True},
+            ),
+        ]
+
+        # setup
+        c = connect.Client("https://connect.example", "12345")
+        task = c.tasks.get(uid)
+        assert not task.is_finished
+
+        # invoke
+        task.wait_for(initial_wait=2, max_wait=5, backoff=1.0)
+
+        # assert
+        assert task.is_finished
+        assert mock_tasks_get[0].call_count == 1
+        assert mock_tasks_get[1].call_count == 1
+
+        # Verify sleep calls
+        mock_sleep.assert_has_calls([mock.call(2), mock.call(2)], any_order=False)
+
 
 class TestTasksGet:
     @responses.activate
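
The asserted sleep sequences follow from the backoff rule: the first GET is issued by c.tasks.get(uid), and each subsequent polling iteration sleeps for the current wait time before multiplying it by backoff (capped at max_wait). A standalone sketch deriving the expected calls (expected_sleeps is a hypothetical helper, not part of the commit):

def expected_sleeps(polls, initial_wait, max_wait, backoff):
    """Wait times passed to time.sleep over `polls` polling iterations."""
    wait_time, sleeps = initial_wait, []
    for _ in range(polls):
        sleeps.append(wait_time)
        wait_time = min(wait_time * backoff, max_wait)
    return sleeps

print(expected_sleeps(3, initial_wait=1, max_wait=5, backoff=2.0))
# [1, 2.0, 4.0] -> matches mock.call(1), mock.call(2), mock.call(4)

print(expected_sleeps(2, initial_wait=2, max_wait=5, backoff=1.0))
# [2, 2.0] -> matches mock.call(2), mock.call(2)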
