Skip to content

Commit db483c1

Browse files
authored
Merge pull request #69 from AfricasVoices/s/live/production
Replace references to term "live" with "production"
2 parents 3976182 + 76235fd commit db483c1

File tree

1 file changed

+14
-12
lines changed

1 file changed

+14
-12
lines changed

rapid_pro_tools/rapid_pro_client.py

Lines changed: 14 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -224,18 +224,19 @@ def get_raw_messages(self, created_after_inclusive=None, created_before_exclusiv
224224
)
225225

226226
log.info(f"Fetching messages from production Rapid Pro instance...")
227-
live_messages = self.rapid_pro.get_messages(after=created_after_inclusive, before=created_before_inclusive)\
227+
production_messages = self.rapid_pro.get_messages(after=created_after_inclusive, before=created_before_inclusive)\
228228
.all(retry_on_rate_exceed=True)
229229

230-
raw_messages = archived_messages + live_messages
231-
log.info(f"Fetched {len(raw_messages)} messages ({len(archived_messages)} from archives, {len(live_messages)} from production)")
230+
raw_messages = archived_messages + production_messages
231+
log.info(f"Fetched {len(raw_messages)} messages ({len(archived_messages)} from archives, "
232+
f"{len(production_messages)} from production)")
232233

233234
# Check that we only see each message once.
234235
seen_message_ids = set()
235236
for message in raw_messages:
236-
assert message.id not in seen_message_ids, f"Duplicate message {message.id} found in the downloaded data. This could be " \
237-
f"because a message with this id exists in both the archives and the live " \
238-
f"database."
237+
assert message.id not in seen_message_ids, f"Duplicate message {message.id} found in the downloaded data. " \
238+
f"This could be because a message with this id exists in both " \
239+
f"the archives and the production database."
239240
seen_message_ids.add(message.id)
240241

241242
if raw_export_log_file is not None:
@@ -337,7 +338,7 @@ def _get_archived_runs_for_flow_id(self, flow_id, last_modified_after_inclusive=
337338
def get_raw_runs_for_flow_id(self, flow_id, last_modified_after_inclusive=None, last_modified_before_exclusive=None,
338339
raw_export_log_file=None, ignore_archives=False):
339340
"""
340-
Gets the raw runs for the given flow_id from Rapid Pro's live database and, if needed, from its archives.
341+
Gets the raw runs for the given flow_id from Rapid Pro's production database and, if needed, from its archives.
341342
342343
:param flow_id: Id of the flow to download the runs of.
343344
:type flow_id: str
@@ -375,21 +376,22 @@ def get_raw_runs_for_flow_id(self, flow_id, last_modified_after_inclusive=None,
375376
)
376377

377378
log.info(f"Fetching runs from production Rapid Pro instance...")
378-
live_runs = self.rapid_pro.get_runs(
379+
production_runs = self.rapid_pro.get_runs(
379380
flow=flow_id, after=last_modified_after_inclusive, before=last_modified_before_inclusive
380381
).all(retry_on_rate_exceed=True)
381382

382-
raw_runs = archived_runs + live_runs
383-
log.info(f"Fetched {len(raw_runs)} runs ({len(archived_runs)} from archives, {len(live_runs)} from production)")
383+
raw_runs = archived_runs + production_runs
384+
log.info(f"Fetched {len(raw_runs)} runs ({len(archived_runs)} from archives, "
385+
f"{len(production_runs)} from production)")
384386

385387
# Check that we only see each run once. This shouldn't be possible, due to
386388
# https://github.com/nyaruka/rp-archiver/blob/7d3430b5260fa92abb62d828fc526af8e9d9d50a/archiver.go#L624,
387389
# but this check exists to be safe.
388390
seen_run_ids = set()
389391
for run in raw_runs:
390392
assert run.id not in seen_run_ids, f"Duplicate run {run.id} found in the downloaded data. This could be " \
391-
f"because a run with this id exists in both the archives and the live " \
392-
f"database."
393+
f"because a run with this id exists in both the archives and the " \
394+
f"production database."
393395
seen_run_ids.add(run.id)
394396

395397
if raw_export_log_file is not None:

0 commit comments

Comments (0)