This repository was archived by the owner on Dec 16, 2022. It is now read-only.

Commit d00ad66

epwalsh and dirkgr authored
Minor tqdm and logging clean up (#4448)
* fix formatting
* set tqdm descriptions in various places

Co-authored-by: Dirk Groeneveld <[email protected]>
1 parent 6acf205 commit d00ad66
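Every change in this commit passes a `desc` string to tqdm, which prefixes the progress bar with a human-readable label. A minimal sketch of the effect using the plain tqdm library (the diffs below go through AllenNLP's `Tqdm.tqdm` wrapper, which accepts the same keyword arguments):

# Plain-tqdm sketch of the `desc` keyword; AllenNLP's Tqdm.tqdm wrapper
# forwards keyword arguments like this one to the underlying tqdm bar.
from tqdm import tqdm

for _ in tqdm(range(100), desc="building vocab"):
    pass
# Renders roughly as: building vocab: 100%|##########| 100/100 [00:00<00:00, ...]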


4 files changed: +5 -5 lines changed


allennlp/commands/train.py (+1 -1)
@@ -236,7 +236,7 @@ def train_model(
     world_size = num_nodes * num_procs

     logging.info(
-        f"Switching to distributed training mode since multiple GPUs are configured"
+        f"Switching to distributed training mode since multiple GPUs are configured | "
         f"Master is at: {master_addr}:{master_port} | Rank of this node: {node_rank} | "
         f"Number of workers in this node: {num_procs} | Number of nodes: {num_nodes} | "
         f"World size: {world_size}"

allennlp/common/file_utils.py (+1 -1)
@@ -283,7 +283,7 @@ def _http_get(url: str, temp_file: IO) -> None:
     req = session.get(url, stream=True)
     content_length = req.headers.get("Content-Length")
     total = int(content_length) if content_length is not None else None
-    progress = Tqdm.tqdm(unit="B", total=total)
+    progress = Tqdm.tqdm(unit="B", total=total, desc="downloading")
     for chunk in req.iter_content(chunk_size=1024):
         if chunk:  # filter out keep-alive new chunks
             progress.update(len(chunk))
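For reference, a self-contained version of the same download pattern using plain requests + tqdm; the URL is a hypothetical placeholder. When the server sends no Content-Length header, total stays None and tqdm falls back to a raw byte counter with no percentage:

# Hedged, standalone sketch of the pattern in _http_get; URL is hypothetical.
import requests
from tqdm import tqdm

url = "https://example.com/archive.tar.gz"  # placeholder
with requests.get(url, stream=True) as req:
    content_length = req.headers.get("Content-Length")
    total = int(content_length) if content_length is not None else None
    with tqdm(unit="B", total=total, desc="downloading") as progress:
        for chunk in req.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive chunks
                progress.update(len(chunk))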

allennlp/data/dataset_readers/dataset_reader.py (+2 -2)
@@ -282,7 +282,7 @@ def _instances_to_cache_file(self, cache_filename, instances) -> None:
         # Then we just copy the file over to `cache_filename`.
         with CacheFile(cache_filename, mode="w+") as cache_handle:
             logger.info("Caching instances to temp file %s", cache_handle.name)
-            for instance in Tqdm.tqdm(instances):
+            for instance in Tqdm.tqdm(instances, desc="caching instances"):
                 cache_handle.write(self.serialize_instance(instance) + "\n")

     def text_to_instance(self, *inputs) -> Instance:
@@ -381,7 +381,7 @@ def _multi_worker_islice(

         islice = itertools.islice(iterable, start_index, self.max_instances, step_size)
         if wrap_with_tqdm:
-            islice = Tqdm.tqdm(islice)
+            islice = Tqdm.tqdm(islice, desc="reading instances")

         if transform is not None:
             return (transform(x) for x in islice)
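_multi_worker_islice hands tqdm a generator, so there is no length to display; the labeled bar shows a running count and rate instead of a percentage. A small sketch of that behavior (the slice bounds are illustrative, standing in for start_index, max_instances, and step_size):

# tqdm over a generator (no __len__) renders "Nit [elapsed, rate]" with no bar.
import itertools
from tqdm import tqdm

instances = ({"id": i} for i in itertools.count())  # endless fake instances
for instance in tqdm(itertools.islice(instances, 0, 1000, 2), desc="reading instances"):
    pass
# Renders roughly as: reading instances: 500it [00:00, 98765.43it/s]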

allennlp/data/vocabulary.py (+1 -1)
@@ -288,7 +288,7 @@ def from_instances(
         padding_token = padding_token if padding_token is not None else DEFAULT_PADDING_TOKEN
         oov_token = oov_token if oov_token is not None else DEFAULT_OOV_TOKEN
         namespace_token_counts: Dict[str, Dict[str, int]] = defaultdict(lambda: defaultdict(int))
-        for instance in Tqdm.tqdm(instances):
+        for instance in Tqdm.tqdm(instances, desc="building vocab"):
             instance.count_vocab_items(namespace_token_counts)

         return cls(
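Unrelated to the tqdm change but visible in the context above: namespace_token_counts is a two-level defaultdict, so count_vocab_items can increment counts without initializing namespaces or tokens first. A small self-contained illustration:

# Two-level defaultdict: missing keys spring into existence with count 0.
from collections import defaultdict
from typing import Dict

namespace_token_counts: Dict[str, Dict[str, int]] = defaultdict(lambda: defaultdict(int))
namespace_token_counts["tokens"]["the"] += 1
namespace_token_counts["tokens"]["the"] += 1
namespace_token_counts["labels"]["positive"] += 1
print(namespace_token_counts["tokens"]["the"])  # 2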

0 commit comments