Import for MPII Human Pose Dataset #584
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged

Changes shown from 6 commits. Commits in this pull request (34):
- e44605d import for MPII datset format (yasakova-anastasia)
- 6856538 add numpy annotation files (yasakova-anastasia)
- dd16038 fix pylint (yasakova-anastasia)
- e668e5b update documentation (yasakova-anastasia)
- 2003bbc update Changelog (yasakova-anastasia)
- 79fb8c5 small fix (yasakova-anastasia)
- 0d0d6e1 make MpiiPointsCategories more readable (yasakova-anastasia)
- 6bd9c96 fixes (yasakova-anastasia)
- f92e641 update branch (yasakova-anastasia)
- 15ec4a4 fix documentation (yasakova-anastasia)
- fd430be fixes (yasakova-anastasia)
- 9c6fc7c style fixes (yasakova-anastasia)
- ca84473 fixes (yasakova-anastasia)
- 461bb7e fix documentation (yasakova-anastasia)
- 568ee31 fixes (yasakova-anastasia)
- 639e1e3 mpii -> mpii_json (yasakova-anastasia)
- 310257f add import for original MPII (yasakova-anastasia)
- e99a0a2 add tests (yasakova-anastasia)
- 7887620 update documentation (yasakova-anastasia)
- e701511 fix mpii_json (yasakova-anastasia)
- cf0b7a6 Merge branch 'develop' into ay/mpii-format (yasakova-anastasia)
- fca2fc7 update Changelog (yasakova-anastasia)
- ce2b41f fix pylint (yasakova-anastasia)
- c1dd22e fixes (yasakova-anastasia)
- cd7e7e3 fix documentation (yasakova-anastasia)
- aa1d5da fixes (yasakova-anastasia)
- 0918968 fix mpii json (yasakova-anastasia)
- 2bb5bc0 fixes (yasakova-anastasia)
- 1ec42e5 fix pylint (yasakova-anastasia)
- f3e3ecb Movel mpii formats to a separate module
- a394a72 Style fixes
- 801e1e2 Merge branch 'develop' into ay/mpii-format
- dddb44a Fix import
- 2e62db8 Merge branch 'ay/mpii-format' of https://github.com/openvinotoolkit/d…
New file: the MPII format plugin (extractor and importer), 113 lines added (@@ -0,0 +1,113 @@):

```python
# Copyright (C) 2021 Intel Corporation
#
# SPDX-License-Identifier: MIT

import json
import os.path as osp

import numpy as np

from datumaro.components.annotation import Bbox, Points, PointsCategories
from datumaro.components.extractor import (
    AnnotationType, DatasetItem, Importer, SourceExtractor,
)
from datumaro.components.format_detection import FormatDetectionContext
from datumaro.components.media import Image


class MpiiPath:
    ANNOTATION_FILE = 'mpii_annotations.json'
    HEADBOXES_FILE = 'mpii_headboxes.npy'
    VISIBILITY_FILE = 'jnt_visible.npy'
    POS_GT_FILE = 'mpii_pos_gt.npy'

MpiiPointsCategories = [(0, ['r_ankle']), (1, ['r_knee']), (2, ['r_hip']),
    (3, ['l_hip']), (4, ['l_knee']), (5, ['l_ankle']), (6, ['pelvis']),
    (7, ['thorax']), (8, ['upper_neck']), (9, ['head top']), (10, ['r_wrist']),
    (11, ['r_elbow']), (12, ['r_shoulder']), (13, ['l_shoulder']),
    (14, ['l_elbow']), (15, ['l_wrist'])]

class MpiiExtractor(SourceExtractor):
    def __init__(self, path):
        if not osp.isfile(path):
            raise FileNotFoundError("Can't read annotation file '%s'" % path)

        super().__init__()

        self._categories = { AnnotationType.points:
            PointsCategories.from_iterable(MpiiPointsCategories) }

        self._items = list(self._load_items(path).values())

    def _load_items(self, path):
        items = {}

        root_dir = osp.dirname(path)

        hb_path = osp.join(root_dir, MpiiPath.HEADBOXES_FILE)
        if osp.isfile(hb_path):
            headboxes = np.load(hb_path)
        else:
            headboxes = []

        vis_path = osp.join(root_dir, MpiiPath.VISIBILITY_FILE)
        if osp.isfile(vis_path):
            visibility = np.load(vis_path).T
        else:
            visibility = []

        pos_gt_path = osp.join(root_dir, MpiiPath.POS_GT_FILE)
        if osp.isfile(pos_gt_path):
            gt_pose = np.transpose(np.load(pos_gt_path), (2, 0, 1))
        else:
            gt_pose = []

        with open(path) as f:
            for i, ann in enumerate(json.load(f)):
                item_id = osp.splitext(ann.get('img_paths'))[0]

                center = ann.get('objpos')
                scale = float(ann.get('scale_provided'))

                if np.size(gt_pose):
                    points = gt_pose[i]
                    points = points.reshape(points.shape[0] * points.shape[1])

                    if np.size(visibility):
                        vis = visibility[i]
                    else:
                        vis = np.ones(len(points) // 2, dtype=np.int8)
                else:
                    keypoints = np.array(ann.get('joint_self'))
                    keypoints = keypoints.reshape(
                        keypoints.shape[0] * keypoints.shape[1])
                    points = [p for i, p in enumerate(keypoints) if i % 3 != 2]

                    vis = keypoints[2::3]
                    if np.size(visibility):
                        vis = visibility[i]

                vis = [int(val) for val in vis]

                annotations = [Points(points, vis,
                    attributes={'center': center, 'scale': scale})]

                if np.size(headboxes):
                    bbox = headboxes[:, :, i]
                    annotations.append(Bbox(bbox[0][0], bbox[0][1],
                        bbox[1][0] - bbox[0][0], bbox[1][1] - bbox[0][1]))

                items[item_id] = DatasetItem(id=item_id, subset=self._subset,
                    image=Image(path=osp.join(root_dir, ann.get('img_paths'))),
                    annotations=annotations)

        return items

class MpiiImporter(Importer):
    @classmethod
    def find_sources(cls, path):
        return cls._find_sources_recursive(path, '.json', 'mpii')

    @classmethod
    def detect(cls, context: FormatDetectionContext) -> None:
        context.require_file(MpiiPath.ANNOTATION_FILE)
```
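
A quick way to exercise the new plugin from a review checkout. This snippet is not part of the diff; it only uses `Dataset.import_from` as documented below, and the item/annotation iteration is the usual Datumaro pattern, so treat the attribute access as an assumption to verify:

```python
from datumaro.components.dataset import Dataset
from datumaro.components.extractor import AnnotationType

# Point this at a directory containing mpii_annotations.json;
# the bundled test asset from this PR works as a smoke test.
dataset = Dataset.import_from(
    'tests/assets/mpii_dataset/dataset_with_numpy_files', 'mpii')

for item in dataset:
    for ann in item.annotations:
        if ann.type == AnnotationType.points:
            # 'center' and 'scale' are carried as attributes on the Points annotation.
            print(item.id, ann.attributes['center'], ann.attributes['scale'])
```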
New documentation page for the format, 88 lines added (@@ -0,0 +1,88 @@):

---
title: 'MPII Human Pose Dataset'
linkTitle: 'MPII Human Pose Dataset'
description: ''
weight: 1
---

## Format specification

The original MPII Human Pose Dataset is available
[here](http://human-pose.mpi-inf.mpg.de/#overview).

Supported annotation types:
- `Bbox`
- `Points`

Supported attributes:
- `center` (list)
- `scale` (float)
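
For concreteness, this is roughly what a single imported person looks like in Datumaro terms. The values are copied from the test assets added in this PR, and the constructor signatures match the test file below:

```python
from datumaro.components.annotation import Bbox, Points

# 16 MPII joints flattened as [x0, y0, x1, y1, ...] plus per-joint visibility flags.
person = Points(
    [620.0, 394.0, 616.0, 269.0, 573.0, 185.0, 647.0, 188.0,
     661.0, 221.0, 656.0, 231.0, 610.0, 187.0, 647.0, 176.0,
     637.02, 189.818, 695.98, 108.182, 606.0, 217.0, 553.0, 161.0,
     601.0, 167.0, 692.0, 185.0, 693.0, 240.0, 688.0, 313.0],
    [1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1],
    attributes={'center': [594.0, 257.0],   # person center, [x, y]
                'scale': 3.021})            # person scale factor
head_box = Bbox(615, 218.65, 288.4, 286.95)  # head box as x, y, w, h
```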
## Import MPII Human Pose Dataset

Datumaro does not support the original `MATLAB` annotation files; the
`JSON` and `NumPy` versions of the annotations are supported instead.

A Datumaro project with an MPII Human Pose Dataset source can be
created in the following way:

```bash
datum create
datum import --format mpii <path/to/dataset>
```

It is also possible to import the dataset using the Python API:

```python
from datumaro.components.dataset import Dataset

mpii_dataset = Dataset.import_from('<path/to/dataset>', 'mpii')
```

The MPII Human Pose Dataset directory should have the following structure:

<!--lint disable fenced-code-flag-->
```
dataset/
├── jnt_visible.npy # optional
├── mpii_annotations.json
├── mpii_headboxes.npy # optional
├── mpii_pos_gt.npy # optional
├── 000000001.jpg
├── 000000002.jpg
├── 000000003.jpg
└── ...
```
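
The expected layout of the optional NumPy files is not spelled out in the docs. The shapes below are inferred from how the extractor indexes them (`headboxes[:, :, i]`, `np.load(...).T[i]`, and `np.transpose(..., (2, 0, 1))[i]`), so treat this sketch as an assumption to verify against real MPII conversions:

```python
import numpy as np

num_images, num_joints = 3, 16  # matches the bundled test assets

# Head boxes: two corner points (upper-left, lower-right) per image.
headboxes = np.zeros((2, 2, num_images))        # read as headboxes[:, :, i]
# Per-joint visibility, stored joint-major; the extractor transposes it on load.
jnt_visible = np.ones((num_joints, num_images), dtype=np.int8)
# Ground-truth joint positions; transposed to (image, joint, xy) on load.
mpii_pos_gt = np.zeros((num_joints, 2, num_images))

np.save('mpii_headboxes.npy', headboxes)
np.save('jnt_visible.npy', jnt_visible)
np.save('mpii_pos_gt.npy', mpii_pos_gt)
```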
## Export to other formats

Datumaro can convert an MPII Human Pose Dataset into any other format
[Datumaro supports](/docs/user-manual/supported_formats/).
To get the expected result, convert the dataset to a format
that supports bounding boxes or points.

There are several ways to convert an MPII Human Pose Dataset
to other dataset formats using the CLI:

```bash
datum create
datum import -f mpii <path/to/dataset>
datum export -f voc -o ./save_dir -- --save-images
# or
datum convert -if mpii -i <path/to/dataset> \
    -f voc -o <output/dir> -- --save-images
```

Or, using the Python API:

```python
from datumaro.components.dataset import Dataset

dataset = Dataset.import_from('<path/to/dataset>', 'mpii')
dataset.export('save_dir', 'voc')
```

## Examples

Examples of using this format from the code can be found in
[the format tests](https://github.com/openvinotoolkit/datumaro/blob/develop/tests/test_mpii_format.py).
New test asset: `tests/assets/mpii_dataset/dataset_with_numpy_files/mpii_annotations.json` (1 addition):

```json
[{"dataset": "MPI","isValidation": 0.000,"img_paths": "000000001.jpg","img_width": 1280.000,"img_height": 720.000,"objpos": [594.000,257.000],"joint_self": [[620.000,394.000,1.000],[616.000,269.000,1.000],[573.000,185.000,1.000],[647.000,188.000,0.000],[661.000,221.000,1.000],[656.000,231.000,1.000],[610.000,187.000,0.000],[647.000,176.000,1.000],[637.020,189.818,1.000],[695.980,108.182,1.000],[606.000,217.000,1.000],[553.000,161.000,1.000],[601.000,167.000,1.000],[692.000,185.000,1.000],[693.000,240.000,1.000],[688.000,313.000,1.000]],"scale_provided": 3.021}, {"dataset": "MPI","isValidation": 0.000,"img_paths": "000000002.jpg","img_width": 1280.000,"img_height": 720.000,"objpos": [624.000,287.000],"joint_self": [[650.000,424.000,1.000],[646.000,309.000,1.000],[603.000,215.000,1.000],[677.000,218.000,1.000],[691.000,251.000,0.000],[686.000,261.000,1.000],[640.000,217.000,0.000],[677.000,216.000,1.000],[667.020,219.818,1.000],[725.980,138.182,1.000],[636.000,247.000,1.000],[583.000,191.000,1.000],[631.000,197.000,1.000],[722.000,215.000,1.000],[723.000,270.000,1.000],[718.000,343.000,1.000]],"scale_provided": 3.7}, {"dataset": "MPI","isValidation": 0.000,"img_paths": "000000003.jpg","img_width": 1280.000,"img_height": 720.000,"objpos": [564.000,227.000],"joint_self": [[590.000,364.000,1.000],[586.000,239.000,1.000],[533.000,155.000,1.000],[617.000,158.000,1.000],[631.000,191.000,1.000],[626.000,201.000,1.000],[580.000,157.000,1.000],[617.000,146.000,1.000],[607.020,159.818,1.000],[645.980,68.182,1.000],[576.000,187.000,1.000],[532.000,131.000,1.000],[571.000,137.000,1.000],[662.000,155.000,0.000],[663.000,210.000,1.000],[658.000,283.000,1.000]],"scale_provided": 3.2}]
```
Binary file added (BIN, +224 Bytes): `tests/assets/mpii_dataset/dataset_with_numpy_files/mpii_headboxes.npy`. The remaining binary `.npy` assets are likewise not displayable in the diff view.
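The size is consistent with the layout the extractor expects: assuming the default float64 dtype, a `(2, 2, 3)` array (two corners, x/y coordinates, three test images) takes 2 * 2 * 3 * 8 = 96 bytes of data plus a 128-byte `.npy` header, i.e. exactly 224 bytes.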
New test asset: `tests/assets/mpii_dataset/dataset_wo_numpy_files/mpii_annotations.json` (1 addition):

```json
[{"dataset": "MPI","isValidation": 0.000,"img_paths": "000000001.jpg","img_width": 1280.000,"img_height": 720.000,"objpos": [594.000,257.000],"joint_self": [[620.000,394.000,1.000],[616.000,269.000,1.000],[573.000,185.000,1.000],[647.000,188.000,0.000],[661.000,221.000,1.000],[656.000,231.000,1.000],[610.000,187.000,0.000],[647.000,176.000,1.000],[637.020,189.818,1.000],[695.980,108.182,1.000],[606.000,217.000,1.000],[553.000,161.000,1.000],[601.000,167.000,1.000],[692.000,185.000,1.000],[693.000,240.000,1.000],[688.000,313.000,1.000]],"scale_provided": 3.021}, {"dataset": "MPI","isValidation": 0.000,"img_paths": "000000002.jpg","img_width": 1280.000,"img_height": 720.000,"objpos": [624.000,287.000],"joint_self": [[650.000,424.000,1.000],[646.000,309.000,1.000],[603.000,215.000,1.000],[677.000,218.000,1.000],[691.000,251.000,0.000],[686.000,261.000,1.000],[640.000,217.000,0.000],[677.000,216.000,1.000],[667.020,219.818,1.000],[725.980,138.182,1.000],[636.000,247.000,1.000],[583.000,191.000,1.000],[631.000,197.000,1.000],[722.000,215.000,1.000],[723.000,270.000,1.000],[718.000,343.000,1.000]],"scale_provided": 3.7}, {"dataset": "MPI","isValidation": 0.000,"img_paths": "000000003.jpg","img_width": 1280.000,"img_height": 720.000,"objpos": [564.000,227.000],"joint_self": [[590.000,364.000,1.000],[586.000,239.000,1.000],[533.000,155.000,1.000],[617.000,158.000,1.000],[631.000,191.000,1.000],[626.000,201.000,1.000],[580.000,157.000,1.000],[617.000,146.000,1.000],[607.020,159.818,1.000],[645.980,68.182,1.000],[576.000,187.000,1.000],[532.000,131.000,1.000],[571.000,137.000,1.000],[662.000,155.000,0.000],[663.000,210.000,1.000],[658.000,283.000,1.000]],"scale_provided": 3.2}]
```
New test file, 122 lines added (@@ -0,0 +1,122 @@):

```python
from unittest import TestCase
import os.path as osp

import numpy as np

from datumaro.components.annotation import Bbox, Points, PointsCategories
from datumaro.components.dataset import Dataset
from datumaro.components.environment import Environment
from datumaro.components.extractor import AnnotationType, DatasetItem
from datumaro.plugins.mpii_format import MpiiImporter, MpiiPointsCategories
from datumaro.util.test_utils import compare_datasets

from .requirements import Requirements, mark_requirement

DUMMY_DATASET_DIR_WITH_NUMPY_FILES = osp.join(osp.dirname(__file__), 'assets',
    'mpii_dataset', 'dataset_with_numpy_files')
DUMMY_DATASET_DIR_WO_NUMPY_FILES = osp.join(osp.dirname(__file__), 'assets',
    'mpii_dataset', 'dataset_wo_numpy_files')

class MpiiImporterTest(TestCase):
    @mark_requirement(Requirements.DATUM_580)
    def test_can_import_dataset_with_numpy_files(self):
        expected_dataset = Dataset.from_iterable([
            DatasetItem(id='000000001', image=np.ones((5, 5, 3)),
                annotations=[
                    Points([620.0, 394.0, 616.0, 269.0, 573.0, 185.0, 647.0,
                            188.0, 661.0, 221.0, 656.0, 231.0, 610.0, 187.0,
                            647.0, 176.0, 637.02, 189.818, 695.98, 108.182,
                            606.0, 217.0, 553.0, 161.0, 601.0, 167.0, 692.0,
                            185.0, 693.0, 240.0, 688.0, 313.0],
                        [1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                        attributes={'center': [594.000, 257.000], 'scale': 3.021}),
                    Bbox(615, 218.65, 288.4, 286.95)
                ]
            ),
            DatasetItem(id='000000002', image=np.ones((5, 5, 3)),
                annotations=[
                    Points([650.0, 424.0, 646.0, 309.0, 603.0, 215.0, 677.0,
                            218.0, 691.0, 251.0, 686.0, 261.0, 640.0, 217.0,
                            677.0, 216.0, 667.02, 219.818, 725.98, 138.182,
                            636.0, 247.0, 583.0, 191.0, 631.0, 197.0, 722.0,
                            215.0, 723.0, 270.0, 718.0, 343.0],
                        [1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                        attributes={'center': [624.000, 287.000], 'scale': 3.7}),
                    Bbox(101.1, 33.3, 113.9, 81.4)
                ]
            ),
            DatasetItem(id='000000003', image=np.ones((5, 5, 3)),
                annotations=[
                    Points([590.0, 364.0, 586.0, 239.0, 533.0, 155.0, 617.0,
                            158.0, 631.0, 191.0, 626.0, 201.0, 580.0, 157.0,
                            617.0, 146.0, 607.02, 159.818, 645.98, 68.182,
                            576.0, 187.0, 532.0, 131.0, 571.0, 137.0, 662.0,
                            155.0, 663.0, 210.0, 658.0, 283.0],
                        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1],
                        attributes={'center': [564.000, 227.000], 'scale': 3.2}),
                    Bbox(313.3, 512.43, 220.7, 121.57)
                ]
            )
        ], categories={
            AnnotationType.points:
                PointsCategories.from_iterable(MpiiPointsCategories)
        })

        dataset = Dataset.import_from(DUMMY_DATASET_DIR_WITH_NUMPY_FILES, 'mpii')

        compare_datasets(self, expected_dataset, dataset, require_images=True)

    @mark_requirement(Requirements.DATUM_580)
    def test_can_import_dataset_wo_numpy_files(self):
        expected_dataset = Dataset.from_iterable([
            DatasetItem(id='000000001', image=np.ones((5, 5, 3)),
                annotations=[
                    Points([620.0, 394.0, 616.0, 269.0, 573.0, 185.0, 647.0,
                            188.0, 661.0, 221.0, 656.0, 231.0, 610.0, 187.0,
                            647.0, 176.0, 637.02, 189.818, 695.98, 108.182,
                            606.0, 217.0, 553.0, 161.0, 601.0, 167.0, 692.0,
                            185.0, 693.0, 240.0, 688.0, 313.0],
                        [1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                        attributes={'center': [594.000, 257.000], 'scale': 3.021})
                ]
            ),
            DatasetItem(id='000000002', image=np.ones((5, 5, 3)),
                annotations=[
                    Points([650.0, 424.0, 646.0, 309.0, 603.0, 215.0, 677.0,
                            218.0, 691.0, 251.0, 686.0, 261.0, 640.0, 217.0,
                            677.0, 216.0, 667.02, 219.818, 725.98, 138.182,
                            636.0, 247.0, 583.0, 191.0, 631.0, 197.0, 722.0,
                            215.0, 723.0, 270.0, 718.0, 343.0],
                        [1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                        attributes={'center': [624.000, 287.000], 'scale': 3.7})
                ]
            ),
            DatasetItem(id='000000003', image=np.ones((5, 5, 3)),
                annotations=[
                    Points([590.0, 364.0, 586.0, 239.0, 533.0, 155.0, 617.0,
                            158.0, 631.0, 191.0, 626.0, 201.0, 580.0, 157.0,
                            617.0, 146.0, 607.02, 159.818, 645.98, 68.182,
                            576.0, 187.0, 532.0, 131.0, 571.0, 137.0, 662.0,
                            155.0, 663.0, 210.0, 658.0, 283.0],
                        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1],
                        attributes={'center': [564.000, 227.000], 'scale': 3.2})
                ]
            )
        ], categories={
            AnnotationType.points:
                PointsCategories.from_iterable(MpiiPointsCategories)
        })

        dataset = Dataset.import_from(DUMMY_DATASET_DIR_WO_NUMPY_FILES, 'mpii')

        compare_datasets(self, expected_dataset, dataset, require_images=True)

    @mark_requirement(Requirements.DATUM_580)
    def test_can_detect_dataset_with_numpy_files(self):
        detected_formats = Environment().detect_dataset(
            DUMMY_DATASET_DIR_WITH_NUMPY_FILES)
        self.assertEqual([MpiiImporter.NAME], detected_formats)

    @mark_requirement(Requirements.DATUM_580)
    def test_can_detect_dataset_wo_numpy_files(self):
        detected_formats = Environment().detect_dataset(
            DUMMY_DATASET_DIR_WO_NUMPY_FILES)
        self.assertEqual([MpiiImporter.NAME], detected_formats)
```
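
To run just these cases from a checkout of the branch (assuming the repository's usual pytest-based test runner and the test module path referenced in the documentation above):

```bash
pytest tests/test_mpii_format.py -v
```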