Skip to content

Commit f17a058

Browse files
authored
Merge branch 'master' into dependabot/pip/jinja2-3.1.6
2 parents 596fcfd + 5b961f7 commit f17a058

File tree

245 files changed

+13434
-334483
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

245 files changed

+13434
-334483
lines changed

CMakeLists.txt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,15 @@ cmake_minimum_required(VERSION 3.20)
44
# SHAPEWORKS VERSION
55
###########################################
66
SET(SHAPEWORKS_MAJOR_VERSION 6 CACHE INTERNAL "Major version number" FORCE)
7-
SET(SHAPEWORKS_MINOR_VERSION 6 CACHE INTERNAL "Minor version number" FORCE)
7+
SET(SHAPEWORKS_MINOR_VERSION 7 CACHE INTERNAL "Minor version number" FORCE)
88
SET(SHAPEWORKS_PATCH_VERSION 0 CACHE INTERNAL "Patch version number" FORCE)
9-
SET(SHAPEWORKS_VERSION_STRING "6.6.0-dev")
9+
SET(SHAPEWORKS_VERSION_STRING "6.7.0-dev")
1010
SET(SHAPEWORKS_VERSION "${SHAPEWORKS_MAJOR_VERSION}.${SHAPEWORKS_MINOR_VERSION}.${SHAPEWORKS_PATCH_VERSION}")
1111

1212
# First, check that files were checked out properly using git-lfs
1313
file(MD5 "${CMAKE_CURRENT_SOURCE_DIR}/Testing/data/icp_baseline.nrrd" HASH)
1414
if (NOT "${HASH}" STREQUAL "bb94438a695c749b264180019abbbb97")
15-
message( FATAL_ERROR "MD5 hash of '${CMAKE_CURRENT_SOURCE_DIR}/Testing/data/icp_baseline.nrrd' is incorrect. This most likely means that git-lfs was not installed when ShapeWorks was cloned." )
15+
message( FATAL_ERROR "MD5 hash of '${CMAKE_CURRENT_SOURCE_DIR}/Testing/data/icp_baseline.nrrd' is incorrect. This most likely means that git-lfs was not installed when ShapeWorks was cloned. If you have downloaded a zip or tar.gz of the source, then it will be missing Git-LFS objects. Please use git to clone ShapeWorks or download a source archive containing the Git-LFS objects under releases." )
1616
endif()
1717

1818
set(CMAKE_CXX_STANDARD 17) # available options are [98, 11, 14, 17, 20]

Examples/Python/Data/ellipsoid-v0.zip

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/Data/ellipsoid_05.vtk

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/Data/ellipsoid_07.vtk

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/Data/ellipsoid_1mode.zip

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/Data/ellipsoid_fd-v0.zip

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/Data/seg.ellipsoid_14.nrrd

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/Data/seg.ellipsoid_17.nrrd

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/Data/seg.ellipsoid_19.nrrd

Lines changed: 0 additions & 3 deletions
This file was deleted.

Examples/Python/ellipsoid_multiple_domain.py

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -40,9 +40,7 @@ def Run_Pipeline(args):
4040
else:
4141
dataset_name = "ellipsoid_multiple_domain"
4242
sw.download_dataset(dataset_name, output_directory)
43-
dataset_name = "ellipsoid_joint_rotation"
44-
file_list = sorted(glob.glob(output_directory +
45-
dataset_name + "/segmentations/*.nrrd"))
43+
file_list = sorted(glob.glob(output_directory + "/segmentations/*.nrrd"))
4644

4745
if args.use_subsample:
4846
inputImages =[sw.Image(filename) for filename in file_list]
@@ -152,6 +150,10 @@ def Run_Pipeline(args):
152150
reference = [domain1_reference,domain2_reference]
153151
ref_name = [domain1_ref_name,domain2_ref_name]
154152

153+
# Create a combined mesh for the global alignment
154+
combined_reference = domain1_reference.toMesh(0.5)
155+
combined_reference += domain2_reference.toMesh(0.5)
156+
155157
"""
156158
Grooming Step 3: Rigid alignment
157159
Now we can loop over all of the segmentations again to find the rigid
@@ -172,6 +174,15 @@ def Run_Pipeline(args):
172174
reference[d], iso_value, icp_iterations)
173175
rigid_transform = sw.utils.getVTKtransform(rigidTransform)
174176
transforms.append(rigid_transform)
177+
178+
combined_mesh = shape_seg_list[i*domains_per_shape+d].toMesh(0.5)
179+
for d in range(domains_per_shape):
180+
# skip the first domain
181+
if d == 0:
182+
continue
183+
combined_mesh += shape_seg_list[i*domains_per_shape+d].toMesh(0.5)
184+
transform = combined_mesh.createTransform(combined_reference, sw.Mesh.AlignmentType.Rigid, 100)
185+
transforms.append(transform)
175186

176187
"""
177188
Grooming Step 4: Converting segmentations to smooth signed distance transforms.
@@ -225,8 +236,9 @@ def Run_Pipeline(args):
225236
for d in range(domains_per_shape):
226237
rel_seg_files += sw.utils.get_relative_paths([os.getcwd() + '/' + file_list[i*domains_per_shape+d]], project_location)
227238
rel_groom_files += sw.utils.get_relative_paths([os.getcwd() + '/' + dt_files[i*domains_per_shape+d]], project_location)
228-
transform.append(transforms[i*domains_per_shape+d].flatten())
229-
239+
transform.append(transforms[i*(domains_per_shape+1)+d].flatten())
240+
# add the global alignment transform
241+
transform.append(transforms[i*(domains_per_shape+1)].flatten())
230242
subject.set_groomed_transforms(transform)
231243
subject.set_groomed_filenames(rel_groom_files)
232244
subject.set_original_filenames(rel_seg_files)

Examples/Python/ellipsoid_multiple_domain_mesh.py

Lines changed: 61 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -13,8 +13,10 @@
1313
import glob
1414
import shapeworks as sw
1515
import AnalyzeUtils
16-
import numpy as np
16+
import numpy as np
1717
import subprocess
18+
19+
1820
def Run_Pipeline(args):
1921
print("\nStep 1. Acquire Data\n")
2022
"""
@@ -33,16 +35,16 @@ def Run_Pipeline(args):
3335
sw.download_dataset(dataset_name, output_directory)
3436
dataset_name = "ellipsoid_joint_rotation"
3537
mesh_files = sorted(glob.glob(output_directory +
36-
dataset_name + "/meshes/*.vtk"))[:6]
38+
dataset_name + "/meshes/*.vtk"))[:6]
3739
# Else download the entire dataset
3840
else:
3941
dataset_name = "ellipsoid_multiple_domain_mesh"
4042
sw.download_dataset(dataset_name, output_directory)
4143
mesh_files = sorted(glob.glob(output_directory + "/meshes/*.vtk"))
4244

4345
if args.use_subsample:
44-
inputMeshes =[sw.Mesh(filename) for filename in mesh_files]
45-
sample_idx = sw.data.sample_meshes(inputMeshes, int(args.num_subsample),domains_per_shape=2)
46+
inputMeshes = [sw.Mesh(filename) for filename in mesh_files]
47+
sample_idx = sw.data.sample_meshes(inputMeshes, int(args.num_subsample), domains_per_shape=2)
4648
mesh_files = [mesh_files[i] for i in sample_idx]
4749

4850
print("\nStep 2. Groom - Data Pre-processing\n")
@@ -76,7 +78,7 @@ def Run_Pipeline(args):
7678
mesh_names.append(mesh_name)
7779
# get domain identifiers
7880
domain_ids.append(mesh_name.split(".")[0].split("_")[-1])
79-
81+
8082
# load mesh
8183
mesh = sw.Mesh(mesh_file)
8284
# do initial grooming steps
@@ -85,12 +87,11 @@ def Run_Pipeline(args):
8587
# append to the mesh list
8688
mesh_list.append(mesh)
8789

88-
89-
#domain identifiers for all shapes
90+
# domain identifiers for all shapes
9091
domain_ids = np.array(domain_ids)
91-
#shape index for all shapes in domain 1
92+
# shape index for all shapes in domain 1
9293
domain1_indx = list(np.where(domain_ids == 'd1')[0])
93-
#shape index for all shapes in domain 2
94+
# shape index for all shapes in domain 2
9495
domain2_indx = list(np.where(domain_ids == 'd2')[0])
9596
"""
9697
Grooming Step 2: Select a reference
@@ -99,7 +100,7 @@ def Run_Pipeline(args):
99100
"""
100101
domains_per_shape = 2
101102
domain_1_meshes = []
102-
# get domain 1 shapes
103+
# get domain 1 shapes
103104
for i in range(int(len(mesh_list)/domains_per_shape)):
104105
domain_1_meshes.append(mesh_list[i*domains_per_shape])
105106

@@ -108,8 +109,13 @@ def Run_Pipeline(args):
108109
domain2_reference = mesh_list[ref_index*domains_per_shape+1].copy()
109110
domain1_ref_name = mesh_names[ref_index*domains_per_shape]
110111
domain2_ref_name = mesh_names[ref_index*domains_per_shape+1]
111-
reference = [domain1_reference,domain2_reference]
112-
ref_name = [domain1_ref_name,domain2_ref_name]
112+
reference = [domain1_reference, domain2_reference]
113+
ref_name = [domain1_ref_name, domain2_ref_name]
114+
115+
# Create a combined mesh for the global alignment
116+
combined_reference = domain1_reference.copy()
117+
combined_reference += domain2_reference
118+
113119
"""
114120
Grooming Step 3: Rigid alignment
115121
Now we can loop over all of the meshes again to find the rigid
@@ -118,26 +124,35 @@ def Run_Pipeline(args):
118124

119125
transforms = []
120126
for i in range(len(domain_1_meshes)):
121-
122-
# calculate the transformation
127+
128+
# calculate the transformation
123129
for d in range(domains_per_shape):
124130
# compute rigid transformation
125-
rigidTransform = mesh_list[i*domains_per_shape+d].createTransform(reference[d],sw.Mesh.AlignmentType.Rigid,100)
131+
rigidTransform = mesh_list[i*domains_per_shape +
132+
d].createTransform(reference[d], sw.Mesh.AlignmentType.Rigid, 100)
126133
name = mesh_names[i*domains_per_shape+d]
127-
print('Aligning ' + name + ' to ' + ref_name[d])
134+
print('Aligning ' + name + ' to ' + ref_name[d])
128135
transforms.append(rigidTransform)
129136

137+
combined_mesh = mesh_list[i*domains_per_shape].copy()
138+
for d in range(domains_per_shape):
139+
# skip the first domain
140+
if d == 0:
141+
continue
142+
combined_mesh += mesh_list[i*domains_per_shape+d]
143+
transform = combined_mesh.createTransform(combined_reference, sw.Mesh.AlignmentType.Rigid, 100)
144+
transforms.append(transform)
145+
130146
# Save groomed meshes
131147
groomed_mesh_files = sw.utils.save_meshes(groom_dir + 'meshes/', mesh_list, mesh_names, extension='vtk')
132148

133-
134149
print("\nStep 3. Optimize - Particle Based Optimization\n")
135150
"""
136151
Step 3: OPTIMIZE - Particle Based Optimization
137152
138153
Now we can run optimization directly on the meshes.
139154
For more details on the plethora of parameters for shapeworks please refer
140-
to docs/workflow/optimze.md
155+
to docs/workflow/optimize.md
141156
http://sciinstitute.github.io/ShapeWorks/workflow/optimize.html
142157
"""
143158

@@ -147,17 +162,21 @@ def Run_Pipeline(args):
147162
os.makedirs(project_location)
148163
# Set subjects
149164
subjects = []
150-
165+
151166
for i in range(len(domain_1_meshes)):
152167
subject = sw.Subject()
153168
subject.set_number_of_domains(domains_per_shape)
154169
rel_mesh_files = []
155170
rel_groom_files = []
156171
transform = []
157172
for d in range(domains_per_shape):
158-
rel_mesh_files += sw.utils.get_relative_paths([os.getcwd() + '/' + mesh_files[i*domains_per_shape+d]], project_location)
159-
rel_groom_files += sw.utils.get_relative_paths([os.getcwd() + '/' + groomed_mesh_files[i*domains_per_shape+d]], project_location)
160-
transform.append(transforms[i*domains_per_shape+d].flatten())
173+
rel_mesh_files += sw.utils.get_relative_paths([os.getcwd() +
174+
'/' + mesh_files[i*domains_per_shape+d]], project_location)
175+
rel_groom_files += sw.utils.get_relative_paths([os.getcwd() + '/' +
176+
groomed_mesh_files[i*domains_per_shape+d]], project_location)
177+
transform.append(transforms[i*(domains_per_shape+1)+d].flatten())
178+
# add the global alignment transform
179+
transform.append(transforms[i*(domains_per_shape+1)+domains_per_shape].flatten())
161180
subject.set_groomed_transforms(transform)
162181
subject.set_groomed_filenames(rel_groom_files)
163182
subject.set_original_filenames(rel_mesh_files)
@@ -168,36 +187,36 @@ def Run_Pipeline(args):
168187
parameters = sw.Parameters()
169188

170189
parameter_dictionary = {
171-
"checkpointing_interval" : 200,
172-
"keep_checkpoints" : 0,
173-
"iterations_per_split" : 200,
174-
"optimization_iterations" : 200,
175-
"starting_regularization" :1000,
176-
"ending_regularization" : 0.1,
177-
"relative_weighting" : 10,
178-
"initial_relative_weighting" : 0.1,
179-
"procrustes_interval" : 0,
180-
"procrustes_scaling" : 0,
181-
"save_init_splits" : 0,
182-
"verbosity" : 0
183-
184-
}
185-
num_particles = [128,128]
190+
"checkpointing_interval": 200,
191+
"keep_checkpoints": 0,
192+
"iterations_per_split": 200,
193+
"optimization_iterations": 200,
194+
"starting_regularization": 1000,
195+
"ending_regularization": 0.1,
196+
"relative_weighting": 10,
197+
"initial_relative_weighting": 0.1,
198+
"procrustes_interval": 0,
199+
"procrustes_scaling": 0,
200+
"save_init_splits": 0,
201+
"verbosity": 0
202+
203+
}
204+
num_particles = [128, 128]
186205

187206
# If running a tiny test, reduce some parameters
188207
if args.tiny_test:
189-
num_particles = [32,32]
208+
num_particles = [32, 32]
190209
parameter_dictionary["optimization_iterations"] = 30
191210

192-
#setting the argument to singlescale for the output filename
211+
# setting the argument to singlescale for the output filename
193212
args.use_single_scale = True
194-
args.option_set = args.option_set.replace("multiscale","singlescale")
213+
args.option_set = args.option_set.replace("multiscale", "singlescale")
195214
# Add param dictionary to spreadsheet
196215
for key in parameter_dictionary:
197216
parameters.set(key, sw.Variant([parameter_dictionary[key]]))
198-
parameters.set("number_of_particles" ,sw.Variant(num_particles))
217+
parameters.set("number_of_particles", sw.Variant(num_particles))
199218
project.set_parameters("optimize", parameters)
200-
219+
201220
spreadsheet_file = output_directory + "ellipsoid_multiple_domain_mesh_" + args.option_set + ".swproj"
202221
project.save(spreadsheet_file)
203222

0 commit comments

Comments
 (0)