
Commit bb9d0e0

committed
added bisenetmask and jonathandinumask nodes
1 parent 24e02fd commit bb9d0e0

File tree

3 files changed, +213 -197 lines changed

README.md

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@ These custom nodes provide a rotation aware face extraction, paste back, and var
 ![Comparison](examples/comparison.jpg)

 ## Patch notes
+- 2024-05-19 - Added BiSeNetMask and JonathandinuMask nodes. Careful with JonathandinuMask: it is more accurate than BiSeNet but uses more memory, so it can run out of memory more easily.
 - 2024-03-10 - Added nodes to detect faces using `face_yolov8m` instead of `insightface`.

 ## Example Workflows
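
The new mask nodes slot in after CropFaces: they take face crops (IMAGE) and return a face-parsing MASK, which is presumably meant to be fed to Warp Faces Back in place of the simpler crop masks. As a minimal sketch of calling them by hand outside ComfyUI (assuming this repo's nodes.py and its face-parsing models are available; the random tensor is only a stand-in for a real crop batch from CropFaces):

```python
# Illustrative sketch, not part of the commit: drive the new nodes directly.
# Assumes the repo's nodes.py is importable and its parsing models can be loaded.
import torch
from nodes import BiSeNetMask, JonathandinuMask

# Stand-in for the IMAGE output of CropFaces: (batch, height, width, channels) in [0, 1].
crops = torch.rand(1, 512, 512, 3)

# BiSeNet parsing: lighter on memory.
bise_masks, = BiSeNetMask().run(
    crops,
    skin=True, left_brow=True, right_brow=True, left_eye=True, right_eye=True,
    eyeglasses=True, left_ear=True, right_ear=True, earring=True, nose=True,
    mouth=True, upper_lip=True, lower_lip=True,
    neck=False, necklace=False, cloth=False, hair=False, hat=False,
)

# Jonathandinu parsing: more accurate, but heavier on memory.
jona_masks, = JonathandinuMask().run(
    crops,
    skin=True, nose=True, eyeglasses=False, left_eye=True, right_eye=True,
    left_brow=True, right_brow=True, left_ear=True, right_ear=True,
    mouth=True, upper_lip=True, lower_lip=True,
    hair=False, hat=False, earring=False, necklace=False, neck=False, cloth=False,
)
```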

nodes.py

Lines changed: 86 additions & 90 deletions
@@ -57,46 +57,6 @@ def run(self, faces, criteria, order, take_start, take_count):
         rest = sorted_faces[:take_start] + sorted_faces[take_start+take_count:]
         return (filtered, rest)

-# class FaceDetailsDEPRECATED:
-#     @classmethod
-#     def INPUT_TYPES(cls):
-#         return {
-#             'required': {
-#                 'faces': ('FACE',),
-#                 'crop_size': ('INT', {'default': 512, 'min': 512, 'max': 1024, 'step': 128}),
-#                 'crop_factor': ('FLOAT', {'default': 1.5, 'min': 1.0, 'max': 3, 'step': 0.1}),
-#                 'mask_type': (mask_types,)
-#             }
-#         }
-
-#     RETURN_TYPES = ('IMAGE', 'MASK', 'WARP')
-#     RETURN_NAMES = ('crops', 'masks', 'warps')
-#     FUNCTION = 'run'
-#     CATEGORY = 'facetools'
-
-#     def run(self, faces, crop_size, crop_factor, mask_type):
-#         if len(faces) == 0:
-#             empty_crop = torch.zeros((1,512,512,3))
-#             empty_mask = torch.zeros((1,512,512))
-#             empty_warp = np.array([
-#                 [1,0,-512],
-#                 [0,1,-512],
-#             ], dtype=np.float32)
-#             return (empty_crop, empty_mask, [empty_warp])
-
-#         crops = []
-#         masks = []
-#         warps = []
-#         for face in faces:
-#             M, crop = crop_faces(face.image, face, crop_size, crop_factor)
-#             mask = mask_crop(face, M, crop, mask_type)
-#             crops.append(np.array(crop) / 255)
-#             masks.append(np.array(mask))
-#             warps.append(M)
-#         crops = torch.from_numpy(np.array(crops)).type(torch.float32)
-#         masks = torch.from_numpy(np.array(masks)).type(torch.float32)
-#         return (crops, masks, warps)
-
 class DetectFaces:
     @classmethod
     def INPUT_TYPES(cls):
@@ -131,7 +91,7 @@ def run(self, image, threshold, min_size, max_size, mask=None):
                 w = abs(c-a)
                 if (h <= max_size or w <= max_size) and (min_size <= h or min_size <= w):
                     face.image_idx = i
-                    face.image = image[i]
+                    face.img = image[i]
                     faces.append(face)
         return (faces,)

@@ -166,47 +126,14 @@ def run(self, faces, crop_size, crop_factor, mask_type):
         masks = []
         warps = []
         for face in faces:
-            M, crop, maskedcrop = face.crop(crop_size, crop_factor)
-            mask = mask_crop(face, M, maskedcrop, mask_type)
-            crops.append(np.array(crop))
-            masks.append(np.array(mask))
+            M, crop = face.crop(crop_size, crop_factor)
+            mask = mask_crop(face, M, crop, mask_type)
+            crops.append(np.array(crop[0]))
+            masks.append(np.array(mask[0]))
             warps.append(M)
         crops = torch.from_numpy(np.array(crops)).type(torch.float32)
         masks = torch.from_numpy(np.array(masks)).type(torch.float32)
         return (crops, masks, warps)
-
-# class AlignFacesDEPRECATED:
-#     @classmethod
-#     def INPUT_TYPES(cls):
-#         return {
-#             'required': {
-#                 'insightface': ('INSIGHTFACE',),
-#                 'image': ('IMAGE',),
-#                 'threshold': ('FLOAT', {'default': 0.5, 'min': 0.5, 'max': 1.0, 'step': 0.01}),
-#                 'min_size': ('INT', {'default': 64, 'max': 512, 'step': 8}),
-#                 'max_size': ('INT', {'default': 512, 'min': 512, 'step': 8}),
-#             }
-#         }
-
-#     RETURN_TYPES = ('FACE',)
-#     RETURN_NAMES = ('faces',)
-#     FUNCTION = 'run'
-#     CATEGORY = 'facetools'
-
-#     def run(self, insightface, image, threshold, min_size, max_size):
-#         faces = []
-#         images = (image * 255).type(torch.uint8).numpy()
-#         for i, img in enumerate(images):
-#             unfiltered_faces = get_faces(img, insightface)
-#             for face in unfiltered_faces:
-#                 a, b, c, d = face.bbox
-#                 h = abs(d-b)
-#                 w = abs(c-a)
-#                 if face.det_score >= threshold and (h <= max_size or w <= max_size) and (min_size <= h or min_size <= w):
-#                     face.image_idx = i
-#                     face.image = img
-#                     faces.append(face)
-#         return (faces,)

 class WarpFaceBack:
     RETURN_TYPES = ('IMAGE',)
@@ -228,14 +155,7 @@ def INPUT_TYPES(cls):
     def run(self, images, face, crop, mask, warp):
         groups = defaultdict(list)
         for f,c,m,w in zip(face, crop, mask, warp):
-            # gray = cv2.cvtColor(c.numpy(), cv2.COLOR_RGB2GRAY)
-            # _, gray = cv2.threshold(gray, 0.01, 1, cv2.THRESH_BINARY)
-            # gray = gray.astype(np.uint8)
-            # cnts, _ = cv2.findContours(gray, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
-            # largeCnts = list(filter(lambda x: cv2.contourArea(x) > 10000, cnts))
-            # gray = cv2.drawContours(gray, largeCnts, -1, 1, -1)
-            # m *= torch.from_numpy(gray)
-            groups[f.image_idx].append((f.image,c,m,w))
+            groups[f.image_idx].append((f.img,c,m,w))

         results = []
         for i, image in enumerate(images):
@@ -287,13 +207,89 @@ def run(self, crop0, mask0, warp0, crop1, mask1, warp1):
         masks = torch.vstack((mask0, mask1))
         warps = warp0 + warp1
         return (crops, masks, warps)
+
+class BiSeNetMask:
+    RETURN_TYPES = ('MASK',)
+    FUNCTION = 'run'
+    CATEGORY = 'facetools'
+
+    @classmethod
+    def INPUT_TYPES(cls):
+        return {
+            'required': {
+                'crop': ('IMAGE',),
+                'skin': ('BOOLEAN', {'default': True}),
+                'left_brow': ('BOOLEAN', {'default': True}),
+                'right_brow': ('BOOLEAN', {'default': True}),
+                'left_eye': ('BOOLEAN', {'default': True}),
+                'right_eye': ('BOOLEAN', {'default': True}),
+                'eyeglasses': ('BOOLEAN', {'default': True}),
+                'left_ear': ('BOOLEAN', {'default': True}),
+                'right_ear': ('BOOLEAN', {'default': True}),
+                'earring': ('BOOLEAN', {'default': True}),
+                'nose': ('BOOLEAN', {'default': True}),
+                'mouth': ('BOOLEAN', {'default': True}),
+                'upper_lip': ('BOOLEAN', {'default': True}),
+                'lower_lip': ('BOOLEAN', {'default': True}),
+                'neck': ('BOOLEAN', {'default': False}),
+                'necklace': ('BOOLEAN', {'default': False}),
+                'cloth': ('BOOLEAN', {'default': False}),
+                'hair': ('BOOLEAN', {'default': False}),
+                'hat': ('BOOLEAN', {'default': False}),
+            }
+        }

+    def run(self, crop, skin, left_brow, right_brow, left_eye, right_eye, eyeglasses,
+            left_ear, right_ear, earring, nose, mouth, upper_lip, lower_lip,
+            neck, necklace, cloth, hair, hat):
+        masks = mask_BiSeNet(crop, skin, left_brow, right_brow, left_eye, right_eye, eyeglasses,
+                             left_ear, right_ear, earring, nose, mouth, upper_lip, lower_lip,
+                             neck, necklace, cloth, hair, hat)
+        return (masks, )
+
+class JonathandinuMask:
+    RETURN_TYPES = ('MASK',)
+    FUNCTION = 'run'
+    CATEGORY = 'facetools'
+
+    @classmethod
+    def INPUT_TYPES(cls):
+        return {
+            'required': {
+                'crop': ('IMAGE',),
+                'skin': ('BOOLEAN', {'default': True}),
+                'nose': ('BOOLEAN', {'default': True}),
+                'eyeglasses': ('BOOLEAN', {'default': False}),
+                'left_eye': ('BOOLEAN', {'default': True}),
+                'right_eye': ('BOOLEAN', {'default': True}),
+                'left_brow': ('BOOLEAN', {'default': True}),
+                'right_brow': ('BOOLEAN', {'default': True}),
+                'left_ear': ('BOOLEAN', {'default': True}),
+                'right_ear': ('BOOLEAN', {'default': True}),
+                'mouth': ('BOOLEAN', {'default': True}),
+                'upper_lip': ('BOOLEAN', {'default': True}),
+                'lower_lip': ('BOOLEAN', {'default': True}),
+                'hair': ('BOOLEAN', {'default': False}),
+                'hat': ('BOOLEAN', {'default': False}),
+                'earring': ('BOOLEAN', {'default': False}),
+                'necklace': ('BOOLEAN', {'default': False}),
+                'neck': ('BOOLEAN', {'default': False}),
+                'cloth': ('BOOLEAN', {'default': False}),
+            }
+        }
+
+    def run(self, crop, skin, nose, eyeglasses, left_eye, right_eye, left_brow, right_brow, left_ear, right_ear,
+            mouth, upper_lip, lower_lip, hair, hat, earring, necklace, neck, cloth):
+        masks = mask_jonathandinu(crop, skin, nose, eyeglasses, left_eye, right_eye, left_brow, right_brow, left_ear, right_ear,
+                                  mouth, upper_lip, lower_lip, hair, hat, earring, necklace, neck, cloth)
+        return (masks, )
+
 NODE_CLASS_MAPPINGS = {
     'DetectFaces': DetectFaces,
     'CropFaces': CropFaces,
-    # 'AlignFaces': AlignFacesDEPRECATED,
     'WarpFacesBack': WarpFaceBack,
-    # 'FaceDetails': FaceDetailsDEPRECATED,
+    'BiSeNetMask': BiSeNetMask,
+    'JonathandinuMask': JonathandinuMask,
     'MergeWarps': MergeWarps,
     'GenderFaceFilter': GenderFaceFilter,
     'OrderedFaceFilter': OrderedFaceFilter,
@@ -302,9 +298,9 @@ def run(self, crop0, mask0, warp0, crop1, mask1, warp1):
 NODE_DISPLAY_NAME_MAPPINGS = {
     'DetectFaces': 'DetectFaces',
     'CropFaces': 'CropFaces',
-    # 'AlignFaces': 'Align Faces (DEPRECATED)',
     'WarpFacesBack': 'Warp Faces Back',
-    # 'FaceDetails': 'Face Details (DEPRECATED)',
+    'BiSeNetMask': 'BiSeNet Mask',
+    'JonathandinuMask': 'Jonathandinu Mask',
     'MergeWarps': 'Merge Warps',
     'GenderFaceFilter': 'Gender Face Filter',
     'OrderedFaceFilter': 'Ordered Face Filter',
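
Both new classes are thin wrappers: BiSeNetMask.run and JonathandinuMask.run delegate to mask_BiSeNet and mask_jonathandinu, whose implementations live in a file not shown in this excerpt. For a rough picture of what the Jonathandinu path involves, here is a hypothetical sketch built on the public jonathandinu/face-parsing Segformer checkpoint from Hugging Face; the function name, preprocessing, and label handling are assumptions, not the repo's actual helper:

```python
# Hypothetical illustration only, NOT the repo's mask_jonathandinu helper
# (that implementation is in a file outside this diff).
import torch
import torch.nn.functional as F
from transformers import SegformerImageProcessor, SegformerForSemanticSegmentation

processor = SegformerImageProcessor.from_pretrained("jonathandinu/face-parsing")
model = SegformerForSemanticSegmentation.from_pretrained("jonathandinu/face-parsing").eval()

def face_parsing_mask(crop: torch.Tensor, keep_labels: set) -> torch.Tensor:
    """crop: (B, H, W, C) floats in [0, 1]; returns a (B, H, W) binary mask."""
    images = [(c * 255).to(torch.uint8).numpy() for c in crop]    # one HWC uint8 array per image
    inputs = processor(images=images, return_tensors="pt")
    with torch.inference_mode():
        logits = model(**inputs).logits                           # (B, num_labels, h, w)
    logits = F.interpolate(logits, size=crop.shape[1:3],
                           mode="bilinear", align_corners=False)  # back to crop resolution
    labels = logits.argmax(dim=1)                                 # per-pixel class ids
    mask = torch.zeros_like(labels, dtype=torch.float32)
    for lbl in keep_labels:                                       # keep only the selected face parts
        mask[labels == lbl] = 1.0
    return mask
```

Running a full transformer-based parser like this per crop is consistent with the README patch note's warning that JonathandinuMask uses more memory than the BiSeNet path.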
