@@ -18,7 +18,7 @@
 
 
 class VGG16(nn.Module):
-    def __init__(self, vgg_path=os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\vgg\\vgg16-00b39a1b.pth"), train=False):
+    def __init__(self, vgg_path=os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/vgg/vgg16-00b39a1b.pth"), train=False):
         super(VGG16, self).__init__()
         # Load VGG Skeleton, Pretrained Weights
         vgg16_features = models.vgg16(pretrained=False)
@@ -181,12 +181,12 @@ def INPUT_TYPES(s):
 
 
     def train(self, style_img, seed, batch_size, train_img_size, learning_rate, num_epochs, content_weight, style_weight, save_model_every):
-        save_model_path = os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\models\\")
-        dataset_path = os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\dataset\\")
-        vgg_path = os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\vgg\\vgg16-00b39a1b.pth")
-        save_image_path = os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\output\\")
+        save_model_path = os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/models/")
+        dataset_path = os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/dataset/")
+        vgg_path = os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/vgg/vgg16-00b39a1b.pth")
+        save_image_path = os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/output/")
         style_image_path = folder_paths.get_annotated_filepath(style_img)
-        train_path = os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\train.py")
+        train_path = os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/train.py")
 
 
         command = [
@@ -222,7 +222,7 @@ def INPUT_TYPES(s):
         return {
             "required": {
                 "content_img": ("IMAGE",),
-                "model": ([file for file in os.listdir(os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\models\\")) if file.endswith('.pth')], ),
+                "model": ([file for file in os.listdir(os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/models/")) if file.endswith('.pth')], ),
             },
         }
 
@@ -248,7 +248,7 @@ def styleTransfer(self, content_img, model):
 
         # Load Transformer Network
         net = TransformerNetwork().to(device)
-        model_path = os.path.join(folder_paths.base_path, "custom_nodes\\ComfyUI-Fast-Style-Transfer\\models\\") + model
+        model_path = os.path.join(folder_paths.base_path, "custom_nodes/ComfyUI-Fast-Style-Transfer/models/") + model
         net.load_state_dict(torch.load(model_path, map_location=device))
         net = net.to(device)
 
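The new lines still hard-code a separator inside each string; they only swap the Windows backslashes for forward slashes. A fully separator-agnostic variant would pass each path component to os.path.join as its own argument and let Python choose the separator for the host OS. A minimal sketch of that alternative is below; the NODE_DIR name is illustrative and not part of this commit.

    # Sketch only: the same locations as in the diff above, without a hard-coded separator.
    # NODE_DIR is a hypothetical helper name, not one used in the repository.
    import os
    import folder_paths  # ComfyUI path helper; already used by these nodes via folder_paths.base_path

    NODE_DIR = os.path.join(folder_paths.base_path, "custom_nodes", "ComfyUI-Fast-Style-Transfer")

    save_model_path = os.path.join(NODE_DIR, "models")
    dataset_path = os.path.join(NODE_DIR, "dataset")
    vgg_path = os.path.join(NODE_DIR, "vgg", "vgg16-00b39a1b.pth")
    save_image_path = os.path.join(NODE_DIR, "output")
    train_path = os.path.join(NODE_DIR, "train.py")

The trailing slash in the original "models/" string exists because the model file name is appended by plain string concatenation later in styleTransfer; with os.path.join the file name would instead be passed as a further argument.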