Commit f3c58e1: Speed up goofi-pipe startup
1 parent 8c7e547

17 files changed (+148, -74 lines)
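The diffs below all apply the same pattern: heavy third-party imports (scipy, networkx, requests, pandas, mne, pyACA) are moved from module level into each node's setup() method and cached on the instance, so importing goofi's node modules at startup no longer pays for those packages. A minimal standalone sketch of the idea follows; the Correlator class and the timing harness are illustrative only and not part of goofi-pipe:

```python
import time


class Correlator:
    """Toy stand-in for a goofi node that defers a heavy import to setup()."""

    def setup(self):
        # The costly import happens here, once, instead of at module import time.
        from scipy import stats

        self.stats = stats  # cache the module handle on the instance

    def compute(self, x, y):
        # Later calls go through the cached handle rather than a module-level name.
        return self.stats.pearsonr(x, y)[0]


if __name__ == "__main__":
    t0 = time.perf_counter()
    node = Correlator()  # cheap: scipy has not been imported yet
    node.setup()         # the scipy import cost is paid here
    print(node.compute([1.0, 2.0, 3.0], [1.0, 2.0, 4.0]))
    print(f"setup + first use: {time.perf_counter() - t0:.3f}s")
```

Note that the import cost is not eliminated, only deferred: it is paid the first time a node of that type is actually set up, rather than by every process that imports the module.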

src/goofi/nodes/analysis/correlation.py

Lines changed: 6 additions & 2 deletions
@@ -1,5 +1,4 @@
 import numpy as np
-from scipy import stats
 
 from goofi.data import Data, DataType
 from goofi.node import Node
@@ -17,6 +16,11 @@ def config_output_slots():
     def config_params():
         return {"correlation": {"axis": -1}}
 
+    def setup(self):
+        from scipy import stats
+
+        self.stats = stats
+
     def process(self, data1: Data, data2: Data):
         if data1 is None or data2 is None:
             return None
@@ -36,6 +40,6 @@ def process(self, data1: Data, data2: Data):
             axis += 1
 
         # calculate correlation along axis
-        r = np.apply_along_axis(lambda x: stats.pearsonr(*x.reshape(2, -1))[0], axis, data)[0]
+        r = np.apply_along_axis(lambda x: self.stats.pearsonr(*x.reshape(2, -1))[0], axis, data)[0]
 
         return {"pearson": (r, meta)}

src/goofi/nodes/analysis/graphmetrics.py

Lines changed: 14 additions & 9 deletions
@@ -1,5 +1,5 @@
 import numpy as np
-import networkx as nx
+
 from goofi.data import Data, DataType
 from goofi.node import Node
 
@@ -18,6 +18,11 @@ def config_output_slots():
             "transitivity": DataType.ARRAY,
         }
 
+    def setup(self):
+        import networkx as nx
+
+        self.nx = nx
+
     def process(self, matrix: Data):
         if matrix is None:
             return None
@@ -27,20 +32,20 @@ def process(self, matrix: Data):
             raise ValueError("Matrix must be 2D and symmetric.")
 
         # Create a graph from the matrix (assuming undirected graph)
-        G = nx.from_numpy_array(matrix.data)
+        G = self.nx.from_numpy_array(matrix.data)
 
         # Compute metrics
-        clustering_coefficients = nx.average_clustering(G)
+        clustering_coefficients = self.nx.average_clustering(G)
         try:
-            path_length = nx.average_shortest_path_length(G)
-        except nx.NetworkXError:  # Handles cases where the graph is not connected
+            path_length = self.nx.average_shortest_path_length(G)
+        except self.nx.NetworkXError:  # Handles cases where the graph is not connected
             path_length = None
-        betweenness = nx.betweenness_centrality(G)
+        betweenness = self.nx.betweenness_centrality(G)
         betweenness = np.array(list(betweenness.values()))
-        degree_centrality = nx.degree_centrality(G)
+        degree_centrality = self.nx.degree_centrality(G)
         degree_centrality = np.array(list(degree_centrality.values()))
-        assortativity = nx.degree_assortativity_coefficient(G)
-        transitivity = nx.transitivity(G)
+        assortativity = self.nx.degree_assortativity_coefficient(G)
+        transitivity = self.nx.transitivity(G)
 
         return {
             "clustering_coefficient": (np.array(clustering_coefficients), {}),

src/goofi/nodes/analysis/img2txt.py

Lines changed: 6 additions & 3 deletions
@@ -2,7 +2,6 @@
 import io
 
 import numpy as np
-import requests
 from PIL import Image
 
 from goofi.data import Data, DataType
@@ -36,6 +35,10 @@ def config_params():
         }
 
     def setup(self):
+        import requests
+
+        self.requests = requests
+
         self.processor = None
         self.model_instance = None
         self.openai = None
@@ -193,10 +196,10 @@ def process_openai_gpt(self, image_array):
         }
 
         try:
-            response = requests.post("https://api.openai.com/v1/chat/completions", headers=headers, json=payload)
+            response = self.requests.post("https://api.openai.com/v1/chat/completions", headers=headers, json=payload)
             response.raise_for_status()
             generated_text = response.json().get("choices", [{}])[0].get("message", {}).get("content", "")
-        except requests.exceptions.RequestException as e:
+        except self.requests.exceptions.RequestException as e:
             print(f"Error during OpenAI captioning request: {e}")
             return {"generated_text": ("Error generating caption.", {})}
 

src/goofi/nodes/analysis/poseestimation.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,6 @@
 from os import path
 
 import numpy as np
-import requests
 
 from goofi.data import Data, DataType
 from goofi.node import Node
@@ -16,6 +15,7 @@ def config_output_slots():
 
     def setup(self):
         import mediapipe as mp
+        import requests
         from mediapipe.tasks import python
         from mediapipe.tasks.python import vision
 

src/goofi/nodes/analysis/spectromorphology.py

Lines changed: 10 additions & 3 deletions
@@ -1,5 +1,3 @@
-import pyACA
-
 from goofi.data import Data, DataType
 from goofi.node import Node
 from goofi.params import IntParam, StringParam
@@ -56,6 +54,9 @@ def process(self, data: Data):
         return {"spectro": (v, data.meta)}
 
 
+pyACA_mod = None
+
+
 def computeFeatureCl_new(afAudioData, cFeatureName, f_s, window=4000, overlap=1):
     """Calculate spectromorphological metrics on time series.
 
@@ -84,5 +85,11 @@ def computeFeatureCl_new(afAudioData, cFeatureName, f_s, window=4000, overlap=1)
     t : array
         Timestamps.
     """
-    [v, t] = pyACA.computeFeature(cFeatureName, afAudioData, f_s, None, window, overlap)
+    global pyACA_mod
+    if pyACA_mod is None:
+        import pyACA
+
+        pyACA_mod = pyACA
+
+    [v, t] = pyACA_mod.computeFeature(cFeatureName, afAudioData, f_s, None, window, overlap)
     return (v, t)
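This file differs from the node classes above: computeFeatureCl_new is a module-level function with no setup() hook, so the lazily imported module is cached in a global (pyACA_mod) instead of on an instance. The same idea in isolation, with json standing in for a genuinely heavy dependency (the names here are illustrative, not from goofi-pipe):

```python
# Module-level cache for a lazily imported dependency.
_heavy_mod = None


def parse(text):
    """Import the dependency on first use, then reuse the cached module."""
    global _heavy_mod
    if _heavy_mod is None:
        import json  # stand-in for a slow-to-import package such as pyACA

        _heavy_mod = json
    return _heavy_mod.loads(text)


print(parse('{"startup": "fast"}'))
```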

src/goofi/nodes/inputs/eegrecording.py

Lines changed: 3 additions & 4 deletions
@@ -1,9 +1,5 @@
 from typing import Any, Dict, Tuple
 
-import mne
-import pandas as pd
-from mne.datasets import eegbci
-
 from goofi.node import Node
 from goofi.params import FloatParam
 
@@ -25,6 +21,9 @@ def setup(self):
         """
         Load the data and start the stream.
         """
+        import mne
+        import pandas as pd
+        from mne.datasets import eegbci
         from mne_lsl.player import PlayerLSL
 
         # stop previous stream if it exists

src/goofi/nodes/inputs/meteomedia.py

Lines changed: 11 additions & 5 deletions
@@ -1,9 +1,10 @@
+from os.path import join
+
+import numpy as np
+
 from goofi.data import Data, DataType
 from goofi.node import Node
 from goofi.params import FloatParam, StringParam
-import requests
-import numpy as np
-from os.path import join
 
 
 class MeteoMedia(Node):
@@ -24,6 +25,11 @@ def config_params():
             },
         }
 
+    def setup(self):
+        import requests
+
+        self.requests = requests
+
     def process(self, latitude: Data, longitude: Data, location_name: Data):
         if latitude is None or longitude is None:
             return None
@@ -45,13 +51,13 @@ def process(self, latitude: Data, longitude: Data, location_name: Data):
         if location_name is None:
             url = f"https://api.tomorrow.io/v4/weather/realtime?location={np.float(lat_value)},{np.float(long_value)}&apikey={api_key}"
             headers = {"accept": "application/json"}
-            response = requests.get(url, headers=headers)
+            response = self.requests.get(url, headers=headers)
             print(response.status_code)
 
         else:
            url = f"https://api.tomorrow.io/v4/weather/realtime?location={location_name}&apikey={api_key}"
            headers = {"accept": "application/json"}
-            response = requests.get(url, headers=headers)
+            response = self.requests.get(url, headers=headers)
             print(response.status_code)
         if response.status_code == 200:
             responses = response.json()

src/goofi/nodes/inputs/textgeneration.py

Lines changed: 16 additions & 12 deletions
@@ -1,9 +1,9 @@
-from os import path, environ
 import json
-import requests
+from os import environ, path
+
 from goofi.data import Data, DataType
 from goofi.node import Node
-from goofi.params import FloatParam, IntParam, StringParam, BoolParam
+from goofi.params import BoolParam, FloatParam, IntParam, StringParam
 
 
 class TextGeneration(Node):
@@ -31,6 +31,10 @@ def config_params():
         }
 
     def setup(self):
+        import requests
+
+        self.requests = requests
+
         self.client = None
         self.previous_model = None
         self.api_key_loaded = False
@@ -159,7 +163,7 @@ def generate_gemini_response(self, text, temp, keep_conversation, history=None):
             history = None
 
         return response.text
-
+
     def generate_ollama_response(self, model, messages, temp):
         # Create an Ollama client instance
         if self.client is None:
@@ -172,10 +176,10 @@ def generate_ollama_response(self, model, messages, temp):
                 "system": self.system_prompt,
                 "temperature": temp,
                 "max_tokens": self.params["text_generation"]["max_tokens"].value,
-            }
+            },
         )
-        return response['message']['content']
-
+        return response["message"]["content"]
+
     def generate_local_response(self, content):
         url = "http://127.0.0.1:5000/v1/chat/completions"
         headers = {"Content-Type": "application/json"}
@@ -186,7 +190,7 @@ def generate_local_response(self, content):
             "character": "Example",
             "messages": [{"role": "user", "content": content}],
         }
-        response = requests.post(url, headers=headers, json=data, verify=False)
+        response = self.requests.post(url, headers=headers, json=data, verify=False)
         return response.json()["choices"][0]["message"]["content"]
 
     def save_conversation_to_json(self):
@@ -223,14 +227,14 @@ def process(self, prompt: Data):
             if self.api_key_loaded:
                 pass
             else:
-                self.load_api_key()
+                self.load_api_key()
             generated_text = self.generate_anthropic_response(self.messages, temp)
-
+
         elif model.startswith("gemini-"):
             if self.api_key_loaded:
                 pass
             else:
-                self.load_api_key()
+                self.load_api_key()
             generated_text = self.generate_gemini_response(prompt_, temp, keep_conversation)
 
         elif model.startswith("local-"):
@@ -246,7 +250,7 @@ def process(self, prompt: Data):
 
         if save_conversation:
             self.save_conversation_to_json()
-
+
         if self.params["text_generation"]["api_key"].value != self.api_key:
             self.api_key_changed()
 

src/goofi/nodes/outputs/writecsv.py

Lines changed: 5 additions & 2 deletions
@@ -2,7 +2,6 @@
 import os
 
 import numpy as np
-import pandas as pd
 
 from goofi.data import Data, DataType
 from goofi.node import Node
@@ -23,6 +22,10 @@ def config_params():
         }
 
     def setup(self):
+        import pandas as pd
+
+        self.pd = pd
+
         self.last_filename = None
         self.base_filename = None  # To track the filename without the timestamp
         self.written_files = set()  # Track files we've written headers to
@@ -69,7 +72,7 @@ def flatten(data):
         flattened_data = flatten(column_data)
 
         # Create DataFrame with the flattened data
-        df = pd.DataFrame({column_name: flattened_data})
+        df = self.pd.DataFrame({column_name: flattened_data})
 
         # Get the filename from the parameters
         filename = self.params["Write"]["filename"].value
