@@ -163,14 +163,14 @@ def test_transformers_message_no_tool(self, monkeypatch):
             do_sample=False,
         )
         messages = [{"role": "user", "content": [{"type": "text", "text": "Hello!"}]}]
-        output = model.generate(messages, stop_sequences=["great"]).content
-        assert output == "assistant\nHello "
+        output = model.generate(messages).content
+        assert output == "Hello! I'm here "
 
         output = model.generate_stream(messages, stop_sequences=["great"])
         output_str = ""
         for el in output:
             output_str += el.content
-        assert output_str == "assistant\nHello "
+        assert output_str == "Hello! I'm here "
 
     def test_transformers_message_vl_no_tool(self, shared_datadir, monkeypatch):
         monkeypatch.setattr("huggingface_hub.constants.HF_HUB_DOWNLOAD_TIMEOUT", 30)  # instead of 10
@@ -183,15 +183,17 @@ def test_transformers_message_vl_no_tool(self, shared_datadir, monkeypatch):
             device_map="cpu",
             do_sample=False,
         )
-        messages = [{"role": "user", "content": [{"type": "text", "text": "Hello!"}, {"type": "image", "image": img}]}]
-        output = model.generate(messages, stop_sequences=["great"]).content
-        assert output == "I am"
+        messages = [
+            {"role": "user", "content": [{"type": "text", "text": "What is this?"}, {"type": "image", "image": img}]}
+        ]
+        output = model.generate(messages).content
+        assert output == "This is a very"
 
         output = model.generate_stream(messages, stop_sequences=["great"])
         output_str = ""
         for el in output:
             output_str += el.content
-        assert output_str == "I am "
+        assert output_str == "This is a very "
 
     def test_parse_json_if_needed(self):
         args = "abc"