Skip to content

Commit 5e66714

Browse files
authored
Merge pull request continuedev#4670 from continuedev/nate/stuff
remove key from test-extensions-cache
2 parents 35ef10c + 8cde458 commit 5e66714

File tree

2 files changed

+16
-11
lines changed

2 files changed

+16
-11
lines changed

.github/workflows/pr_checks.yaml

+1-2
Original file line numberDiff line numberDiff line change
@@ -420,8 +420,7 @@ jobs:
420420
id: test-extensions-cache
421421
with:
422422
path: extensions/vscode/e2e/.test-extensions
423-
# package.json used as the key because that's where the download script is defined
424-
key: vscode-test-extensions-${{ hashFiles('extensions/vscode/package.json') }}
423+
key: CONSTANT
425424

426425
- name: Download build artifact
427426
uses: actions/download-artifact@v4

core/llm/index.ts

+15-9
Original file line numberDiff line numberDiff line change
@@ -59,15 +59,17 @@ import {
5959
toFimBody,
6060
} from "./openaiTypeConverters.js";
6161

62-
6362
export class LLMError extends Error {
64-
constructor(message: string, public llm: ILLM) {
63+
constructor(
64+
message: string,
65+
public llm: ILLM,
66+
) {
6567
super(message);
6668
}
6769
}
6870

6971
export function isModelInstaller(provider: any): provider is ModelInstaller {
70-
return provider && typeof provider.installModel === 'function';
72+
return provider && typeof provider.installModel === "function";
7173
}
7274

7375
export abstract class BaseLLM implements ILLM {
@@ -396,7 +398,7 @@ export abstract class BaseLLM implements ILLM {
396398
let model = error?.match(/model '(.*)' not found/)?.[1];
397399
if (model && resp.url.match("127.0.0.1:11434")) {
398400
text = `The model "${model}" was not found. To download it, run \`ollama run ${model}\`.`;
399-
throw new LLMError(text, this);// No need to add HTTP status details
401+
throw new LLMError(text, this); // No need to add HTTP status details
400402
} else if (text.includes("/api/chat")) {
401403
text =
402404
"The /api/chat endpoint was not found. This may mean that you are using an older version of Ollama that does not support /api/chat. Upgrading to the latest version will solve the issue.";
@@ -781,7 +783,7 @@ export abstract class BaseLLM implements ILLM {
781783
}
782784

783785
let completion = "";
784-
let citations: null | string[] = null
786+
let citations: null | string[] = null;
785787

786788
try {
787789
if (this.templateMessages) {
@@ -809,8 +811,6 @@ export abstract class BaseLLM implements ILLM {
809811
completion = renderChatMessage(msg);
810812
} else {
811813
// Stream true
812-
console.log("Streaming");
813-
814814
const stream = this.openaiAdapter.chatCompletionStream(
815815
{
816816
...body,
@@ -824,7 +824,11 @@ export abstract class BaseLLM implements ILLM {
824824
completion += result.content;
825825
yield result;
826826
}
827-
if (!citations && (chunk as any).citations && Array.isArray((chunk as any).citations)) {
827+
if (
828+
!citations &&
829+
(chunk as any).citations &&
830+
Array.isArray((chunk as any).citations)
831+
) {
828832
citations = (chunk as any).citations;
829833
}
830834
}
@@ -851,7 +855,9 @@ export abstract class BaseLLM implements ILLM {
851855
await this.writeLog(`Completion:\n${completion}\n\n`);
852856

853857
if (citations) {
854-
await this.writeLog(`Citations:\n${citations.map((c, i) => `${i + 1}: ${c}`).join("\n")}\n\n`);
858+
await this.writeLog(
859+
`Citations:\n${citations.map((c, i) => `${i + 1}: ${c}`).join("\n")}\n\n`,
860+
);
855861
}
856862
}
857863

0 commit comments

Comments (0)