Commit ff09574

feat(lightspeed): add api client (#2020)
1 parent 84ada10 commit ff09574

File tree

6 files changed: +200 −92 lines

plugins/lightspeed/package.json

Lines changed: 0 additions & 1 deletion
@@ -4,7 +4,6 @@
   "main": "src/index.ts",
   "types": "src/index.ts",
   "license": "Apache-2.0",
-  "private": true,
   "publishConfig": {
     "access": "public",
     "main": "dist/index.esm.js",
plugins/lightspeed/src/api/LightspeedProxyClient.test.ts

Lines changed: 107 additions & 0 deletions
@@ -0,0 +1,107 @@
+import { ConfigApi, IdentityApi } from '@backstage/core-plugin-api';
+
+import OpenAI from 'openai';
+
+import { LightspeedProxyClient, Options } from './LightspeedProxyClient';
+
+jest.mock('openai');
+
+describe('LightspeedProxyClient', () => {
+  let configApi: jest.Mocked<ConfigApi>;
+  let identityApi: jest.Mocked<IdentityApi>;
+  let lightspeedClient: LightspeedProxyClient;
+  let mockCreateChatCompletion: jest.Mock;
+
+  beforeEach(() => {
+    configApi = {
+      getString: jest.fn().mockReturnValue('http://localhost:7007'),
+    } as unknown as jest.Mocked<ConfigApi>;
+
+    identityApi = {
+      getCredentials: jest.fn().mockResolvedValue({ token: 'mock-token' }),
+    } as unknown as jest.Mocked<IdentityApi>;
+
+    const options: Options = { configApi, identityApi };
+    lightspeedClient = new LightspeedProxyClient(options);
+
+    mockCreateChatCompletion = jest.fn().mockResolvedValue({} as any);
+    const mockOpenAI = new OpenAI({});
+    mockOpenAI.chat = {
+      completions: {
+        create: mockCreateChatCompletion,
+      },
+    } as any;
+
+    (OpenAI as unknown as jest.Mock).mockImplementation(() => mockOpenAI);
+  });
+
+  describe('constructor', () => {
+    it('should initialize the client with the correct baseURL', () => {
+      expect(OpenAI).toHaveBeenCalledWith({
+        baseURL: 'http://localhost:7007/api/proxy/lightspeed/api',
+        apiKey: 'random-key',
+        dangerouslyAllowBrowser: true,
+      });
+    });
+  });
+
+  describe('getUserAuthorization', () => {
+    it('should return the idToken from identityApi', async () => {
+      const token = await lightspeedClient.getUserAuthorization();
+      expect(token).toBe('mock-token');
+      expect(identityApi.getCredentials).toHaveBeenCalledTimes(1);
+    });
+  });
+
+  describe('createChatCompletions', () => {
+    it('should call openAIApi.chat.completions.create with correct parameters', async () => {
+      const prompt = 'Test prompt';
+      await lightspeedClient.createChatCompletions(prompt);
+
+      expect(mockCreateChatCompletion).toHaveBeenCalledWith(
+        {
+          messages: [
+            {
+              role: 'system',
+              content:
+                'You are a helpful assistant that can answer question in Red Hat Developer Hub.',
+            },
+            { role: 'user', content: prompt },
+          ],
+          model: 'llama3',
+          stream: true,
+        },
+        {
+          headers: {
+            Authorization: 'Bearer mock-token',
+          },
+        },
+      );
+    });
+
+    it('should not include Authorization header if idToken is not available', async () => {
+      identityApi.getCredentials.mockResolvedValueOnce({ token: undefined });
+
+      const prompt = 'Test prompt';
+      await lightspeedClient.createChatCompletions(prompt);
+
+      expect(mockCreateChatCompletion).toHaveBeenCalledWith(
+        {
+          messages: [
+            {
+              role: 'system',
+              content:
+                'You are a helpful assistant that can answer question in Red Hat Developer Hub.',
+            },
+            { role: 'user', content: prompt },
+          ],
+          model: 'llama3',
+          stream: true,
+        },
+        {
+          headers: {},
+        },
+      );
+    });
+  });
+});
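
Note: the tests above only assert the arguments passed to chat.completions.create; the mock resolves to an empty object. If the streamed result itself ever needs to be exercised, one option (not part of this commit; fakeCompletionStream is a hypothetical helper) is to resolve the mock with an async iterable:

// Hypothetical helper (illustration only): yields chunks shaped like OpenAI
// ChatCompletionChunk deltas so a `for await` consumer can be tested.
async function* fakeCompletionStream(parts: string[]) {
  for (const content of parts) {
    yield { choices: [{ delta: { content } }] };
  }
}

// e.g. inside a test:
// mockCreateChatCompletion.mockResolvedValueOnce(fakeCompletionStream(['Hello', ' world']));
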
plugins/lightspeed/src/api/LightspeedProxyClient.ts

Lines changed: 70 additions & 0 deletions
@@ -0,0 +1,70 @@
+import {
+  ConfigApi,
+  createApiRef,
+  IdentityApi,
+} from '@backstage/core-plugin-api';
+
+import OpenAI from 'openai';
+import { Stream } from 'openai/streaming';
+
+export type LightspeedAPI = {
+  createChatCompletions: (
+    prompt: string,
+  ) => Promise<Stream<OpenAI.Chat.Completions.ChatCompletionChunk>>;
+};
+
+export const lightspeedApiRef = createApiRef<LightspeedAPI>({
+  id: 'plugin.lightspeed.service',
+});
+
+export type Options = {
+  configApi: ConfigApi;
+  identityApi: IdentityApi;
+};
+
+export class LightspeedProxyClient implements LightspeedAPI {
+  // @ts-ignore
+  private readonly configApi: ConfigApi;
+  private readonly identityApi: IdentityApi;
+  private readonly openAIApi: OpenAI;
+
+  constructor(options: Options) {
+    this.configApi = options.configApi;
+    this.identityApi = options.identityApi;
+    this.openAIApi = new OpenAI({
+      baseURL: `${this.configApi.getString('backend.baseUrl')}/api/proxy/lightspeed/api`,
+
+      // required but ignored
+      apiKey: 'random-key',
+      dangerouslyAllowBrowser: true,
+    });
+  }
+
+  async getUserAuthorization() {
+    const { token: idToken } = await this.identityApi.getCredentials();
+    return idToken;
+  }
+
+  async createChatCompletions(prompt: string) {
+    const idToken = await this.getUserAuthorization();
+    return await this.openAIApi.chat.completions.create(
+      {
+        messages: [
+          {
+            role: 'system',
+            content:
+              'You are a helpful assistant that can answer question in Red Hat Developer Hub.',
+          },
+          { role: 'user', content: prompt },
+        ],
+        model: 'llama3',
+        stream: true,
+      },
+      {
+        headers: {
+          ...(idToken && { Authorization: `Bearer ${idToken}` }),
+        },
+      },
+    );
+  }
+}
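
For orientation, a minimal consumption sketch of the API added above (not part of this commit; the useLightspeedAnswer hook is hypothetical). It resolves the client through lightspeedApiRef and drains the stream, mirroring the loop used in LightspeedPage.tsx below:

import React from 'react';

import { useApi } from '@backstage/core-plugin-api';

import { lightspeedApiRef } from '../api/LightspeedProxyClient';

// Hypothetical hook (illustration only): sends a prompt and returns the
// accumulated completion text once the stream ends.
export const useLightspeedAnswer = () => {
  const lightspeedApi = useApi(lightspeedApiRef);

  return React.useCallback(
    async (prompt: string) => {
      let answer = '';
      const stream = await lightspeedApi.createChatCompletions(prompt);
      for await (const chunk of stream) {
        answer += chunk.choices[0]?.delta?.content || '';
      }
      return answer;
    },
    [lightspeedApi],
  );
};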

plugins/lightspeed/src/components/LightspeedPage.tsx

Lines changed: 5 additions & 25 deletions
@@ -1,12 +1,12 @@
 import React from 'react';
 
 import { Content, Header, HeaderLabel, Page } from '@backstage/core-components';
-import { configApiRef, useApi } from '@backstage/core-plugin-api';
+import { useApi } from '@backstage/core-plugin-api';
 
 import { Paper } from '@material-ui/core';
 import { createStyles, makeStyles } from '@material-ui/core/styles';
-import OpenAI from 'openai';
 
+import { lightspeedApiRef } from '../api/LightspeedProxyClient';
 import { LightspeedInput } from './LightspeedInput';
 import { SystemMessage, UserMessage } from './Message';
 
@@ -38,51 +38,31 @@ const useStyles = makeStyles(() =>
 export const LightspeedPage = () => {
   const classes = useStyles();
 
-  const configApi = useApi(configApiRef);
+  const lightspeedApi = useApi(lightspeedApiRef);
 
   const [, setChunkIndex] = React.useState(0);
   const [prompts, setPrompts] = React.useState<string[]>([]);
   const [completions, setCompletions] = React.useState<{
     [key: string]: string;
   }>({});
-  const backendUrl = configApi.getString('backend.baseUrl');
-  const openai = new OpenAI({
-    baseURL: `${backendUrl}/api/proxy/lightspeed/api`,
-
-    // required but ignored
-    apiKey: 'random-key',
-    dangerouslyAllowBrowser: true,
-  });
 
   const handleInputPrompt = React.useCallback(
     async (prompt: string) => {
       setPrompts(p => [...p, prompt]);
       setChunkIndex(0);
 
-      const result = await openai.chat.completions.create({
-        messages: [
-          {
-            role: 'system',
-            content:
-              'You are a helpful assistant that can answer question in Red Hat Developer Hub.',
-          },
-          { role: 'user', content: prompt },
-        ],
-        model: 'llama3',
-        stream: true,
-      });
+      const result = await lightspeedApi.createChatCompletions(prompt);
 
       for await (const chunk of result) {
        setChunkIndex(index => index + 1);
        setCompletions(c => {
-          // console.log('string ---', s);
          c[prompt] =
            `${c[prompt] || ''}${chunk.choices[0]?.delta?.content || ''}`;
          return c;
        });
       }
     },
-    [openai.chat.completions],
+    [lightspeedApi],
   );
 
   return (

plugins/lightspeed/src/plugin.ts

Lines changed: 18 additions & 0 deletions
@@ -1,15 +1,33 @@
 import {
+  configApiRef,
+  createApiFactory,
   createPlugin,
   createRoutableExtension,
+  identityApiRef,
 } from '@backstage/core-plugin-api';
 
+import {
+  lightspeedApiRef,
+  LightspeedProxyClient,
+} from './api/LightspeedProxyClient';
 import { rootRouteRef } from './routes';
 
 export const lightspeedPlugin = createPlugin({
   id: 'lightspeed',
   routes: {
     root: rootRouteRef,
   },
+  apis: [
+    createApiFactory({
+      api: lightspeedApiRef,
+      deps: {
+        configApi: configApiRef,
+        identityApi: identityApiRef,
+      },
+      factory: ({ configApi, identityApi }) =>
+        new LightspeedProxyClient({ configApi, identityApi }),
+    }),
+  ],
 });
 
 export const LightspeedPage = lightspeedPlugin.provide(
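
Because the client is now resolved through lightspeedApiRef, component tests can swap in a stub instead of going through the proxy. A minimal sketch (not part of this commit; assumes @backstage/test-utils is available, and the stub shape is illustrative):

import React from 'react';

import { TestApiProvider } from '@backstage/test-utils';

import { LightspeedAPI, lightspeedApiRef } from './api/LightspeedProxyClient';
import { LightspeedPage } from './plugin';

// Hypothetical stub (illustration only): returns an empty async iterable
// in place of the OpenAI stream.
const stubLightspeedApi: LightspeedAPI = {
  createChatCompletions: async () => (async function* () {})() as any,
};

// Render the page against the stub rather than LightspeedProxyClient.
export const lightspeedPageWithStub = (
  <TestApiProvider apis={[[lightspeedApiRef, stubLightspeedApi]]}>
    <LightspeedPage />
  </TestApiProvider>
);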

yarn.lock

Lines changed: 0 additions & 66 deletions
Some generated files are not rendered by default.
