forked from awslabs/generative-ai-cdk-constructs
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathindex.ts
120 lines (110 loc) · 4.46 KB
/
index.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
import * as path from 'path';
import * as lambda from 'aws-cdk-lib/aws-lambda';
import { Construct } from 'constructs';
import { Layer } from '../../../common/helpers/python-lambda-layer-helper';
import { AdapterProps } from '../../../common/props/AdapterProps';
import { LangchainProps } from '../../../common/props/LangchainProps';
/**
 * Properties for the LangchainCommonDepsLayer construct.
 */
export interface LangchainLayerProps extends LangchainProps {
/**
 * Required. Lambda function runtime compatible with this Layer.
 */
readonly runtime: lambda.Runtime;
/**
 * Required. Lambda function architecture compatible with this Layer.
 */
readonly architecture: lambda.Architecture;
/**
 * Optional. Add '--upgrade' to pip install requirements.txt.
 * In case of a LangchainCommonLayer, this parameter is not used.
 *
 * @default - false
 */
readonly autoUpgrade?: boolean;
/**
 * Optional. Additional python pip libraries to install alongside this langchain layer.
 *
 * @default - none
 */
readonly additionalPackages?: string[];
/** Optional. Local compute will be used when installing requirements.txt.
 * By default, a docker container will be spun up to install requirements. To override this behavior, use the python alias string of `python` or `python3`.
 * The string value will be the python alias used to install requirements.
 *
 * @default - none
 */
readonly local?: 'python' | 'python3';
}
/**
* @summary The LangchainCommonDepsLayer class.
*/
export class LangchainCommonDepsLayer extends Construct {
/**
* Returns the instance of lambda.LayerVersion created by the construct
*/
public readonly layer: lambda.LayerVersion;
/**
* @summary This construct creates a lambda layer loaded with relevant libraries to run genai applications. Libraries include boto3, botocore, requests, requests-aws4auth, langchain, opensearch-py and openai.
* @param {cdk.App} scope - represents the scope for all the resources.
* @param {string} id - this is a a scope-unique id.
* @param {LangchainLayerProps} props - user provided props for the construct.
* @since 0.0.0
* @access public
*/
constructor(scope: Construct, id: string, props: LangchainLayerProps) {
super(scope, id);
const layer = new Layer(this, 'Langchain Layer', {
path: path.join(__dirname, '../../../../layers/langchain-common-deps'),
description: 'Dependencies to build gen ai applications with the langchain client',
...props,
});
this.layer = layer.layer;
}
}
/**
 * @summary LangchainCommonLayer allows developers to instantiate a llm client adapter on bedrock, sagemaker or openai following best practice.
 *
 * @example
 * import boto3
 * from genai_core.adapters.registry import registry
 *
 * adapter = registry.get_adapter(f"{provider}.{model_id}")
 * bedrock_client = boto3.client('bedrock-runtime')
 */
export class LangchainCommonLayer extends Construct {
  /**
   * Returns the instance of lambda.LayerVersion created by the construct
   */
  public readonly layer: lambda.LayerVersion;

  /**
   * @summary This construct allows developers to instantiate a llm client adapter on bedrock, sagemaker or openai following best practice.
   * @param {cdk.App} scope - represents the scope for all the resources.
   * @param {string} id - a scope-unique id.
   * @param {AdapterProps} props - user provided props for the construct.
   * @since 0.0.0
   * @access public
   */
  constructor(scope: Construct, id: string, props: AdapterProps) {
    super(scope, id);
    // Unlike the deps layer, this one wraps lambda.LayerVersion directly —
    // no pip install step is needed for the adapter utilities.
    this.layer = new lambda.LayerVersion(this, 'Model Adapter Layer', {
      code: lambda.Code.fromAsset(path.join(__dirname, '../../../../layers/langchain-common-layer')),
      description: 'Utilities to instantiate a llm client adapter. Adapters include bedrock, sagemaker, and openai',
      ...props,
    });
  }
}