Skip to content

Commit

Permalink
feat(ai-anthropic): 添加新组件,支持 Anthropic 模型及其 API
Browse files Browse the repository at this point in the history
  • Loading branch information
muxiangqiu committed Jan 5, 2025
1 parent 445077c commit ec988b1
Show file tree
Hide file tree
Showing 42 changed files with 2,431 additions and 101 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
# Change

## 3.4.0

- feat(ai-anthropic): 添加新组件 `@celljs/ai-anthropic`,支持 Anthropic 模型及其 API

## 3.3.0

- feat(ai-ollama): 添加新组件 `@celljs/ai-ollama`,支持 Ollama 通用能力
Expand Down
10 changes: 10 additions & 0 deletions ai-packages/ai-anthropic/.eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
    extends: [
        // Shared build-time lint rules used by every Cell component package.
        require.resolve('@celljs/component/configs/build.eslintrc.json')
    ],
    parserOptions: {
        // Anchor the TypeScript project lookup to this package's directory
        // so linting works regardless of the working directory.
        tsconfigRootDir: __dirname,
        project: 'tsconfig.json'
    }
};
82 changes: 82 additions & 0 deletions ai-packages/ai-anthropic/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
# Cell - AI Anthropic Component

## 概览

AI Anthropic 模块是一个用于与 Anthropic API 交互的库,提供了生成聊天响应和嵌入向量的功能。通过简单易用的 API 接口,支持消息的创建、请求的发送和响应的处理。是 @celljs/ai-core 模块中所有模型服务接口抽象的一种实现。

## 特性

- 生成聊天响应
- 生成嵌入向量
- 支持流式响应
- 支持多种模型参数配置

## 安装

使用 npm 安装 AI Anthropic 模块:

```bash
npm install @celljs/ai-anthropic
```

或者使用 yarn:

```bash
yarn add @celljs/ai-anthropic
```

## 快速开始

以下是一个简单的示例,展示如何使用 AI Anthropic 模块生成聊天响应:

```typescript
import { AssistantMessage, PromptTemplate } from '@celljs/ai-core';
import { AnthropicChatModel, AnthropicModel } from '@celljs/ai-anthropic';
import { Component, Autowired } from '@celljs/core';

@Component()
export class AnthropicDemo {
@Autowired(AnthropicChatModel)
private anthropicChatModel: AnthropicChatModel;

@Autowired(PromptTemplate)
private promptTemplate: PromptTemplate;

/**
* Chat with Anthropic
*/
async chat() {
const prompt = await this.promptTemplate.create(
'Hello {name}',
{
chatOptions: { model: AnthropicModel.CLAUDE_3_5_SONNET },
variables: { name: 'Anthropic' }
}
);
const response = await this.anthropicChatModel.call(prompt);
console.log(response.result.output);
}

/**
* Stream chat response
*/
async stream() {
const prompt = await this.promptTemplate.create(
'Hello {name}',
{
chatOptions: { model: AnthropicModel.CLAUDE_3_5_SONNET },
variables: { name: 'Anthropic' }
}
);
const response$ = await this.anthropicChatModel.stream(prompt);
response$.subscribe({
next: response => console.log(response.result.output),
complete: () => console.log('Chat completed!')
});
}
}
```

## 许可证

本项目采用 MIT 许可证。
46 changes: 46 additions & 0 deletions ai-packages/ai-anthropic/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
{
"name": "@celljs/ai-anthropic",
"version": "3.3.0",
"description": "Anthropic models support",
"main": "lib/common/index.js",
"typings": "lib/common/index.d.ts",
"dependencies": {
"@celljs/ai-core": "3.3.0",
"@celljs/core": "3.3.0",
"@celljs/http": "3.3.0",
"class-transformer": "^0.5.1",
"rxjs": "^6.6.0",
"tslib": "^2.8.0"
},
"publishConfig": {
"access": "public"
},
"keywords": [
"cell-component"
],
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/cellbang/cell.git"
},
"bugs": {
"url": "https://github.com/cellbang/cell/issues"
},
"homepage": "https://github.com/cellbang/cell",
"files": [
"lib",
"src",
"cell.yml"
],
"scripts": {
"lint": "cell-component lint",
"build": "cell-component build",
"watch": "cell-component watch",
"clean": "cell-component clean",
"test": "cell-component test:js"
},
"devDependencies": {
"@celljs/component": "3.3.0"
},
"gitHead": "bbf636b21ea1a347affcc05a5f6f58b35bedef6d"
}
165 changes: 165 additions & 0 deletions ai-packages/ai-anthropic/src/common/api/anthropic-api.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
import { expect } from 'chai';
import { ChatRequest } from './chat-request';
import { RestOperations } from '@celljs/http';
import { createContainer } from '../test/test-container';
import { AnthropicAPI } from './api-protocol';
import { AnthropicModel } from './anthropic-model';
import { ContentBlock, ContentBlockType } from './content-block';
import { Role, AnthropicMessage } from './message';
import { ChatResponse } from './chat-response';
import '../index';

const container = createContainer();

describe('AnthropicAPIImpl', () => {
    let anthropicAPI: AnthropicAPI;

    /**
     * Canned assistant reply echoed by the mocked HTTP transport.
     * Built once per call so both the plain and streaming branches
     * serve an identical payload (the original duplicated this literal).
     */
    const mockChatResponse = (model: string) => ({
        id: 'msg_123',
        type: 'message',
        role: Role.ASSISTANT,
        content: [
            {
                type: ContentBlockType.TEXT,
                text: 'How can I assist you today?'
            }
        ],
        model,
        stop_reason: 'end_turn',
        stop_sequence: undefined,
        usage: {
            input_tokens: 10,
            output_tokens: 20
        }
    });

    /** Single user "Hello" message used by every request in this suite. */
    const userMessages = () => [
        new AnthropicMessage(
            [new ContentBlock(ContentBlockType.TEXT, undefined, 'Hello')],
            Role.USER
        )
    ];

    beforeEach(() => {
        // Stub the HTTP client: return a plain JSON body for non-streaming
        // requests, or a single SSE-style chunk in a ReadableStream when
        // the request sets `stream: true`.
        container.rebind(RestOperations).toConstantValue({
            post: async (url: string, data: any, config: any) => {
                const chatResponse = mockChatResponse(data.model);
                if (data.stream) {
                    // Fixed typo: was `steam`.
                    const stream = new ReadableStream({
                        start(controller) {
                            controller.enqueue('data: ' + JSON.stringify({
                                type: 'message_start',
                                data: chatResponse
                            }) + '\n\n');
                            controller.close();
                        }
                    });
                    return { data: stream };
                }
                return { data: chatResponse };
            }
        });
        anthropicAPI = container.get(AnthropicAPI);
    });

    describe('chat', () => {
        it('should return a ChatResponse', async () => {
            const chatRequest = ChatRequest.builder()
                .withModel(AnthropicModel.CLAUDE_3_HAIKU)
                .withMessages(userMessages())
                .withMaxTokens(1000)
                .withStream(false)
                .build();

            const response = await anthropicAPI.chat(chatRequest);
            expect(response).to.be.instanceOf(ChatResponse);
            expect(response.model).to.equal(AnthropicModel.CLAUDE_3_HAIKU);
        });

        it('should throw an error if stream mode is enabled', async () => {
            const chatRequest = ChatRequest.builder()
                .withModel(AnthropicModel.CLAUDE_3_HAIKU)
                .withMessages(userMessages())
                .withMaxTokens(1000)
                .withStream(true)
                .build();

            try {
                await anthropicAPI.chat(chatRequest);
                expect.fail('Should have thrown an error');
            } catch (e) {
                // `e` is `unknown` under strict settings; narrow before reading.
                expect((e as Error).message).to.equal('Request must set the stream property to false.');
            }
        });
    });

    describe('streamingChat', () => {
        it('should return an Observable of ChatResponse', async () => {
            const chatRequest = ChatRequest.builder()
                .withModel(AnthropicModel.CLAUDE_3_HAIKU)
                .withMessages(userMessages())
                .withMaxTokens(1000)
                .withStream(true)
                .build();

            const response$ = await anthropicAPI.streamingChat(chatRequest);
            // Await stream completion so assertion failures (or stream errors)
            // are reported to mocha instead of being lost inside the `next`
            // callback after the test has already resolved.
            await new Promise<void>((resolve, reject) => {
                response$.subscribe({
                    next: response => {
                        try {
                            expect(response).to.be.instanceOf(ChatResponse);
                            expect(response.model).to.equal(AnthropicModel.CLAUDE_3_HAIKU);
                        } catch (err) {
                            reject(err);
                        }
                    },
                    error: reject,
                    complete: resolve
                });
            });
        });

        it('should throw an error if stream mode is disabled', async () => {
            const chatRequest = ChatRequest.builder()
                .withModel(AnthropicModel.CLAUDE_3_HAIKU)
                .withMessages(userMessages())
                .withMaxTokens(1000)
                .withStream(false)
                .build();

            try {
                await anthropicAPI.streamingChat(chatRequest);
                expect.fail('Should have thrown an error');
            } catch (e) {
                // Narrow the `unknown` catch variable before reading `.message`.
                expect((e as Error).message).to.equal('Request must set the stream property to true.');
            }
        });
    });
});
Loading

0 comments on commit ec988b1

Please sign in to comment.