Skip to content

Commit

Permalink
feat: chat 命令
Browse files Browse the repository at this point in the history
  • Loading branch information
crazyurus committed May 11, 2024
1 parent c3139e4 commit 19f6288
Show file tree
Hide file tree
Showing 12 changed files with 143 additions and 60 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/publish.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@ jobs:
registry-url: 'https://registry.npmjs.org'
- name: Install dependencies
run: |
npm install [email protected] -g
pnpm install --frozen-lockfile
corepack install
corepack pnpm install --frozen-lockfile
- name: Build
run: npm run build
- name: Publish
Expand Down
9 changes: 0 additions & 9 deletions .prettierrc.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,6 @@ module.exports = {
bracketSameLine: true,
useTabs: false,
plugins: [
'@ianvs/prettier-plugin-sort-imports',
'prettier-plugin-packagejson'
],
importOrder: [
'<BUILTIN_MODULES>',
'<THIRD_PARTY_MODULES>',
'',
'^@/(.*)$',
'',
'^[./]'
]
};
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,12 @@ Finally, tell us the content that needs to be detected and we can proceed.
$ aigc-detector detect [CONTENT]
```

Also, you can chat with the large language model using the following command:

```sh
$ aigc-detector chat
```

For more ways to use `aigc-detector`, please refer to the help command.

```sh
Expand Down
5 changes: 2 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "aigc-detector",
"version": "1.0.2",
"version": "1.0.3",
"description": "Detect if content is generated by AI",
"keywords": [
"aigc",
Expand Down Expand Up @@ -58,7 +58,6 @@
"@babel/core": "^7.24.4",
"@commitlint/cli": "^19.2.2",
"@commitlint/config-conventional": "^19.2.2",
"@ianvs/prettier-plugin-sort-imports": "^4.2.1",
"@oclif/prettier-config": "^0.2.1",
"@types/inquirer": "^9.0.7",
"@types/node": "^18.11.9",
Expand All @@ -77,5 +76,5 @@
"engines": {
"node": ">=18.0.0"
},
"packageManager": "pnpm@9.0.6"
"packageManager": "pnpm@9.1.0"
}
38 changes: 7 additions & 31 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

94 changes: 94 additions & 0 deletions src/cli/commands/chat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
import { AIMessage, HumanMessage } from '@langchain/core/messages';
import chalk from 'chalk';
import { ChatMessageHistory } from 'langchain/stores/message/in_memory';
import readline from 'node:readline';

import type { Platform } from '../../platform';

import { AIGC } from '../../core';
import BaseCommand from '../extends/command';

// Identifies which side of the conversation a message belongs to.
// The string values double as stable keys for the lookup maps below.
enum PromptRole {
  AI = 'ai',
  USER = 'user'
}

// Maps each role to the LangChain message class used to record its turns
// in the chat history (AIMessage for the model, HumanMessage for the user).
const promptMessageMap = {
  [PromptRole.AI]: AIMessage,
  [PromptRole.USER]: HumanMessage
};
// Terminal presentation for each role's prompt prefix: the chalk color
// method name and the label rendered inside "[...] ". Declared `as const`
// so `color` stays a literal type and indexes chalk without a cast.
const promptRoleDisplayMap = {
  [PromptRole.AI]: {
    color: 'yellow',
    name: 'AI'
  },
  [PromptRole.USER]: {
    color: 'green',
    name: 'You'
  }
} as const;

// Shared readline interface for prompting the user on stdin/stdout.
// NOTE(review): created at module load, so importing this module keeps
// stdin open; the interface is never closed explicitly — confirm this is
// acceptable for the CLI's process lifecycle.
const reader = readline.createInterface({
  input: process.stdin,
  output: process.stdout
});

class ChatCommand extends BaseCommand {
  static args = {};

  static description = 'Chat with the LLM';

  static examples = [];

  static flags = {};

  // The assistant's pending reply; seeded with the opening greeting and
  // replaced with the model's answer after every turn.
  private lastMessage = 'How can I help you today?';

  // Full transcript of the conversation, replayed to the model each turn.
  private messages = new ChatMessageHistory();

  /**
   * Runs an interactive chat loop: prints the assistant's last reply,
   * reads the user's next line, and sends it (together with the prior
   * transcript) to the configured platform. Shows the help screen instead
   * when no configuration has been saved. Loops until interrupted.
   */
  async run(): Promise<void> {
    const config = await this.configManager.getAll();

    if (Object.keys(config).length === 0) {
      this.showHelp();

      return;
    }

    const detector = new AIGC({
      apiKey: config.apiKey,
      platform: config.platform as unknown as Platform
    });
    const userPrefix = this.getDisplayContent(PromptRole.USER);

    // eslint-disable-next-line no-constant-condition
    while (true) {
      const aiTurn = await this.addMessage(PromptRole.AI, this.lastMessage);
      const userInput = await this.getUserMessage(`${aiTurn}\n${userPrefix}`);
      // The current user turn travels as `content`; only earlier turns go
      // through the history parameter, so it is recorded afterwards.
      const answer = await detector.chat(userInput, await this.messages.getMessages());

      await this.addMessage(PromptRole.USER, userInput);
      this.lastMessage = answer;
    }
  }

  /**
   * Records one turn in the transcript and returns the colorized
   * "[Role] content" string ready for display.
   */
  private async addMessage(role: PromptRole, content: string): Promise<string> {
    const MessageClass = promptMessageMap[role];

    await this.messages.addMessage(new MessageClass(content));

    return this.getDisplayContent(role) + content;
  }

  /**
   * Builds the colorized "[Name] " prefix for the given role.
   */
  private getDisplayContent(role: PromptRole): string {
    const { color, name } = promptRoleDisplayMap[role];

    return chalk[color](`[${name}] `);
  }

  /**
   * Prints the prompt text and resolves with the user's next input line.
   */
  private getUserMessage(prompt: string): Promise<string> {
    return new Promise<string>((resolve) => {
      reader.question(prompt, resolve);
    });
  }
}

export default ChatCommand;
3 changes: 2 additions & 1 deletion src/cli/commands/detect.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@ import { Args, Flags } from '@oclif/core';
import chalk from 'chalk';
import ora from 'ora';

import type { Platform } from '../../platform';

import { AIGC } from '../../core';
import { type Platform } from '../../platform';
import BaseCommand from '../extends/command';

class DetectCommand extends BaseCommand {
Expand Down
16 changes: 15 additions & 1 deletion src/core/index.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
import type { BaseMessage } from '@langchain/core/messages';

import { PROMPT } from '../const';
import { getPlatform, type Platform } from '../platform';
import { getEnvConfig } from './env';
import { getDetectResult } from './utils';
Expand All @@ -19,9 +22,20 @@ export class AIGC {
this.platform = (env.platform as unknown as Platform) || options.platform;
}

public async chat(content: string, messages: BaseMessage[]) {
const platform = getPlatform(this.platform);
const result = await platform.invoke(
'You are a helpful assistant. Answer all questions to the best of your ability.',
{ content, messages },
this.apiKey
);

return result;
}

public async detect(content: string): Promise<ReturnType<typeof getDetectResult>> {
const platform = getPlatform(this.platform);
const result = await platform.invoke(content, this.apiKey);
const result = await platform.invoke(PROMPT, { content }, this.apiKey);

return getDetectResult(result);
}
Expand Down
19 changes: 9 additions & 10 deletions src/platform/base.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
import { type BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';

import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts';
import { ChatOpenAI } from '@langchain/openai';
import { LLMChain } from 'langchain/chains';

import { PROMPT } from '../const';
type InvokeParameter = Parameters<InstanceType<typeof LLMChain>['invoke']>[0];

abstract class Platform {
protected temperature = 0.7;
Expand All @@ -20,22 +21,20 @@ abstract class Platform {
});
}

protected getPrompt(): ChatPromptTemplate {
protected getPrompt(prompt: string): ChatPromptTemplate {
return ChatPromptTemplate.fromMessages([
SystemMessagePromptTemplate.fromTemplate(PROMPT),
SystemMessagePromptTemplate.fromTemplate(prompt),
HumanMessagePromptTemplate.fromTemplate('Here is what needs to be evaluated: \n{content}')
]);
}

public async invoke(content?: string, apiKey?: string): Promise<string> {
const prompt = this.getPrompt();
public async invoke(prompt: string, params: InvokeParameter, apiKey?: string): Promise<string> {
const promptTemplate = this.getPrompt(prompt);
const chain = new LLMChain({
llm: this.getChatModel(apiKey),
prompt
});
const result = await chain.invoke({
content
prompt: promptTemplate
});
const result = await chain.invoke(params);

return result.text;
}
Expand Down
3 changes: 2 additions & 1 deletion src/platform/minimax.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';

import { ChatMinimax } from '@langchain/community/chat_models/minimax';
import { type BaseLanguageModel } from '@langchain/core/language_models/base';

import Platform from './base';

Expand Down
3 changes: 2 additions & 1 deletion src/platform/tongyi.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';

import { ChatAlibabaTongyi } from '@langchain/community/chat_models/alibaba_tongyi';
import { type BaseLanguageModel } from '@langchain/core/language_models/base';

import Platform from './base';

Expand Down
3 changes: 2 additions & 1 deletion src/platform/zhipu.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';

import { ChatZhipuAI } from '@langchain/community/chat_models/zhipuai';
import { type BaseLanguageModel } from '@langchain/core/language_models/base';

import Platform from './base';

Expand Down

0 comments on commit 19f6288

Please sign in to comment.