
Merge branch 'refs/heads/main' into beta
# Conflicts:
#	models.json
#	server/src/manage-models/bind-class/binds/node-llama-cpp/node-llama-cpp-v2/node-llama-cpp-chat.ts
#	server/src/manage-models/bind-class/binds/node-llama-cpp/node-llama-cpp-v2/node-llama-cpp-v2.ts
ido-pluto committed May 10, 2024
2 parents 74367e2 + cc54125 commit fd86b5d
Showing 9 changed files with 62 additions and 15 deletions.
14 changes: 7 additions & 7 deletions docs/api.md
@@ -80,18 +80,18 @@ while (true) {
### Advanced API

This API is only available in Node.js.
+[demo](../examples/remotecall.js)

-```ts
-import {RemoteCatAI} from 'catai';
-import progress from 'progress-stream';
+```js
+import { RemoteCatAI } from "catai";

-const catai = new RemoteCatAI('ws://localhost:3000');
+const catai = new RemoteCatAI("ws://localhost:3000");

-const response = await catai.prompt('Write me 100 words story', token => {
-    progress.stdout.write(token);
+const response = await catai.prompt("Write me 100 words story", (token) => {
+    process.stdout.write(token);
});

console.log(`Total text length: ${response.length}`);

catai.close();

```
2 changes: 2 additions & 0 deletions docs/configuration.md
@@ -20,6 +20,8 @@ You can config the model by the following steps:

[LLamaChatPromptOptions](https://withcatai.github.io/node-llama-cpp/api/type-aliases/LLamaChatPromptOptions)

+You can edit the [systemPrompt](system-prompt.md) of the chat too.

3. Restart the server.
![Restart Button](./configuration/restart-button.png)
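For illustration, a hypothetical fragment of such a settings edit — `temperature` and `maxTokens` are keys documented on [LLamaChatPromptOptions](https://withcatai.github.io/node-llama-cpp/api/type-aliases/LLamaChatPromptOptions), but their exact placement inside CatAI's model settings is an assumption:

```json
{
    "temperature": 0.8,
    "maxTokens": 500
}
```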
15 changes: 15 additions & 0 deletions docs/system-prompt.md
@@ -0,0 +1,15 @@
# CatAI system prompt

According to [LlamaChatSessionOptions](https://withcatai.github.io/node-llama-cpp/api/type-aliases/LlamaChatSessionOptions),
it is possible to modify the system prompt of a chat.

This can be achieved by adding a `systemPrompt` key in `modelSettings`:

![CatAI systemPrompt settings](system-prompt/settings.png)

Save and restart to apply.

The chat then acts like a pirate, according to the `systemPrompt` you chose ;-)

![CatAI systemPrompt demo](system-prompt/demo.png)
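As a minimal illustration, the value might look like this — the pirate persona mirrors the demo above, and since this page does not show the surrounding structure of `modelSettings`, treat the exact placement as an assumption:

```json
{
    "systemPrompt": "You are a pirate. Answer every question in pirate speak."
}
```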
Binary file added docs/system-prompt/demo.png
Binary file added docs/system-prompt/settings.png
13 changes: 13 additions & 0 deletions examples/remotecall.js
@@ -0,0 +1,13 @@
import { RemoteCatAI } from "catai";

const catai = new RemoteCatAI("ws://localhost:3000");

catai.on("open", async () => {
    console.log("Connected");
    const response = await catai.prompt("Write me 100 words story", (token) => {
        process.stdout.write(token);
    });

    console.log(`Total text length: ${response.length}`);
    catai.close();
});
6 changes: 6 additions & 0 deletions package-lock.json

server/src/manage-models/bind-class/binds/node-llama-cpp/node-llama-cpp-v2/node-llama-cpp-v2.ts
@@ -48,4 +48,4 @@ export default class NodeLlamaCppV2 extends BaseBindClass<NodeLlamaCppOptions> {
            ...this.modelSettings.settings
        });
    }
-}
+}
25 changes: 18 additions & 7 deletions server/src/server/remote/remote-catai.ts
@@ -1,10 +1,11 @@
-import WebSocket, {ClientOptions} from 'ws';
-import {ClientRequestArgs} from 'http';
-import {ChatContext} from '../../manage-models/bind-class/chat-context.js';
+import WebSocket, { ClientOptions } from 'ws';
+import { ClientRequestArgs } from 'http';
+import { ChatContext } from '../../manage-models/bind-class/chat-context.js';

export default class RemoteCatAI extends ChatContext {
    private _ws: WebSocket;
    private _closed = false;
+    private _promiseOpen?: Promise<void>;

    /**
     * Connect to remote CatAI server, and use it as a chat context
@@ -28,10 +28,19 @@ export default class RemoteCatAI extends ChatContext {
            if (this._closed) return;
            this.emit('error', 'Connection closed: ' + code);
        });
+
+        this._ws.on('open', () => {
+            this.emit("open");
+        });
+
+        this._promiseOpen = new Promise((resolve, reject) => {
+            this.once('open', resolve);
+            this.once('error', reject);
+        });
    }

    private _onMessage(message: string) {
-        const {event, value} = JSON.parse(message);
+        const { event, value } = JSON.parse(message);
        switch (event) {
            case 'token':
                this.emit('token', value);
@@ -49,14 +49,15 @@ export default class RemoteCatAI extends ChatContext {
    }

    private _send(event: 'prompt' | 'abort', value: string) {
-        this._ws.send(JSON.stringify({event, value}));
+        this._ws.send(JSON.stringify({ event, value }));
    }

    abort(reason?: string): void {
        this._send('abort', reason || 'Aborted by user');
    }

-    prompt(prompt: string, onToken?: (token: string) => void): Promise<string | null> {
+    async prompt(prompt: string, onToken?: (token: string) => void): Promise<string | null> {
+        await this._promiseOpen;
        this._send('prompt', prompt);

        let buildText = '';
@@ -66,7 +77,7 @@ export default class RemoteCatAI extends ChatContext {
        };
        this.on('token', tokenEvent);

-        return new Promise<string | null>((resolve, reject) => {
+        return await new Promise<string | null>((resolve, reject) => {
            this.once('error', reject);
            this.once('modelResponseEnd', () => {
                this.off('token', tokenEvent);
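Net effect of this change: `prompt()` now awaits the connection internally before sending, so callers can presumably prompt right after construction instead of wrapping the call in an `open` listener as examples/remotecall.js does. A minimal sketch of that simplified pattern, matching the updated docs/api.md example above:

```js
import { RemoteCatAI } from "catai";

const catai = new RemoteCatAI("ws://localhost:3000");

// prompt() awaits the internal open promise before sending,
// so no explicit "open" listener is required here.
const response = await catai.prompt("Write me 100 words story", (token) => {
    process.stdout.write(token);
});

console.log(`Total text length: ${response.length}`);
catai.close();
```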
