Mirror of https://github.com/cocktailpeanut/dalai (synced 2025-03-06 18:53:01 +01:00)
New API

- dalai <model_type> add <model_name>
- dalai serve

This commit is contained in:
parent 64a21d9b4b
commit c5349d066e
@ -31,7 +31,7 @@ class Alpaca {
      }
    }
  }
-  async get (...models) {
+  async add (...models) {
    for(let model of models) {
      const venv_path = path.join(this.root.home, "venv")
      const python_path = platform == "win32" ? path.join(venv_path, "Scripts", "python.exe") : path.join(venv_path, 'bin', 'python')
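For reference, a minimal standalone sketch of the per-platform virtualenv Python path resolution used in the hunk above (the `pythonPath` helper and the `~/dalai` home path are illustrative, not part of the project):

```
const os = require("os")
const path = require("path")

// Illustrative helper (not dalai's code): mirror the hunk above by picking
// Scripts/python.exe on Windows and bin/python elsewhere inside the venv folder.
function pythonPath(home) {
  const venv = path.join(home, "venv")
  return os.platform() === "win32"
    ? path.join(venv, "Scripts", "python.exe")
    : path.join(venv, "bin", "python")
}

console.log(pythonPath(path.join(os.homedir(), "dalai")))
```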
176 docs/README.md
@ -10,11 +10,7 @@ Run LLaMA and Alpaca on your computer.

## JUST RUN THIS

-<img src="alpa.png" class='round'>
-
-or
-
-<img src="llam.png" class='round'>
+<img src="dalaicli.png" class='round'>

## TO GET
@ -116,53 +112,36 @@ First install dalai:
npm install -g dalai
```

-### Step 3. Install Engines
+### Step 3. Add models

-Currently supported engines are `llama` and `alpaca`.
-
-#### Install LLaMA
+#### Add alpaca models

-To install `llama`, run:
+Currently alpaca only has the 7B model:

```
-dalai llama install
+dalai alpaca add 7B
```

-#### Install Alpaca
-
-To install `alpaca`, run:
-
-```
-dalai alpaca install
-```
-
-### Step 4. Get Models
-
-#### Download LLaMA models
+#### Add llama models

To download llama models, you can run:

```
-dalai llama get 7B
+dalai llama add 7B
```

or to download multiple models:

```
-dalai llama get 7B 13B
-```
-
-#### Download Alpaca models
-
-Currently alpaca only has the 7B model:
-
-```
-dalai alpaca get 7B
+dalai llama add 7B 13B
```

-### Step 3. Run Web UI
+### Step 4. Run Web UI

After everything has been installed, run the following command to launch the web UI server:
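The same steps can be scripted from Node; a hypothetical sketch that shells out to the globally installed CLI (the script itself is not part of dalai):

```
// Hypothetical automation of the steps above; assumes `npm install -g dalai`
// has already been run so the `dalai` binary is on PATH.
const { execSync } = require("child_process")

execSync("dalai alpaca add 7B", { stdio: "inherit" }) // download and convert the model
execSync("dalai serve", { stdio: "inherit" })         // blocks while the web UI runs
```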
@ -224,62 +203,40 @@ Install Node.js:

After both have been installed, open powershell and type `python` to see if the application exists. And also type `node` to see if the application exists as well.

-Once you've checked that they both exist, try the `npx dalai llama` command again.
+Once you've checked that they both exist, try the `npm install -g llama` command again.

---

-### Step 3. Install Engines
+### Step 3. Add models

-Currently supported engines are `llama` and `alpaca`.
-
-#### Install LLaMA
+#### Add alpaca models

-To install `llama`, run:
+Currently alpaca only has the 7B model:

```
-dalai llama install
+dalai alpaca add 7B
```

-#### Install Alpaca
-
-To install `alpaca`, run:
-
-```
-dalai alpaca install
-```

---

-### Step 4. Get Models
-
-#### Download LLaMA models
+#### Add llama models

To download llama models, you can run:

```
-dalai llama get 7B
+dalai llama add 7B
```

or to download multiple models:

```
-dalai llama get 7B 13B
+dalai llama add 7B 13B
```

-#### Download Alpaca models
-
-Currently alpaca only has the 7B model:
-
-```
-dalai alpaca get 7B
-```

---

-### Step 5. Run Web UI
+### Step 4. Run Web UI

After everything has been installed, run the following command to launch the web UI server:
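The `python` / `node` check described above can also be done from a script; a minimal sketch (the `onPath` helper is illustrative, not part of dalai):

```
// Illustrative check that python and node resolve on PATH before installing.
const { spawnSync } = require("child_process")

function onPath(cmd) {
  // --version exits with code 0 for both interpreters when they are installed
  return spawnSync(cmd, ["--version"], { shell: true }).status === 0
}

for (const cmd of ["python", "node"]) {
  console.log(`${cmd}: ${onPath(cmd) ? "found" : "missing"}`)
}
```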
@ -289,7 +246,6 @@ dalai serve

and open http://localhost:3000 in your browser. Have fun!

---
@ -324,53 +280,38 @@ First install dalai:
npm install -g dalai
```

-### Step 3. Install Engines
+---
+
+### Step 3. Add models

-Currently supported engines are `llama` and `alpaca`.
-
-#### Install LLaMA
+#### Add alpaca models

-To install `llama`, run:
+Currently alpaca only has the 7B model:

```
-dalai llama install
+dalai alpaca add 7B
```

-#### Install Alpaca
-
-To install `alpaca`, run:
-
-```
-dalai alpaca install
-```
-
-### Step 4. Get Models
-
-#### Download LLaMA models
+#### Add llama models

To download llama models, you can run:

```
-dalai llama get 7B
+dalai llama add 7B
```

or to download multiple models:

```
-dalai llama get 7B 13B
-```
-
-#### Download Alpaca models
-
-Currently alpaca only has the 7B model:
-
-```
-dalai alpaca get 7B
+dalai llama add 7B 13B
```

-### Step 3. Run Web UI
+### Step 4. Run Web UI

After everything has been installed, run the following command to launch the web UI server:
@ -381,64 +322,8 @@ dalai serve

and open http://localhost:3000 in your browser. Have fun!

---

-# Commands
-
-## 1. install
-
-### LLaMA
-
-Install the core engine for the model
-
-```
-dalai llama install
-```
-
-### Alpaca
-
-Install the core engine for the model
-
-```
-dalai alpaca install
-```
-
-## 2. get
-
-Download the full LLaMA model and convert and compress them
-
-### LLaMA
-
-Download one model:
-
-```
-dalai llama get 7B
-```
-
-Download multiple models:
-
-```
-dalai llama get 7B 13B
-```
-
-### Alpaca
-
-Currently only 7B available:
-
-```
-dalai alpaca get 7B
-```
-
-## 3. serve
-
-Start a dalai server and an API endpoint (powered by socket.io)
-
-```
-dalai serve
-```

---
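The removed section above notes that `dalai serve` exposes a socket.io endpoint on port 3000; a minimal client sketch (assumes `socket.io-client` is installed; the actual event names and payloads are documented in the README's API section and are not reproduced here):

```
// Minimal connection sketch only; see the README API section for the
// request/response events.
const io = require("socket.io-client")

const socket = io("http://localhost:3000")
socket.on("connect", () => console.log("connected to the dalai server"))
socket.on("disconnect", () => console.log("disconnected"))
```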
@ -704,7 +589,10 @@ await dalai.install("7B", "13B")

+As of `dalai@0.3.0` the recommended way to use dalai is through `npm install -g` (not the `npx` method)
+
-The simplest way to make sure you have the correct version is running:
+To make sure you update to the latest, first find the latest version at https://www.npmjs.com/package/dalai

+Let's say the new version is `0.3.0`. Then you just need to run:
+
```
npm install -g dalai@0.3.0
BIN docs/dalaicli.png (Normal file)
Binary file not shown. Size: 60 KiB
22 index.js
@ -18,6 +18,7 @@ const platform = os.platform()
const shell = platform === 'win32' ? 'powershell.exe' : 'bash';
const L = require("./llama")
const A = require("./alpaca")
+const exists = s => new Promise(r=>fs.access(s, fs.constants.F_OK, e => r(!e)))
class Dalai {
  constructor(home) {
    ////////////////////////////////////////////////////////////////////////////////////////////////////////////
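The new module-level `exists` helper wraps callback-style `fs.access` in a promise; shown standalone with an illustrative usage (the example paths are arbitrary):

```
const fs = require("fs")

// Resolves true when the path is accessible, false otherwise (never rejects).
const exists = s => new Promise(r => fs.access(s, fs.constants.F_OK, e => r(!e)));

(async () => {
  console.log(await exists(__filename))      // true: this file exists
  console.log(await exists("/no/such/path")) // false
})()
```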
@ -158,7 +159,8 @@ class Dalai {
      model: `models/${Model || "7B"}/ggml-model-q4_0.bin`,
    }

-    if (!fs.existsSync(path.resolve(this.home, Core, "models", Model))) {
+    let e = await exists(path.resolve(this.home, Core, "models", Model))
+    if (!e) {
      cb(`File does not exist: ${Model}. Try "dalai ${Core} get ${Model}" first.`)
      return
    }
@ -198,8 +200,18 @@ class Dalai {
      })
    }
  }
-  async get(core, ...models) {
-    let res = await this.cores[core].get(...models)
+  async add(core, ...models) {
+    // first install
+    let engine = this.cores[core]
+    let e = await exists(path.resolve(engine.home));
+    if (e) {
+      // already exists, no need to install
+    } else {
+      await this.install(core)
+    }
+
+    // next add the models
+    let res = await this.cores[core].add(...models)
    return res
  }
  async installed() {
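A hypothetical programmatic equivalent of `dalai alpaca add 7B` using the new `add()` method above (assumes the package's main export is the `Dalai` class from index.js and that the default home directory is acceptable):

```
// Sketch only; install-if-missing happens inside add(), as shown above.
const Dalai = require("dalai");

(async () => {
  const dalai = new Dalai()       // default home directory (assumption)
  await dalai.add("alpaca", "7B") // installs the alpaca engine if needed, then adds 7B
})()
```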
@ -218,7 +230,8 @@ class Dalai {

    console.log({ modelFolders })
    for(let modelFolder of modelFolders) {
-      if (fs.existsSync(path.resolve(modelsPath, modelFolder, 'ggml-model-q4_0.bin'))) {
+      let e = await exists(path.resolve(modelsPath, modelFolder, 'ggml-model-q4_0.bin'))
+      if (e) {
        modelNames.push(`${core}.${modelFolder}`)
        console.log("exists", modelFolder)
      }
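The folder scan above generalizes to a small standalone helper; a sketch (simplified; `installedModels` and its arguments are illustrative rather than dalai's API):

```
const fs = require("fs")
const path = require("path")

const exists = s => new Promise(r => fs.access(s, fs.constants.F_OK, e => r(!e)))

// List "<core>.<folder>" for every model folder that contains the quantized
// ggml weights, mirroring the loop in the hunk above.
async function installedModels(modelsPath, core) {
  const names = []
  for (const folder of await fs.promises.readdir(modelsPath)) {
    if (await exists(path.resolve(modelsPath, folder, "ggml-model-q4_0.bin"))) {
      names.push(`${core}.${folder}`)
    }
  }
  return names
}
```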
@ -233,7 +246,6 @@ class Dalai {
    *
    **************************************************************************************************************/
    let engine = this.cores[core]
-    let exists = s => new Promise(r=>fs.access(s, fs.constants.F_OK, e => r(!e)))
    let e = await exists(path.resolve(engine.home));
    if (e) {
      console.log("try fetching", engine.home, engine.url)
2 llama.js
@ -32,7 +32,7 @@ class LLaMA {
      }
    }
  }
-  async get (...models) {
+  async add (...models) {
    if (models.length === 0) models = ["7B"]
    for(let model of models) {
      if (!["7B", "13B", "30B", "65B"].includes(model)) {
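The allowed llama model names in the hunk above can be checked up front; a small illustrative sketch (the `validateModels` helper is not part of llama.js):

```
// Mirrors the guard above: default to 7B, reject names outside the known set.
const SUPPORTED = ["7B", "13B", "30B", "65B"]

function validateModels(models) {
  if (models.length === 0) models = ["7B"]
  const bad = models.filter(m => !SUPPORTED.includes(m))
  if (bad.length) throw new Error(`unsupported llama model(s): ${bad.join(", ")}`)
  return models
}

console.log(validateModels([]))            // [ '7B' ]
console.log(validateModels(["7B", "13B"])) // [ '7B', '13B' ]
```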
4 package-lock.json (generated)
@ -1,12 +1,12 @@
{
  "name": "dalai",
-  "version": "0.2.3",
+  "version": "0.2.6",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "dalai",
-      "version": "0.2.3",
+      "version": "0.2.6",
      "hasInstallScript": true,
      "license": "MIT",
      "dependencies": {
package.json
@ -1,6 +1,6 @@
{
  "name": "dalai",
-  "version": "0.2.4",
+  "version": "0.2.6",
  "description": "",
  "main": "index.js",
  "author": "cocktailpeanut",