# Train
## Environment
```bash
cd scripts
python -m venv venv
source venv/bin/activate
pip install -U -r requirements.in
```
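Optional sanity check that the environment has a working, CUDA-capable PyTorch build before starting any training run:

```python
# Quick environment check: prints the installed torch version and whether
# a CUDA device is visible from this virtual environment.
import torch

print(torch.__version__)
print('CUDA available:', torch.cuda.is_available())
```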
## Train Tokenizer
```bash
time python -B train_tokenizer.py
```
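To spot-check the result, the tokenizer can be loaded back and asked to round-trip a sample string. The output path and file format below are assumptions (a Hugging Face `tokenizer.json`); adjust them to whatever `train_tokenizer.py` actually writes:

```python
# Round-trip check for the freshly trained tokenizer.
# NOTE: '../tokenizer/tokenizer.json' is an assumed location/format; point
# this at the file train_tokenizer.py produces.
from tokenizers import Tokenizer

tok = Tokenizer.from_file('../tokenizer/tokenizer.json')
enc = tok.encode('Hello, world!')
print(enc.ids)
print(tok.decode(enc.ids))
```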
## Pretrain
```bash
python -B prepare_pretrain_datasets.py
```
```bash
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config pretrain-model-0.yaml
litgpt convert_pretrained_checkpoint ../out/pretrain-0/final/ ../out/pretrain-0-final-checkpoint
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config pretrain-model-1.yaml
litgpt convert_pretrained_checkpoint ../out/pretrain-1/final/ ../out/pretrain-1-final-checkpoint
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config pretrain-model-2.yaml
litgpt convert_pretrained_checkpoint ../out/pretrain-2/final/ ../out/pretrain-2-final-checkpoint
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config pretrain-model-3.yaml
litgpt convert_pretrained_checkpoint ../out/pretrain-3/final/ ../out/pretrain-3-final-checkpoint
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config pretrain-model-4.yaml
litgpt convert_pretrained_checkpoint ../out/pretrain-4/final/ ../out/pretrain-4-final-checkpoint
# NOTE: model 5 is unused
# CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config pretrain-model-5.yaml
# litgpt convert_pretrained_checkpoint ../out/pretrain-5/final/ ../out/pretrain-5-final-checkpoint
```
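Before moving on to continued pretraining, it is worth confirming that every converted checkpoint directory was actually written. A minimal check, using the same relative paths as the commands above (run from `scripts/`):

```python
# Verify that each converted pretraining checkpoint directory exists.
from pathlib import Path

for i in range(5):
    ckpt = Path(f'../out/pretrain-{i}-final-checkpoint')
    status = 'ok' if ckpt.is_dir() else 'MISSING'
    print(f'{ckpt}: {status}')
```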
### Continued Pretraining
```bash
python -B prepare_contrain_datasets.py
```
```bash
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config contrain-model-0.yaml
litgpt convert_pretrained_checkpoint ../out/contrain-0/final/ ../out/contrain-0-final-checkpoint
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config contrain-model-1.yaml
litgpt convert_pretrained_checkpoint ../out/contrain-1/final/ ../out/contrain-1-final-checkpoint
```
## Chat with Pretrained Models
```bash
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt chat out/pretrain-0/final/
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt chat out/pretrain-1/final/
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt chat out/pretrain-2/final/
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt chat out/pretrain-3/final/
CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt chat out/pretrain-4/final/
# CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt chat out/pretrain-5/final/
```
<!-- OLD: previous single-model workflow, kept for reference -->
## Model
### Pretraining
```bash
litgpt pretrain --config ./pretrain-model.yaml
litgpt convert_from_litgpt out/pretrain/final/ out/converted_pretrain
cp config.json out/pretrain/final/
cp config.json out/converted_pretrain/
```
```python
import torch
from safetensors.torch import save_file

# Re-export the converted state dict in safetensors format.
state_dict = torch.load('out/converted_pretrain/model.pth', map_location='cpu')
save_file(state_dict, 'out/converted_pretrain/model.safetensors')
```
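To confirm the export succeeded, the safetensors file can be reloaded and its keys compared against the original state dict (the same check applies to the contrain export below):

```python
# Verify the safetensors export by reloading it and comparing keys.
import torch
from safetensors.torch import load_file

state_dict = torch.load('out/converted_pretrain/model.pth', map_location='cpu')
tensors = load_file('out/converted_pretrain/model.safetensors')
assert set(tensors) == set(state_dict), 'key mismatch between exports'
print(f'{len(tensors)} tensors exported')
```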
### Continued Pretraining
```bash
litgpt convert_pretrained_checkpoint out/pretrain/final/ out/pretrain_checkpoint/final/
cp config.json out/pretrain_checkpoint/final/
litgpt pretrain --config ./contrain-model.yaml
litgpt convert_from_litgpt out/contrain/final/ out/converted_contrain
cp config.json out/converted_contrain/
```
```python
import torch
from safetensors.torch import save_file
state_dict = torch.load('out/converted_contrain/model.pth', map_location='cpu')
save_file(state_dict, 'out/converted_contrain/model.safetensors')
```
```bash
cp out/converted_contrain/model.pth ./
cp out/converted_contrain/model.safetensors ./
```