---
license: other
license_name: deepcode-ai
license_link: LICENSE
---

## 3. Quick Start

### Installation

In a `Python >= 3.8` environment, install the necessary dependencies by running the following commands:

```shell
git clone https://github.com/deepcode-ai/DeepCode-VL
cd DeepCode-VL
pip install -e .
```
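
If the editable install succeeded, the package should be importable from Python. A minimal sanity check, assuming the `deepcode_vl` package and class names used in the inference example below:

```python
# Minimal post-install sanity check (assumes the deepcode_vl package layout
# used in the inference example below; adjust names if your checkout differs).
import torch
from deepcode_vl.models import VLChatProcessor, MultiModalityCausalLM

print("deepcode_vl imported OK; CUDA available:", torch.cuda.is_available())
```
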
### Simple Inference Example

```python
import torch
from transformers import AutoModelForCausalLM

from deepcode_vl.models import VLChatProcessor, MultiModalityCausalLM
from deepcode_vl.utils.io import load_pil_images

# specify the path to the model
model_path = "deepcode-ai/deepcode-base"
vl_chat_processor: VLChatProcessor = VLChatProcessor.from_pretrained(model_path)
tokenizer = vl_chat_processor.tokenizer

vl_gpt: MultiModalityCausalLM = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True)
vl_gpt = vl_gpt.to(torch.bfloat16).cuda().eval()

conversation = [
    {
        "role": "User",
        "content": "<image_placeholder>Describe each stage of this image.",
        "images": ["./images/training_pipelines.png"]
    },
    {
        "role": "Assistant",
        "content": ""
    }
]

# load images and prepare for inputs
pil_images = load_pil_images(conversation)
prepare_inputs = vl_chat_processor(
    conversations=conversation,
    images=pil_images,
    force_batchify=True
).to(vl_gpt.device)

# run image encoder to get the image embeddings
inputs_embeds = vl_gpt.prepare_inputs_embeds(**prepare_inputs)

# run the model to get the response
outputs = vl_gpt.language_model.generate(
    inputs_embeds=inputs_embeds,
    attention_mask=prepare_inputs.attention_mask,
    pad_token_id=tokenizer.eos_token_id,
    bos_token_id=tokenizer.bos_token_id,
    eos_token_id=tokenizer.eos_token_id,
    max_new_tokens=512,
    do_sample=False,
    use_cache=True
)

answer = tokenizer.decode(outputs[0].cpu().tolist(), skip_special_tokens=True)
print(f"{prepare_inputs['sft_format'][0]}", answer)
```
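
To ask a follow-up question about the same image, one option is to rebuild `conversation` with the previous answer filled in and repeat the preparation and generation steps above. This is a minimal sketch, assuming the processor accepts multi-turn histories in the same list format (the follow-up prompt is illustrative):

```python
# Hedged sketch: multi-turn follow-up, assuming the conversation format shown
# above also works with more than one User/Assistant round.
conversation = [
    {
        "role": "User",
        "content": "<image_placeholder>Describe each stage of this image.",
        "images": ["./images/training_pipelines.png"]
    },
    {"role": "Assistant", "content": answer},  # reply generated above
    {"role": "User", "content": "Summarize that description in one sentence."},
    {"role": "Assistant", "content": ""}       # empty slot for the new reply
]

# then repeat: load_pil_images -> vl_chat_processor -> prepare_inputs_embeds
# -> language_model.generate -> tokenizer.decode, exactly as in the example.
```
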
### CLI Chat

```bash
python cli_chat.py --model_path "deepcode-ai/deepcode-base"

# or local path
python cli_chat.py --model_path "local model path"
```