# src/colab/solar_10_7b_instruct_v1_0.py

# -*- coding: utf-8 -*-
"""SOLAR-10.7B-Instruct-v1.0.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1N2nYspl6F3eohxe-KiyaYzYd4aMeG5nd

Model card: https://huggingface.co/upstage/SOLAR-10.7B-Instruct-v1.0
"""

# Install `accelerate` (required for device_map="auto" below). The original
# Colab cell used the IPython-only "!" shell escape, which is a SyntaxError
# in a plain .py file; this subprocess form works in both environments.
import subprocess
import sys

subprocess.run([sys.executable, "-m", "pip", "install", "accelerate"], check=True)

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load Upstage's SOLAR-10.7B-Instruct tokenizer and model.
tokenizer = AutoTokenizer.from_pretrained("Upstage/SOLAR-10.7B-Instruct-v1.0")
model = AutoModelForCausalLM.from_pretrained(
    "Upstage/SOLAR-10.7B-Instruct-v1.0",
    device_map="auto",          # let accelerate place weights on available devices
    torch_dtype=torch.float16,  # half precision halves the memory footprint
)

# Single-turn conversation in the role/content schema that
# apply_chat_template expects.
conversation = [{'role': 'user', 'content': 'Hello?'}]

# Render the chat into the model's prompt template; add_generation_prompt
# appends the assistant cue so the model responds rather than continuing
# the user turn.
prompt = tokenizer.apply_chat_template(
    conversation, tokenize=False, add_generation_prompt=True
)

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
# Fix: the original `max_length=4096` bounds prompt + completion combined,
# so a long prompt silently eats into the generation budget.
# `max_new_tokens` bounds only the newly generated tokens.
outputs = model.generate(**inputs, use_cache=True, max_new_tokens=4096)
# Fix: decode without special tokens so BOS/EOS and chat-template markers
# do not leak into the printed answer.
output_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(output_text)