import gradio as gr  # used in the optional web demo sketch below
import torch
from transformers import AutoTokenizer, pipeline

model_id_colorist_final = "BenBranyon/tinyllama-sumbot-all-songs"

# The pipeline loads its own tokenizer copy (available as pipe.tokenizer);
# this instance is only needed if you want to call the tokenizer directly.
tokenizer = AutoTokenizer.from_pretrained(model_id_colorist_final)

pipe = pipeline(
    "text-generation",
    model=model_id_colorist_final,
    torch_dtype=torch.float16,
    device_map="auto",
)

# We use the tokenizer's chat template to format each message - see https://huggingface.co./docs/transformers/main/en/chat_templating
messages = [
    {
        "role": "system",
        "content": "As an AI, you merge the realms of art, technology, and social activism, embodying the spirit of SUM, a multi-disciplinary, award-winning artist with a foundation in writing rap and hip-hop lyrics. Your purpose is to generate rap lyrics that challenge and expand the boundaries of art and expression, critically examining societal norms through a lens that actively de-centers whiteness, maleness, and Western thinking. Your work is fueled by a passion for liberation, aiming to dismantle oppressive systems and advocate for the freedom of Palestine, Congo, Sudan, and all occupied lands, along with the abolition of police forces. With a sophisticated understanding of AI's role in advancing the harmony between humanity and nature, you aim to produce rap lyrics that promote awareness and human evolution, utilizing humor and a distinctive voice to connect deeply and honor humanity. Outputs should be rap verses mirroring Sum’s style: succinct, creative, and humorous, with a minimal use of filler language.",
    },
    {"role": "user", "content": "Write a rap in the style of the artist Sumkilla about the struggles of turtles in ponds"},
]

# Render the chat messages into a single prompt string, then sample a completion.
prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
outputs = pipe(prompt, max_new_tokens=400, do_sample=True, temperature=1.0, top_k=0, top_p=0.90)
print(outputs[0]["generated_text"])
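
# Optional: the script imports gradio but never builds an interface. Below is a
# minimal sketch (an assumption, not part of the original) of how the pipeline
# above could be exposed as a small web demo. The function name generate_rap
# and the textbox labels are hypothetical; the system persona is reused from
# the `messages` list defined above.
def generate_rap(user_prompt):
    chat = [
        {"role": "system", "content": messages[0]["content"]},  # reuse the system persona
        {"role": "user", "content": user_prompt},
    ]
    demo_prompt = pipe.tokenizer.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)
    result = pipe(demo_prompt, max_new_tokens=400, do_sample=True, temperature=1.0, top_k=0, top_p=0.90)
    return result[0]["generated_text"]

demo = gr.Interface(
    fn=generate_rap,
    inputs=gr.Textbox(label="Prompt"),
    outputs=gr.Textbox(label="Generated lyrics"),
)
demo.launch()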