Gemma4
Collection
6 items • Updated
https://huggingface.co/rahul7star/gemma_4_lora
https://huggingface.co/spaces/rahul7star/Gemma-4-E4B-Uncensored-HauhauCS-Aggressive-Q5_K_P
# Use a pipeline as a high-level helper
from transformers import pipeline
# Multimodal pipeline: accepts chat-format messages mixing image URLs and text.
pipe = pipeline("image-text-to-text", model="rahul7star/gemma-4-finetune")
# A single user turn containing one image (fetched by URL) and one question.
messages = [
{
"role": "user",
"content": [
{"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},
{"type": "text", "text": "What animal is on the candy?"}
]
},
]
# Returns a list whose single item echoes the input and carries the
# generated assistant reply under 'generated_text' (see output below).
pipe(text=messages)
Example output:
[{'input_text': [{'role': 'user',
'content': [{'type': 'image',
'url': 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG'},
{'type': 'text', 'text': 'What animal is on the candy?'}]}],
'generated_text': [{'role': 'user',
'content': [{'type': 'image',
'url': 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG'},
{'type': 'text', 'text': 'What animal is on the candy?'}]},
{'content': "Based on the image, the candies appear to be **chocolate-coated candies** with designs on them.\n\nThe designs visible on the candies are **animals**. Specifically, the green and blue candies seem to have a design that resembles a **bee** or some kind of **insect/animal**. The orange candy also has a design that looks like an **insect** or perhaps a stylized **animal**.\n\nWithout a clearer, closer view of the specific details on each candy, it's difficult to name the exact animal with certainty, but they are clearly **animal-themed candies**.",
'role': 'assistant'}]}]
import gradio as gr
from transformers import pipeline
from PIL import Image
import requests
from io import BytesIO
# -----------------------------
# Load model (cached once)
# -----------------------------
# Multimodal image-text-to-text pipeline; model weights are downloaded
# from the Hugging Face Hub on first use and cached locally.
pipe = pipeline(
"image-text-to-text",
model="rahul7star/gemma-4-finetune"
)
# -----------------------------
# Helper: load image safely
# -----------------------------
def load_image(img):
    """Return *img* as an RGB PIL image.

    Accepts either a URL string (downloaded over HTTP) or an
    already-loaded PIL image object.
    """
    if isinstance(img, str):  # URL case
        # A timeout keeps the Gradio UI from hanging forever on a dead
        # host, and raise_for_status() surfaces HTTP errors instead of
        # handing an HTML error page to PIL.
        response = requests.get(img, timeout=30)
        response.raise_for_status()
        return Image.open(BytesIO(response.content)).convert("RGB")
    return img.convert("RGB")
# -----------------------------
# Inference function
# -----------------------------
def chat(image, text):
    """Run one image+text turn through the pipeline.

    Always returns a human-readable string so the result can be shown
    directly in the Gradio Textbox output component.
    """
    # `not text` also catches the empty string a cleared Textbox
    # produces (a Textbox never yields None for typed-then-deleted text).
    if image is None or not text:
        return "Please provide both image and text."
    image = load_image(image)
    messages = [
        {
            "role": "user",
            "content": [
                {"type": "image", "image": image},
                {"type": "text", "text": text}
            ]
        }
    ]
    try:
        output = pipe(text=messages)
        # The pipeline returns [{'generated_text': [...conversation...]}];
        # extract the assistant's final message instead of dumping the
        # raw list-of-dicts structure into the Textbox.
        generated = output[0].get("generated_text") if output else None
        if isinstance(generated, list) and generated:
            last = generated[-1]
            if isinstance(last, dict):
                return last.get("content", str(last))
        return str(output)
    except Exception as e:  # surface errors in the UI instead of crashing
        return f"Error: {str(e)}"
# -----------------------------
# Gradio UI
# -----------------------------
with gr.Blocks(title="Gemma 4 Vision Chat") as app:
    gr.Markdown("# 🧠Gemma-4 Image + Text Chat (Kaggle)")
    # Image upload and prompt sit side by side in one row.
    with gr.Row():
        img_box = gr.Image(type="pil", label="Upload Image")
        prompt_box = gr.Textbox(label="Prompt", placeholder="Ask something about the image...")
    run_btn = gr.Button("Run")
    result_box = gr.Textbox(label="Model Output")
    # Clicking Run feeds (image, prompt) into chat() and shows the reply.
    run_btn.click(chat, inputs=[img_box, prompt_box], outputs=result_box)
# -----------------------------
# Launch
# -----------------------------
app.launch()
import gradio as gr
from transformers import pipeline
# -----------------------------
# Load model (cached once)
# -----------------------------
# Text-only chat pipeline over the same fine-tuned checkpoint; weights
# are downloaded from the Hugging Face Hub on first use and cached.
pipe = pipeline(
"text-generation", # text chat mode (no image input)
model="rahul7star/gemma-4-finetune"
)
# -----------------------------
# Chat function
# -----------------------------
def chat(user_message, history):
if history is None:
history = []
# Convert Gradio history → messages format
messages = []
for h in history:
messages.append({"role": "user", "content": h[0]})
messages.append({"role": "assistant", "content": h[1]})
messages.append({"role": "user", "content": user_message})
try:
response = pipe(
messages,
max_new_tokens=256,
do_sample=True,
temperature=0.7
)
# Extract output safely
if isinstance(response, list):
output = response[0]["generated_text"]
else:
output = response
# Append to chat history
history.append((user_message, output))
return history, ""
except Exception as e:
history.append((user_message, f"Error: {str(e)}"))
return history, ""
# -----------------------------
# Gradio UI
# -----------------------------
with gr.Blocks(title="Gemma 4 Text Chat") as demo:
    gr.Markdown("# 💬 Gemma-4 Text Chat (Kaggle / HF Model)")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type your message here...")
    clear = gr.Button("Clear")
    # Holds the (user, assistant) tuple history between turns.
    state = gr.State([])
    # chat() returns (updated_history, "") -> refresh the Chatbot and
    # clear the input box. The history list in `state` is mutated in
    # place by chat(), so no separate state-write handler is needed
    # (the previous second msg.submit was a redundant identity no-op).
    msg.submit(chat, inputs=[msg, state], outputs=[chatbot, msg])
    # Reset both the visible transcript and the stored history.
    clear.click(lambda: ([], []), outputs=[chatbot, state])
# -----------------------------
# Launch
# -----------------------------
demo.launch()
This Gemma-4 model was trained 2× faster with Unsloth and Hugging Face's TRL library.
Base model
google/gemma-4-E2B-it