import os

import gradio as gr
import openai
import requests
from bs4 import BeautifulSoup
from dotenv import load_dotenv

# Load environment variables from a .env file (expects OPENAI_API_KEY).
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")

# Cap how much scraped text is sent to the model so the prompt stays
# within gpt-3.5-turbo's context window on long articles.
_MAX_CONTENT_CHARS = 12000


def scrape_content(url):
    """Fetch *url* and return ``(title, content)``.

    ``title`` is the page <title> text, falling back to the URL itself when
    the page has no <title> tag. ``content`` is the text of all <p> elements
    joined with newlines — adjust the selectors for site-specific layouts.

    Raises:
        requests.RequestException: on network failures or HTTP error status.
    """
    # Timeout prevents the Gradio UI from hanging forever on a dead host.
    response = requests.get(url, timeout=15)
    # Surface 4xx/5xx explicitly instead of parsing an error page as news.
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')
    # find('title') returns None on malformed pages; guard before get_text().
    title_tag = soup.find('title')
    title = title_tag.get_text(strip=True) if title_tag else url
    paragraphs = soup.find_all('p')
    content = '\n'.join(para.get_text() for para in paragraphs)
    return title, content


def create_flashcards(content):
    """Generate UPSC-preparation flashcards from *content* via OpenAI.

    Content is truncated to ``_MAX_CONTENT_CHARS`` characters to avoid
    exceeding the model's context window on long articles.

    Returns:
        str: the model's flashcard text, stripped of surrounding whitespace.
    """
    prompt = f"Create flashcards for UPSC exam preparation from the following content:\n\n{content[:_MAX_CONTENT_CHARS]}\n\n"
    response = openai.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "You are a helpful assistant for UPSC exam preparation that creates flash cards when a news URL is given as input,The flash card should include category that the topic belongs to on the front side along with a question framed and the back side content will be the answer to the question."},
            {"role": "user", "content": prompt}
        ],
        max_tokens=500,
        temperature=0.7
    )
    return response.choices[0].message.content.strip()


def process_url(url):
    """Scrape *url* and return a formatted flashcard string for the UI.

    All expected failures (bad URL, network error, empty page, API error)
    are returned as readable messages rather than raising, so the Gradio
    textbox never shows a raw traceback.
    """
    # Reject empty and whitespace-only input before making any request.
    if not url or not url.strip():
        return "No URL provided."
    try:
        title, content = scrape_content(url.strip())
    except requests.RequestException as exc:
        return f"Failed to fetch URL: {exc}"
    if not content:
        return f"Title: {title}\n\nNo article text (<p> elements) found on the page."
    try:
        flashcards = create_flashcards(content)
    except Exception as exc:  # OpenAI client errors vary by version; report, don't crash the UI.
        return f"Failed to generate flashcards: {exc}"
    return f"Title: {title}\n\nFlashcards:\n{flashcards}"


# Gradio interface: one URL textbox in, plain text out.
iface = gr.Interface(
    fn=process_url,
    inputs=gr.Textbox(lines=2, placeholder="Enter URL here..."),
    outputs="text",
    title="UPSC Flashcard Generator",
    description="Enter a News Site URL to generate flashcard for UPSC preparation."
)

# Launch the interface (share=True exposes a public gradio.live link).
if __name__ == "__main__":
    iface.launch(share=True)