AIMaster7 commited on
Commit
de40268
·
verified ·
1 Parent(s): 4761cce

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +28 -21
main.py CHANGED
@@ -4,14 +4,15 @@ import uuid
4
  import json
5
  import os
6
  import requests
7
- import httpx # New import for async requests
8
- import asyncio # New import for the background task
9
  from fastapi import FastAPI, HTTPException
10
  from pydantic import BaseModel
11
  from PIL import Image
12
  import logging
13
  from io import BytesIO
14
  import time
 
15
 
16
  # --- Configuration ---
17
  # Set up logging for debugging
@@ -25,8 +26,8 @@ PROXY_REFRESH_INTERVAL = 5 * 60 # 5 minutes in seconds
25
  # --- FastAPI App Initialization ---
26
  app = FastAPI(
27
  title="Image Generation API",
28
- description="An API to generate images with periodic proxy list refreshing.",
29
- version="1.3.0" # Version bumped for the new feature
30
  )
31
 
32
  # ----------- Proxy Management -----------
@@ -54,17 +55,14 @@ async def fetch_proxies():
54
  logger.warning("Proxy list was fetched but is empty.")
55
  PROXY_LIST = []
56
  else:
57
- # This is an atomic update, so it's safe in this asyncio context
58
  PROXY_LIST = proxies
59
  logger.info(f"Successfully refreshed proxy list. Loaded {len(PROXY_LIST)} proxies.")
60
 
61
  except httpx.RequestError as e:
62
  logger.error(f"Failed to fetch or refresh proxy list: {e}")
63
- # We don't clear the list on failure, to keep using old proxies if the source is temporarily down
64
  except Exception as e:
65
  logger.error(f"An unexpected error occurred during proxy fetch: {e}")
66
 
67
-
68
  async def schedule_proxy_refresh():
69
  """
70
  A background task that runs forever, refreshing the proxy list periodically.
@@ -85,17 +83,12 @@ async def startup_event():
85
  asyncio.create_task(schedule_proxy_refresh())
86
 
87
  def get_random_proxy():
88
- """
89
- Selects a random proxy from the global list and formats it for the requests library.
90
- Returns None if the proxy list is empty.
91
- """
92
  if not PROXY_LIST:
93
  return None
94
-
95
  proxy = random.choice(PROXY_LIST)
96
  return {"http": proxy, "https": proxy}
97
 
98
-
99
  # ----------- Models -----------
100
  class GenerationRequest(BaseModel):
101
  prompt: str
@@ -114,6 +107,17 @@ def generate_session_hash():
114
def generate_zerogpu_uuid():
    """Build a pseudo-random ZeroGPU token: ``n<4 digits>_<12 hex chars>_creitl``."""
    numeric_prefix = random.randint(1000, 9999)
    hex_fragment = uuid.uuid4().hex[:12]
    return f"n{numeric_prefix}_{hex_fragment}_creitl"
116
 
 
 
 
 
 
 
 
 
 
 
 
117
  # ----------- Helper Function to Upload to SnapZion -----------
118
  def upload_to_snapzion(file_content: BytesIO, file_name: str):
119
  token = os.getenv("SNAPZION_API_TOKEN")
@@ -141,10 +145,16 @@ async def generate_image(req: GenerationRequest):
141
  session_hash = req.session_hash if req.session_hash else generate_session_hash()
142
  uuid_token = generate_zerogpu_uuid()
143
 
 
 
 
 
 
144
  headers_post = {
145
  "accept": "*/*",
146
  "accept-language": "en-US,en;q=0.9",
147
  "content-type": "application/json",
 
148
  "origin": "https://heartsync-nsfw-uncensored.hf.space",
149
  "referer": "https://heartsync-nsfw-uncensored.hf.space/?not-for-all-audiences=true&__theme=system",
150
  "sec-ch-ua": '"Not/A)Brand";v="8", "Chromium";v="126", "Google Chrome";v="126"',
@@ -166,7 +176,7 @@ async def generate_image(req: GenerationRequest):
166
  }
167
 
168
  try:
169
- # Step 1: Join Queue (with proxy)
170
  join_proxy = get_random_proxy()
171
  proxy_ip_log = join_proxy['http'].split('@')[-1] if join_proxy else 'None'
172
  logger.info(f"Step 1: Joining queue with proxy: {proxy_ip_log}")
@@ -178,13 +188,13 @@ async def generate_image(req: GenerationRequest):
178
  join_response.raise_for_status()
179
  logger.info("Successfully joined queue. Now listening for SSE data.")
180
 
181
- # Step 2: Listen to queue via SSE (with another proxy)
182
  sse_proxy = get_random_proxy()
183
  proxy_ip_log = sse_proxy['http'].split('@')[-1] if sse_proxy else 'None'
184
  logger.info(f"Step 2: Listening to SSE stream with proxy: {proxy_ip_log}")
185
 
186
  sse_url = f"https://heartsync-nsfw-uncensored.hf.space/gradio_api/queue/data?session_hash={session_hash}"
187
- sse_headers = {"accept": "text/event-stream", "user-agent": headers_post["user-agent"]}
188
 
189
  with requests.get(sse_url, headers=sse_headers, stream=True, proxies=sse_proxy, timeout=300) as sse_response:
190
  sse_response.raise_for_status()
@@ -194,9 +204,7 @@ async def generate_image(req: GenerationRequest):
194
  if decoded_line.startswith("data:"):
195
  try:
196
  data = json.loads(decoded_line[5:].strip())
197
- msg = data.get("msg")
198
-
199
- if msg == "process_completed":
200
  logger.info("Process completed. Extracting image data.")
201
  output_list = data.get("output", {}).get("data", [])
202
  if not output_list or not isinstance(output_list, list) or len(output_list) == 0:
@@ -208,7 +216,7 @@ async def generate_image(req: GenerationRequest):
208
  if not image_url:
209
  raise HTTPException(status_code=500, detail="Image data received, but the URL is missing.")
210
 
211
- # Step 3: Download image (with a third proxy)
212
  download_proxy = get_random_proxy()
213
  proxy_ip_log = download_proxy['http'].split('@')[-1] if download_proxy else 'None'
214
  logger.info(f"Step 3: Downloading image with proxy: {proxy_ip_log}")
@@ -228,7 +236,6 @@ async def generate_image(req: GenerationRequest):
228
  logger.info(f"Image successfully uploaded to SnapZion: {snapzion_url}")
229
 
230
  return {"success": True, "image_url": snapzion_url}
231
-
232
  except (json.JSONDecodeError, IndexError): continue
233
 
234
  raise HTTPException(status_code=504, detail="Stream closed before generation could complete.")
 
4
  import json
5
  import os
6
  import requests
7
+ import httpx
8
+ import asyncio
9
  from fastapi import FastAPI, HTTPException
10
  from pydantic import BaseModel
11
  from PIL import Image
12
  import logging
13
  from io import BytesIO
14
  import time
15
+ from datetime import datetime, timedelta # New imports for date manipulation
16
 
17
  # --- Configuration ---
18
  # Set up logging for debugging
 
26
  # --- FastAPI App Initialization ---
27
# ----- FastAPI application object -----
app = FastAPI(
    title="Image Generation API",
    description="An API to generate images with periodic proxy refreshing and randomized headers.",
    version="1.4.0",
)
32
 
33
  # ----------- Proxy Management -----------
 
55
  logger.warning("Proxy list was fetched but is empty.")
56
  PROXY_LIST = []
57
  else:
 
58
  PROXY_LIST = proxies
59
  logger.info(f"Successfully refreshed proxy list. Loaded {len(PROXY_LIST)} proxies.")
60
 
61
  except httpx.RequestError as e:
62
  logger.error(f"Failed to fetch or refresh proxy list: {e}")
 
63
  except Exception as e:
64
  logger.error(f"An unexpected error occurred during proxy fetch: {e}")
65
 
 
66
  async def schedule_proxy_refresh():
67
  """
68
  A background task that runs forever, refreshing the proxy list periodically.
 
83
  asyncio.create_task(schedule_proxy_refresh())
84
 
85
def get_random_proxy():
    """Pick a random proxy from the global list, formatted for `requests`.

    Returns a ``{"http": ..., "https": ...}`` mapping using the same proxy
    URL for both schemes, or ``None`` when no proxies are currently loaded.
    """
    if PROXY_LIST:
        chosen = random.choice(PROXY_LIST)
        return {"http": chosen, "https": chosen}
    return None
91
 
 
92
  # ----------- Models -----------
93
  class GenerationRequest(BaseModel):
94
  prompt: str
 
107
  def generate_zerogpu_uuid():
108
  return f"n{random.randint(1000,9999)}_{uuid.uuid4().hex[:12]}_creitl"
109
 
110
def generate_random_future_date_header():
    """Return a random HTTP ``Date`` header value between now and 24h ahead.

    The timestamp is chosen uniformly in [now, now + 24h] and rendered as an
    RFC 7231 IMF-fixdate, e.g. ``Sun, 06 Nov 1994 08:49:37 GMT``.

    Returns:
        str: the formatted header value.
    """
    # Local imports keep the file-level import block unchanged.
    from datetime import timezone
    from email.utils import format_datetime

    # Random offset of up to 24 hours, in whole seconds.
    random_seconds = random.randint(0, 24 * 3600)
    # Timezone-aware "now": datetime.utcnow() is naive and deprecated (3.12+).
    future_time = datetime.now(timezone.utc) + timedelta(seconds=random_seconds)
    # format_datetime(usegmt=True) is locale-independent and RFC 7231 compliant,
    # unlike strftime('%a, %d %b ...'), whose day/month names follow the locale.
    return format_datetime(future_time, usegmt=True)
119
+
120
+
121
  # ----------- Helper Function to Upload to SnapZion -----------
122
  def upload_to_snapzion(file_content: BytesIO, file_name: str):
123
  token = os.getenv("SNAPZION_API_TOKEN")
 
145
  session_hash = req.session_hash if req.session_hash else generate_session_hash()
146
  uuid_token = generate_zerogpu_uuid()
147
 
148
+ # --- HEADER CUSTOMIZATION ---
149
+ # Generate a new random Date header for each request
150
+ random_date_header = generate_random_future_date_header()
151
+ logger.info(f"Using randomized Date header for this request: {random_date_header}")
152
+
153
  headers_post = {
154
  "accept": "*/*",
155
  "accept-language": "en-US,en;q=0.9",
156
  "content-type": "application/json",
157
+ "Date": random_date_header, # <-- Here is the new randomized header
158
  "origin": "https://heartsync-nsfw-uncensored.hf.space",
159
  "referer": "https://heartsync-nsfw-uncensored.hf.space/?not-for-all-audiences=true&__theme=system",
160
  "sec-ch-ua": '"Not/A)Brand";v="8", "Chromium";v="126", "Google Chrome";v="126"',
 
176
  }
177
 
178
  try:
179
+ # Step 1: Join Queue
180
  join_proxy = get_random_proxy()
181
  proxy_ip_log = join_proxy['http'].split('@')[-1] if join_proxy else 'None'
182
  logger.info(f"Step 1: Joining queue with proxy: {proxy_ip_log}")
 
188
  join_response.raise_for_status()
189
  logger.info("Successfully joined queue. Now listening for SSE data.")
190
 
191
+ # Step 2: Listen to queue via SSE
192
  sse_proxy = get_random_proxy()
193
  proxy_ip_log = sse_proxy['http'].split('@')[-1] if sse_proxy else 'None'
194
  logger.info(f"Step 2: Listening to SSE stream with proxy: {proxy_ip_log}")
195
 
196
  sse_url = f"https://heartsync-nsfw-uncensored.hf.space/gradio_api/queue/data?session_hash={session_hash}"
197
+ sse_headers = {"accept": "text/event-stream", "user-agent": headers_post["user-agent"], "Date": random_date_header}
198
 
199
  with requests.get(sse_url, headers=sse_headers, stream=True, proxies=sse_proxy, timeout=300) as sse_response:
200
  sse_response.raise_for_status()
 
204
  if decoded_line.startswith("data:"):
205
  try:
206
  data = json.loads(decoded_line[5:].strip())
207
+ if data.get("msg") == "process_completed":
 
 
208
  logger.info("Process completed. Extracting image data.")
209
  output_list = data.get("output", {}).get("data", [])
210
  if not output_list or not isinstance(output_list, list) or len(output_list) == 0:
 
216
  if not image_url:
217
  raise HTTPException(status_code=500, detail="Image data received, but the URL is missing.")
218
 
219
+ # Step 3: Download image
220
  download_proxy = get_random_proxy()
221
  proxy_ip_log = download_proxy['http'].split('@')[-1] if download_proxy else 'None'
222
  logger.info(f"Step 3: Downloading image with proxy: {proxy_ip_log}")
 
236
  logger.info(f"Image successfully uploaded to SnapZion: {snapzion_url}")
237
 
238
  return {"success": True, "image_url": snapzion_url}
 
239
  except (json.JSONDecodeError, IndexError): continue
240
 
241
  raise HTTPException(status_code=504, detail="Stream closed before generation could complete.")