Update handler.py
Browse files — handler.py (+2 −3)
handler.py
CHANGED
|
@@ -62,13 +62,12 @@ class EndpointHandler():
|
|
| 62 |
use_safetensors=True,
|
| 63 |
).to("cuda")
|
| 64 |
|
| 65 |
-
self.pidinet = PidiNetDetector.from_pretrained("lllyasviel/Annotators").to("cuda")
|
| 66 |
-
|
| 67 |
self.pipeline.unet = torch.compile(self.pipeline.unet, mode="reduce-overhead", fullgraph=True)
|
| 68 |
self.refiner.unet = torch.compile(self.refiner.unet, mode="reduce-overhead", fullgraph=True)
|
| 69 |
self.pipeline.enable_model_cpu_offload()
|
| 70 |
self.refiner.enable_model_cpu_offload()
|
| 71 |
-
|
|
|
|
| 72 |
|
| 73 |
def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
|
| 74 |
"""
|
|
|
|
| 62 |
use_safetensors=True,
|
| 63 |
).to("cuda")
|
| 64 |
|
|
|
|
|
|
|
| 65 |
self.pipeline.unet = torch.compile(self.pipeline.unet, mode="reduce-overhead", fullgraph=True)
|
| 66 |
self.refiner.unet = torch.compile(self.refiner.unet, mode="reduce-overhead", fullgraph=True)
|
| 67 |
self.pipeline.enable_model_cpu_offload()
|
| 68 |
self.refiner.enable_model_cpu_offload()
|
| 69 |
+
|
| 70 |
+
self.pidinet = PidiNetDetector.from_pretrained("lllyasviel/Annotators").to("cuda")
|
| 71 |
|
| 72 |
def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
|
| 73 |
"""
|