	Update app.py
app.py CHANGED
@@ -132,15 +132,15 @@ print('Loading VAE took: ', elapsed_time, 'seconds')
 st = time.time()
 
 #pipe = StableDiffusionXLInstantIDImg2ImgPipeline.from_pretrained("stablediffusionapi/albedobase-xl-v21",
-pipe = StableDiffusionXLInstantIDImg2ImgPipeline.from_pretrained("frankjoshua/albedobaseXL_v21",
-                                                                 vae=vae,
-                                                                 controlnet=[identitynet, zoedepthnet],
-                                                                 torch_dtype=torch.float16)
+#pipe = StableDiffusionXLInstantIDImg2ImgPipeline.from_pretrained("frankjoshua/albedobaseXL_v21",
+#                                                                 vae=vae,
+#                                                                 controlnet=[identitynet, zoedepthnet],
+#                                                                 torch_dtype=torch.float16)
 
-
-
-
-
+pipe = StableDiffusionXLInstantIDImg2ImgPipeline.from_pretrained("SG161222/RealVisXL_V5.0",
+                                                                vae=vae,
+                                                                controlnet=[identitynet, zoedepthnet],
+                                                                torch_dtype=torch.float16)
 
 pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config, use_karras_sigmas=True)
 pipe.load_ip_adapter_instantid(face_adapter)
@@ -374,7 +374,7 @@ def generate_image(prompt, negative, face_emb, face_image, face_kps, image_stren
         image=face_image,
         strength=1-image_strength,
         control_image=control_images,
-        num_inference_steps=
+        num_inference_steps=36,
         guidance_scale=guidance_scale,
         controlnet_conditioning_scale=control_scales,
     ).images[0]
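For reference, a minimal sketch of how the swapped-in base checkpoint is wired into the InstantID img2img pipeline after this commit. The import path and the component checkpoints below (vae, identitynet, zoedepthnet, face_adapter) are assumptions for illustration only; app.py defines them outside the lines shown in this diff.

# Sketch only: import path and component checkpoints are assumed, not taken from this diff.
import torch
from diffusers import AutoencoderKL, ControlNetModel, DPMSolverMultistepScheduler
# Assumed to ship with the Space's bundled InstantID code.
from pipeline_stable_diffusion_xl_instantid_img2img import StableDiffusionXLInstantIDImg2ImgPipeline

# Components assumed to be created earlier in app.py (names taken from the diff context).
vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
identitynet = ControlNetModel.from_pretrained("InstantX/InstantID", subfolder="ControlNetModel",
                                              torch_dtype=torch.float16)
zoedepthnet = ControlNetModel.from_pretrained("diffusers/controlnet-zoe-depth-sdxl-1.0",
                                              torch_dtype=torch.float16)
face_adapter = "./checkpoints/ip-adapter.bin"  # hypothetical local path

# The change in this commit: RealVisXL 5.0 replaces albedobaseXL as the SDXL base model.
pipe = StableDiffusionXLInstantIDImg2ImgPipeline.from_pretrained(
    "SG161222/RealVisXL_V5.0",
    vae=vae,
    controlnet=[identitynet, zoedepthnet],
    torch_dtype=torch.float16,
)
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config, use_karras_sigmas=True)
pipe.load_ip_adapter_instantid(face_adapter)

The second hunk hard-codes num_inference_steps=36 in the pipe call inside generate_image; the previous value is cut off in the diff view above.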
 
			
