Mirror of https://github.com/mudler/LocalAI.git (synced 2024-12-18 20:27:57 +00:00)
fix(diffusers): correctly parse height and width request without parametrization (#4082)
* fix(diffusers): allow to specify width and height without enable-parameters

  Let's simplify usage by not gating width and height by parameters

  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* chore: use sane defaults

  Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

---------

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent b59e16742e
commit b425a870b0
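The gist of the change: width and height from the image request are no longer gated behind EnableParameters. Below is a minimal sketch of the before/after behavior, not the backend code itself; the request object and option values are stand-ins for the gRPC request fields named in the diff.

# A minimal sketch of the behavior change, not the backend code itself.
# "request" is a stand-in for the gRPC image request used in the diff.
from types import SimpleNamespace

request = SimpleNamespace(
    width=768,
    height=512,
    EnableParameters="negative_prompt,num_inference_steps",
)

options = {
    "negative_prompt": "blurry",
    "width": request.width,
    "height": request.height,
    "num_inference_steps": 25,
}

# Before: width/height were only forwarded when listed in EnableParameters,
# so a config that enabled other parameters silently dropped the requested size.
keys = request.EnableParameters.split(",")
kwargs_old = {key: options[key] for key in keys}
assert "width" not in kwargs_old and "height" not in kwargs_old

# After: width/height are copied straight from the request whenever they are set.
kwargs_new = dict(kwargs_old)
if request.width:
    kwargs_new["width"] = request.width
if request.height:
    kwargs_new["height"] = request.height
assert kwargs_new["width"] == 768 and kwargs_new["height"] == 512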
@@ -409,8 +409,6 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
         # create a dictionary of values for the parameters
         options = {
             "negative_prompt": request.negative_prompt,
-            "width": request.width,
-            "height": request.height,
             "num_inference_steps": steps,
         }

@@ -428,13 +426,13 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
         keys = options.keys()

         if request.EnableParameters != "":
-            keys = request.EnableParameters.split(",")
+            keys = [key.strip() for key in request.EnableParameters.split(",")]

         if request.EnableParameters == "none":
             keys = []

         # create a dictionary of parameters by using the keys from EnableParameters and the values from defaults
-        kwargs = {key: options[key] for key in keys}
+        kwargs = {key: options.get(key) for key in keys if key in options}

         # Set seed
         if request.seed > 0:
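The two changed lines in this hunk also make EnableParameters parsing more forgiving: each key is stripped of surrounding whitespace, and keys without a matching entry in options are skipped instead of raising a KeyError. A small sketch of that, with a stand-in options dict:

# Stand-in options dict; the real one is built from the gRPC request above.
options = {"negative_prompt": "blurry", "num_inference_steps": 25}

enable_parameters = "negative_prompt, num_inference_steps, guidance_scale"

# Old parsing: enable_parameters.split(",") keeps the leading spaces and the
# unknown "guidance_scale" key, and options[key] then raises KeyError.
keys = [key.strip() for key in enable_parameters.split(",")]
kwargs = {key: options.get(key) for key in keys if key in options}
assert kwargs == {"negative_prompt": "blurry", "num_inference_steps": 25}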
@@ -445,6 +443,12 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
         if self.PipelineType == "FluxPipeline":
             kwargs["max_sequence_length"] = 256

+        if request.width:
+            kwargs["width"] = request.width
+
+        if request.height:
+            kwargs["height"] = request.height
+
         if self.PipelineType == "FluxTransformer2DModel":
             kwargs["output_type"] = "pil"
             kwargs["generator"] = torch.Generator("cpu").manual_seed(0)
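The new checks lean on proto3 scalar defaults: an unset width or height arrives as 0, which is falsy, so the pipeline's own default resolution stays in effect when the client does not ask for a size. A stand-in sketch of that fall-through:

# Stand-in for a request that sets no size; proto3 int fields default to 0.
from types import SimpleNamespace

request = SimpleNamespace(width=0, height=0)

kwargs = {}
if request.width:   # 0 is falsy, so the key is not set
    kwargs["width"] = request.width
if request.height:
    kwargs["height"] = request.height

assert kwargs == {}  # the pipeline keeps its own default resolution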
@@ -464,6 +468,7 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
             export_to_video(video_frames, request.dst)
             return backend_pb2.Result(message="Media generated successfully", success=True)

+        print(f"Generating image with {kwargs=}", file=sys.stderr)
         image = {}
         if COMPEL:
             conditioning, pooled = self.compel.build_conditioning_tensor(prompt)
@@ -136,6 +136,11 @@ func ImageEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appCon
			config.Backend = model.StableDiffusionBackend
		}

+		if !strings.Contains(input.Size, "x") {
+			input.Size = "512x512"
+			log.Warn().Msgf("Invalid size, using default 512x512")
+		}
+
		sizeParts := strings.Split(input.Size, "x")
		if len(sizeParts) != 2 {
			return fmt.Errorf("invalid value for 'size'")
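On the HTTP side, the endpoint now falls back to 512x512 and logs a warning whenever the requested size string contains no "x", instead of letting the later split fail. A rough Python sketch of the same parse-with-fallback idea (the real change is the Go code above; the helper name here is made up):

# Illustrative only; the actual implementation is the Go endpoint above.
def parse_size(size: str, default: str = "512x512") -> tuple[int, int]:
    if "x" not in size:
        print(f"Invalid size {size!r}, using default {default}")
        size = default
    parts = size.split("x")
    if len(parts) != 2:
        raise ValueError("invalid value for 'size'")
    return int(parts[0]), int(parts[1])

assert parse_size("1024x768") == (1024, 768)
assert parse_size("") == (512, 512)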