apply lint

Sean Sube 2023-11-12 16:38:56 -06:00
parent d3784158fa
commit 1ee6761340
Signed by: ssube
GPG Key ID: 3EED7B957D362AF1
6 changed files with 42 additions and 16 deletions


@@ -115,7 +115,12 @@ def make_tile_mask(
# build gradients
edge_t, edge_l, edge_b, edge_r = edges
-grad_x, grad_y = [int(not edge_l), 1, 1, int(not edge_r)], [int(not edge_t), 1, 1, int(not edge_b)]
+grad_x, grad_y = [int(not edge_l), 1, 1, int(not edge_r)], [
+    int(not edge_t),
+    1,
+    1,
+    int(not edge_b),
+]
logger.debug("tile gradients: %s, %s, %s, %s", points_w, points_h, grad_x, grad_y)
mult_x = [np.interp(i, points_w, grad_x) for i in range(tile_w)]
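For context, a minimal sketch of how the edge flags and np.interp above turn into a one-dimensional blend ramp; tile_w, the edge values, and the points_w anchors are made-up placeholders, and only grad_x and the interpolation call come from the diff.

import numpy as np

tile_w = 512
edge_l, edge_r = True, False  # hypothetical edge flags
points_w = [0, tile_w // 4, 3 * tile_w // 4, tile_w]  # assumed anchor positions

grad_x = [int(not edge_l), 1, 1, int(not edge_r)]  # 0 where the edge flag is set
mult_x = [np.interp(i, points_w, grad_x) for i in range(tile_w)]
# mult_x ramps from 0 up to 1 over the first anchor span and holds at 1 after it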


@@ -660,8 +660,10 @@ class OnnxStableDiffusionPanoramaPipeline(DiffusionPipeline):
region_noise_pred_uncond, region_noise_pred_text = np.split(
region_noise_pred, 2
)
-region_noise_pred = region_noise_pred_uncond + guidance_scale * (
-    region_noise_pred_text - region_noise_pred_uncond
+region_noise_pred = (
+    region_noise_pred_uncond
+    + guidance_scale
+    * (region_noise_pred_text - region_noise_pred_uncond)
)
# compute the previous noisy sample x_t -> x_t-1
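The reflowed expression above is the usual classifier-free guidance update. As a quick check of the algebra, it is the same as interpolating past the unconditional prediction; the shapes and values below are invented, only the update rule mirrors the diff.

import numpy as np

guidance_scale = 7.5  # placeholder value
noise_uncond = np.random.randn(1, 4, 64, 64).astype(np.float32)  # assumed shape
noise_text = np.random.randn(1, 4, 64, 64).astype(np.float32)

guided = noise_uncond + guidance_scale * (noise_text - noise_uncond)
blended = (1.0 - guidance_scale) * noise_uncond + guidance_scale * noise_text
assert np.allclose(guided, blended, atol=1e-4)  # same update, written two ways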


@@ -502,8 +502,10 @@ class StableDiffusionXLPanoramaPipelineMixin(StableDiffusionXLImg2ImgPipelineMix
region_noise_pred_uncond, region_noise_pred_text = np.split(
region_noise_pred, 2
)
-region_noise_pred = region_noise_pred_uncond + guidance_scale * (
-    region_noise_pred_text - region_noise_pred_uncond
+region_noise_pred = (
+    region_noise_pred_uncond
+    + guidance_scale
+    * (region_noise_pred_text - region_noise_pred_uncond)
)
if guidance_rescale > 0.0:
# Based on 3.4. in https://arxiv.org/pdf/2305.08891.pdf
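The guidance_rescale branch mentioned in that comment refers to the CFG-rescale trick from arXiv:2305.08891. A rough sketch of that formulation follows; the function name, arguments, and axis handling are chosen here rather than taken from this file.

import numpy as np

def rescale_cfg(noise_cfg, noise_pred_text, guidance_rescale):
    # match the per-sample std of the guided prediction to the text branch
    axes = tuple(range(1, noise_cfg.ndim))
    std_text = noise_pred_text.std(axis=axes, keepdims=True)
    std_cfg = noise_cfg.std(axis=axes, keepdims=True)
    rescaled = noise_cfg * (std_text / std_cfg)
    # blend the rescaled and original predictions to limit over-saturation
    return guidance_rescale * rescaled + (1.0 - guidance_rescale) * noise_cfg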


@@ -459,7 +459,9 @@ def slice_prompt(prompt: str, slice: int) -> str:
return prompt
-Region = Tuple[int, int, int, int, float, Tuple[float, Tuple[bool, bool, bool, bool]], str]
+Region = Tuple[
+    int, int, int, int, float, Tuple[float, Tuple[bool, bool, bool, bool]], str
+]
def parse_region_group(group: Tuple[str, ...]) -> Region:
@@ -478,12 +480,15 @@ def parse_region_group(group: Tuple[str, ...]) -> Region:
int(bottom),
int(right),
float(weight),
-(float(feather_radius), (
-    "T" in feather_edges,
-    "L" in feather_edges,
-    "B" in feather_edges,
-    "R" in feather_edges,
-)),
+(
+    float(feather_radius),
+    (
+        "T" in feather_edges,
+        "L" in feather_edges,
+        "B" in feather_edges,
+        "R" in feather_edges,
+    ),
+),
prompt,
)
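Putting the pieces together, a hypothetical value of the Region tuple built above; the coordinates, radius, and prompt are invented, and the top/left entries that precede the visible bottom/right lines are assumed from the Region alias.

region = (
    0,    # top (assumed to precede the visible lines)
    0,    # left (assumed)
    512,  # bottom
    512,  # right
    1.0,  # weight
    (64.0, (True, True, False, False)),  # (feather_radius, (T, L, B, R) edges)
    "a lighthouse on a rocky coast",     # prompt
)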


@@ -8,7 +8,7 @@ class NetworkModel:
tokens: List[str]
type: NetworkType
-def __init__(self, name: str, type: NetworkType, tokens=None) -> None:
+def __init__(self, name: str, type: NetworkType, tokens=None) -> None:
self.name = name
self.tokens = tokens or []
self.type = type
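A small usage sketch for the constructor above; the model names and token are placeholders, and the "inversion" and "lora" type strings are taken from load_models further down.

embedding = NetworkModel("example-embedding", "inversion", tokens=["<example>"])
lora = NetworkModel("example-lora", "lora")  # tokens=None falls back to [] via tokens or []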


@@ -213,7 +213,11 @@ def load_extras(server: ServerContext):
labels[model_name] = model["label"]
if "tokens" in model:
logger.debug("collecting tokens for model %s from %s", model_name, file)
logger.debug(
"collecting tokens for model %s from %s",
model_name,
file,
)
extra_tokens[model_name] = model["tokens"]
if "inversions" in model:
@@ -359,7 +363,10 @@ def load_models(server: ServerContext) -> None:
)
logger.debug("loaded Textual Inversion models from disk: %s", inversion_models)
network_models.extend(
[NetworkModel(model, "inversion", tokens=extra_tokens.get(model, [])) for model in inversion_models]
[
NetworkModel(model, "inversion", tokens=extra_tokens.get(model, []))
for model in inversion_models
]
)
lora_models = list_model_globs(
@@ -370,7 +377,12 @@ def load_models(server: ServerContext) -> None:
base_path=path.join(server.model_path, "lora"),
)
logger.debug("loaded LoRA models from disk: %s", lora_models)
network_models.extend([NetworkModel(model, "lora", tokens=extra_tokens.get(model, [])) for model in lora_models])
network_models.extend(
[
NetworkModel(model, "lora", tokens=extra_tokens.get(model, []))
for model in lora_models
]
)
def load_params(server: ServerContext) -> None:
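A condensed sketch of the lookup pattern both extend calls above rely on: each discovered model name is paired with any tokens collected from the extras file, falling back to an empty list. The names and token data here are invented.

extra_tokens = {"style-embedding": ["<style>"]}
inversion_models = ["style-embedding", "plain-embedding"]

network_models = [
    NetworkModel(model, "inversion", tokens=extra_tokens.get(model, []))
    for model in inversion_models
]
# "plain-embedding" has no extras entry, so it gets an empty token list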