1
0
Fork 0

feat(api): add params endpoint, defaults file

This commit is contained in:
Sean Sube 2023-01-09 22:58:37 -06:00
parent 6b3c0fea45
commit 03fd728ab0
2 changed files with 90 additions and 13 deletions

View File

@@ -23,7 +23,9 @@ from io import BytesIO
from PIL import Image from PIL import Image
from struct import pack from struct import pack
from os import environ, makedirs, path, scandir from os import environ, makedirs, path, scandir
from typing import Tuple, Union from typing import Any, Tuple, Union
import json
import numpy as np import numpy as np
# defaults # defaults
@@ -45,10 +47,12 @@ max_width = 512
# paths # paths
model_path = environ.get('ONNX_WEB_MODEL_PATH', '../models') model_path = environ.get('ONNX_WEB_MODEL_PATH', '../models')
output_path = environ.get('ONNX_WEB_OUTPUT_PATH', '../outputs') output_path = environ.get('ONNX_WEB_OUTPUT_PATH', '../outputs')
params_path = environ.get('ONNX_WEB_PARAMS_PATH', './params.json')
# pipeline caching # pipeline caching
available_models = [] available_models = []
config_params = {}
last_pipeline_instance = None last_pipeline_instance = None
last_pipeline_options = (None, None, None) last_pipeline_options = (None, None, None)
last_pipeline_scheduler = None last_pipeline_scheduler = None
@@ -189,8 +193,15 @@ def load_models():
available_models = [f.name for f in scandir(model_path) if f.is_dir()] available_models = [f.name for f in scandir(model_path) if f.is_dir()]
def load_params():
    """Load the parameter schema/defaults JSON into the module-level cache.

    Reads the file at ``params_path`` (configured via the
    ONNX_WEB_PARAMS_PATH environment variable, default ``./params.json``)
    and stores the parsed object in the module-level ``config_params``,
    which the ``/settings/params`` endpoint serves to clients.
    """
    global config_params
    # JSON files are UTF-8 by spec (RFC 8259); pin the encoding so the read
    # does not depend on the platform default (e.g. cp1252 on Windows).
    with open(params_path, encoding='utf-8') as f:
        config_params = json.load(f)
check_paths() check_paths()
load_models() load_models()
load_params()
app = Flask(__name__) app = Flask(__name__)
# routes # routes
@@ -212,6 +223,11 @@ def list_models():
return json_with_cors(available_models) return json_with_cors(available_models)
@app.route('/settings/params')
def list_params():
    """Return the server's parameter schema/defaults as a CORS-enabled JSON response."""
    # config_params is populated once at startup by load_params()
    params = config_params
    return json_with_cors(params)
@app.route('/settings/platforms') @app.route('/settings/platforms')
def list_platforms(): def list_platforms():
return json_with_cors(list(platform_providers.keys())) return json_with_cors(list(platform_providers.keys()))
@@ -274,7 +290,8 @@ def img2img():
strength=strength, strength=strength,
).images[0] ).images[0]
(output_file, output_full) = make_output_path('img2img', (prompt, cfg, steps, height, width, seed)) (output_file, output_full) = make_output_path('img2img', seed,
(prompt, cfg, negative_prompt, steps, strength, height, width))
print("img2img output: %s" % output_full) print("img2img output: %s" % output_full)
image.save(output_full) image.save(output_full)
@@ -284,13 +301,13 @@ def img2img():
'model': model, 'model': model,
'provider': provider, 'provider': provider,
'scheduler': scheduler.__name__, 'scheduler': scheduler.__name__,
'seed': seed,
'prompt': prompt,
'cfg': cfg, 'cfg': cfg,
'negativePrompt': negative_prompt,
'steps': steps, 'steps': steps,
'height': default_height, 'height': default_height,
'width': default_width, 'width': default_width,
'prompt': prompt,
'seed': seed,
'negativePrompt': negative_prompt,
} }
}) })
@@ -314,7 +331,8 @@ def txt2img():
num_inference_steps=steps, num_inference_steps=steps,
).images[0] ).images[0]
(output_file, output_full) = make_output_path('txt2img', (prompt, cfg, steps, height, width, seed)) (output_file, output_full) = make_output_path('txt2img',
seed, (prompt, cfg, negative_prompt, steps, height, width))
print("txt2img output: %s" % output_full) print("txt2img output: %s" % output_full)
image.save(output_full) image.save(output_full)
@@ -324,13 +342,13 @@ def txt2img():
'model': model, 'model': model,
'provider': provider, 'provider': provider,
'scheduler': scheduler.__name__, 'scheduler': scheduler.__name__,
'seed': seed,
'prompt': prompt,
'cfg': cfg, 'cfg': cfg,
'negativePrompt': negative_prompt,
'steps': steps, 'steps': steps,
'height': height, 'height': height,
'width': width, 'width': width,
'prompt': prompt,
'seed': seed,
'negativePrompt': negative_prompt,
} }
}) })
@@ -364,7 +382,8 @@ def inpaint():
width=width, width=width,
).images[0] ).images[0]
(output_file, output_full) = make_output_path('inpaint', (prompt, cfg, steps, height, width, seed)) (output_file, output_full) = make_output_path(
'inpaint', (prompt, cfg, steps, height, width, seed))
print("inpaint output: %s" % output_full) print("inpaint output: %s" % output_full)
image.save(output_full) image.save(output_full)
@@ -374,13 +393,13 @@ def inpaint():
'model': model, 'model': model,
'provider': provider, 'provider': provider,
'scheduler': scheduler.__name__, 'scheduler': scheduler.__name__,
'seed': seed,
'prompt': prompt,
'cfg': cfg, 'cfg': cfg,
'negativePrompt': negative_prompt,
'steps': steps, 'steps': steps,
'height': default_height, 'height': default_height,
'width': default_width, 'width': default_width,
'prompt': prompt,
'seed': seed,
'negativePrompt': negative_prompt,
} }
}) })

58
api/params.json Normal file
View File

@@ -0,0 +1,58 @@
{
"cfg": {
"default": 6,
"min": 1,
"max": 30,
"step": 0.1
},
"height": {
"default": 512,
"min": 64,
"max": 512,
"step": 8
},
"model": {
"default": "stable-diffusion-onnx-v1-5",
"keys": []
},
"negativePrompt": {
"default": "",
"keys": []
},
"platform": {
"default": "amd",
"keys": []
},
"prompt": {
"default": "an astronaut eating a hamburger",
"keys": []
},
"scheduler": {
"default": "euler-a",
"keys": []
},
"steps": {
"default": 25,
"min": 1,
"max": 200,
"step": 1
},
"seed": {
"default": -1,
"min": -1,
"max": 4294967295,
"step": 1
},
"strength": {
"default": 0.5,
"min": 0,
"max": 1,
"step": 0.01
},
"width": {
"default": 512,
"min": 64,
"max": 512,
"step": 8
}
}