Add a barebones interrogate API
parent 737eb28fac · commit bdc9083798

4 changed files with 45 additions and 7 deletions
launch.py

@@ -198,7 +198,7 @@ def prepare_enviroment():
 def start_webui():
     print(f"Launching Web UI with arguments: {' '.join(sys.argv[1:])}")
     import webui
-    webui.webui()
+    webui.webui_or_api()
 
 
 if __name__ == "__main__":
modules/api/api.py

@@ -1,4 +1,4 @@
-from modules.api.models import StableDiffusionTxt2ImgProcessingAPI, StableDiffusionImg2ImgProcessingAPI
+from modules.api.models import StableDiffusionTxt2ImgProcessingAPI, StableDiffusionImg2ImgProcessingAPI, InterrogateAPI
 from modules.processing import StableDiffusionProcessingTxt2Img, StableDiffusionProcessingImg2Img, process_images
 from modules.sd_samplers import all_samplers
 from modules.extras import run_pnginfo
@@ -25,6 +25,11 @@ class ImageToImageResponse(BaseModel):
     parameters: Json
     info: Json
 
+class InterrogateResponse(BaseModel):
+    caption: str = Field(default=None, title="Caption", description="The generated caption for the image.")
+    parameters: Json
+    info: Json
+
 
 class Api:
     def __init__(self, app, queue_lock):
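The new InterrogateResponse model above defines what the endpoint returns. Purely as an illustration (the values below are placeholders, not from this commit), a serialized response would look roughly like this, with caption carrying the interrogator's output:

```python
# Illustrative shape only; values are made up, not produced by this commit.
example_response = {
    "caption": "a photo of a cat sitting on a windowsill",   # text from the interrogator
    "parameters": {"image": "<base64-encoded input image>"}, # echo of the request (parameters: Json)
    "info": None,                                            # unused for interrogation
}
```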
@@ -33,6 +38,7 @@ class Api:
         self.queue_lock = queue_lock
         self.app.add_api_route("/sdapi/v1/txt2img", self.text2imgapi, methods=["POST"])
         self.app.add_api_route("/sdapi/v1/img2img", self.img2imgapi, methods=["POST"])
+        self.app.add_api_route("/sdapi/v1/interrogate", self.interrogateapi, methods=["POST"])
 
     def __base64_to_image(self, base64_string):
         # if has a comma, deal with prefix
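Registering a bound method with add_api_route is how FastAPI wires these handlers. A minimal, self-contained sketch of that pattern (the request model and handler body are toy stand-ins, not this project's code):

```python
# Standalone sketch of the add_api_route pattern used above.
from typing import Optional
from fastapi import FastAPI
from pydantic import BaseModel

class InterrogateRequest(BaseModel):
    image: Optional[str] = None  # base64-encoded image, mirroring the "image" key

app = FastAPI()

class Api:
    def __init__(self, app: FastAPI):
        self.app = app
        # FastAPI inspects the bound method's signature, so the Pydantic
        # parameter becomes the POST request body.
        self.app.add_api_route("/sdapi/v1/interrogate", self.interrogateapi, methods=["POST"])

    def interrogateapi(self, req: InterrogateRequest):
        return {"caption": f"received {len(req.image or '')} base64 characters"}

api = Api(app)
# run with, e.g.: uvicorn sketch:app --port 7861
```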
@@ -118,6 +124,23 @@ class Api:
 
         return ImageToImageResponse(images=b64images, parameters=json.dumps(vars(img2imgreq)), info=processed.js())
 
+    def interrogateapi(self, interrogatereq: InterrogateAPI):
+        image_b64 = interrogatereq.image
+        if image_b64 is None:
+            raise HTTPException(status_code=404, detail="Image not found")
+
+        populate = interrogatereq.copy(update={ # Override __init__ params
+            }
+        )
+
+        img = self.__base64_to_image(image_b64)
+
+        # Override object param
+        with self.queue_lock:
+            processed = shared.interrogator.interrogate(img)
+
+        return InterrogateResponse(caption=processed, parameters=json.dumps(vars(interrogatereq)), info=None)
+
     def extrasapi(self):
         raise NotImplementedError
 
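Once a server with this route is running, the endpoint can be exercised with a small client such as the one below. The host, port, and image filename are assumptions for the example, not part of the commit; the payload key ("image") and response field ("caption") come from the models added here.

```python
# Hypothetical client for the new /sdapi/v1/interrogate route.
# Assumes a server reachable at 127.0.0.1:7861 and a local sample.png.
import base64
import requests

with open("sample.png", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

resp = requests.post(
    "http://127.0.0.1:7861/sdapi/v1/interrogate",
    json={"image": image_b64},
)
resp.raise_for_status()
print(resp.json()["caption"])
```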
modules/api/models.py

@@ -63,7 +63,12 @@ class PydanticModelGenerator:
 
         self._model_name = model_name
-        self._class_data = merge_class_params(class_instance)
+
+        if class_instance is not None:
+            self._class_data = merge_class_params(class_instance)
+        else:
+            self._class_data = {}
+
 
         self._model_def = [
             ModelDef(
                 field=underscore(k),
@@ -106,3 +111,9 @@ StableDiffusionImg2ImgProcessingAPI = PydanticModelGenerator(
     StableDiffusionProcessingImg2Img,
     [{"key": "sampler_index", "type": str, "default": "Euler"}, {"key": "init_images", "type": list, "default": None}, {"key": "denoising_strength", "type": float, "default": 0.75}, {"key": "mask", "type": str, "default": None}, {"key": "include_init_images", "type": bool, "default": False, "exclude" : True}]
 ).generate_model()
+
+InterrogateAPI = PydanticModelGenerator(
+    "Interrogate",
+    None,
+    [{"key": "image", "type": str, "default": None}]
+).generate_model()
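PydanticModelGenerator itself is outside this diff; the change above just lets it run without a class instance, so a model such as InterrogateAPI can be built from field definitions alone. A rough sketch of that idea using pydantic.create_model directly (not the project's generator):

```python
# Rough sketch of building a request model purely from field definitions,
# which is what passing None as the class instance now permits.
from pydantic import create_model

fields = [{"key": "image", "type": str, "default": None}]

InterrogateAPI = create_model(
    "Interrogate",
    **{f["key"]: (f["type"], f["default"]) for f in fields},
)

req = InterrogateAPI(image="aGVsbG8=")  # any base64 string
print(req.image)
```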
webui.py

@@ -146,7 +146,9 @@ def webui():
     app.add_middleware(GZipMiddleware, minimum_size=1000)
 
     if (launch_api):
-        create_api(app)
+        print('launching API')
+        api = create_api(app)
+        api.launch(server_name="0.0.0.0" if cmd_opts.listen else "127.0.0.1", port=cmd_opts.port if cmd_opts.port else 7861)
 
     wait_on_server(demo)
 
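Api.launch is called here but its implementation is not part of this diff; presumably it hands the FastAPI app to a uvicorn server. A hedged sketch of what a method with this signature typically looks like:

```python
# Assumption: Api.launch is not shown in this diff. A method with this
# signature would usually just hand the FastAPI app off to uvicorn.
import uvicorn

class Api:
    def __init__(self, app):
        self.app = app

    def launch(self, server_name: str, port: int):
        uvicorn.run(self.app, host=server_name, port=port)
```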
@@ -161,10 +163,12 @@ def webui():
     print('Restarting Gradio')
 
 
-task = []
-if __name__ == "__main__":
+def webui_or_api():
     if cmd_opts.nowebui:
         api_only()
     else:
         webui()
 
+task = []
+if __name__ == "__main__":
+    webui_or_api()