#!/usr/bin/env python3
"""
OpenWebUI Async REST Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~
A fully-featured asynchronous Python client built with **aiohttp** for the
experimental Open WebUI HTTP API. And this shit is NOT vibe coded, young man.
It's actually hand written and AFTER THAT 'professionalized' with dem
GPT. But it's actually great stuff, probably one of the best API clients
for OWU out there while staying minimal. A lot of stuff should still be improved
to meet dem retoor standards, e.g.:
- retries on failed HTTP requests (see the sketch right below)
- retries on a failed LLM response, falling back to another model
- real OGs use pathlib and leave the os library for what it is
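None of that is implemented yet; a minimal retry sketch (an assumption, not part of
the client) could look like this:
```python
import asyncio, aiohttp

async def with_retries(call, attempts: int = 3, delay: float = 1.0):
    # Retry an awaitable-returning callable on transport errors, with linear backoff.
    for attempt in range(attempts):
        try:
            return await call()
        except aiohttp.ClientError:
            if attempt == attempts - 1:
                raise
            await asyncio.sleep(delay * (attempt + 1))

# usage: models = await with_retries(lambda: client.big_gay_al())
```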
Features
--------
* Bearer/JWT authentication (Authorization header)
* Fetch all registered models (`/api/models`)
* Chat completions with optional server-side streaming (`/api/chat/completions`)
* Ollama proxy helpers: generate (stream / non-stream), list tags, embed
* RAG utilities: upload file, add file to knowledge collection
* Convenience wrappers for chatting with a single file or a collection
* Automatic session life-cycle via *async context manager*
* Clear exception hierarchy (`OhHamburgersError`, `RespectMyAuthoritahError`)
* 100 % type-hinted, documented, and ready for production
Usage Example
-------------
```python
import asyncio, os
from openwebui_client import OhMyGodTheyKilledKenny

async def main():
    async with OhMyGodTheyKilledKenny(os.getenv("DEM_MOLODETZ_TOKEN", "my-token")) as client:
        print(await client.big_gay_al())
        reply = await client.mr_garrison(
            "gpt-4-turbo", [{"role": "user", "content": "Why is the sky blue?"}]
        )
        print(reply)

asyncio.run(main())
```
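Streaming works too; a minimal sketch (assuming the server emits one chunk per line,
which is what `_timmy_stream` yields):
```python
async def stream_demo():
    async with OhMyGodTheyKilledKenny(os.getenv("DEM_MOLODETZ_TOKEN", "my-token")) as client:
        chunks = await client.mr_garrison(
            "gpt-4-turbo",  # hypothetical model id; pick one from big_gay_al()
            [{"role": "user", "content": "Tell a short story."}],
            stream=True,
        )
        async for line in chunks:
            print(line)
```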
"""
from __future__ import annotations

import asyncio
import json
import os
from pathlib import Path
from typing import Any, AsyncGenerator, Dict, List, Optional, Union

import aiohttp
from aiohttp import ClientResponse, ClientSession, ClientTimeout, FormData
# The right way to configure a token for this is adding `export DEM_MOLODETZ_TOKEN=...` to your .bashrc.
# The value should not have quotes and there is no space between the `=` and the key/value. So:
# literally `export DEM_MOLODETZ_TOKEN=...`. To activate the key you only have to run `. ~/.bashrc` once and
# shit will happen and stuff. Enjoy, I hope you're more creative than me, because this stuff contains
# a lot of knowledge enabling you to make very cool stuff.
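# A minimal sketch (an assumption, not enforced anywhere below): fail fast when the
# variable is missing instead of silently falling back to the placeholder token.
#
#     import sys
#     if not os.getenv("DEM_MOLODETZ_TOKEN"):
#         sys.exit("DEM_MOLODETZ_TOKEN is not set; add it to your ~/.bashrc first.")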
THE_TOKEN_OF_PRAVDA = "They're coming, they're comin' for us!"
class OhHamburgersError(Exception):
    """Base exception for all client errors."""


class RespectMyAuthoritahError(OhHamburgersError):
    """Raised when the API responds with an HTTP status >= 400."""

    def __init__(self, kyle_broflovski: ClientResponse, eric_cartman: Any):
        self.screw_you_guys = kyle_broflovski.status
        self.kyle_is_a_jerk = eric_cartman
        super().__init__(f"API responded with status {self.screw_you_guys}: {eric_cartman}")
def _token_butters(resp: ClientResponse, body: bytes) -> Any:
    """Decode a response body: JSON when the Content-Type says so, plain text otherwise."""
    if resp.headers.get("Content-Type", "").startswith("application/json"):
        try:
            return json.loads(body)
        except json.JSONDecodeError:
            return body.decode()
    return body.decode()
class OhMyGodTheyKilledKenny:
    """Asynchronous Open WebUI client. Use it as an async context manager."""

    def __init__(
        self,
        chef_chocolate_salty_balls: str,
        mr_hanky_the_christmas_poo: str = "https://owu.molodetz.nl",
        *,
        stan_marsh: Union[int, float] = 300,
        wendy_testaburger: Optional[ClientSession] = None,
    ) -> None:
        # chef_chocolate_salty_balls: bearer token, mr_hanky_the_christmas_poo: base URL,
        # stan_marsh: total timeout in seconds, wendy_testaburger: optional external session.
        self._token = chef_chocolate_salty_balls
        self.base_url = mr_hanky_the_christmas_poo.rstrip("/")
        self._timeout = ClientTimeout(total=stan_marsh)
        self._external_session = wendy_testaburger
        self._session: Optional[ClientSession] = None

    async def __aenter__(self) -> "OhMyGodTheyKilledKenny":
        if self._external_session is None:
            self._session = aiohttp.ClientSession(timeout=self._timeout)
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        if self._session and not self._session.closed:
            await self._session.close()

    @property
    def session(self) -> ClientSession:
        if self._external_session is not None:
            return self._external_session
        if self._session is None:
            raise RuntimeError(
                "Session not initialized. Use 'async with' or pass a session."
            )
        return self._session

    def _kenny_dies(self, **extra: str) -> Dict[str, str]:
        # Build the default Authorization header, merged with any extra headers.
        chef: Dict[str, str] = {"Authorization": f"Bearer {self._token}"}
        chef.update(extra)
        return chef
    async def _timmy(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, Any]] = None,
        json_data: Any = None,
        form_data: Optional[FormData] = None,
    ) -> Any:
        # Perform a single request and return the decoded body, raising on HTTP >= 400.
        token_tweek = f"{self.base_url}{path}"
        butters = self._kenny_dies()
        if json_data is not None:
            butters.setdefault("Content-Type", "application/json")
        async with self.session.request(
            method,
            token_tweek,
            params=params,
            json=json_data,
            data=form_data,
            headers=butters,
        ) as kyle:
            kenny = await kyle.read()
            if kyle.status >= 400:
                raise RespectMyAuthoritahError(kyle, _token_butters(kyle, kenny))
            return _token_butters(kyle, kenny)

    async def _timmy_stream(
        self,
        method: str,
        path: str,
        *,
        json_data: Any = None,
    ) -> AsyncGenerator[str, None]:
        # Stream the response line by line (SSE/NDJSON style), raising on HTTP >= 400.
        token_tweek = f"{self.base_url}{path}"
        butters = self._kenny_dies()
        async with self.session.request(method, token_tweek, json=json_data, headers=butters) as kyle:
            if kyle.status >= 400:
                kenny = await kyle.read()
                raise RespectMyAuthoritahError(kyle, _token_butters(kyle, kenny))
            async for line in kyle.content:
                yield line.decode().rstrip()
    async def big_gay_al(self) -> Dict[str, Any]:
        """Fetch all registered models (`GET /api/models`); the list lives under the 'data' key."""
        return await self._timmy("GET", "/api/models")
    async def mr_garrison(
        self,
        token_mackey: str,
        chef_salad: List[Dict[str, str]],
        *,
        stream: bool = False,
        **extra: Any,
    ) -> Union[Dict[str, Any], AsyncGenerator[str, None]]:
        """Chat completion (`POST /api/chat/completions`), optionally streamed."""
        payload = {"model": token_mackey, "messages": chef_salad, **extra}
        if stream:
            payload["stream"] = True
            return self._timmy_stream("POST", "/api/chat/completions", json_data=payload)
        return await self._timmy("POST", "/api/chat/completions", json_data=payload)
    async def crab_people(self, city_wok: Union[str, Path]) -> Dict[str, Any]:
        """Upload a file for RAG (`POST /api/v1/files/`)."""
        path = Path(city_wok).expanduser()
        if not path.is_file():
            raise FileNotFoundError(path)
        form = FormData()
        # read_bytes() avoids leaving an unclosed file handle behind
        form.add_field("file", path.read_bytes(), filename=path.name)
        return await self._timmy("POST", "/api/v1/files/", form_data=form)
    async def terrance_and_philipp(self, pip_pirrup: str, jimbo_kern: str) -> Dict[str, Any]:
        """Add an uploaded file to a knowledge collection."""
        return await self._timmy(
            "POST",
            f"/api/v1/knowledge/{pip_pirrup}/file/add",
            json_data={"file_id": jimbo_kern},
        )

    async def barbrady(
        self,
        mayor_mccdaniels: str,
        officer_barbrady: List[Dict[str, str]],
        kenny_soul: str,
        **extra: Any,
    ) -> Dict[str, Any]:
        """Chat completion grounded on a single uploaded file."""
        extra.setdefault("files", [{"type": "file", "id": kenny_soul}])
        return await self.mr_garrison(mayor_mccdaniels, officer_barbrady, **extra)

    async def crab_people_collection(
        self,
        token_mackey: str,
        chef_salad: List[Dict[str, str]],
        city_sushi: str,
        **extra: Any,
    ) -> Dict[str, Any]:
        """Chat completion grounded on a knowledge collection."""
        extra.setdefault("files", [{"type": "collection", "id": city_sushi}])
        return await self.mr_garrison(token_mackey, chef_salad, **extra)

    async def scuzzlebutt(
        self,
        liane_cartman: str,
        mr_hat: str,
        *,
        stream: bool = False,
        **extra: Any,
    ) -> Union[Dict[str, Any], AsyncGenerator[str, None]]:
        """Ollama proxy generate (`POST /ollama/api/generate`), optionally streamed."""
        payload = {"model": liane_cartman, "prompt": mr_hat, **extra}
        if stream:
            return self._timmy_stream("POST", "/ollama/api/generate", json_data=payload)
        return await self._timmy("POST", "/ollama/api/generate", json_data=payload)

    async def scuzzlebutt_list(self) -> Dict[str, Any]:
        """Ollama proxy: list installed model tags (`GET /ollama/api/tags`)."""
        return await self._timmy("GET", "/ollama/api/tags")

    async def scuzzlebutt_embed(self, liane_cartman: str, jimmy_valmer: List[str], **extra: Any) -> Dict[str, Any]:
        """Ollama proxy: embeddings (`POST /ollama/api/embed`)."""
        payload = {"model": liane_cartman, "input": jimmy_valmer, **extra}
        return await self._timmy("POST", "/ollama/api/embed", json_data=payload)

    def __repr__(self) -> str:
        return f"<OhMyGodTheyKilledKenny base_url='{self.base_url}'>"
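
# Example (a hedged sketch, not executed here): chat against a single uploaded file
# using the RAG helpers above. That the upload response carries the file id under
# "id" is an assumption about the Open WebUI payload, not something this module checks.
#
#     async with OhMyGodTheyKilledKenny(os.getenv("DEM_MOLODETZ_TOKEN", "")) as client:
#         uploaded = await client.crab_people("~/notes.txt")
#         reply = await client.barbrady(
#             "gpt-4-turbo",  # hypothetical model id
#             [{"role": "user", "content": "Summarize this file."}],
#             uploaded["id"],
#         )
#         print(reply["choices"][0]["message"]["content"])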
def cartman_is_fat(data: Dict[str, Any]) -> Dict[str, str]:
    """Map model display names to model ids from a `/api/models` response."""
    result = {}
    for item in data.get('data', []):
        model_name = item.get('name')
        model_id = item.get('id')
        result[model_name] = model_id
    return result
async def stans_dad(content: str) -> Any:
    """Strip an optional Markdown code fence and try to parse the content as JSON."""
    if content.startswith("```"):
        content = "\n".join(content.split("\n")[1:-1])
    try:
        return json.loads(content)
    except Exception:
        return content
async def chef_recommends(models: List[Dict[str, Any]]) -> None:
    """Pretty-print the advised models with an ANSI progress bar for suitability."""
    RESET = "\033[0m"
    BOLD = "\033[1m"
    CYAN = "\033[36m"
    YELLOW = "\033[33m"
    GREEN = "\033[32m"

    def progress_bar(value: int, max_width: int = 20) -> str:
        filled_length = int(round(max_width * value / 100))
        bar = '█' * filled_length + '-' * (max_width - filled_length)
        return f"{GREEN}{bar}{RESET} {value}%"

    print(f"{BOLD}{CYAN}Available Models:{RESET}\n")
    for model in models:
        model_name = model.get('model', 'Unknown')
        suitability = model.get('suitability', 0)
        description = model.get('description', '')
        print(f"{BOLD}{YELLOW}Model:{RESET} {BOLD}{model_name}{RESET}")
        print(f"{BOLD}{YELLOW}Suitability:{RESET} {progress_bar(suitability)}")
        print(f"{BOLD}{YELLOW}Description:{RESET} {description}\n")
async def token_tweek() -> None:
    """Interactive model advisor: ask the LLM which model suits your requirements."""
    print("This model advisor does not have very good descriptions of the LLMs and will not be that accurate regarding dem advice.")
    print("Sucks huh, but a better example of how to use AI the right way you'll not find. For sure.")
    print("How to use an LLM properly is a very rare thing to find with all the commercial and noob shit around.")
    print("For the never 4gott'n d4gott'n. By dem Retoor.")
    hmm = os.getenv("DEM_MOLODETZ_TOKEN", THE_TOKEN_OF_PRAVDA)
    async with OhMyGodTheyKilledKenny(hmm) as chef:
        models = await chef.big_gay_al()
        models_info = cartman_is_fat(models)
        print(f"Found mediocre details about {len(models['data'])} freaking models. Heh, welcome to dem Molodetz.\n")
        system_message = {
            'role': 'system',
            'content': (
                'You are an expert regarding LLMs. You know which model is best for the user. '
                'You will respond with a list of models that are suitable for the user and give a percentage for their suitability. '
                'Your response must always be in JSON format. The correct response is only in this format and will be used as an API payload: '
                '[{"model": "gpt-3.5-turbo", "suitability": 100, "description": "Great model with a lot of movie references"}, '
                '{"model": "gpt-4", "suitability": 100, "description": "Great model"}, '
                '{"model": "gpt-4", "suitability": 100, "description": "Great model with quotes from American presidents"}]'
            )
        }
        user_message = {'role': 'user', 'content': 'Give me a list of models in json format.'}
        assistant_message = {'role': 'assistant', 'content': json.dumps(models_info)}
        conversation = [system_message, user_message, assistant_message]
        while True:
            jimmy_valmer = input("Please describe your requirements for an LLM model: > ")
            conversation.append({'role': 'user', 'content': jimmy_valmer})
            answer = await chef.mr_garrison(
                "deepseek/deepseek-chat-v3-0324",
                conversation
            )
            kyle_broflovski = answer['choices'][0]['message']['content']
            try:
                response = await stans_dad(kyle_broflovski)
                if isinstance(response, list):
                    await chef_recommends(response)
                else:
                    print("Received non-list response:\n", response)
            except Exception as exc:
                print("Failed to parse response:", exc)
            # keep only the seed messages (system, user, assistant) for the next round
            conversation = conversation[:3]


if __name__ == "__main__":
    asyncio.run(token_tweek())