diff --git a/README.md b/README.md
index 4029655..7999869 100644
--- a/README.md
+++ b/README.md
@@ -1,26 +1,24 @@
-# R
+# R Vibe Tool
 
-This is a CLI vibe coding tool written in C and is available for Linux x86 only.
-It works on OpenAI but is easy to convert to Ollama since Ollama supports OpenAI api on /v1/api. 
-The tool is made for my private use, but since it works so well, I decided to publish it. It contains some experimental stuff like Claude. Claude works.
+R is a CLI vibe coding tool for Linux with nice markdown output and agent support. Put your instructions into `~/.rcontext.txt` to describe how it should behave. Tip: include saving files as an explicit instruction step. When continuing on an existing project, use the `index` tool so that only source files are loaded, not environment or `node_modules` directories. It is a good idea to put that instruction first in your `.rcontext.txt` file.
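+
+A hypothetical `~/.rcontext.txt` illustrating those tips (the exact wording is up to you; these lines are only an example, not required syntax):
+```
+First use the index tool so that only project source files are loaded, never node_modules or other environment directories.
+After changing code, save the modified files to disk as an explicit step.
+```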
 
-If you need some help to get Ollama / Claude working, contact me on [Snek](https://snek.molodetz.nl).
+By default it uses OpenAI's GPT-3.5-turbo model and, as an example, my own rate-limited OpenAI key.
+That key will therefore only work temporarily! The good news: Ollama and Claude are also supported.
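+
+To use your own key instead, export it before starting. A minimal sketch, assuming `r` still reads the `OPENAI_API_KEY` environment variable:
+```bash
+export OPENAI_API_KEY="sk-..."  # assumption: your own OpenAI key is picked up from the environment
+./r
+```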
 
-What I vibed with it:
- - A part of the tool itself. It had many example code so it was easy to add modifications.
- - A web socket notification system in C#.
+An example of what I have vibed with is [this](https://molodetz.nl/projects/streamii/README.md.html).
 
-The application has a built-in OPENAI_API_KEY with limited resources, so people can try.
-
-## Download
-```
-wget https://retoor.molodetz.nl/api/packages/retoor/generic/r/1.0.0/r
+## How to get it working with Ollama
+Note: the public Ollama server at molodetz.nl used below is slow.
+```bash
+export R_MODEL="qwen2.5:3b"
+export R_BASE_URL="https://ollama.molodetz.nl"
+./r
 ```
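+
+If you run Ollama locally, the same two variables should point at your own instance. This is an untested sketch, assuming `r` appends the OpenAI-compatible path itself, as it appears to do for the molodetz URL above:
+```bash
+export R_MODEL="qwen2.5:3b"
+export R_BASE_URL="http://localhost:11434"  # default local Ollama address (assumption)
+./r
+```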
 
-## Configure OpenAI API key
-Update your bashrc with `export OPENAI_API_KEY=sk-...`.
-
-## Working on an existing project.
-When starting on existing project, use `init`. Now you can ask it to make modifications to your files / system.
-
-
+## How to get it working with Anthropic Claude
+```bash
+export R_MODEL="claude-3-5-haiku-20241022"
+export R_BASE_URL="https://api.anthropic.com"
+export R_KEY="sk-ant-"
+./r
+```
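+
+Replace the `R_KEY` value with your full Anthropic API key.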
diff --git a/http_curl.h b/http_curl.h
index eb9c5c3..7d0b1ae 100644
--- a/http_curl.h
+++ b/http_curl.h
@@ -76,6 +76,8 @@ char *curl_post(const char *url, const char *data) {
     curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *)&response);
     res = curl_easy_perform(curl);
     if (res != CURLE_OK) {
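+      // Dump the request URL and body to stderr to make failed API calls easier to debug.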
+      fprintf(stderr, "Url: %s\n", url);
+      fprintf(stderr, "Data: %s\n", data);
       fprintf(stderr, "An error occurred: %s\n", curl_easy_strerror(res));
     }
     curl_slist_free_all(headers);
@@ -114,4 +116,4 @@ char *curl_get(const char *url) {
   return response.data;
 }
 
-#endif
\ No newline at end of file
+#endif
diff --git a/openai.h b/openai.h
index 112f9f9..257998f 100644
--- a/openai.h
+++ b/openai.h
@@ -46,7 +46,7 @@ bool openai_system(char *message_content) {
 
 struct json_object *openai_process_chat_message(const char *api_url,
                                                 const char *json_data) {
   char *response = curl_post(api_url, json_data);
   if (!response) {
     fprintf(stderr, "Failed to get response.\n");
     return NULL;
@@ -80,6 +80,7 @@ struct json_object *openai_process_chat_message(const char *api_url,
   struct json_object *choices_array;
   if (!json_object_object_get_ex(parsed_json, "choices", &choices_array)) {
     fprintf(stderr, "Failed to get 'choices' array.\n%s\n", response);
+    fprintf(stderr, "%s\n", json_object_to_json_string(parsed_json));
     json_object_put(parsed_json);
     return NULL;
   }
diff --git a/rpylib.so b/rpylib.so
index ae413e3..56cd075 100755
Binary files a/rpylib.so and b/rpylib.so differ