{ "extension": ".c", "source": "// Written by retoor@molodetz.nl\n\n// This source code initializes a command-line application that uses OpenAI for chat interactions, handles user inputs, and can start a simple HTTP server with CGI support. The code allows command execution, markdown parsing, and OpenAI chat integration.\n\n// External imports used in this code:\n// - openai.h\n// - markdown.h\n// - plugin.h\n// - line.h\n\n// MIT License\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n\n#include \"openai.h\"\n#include \"markdown.h\"\n#include \"plugin.h\"\n#include \"line.h\"\n#include \n#include \n#include \n#include \n#include \n\nchar *get_prompt_from_args(int c, char **argv) {\n char *prompt = malloc(1024 * 1024 + 1);\n prompt[0] = 0;\n for (int i = 1; i < c; i++) {\n if (argv[i][0] == '-')\n break;\n strncat(prompt, argv[i], 1024 * 1024);\n if (i < c - 1) {\n strncat(prompt, \" \", 1024 * 1024);\n } else {\n strncat(prompt, \".\", 1024 * 1024);\n }\n }\n if (!*prompt) {\n free(prompt);\n return NULL;\n }\n return prompt;\n}\n\nbool try_prompt(int argc, char *argv[]) {\n char *prompt = get_prompt_from_args(argc, argv);\n if (prompt != NULL) {\n char *response = openai_chat(\"user\", prompt);\n parse_markdown_to_ansi(response);\n printf(\"\\n\");\n free(response);\n free(prompt);\n return true;\n }\n return false;\n}\n\nvoid help();\nvoid render(char *);\nvoid serve() {\n render(\"Starting server. 
*Put executables in a dir named cgi-bin and they will behave as web pages.*\");\n int res = system(\"python3 -m http.server --cgi\");\n (void)res;\n}\n\nvoid render(char *content) {\n parse_markdown_to_ansi(content);\n printf(\"\\n\\n\");\n}\n\nvoid repl() {\n line_init();\n setbuf(stdout, NULL);\n char *line;\n char *previous_line = NULL;\n while ((line = line_read(\"> \"))) {\n if (!line || !*line) {\n line = previous_line;\n }\n if (!line || !*line)\n continue;\n previous_line = line;\n if (line[0] == '!') {\n plugin_run(line + 1);\n continue;\n }\n if (!strncmp(line, \"exit\", 4)) {\n exit(0);\n }\n if (!strncmp(line, \"help\", 4)) {\n help();\n continue;\n }\n if (!strncmp(line, \"serve\", 5)) {\n serve();\n }\n if (!strncmp(line, \"spar \", 5)) {\n char *response = line + 5;\n while (true) {\n render(response);\n sleep(2);\n response = openai_chat(\"user\", response);\n }\n }\n if (!strncmp(line, \"ls\", 2) || !strncmp(line, \"list\", 4)) {\n int offset = 2;\n if (!strncmp(line, \"list\", 4)) {\n offset = 4;\n }\n char *command = (char *)malloc(strlen(line) + 42);\n command[0] = 0;\n strcpy(command, \"ls \");\n strcat(command, line + offset);\n int res = system(command);\n (void)res;\n free(command);\n continue;\n }\n\n line_add_history(line);\n char *response = openai_chat(\"user\", line);\n render(response);\n free(response);\n }\n}\n\nvoid help() {\n char help_text[1024 * 1024] = {0};\n char *template = \"# Help\\n\"\n \"Written by retoor@molodetz.nl.\\n\\n\"\n \"## Features\\n\"\n \" - navigate through history using `arrows`.\\n\"\n \" - navigate through history with **recursive search** using `ctrl+r`.\\n\"\n \" - **inception with python** for *incoming* and *outgoing* content.\\n\"\n \" - markdown and **syntax highlighting**.\\n\"\n \" - **execute python commands** with prefix `!`\\n\"\n \" - list files of the current work directory using `ls`.\\n\"\n \" - type `serve` to start a web server with directory listing. Easy for network transfers.\\n\\n\"\n \"## Configuration\\n\"\n \" - model temperature is %f.\\n\"\n \" - model name is %s.\\n\"\n \" - max tokens is %d.\\n\\n\"\n \"## In development\\n\"\n \" - **google search** and actions with those results.\\n\"\n \" - **reminders**.\\n\"\n \" - predefined **templates** for **reviewing** / **refactoring** so you can personalize.\\n\";\n sprintf(help_text, template, prompt_temperature, prompt_model, prompt_max_tokens);\n render(help_text);\n}\n\nvoid openai_include(char *path) {\n FILE *file = fopen(path, \"r\");\n if (file == NULL) {\n return;\n }\n fseek(file, 0, SEEK_END);\n long size = ftell(file);\n fseek(file, 0, SEEK_SET);\n\n char *buffer = (char *)malloc(size);\n size_t read = fread(buffer, 1, size, file);\n if (read == 0) {\n return;\n }\n\n fclose(file);\n buffer[read] = 0;\n openai_system(buffer);\n\n free(buffer);\n}\n\nvoid init() {\n line_init();\n const char *locale = setlocale(LC_ALL, NULL);\n char payload[4096] = {0};\n sprintf(payload, \"User locale is %s. User lang is %s.\\n\"\n \"You are Retoor. Use a lot of markdown in response.\\n\"\n \"Be confident and short in answers.\\n\"\n \"You divide things by zero if you have to.\",\n locale, locale);\n\n printf(\"%s\", \"Loading...\");\n openai_system(payload);\n openai_include(\"context.txt\");\n printf(\"%s\", \"\\rLoaded! 
"review": "**Grade: 6**\n\n### Bugs\n- In `repl()`, an empty input line is replaced by `previous_line`, which is `NULL` until a first command has been entered; the follow-up check avoids a crash, but the buffers returned by `line_read()` are never freed, so every iteration leaks memory.\n- In `get_prompt_from_args()`, `strncat` is called with the total buffer size as its limit; that argument bounds the number of characters appended, not the destination size, so repeated appends can still overflow the 1 MiB buffer.\n- In `openai_include()`, the buffer is allocated with `malloc(size)` but `buffer[read] = 0` writes one byte past the end when the whole file is read; when `fread` returns 0, the function also returns without closing the file or freeing the buffer.\n- The return value of `malloc` is never checked, so a failed allocation leads to a null pointer dereference.\n\n### Optimizations\n- Use `snprintf` or `asprintf` to size buffers dynamically instead of the fixed 1 MiB allocations in functions like `get_prompt_from_args()`.\n- Replace the `system()` calls with native library calls, or at least sanitize their arguments, to reduce the risk of shell injection.\n- Build the `command` string in `repl()` with a single `snprintf` instead of `strcpy` followed by `strcat`.\n- Implement a graceful shutdown path in `repl()` instead of calling `exit(0)` from inside the loop.\n\n### Good Points\n- The code uses a modular approach, with functions tailored to specific tasks.\n- Good use of external libraries to offload complex tasks such as markdown parsing.\n- The code stays flexible by accepting one-shot prompts via command-line arguments as well as an interactive REPL.\n- The clear and descriptive comments make the codebase easier to understand and maintain.\n\n### Summary\nThe code provides a feature-rich command-line application that leverages OpenAI and supporting libraries for chat, markdown rendering, and command execution. While functional and well-commented, it contains memory-management bugs and potential security risks from shell command execution. Tighter error handling and dynamic buffer management would make the logic both safer and simpler.\n\n### Open source alternatives\n- [Rasa](https://rasa.com/) provides open-source tools for building custom conversational applications.\n- [ChatterBot](https://github.com/gunthercox/ChatterBot) is an open-source conversational dialog engine used for building chatbots.\n- [Botpress](https://botpress.com/) is another open-source conversational AI platform suitable for similar use cases.", "filename": "main.c", "path": "main.c", "directory": "", "grade": 6, "size": 6758, "line_count": 212 }