diff --git a/packages/kbot/cpp/CMakeLists.txt b/packages/kbot/cpp/CMakeLists.txt
index 330728fe..3c5a47f4 100644
--- a/packages/kbot/cpp/CMakeLists.txt
+++ b/packages/kbot/cpp/CMakeLists.txt
@@ -62,10 +62,26 @@ FetchContent_Declare(
   GIT_SHALLOW TRUE
 )
 
+FetchContent_Declare(
+  nlohmann_json
+  GIT_REPOSITORY https://github.com/nlohmann/json.git
+  GIT_TAG v3.11.3
+  GIT_SHALLOW TRUE
+)
+
+FetchContent_Declare(
+  liboai
+  GIT_REPOSITORY https://github.com/jasonduncan/liboai.git
+  GIT_TAG main
+  GIT_SHALLOW TRUE
+  SOURCE_SUBDIR liboai
+)
+
 set(TF_BUILD_TESTS OFF CACHE BOOL "" FORCE)
 set(TF_BUILD_EXAMPLES OFF CACHE BOOL "" FORCE)
-FetchContent_MakeAvailable(cli11 tomlplusplus Catch2 asio concurrentqueue taskflow)
+set(JSON_BuildTests OFF CACHE BOOL "" FORCE)
+FetchContent_MakeAvailable(cli11 tomlplusplus Catch2 asio concurrentqueue taskflow nlohmann_json liboai)
 
 # ── Packages ─────────────────────────────────────────────────────────────────
 add_subdirectory(packages/logger)
 add_subdirectory(packages/html)
@@ -79,6 +95,9 @@ add_subdirectory(packages/gadm_reader)
 add_subdirectory(packages/grid)
 add_subdirectory(packages/search)
 add_subdirectory(packages/enrichers)
+
+# liboai (target `oai`) is built by FetchContent_MakeAvailable above; a local
+# add_subdirectory(packages/liboai/liboai) would duplicate/shadow that target.
 add_subdirectory(packages/kbot)
 
 # ── Sources ──────────────────────────────────────────────────────────────────
@@ -102,6 +121,11 @@ target_include_directories(${PROJECT_NAME} PRIVATE
 )
 
 # Define standalone ASIO (since it's not boost)
+if(WIN32)
+  # NOMINMAX: stop windows.h min/max macros breaking std::numeric_limits<>::max() in liboai's chat.h
+  target_compile_definitions(oai PRIVATE NOMINMAX _USE_MATH_DEFINES)
+  target_compile_definitions(${PROJECT_NAME} PRIVATE NOMINMAX _USE_MATH_DEFINES)
+endif()
 target_compile_definitions(${PROJECT_NAME} PRIVATE
   ASIO_STANDALONE=1
   ASIO_NO_DEPRECATED=1)
diff --git a/packages/kbot/cpp/build_out.txt b/packages/kbot/cpp/build_out.txt
new file mode 100644
index 00000000..2189004b
Binary files /dev/null and b/packages/kbot/cpp/build_out.txt differ
diff --git a/packages/kbot/cpp/build_out.utf8.txt b/packages/kbot/cpp/build_out.utf8.txt
new file mode 100644
index
00000000..85bef50c --- /dev/null +++ b/packages/kbot/cpp/build_out.utf8.txt @@ -0,0 +1,58 @@ +MSBuild version 18.4.0+6e61e96ac for .NET Framework + + libcurl_object.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\libcurl_object.dir\Debug\libcurl_object.lib + Catch2.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\catch2-build\src\Debug\Catch2d.lib + Catch2WithMain.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\catch2-build\src\Debug\Catch2Maind.lib + lexbor_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\lexbor-build\Debug\lexbor_static.lib + html.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\html\Debug\html.lib + libcurl_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\Debug\libcurl-d.lib + http.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\http\Debug\http.lib + json.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\json\Debug\json.lib + spdlog.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\spdlog-build\Debug\spdlogd.lib + logger.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\logger\Debug\logger.lib + enrichers.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\enrichers\Debug\enrichers.lib + geo.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\geo\Debug\geo.lib + gadm_reader.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\gadm_reader\Debug\gadm_reader.lib + grid.vcxproj -> 
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\grid\Debug\grid.lib + ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\ipc\Debug\ipc.lib + azure.cpp +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + + chat.cpp +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' 
[C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + + Generating Code... + postgres.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\postgres\Debug\postgres.lib + polymech.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\polymech\Debug\polymech.lib + search.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\search\Debug\search.lib + test_enrichers.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_enrichers.exe + test_functional.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_functional.exe + test_gadm_reader.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_gadm_reader.exe + test_geo.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_geo.exe + test_grid.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_grid.exe + test_gridsearch_ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_gridsearch_ipc.exe + test_html.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_html.exe + test_http.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_http.exe + test_ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_ipc.exe + test_json.vcxproj -> 
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_json.exe + test_logger.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_logger.exe + test_polymech.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_polymech.exe + test_polymech_e2e.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_polymech_e2e.exe + test_postgres.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_postgres.exe + test_postgres_live.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_postgres_live.exe + test_search.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_search.exe + test_supabase.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_supabase.exe diff --git a/packages/kbot/cpp/cli_build.txt b/packages/kbot/cpp/cli_build.txt new file mode 100644 index 00000000..daccda51 Binary files /dev/null and b/packages/kbot/cpp/cli_build.txt differ diff --git a/packages/kbot/cpp/cli_build.utf8.txt b/packages/kbot/cpp/cli_build.utf8.txt new file mode 100644 index 00000000..e15047e7 --- /dev/null +++ b/packages/kbot/cpp/cli_build.utf8.txt @@ -0,0 +1,152 @@ +CMake is re-running because C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/CMakeFiles/generate.stamp is out-of-date. + the file 'C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/CMakeLists.txt' + is newer than 'C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/CMakeFiles/generate.stamp.depend' + result='-1' +-- Selecting Windows SDK version 10.0.26100.0 to target Windows 10.0.18363. +cmake : CMake Deprecation Warning at +build/release/_deps/cli11-src/CMakeLists.txt:1 (cmake_minimum_required): +At line:1 char:1 ++ cmake --build build/release --target polymech-cli > cli_build.txt 2>& ... 
++ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + CategoryInfo : NotSpecified: (CMake Deprecati...imum_required) + ::String) [], RemoteException + + FullyQualifiedErrorId : NativeCommandError + + Compatibility with CMake < 3.10 will be removed from a future version of + CMake. + + Update the VERSION argument value. Or, use the ... syntax + to tell CMake that the project requires at least but has been updated + to work with policies introduced by or earlier. + + +CMake Deprecation Warning at +build/release/_deps/concurrentqueue-src/CMakeLists.txt:1 +(cmake_minimum_required): + Compatibility with CMake < 3.10 will be removed from a future version of + CMake. + + Update the VERSION argument value. Or, use the ... syntax + to tell CMake that the project requires at least but has been updated + to work with policies introduced by or earlier. + + +-- CMAKE_ROOT: C:/Program Files/CMake/share/cmake-4.2 +-- PROJECT_NAME: Taskflow +-- CMAKE_HOST_SYSTEM: Windows-10.0.18363 +-- CMAKE_BUILD_TYPE: Release +-- CMAKE_CXX_COMPILER: C:/Program Files/Microsoft Visual Studio/18/Community/VC/Tools/MSVC/14.50.35717/bin/Hostx64/x64/cl.exe +-- CMAKE_CXX_COMPILER_ID: MSVC +-- CMAKE_CXX_COMPILER_VERSION: 19.50.35726.0 +-- CMAKE_CXX_FLAGS: /DWIN32 /D_WINDOWS /EHsc +-- CMAKE_CUDA_COMPILER: +-- CMAKE_CUDA_COMPILER_ID: +-- CMAKE_CUDA_COMPILER_VERSION: +-- CMAKE_CUDA_FLAGS: +-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras +-- CMAKE_CURRENT_SOURCE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src +-- CMAKE_CURRENT_BINARY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-build +-- CMAKE_CURRENT_LIST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src +-- CMAKE_EXE_LINKER_FLAGS: /machine:x64 +-- CMAKE_INSTALL_PREFIX: C:/Program Files (x86)/polymech-cli +-- 
CMAKE_INSTALL_FULL_INCLUDEDIR: C:/Program Files (x86)/polymech-cli/include +-- CMAKE_INSTALL_FULL_LIBDIR: C:/Program Files (x86)/polymech-cli/lib +-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras +-- CMAKE_PREFIX_PATH: +-- TF_BUILD_BENCHMARKS: OFF +-- TF_BUILD_PROFILER: +-- TF_BUILD_CUDA: OFF +-- TF_BUILD_SYCL: OFF +-- TF_BUILD_SYCL_BITCODE: +-- TF_BUILD_TESTS: OFF +-- TF_BUILD_EXAMPLES: OFF +-- TF_UTEST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/unittests +-- TF_EXAMPLE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/examples +-- TF_BENCHMARK_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/benchmarks +-- TF_3RD_PARTY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/3rd-party +-- Using the multi-header code from C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/nlohmann_json-src/include/ +-- Build spdlog: 1.15.1 +-- Build type: Release +-- Project name: lexbor +-- Build without Threads +-- Lexbor version: 2.4.0 +-- Set Windows definitions +-- Append module: core (1.8.0) +-- Append module: css (1.2.0) +-- Append module: dom (1.7.0) +-- Append module: encoding (2.1.0) +-- Append module: html (2.5.0) +-- Append module: ns (1.2.0) +-- Append module: punycode (1.1.0) +-- Append module: selectors (0.3.0) +-- Append module: tag (1.3.0) +-- Append module: unicode (0.2.0) +-- Append module: url (0.2.0) +-- Append module: utils (0.3.0) +-- CFLAGS: /DWIN32 /D_WINDOWS /O2 +-- CXXFLAGS: /DWIN32 /D_WINDOWS /EHsc /O2 +-- Feature ASAN: disable +-- Feature Fuzzer: disable +-- Using CMake version 4.2.3 +-- curl version=[8.12.1] +-- CMake platform flags: WIN32 MSVC-1950 +-- Performing Test HAVE_WIN32_WINNT +-- Performing Test HAVE_WIN32_WINNT - Success +-- Found 
_WIN32_WINNT=0x0a00 +-- Could NOT find Libidn2 (missing: LIBIDN2_INCLUDE_DIR LIBIDN2_LIBRARY) +-- Protocols: dict file ftp ftps gopher gophers http https imap imaps ipfs ipns mqtt pop3 pop3s rtsp smb smbs smtp smtps telnet tftp ws wss +-- Features: alt-svc AsynchDNS HSTS HTTPS-proxy IPv6 Kerberos Largefile NTLM SPNEGO SSL SSPI threadsafe UnixSockets +-- Enabled SSL backends: Schannel +CMake Warning (dev) at C:/Program +Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:1963 (message): + Calling FetchContent_Populate(rapidjson) is deprecated, call + FetchContent_MakeAvailable(rapidjson) instead. Policy CMP0169 can be set + to OLD to allow FetchContent_Populate(rapidjson) to be called directly for + now, but the ability to call it with declared details will be removed + completely in a future version. +Call Stack (most recent call first): + packages/json/CMakeLists.txt:17 (FetchContent_Populate) +This warning is for project developers. Use -Wno-dev to suppress it. + +-- Configuring done (14.7s) +-- Generating done (1.6s) +-- Build files have been written to: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release +MSBuild version 18.4.0+6e61e96ac for .NET Framework + + lexbor_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\lexbor-build\Debug\lexbor_static.lib + html.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\html\Debug\html.lib + libcurl_object.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\libcurl_object.dir\Debug\libcurl_object.lib + libcurl_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\Debug\libcurl-d.lib + http.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\http\Debug\http.lib + json.vcxproj -> 
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\json\Debug\json.lib + spdlog.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\spdlog-build\Debug\spdlogd.lib + logger.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\logger\Debug\logger.lib + enrichers.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\enrichers\Debug\enrichers.lib + geo.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\geo\Debug\geo.lib + gadm_reader.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\gadm_reader\Debug\gadm_reader.lib + grid.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\grid\Debug\grid.lib + ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\ipc\Debug\ipc.lib + azure.cpp +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' 
[C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + + chat.cpp +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + + Generating Code... 
+ postgres.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\postgres\Debug\postgres.lib + polymech.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\polymech\Debug\polymech.lib + search.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\search\Debug\search.lib diff --git a/packages/kbot/cpp/cmake_out.txt b/packages/kbot/cpp/cmake_out.txt new file mode 100644 index 00000000..a4870ac7 Binary files /dev/null and b/packages/kbot/cpp/cmake_out.txt differ diff --git a/packages/kbot/cpp/cmake_out.utf8.txt b/packages/kbot/cpp/cmake_out.utf8.txt new file mode 100644 index 00000000..3fd65cd6 --- /dev/null +++ b/packages/kbot/cpp/cmake_out.utf8.txt @@ -0,0 +1,116 @@ +-- Selecting Windows SDK version 10.0.26100.0 to target Windows 10.0.18363. +cmake : CMake Deprecation Warning at +build/release/_deps/cli11-src/CMakeLists.txt:1 (cmake_minimum_required): +At line:1 char:1 ++ cmake --preset release > cmake_out.txt 2>&1 ++ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + CategoryInfo : NotSpecified: (CMake Deprecati...imum_required) + ::String) [], RemoteException + + FullyQualifiedErrorId : NativeCommandError + + Compatibility with CMake < 3.10 will be removed from a future version of + CMake. + + Update the VERSION argument value. Or, use the ... syntax + to tell CMake that the project requires at least but has been updated + to work with policies introduced by or earlier. + + +CMake Deprecation Warning at +build/release/_deps/concurrentqueue-src/CMakeLists.txt:1 +(cmake_minimum_required): + Compatibility with CMake < 3.10 will be removed from a future version of + CMake. + + Update the VERSION argument value. Or, use the ... syntax + to tell CMake that the project requires at least but has been updated + to work with policies introduced by or earlier. 
+ + +-- CMAKE_ROOT: C:/Program Files/CMake/share/cmake-4.2 +-- PROJECT_NAME: Taskflow +-- CMAKE_HOST_SYSTEM: Windows-10.0.18363 +-- CMAKE_BUILD_TYPE: Release +-- CMAKE_CXX_COMPILER: C:/Program Files/Microsoft Visual Studio/18/Community/VC/Tools/MSVC/14.50.35717/bin/Hostx64/x64/cl.exe +-- CMAKE_CXX_COMPILER_ID: MSVC +-- CMAKE_CXX_COMPILER_VERSION: 19.50.35726.0 +-- CMAKE_CXX_FLAGS: /DWIN32 /D_WINDOWS /EHsc +-- CMAKE_CUDA_COMPILER: +-- CMAKE_CUDA_COMPILER_ID: +-- CMAKE_CUDA_COMPILER_VERSION: +-- CMAKE_CUDA_FLAGS: +-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras +-- CMAKE_CURRENT_SOURCE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src +-- CMAKE_CURRENT_BINARY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-build +-- CMAKE_CURRENT_LIST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src +-- CMAKE_EXE_LINKER_FLAGS: /machine:x64 +-- CMAKE_INSTALL_PREFIX: C:/Program Files (x86)/polymech-cli +-- CMAKE_INSTALL_FULL_INCLUDEDIR: C:/Program Files (x86)/polymech-cli/include +-- CMAKE_INSTALL_FULL_LIBDIR: C:/Program Files (x86)/polymech-cli/lib +-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras +-- CMAKE_PREFIX_PATH: +-- TF_BUILD_BENCHMARKS: OFF +-- TF_BUILD_PROFILER: +-- TF_BUILD_CUDA: OFF +-- TF_BUILD_SYCL: OFF +-- TF_BUILD_SYCL_BITCODE: +-- TF_BUILD_TESTS: OFF +-- TF_BUILD_EXAMPLES: OFF +-- TF_UTEST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/unittests +-- TF_EXAMPLE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/examples +-- TF_BENCHMARK_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/benchmarks +-- TF_3RD_PARTY_DIR: 
C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/3rd-party +-- Using the multi-header code from C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/nlohmann_json-src/include/ +MSBuild version 18.4.0+6e61e96ac for .NET Framework + + Performing download step (git clone) for 'liboai-populate' + Cloning into 'liboai-src'... + fatal: invalid reference: master + CMake Error at +liboai-subbuild/liboai-populate-prefix/tmp/liboai-populate-gitclone.cmake:61 +(message): + Failed to checkout tag: 'master' + + +C:\Program Files\Microsoft Visual Studio\18\Community\MSBuild\Microsoft\VC\v18 +0\Microsoft.CppCommon.targets(254,5): error MSB8066: Custom build for 'C:\User +s\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\libo +ai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-downlo +ad.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\rel +ease\_deps\liboai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai- +populate-update.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\ +cpp\build\release\_deps\liboai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3 +cfb99\liboai-populate-patch.rule;C:\Users\zx\Desktop\polymech\polymech-mono\pa +ckages\kbot\cpp\build\release\_deps\liboai-subbuild\CMakeFiles\0bc8a27c65ae432 +6176df6e15e3cfb99\liboai-populate-configure.rule;C:\Users\zx\Desktop\polymech\ +polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-subbuild\CMakeFiles +\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-build.rule;C:\Users\zx\Deskt +op\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-subbuil +d\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-install.rule;C:\ +Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\ +liboai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-te 
+st.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\rel +ease\_deps\liboai-subbuild\CMakeFiles\c45f806c12e81c5a75110db639d0307b\liboai- +populate-complete.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbo +t\cpp\build\release\_deps\liboai-subbuild\CMakeFiles\949e3a6a5a2e9b16592c5c7a3 +65f0bec\liboai-populate.rule' exited with code 1. [C:\Users\zx\Desktop\polymec +h\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-subbuild\liboai-p +opulate.vcxproj] + +CMake Error at C:/Program +Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:1928 (message): + Build step for liboai failed: 1 +Call Stack (most recent call first): + C:/Program Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:1619 +(__FetchContent_populateSubbuild) + C:/Program +Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:2155:EVAL:2 +(__FetchContent_doPopulation) + C:/Program Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:2155 +(cmake_language) + C:/Program Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:2394 +(__FetchContent_Populate) + CMakeLists.txt:84 (FetchContent_MakeAvailable) + + +-- Configuring incomplete, errors occurred! diff --git a/packages/kbot/cpp/cmake_out2.txt b/packages/kbot/cpp/cmake_out2.txt new file mode 100644 index 00000000..03921772 Binary files /dev/null and b/packages/kbot/cpp/cmake_out2.txt differ diff --git a/packages/kbot/cpp/cmake_out2.utf8.txt b/packages/kbot/cpp/cmake_out2.utf8.txt new file mode 100644 index 00000000..6e7eeb31 --- /dev/null +++ b/packages/kbot/cpp/cmake_out2.utf8.txt @@ -0,0 +1,69 @@ +-- Selecting Windows SDK version 10.0.26100.0 to target Windows 10.0.18363. +cmake : CMake Deprecation Warning at +build/release/_deps/cli11-src/CMakeLists.txt:1 (cmake_minimum_required): +At line:1 char:1 ++ cmake --preset release > cmake_out2.txt 2>&1 ; Get-Content cmake_out2 ... 
++ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + CategoryInfo : NotSpecified: (CMake Deprecati...imum_required) + ::String) [], RemoteException + + FullyQualifiedErrorId : NativeCommandError + + Compatibility with CMake < 3.10 will be removed from a future version of + CMake. + + Update the VERSION argument value. Or, use the ... syntax + to tell CMake that the project requires at least but has been updated + to work with policies introduced by or earlier. + + +CMake Deprecation Warning at +build/release/_deps/concurrentqueue-src/CMakeLists.txt:1 +(cmake_minimum_required): + Compatibility with CMake < 3.10 will be removed from a future version of + CMake. + + Update the VERSION argument value. Or, use the ... syntax + to tell CMake that the project requires at least but has been updated + to work with policies introduced by or earlier. + + +-- CMAKE_ROOT: C:/Program Files/CMake/share/cmake-4.2 +-- PROJECT_NAME: Taskflow +-- CMAKE_HOST_SYSTEM: Windows-10.0.18363 +-- CMAKE_BUILD_TYPE: Release +-- CMAKE_CXX_COMPILER: C:/Program Files/Microsoft Visual Studio/18/Community/VC/Tools/MSVC/14.50.35717/bin/Hostx64/x64/cl.exe +-- CMAKE_CXX_COMPILER_ID: MSVC +-- CMAKE_CXX_COMPILER_VERSION: 19.50.35726.0 +-- CMAKE_CXX_FLAGS: /DWIN32 /D_WINDOWS /EHsc +-- CMAKE_CUDA_COMPILER: +-- CMAKE_CUDA_COMPILER_ID: +-- CMAKE_CUDA_COMPILER_VERSION: +-- CMAKE_CUDA_FLAGS: +-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras +-- CMAKE_CURRENT_SOURCE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src +-- CMAKE_CURRENT_BINARY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-build +-- CMAKE_CURRENT_LIST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src +-- CMAKE_EXE_LINKER_FLAGS: /machine:x64 +-- CMAKE_INSTALL_PREFIX: C:/Program Files (x86)/polymech-cli +-- CMAKE_INSTALL_FULL_INCLUDEDIR: 
C:/Program Files (x86)/polymech-cli/include +-- CMAKE_INSTALL_FULL_LIBDIR: C:/Program Files (x86)/polymech-cli/lib +-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras +-- CMAKE_PREFIX_PATH: +-- TF_BUILD_BENCHMARKS: OFF +-- TF_BUILD_PROFILER: +-- TF_BUILD_CUDA: OFF +-- TF_BUILD_SYCL: OFF +-- TF_BUILD_SYCL_BITCODE: +-- TF_BUILD_TESTS: OFF +-- TF_BUILD_EXAMPLES: OFF +-- TF_UTEST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/unittests +-- TF_EXAMPLE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/examples +-- TF_BENCHMARK_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/benchmarks +-- TF_3RD_PARTY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/3rd-party +-- Using the multi-header code from C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/nlohmann_json-src/include/ +CMake Error at build/release/_deps/liboai-src/liboai/CMakeLists.txt:64 +(message): + CURL not found and CURL::libcurl target does not exist + + +-- Configuring incomplete, errors occurred! 
diff --git a/packages/kbot/cpp/oai_build.txt b/packages/kbot/cpp/oai_build.txt new file mode 100644 index 00000000..27bbe8f5 Binary files /dev/null and b/packages/kbot/cpp/oai_build.txt differ diff --git a/packages/kbot/cpp/oai_build.utf8.txt b/packages/kbot/cpp/oai_build.utf8.txt new file mode 100644 index 00000000..5e99e33c --- /dev/null +++ b/packages/kbot/cpp/oai_build.utf8.txt @@ -0,0 +1,25 @@ +MSBuild version 18.4.0+6e61e96ac for .NET Framework + + libcurl_object.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\libcurl_object.dir\Debug\libcurl_object.lib + libcurl_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\Debug\libcurl-d.lib + azure.cpp +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/azure.cpp') + + chat.cpp 
+C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + +C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj] + (compiling source file '../liboai-src/liboai/components/chat.cpp') + + Generating Code... 
diff --git a/packages/kbot/cpp/packages/kbot/CMakeLists.txt b/packages/kbot/cpp/packages/kbot/CMakeLists.txt index ba0eb54f..b2c0cbb6 100644 --- a/packages/kbot/cpp/packages/kbot/CMakeLists.txt +++ b/packages/kbot/cpp/packages/kbot/CMakeLists.txt @@ -4,6 +4,7 @@ project(kbot CXX) add_library(kbot STATIC kbot.cpp + llm_client.cpp ) target_include_directories(kbot PUBLIC @@ -14,4 +15,5 @@ target_include_directories(kbot PUBLIC target_link_libraries(kbot PUBLIC logger json + oai ) diff --git a/packages/kbot/cpp/packages/kbot/kbot.cpp b/packages/kbot/cpp/packages/kbot/kbot.cpp index 08bb9897..7203388c 100644 --- a/packages/kbot/cpp/packages/kbot/kbot.cpp +++ b/packages/kbot/cpp/packages/kbot/kbot.cpp @@ -2,6 +2,7 @@ #include #include #include "logger/logger.h" +#include "llm_client.h" namespace polymech { namespace kbot { @@ -17,9 +18,22 @@ int run_kbot_ai_pipeline(const KBotOptions& opts, const KBotCallbacks& cb) { tf::Taskflow taskflow; taskflow.emplace([opts, cb](){ - logger::info("Executing kbot ai tasks via Taskflow -> emit events..."); - if (cb.onEvent) { - cb.onEvent("ai_progress", "{\"message\":\"Task stub completed\"}"); + logger::info("Executing kbot ai completion via LLMClient..."); + LLMClient client(opts); + + std::string target_prompt = opts.prompt.empty() ? 
"Respond with 'Hello from KBot C++ AI Pipeline!'" : opts.prompt; + LLMResponse res = client.execute_chat(target_prompt); + + if (res.success) { + logger::info("AI Response:\n" + res.text); + if (cb.onEvent) { + cb.onEvent("ai_progress", "{\"message\":\"Task completion received\"}"); + } + } else { + logger::error("AI Task Failed: " + res.error); + if (cb.onEvent) { + cb.onEvent("ai_error", "{\"error\":\"Task failed\"}"); + } } }); diff --git a/packages/kbot/cpp/packages/kbot/llm_client.cpp b/packages/kbot/cpp/packages/kbot/llm_client.cpp new file mode 100644 index 00000000..4b3611c7 --- /dev/null +++ b/packages/kbot/cpp/packages/kbot/llm_client.cpp @@ -0,0 +1,91 @@ +#include "llm_client.h" +#include "logger/logger.h" +#include +#include + +namespace polymech { +namespace kbot { + +LLMClient::LLMClient(const KBotOptions& opts) + : api_key_(opts.api_key), model_(opts.model), router_(opts.router) { + + // Set default base_url_ according to client.ts mappings + if (opts.base_url.empty()) { + if (router_ == "openrouter") base_url_ = "https://openrouter.ai/api/v1"; + else if (router_ == "openai") base_url_ = ""; // liboai uses the default URL automatically + else if (router_ == "deepseek") base_url_ = "https://api.deepseek.com/v1"; + else if (router_ == "huggingface")base_url_ = "https://api-inference.huggingface.co/v1"; + else if (router_ == "ollama") base_url_ = "http://localhost:11434/v1"; + else if (router_ == "fireworks") base_url_ = "https://api.fireworks.ai/v1"; + else if (router_ == "gemini") base_url_ = "https://generativelanguage.googleapis.com/v1beta"; // or gemini openai compat endpt + else if (router_ == "xai") base_url_ = "https://api.x.ai/v1"; + else base_url_ = "https://api.openai.com/v1"; // Fallback to openai API + } else { + base_url_ = opts.base_url; + } + + // Default models based on router (from client.ts) + if (model_.empty()) { + if (router_ == "openrouter") model_ = "anthropic/claude-sonnet-4"; + else if (router_ == "openai") model_ = "gpt-4o"; + 
else if (router_ == "deepseek") model_ = "deepseek-chat"; + else if (router_ == "huggingface") model_ = "meta-llama/2"; + else if (router_ == "ollama") model_ = "leonard"; + else if (router_ == "fireworks") model_ = "llama-v2-70b-chat"; + else if (router_ == "gemini") model_ = "gemini-1.5-pro"; + else if (router_ == "xai") model_ = "grok-1"; + else model_ = "gpt-4o"; + } +} + +LLMClient::~LLMClient() = default; + +LLMResponse LLMClient::execute_chat(const std::string& prompt) { + LLMResponse res; + + if (api_key_.empty()) { + res.success = false; + res.error = "API Key is empty."; + return res; + } + + oai::OpenAI oai_impl; + + // Use liboai Auth component. + // If we need a custom base_url, liboai uses oai_impl.auth.SetBaseUrl() if it exists. + bool success = oai_impl.auth.SetKey(api_key_); + if (!success) { + res.success = false; + res.error = "Failed to set API Key in liboai."; + return res; + } + + // Set custom base URL for OpenRouter/DeepSeek. + oai_impl.auth.SetBaseUrl(base_url_); + + std::string target_model = model_.empty() ? "gpt-4o" : model_; + + try { + oai::Response response = oai_impl.ChatCompletion->create( + target_model, + { + {{"role", "user"}, {"content", prompt}} + } + ); + + res.success = true; + res.text = response["choices"][0]["message"]["content"].get(); + + } catch (std::exception& e) { + res.success = false; + res.error = e.what(); + } catch (...) 
{ + res.success = false; + res.error = "Unknown error occurred inside LLMClient execute_chat."; + } + + return res; +} + +} // namespace kbot +} // namespace polymech diff --git a/packages/kbot/cpp/packages/kbot/llm_client.h b/packages/kbot/cpp/packages/kbot/llm_client.h new file mode 100644 index 00000000..392fd71c --- /dev/null +++ b/packages/kbot/cpp/packages/kbot/llm_client.h @@ -0,0 +1,32 @@ +#pragma once + +#include +#include "kbot.h" + +namespace polymech { +namespace kbot { + +struct LLMResponse { + std::string text; + bool success = false; + std::string error; +}; + +class LLMClient { +public: + // Initialize the client with the options (api_key, model, router). + explicit LLMClient(const KBotOptions& opts); + ~LLMClient(); + + // Execute a basic chat completion using the provided prompt. + LLMResponse execute_chat(const std::string& prompt); + +private: + std::string api_key_; + std::string model_; + std::string router_; + std::string base_url_; +}; + +} // namespace kbot +} // namespace polymech diff --git a/packages/kbot/cpp/packages/liboai/.github/ISSUE_TEMPLATE/bug_report.yml b/packages/kbot/cpp/packages/liboai/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000..73e5ca7b --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,49 @@ +name: Bug report +description: Create a report to help us improve +labels: ["bug"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + - type: textarea + id: what-happened + attributes: + label: Describe the bug + description: A clear and concise description of what the bug is, and any additional context. + placeholder: Tell us what you see! + validations: + required: true + - type: textarea + id: repro-steps + attributes: + label: To Reproduce + description: Steps to reproduce the behavior. + placeholder: | + 1. Fetch a '...' + 2. Update the '....' + 3. 
See error + validations: + required: true + - type: textarea + id: code-snippets + attributes: + label: Code snippets + description: If applicable, add code snippets to help explain your problem. + render: C++ + validations: + required: false + - type: input + id: os + attributes: + label: OS + placeholder: macOS + validations: + required: true + - type: input + id: lib-version + attributes: + label: Library version + placeholder: liboai v1.0.0 + validations: + required: true diff --git a/packages/kbot/cpp/packages/liboai/.github/ISSUE_TEMPLATE/feature_request.yml b/packages/kbot/cpp/packages/liboai/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 00000000..de963b32 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,20 @@ +name: Feature request +description: Suggest an idea for this library +labels: ["feature-request"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this feature request! + - type: textarea + id: feature + attributes: + label: Describe the feature or improvement you're requesting + description: A clear and concise description of what you want to happen. + validations: + required: true + - type: textarea + id: context + attributes: + label: Additional context + description: Add any other context about the feature request here. diff --git a/packages/kbot/cpp/packages/liboai/.gitignore b/packages/kbot/cpp/packages/liboai/.gitignore new file mode 100644 index 00000000..f2ad07a4 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/.gitignore @@ -0,0 +1,6 @@ +.vs +[Bb]uild* +out +TestApp +.cache +/.idea diff --git a/packages/kbot/cpp/packages/liboai/AGENTS.md b/packages/kbot/cpp/packages/liboai/AGENTS.md new file mode 100644 index 00000000..e1aec7ca --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/AGENTS.md @@ -0,0 +1,24 @@ +# AGENTS.md + +This repo is a maintained fork of liboai. 
Our goal is to make it more reliable and feature-complete without breaking existing api. + +## Core Principles +- Preserve backward compatibility; add features without breaking existing APIs. +- Favor small, composable changes over rewrites. +- Keep the codebase clean and maintainable; document anything user-facing. +- Prioritize stability, correctness, and clear error handling. + +## Current Priorities +- Add OpenAI Responses API support for GPT-5.2 and gpt-5.2-pro. +- Keep Chat Completions and other existing components intact. +- Add documentation and examples for new features. + +## Workflow +- Update docs whenever you add or change public APIs. +- Use existing patterns and naming conventions in liboai. +- Avoid introducing new dependencies unless justified. + +## Notes +- The initial Responses API implementation should accept raw JSON payloads. +- A ResponseInput helper is planned, but not part of the initial implementation. +- Azure Responses support is out of scope for now. diff --git a/packages/kbot/cpp/packages/liboai/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/CMakeLists.txt new file mode 100644 index 00000000..328d0402 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/CMakeLists.txt @@ -0,0 +1,22 @@ +cmake_minimum_required(VERSION 3.21) + +project(liboai) + +IF(WIN32) + set(VCPKG_CMAKE_PATH $ENV{VCPKG_ROOT}/scripts/buildsystems/vcpkg.cmake CACHE FILEPATH "Location of vcpkg.cmake") + include(${VCPKG_CMAKE_PATH}) + find_package(ZLIB REQUIRED) + find_package(nlohmann_json CONFIG REQUIRED) + find_package(CURL REQUIRED) +ENDIF() + +option(BUILD_EXAMPLES "Build example applications" OFF) +set_property(GLOBAL PROPERTY USE_FOLDERS ON) + +add_subdirectory(liboai) + +if(BUILD_EXAMPLES) + add_subdirectory(documentation) +endif() + +set_property(DIRECTORY PROPERTY VS_STARTUP_PROJECT oai) diff --git a/packages/kbot/cpp/packages/liboai/LICENSE b/packages/kbot/cpp/packages/liboai/LICENSE new file mode 100644 index 00000000..33e8ba36 --- /dev/null +++ 
b/packages/kbot/cpp/packages/liboai/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Dread + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/packages/kbot/cpp/packages/liboai/README.md b/packages/kbot/cpp/packages/liboai/README.md new file mode 100644 index 00000000..5cab6868 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/README.md @@ -0,0 +1,100 @@ +

+ +

+ +
+

Introduction

+

liboai is a simple, unofficial C++17 library for the OpenAI API. It allows developers to access OpenAI endpoints through a simple collection of methods and classes. The library can most effectively be thought of as a spiritual port of OpenAI's Python library, simply called openai, due to its similar structure - with few exceptions. + +

Features

+ +- [x] [ChatGPT](https://github.com/D7EAD/liboai/tree/main/documentation/chat) +- [x] [Responses API](https://platform.openai.com/docs/api-reference/responses/create) +- [X] [Audio](https://github.com/D7EAD/liboai/tree/main/documentation/audio) +- [X] [Azure](https://github.com/D7EAD/liboai/tree/main/documentation/azure) +- [X] [Functions](https://platform.openai.com/docs/api-reference/chat/create) +- [x] [Image DALL·E](https://github.com/D7EAD/liboai/tree/main/documentation/images) +- [x] [Models](https://github.com/D7EAD/liboai/tree/main/documentation/models) +- [x] [Completions](https://github.com/D7EAD/liboai/tree/main/documentation/completions) +- [x] [Edit](https://github.com/D7EAD/liboai/tree/main/documentation/edits) +- [x] [Embeddings](https://github.com/D7EAD/liboai/tree/main/documentation/embeddings) +- [x] [Files](https://github.com/D7EAD/liboai/tree/main/documentation/files) +- [x] [Fine-tunes](https://github.com/D7EAD/liboai/tree/main/documentation/fine-tunes) +- [x] [Moderation](https://github.com/D7EAD/liboai/tree/main/documentation/moderations) +- [X] Asynchronous Support + +

Usage

+See below for just how similar in style liboai and its Python alternative are when generating an image using DALL-E.

+
+DALL-E Generation in Python. +
+ +```py +import openai +import os + +openai.api_key = os.getenv("OPENAI_API_KEY") +response = openai.Image.create( + prompt="A snake in the grass!", + n=1, + size="256x256" +) +print(response["data"][0]["url"]) +``` +
+ +
+DALL-E Generation in C++. +
+ +```cpp +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + oai.auth.SetKeyEnv("OPENAI_API_KEY"); + + Response res = oai.Image->create( + "A snake in the grass!", + 1, + "256x256" + ); + + std::cout << res["data"][0]["url"] << std::endl; +} +``` + +
+ +

Running the above will print out the URL to the resulting generated image, which may or may not look similar to the one found below.

+ + + + + + +
Example Image
+ + + +
+ +

Keep in mind the above C++ example is a minimal example and is not an exception-safe snippet. Please see the documentation for more detailed and exception-safe code snippets.

+ +

Dependencies

+

For the library to work the way it does, it relies on two major dependencies. These dependencies can be found listed below.

+ +- nlohmann-json +- cURL + +*If building the library using the provided solution, it is recommended to install these dependencies using vcpkg.* + +

Documentation

+

For detailed documentation and additional code examples, see the library's documentation here. + +

Contributing

+

Artificial intelligence is an exciting and quickly-changing field. + +If you'd like to partake in further placing the power of AI in the hands of everyday people, please consider contributing by submitting new code and features via a **Pull Request**. If you have any issues using the library, or just want to suggest new features, feel free to contact me directly using the info on my profile or open an **Issue**. diff --git a/packages/kbot/cpp/packages/liboai/ROADMAP.md b/packages/kbot/cpp/packages/liboai/ROADMAP.md new file mode 100644 index 00000000..b694a6ed --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/ROADMAP.md @@ -0,0 +1,25 @@ +# liboai Roadmap + +This is a living backlog of improvements and ideas as we deepen our use of the library. It is intentionally lightweight and updated as we discover new needs. + +## Now +- Responses API support (GPT-5.2, gpt-5.2-pro) +- Keep all existing APIs stable and intact + +## Next +- Responses streaming helpers and SSE parsing +- ResponseInput helper to build Responses `input` items +- `output_text` convenience helper for Responses outputs +- Structured outputs helpers for `text.format` +- Tool definition builders for Responses (`tools`, `tool_choice`) + +## Later +- More robust testing coverage (unit + integration samples) +- Improved error messaging with request context (safe, no secrets) +- Expanded docs and cookbook-style examples +- Performance pass on JSON construction and streaming + +## Observations +- The Conversation class is useful for Chat Completions; Responses lacks an equivalent. +- The library is stable but needs modernization for new OpenAI primitives. +- Maintaining compatibility is critical for existing users. 
diff --git a/packages/kbot/cpp/packages/liboai/documentation/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/CMakeLists.txt new file mode 100644 index 00000000..0840c592 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 3.13) + +project(documentation) + +macro(add_example target_name source_name) + add_executable(${target_name} "${source_name}") + target_link_libraries(${target_name} oai) + set_target_properties(${target_name} PROPERTIES FOLDER "examples/${PROJECT_NAME}") +endmacro() + +macro(add_basic_example source_base_name) + add_example(${source_base_name} "${source_base_name}.cpp") +endmacro() + +add_subdirectory(audio/examples) +add_subdirectory(authorization/examples) +add_subdirectory(azure/examples) +add_subdirectory(chat/examples) +add_subdirectory(chat/conversation/examples) +add_subdirectory(completions/examples) +add_subdirectory(edits/examples) +add_subdirectory(embeddings/examples) +add_subdirectory(files/examples) +add_subdirectory(fine-tunes/examples) +add_subdirectory(images/examples) +add_subdirectory(models/examples) +add_subdirectory(moderations/examples) +add_subdirectory(responses/examples) diff --git a/packages/kbot/cpp/packages/liboai/documentation/README.md b/packages/kbot/cpp/packages/liboai/documentation/README.md new file mode 100644 index 00000000..7499979f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/README.md @@ -0,0 +1,217 @@ +

Documentation

+

Both above and below, you can find resources and documentation for each component of the library.

+ +

Basic Usage

+

In order to understand how to use each component of the library, it would be ideal to first understand the basic structure of the library as a whole. When using liboai in a project, you should only include one header file, liboai.h. This header provides an interface to all other components of the library such as Images, Completions, etc. + +See below for both a correct and incorrect example.

+ + + + + + + + + +
CorrectIncorrect
+ +```cpp +#include "liboai.h" + +int main() { + ... +} +``` + + + +```cpp +#include "fine_tunes.h" +#include "models.h" +// etc... + +int main() { + ... +} +``` + +
+ +
+

Once we have properly included the necessary header file to use the library--and assuming symbols are linked properly--we can make use of the class in liboai.h to get started. At some point in our source code, we will have to choose when to define a liboai::OpenAI object to access component interfaces. Each component interface stored in this object offers methods associated with it, so, for instance, interface Image will have a method create(...) to generate an image from text. Each non-async method returns a liboai::Response containing response information whereas async methods return a liboai::FutureResponse. However, before we start using these methods, we must first set our authorization information--otherwise it will not work! + +liboai::OpenAI also houses another important member, the authorization member, which is used to set authorization information (such as the API key and organization IDs) before we call the API methods. For more information on additional members found in liboai::Authorization, refer to the authorization folder above. + +See below for both a correct and incorrect control flow when generating an image.

+ + + + + + + + + +
CorrectIncorrect
+ +```cpp +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // Set our API key using an environment variable. + // This is recommended as hard-coding API keys is + // insecure. + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + Response response = oai.Image->create( + "a siamese cat!" + ); + } + + ... +} +``` + + + +```cpp +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // Failure to set authorization info! + // Will fail, exception will be thrown! + Response response = oai.Image->create( + "a siamese cat!" + ); + + ... +} +``` + +
+ +
+

As you can see above, authentication-setting functions return booleans to indicate success and failure, whereas component methods will throw an exception, OpenAIException or OpenAIRateLimited, to indicate their success or failure; these should be checked for accordingly. Below you can find an exception-safe version of the above correct snippet.

+ + + + + + + +
Correct, exception-safe
+ +```cpp +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Image->create( + "a siamese cat!" + ); + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + + ... + } +} +``` + +
+ +
+

Now, once we have made a call using a component interface, we most certainly want to get the information out of it. To do this, using our knowledge of the format of the API responses, we can extract the information, such as the resulting image's URL, using JSON indexing on the liboai::Response object. See below for an example where we print the generated image's URL.

+ + + + + + + +
Accessing JSON Response Data
+ +```cpp +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Image->create( + "a siamese cat!" + ); + std::cout << response["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} +``` + +
+ +
+

What if we want to do more than just print the URL of the image? Why not download it right when it's done? Thankfully, liboai has a convenient function for that, Network::Download(...) (and Network::DownloadAsync(...)). See below for an example of downloading a freshly generated image. + + + + + + +
Downloading a Generated Image
+ +```cpp +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Image->create( + "a siamese cat!" + ); + Network::Download( + "C:/some/folder/file.png", // to + response["data"][0]["url"].get(), // from + oai.auth.GetAuthorizationHeaders() + ); + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} +``` + +
+ +
+

After a successful run of the above snippet, the file found at the URL returned from the component call will be downloaded to the path C:/some/folder/file.png. +
+ +

Synopsis

+

Each component interface found within liboai::OpenAI follows the same pattern found above. Whether you want to generate images, completions, or fine-tune models, the control flow should follow--or remain similar to--the above examples. + +For detailed examples regarding individual component interfaces, refer to the appropriate folder listed above.

+ +

Project Maintenance

+

Maintainers can find PR workflow notes in documentation/maintenance.

diff --git a/packages/kbot/cpp/packages/liboai/documentation/audio/README.md b/packages/kbot/cpp/packages/liboai/documentation/audio/README.md new file mode 100644 index 00000000..96ed24f0 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/README.md @@ -0,0 +1,96 @@ +

Audio

+

The Audio class is defined in audio.h at liboai::Audio, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Audio endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Turn audio to text. +- Turn text to audio. + +
+

Methods

+

This document covers the method(s) located in audio.h. You can find their function signature(s) below.

+ +

Create a Transcription

+

Transcribes audio into the input language. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response transcribe( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt, + std::optional language = std::nullopt +) const & noexcept(false); +``` + +

Create a Transcription (async)

+

Asynchronously transcribes audio into the input language. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse transcribe_async( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt, + std::optional language = std::nullopt +) const& noexcept(false); +``` + +

Create a Translation

+

Translates audio into English. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response translate( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt +) const & noexcept(false); +``` + +

Create a Translation (async)

+

Asynchronously translates audio into English. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse translate_async( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt +) const& noexcept(false); +``` + +

Text to Speech

+

Turn text into lifelike spoken audio. Returns a liboai::Response containing response data. The audio data is in the content field of the liboai::Response

+ +```cpp +liboai::Response speech( + const std::string& model, + const std::string& voice, + const std::string& input, + std::optional response_format = std::nullopt, + std::optional speed = std::nullopt +) const& noexcept(false); +``` + +

Text to Speech (async)

+

Asynchronously turn text into lifelike spoken audio. Returns a liboai::FutureResponse containing response data. The audio data is in the content field of the liboai::Response

+ +```cpp +liboai::FutureResponse speech_async( + const std::string& model, + const std::string& voice, + const std::string& input, + std::optional response_format = std::nullopt, + std::optional speed = std::nullopt +) const& noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/audio/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/CMakeLists.txt new file mode 100644 index 00000000..c476f4b5 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/CMakeLists.txt @@ -0,0 +1,10 @@ +cmake_minimum_required(VERSION 3.13) + +project(audio) + +add_basic_example(create_speech) +add_basic_example(create_speech_async) +add_basic_example(create_transcription) +add_basic_example(create_transcription_async) +add_basic_example(create_translation) +add_basic_example(create_translation_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_speech.cpp b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_speech.cpp new file mode 100644 index 00000000..306bf9fb --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_speech.cpp @@ -0,0 +1,24 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response res = oai.Audio->speech( + "tts-1", + "alloy", + "Today is a wonderful day to build something people love!" 
+ ); + std::ofstream ocout("demo.mp3", std::ios::binary); + ocout << res.content; + ocout.close(); + std::cout << res.content.size() << std::endl; + } + catch (const std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_speech_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_speech_async.cpp new file mode 100644 index 00000000..cd404cbf --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_speech_async.cpp @@ -0,0 +1,31 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + auto fut = oai.Audio->speech_async( + "tts-1", + "alloy", + "Today is a wonderful day to build something people love!" + ); + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto res = fut.get(); + std::ofstream ocout("demo.mp3", std::ios::binary); + ocout << res.content; + ocout.close(); + std::cout << res.content.size() << std::endl; + } + catch (const std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_transcription.cpp b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_transcription.cpp new file mode 100644 index 00000000..a766bb07 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_transcription.cpp @@ -0,0 +1,20 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response res = oai.Audio->transcribe( + "C:/some/folder/audio.mp3", + "whisper-1" + ); + std::cout << res["text"].get() << std::endl; + } + catch (const std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git 
a/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_transcription_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_transcription_async.cpp new file mode 100644 index 00000000..b5eb00d5 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_transcription_async.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Audio->transcribe_async( + "C:/some/folder/file.mp3", + "whisper-1" + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_translation.cpp b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_translation.cpp new file mode 100644 index 00000000..6aa0c000 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_translation.cpp @@ -0,0 +1,20 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response res = oai.Audio->translate( + "C:/some/folder/file.mp3", + "whisper-1" + ); + std::cout << res["text"] << std::endl; + } + catch (const std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_translation_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_translation_async.cpp new file mode 100644 index 00000000..db9e303f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/audio/examples/create_translation_async.cpp 
@@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Audio->translate_async( + "C:/some/folder/file.mp3", + "whisper-1" + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/README.md b/packages/kbot/cpp/packages/liboai/documentation/authorization/README.md new file mode 100644 index 00000000..d466afcc --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/README.md @@ -0,0 +1,177 @@ +

Authorization

+

The Authorization class is defined in authorization.h at liboai::Authorization. This class is responsible for sharing all set authorization information with all component classes in liboai. + +All authorization information should be set prior to the calling of any component methods such as Images, Embeddings, and so on. Failure to do so will result in a liboai::OpenAIException due to authorization failure on OpenAI's end.

+ +
+

Methods

+

This document covers the method(s) located in authorization.h. You can find their function signature(s) below.

+ +

Get Authorizer

+

Returns a reference to the liboai::Authorization singleton shared among all components.

+ +```cpp +static Authorization& Authorizer() noexcept; +``` + +

Set API Key

+

Sets the API key to use in subsequent component calls.

+ +```cpp +bool SetKey(std::string_view key) noexcept; +``` + +

Set Azure API Key

+

Sets the Azure API key to use in subsequent component calls.

+ +```cpp +bool SetAzureKey(std::string_view key) noexcept; +``` + +

Set Active Directory Azure API Key

+

Sets the Active Directory Azure API key to use in subsequent component calls.

+ +```cpp +bool SetAzureKeyAD(std::string_view key) noexcept; +``` + +

Set API Key (File)

+

Sets the API key to use in subsequent component calls from data found in file at path.

+ +```cpp +bool SetKeyFile(const std::filesystem::path& path) noexcept; +``` + +

Set Azure API Key (File)

+

Sets the Azure API key to use in subsequent component calls from data found in file at path.

+ +```cpp +bool SetAzureKeyFile(const std::filesystem::path& path) noexcept; +``` + +

Set Active Directory Azure API Key (File)

+

Sets the Active Directory Azure API key to use in subsequent component calls from data found in file at path.

+ +```cpp +bool SetAzureKeyFileAD(const std::filesystem::path& path) noexcept; +``` + +

Set API Key (Environment Variable)

+

Sets the API key to use in subsequent component calls from an environment variable.

+ +```cpp +bool SetKeyEnv(std::string_view var) noexcept; +``` + +

Set Azure API Key (Environment Variable)

+

Sets the Azure API key to use in subsequent component calls from an environment variable.

+ +```cpp +bool SetAzureKeyEnv(std::string_view var) noexcept; +``` + +

Set Active Directory Azure API Key (Environment Variable)

+

Sets the Active Directory Azure API key to use in subsequent component calls from an environment variable.

+ +```cpp +bool SetAzureKeyEnvAD(std::string_view var) noexcept; +``` + +

Set Organization ID

+

Sets the organization ID to send in subsequent component calls.

+ +```cpp +bool SetOrganization(std::string_view org) noexcept; +``` + +

Set Organization ID (File)

+

Sets the organization ID to send in subsequent component calls from data found in file at path.

+ +```cpp +bool SetOrganizationFile(const std::filesystem::path& path) noexcept; +``` + +

Set Organization ID (Environment Variable)

+

Sets the organization ID to send in subsequent component calls from an environment variable.

+ +```cpp +bool SetOrganizationEnv(std::string_view var) noexcept; +``` + +

Set Proxies

+

Sets the proxy, or proxies, to use in subsequent component calls.

+ +```cpp +void SetProxies(const std::initializer_list>& hosts) noexcept; +void SetProxies(std::initializer_list>&& hosts) noexcept; +void SetProxies(const std::map& hosts) noexcept; +void SetProxies(std::map&& hosts) noexcept; +``` + +

Set Proxy Authentication

+

Sets the username and password to use when using a certain proxy protocol.

+ +```cpp +void SetProxyAuth(const std::map& proto_up) noexcept; +``` + +

Set Timeout

+

Sets the timeout in milliseconds for the library to use in component calls.

+ +```cpp +void SetMaxTimeout(int32_t ms) noexcept +``` + +

Get Key

+

Returns the currently set API key.

+ +```cpp +constexpr const std::string& GetKey() const noexcept; +``` + +

Get Organization ID

+

Returns the currently set organization ID.

+ +```cpp +constexpr const std::string& GetOrganization() const noexcept; +``` + + +

Get Proxies

+

Returns the currently set proxies.

+ +```cpp +netimpl::components::Proxies GetProxies() const noexcept; +``` + +

Get Proxy Authentication

+

Returns the currently set proxy authentication information.

+ +```cpp +netimpl::components::ProxyAuthentication GetProxyAuth() const noexcept; +``` + +

Get Timeout

+

Returns the currently set timeout.

+ +```cpp +netimpl::components::Timeout GetMaxTimeout() const noexcept; +``` + +

Get Authorization Headers

+

Returns the currently set authorization headers based on set information.

+ +```cpp +constexpr const netimpl::components::Header& GetAuthorizationHeaders() const noexcept; +``` + +

Get Azure Authorization Headers

+

Returns the currently set Azure authorization headers based on set information.

+ +```cpp +constexpr const netimpl::components::Header& GetAzureAuthorizationHeaders() const noexcept; +``` + +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/CMakeLists.txt new file mode 100644 index 00000000..1fa3fd49 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/CMakeLists.txt @@ -0,0 +1,15 @@ +cmake_minimum_required(VERSION 3.13) + +project(authorization) + +add_basic_example(set_azure_key) +add_basic_example(set_azure_key_env) +add_basic_example(set_azure_key_file) +add_basic_example(set_key) +add_basic_example(set_key_env_var) +add_basic_example(set_key_file) +add_basic_example(set_organization) +add_basic_example(set_organization_env_var) +add_basic_example(set_organization_file) +add_basic_example(set_proxies) +add_basic_example(set_proxy_auth) diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key.cpp new file mode 100644 index 00000000..a4f065ce --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetAzureKey("hard-coded-key")) { // NOT recommended + // ... + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key_env.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key_env.cpp new file mode 100644 index 00000000..a5797bf3 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key_env.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + // ... 
+ } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key_file.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key_file.cpp new file mode 100644 index 00000000..dee52cab --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_azure_key_file.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetAzureKeyFile("C:/some/folder/key.dat")) { + // ... + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key.cpp new file mode 100644 index 00000000..599e3b4c --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKey("hard-coded-key")) { // NOT recommended + // ... + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key_env_var.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key_env_var.cpp new file mode 100644 index 00000000..58c3d61b --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key_env_var.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + // ... 
+ } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key_file.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key_file.cpp new file mode 100644 index 00000000..e76415b6 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_key_file.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyFile("C:/some/folder/key.dat")) { + // ... + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization.cpp new file mode 100644 index 00000000..9686880f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY") && oai.auth.SetOrganization("org-123")) { + // ... + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization_env_var.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization_env_var.cpp new file mode 100644 index 00000000..0e3926a9 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization_env_var.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + + int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY") && oai.auth.SetOrganizationEnv("OPENAI_ORG_ID")) { + // ... 
+ } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization_file.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization_file.cpp new file mode 100644 index 00000000..55b1ce0c --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_organization_file.cpp @@ -0,0 +1,10 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY") && oai.auth.SetOrganizationFile("C:/some/folder/org.dat")) { + // ... + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_proxies.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_proxies.cpp new file mode 100644 index 00000000..d11aad30 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_proxies.cpp @@ -0,0 +1,21 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + /* + Set some proxies: + when we go to an http site, use fakeproxy1 + when we go to an https site, use fakeproxy2 + */ + oai.auth.SetProxies({ + { "http", "http://www.fakeproxy1.com" }, + { "https", "https://www.fakeproxy2.com" } + }); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + // ... 
+ } +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_proxy_auth.cpp b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_proxy_auth.cpp new file mode 100644 index 00000000..4ef28bae --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/authorization/examples/set_proxy_auth.cpp @@ -0,0 +1,31 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + /* + Set some proxies: + when we go to an http site, use fakeproxy1 + when we go to an https site, use fakeproxy2 + */ + oai.auth.SetProxies({ + { "http", "http://www.fakeproxy1.com" }, + { "https", "https://www.fakeproxy2.com" } + }); + + /* + Set the per-protocol proxy auth info: + when we go to an http site, use fakeuser1 and fakepass1 + when we go to an https site, use fakeuser2 and fakepass2 + */ + oai.auth.SetProxyAuth({ + {"http", {"fakeuser1", "fakepass1"}}, + {"https", {"fakeuser2", "fakepass2"}}, + }); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + // ... + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/README.md b/packages/kbot/cpp/packages/liboai/documentation/azure/README.md new file mode 100644 index 00000000..d979761a --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/README.md @@ -0,0 +1,204 @@ +

Azure

+

The Azure class is defined in azure.h at liboai::Azure, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Azure OpenAI API components. + +
+

Methods

+

This document covers the method(s) located in azure.h. You can find their function signature(s) below.

+ +

Create a Completion

+

Given a prompt, the model will return one or more predicted completions. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create_completion( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create a Completion (async)

+

Given a prompt, the model will asynchronously return one or more predicted completions. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_completion_async( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create an Embedding

+

Creates an embedding vector representing the input text. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create_embedding( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + const std::string& input, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create an Embedding (async)

+

Asynchronously creates an embedding vector representing the input text. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_embedding_async( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + const std::string& input, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create a Chat Completion

+

Creates a completion for the chat message. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create_chat_completion( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + const Conversation& conversation, + std::optional temperature = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create a Chat Completion (async)

+

Asynchronously creates a completion for the chat message. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_chat_completion_async( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + const Conversation& conversation, + std::optional temperature = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Request an Image Generation

+

Generate a batch of images from a text caption. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response request_image_generation( + const std::string& resource_name, + const std::string& api_version, + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt +) const & noexcept(false); +``` + +

Request an Image Generation (async)

+

Asynchronously generate a batch of images from a text caption. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse request_image_generation_async( + const std::string& resource_name, + const std::string& api_version, + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt +) const & noexcept(false); +``` + +

Get a Previously Generated Image

+

Retrieve the results (URL) of a previously called image generation operation. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response get_generated_image( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id +) const & noexcept(false); +``` + +

Get a Previously Generated Image (async)

+

Asynchronously retrieve the results (URL) of a previously called image generation operation. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse get_generated_image_async( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id +) const & noexcept(false); +``` + +

Delete a Previously Generated Image

+

Deletes the corresponding image from the Azure server. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response delete_generated_image( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id +) const & noexcept(false); +``` + +

Delete a Previously Generated Image (async)

+

Asynchronously deletes the corresponding image from the Azure server. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse delete_generated_image_async( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id +) const & noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/CMakeLists.txt new file mode 100644 index 00000000..22760b45 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/CMakeLists.txt @@ -0,0 +1,16 @@ +cmake_minimum_required(VERSION 3.13) + +project(azure) + +add_example(create_chat_completion_azure "create_chat_completion.cpp") +add_example(create_chat_completion_async_azure "create_chat_completion_async.cpp") +add_basic_example(create_completion) +add_basic_example(create_completion_async) +add_example(create_embedding_azure "create_embedding.cpp") +add_example(create_embedding_async_azure "create_embedding_async.cpp") +add_basic_example(delete_generated_image) +add_basic_example(delete_generated_image_async) +add_basic_example(get_generated_image) +add_basic_example(get_generated_image_async) +add_basic_example(request_image_generation) +add_basic_example(request_image_generation_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_chat_completion.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_chat_completion.cpp new file mode 100644 index 00000000..d9b1c87e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_chat_completion.cpp @@ -0,0 +1,28 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + Conversation convo; + convo.AddUserData("Hi, how are you?"); + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + Response res = oai.Azure->create_chat_completion( + "resource", "deploymentID", "api_version", + convo + ); + + // update the conversation with the response + convo.Update(res); + + // print the response from the API + std::cout << convo.GetLastResponse() << std::endl; + } + catch (std::exception& e) { + 
std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_chat_completion_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_chat_completion_async.cpp new file mode 100644 index 00000000..06d64c09 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_chat_completion_async.cpp @@ -0,0 +1,37 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + Conversation convo; + convo.AddUserData("Hi, how are you?"); + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Azure->create_chat_completion_async( + "resource", "deploymentID", "api_version", + convo + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto res = fut.get(); + + // update the conversation with the response + convo.Update(res); + + // print the response from the API + std::cout << convo.GetLastResponse() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_completion.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_completion.cpp new file mode 100644 index 00000000..0f14f282 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_completion.cpp @@ -0,0 +1,21 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + Response res = oai.Azure->create_completion( + "resource", "deploymentID", "api_version", + "Write a short poem about a snowman." 
+ ); + + std::cout << res["choices"][0]["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_completion_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_completion_async.cpp new file mode 100644 index 00000000..a94bc00e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_completion_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + auto fut = oai.Azure->create_completion_async( + "resource", "deploymentID", "api_version", + "Write a short poem about a snowman." + ); + + // do other stuff + + // wait for the future to be ready + fut.wait(); + + // get the result + auto res = fut.get(); + + std::cout << res["choices"][0]["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_embedding.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_embedding.cpp new file mode 100644 index 00000000..61070f8f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_embedding.cpp @@ -0,0 +1,21 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + Response res = oai.Azure->create_embedding( + "resource", "deploymentID", "api_version", + "String to get embedding for" + ); + + std::cout << res << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_embedding_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_embedding_async.cpp new 
file mode 100644 index 00000000..8734cad3 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/create_embedding_async.cpp @@ -0,0 +1,27 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + auto fut = oai.Azure->create_embedding_async( + "resource", "deploymentID", "api_version", + "String to get embedding for" + ); + + // do other work + + // wait for the future to complete + auto res = fut.get(); + + // output the response + std::cout << res << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/delete_generated_image.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/delete_generated_image.cpp new file mode 100644 index 00000000..e261e65d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/delete_generated_image.cpp @@ -0,0 +1,22 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + Response res = oai.Azure->delete_generated_image( + "resource", "api_version", + "f508bcf2-e651-4b4b-85a7-58ad77981ffa" + ); + + // output the response + std::cout << res << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/delete_generated_image_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/delete_generated_image_async.cpp new file mode 100644 index 00000000..714442f7 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/delete_generated_image_async.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + auto fut = 
oai.Azure->delete_generated_image_async( + "resource", "api_version", + "f508bcf2-e651-4b4b-85a7-58ad77981ffa" + ); + + // do other work + + // wait for the future to complete + fut.wait(); + + // get the result + auto res = fut.get(); + + // output the response + std::cout << res << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/get_generated_image.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/get_generated_image.cpp new file mode 100644 index 00000000..8b189df7 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/get_generated_image.cpp @@ -0,0 +1,22 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + Response res = oai.Azure->get_generated_image( + "resource", "api_version", + "f508bcf2-e651-4b4b-85a7-58ad77981ffa" + ); + + // output the response + std::cout << res << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/get_generated_image_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/get_generated_image_async.cpp new file mode 100644 index 00000000..08ce9b7a --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/get_generated_image_async.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + auto fut = oai.Azure->get_generated_image_async( + "resource", "api_version", + "f508bcf2-e651-4b4b-85a7-58ad77981ffa" + ); + + // do other work + + // wait for the future to complete + fut.wait(); + + // get the result + auto res = fut.get(); + + // output the response + std::cout << res << std::endl; + } + catch (std::exception& e) { + 
std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/request_image_generation.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/request_image_generation.cpp new file mode 100644 index 00000000..c8694721 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/request_image_generation.cpp @@ -0,0 +1,24 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + Response res = oai.Azure->request_image_generation( + "resource", "api_version", + "A snake in the grass!", + 1, + "512x512" + ); + + // output the response + std::cout << res["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/azure/examples/request_image_generation_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/request_image_generation_async.cpp new file mode 100644 index 00000000..06c6a090 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/azure/examples/request_image_generation_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) { + try { + auto fut = oai.Azure->request_image_generation_async( + "resource", "api_version", + "A snake in the grass!", + 1, + "512x512" + ); + + // do other work + + // wait for the future to complete + auto res = fut.get(); + + // output the response + std::cout << res["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/README.md b/packages/kbot/cpp/packages/liboai/documentation/chat/README.md new file mode 100644 index 00000000..bf32b68f --- /dev/null +++ 
b/packages/kbot/cpp/packages/liboai/documentation/chat/README.md @@ -0,0 +1,63 @@ +

Chat

+

The ChatCompletion class is defined in chat.h at liboai::ChatCompletion, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Chat endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Given a chat conversation, the model will return a chat completion response. + +> **Note** +> +> Before attempting to use the below methods, it is **highly** recommended +> to read through the documentation, and thoroughly understand the use, +> of the Conversation class as it is used +> in tandem with the `ChatCompletion` methods to keep track of chat +> history and succinctly form a conversation with the OpenAI chat +> endpoint. + +

Methods

+

This document covers the method(s) located in chat.h. You can find their function signature(s) below.

+ +

Create a Chat Completion

+

Creates a completion for the ongoing conversation. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& model, + const Conversation& conversation, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create a Chat Completion (async)

+

Asynchronously creates a completion for the ongoing conversation. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& model, + const Conversation& conversation, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const& noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/README.md b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/README.md new file mode 100644 index 00000000..8c706e71 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/README.md @@ -0,0 +1,409 @@ +

Conversation

+ +

Contents

+

You can jump to any content found on this page using the links below. +

+ +The Conversation class is defined at liboai::Conversation. + +This class can most effectively be thought of as a container for any conversation(s) that one may wish to carry out with a given model using the ChatCompletion methods. It keeps track of the history of the conversation for subsequent calls to the methods, allows a developer to set system directions, retrieve the last response, add user input, and so on. + +

Basic Use

+ +Each method found in ChatCompletion requires an existing object of class Conversation be provided. Before providing such an object to a method such as liboai::ChatCompletion::create, we must first populate it--perhaps with a question to ask the model we choose, like so: + + + + + + + + +
Creating a Conversation
+ +```cpp +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you?"); + + ... +} +``` + +
+ +Once we add a message to our Conversation, we can then supply it to a method such as liboai::ChatCompletion::create to begin our conversation starting with our user data, like so: + + + + + + + +
Starting the Conversation
+ +```cpp +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + ... + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} +``` + +
+ +Assuming that our request succeeded without throwing an exception, the response to our user data in our Conversation can be found in our Response object. We must now update our Conversation object with the response like so: + + + + + + + +
Updating our Conversation
+ +```cpp +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update our conversation with the response + convo.Update(response); + + ... + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} +``` + +
+ +After we update our Conversation, it now contains the original question we asked the model, as well as the response from the model. Now we can extract the response like so: + + + + + + + +
Printing the Response
+ +```cpp +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update our conversation with the response + convo.Update(response); + + // print the response + std::cout << convo.GetLastResponse() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} +``` + +
+ +This may print something along the lines of the following: +* "As an AI language model, I do not have emotions, but I am always responsive and ready to assist. How can I help you today?" + +

Usage Pattern

+As you have hopefully noticed, there is a pattern that can be followed with Conversation. Generally, when we want to make use of the methods found within liboai::ChatCompletion, we should adhere to the following series of steps: +
    +
  1. Create a Conversation object.
  2. +
  3. Set the user data (or optional, single-time system data as well), which is the user's input such as a question or a command.
  4. +
  5. Provide the object to ChatCompletion::create or a similar method.
  6. +
  7. Update the Conversation object with the response from the API.
  8. +
  9. Retrieve the chat model's response from the Conversation object.
  10. +
  11. Repeat steps 2, 3, 4, and 5 until the conversation is complete.
  12. +
+ +

The Use of System

+Other than setting user data in our Conversation objects, we can also set an optional system parameter that instructs the model on how to respond. If we wish to make use of this system parameter, we can do so like so: + + + + + + + +
Setting System Data to Guide Models
+ +```cpp +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // set the system message first - helps guide the model + convo.SetSystemData("You are a helpful bot that only answers questions about OpenAI."); + + // add a message to the conversation + convo.AddUserData("Hello, how are you?"); + + ... +} +``` + +
+ +Keep in mind that it is **highly** important to set the system data before user data. Furthermore, it is important to note that, according to OpenAI, some models (such as gpt-3.5-turbo-0301) do not always pay attention to this system data. As a result, it may be more efficient to set guiding data as user data like so: + + + + + + + +
Alternate Ways to Guide
+ +```cpp +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add guiding data and a message to the conversation + convo.AddUserData("You are a helpful bot that only answers questions about OpenAI: Hello, how are you?"); + + ... +} +``` + +
+ +

Synopsis

+With the use of Conversation objects, as we carry on a given conversation, our object will keep track of not only the history of the conversation we are having, but its contained context as well. That means that if we were to, at first, ask our model "When was last year's Super Bowl," and then subsequently ask it, "Who played in it," it would be aware of the context of the conversation for the second inquiry and answer accordingly. +
+
+In general, objects of class liboai::Conversation allow us to more easily engage in conversation with existing and future conversational chat models via the use of liboai::ChatCompletion methods. + +

Methods

+Below you can find the function signature(s) of the class methods found within liboai::Conversation. + +

Constructors

+

Constructors available to construct a Conversation object.

+ +```cpp +Conversation(); +Conversation(const Conversation& other); +Conversation(Conversation&& old) noexcept; +Conversation(std::string_view system_data); +Conversation(std::string_view system_data, std::string_view user_data); +Conversation(std::string_view system_data, std::initializer_list user_data); +Conversation(std::initializer_list user_data); +explicit Conversation(const std::vector& user_data); +``` + +

Assignment Operators

+

Operator overloads for assignment.

+ +```cpp +Conversation& operator=(const Conversation& other); +Conversation& operator=(Conversation&& old) noexcept; +``` + +

Set System Data

+

Sets the system parameter in the conversation that can be used to influence how the model may respond to input. This should always be called before setting user data, if used. Returns a bool indicating success.

+ +```cpp +bool SetSystemData(std::string_view data) & noexcept(false); +``` + +

Pop System Data

+

Removes (pops) the set system data. Returns a bool indicating success.

+ +```cpp +bool PopSystemData() & noexcept(false); +``` + + +

Add User Data

+

Adds user input to the conversation, such as a command or question to pose to a model. Returns a bool indicating success.

+ +```cpp +bool AddUserData(std::string_view data) & noexcept(false); +bool AddUserData(std::string_view data, std::string_view name) & noexcept(false); +``` + +

Pop User Data

+

Removes (pops) the most recently added user input to the conversation as long as it is the tail of the conversation. Returns a bool indicating success.

+ +```cpp +bool PopUserData() & noexcept(false); +``` + +

Get Last Response

+

Retrieves the last response from the conversation if one exists. This can be called when the last item in the conversation is an answer from a chat model, such as after the conversation is updated with a successful response from liboai::ChatCompletion::create. Returns a non-empty std::string containing the response from the chat model if one exists, empty otherwise.

+ +```cpp +std::string GetLastResponse() const & noexcept; +``` + +

Pop Last Response

+

Removes (pops) the last response from a chat model within the conversation if the tail of the conversation is a response. This can be called to remove a chat model response from the conversation after updating the conversation with said response. Returns a bool indicating success.

+ +```cpp +bool PopLastResponse() & noexcept(false); +``` + +

Check if Last Response is Function Call

+

Returns whether the most recent response, following a call to Update or a complete AppendStreamData, contains a function_call or not. Returns a boolean indicating if the last response is a function call.

+ +```cpp +bool LastResponseIsFunctionCall() const & noexcept; +``` + +

Get the Name of the Last Response's Function Call

+

Returns the name of the function_call in the most recent response. This should only be called if LastResponseIsFunctionCall() returns true. Returns a std::string containing the name of the last response's function call, empty if non-existent.

+ +```cpp +std::string GetLastFunctionCallName() const & noexcept(false); +``` + +

Get the Arguments of the Last Response's Function Call

+

Returns the arguments of the function_call in the most recent response in their raw JSON form. This should only be called if LastResponseIsFunctionCall() returns true. Returns a std::string containing the name of the last response's arguments in JSON form, empty if non-existent.

+ +```cpp +std::string GetLastFunctionCallArguments() const & noexcept(false); +``` + +

Update Conversation

+

Updates the conversation given a Response object. This method updates the conversation given a Response object. This method should only be used if AppendStreamData was NOT used immediately before it. + +For instance, if we made a call to create*(), and provided a callback function to stream and, within this callback, we used AppendStreamData to update the conversation per message, we would NOT want to use this method. In this scenario, the AppendStreamData method would have already updated the conversation, so this method would be a bad idea to call afterwards. Returns a bool indicating success.

+ +```cpp +bool Update(std::string_view history) & noexcept(false); +bool Update(const Response& response) & noexcept(false); +``` + +

Export Conversation

+

Exports the entire conversation to a JSON string. This method exports the conversation to a JSON string. The JSON string can be used to save the conversation to a file. The exported string contains both the conversation and included functions, if any. Returns the JSON string representing the conversation.

+ +```cpp +std::string Export() const & noexcept(false); +``` + +

Import Conversation

+

Imports a conversation from a JSON string. This method imports a conversation from a JSON string. The JSON string should be the JSON string returned from a call to Export(). Returns a boolean indicating success.

+ +```cpp +bool Import() const & noexcept(false); +``` + +

Append Stream Data

+

Appends stream data (SSEs) from streamed methods. This method updates the conversation given a token from a streamed method. This method should be used when using streamed methods such as ChatCompletion::create or create_async with a callback supplied. This function should be called from within the stream's callback function receiving the SSEs. Returns a boolean indicating data appending success.

+ +```cpp +bool AppendStreamData(std::string data) & noexcept(false); +``` + +

Set Function(s)

+

Sets the functions to be used for the conversation. This method sets the functions to be used for the conversation. Returns a boolean indicating success.

+ +```cpp +bool SetFunctions(Functions functions) & noexcept(false); +``` + +

Pop Function(s)

+

Pops any previously set functions.

+ +```cpp +void PopFunctions() & noexcept(false); +``` + +

Get Raw JSON Conversation

+

Retrieves the raw JSON of the conversation; the same functionality can be achieved using the operator<<(...) overload. Returns a std::string containing the JSON of the conversation.

+ +```cpp +std::string GetRawConversation() const & noexcept; +``` + +

Get Raw JSON Functions

+

Returns the raw JSON dump of the internal functions object in string format - if one exists.

+ +```cpp +std::string GetRawFunctions() const & noexcept; +``` + +

Get Functions JSON Object

+

Returns the JSON object of the set functions.

+ +```cpp +const nlohmann::json& GetFunctionsJSON() const & noexcept; +``` + +

Get Internal JSON

+

Retrieves a const-ref of the internal JSON object containing the conversation. Returns a const nlohmann::json& object.

+ +```cpp +const nlohmann::json& GetJSON() const & noexcept; +``` + +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder here and in the previous directory.

diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/CMakeLists.txt new file mode 100644 index 00000000..8e314741 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/CMakeLists.txt @@ -0,0 +1,13 @@ +cmake_minimum_required(VERSION 3.13) + +project(conversation) + +add_basic_example(adduserdata) +add_basic_example(getjsonobject) +add_basic_example(getlastresponse) +add_basic_example(getrawconversation) +add_basic_example(poplastresponse) +add_basic_example(popsystemdata) +add_basic_example(popuserdata) +add_basic_example(setsystemdata) +add_basic_example(update) diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/adduserdata.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/adduserdata.cpp new file mode 100644 index 00000000..7d68cbba --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/adduserdata.cpp @@ -0,0 +1,15 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add user data - such as a question + convo.AddUserData("What is the meaning of life?"); + + // ... +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getjsonobject.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getjsonobject.cpp new file mode 100644 index 00000000..aa880a79 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getjsonobject.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you? 
What time is it for you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update the conversation with the response + convo.Update(response); + + // get the internal conversation JSON object + nlohmann::json json = convo.GetJSON(); + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getlastresponse.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getlastresponse.cpp new file mode 100644 index 00000000..dc636304 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getlastresponse.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you? 
What time is it for you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update the conversation with the response + convo.Update(response); + + // print the conversation + std::cout << convo.GetLastResponse() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getrawconversation.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getrawconversation.cpp new file mode 100644 index 00000000..551afa93 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/getrawconversation.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you? 
What time is it for you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update the conversation with the response + convo.Update(response); + + // print the raw JSON conversation string + std::cout << convo.GetRawConversation() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/poplastresponse.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/poplastresponse.cpp new file mode 100644 index 00000000..f2776b76 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/poplastresponse.cpp @@ -0,0 +1,33 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you? 
What time is it for you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update the conversation with the response + convo.Update(response); + + // print the conversation + std::cout << convo.GetLastResponse() << std::endl; + + // pop (remove) the last response from the conversation + convo.PopLastResponse(); + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/popsystemdata.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/popsystemdata.cpp new file mode 100644 index 00000000..09175d66 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/popsystemdata.cpp @@ -0,0 +1,21 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // set system message to guide the chat model + convo.SetSystemData("You are helpful bot."); + + // remove the set system message + convo.PopSystemData(); + + // add a different system message + convo.SetSystemData("You are a helpful bot that enjoys business."); + + // ... 
+} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/popuserdata.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/popuserdata.cpp new file mode 100644 index 00000000..956ea776 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/popuserdata.cpp @@ -0,0 +1,21 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add user data - such as a question + convo.AddUserData("What is the meaning of life?"); + + // pop (remove) the above added user data + convo.PopUserData(); + + // add different user data + convo.AddUserData("What is the size of the universe?"); + + // ... +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/setsystemdata.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/setsystemdata.cpp new file mode 100644 index 00000000..ee373ef5 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/setsystemdata.cpp @@ -0,0 +1,15 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // set system message to guide the chat model + convo.SetSystemData("You are helpful bot."); + + // ... +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/update.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/update.cpp new file mode 100644 index 00000000..dc636304 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/conversation/examples/update.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("Hello, how are you? 
What time is it for you?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update the conversation with the response + convo.Update(response); + + // print the conversation + std::cout << convo.GetLastResponse() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/CMakeLists.txt new file mode 100644 index 00000000..20c5572e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/CMakeLists.txt @@ -0,0 +1,7 @@ +cmake_minimum_required(VERSION 3.13) + +project(chat) + +add_basic_example(create_chat_completion) +add_basic_example(create_chat_completion_async) +add_basic_example(ongoing_user_convo) diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/examples/create_chat_completion.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/create_chat_completion.cpp new file mode 100644 index 00000000..31c4c859 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/create_chat_completion.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("What is the point of taxes?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update our conversation with the response + convo.Update(response); + + // print the response + std::cout << convo.GetLastResponse() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/examples/create_chat_completion_async.cpp 
b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/create_chat_completion_async.cpp new file mode 100644 index 00000000..43ca1fa7 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/create_chat_completion_async.cpp @@ -0,0 +1,38 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // add a message to the conversation + convo.AddUserData("What is the point of taxes?"); + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + auto fut = oai.ChatCompletion->create_async( + "gpt-3.5-turbo", convo + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // update our conversation with the response + convo.Update(response); + + // print the response + std::cout << convo.GetLastResponse() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/chat/examples/ongoing_user_convo.cpp b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/ongoing_user_convo.cpp new file mode 100644 index 00000000..7ee2d35c --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/chat/examples/ongoing_user_convo.cpp @@ -0,0 +1,39 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + // create a conversation + Conversation convo; + + // holds next user input + std::string input; + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + while (true) { + // get next user input + std::cout << "You: "; std::getline(std::cin, input); + + // add user input to conversation + convo.AddUserData(input); + + // get response from OpenAI + Response response = oai.ChatCompletion->create( + "gpt-3.5-turbo", convo + ); + + // update our conversation with the response + convo.Update(response); + + // print the response + std::cout << "Bot: " << 
convo.GetLastResponse() << std::endl; + } + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/completions/README.md b/packages/kbot/cpp/packages/liboai/documentation/completions/README.md new file mode 100644 index 00000000..45c18c65 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/completions/README.md @@ -0,0 +1,63 @@ +

Completions

+

The Completions class is defined in completions.h at liboai::Completions, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Completions endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position. + +
+

Methods

+

This document covers the method(s) located in completions.h. You can find their function signature(s) below.

+ +

Create a Completion

+

Creates a completion for the provided prompt and parameters. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& model_id, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create a Completion (async)

+

Asynchronously creates a completion for the provided prompt and parameters. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& model_id, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional> stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/completions/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/completions/examples/CMakeLists.txt new file mode 100644 index 00000000..fdb88b29 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/completions/examples/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 3.13) + +project(completions) + +add_basic_example(generate_completion) +add_basic_example(generate_completion_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/completions/examples/generate_completion.cpp b/packages/kbot/cpp/packages/liboai/documentation/completions/examples/generate_completion.cpp new file mode 100644 index 00000000..c287dc72 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/completions/examples/generate_completion.cpp @@ -0,0 +1,21 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Completion->create( + "text-davinci-003", + "Say this is a test", + std::nullopt, + 7 + ); + std::cout << response["choices"][0]["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/completions/examples/generate_completion_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/completions/examples/generate_completion_async.cpp new file mode 100644 index 00000000..0e2b11ec --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/completions/examples/generate_completion_async.cpp @@ -0,0 +1,32 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Completion->create_async( + "text-davinci-003", + "Say this 
is a test", + std::nullopt, + 7 + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["choices"][0]["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/edits/README.md b/packages/kbot/cpp/packages/liboai/documentation/edits/README.md new file mode 100644 index 00000000..ae26b457 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/edits/README.md @@ -0,0 +1,43 @@ +

Edits

+

The Edits class is defined in edits.h at liboai::Edits, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Edits endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Given a prompt and an instruction, the model will return an edited version of the prompt. + +
+

Methods

+

This document covers the method(s) located in edits.h. You can find their function signature(s) below.

+ +

Create an Edit

+

Creates a new edit for the provided input, instruction, and parameters. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& model_id, + std::optional<std::string> input = std::nullopt, + std::optional<std::string> instruction = std::nullopt, + std::optional<uint16_t> n = std::nullopt, + std::optional<float> temperature = std::nullopt, + std::optional<float> top_p = std::nullopt +) const & noexcept(false); +``` + +

Create an Edit (async)

+

Asynchronously creates a new edit for the provided input, instruction, and parameters. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& model_id, + std::optional<std::string> input = std::nullopt, + std::optional<std::string> instruction = std::nullopt, + std::optional<uint16_t> n = std::nullopt, + std::optional<float> temperature = std::nullopt, + std::optional<float> top_p = std::nullopt +) const & noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/edits/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/edits/examples/CMakeLists.txt new file mode 100644 index 00000000..8c9a6c9f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/edits/examples/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 3.13) + +project(edits) + +add_basic_example(create_edit) +add_basic_example(create_edit_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/edits/examples/create_edit.cpp b/packages/kbot/cpp/packages/liboai/documentation/edits/examples/create_edit.cpp new file mode 100644 index 00000000..4fb47baf --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/edits/examples/create_edit.cpp @@ -0,0 +1,20 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Edit->create( + "text-davinci-edit-001", + "What day of the wek is it?", + "Fix the spelling mistakes" + ); + std::cout << response["choices"][0]["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/edits/examples/create_edit_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/edits/examples/create_edit_async.cpp new file mode 100644 index 00000000..631b4797 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/edits/examples/create_edit_async.cpp @@ -0,0 +1,31 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Edit->create_async( + "text-davinci-edit-001", + "What day of the wek is it?", + "Fix the spelling mistakes" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["choices"][0]["text"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/embeddings/README.md b/packages/kbot/cpp/packages/liboai/documentation/embeddings/README.md new file mode 100644 index 00000000..afa51694 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/embeddings/README.md @@ -0,0 +1,37 @@ +

Embeddings

+

The Embeddings class is defined in embeddings.h at liboai::Embeddings, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Embeddings endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms. + +
+

Methods

+

This document covers the method(s) located in embeddings.h. You can find their function signature(s) below.

+ +

Create an Embedding

+

Creates an embedding vector representing the input text. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& model_id, + std::optional<std::string> input = std::nullopt, + std::optional<std::string> user = std::nullopt +) const & noexcept(false); +``` + +

Create an Embedding (async)

+

Asynchronously creates an embedding vector representing the input text. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& model_id, + std::optional<std::string> input = std::nullopt, + std::optional<std::string> user = std::nullopt +) const & noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/CMakeLists.txt new file mode 100644 index 00000000..1cac2481 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 3.13) + +project(embeddings) + +add_basic_example(create_embedding) +add_basic_example(create_embedding_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/create_embedding.cpp b/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/create_embedding.cpp new file mode 100644 index 00000000..f1c65c50 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/create_embedding.cpp @@ -0,0 +1,19 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Embedding->create( + "text-embedding-ada-002", + "The food was delicious and the waiter..." 
+ ); + std::cout << response["data"][0]["embedding"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/create_embedding_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/create_embedding_async.cpp new file mode 100644 index 00000000..10c59e30 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/embeddings/examples/create_embedding_async.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Embedding->create_async( + "text-embedding-ada-002", + "The food was delicious and the waiter..." + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"][0]["embedding"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/README.md b/packages/kbot/cpp/packages/liboai/documentation/files/README.md new file mode 100644 index 00000000..a4810240 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/README.md @@ -0,0 +1,103 @@ +

Files

+

The Files class is defined in files.h at liboai::Files, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Files endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Files are used to upload documents that can be used with features like Fine-tuning. + +
+

Methods

+

This document covers the method(s) located in files.h. You can find their function signature(s) below.

+ +

List Files

+

Gets a list of files that belong to the user's organization. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response list() const & noexcept(false); +``` + +

List Files (async)

+

Asynchronously gets a list of files that belong to the user's organization. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse list_async() const & noexcept(false); +``` + +

Upload File

+

Upload a file that contains document(s) to be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::filesystem::path& file, + const std::string& purpose +) const & noexcept(false); +``` + +

Upload File (async)

+

Asynchronously upload a file that contains document(s) to be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::filesystem::path& file, + const std::string& purpose +) const & noexcept(false); +``` + +

Delete a File

+

Deletes a file. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response remove( + const std::string& file_id +) const & noexcept(false); +``` + +

Delete a File (async)

+

Asynchronously deletes a file. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse remove_async( + const std::string& file_id +) const & noexcept(false); +``` + +

Retrieve File

+

Returns information about a specific file. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response retrieve( + const std::string& file_id +) const & noexcept(false); +``` + +

Retrieve File (async)

+

Asynchronously returns information about a specific file. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse retrieve_async( + const std::string& file_id +) const & noexcept(false); +``` + +

Retrieve File Content (Download)

+

Returns the contents of the specified file and downloads it to the provided path. Returns a bool indicating failure or success.

+ +```cpp +bool download( + const std::string& file_id, + const std::string& save_to +) const & noexcept(false); +``` + +

Retrieve File Content (Download) (async)

+

Asynchronously returns the contents of the specified file and downloads it to the provided path. Returns a future bool indicating failure or success.

+ +```cpp +std::future<bool> download_async( + const std::string& file_id, + const std::string& save_to +) const & noexcept(false); +``` + +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/files/examples/CMakeLists.txt new file mode 100644 index 00000000..527dc0f9 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.13) + +project(files) + +add_basic_example(delete_file) +add_basic_example(delete_file_async) +add_basic_example(download_uploaded_file) +add_basic_example(download_uploaded_file_async) +add_basic_example(list_files) +add_basic_example(list_files_async) +add_basic_example(retrieve_file) +add_basic_example(retrieve_file_async) +add_basic_example(upload_file) +add_basic_example(upload_file_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/delete_file.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/delete_file.cpp new file mode 100644 index 00000000..f555b37f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/delete_file.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.File->remove( + "file-XjGxS3KTG0uNmNOK362iJua3" + ); + std::cout << response["deleted"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/delete_file_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/delete_file_async.cpp new file mode 100644 index 00000000..e488cec6 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/delete_file_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if 
(oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.File->remove_async( + "file-XjGxS3KTG0uNmNOK362iJua3" + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["deleted"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/download_uploaded_file.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/download_uploaded_file.cpp new file mode 100644 index 00000000..ca2783ca --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/download_uploaded_file.cpp @@ -0,0 +1,20 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + if (oai.File->download("file-XjGxS3KTG0uNmNOK362iJua3", "C:/some/folder/file.jsonl")) { + std::cout << "File downloaded successfully!" << std::endl; + } + else { + std::cout << "File download failed!" << std::endl; + } + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/download_uploaded_file_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/download_uploaded_file_async.cpp new file mode 100644 index 00000000..e2492350 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/download_uploaded_file_async.cpp @@ -0,0 +1,31 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.File->download_async( + "file-XjGxS3KTG0uNmNOK362iJua3", "C:/some/folder/file.jsonl" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // check if downloaded successfully + if (fut.get()) { + std::cout << "File downloaded successfully!" << std::endl; + } + else { + std::cout << "File download failed!" << std::endl; + } + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/list_files.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/list_files.cpp new file mode 100644 index 00000000..ceeae5ee --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/list_files.cpp @@ -0,0 +1,16 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.File->list(); + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/list_files_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/list_files_async.cpp new file mode 100644 index 00000000..411f7373 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/list_files_async.cpp @@ -0,0 +1,27 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.File->list_async(); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/retrieve_file.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/retrieve_file.cpp new file mode 100644 index 00000000..5c385308 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/retrieve_file.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.File->retrieve( + "file-XjGxS3KTG0uNmNOK362iJua3" + ); + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/retrieve_file_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/retrieve_file_async.cpp new file mode 100644 index 00000000..5c6fb03e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/retrieve_file_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.File->retrieve_async( + "file-XjGxS3KTG0uNmNOK362iJua3" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/upload_file.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/upload_file.cpp new file mode 100644 index 00000000..a09dfd8e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/upload_file.cpp @@ -0,0 +1,19 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.File->create( + "C:/some/folder/file.jsonl", + "fine-tune" + ); + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/files/examples/upload_file_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/files/examples/upload_file_async.cpp new file mode 100644 index 00000000..09fdbe23 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/files/examples/upload_file_async.cpp @@ -0,0 +1,30 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.File->create_async( + "C:/some/folder/file.jsonl", + "fine-tune" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/README.md b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/README.md new file mode 100644 index 00000000..73facf14 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/README.md @@ -0,0 +1,144 @@ +

Fine-Tunes

+

The FineTunes class is defined in fine_tunes.h at liboai::FineTunes, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Fine-tunes endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Manage fine-tuning jobs to tailor a model to your specific training data. + +
+

Methods

+

This document covers the method(s) located in fine_tunes.h. You can find their function signature(s) below.

+ +

Create a Fine-Tune

+

Creates a job that fine-tunes a specified model from a given dataset. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& training_file, + std::optional<std::string> validation_file = std::nullopt, + std::optional<std::string> model_id = std::nullopt, + std::optional<uint16_t> n_epochs = std::nullopt, + std::optional<uint16_t> batch_size = std::nullopt, + std::optional<float> learning_rate_multiplier = std::nullopt, + std::optional<float> prompt_loss_weight = std::nullopt, + std::optional<bool> compute_classification_metrics = std::nullopt, + std::optional<uint16_t> classification_n_classes = std::nullopt, + std::optional<std::string> classification_positive_class = std::nullopt, + std::optional<std::vector<float>> classification_betas = std::nullopt, + std::optional<std::string> suffix = std::nullopt +) const & noexcept(false); +``` + +

Create a Fine-Tune (async)

+

Asynchronously creates a job that fine-tunes a specified model from a given dataset. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& training_file, + std::optional<std::string> validation_file = std::nullopt, + std::optional<std::string> model_id = std::nullopt, + std::optional<uint16_t> n_epochs = std::nullopt, + std::optional<uint16_t> batch_size = std::nullopt, + std::optional<float> learning_rate_multiplier = std::nullopt, + std::optional<float> prompt_loss_weight = std::nullopt, + std::optional<bool> compute_classification_metrics = std::nullopt, + std::optional<uint16_t> classification_n_classes = std::nullopt, + std::optional<std::string> classification_positive_class = std::nullopt, + std::optional<std::vector<float>> classification_betas = std::nullopt, + std::optional<std::string> suffix = std::nullopt +) const & noexcept(false); +``` + +

List Fine-Tunes

+

List your organization's fine-tuning jobs. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response list() const & noexcept(false); +``` + + +

List Fine-Tunes (async)

+

Asynchronously list your organization's fine-tuning jobs. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse list_async() const & noexcept(false); +``` + +

Retrieve Fine-Tune

+

Gets info about the fine-tune job. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response retrieve( + const std::string& fine_tune_id +) const & noexcept(false); +``` + +

Retrieve Fine-Tune (async)

+

Asynchronously gets info about the fine-tune job. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse retrieve_async( + const std::string& fine_tune_id +) const & noexcept(false); +``` + +

Cancel Fine-Tune

+

Immediately cancel a fine-tune job. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response cancel( + const std::string& fine_tune_id +) const & noexcept(false); +``` + +

Cancel Fine-Tune (async)

+

Asynchronously and immediately cancel a fine-tune job. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse cancel_async( + const std::string& fine_tune_id +) const & noexcept(false); +``` + +

List Fine-Tune Events

+

Get fine-grained status updates for a fine-tune job. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response list_events( + const std::string& fine_tune_id, + std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt +) const & noexcept(false); +``` + +

List Fine-Tune Events (async)

+

Asynchronously get fine-grained status updates for a fine-tune job. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse list_events_async( + const std::string& fine_tune_id, + std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt +) const & noexcept(false); +``` + +

Delete Fine-Tune Model

+

Delete a fine-tuned model. You must have the Owner role in your organization. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response remove( + const std::string& model +) const & noexcept(false); +``` + +

Delete Fine-Tune Model (async)

+

Asynchronously delete a fine-tuned model. You must have the Owner role in your organization. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse remove_async( + const std::string& model +) const & noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/CMakeLists.txt new file mode 100644 index 00000000..96044028 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/CMakeLists.txt @@ -0,0 +1,16 @@ +cmake_minimum_required(VERSION 3.13) + +project(fine-tunes) + +add_basic_example(cancel_fine_tune) +add_basic_example(cancel_fine_tune_async) +add_basic_example(create_fine_tune) +add_basic_example(create_fine_tune_async) +add_basic_example(delete_fine_tune_model) +add_basic_example(delete_fine_tune_model_async) +add_basic_example(list_fine_tune_events) +add_basic_example(list_fine_tune_events_async) +add_basic_example(list_fine_tunes) +add_basic_example(list_fine_tunes_async) +add_basic_example(retrieve_fine_tune) +add_basic_example(retrieve_fine_tune_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/cancel_fine_tune.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/cancel_fine_tune.cpp new file mode 100644 index 00000000..aa49aa24 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/cancel_fine_tune.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.FineTune->cancel( + "ft-AF1WoRqd3aJAHsqc9NY7iL8F" + ); + std::cout << response["status"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/cancel_fine_tune_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/cancel_fine_tune_async.cpp new file mode 100644 index 00000000..53c0d0f7 --- /dev/null +++ 
b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/cancel_fine_tune_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.FineTune->cancel_async( + "ft-AF1WoRqd3aJAHsqc9NY7iL8F" + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["status"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/create_fine_tune.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/create_fine_tune.cpp new file mode 100644 index 00000000..9b5b4022 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/create_fine_tune.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.FineTune->create( + "file-XGinujblHPwGLSztz8cPS8XY" + ); + std::cout << response["events"][0]["message"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/create_fine_tune_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/create_fine_tune_async.cpp new file mode 100644 index 00000000..70c23c5d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/create_fine_tune_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.FineTune->create_async( + 
"file-XGinujblHPwGLSztz8cPS8XY" + ); + + // do other work... + + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["events"][0]["message"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/delete_fine_tune_model.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/delete_fine_tune_model.cpp new file mode 100644 index 00000000..a7ba4e79 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/delete_fine_tune_model.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.FineTune->remove( + "curie:ft-acmeco-2021-03-03-21-44-20" + ); + std::cout << response["deleted"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/delete_fine_tune_model_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/delete_fine_tune_model_async.cpp new file mode 100644 index 00000000..40fad9fd --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/delete_fine_tune_model_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.FineTune->remove_async( + "curie:ft-acmeco-2021-03-03-21-44-20" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["deleted"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tune_events.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tune_events.cpp new file mode 100644 index 00000000..53b42659 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tune_events.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.FineTune->list_events( + "ft-AF1WoRqd3aJAHsqc9NY7iL8F" + ); + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tune_events_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tune_events_async.cpp new file mode 100644 index 00000000..3df5c92c --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tune_events_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.FineTune->list_events_async( + "ft-AF1WoRqd3aJAHsqc9NY7iL8F" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tunes.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tunes.cpp new file mode 100644 index 00000000..3c1e9d8d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tunes.cpp @@ -0,0 +1,16 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.FineTune->list(); + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tunes_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tunes_async.cpp new file mode 100644 index 00000000..ba21e1fd --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/list_fine_tunes_async.cpp @@ -0,0 +1,27 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.FineTune->list_async(); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/retrieve_fine_tune.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/retrieve_fine_tune.cpp new file mode 100644 index 00000000..6ada2a40 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/retrieve_fine_tune.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.FineTune->retrieve( + "ft-AF1WoRqd3aJAHsqc9NY7iL8F" + ); + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/retrieve_fine_tune_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/retrieve_fine_tune_async.cpp new file mode 100644 index 00000000..05914600 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/fine-tunes/examples/retrieve_fine_tune_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.FineTune->retrieve_async( + "ft-AF1WoRqd3aJAHsqc9NY7iL8F" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/README.md b/packages/kbot/cpp/packages/liboai/documentation/images/README.md new file mode 100644 index 00000000..7e98066e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/README.md @@ -0,0 +1,97 @@ +

Images

+

The Images class is defined in images.h at liboai::Images, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Images endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Given a prompt and/or an input image, the model will generate a new image. + +
+

Methods

+

This document covers the method(s) located in images.h. You can find their function signature(s) below.

+ +

Create an Image

+

Creates an image given a prompt. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create an Image (async)

+

Asynchronously creates an image given a prompt. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create Image Edit

+

Creates an edited or extended image given an original image and a prompt. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create_edit( + const std::filesystem::path& image, + const std::string& prompt, + std::optional mask = std::nullopt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create Image Edit (async)

+

Asynchronously creates an edited or extended image given an original image and a prompt. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_edit_async( + const std::filesystem::path& image, + const std::string& prompt, + std::optional mask = std::nullopt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create Image Variation

+

Creates a variation of a given image. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create_variation( + const std::filesystem::path& image, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

Create Image Variation (async)

+

Asynchronously creates a variation of a given image. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_variation_async( + const std::filesystem::path& image, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt +) const & noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/images/examples/CMakeLists.txt new file mode 100644 index 00000000..776e0dad --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/CMakeLists.txt @@ -0,0 +1,12 @@ +cmake_minimum_required(VERSION 3.13) + +project(images) + +# compilation error +add_basic_example(download_generated_image) +add_basic_example(generate_edit) +add_basic_example(generate_edit_async) +add_basic_example(generate_image) +add_basic_example(generate_image_async) +add_basic_example(generate_variation) +add_basic_example(generate_variation_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/download_generated_image.cpp b/packages/kbot/cpp/packages/liboai/documentation/images/examples/download_generated_image.cpp new file mode 100644 index 00000000..680f29a3 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/download_generated_image.cpp @@ -0,0 +1,22 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Image->create( + "a siamese cat!" 
+ ); + Network::Download( + "C:/some/folder/file.png", // to + response["data"][0]["url"].get(), // from + netimpl::components::Header() + ); + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_edit.cpp b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_edit.cpp new file mode 100644 index 00000000..b13bfa61 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_edit.cpp @@ -0,0 +1,21 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Image->create_edit( + "C:/some/folder/otter.png", + "A cute baby sea otter wearing a beret", + "C:/some/folder/mask.png" + ); + + std::cout << response["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_edit_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_edit_async.cpp new file mode 100644 index 00000000..69d512cc --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_edit_async.cpp @@ -0,0 +1,31 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Image->create_edit_async( + "C:/some/folder/otter.png", + "A cute baby sea otter wearing a beret", + "C:/some/folder/mask.png" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_image.cpp b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_image.cpp new file mode 100644 index 00000000..afb3cef9 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_image.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Image->create( + "a siamese cat!" + ); + std::cout << response["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_image_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_image_async.cpp new file mode 100644 index 00000000..499a2bbb --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_image_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Image->create_async( + "a siamese cat!" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_variation.cpp b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_variation.cpp new file mode 100644 index 00000000..b045df30 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_variation.cpp @@ -0,0 +1,19 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Image->create_variation( + "C:/some/folder/otter.png" + ); + + std::cout << response["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_variation_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_variation_async.cpp new file mode 100644 index 00000000..4f875fec --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/images/examples/generate_variation_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Image->create_variation_async( + "C:/some/folder/otter.png" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"][0]["url"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/installation/README.md b/packages/kbot/cpp/packages/liboai/documentation/installation/README.md new file mode 100644 index 00000000..5501fb57 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/installation/README.md @@ -0,0 +1,47 @@ +

Making Use of liboai

+

In order to integrate the power of artificial intelligence and liboai into your codebase, you have a couple of options.

+ +

Integrate via source code

+

As liboai implements a cURL wrapper internally and uses a pure C++ JSON solution, liboai's header and implementation files can be added to an existing C++17 project and compiled alongside it. However, in order to do so, the project must have the following elements:

+ +* cURL available and linked to the project. +* nlohmann-json available. +* Compiling to C++17. + +

Assuming your existing codebase has the above in mind, you can safely add liboai's header and implementation files to your existing project and compile.

+ +

It's as easy as that!

+ +

Integrate via a static/dynamic library

+

Another means of integrating liboai into an existing C++17 project is as a static or dynamic library. This is slightly more complicated than simply including the source code of the library into your existing project, but can certainly be done in a few steps.

+ +

Static and dynamic libraries take many forms:

+ +* Windows + * Dynamic-Link Library (.dll) + * Static Library (.lib) +* Linux + * Shared Object (.so) + * Static Library (.a) +* MacOS + * Dynamic Library (.dylib) + * Static Library (.a) + +

However, their underlying concepts remain the same.

+ +

Turning liboai into a library

+

The process of compiling liboai into a static or dynamic library is not as hard as it may seem. Simply, using your IDE of choice, perform the following: + + 1. Ensure cURL and nlohmann-json are installed. + 2. Create a new C++ project. + 3. Import the liboai source code (.cpp and .h files). + 4. *Link your project to the cURL library. + 5. *Make sure you are targeting C++17. + 6. *Compile as a static or dynamic library. + +

Now, in the project you'd like to integrate liboai into: + + 1. Include the liboai header files (.h files). + 2. *Link to the output static or dynamic library you compiled in the above steps. + +*NOTE: how you do these steps depends on your choice of development environment. They can either be done in an IDE or a compiler on the command line. diff --git a/packages/kbot/cpp/packages/liboai/documentation/maintenance/README.md b/packages/kbot/cpp/packages/liboai/documentation/maintenance/README.md new file mode 100644 index 00000000..c97edff1 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/maintenance/README.md @@ -0,0 +1,40 @@ +

Maintainer Notes

+

This doc summarizes where PRs live now and how to review them efficiently.

+ +

Repositories and PR numbering

+
+- Canonical repo (accept PRs here): jasonduncan/liboai.
+- Upstream repo (archived): D7EAD/liboai.
+- PR numbers are per-repository, so PR #1 in our repo is unrelated to PR #1 upstream.
+

Common gh commands

+
gh pr list --repo jasonduncan/liboai
+
+gh pr view 1 --repo jasonduncan/liboai
+
+gh pr diff 1 --repo jasonduncan/liboai
+
+gh pr checkout 1 --repo jasonduncan/liboai
+
+ +

Upstream PRs are read-only history but can be useful for reference:

+
gh pr list --repo D7EAD/liboai
+
+ +

Remotes

+
git remote -v
+
+

Expected remotes:

+
+- origin: jasonduncan/liboai
+- upstream: D7EAD/liboai
+

Review checklist (build / CMake changes)

+
+- Verify minimum CMake version compatibility.
+- Confirm dependency targets exist for both find_package and vendored targets.
+- Test top-level build + install, and add_subdirectory usage in a parent project.
+- Ensure new options are documented and defaults preserve existing behavior.
diff --git a/packages/kbot/cpp/packages/liboai/documentation/models/README.md b/packages/kbot/cpp/packages/liboai/documentation/models/README.md new file mode 100644 index 00000000..fd5d0f1e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/models/README.md @@ -0,0 +1,45 @@ +

Models

+

The Models class is defined in models.h at liboai::Models, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Models endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- List and describe the various models available in the API. + +
+

Methods

+

This document covers the method(s) located in models.h. You can find their function signature(s) below.

+ +

List Models

+

Lists the currently available models, and provides basic information about each one such as the owner and availability. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response list() const & noexcept(false); +``` + +

List Models (async)

+

Asynchronously lists the currently available models, and provides basic information about each one such as the owner and availability. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse list_async() const & noexcept(false); +``` + +

Retrieve Model

+

Retrieves a model instance, providing basic information about the model such as the owner and permissioning. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response retrieve( + const std::string& model +) const & noexcept(false); +``` + +

Retrieve Model (async)

+

Asynchronously retrieves a model instance, providing basic information about the model such as the owner and permissioning. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse retrieve_async( + const std::string& model +) const & noexcept(false); +``` + +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/models/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/models/examples/CMakeLists.txt new file mode 100644 index 00000000..7cbb7ef4 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/models/examples/CMakeLists.txt @@ -0,0 +1,8 @@ +cmake_minimum_required(VERSION 3.13) + +project(models) + +add_basic_example(list_models) +add_basic_example(list_models_async) +add_basic_example(retrieve_model) +add_basic_example(retrieve_model_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/models/examples/list_models.cpp b/packages/kbot/cpp/packages/liboai/documentation/models/examples/list_models.cpp new file mode 100644 index 00000000..f50f9a27 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/models/examples/list_models.cpp @@ -0,0 +1,16 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Model->list(); + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/models/examples/list_models_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/models/examples/list_models_async.cpp new file mode 100644 index 00000000..d17561e7 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/models/examples/list_models_async.cpp @@ -0,0 +1,27 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Model->list_async(); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["data"] << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/models/examples/retrieve_model.cpp b/packages/kbot/cpp/packages/liboai/documentation/models/examples/retrieve_model.cpp new file mode 100644 index 00000000..fd7b2de3 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/models/examples/retrieve_model.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Model->retrieve( + "text-davinci-003" + ); + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/models/examples/retrieve_model_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/models/examples/retrieve_model_async.cpp new file mode 100644 index 00000000..8ea13971 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/models/examples/retrieve_model_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Model->retrieve_async( + "text-davinci-003" + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/moderations/README.md b/packages/kbot/cpp/packages/liboai/documentation/moderations/README.md new file mode 100644 index 00000000..f1380a7e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/moderations/README.md @@ -0,0 +1,35 @@ +

Moderations

+

The Moderations class is defined in moderations.h at liboai::Moderations, and its interface can ideally be accessed through a liboai::OpenAI object. + +This class and its associated liboai::OpenAI interface allow access to the Moderations endpoint of the OpenAI API; this endpoint's functionality can be found below.

+- Given an input text, outputs if the model classifies it as violating OpenAI's content policy. + +
+

Methods

+

This document covers the method(s) located in moderations.h. You can find their function signature(s) below.

+ +

Create a Moderation

+

Classifies if text violates OpenAI's Content Policy. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& input, + std::optional model = std::nullopt +) const & noexcept(false); +``` + +

Create a Moderation (async)

+

Asynchronously classifies if text violates OpenAI's Content Policy. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& input, + std::optional model = std::nullopt +) const & noexcept(false); +``` + +

All function parameters marked optional are not required and are resolved on OpenAI's end if not supplied.

+ +
+

Example Usage

+

For example usage of the above function(s), please refer to the examples folder. diff --git a/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/CMakeLists.txt new file mode 100644 index 00000000..e89e2561 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 3.13) + +project(moderations) + +add_basic_example(create_moderation) +add_basic_example(create_moderation_async) diff --git a/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/create_moderation.cpp b/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/create_moderation.cpp new file mode 100644 index 00000000..e1b4c689 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/create_moderation.cpp @@ -0,0 +1,18 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Moderation->create( + "I want to kill them." + ); + std::cout << response["results"][0]["flagged"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/create_moderation_async.cpp b/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/create_moderation_async.cpp new file mode 100644 index 00000000..46e9017d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/moderations/examples/create_moderation_async.cpp @@ -0,0 +1,29 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + // call async method; returns a future + auto fut = oai.Moderation->create_async( + "I want to kill them." + ); + + // do other work... 
+ + // check if the future is ready + fut.wait(); + + // get the contained response + auto response = fut.get(); + + // print some response data + std::cout << response["results"][0]["flagged"].get() << std::endl; + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/responses/README.md b/packages/kbot/cpp/packages/liboai/documentation/responses/README.md new file mode 100644 index 00000000..786693eb --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/responses/README.md @@ -0,0 +1,114 @@ +

Responses

+

The Responses class is defined in responses.h at liboai::Responses, and its interface can be accessed through a liboai::OpenAI object.

+ +

This class provides access to the Responses API. It offers a typed create overload for common fields and a raw JSON overload for full flexibility.

+ +

Methods

+

This document covers the method(s) located in responses.h. You can find their function signature(s) below.

+ +

Build a Request Payload

+

Builds a Responses API request payload from typed parameters.

+ +```cpp +static nlohmann::json build_request( + const std::string& model, + const nlohmann::json& input, + std::optional instructions = std::nullopt, + std::optional reasoning = std::nullopt, + std::optional text = std::nullopt, + std::optional max_output_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional seed = std::nullopt, + std::optional tools = std::nullopt, + std::optional tool_choice = std::nullopt, + std::optional parallel_tool_calls = std::nullopt, + std::optional store = std::nullopt, + std::optional previous_response_id = std::nullopt, + std::optional include = std::nullopt, + std::optional metadata = std::nullopt, + std::optional user = std::nullopt, + std::optional truncation = std::nullopt, + std::optional stream = std::nullopt +); +``` + +

Create a Response

+

Creates a response from typed parameters. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const std::string& model, + const nlohmann::json& input, + std::optional instructions = std::nullopt, + std::optional reasoning = std::nullopt, + std::optional text = std::nullopt, + std::optional max_output_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional seed = std::nullopt, + std::optional tools = std::nullopt, + std::optional tool_choice = std::nullopt, + std::optional parallel_tool_calls = std::nullopt, + std::optional store = std::nullopt, + std::optional previous_response_id = std::nullopt, + std::optional include = std::nullopt, + std::optional metadata = std::nullopt, + std::optional user = std::nullopt, + std::optional truncation = std::nullopt, + std::optional> stream = std::nullopt +) const & noexcept(false); +``` + +

Create a Response (raw JSON)

+

Creates a response from a raw JSON payload. Returns a liboai::Response containing response data.

+ +```cpp +liboai::Response create( + const nlohmann::json& request, + std::optional> stream = std::nullopt +) const & noexcept(false); +``` + +

Create a Response (async)

+

Asynchronously creates a response from typed parameters. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const std::string& model, + const nlohmann::json& input, + std::optional instructions = std::nullopt, + std::optional reasoning = std::nullopt, + std::optional text = std::nullopt, + std::optional max_output_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional seed = std::nullopt, + std::optional tools = std::nullopt, + std::optional tool_choice = std::nullopt, + std::optional parallel_tool_calls = std::nullopt, + std::optional store = std::nullopt, + std::optional previous_response_id = std::nullopt, + std::optional include = std::nullopt, + std::optional metadata = std::nullopt, + std::optional user = std::nullopt, + std::optional truncation = std::nullopt, + std::optional> stream = std::nullopt +) const & noexcept(false); +``` + +

Create a Response (async, raw JSON)

+

Asynchronously creates a response from a raw JSON payload. Returns a liboai::FutureResponse containing future response data.

+ +```cpp +liboai::FutureResponse create_async( + const nlohmann::json& request, + std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt +) const & noexcept(false); +``` + +

When using streaming, include "stream": true in the request (or pass a stream callback to the typed overload) and provide a stream callback to receive SSE data.

+ +

Example Usage

+

For example usage of the above function(s), please refer to the examples folder.

+

Examples include create_response.cpp (raw JSON) and create_response_typed.cpp (typed parameters).

diff --git a/packages/kbot/cpp/packages/liboai/documentation/responses/TECHNICAL_PLAN.md b/packages/kbot/cpp/packages/liboai/documentation/responses/TECHNICAL_PLAN.md new file mode 100644 index 00000000..28e28427 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/responses/TECHNICAL_PLAN.md @@ -0,0 +1,112 @@ +# Responses API Migration Plan (GPT-5.2) + +## Goal +Add first-class support for the OpenAI Responses API so liboai can use GPT-5.2 and gpt-5.2-pro, while keeping existing Chat Completions support for backward compatibility. + +## Current State (Repo Reality) +- `liboai::ChatCompletion` calls `/v1/chat/completions` and uses `Conversation` to manage messages and function calls. +- No Responses API component exists. +- Streaming parsers and conversation updates are tightly coupled to Chat Completions response shape. +- JSON request building uses `JsonConstructor` with explicit parameter lists. + +## Requirements from GPT-5.2 / Responses API +- New endpoint: `POST /v1/responses`. +- New request shape: `input` can be a string or an array of items; `instructions` can be separate. +- New parameters: `reasoning.effort`, `text.verbosity`, `text.format` (structured outputs), `max_output_tokens`. +- Model-specific constraint: `temperature`, `top_p`, and `logprobs` are only allowed when `reasoning.effort == "none"`. +- Tool definitions use `tools` with internally-tagged objects, and `tool_choice` supports `allowed_tools`. +- Multi-turn can chain with `previous_response_id`. + +## Proposed API Design + +### 1) New Component: `liboai::Responses` +- Files: + - `liboai/components/responses.cpp` + - `liboai/include/components/responses.h` +- Wiring: + - Add to `liboai/include/liboai.h` and `liboai/CMakeLists.txt`. +- Methods: + - `Response create(const nlohmann::json& request, ...) const`. + - `FutureResponse create_async(const nlohmann::json& request, ...) const`. + - Raw JSON payloads only for the initial implementation. 
+ +### 2) Request Builder Types (Deferred) +We will start with raw JSON requests to avoid introducing new abstractions. +If needed later, add a `ResponseRequest` or similar builder to simplify common cases. + +### 3) Input Helpers (Deferred) +- `ResponseInput` helper to build `input` items: + - `AddSystem(string)` + - `AddUser(string)` + - `AddAssistant(string)` + - `AddToolCall(...)` + - `AddToolOutput(...)` +- Provide a static adapter: `ResponseInput::FromConversation(const Conversation&)`. + +### 4) Tool Definitions +- New `Tools` helper to build `tools` arrays: + - Function tools: `{ "type": "function", "name", "description", "parameters" }` + - Custom tools: `{ "type": "custom", "name", "description" }` + - Built-ins: `{ "type": "web_search" }` and others (pass-through JSON) +- New `ToolChoice` helper to build `tool_choice` objects, including `allowed_tools`. + +### 5) Response Parsing Helpers +- Add optional helpers to extract `output_text` from the response JSON. +- Keep `liboai::Response` unchanged for compatibility; add a small utility function in `Responses` or a separate helper header. + +### 6) Streaming +- New streaming parser for Responses SSE events. +- Provide `ResponsesStreamCallback` that surfaces: + - `delta_text` (if any) + - `event_type` (for tool calls, output items, completion) + - a partial `Response` or updated `ResponseInput` (optional) +- Confirm event schema from official Responses streaming docs before implementation. + +### 7) Backward Compatibility +- Keep `ChatCompletion` intact. +- Consider adding optional `verbosity` and `reasoning_effort` parameters to `ChatCompletion` for GPT-5.2 users who stay on Chat Completions. +- Do not remove `Conversation` or existing behavior. + +## Implementation Plan (Milestones) + +### Milestone 1: Scaffolding +- [x] Add Responses component files and wire into build system. +- [x] Add minimal `create` that accepts a raw JSON payload. +- [x] Add a basic usage example (string input). 
+ +### Milestone 2: Core Params and Validation +- [ ] Implement `instructions`, `reasoning`, `text`, `max_output_tokens`. +- [ ] Add validation for `temperature` / `top_p` / `logprobs` when `reasoning.effort != "none"`. +- [ ] Add `store`, `previous_response_id`, `include`, `metadata`. + +### Milestone 3: Tools and Structured Outputs +- [ ] Add `Tools` and `ToolChoice` helpers. +- [ ] Support `text.format` for JSON schema structured outputs. +- [ ] Provide minimal tool-call example usage. + +### Milestone 4: Streaming and Output Helpers +- [ ] Implement SSE parsing for Responses. +- [ ] Add `output_text` helper extraction. +- [ ] Add streaming example and docs. + +### Milestone 5: Docs and Migration Guidance +- [x] Add `documentation/responses/README.md` with API usage. +- [x] Update `documentation/README.md` to link Responses docs. +- [x] Update root `README.md` feature list to include Responses API. + +## Testing Plan +- Compile examples in `documentation/responses/examples`. +- Add at least one integration test snippet (manual run) for: + - simple string input + - tool definition + tool call response parsing + - structured output +- If a unit test framework is added later, add a JSON shape test for `ResponseInput` and `ToolChoice` builders. + +## Risks and Mitigations +- Streaming event schema is different from Chat Completions: verify with official docs before implementing. +- Tool calling item shapes in Responses are not identical to Chat Completions: parse by `type` and `call_id`. +- Parameter incompatibilities for GPT-5.2: add validation and helpful error messages. + +## Open Questions for Jason +- Should `ResponseInput` be added after the basic Responses component ships? +- Should we add `output_text()` convenience on `liboai::Response`, or keep helpers in `Responses` only? 
diff --git a/packages/kbot/cpp/packages/liboai/documentation/responses/examples/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/documentation/responses/examples/CMakeLists.txt new file mode 100644 index 00000000..cbf781aa --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/responses/examples/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 3.13) + +project(responses) + +add_basic_example(create_response) +add_basic_example(create_response_typed) diff --git a/packages/kbot/cpp/packages/liboai/documentation/responses/examples/create_response.cpp b/packages/kbot/cpp/packages/liboai/documentation/responses/examples/create_response.cpp new file mode 100644 index 00000000..635d0957 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/responses/examples/create_response.cpp @@ -0,0 +1,47 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + nlohmann::json request; + request["model"] = "gpt-5.2-pro"; + request["input"] = "Hello from the Responses API."; + + Response response = oai.Responses->create(request); + + std::string output_text; + if (response.raw_json.contains("output")) { + for (const auto& item : response["output"]) { + if (item.contains("type") && item["type"] == "message") { + if (item.contains("content") && item["content"].is_array()) { + for (const auto& content : item["content"]) { + if (content.contains("type") && content["type"] == "output_text" && content.contains("text")) { + output_text = content["text"].get(); + break; + } + } + } + } + + if (!output_text.empty()) { + break; + } + } + } + + if (!output_text.empty()) { + std::cout << output_text << std::endl; + } + else { + std::cout << response << std::endl; + } + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/documentation/responses/examples/create_response_typed.cpp 
b/packages/kbot/cpp/packages/liboai/documentation/responses/examples/create_response_typed.cpp new file mode 100644 index 00000000..99eb0034 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/documentation/responses/examples/create_response_typed.cpp @@ -0,0 +1,48 @@ +#include "liboai.h" + +using namespace liboai; + +int main() { + OpenAI oai; + + if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) { + try { + Response response = oai.Responses->create( + "gpt-5.2-pro", + "Hello from the typed Responses API." + ); + + // std::cout << response << std::endl; + // std::cout << response["choices"][0]["text"].get() << std::endl; + std::string output_text; + if (response.raw_json.contains("output")) { + for (const auto& item : response["output"]) { + if (item.contains("type") && item["type"] == "message") { + if (item.contains("content") && item["content"].is_array()) { + for (const auto& content : item["content"]) { + if (content.contains("type") && content["type"] == "output_text" && content.contains("text")) { + output_text = content["text"].get(); + break; + } + } + } + } + + if (!output_text.empty()) { + break; + } + } + } + + if (!output_text.empty()) { + std::cout << output_text << std::endl; + } + else { + std::cout << response << std::endl; + } + } + catch (std::exception& e) { + std::cout << e.what() << std::endl; + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/flake.lock b/packages/kbot/cpp/packages/liboai/flake.lock new file mode 100644 index 00000000..e95f5a77 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/flake.lock @@ -0,0 +1,61 @@ +{ + "nodes": { + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + 
"locked": { + "lastModified": 1736012469, + "narHash": "sha256-/qlNWm/IEVVH7GfgAIyP6EsVZI6zjAx1cV5zNyrs+rI=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "8f3e1f807051e32d8c95cd12b9b421623850a34d", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/packages/kbot/cpp/packages/liboai/flake.nix b/packages/kbot/cpp/packages/liboai/flake.nix new file mode 100644 index 00000000..925b0e63 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/flake.nix @@ -0,0 +1,17 @@ +{ + description = "C++ Development Environment"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + }; + + outputs = { self, nixpkgs, flake-utils }: + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + in { + devShells.default = import ./shell.nix { pkgs = pkgs; }; + } + ); +} diff --git a/packages/kbot/cpp/packages/liboai/images/_logo.png b/packages/kbot/cpp/packages/liboai/images/_logo.png new file mode 100644 index 00000000..189522f6 Binary files /dev/null and b/packages/kbot/cpp/packages/liboai/images/_logo.png differ diff --git a/packages/kbot/cpp/packages/liboai/images/snake.png b/packages/kbot/cpp/packages/liboai/images/snake.png new file mode 100644 index 00000000..a3ce566a Binary files /dev/null and b/packages/kbot/cpp/packages/liboai/images/snake.png differ diff --git 
a/packages/kbot/cpp/packages/liboai/liboai/CMakeLists.txt b/packages/kbot/cpp/packages/liboai/liboai/CMakeLists.txt new file mode 100644 index 00000000..a74f62b4 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/CMakeLists.txt @@ -0,0 +1,194 @@ +cmake_minimum_required(VERSION 3.21) + +include(CMakePackageConfigHelpers) + +project(oai VERSION 4.0.1) + +if(MSVC) + set(CMAKE_DEBUG_POSTFIX "d") +endif() + +# When liboai is used as a vendored dependency (via add_subdirectory), +# its install/export rules can break the parent build if its link +# dependencies (eg. CURL::libcurl) are not part of liboai's export set. +# Only enable install/export when building liboai as the top-level project. +set(_LIBOAI_DEFAULT_INSTALL OFF) +if(PROJECT_IS_TOP_LEVEL) + set(_LIBOAI_DEFAULT_INSTALL ON) +endif() +option(LIBOAI_INSTALL "Enable liboai install/export rules" ${_LIBOAI_DEFAULT_INSTALL}) + +# Try to find nlohmann_json, but if not found, use a target if it exists +# (common when nlohmann_json is provided by a parent project via add_subdirectory/add_library). 
+if(TARGET nlohmann_json::nlohmann_json) + set(nlohmann_json_FOUND TRUE) +elseif(TARGET nlohmann_json) + set(nlohmann_json_FOUND TRUE) + add_library(nlohmann_json::nlohmann_json ALIAS nlohmann_json) +else() + find_package(nlohmann_json CONFIG QUIET) + if(NOT nlohmann_json_FOUND) + message(FATAL_ERROR "nlohmann_json not found and no nlohmann_json target exists") + endif() +endif() + +# Try to find CURL, but if not found, use the vendored target if it exists +if(TARGET CURL::libcurl) + set(CURL_FOUND TRUE) +elseif(TARGET libcurl) + add_library(CURL::libcurl ALIAS libcurl) + set(CURL_FOUND TRUE) +elseif(TARGET curl) + add_library(CURL::libcurl ALIAS curl) + set(CURL_FOUND TRUE) +else() + find_package(CURL QUIET) + if(CURL_FOUND AND NOT TARGET CURL::libcurl) + if(TARGET libcurl) + add_library(CURL::libcurl ALIAS libcurl) + elseif(TARGET curl) + add_library(CURL::libcurl ALIAS curl) + else() + add_library(CURL::libcurl INTERFACE IMPORTED) + if(CURL_LIBRARIES) + set_property(TARGET CURL::libcurl PROPERTY + INTERFACE_LINK_LIBRARIES "${CURL_LIBRARIES}") + endif() + if(CURL_INCLUDE_DIRS) + set_property(TARGET CURL::libcurl PROPERTY + INTERFACE_INCLUDE_DIRECTORIES "${CURL_INCLUDE_DIRS}") + endif() + endif() + endif() + if(NOT TARGET CURL::libcurl) + message(FATAL_ERROR "CURL not found and CURL::libcurl target does not exist") + endif() +endif() + +add_library(${PROJECT_NAME}) + +function(make_absolute_paths result_var) + set(absolute_paths) + foreach(file IN LISTS ARGN) + list(APPEND absolute_paths "${CMAKE_CURRENT_SOURCE_DIR}/${file}") + endforeach() + set(${result_var} "${absolute_paths}" PARENT_SCOPE) +endfunction() + +set(HEADERS_RELATIVE + "include/liboai.h" +) + +make_absolute_paths(HEADERS ${HEADERS_RELATIVE}) +source_group("include" FILES ${HEADERS}) + +set(COMPONENT_HEADERS_RELATIVE + "include/components/audio.h" + "include/components/azure.h" + "include/components/chat.h" + "include/components/completions.h" + "include/components/edits.h" + 
"include/components/embeddings.h" + "include/components/files.h" + "include/components/fine_tunes.h" + "include/components/images.h" + "include/components/models.h" + "include/components/moderations.h" + "include/components/responses.h" +) + +make_absolute_paths(COMPONENT_HEADERS ${COMPONENT_HEADERS_RELATIVE}) +source_group("include/components" FILES ${COMPONENT_HEADERS}) + +set(COMPONENT_SOURCES_RELATIVE + "components/audio.cpp" + "components/azure.cpp" + "components/chat.cpp" + "components/completions.cpp" + "components/edits.cpp" + "components/embeddings.cpp" + "components/files.cpp" + "components/fine_tunes.cpp" + "components/images.cpp" + "components/models.cpp" + "components/moderations.cpp" + "components/responses.cpp" +) + +make_absolute_paths(COMPONENT_SOURCES ${COMPONENT_SOURCES_RELATIVE}) +source_group("source/components" FILES ${COMPONENT_SOURCES}) + +set(CORE_HEADERS_RELATIVE + "include/core/authorization.h" + "include/core/exception.h" + "include/core/netimpl.h" + "include/core/network.h" + "include/core/response.h" +) + +make_absolute_paths(CORE_HEADERS ${CORE_HEADERS_RELATIVE}) +source_group("include/core" FILES ${CORE_HEADERS}) + +set(CORE_SOURCES_RELATIVE + "core/authorization.cpp" + "core/netimpl.cpp" + "core/response.cpp" +) + +make_absolute_paths(CORE_SOURCES ${CORE_SOURCES_RELATIVE}) +source_group("source/core" FILES ${CORE_SOURCES}) + +target_sources(${PROJECT_NAME} + PRIVATE + ${COMPONENT_SOURCES} + ${CORE_SOURCES} + PUBLIC + "$" + "$" + "$" + "$" + "$" + "$" +) + +target_compile_features(${PROJECT_NAME} PUBLIC cxx_std_17) + +target_link_libraries(${PROJECT_NAME} + PUBLIC + nlohmann_json::nlohmann_json + CURL::libcurl +) + +target_include_directories(${PROJECT_NAME} + PUBLIC + "$" + "$" +) + +if(LIBOAI_INSTALL) + install(TARGETS ${PROJECT_NAME} DESTINATION lib EXPORT ${PROJECT_NAME}Targets) + install(FILES ${HEADERS} DESTINATION "include") + install(FILES ${COMPONENT_HEADERS} DESTINATION "include/components") + install(FILES ${CORE_HEADERS} 
DESTINATION "include/core") + + configure_package_config_file("${CMAKE_CURRENT_SOURCE_DIR}/Config.cmake.in" + "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" + INSTALL_DESTINATION "lib/cmake/${PROJECT_NAME}" + ) + + write_basic_package_version_file( + "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake" + COMPATIBILITY AnyNewerVersion + ) + + install(EXPORT ${PROJECT_NAME}Targets + FILE ${PROJECT_NAME}Targets.cmake + NAMESPACE oai:: + DESTINATION "lib/cmake/${PROJECT_NAME}" + ) + install(FILES + "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" + "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake" + DESTINATION "lib/cmake/${PROJECT_NAME}" + ) +endif() diff --git a/packages/kbot/cpp/packages/liboai/liboai/Config.cmake.in b/packages/kbot/cpp/packages/liboai/liboai/Config.cmake.in new file mode 100644 index 00000000..202da493 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/Config.cmake.in @@ -0,0 +1,6 @@ +@PACKAGE_INIT@ + +include("${CMAKE_CURRENT_LIST_DIR}/oaiTargets.cmake") + +find_package(nlohmann_json CONFIG REQUIRED) +find_package(CURL REQUIRED) diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/audio.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/audio.cpp new file mode 100644 index 00000000..3b498d48 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/audio.cpp @@ -0,0 +1,100 @@ +#include "../include/components/audio.h" + +liboai::Response liboai::Audio::transcribe(const std::filesystem::path& file, const std::string& model, std::optional prompt, std::optional response_format, std::optional temperature, std::optional language) const& noexcept(false) { + if (!this->Validate(file)) { + throw liboai::exception::OpenAIException( + "File path provided is non-existent, is not a file, or is empty.", + liboai::exception::EType::E_FILEERROR, + "liboai::Audio::transcribe(...)" + ); + } + + netimpl::components::Multipart form = { + { "file", 
netimpl::components::File{file.generic_string()} }, + { "model", model } + }; + + if (prompt) { form.parts.push_back({ "prompt", prompt.value() }); } + if (response_format) { form.parts.push_back({ "response_format", response_format.value() }); } + if (temperature) { form.parts.push_back({ "temperature", std::to_string(temperature.value()) }); } + if (language) { form.parts.push_back({ "language", language.value() }); } + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/audio/transcriptions", "multipart/form-data", + this->auth_.GetAuthorizationHeaders(), + std::move(form), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Audio::transcribe_async(const std::filesystem::path& file, const std::string& model, std::optional prompt, std::optional response_format, std::optional temperature, std::optional language) const& noexcept(false) { + return std::async(std::launch::async, &liboai::Audio::transcribe, this, file, model, prompt, response_format, temperature, language); +} + +liboai::Response liboai::Audio::translate(const std::filesystem::path& file, const std::string& model, std::optional prompt, std::optional response_format, std::optional temperature) const& noexcept(false) { + if (!this->Validate(file)) { + throw liboai::exception::OpenAIException( + "File path provided is non-existent, is not a file, or is empty.", + liboai::exception::EType::E_FILEERROR, + "liboai::Audio::translate(...)" + ); + } + + netimpl::components::Multipart form = { + { "file", netimpl::components::File{file.generic_string()} }, + { "model", model } + }; + + if (prompt) { form.parts.push_back({ "prompt", std::move(prompt.value()) }); } + if (response_format) { form.parts.push_back({ "response_format", std::move(response_format.value()) }); } + if (temperature) { form.parts.push_back({ "temperature", std::to_string(temperature.value()) }); } + + Response res; + 
res = this->Request( + Method::HTTP_POST, this->openai_root_, "/audio/translations", "multipart/form-data", + this->auth_.GetAuthorizationHeaders(), + std::move(form), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Audio::translate_async(const std::filesystem::path& file, const std::string& model, std::optional prompt, std::optional response_format, std::optional temperature) const& noexcept(false) { + return std::async(std::launch::async, &liboai::Audio::translate, this, file, model, prompt, response_format, temperature); +} + +liboai::Response liboai::Audio::speech(const std::string& model, const std::string& voice, const std::string& input, std::optional response_format, std::optional speed) const& noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("model", model); + jcon.push_back("voice", voice); + jcon.push_back("input", input); + + if (response_format) { jcon.push_back("response_format", std::move(response_format.value())); } + if (speed) { jcon.push_back("speed", speed.value()); } + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/audio/speech", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Audio::speech_async(const std::string& model, const std::string& voice, const std::string& input, std::optional response_format, std::optional speed) const& noexcept(false) { + return std::async(std::launch::async, &liboai::Audio::translate, this, model, voice, input, response_format, speed); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/azure.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/azure.cpp new file mode 100644 index 00000000..91c5621e --- /dev/null +++ 
b/packages/kbot/cpp/packages/liboai/liboai/components/azure.cpp @@ -0,0 +1,206 @@ +#include "../include/components/azure.h" + +liboai::Response liboai::Azure::create_completion(const std::string& resource_name, const std::string& deployment_id, const std::string& api_version, std::optional prompt, std::optional suffix, std::optional max_tokens, std::optional temperature, std::optional top_p, std::optional n, std::optional> stream, std::optional logprobs, std::optional echo, std::optional> stop, std::optional presence_penalty, std::optional frequency_penalty, std::optional best_of, std::optional> logit_bias, std::optional user) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("prompt", std::move(prompt)); + jcon.push_back("suffix", std::move(suffix)); + jcon.push_back("max_tokens", std::move(max_tokens)); + jcon.push_back("temperature", std::move(temperature)); + jcon.push_back("top_p", std::move(top_p)); + jcon.push_back("n", std::move(n)); + jcon.push_back("stream", stream); + jcon.push_back("logprobs", std::move(logprobs)); + jcon.push_back("echo", std::move(echo)); + jcon.push_back("stop", std::move(stop)); + jcon.push_back("presence_penalty", std::move(presence_penalty)); + jcon.push_back("frequency_penalty", std::move(frequency_penalty)); + jcon.push_back("best_of", std::move(best_of)); + jcon.push_back("logit_bias", std::move(logit_bias)); + jcon.push_back("user", std::move(user)); + + netimpl::components::Parameters params; + params.Add({ "api-version", api_version }); + + Response res; + res = this->Request( + Method::HTTP_POST, ("https://" + resource_name + this->azure_root_ + "/deployments/" + deployment_id), "/completions", "application/json", + this->auth_.GetAzureAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + std::move(params), + stream ? 
netimpl::components::WriteCallback{std::move(stream.value())} : netimpl::components::WriteCallback{}, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Azure::create_completion_async(const std::string& resource_name, const std::string& deployment_id, const std::string& api_version, std::optional prompt, std::optional suffix, std::optional max_tokens, std::optional temperature, std::optional top_p, std::optional n, std::optional> stream, std::optional logprobs, std::optional echo, std::optional> stop, std::optional presence_penalty, std::optional frequency_penalty, std::optional best_of, std::optional> logit_bias, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Azure::create_completion, this, resource_name, deployment_id, api_version, prompt, suffix, max_tokens, temperature, top_p, n, stream, logprobs, echo, stop, presence_penalty, frequency_penalty, best_of, logit_bias, user); +} + +liboai::Response liboai::Azure::create_embedding(const std::string& resource_name, const std::string& deployment_id, const std::string& api_version, const std::string& input, std::optional user) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("input", input); + jcon.push_back("user", std::move(user)); + + netimpl::components::Parameters params; + params.Add({ "api-version", api_version }); + + Response res; + res = this->Request( + Method::HTTP_POST, ("https://" + resource_name + this->azure_root_ + "/deployments/" + deployment_id), "/embeddings", "application/json", + this->auth_.GetAzureAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + std::move(params), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Azure::create_embedding_async(const std::string& resource_name, const std::string& deployment_id, 
const std::string& api_version, const std::string& input, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Azure::create_embedding, this, resource_name, deployment_id, api_version, input, user); +} + +liboai::Response liboai::Azure::create_chat_completion(const std::string& resource_name, const std::string& deployment_id, const std::string& api_version, Conversation& conversation, std::optional function_call, std::optional temperature, std::optional n, std::optional stream, std::optional> stop, std::optional max_tokens, std::optional presence_penalty, std::optional frequency_penalty, std::optional> logit_bias, std::optional user) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("temperature", std::move(temperature)); + jcon.push_back("n", std::move(n)); + jcon.push_back("stop", std::move(stop)); + jcon.push_back("max_tokens", std::move(max_tokens)); + jcon.push_back("presence_penalty", std::move(presence_penalty)); + jcon.push_back("frequency_penalty", std::move(frequency_penalty)); + jcon.push_back("logit_bias", std::move(logit_bias)); + jcon.push_back("user", std::move(user)); + + if (function_call) { + if (function_call.value() == "none" || function_call.value() == "auto") { + nlohmann::json j; j["function_call"] = function_call.value(); + jcon.push_back("function_call", j["function_call"]); + } + else { + nlohmann::json j; j["function_call"] = { {"name", function_call.value()} }; + jcon.push_back("function_call", j["function_call"]); + } + } + + StrippedStreamCallback _sscb = nullptr; + if (stream) { + _sscb = [stream, &conversation](std::string data, intptr_t userdata) -> bool { + ChatStreamCallback _stream = stream.value(); + return _stream(data, userdata, conversation); + }; + + jcon.push_back("stream", _sscb); + } + + if (conversation.GetJSON().contains("messages")) { + jcon.push_back("messages", conversation.GetJSON()["messages"]); + } + + if (conversation.HasFunctions()) { + 
jcon.push_back("functions", conversation.GetFunctionsJSON()["functions"]); + } + + netimpl::components::Parameters params; + params.Add({ "api-version", api_version }); + + Response res; + res = this->Request( + Method::HTTP_POST, ("https://" + resource_name + this->azure_root_ + "/deployments/" + deployment_id), "/chat/completions", "application/json", + this->auth_.GetAzureAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + std::move(params), + _sscb ? netimpl::components::WriteCallback{std::move(_sscb)} : netimpl::components::WriteCallback{}, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Azure::create_chat_completion_async(const std::string& resource_name, const std::string& deployment_id, const std::string& api_version, Conversation& conversation, std::optional function_call, std::optional temperature, std::optional n, std::optional stream, std::optional> stop, std::optional max_tokens, std::optional presence_penalty, std::optional frequency_penalty, std::optional> logit_bias, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Azure::create_chat_completion, this, resource_name, deployment_id, api_version, std::ref(conversation), function_call, temperature, n, stream, stop, max_tokens, presence_penalty, frequency_penalty, logit_bias, user); +} + +liboai::Response liboai::Azure::request_image_generation(const std::string& resource_name, const std::string& api_version, const std::string& prompt, std::optional n, std::optional size) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("prompt", prompt); + jcon.push_back("n", std::move(n)); + jcon.push_back("size", std::move(size)); + + netimpl::components::Parameters params; + params.Add({ "api-version", api_version }); + + Response res; + res = this->Request( + Method::HTTP_POST, ("https://" + resource_name + this->azure_root_), 
"/images/generations:submit", "application/json", + this->auth_.GetAzureAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + std::move(params), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Azure::request_image_generation_async(const std::string& resource_name, const std::string& api_version, const std::string& prompt, std::optional n, std::optional size) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Azure::request_image_generation, this, resource_name, api_version, prompt, n, size); +} + +liboai::Response liboai::Azure::get_generated_image(const std::string& resource_name, const std::string& api_version, const std::string& operation_id) const & noexcept(false) { + netimpl::components::Parameters params; + params.Add({ "api-version", api_version }); + + Response res; + res = this->Request( + Method::HTTP_GET, ("https://" + resource_name + this->azure_root_), "/operations/images/" + operation_id, "application/json", + this->auth_.GetAzureAuthorizationHeaders(), + std::move(params), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Azure::get_generated_image_async(const std::string& resource_name, const std::string& api_version, const std::string& operation_id) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Azure::get_generated_image, this, resource_name, api_version, operation_id); +} + +liboai::Response liboai::Azure::delete_generated_image(const std::string& resource_name, const std::string& api_version, const std::string& operation_id) const & noexcept(false) { + netimpl::components::Parameters params; + params.Add({ "api-version", api_version }); + + Response res; + res = this->Request( + Method::HTTP_DELETE, ("https://" + resource_name + this->azure_root_), "/operations/images/" + 
operation_id, "application/json", + this->auth_.GetAzureAuthorizationHeaders(), + std::move(params), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Azure::delete_generated_image_async(const std::string& resource_name, const std::string& api_version, const std::string& operation_id) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Azure::delete_generated_image, this, resource_name, api_version, operation_id); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/chat.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/chat.cpp new file mode 100644 index 00000000..5ce9b63d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/chat.cpp @@ -0,0 +1,1163 @@ +#include "../include/components/chat.h" + +liboai::Conversation::Conversation() { + this->_conversation["messages"] = nlohmann::json::array(); +} + +liboai::Conversation::Conversation(const Conversation& other) { + this->_conversation = other._conversation; + this->_functions = other._functions; + this->_last_resp_is_fc = other._last_resp_is_fc; +} + +liboai::Conversation::Conversation(Conversation&& old) noexcept { + this->_conversation = std::move(old._conversation); + this->_functions = std::move(old._functions); + this->_last_resp_is_fc = old._last_resp_is_fc; + + old._conversation = nlohmann::json::object(); + old._functions = nlohmann::json::object(); +} + +liboai::Conversation::Conversation(std::string_view system_data) { + this->_conversation["messages"] = nlohmann::json::array(); + auto result = this->SetSystemData(system_data); +} + +liboai::Conversation::Conversation(std::string_view system_data, std::string_view user_data) { + this->_conversation["messages"] = nlohmann::json::array(); + auto result = this->SetSystemData(system_data); + result = this->AddUserData(user_data); +} + +liboai::Conversation::Conversation(std::string_view system_data, 
std::initializer_list user_data) { + this->_conversation["messages"] = nlohmann::json::array(); + auto result = this->SetSystemData(system_data); + + for (auto& data : user_data) { + auto result = this->AddUserData(data); + } +} + +liboai::Conversation::Conversation(std::initializer_list user_data) { + this->_conversation["messages"] = nlohmann::json::array(); + + for (auto& data : user_data) { + auto result = this->AddUserData(data); + } +} + +liboai::Conversation::Conversation(const std::vector& user_data) { + this->_conversation["messages"] = nlohmann::json::array(); + + for (auto& data : user_data) { + auto result = this->AddUserData(data); + } +} + +liboai::Conversation& liboai::Conversation::operator=(const Conversation& other) { + this->_conversation = other._conversation; + this->_functions = other._functions; + this->_last_resp_is_fc = other._last_resp_is_fc; + return *this; +} + +liboai::Conversation& liboai::Conversation::operator=(Conversation&& old) noexcept { + this->_conversation = std::move(old._conversation); + this->_functions = std::move(old._functions); + this->_last_resp_is_fc = old._last_resp_is_fc; + + old._conversation = nlohmann::json::object(); + old._functions = nlohmann::json::object(); + + return *this; +} + +bool liboai::Conversation::ChangeFirstSystemMessage(std::string_view new_data) & noexcept(false) { + if (!new_data.empty() && !this->_conversation["messages"].empty()) { + if (this->_conversation["messages"][0]["role"].get() == "system") { + this->_conversation["messages"][0]["content"] = new_data; + return true; // System message changed successfuly + } + return false; // First message is not a system message + } + return false; // New data is empty or conversation is empty +} + +bool liboai::Conversation::SetSystemData(std::string_view data) & noexcept(false) { + // if data provided is non-empty + if (!data.empty()) { + // if system is not set already - only one system message shall exist in any + // conversation + for (auto& 
message : this->_conversation["messages"].items()) { + if (message.value()["role"].get() == "system") { + return false; // system already set + } + } + this->_conversation["messages"].push_back({ { "role", "system" }, {"content", data} }); + return true; // system set successfully + } + return false; // data is empty +} + +bool liboai::Conversation::PopSystemData() & noexcept(false) { + // if conversation is non-empty + if (!this->_conversation["messages"].empty()) { + // if first message is system + if (this->_conversation["messages"][0]["role"].get() == "system") { + this->_conversation["messages"].erase(0); + return true; // system message popped successfully + } + return false; // first message is not system + } + return false; // conversation is empty +} + +void liboai::Conversation::EraseExtra() { + if (_conversation["messages"].size() > _max_history_size) { + // Ensure the system message is preserved + auto first_msg = _conversation["messages"].begin(); + if (first_msg != _conversation["messages"].end() && (*first_msg)["role"].get() == "system") { + _conversation["messages"].erase(first_msg + 1); + } else { + _conversation["messages"].erase(first_msg); + } + } +} + +bool liboai::Conversation::AddUserData(std::string_view data) & noexcept(false) { + // if data provided is non-empty + if (!data.empty()) { + EraseExtra(); + this->_conversation["messages"].push_back({ { "role", "user" }, {"content", data} }); + return true; // user data added successfully + } + return false; // data is empty +} + +bool liboai::Conversation::AddUserData(std::string_view data, std::string_view name) & noexcept(false) { + // if data provided is non-empty + if (!data.empty()) { + EraseExtra(); + this->_conversation["messages"].push_back( + { + {"role", "user"}, + {"content", data}, + {"name", name} + } + ); + return true; // user data added successfully + } + return false; // data is empty +} + +bool liboai::Conversation::PopUserData() & noexcept(false) { + // if conversation is not 
empty + if (!this->_conversation["messages"].empty()) { + // if last message is user message + if (this->_conversation["messages"].back()["role"].get() == "user") { + this->_conversation["messages"].erase(this->_conversation["messages"].end() - 1); + return true; // user data popped successfully + } + return false; // last message is not user message + } + return false; // conversation is empty +} + +std::string liboai::Conversation::GetLastResponse() const & noexcept { + // if conversation is not empty + if (!this->_conversation["messages"].empty()) { + // if last message is from assistant + if (this->_conversation["messages"].back()["role"].get() == "assistant") { + return this->_conversation["messages"].back()["content"].get(); + } + } + return ""; // no response found +} + +bool liboai::Conversation::LastResponseIsFunctionCall() const & noexcept { + return this->_last_resp_is_fc; +} + +std::string liboai::Conversation::GetLastFunctionCallName() const & noexcept(false) { + if (this->_conversation.contains("function_call")) { + if (this->_conversation["function_call"].contains("name")) { + return this->_conversation["function_call"]["name"].get(); + } + } + + return ""; +} + +std::string liboai::Conversation::GetLastFunctionCallArguments() const & noexcept(false) { + if (this->_conversation.contains("function_call")) { + if (this->_conversation["function_call"].contains("arguments")) { + return this->_conversation["function_call"]["arguments"].get(); + } + } + + return ""; +} + +bool liboai::Conversation::PopLastResponse() & noexcept(false) { + // if conversation is not empty + if (!this->_conversation["messages"].empty()) { + // if last message is assistant message + if (this->_conversation["messages"].back()["role"].get() == "assistant") { + this->_conversation["messages"].erase(this->_conversation["messages"].end() - 1); + return true; // assistant data popped successfully + } + return false; // last message is not assistant message + } + return false; // 
conversation is empty +} + +bool liboai::Conversation::Update(std::string_view response) & noexcept(false) { + // reset "last response is function call" flag + if (this->_last_resp_is_fc) { + if (this->_conversation.contains("function_call")) { + this->_conversation.erase("function_call"); + } + this->_last_resp_is_fc = false; + } + + // if response is non-empty + if (!response.empty()) { + nlohmann::json j = nlohmann::json::parse(response); + if (j.contains("choices")) { // top level, several messages + for (auto& choice : j["choices"].items()) { + if (choice.value().contains("message")) { + if (choice.value()["message"].contains("role") && choice.value()["message"].contains("content")) { + if (!choice.value()["message"]["content"].is_null()) { + EraseExtra(); + this->_conversation["messages"].push_back( + { + { "role", choice.value()["message"]["role"] }, + { "content", choice.value()["message"]["content"] } + } + ); + } + else { + EraseExtra(); + this->_conversation["messages"].push_back( + { + { "role", choice.value()["message"]["role"] }, + { "content", "" } + } + ); + } + + if (choice.value()["message"].contains("function_call")) { + // if a function_call is present in the response, the + // conversation is not updated as there is no assistant + // response to be added. 
However, we do add the function + // information + + this->_conversation["function_call"] = nlohmann::json::object(); + if (choice.value()["message"]["function_call"].contains("name")) { + this->_conversation["function_call"]["name"] = choice.value()["message"]["function_call"]["name"]; + } + if (choice.value()["message"]["function_call"].contains("arguments")) { + this->_conversation["function_call"]["arguments"] = choice.value()["message"]["function_call"]["arguments"]; + } + + this->_last_resp_is_fc = true; + } + + return true; // conversation updated successfully + } + else { + return false; // response is not valid + } + } + else { + return false; // no response found + } + } + } + else if (j.contains("message")) { // mid level, single message + if (j["message"].contains("role") && j["message"].contains("content")) { + if (j["message"]["content"].is_null()) { + EraseExtra(); + this->_conversation["messages"].push_back( + { + { "role", j["message"]["role"] }, + { "content", j["message"]["content"] } + } + ); + } + else { + EraseExtra(); + this->_conversation["messages"].push_back( + { + { "role", j["message"]["role"] }, + { "content", "" } + } + ); + } + + if (j["message"].contains("function_call")) { + // if a function_call is present in the response, the + // conversation is not updated as there is no assistant + // response to be added. 
However, we do add the function + // information + + this->_conversation["function_call"] = nlohmann::json::object(); + if (j["message"]["function_call"].contains("name")) { + this->_conversation["function_call"]["name"] = j["message"]["function_call"]["name"]; + } + if (j["message"]["function_call"].contains("arguments")) { + this->_conversation["function_call"]["arguments"] = j["message"]["function_call"]["arguments"]; + } + + this->_last_resp_is_fc = true; + } + + return true; // conversation updated successfully + } + else { + return false; // response is not valid + } + } + else if (j.contains("role") && j.contains("content")) { // low level, single message + if (j["message"]["content"].is_null()) { + EraseExtra(); + this->_conversation["messages"].push_back( + { + { "role", j["message"]["role"] }, + { "content", j["message"]["content"] } + } + ); + } + else { + EraseExtra(); + this->_conversation["messages"].push_back( + { + { "role", j["message"]["role"] }, + { "content", "" } + } + ); + } + + if (j["message"].contains("function_call")) { + // if a function_call is present in the response, the + // conversation is not updated as there is no assistant + // response to be added. 
However, we do add the function + // information + this->_conversation["function_call"] = nlohmann::json::object(); + if (j["message"]["function_call"].contains("name")) { + this->_conversation["function_call"]["name"] = j["message"]["function_call"]["name"]; + } + if (j["message"]["function_call"].contains("arguments")) { + this->_conversation["function_call"]["arguments"] = j["message"]["function_call"]["arguments"]; + } + + this->_last_resp_is_fc = true; + } + + return true; // conversation updated successfully + } + else { + return false; // invalid response + } + } + return false; // response is empty +} + +bool liboai::Conversation::Update(const Response& response) & noexcept(false) { + return this->Update(response.content); +} + +std::string liboai::Conversation::Export() const & noexcept(false) { + nlohmann::json j; + + if (!this->_conversation.empty()) { + j["messages"] = this->_conversation["messages"]; + + if (this->_functions) { + j["functions"] = this->_functions.value()["functions"]; + } + + return j.dump(4); // conversation exported successfully + } + + return ""; // conversation is empty +} + +bool liboai::Conversation::Import(std::string_view json) & noexcept(false) { + if (!json.empty()) { + nlohmann::json j = nlohmann::json::parse(json); + + if (j.contains("messages")) { + this->_conversation["messages"] = j["messages"]; + + if (j.contains("functions")) { + this->_functions = nlohmann::json(); + this->_functions.value()["functions"] = j["functions"]; + } + + return true; // conversation imported successfully + } + + return false; // no messages found + } + + return false; // json is empty +} + +bool liboai::Conversation::AppendStreamData(std::string data) & noexcept(false) { + if (!data.empty()) { + std::string delta; + bool completed = false; + return this->ParseStreamData(data, delta, completed); + } + + return false; // data is empty +} + +bool liboai::Conversation::AppendStreamData(std::string data, std::string& delta, bool& completed) & 
noexcept(false){ + if (!data.empty()) { + return this->ParseStreamData(data, delta, completed); + } + + return false; +} + + +bool liboai::Conversation::SetFunctions(Functions functions) & noexcept(false) { + nlohmann::json j = functions.GetJSON(); + + if (!j.empty() && j.contains("functions") && j["functions"].size() > 0) { + this->_functions = std::move(j); + return true; // functions set successfully + } + + return false; // functions are empty +} + +void liboai::Conversation::PopFunctions() & noexcept(false) { + this->_functions = std::nullopt; +} + +std::string liboai::Conversation::GetRawConversation() const & noexcept { + return this->_conversation.dump(4); +} + +const nlohmann::json& liboai::Conversation::GetJSON() const & noexcept { + return this->_conversation; +} + +std::string liboai::Conversation::GetRawFunctions() const & noexcept { + return this->HasFunctions() ? this->_functions.value().dump(4) : ""; +} + +const nlohmann::json& liboai::Conversation::GetFunctionsJSON() const & noexcept { + return this->_functions.value(); +} + +std::vector liboai::Conversation::SplitStreamedData(std::string data) const noexcept(false) { + // remove all instances of the string "data: " from the string + this->RemoveStrings(data, "data: "); + + /* + Splits the streamed data into a vector of strings + via delimiter of two newlines. + + For instance, a string of "Hello\n\nWorld" would + be split into a vector of {"Hello", "World"}, and + a string of "Hello World" would be split into + a vector of {"Hello World"}. 
+ */ + if (!data.empty()) { + std::vector split_data; + std::string temp; + std::istringstream iss(data); + while (std::getline(iss, temp)) { + if (temp.empty()) { + split_data.push_back(temp); + } + else { + split_data.push_back(temp); + } + } + + // remove empty strings from the vector + split_data.erase(std::remove_if(split_data.begin(), split_data.end(), [](const std::string& s) { return s.empty(); }), split_data.end()); + + return split_data; + } + + return {}; +} + +void liboai::Conversation::RemoveStrings(std::string& s, std::string_view p) const noexcept(false) { + std::string::size_type i = s.find(p); + while (i != std::string::npos) { + s.erase(i, p.length()); + i = s.find(p, i); + } +} + +std::vector liboai::Conversation::SplitFullStreamedData(std::string data) const noexcept(false) { + if (data.empty()) { + return {}; + } + + std::vector split_data; + std::string temp; + std::istringstream iss(data); + while (std::getline(iss, temp)) { + if (temp.empty()) { + split_data.push_back(temp); + } + else { + split_data.push_back(temp); + } + } + + // remove empty strings from the vector + split_data.erase(std::remove_if(split_data.begin(), split_data.end(), [](const std::string& s) { return s.empty(); }), split_data.end()); + + return split_data; +} + +bool liboai::Conversation::ParseStreamData(std::string data, std::string& delta_content, bool& completed){ + if (!_last_incomplete_buffer.empty()) { + data = _last_incomplete_buffer + data; + _last_incomplete_buffer.clear(); + } + + std::vector data_lines = SplitFullStreamedData(data); + + if (data_lines.empty()){ + return false; + } + + // create an empty message at the end of the conversation, + // marked as "pending" to indicate that the response is + // still being processed. This flag will be removed once + // the response is processed. If the marking already + // exists, keep appending to the same message. 
+ if (this->_conversation["messages"].empty() || !this->_conversation["messages"].back().contains("pending")) { + this->_conversation["messages"].push_back( + { + { "role", "" }, + { "content", "" }, + { "pending", true } + } + ); + } + + for (auto& line : data_lines){ + if (line.find("data: [DONE]") == std::string::npos) { + /* + j should have content in the form of: + {"id":"chatcmpl-7SKOck29emvbBbDS6cHg5xwnRrsLO","object":"chat.completion.chunk","created":1686985942,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"content":"."},"finish_reason":null}]} + where "delta" may be empty + */ + this->RemoveStrings(line, "data: "); + + nlohmann::json j; + try { + j = nlohmann::json::parse(line); + } catch (const std::exception& e) { + _last_incomplete_buffer = line; + continue; + } + + if (j.contains("choices")) { + if (j["choices"][0].contains("delta")) { + if (!j["choices"][0]["delta"].empty() && !j["choices"][0]["delta"].is_null()) { + if (j["choices"][0]["delta"].contains("role")) { + this->_conversation["messages"].back()["role"] = j["choices"][0]["delta"]["role"]; + } + + if (j["choices"][0]["delta"].contains("content")) { + if (!j["choices"][0]["delta"]["content"].empty() && !j["choices"][0]["delta"]["content"].is_null()) { + std::string stream_content = j["choices"][0]["delta"]["content"].get(); + this->_conversation["messages"].back()["content"] = this->_conversation["messages"].back()["content"].get() + stream_content; + delta_content += stream_content; + } + + // function calls do not have a content field, + // set _last_resp_is_fc to false and remove any + // previously set function_call field in the + // conversation + if (this->_last_resp_is_fc) { + if (this->_conversation.contains("function_call")) { + this->_conversation.erase("function_call"); + } + this->_last_resp_is_fc = false; + } + } + + if (j["choices"][0]["delta"].contains("function_call")) { + if (!j["choices"][0]["delta"]["function_call"].empty() && 
!j["choices"][0]["delta"]["function_call"].is_null()) { + if (j["choices"][0]["delta"]["function_call"].contains("name")) { + if (!j["choices"][0]["delta"]["function_call"]["name"].empty() && !j["choices"][0]["delta"]["function_call"]["name"].is_null()) { + if (!this->_conversation["messages"].back().contains("function_call")) { + this->_conversation["function_call"] = { { "name", j["choices"][0]["delta"]["function_call"]["name"] } }; + this->_last_resp_is_fc = true; + } + } + } + else if (j["choices"][0]["delta"]["function_call"].contains("arguments")) { + if (!j["choices"][0]["delta"]["function_call"]["arguments"].empty() && !j["choices"][0]["delta"]["function_call"]["arguments"].is_null()) { + if (!this->_conversation["function_call"].contains("arguments")) { + this->_conversation["function_call"].push_back({ "arguments", j["choices"][0]["delta"]["function_call"]["arguments"] }); + } + else { + this->_conversation["function_call"]["arguments"] = this->_conversation["function_call"]["arguments"].get() + j["choices"][0]["delta"]["function_call"]["arguments"].get(); + } + } + } + } + } + } + } + } else { + return false; // no "choices" found - invalid + } + } else { + // the response is complete, erase the "pending" flag + this->_conversation["messages"].back().erase("pending"); + completed = true; + } + } + + return true; // last message received +} + + + +liboai::Response liboai::ChatCompletion::create(const std::string& model, Conversation& conversation, std::optional function_call, std::optional temperature, std::optional top_p, std::optional n, std::optional stream, std::optional> stop, std::optional max_tokens, std::optional presence_penalty, std::optional frequency_penalty, std::optional> logit_bias, std::optional user) const& noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("model", model); + jcon.push_back("temperature", std::move(temperature)); + jcon.push_back("top_p", std::move(top_p)); + jcon.push_back("n", std::move(n)); + 
jcon.push_back("stop", std::move(stop)); + jcon.push_back("max_tokens", std::move(max_tokens)); + jcon.push_back("presence_penalty", std::move(presence_penalty)); + jcon.push_back("frequency_penalty", std::move(frequency_penalty)); + jcon.push_back("logit_bias", std::move(logit_bias)); + jcon.push_back("user", std::move(user)); + + if (function_call) { + if (function_call.value() == "none" || function_call.value() == "auto") { + nlohmann::json j; j["function_call"] = function_call.value(); + jcon.push_back("function_call", j["function_call"]); + } + else { + nlohmann::json j; j["function_call"] = { {"name", function_call.value()} }; + jcon.push_back("function_call", j["function_call"]); + } + } + + StrippedStreamCallback _sscb = nullptr; + if (stream) { + _sscb = [stream, &conversation](std::string data, intptr_t userdata) -> bool { + ChatStreamCallback _stream = stream.value(); + return _stream(data, userdata, conversation); + }; + + jcon.push_back("stream", _sscb); + } + + if (conversation.GetJSON().contains("messages")) { + jcon.push_back("messages", conversation.GetJSON()["messages"]); + } + + if (conversation.HasFunctions()) { + jcon.push_back("functions", conversation.GetFunctionsJSON()["functions"]); + } + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/chat/completions", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + _sscb ? 
netimpl::components::WriteCallback{std::move(_sscb)} : netimpl::components::WriteCallback{}, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::ChatCompletion::create_async(const std::string& model, Conversation& conversation, std::optional function_call, std::optional temperature, std::optional top_p, std::optional n, std::optional stream, std::optional> stop, std::optional max_tokens, std::optional presence_penalty, std::optional frequency_penalty, std::optional> logit_bias, std::optional user) const& noexcept(false) { + return std::async(std::launch::async, &liboai::ChatCompletion::create, this, model, std::ref(conversation), function_call, temperature, top_p, n, stream, stop, max_tokens, presence_penalty, frequency_penalty, logit_bias, user); +} + +namespace liboai { + +std::ostream& operator<<(std::ostream& os, const Conversation& conv) { + os << conv.GetRawConversation() << std::endl << (conv.HasFunctions() ? 
conv.GetRawFunctions() : ""); + + return os; +} + +} + +liboai::Functions::Functions() { + this->_functions["functions"] = nlohmann::json::array(); +} + +liboai::Functions::Functions(const Functions& other) { + this->_functions = other._functions; +} + +liboai::Functions::Functions(Functions&& old) noexcept { + this->_functions = std::move(old._functions); + old._functions = nlohmann::json::object(); +} + +liboai::Functions& liboai::Functions::operator=(const Functions& other) { + this->_functions = other._functions; + return *this; +} + +liboai::Functions& liboai::Functions::operator=(Functions&& old) noexcept { + this->_functions = std::move(old._functions); + old._functions = nlohmann::json::object(); + return *this; +} + +bool liboai::Functions::AddFunction(std::string_view function_name) & noexcept(false) { + if (this->GetFunctionIndex(function_name) == -1) { + this->_functions["functions"].push_back({ {"name", function_name} }); + return true; // function added + } + return false; // function already exists +} + +bool liboai::Functions::AddFunctions(std::initializer_list function_names) & noexcept(false) { + if (function_names.size() > 0) { + for (auto& function_name : function_names) { + if (this->GetFunctionIndex(function_name) == -1) { + this->_functions["functions"].push_back({ {"name", function_name} }); + } + } + return true; // functions added + } + return false; // functions not added (size 0) +} + +bool liboai::Functions::AddFunctions(std::vector function_names) & noexcept(false) { + if (function_names.size() > 0) { + for (auto& function_name : function_names) { + if (this->GetFunctionIndex(function_name) == -1) { + this->_functions["functions"].push_back({ {"name", std::move(function_name)} }); + } + } + return true; // functions added + } + return false; // functions not added (size 0) +} + +bool liboai::Functions::PopFunction(std::string_view function_name) & noexcept(false) { + auto index = this->GetFunctionIndex(function_name); + + if (index != 
-1) { + this->_functions["functions"].erase(this->_functions["functions"].begin() + index); + return true; // function removed + } + + return false; // function not removed +} + +bool liboai::Functions::PopFunctions(std::initializer_list function_names) & noexcept(false) { + if (function_names.size() > 0) { + for (auto& function_name : function_names) { + auto index = this->GetFunctionIndex(function_name); + + if (index != -1) { + this->_functions["functions"].erase(this->_functions["functions"].begin() + index); + } + } + + return true; // functions removed + } + + return false; // functions not removed (size 0) +} + +bool liboai::Functions::PopFunctions(std::vector function_names) & noexcept(false) { + if (function_names.size() > 0) { + for (auto& function_name : function_names) { + auto index = this->GetFunctionIndex(function_name); + + if (index != -1) { + this->_functions["functions"].erase(this->_functions["functions"].begin() + index); + } + } + return true; // functions removed + } + + return false; // functions not removed (size 0) +} + +bool liboai::Functions::SetDescription(std::string_view target, std::string_view description) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (!this->_functions["functions"][i].contains("description")) { + this->_functions["functions"][i]["description"] = description; + return true; // description set successfully + } + return false; // already has a description + } + + return false; // function does not exist +} + +bool liboai::Functions::PopDescription(std::string_view target) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("description")) { + this->_functions["functions"][i].erase("description"); + return true; // description removed successfully + } + return false; // does not have a description + } + + return false; // function does not exist +} + +bool 
liboai::Functions::SetRequired(std::string_view target, std::initializer_list params) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1 && params.size() > 0) { + if (this->_functions["functions"][i].contains("parameters")) { + for (auto& parameter : params) { + this->_functions["functions"][i]["parameters"]["required"] = std::move(params); + return true; // required parameters set successfully + } + } + } + + return false; // required parameters not set +} + +bool liboai::Functions::SetRequired(std::string_view target, std::vector params) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1 && params.size() > 0) { + if (this->_functions["functions"][i].contains("parameters")) { + for (auto& parameter : params) { + this->_functions["functions"][i]["parameters"]["required"] = std::move(params); + return true; // required parameters set successfully + } + } + } + + return false; // required parameters not set +} + +bool liboai::Functions::PopRequired(std::string_view target) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + if (this->_functions["functions"][i]["parameters"].contains("required")) { + this->_functions["functions"][i]["parameters"].erase("required"); + return true; // required parameters removed successfully + } + } + } + + return false; // required parameters not removed +} + +bool liboai::Functions::AppendRequired(std::string_view target, std::string_view param) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + if (this->_functions["functions"][i]["parameters"].contains("required")) { + this->_functions["functions"][i]["parameters"]["required"].push_back(param); + return true; // required parameter appended successfully + } + } + } + + return false; // required parameter not appended +} + +bool 
liboai::Functions::AppendRequired(std::string_view target, std::initializer_list params) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1 && params.size() > 0) { + if (this->_functions["functions"][i].contains("parameters")) { + if (this->_functions["functions"][i]["parameters"].contains("required")) { + for (auto& param : params) { + this->_functions["functions"][i]["parameters"]["required"].push_back(param); + } + + return true; // required parameters appended successfully + } + } + } + + return false; // required parameters not appended +} + +bool liboai::Functions::AppendRequired(std::string_view target, std::vector params) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1 && params.size() > 0) { + if (this->_functions["functions"][i].contains("parameters")) { + if (this->_functions["functions"][i]["parameters"].contains("required")) { + for (auto& param : params) { + this->_functions["functions"][i]["parameters"]["required"].push_back(std::move(param)); + } + + return true; // required parameters appended successfully + } + } + } + + return false; // required parameters not appended +} + +bool liboai::Functions::SetParameter(std::string_view target, FunctionParameter parameter) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (!this->_functions["functions"][i].contains("parameters")) { + this->_functions["functions"][i]["parameters"] = nlohmann::json::object(); + this->_functions["functions"][i]["parameters"]["properties"] = nlohmann::json::object(); + this->_functions["functions"][i]["parameters"]["type"] = "object"; + + this->_functions["functions"][i]["parameters"]["properties"].push_back( + { parameter.name, { + { "type", std::move(parameter.type) }, + { "description", std::move(parameter.description) } + }} + ); + + if (parameter.enumeration) { + this->_functions["functions"][i]["parameters"]["properties"][parameter.name]["enum"] = 
std::move(parameter.enumeration.value()); + } + + return true; // parameter set successfully + } + } + + return false; // function non-existent, or parameters already set (use AppendParameter(s)) +} + +bool liboai::Functions::SetParameters(std::string_view target, std::initializer_list parameters) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (!this->_functions["functions"][i].contains("parameters") && parameters.size() > 0) { + this->_functions["functions"][i]["parameters"] = nlohmann::json::object(); + this->_functions["functions"][i]["parameters"]["properties"] = nlohmann::json::object(); + this->_functions["functions"][i]["parameters"]["type"] = "object"; + + for (auto& parameter : parameters) { + if (!this->_functions["functions"][i]["parameters"]["properties"].contains(parameter.name)) { + this->_functions["functions"][i]["parameters"]["properties"].push_back( + { parameter.name, { + { "type", parameter.type }, + { "description", parameter.description } + } } + ); + + if (parameter.enumeration) { + this->_functions["functions"][i]["parameters"]["properties"][parameter.name]["enum"] = parameter.enumeration.value(); + } + } + } + + return true; // parameter set successfully + } + } + + return false; // function non-existent, or parameters already set (use AppendParameter(s)) +} + +bool liboai::Functions::SetParameters(std::string_view target, std::vector parameters) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (!this->_functions["functions"][i].contains("parameters") && parameters.size() > 0) { + this->_functions["functions"][i]["parameters"] = nlohmann::json::object(); + this->_functions["functions"][i]["parameters"]["properties"] = nlohmann::json::object(); + this->_functions["functions"][i]["parameters"]["type"] = "object"; + + for (auto& parameter : parameters) { + if (!this->_functions["functions"][i]["parameters"]["properties"].contains(parameter.name)) { + 
this->_functions["functions"][i]["parameters"]["properties"].push_back( + { parameter.name, { + { "type", std::move(parameter.type) }, + { "description", std::move(parameter.description) } + } } + ); + + if (parameter.enumeration) { + this->_functions["functions"][i]["parameters"]["properties"][parameter.name]["enum"] = std::move(parameter.enumeration.value()); + } + } + } + + return true; // parameter set successfully + } + } + + return false; // function non-existent, or parameters already set (use AppendParameter(s)) +} + +bool liboai::Functions::PopParameters(std::string_view target) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + this->_functions["functions"][i].erase("parameters"); + return true; // parameters removed successfully + } + } + + return false; // parameters not removed +} + +bool liboai::Functions::PopParameters(std::string_view target, std::initializer_list param_names) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + for (auto& param_name : param_names) { + if (this->_functions["functions"][i]["parameters"]["properties"].contains(param_name)) { + this->_functions["functions"][i]["parameters"]["properties"].erase(param_name); + } + } + + return true; // parameters removed successfully + } + } + + return false; // parameters not removed +} + +bool liboai::Functions::PopParameters(std::string_view target, std::vector param_names) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + for (auto& param_name : param_names) { + if (this->_functions["functions"][i]["parameters"]["properties"].contains(param_name)) { + this->_functions["functions"][i]["parameters"]["properties"].erase(param_name); + } + } + + return true; // parameters removed successfully + } 
+ } + + return false; // parameters not removed +} + +bool liboai::Functions::AppendParameter(std::string_view target, FunctionParameter parameter) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + if (!this->_functions["functions"][i]["parameters"]["properties"].contains(parameter.name)) { + this->_functions["functions"][i]["parameters"]["properties"].push_back( + { parameter.name, { + { "type", std::move(parameter.type) }, + { "description", std::move(parameter.description) } + }} + ); + + if (parameter.enumeration) { + this->_functions["functions"][i]["parameters"]["properties"][parameter.name]["enum"] = std::move(parameter.enumeration.value()); + } + + return true; // parameter appended successfully + } + } + } + + return false; // parameter not appended +} + +bool liboai::Functions::AppendParameters(std::string_view target, std::initializer_list parameters) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + for (auto& parameter : parameters) { + if (!this->_functions["functions"][i]["parameters"]["properties"].contains(parameter.name)) { + this->_functions["functions"][i]["parameters"]["properties"].push_back( + { parameter.name, { + { "type", parameter.type }, + { "description", parameter.description } + } } + ); + + if (parameter.enumeration) { + this->_functions["functions"][i]["parameters"]["properties"][parameter.name]["enum"] = parameter.enumeration.value(); + } + } + } + + return true; // parameters appended successfully + } + } + + return false; // parameters not appended +} + +bool liboai::Functions::AppendParameters(std::string_view target, std::vector parameters) & noexcept(false) { + index i = this->GetFunctionIndex(target); + + if (i != -1) { + if (this->_functions["functions"][i].contains("parameters")) { + for (auto& parameter : parameters) { + if 
(!this->_functions["functions"][i]["parameters"]["properties"].contains(parameter.name)) { + this->_functions["functions"][i]["parameters"]["properties"].push_back( + { parameter.name, { + { "type", std::move(parameter.type) }, + { "description", std::move(parameter.description) } + } } + ); + + if (parameter.enumeration) { + this->_functions["functions"][i]["parameters"]["properties"][parameter.name]["enum"] = std::move(parameter.enumeration.value()); + } + } + } + + return true; // parameters appended successfully + } + } + + return false; // parameters not appended +} + +const nlohmann::json& liboai::Functions::GetJSON() const & noexcept { + return this->_functions; +} + +liboai::Functions::index liboai::Functions::GetFunctionIndex(std::string_view function_name) const & noexcept(false) { + index i = 0; + + if (!this->_functions.empty()) { + for (auto& [key, value] : this->_functions["functions"].items()) { + if (value.contains("name")) { + if (value["name"].get() == function_name) { + return i; + } + } + i++; + } + } + + return -1; +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/completions.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/completions.cpp new file mode 100644 index 00000000..16951ab4 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/completions.cpp @@ -0,0 +1,40 @@ +#include "../include/components/completions.h" + +liboai::Response liboai::Completions::create(const std::string& model_id, std::optional prompt, std::optional suffix, std::optional max_tokens, std::optional temperature, std::optional top_p, std::optional n, std::optional> stream, std::optional logprobs, std::optional echo, std::optional> stop, std::optional presence_penalty, std::optional frequency_penalty, std::optional best_of, std::optional> logit_bias, std::optional user) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("model", model_id); + jcon.push_back("prompt", std::move(prompt)); + 
jcon.push_back("suffix", std::move(suffix)); + jcon.push_back("max_tokens", std::move(max_tokens)); + jcon.push_back("temperature", std::move(temperature)); + jcon.push_back("top_p", std::move(top_p)); + jcon.push_back("n", std::move(n)); + jcon.push_back("stream", stream); + jcon.push_back("logprobs", std::move(logprobs)); + jcon.push_back("echo", std::move(echo)); + jcon.push_back("stop", std::move(stop)); + jcon.push_back("presence_penalty", std::move(presence_penalty)); + jcon.push_back("frequency_penalty", std::move(frequency_penalty)); + jcon.push_back("best_of", std::move(best_of)); + jcon.push_back("logit_bias", std::move(logit_bias)); + jcon.push_back("user", std::move(user)); + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/completions", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + stream ? netimpl::components::WriteCallback{std::move(stream.value())} : netimpl::components::WriteCallback{}, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Completions::create_async(const std::string& model_id, std::optional prompt, std::optional suffix, std::optional max_tokens, std::optional temperature, std::optional top_p, std::optional n, std::optional> stream, std::optional logprobs, std::optional echo, std::optional> stop, std::optional presence_penalty, std::optional frequency_penalty, std::optional best_of, std::optional> logit_bias, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Completions::create, this, model_id, prompt, suffix, max_tokens, temperature, top_p, n, stream, logprobs, echo, stop, presence_penalty, frequency_penalty, best_of, logit_bias, user); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/edits.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/edits.cpp new file mode 100644 
index 00000000..7e851d5f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/edits.cpp @@ -0,0 +1,29 @@ +#include "../include/components/edits.h" + +liboai::Response liboai::Edits::create(const std::string& model_id, std::optional input, std::optional instruction, std::optional n, std::optional temperature, std::optional top_p) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("model", model_id); + jcon.push_back("input", std::move(input)); + jcon.push_back("instruction", std::move(instruction)); + jcon.push_back("n", std::move(n)); + jcon.push_back("temperature", std::move(temperature)); + jcon.push_back("top_p", std::move(top_p)); + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/edits", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Edits::create_async(const std::string& model_id, std::optional input, std::optional instruction, std::optional n, std::optional temperature, std::optional top_p) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Edits::create, this, model_id, input, instruction, n, temperature, top_p); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/embeddings.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/embeddings.cpp new file mode 100644 index 00000000..c453d679 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/embeddings.cpp @@ -0,0 +1,26 @@ +#include "../include/components/embeddings.h" + +liboai::Response liboai::Embeddings::create(const std::string& model_id, std::optional input, std::optional user) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("model", model_id); + jcon.push_back("input", std::move(input)); + jcon.push_back("user", std::move(user)); + + 
Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/embeddings", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Embeddings::create_async(const std::string& model_id, std::optional input, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Embeddings::create, this, model_id, input, user); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/files.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/files.cpp new file mode 100644 index 00000000..c9196ef9 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/files.cpp @@ -0,0 +1,95 @@ +#include "../include/components/files.h" + +liboai::Response liboai::Files::list() const & noexcept(false) { + Response res; + res = this->Request( + Method::HTTP_GET, this->openai_root_, "/files", "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Files::list_async() const & noexcept(false) { + return std::async(std::launch::async, &liboai::Files::list, this); +} + +liboai::Response liboai::Files::create(const std::filesystem::path& file, const std::string& purpose) const & noexcept(false) { + if (!this->Validate(file)) { + throw liboai::exception::OpenAIException( + "File path provided is non-existent, is not a file, or is empty.", + liboai::exception::EType::E_FILEERROR, + "liboai::Files::create(...)" + ); + } + + netimpl::components::Multipart form = { + { "purpose", purpose }, + { "file", netimpl::components::File{file.generic_string()} } + }; + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/files", "multipart/form-data", + 
this->auth_.GetAuthorizationHeaders(), + std::move(form), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Files::create_async(const std::filesystem::path& file, const std::string& purpose) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Files::create, this, file, purpose); +} + +liboai::Response liboai::Files::remove(const std::string& file_id) const & noexcept(false) { + Response res; + res = this->Request( + Method::HTTP_DELETE, this->openai_root_, "/files/" + file_id, "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Files::remove_async(const std::string& file_id) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Files::remove, this, file_id); +} + +liboai::Response liboai::Files::retrieve(const std::string& file_id) const & { + Response res; + res = this->Request( + Method::HTTP_GET, this->openai_root_, "/files/" + file_id, "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Files::retrieve_async(const std::string& file_id) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Files::retrieve, this, file_id); +} + +bool liboai::Files::download(const std::string& file_id, const std::string& save_to) const & noexcept(false) { + return Network::Download( + save_to, + ("https://api.openai.com/v1/files/" + file_id + "/content"), + this->auth_.GetAuthorizationHeaders() + ); +} + +std::future liboai::Files::download_async(const std::string& file_id, const std::string& save_to) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Files::download, this, file_id, save_to); +} 
diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/fine_tunes.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/fine_tunes.cpp new file mode 100644 index 00000000..475ae9d5 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/fine_tunes.cpp @@ -0,0 +1,125 @@ +#include "../include/components/fine_tunes.h" + +liboai::Response liboai::FineTunes::create(const std::string& training_file, std::optional validation_file, std::optional model_id, std::optional n_epochs, std::optional batch_size, std::optional learning_rate_multiplier, std::optional prompt_loss_weight, std::optional compute_classification_metrics, std::optional classification_n_classes, std::optional classification_positive_class, std::optional> classification_betas, std::optional suffix) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("training_file", training_file); + jcon.push_back("validation_file", std::move(validation_file)); + jcon.push_back("model_id", std::move(model_id)); + jcon.push_back("n_epochs", std::move(n_epochs)); + jcon.push_back("batch_size", std::move(batch_size)); + jcon.push_back("learning_rate_multiplier", std::move(learning_rate_multiplier)); + jcon.push_back("prompt_loss_weight", std::move(prompt_loss_weight)); + jcon.push_back("compute_classification_metrics", std::move(compute_classification_metrics)); + jcon.push_back("classification_n_classes", std::move(classification_n_classes)); + jcon.push_back("classification_positive_class", std::move(classification_positive_class)); + jcon.push_back("classification_betas", std::move(classification_betas)); + jcon.push_back("suffix", std::move(suffix)); + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/fine-tunes", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + 
+liboai::FutureResponse liboai::FineTunes::create_async(const std::string& training_file, std::optional validation_file, std::optional model_id, std::optional n_epochs, std::optional batch_size, std::optional learning_rate_multiplier, std::optional prompt_loss_weight, std::optional compute_classification_metrics, std::optional classification_n_classes, std::optional classification_positive_class, std::optional> classification_betas, std::optional suffix) const & noexcept(false) { + return std::async(std::launch::async, &liboai::FineTunes::create, this, training_file, validation_file, model_id, n_epochs, batch_size, learning_rate_multiplier, prompt_loss_weight, compute_classification_metrics, classification_n_classes, classification_positive_class, classification_betas, suffix); +} + +liboai::Response liboai::FineTunes::list() const& { + Response res; + res = this->Request( + Method::HTTP_GET, this->openai_root_, "/fine-tunes", "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::FineTunes::list_async() const & noexcept(false) { + return std::async(std::launch::async, &liboai::FineTunes::list, this); +} + +liboai::Response liboai::FineTunes::retrieve(const std::string& fine_tune_id) const& { + Response res; + res = this->Request( + Method::HTTP_GET, this->openai_root_, "/fine-tunes/" + fine_tune_id, "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::FineTunes::retrieve_async(const std::string& fine_tune_id) const & noexcept(false) { + return std::async(std::launch::async, &liboai::FineTunes::retrieve, this, fine_tune_id); +} + +liboai::Response liboai::FineTunes::cancel(const std::string& fine_tune_id) const& { + Response res; + res = this->Request( + 
Method::HTTP_POST, this->openai_root_, "/fine-tunes/" + fine_tune_id + "/cancel", "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::FineTunes::cancel_async(const std::string& fine_tune_id) const & noexcept(false) { + return std::async(std::launch::async, &liboai::FineTunes::cancel, this, fine_tune_id); +} + +liboai::Response liboai::FineTunes::list_events(const std::string& fine_tune_id, std::optional> stream) const & noexcept(false) { + netimpl::components::Parameters params; + stream ? params.Add({"stream", "true"}) : void(); + + Response res; + res = this->Request( + Method::HTTP_GET, this->openai_root_, "/fine-tunes/" + fine_tune_id + "/events", "application/json", + this->auth_.GetAuthorizationHeaders(), + std::move(params), + stream ? netimpl::components::WriteCallback{std::move(stream.value())} : netimpl::components::WriteCallback{}, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::FineTunes::list_events_async(const std::string& fine_tune_id, std::optional> stream) const & noexcept(false) { + return std::async(std::launch::async, &liboai::FineTunes::list_events, this, fine_tune_id, stream); +} + +liboai::Response liboai::FineTunes::remove(const std::string& model) const& noexcept(false) { + Response res; + res = this->Request( + Method::HTTP_DELETE, this->openai_root_, "/models/" + model, "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::FineTunes::remove_async(const std::string& model) const & noexcept(false) { + return std::async(std::launch::async, &liboai::FineTunes::remove, this, model); +} diff --git 
a/packages/kbot/cpp/packages/liboai/liboai/components/images.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/images.cpp new file mode 100644 index 00000000..1754aee5 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/images.cpp @@ -0,0 +1,109 @@ +#include "../include/components/images.h" + +liboai::Response liboai::Images::create(const std::string& prompt, std::optional n, std::optional size, std::optional response_format, std::optional user) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("prompt", prompt); + jcon.push_back("n", std::move(n)); + jcon.push_back("size", std::move(size)); + jcon.push_back("response_format", std::move(response_format)); + jcon.push_back("user", std::move(user)); + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/images/generations", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Images::create_async(const std::string& prompt, std::optional n, std::optional size, std::optional response_format, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Images::create, this, prompt, n, size, response_format, user); +} + +liboai::Response liboai::Images::create_edit(const std::filesystem::path& image, const std::string& prompt, std::optional mask, std::optional n, std::optional size, std::optional response_format, std::optional user) const & noexcept(false) { + if (!this->Validate(image)) { + throw liboai::exception::OpenAIException( + "File path provided is non-existent, is not a file, or is empty.", + liboai::exception::EType::E_FILEERROR, + "liboai::Images::create_edit(...)" + ); + } + + netimpl::components::Multipart form = { + { "prompt", prompt }, + { "image", 
netimpl::components::File{image.generic_string()} } + }; + + if (mask) { + if (!this->Validate(mask.value())) { + throw liboai::exception::OpenAIException( + "File path provided is non-existent, is not a file, or is empty.", + liboai::exception::EType::E_FILEERROR, + "liboai::Images::create_edit(...)" + ); + } + form.parts.push_back({ "mask", netimpl::components::File{mask.value().generic_string()} }); + } + if (n) { form.parts.push_back({ "n", n.value() }); } + if (size) { form.parts.push_back({ "size", size.value() }); } + if (response_format) { form.parts.push_back({ "response_format", response_format.value() }); } + if (user) { form.parts.push_back({ "user", user.value() }); } + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/images/edits", "multipart/form-data", + this->auth_.GetAuthorizationHeaders(), + std::move(form), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Images::create_edit_async(const std::filesystem::path& image, const std::string& prompt, std::optional mask, std::optional n, std::optional size, std::optional response_format, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Images::create_edit, this, image, prompt, mask, n, size, response_format, user); +} + +liboai::Response liboai::Images::create_variation(const std::filesystem::path& image, std::optional n, std::optional size, std::optional response_format, std::optional user) const & noexcept(false) { + if (!this->Validate(image)) { + throw liboai::exception::OpenAIException( + "File path provided is non-existent, is not a file, or is empty.", + liboai::exception::EType::E_FILEERROR, + "liboai::Images::create_variation(...)" + ); + } + + netimpl::components::Multipart form = { + { "image", netimpl::components::File{image.generic_string()} } + }; + + if (n) { form.parts.push_back({ "n", n.value() }); } + if (size) { 
form.parts.push_back({ "size", size.value() }); } + if (response_format) { form.parts.push_back({ "response_format", response_format.value() }); } + if (user) { form.parts.push_back({ "user", user.value() }); } + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/images/variations", "multipart/form-data", + this->auth_.GetAuthorizationHeaders(), + std::move(form), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Images::create_variation_async(const std::filesystem::path& image, std::optional n, std::optional size, std::optional response_format, std::optional user) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Images::create_variation, this, image, n, size, response_format, user); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/models.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/models.cpp new file mode 100644 index 00000000..41c3e08f --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/models.cpp @@ -0,0 +1,35 @@ +#include "../include/components/models.h" + +liboai::Response liboai::Models::list() const & noexcept(false) { + Response res; + res = this->Request( + Method::HTTP_GET, this->openai_root_, "/models", "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Models::list_async() const & noexcept(false) { + return std::async(std::launch::async, &liboai::Models::list, this); +} + +liboai::Response liboai::Models::retrieve(const std::string& model) const & noexcept(false) { + Response res; + res = this->Request( + Method::HTTP_GET, this->openai_root_, "/models/" + model, "application/json", + this->auth_.GetAuthorizationHeaders(), + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + 
this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Models::retrieve_async(const std::string& model) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Models::retrieve, this, model); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/moderations.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/moderations.cpp new file mode 100644 index 00000000..9f1b98e3 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/moderations.cpp @@ -0,0 +1,25 @@ +#include "../include/components/moderations.h" + +liboai::Response liboai::Moderations::create(const std::string& input, std::optional model) const & noexcept(false) { + liboai::JsonConstructor jcon; + jcon.push_back("input", input); + jcon.push_back("model", std::move(model)); + + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/moderations", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + jcon.dump() + }, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Moderations::create_async(const std::string& input, std::optional model) const & noexcept(false) { + return std::async(std::launch::async, &liboai::Moderations::create, this, input, model); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/components/responses.cpp b/packages/kbot/cpp/packages/liboai/liboai/components/responses.cpp new file mode 100644 index 00000000..9c9e7d3d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/components/responses.cpp @@ -0,0 +1,221 @@ +#include "../include/components/responses.h" + +nlohmann::json liboai::Responses::build_request( + const std::string& model, + const nlohmann::json& input, + std::optional instructions, + std::optional reasoning, + std::optional text, + std::optional max_output_tokens, + std::optional temperature, + std::optional top_p, + 
std::optional seed, + std::optional tools, + std::optional tool_choice, + std::optional parallel_tool_calls, + std::optional store, + std::optional previous_response_id, + std::optional include, + std::optional metadata, + std::optional user, + std::optional truncation, + std::optional stream +) { + nlohmann::json request; + request["model"] = model; + request["input"] = input; + + if (instructions) { + request["instructions"] = std::move(*instructions); + } + if (reasoning) { + request["reasoning"] = std::move(*reasoning); + } + if (text) { + request["text"] = std::move(*text); + } + if (max_output_tokens) { + request["max_output_tokens"] = *max_output_tokens; + } + if (temperature) { + request["temperature"] = *temperature; + } + if (top_p) { + request["top_p"] = *top_p; + } + if (seed) { + request["seed"] = *seed; + } + if (tools) { + request["tools"] = std::move(*tools); + } + if (tool_choice) { + request["tool_choice"] = std::move(*tool_choice); + } + if (parallel_tool_calls) { + request["parallel_tool_calls"] = *parallel_tool_calls; + } + if (store) { + request["store"] = *store; + } + if (previous_response_id) { + request["previous_response_id"] = std::move(*previous_response_id); + } + if (include) { + request["include"] = std::move(*include); + } + if (metadata) { + request["metadata"] = std::move(*metadata); + } + if (user) { + request["user"] = std::move(*user); + } + if (truncation) { + request["truncation"] = std::move(*truncation); + } + if (stream) { + request["stream"] = *stream; + } + + return request; +} + +liboai::Response liboai::Responses::create( + const std::string& model, + const nlohmann::json& input, + std::optional instructions, + std::optional reasoning, + std::optional text, + std::optional max_output_tokens, + std::optional temperature, + std::optional top_p, + std::optional seed, + std::optional tools, + std::optional tool_choice, + std::optional parallel_tool_calls, + std::optional store, + std::optional previous_response_id, + 
std::optional include, + std::optional metadata, + std::optional user, + std::optional truncation, + std::optional stream +) const & noexcept(false) { + const auto request = liboai::Responses::build_request( + model, + input, + std::move(instructions), + std::move(reasoning), + std::move(text), + std::move(max_output_tokens), + std::move(temperature), + std::move(top_p), + std::move(seed), + std::move(tools), + std::move(tool_choice), + std::move(parallel_tool_calls), + std::move(store), + std::move(previous_response_id), + std::move(include), + std::move(metadata), + std::move(user), + std::move(truncation), + stream ? std::optional(true) : std::nullopt + ); + + return this->create(request, std::move(stream)); +} + +liboai::Response liboai::Responses::create(const nlohmann::json& request, std::optional stream) const & noexcept(false) { + Response res; + res = this->Request( + Method::HTTP_POST, this->openai_root_, "/responses", "application/json", + this->auth_.GetAuthorizationHeaders(), + netimpl::components::Body { + request.dump(4) + }, + stream ? 
netimpl::components::WriteCallback{std::move(stream.value())} : netimpl::components::WriteCallback{}, + this->auth_.GetProxies(), + this->auth_.GetProxyAuth(), + this->auth_.GetMaxTimeout() + ); + + return res; +} + +liboai::FutureResponse liboai::Responses::create_async( + const std::string& model, + const nlohmann::json& input, + std::optional instructions, + std::optional reasoning, + std::optional text, + std::optional max_output_tokens, + std::optional temperature, + std::optional top_p, + std::optional seed, + std::optional tools, + std::optional tool_choice, + std::optional parallel_tool_calls, + std::optional store, + std::optional previous_response_id, + std::optional include, + std::optional metadata, + std::optional user, + std::optional truncation, + std::optional stream +) const & noexcept(false) { + return std::async( + std::launch::async, + [this, + model, + input, + instructions, + reasoning, + text, + max_output_tokens, + temperature, + top_p, + seed, + tools, + tool_choice, + parallel_tool_calls, + store, + previous_response_id, + include, + metadata, + user, + truncation, + stream]() mutable { + return this->create( + model, + input, + std::move(instructions), + std::move(reasoning), + std::move(text), + std::move(max_output_tokens), + std::move(temperature), + std::move(top_p), + std::move(seed), + std::move(tools), + std::move(tool_choice), + std::move(parallel_tool_calls), + std::move(store), + std::move(previous_response_id), + std::move(include), + std::move(metadata), + std::move(user), + std::move(truncation), + std::move(stream) + ); + } + ); +} + +liboai::FutureResponse liboai::Responses::create_async(const nlohmann::json& request, std::optional stream) const & noexcept(false) { + return std::async( + std::launch::async, + [this, request, stream]() mutable { + return this->create(request, std::move(stream)); + } + ); +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/core/authorization.cpp 
b/packages/kbot/cpp/packages/liboai/liboai/core/authorization.cpp new file mode 100644 index 00000000..e1df959b --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/core/authorization.cpp @@ -0,0 +1,197 @@ +#include "../include/core/authorization.h" + +liboai::Authorization::~Authorization() { + netimpl::components::EncodedAuthentication().SecureStringClear(this->key_); +} + +bool liboai::Authorization::SetKey(std::string_view key) noexcept { + if (!key.empty()) { + this->key_ = key; + if (this->openai_auth_headers_.count("Authorization") > 0) { + this->openai_auth_headers_.erase("Authorization"); + } + this->openai_auth_headers_["Authorization"] = ("Bearer " + this->key_); + return true; + } + return false; +} + +bool liboai::Authorization::SetAzureKey(std::string_view key) noexcept { + if (!key.empty()) { + this->key_ = key; + if (this->azure_auth_headers_.size() > 0) { + this->azure_auth_headers_.clear(); + } + this->azure_auth_headers_["api-key"] = this->key_; + return true; + } + return false; +} + +bool liboai::Authorization::SetAzureKeyAD(std::string_view key) noexcept { + if (!key.empty()) { + this->key_ = key; + if (this->azure_auth_headers_.size() > 0) { + this->azure_auth_headers_.clear(); + } + this->azure_auth_headers_["Authorization"] = ("Bearer " + this->key_); + return true; + } + return false; +} + +bool liboai::Authorization::SetKeyFile(const std::filesystem::path& path) noexcept { + if (std::filesystem::exists(path) && std::filesystem::is_regular_file(path) && std::filesystem::file_size(path) > 0) { + std::ifstream file(path); + if (file.is_open()) { + std::getline(file, this->key_); + if (this->openai_auth_headers_.count("Authorization") > 0) { + this->openai_auth_headers_.erase("Authorization"); + } + this->openai_auth_headers_["Authorization"] = ("Bearer " + this->key_); + return true; + } + } + return false; +} + +bool liboai::Authorization::SetAzureKeyFile(const std::filesystem::path& path) noexcept { + if 
(std::filesystem::exists(path) && std::filesystem::is_regular_file(path) && std::filesystem::file_size(path) > 0) { + std::ifstream file(path); + if (file.is_open()) { + std::getline(file, this->key_); + if (this->azure_auth_headers_.size() > 0) { + this->azure_auth_headers_.clear(); + } + this->azure_auth_headers_["api-key"] = this->key_; + return true; + } + } + return false; +} + +bool liboai::Authorization::SetAzureKeyFileAD(const std::filesystem::path& path) noexcept { + if (std::filesystem::exists(path) && std::filesystem::is_regular_file(path) && std::filesystem::file_size(path) > 0) { + std::ifstream file(path); + if (file.is_open()) { + std::getline(file, this->key_); + if (this->azure_auth_headers_.size() > 0) { + this->azure_auth_headers_.clear(); + } + this->azure_auth_headers_["Authorization"] = ("Bearer " + this->key_); + return true; + } + } + return false; +} + +bool liboai::Authorization::SetKeyEnv(std::string_view var) noexcept { + if (!var.empty()) { + const char* key = std::getenv(var.data()); + if (key != nullptr) { + this->key_ = key; + if (this->openai_auth_headers_.count("Authorization") > 0) { + this->openai_auth_headers_.erase("Authorization"); + } + this->openai_auth_headers_["Authorization"] = ("Bearer " + this->key_); + return true; + } + return false; + } + return false; +} + +bool liboai::Authorization::SetAzureKeyEnv(std::string_view var) noexcept { + if (!var.empty()) { + const char* key = std::getenv(var.data()); + if (key != nullptr) { + this->key_ = key; + if (this->azure_auth_headers_.size() > 0) { + this->azure_auth_headers_.clear(); + } + this->azure_auth_headers_["api-key"] = this->key_; + return true; + } + return false; + } + return false; +} + +bool liboai::Authorization::SetAzureKeyEnvAD(std::string_view var) noexcept { + if (!var.empty()) { + const char* key = std::getenv(var.data()); + if (key != nullptr) { + this->key_ = key; + if (this->azure_auth_headers_.size() > 0) { + this->azure_auth_headers_.clear(); + } + 
this->azure_auth_headers_["Authorization"] = ("Bearer " + this->key_); + return true; + } + return false; + } + return false; +} + +bool liboai::Authorization::SetOrganization(std::string_view org) noexcept { + if (!org.empty()) { + this->org_ = std::move(org); + if (this->openai_auth_headers_.count("OpenAI-Organization") > 0) { + this->openai_auth_headers_.erase("OpenAI-Organization"); + } + this->openai_auth_headers_["OpenAI-Organization"] = this->org_; + return true; + } + return false; +} + +bool liboai::Authorization::SetOrganizationFile(const std::filesystem::path& path) noexcept { + if (std::filesystem::exists(path) && std::filesystem::is_regular_file(path) && std::filesystem::file_size(path) > 0) { + std::ifstream file(path); + if (file.is_open()) { + std::getline(file, this->key_); + if (this->openai_auth_headers_.count("OpenAI-Organization") > 0) { + this->openai_auth_headers_.erase("OpenAI-Organization"); + } + this->openai_auth_headers_["OpenAI-Organization"] = this->org_; + return true; + } + } + return false; +} + +bool liboai::Authorization::SetOrganizationEnv(std::string_view var) noexcept { + if (!var.empty()) { + const char* org = std::getenv(var.data()); + if (org != nullptr) { + this->org_ = org; + if (this->openai_auth_headers_.count("OpenAI-Organization") > 0) { + this->openai_auth_headers_.erase("OpenAI-Organization"); + } + this->openai_auth_headers_["OpenAI-Organization"] = this->org_; + return true; + } + return false; + } + return false; +} + +void liboai::Authorization::SetProxies(const std::initializer_list>& hosts) noexcept { + this->proxies_ = netimpl::components::Proxies(hosts); +} + +void liboai::Authorization::SetProxies(std::initializer_list>&& hosts) noexcept { + this->proxies_ = netimpl::components::Proxies(std::move(hosts)); +} + +void liboai::Authorization::SetProxies(const std::map& hosts) noexcept { + this->proxies_ = netimpl::components::Proxies(hosts); +} + +void liboai::Authorization::SetProxies(std::map&& hosts) noexcept 
{ + this->proxies_ = netimpl::components::Proxies(std::move(hosts)); +} + +void liboai::Authorization::SetProxyAuth(const std::map& proto_up) noexcept { + this->proxyAuth_ = netimpl::components::ProxyAuthentication(proto_up); +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/core/netimpl.cpp b/packages/kbot/cpp/packages/liboai/liboai/core/netimpl.cpp new file mode 100644 index 00000000..6a274f00 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/core/netimpl.cpp @@ -0,0 +1,1573 @@ +#include "../include/core/netimpl.h" + +liboai::netimpl::CurlHolder::CurlHolder() { + std::lock_guard lock{ this->curl_easy_get_mutex_() }; + + if (!_flag) { + curl_version_info_data* data = curl_version_info(CURLVERSION_NOW); + + // if curl doesn't have ssl enabled, throw an exception + if (!(data->features & CURL_VERSION_SSL)) { + throw liboai::exception::OpenAIException( + "Curl does not have SSL enabled.", + liboai::exception::EType::E_CURLERROR, + "liboai::netimpl::CurlHolder::CurlHolder()" + ); + } + else { + // flag set to true to avoid future checks if SSL present + _flag = true; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] SSL is enabled; check flag set.\n", + __func__ + ); + #endif + } + } + + this->curl_ = curl_easy_init(); + if (!this->curl_) { + throw liboai::exception::OpenAIException( + curl_easy_strerror(CURLE_FAILED_INIT), + liboai::exception::EType::E_CURLERROR, + "liboai::netimpl::CurlHolder::CurlHolder()" + ); + } + + #if defined(LIBOAI_DEBUG) + curl_easy_setopt(this->curl_, CURLOPT_VERBOSE, 1L); + #endif + + #if defined(LIBOAI_DISABLE_PEERVERIFY) + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] LIBOAI_DISABLE_PEERVERIFY set; peer verification disabled.\n", + __func__ + ); + #endif + curl_easy_setopt(this->curl_, CURLOPT_SSL_VERIFYPEER, 0L); + #else + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] LIBOAI_DISABLE_PEERVERIFY not set; peer verification enabled.\n", + __func__ + ); + #endif + 
curl_easy_setopt(this->curl_, CURLOPT_SSL_VERIFYPEER, 1L); + #endif +} + +liboai::netimpl::CurlHolder::~CurlHolder() { + if (this->curl_) { + curl_easy_cleanup(this->curl_); + this->curl_ = nullptr; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] curl_easy_cleanup() called.\n", + __func__ + ); + #endif + } +} + +liboai::netimpl::Session::~Session() { + if (this->headers) { + curl_slist_free_all(this->headers); + this->headers = nullptr; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] curl_slist_free_all() called.\n", + __func__ + ); + #endif + } + + #if LIBCURL_VERSION_MAJOR < 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 56) + if (this->form) { + curl_formfree(this->form); + this->form = nullptr; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] curl_formfree() called.\n", + __func__ + ); + #endif + } + #endif + + #if LIBCURL_VERSION_MAJOR > 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR >= 56) + if (this->mime) { + curl_mime_free(this->mime); + this->mime = nullptr; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] curl_mime_free() called.\n", + __func__ + ); + #endif + } + #endif +} + +void liboai::netimpl::Session::Prepare() { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[11]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + // add parameters to base url + if (!this->parameter_string_.empty()) { + this->url_ += "?"; + this->url_ += this->parameter_string_; + } + this->url_str = this->url_; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set URL for Session (0x%p) to %s.\n", + __func__, this, this->url_str.c_str() + ); + #endif + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_URL, this->url_.c_str()); + + const std::string protocol_socket5_hostname = "socket5_hostname"; + if (proxies_.has(protocol_socket5_hostname)) { + e[1] = curl_easy_setopt(this->curl_, CURLOPT_PROXY, proxies_[protocol_socket5_hostname].c_str()); + e[2] = 
curl_easy_setopt(this->curl_, CURLOPT_PROXYTYPE, CURLPROXY_SOCKS5_HOSTNAME); + + if (proxyAuth_.has(protocol_socket5_hostname)) { + e[3] = curl_easy_setopt(this->curl_, CURLOPT_PROXYUSERNAME, proxyAuth_.GetUsername(protocol_socket5_hostname)); + e[4] = curl_easy_setopt(this->curl_, CURLOPT_PROXYPASSWORD, proxyAuth_.GetPassword(protocol_socket5_hostname)); + } + } else { + // set proxy if available + const std::string protocol = url_.substr(0, url_.find(':')); + if (proxies_.has(protocol)) { + e[1] = curl_easy_setopt(this->curl_, CURLOPT_PROXY, proxies_[protocol].c_str()); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_PROXY for Session (0x%p) to %s.\n", + __func__, this, proxies_[protocol].c_str() + ); + #endif + + if (proxyAuth_.has(protocol)) { + e[2] = curl_easy_setopt(this->curl_, CURLOPT_PROXYUSERNAME, proxyAuth_.GetUsername(protocol)); + e[3] = curl_easy_setopt(this->curl_, CURLOPT_PROXYPASSWORD, proxyAuth_.GetPassword(protocol)); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_PROXYUSERNAME and CURLOPT_PROXYPASSWORD for Session (0x%p) to %s and %s.\n", + __func__, this, proxyAuth_.GetUsername(protocol), proxyAuth_.GetPassword(protocol) + ); + #endif + } + } + } + + // accept all encoding types + e[5] = curl_easy_setopt(this->curl_, CURLOPT_ACCEPT_ENCODING, ""); + + #if LIBCURL_VERSION_MAJOR > 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR >= 71) + e[6] = curl_easy_setopt(this->curl_, CURLOPT_SSL_OPTIONS, CURLSSLOPT_NATIVE_CA); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_SSL_OPTIONS for Session (0x%p) to CURLSSLOPT_NATIVE_CA.\n", + __func__, this + ); + #endif + #endif + + // set string the response will be sent to + if (!this->write_.callback) { + e[7] = curl_easy_setopt(this->curl_, CURLOPT_WRITEFUNCTION, liboai::netimpl::components::writeFunction); + e[8] = curl_easy_setopt(this->curl_, CURLOPT_WRITEDATA, &this->response_string_); + + #if defined(LIBOAI_DEBUG) + 
_liboai_dbg( + "[dbg] [@%s] No user supplied WriteCallback. Set CURLOPT_WRITEFUNCTION and CURLOPT_WRITEDATA for Session (0x%p) to 0x%p and 0x%p.\n", + __func__, this, liboai::netimpl::components::writeFunction, &this->response_string_ + ); + #endif + } + + // set string the raw headers will be sent to + e[9] = curl_easy_setopt(this->curl_, CURLOPT_HEADERFUNCTION, liboai::netimpl::components::writeFunction); + e[10] = curl_easy_setopt(this->curl_, CURLOPT_HEADERDATA, &this->header_string_); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_HEADERFUNCTION and CURLOPT_HEADERDATA for Session (0x%p) to 0x%p and 0x%p.\n", + __func__, this, liboai::netimpl::components::writeFunction, &this->header_string_ + ); + #endif + + ErrorCheck(e, 11, "liboai::netimpl::Session::Prepare()"); +} + +void liboai::netimpl::Session::PrepareDownloadInternal() { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[7]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + if (!this->parameter_string_.empty()) { + this->url_ += "?"; + this->url_ += this->parameter_string_; + } + this->url_str = this->url_; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set URL for Session (0x%p) to %s.\n", + __func__, this, this->url_str.c_str() + ); + #endif + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_URL, this->url_.c_str()); + + const std::string protocol_socket5_hostname = "socket5_hostname"; + if (proxies_.has(protocol_socket5_hostname)) { + e[1] = curl_easy_setopt(this->curl_, CURLOPT_PROXY, proxies_[protocol_socket5_hostname].c_str()); + e[2] = curl_easy_setopt(this->curl_, CURLOPT_PROXYTYPE, CURLPROXY_SOCKS5_HOSTNAME); + if (proxyAuth_.has(protocol_socket5_hostname)) { + e[3] = curl_easy_setopt(this->curl_, CURLOPT_PROXYUSERNAME, proxyAuth_.GetUsername(protocol_socket5_hostname)); + e[4] = curl_easy_setopt(this->curl_, CURLOPT_PROXYPASSWORD, proxyAuth_.GetPassword(protocol_socket5_hostname)); + } + } else { + const 
std::string protocol = url_.substr(0, url_.find(':')); + if (proxies_.has(protocol)) { + e[1] = curl_easy_setopt(this->curl_, CURLOPT_PROXY, proxies_[protocol].c_str()); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_PROXY for Session (0x%p) to %s.\n", + __func__, this, proxies_[protocol].c_str() + ); + #endif + + if (proxyAuth_.has(protocol)) { + e[2] = curl_easy_setopt(this->curl_, CURLOPT_PROXYUSERNAME, proxyAuth_.GetUsername(protocol)); + e[3] = curl_easy_setopt(this->curl_, CURLOPT_PROXYPASSWORD, proxyAuth_.GetPassword(protocol)); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_PROXYUSERNAME and CURLOPT_PROXYPASSWORD for Session (0x%p) to %s and %s.\n", + __func__, this, proxyAuth_.GetUsername(protocol), proxyAuth_.GetPassword(protocol) + ); + #endif + } + } + } + + e[5] = curl_easy_setopt(this->curl_, CURLOPT_HEADERFUNCTION, liboai::netimpl::components::writeFunction); + e[6] = curl_easy_setopt(this->curl_, CURLOPT_HEADERDATA, &this->header_string_); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_HEADERFUNCTION and CURLOPT_HEADERDATA for Session (0x%p) to 0x%p and 0x%p.\n", + __func__, this, liboai::netimpl::components::writeFunction, &this->header_string_ + ); + #endif + + ErrorCheck(e, 7, "liboai::netimpl::Session::PrepareDownloadInternal()"); +} + +CURLcode liboai::netimpl::Session::Perform() { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called curl_easy_perform() for Session (0x%p).\n", + __func__, this + ); + #endif + + CURLcode e = curl_easy_perform(this->curl_); + ErrorCheck(e, "liboai::netimpl::Session::Perform()"); + return e; +} + +liboai::Response liboai::netimpl::Session::BuildResponseObject() { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[3]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called ParseResponseHeader() for Session (0x%p).\n", + 
__func__, this + ); + #endif + + // fill status line and reason + this->ParseResponseHeader(this->header_string_, &this->status_line, &this->reason); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called curl_easy_getinfo() for Session (0x%p) to get status code.\n", + __func__, this + ); + #endif + + // get status code + e[0] = curl_easy_getinfo(this->curl_, CURLINFO_RESPONSE_CODE, &this->status_code); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called curl_easy_getinfo() for Session (0x%p) to get elapsed time.\n", + __func__, this + ); + #endif + + // get elapsed time + e[1] = curl_easy_getinfo(this->curl_, CURLINFO_TOTAL_TIME, &this->elapsed); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called curl_easy_getinfo() for Session (0x%p) to get effective url.\n", + __func__, this + ); + #endif + + // get url + char* effective_url = nullptr; + e[2] = curl_easy_getinfo(this->curl_, CURLINFO_EFFECTIVE_URL, &effective_url); + this->url_str = (effective_url ? 
effective_url : ""); + + ErrorCheck(e, 3, "liboai::netimpl::Session::BuildResponseObject()"); + + // fill content + this->content = this->response_string_; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Constructed response object.\n", + __func__ + ); + #endif + + return liboai::Response { + std::move(this->url_str), + std::move(this->content), + std::move(this->status_line), + std::move(this->reason), + this->status_code, + this->elapsed + }; +} + +liboai::Response liboai::netimpl::Session::Complete() { + this->hasBody = false; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called BuildResponseObject().\n", + __func__ + ); + #endif + + return this->BuildResponseObject(); +} + +liboai::Response liboai::netimpl::Session::CompleteDownload() { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[2]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_HEADERFUNCTION, nullptr); + e[1] = curl_easy_setopt(this->curl_, CURLOPT_HEADERDATA, 0); + + ErrorCheck(e, 2, "liboai::netimpl::Session::CompleteDownload()"); + + this->hasBody = false; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called BuildResponseObject().\n", + __func__ + ); + #endif + + return this->BuildResponseObject(); +} + +void liboai::netimpl::Session::PrepareGet() { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[5]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + if (this->hasBody) { + e[0] = curl_easy_setopt(this->curl_, CURLOPT_NOBODY, 0L); + e[1] = curl_easy_setopt(this->curl_, CURLOPT_CUSTOMREQUEST, "GET"); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_NOBODY and CURLOPT_CUSTOMREQUEST for Session (0x%p) to 0L and \"GET\".\n", + __func__, this + ); + #endif + } + else { + e[2] = curl_easy_setopt(this->curl_, CURLOPT_NOBODY, 0L); + e[3] = curl_easy_setopt(this->curl_, CURLOPT_CUSTOMREQUEST, nullptr); 
+ e[4] = curl_easy_setopt(this->curl_, CURLOPT_HTTPGET, 1L); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_NOBODY, CURLOPT_CUSTOMREQUEST and CURLOPT_HTTPGET for Session (0x%p) to 0L, nullptr and 1L.\n", + __func__, this + ); + #endif + } + + ErrorCheck(e, 5, "liboai::netimpl::Session::PrepareGet()"); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Prepare().\n", + __func__ + ); + #endif + + this->Prepare(); +} + +liboai::Response liboai::netimpl::Session::Get() { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called PrepareGet().\n", + __func__ + ); + #endif + + this->PrepareGet(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Perform().\n", + __func__ + ); + #endif + + this->Perform(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Complete().\n", + __func__ + ); + #endif + + return Complete(); +} + +void liboai::netimpl::Session::PreparePost() { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[4]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_NOBODY, 0L); + if (this->hasBody) { + e[1] = curl_easy_setopt(this->curl_, CURLOPT_CUSTOMREQUEST, nullptr); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_NOBODY and CURLOPT_CUSTOMREQUEST for Session (0x%p) to 0L and nullptr.\n", + __func__, this + ); + #endif + } + else { + e[2] = curl_easy_setopt(this->curl_, CURLOPT_POSTFIELDS, ""); + e[3] = curl_easy_setopt(this->curl_, CURLOPT_CUSTOMREQUEST, "POST"); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_NOBODY, CURLOPT_POSTFIELDS and CURLOPT_CUSTOMREQUEST for Session (0x%p) to 0L, \"\" and \"POST\".\n", + __func__, this + ); + #endif + } + + ErrorCheck(e, 4, "liboai::netimpl::Session::PreparePost()"); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Prepare().\n", + __func__ + ); + #endif + + this->Prepare(); +} + 
+liboai::Response liboai::netimpl::Session::Post() { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called PreparePost().\n", + __func__ + ); + #endif + + this->PreparePost(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Perform().\n", + __func__ + ); + #endif + + Perform(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Complete().\n", + __func__ + ); + #endif + + return Complete(); +} + +void liboai::netimpl::Session::PrepareDelete() { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[3]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_HTTPGET, 0L); + e[1] = curl_easy_setopt(this->curl_, CURLOPT_NOBODY, 0L); + e[2] = curl_easy_setopt(this->curl_, CURLOPT_CUSTOMREQUEST, "DELETE"); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_HTTPGET, CURLOPT_NOBODY and CURLOPT_CUSTOMREQUEST for Session (0x%p) to 0L, 0L and \"DELETE\".\n", + __func__, this + ); + #endif + + ErrorCheck(e, 3, "liboai::netimpl::Session::PrepareDelete()"); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Prepare().\n", + __func__ + ); + #endif + + this->Prepare(); +} + +liboai::Response liboai::netimpl::Session::Delete() { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called PrepareDelete().\n", + __func__ + ); + #endif + + this->PrepareDelete(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Perform().\n", + __func__ + ); + #endif + + Perform(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Complete().\n", + __func__ + ); + #endif + + return Complete(); +} + +void liboai::netimpl::Session::PrepareDownload(std::ofstream& file) { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[5]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_NOBODY, 0L); + e[1] = 
curl_easy_setopt(this->curl_, CURLOPT_HTTPGET, 1); + e[2] = curl_easy_setopt(this->curl_, CURLOPT_WRITEFUNCTION, liboai::netimpl::components::writeFileFunction); + e[3] = curl_easy_setopt(this->curl_, CURLOPT_WRITEDATA, &file); + e[4] = curl_easy_setopt(this->curl_, CURLOPT_CUSTOMREQUEST, nullptr); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_NOBODY, CURLOPT_HTTPGET, CURLOPT_WRITEFUNCTION, CURLOPT_WRITEDATA and CURLOPT_CUSTOMREQUEST for Session (0x%p) to 0L, 1L, liboai::netimpl::components::writeFileFunction, &file and nullptr.\n", + __func__, this + ); + #endif + + ErrorCheck(e, 5, "liboai::netimpl::Session::PrepareDownload()"); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called PrepareDownloadInternal().\n", + __func__ + ); + #endif + + this->PrepareDownloadInternal(); +} + +liboai::Response liboai::netimpl::Session::Download(std::ofstream& file) { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called PrepareDownload().\n", + __func__ + ); + #endif + + this->PrepareDownload(file); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called Perform().\n", + __func__ + ); + #endif + + this->Perform(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called CompleteDownload().\n", + __func__ + ); + #endif + + return CompleteDownload(); +} + +void liboai::netimpl::Session::ClearContext() { + if (curl_) { + curl_easy_reset(curl_); + } + status_code = 0; + elapsed = 0.0; + status_line.clear(); + content.clear(); + url_str.clear(); + reason.clear(); + + if (this->headers) { + curl_slist_free_all(this->headers); + this->headers = nullptr; + +#if defined(LIBOAI_DEBUG) + _liboai_dbg("[dbg] [@%s] curl_slist_free_all() called.\n", __func__); +#endif + } + +#if LIBCURL_VERSION_MAJOR < 7 || \ + (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 56) + if (this->form) { + curl_formfree(this->form); + this->form = nullptr; + +#if defined(LIBOAI_DEBUG) + _liboai_dbg("[dbg] [@%s] curl_formfree() called.\n", 
__func__); +#endif + } +#endif + +#if LIBCURL_VERSION_MAJOR > 7 || \ + (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR >= 56) + if (this->mime) { + curl_mime_free(this->mime); + this->mime = nullptr; + +#if defined(LIBOAI_DEBUG) + _liboai_dbg("[dbg] [@%s] curl_mime_free() called.\n", __func__); +#endif + } +#endif + + hasBody = false; + parameter_string_.clear(); + url_.clear(); + response_string_.clear(); + header_string_.clear(); + write_ = netimpl::components::WriteCallback{}; +} + +void liboai::netimpl::Session::ParseResponseHeader(const std::string& headers, std::string* status_line, std::string* reason) { + std::vector lines; + std::istringstream stream(headers); + { + std::string line; + while (std::getline(stream, line, '\n')) { + lines.push_back(line); + } + } + + for (std::string& line : lines) { + if (line.substr(0, 5) == "HTTP/") { + // set the status_line if it was given + if ((status_line != nullptr) || (reason != nullptr)) { + line.resize(std::min(line.size(), line.find_last_not_of("\t\n\r ") + 1)); + if (status_line != nullptr) { + *status_line = line; + } + + // set the reason if it was given + if (reason != nullptr) { + const size_t pos1 = line.find_first_of("\t "); + size_t pos2 = std::string::npos; + if (pos1 != std::string::npos) { + pos2 = line.find_first_of("\t ", pos1 + 1); + } + if (pos2 != std::string::npos) { + line.erase(0, pos2 + 1); + *reason = line; + } + } + } + } + + if (line.length() > 0) { + const size_t found = line.find(':'); + if (found != std::string::npos) { + std::string value = line.substr(found + 1); + value.erase(0, value.find_first_not_of("\t ")); + value.resize(std::min(value.size(), value.find_last_not_of("\t\n\r ") + 1)); + } + } + } + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Parsed response header.\n", + __func__ + ); + #endif +} + +void liboai::netimpl::Session::SetOption(const components::Url& url) { + this->SetUrl(url); +} + +void liboai::netimpl::Session::SetUrl(const components::Url& url) { 
+ this->url_ = url.str(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set base URL for Session (0x%p) to \"%s\".\n", + __func__, this, this->url_.c_str() + ); + #endif +} + +void liboai::netimpl::Session::SetOption(const components::Body& body) { + this->SetBody(body); +} + +void liboai::netimpl::Session::SetBody(const components::Body& body) { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[2]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + this->hasBody = true; + e[0] = curl_easy_setopt(this->curl_, CURLOPT_POSTFIELDSIZE_LARGE, static_cast(body.str().length())); + e[1] = curl_easy_setopt(this->curl_, CURLOPT_POSTFIELDS, body.c_str()); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_POSTFIELDSIZE_LARGE and CURLOPT_POSTFIELDS for Session (0x%p) to %lld and \"%s\".\n", + __func__, this, static_cast(body.str().length()), body.c_str() + ); + #endif + + ErrorCheck(e, 2, "liboai::netimpl::Session::SetBody()"); +} + +void liboai::netimpl::Session::SetOption(components::Body&& body) { + this->SetBody(std::move(body)); +} + +void liboai::netimpl::Session::SetBody(components::Body&& body) { + // holds error codes - all init to OK to prevent errors + // when checking unset values + CURLcode e[2]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + this->hasBody = true; + e[0] = curl_easy_setopt(this->curl_, CURLOPT_POSTFIELDSIZE_LARGE, static_cast(body.str().length())); + e[1] = curl_easy_setopt(this->curl_, CURLOPT_COPYPOSTFIELDS, body.c_str()); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set CURLOPT_POSTFIELDSIZE_LARGE and CURLOPT_COPYPOSTFIELDS for Session (0x%p) to %lld and \"%s\".\n", + __func__, this, static_cast(body.str().length()), body.c_str() + ); + #endif + + ErrorCheck(e, 2, "liboai::netimpl::Session::SetBody()"); +} + +void liboai::netimpl::Session::SetOption(const components::Multipart& multipart) { + this->SetMultipart(multipart); +} + +void 
liboai::netimpl::Session::SetMultipart(const components::Multipart& multipart) { + #if LIBCURL_VERSION_MAJOR < 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 56) + CURLFORMcode fe[2]; memset(fe, CURLFORMcode::CURL_FORMADD_OK, sizeof(fe)); + CURLcode e; + + curl_httppost* lastptr = nullptr; + + for (const auto& part : multipart.parts) { + std::vector formdata; + if (!part.content_type.empty()) { + formdata.push_back({CURLFORM_CONTENTTYPE, part.content_type.c_str()}); + } + if (part.is_file) { + CURLFORMcode f; + for (const auto& file : part.files) { + formdata.push_back({CURLFORM_COPYNAME, part.name.c_str()}); + formdata.push_back({CURLFORM_FILE, file.filepath.c_str()}); + if (file.hasOverridedFilename()) { + formdata.push_back({CURLFORM_FILENAME, file.overrided_filename.c_str()}); + } + formdata.push_back({CURLFORM_END, nullptr}); + f = curl_formadd(&this->form, &lastptr, CURLFORM_ARRAY, formdata.data(), CURLFORM_END); + + // check each file + ErrorCheck(f, "liboai::netimpl::Session::SetMultipart() @ is_file[formadd]"); + + formdata.clear(); + } + } else if (part.is_buffer) { + fe[0] = curl_formadd(&this->form, &lastptr, CURLFORM_COPYNAME, part.name.c_str(), CURLFORM_BUFFER, part.value.c_str(), CURLFORM_BUFFERPTR, part.data, CURLFORM_BUFFERLENGTH, part.datalen, CURLFORM_END); + } else { + formdata.push_back({CURLFORM_COPYNAME, part.name.c_str()}); + formdata.push_back({CURLFORM_COPYCONTENTS, part.value.c_str()}); + formdata.push_back({CURLFORM_END, nullptr}); + fe[1] = curl_formadd(&this->form, &lastptr, CURLFORM_ARRAY, formdata.data(), CURLFORM_END); + } + } + e = curl_easy_setopt(this->curl_, CURLOPT_HTTPPOST, this->form); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set multipart for Session (0x%p) using curl_formadd() and CURLOPT_HTTPPOST.\n", + __func__, this + ); + #endif + + ErrorCheck(fe, 2, "liboai::netimpl::Session::SetMultipart()"); + ErrorCheck(e, "liboai::netimpl::Session::SetMultipart()"); + + this->hasBody = true; + #endif 
+ + #if LIBCURL_VERSION_MAJOR > 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR >= 56) + CURLcode e[6]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + this->mime = curl_mime_init(this->curl_); + if (!this->mime) { + throw liboai::exception::OpenAIException( + "curl_mime_init() failed", + liboai::exception::EType::E_CURLERROR, + "liboai::netimpl::Session::SetMultipart()" + ); + } + + for (const auto& part : multipart.parts) { + std::vector mimedata; + if (!part.content_type.empty()) { + mimedata.push_back(curl_mime_addpart(this->mime)); + e[0] = curl_mime_type(mimedata.back(), part.content_type.c_str()); + } + if (part.is_file) { + CURLcode fe[3]; memset(fe, CURLcode::CURLE_OK, sizeof(fe)); + for (const auto& file : part.files) { + mimedata.push_back(curl_mime_addpart(this->mime)); + fe[0] = curl_mime_name(mimedata.back(), part.name.c_str()); + fe[1] = curl_mime_filedata(mimedata.back(), file.filepath.c_str()); + if (file.hasOverridedFilename()) { + fe[2] = curl_mime_filename(mimedata.back(), file.overrided_filename.c_str()); + } + + // check each file + ErrorCheck(fe, 3, "liboai::netimpl::Session::SetMultipart() @ is_file[mime]"); + } + } + else if (part.is_buffer) { + mimedata.push_back(curl_mime_addpart(this->mime)); + e[1] = curl_mime_name(mimedata.back(), part.name.c_str()); + e[2] = curl_mime_filename(mimedata.back(), part.value.c_str()); + e[3] = curl_mime_data(mimedata.back(), reinterpret_cast(part.data), part.datalen); + } + else { + mimedata.push_back(curl_mime_addpart(this->mime)); + e[3] = curl_mime_name(mimedata.back(), part.name.c_str()); + e[4] = curl_mime_data(mimedata.back(), part.value.c_str(), CURL_ZERO_TERMINATED); + } + } + e[5] = curl_easy_setopt(this->curl_, CURLOPT_MIMEPOST, this->mime); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set multipart for Session (0x%p) using curl_mime_addpart() and CURLOPT_MIMEPOST.\n", + __func__, this + ); + #endif + + ErrorCheck(e, 6, "liboai::netimpl::Session::SetMultipart()"); + + 
this->hasBody = true; + #endif +} + +void liboai::netimpl::Session::SetOption(components::Multipart&& multipart) { + this->SetMultipart(std::move(multipart)); +} + +void liboai::netimpl::Session::SetMultipart(components::Multipart&& multipart) { + #if LIBCURL_VERSION_MAJOR < 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 56) + CURLFORMcode fe[2]; memset(fe, CURLFORMcode::CURL_FORMADD_OK, sizeof(fe)); + CURLcode e; + + curl_httppost* lastptr = nullptr; + + for (const auto& part : multipart.parts) { + std::vector formdata; + if (!part.content_type.empty()) { + formdata.push_back({ CURLFORM_CONTENTTYPE, part.content_type.c_str() }); + } + if (part.is_file) { + CURLFORMcode f; + for (const auto& file : part.files) { + formdata.push_back({ CURLFORM_COPYNAME, part.name.c_str() }); + formdata.push_back({ CURLFORM_FILE, file.filepath.c_str() }); + if (file.hasOverridedFilename()) { + formdata.push_back({ CURLFORM_FILENAME, file.overrided_filename.c_str() }); + } + formdata.push_back({ CURLFORM_END, nullptr }); + f = curl_formadd(&this->form, &lastptr, CURLFORM_ARRAY, formdata.data(), CURLFORM_END); + + // check each file + ErrorCheck(f, "liboai::netimpl::Session::SetMultipart() @ is_file[formadd]"); + + formdata.clear(); + } + } + else if (part.is_buffer) { + fe[0] = curl_formadd(&this->form, &lastptr, CURLFORM_COPYNAME, part.name.c_str(), CURLFORM_BUFFER, part.value.c_str(), CURLFORM_BUFFERPTR, part.data, CURLFORM_BUFFERLENGTH, part.datalen, CURLFORM_END); + } + else { + formdata.push_back({ CURLFORM_COPYNAME, part.name.c_str() }); + formdata.push_back({ CURLFORM_COPYCONTENTS, part.value.c_str() }); + formdata.push_back({ CURLFORM_END, nullptr }); + fe[1] = curl_formadd(&this->form, &lastptr, CURLFORM_ARRAY, formdata.data(), CURLFORM_END); + } + } + e = curl_easy_setopt(this->curl_, CURLOPT_HTTPPOST, this->form); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set multipart for Session (0x%p) using curl_formadd() and CURLOPT_HTTPPOST.\n", + 
__func__, this + ); + #endif + + ErrorCheck(fe, 2, "liboai::netimpl::Session::SetMultipart()"); + ErrorCheck(e, "liboai::netimpl::Session::SetMultipart()"); + + this->hasBody = true; + #endif + + #if LIBCURL_VERSION_MAJOR > 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR >= 56) + CURLcode e[6]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + curl_mimepart* _part = nullptr; + + this->mime = curl_mime_init(this->curl_); + if (!this->mime) { + throw liboai::exception::OpenAIException( + "curl_mime_init() failed", + liboai::exception::EType::E_CURLERROR, + "liboai::netimpl::Session::SetMultipart()" + ); + } + + for (const auto& part : multipart.parts) { + std::vector mimedata; + if (!part.content_type.empty()) { + mimedata.push_back(curl_mime_addpart(this->mime)); + e[0] = curl_mime_type(mimedata.back(), part.content_type.c_str()); + } + if (part.is_file) { + CURLcode fe[3]; memset(fe, CURLcode::CURLE_OK, sizeof(fe)); + for (const auto& file : part.files) { + mimedata.push_back(curl_mime_addpart(this->mime)); + fe[0] = curl_mime_name(mimedata.back(), part.name.c_str()); + fe[1] = curl_mime_filedata(mimedata.back(), file.filepath.c_str()); + if (file.hasOverridedFilename()) { + fe[2] = curl_mime_filename(mimedata.back(), file.overrided_filename.c_str()); + } + + // check each file + ErrorCheck(fe, 3, "liboai::netimpl::Session::SetMultipart() @ is_file[mime]"); + } + } + else if (part.is_buffer) { + mimedata.push_back(curl_mime_addpart(this->mime)); + e[1] = curl_mime_name(mimedata.back(), part.name.c_str()); + e[2] = curl_mime_filename(mimedata.back(), part.value.c_str()); + e[3] = curl_mime_data(mimedata.back(), reinterpret_cast(part.data), part.datalen); + } + else { + mimedata.push_back(curl_mime_addpart(this->mime)); + e[3] = curl_mime_name(mimedata.back(), part.name.c_str()); + e[4] = curl_mime_data(mimedata.back(), part.value.c_str(), CURL_ZERO_TERMINATED); + } + } + e[5] = curl_easy_setopt(this->curl_, CURLOPT_MIMEPOST, this->mime); + + #if 
defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set multipart for Session (0x%p) using curl_mime_addpart() and CURLOPT_MIMEPOST.\n", + __func__, this + ); + #endif + + ErrorCheck(e, 6, "liboai::netimpl::Session::SetMultipart()"); + + this->hasBody = true; + #endif +} + +std::string liboai::netimpl::CurlHolder::urlEncode(const std::string& s) { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] URL-encode string \"%s\".\n", + __func__, s.c_str() + ); + #endif + + char* output = curl_easy_escape(this->curl_, s.c_str(), static_cast(s.length())); + if (output) { + std::string result = output; + curl_free(output); + return result; + } + return ""; +} + +std::string liboai::netimpl::CurlHolder::urlDecode(const std::string& s) { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] URL-decode string \"%s\".\n", + __func__, s.c_str() + ); + #endif + + char* output = curl_easy_unescape(this->curl_, s.c_str(), static_cast(s.length()), nullptr); + if (output) { + std::string result = output; + curl_free(output); + return result; + } + return ""; +} + +std::string liboai::netimpl::components::urlEncodeHelper(const std::string& s) { + CurlHolder c; + return c.urlEncode(s); +} + +std::string liboai::netimpl::components::urlDecodeHelper(const std::string& s) { + CurlHolder c; + return c.urlDecode(s); +} + +size_t liboai::netimpl::components::writeUserFunction(char* ptr, size_t size, size_t nmemb, const WriteCallback* write) { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called with %zu bytes.\n", + __func__, size * nmemb + ); + #endif + + size *= nmemb; + return (*write)({ ptr, size }) ? 
size : 0; +} + +size_t liboai::netimpl::components::writeFunction(char* ptr, size_t size, size_t nmemb, std::string* data) { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called with %zu bytes.\n", + __func__, size * nmemb + ); + #endif + + size *= nmemb; + data->append(ptr, size); + return size; +} + +size_t liboai::netimpl::components::writeFileFunction(char* ptr, size_t size, size_t nmemb, std::ofstream* file) { + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Called with %zu bytes.\n", + __func__, size * nmemb + ); + #endif + + size *= nmemb; + file->write(ptr, static_cast(size)); + return size; +} + +long liboai::netimpl::components::Timeout::Milliseconds() const { + static_assert(std::is_samems)>::value, "Following casting expects milliseconds."); + + if (ms.count() > static_cast((std::numeric_limits::max)())) { + throw std::overflow_error("cpr::Timeout: timeout value overflow: " + std::to_string(ms.count()) + " ms."); + } + + if (ms.count() < static_cast((std::numeric_limits::min)())) { + throw std::underflow_error("cpr::Timeout: timeout value underflow: " + std::to_string(ms.count()) + " ms."); + } + + return static_cast(ms.count()); +} + +liboai::netimpl::components::Files::iterator liboai::netimpl::components::Files::begin() { + return this->files.begin(); +} + +liboai::netimpl::components::Files::iterator liboai::netimpl::components::Files::end() { + return this->files.end(); +} + +liboai::netimpl::components::Files::const_iterator liboai::netimpl::components::Files::begin() const { + return this->files.begin(); +} + +liboai::netimpl::components::Files::const_iterator liboai::netimpl::components::Files::end() const { + return this->files.end(); +} + +liboai::netimpl::components::Files::const_iterator liboai::netimpl::components::Files::cbegin() const { + return this->files.cbegin(); +} + +liboai::netimpl::components::Files::const_iterator liboai::netimpl::components::Files::cend() const { + return this->files.cend(); +} + +void 
liboai::netimpl::components::Files::emplace_back(const File& file) { + this->files.emplace_back(file); +} + +void liboai::netimpl::components::Files::push_back(const File& file) { + this->files.push_back(file); +} + +void liboai::netimpl::components::Files::pop_back() { + this->files.pop_back(); +} + +liboai::netimpl::components::Multipart::Multipart(const std::initializer_list& parts) + : parts{ parts } {} + +liboai::netimpl::components::Parameters::Parameters(const std::initializer_list& parameters) { + this->Add(parameters); +} + +void liboai::netimpl::components::Parameters::Add(const std::initializer_list& parameters) { + for (const auto& parameter : parameters) { + this->parameters_.emplace_back(parameter.key, parameter.value); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Added parameter \"%s\" with value \"%s\".\n", + __func__, parameter.key.c_str(), parameter.value.c_str() + ); + #endif + } +} + +void liboai::netimpl::components::Parameters::Add(const Parameter& parameter) { + this->parameters_.emplace_back(parameter.key, parameter.value); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Added parameter \"%s\" with value \"%s\".\n", + __func__, parameter.key.c_str(), parameter.value.c_str() + ); + #endif +} + +bool liboai::netimpl::components::Parameters::Empty() const { + return this->parameters_.empty(); +} + +std::string liboai::netimpl::components::Parameters::BuildParameterString() const { + std::string parameter_string; + + if (this->parameters_.size() == 1) { + parameter_string += this->parameters_.front().key + "=" + this->parameters_.front().value; + } + else { + for (const auto& parameter : this->parameters_) { + parameter_string += parameter.key + "=" + parameter.value + "&"; + } + parameter_string.pop_back(); + } + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Built parameter string \"%s\".\n", + __func__, parameter_string.c_str() + ); + #endif + + return parameter_string; +} + +void 
liboai::netimpl::Session::SetOption(const components::Header& header) { + this->SetHeader(header); +} + +void liboai::netimpl::Session::SetHeader(const components::Header& header) { + CURLcode e; + + for (const std::pair& item : header) { + std::string header_string = item.first; + if (item.second.empty()) { + header_string += ";"; + } else { + header_string += ": " + item.second; + } + + curl_slist* temp = curl_slist_append(this->headers, header_string.c_str()); + if (temp) { + this->headers = temp; + } + } + + curl_slist* temp; +// Causes cURL error for simple GET requests +// curl_slist* temp = curl_slist_append(this->headers, "Transfer-Encoding: chunked"); +// if (temp) { +// this->headers = temp; +// } + + // remove preset curl headers for files >1MB + temp = curl_slist_append(this->headers, "Expect:"); + if (temp) { + this->headers = temp; + } + + e = curl_easy_setopt(this->curl_, CURLOPT_HTTPHEADER, this->headers); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set headers.\n", + __func__ + ); + #endif + + ErrorCheck(e, "liboai::netimpl::Session::SetHeader()"); +} + +void liboai::netimpl::Session::SetOption(const components::Parameters& parameters) { + this->SetParameters(parameters); +} + +void liboai::netimpl::Session::SetParameters(const components::Parameters& parameters) { + if (!parameters.Empty()) { + this->parameter_string_ = parameters.BuildParameterString(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set parameters.\n", + __func__ + ); + #endif + } +} + +void liboai::netimpl::Session::SetOption(components::Parameters&& parameters) { + this->SetParameters(std::move(parameters)); +} + +void liboai::netimpl::Session::SetParameters(components::Parameters&& parameters) { + if (!parameters.Empty()) { + this->parameter_string_ = parameters.BuildParameterString(); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set parameters.\n", + __func__ + ); + #endif + } +} + +void liboai::netimpl::Session::SetOption(const 
components::Timeout& timeout) { + this->SetTimeout(timeout); +} + +void liboai::netimpl::Session::SetTimeout(const components::Timeout& timeout) { + CURLcode e = curl_easy_setopt(this->curl_, CURLOPT_TIMEOUT_MS, timeout.Milliseconds()); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set timeout to %ld milliseconds\n", + __func__, timeout.Milliseconds() + ); + #endif + + ErrorCheck(e, "liboai::netimpl::Session::SetTimeout()"); +} + +void liboai::netimpl::Session::SetOption(const components::Proxies& proxies) { + this->SetProxies(proxies); +} + +void liboai::netimpl::Session::SetProxies(const components::Proxies& proxies) { + this->proxies_ = proxies; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set proxies.\n", + __func__ + ); + #endif +} + +void liboai::netimpl::Session::SetOption(components::Proxies&& proxies) { + this->SetProxies(std::move(proxies)); +} + +void liboai::netimpl::Session::SetProxies(components::Proxies&& proxies) { + this->proxies_ = std::move(proxies); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set proxies.\n", + __func__ + ); + #endif +} + +void liboai::netimpl::Session::SetOption(const components::ProxyAuthentication& proxy_auth) { + this->SetProxyAuthentication(proxy_auth); +} + +void liboai::netimpl::Session::SetProxyAuthentication(const components::ProxyAuthentication& proxy_auth) { + this->proxyAuth_ = proxy_auth; + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set proxy authentication.\n", + __func__ + ); + #endif +} + +void liboai::netimpl::Session::SetOption(components::ProxyAuthentication&& proxy_auth) { + this->SetProxyAuthentication(std::move(proxy_auth)); +} + +void liboai::netimpl::Session::SetProxyAuthentication(components::ProxyAuthentication&& proxy_auth) { + this->proxyAuth_ = std::move(proxy_auth); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set proxy authentication.\n", + __func__ + ); + #endif +} + +void liboai::netimpl::Session::SetOption(const 
components::WriteCallback& write) { + this->SetWriteCallback(write); +} + +void liboai::netimpl::Session::SetWriteCallback(const components::WriteCallback& write) { + if (write.callback) { + CURLcode e[2]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_WRITEFUNCTION, components::writeUserFunction); + this->write_ = write; + e[1] = curl_easy_setopt(this->curl_, CURLOPT_WRITEDATA, &this->write_); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set user supplied write callback.\n", + __func__ + ); + #endif + + ErrorCheck(e, 2, "liboai::netimpl::Session::SetWriteCallback()"); + } +} + +void liboai::netimpl::Session::SetOption(components::WriteCallback&& write) { + this->SetWriteCallback(std::move(write)); +} + +void liboai::netimpl::Session::SetWriteCallback(components::WriteCallback&& write) { + if (write.callback) { + CURLcode e[2]; memset(e, CURLcode::CURLE_OK, sizeof(e)); + + e[0] = curl_easy_setopt(this->curl_, CURLOPT_WRITEFUNCTION, components::writeUserFunction); + this->write_ = std::move(write); + e[1] = curl_easy_setopt(this->curl_, CURLOPT_WRITEDATA, &this->write_); + + #if defined(LIBOAI_DEBUG) + _liboai_dbg( + "[dbg] [@%s] Set user supplied write callback.\n", + __func__ + ); + #endif + + ErrorCheck(e, 2, "liboai::netimpl::Session::SetWriteCallback()"); + } +} + +liboai::netimpl::components::Proxies::Proxies(const std::initializer_list>& hosts) + : hosts_{ hosts } {} + +liboai::netimpl::components::Proxies::Proxies(const std::map& hosts) + : hosts_{hosts} {} + +bool liboai::netimpl::components::Proxies::has(const std::string& protocol) const { + return hosts_.count(protocol) > 0; +} + +const std::string& liboai::netimpl::components::Proxies::operator[](const std::string& protocol) { + return hosts_[protocol]; +} + +liboai::netimpl::components::EncodedAuthentication::~EncodedAuthentication() noexcept { + this->SecureStringClear(this->username); + this->SecureStringClear(this->password); +} + +const 
std::string& liboai::netimpl::components::EncodedAuthentication::GetUsername() const { + return this->username; +} + +const std::string& liboai::netimpl::components::EncodedAuthentication::GetPassword() const { + return this->password; +} + +#if defined(__STDC_LIB_EXT1__) +void liboai::netimpl::components::EncodedAuthentication::SecureStringClear(std::string& s) { + if (s.empty()) { + return; + } + memset_s(&s.front(), s.length(), 0, s.length()); + s.clear(); +} +#elif defined(_WIN32) +void liboai::netimpl::components::EncodedAuthentication::SecureStringClear(std::string& s) { + if (s.empty()) { + return; + } + SecureZeroMemory(&s.front(), s.length()); + s.clear(); +} +#else +#if defined(__clang__) +#pragma clang optimize off // clang +#elif defined(__GNUC__) || defined(__MINGW32__) || defined(__MINGW32__) || defined(__MINGW64__) +#pragma GCC push_options // g++ +#pragma GCC optimize("O0") // g++ +#endif +void liboai::netimpl::components::EncodedAuthentication::SecureStringClear(std::string& s) { + if (s.empty()) { + return; + } + + char* ptr = &(s[0]); + memset(ptr, '\0', s.length()); + s.clear(); +} + +#if defined(__clang__) +#pragma clang optimize on // clang +#elif defined(__GNUC__) || defined(__MINGW32__) || defined(__MINGW32__) || defined(__MINGW64__) +#pragma GCC pop_options // g++ +#endif +#endif + +bool liboai::netimpl::components::ProxyAuthentication::has(const std::string& protocol) const { + return proxyAuth_.count(protocol) > 0; +} + +const char* liboai::netimpl::components::ProxyAuthentication::GetUsername(const std::string& protocol) { + return proxyAuth_[protocol].username.c_str(); +} + +const char* liboai::netimpl::components::ProxyAuthentication::GetPassword(const std::string& protocol) { + return proxyAuth_[protocol].password.c_str(); +} + +void liboai::netimpl::ErrorCheck(CURLcode* ecodes, size_t size, std::string_view where) { + if (ecodes) { + for (size_t i = 0; i < size; ++i) { + if (ecodes[i] != CURLE_OK) { + throw 
liboai::exception::OpenAIException( + curl_easy_strerror(ecodes[i]), + liboai::exception::EType::E_CURLERROR, + where + ); + } + } + } +} + +void liboai::netimpl::ErrorCheck(CURLcode ecode, std::string_view where) { + if (ecode != CURLE_OK) { + throw liboai::exception::OpenAIException( + curl_easy_strerror(ecode), + liboai::exception::EType::E_CURLERROR, + where + ); + } +} + +#if LIBCURL_VERSION_MAJOR < 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 56) + void liboai::netimpl::ErrorCheck(CURLFORMcode* ecodes, size_t size, std::string_view where) { + if (ecodes) { + for (size_t i = 0; i < size; ++i) { + if (ecodes[i] != CURL_FORMADD_OK) { + throw liboai::exception::OpenAIException( + "curl_formadd() failed.", + liboai::exception::EType::E_CURLERROR, + where + ); + } + } + } + } + + void liboai::netimpl::ErrorCheck(CURLFORMcode ecode, std::string_view where) { + if (ecode != CURL_FORMADD_OK) { + throw liboai::exception::OpenAIException( + "curl_formadd() failed.", + liboai::exception::EType::E_CURLERROR, + where + ); + } + } +#endif diff --git a/packages/kbot/cpp/packages/liboai/liboai/core/response.cpp b/packages/kbot/cpp/packages/liboai/liboai/core/response.cpp new file mode 100644 index 00000000..8a0d19dd --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/core/response.cpp @@ -0,0 +1,113 @@ +#include "../include/core/response.h" + +liboai::Response::Response(const liboai::Response& other) noexcept + : status_code(other.status_code), elapsed(other.elapsed), status_line(other.status_line), + content(other.content), url(other.url), reason(other.reason), raw_json(other.raw_json) {} + +liboai::Response::Response(liboai::Response&& other) noexcept + : status_code(other.status_code), elapsed(other.elapsed), status_line(std::move(other.status_line)), + content(std::move(other.content)), url(std::move(other.url)), reason(std::move(other.reason)), raw_json(std::move(other.raw_json)) {} + +liboai::Response::Response(std::string&& url, std::string&& 
content, std::string&& status_line, std::string&& reason, long status_code, double elapsed) noexcept(false) + : status_code(status_code), elapsed(elapsed), status_line(std::move(status_line)), + content(std::move(content)), url(url), reason(std::move(reason)) +{ + try { + if (!this->content.empty()) { + if (this->content[0] == '{') { + this->raw_json = nlohmann::json::parse(this->content); + } + else { + this->raw_json = nlohmann::json(); + } + } + else { + this->raw_json = nlohmann::json(); + } + } + catch (nlohmann::json::parse_error& e) { + throw liboai::exception::OpenAIException( + e.what(), + liboai::exception::EType::E_FAILURETOPARSE, + "liboai::Response::Response(std::string&&, std::string&&, ...)" + ); + } + + // check the response for errors -- nothrow on success + this->CheckResponse(); +} + +liboai::Response& liboai::Response::operator=(const liboai::Response& other) noexcept { + this->status_code = other.status_code; + this->elapsed = other.elapsed; + this->status_line = other.status_line; + this->content = other.content; + this->url = other.url; + this->reason = other.reason; + this->raw_json = other.raw_json; + + return *this; +} + +liboai::Response& liboai::Response::operator=(liboai::Response&& other) noexcept { + this->status_code = other.status_code; + this->elapsed = other.elapsed; + this->status_line = std::move(other.status_line); + this->content = std::move(other.content); + this->url = std::move(other.url); + this->reason = std::move(other.reason); + this->raw_json = std::move(other.raw_json); + + return *this; +} + +namespace liboai { + +std::ostream& operator<<(std::ostream& os, const Response& r) { + !r.raw_json.empty() ? os << r.raw_json.dump(4) : os << "null"; + return os; +} + +} + +void liboai::Response::CheckResponse() const noexcept(false) { + if (this->status_code == 429) { + throw liboai::exception::OpenAIRateLimited( + !this->reason.empty() ? 
this->reason : "Rate limited", + liboai::exception::EType::E_RATELIMIT, + "liboai::Response::CheckResponse()" + ); + } + else if (this->status_code == 0) { + throw liboai::exception::OpenAIException( + "A connection error occurred", + liboai::exception::EType::E_CONNECTIONERROR, + "liboai::Response::CheckResponse()" + ); + } + else if (this->status_code < 200 || this->status_code >= 300) { + if (this->raw_json.contains("error")) { + try { + throw liboai::exception::OpenAIException( + this->raw_json["error"]["message"].get(), + liboai::exception::EType::E_APIERROR, + "liboai::Response::CheckResponse()" + ); + } + catch (nlohmann::json::parse_error& e) { + throw liboai::exception::OpenAIException( + e.what(), + liboai::exception::EType::E_FAILURETOPARSE, + "liboai::Response::CheckResponse()" + ); + } + } + else { + throw liboai::exception::OpenAIException( + !this->reason.empty() ? this->reason : "An unknown error occurred", + liboai::exception::EType::E_BADREQUEST, + "liboai::Response::CheckResponse()" + ); + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/audio.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/audio.h new file mode 100644 index 00000000..4861bafd --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/audio.h @@ -0,0 +1,199 @@ +#pragma once + +/* + audio.h : Audio component class for OpenAI. + This class contains all the methods for the Audio component + of the OpenAI API. This class provides access to 'Audio' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. 
+*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Audio final : private Network { + public: + Audio(const std::string &root): Network(root) {} + ~Audio() = default; + Audio(const Audio&) = delete; + Audio(Audio&&) = delete; + + Audio& operator=(const Audio&) = delete; + Audio& operator=(Audio&&) = delete; + + /* + @brief Transcribes audio into the input language. + + @param *file The audio file to transcribe. + @param *model The model to use for transcription. + Only 'whisper-1' is currently available. + @param prompt An optional text to guide the model's style + or continue a previous audio segment. The + prompt should match the audio language. + @param response_format The format of the transcript output. + @param temperature The sampling temperature, between 0 and 1. + Higher values like 0.8 will make the output + more random, while lower values like 0.2 + will make it more focused and deterministic. + If set to 0, the model will use log probability + to automatically increase the temperature until + certain thresholds are hit. + @param language The language of the audio file. + + @returns A liboai::Response object containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response transcribe( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt, + std::optional language = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously transcribes audio into the input language. + + @param *file The audio file to transcribe. + @param *model The model to use for transcription. + Only 'whisper-1' is currently available. + @param prompt An optional text to guide the model's style + or continue a previous audio segment. The + prompt should match the audio language. + @param response_format The format of the transcript output. 
+ @param temperature The sampling temperature, between 0 and 1. + Higher values like 0.8 will make the output + more random, while lower values like 0.2 + will make it more focused and deterministic. + If set to 0, the model will use log probability + to automatically increase the temperature until + certain thresholds are hit. + @param language The language of the audio file. + + @returns A liboai::Response future containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse transcribe_async( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt, + std::optional language = std::nullopt + ) const& noexcept(false); + + /* + @brief Translates audio into English. + + @param *file The audio file to translate. + @param *model The model to use for translation. + Only 'whisper-1' is currently available. + @param prompt An optional text to guide the model's style + or continue a previous audio segment. + @param response_format The format of the transcript output. + @param temperature The sampling temperature, between 0 and 1. + Higher values like 0.8 will make the output + more random, while lower values like 0.2 + will make it more focused and deterministic. + If set to 0, the model will use log probability + to automatically increase the temperature until + certain thresholds are hit. + + @returns A liboai::Response object containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response translate( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously translates audio into English. + + @param *file The audio file to translate. + @param *model The model to use for translation. 
+ Only 'whisper-1' is currently available. + @param prompt An optional text to guide the model's style + or continue a previous audio segment. + @param response_format The format of the transcript output. + @param temperature The sampling temperature, between 0 and 1. + Higher values like 0.8 will make the output + more random, while lower values like 0.2 + will make it more focused and deterministic. + If set to 0, the model will use log probability + to automatically increase the temperature until + certain thresholds are hit. + + @returns A liboai::Response future containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse translate_async( + const std::filesystem::path& file, + const std::string& model, + std::optional prompt = std::nullopt, + std::optional response_format = std::nullopt, + std::optional temperature = std::nullopt + ) const& noexcept(false); + + /* + @brief Turn text into lifelike spoken audio. + + @param *model The model to use for translation. + Only 'tts-1' and 'tts-1-hd' are currently available. + @param *voice The voice to use when generating the audio. + Supported voices are alloy, echo, fable, onyx, nova, and shimmer. + @param *input The text to generate audio for. + The maximum length is 4096 characters. + @param response_format The format to audio in. + Supported formats are mp3, opus, aac, flac, wav, and pcm. + @param speed The speed of the generated audio. + Select a value from 0.25 to 4.0. 1.0 is the default. + + @returns A liboai::Response object containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response speech( + const std::string& model, + const std::string& voice, + const std::string& input, + std::optional response_format = std::nullopt, + std::optional speed = std::nullopt + ) const& noexcept(false); + + /* + @brief Asynchronously turn text into lifelike spoken audio. + + @param *model The model to use for translation. + Only 'tts-1' and 'tts-1-hd' are currently available. 
+ @param *voice The voice to use when generating the audio. + Supported voices are alloy, echo, fable, onyx, nova, and shimmer. + @param *input The text to generate audio for. + The maximum length is 4096 characters. + @param response_format The format to audio in. + Supported formats are mp3, opus, aac, flac, wav, and pcm. + @param speed The speed of the generated audio. + Select a value from 0.25 to 4.0. 1.0 is the default. + + @returns A liboai::Response object containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse speech_async( + const std::string& model, + const std::string& voice, + const std::string& input, + std::optional response_format = std::nullopt, + std::optional speed = std::nullopt + ) const& noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/azure.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/azure.h new file mode 100644 index 00000000..8e064020 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/azure.h @@ -0,0 +1,304 @@ +#pragma once + +/* + azure.h : Azure component class for OpenAI. + Azure provides their own API for access to the OpenAI API. + This class provides methods that, provided that the proper + Azure authentication information has been set, allows users + to access the OpenAI API through Azure. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" +#include "chat.h" + +namespace liboai { + class Azure final : private Network { + public: + Azure(const std::string &root): Network(root) {} + NON_COPYABLE(Azure) + NON_MOVABLE(Azure) + ~Azure() = default; + + using ChatStreamCallback = std::function; + using StreamCallback = std::function; + + /* + @brief Given a prompt, the model will return one or more + predicted completions, and can also return the + probabilities of alternative tokens at each position. 
+ + @param *resource_name The name of your Azure OpenAI Resource. + @param *deployment_id The deployment name you chose when you deployed the model. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param Refer to liboai::Completions::create for more information on the remaining parameters. + + @returns A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create_completion( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Given a prompt, the model will asynchronously return + one or more predicted completions, and can also return the + probabilities of alternative tokens at each position. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *deployment_id The deployment name you chose when you deployed the model. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param Refer to liboai::Completions::create for more information on the remaining parameters. + + @returns A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::FutureResponse create_completion_async( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Creates an embedding vector representing the input text. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *deployment_id The deployment name you chose when you deployed the model. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *input Input text to get embeddings for, encoded as a string. The number of input tokens + varies depending on what model you are using. + @param Refer to liboai::Embeddings::create for more information on the remaining parameters. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create_embedding( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + const std::string& input, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates an embedding vector representing the input text. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *deployment_id The deployment name you chose when you deployed the model. 
+ @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *input Input text to get embeddings for, encoded as a string. The number of input tokens + varies depending on what model you are using. + @param Refer to liboai::Embeddings::create for more information on the remaining parameters. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_embedding_async( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + const std::string& input, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Creates a completion for the chat message. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *deployment_id The deployment name you chose when you deployed the model. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *conversation A Conversation object containing the conversation data. + @param Refer to liboai::Chat::create for more information on the remaining parameters. + + @returns A liboai::Response object containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create_chat_completion( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + Conversation& conversation, + std::optional function_call = std::nullopt, + std::optional temperature = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates a completion for the chat message. 
+ + @param *resource_name The name of your Azure OpenAI Resource. + @param *deployment_id The deployment name you chose when you deployed the model. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *conversation A Conversation object containing the conversation data. + @param Refer to liboai::Chat::create for more information on the remaining parameters. + + @returns A liboai::Response object containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_chat_completion_async( + const std::string& resource_name, + const std::string& deployment_id, + const std::string& api_version, + Conversation& conversation, + std::optional function_call = std::nullopt, + std::optional temperature = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Generate a batch of images from a text caption. + Image generation is currently only available with api-version=2023-06-01-preview. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *prompt The text to create an image from. + @param n The number of images to create. + @param size The size of the image to create. + + @returns A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::Response request_image_generation( + const std::string& resource_name, + const std::string& api_version, + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously generate a batch of images from a text caption. + Image generation is currently only available with api-version=2023-06-01-preview. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *prompt The text to create an image from. + @param n The number of images to create. + @param size The size of the image to create. + + @returns A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse request_image_generation_async( + const std::string& resource_name, + const std::string& api_version, + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt + ) const & noexcept(false); + + /* + @brief Retrieve the results (URL) of a previously called image generation operation. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *operation_id The GUID that identifies the original image generation request. + + @returns A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response get_generated_image( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id + ) const & noexcept(false); + + /* + @brief Asynchronously retrieve the results (URL) of a previously called image generation operation. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. 
+ @param *operation_id The GUID that identifies the original image generation request. + + @returns A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse get_generated_image_async( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id + ) const & noexcept(false); + + /* + @brief Deletes the corresponding image from the Azure server. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *operation_id The GUID that identifies the original image generation request. + + @returns A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response delete_generated_image( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id + ) const & noexcept(false); + + /* + @brief Asynchronously deletes the corresponding image from the Azure server. + + @param *resource_name The name of your Azure OpenAI Resource. + @param *api_version The API version to use for this operation. This follows the YYYY-MM-DD format. + @param *operation_id The GUID that identifies the original image generation request. + + @returns A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::FutureResponse delete_generated_image_async( + const std::string& resource_name, + const std::string& api_version, + const std::string& operation_id + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + using StrippedStreamCallback = std::function; + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/chat.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/chat.h new file mode 100644 index 00000000..e3283d71 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/chat.h @@ -0,0 +1,974 @@ +#pragma once + +/* + chat.h : Chat component header file + This class contains all the methods for the Chat component + of the OpenAI API. This class provides access to 'Chat' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +#include + +namespace liboai { + /* + @brief Class containing methods for building Function objects to supply + to the OpenAI ChatCompletions component class via the associated + Conversation class. + */ + class Functions final { + public: + Functions(); + Functions(const Functions& other); + Functions(Functions&& old) noexcept; + template ...>, int> = 0> + Functions(_Fname... function_names) { auto result = this->AddFunctions(function_names...); } + ~Functions() = default; + + Functions& operator=(const Functions& other); + Functions& operator=(Functions&& old) noexcept; + + /* + @brief Denotes a parameter of a function, which includes + the parameter's name, type, description, and an optional + enumeration. + + @param name The name of the parameter. + @param type The type of the parameter. 
+ @param description The description of the parameter. + @param enumeration An optional enumeration of possible + values for the parameter. + */ + struct FunctionParameter { + FunctionParameter() = default; + FunctionParameter( + std::string_view name, + std::string_view type, + std::string_view description, + std::optional> enumeration = std::nullopt + ) : name(name), type(type), description(description), enumeration(enumeration) {} + FunctionParameter(const FunctionParameter& other) = default; + FunctionParameter(FunctionParameter&& old) noexcept = default; + ~FunctionParameter() = default; + + FunctionParameter& operator=(const FunctionParameter& other) = default; + FunctionParameter& operator=(FunctionParameter&& old) noexcept = default; + + std::string name; + std::string type; + std::string description; + std::optional> enumeration; + }; + + /* + @brief Adds a function named 'function_name' to the list of + functions. This function, once added, can then be + referenced in subsequent 'Functions' class method calls + by the name provided here. + + @param *function_name The name of the function to add. + + @returns True/False denoting whether the function was added + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AddFunction(std::string_view function_name) & noexcept(false); + + /* + @brief Same as AddFunction, but allows for adding multiple + functions at once. + + @param *function_names The name of the function to add. + + @returns True/False denoting whether the functions were added + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AddFunctions(std::initializer_list function_names) & noexcept(false); + + /* + @brief Same as AddFunction, but allows for adding multiple + functions at once. + + @param *function_names The name of the function to add. + + @returns True/False denoting whether the functions were added + successfully. 
+ */ + [[nodiscard]] + LIBOAI_EXPORT bool AddFunctions(std::vector function_names) & noexcept(false); + + /* + @brief Same as AddFunction, but allows for adding multiple + functions at once. + + @param *function_names The name of the function to add. + + @returns True/False denoting whether the functions were added + successfully. + */ + template ...>, int> = 0> + [[nodiscard]] bool AddFunctions(_Fnames... function_names) & noexcept(false) { + return this->AddFunctions({ function_names... }); + } + + /* + @brief Pops the specified function from the list of functions. + This will also remove any associated name, description, + parameters, and so on as it involves removing the entire + 'function_name' key from the JSON object. + + @param *function_name The name of the function to pop. + + @returns True/False denoting whether the function was popped + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopFunction(std::string_view function_name) & noexcept(false); + + /* + @brief Same as PopFunction, but allows for popping multiple + functions at once. + + @param *function_names The name of the function to pop. + + @returns True/False denoting whether the functions were popped + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopFunctions(std::initializer_list function_names) & noexcept(false); + + /* + @brief Same as PopFunction, but allows for popping multiple + functions at once. + + @param *function_names The name of the function to pop. + + @returns True/False denoting whether the functions were popped + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopFunctions(std::vector function_names) & noexcept(false); + + /* + @brief Same as PopFunction, but allows for popping multiple + functions at once. + + @param *function_names The name of the function to pop. + + @returns True/False denoting whether the functions were popped + successfully. + */ + template ...>, int> = 0> + [[nodiscard]] bool PopFunctions(_Fnames... 
function_names) & noexcept(false) { + return this->PopFunctions({ function_names... }); + } + + /* + @brief Sets a previously added function's description. + + @param *target The name of the function to set the description of. + @param *description The description to set for the function. + + @returns True/False denoting whether the description was set + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetDescription(std::string_view target, std::string_view description) & noexcept(false); + + /* + @brief Pops a previously added function's description. + + @param *target The name of the function to pop the description of. + + @returns True/False denoting whether the description was popped + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopDescription(std::string_view target) & noexcept(false); + + /* + @brief Sets which set function parameters are required. + + @param *target The name of the function to set the required parameters of. + @param *params A series of parameter names to set as required. + + @returns True/False denoting whether the required parameters were set + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetRequired(std::string_view target, std::initializer_list params) & noexcept(false); + + /* + @brief Sets which set function parameters are required. + + @param *target The name of the function to set the required parameters of. + @param *params A series of parameter names to set as required. + + @returns True/False denoting whether the required parameters were set + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetRequired(std::string_view target, std::vector params) & noexcept(false); + + /* + @brief Sets which set function parameters are required. + + @param *target The name of the function to set the required parameters of. + @param *params A series of parameter names to set as required. + + @returns True/False denoting whether the required parameters were set + successfully. 
+ */ + template ...>, int> = 0> + [[nodiscard]] bool SetRequired(std::string_view target, _Rp... params) & noexcept(false) { + return SetRequired(target, { params... }); + } + + /* + @brief Pops previously set required function parameters. + + @param *target The name of the function to pop the required parameters of. + + @returns True/False denoting whether the required parameters were popped + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopRequired(std::string_view target) & noexcept(false); + + /* + @brief Appends a parameter to a previously set series of required function + parameters. This function should only be called if required parameters + have already been set for 'target' via SetRequired(). + + @param *target The name of the function to append the required parameter to. + @param *param The name of the parameter to append to the required parameters. + + @returns True/False denoting whether the required parameter was appended + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendRequired(std::string_view target, std::string_view param) & noexcept(false); + + /* + @brief Appends multiple parameters to a previously set series of required function + parameters. This function should only be called if required parameters have + already been set for 'target' via SetRequired(). + + @param *target The name of the function to append the required parameter to. + @param *params The name of the parameters to append to the required parameters. + + @returns True/False denoting whether the required parameter was appended + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendRequired(std::string_view target, std::initializer_list params) & noexcept(false); + + /* + @brief Appends multiple parameters to a previously set series of required function + parameters. This function should only be called if required parameters have + already been set for 'target' via SetRequired(). 
+ + @param *target The name of the function to append the required parameter to. + @param *params The name of the parameters to append to the required parameters. + + @returns True/False denoting whether the required parameter was appended + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendRequired(std::string_view target, std::vector params) & noexcept(false); + + /* + @brief Appends multiple parameters to a previously set series of required function + parameters. This function should only be called if required parameters have + already been set for 'target' via SetRequired(). + + @param *target The name of the function to append the required parameter to. + @param *params The name of the parameters to append to the required parameters. + + @returns True/False denoting whether the required parameter was appended + successfully. + */ + template ...>, int> = 0> + [[nodiscard]] bool AppendRequired(std::string_view target, _Rp... params) & noexcept(false) { + return AppendRequired(target, { params... }); + } + + /* + @brief Adds a single parameter to an added function. + + @param *target The name of the function to add the parameter to. + @param *parameter The parameter to add to the function. + + @returns True/False denoting whether the parameter was added + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetParameter(std::string_view target, FunctionParameter parameter) & noexcept(false); + + /* + @brief Adds a series of parameters to an added function. + + @param *target The name of the function to add the parameters to. + @param *parameters The parameters to add to the function. + + @returns True/False denoting whether the parameters were added + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetParameters(std::string_view target, std::initializer_list parameters) & noexcept(false); + + /* + @brief Adds a series of parameters to an added function. + + @param *target The name of the function to add the parameters to. 
+ @param *parameters The parameters to add to the function. + + @returns True/False denoting whether the parameters were added + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetParameters(std::string_view target, std::vector parameters) & noexcept(false); + + /* + @brief Adds a series of parameters to an added function. + + @param *target The name of the function to add the parameters to. + @param *parameters The parameters to add to the function. + + @returns True/False denoting whether the parameters were added + successfully. + */ + template ...>, int> = 0> + [[nodiscard]] bool SetParameters(std::string_view target, _Fp... parameters) & noexcept(false) { + return SetParameters(target, { parameters... }); + } + + /* + @brief Pops all of a function's set parameters. + This function removes set 'required' values and anything + else that falls under the category of 'parameters' as a + result of removing the entire 'parameters' section. + + @param *target The name of the function to pop the parameters of. + + @returns True/False denoting whether the parameters were popped + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopParameters(std::string_view target) & noexcept(false); + + /* + @brief Pops one or more of a function's set parameters. + + @param *target The name of the function to pop the parameters of. + @param *params The names of the parameters to pop. + + @returns True/False denoting whether the parameters were popped + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopParameters(std::string_view target, std::initializer_list param_names) & noexcept(false); + + /* + @brief Pops one or more of a function's set parameters. + + @param *target The name of the function to pop the parameters of. + @param *params The names of the parameters to pop. + + @returns True/False denoting whether the parameters were popped + successfully. 
+ */ + [[nodiscard]] + LIBOAI_EXPORT bool PopParameters(std::string_view target, std::vector param_names) & noexcept(false); + + /* + @brief Pops one or more of a function's set parameters. + + @param *target The name of the function to pop the parameters of. + @param *params The names of the parameters to pop. + + @returns True/False denoting whether the parameters were popped + successfully. + */ + template ...>, int> = 0> + [[nodiscard]] bool PopParameters(std::string_view target, _Pname... param_names) & noexcept(false) { + return PopParameters(target, { param_names... }); + } + + /* + @brief Appends a single parameter to a previously added function. + + @param *target The name of the function to append the parameter to. + @param *parameter The parameter to append to the function. + + @returns True/False denoting whether the parameter was appended + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendParameter(std::string_view target, FunctionParameter parameter) & noexcept(false); + + /* + @brief Appends a series of parameters to a previously added function. + + @param *target The name of the function to append the parameters to. + @param *parameters The parameters to append to the function. + + @returns True/False denoting whether the parameters were appended + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendParameters(std::string_view target, std::initializer_list parameters) & noexcept(false); + + /* + @brief Appends a series of parameters to a previously added function. + + @param *target The name of the function to append the parameters to. + @param *parameters The parameters to append to the function. + + @returns True/False denoting whether the parameters were appended + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendParameters(std::string_view target, std::vector parameters) & noexcept(false); + + /* + @brief Appends a series of parameters to a previously added function. 
+ + @param *target The name of the function to append the parameters to. + @param *parameters The parameters to append to the function. + + @returns True/False denoting whether the parameters were appended + successfully. + */ + template ...>, int> = 0> + [[nodiscard]] bool AppendParameters(std::string_view target, _Fp... parameters) & noexcept(false) { + return AppendParameters(target, { parameters... }); + } + + /* + @brief Returns the JSON object of the internal conversation. + */ + LIBOAI_EXPORT const nlohmann::json& GetJSON() const & noexcept; + + private: + using index = std::size_t; + [[nodiscard]] index GetFunctionIndex(std::string_view function_name) const & noexcept(false); + + nlohmann::json _functions; + }; + + /* + @brief Class containing, and used for keeping track of, the chat history. + An object of this class should be created, set with system and user data, + and provided to ChatCompletion::create (system is optional). + + The general usage of this class is as follows: + 1. Create a ChatCompletion::Conversation object. + 2. Set the user data, which is the user's input - such as + a question or a command as well as optionally set the + system data to guide how the assistant responds. + 3. Provide the ChatCompletion::Conversation object to + ChatCompletion::create. + 4. Update the ChatCompletion::Conversation object with + the response from the API - either the object or the + response content can be used to update the object. + 5. Retrieve the assistant's response from the + ChatCompletion::Conversation object. + 6. Repeat steps 2, 3, 4 and 5 until the conversation is + complete. + + After providing the object to ChatCompletion::create, the object will + be updated with the 'assistant' response - this response is the + assistant's response to the user's input. A developer could then + retrieve this response and display it to the user, and then set the + next user input in the object and pass it back to ChatCompletion::create, + if desired. 
+ */ + class Conversation final { + public: + Conversation(); + Conversation(const Conversation& other); + Conversation(Conversation&& old) noexcept; + Conversation(std::string_view system_data); + Conversation(std::string_view system_data, std::string_view user_data); + Conversation(std::string_view system_data, std::initializer_list user_data); + Conversation(std::initializer_list user_data); + explicit Conversation(const std::vector& user_data); + ~Conversation() = default; + + Conversation& operator=(const Conversation& other); + Conversation& operator=(Conversation&& old) noexcept; + + friend std::ostream& operator<<(std::ostream& os, const Conversation& conv); + + + /* + @brief Changes the content of the first system message + in the conversation. This method updates the content + of the first system message in the conversation, if + it exists and is of type "system". If the first message + is not a system message or the conversation is empty, + the method will return false. + + @param new_data A string_view containing the new content + for the system message. Must be non-empty. + + @returns True/False denoting whether the first system + message was changed successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool ChangeFirstSystemMessage(std::string_view new_data) & noexcept(false); + + /* + @brief Sets the system data for the conversation. + This method sets the system data for the conversation. + The system data is the data that helps set the behavior + of the assistant so it knows how to respond. + + @param *data The system data to set. + + @returns True/False denoting whether the system data was set + successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetSystemData(std::string_view data) & noexcept(false); + + /* + @brief Removes the set system data from the top of the conversation. + The system data must be the first data set, if used, + in order to be removed. If the system data is not + the first data set, this method will return false. 
+ + @returns True/False denoting whether the system data was + removed successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopSystemData() & noexcept(false); + + /* + @brief Adds user input to the conversation. + This method adds user input to the conversation. + The user input is the user's input - such as a question + or a command. + + If using a system prompt, the user input should be + provided after the system prompt is set - i.e. after + SetSystemData() is called. + + @param *data The user input to add. + + @returns True/False denoting whether the user input was + added successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AddUserData(std::string_view data) & noexcept(false); + + /* + @brief Adds user input to the conversation. + This method adds user input to the conversation. + The user input is the user's input - such as a question + or a command. + + If using a system prompt, the user input should be + provided after the system prompt is set - i.e. after + SetSystemData() is called. + + @param *data The user input to add. + @param *name The name of the author of this message. + name is required if role is function, and + it should be the name of the function whose + response is in the content. + + @returns True/False denoting whether the user input was + added successfully. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AddUserData( + std::string_view data, + std::string_view name + ) & noexcept(false); + + /* + @brief Removes the last added user data. + + @returns True/False denoting whether the user data was removed. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopUserData() & noexcept(false); + + /* + @brief Gets the last response from the assistant. + This method gets the last response from the assistant. + The response is the assistant's response to the user's + input. 
+ */ + LIBOAI_EXPORT std::string GetLastResponse() const & noexcept; + + /* + @brief Returns whether the most recent response, following + a call to Update, contains a function_call or not. + + It is important to note that, when making use of functions, + a developer must call this method to determine whether + the response contains a function call or if it contains a + regular response. If the response contains a function call, + + + @returns True/False denoting whether the most recent response + contains a function_call or not. + */ + [[nodiscard]] + LIBOAI_EXPORT bool LastResponseIsFunctionCall() const & noexcept; + + /* + @brief Returns the name of the function_call in the most recent + response. This should only be called if LastResponseIsFunctionCall() + returns true. + */ + [[nodiscard]] + LIBOAI_EXPORT std::string GetLastFunctionCallName() const & noexcept(false); + + /* + @brief Returns the arguments of the function_call in the most + recent response in their raw JSON form. This should only + be called if LastResponseIsFunctionCall() returns true. + */ + [[nodiscard]] + LIBOAI_EXPORT std::string GetLastFunctionCallArguments() const & noexcept(false); + + /* + @brief Removes the last assistant response. + + @returns True/False denoting whether the last response was removed. + */ + [[nodiscard]] + LIBOAI_EXPORT bool PopLastResponse() & noexcept(false); + + /* + @brief Updates the conversation given JSON data. + This method updates the conversation given JSON data. + The JSON data should be the JSON 'messages' data returned + from the OpenAI API. + + This method should only be used if AppendStreamData was NOT + used immediately before it. + + For instance, if we made a call to create*(), and provided a + callback function to stream and, within this callback, we used + AppendStreamData to update the conversation per message, we + would NOT want to use this method. 
In this scenario, the + AppendStreamData method would have already updated the + conversation, so this method would be a bad idea to call + afterwards. + + @param *history The JSON data to update the conversation with. + This should be the 'messages' array of data returned + from a call to ChatCompletion::create. + + @returns True/False denoting whether the conversation was updated. + */ + [[nodiscard]] + LIBOAI_EXPORT bool Update(std::string_view history) & noexcept(false); + + /* + @brief Updates the conversation given a Response object. + This method updates the conversation given a Response object. + This method should only be used if AppendStreamData was NOT + used immediately before it. + + For instance, if we made a call to create*(), and provided a + callback function to stream and, within this callback, we used + AppendStreamData to update the conversation per message, we + would NOT want to use this method. In this scenario, the + AppendStreamData method would have already updated the + conversation, so this method would be a bad idea to call + afterwards. + + @param *response The Response to update the conversation with. + This should be the Response returned from a call + to ChatCompletion::create. + + @returns True/False denoting whether the update was successful. + */ + [[nodiscard]] + LIBOAI_EXPORT bool Update(const Response& response) & noexcept(false); + + /* + @brief Exports the entire conversation to a JSON string. + This method exports the conversation to a JSON string. + The JSON string can be used to save the conversation + to a file. The exported string contains both the + conversation and included functions, if any. + + @returns The JSON string representing the conversation. + */ + [[nodiscard]] + LIBOAI_EXPORT std::string Export() const & noexcept(false); + + /* + @brief Imports a conversation from a JSON string. + This method imports a conversation from a JSON string. 
+ The JSON string should be the JSON string returned + from a call to Export(). + + @param *json The JSON string to import the conversation from. + + @returns True/False denoting whether the conversation was imported. + */ + [[nodiscard]] + LIBOAI_EXPORT bool Import(std::string_view json) & noexcept(false); + + /* + @brief Appends stream data (SSEs) from streamed methods. + This method updates the conversation given a token from a + streamed method. This method should be used when using + streamed methods such as ChatCompletion::create or + create_async with a callback supplied. This function should + be called from within the stream's callback function + receiving the SSEs. + + @param *token Streamed token (data) to update the conversation with. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendStreamData(std::string data) & noexcept(false); + + + /* + @brief Appends stream data (SSEs) from streamed methods. + This method updates the conversation given a token from a + streamed method. This method should be used when using + streamed methods such as ChatCompletion::create or + create_async with a callback supplied. This function should + be called from within the stream's callback function + receiving the SSEs. + + @param *token Streamed token (data) to update the conversation with. + @param *delta output parameter. The delta to append to the conversation. + @param *completed output parameter. Whether the stream is completed. + */ + [[nodiscard]] + LIBOAI_EXPORT bool AppendStreamData(std::string data, std::string& delta, bool& completed) & noexcept(false); + + /* + @brief Sets the functions to be used for the conversation. + This method sets the functions to be used for the conversation. + + @param *functions The functions to set. + + @returns True/False denoting whether the functions were set. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetFunctions(Functions functions) & noexcept(false); + + /* + @brief Pops any previously set functions. 
+ + @param *functions The functions to set. + */ + LIBOAI_EXPORT void PopFunctions() & noexcept(false); + + /* + @brief Returns the raw JSON dump of the internal conversation object + in string format. + */ + LIBOAI_EXPORT std::string GetRawConversation() const & noexcept; + + /* + @brief Returns the JSON object of the internal conversation. + */ + LIBOAI_EXPORT const nlohmann::json& GetJSON() const & noexcept; + + /* + @brief Returns the raw JSON dump of the internal functions object + in string format - if one exists. + */ + LIBOAI_EXPORT std::string GetRawFunctions() const & noexcept; + + /* + @brief Returns the JSON object of the set functions. + */ + LIBOAI_EXPORT const nlohmann::json& GetFunctionsJSON() const & noexcept; + + /* + @brief Returns whether the conversation has functions or not. this function call from ChatComplete + */ + [[nodiscard]] constexpr bool HasFunctions() const & noexcept { return this->_functions ? true : false; } + + /** + * @brief Sets the maximum history size for the conversation. + * + * @param size The maximum number of messages allowed in the conversation history. + * Older messages will be removed when the limit is exceeded. + */ + void SetMaxHistorySize(size_t size) noexcept { _max_history_size = size; } + + private: + friend class ChatCompletion; friend class Azure; + [[nodiscard]] std::vector SplitStreamedData(std::string data) const noexcept(false); + void RemoveStrings(std::string& s, std::string_view p) const noexcept(false); + void EraseExtra(); + /* + @brief split full stream data that read from remote server. + @returns vector of string that contains the split data that will contains the last termination string(data: "DONE"). 
+ */ + [[nodiscard]] std::vector SplitFullStreamedData(std::string data) const noexcept(false); + bool ParseStreamData(std::string data, std::string& delta, bool& completed); + + nlohmann::json _conversation; + std::optional _functions = std::nullopt; + bool _last_resp_is_fc = false; + std::string _last_incomplete_buffer; + size_t _max_history_size = (std::numeric_limits::max)(); + }; + + class ChatCompletion final : private Network { + public: + ChatCompletion(const std::string &root): Network(root) {} + NON_COPYABLE(ChatCompletion) + NON_MOVABLE(ChatCompletion) + ~ChatCompletion() = default; + + using ChatStreamCallback = std::function; + + /* + @brief Creates a completion for the chat message. + + @param *model ID of the model to use. Currently, + only gpt-3.5-turbo and gpt-3.5-turbo-0301 + are supported. + @param *conversation A Conversation object containing the + conversation data. + @param function_call Controls how the model responds to function calls. "none" + means the model does not call a function, and responds to + the end-user. "auto" means the model can pick between an + end-user or calling a function. + @param temperature What sampling temperature to use, + between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values + like 0.2 will make it more focused and deterministic. + @param top_p An alternative to sampling with temperature, called + nucleus sampling, where the model considers the results + of the tokens with top_p probability mass. So 0.1 means + only the tokens comprising the top 10% probability mass + are considered. + @param n How many chat completion choices to generate for each + input message. + @param stream If set, partial message deltas will be sent, like in + ChatGPT. Tokens will be sent as data-only server-sent + vents as they become available, with the stream terminated + by a data: [DONE] message. + @param stop to 4 sequences where the API will stop generating further + tokens. 
+ @param max_tokens The maximum number of tokens allowed for the generated answer. + By default, the number of tokens the model can return will be + (4096 - prompt tokens). + @param presence_penalty Number between -2.0 and 2.0. Positive values penalize new tokens + based on whether they appear in the text so far, increasing the + model's likelihood to talk about new topics. + @param frequency_penalty Number between -2.0 and 2.0. Positive values penalize new tokens + based on their existing frequency in the text so far, decreasing + the model's likelihood to repeat the same line verbatim. + @param logit_bias Modify the likelihood of specified tokens appearing in the completion. + @param user The user ID to associate with the request. This is used to + prevent abuse of the API. + + @returns A liboai::Response object containing the + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create( + const std::string& model, + Conversation& conversation, + std::optional function_call = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates a completion for the chat message. + + @param *model ID of the model to use. Currently, + only gpt-3.5-turbo and gpt-3.5-turbo-0301 + are supported. + @param *conversation A Conversation object containing the + conversation data. + @param function_call Controls how the model responds to function calls. "none" + means the model does not call a function, and responds to + the end-user. "auto" means the model can pick between an + end-user or calling a function. 
+ @param temperature What sampling temperature to use, + between 0 and 2. Higher values like 0.8 will + make the output more random, while lower values + like 0.2 will make it more focused and deterministic. + @param top_p An alternative to sampling with temperature, called + nucleus sampling, where the model considers the results + of the tokens with top_p probability mass. So 0.1 means + only the tokens comprising the top 10% probability mass + are considered. + @param n How many chat completion choices to generate for each + input message. + @param stream If set, partial message deltas will be sent, like in + ChatGPT. Tokens will be sent as data-only server-sent + vents as they become available, with the stream terminated + by a data: [DONE] message. + @param stop to 4 sequences where the API will stop generating further + tokens. + @param max_tokens The maximum number of tokens allowed for the generated answer. + By default, the number of tokens the model can return will be + (4096 - prompt tokens). + @param presence_penalty Number between -2.0 and 2.0. Positive values penalize new tokens + based on whether they appear in the text so far, increasing the + model's likelihood to talk about new topics. + @param frequency_penalty Number between -2.0 and 2.0. Positive values penalize new tokens + based on their existing frequency in the text so far, decreasing + the model's likelihood to repeat the same line verbatim. + @param logit_bias Modify the likelihood of specified tokens appearing in the completion. + @param user The user ID to associate with the request. This is used to + prevent abuse of the API. + + @returns A liboai::Response future containing the + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& model, + Conversation& conversation, + std::optional function_call = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional> stop = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + using StrippedStreamCallback = std::function; + }; +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/completions.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/completions.h new file mode 100644 index 00000000..2804af11 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/completions.h @@ -0,0 +1,171 @@ +#pragma once + +/* + completions.h : Completions component class for OpenAI. + This class contains all the methods for the Completions component + of the OpenAI API. This class provides access to 'Completions' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. 
+*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Completions final : private Network { + public: + Completions(const std::string &root): Network(root) {} + NON_COPYABLE(Completions) + NON_MOVABLE(Completions) + ~Completions() = default; + + using StreamCallback = std::function; + + /* + @brief Given a prompt, the model will return one or more + predicted completions, and can also return the + probabilities of alternative tokens at each position. + + @param *model The model to use for completion. + @param prompt The prompt(s) to generate completions for. + @param suffix The suffix that comes after a completion of inserted text. + @param max_tokens The maximum number of tokens to generate in a completion. + @param temperature The temperature for the model. Higher values will result in more + creative completions, while lower values will result in more + repetitive completions. + @param top_p The top_p for the model. This is the probability mass that the + model will consider when making predictions. Lower values will + result in more creative completions, while higher values will + result in more repetitive completions. + @param n The number of completions to generate. + @param stream Stream partial progress back to the client. A callback function + that is called each time new data is received from the API. If + no callback is supplied, this parameter is disabled and the + API will wait until the completion is finished before returning + the response. + + @param logprobs The number of log probabilities to return for each token. + @param echo Whether to include the prompt in the returned completion. + @param stop A list of tokens that the model will stop generating completions + at. This can be a single token or a list of tokens. + @param presence_penalty The presence penalty for the model. This is a number between + -2.0 and 2.0. 
Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's + likelihood to talk about new topics. + @param frequency_penalty The frequency penalty for the model. This is a number between + -2.0 and 2.0. Positive values penalize new tokens based on + their existing frequency in the text so far, decreasing the + model's likelihood to repeat the same line verbatim. + @param best_of Generates best_of completions server-side and returns the "best" + one. When used with n, best_of controls the number of candidate + completions and n specifies how many to return � best_of must be + greater than n + + Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have + reasonable settings for max_tokens and stop. + @param logit_bias Modify the likelihood of specified tokens appearing in the completion. + Accepts a json object that maps tokens (specified by their token ID + in the GPT tokenizer) to an associated bias value from -100 to 100. + @param user A unique identifier representing your end-user. + + @returns A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::Response create( + const std::string& model_id, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Given a prompt, the model will return one or more + predicted completions asynchronously, and can also + return the probabilities of alternative tokens at each + position. + + @param *model The model to use for completion. + @param prompt The prompt(s) to generate completions for. + @param suffix The suffix that comes after a completion of inserted text. + @param max_tokens The maximum number of tokens to generate in a completion. + @param temperature The temperature for the model. Higher values will result in more + creative completions, while lower values will result in more + repetitive completions. + @param top_p The top_p for the model. This is the probability mass that the + model will consider when making predictions. Lower values will + result in more creative completions, while higher values will + result in more repetitive completions. + @param n The number of completions to generate. + @param stream Stream partial progress back to the client. A callback function + that is called each time new data is received from the API. If + no callback is supplied, this parameter is disabled and the + API will wait until the completion is finished before returning + the response. 
+ + @param logprobs The number of log probabilities to return for each token. + @param echo Whether to include the prompt in the returned completion. + @param stop A list of tokens that the model will stop generating completions + at. This can be a single token or a list of tokens. + @param presence_penalty The presence penalty for the model. This is a number between + -2.0 and 2.0. Positive values penalize new tokens based on + whether they appear in the text so far, increasing the model's + likelihood to talk about new topics. + @param frequency_penalty The frequency penalty for the model. This is a number between + -2.0 and 2.0. Positive values penalize new tokens based on + their existing frequency in the text so far, decreasing the + model's likelihood to repeat the same line verbatim. + @param best_of Generates best_of completions server-side and returns the "best" + one. When used with n, best_of controls the number of candidate + completions and n specifies how many to return � best_of must be + greater than n + + Because this parameter generates many completions, it can quickly + consume your token quota. Use carefully and ensure that you have + reasonable settings for max_tokens and stop. + @param logit_bias Modify the likelihood of specified tokens appearing in the completion. + Accepts a json object that maps tokens (specified by their token ID + in the GPT tokenizer) to an associated bias value from -100 to 100. + @param user A unique identifier representing your end-user. + + @returns A liboai::Response future containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& model_id, + std::optional prompt = std::nullopt, + std::optional suffix = std::nullopt, + std::optional max_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional n = std::nullopt, + std::optional stream = std::nullopt, + std::optional logprobs = std::nullopt, + std::optional echo = std::nullopt, + std::optional> stop = std::nullopt, + std::optional presence_penalty = std::nullopt, + std::optional frequency_penalty = std::nullopt, + std::optional best_of = std::nullopt, + std::optional> logit_bias = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/edits.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/edits.h new file mode 100644 index 00000000..61c2f155 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/edits.h @@ -0,0 +1,90 @@ +#pragma once + +/* + edits.h : Edits component class for OpenAI. + This class contains all the methods for the Edits component + of the OpenAI API. This class provides access to 'Edits' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Edits final : private Network { + public: + Edits(const std::string &root): Network(root) {} + NON_COPYABLE(Edits) + NON_MOVABLE(Edits) + ~Edits() = default; + + /* + @brief Creates a new edit for the provided input, + instruction, and parameters + + @param *model The model to use for the edit. 
+ @param input The input text to edit. + @param instruction The instruction to edit the input. + @param n The number of edits to return. + @param temperature Higher values means the model will take more + risks. Try 0.9 for more creative applications, + and 0 (argmax sampling) for ones with a + well-defined answer. + @param top_p An alternative to sampling with temperature, + called nucleus sampling, where the model + considers the results of the tokens with + top_p probability mass. So 0.1 means only + the tokens comprising the top 10% probability + mass are considered. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create( + const std::string& model_id, + std::optional input = std::nullopt, + std::optional instruction = std::nullopt, + std::optional n = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates a new edit for the + provided input, instruction, and parameters + + @param *model The model to use for the edit. + @param input The input text to edit. + @param instruction The instruction to edit the input. + @param n The number of edits to return. + @param temperature Higher values means the model will take more + risks. Try 0.9 for more creative applications, + and 0 (argmax sampling) for ones with a + well-defined answer. + @param top_p An alternative to sampling with temperature, + called nucleus sampling, where the model + considers the results of the tokens with + top_p probability mass. So 0.1 means only + the tokens comprising the top 10% probability + mass are considered. + + @return A liboai::Response future containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& model_id, + std::optional input = std::nullopt, + std::optional instruction = std::nullopt, + std::optional n = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/embeddings.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/embeddings.h new file mode 100644 index 00000000..5deff3cc --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/embeddings.h @@ -0,0 +1,60 @@ +#pragma once + +/* + embeddings.h : Embeddings component class for OpenAI. + This class contains all the methods for the Embeddings component + of the OpenAI API. This class provides access to 'Embeddings' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Embeddings final : private Network { + public: + Embeddings(const std::string &root): Network(root) {} + NON_COPYABLE(Embeddings) + NON_MOVABLE(Embeddings) + ~Embeddings() = default; + + /* + @brief Creates an embedding vector representing the input text. + + @param *model The model to use for the edit. + @param input The input text to edit. + @param user A unique identifier representing your end-user + + @return A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::Response create( + const std::string& model_id, + std::optional input = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates an embedding vector representing the input text. + + @param *model The model to use for the edit. + @param input The input text to edit. + @param user A unique identifier representing your end-user + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& model_id, + std::optional input = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/files.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/files.h new file mode 100644 index 00000000..6effb62c --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/files.h @@ -0,0 +1,157 @@ +#pragma once + +/* + files.h : Files component class for OpenAI. + This class contains all the methods for the Files component + of the OpenAI API. This class provides access to 'Files' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Files final : private Network { + public: + Files(const std::string &root): Network(root) {} + NON_COPYABLE(Files) + NON_MOVABLE(Files) + ~Files() = default; + + /* + @brief Returns a list of files that belong to the user's organization. + + @return A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::Response list() const & noexcept(false); + + /* + @brief Asynchronously returns a list of files that belong to the + user's organization. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse list_async() const & noexcept(false); + + /* + @brief Upload a file that contains document(s) to be + used across various endpoints/features. Currently, + the size of all the files uploaded by one organization + can be up to 1 GB. + + @param file The JSON Lines file to be uploaded (path). + @param purpose The intended purpose of the uploaded documents. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create( + const std::filesystem::path& file, + const std::string& purpose + ) const & noexcept(false); + + /* + @brief Asynchronously upload a file that contains document(s) + to be used across various endpoints/features. Currently, + the size of all the files uploaded by one organization + can be up to 1 GB. + + @param file The JSON Lines file to be uploaded (path). + @param purpose The intended purpose of the uploaded documents. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::filesystem::path& file, + const std::string& purpose + ) const & noexcept(false); + + /* + @brief Delete [remove] a file. + + @param *file_id The ID of the file to use for this request + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response remove( + const std::string& file_id + ) const & noexcept(false); + + /* + @brief Asynchronously delete [remove] a file. + + @param *file_id The ID of the file to use for this request + + @return A liboai::Response future containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::FutureResponse remove_async( + const std::string& file_id + ) const & noexcept(false); + + /* + @brief Returns information about a specific file. + + @param *file_id The ID of the file to use for this request + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response retrieve( + const std::string& file_id + ) const & noexcept(false); + + /* + @brief Asynchronously returns information about a specific file. + + @param *file_id The ID of the file to use for this request + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse retrieve_async( + const std::string& file_id + ) const & noexcept(false); + + /* + @brief Downloads the contents of the specified file + to the specified path. + + @param *file_id The ID of the file to use for this request + @param *save_to The path to save the file to + + @return a boolean value indicating whether the file was + successfully downloaded or not. + */ + LIBOAI_EXPORT bool download( + const std::string& file_id, + const std::string& save_to + ) const & noexcept(false); + + /* + @brief Asynchronously downloads the contents of the specified file + to the specified path. + + @param *file_id The ID of the file to use for this request + @param *save_to The path to save the file to + + @return a boolean future indicating whether the file was + successfully downloaded or not. 
+ */ + LIBOAI_EXPORT std::future download_async( + const std::string& file_id, + const std::string& save_to + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/fine_tunes.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/fine_tunes.h new file mode 100644 index 00000000..7768eb8d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/fine_tunes.h @@ -0,0 +1,232 @@ +#pragma once + +/* + fine_tunes.h : Fine-tunes component class for OpenAI. + This class contains all the methods for the Fine-tunes component + of the OpenAI API. This class provides access to 'Fine-tunes' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class FineTunes final : private Network { + public: + FineTunes(const std::string &root): Network(root) {} + NON_COPYABLE(FineTunes) + NON_MOVABLE(FineTunes) + ~FineTunes() = default; + + using StreamCallback = std::function; + + /* + @brief Creates a job that fine-tunes a specified model from a + given dataset. + + @param *training_file The ID of an uploaded file that contains + training data. + @param validation_file The ID of an uploaded file that contains + validation data. + @param model The name of the base model to fine-tune. + @param n_epochs The number of epochs to train for. + @param batch_size The batch size to use for training. + @param learning_rate_multiplier The learning rate multiplier to use for training. + @param prompt_loss_weight The prompt loss weight to use for training. 
+ @param compute_classification_metrics If set, we calculate classification-specific metrics + such as accuracy and F-1 score using the validation + set at the end of every epoch. + @param classification_n_classes The number of classes in the classification task. + @param classification_positive_class The positive class in binary classification. + @param classification_betas If this is provided, we calculate F-beta scores at the + specified beta values. + @param suffix A suffix to append to the model name. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create( + const std::string& training_file, + std::optional validation_file = std::nullopt, + std::optional model_id = std::nullopt, + std::optional n_epochs = std::nullopt, + std::optional batch_size = std::nullopt, + std::optional learning_rate_multiplier = std::nullopt, + std::optional prompt_loss_weight = std::nullopt, + std::optional compute_classification_metrics = std::nullopt, + std::optional classification_n_classes = std::nullopt, + std::optional classification_positive_class = std::nullopt, + std::optional> classification_betas = std::nullopt, + std::optional suffix = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates a job that fine-tunes a specified + model from a given dataset. + + @param *training_file The ID of an uploaded file that contains + training data. + @param validation_file The ID of an uploaded file that contains + validation data. + @param model The name of the base model to fine-tune. + @param n_epochs The number of epochs to train for. + @param batch_size The batch size to use for training. + @param learning_rate_multiplier The learning rate multiplier to use for training. + @param prompt_loss_weight The prompt loss weight to use for training. 
+ @param compute_classification_metrics If set, we calculate classification-specific metrics + such as accuracy and F-1 score using the validation + set at the end of every epoch. + @param classification_n_classes The number of classes in the classification task. + @param classification_positive_class The positive class in binary classification. + @param classification_betas If this is provided, we calculate F-beta scores at the + specified beta values. + @param suffix A suffix to append to the model name. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& training_file, + std::optional validation_file = std::nullopt, + std::optional model_id = std::nullopt, + std::optional n_epochs = std::nullopt, + std::optional batch_size = std::nullopt, + std::optional learning_rate_multiplier = std::nullopt, + std::optional prompt_loss_weight = std::nullopt, + std::optional compute_classification_metrics = std::nullopt, + std::optional classification_n_classes = std::nullopt, + std::optional classification_positive_class = std::nullopt, + std::optional> classification_betas = std::nullopt, + std::optional suffix = std::nullopt + ) const & noexcept(false); + + /* + @brief List your organization's fine-tuning jobs + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response list() const & noexcept(false); + + /* + @brief Asynchronously list your organization's fine-tuning jobs + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse list_async() const & noexcept(false); + + /* + @brief Returns information about a specific file. + + @param *fine_tune_id The ID of the fine-tune job + + @return A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::Response retrieve( + const std::string& fine_tune_id + ) const & noexcept(false); + + /* + @brief Asynchronously returns information about a specific file. + + @param *fine_tune_id The ID of the fine-tune job + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse retrieve_async( + const std::string& fine_tune_id + ) const & noexcept(false); + + /* + @brief Immediately cancel a fine-tune job. + + @param *fine_tune_id The ID of the fine-tune job to cancel + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response cancel( + const std::string& fine_tune_id + ) const & noexcept(false); + + /* + @brief Immediately cancel a fine-tune job asynchronously. + + @param *fine_tune_id The ID of the fine-tune job to cancel + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse cancel_async( + const std::string& fine_tune_id + ) const & noexcept(false); + + /* + @brief Get fine-grained status updates for a fine-tune job. + + @param *fine_tune_id The ID of the fine-tune job to get events for. + @param stream Callback to stream events for the fine-tune job. + If no callback is supplied, this parameter is + disabled and the API will wait until the completion + is finished before returning the response. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response list_events( + const std::string& fine_tune_id, + std::optional stream = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously get fine-grained status updates for a fine-tune job. + + @param *fine_tune_id The ID of the fine-tune job to get events for. + @param stream Callback to stream events for the fine-tune job. 
+ If no callback is supplied, this parameter is + disabled and the API will wait until the completion + is finished before returning the response. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse list_events_async( + const std::string& fine_tune_id, + std::optional stream = std::nullopt + ) const & noexcept(false); + + /* + @brief Delete a fine-tuned model. You must have the Owner role in your organization. + + @param *model The model to delete + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response remove( + const std::string& model + ) const & noexcept(false); + + /* + @brief Asynchronously deletes a fine-tuned model. You must have the Owner role in your organization. + + @param *model The model to delete + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse remove_async( + const std::string& model + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/images.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/images.h new file mode 100644 index 00000000..a81d522b --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/images.h @@ -0,0 +1,164 @@ +#pragma once + +/* + images.h : Images component class for OpenAI. + This class contains all the methods for the Images component + of the OpenAI API. This class provides access to 'Images' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. 
+*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Images final : private Network { + public: + Images(const std::string &root): Network(root) {} + NON_COPYABLE(Images) + NON_MOVABLE(Images) + ~Images() = default; + + /* + @brief Images component method to create an image from + provided text. + + @param *prompt The text to create an image from. + @param n The number of images to create. + @param size The size of the image to create. + @param response_format The format of the response. + @param user A unique identifier representing an end-user. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create( + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Images component method to asynchronously create an + image from provided text. + + @param *prompt The text to create an image from. + @param n The number of images to create. + @param size The size of the image to create. + @param response_format The format of the response. + @param user A unique identifier representing an end-user. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& prompt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Images component method to produce an edited + image from a provided base image and mask image + according to given text. + + @param *image The image to edit (path). + @param *prompt The text description of the desired image. + @param mask The mask to edit the image with (path). + @param n The number of images to create. 
+ @param size The size of the image to create. + @param response_format The format of the response. + @param user A unique identifier representing an end-user. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create_edit( + const std::filesystem::path& image, + const std::string& prompt, + std::optional mask = std::nullopt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Images component method to asynchronously + produce an edited image from a provided base + image and mask image according to given text. + + @param *image The image to edit (path). + @param *prompt The text description of the desired image. + @param mask The mask to edit the image with (path). + @param n The number of images to create. + @param size The size of the image to create. + @param response_format The format of the response. + @param user A unique identifier representing an end-user. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_edit_async( + const std::filesystem::path& image, + const std::string& prompt, + std::optional mask = std::nullopt, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Images component method to produce a variation + of a supplied image. + + @param *image The image to produce a variation of (path). + @param n The number of images to create. + @param size The size of the image to create. + @param response_format The format of the response. + @param user A unique identifier representing an end-user. + + @return A liboai::Response object containing the image(s) + data in JSON format. 
+ */ + LIBOAI_EXPORT liboai::Response create_variation( + const std::filesystem::path& image, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + /* + @brief Images component method to asynchronously produce + a variation of a supplied image. + + @param *image The image to produce a variation of (path). + @param n The number of images to create. + @param size The size of the image to create. + @param response_format The format of the response. + @param user A unique identifier representing an end-user. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_variation_async( + const std::filesystem::path& image, + std::optional n = std::nullopt, + std::optional size = std::nullopt, + std::optional response_format = std::nullopt, + std::optional user = std::nullopt + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/models.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/models.h new file mode 100644 index 00000000..d020b6bf --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/models.h @@ -0,0 +1,68 @@ +#pragma once + +/* + models.h : Models component class for OpenAI. + This class contains all the methods for the Models component + of the OpenAI API. This class provides access to 'Models' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. 
+*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Models final : private Network { + public: + Models(const std::string &root): Network(root) {} + NON_COPYABLE(Models) + NON_MOVABLE(Models) + ~Models() = default; + + /* + @brief List all available models. + + @returns A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response list() const & noexcept(false); + + /* + @brief Asynchronously list all available models. + + @returns A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse list_async() const & noexcept(false); + + /* + @brief Retrieve a specific model's information. + + #param *model The model to retrieve information for. + + @returns A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response retrieve( + const std::string& model + ) const & noexcept(false); + + /* + @brief Asynchronously retrieve a specific model's information. + + @param *model The model to retrieve information for. + + @returns A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse retrieve_async( + const std::string& model + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/moderations.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/moderations.h new file mode 100644 index 00000000..34dcf50a --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/moderations.h @@ -0,0 +1,58 @@ +#pragma once + +/* + moderations.h : Moderations component class for OpenAI. + This class contains all the methods for the Moderations component + of the OpenAI API. 
This class provides access to 'Moderations' + endpoints on the OpenAI API and should be accessed via the + liboai.h header file through an instantiated liboai::OpenAI + object after setting necessary authentication information + through the liboai::Authorization::Authorizer() singleton + object. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Moderations final : private Network { + public: + Moderations(const std::string &root): Network(root) {} + NON_COPYABLE(Moderations) + NON_MOVABLE(Moderations) + ~Moderations() = default; + + /* + @brief Create a new moderation and classify + if the given text is safe or unsafe. + + @param *input The text to be moderated. + @param model The model to use for the moderation. + + @return A liboai::Response object containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::Response create( + const std::string& input, + std::optional model = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates a new moderation and classifies + if the given text is safe or unsafe. + + @param *input The text to be moderated. + @param model The model to use for the moderation. + + @return A liboai::Response future containing the image(s) + data in JSON format. + */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& input, + std::optional model = std::nullopt + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/components/responses.h b/packages/kbot/cpp/packages/liboai/liboai/include/components/responses.h new file mode 100644 index 00000000..8d733b87 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/components/responses.h @@ -0,0 +1,193 @@ +#pragma once + +/* + responses.h : Responses component class for OpenAI. + This class provides access to the Responses API endpoints. 
+ It is available via liboai.h through an instantiated liboai::OpenAI + object after setting necessary authentication information. +*/ + +#include "../core/authorization.h" +#include "../core/response.h" + +namespace liboai { + class Responses final : private Network { + public: + using StreamCallback = std::function; + + Responses(const std::string &root): Network(root) {} + NON_COPYABLE(Responses) + NON_MOVABLE(Responses) + ~Responses() = default; + + /* + @brief Builds a Responses API request payload. + + @param *model The model ID to use. + @param *input Input for the response (string or array of items). + @param instructions Optional system-level instructions. + @param reasoning Optional reasoning configuration. + @param text Optional text output configuration. + @param max_output_tokens Optional max output tokens to generate. + @param temperature Optional sampling temperature. + @param top_p Optional nucleus sampling value. + @param seed Optional deterministic seed. + @param tools Optional tools array. + @param tool_choice Optional tool choice configuration. + @param parallel_tool_calls Optional parallel tool calls toggle. + @param store Optional storage toggle. + @param previous_response_id Optional prior response ID for continuity. + @param include Optional include array. + @param metadata Optional metadata object. + @param user Optional user ID. + @param truncation Optional truncation setting. + @param stream Optional stream flag. + + @return A JSON object representing the request payload. 
+ */ + LIBOAI_EXPORT static nlohmann::json build_request( + const std::string& model, + const nlohmann::json& input, + std::optional instructions = std::nullopt, + std::optional reasoning = std::nullopt, + std::optional text = std::nullopt, + std::optional max_output_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional seed = std::nullopt, + std::optional tools = std::nullopt, + std::optional tool_choice = std::nullopt, + std::optional parallel_tool_calls = std::nullopt, + std::optional store = std::nullopt, + std::optional previous_response_id = std::nullopt, + std::optional include = std::nullopt, + std::optional metadata = std::nullopt, + std::optional user = std::nullopt, + std::optional truncation = std::nullopt, + std::optional stream = std::nullopt + ); + + /* + @brief Creates a response using the Responses API. + + @param *model The model ID to use. + @param *input Input for the response (string or array of items). + @param instructions Optional system-level instructions. + @param reasoning Optional reasoning configuration. + @param text Optional text output configuration. + @param max_output_tokens Optional max output tokens to generate. + @param temperature Optional sampling temperature. + @param top_p Optional nucleus sampling value. + @param seed Optional deterministic seed. + @param tools Optional tools array. + @param tool_choice Optional tool choice configuration. + @param parallel_tool_calls Optional parallel tool calls toggle. + @param store Optional storage toggle. + @param previous_response_id Optional prior response ID for continuity. + @param include Optional include array. + @param metadata Optional metadata object. + @param user Optional user ID. + @param truncation Optional truncation setting. + @param stream Optional stream callback for SSE responses. + + @return A liboai::Response object containing response data. 
+ */ + LIBOAI_EXPORT liboai::Response create( + const std::string& model, + const nlohmann::json& input, + std::optional instructions = std::nullopt, + std::optional reasoning = std::nullopt, + std::optional text = std::nullopt, + std::optional max_output_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional seed = std::nullopt, + std::optional tools = std::nullopt, + std::optional tool_choice = std::nullopt, + std::optional parallel_tool_calls = std::nullopt, + std::optional store = std::nullopt, + std::optional previous_response_id = std::nullopt, + std::optional include = std::nullopt, + std::optional metadata = std::nullopt, + std::optional user = std::nullopt, + std::optional truncation = std::nullopt, + std::optional stream = std::nullopt + ) const & noexcept(false); + + /* + @brief Creates a response using the Responses API from a raw JSON payload. + + @param *request The raw JSON payload for the request. + @param stream Optional stream callback for SSE responses. + + @return A liboai::Response object containing response data. + */ + LIBOAI_EXPORT liboai::Response create( + const nlohmann::json& request, + std::optional stream = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates a response using the Responses API. + + @param *model The model ID to use. + @param *input Input for the response (string or array of items). + @param instructions Optional system-level instructions. + @param reasoning Optional reasoning configuration. + @param text Optional text output configuration. + @param max_output_tokens Optional max output tokens to generate. + @param temperature Optional sampling temperature. + @param top_p Optional nucleus sampling value. + @param seed Optional deterministic seed. + @param tools Optional tools array. + @param tool_choice Optional tool choice configuration. + @param parallel_tool_calls Optional parallel tool calls toggle. 
+ @param store Optional storage toggle. + @param previous_response_id Optional prior response ID for continuity. + @param include Optional include array. + @param metadata Optional metadata object. + @param user Optional user ID. + @param truncation Optional truncation setting. + @param stream Optional stream callback for SSE responses. + + @return A liboai::FutureResponse containing future response data. + */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const std::string& model, + const nlohmann::json& input, + std::optional instructions = std::nullopt, + std::optional reasoning = std::nullopt, + std::optional text = std::nullopt, + std::optional max_output_tokens = std::nullopt, + std::optional temperature = std::nullopt, + std::optional top_p = std::nullopt, + std::optional seed = std::nullopt, + std::optional tools = std::nullopt, + std::optional tool_choice = std::nullopt, + std::optional parallel_tool_calls = std::nullopt, + std::optional store = std::nullopt, + std::optional previous_response_id = std::nullopt, + std::optional include = std::nullopt, + std::optional metadata = std::nullopt, + std::optional user = std::nullopt, + std::optional truncation = std::nullopt, + std::optional stream = std::nullopt + ) const & noexcept(false); + + /* + @brief Asynchronously creates a response using the Responses API from a raw JSON payload. + + @param *request The raw JSON payload for the request. + @param stream Optional stream callback for SSE responses. + + @return A liboai::FutureResponse containing future response data. 
+ */ + LIBOAI_EXPORT liboai::FutureResponse create_async( + const nlohmann::json& request, + std::optional stream = std::nullopt + ) const & noexcept(false); + + private: + Authorization& auth_ = Authorization::Authorizer(); + }; +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/core/authorization.h b/packages/kbot/cpp/packages/liboai/liboai/include/core/authorization.h new file mode 100644 index 00000000..b5b0c340 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/core/authorization.h @@ -0,0 +1,246 @@ +#pragma once + +/* + authorization.h : liboai authorization header. + This header file provides declarations for authorization + directives for authorizing requests with the OpenAI API. + Each component class makes use of a single object accessed + via liboai::Authorization::Authorizer() to retrieve and use + user-set authorization information to successfully complete + component API requests. +*/ + +#define _CRT_SECURE_NO_WARNINGS + +#include +#include +#include +#include +#include "network.h" + +namespace liboai { + class Authorization final { + public: // cons/des, operator deletions + Authorization() = default; + NON_COPYABLE(Authorization) + NON_MOVABLE(Authorization) + ~Authorization(); + + public: // member methods + /* + @brief Singleton paradigm access method. + @return A reference to the singleton instance of this class + to be used in all component classes. + */ + static Authorization& Authorizer() noexcept { + static Authorization instance; + return instance; + } + + /* + @brief Sets the authorization key for the OpenAI API + as the passed string. + @param key : The authorization key to use in component calls. + @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetKey(std::string_view key) noexcept; + + /* + @brief Sets the authorization key for the Azure OpenAI API + as the passed string. + @param key : The authorization key to use in Azure component calls. 
+ @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetAzureKey(std::string_view key) noexcept; + + /* + @brief Sets the Active Directory authorization token for the Azure OpenAI API + as the passed string. + @param key : The authorization key to use in Azure component calls. + @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetAzureKeyAD(std::string_view key) noexcept; + + /* + @brief Sets the authorization key for the OpenAI API + as the first line present in the file at the passed path. + @param path : The path to the file containing the authorization key. + @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetKeyFile(const std::filesystem::path& path) noexcept; + + /* + @brief Sets the authorization key for the Azure OpenAI API + as the first line present in the file at the passed path. + @param key : The path to the file containing the authorization key. + @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetAzureKeyFile(const std::filesystem::path& path) noexcept; + + /* + @brief Sets the Active Directory authorization token for the Azure OpenAI API + as the first line present in the file at the passed path. + @param key : The path to the file containing the authorization key. + @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetAzureKeyFileAD(const std::filesystem::path& path) noexcept; + + /* + @brief Sets the authorization key for the OpenAI API + as the value stored in the environment variable with + the passed name. + @param var : The name of the environment variable to + retrieve the authorization key from. + @returns True if the key was set successfully, false otherwise. 
+ */ + [[nodiscard]] + LIBOAI_EXPORT bool SetKeyEnv(std::string_view var) noexcept; + + /* + @brief Sets the authorization key for the Azure OpenAI API + as the value stored in the environment variable with + the passed name. + @param var : The name of the environment variable to + retrieve the authorization key from. + @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetAzureKeyEnv(std::string_view var) noexcept; + + /* + @brief Sets the Active Directory authorization token for the Azure OpenAI API + as the value stored in the environment variable with + the passed name. + @param var : The name of the environment variable to + retrieve the authorization key from. + @returns True if the key was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetAzureKeyEnvAD(std::string_view var) noexcept; + + /* + @brief Sets the organization identifier as the passed + string for use in component calls. + @param org : The organization identifier to use in + component calls. + @returns True if the ID was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetOrganization(std::string_view org) noexcept; + + /* + @brief Sets the organization identifier as the first + line present in the file at the passed path for use + in component calls. + @param path : The path to the file containing the + organization identifier. + @returns True if the ID was set successfully, false otherwise. + */ + [[nodiscard]] + LIBOAI_EXPORT bool SetOrganizationFile(const std::filesystem::path& path) noexcept; + + /* + @brief Sets the organization identifier as the value + stored in the environment variable with the passed + name for use in component calls. + @param var : The name of the environment variable to + retrieve the organization identifier from. + @returns True if the ID was set successfully, false otherwise. 
+ */ + [[nodiscard]] + LIBOAI_EXPORT bool SetOrganizationEnv(std::string_view var) noexcept; + + /* + @brief Sets proxies to use for component calls. + @param hosts : The hosts to use as proxies in + paired { "protocol", "host" } format. + */ + LIBOAI_EXPORT void SetProxies(const std::initializer_list>& hosts) noexcept; + + /* + @brief Sets proxies to use for component calls. + @param hosts : The hosts to use as proxies in + paired { "protocol", "host" } format. + */ + LIBOAI_EXPORT void SetProxies(std::initializer_list>&& hosts) noexcept; + + /* + @brief Sets proxies to use for component calls. + @param hosts : The hosts to use as proxies in + paired { "protocol", "host" } format. + */ + LIBOAI_EXPORT void SetProxies(const std::map& hosts) noexcept; + + /* + @brief Sets proxies to use for component calls. + @param hosts : The hosts to use as proxies in + paired { "protocol", "host" } format. + */ + LIBOAI_EXPORT void SetProxies(std::map&& hosts) noexcept; + + /* + @brief Sets authentication information for proxies per-protocol. + + @param proto_up : A {protocol, {uname, passwd}} map to use for + authentication with proxies on a per-protocol basis. + */ + LIBOAI_EXPORT void SetProxyAuth(const std::map& proto_up) noexcept; + + /* + @brief Sets the timeout for component calls in milliseconds. + */ + LIBOAI_EXPORT void SetMaxTimeout(int32_t ms) noexcept { this->timeout_ = netimpl::components::Timeout(ms); } + + /* + @brief Returns currently the set authorization key. + */ + constexpr const std::string& GetKey() const noexcept { return this->key_; } + + /* + @brief Returns the currently set organization identifier. + */ + constexpr const std::string& GetOrganization() const noexcept { return this->org_; } + + /* + @returns The currently set proxies. + */ + netimpl::components::Proxies GetProxies() const noexcept { return this->proxies_; } + + /* + @returns The currently set proxy authentication information. 
+ */ + netimpl::components::ProxyAuthentication GetProxyAuth() const noexcept { return this->proxyAuth_; } + + /* + @returns The currently set timeout. + */ + netimpl::components::Timeout GetMaxTimeout() const noexcept { return this->timeout_; } + + /* + @returns An authorization header with the + currently set authorization information for use + in component calls. + */ + constexpr const netimpl::components::Header& GetAuthorizationHeaders() const noexcept { return this->openai_auth_headers_; } + + /* + @returns An authorization header with the + currently set Azure authorization information for use + in Azure component calls. + */ + constexpr const netimpl::components::Header& GetAzureAuthorizationHeaders() const noexcept { return this->azure_auth_headers_; } + + private: // member variables + std::string key_, org_; + netimpl::components::Header openai_auth_headers_, azure_auth_headers_; + netimpl::components::Proxies proxies_; + netimpl::components::ProxyAuthentication proxyAuth_; + netimpl::components::Timeout timeout_ = { 30000 }; + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/core/exception.h b/packages/kbot/cpp/packages/liboai/liboai/include/core/exception.h new file mode 100644 index 00000000..286a328d --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/core/exception.h @@ -0,0 +1,86 @@ +#pragma once + +/* + exception.h : liboai exception header. + This header file provides declarations for exception + directives for handling exceptions thrown by liboai + component classes. +*/ + +#include +#include +#include + +#if defined(LIBOAI_DEBUG) + #define _liboai_dbg(fmt, ...) 
printf(fmt, __VA_ARGS__); +#endif + +namespace liboai { + namespace exception { + enum class EType : uint8_t { + E_FAILURETOPARSE, + E_BADREQUEST, + E_APIERROR, + E_RATELIMIT, + E_CONNECTIONERROR, + E_FILEERROR, + E_CURLERROR + }; + + constexpr const char* _etype_strs_[7] = { + "E_FAILURETOPARSE:0x00", + "E_BADREQUEST:0x01", + "E_APIERROR:0x02", + "E_RATELIMIT:0x03", + "E_CONNECTIONERROR:0x04", + "E_FILEERROR:0x05", + "E_CURLERROR:0x06" + }; + + class OpenAIException : public std::exception { + public: + OpenAIException() = default; + OpenAIException(const OpenAIException& rhs) noexcept + : error_type_(rhs.error_type_), data_(rhs.data_), locale_(rhs.locale_) { this->fmt_str_ = (this->locale_ + ": " + this->data_ + " (" + this->GetETypeString(this->error_type_) + ")"); } + OpenAIException(OpenAIException&& rhs) noexcept + : error_type_(rhs.error_type_), data_(std::move(rhs.data_)), locale_(std::move(rhs.locale_)) { this->fmt_str_ = (this->locale_ + ": " + this->data_ + " (" + this->GetETypeString(this->error_type_) + ")"); } + OpenAIException(std::string_view data, EType error_type, std::string_view locale) noexcept + : error_type_(error_type), data_(data), locale_(locale) { this->fmt_str_ = (this->locale_ + ": " + this->data_ + " (" + this->GetETypeString(this->error_type_) + ")"); } + + const char* what() const noexcept override { + return this->fmt_str_.c_str(); + } + + constexpr const char* GetETypeString(EType type) const noexcept { + return _etype_strs_[static_cast(type)]; + } + + private: + EType error_type_; + std::string data_, locale_, fmt_str_; + }; + + class OpenAIRateLimited : public std::exception { + public: + OpenAIRateLimited() = default; + OpenAIRateLimited(const OpenAIRateLimited& rhs) noexcept + : error_type_(rhs.error_type_), data_(rhs.data_), locale_(rhs.locale_) { this->fmt_str_ = (this->locale_ + ": " + this->data_ + " (" + this->GetETypeString(this->error_type_) + ")"); } + OpenAIRateLimited(OpenAIRateLimited&& rhs) noexcept + : 
error_type_(rhs.error_type_), data_(std::move(rhs.data_)), locale_(std::move(rhs.locale_)) { this->fmt_str_ = (this->locale_ + ": " + this->data_ + " (" + this->GetETypeString(this->error_type_) + ")"); } + OpenAIRateLimited(std::string_view data, EType error_type, std::string_view locale) noexcept + : error_type_(error_type), data_(data), locale_(locale) { this->fmt_str_ = (this->locale_ + ": " + this->data_ + " (" + this->GetETypeString(this->error_type_) + ")"); } + + const char* what() const noexcept override { + return this->fmt_str_.c_str(); + } + + constexpr const char* GetETypeString(EType type) const noexcept { + return _etype_strs_[static_cast(type)]; + } + + private: + EType error_type_; + std::string data_, locale_, fmt_str_; + }; + } +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/core/netimpl.h b/packages/kbot/cpp/packages/liboai/liboai/include/core/netimpl.h new file mode 100644 index 00000000..c5f82f63 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/core/netimpl.h @@ -0,0 +1,759 @@ +#pragma once + +/* + Copyright (c) 2017-2021 Huu Nguyen + Copyright (c) 2022 libcpr and many other contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + + netimpl.h : Holds the internal network control-flow implementation. + This header file provides the internal interface(s) used to + allow files such as network.h to properly work. It contains + the internal cURL network wrapping functionality and all + other network-related functionality. + + This was created to remove the dependency on the library + cURL for People (CPR). +*/ + +#include +#include +#include +#include +#include +#include +#include "response.h" + +namespace liboai { + namespace netimpl { + static bool _flag = false; + + void ErrorCheck(CURLcode* ecodes, size_t size, std::string_view where); + void ErrorCheck(CURLcode ecode, std::string_view where); + + #if LIBCURL_VERSION_MAJOR < 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 56) + void ErrorCheck(CURLFORMcode* ecodes, size_t size, std::string_view where); + void ErrorCheck(CURLFORMcode ecode, std::string_view where); + #endif + + class CurlHolder { + public: + CurlHolder(); + NON_COPYABLE(CurlHolder) + NON_MOVABLE(CurlHolder) + virtual ~CurlHolder(); + + std::string urlEncode(const std::string& s); + std::string urlDecode(const std::string& s); + + private: + static std::mutex& curl_easy_get_mutex_() { + static std::mutex g_curl_mutex; + return g_curl_mutex; + } + + protected: + CURL* curl_ = nullptr; + }; + + /* + Contains all components that can be passed to below free methods + Get, Post, and Delete such as Url, Headers, Body, Multipart, + etc. 
+ */ + namespace components { + template + class StringHolder { + public: + StringHolder() = default; + explicit StringHolder(std::string str) : str_(std::move(str)) {} + explicit StringHolder(std::string_view str) : str_(str) {} + explicit StringHolder(const char* str) : str_(str) {} + StringHolder(const char* str, size_t len) : str_(str, len) {} + StringHolder(const std::initializer_list args) { + str_ = std::accumulate(args.begin(), args.end(), str_); + } + StringHolder(const StringHolder& other) = default; + StringHolder(StringHolder&& old) noexcept = default; + virtual ~StringHolder() = default; + + StringHolder& operator=(StringHolder&& old) noexcept = default; + StringHolder& operator=(const StringHolder& other) = default; + + explicit operator std::string() const { + return str_; + } + + T operator+(const char* rhs) const { + return T(str_ + rhs); + } + + T operator+(const std::string& rhs) const { + return T(str_ + rhs); + } + + T operator+(const StringHolder& rhs) const { + return T(str_ + rhs.str_); + } + + void operator+=(const char* rhs) { + str_ += rhs; + } + void operator+=(const std::string& rhs) { + str_ += rhs; + } + void operator+=(const StringHolder& rhs) { + str_ += rhs; + } + + bool operator==(const char* rhs) const { + return str_ == rhs; + } + bool operator==(const std::string& rhs) const { + return str_ == rhs; + } + bool operator==(const StringHolder& rhs) const { + return str_ == rhs.str_; + } + + bool operator!=(const char* rhs) const { + return str_.c_str() != rhs; + } + bool operator!=(const std::string& rhs) const { + return str_ != rhs; + } + bool operator!=(const StringHolder& rhs) const { + return str_ != rhs.str_; + } + + const std::string& str() { + return str_; + } + const std::string& str() const { + return str_; + } + const char* c_str() const { + return str_.c_str(); + } + const char* data() const { + return str_.data(); + } + + protected: + std::string str_{}; + }; + + struct File final { + File(const File& other) { + 
this->filepath = other.filepath; + this->overrided_filename = other.overrided_filename; + } + File(File&& old) noexcept { + this->filepath = std::move(old.filepath); + this->overrided_filename = std::move(old.overrided_filename); + } + explicit File(std::string p_filepath, const std::string& p_overrided_filename = {}) : filepath(std::move(p_filepath)), overrided_filename(p_overrided_filename) {} + + File& operator=(const File& other) { + this->filepath = other.filepath; + this->overrided_filename = other.overrided_filename; + return *this; + } + + File& operator=(File&& old) noexcept { + this->filepath = std::move(old.filepath); + this->overrided_filename = std::move(old.overrided_filename); + return *this; + } + + std::string filepath; + std::string overrided_filename; + + bool hasOverridedFilename() const noexcept { + return !overrided_filename.empty(); + }; + }; + + class Files final { + public: + Files() = default; + Files(const Files& other) { + this->files = other.files; + } + Files(Files&& old) noexcept { + this->files = std::move(old.files); + } + Files(const File& p_file) : files{ p_file } {}; + Files(const std::initializer_list& p_files) : files{ p_files } {}; + Files(const std::initializer_list& p_filepaths) { + for (const std::string& filepath : p_filepaths) { + files.emplace_back(File(filepath)); + } + }; + ~Files() noexcept = default; + + Files& operator=(const Files& other) { + this->files = other.files; + return *this; + } + Files& operator=(Files&& old) noexcept { + this->files = std::move(old.files); + return *this; + } + + using iterator = std::vector::iterator; + using const_iterator = std::vector::const_iterator; + + iterator begin(); + iterator end(); + const_iterator begin() const; + const_iterator end() const; + const_iterator cbegin() const; + const_iterator cend() const; + void emplace_back(const File& file); + void push_back(const File& file); + void pop_back(); + + private: + std::vector files; + }; + + class Url final : public 
StringHolder { + public: + Url() = default; + Url(std::string url) : StringHolder(std::move(url)) {} + Url(std::string_view url) : StringHolder(url) {} + Url(const char* url) : StringHolder(url) {} + Url(const char* str, size_t len) : StringHolder(std::string(str, len)) {} + Url(const std::initializer_list args) : StringHolder(args) {} + Url(const Url& other) = default; + Url(Url&& old) noexcept = default; + ~Url() override = default; + + Url& operator=(Url&& old) noexcept = default; + Url& operator=(const Url& other) = default; + }; + + class Body final : public StringHolder { + public: + Body() = default; + Body(const Body& other) { this->str_ = other.str_; } + Body(Body&& old) noexcept { this->str_ = std::move(old.str_); } + Body(std::string body) : StringHolder(std::move(body)) {} + Body(std::string_view body) : StringHolder(body) {} + Body(const char* body) : StringHolder(body) {} + Body(const char* str, size_t len) : StringHolder(str, len) {} + Body(const std::initializer_list args) : StringHolder(args) {} + Body(const File& file) { + std::ifstream is(file.filepath, std::ifstream::binary); + if (!is) { + throw std::invalid_argument("Can't open the file for HTTP request body!"); + } + + is.seekg(0, std::ios::end); + const std::streampos length = is.tellg(); + is.seekg(0, std::ios::beg); + std::string buffer; + buffer.resize(static_cast(length)); + is.read(buffer.data(), length); + str_ = std::move(buffer); + } + ~Body() override = default; + + Body& operator=(Body&& old) noexcept { + this->str_ = std::move(old.str_); + return *this; + } + Body& operator=(const Body& other) { + this->str_ = other.str_; + return *this; + } + }; + + struct Buffer final { + using data_t = const unsigned char*; + + template + Buffer(Iterator begin, Iterator end, std::filesystem::path&& p_filename) + : data{ reinterpret_cast(&(*begin)) }, datalen{ static_cast(std::distance(begin, end)) }, filename(std::move(p_filename)) { + is_random_access_iterator(begin, end); + 
static_assert(sizeof(*begin) == 1, "Only byte buffers can be used"); + } + + template + typename std::enable_if::iterator_category, std::random_access_iterator_tag>::value>::type is_random_access_iterator(Iterator /* begin */, Iterator /* end */) {} + + data_t data; + long datalen; + const std::filesystem::path filename; + }; + + struct Part final { + Part(const Part& other) { + this->name = other.name; + this->value = other.value; + this->content_type = other.content_type; + this->data = other.data; + this->datalen = other.datalen; + this->is_file = other.is_file; + this->is_buffer = other.is_buffer; + this->files = other.files; + } + Part(Part&& old) noexcept { + this->name = std::move(old.name); + this->value = std::move(old.value); + this->content_type = std::move(old.content_type); + this->data = old.data; + this->datalen = old.datalen; + this->is_file = old.is_file; + this->is_buffer = old.is_buffer; + this->files = std::move(old.files); + } + Part(const std::string& p_name, const std::string& p_value, const std::string& p_content_type = {}) : name{ p_name }, value{ p_value }, content_type{ p_content_type }, is_file{ false }, is_buffer{ false } {} + Part(const std::string& p_name, const std::int32_t& p_value, const std::string& p_content_type = {}) : name{ p_name }, value{ std::to_string(p_value) }, content_type{ p_content_type }, is_file{ false }, is_buffer{ false } {} + Part(const std::string& p_name, const Files& p_files, const std::string& p_content_type = {}) : name{ p_name }, value{}, content_type{ p_content_type }, is_file{ true }, is_buffer{ false }, files{ p_files } {} + Part(const std::string& p_name, Files&& p_files, const std::string& p_content_type = {}) : name{ p_name }, value{}, content_type{ p_content_type }, is_file{ true }, is_buffer{ false }, files{ std::move(p_files) } {} + Part(const std::string& p_name, const Buffer& buffer, const std::string& p_content_type = {}) : name{ p_name }, value{ buffer.filename.string() }, content_type{ 
p_content_type }, data{ buffer.data }, datalen{ buffer.datalen }, is_file{ false }, is_buffer{ true } {} + + Part& operator=(const Part& other) { + this->name = other.name; + this->value = other.value; + this->content_type = other.content_type; + this->data = other.data; + this->datalen = other.datalen; + this->is_file = other.is_file; + this->is_buffer = other.is_buffer; + this->files = other.files; + return *this; + } + Part& operator=(Part&& old) noexcept { + this->name = std::move(old.name); + this->value = std::move(old.value); + this->content_type = std::move(old.content_type); + this->data = old.data; + this->datalen = old.datalen; + this->is_file = old.is_file; + this->is_buffer = old.is_buffer; + this->files = std::move(old.files); + return *this; + } + + std::string name; + std::string value; + std::string content_type; + Buffer::data_t data{ nullptr }; + long datalen{ 0 }; + bool is_file; + bool is_buffer; + + Files files; + }; + + class Multipart final { + public: + Multipart() = default; + Multipart(const Multipart& other) { + this->parts = other.parts; + } + Multipart(Multipart&& old) noexcept { + this->parts = std::move(old.parts); + } + + Multipart& operator=(const Multipart& other) { + this->parts = other.parts; + return *this; + } + Multipart& operator=(Multipart&& old) noexcept { + this->parts = std::move(old.parts); + return *this; + } + + Multipart(const std::initializer_list& parts); + + std::vector parts; + }; + + struct CaseInsensitiveCompare { + bool operator()(const std::string& a, const std::string& b) const noexcept { + return std::lexicographical_compare(a.begin(), a.end(), b.begin(), b.end(), [](unsigned char ac, unsigned char bc) { + return std::tolower(ac) < std::tolower(bc); + }); + } + }; + using Header = std::map; + + struct Parameter final { + Parameter() = default; + Parameter(const Parameter& other) { + this->key = other.key; + this->value = other.value; + } + Parameter(Parameter&& old) noexcept { + this->key = 
std::move(old.key); + this->value = std::move(old.value); + } + Parameter(std::string p_key, std::string p_value) : key{ std::move(p_key) }, value{ std::move(p_value) } {} + + Parameter& operator=(const Parameter& other) { + this->key = other.key; + this->value = other.value; + return *this; + } + Parameter& operator=(Parameter&& old) noexcept { + this->key = std::move(old.key); + this->value = std::move(old.value); + return *this; + } + + std::string key; + std::string value; + }; + + class Parameters final { + public: + Parameters() = default; + Parameters(const Parameters& other) { + this->parameters_ = other.parameters_; + } + Parameters(Parameters&& old) noexcept { + this->parameters_ = std::move(old.parameters_); + } + Parameters(const std::initializer_list& parameters); + + Parameters& operator=(const Parameters& other) { + this->parameters_ = other.parameters_; + return *this; + } + Parameters& operator=(Parameters&& old) noexcept { + this->parameters_ = std::move(old.parameters_); + return *this; + } + + void Add(const std::initializer_list& parameters); + void Add(const Parameter& parameter); + bool Empty() const; + + std::string BuildParameterString() const; + + private: + std::vector parameters_; + }; + + class Timeout final { + public: + Timeout(const std::chrono::milliseconds& duration) : ms{ duration } {} + Timeout(const std::int32_t& milliseconds) : Timeout{ std::chrono::milliseconds(milliseconds) } {} + + long Milliseconds() const; + + std::chrono::milliseconds ms; + }; + + class Proxies final { + public: + Proxies() = default; + Proxies(const Proxies& other) { + this->hosts_ = other.hosts_; + } + Proxies(Proxies&& old) noexcept { + this->hosts_ = std::move(old.hosts_); + } + Proxies(const std::initializer_list>& hosts); + Proxies(const std::map& hosts); + + Proxies& operator=(const Proxies& other) { + this->hosts_ = other.hosts_; + return *this; + } + Proxies& operator=(Proxies&& old) noexcept { + this->hosts_ = std::move(old.hosts_); + return 
*this; + } + + bool has(const std::string& protocol) const; + const std::string& operator[](const std::string& protocol); + + private: + std::map hosts_; + }; + + std::string urlEncodeHelper(const std::string& s); + std::string urlDecodeHelper(const std::string& s); + + class ProxyAuthentication; + class EncodedAuthentication final { + friend ProxyAuthentication; + + public: + EncodedAuthentication() = default; + EncodedAuthentication(const EncodedAuthentication& other) { + this->username = other.username; + this->password = other.password; + } + EncodedAuthentication(EncodedAuthentication&& old) noexcept { + this->username = std::move(old.username); + this->password = std::move(old.password); + } + EncodedAuthentication(const std::string& p_username, const std::string& p_password) : username(urlEncodeHelper(p_username)), password(urlEncodeHelper(p_password)) {} + virtual ~EncodedAuthentication() noexcept; + + EncodedAuthentication& operator=(EncodedAuthentication&& old) noexcept { + this->username = std::move(old.username); + this->password = std::move(old.password); + return *this; + } + EncodedAuthentication& operator=(const EncodedAuthentication& other) { + this->username = other.username; + this->password = other.password; + return *this; + } + + [[nodiscard]] const std::string& GetUsername() const; + [[nodiscard]] const std::string& GetPassword() const; + + void SecureStringClear(std::string& str); + + private: + std::string username; + std::string password; + }; + + class ProxyAuthentication final { + public: + ProxyAuthentication() = default; + ProxyAuthentication(const ProxyAuthentication& other) { + this->proxyAuth_ = other.proxyAuth_; + } + ProxyAuthentication(ProxyAuthentication&& old) noexcept { + this->proxyAuth_ = std::move(old.proxyAuth_); + } + ProxyAuthentication(const std::initializer_list>& auths) : proxyAuth_{auths} {} + explicit ProxyAuthentication(const std::map& auths) : proxyAuth_{auths} {} + + ProxyAuthentication& operator=(const 
ProxyAuthentication& other) { + this->proxyAuth_ = other.proxyAuth_; + return *this; + } + ProxyAuthentication& operator=(ProxyAuthentication&& old) noexcept { + this->proxyAuth_ = std::move(old.proxyAuth_); + return *this; + } + + [[nodiscard]] bool has(const std::string& protocol) const; + const char* GetUsername(const std::string& protocol); + const char* GetPassword(const std::string& protocol); + + private: + std::map proxyAuth_; + }; + + class WriteCallback final { + public: + WriteCallback() = default; + WriteCallback(const WriteCallback& other) : callback(other.callback), userdata(other.userdata) {} + WriteCallback(WriteCallback&& old) noexcept : callback(std::move(old.callback)), userdata(std::move(old.userdata)) {} + WriteCallback(std::function p_callback, intptr_t p_userdata = 0) + : userdata(p_userdata), callback(std::move(p_callback)) {} + + WriteCallback& operator=(const WriteCallback& other) { + this->callback = other.callback; + this->userdata = other.userdata; + return *this; + } + WriteCallback& operator=(WriteCallback&& old) noexcept { + this->callback = std::move(old.callback); + this->userdata = std::move(old.userdata); + return *this; + } + + [[nodiscard]] bool operator()(std::string data) const { + return callback(std::move(data), userdata); + } + + intptr_t userdata{}; + std::function callback; + }; + size_t writeUserFunction(char* ptr, size_t size, size_t nmemb, const WriteCallback* write); + size_t writeFunction(char* ptr, size_t size, size_t nmemb, std::string* data); + size_t writeFileFunction(char* ptr, size_t size, size_t nmemb, std::ofstream* file); + }; + + /* + Class for sessions; each session is a single request. + Each call to Network::Request should follow the + following schema: + + 1. Create a session object. + 2. Set the session's options. + 3. Call the session's X() method where X is the + request method (GET, POST, etc.). + 4. Return the resulting Response object. 
+ */ + class Session final : private CurlHolder { + public: + Session() = default; + ~Session() override; + + liboai::Response Get(); + liboai::Response Post(); + liboai::Response Delete(); + liboai::Response Download(std::ofstream& file); + void ClearContext(); + + + private: + template + friend void set_options(Session&, _Options&&...); + + void Prepare(); + void PrepareDownloadInternal(); + CURLcode Perform(); + liboai::Response BuildResponseObject(); + liboai::Response Complete(); + liboai::Response CompleteDownload(); + + void PrepareGet(); + void PreparePost(); + void PrepareDelete(); + void PrepareDownload(std::ofstream& file); + + void ParseResponseHeader(const std::string& headers, std::string* status_line, std::string* reason); + + void SetOption(const components::Url& url); + void SetUrl(const components::Url& url); + + void SetOption(const components::Body& body); + void SetBody(const components::Body& body); + void SetOption(components::Body&& body); + void SetBody(components::Body&& body); + + void SetOption(const components::Multipart& multipart); + void SetMultipart(const components::Multipart& multipart); + void SetOption(components::Multipart&& multipart); + void SetMultipart(components::Multipart&& multipart); + + void SetOption(const components::Header& header); + void SetHeader(const components::Header& header); + + void SetOption(const components::Parameters& parameters); + void SetParameters(const components::Parameters& parameters); + void SetOption(components::Parameters&& parameters); + void SetParameters(components::Parameters&& parameters); + + void SetOption(const components::Timeout& timeout); + void SetTimeout(const components::Timeout& timeout); + + void SetOption(const components::Proxies& proxies); + void SetProxies(const components::Proxies& proxies); + void SetOption(components::Proxies&& proxies); + void SetProxies(components::Proxies&& proxies); + + void SetOption(const components::ProxyAuthentication& proxy_auth); + void 
SetProxyAuthentication(const components::ProxyAuthentication& proxy_auth); + void SetOption(components::ProxyAuthentication&& proxy_auth); + void SetProxyAuthentication(components::ProxyAuthentication&& proxy_auth); + + void SetOption(const components::WriteCallback& write); + void SetWriteCallback(const components::WriteCallback& write); + void SetOption(components::WriteCallback&& write); + void SetWriteCallback(components::WriteCallback&& write); + + long status_code = 0; double elapsed = 0.0; + std::string status_line{}, content{}, url_str{}, reason{}; + + // internally-used members... + curl_slist* headers = nullptr; + #if LIBCURL_VERSION_MAJOR < 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 56) + curl_httppost* form = nullptr; + #endif + + #if LIBCURL_VERSION_MAJOR > 7 || (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR >= 56) + curl_mime* mime = nullptr; + #endif + + bool hasBody = false; + std::string parameter_string_, url_, + response_string_, header_string_; + components::Proxies proxies_; + components::ProxyAuthentication proxyAuth_; + components::WriteCallback write_; + }; + + template + liboai::Response Get(_Options&&... options) { + Session session; + set_options(session, std::forward<_Options>(options)...); + return session.Get(); + } + + template + liboai::Response GetWithSession(Session& session, _Options&&... options) { + session.ClearContext(); + set_options(session, std::forward<_Options>(options)...); + return session.Get(); + } + + template + liboai::Response Post(_Options&&... options) { + Session session; + set_options(session, std::forward<_Options>(options)...); + return session.Post(); + } + + template + liboai::Response PostWithSession(Session& session, _Options&&... options) { + session.ClearContext(); + set_options(session, std::forward<_Options>(options)...); + return session.Post(); + } + + template + liboai::Response Delete(_Options&&... 
options) { + Session session; + set_options(session, std::forward<_Options>(options)...); + return session.Delete(); + } + + template + liboai::Response DeleteWithSession(Session& session, _Options&&... options) { + session.ClearContext(); + set_options(session, std::forward<_Options>(options)...); + return session.Delete(); + } + + template + liboai::Response Download(std::ofstream& file, _Options&&... options) { + Session session; + set_options(session, std::forward<_Options>(options)...); + return session.Download(file); + } + + template + liboai::Response DownloadWithSession(Session& session, std::ofstream& file, _Options&&... options) { + session.ClearContext(); + set_options(session, std::forward<_Options>(options)...); + return session.Download(file); + } + + template + void set_options(Session& session, _Options&&... opts) { + (session.SetOption(std::forward<_Options>(opts)), ...); + } + } +} diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/core/network.h b/packages/kbot/cpp/packages/liboai/liboai/include/core/network.h new file mode 100644 index 00000000..4658c183 --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/core/network.h @@ -0,0 +1,264 @@ +#pragma once + +/* + network.h : liboai network implementation. + This header file provides declarations for the abstracted liboai + Network implementation. Each component class will inherit from + this class to make use of the network functionality provided by + it. + + For instance, making a call to liboai::Image::Create(...) will + make use of both this class to send the request to the OpenAI API + and liboai::Authorization to provide the user's authorization + information to successfully complete the request. +*/ + +#include +#include +#include "netimpl.h" + +namespace liboai { + class Network { + public: + /* + @brief Initialise the Network instance to use + the provided API url. + + @param root The URL to direct API calls to. 
+ */ + Network(const std::string &root) noexcept: openai_root_(root) {} + NON_COPYABLE(Network) + NON_MOVABLE(Network) + + /* + @brief Function to download a file at 'from' + to file path 'to.' Useful for downloading + images from the OpenAI API given a URL to + 'from.' + + This function is not to be confused with + liboai::File::download(...) which is used + to download .jsonl files from the OpenAI API. + + @param *to The path and filename to download the file to. + @param *from Where to download the file data from + (such as a URL). + + @returns Bool indicating success or failure. + */ + [[nodiscard]] + static inline bool Download( + const std::string& to, + const std::string& from, + netimpl::components::Header authorization + ) noexcept(false) { + std::ofstream file(to, std::ios::binary); + Response res; + res = netimpl::Download( + file, + netimpl::components::Url{ from }, + std::move(authorization) + ); + + return res.status_code == 200; + } + + [[nodiscard]] + static inline bool DownloadWithSession( + const std::string& to, + const std::string& from, + netimpl::components::Header authorization, + netimpl::Session& session + ) noexcept(false) { + std::ofstream file(to, std::ios::binary); + Response res; + res = netimpl::DownloadWithSession( + session, + file, + netimpl::components::Url{ from }, + std::move(authorization) + ); + + return res.status_code == 200; + } + + /* + @brief Function to asynchronously download a + file at 'from' to file path 'to.' Useful + for downloading images from the OpenAI API + given a URL to 'from.' + + This function is not to be confused with + liboai::File::download(...) which is used + to download .jsonl files from the OpenAI API. + + @param *to The path and filename to download the file to. + @param *from Where to download the file data from + (such as a URL). + + @returns Future bool indicating success or failure. 
+ */ + [[nodiscard]] + static inline std::future DownloadAsync( + const std::string& to, + const std::string& from, + netimpl::components::Header authorization + ) noexcept(false) { + return std::async( + std::launch::async, [&]() -> bool { + std::ofstream file(to, std::ios::binary); + Response res; + res = netimpl::Download( + file, + netimpl::components::Url{ from }, + std::move(authorization) + ); + + return res.status_code == 200; + } + ); + } + + [[nodiscard]] + static inline std::future DownloadAsyncWithSession( + const std::string& to, + const std::string& from, + netimpl::components::Header authorization, + netimpl::Session& session + ) noexcept(false) { + return std::async( + std::launch::async, [&]() -> bool { + std::ofstream file(to, std::ios::binary); + Response res; + res = netimpl::DownloadWithSession( + session, + file, + netimpl::components::Url{ from }, + std::move(authorization) + ); + + return res.status_code == 200; + } + ); + } + + protected: + enum class Method : uint8_t { + HTTP_GET, // GET + HTTP_POST, // POST + HTTP_DELETE // DELETE + }; + + template >...>, int> = 0> + inline Response Request( + const Method& http_method, + const std::string& root, + const std::string& endpoint, + const std::string& content_type, + std::optional headers = std::nullopt, + _Params&&... parameters + ) const { + netimpl::components::Header _headers = { { "Content-Type", content_type } }; + if (headers) { + if (headers.value().size() != 0) { + for (auto& i : headers.value()) { + _headers.insert(std::move(i)); + } + } + } + + Response res; + if constexpr (sizeof...(parameters) > 0) { + res = Network::MethodSchema::_method[static_cast(http_method)]( + netimpl::components::Url { root + endpoint }, + std::move(_headers), + std::forward<_Params>(parameters)... 
+ ); + } + else { + res = Network::MethodSchema::_method[static_cast(http_method)]( + netimpl::components::Url { root + endpoint }, + std::move(_headers) + ); + } + + return res; + } + + + template >...>, int> = 0> + inline Response RequestWithSession( + const Method& http_method, + const std::string& root, + const std::string& endpoint, + const std::string& content_type, + netimpl::Session& session, + std::optional headers = std::nullopt, + _Params&&... parameters + ) const { + netimpl::components::Header _headers = { { "Content-Type", content_type } }; + if (headers) { + if (headers.value().size() != 0) { + for (auto& i : headers.value()) { + _headers.insert(std::move(i)); + } + } + } + + Response res; + if constexpr (sizeof...(parameters) > 0) { + res = Network::MethodSchemaWithSession::_method[static_cast(http_method)]( + session, + netimpl::components::Url { root + endpoint }, + std::move(_headers), + std::forward<_Params>(parameters)... + ); + } + else { + res = Network::MethodSchemaWithSession::_method[static_cast(http_method)]( + session, + netimpl::components::Url { root + endpoint }, + std::move(_headers) + ); + } + + return res; + } + + /* + @brief Function to validate the existence and validity of + a file located at a provided file path. This is used + in functions that take a file path as a parameter + to ensure that the file exists and is valid. 
+ */ + bool Validate(const std::filesystem::path& path) const { + // checks if the file exists, is a regular file, and is not empty + if (std::filesystem::exists(path) && std::filesystem::is_regular_file(path)) { + return std::filesystem::file_size(path) > 0; + } + return false; + } + + const std::string openai_root_; + const std::string azure_root_ = ".openai.azure.com/openai"; + + private: + template struct MethodSchema { + inline static std::function _method[3] = { + netimpl::Get , + netimpl::Post , + netimpl::Delete + }; + }; + + template struct MethodSchemaWithSession { + inline static std::function _method[3] = { + netimpl::GetWithSession , + netimpl::PostWithSession , + netimpl::DeleteWithSession + }; + }; + }; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/core/response.h b/packages/kbot/cpp/packages/liboai/liboai/include/core/response.h new file mode 100644 index 00000000..1ad29a8a --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/core/response.h @@ -0,0 +1,122 @@ +#pragma once + +/* + response.h : liboai response container implementation. + This header file provides declarations for the liboai Response + implementation. Each component class will include this header + and use the Response class to return data to the user. + + For instance, making a call to liboai::Image::Create(...) will + return a liboai::Response object. The user can then check the + object and retrieve the data found in the response as needed. + + This class will construct itself from the output of + liboai::Network::Request(...) (cpr::Response) and parse it + into a usable format for the user to access via this class. 
+*/ + +#if defined(__linux__) || defined(__APPLE__) + #define LIBOAI_EXPORT +#else + #define LIBOAI_EXPORT __declspec(dllexport) +#endif + +#define NON_COPYABLE(Class) Class(const Class&) = delete; Class& operator=(const Class&) = delete; +#define NON_MOVABLE(Class) Class(Class&&) = delete; Class& operator=(Class&&) = delete; + +#include +#include +#include +#include +#include "exception.h" + +namespace liboai { + template struct has_value_type : std::false_type {}; + template struct has_value_type> : std::true_type {}; + template inline constexpr const bool has_value_type_v = has_value_type::value; + + class JsonConstructor final { + public: + JsonConstructor() {} + JsonConstructor(const JsonConstructor& other) noexcept : _json(other._json) {} + JsonConstructor(JsonConstructor&& old) noexcept : _json(std::move(old._json)) {} + + template + void push_back(std::string_view key, const _Ty& value) { + if constexpr (std::is_same_v<_Ty, std::optional>>) { + if (value) { + this->_json[key.data()] = true; + } + } + else if constexpr (std::is_same_v<_Ty, std::function>) { + if (value) { + this->_json[key.data()] = true; + } + } + else { + this->_json[key.data()] = value; + } + } + + template , std::is_same<_Ty, std::optional>>, int> = 0> + void push_back(std::string_view key, _Ty&& value) { + if (value) { + this->_json[key.data()] = std::forward(value.value()); + } + } + + std::string dump() const { + return this->_json.dump(4); + } + + private: + nlohmann::json _json; + }; + + class Response final { + public: + Response() = default; + Response(const liboai::Response& other) noexcept; + Response(liboai::Response&& old) noexcept; + Response( + std::string&& url, + std::string&& content, + std::string&& status_line, + std::string&& reason, + long status_code, + double elapsed + ) noexcept(false); + + Response& operator=(const liboai::Response& other) noexcept; + Response& operator=(liboai::Response&& old) noexcept; + + /* + @brief Transparent operator[] wrapper to 
nlohmann::json to + access the Response object as if it were a json object. + */ + template + nlohmann::json::const_reference operator[](const _Ty& key) const noexcept { + return this->raw_json[key]; + } + + /* + @brief std::ostream operator<< overload to allow for + pretty printing of the Response object. + */ + LIBOAI_EXPORT friend std::ostream& operator<<(std::ostream& os, const Response& r); + + public: + long status_code = 0; double elapsed = 0.0; + std::string status_line{}, content{}, url{}, reason{}; + nlohmann::json raw_json{}; + + private: + /* + @brief Used internally during construction to check the response + for errors and throw exceptions if necessary. + */ + LIBOAI_EXPORT void CheckResponse() const noexcept(false); + }; + using FutureResponse = std::future; +} \ No newline at end of file diff --git a/packages/kbot/cpp/packages/liboai/liboai/include/liboai.h b/packages/kbot/cpp/packages/liboai/liboai/include/liboai.h new file mode 100644 index 00000000..54736bec --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/liboai/include/liboai.h @@ -0,0 +1,146 @@ +#pragma once + +/* + Copyright (c) 2012-2022 Johnny (pseud. Dread) and others + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + liboai.h : main library header. + This header file provides an interface to all component classes + in the library. It is the only header file that needs to be + included in order to use the library. +*/ + +#include "components/audio.h" +#include "components/azure.h" +#include "components/chat.h" +#include "components/completions.h" +#include "components/edits.h" +#include "components/embeddings.h" +#include "components/files.h" +#include "components/fine_tunes.h" +#include "components/images.h" +#include "components/models.h" +#include "components/moderations.h" +#include "components/responses.h" + +namespace liboai { + class OpenAI { + public: + OpenAI(const std::string &root = "https://api.openai.com/v1"): + Audio(std::make_unique(root)), + Azure(std::make_unique(root)), + ChatCompletion(std::make_unique(root)), + Completion(std::make_unique(root)), + Edit(std::make_unique(root)), + Embedding(std::make_unique(root)), + File(std::make_unique(root)), + FineTune(std::make_unique(root)), + Image(std::make_unique(root)), + Model(std::make_unique(root)), + Moderation(std::make_unique(root)), + Responses(std::make_unique(root)) + {} + OpenAI(OpenAI const&) = delete; + OpenAI(OpenAI&&) = delete; + void operator=(OpenAI const&) = delete; + void operator=(OpenAI&&) = delete; + + public: // component interfaces + /* + @brief A pointer to the Audio component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Audio; + + /* + @brief A pointer to the Azure component class that + provides access to its API endpoints. + */ + std::unique_ptr Azure; + + /* + @brief A pointer to the Chat component class that + provides access to its OpenAI API endpoints. 
+ */ + std::unique_ptr ChatCompletion; + + /* + @brief A pointer to the Completions component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Completion; + + /* + @brief A pointer to the Edits component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Edit; + + /* + @brief A pointer to the Embeddings component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Embedding; + + /* + @brief A pointer to the Files component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr File; + + /* + @brief A pointer to the FineTunes component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr FineTune; + + /* + @brief A pointer to the Images component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Image; + + /* + @brief A pointer to the Models component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Model; + + /* + @brief A pointer to the Moderations component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Moderation; + + /* + @brief A pointer to the Responses component class that + provides access to its OpenAI API endpoints. + */ + std::unique_ptr Responses; + + public: + /* + @brief Convenience reference to the Authorization class + singleton used to set authorization information. + */ + Authorization& auth = Authorization::Authorizer(); + }; +} diff --git a/packages/kbot/cpp/packages/liboai/shell.nix b/packages/kbot/cpp/packages/liboai/shell.nix new file mode 100644 index 00000000..2827d99e --- /dev/null +++ b/packages/kbot/cpp/packages/liboai/shell.nix @@ -0,0 +1,14 @@ +{ pkgs ? 
import {} }: pkgs.mkShell { + buildInputs = with pkgs; [ + gcc + cmake + ninja + clang-tools + lldb + + zlib + curl + nlohmann_json + ]; +} + diff --git a/packages/kbot/cpp/src/cmd_kbot.cpp b/packages/kbot/cpp/src/cmd_kbot.cpp index 1e8b3c71..e6637fb6 100644 --- a/packages/kbot/cpp/src/cmd_kbot.cpp +++ b/packages/kbot/cpp/src/cmd_kbot.cpp @@ -2,10 +2,27 @@ #include "logger/logger.h" #include #include +#include #include +#include +#include namespace polymech { +// Helper to reliably extract API keys for any router from postgres.toml [services] section +static std::string get_api_key_for_router(const toml::table& cfg, const std::string& router) { + if (router == "ollama") return "ollama"; + std::string key = router.empty() ? "OPENROUTER" : router; + std::transform(key.begin(), key.end(), key.begin(), [](unsigned char c){ return std::toupper(c); }); + + if (key == "OPENAI") { + auto val = cfg["services"]["OPENAI_KEY"].value_or(std::string("")); + if (!val.empty()) return val; + } + + return cfg["services"][key].value_or(std::string("")); +} + // Global states for CLI mode static kbot::KBotOptions g_kbot_opts; static kbot::KBotRunOptions g_run_opts; @@ -20,6 +37,7 @@ CLI::App* setup_cmd_kbot(CLI::App& app) { ai_cmd = kbot_cmd->add_subcommand("ai", "Run KBot AI tasks"); ai_cmd->add_option("-p,--path", g_kbot_opts.path, "Target directory")->default_val("."); ai_cmd->add_option("--prompt", g_kbot_opts.prompt, "The prompt. 
Supports file paths and vars."); + ai_cmd->add_option("-c,--config", g_kbot_opts.config_path, "Config file for API Keys")->default_val("config/postgres.toml"); ai_cmd->add_option("--output", g_kbot_opts.output, "Optional output path for modified files"); ai_cmd->add_option("--dst", g_kbot_opts.dst, "Optional destination path for the result"); ai_cmd->add_option("--append", g_kbot_opts.append, "How to handle output if --dst exists: concat|merge|replace")->default_val("concat"); @@ -52,6 +70,17 @@ bool is_kbot_ai_parsed() { return ai_cmd != nullptr && ai_cmd->parsed(); } bool is_kbot_run_parsed() { return run_cmd != nullptr && run_cmd->parsed(); } int run_cmd_kbot_ai() { + // Fallback logic if API key isn't explicitly provided on CLI + if (g_kbot_opts.api_key.empty() && !g_kbot_opts.config_path.empty()) { + try { + auto cfg = toml::parse_file(g_kbot_opts.config_path); + g_kbot_opts.api_key = get_api_key_for_router(cfg, g_kbot_opts.router); + logger::debug("Loaded API Key from fallback config: " + g_kbot_opts.config_path); + } catch (const std::exception& e) { + logger::warn("Failed to load generic fallback kbot config: " + std::string(e.what())); + } + } + return kbot::run_kbot_ai_pipeline(g_kbot_opts, kbot::KBotCallbacks{}); } @@ -62,6 +91,7 @@ int run_cmd_kbot_run() { int run_kbot_ai_ipc(const std::string& payload, const std::string& jobId, const kbot::KBotCallbacks& cb) { kbot::KBotOptions opts; opts.job_id = jobId; + opts.config_path = "config/postgres.toml"; // Fixed path for IPC worker // Optional: Parse JSON from payload to overwrite opts variables using rapidjson rapidjson::Document doc; @@ -69,6 +99,14 @@ int run_kbot_ai_ipc(const std::string& payload, const std::string& jobId, const if (!doc.HasParseError() && doc.IsObject()) { if (doc.HasMember("prompt") && doc["prompt"].IsString()) opts.prompt = doc["prompt"].GetString(); if (doc.HasMember("dry_run") && doc["dry_run"].IsBool()) opts.dry_run = doc["dry_run"].GetBool(); + if (doc.HasMember("api_key") && 
doc["api_key"].IsString()) opts.api_key = doc["api_key"].GetString(); + } + + if (opts.api_key.empty()) { + try { + auto cfg = toml::parse_file(opts.config_path); + opts.api_key = get_api_key_for_router(cfg, opts.router); + } catch (...) {} } logger::info("Receiving AI task over IPC... job: " + jobId);