liboai 1/2

This commit is contained in:
lovebird 2026-03-29 22:00:13 +02:00
parent 04e056d395
commit 84a2372afb
181 changed files with 13767 additions and 4 deletions

View File

@ -62,10 +62,26 @@ FetchContent_Declare(
GIT_SHALLOW TRUE
)
FetchContent_Declare(
nlohmann_json
GIT_REPOSITORY https://github.com/nlohmann/json.git
GIT_TAG v3.11.3
GIT_SHALLOW TRUE
)
FetchContent_Declare(
liboai
GIT_REPOSITORY https://github.com/jasonduncan/liboai.git
GIT_TAG main
GIT_SHALLOW TRUE
SOURCE_SUBDIR liboai
)
set(TF_BUILD_TESTS OFF CACHE BOOL "" FORCE)
set(TF_BUILD_EXAMPLES OFF CACHE BOOL "" FORCE)
FetchContent_MakeAvailable(cli11 tomlplusplus Catch2 asio concurrentqueue taskflow)
set(JSON_BuildTests OFF CACHE BOOL "" FORCE)
FetchContent_MakeAvailable(cli11 tomlplusplus Catch2 asio concurrentqueue taskflow nlohmann_json)
# Packages
add_subdirectory(packages/logger)
add_subdirectory(packages/html)
@ -79,6 +95,9 @@ add_subdirectory(packages/gadm_reader)
add_subdirectory(packages/grid)
add_subdirectory(packages/search)
add_subdirectory(packages/enrichers)
add_subdirectory(packages/liboai/liboai)
add_subdirectory(packages/kbot)
# Sources
@ -102,6 +121,11 @@ target_include_directories(${PROJECT_NAME} PRIVATE
)
# Define standalone ASIO (since it's not boost)
if(WIN32)
# Enable math constants like M_PI
add_compile_definitions(_USE_MATH_DEFINES)
add_compile_definitions(NOMINMAX)
endif()
target_compile_definitions(${PROJECT_NAME} PRIVATE ASIO_STANDALONE=1 ASIO_NO_DEPRECATED=1)

Binary file not shown.

View File

@ -0,0 +1,58 @@
MSBuild version 18.4.0+6e61e96ac for .NET Framework
libcurl_object.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\libcurl_object.dir\Debug\libcurl_object.lib
Catch2.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\catch2-build\src\Debug\Catch2d.lib
Catch2WithMain.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\catch2-build\src\Debug\Catch2Maind.lib
lexbor_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\lexbor-build\Debug\lexbor_static.lib
html.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\html\Debug\html.lib
libcurl_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\Debug\libcurl-d.lib
http.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\http\Debug\http.lib
json.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\json\Debug\json.lib
spdlog.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\spdlog-build\Debug\spdlogd.lib
logger.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\logger\Debug\logger.lib
enrichers.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\enrichers\Debug\enrichers.lib
geo.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\geo\Debug\geo.lib
gadm_reader.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\gadm_reader\Debug\gadm_reader.lib
grid.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\grid\Debug\grid.lib
ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\ipc\Debug\ipc.lib
azure.cpp
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
chat.cpp
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
Generating Code...
postgres.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\postgres\Debug\postgres.lib
polymech.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\polymech\Debug\polymech.lib
search.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\search\Debug\search.lib
test_enrichers.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_enrichers.exe
test_functional.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_functional.exe
test_gadm_reader.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_gadm_reader.exe
test_geo.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_geo.exe
test_grid.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_grid.exe
test_gridsearch_ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_gridsearch_ipc.exe
test_html.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_html.exe
test_http.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_http.exe
test_ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_ipc.exe
test_json.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_json.exe
test_logger.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_logger.exe
test_polymech.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_polymech.exe
test_polymech_e2e.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_polymech_e2e.exe
test_postgres.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_postgres.exe
test_postgres_live.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_postgres_live.exe
test_search.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_search.exe
test_supabase.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\dist\test_supabase.exe

Binary file not shown.

View File

@ -0,0 +1,152 @@
CMake is re-running because C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/CMakeFiles/generate.stamp is out-of-date.
the file 'C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/CMakeLists.txt'
is newer than 'C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/CMakeFiles/generate.stamp.depend'
result='-1'
-- Selecting Windows SDK version 10.0.26100.0 to target Windows 10.0.18363.
cmake : CMake Deprecation Warning at
build/release/_deps/cli11-src/CMakeLists.txt:1 (cmake_minimum_required):
At line:1 char:1
+ cmake --build build/release --target polymech-cli > cli_build.txt 2>& ...
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ CategoryInfo : NotSpecified: (CMake Deprecati...imum_required)
::String) [], RemoteException
+ FullyQualifiedErrorId : NativeCommandError
Compatibility with CMake < 3.10 will be removed from a future version of
CMake.
Update the VERSION argument <min> value. Or, use the <min>...<max> syntax
to tell CMake that the project requires at least <min> but has been updated
to work with policies introduced by <max> or earlier.
CMake Deprecation Warning at
build/release/_deps/concurrentqueue-src/CMakeLists.txt:1
(cmake_minimum_required):
Compatibility with CMake < 3.10 will be removed from a future version of
CMake.
Update the VERSION argument <min> value. Or, use the <min>...<max> syntax
to tell CMake that the project requires at least <min> but has been updated
to work with policies introduced by <max> or earlier.
-- CMAKE_ROOT: C:/Program Files/CMake/share/cmake-4.2
-- PROJECT_NAME: Taskflow
-- CMAKE_HOST_SYSTEM: Windows-10.0.18363
-- CMAKE_BUILD_TYPE: Release
-- CMAKE_CXX_COMPILER: C:/Program Files/Microsoft Visual Studio/18/Community/VC/Tools/MSVC/14.50.35717/bin/Hostx64/x64/cl.exe
-- CMAKE_CXX_COMPILER_ID: MSVC
-- CMAKE_CXX_COMPILER_VERSION: 19.50.35726.0
-- CMAKE_CXX_FLAGS: /DWIN32 /D_WINDOWS /EHsc
-- CMAKE_CUDA_COMPILER:
-- CMAKE_CUDA_COMPILER_ID:
-- CMAKE_CUDA_COMPILER_VERSION:
-- CMAKE_CUDA_FLAGS:
-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras
-- CMAKE_CURRENT_SOURCE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src
-- CMAKE_CURRENT_BINARY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-build
-- CMAKE_CURRENT_LIST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src
-- CMAKE_EXE_LINKER_FLAGS: /machine:x64
-- CMAKE_INSTALL_PREFIX: C:/Program Files (x86)/polymech-cli
-- CMAKE_INSTALL_FULL_INCLUDEDIR: C:/Program Files (x86)/polymech-cli/include
-- CMAKE_INSTALL_FULL_LIBDIR: C:/Program Files (x86)/polymech-cli/lib
-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras
-- CMAKE_PREFIX_PATH:
-- TF_BUILD_BENCHMARKS: OFF
-- TF_BUILD_PROFILER:
-- TF_BUILD_CUDA: OFF
-- TF_BUILD_SYCL: OFF
-- TF_BUILD_SYCL_BITCODE:
-- TF_BUILD_TESTS: OFF
-- TF_BUILD_EXAMPLES: OFF
-- TF_UTEST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/unittests
-- TF_EXAMPLE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/examples
-- TF_BENCHMARK_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/benchmarks
-- TF_3RD_PARTY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/3rd-party
-- Using the multi-header code from C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/nlohmann_json-src/include/
-- Build spdlog: 1.15.1
-- Build type: Release
-- Project name: lexbor
-- Build without Threads
-- Lexbor version: 2.4.0
-- Set Windows definitions
-- Append module: core (1.8.0)
-- Append module: css (1.2.0)
-- Append module: dom (1.7.0)
-- Append module: encoding (2.1.0)
-- Append module: html (2.5.0)
-- Append module: ns (1.2.0)
-- Append module: punycode (1.1.0)
-- Append module: selectors (0.3.0)
-- Append module: tag (1.3.0)
-- Append module: unicode (0.2.0)
-- Append module: url (0.2.0)
-- Append module: utils (0.3.0)
-- CFLAGS: /DWIN32 /D_WINDOWS /O2
-- CXXFLAGS: /DWIN32 /D_WINDOWS /EHsc /O2
-- Feature ASAN: disable
-- Feature Fuzzer: disable
-- Using CMake version 4.2.3
-- curl version=[8.12.1]
-- CMake platform flags: WIN32 MSVC-1950
-- Performing Test HAVE_WIN32_WINNT
-- Performing Test HAVE_WIN32_WINNT - Success
-- Found _WIN32_WINNT=0x0a00
-- Could NOT find Libidn2 (missing: LIBIDN2_INCLUDE_DIR LIBIDN2_LIBRARY)
-- Protocols: dict file ftp ftps gopher gophers http https imap imaps ipfs ipns mqtt pop3 pop3s rtsp smb smbs smtp smtps telnet tftp ws wss
-- Features: alt-svc AsynchDNS HSTS HTTPS-proxy IPv6 Kerberos Largefile NTLM SPNEGO SSL SSPI threadsafe UnixSockets
-- Enabled SSL backends: Schannel
CMake Warning (dev) at C:/Program
Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:1963 (message):
Calling FetchContent_Populate(rapidjson) is deprecated, call
FetchContent_MakeAvailable(rapidjson) instead. Policy CMP0169 can be set
to OLD to allow FetchContent_Populate(rapidjson) to be called directly for
now, but the ability to call it with declared details will be removed
completely in a future version.
Call Stack (most recent call first):
packages/json/CMakeLists.txt:17 (FetchContent_Populate)
This warning is for project developers. Use -Wno-dev to suppress it.
-- Configuring done (14.7s)
-- Generating done (1.6s)
-- Build files have been written to: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release
MSBuild version 18.4.0+6e61e96ac for .NET Framework
lexbor_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\lexbor-build\Debug\lexbor_static.lib
html.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\html\Debug\html.lib
libcurl_object.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\libcurl_object.dir\Debug\libcurl_object.lib
libcurl_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\Debug\libcurl-d.lib
http.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\http\Debug\http.lib
json.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\json\Debug\json.lib
spdlog.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\spdlog-build\Debug\spdlogd.lib
logger.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\logger\Debug\logger.lib
enrichers.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\enrichers\Debug\enrichers.lib
geo.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\geo\Debug\geo.lib
gadm_reader.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\gadm_reader\Debug\gadm_reader.lib
grid.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\grid\Debug\grid.lib
ipc.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\ipc\Debug\ipc.lib
azure.cpp
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
chat.cpp
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
Generating Code...
postgres.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\postgres\Debug\postgres.lib
polymech.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\polymech\Debug\polymech.lib
search.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\packages\search\Debug\search.lib

Binary file not shown.

View File

@ -0,0 +1,116 @@
-- Selecting Windows SDK version 10.0.26100.0 to target Windows 10.0.18363.
cmake : CMake Deprecation Warning at
build/release/_deps/cli11-src/CMakeLists.txt:1 (cmake_minimum_required):
At line:1 char:1
+ cmake --preset release > cmake_out.txt 2>&1
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ CategoryInfo : NotSpecified: (CMake Deprecati...imum_required)
::String) [], RemoteException
+ FullyQualifiedErrorId : NativeCommandError
Compatibility with CMake < 3.10 will be removed from a future version of
CMake.
Update the VERSION argument <min> value. Or, use the <min>...<max> syntax
to tell CMake that the project requires at least <min> but has been updated
to work with policies introduced by <max> or earlier.
CMake Deprecation Warning at
build/release/_deps/concurrentqueue-src/CMakeLists.txt:1
(cmake_minimum_required):
Compatibility with CMake < 3.10 will be removed from a future version of
CMake.
Update the VERSION argument <min> value. Or, use the <min>...<max> syntax
to tell CMake that the project requires at least <min> but has been updated
to work with policies introduced by <max> or earlier.
-- CMAKE_ROOT: C:/Program Files/CMake/share/cmake-4.2
-- PROJECT_NAME: Taskflow
-- CMAKE_HOST_SYSTEM: Windows-10.0.18363
-- CMAKE_BUILD_TYPE: Release
-- CMAKE_CXX_COMPILER: C:/Program Files/Microsoft Visual Studio/18/Community/VC/Tools/MSVC/14.50.35717/bin/Hostx64/x64/cl.exe
-- CMAKE_CXX_COMPILER_ID: MSVC
-- CMAKE_CXX_COMPILER_VERSION: 19.50.35726.0
-- CMAKE_CXX_FLAGS: /DWIN32 /D_WINDOWS /EHsc
-- CMAKE_CUDA_COMPILER:
-- CMAKE_CUDA_COMPILER_ID:
-- CMAKE_CUDA_COMPILER_VERSION:
-- CMAKE_CUDA_FLAGS:
-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras
-- CMAKE_CURRENT_SOURCE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src
-- CMAKE_CURRENT_BINARY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-build
-- CMAKE_CURRENT_LIST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src
-- CMAKE_EXE_LINKER_FLAGS: /machine:x64
-- CMAKE_INSTALL_PREFIX: C:/Program Files (x86)/polymech-cli
-- CMAKE_INSTALL_FULL_INCLUDEDIR: C:/Program Files (x86)/polymech-cli/include
-- CMAKE_INSTALL_FULL_LIBDIR: C:/Program Files (x86)/polymech-cli/lib
-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras
-- CMAKE_PREFIX_PATH:
-- TF_BUILD_BENCHMARKS: OFF
-- TF_BUILD_PROFILER:
-- TF_BUILD_CUDA: OFF
-- TF_BUILD_SYCL: OFF
-- TF_BUILD_SYCL_BITCODE:
-- TF_BUILD_TESTS: OFF
-- TF_BUILD_EXAMPLES: OFF
-- TF_UTEST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/unittests
-- TF_EXAMPLE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/examples
-- TF_BENCHMARK_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/benchmarks
-- TF_3RD_PARTY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/3rd-party
-- Using the multi-header code from C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/nlohmann_json-src/include/
MSBuild version 18.4.0+6e61e96ac for .NET Framework
Performing download step (git clone) for 'liboai-populate'
Cloning into 'liboai-src'...
fatal: invalid reference: master
CMake Error at
liboai-subbuild/liboai-populate-prefix/tmp/liboai-populate-gitclone.cmake:61
(message):
Failed to checkout tag: 'master'
C:\Program Files\Microsoft Visual Studio\18\Community\MSBuild\Microsoft\VC\v18
0\Microsoft.CppCommon.targets(254,5): error MSB8066: Custom build for 'C:\User
s\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\libo
ai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-downlo
ad.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\rel
ease\_deps\liboai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai-
populate-update.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\
cpp\build\release\_deps\liboai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3
cfb99\liboai-populate-patch.rule;C:\Users\zx\Desktop\polymech\polymech-mono\pa
ckages\kbot\cpp\build\release\_deps\liboai-subbuild\CMakeFiles\0bc8a27c65ae432
6176df6e15e3cfb99\liboai-populate-configure.rule;C:\Users\zx\Desktop\polymech\
polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-subbuild\CMakeFiles
\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-build.rule;C:\Users\zx\Deskt
op\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-subbuil
d\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-install.rule;C:\
Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\
liboai-subbuild\CMakeFiles\0bc8a27c65ae4326176df6e15e3cfb99\liboai-populate-te
st.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\rel
ease\_deps\liboai-subbuild\CMakeFiles\c45f806c12e81c5a75110db639d0307b\liboai-
populate-complete.rule;C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbo
t\cpp\build\release\_deps\liboai-subbuild\CMakeFiles\949e3a6a5a2e9b16592c5c7a3
65f0bec\liboai-populate.rule' exited with code 1. [C:\Users\zx\Desktop\polymec
h\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-subbuild\liboai-p
opulate.vcxproj]
CMake Error at C:/Program
Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:1928 (message):
Build step for liboai failed: 1
Call Stack (most recent call first):
C:/Program Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:1619
(__FetchContent_populateSubbuild)
C:/Program
Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:2155:EVAL:2
(__FetchContent_doPopulation)
C:/Program Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:2155
(cmake_language)
C:/Program Files/CMake/share/cmake-4.2/Modules/FetchContent.cmake:2394
(__FetchContent_Populate)
CMakeLists.txt:84 (FetchContent_MakeAvailable)
-- Configuring incomplete, errors occurred!

Binary file not shown.

View File

@ -0,0 +1,69 @@
-- Selecting Windows SDK version 10.0.26100.0 to target Windows 10.0.18363.
cmake : CMake Deprecation Warning at
build/release/_deps/cli11-src/CMakeLists.txt:1 (cmake_minimum_required):
At line:1 char:1
+ cmake --preset release > cmake_out2.txt 2>&1 ; Get-Content cmake_out2 ...
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ CategoryInfo : NotSpecified: (CMake Deprecati...imum_required)
::String) [], RemoteException
+ FullyQualifiedErrorId : NativeCommandError
Compatibility with CMake < 3.10 will be removed from a future version of
CMake.
Update the VERSION argument <min> value. Or, use the <min>...<max> syntax
to tell CMake that the project requires at least <min> but has been updated
to work with policies introduced by <max> or earlier.
CMake Deprecation Warning at
build/release/_deps/concurrentqueue-src/CMakeLists.txt:1
(cmake_minimum_required):
Compatibility with CMake < 3.10 will be removed from a future version of
CMake.
Update the VERSION argument <min> value. Or, use the <min>...<max> syntax
to tell CMake that the project requires at least <min> but has been updated
to work with policies introduced by <max> or earlier.
-- CMAKE_ROOT: C:/Program Files/CMake/share/cmake-4.2
-- PROJECT_NAME: Taskflow
-- CMAKE_HOST_SYSTEM: Windows-10.0.18363
-- CMAKE_BUILD_TYPE: Release
-- CMAKE_CXX_COMPILER: C:/Program Files/Microsoft Visual Studio/18/Community/VC/Tools/MSVC/14.50.35717/bin/Hostx64/x64/cl.exe
-- CMAKE_CXX_COMPILER_ID: MSVC
-- CMAKE_CXX_COMPILER_VERSION: 19.50.35726.0
-- CMAKE_CXX_FLAGS: /DWIN32 /D_WINDOWS /EHsc
-- CMAKE_CUDA_COMPILER:
-- CMAKE_CUDA_COMPILER_ID:
-- CMAKE_CUDA_COMPILER_VERSION:
-- CMAKE_CUDA_FLAGS:
-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras
-- CMAKE_CURRENT_SOURCE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src
-- CMAKE_CURRENT_BINARY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-build
-- CMAKE_CURRENT_LIST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src
-- CMAKE_EXE_LINKER_FLAGS: /machine:x64
-- CMAKE_INSTALL_PREFIX: C:/Program Files (x86)/polymech-cli
-- CMAKE_INSTALL_FULL_INCLUDEDIR: C:/Program Files (x86)/polymech-cli/include
-- CMAKE_INSTALL_FULL_LIBDIR: C:/Program Files (x86)/polymech-cli/lib
-- CMAKE_MODULE_PATH: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/catch2-src/extras
-- CMAKE_PREFIX_PATH:
-- TF_BUILD_BENCHMARKS: OFF
-- TF_BUILD_PROFILER:
-- TF_BUILD_CUDA: OFF
-- TF_BUILD_SYCL: OFF
-- TF_BUILD_SYCL_BITCODE:
-- TF_BUILD_TESTS: OFF
-- TF_BUILD_EXAMPLES: OFF
-- TF_UTEST_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/unittests
-- TF_EXAMPLE_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/examples
-- TF_BENCHMARK_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/benchmarks
-- TF_3RD_PARTY_DIR: C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/taskflow-src/3rd-party
-- Using the multi-header code from C:/Users/zx/Desktop/polymech/polymech-mono/packages/kbot/cpp/build/release/_deps/nlohmann_json-src/include/
CMake Error at build/release/_deps/liboai-src/liboai/CMakeLists.txt:64
(message):
CURL not found and CURL::libcurl target does not exist
-- Configuring incomplete, errors occurred!

Binary file not shown.

View File

@ -0,0 +1,25 @@
MSBuild version 18.4.0+6e61e96ac for .NET Framework
libcurl_object.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\libcurl_object.dir\Debug\libcurl_object.lib
libcurl_static.vcxproj -> C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\curl-build\lib\Debug\libcurl-d.lib
azure.cpp
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/azure.cpp')
chat.cpp
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): warning C4003: not enough arguments for function-like macro invocation 'max' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2589: '(': illegal token on right side of '::' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-src\liboai\include\components\chat.h(836,60): error C2059: syntax error: ')' [C:\Users\zx\Desktop\polymech\polymech-mono\packages\kbot\cpp\build\release\_deps\liboai-build\oai.vcxproj]
(compiling source file '../liboai-src/liboai/components/chat.cpp')
Generating Code...

View File

@ -4,6 +4,7 @@ project(kbot CXX)
add_library(kbot STATIC
kbot.cpp
llm_client.cpp
)
target_include_directories(kbot PUBLIC
@ -14,4 +15,5 @@ target_include_directories(kbot PUBLIC
target_link_libraries(kbot PUBLIC
logger
json
oai
)

View File

@ -2,6 +2,7 @@
#include <taskflow/taskflow.hpp>
#include <iostream>
#include "logger/logger.h"
#include "llm_client.h"
namespace polymech {
namespace kbot {
@ -17,9 +18,22 @@ int run_kbot_ai_pipeline(const KBotOptions& opts, const KBotCallbacks& cb) {
tf::Taskflow taskflow;
taskflow.emplace([opts, cb](){
logger::info("Executing kbot ai tasks via Taskflow -> emit events...");
if (cb.onEvent) {
cb.onEvent("ai_progress", "{\"message\":\"Task stub completed\"}");
logger::info("Executing kbot ai completion via LLMClient...");
LLMClient client(opts);
std::string target_prompt = opts.prompt.empty() ? "Respond with 'Hello from KBot C++ AI Pipeline!'" : opts.prompt;
LLMResponse res = client.execute_chat(target_prompt);
if (res.success) {
logger::info("AI Response:\n" + res.text);
if (cb.onEvent) {
cb.onEvent("ai_progress", "{\"message\":\"Task completion received\"}");
}
} else {
logger::error("AI Task Failed: " + res.error);
if (cb.onEvent) {
cb.onEvent("ai_error", "{\"error\":\"Task failed\"}");
}
}
});

View File

@ -0,0 +1,91 @@
#include "llm_client.h"
#include "logger/logger.h"
#include <liboai.h>
#include <iostream>
namespace polymech {
namespace kbot {
// Construct an LLM client from kbot options.
//
// Explicit values in `opts` (base_url, model) always win; otherwise a
// router-specific default is applied, mirroring the mappings in client.ts.
// Unknown routers fall back to the OpenAI API and the "gpt-4o" model.
LLMClient::LLMClient(const KBotOptions& opts)
    : api_key_(opts.api_key), model_(opts.model), router_(opts.router) {
    // Router -> default endpoint (from the client.ts mappings).
    const auto default_base_url = [this]() -> std::string {
        if (router_ == "openrouter")  return "https://openrouter.ai/api/v1";
        if (router_ == "openai")      return ""; // liboai uses the default URL automatically
        if (router_ == "deepseek")    return "https://api.deepseek.com/v1";
        if (router_ == "huggingface") return "https://api-inference.huggingface.co/v1";
        if (router_ == "ollama")      return "http://localhost:11434/v1";
        if (router_ == "fireworks")   return "https://api.fireworks.ai/v1";
        if (router_ == "gemini")      return "https://generativelanguage.googleapis.com/v1beta"; // or gemini openai compat endpt
        if (router_ == "xai")         return "https://api.x.ai/v1";
        return "https://api.openai.com/v1"; // Fallback to openai API
    };
    // Router -> default model (from client.ts).
    const auto default_model = [this]() -> std::string {
        if (router_ == "openrouter")  return "anthropic/claude-sonnet-4";
        if (router_ == "openai")      return "gpt-4o";
        if (router_ == "deepseek")    return "deepseek-chat";
        if (router_ == "huggingface") return "meta-llama/2";
        if (router_ == "ollama")      return "leonard";
        if (router_ == "fireworks")   return "llama-v2-70b-chat";
        if (router_ == "gemini")      return "gemini-1.5-pro";
        if (router_ == "xai")         return "grok-1";
        return "gpt-4o";
    };
    base_url_ = opts.base_url.empty() ? default_base_url() : opts.base_url;
    if (model_.empty()) {
        model_ = default_model();
    }
}
LLMClient::~LLMClient() = default;
// Execute a single-turn chat completion for `prompt`.
//
// Returns an LLMResponse with success == true and the assistant's text on
// success; otherwise success == false and a human-readable `error`.
// Never throws: liboai/network failures are captured into the result.
LLMResponse LLMClient::execute_chat(const std::string& prompt) {
    LLMResponse res;
    // Fail fast with a clear message instead of letting the HTTP call fail.
    if (api_key_.empty()) {
        res.success = false;
        res.error = "API Key is empty.";
        return res;
    }
    oai::OpenAI oai_impl;
    // Use liboai Auth component.
    if (!oai_impl.auth.SetKey(api_key_)) {
        res.success = false;
        res.error = "Failed to set API Key in liboai.";
        return res;
    }
    // Set custom base URL for OpenRouter/DeepSeek.
    oai_impl.auth.SetBaseUrl(base_url_);
    // model_ is normally resolved in the constructor; guard anyway.
    const std::string target_model = model_.empty() ? "gpt-4o" : model_;
    try {
        oai::Response response = oai_impl.ChatCompletion->create(
            target_model,
            {
                {{"role", "user"}, {"content", prompt}}
            }
        );
        res.success = true;
        res.text = response["choices"][0]["message"]["content"].get<std::string>();
    } catch (const std::exception& e) { // catch by const reference (idiom)
        res.success = false;
        res.error = e.what();
    } catch (...) {
        res.success = false;
        res.error = "Unknown error occurred inside LLMClient execute_chat.";
    }
    return res;
}
} // namespace kbot
} // namespace polymech

View File

@ -0,0 +1,32 @@
#pragma once
#include <string>
#include "kbot.h"
namespace polymech {
namespace kbot {
// Result of a single LLM chat-completion call.
struct LLMResponse {
// Assistant text on success; empty otherwise.
std::string text;
// True when the completion round-trip succeeded.
bool success = false;
// Human-readable failure reason when success is false.
std::string error;
};
// Thin synchronous client over an OpenAI-compatible chat endpoint (via liboai).
// Router/base-URL/model defaults are resolved from KBotOptions at construction
// time; see llm_client.cpp for the exact mappings.
class LLMClient {
public:
// Initialize the client with the options (api_key, model, router).
explicit LLMClient(const KBotOptions& opts);
~LLMClient();
// Execute a basic chat completion using the provided prompt.
LLMResponse execute_chat(const std::string& prompt);
private:
std::string api_key_;  // API key passed to liboai auth
std::string model_;    // resolved model id (router default if unset)
std::string router_;   // provider key, e.g. "openrouter", "ollama"
std::string base_url_; // endpoint base; empty means liboai default URL
};
} // namespace kbot
} // namespace polymech

View File

@ -0,0 +1,49 @@
name: Bug report
description: Create a report to help us improve
labels: ["bug"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
- type: textarea
id: what-happened
attributes:
label: Describe the bug
description: A clear and concise description of what the bug is, and any additional context.
placeholder: Tell us what you see!
validations:
required: true
- type: textarea
id: repro-steps
attributes:
label: To Reproduce
description: Steps to reproduce the behavior.
placeholder: |
1. Fetch a '...'
2. Update the '....'
3. See error
validations:
required: true
- type: textarea
id: code-snippets
attributes:
label: Code snippets
description: If applicable, add code snippets to help explain your problem.
render: C++
validations:
required: false
- type: input
id: os
attributes:
label: OS
placeholder: macOS
validations:
required: true
- type: input
id: lib-version
attributes:
label: Library version
placeholder: liboai v1.0.0
validations:
required: true

View File

@ -0,0 +1,20 @@
name: Feature request
description: Suggest an idea for this library
labels: ["feature-request"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this feature request!
- type: textarea
id: feature
attributes:
label: Describe the feature or improvement you're requesting
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: context
attributes:
label: Additional context
description: Add any other context about the feature request here.

View File

@ -0,0 +1,6 @@
.vs
[Bb]uild*
out
TestApp
.cache
/.idea

View File

@ -0,0 +1,24 @@
# AGENTS.md
This repo is a maintained fork of liboai. Our goal is to make it more reliable and feature-complete without breaking existing APIs.
## Core Principles
- Preserve backward compatibility; add features without breaking existing APIs.
- Favor small, composable changes over rewrites.
- Keep the codebase clean and maintainable; document anything user-facing.
- Prioritize stability, correctness, and clear error handling.
## Current Priorities
- Add OpenAI Responses API support for GPT-5.2 and gpt-5.2-pro.
- Keep Chat Completions and other existing components intact.
- Add documentation and examples for new features.
## Workflow
- Update docs whenever you add or change public APIs.
- Use existing patterns and naming conventions in liboai.
- Avoid introducing new dependencies unless justified.
## Notes
- The initial Responses API implementation should accept raw JSON payloads.
- A ResponseInput helper is planned, but not part of the initial implementation.
- Azure Responses support is out of scope for now.

View File

@ -0,0 +1,22 @@
cmake_minimum_required(VERSION 3.21)
project(liboai)
# On Windows, dependencies (zlib, nlohmann-json, curl) are resolved through
# vcpkg; VCPKG_ROOT must point at the vcpkg checkout.
IF(WIN32)
set(VCPKG_CMAKE_PATH $ENV{VCPKG_ROOT}/scripts/buildsystems/vcpkg.cmake CACHE FILEPATH "Location of vcpkg.cmake")
include(${VCPKG_CMAKE_PATH})
find_package(ZLIB REQUIRED)
find_package(nlohmann_json CONFIG REQUIRED)
find_package(CURL REQUIRED)
ENDIF()
# Example programs are opt-in to keep consumer builds lean.
option(BUILD_EXAMPLES "Build example applications" OFF)
# Group targets into IDE folder views (e.g. Visual Studio solution folders).
set_property(GLOBAL PROPERTY USE_FOLDERS ON)
add_subdirectory(liboai)
if(BUILD_EXAMPLES)
add_subdirectory(documentation)
endif()
# Make the library target the default startup project in Visual Studio.
set_property(DIRECTORY PROPERTY VS_STARTUP_PROJECT oai)

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 Dread
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,100 @@
<p align="center">
<img src="/images/_logo.png">
</p>
<hr>
<h1>Introduction</h1>
<p><code>liboai</code> is a simple, <b>unofficial</b> C++17 library for the OpenAI API. It allows developers to access OpenAI endpoints through a simple collection of methods and classes. The library can most effectively be thought of as a <b>spiritual port</b> of OpenAI's Python library, simply called <code>openai</code>, due to its similar structure - with few exceptions.
<h3>Features</h3>
- [x] [ChatGPT](https://github.com/D7EAD/liboai/tree/main/documentation/chat)
- [x] [Responses API](https://platform.openai.com/docs/api-reference/responses/create)
- [X] [Audio](https://github.com/D7EAD/liboai/tree/main/documentation/audio)
- [X] [Azure](https://github.com/D7EAD/liboai/tree/main/documentation/azure)
- [X] [Functions](https://platform.openai.com/docs/api-reference/chat/create)
- [x] [Image DALL·E](https://github.com/D7EAD/liboai/tree/main/documentation/images)
- [x] [Models](https://github.com/D7EAD/liboai/tree/main/documentation/models)
- [x] [Completions](https://github.com/D7EAD/liboai/tree/main/documentation/completions)
- [x] [Edit](https://github.com/D7EAD/liboai/tree/main/documentation/edits)
- [x] [Embeddings](https://github.com/D7EAD/liboai/tree/main/documentation/embeddings)
- [x] [Files](https://github.com/D7EAD/liboai/tree/main/documentation/files)
- [x] [Fine-tunes](https://github.com/D7EAD/liboai/tree/main/documentation/fine-tunes)
- [x] [Moderation](https://github.com/D7EAD/liboai/tree/main/documentation/moderations)
- [X] Asynchronous Support
<h1>Usage</h1>
See below for just how similar in style <code>liboai</code> and its Python alternative are when generating an image using DALL-E.</p>
<details open>
<summary>DALL-E Generation in Python.</summary>
<br>
```py
import openai
import os
openai.api_key = os.getenv("OPENAI_API_KEY")
response = openai.Image.create(
prompt="A snake in the grass!",
n=1,
size="256x256"
)
print(response["data"][0]["url"])
```
</details>
<details open>
<summary>DALL-E Generation in C++.</summary>
<br>
```cpp
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
oai.auth.SetKeyEnv("OPENAI_API_KEY");
Response res = oai.Image->create(
"A snake in the grass!",
1,
"256x256"
);
std::cout << res["data"][0]["url"] << std::endl;
}
```
</details>
<p>Running the above will print out the URL to the resulting generated image, which may or may not look similar to the one found below.</p>
<table>
<tr>
<th>Example Image</th>
</tr>
<tr>
<td>
<img src="/images/snake.png">
</td>
</tr>
</table>
<p><i>Keep in mind the above C++ example is a minimal example and is not an exception-safe snippet. Please see <a href="/documentation">the documentation</a> for more detailed and exception-safe code snippets.</i></p>
<h1>Dependencies</h1>
<p>For the library to work the way it does, it relies on two major dependencies. These dependencies can be found listed below.</p>
- <a href="https://github.com/nlohmann/json">nlohmann-json</a>
- <a href="https://curl.se/">cURL</a>
*If building the library using the provided solution, it is recommended to install these dependencies using <b>vcpkg</b>.*
<h1>Documentation</h1>
<p>For detailed documentation and additional code examples, see the library's documentation <a href="/documentation">here</a>.
<h1>Contributing</h1>
<p>Artificial intelligence is an exciting and quickly-changing field.
If you'd like to partake in further placing the power of AI in the hands of everyday people, please consider contributing by submitting new code and features via a **Pull Request**. If you have any issues using the library, or just want to suggest new features, feel free to contact me directly using the info on my <a href="https://github.com/D7EAD">profile</a> or open an **Issue**.

View File

@ -0,0 +1,25 @@
# liboai Roadmap
This is a living backlog of improvements and ideas as we deepen our use of the library. It is intentionally lightweight and updated as we discover new needs.
## Now
- Responses API support (GPT-5.2, gpt-5.2-pro)
- Keep all existing APIs stable and intact
## Next
- Responses streaming helpers and SSE parsing
- ResponseInput helper to build Responses `input` items
- `output_text` convenience helper for Responses outputs
- Structured outputs helpers for `text.format`
- Tool definition builders for Responses (`tools`, `tool_choice`)
## Later
- More robust testing coverage (unit + integration samples)
- Improved error messaging with request context (safe, no secrets)
- Expanded docs and cookbook-style examples
- Performance pass on JSON construction and streaming
## Observations
- The Conversation class is useful for Chat Completions; Responses lacks an equivalent.
- The library is stable but needs modernization for new OpenAI primitives.
- Maintaining compatibility is critical for existing users.

View File

@ -0,0 +1,28 @@
cmake_minimum_required(VERSION 3.13)
project(documentation)
# add_example(target source): build one example executable, link it against
# the library target, and file it under examples/<project> in IDE folder views.
macro(add_example target_name source_name)
add_executable(${target_name} "${source_name}")
target_link_libraries(${target_name} oai)
set_target_properties(${target_name} PROPERTIES FOLDER "examples/${PROJECT_NAME}")
endmacro()
# add_basic_example(name): shorthand for an example whose source is <name>.cpp.
macro(add_basic_example source_base_name)
add_example(${source_base_name} "${source_base_name}.cpp")
endmacro()
add_subdirectory(audio/examples)
add_subdirectory(authorization/examples)
add_subdirectory(azure/examples)
add_subdirectory(chat/examples)
add_subdirectory(chat/conversation/examples)
add_subdirectory(completions/examples)
add_subdirectory(edits/examples)
add_subdirectory(embeddings/examples)
add_subdirectory(files/examples)
add_subdirectory(fine-tunes/examples)
add_subdirectory(images/examples)
add_subdirectory(models/examples)
add_subdirectory(moderations/examples)
add_subdirectory(responses/examples)

View File

@ -0,0 +1,217 @@
<h1>Documentation</h1>
<p>Both above and below, you can find resources and documentation for each component of the library.</p>
<h3>Basic Usage</h3>
<p>In order to understand how to use each component of the library, it would be ideal to first understand the basic structure of the library as a whole. When using <code>liboai</code> in a project, you <b>should</b> only include one header file, <code>liboai.h</code>. This header provides an interface to all other components of the library such as <code>Images</code>, <code>Completions</code>, etc.
See below for both a correct and incorrect example.</p>
<table>
<tr>
<th>Correct</th>
<th>Incorrect</th>
</tr>
<tr>
<td>
```cpp
#include "liboai.h"
int main() {
...
}
```
</td>
<td>
```cpp
#include "fine_tunes.h"
#include "models.h"
// etc...
int main() {
...
}
```
</td>
</tr>
</table>
<br>
<p>Once we have properly included the necessary header file to use the library--and assuming symbols are linked properly--we can make use of the class in <code>liboai.h</code> to get started. At some point in our source code, we will have to choose when to define a <code>liboai::OpenAI</code> object to access component interfaces. Each component interface stored in this object offers methods associated with it, so, for instance, interface <code>Image</code> will have a method <code>create(...)</code> to generate an image from text. Each non-async method returns a <code>liboai::Response</code> containing response information whereas async methods return a <code>liboai::FutureResponse</code>. However, before we start using these methods, we must first set our authorization information--otherwise it will not work!
<code>liboai::OpenAI</code> also houses another important member, the authorization member, which is used to set authorization information (such as the API key and organization IDs) before we call the API methods. For more information on additional members found in <code>liboai::Authorization</code>, refer to the <a href="./authorization">authorization</a> folder above.
See below for both a correct and incorrect control flow when generating an image.</p>
<table>
<tr>
<th>Correct</th>
<th>Incorrect</th>
</tr>
<tr>
<td>
```cpp
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// Set our API key using an environment variable.
// This is recommended as hard-coding API keys is
// insecure.
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
Response response = oai.Image->create(
"a siamese cat!"
);
}
...
}
```
</td>
<td>
```cpp
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// Failure to set authorization info!
// Will fail, exception will be thrown!
Response response = oai.Image->create(
"a siamese cat!"
);
...
}
```
</td>
</tr>
</table>
<br>
<p>As you can see above, authentication-set related functions return booleans to indicate success and failure, whereas component methods will throw an exception, <code>OpenAIException</code> or <code>OpenAIRateLimited</code>, to indicate their success or failure; these should be checked for accordingly. Below you can find an exception-safe version of the above correct snippet.</p>
<table>
<tr>
<th>Correct, exception-safe</th>
</tr>
<tr>
<td>
```cpp
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.Image->create(
"a siamese cat!"
);
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
...
}
}
```
</td>
</tr>
</table>
<br>
<p>Now, once we have made a call using a component interface, we most certainly want to get the information out of it. To do this, using our knowledge of the format of the API responses, we can extract the information, such as the resulting image's URL, using JSON indexing on the <code>liboai::Response</code> object. See below for an example where we print the generated image's URL.</p>
<table>
<tr>
<th>Accessing JSON Response Data</th>
</tr>
<tr>
<td>
```cpp
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.Image->create(
"a siamese cat!"
);
std::cout << response["data"][0]["url"].get<std::string>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}
```
</td>
</tr>
</table>
<br>
<p>What if we want to do more than just print the URL of the image? Why not download it right when it's done? Thankfully, <code>liboai</code> has a convenient function for that, <code>Network::Download(...)</code> (and <code>Network::DownloadAsync(...)</code>). See below for an example of downloading a freshly generated image.
<table>
<tr>
<th>Downloading a Generated Image</th>
</tr>
<tr>
<td>
```cpp
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.Image->create(
"a siamese cat!"
);
Network::Download(
"C:/some/folder/file.png", // to
response["data"][0]["url"].get<std::string>(), // from
oai.auth.GetAuthorizationHeaders()
);
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}
```
</td>
</tr>
</table>
<br>
<p>After a successful run of the above snippet, the file found at the URL returned from the component call will be downloaded to the path <code>C:/some/folder/file.png</code>.
<br>
<h1>Synopsis</h1>
<p>Each component interface found within <code>liboai::OpenAI</code> follows the same pattern found above. Whether you want to generate images, completions, or fine-tune models, the control flow should follow--or remain similar to--the above examples.
For detailed examples regarding individual component interfaces, refer to the appropriate folder listed above.</p>
<h3>Project Maintenance</h3>
<p>Maintainers can find PR workflow notes in <a href="./maintenance">documentation/maintenance</a>.</p>

View File

@ -0,0 +1,96 @@
<h1>Audio</h1>
<p>The <code>Audio</code> class is defined in <code>audio.h</code> at <code>liboai::Audio</code>, and its interface can ideally be accessed through a <code>liboai::OpenAI</code> object.
This class and its associated <code>liboai::OpenAI</code> interface allow access to the <a href="https://beta.openai.com/docs/api-reference/audio">Audio</a> endpoint of the OpenAI API; this endpoint's functionality can be found below.</p>
- Turn audio to text.
- Turn text to audio.
<br>
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>audio.h</code>. You can find their function signature(s) below.</p>
<h3>Create a Transcription</h3>
<p>Transcribes audio into the input language. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response transcribe(
const std::filesystem::path& file,
const std::string& model,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> response_format = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<std::string> language = std::nullopt
) const & noexcept(false);
```
<h3>Create a Transcription (async)</h3>
<p>Asynchronously transcribes audio into the input language. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse transcribe_async(
const std::filesystem::path& file,
const std::string& model,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> response_format = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<std::string> language = std::nullopt
) const& noexcept(false);
```
<h3>Create a Translation</h3>
<p>Translates audio into English. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response translate(
const std::filesystem::path& file,
const std::string& model,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> response_format = std::nullopt,
std::optional<float> temperature = std::nullopt
) const & noexcept(false);
```
<h3>Create a Translation (async)</h3>
<p>Asynchronously translates audio into English. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse translate_async(
const std::filesystem::path& file,
const std::string& model,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> response_format = std::nullopt,
std::optional<float> temperature = std::nullopt
) const& noexcept(false);
```
<h3>Text to Speech</h3>
<p>Turn text into lifelike spoken audio. Returns a <code>liboai::Response</code> containing response data. The audio data is in the <code>content</code> field of the <code>liboai::Response</code></p>
```cpp
liboai::Response speech(
const std::string& model,
const std::string& voice,
const std::string& input,
std::optional<std::string> response_format = std::nullopt,
std::optional<float> speed = std::nullopt
) const& noexcept(false);
```
<h3>Text to Speech (async)</h3>
<p>Asynchronously turn text into lifelike spoken audio. Returns a <code>liboai::FutureResponse</code> containing response data. The audio data is in the <code>content</code> field of the <code>liboai::Response</code></p>
```cpp
liboai::FutureResponse speech_async(
const std::string& model,
const std::string& voice,
const std::string& input,
std::optional<std::string> response_format = std::nullopt,
std::optional<float> speed = std::nullopt
) const& noexcept(false);
```
<p>All function parameters marked <code>optional</code> are not required and are resolved on OpenAI's end if not supplied.</p>
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.

View File

@ -0,0 +1,10 @@
cmake_minimum_required(VERSION 3.13)
project(audio)
# Audio endpoint examples; add_basic_example is a macro defined by the
# parent documentation/CMakeLists.txt.
add_basic_example(create_speech)
add_basic_example(create_speech_async)
add_basic_example(create_transcription)
add_basic_example(create_transcription_async)
add_basic_example(create_translation)
add_basic_example(create_translation_async)

View File

@ -0,0 +1,24 @@
#include "liboai.h"
using namespace liboai;
int main() {
	OpenAI oai;
	// Authenticate from the environment before touching any endpoint.
	if (!oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
		return 0;
	}
	try {
		// Synthesize speech and persist the raw audio bytes to disk.
		auto res = oai.Audio->speech(
			"tts-1",
			"alloy",
			"Today is a wonderful day to build something people love!"
		);
		std::ofstream out("demo.mp3", std::ios::binary);
		out << res.content;
		out.close();
		std::cout << res.content.size() << std::endl;
	}
	catch (const std::exception& e) {
		std::cout << e.what() << std::endl;
	}
}

View File

@ -0,0 +1,31 @@
#include "liboai.h"
using namespace liboai;
// Example: asynchronous text-to-speech. The request runs in the background
// while the caller is free to do other work before collecting the result.
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
auto fut = oai.Audio->speech_async(
"tts-1",
"alloy",
"Today is a wonderful day to build something people love!"
);
// do other work...
// block until the response is ready
fut.wait();
// get the contained response
auto res = fut.get();
// write the returned audio bytes to disk
std::ofstream ocout("demo.mp3", std::ios::binary);
ocout << res.content;
ocout.close();
// print the size of the audio payload
std::cout << res.content.size() << std::endl;
}
catch (const std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,20 @@
#include "liboai.h"
using namespace liboai;
int main() {
	OpenAI oai;
	// Bail out early when no API key is available in the environment.
	if (!oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
		return 0;
	}
	try {
		// Transcribe a local audio file with Whisper and print the text.
		auto res = oai.Audio->transcribe(
			"C:/some/folder/audio.mp3",
			"whisper-1"
		);
		std::cout << res["text"].get<std::string>() << std::endl;
	}
	catch (const std::exception& e) {
		std::cout << e.what() << std::endl;
	}
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
// Example: asynchronous transcription of a local audio file with Whisper.
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.Audio->transcribe_async(
"C:/some/folder/file.mp3",
"whisper-1"
);
// do other work...
// block until the response is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response["text"].get<std::string>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,20 @@
#include "liboai.h"
using namespace liboai;
// Example: translate a local audio file into English and print the text.
int main() {
	OpenAI oai;
	if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
		try {
			Response res = oai.Audio->translate(
				"C:/some/folder/file.mp3",
				"whisper-1"
			);
			// Extract the plain string so the output is not JSON-quoted,
			// consistent with the other audio examples.
			std::cout << res["text"].get<std::string>() << std::endl;
		}
		catch (const std::exception& e) {
			std::cout << e.what() << std::endl;
		}
	}
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
// Example: asynchronous translation of a local audio file into English.
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.Audio->translate_async(
"C:/some/folder/file.mp3",
"whisper-1"
);
// do other work...
// block until the response is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response["text"].get<std::string>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,177 @@
<h1>Authorization</h1>
<p>The <code>Authorization</code> class is defined in <code>authorization.h</code> at <code>liboai::Authorization</code>. This class is responsible for sharing all set authorization information with all component classes in <code>liboai</code>.
All authorization information should be set prior to the calling of any component methods such as <code>Images</code>, <code>Embeddings</code>, and so on. Failure to do so will result in a <code>liboai::OpenAIException</code> due to authorization failure on OpenAI's end.</p>
<br>
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>authorization.h</code>. You can find their function signature(s) below.</p>
<h3>Get Authorizer</h3>
<p>Returns a reference to the <code>liboai::Authorization</code> singleton shared among all components.</p>
```cpp
static Authorization& Authorizer() noexcept;
```
<h3>Set API Key</h3>
<p>Sets the API key to use in subsequent component calls.</p>
```cpp
bool SetKey(std::string_view key) noexcept;
```
<h3>Set Azure API Key</h3>
<p>Sets the Azure API key to use in subsequent component calls.</p>
```cpp
bool SetAzureKey(std::string_view key) noexcept;
```
<h3>Set Active Directory Azure API Key</h3>
<p>Sets the Active Directory Azure API key to use in subsequent component calls.</p>
```cpp
bool SetAzureKeyAD(std::string_view key) noexcept;
```
<h3>Set API Key (File)</h3>
<p>Sets the API key to use in subsequent component calls from data found in file at path.</p>
```cpp
bool SetKeyFile(const std::filesystem::path& path) noexcept;
```
<h3>Set Azure API Key (File)</h3>
<p>Sets the Azure API key to use in subsequent component calls from data found in file at path.</p>
```cpp
bool SetAzureKeyFile(const std::filesystem::path& path) noexcept;
```
<h3>Set Active Directory Azure API Key (File)</h3>
<p>Sets the Active Directory Azure API key to use in subsequent component calls from data found in file at path.</p>
```cpp
bool SetAzureKeyFileAD(const std::filesystem::path& path) noexcept;
```
<h3>Set API Key (Environment Variable)</h3>
<p>Sets the API key to use in subsequent component calls from an environment variable.</p>
```cpp
bool SetKeyEnv(std::string_view var) noexcept;
```
<h3>Set Azure API Key (Environment Variable)</h3>
<p>Sets the Azure API key to use in subsequent component calls from an environment variable.</p>
```cpp
bool SetAzureKeyEnv(std::string_view var) noexcept;
```
<h3>Set Active Directory Azure API Key (Environment Variable)</h3>
<p>Sets the Active Directory Azure API key to use in subsequent component calls from an environment variable.</p>
```cpp
bool SetAzureKeyEnvAD(std::string_view var) noexcept;
```
<h3>Set Organization ID</h3>
<p>Sets the organization ID to send in subsequent component calls.</p>
```cpp
bool SetOrganization(std::string_view org) noexcept;
```
<h3>Set Organization ID (File)</h3>
<p>Sets the organization ID to send in subsequent component calls from data found in file at path.</p>
```cpp
bool SetOrganizationFile(const std::filesystem::path& path) noexcept;
```
<h3>Set Organization ID (Environment Variable)</h3>
<p>Sets the organization ID to send in subsequent component calls from an environment variable.</p>
```cpp
bool SetOrganizationEnv(std::string_view var) noexcept;
```
<h3>Set Proxies</h3>
<p>Sets the proxy, or proxies, to use in subsequent component calls.</p>
```cpp
void SetProxies(const std::initializer_list<std::pair<const std::string, std::string>>& hosts) noexcept;
void SetProxies(std::initializer_list<std::pair<const std::string, std::string>>&& hosts) noexcept;
void SetProxies(const std::map<std::string, std::string>& hosts) noexcept;
void SetProxies(std::map<std::string, std::string>&& hosts) noexcept;
```
<h3>Set Proxy Authentication</h3>
<p>Sets the username and password to use when using a certain proxy protocol.</p>
```cpp
void SetProxyAuth(const std::map<std::string, netimpl::components::EncodedAuthentication>& proto_up) noexcept;
```
<h3>Set Timeout</h3>
<p>Sets the timeout in milliseconds for the library to use in component calls.</p>
```cpp
void SetMaxTimeout(int32_t ms) noexcept
```
<h3>Get Key</h3>
<p>Returns the currently set API key.</p>
```cpp
constexpr const std::string& GetKey() const noexcept;
```
<h3>Get Organization ID</h3>
<p>Returns the currently set organization ID.</p>
```cpp
constexpr const std::string& GetOrganization() const noexcept;
```
<h3>Get Proxies</h3>
<p>Returns the currently set proxies.</p>
```cpp
netimpl::components::Proxies GetProxies() const noexcept;
```
<h3>Get Proxy Authentication</h3>
<p>Returns the currently set proxy authentication information.</p>
```cpp
netimpl::components::ProxyAuthentication GetProxyAuth() const noexcept;
```
<h3>Get Timeout</h3>
<p>Returns the currently set timeout.</p>
```cpp
netimpl::components::Timeout GetMaxTimeout() const noexcept;
```
<h3>Get Authorization Headers</h3>
<p>Returns the currently set authorization headers based on set information.</p>
```cpp
constexpr const netimpl::components::Header& GetAuthorizationHeaders() const noexcept;
```
<h3>Get Azure Authorization Headers</h3>
<p>Returns the currently set Azure authorization headers based on set information.</p>
```cpp
constexpr const netimpl::components::Header& GetAzureAuthorizationHeaders() const noexcept;
```
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.

View File

@ -0,0 +1,15 @@
cmake_minimum_required(VERSION 3.13)
project(authorization)
# Authorization examples; add_basic_example is a macro defined by the
# parent documentation/CMakeLists.txt.
add_basic_example(set_azure_key)
add_basic_example(set_azure_key_env)
add_basic_example(set_azure_key_file)
add_basic_example(set_key)
add_basic_example(set_key_env_var)
add_basic_example(set_key_file)
add_basic_example(set_organization)
add_basic_example(set_organization_env_var)
add_basic_example(set_organization_file)
add_basic_example(set_proxies)
add_basic_example(set_proxy_auth)

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: set the Azure OpenAI API key from a string literal.
int main() {
    OpenAI oai;
    // Hard-coding a secret in source is NOT recommended; prefer the
    // environment-variable or key-file setters.
    const bool ok = oai.auth.SetAzureKey("hard-coded-key");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: load the Azure OpenAI API key from an environment variable.
int main() {
    OpenAI oai;
    const bool ok = oai.auth.SetAzureKeyEnv("AZURE_API_KEY");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: load the Azure OpenAI API key from a file on disk.
int main() {
    OpenAI oai;
    const bool ok = oai.auth.SetAzureKeyFile("C:/some/folder/key.dat");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: set the OpenAI API key from a string literal.
int main() {
    OpenAI oai;
    // Hard-coding a secret in source is NOT recommended; prefer the
    // environment-variable or key-file setters.
    const bool ok = oai.auth.SetKey("hard-coded-key");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: load the OpenAI API key from an environment variable.
int main() {
    OpenAI oai;
    const bool ok = oai.auth.SetKeyEnv("OPENAI_API_KEY");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: load the OpenAI API key from a file on disk.
int main() {
    OpenAI oai;
    const bool ok = oai.auth.SetKeyFile("C:/some/folder/key.dat");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: set the API key from the environment, then attach an
// organization ID from a string literal.
int main() {
    OpenAI oai;
    // && short-circuits: the organization is only set if the key succeeded.
    const bool ok = oai.auth.SetKeyEnv("OPENAI_API_KEY")
        && oai.auth.SetOrganization("org-123");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: load both the API key and the organization ID from
// environment variables.
int main() {
    OpenAI oai;
    // && short-circuits: the organization is only set if the key succeeded.
    const bool ok = oai.auth.SetKeyEnv("OPENAI_API_KEY")
        && oai.auth.SetOrganizationEnv("OPENAI_ORG_ID");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,10 @@
#include "liboai.h"
using namespace liboai;

// Example: load the API key from the environment and the organization ID
// from a file on disk.
int main() {
    OpenAI oai;
    // && short-circuits: the organization is only set if the key succeeded.
    const bool ok = oai.auth.SetKeyEnv("OPENAI_API_KEY")
        && oai.auth.SetOrganizationFile("C:/some/folder/org.dat");
    if (ok) {
        // ...
    }
}

View File

@ -0,0 +1,21 @@
#include "liboai.h"
using namespace liboai;

// Example: route liboai's HTTP(S) traffic through per-protocol proxies.
int main() {
    OpenAI oai;
    // http traffic  -> fakeproxy1
    // https traffic -> fakeproxy2
    oai.auth.SetProxies({
        { "http",  "http://www.fakeproxy1.com" },
        { "https", "https://www.fakeproxy2.com" }
    });
    if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
        // ...
    }
}

View File

@ -0,0 +1,31 @@
#include "liboai.h"
using namespace liboai;

// Example: route traffic through per-protocol proxies that require
// username/password authentication.
int main() {
    OpenAI oai;
    // http traffic  -> fakeproxy1
    // https traffic -> fakeproxy2
    oai.auth.SetProxies({
        { "http",  "http://www.fakeproxy1.com" },
        { "https", "https://www.fakeproxy2.com" }
    });
    // Credentials are keyed by protocol to match the proxies above:
    // http  -> fakeuser1 / fakepass1
    // https -> fakeuser2 / fakepass2
    oai.auth.SetProxyAuth({
        { "http",  { "fakeuser1", "fakepass1" } },
        { "https", { "fakeuser2", "fakepass2" } },
    });
    if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
        // ...
    }
}

View File

@ -0,0 +1,204 @@
<h1>Azure</h1>
<p>The <code>Azure</code> class is defined in <code>azure.h</code> at <code>liboai::Azure</code>, and its interface can ideally be accessed through a <code>liboai::OpenAI</code> object.
This class and its associated <code>liboai::OpenAI</code> interface allow access to the <a href="https://learn.microsoft.com/en-us/azure/cognitive-services/openai/reference">Azure</a> OpenAI API components.
<br>
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>azure.h</code>. You can find their function signature(s) below.</p>
<h3>Create a Completion</h3>
<p>Given a prompt, the model will return one or more predicted completions. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create_completion(
const std::string& resource_name,
const std::string& deployment_id,
const std::string& api_version,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> suffix = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<uint8_t> logprobs = std::nullopt,
std::optional<bool> echo = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<uint16_t> best_of = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create a Completion (async)</h3>
<p>Given a prompt, the model will asynchronously return one or more predicted completions. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_completion_async(
const std::string& resource_name,
const std::string& deployment_id,
const std::string& api_version,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> suffix = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<uint8_t> logprobs = std::nullopt,
std::optional<bool> echo = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<uint16_t> best_of = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create an Embedding</h3>
<p>Creates an embedding vector representing the input text. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create_embedding(
const std::string& resource_name,
const std::string& deployment_id,
const std::string& api_version,
const std::string& input,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create an Embedding (async)</h3>
<p>Asynchronously creates an embedding vector representing the input text. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_embedding_async(
const std::string& resource_name,
const std::string& deployment_id,
const std::string& api_version,
const std::string& input,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create a Chat Completion</h3>
<p>Creates a completion for the chat message. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create_chat_completion(
const std::string& resource_name,
const std::string& deployment_id,
const std::string& api_version,
const Conversation& conversation,
std::optional<float> temperature = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create a Chat Completion (async)</h3>
<p>Asynchronously creates a completion for the chat message. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_chat_completion_async(
const std::string& resource_name,
const std::string& deployment_id,
const std::string& api_version,
const Conversation& conversation,
std::optional<float> temperature = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Request an Image Generation</h3>
<p>Generate a batch of images from a text caption. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response request_image_generation(
const std::string& resource_name,
const std::string& api_version,
const std::string& prompt,
std::optional<uint8_t> n = std::nullopt,
std::optional<std::string> size = std::nullopt
) const & noexcept(false);
```
<h3>Request an Image Generation (async)</h3>
<p>Asynchronously generate a batch of images from a text caption. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse request_image_generation_async(
const std::string& resource_name,
const std::string& api_version,
const std::string& prompt,
std::optional<uint8_t> n = std::nullopt,
std::optional<std::string> size = std::nullopt
) const & noexcept(false);
```
<h3>Get a Previously Generated Image</h3>
<p>Retrieve the results (URL) of a previously called image generation operation. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response get_generated_image(
const std::string& resource_name,
const std::string& api_version,
const std::string& operation_id
) const & noexcept(false);
```
<h3>Get a Previously Generated Image (async)</h3>
<p>Asynchronously retrieve the results (URL) of a previously called image generation operation. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse get_generated_image_async(
const std::string& resource_name,
const std::string& api_version,
const std::string& operation_id
) const & noexcept(false);
```
<h3>Delete a Previously Generated Image</h3>
<p>Deletes the corresponding image from the Azure server. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response delete_generated_image(
const std::string& resource_name,
const std::string& api_version,
const std::string& operation_id
) const & noexcept(false);
```
<h3>Delete a Previously Generated Image (async)</h3>
<p>Asynchronously deletes the corresponding image from the Azure server. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse delete_generated_image_async(
const std::string& resource_name,
const std::string& api_version,
const std::string& operation_id
) const & noexcept(false);
```
<p>All function parameters marked <code>optional</code> are not required and are resolved on OpenAI's end if not supplied.</p>
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.</p>

View File

@ -0,0 +1,16 @@
# Build file for the liboai Azure example programs.
cmake_minimum_required(VERSION 3.13)
project(azure)
# add_example(<target> <source>) is used where the target name must differ
# from the source file (these sources collide with same-named examples in
# sibling folders); add_basic_example(<name>) builds <name>.cpp directly.
# Both helpers are presumably defined by a parent CMakeLists — confirm there.
add_example(create_chat_completion_azure "create_chat_completion.cpp")
add_example(create_chat_completion_async_azure "create_chat_completion_async.cpp")
add_basic_example(create_completion)
add_basic_example(create_completion_async)
add_example(create_embedding_azure "create_embedding.cpp")
add_example(create_embedding_async_azure "create_embedding_async.cpp")
add_basic_example(delete_generated_image)
add_basic_example(delete_generated_image_async)
add_basic_example(get_generated_image)
add_basic_example(get_generated_image_async)
add_basic_example(request_image_generation)
add_basic_example(request_image_generation_async)

View File

@ -0,0 +1,28 @@
#include "liboai.h"
using namespace liboai;

// Example: run a chat completion against an Azure OpenAI deployment.
int main() {
    OpenAI oai;
    Conversation convo;
    convo.AddUserData("Hi, how are you?");
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        Response res = oai.Azure->create_chat_completion(
            "resource", "deploymentID", "api_version", convo
        );
        convo.Update(res);  // fold the model's reply into the history
        std::cout << convo.GetLastResponse() << std::endl;
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,37 @@
#include "liboai.h"
using namespace liboai;

// Example: asynchronously run a chat completion against an Azure OpenAI
// deployment.
int main() {
    OpenAI oai;
    Conversation convo;
    convo.AddUserData("Hi, how are you?");
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        // Kick off the request; a future is returned immediately.
        auto future = oai.Azure->create_chat_completion_async(
            "resource", "deploymentID", "api_version", convo
        );
        // ... do other work here ...
        future.wait();               // block until the response arrives
        convo.Update(future.get());  // fold the model's reply into the history
        std::cout << convo.GetLastResponse() << std::endl;
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,21 @@
#include "liboai.h"
using namespace liboai;

// Example: request a text completion from an Azure OpenAI deployment.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        Response res = oai.Azure->create_completion(
            "resource", "deploymentID", "api_version",
            "Write a short poem about a snowman."
        );
        // Print just the generated text of the first choice.
        std::cout << res["choices"][0]["text"].get<std::string>() << std::endl;
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,29 @@
#include "liboai.h"
using namespace liboai;

// Example: asynchronously request a text completion from an Azure OpenAI
// deployment.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        auto future = oai.Azure->create_completion_async(
            "resource", "deploymentID", "api_version",
            "Write a short poem about a snowman."
        );
        // ... do other work here ...
        future.wait();  // block until the response arrives
        auto res = future.get();
        // Print just the generated text of the first choice.
        std::cout << res["choices"][0]["text"].get<std::string>() << std::endl;
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,21 @@
#include "liboai.h"
using namespace liboai;

// Example: create an embedding vector via an Azure OpenAI deployment.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        Response res = oai.Azure->create_embedding(
            "resource", "deploymentID", "api_version",
            "String to get embedding for"
        );
        std::cout << res << std::endl;  // dump the raw response
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,27 @@
#include "liboai.h"
using namespace liboai;

// Example: asynchronously create an embedding vector via an Azure OpenAI
// deployment.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        auto future = oai.Azure->create_embedding_async(
            "resource", "deploymentID", "api_version",
            "String to get embedding for"
        );
        // ... do other work here ...
        auto res = future.get();        // blocks until the response arrives
        std::cout << res << std::endl;  // dump the raw response
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,22 @@
#include "liboai.h"
using namespace liboai;

// Example: delete a previously generated image from the Azure server,
// identified by its operation ID.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        Response res = oai.Azure->delete_generated_image(
            "resource", "api_version",
            "f508bcf2-e651-4b4b-85a7-58ad77981ffa"
        );
        std::cout << res << std::endl;  // dump the raw response
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;

// Example: asynchronously delete a previously generated image from the
// Azure server, identified by its operation ID.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        auto future = oai.Azure->delete_generated_image_async(
            "resource", "api_version",
            "f508bcf2-e651-4b4b-85a7-58ad77981ffa"
        );
        // ... do other work here ...
        future.wait();  // block until the response arrives
        auto res = future.get();
        std::cout << res << std::endl;  // dump the raw response
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,22 @@
#include "liboai.h"
using namespace liboai;

// Example: fetch the result (URL) of a previous image-generation
// operation on the Azure server, identified by its operation ID.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        Response res = oai.Azure->get_generated_image(
            "resource", "api_version",
            "f508bcf2-e651-4b4b-85a7-58ad77981ffa"
        );
        std::cout << res << std::endl;  // dump the raw response
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;

// Example: asynchronously fetch the result (URL) of a previous
// image-generation operation on the Azure server.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        auto future = oai.Azure->get_generated_image_async(
            "resource", "api_version",
            "f508bcf2-e651-4b4b-85a7-58ad77981ffa"
        );
        // ... do other work here ...
        future.wait();  // block until the response arrives
        auto res = future.get();
        std::cout << res << std::endl;  // dump the raw response
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,24 @@
#include "liboai.h"
using namespace liboai;

// Example: request generation of one 512x512 image from a text prompt
// via Azure OpenAI.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        Response res = oai.Azure->request_image_generation(
            "resource", "api_version",
            "A snake in the grass!",
            1,          // number of images
            "512x512"   // image size
        );
        // Print the URL of the first generated image.
        std::cout << res["data"][0]["url"].get<std::string>() << std::endl;
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,29 @@
#include "liboai.h"
using namespace liboai;

// Example: asynchronously request generation of one 512x512 image from a
// text prompt via Azure OpenAI.
int main() {
    OpenAI oai;
    if (!oai.auth.SetAzureKeyEnv("AZURE_API_KEY")) {
        return 0;  // no key available; nothing to do
    }
    try {
        auto future = oai.Azure->request_image_generation_async(
            "resource", "api_version",
            "A snake in the grass!",
            1,          // number of images
            "512x512"   // image size
        );
        // ... do other work here ...
        auto res = future.get();  // blocks until the response arrives
        // Print the URL of the first generated image.
        std::cout << res["data"][0]["url"].get<std::string>() << std::endl;
    }
    catch (std::exception& e) {
        std::cout << e.what() << std::endl;
    }
}

View File

@ -0,0 +1,63 @@
<h1>Chat</h1>
<p>The <code>ChatCompletion</code> class is defined in <code>chat.h</code> at <code>liboai::ChatCompletion</code>, and its interface can ideally be accessed through a <code>liboai::OpenAI</code> object.
This class and its associated <code>liboai::OpenAI</code> interface allow access to the <a href="https://beta.openai.com/docs/api-reference/chat">Chat</a> endpoint of the OpenAI API; this endpoint's functionality can be found below.</p>
- Given a chat conversation, the model will return a chat completion response.
> **Note**
>
> Before attempting to use the below methods, it is **highly** recommended
> to read through the documentation, and thoroughly understand the use,
> of the <a href="./conversation">Conversation</a> class as it is used
> in tandem with the `ChatCompletion` methods to keep track of chat
> history and succinctly form a conversation with the OpenAI chat
> endpoint.
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>chat.h</code>. You can find their function signature(s) below.</p>
<h3>Create a Chat Completion</h3>
<p>Creates a completion for the ongoing conversation. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create(
const std::string& model,
const Conversation& conversation,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create a Chat Completion (async)</h3>
<p>Asynchronously creates a completion for the ongoing conversation. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_async(
const std::string& model,
const Conversation& conversation,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const& noexcept(false);
```
<p>All function parameters marked <code>optional</code> are not required and are resolved on OpenAI's end if not supplied.</p>
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.</p>

View File

@ -0,0 +1,409 @@
<h1>Conversation</h1>
<h3>Contents</h3>
<p>You can jump to any content found on this page using the links below.
<ul>
<li><a href="https://github.com/D7EAD/liboai/tree/v2.3.0/documentation/chat/conversation#basic-use">Basic Use</a></li>
<li><a href="https://github.com/D7EAD/liboai/tree/v2.3.0/documentation/chat/conversation#the-use-of-system">The Use of System</a></li>
<li><a href="https://github.com/D7EAD/liboai/tree/v2.3.0/documentation/chat/conversation#usage-pattern">Usage Pattern</a></li>
<li><a href="https://github.com/D7EAD/liboai/tree/v2.3.0/documentation/chat/conversation#synopsis">Synopsis</a></li>
<li><a href="https://github.com/D7EAD/liboai/tree/v2.3.0/documentation/chat/conversation#methods">Methods</a></li>
</ul>
The <code>Conversation</code> class is defined at <code>liboai::Conversation</code>.
This class can most effectively be thought of as a container for any conversation(s) that one may wish to carry out with a given model using the <code>ChatCompletion</code> methods. It keeps track of the history of the conversation for subsequent calls to the methods, allows a developer to set <a href="https://platform.openai.com/docs/guides/chat/instructing-chat-models">system</a> directions, retrieve the last response, add user input, and so on.
<h3>Basic Use</h3>
Each method found in <code>ChatCompletion</code> requires an existing object of class <code>Conversation</code> be provided. Before providing such an object to a method such as <code>liboai::ChatCompletion::create</code>, we must first populate it--perhaps with a question to ask the model we choose, like so:
<table>
<tr>
<th>Creating a Conversation</th>
</tr>
<tr>
<td>
```cpp
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you?");
...
}
```
</td>
</tr>
</table>
Once we add a message to our <code>Conversation</code>, we can then supply it to a method such as <code>liboai::ChatCompletion::create</code> to begin our conversation starting with our user data, like so:
<table>
<tr>
<th>Starting the Conversation</th>
</tr>
<tr>
<td>
```cpp
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
...
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}
```
</td>
</tr>
</table>
Assuming that our request succeeded without throwing an exception, the response to our user data in our <code>Conversation</code> can be found in our <code>Response</code> object. We must now update our <code>Conversation</code> object with the response like so:
<table>
<tr>
<th>Updating our Conversation</th>
</tr>
<tr>
<td>
```cpp
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update our conversation with the response
convo.Update(response);
...
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}
```
</td>
</tr>
</table>
After we update our <code>Conversation</code>, it now contains the original question we asked the model, as well as the response from the model. Now we can extract the response like so:
<table>
<tr>
<th>Printing the Response</th>
</tr>
<tr>
<td>
```cpp
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update our conversation with the response
convo.Update(response);
// print the response
std::cout << convo.GetLastResponse() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}
```
</td>
</tr>
</table>
This may print something along the lines of the following:
* "<i>As an AI language model, I do not have emotions, but I am always responsive and ready to assist. How can I help you today?</i>"
<h3>Usage Pattern</h3>
As you have hopefully noticed, there is a pattern that can be followed with <code>Conversation</code>. Generally, when we want to make use of the methods found within <code>liboai::ChatCompletion</code>, we should adhere to the following series of steps:
<ol>
<li>Create a <code>Conversation</code> object.</li>
<li>Set the user data (or optional, single-time system data as well), which is the user's input such as a question or a command.</li>
<li>Provide the <code>Conversation</code> object to <code>ChatCompletion::create</code> or a similar method.</li>
<li>Update the <code>Conversation</code> object with the response from the API.</li>
<li>Retrieve the chat model's response from the <code>Conversation</code> object.</li>
<li>Repeat steps 2, 3, 4, and 5 until the conversation is complete.</li>
</ol>
<h3>The Use of System</h3>
Other than setting user data in our <code>Conversation</code> objects, we can also set an optional system parameter that instructs the model on how to respond. If we wish to make use of this system parameter, we can do so like so:
<table>
<tr>
<th>Setting System Data to Guide Models</th>
</tr>
<tr>
<td>
```cpp
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// set the system message first - helps guide the model
convo.SetSystemData("You are a helpful bot that only answers questions about OpenAI.");
// add a message to the conversation
convo.AddUserData("Hello, how are you?");
...
}
```
</td>
</tr>
</table>
Keep in mind that it is **highly** important to set the system data before user data. Furthermore, it is important to note that, according to OpenAI, some models (such as gpt-3.5-turbo-0301) do not always pay attention to this system data. As a result, it may be more efficient to set guiding data as user data like so:
<table>
<tr>
<th>Alternate Ways to Guide</th>
</tr>
<tr>
<td>
```cpp
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add guiding data and a message to the conversation
convo.AddUserData("You are a helpful bot that only answers questions about OpenAI: Hello, how are you?");
...
}
```
</td>
</tr>
</table>
<h3>Synopsis</h3>
With the use of <code>Conversation</code> objects, as we carry on a given conversation, our object will keep track of not only the history of the conversation we are having, but its contained context as well. That means that if we were to, at first, ask our model "When was last year's Super Bowl," and then subsequently ask it, "Who played in it," it would be aware of the context of the conversation for the second inquiry and answer accordingly.
<br>
<br>
In general, objects of class <code>liboai::Conversation</code> allow us to more easily engage in conversation with existing and future conversational chat models via the use of <code>liboai::ChatCompletion</code> methods.
<h2>Methods</h2>
Below you can find the function signature(s) of the class methods found within <code>liboai::Conversation</code>.
<h3>Constructors</h3>
<p>Constructors available to construct a <code>Conversation</code> object.</p>
```cpp
Conversation();
Conversation(const Conversation& other);
Conversation(Conversation&& old) noexcept;
Conversation(std::string_view system_data);
Conversation(std::string_view system_data, std::string_view user_data);
Conversation(std::string_view system_data, std::initializer_list<std::string_view> user_data);
Conversation(std::initializer_list<std::string_view> user_data);
explicit Conversation(const std::vector<std::string>& user_data);
```
<h3>Assignment Operators</h3>
<p>Operator overloads for assignment.</p>
```cpp
Conversation& operator=(const Conversation& other);
Conversation& operator=(Conversation&& old) noexcept;
```
<h3>Set System Data</h3>
<p>Sets the system parameter in the conversation that can be used to influence how the model may respond to input. This should always be called before setting user data, if used. Returns a <code>bool</code> indicating success.</p>
```cpp
bool SetSystemData(std::string_view data) & noexcept(false);
```
<h3>Pop System Data</h3>
<p>Removes (pops) the set system data. Returns a <code>bool</code> indicating success.</p>
```cpp
bool PopSystemData() & noexcept(false);
```
<h3>Add User Data</h3>
<p>Adds user input to the conversation, such as a command or question to pose to a model. Returns a <code>bool</code> indicating success.</p>
```cpp
bool AddUserData(std::string_view data) & noexcept(false);
bool AddUserData(std::string_view data, std::string_view name) & noexcept(false);
```
<h3>Pop User Data</h3>
<p>Removes (pops) the most recently added user input to the conversation as long as it is the tail of the conversation. Returns a <code>bool</code> indicating success.</p>
```cpp
bool PopUserData() & noexcept(false);
```
<h3>Get Last Response</h3>
<p>Retrieves the last response from the conversation if one exists. This can be called when the last item in the conversation is an answer from a chat model, such as after the conversation is updated with a successful response from <code>liboai::ChatCompletion::create</code>. Returns a non-empty <code>std::string</code> containing the response from the chat model if one exists, empty otherwise.</p>
```cpp
std::string GetLastResponse() const & noexcept;
```
<h3>Pop Last Response</h3>
<p>Removes (pops) the last response from a chat model within the conversation if the tail of the conversation is a response. This can be called to remove a chat model response from the conversation after updating the conversation with said response. Returns a <code>bool</code> indicating success.</p>
```cpp
bool PopLastResponse() & noexcept(false);
```
<h3>Check if Last Response is Function Call</h3>
<p>Returns whether the most recent response, following a call to <code>Update</code> or a complete <code>AppendStreamData</code>, contains a function_call or not. Returns a boolean indicating if the last response is a function call.</p>
```cpp
bool LastResponseIsFunctionCall() const & noexcept;
```
<h3>Get the Name of the Last Response's Function Call</h3>
<p>Returns the name of the function_call in the most recent response. This should only be called if <code>LastResponseIsFunctionCall()</code> returns true. Returns a <code>std::string</code> containing the name of the last response's function call, empty if non-existent.</p>
```cpp
std::string GetLastFunctionCallName() const & noexcept(false);
```
<h3>Get the Arguments of the Last Response's Function Call</h3>
<p>Returns the arguments of the function_call in the most recent response in their raw JSON form. This should only be called if <code>LastResponseIsFunctionCall()</code> returns true. Returns a <code>std::string</code> containing the name of the last response's arguments in JSON form, empty if non-existent.</p>
```cpp
std::string GetLastFunctionCallArguments() const & noexcept(false);
```
<h3>Update Conversation</h3>
<p>Updates the conversation given a Response object. This method updates the conversation given a Response object. This method should only be used if <code>AppendStreamData</code> was NOT used immediately before it.
For instance, if we made a call to <code>create*()</code>, and provided a callback function to stream and, within this callback, we used <code>AppendStreamData</code> to update the conversation per message, we would NOT want to use this method. In this scenario, the <code>AppendStreamData</code> method would have already updated the conversation, so this method would be a bad idea to call afterwards. Returns a <code>bool</code> indicating success.</p>
```cpp
bool Update(std::string_view history) & noexcept(false);
bool Update(const Response& response) & noexcept(false);
```
<h3>Export Conversation</h3>
<p>Exports the entire conversation to a JSON string. This method exports the conversation to a JSON string. The JSON string can be used to save the conversation to a file. The exported string contains both the conversation and included functions, if any. Returns the JSON string representing the conversation.</p>
```cpp
std::string Export() const & noexcept(false);
```
<h3>Import Conversation</h3>
<p>Imports a conversation from a JSON string. This method imports a conversation from a JSON string. The JSON string should be the JSON string returned from a call to <code>Export()</code>. Returns a boolean indicating success.</p>
```cpp
bool Import(std::string_view json) & noexcept(false);
```
<h3>Append Stream Data</h3>
<p>Appends stream data (SSEs) from streamed methods. This method updates the conversation given a token from a streamed method. This method should be used when using streamed methods such as <code>ChatCompletion::create</code> or <code>create_async</code> with a callback supplied. This function should be called from within the stream's callback function receiving the SSEs. Returns a boolean indicating data appending success.</p>
```cpp
bool AppendStreamData(std::string data) & noexcept(false);
```
<h3>Set Function(s)</h3>
<p>Sets the functions to be used for the conversation. This method sets the functions to be used for the conversation. Returns a boolean indicating success.</p>
```cpp
bool SetFunctions(Functions functions) & noexcept(false);
```
<h3>Pop Function(s)</h3>
<p>Pops any previously set functions.</p>
```cpp
void PopFunctions() & noexcept(false);
```
<h3>Get Raw JSON Conversation</h3>
<p>Retrieves the raw JSON of the conversation; the same functionality can be achieved using the <code>operator<<(...)</code> overload. Returns a <code>std::string</code> containing the JSON of the conversation.</p>
```cpp
std::string GetRawConversation() const & noexcept;
```
<h3>Get Raw JSON Functions</h3>
<p>Returns the raw JSON dump of the internal functions object in string format - if one exists.</p>
```cpp
std::string GetRawFunctions() const & noexcept;
```
<h3>Get Functions JSON Object</h3>
<p>Returns the JSON object of the set functions.</p>
```cpp
const nlohmann::json& GetFunctionsJSON() const & noexcept;
```
<h3>Get Internal JSON</h3>
<p>Retrieves a <code>const</code>-ref of the internal JSON object containing the conversation. Returns a <code>const nlohmann::json&</code> object.</p>
```cpp
const nlohmann::json& GetJSON() const & noexcept;
```
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder here and in the previous directory.</p>

View File

@ -0,0 +1,13 @@
cmake_minimum_required(VERSION 3.13)
project(conversation)
add_basic_example(adduserdata)
add_basic_example(getjsonobject)
add_basic_example(getlastresponse)
add_basic_example(getrawconversation)
add_basic_example(poplastresponse)
add_basic_example(popsystemdata)
add_basic_example(popuserdata)
add_basic_example(setsystemdata)
add_basic_example(update)

View File

@ -0,0 +1,15 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add user data - such as a question
convo.AddUserData("What is the meaning of life?");
// ...
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you? What time is it for you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update the conversation with the response
convo.Update(response);
// get the internal conversation JSON object
nlohmann::json json = convo.GetJSON();
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you? What time is it for you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update the conversation with the response
convo.Update(response);
// print the conversation
std::cout << convo.GetLastResponse() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you? What time is it for you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update the conversation with the response
convo.Update(response);
// print the raw JSON conversation string
std::cout << convo.GetRawConversation() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,33 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you? What time is it for you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update the conversation with the response
convo.Update(response);
// print the conversation
std::cout << convo.GetLastResponse() << std::endl;
// pop (remove) the last response from the conversation
convo.PopLastResponse();
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,21 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// set system message to guide the chat model
convo.SetSystemData("You are helpful bot.");
// remove the set system message
convo.PopSystemData();
// add a different system message
convo.SetSystemData("You are a helpful bot that enjoys business.");
// ...
}

View File

@ -0,0 +1,21 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add user data - such as a question
convo.AddUserData("What is the meaning of life?");
// pop (remove) the above added user data
convo.PopUserData();
// add different user data
convo.AddUserData("What is the size of the universe?");
// ...
}

View File

@ -0,0 +1,15 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// set system message to guide the chat model
convo.SetSystemData("You are helpful bot.");
// ...
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("Hello, how are you? What time is it for you?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update the conversation with the response
convo.Update(response);
// print the conversation
std::cout << convo.GetLastResponse() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,7 @@
cmake_minimum_required(VERSION 3.13)
project(chat)
add_basic_example(create_chat_completion)
add_basic_example(create_chat_completion_async)
add_basic_example(ongoing_user_convo)

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("What is the point of taxes?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update our conversation with the response
convo.Update(response);
// print the response
std::cout << convo.GetLastResponse() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,38 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// add a message to the conversation
convo.AddUserData("What is the point of taxes?");
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
auto fut = oai.ChatCompletion->create_async(
"gpt-3.5-turbo", convo
);
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// update our conversation with the response
convo.Update(response);
// print the response
std::cout << convo.GetLastResponse() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,39 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
// create a conversation
Conversation convo;
// holds next user input
std::string input;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
while (true) {
// get next user input
std::cout << "You: "; std::getline(std::cin, input);
// add user input to conversation
convo.AddUserData(input);
// get response from OpenAI
Response response = oai.ChatCompletion->create(
"gpt-3.5-turbo", convo
);
// update our conversation with the response
convo.Update(response);
// print the response
std::cout << "Bot: " << convo.GetLastResponse() << std::endl;
}
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,63 @@
<h1>Completions</h1>
<p>The <code>Completions</code> class is defined in <code>completions.h</code> at <code>liboai::Completions</code>, and its interface can ideally be accessed through a <code>liboai::OpenAI</code> object.
This class and its associated <code>liboai::OpenAI</code> interface allow access to the <a href="https://beta.openai.com/docs/api-reference/completions">Completions</a> endpoint of the OpenAI API; this endpoint's functionality can be found below.</p>
- Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.
<br>
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>completions.h</code>. You can find their function signature(s) below.</p>
<h3>Create a Completion</h3>
<p>Creates a completion for the provided prompt and parameters. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create(
const std::string& model_id,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> suffix = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<uint8_t> logprobs = std::nullopt,
std::optional<bool> echo = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<uint16_t> best_of = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create a Completion (async)</h3>
<p>Asynchronously creates a completion for the provided prompt and parameters. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_async(
const std::string& model_id,
std::optional<std::string> prompt = std::nullopt,
std::optional<std::string> suffix = std::nullopt,
std::optional<uint16_t> max_tokens = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<std::function<bool(std::string, intptr_t)>> stream = std::nullopt,
std::optional<uint8_t> logprobs = std::nullopt,
std::optional<bool> echo = std::nullopt,
std::optional<std::vector<std::string>> stop = std::nullopt,
std::optional<float> presence_penalty = std::nullopt,
std::optional<float> frequency_penalty = std::nullopt,
std::optional<uint16_t> best_of = std::nullopt,
std::optional<std::unordered_map<std::string, int8_t>> logit_bias = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<p>All function parameters marked <code>optional</code> are not required and are resolved on OpenAI's end if not supplied.</p>
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.</p>

View File

@ -0,0 +1,6 @@
cmake_minimum_required(VERSION 3.13)
project(completions)
add_basic_example(generate_completion)
add_basic_example(generate_completion_async)

View File

@ -0,0 +1,21 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.Completion->create(
"text-davinci-003",
"Say this is a test",
std::nullopt,
7
);
std::cout << response["choices"][0]["text"].get<std::string>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,32 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.Completion->create_async(
"text-davinci-003",
"Say this is a test",
std::nullopt,
7
);
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response["choices"][0]["text"].get<std::string>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,43 @@
<h1>Edits</h1>
<p>The <code>Edits</code> class is defined in <code>edits.h</code> at <code>liboai::Edits</code>, and its interface can ideally be accessed through a <code>liboai::OpenAI</code> object.
This class and its associated <code>liboai::OpenAI</code> interface allow access to the <a href="https://beta.openai.com/docs/api-reference/edits">Edits</a> endpoint of the OpenAI API; this endpoint's functionality can be found below.</p>
- Given a prompt and an instruction, the model will return an edited version of the prompt.
<br>
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>edits.h</code>. You can find their function signature(s) below.</p>
<h3>Create an Edit</h3>
<p>Creates a new edit for the provided input, instruction, and parameters. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create(
const std::string& model_id,
std::optional<std::string> input = std::nullopt,
std::optional<std::string> instruction = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt
) const & noexcept(false);
```
<h3>Create an Edit (async)</h3>
<p>Asynchronously creates a new edit for the provided input, instruction, and parameters. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_async(
const std::string& model_id,
std::optional<std::string> input = std::nullopt,
std::optional<std::string> instruction = std::nullopt,
std::optional<uint16_t> n = std::nullopt,
std::optional<float> temperature = std::nullopt,
std::optional<float> top_p = std::nullopt
) const & noexcept(false);
```
<p>All function parameters marked <code>optional</code> are not required and are resolved on OpenAI's end if not supplied.</p>
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.</p>

View File

@ -0,0 +1,6 @@
cmake_minimum_required(VERSION 3.13)
project(edits)
add_basic_example(create_edit)
add_basic_example(create_edit_async)

View File

@ -0,0 +1,20 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.Edit->create(
"text-davinci-edit-001",
"What day of the wek is it?",
"Fix the spelling mistakes"
);
std::cout << response["choices"][0]["text"].get<std::string>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,31 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.Edit->create_async(
"text-davinci-edit-001",
"What day of the wek is it?",
"Fix the spelling mistakes"
);
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response["choices"][0]["text"].get<std::string>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,37 @@
<h1>Embeddings</h1>
<p>The <code>Embeddings</code> class is defined in <code>embeddings.h</code> at <code>liboai::Embeddings</code>, and its interface can ideally be accessed through a <code>liboai::OpenAI</code> object.
This class and its associated <code>liboai::OpenAI</code> interface allow access to the <a href="https://beta.openai.com/docs/api-reference/embeddings">Embeddings</a> endpoint of the OpenAI API; this endpoint's functionality can be found below.</p>
- Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.
<br>
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>embeddings.h</code>. You can find their function signature(s) below.</p>
<h3>Create an Embedding</h3>
<p>Creates an embedding vector representing the input text. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create(
const std::string& model_id,
std::optional<std::string> input = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<h3>Create an Embedding (async)</h3>
<p>Asynchronously creates an embedding vector representing the input text. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_async(
const std::string& model_id,
std::optional<std::string> input = std::nullopt,
std::optional<std::string> user = std::nullopt
) const & noexcept(false);
```
<p>All function parameters marked <code>optional</code> are not required and are resolved on OpenAI's end if not supplied.</p>
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.</p>

View File

@ -0,0 +1,6 @@
cmake_minimum_required(VERSION 3.13)
project(embeddings)
add_basic_example(create_embedding)
add_basic_example(create_embedding_async)

View File

@ -0,0 +1,19 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.Embedding->create(
"text-embedding-ada-002",
"The food was delicious and the waiter..."
);
std::cout << response["data"][0]["embedding"] << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.Embedding->create_async(
"text-embedding-ada-002",
"The food was delicious and the waiter..."
);
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response["data"][0]["embedding"] << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,103 @@
<h1>Files</h1>
<p>The <code>Files</code> class is defined in <code>files.h</code> at <code>liboai::Files</code>, and its interface can ideally be accessed through a <code>liboai::OpenAI</code> object.
This class and its associated <code>liboai::OpenAI</code> interface allow access to the <a href="https://beta.openai.com/docs/api-reference/files">Files</a> endpoint of the OpenAI API; this endpoint's functionality can be found below.</p>
- Files are used to upload documents that can be used with features like Fine-tuning.
<br>
<h2>Methods</h2>
<p>This document covers the method(s) located in <code>files.h</code>. You can find their function signature(s) below.</p>
<h3>List Files</h3>
<p>Gets a list of files that belong to the user's organization. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response list() const & noexcept(false);
```
<h3>List Files (async)</h3>
<p>Asynchronously gets a list of files that belong to the user's organization. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse list_async() const & noexcept(false);
```
<h3>Upload File</h3>
<p>Upload a file that contains document(s) to be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response create(
const std::filesystem::path& file,
const std::string& purpose
) const & noexcept(false);
```
<h3>Upload File (async)</h3>
<p>Asynchronously upload a file that contains document(s) to be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse create_async(
const std::filesystem::path& file,
const std::string& purpose
) const & noexcept(false);
```
<h3>Delete a File</h3>
<p>Deletes a file. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response remove(
const std::string& file_id
) const & noexcept(false);
```
<h3>Delete a File (async)</h3>
<p>Asynchronously deletes a file. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse remove_async(
const std::string& file_id
) const & noexcept(false);
```
<h3>Retrieve File</h3>
<p>Returns information about a specific file. Returns a <code>liboai::Response</code> containing response data.</p>
```cpp
liboai::Response retrieve(
const std::string& file_id
) const & noexcept(false);
```
<h3>Retrieve File (async)</h3>
<p>Asynchronously returns information about a specific file. Returns a <code>liboai::FutureResponse</code> containing future response data.</p>
```cpp
liboai::FutureResponse retrieve_async(
const std::string& file_id
) const & noexcept(false);
```
<h3>Retrieve File Content (Download)</h3>
<p>Returns the contents of the specified file and downloads it to the provided path. Returns a <code>bool</code> indicating failure or success.</p>
```cpp
bool download(
const std::string& file_id,
const std::string& save_to
) const & noexcept(false);
```
<h3>Retrieve File Content (Download) (async)</h3>
<p>Asynchronously returns the contents of the specified file and downloads it to the provided path. Returns a future <code>bool</code> indicating failure or success.</p>
```cpp
std::future<bool> download_async(
const std::string& file_id,
const std::string& save_to
) const & noexcept(false);
```
<br>
<h2>Example Usage</h2>
<p>For example usage of the above function(s), please refer to the <a href="./examples">examples</a> folder.</p>

View File

@ -0,0 +1,14 @@
cmake_minimum_required(VERSION 3.13)
project(files)
add_basic_example(delete_file)
add_basic_example(delete_file_async)
add_basic_example(download_uploaded_file)
add_basic_example(download_uploaded_file_async)
add_basic_example(list_files)
add_basic_example(list_files_async)
add_basic_example(retrieve_file)
add_basic_example(retrieve_file_async)
add_basic_example(upload_file)
add_basic_example(upload_file_async)

View File

@ -0,0 +1,18 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.File->remove(
"file-XjGxS3KTG0uNmNOK362iJua3"
);
std::cout << response["deleted"].get<bool>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,29 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.File->remove_async(
"file-XjGxS3KTG0uNmNOK362iJua3"
);
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response["deleted"].get<bool>() << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,20 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
if (oai.File->download("file-XjGxS3KTG0uNmNOK362iJua3", "C:/some/folder/file.jsonl")) {
std::cout << "File downloaded successfully!" << std::endl;
}
else {
std::cout << "File download failed!" << std::endl;
}
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,31 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.File->download_async(
"file-XjGxS3KTG0uNmNOK362iJua3", "C:/some/folder/file.jsonl"
);
// do other work...
// check if the future is ready
fut.wait();
// check if downloaded successfully
if (fut.get()) {
std::cout << "File downloaded successfully!" << std::endl;
}
else {
std::cout << "File download failed!" << std::endl;
}
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,16 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.File->list();
std::cout << response["data"] << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,27 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.File->list_async();
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response["data"] << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,18 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.File->retrieve(
"file-XjGxS3KTG0uNmNOK362iJua3"
);
std::cout << response << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,29 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.File->retrieve_async(
"file-XjGxS3KTG0uNmNOK362iJua3"
);
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,19 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
Response response = oai.File->create(
"C:/some/folder/file.jsonl",
"fine-tune"
);
std::cout << response << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

View File

@ -0,0 +1,30 @@
#include "liboai.h"
using namespace liboai;
int main() {
OpenAI oai;
if (oai.auth.SetKeyEnv("OPENAI_API_KEY")) {
try {
// call async method; returns a future
auto fut = oai.File->create_async(
"C:/some/folder/file.jsonl",
"fine-tune"
);
// do other work...
// check if the future is ready
fut.wait();
// get the contained response
auto response = fut.get();
// print some response data
std::cout << response << std::endl;
}
catch (std::exception& e) {
std::cout << e.what() << std::endl;
}
}
}

Some files were not shown because too many files have changed in this diff Show More