From 2f20e7e68fd3084feb5d6aa9a2ad490bf8fbe789 Mon Sep 17 00:00:00 2001
From: fantasy-peak <1356346239@qq.com>
Date: Fri, 4 Aug 2023 13:43:07 +0900
Subject: [PATCH] Add Dockerfile

---
 .github/workflows/ubuntu.yaml | 37 ++++++++++++++------
 Dockerfile                    | 18 ++++++++++
 README.md                     | 16 ++++++++-
 include/cfg.h                 |  6 ++--
 src/free_gpt.cpp              | 63 +++++++++++++++++------------------
 src/main.cpp                  |  7 ++--
 xmake.lua                     |  6 ++--
 7 files changed, 98 insertions(+), 55 deletions(-)
 create mode 100644 Dockerfile

diff --git a/.github/workflows/ubuntu.yaml b/.github/workflows/ubuntu.yaml
index 1e1b7de..bb78206 100644
--- a/.github/workflows/ubuntu.yaml
+++ b/.github/workflows/ubuntu.yaml
@@ -1,6 +1,10 @@
 name: Linux

-on: [push, pull_request]
+on:
+  push:
+    branches: [ "main", "dev" ]
+  pull_request:
+    branches: [ "main" ]

 jobs:
   build:
@@ -11,18 +15,15 @@ jobs:
         os: [ubuntu-22.04]

     steps:
-      # - uses: actions/checkout@v1
-      # - uses: xmake-io/github-action-setup-xmake@v1
-      #   with:
-      #     xmake-version: branch@master
-      #     actions-cache-folder: '.xmake-cache'
       - name: Installation
         run: |
           sudo apt-get update
-          sudo apt-get install -y libgl1-mesa-dev libglu1-mesa-dev p7zip gobjc gcc g++ wget
-          g++ -v
+          sudo apt-get install -y libgl1-mesa-dev libglu1-mesa-dev p7zip gobjc g++-13 wget
+          g++-13 -v
+          export CXX=g++-13
+          export CC=gcc-13
           wget https://github.com/xmake-io/xmake/releases/download/v2.8.1/xmake-v2.8.1.xz.run
           chmod 777 xmake-v2.8.1.xz.run
           ./xmake-v2.8.1.xz.run
           export XMAKE_ROOT="y"
           source ~/.xmake/profile
@@ -33,7 +34,21 @@ jobs:
         run: |
           export XMAKE_ROOT="y"
           source ~/.xmake/profile
+          g++-13 -v
+          export CXX=g++-13
+          export CC=gcc-13
           xmake build -y
           xmake install -o .
-          cd bin
-          ldd cpp-freegpt-webui
+          ldd ./bin/cpp-freegpt-webui
+      - name: Docker login
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+      - name: Build the Docker image
+        run: |
+          pwd
+          ls
+          docker build . -t ${{ secrets.DOCKERHUB_USERNAME }}/freegpt:latest
+      - name: Docker image push
+        run: docker push ${{ secrets.DOCKERHUB_USERNAME }}/freegpt:latest
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..5f9e238
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,18 @@
+FROM ubuntu:23.04
+
+# RUN apt-get update -y
+# RUN apt-get install -y g++-13
+# RUN strings /lib/x86_64-linux-gnu/libstdc++.so.6 | grep GLIBCXX_3.4
+
+WORKDIR /app
+
+ADD bin /app/bin
+ADD cfg /app/cfg
+ADD client /app/client
+
+RUN ls /app/bin
+RUN ls /app/cfg
+
+WORKDIR /app/bin
+
+ENTRYPOINT ["sh", "-c", "./cpp-freegpt-webui ../cfg/cpp-free-gpt.yml"]
diff --git a/README.md b/README.md
index c87397b..7f9da69 100644
--- a/README.md
+++ b/README.md
@@ -30,8 +30,22 @@ cd bin

 Access the application in your browser using the URL:
 ```
-http://127.0.0.1:8085/chat
+http://127.0.0.1:8858/chat
 ```
+
+### Running with Docker
+Pull the Docker image from Docker Hub:
+```
+docker pull fantasypeak/freegpt:latest
+```
+
+Run the application using Docker:
+```
+docker run --net=host -it --name freegpt fantasypeak/freegpt:latest
+# OR
+docker run -p 8858:8858 -it --name freegpt fantasypeak/freegpt:latest
+```
+
 ### WebUI
 The application interface was incorporated from the [chatgpt-clone](https://github.com/xtekky/chatgpt-clone) repository.
diff --git a/include/cfg.h b/include/cfg.h
index 1ffe968..b851d8d 100644
--- a/include/cfg.h
+++ b/include/cfg.h
@@ -6,11 +6,9 @@ struct Config {
     std::string client_root_path;
     std::size_t interval{300};
     std::size_t work_thread_num{8};
-    std::size_t max_http_client_num{2};
-    std::string host{"127.0.0.1"};
+    std::string host{"0.0.0.0"};
     std::string port{"8858"};
     std::string chat_path{"/chat"};
     std::vector<std::string> providers;
 };
-YCS_ADD_STRUCT(Config, client_root_path, interval, work_thread_num, max_http_client_num, host, port, chat_path,
-               providers)
+YCS_ADD_STRUCT(Config, client_root_path, interval, work_thread_num, host, port, chat_path, providers)
diff --git a/src/free_gpt.cpp b/src/free_gpt.cpp
index e73584b..8ae69f0 100644
--- a/src/free_gpt.cpp
+++ b/src/free_gpt.cpp
@@ -1,10 +1,11 @@
 #include
+#include <expected>
+#include <format>
 #include
 #include
 #include
 #include
-#include <fmt/chrono.h>
 #include
 #include
 #include
@@ -13,7 +14,6 @@
 #include
 #include
 #include
-#include <tl/expected.hpp>

 #include "free_gpt.h"
@@ -27,18 +27,15 @@
 template <typename Container>
 struct to_helper {};

 template <std::ranges::range R, typename Container>
-    requires std::convertible_to<std::ranges::range_value_t<R>, typename Container::value_type>
-Container operator|(R&& r, to_helper<Container>) {
+requires std::convertible_to < std::ranges::range_value_t<R>,
+typename Container::value_type > Container operator|(R&& r, to_helper<Container>) {
     return Container{r.begin(), r.end()};
 }

 }  // namespace detail

 template <typename Container>
-    requires(!std::ranges::view<Container>)
-inline auto to() {
-    return detail::to_helper<Container>{};
-}
+requires(!std::ranges::view<Container>) inline auto to() { return detail::to_helper<Container>{}; }

 std::string md5(const std::string& str, bool reverse = true) {
     unsigned char hash[MD5_DIGEST_LENGTH];
@@ -57,19 +54,19 @@ std::string md5(const std::string& str, bool reverse = true) {
     return md5_str;
 }

-boost::asio::awaitable<tl::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>> createHttpClient(
-    boost::asio::ssl::context& ctx, std::string_view host, std::string_view port) {
+boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>
+createHttpClient(boost::asio::ssl::context& ctx, std::string_view host, std::string_view port) {
     boost::beast::ssl_stream<boost::beast::tcp_stream> stream_{co_await boost::asio::this_coro::executor, ctx};
     boost::system::error_code err{};
     if (!SSL_set_tlsext_host_name(stream_.native_handle(), host.data())) {
         SPDLOG_ERROR("SSL_set_tlsext_host_name");
-        co_return tl::make_unexpected(std::string("SSL_set_tlsext_host_name"));
+        co_return std::unexpected(std::string("SSL_set_tlsext_host_name"));
     }
     auto resolver = boost::asio::ip::tcp::resolver(co_await boost::asio::this_coro::executor);
     auto [ec, results] = co_await resolver.async_resolve(host.data(), port.data(), use_nothrow_awaitable);
     if (ec) {
         SPDLOG_INFO("async_resolve: {}", ec.message());
-        co_return tl::make_unexpected(ec.message());
+        co_return std::unexpected(ec.message());
     }
     for (auto& endpoint : results) {
         std::stringstream ss;
@@ -79,13 +76,13 @@
                    std::ostream_iterator<std::string>(ss),
-                   [](unsigned char c) -> std::string { return fmt::format("{:02x}", int(c)); });
+                   [](unsigned char c) -> std::string { return std::format("{:02x}", int(c)); });
     return ss.str() + random_key_str + random_iv_str;
 }
@@ -420,7 +417,7 @@ boost::asio::awaitable<void> FreeGpt::getGpt(std::shared_ptr<Channel> ch, nlohma
             nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
             if (line_json.is_discarded()) {
                 SPDLOG_ERROR("json parse error: [{}]", fields.back());
-                ch->try_send(err, fmt::format("json parse error: [{}]", fields.back()));
+                ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
                 continue;
             }
             auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
@@ -446,21 +443,21 @@ boost::asio::awaitable<void> FreeGpt::deepAi(std::shared_ptr<Channel> ch, nlohma
     std::mt19937 mt(rd());
     std::uniform_int_distribution<uint64_t> dist(0, 100000000);
     uint64_t part1{dist(mt)};
-    auto part2 = md5(user_agent + md5(user_agent + md5(fmt::format("{}{}x", user_agent, part1))));
-    auto api_key = fmt::format("tryit-{}-{}", part1, part2);
+    auto part2 = md5(user_agent + md5(user_agent + md5(std::format("{}{}x", user_agent, part1))));
+    auto api_key = std::format("tryit-{}-{}", part1, part2);

     constexpr char CRLF[] = "\r\n";
     constexpr char MULTI_PART_BOUNDARY[] = "9bc627aea4f77e150e6057f78036e73f";
     constexpr std::string_view host{"api.deepai.org"};
     constexpr std::string_view port{"443"};

-    boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post, "/make_me_a_pizza",
-                                                                     11};
+    boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post,
+                                                                     "/make_me_a_pizza", 11};
     req.set(boost::beast::http::field::host, host);
     req.set(boost::beast::http::field::user_agent, user_agent);
     req.set("Api-Key", api_key);
     req.set(boost::beast::http::field::content_type,
-            fmt::format("multipart/form-data; boundary={}", MULTI_PART_BOUNDARY));
+            std::format("multipart/form-data; boundary={}", MULTI_PART_BOUNDARY));

     auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
     nlohmann::json request_json{{{"role", "user"}, {"content", std::move(prompt)}}};
@@ -517,7 +514,7 @@ boost::asio::awaitable<void> FreeGpt::aiTianhu(std::shared_ptr<Channel> ch, nloh
        R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36)");
     req.set(boost::beast::http::field::content_type, "application/json");
     nlohmann::json data{
-        {"prompt", fmt::format("user: {}\nassistant:", prompt)},
+        {"prompt", std::format("user: {}\nassistant:", prompt)},
        {"options", std::unordered_map<std::string, std::string>{}},
        {"systemMessage", "You are ChatGPT, a large language model trained by OpenAI. Follow "
@@ -577,7 +574,7 @@ boost::asio::awaitable<void> FreeGpt::aiTianhu(std::shared_ptr<Channel> ch, nloh
     nlohmann::json rsp = nlohmann::json::parse(lines.back(), nullptr, false);
     if (rsp.is_discarded()) {
         SPDLOG_ERROR("json parse error");
-        co_await ch->async_send(err, fmt::format("json parse error: {}", lines.back()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("json parse error: {}", lines.back()), use_nothrow_awaitable);
         co_return;
     }
     co_await ch->async_send(err, rsp.value("text", rsp.dump()), use_nothrow_awaitable);
@@ -613,7 +610,7 @@ boost::asio::awaitable<void> FreeGpt::aiChat(std::shared_ptr<Channel> ch, nlohma
        R"(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36)");
     nlohmann::json data{
-        {"message", fmt::format("user: {}\nassistant:", prompt)},
+        {"message", std::format("user: {}\nassistant:", prompt)},
         {"temperature", 1},
         {"presence_penalty", 0},
         {"top_p", 1},
@@ -766,7 +763,7 @@ boost::asio::awaitable<void> FreeGpt::chatGptAi(std::shared_ptr<Channel> ch, nlo
     request.set("Content-Type", "application/x-www-form-urlencoded");

     std::stringstream ss;
-    ss << "message=" << urlEncode(fmt::format("user: {}\nassistant: ", prompt)) << "&";
+    ss << "message=" << urlEncode(std::format("user: {}\nassistant: ", prompt)) << "&";
     ss << "_wpnonce=" << nonce << "&";
     ss << "post_id=" << post_id << "&";
     ss << "url=" << urlEncode("https://chatgpt.ai/gpt-4") << "&";
@@ -888,7 +885,7 @@ boost::asio::awaitable<void> FreeGpt::chatFree(std::shared_ptr<Channel> ch, nloh
             nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
             if (line_json.is_discarded()) {
                 SPDLOG_ERROR("json parse error: [{}]", fields.back());
-                ch->try_send(err, fmt::format("json parse error: [{}]", fields.back()));
+                ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
                 continue;
             }
             auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
@@ -922,7 +919,7 @@ boost::asio::awaitable<void> FreeGpt::aiService(std::shared_ptr<Channel> ch, nlo
     req.set("sec-fetch-site", "same-origin");
     req.set(boost::beast::http::field::referer, "https://aiservice.vercel.app/chat");

-    nlohmann::json data{{"input", fmt::format("user: {}\nassistant:", prompt)}};
+    nlohmann::json data{{"input", std::format("user: {}\nassistant:", prompt)}};
     req.body() = data.dump();
     req.prepare_payload();

@@ -967,7 +964,7 @@ boost::asio::awaitable<void> FreeGpt::aiService(std::shared_ptr<Channel> ch, nlo
     nlohmann::json rsp = nlohmann::json::parse(res.body(), nullptr, false);
     if (rsp.is_discarded()) {
         SPDLOG_ERROR("json parse error");
-        co_await ch->async_send(err, fmt::format("json parse error: {}", res.body()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("json parse error: {}", res.body()), use_nothrow_awaitable);
         co_return;
     }
     co_await ch->async_send(err, rsp.value("data", rsp.dump()), use_nothrow_awaitable);
@@ -997,7 +994,7 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
     auto user_id = random(16);
     auto app_id = random(31);
     auto now = std::chrono::time_point_cast<std::chrono::seconds>(std::chrono::system_clock::now());
-    auto request_date = fmt::format("{:%Y-%m-%dT%H:%M:%S.000Z}", now);
+    auto request_date = std::format("{:%Y-%m-%dT%H:%M:%S.000Z}", now);

     constexpr std::string_view host = "wewordle.org";
     constexpr std::string_view port = "443";
@@ -1048,10 +1045,10 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
     nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);

     request["user"] = user_id;
-    request["subscriber"]["originalAppUserId"] = fmt::format("$RCAnonymousID:{}", app_id);
+    request["subscriber"]["originalAppUserId"] = std::format("$RCAnonymousID:{}", app_id);
     request["subscriber"]["firstSeen"] = request_date;
     request["subscriber"]["requestDate"] = request_date;
-    request["messages"][0]["content"] = fmt::format("user: {}\nassistant:", prompt);
+    request["messages"][0]["content"] = std::format("user: {}\nassistant:", prompt);

     SPDLOG_INFO("{}", request.dump(2));
@@ -1099,12 +1096,12 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
     nlohmann::json rsp = nlohmann::json::parse(res.body(), nullptr, false);
     if (rsp.is_discarded()) {
         SPDLOG_ERROR("json parse error");
-        co_await ch->async_send(err, fmt::format("json parse error: {}", res.body()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("json parse error: {}", res.body()), use_nothrow_awaitable);
         co_return;
     }
     if (!rsp.contains("message")) {
         SPDLOG_ERROR("not contains message: {}", rsp.dump());
-        co_await ch->async_send(err, fmt::format("not contains message : {}", rsp.dump()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("not contains message : {}", rsp.dump()), use_nothrow_awaitable);
         co_return;
     }
     co_await ch->async_send(err, rsp["message"].value("content", rsp.dump()), use_nothrow_awaitable);
diff --git a/src/main.cpp b/src/main.cpp
index 0e35c61..2156f23 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -1,3 +1,4 @@
+#include <format>
 #include
 #include
 #include
@@ -82,7 +83,7 @@ boost::asio::awaitable<void> startSession(boost::asio::ip::tcp::socket sock, Con
         if (http_path.back() == '/')
             http_path.remove_suffix(1);
         if (http_path == cfg.chat_path) {
-            auto html = createIndexHtml(fmt::format("{}/html/index.html", cfg.client_root_path), cfg);
+            auto html = createIndexHtml(std::format("{}/html/index.html", cfg.client_root_path), cfg);
             boost::beast::http::response<boost::beast::http::string_body> res{boost::beast::http::status::ok,
                                                                               request.version()};
             res.set(boost::beast::http::field::server, BOOST_BEAST_VERSION_STRING);
@@ -95,7 +96,7 @@ boost::asio::awaitable<void> startSession(boost::asio::ip::tcp::socket sock, Con
         } else if (request.target().starts_with(ASSETS_PATH)) {
             std::string req_path{request.target()};
             req_path.erase(req_path.find(ASSETS_PATH), ASSETS_PATH.length());
-            auto file = fmt::format("{}{}", cfg.client_root_path, req_path);
+            auto file = std::format("{}{}", cfg.client_root_path, req_path);
             SPDLOG_INFO("load: {}", file);
             boost::beast::error_code ec;
             boost::beast::http::file_body::value_type body;
@@ -212,7 +213,6 @@ int main(int argc, char** argv) {
         return EXIT_FAILURE;
     }
     auto& cfg = config.value();
-    SPDLOG_INFO("cfg.max_http_client_num: {}", cfg.max_http_client_num);
     SPDLOG_INFO("cfg.work_thread_num: {}", cfg.work_thread_num);

     FreeGpt app{cfg};
@@ -256,6 +256,7 @@ int main(int argc, char** argv) {
         smph_signal_main_to_thread.release();
     });
     smph_signal_main_to_thread.acquire();
+    SPDLOG_INFO("stopped ...");
     pool.stop();
     return EXIT_SUCCESS;
 }
diff --git a/xmake.lua b/xmake.lua
index a3aabd1..6cbb4d3 100644
--- a/xmake.lua
+++ b/xmake.lua
@@ -5,8 +5,8 @@ set_xmakever("2.7.8")
 add_repositories("my_private_repo https://github.com/fantasy-peak/xmake-repo.git")

 add_requires("openssl", {system = false})
-add_requires("fmt", "yaml_cpp_struct", "nlohmann_json", "spdlog", "inja")
-add_requires("boost", "plusaes", "tl_expected")
+add_requires("yaml_cpp_struct", "nlohmann_json", "spdlog", "inja")
+add_requires("boost", "plusaes")

 set_languages("c++23")
 set_policy("check.auto_ignore_flags", false)
@@ -16,6 +16,6 @@ add_includedirs("include")
target("cpp-freegpt-webui") set_kind("binary") add_files("src/*.cpp") - add_packages("openssl", "fmt", "yaml_cpp_struct", "nlohmann_json", "spdlog", "boost", "inja", "tl_expected", "plusaes") + add_packages("openssl", "yaml_cpp_struct", "nlohmann_json", "spdlog", "boost", "inja", "plusaes") add_syslinks("pthread") target_end()