diff --git a/.github/workflows/ubuntu.yaml b/.github/workflows/ubuntu.yaml
index 6221283..2580ca8 100644
--- a/.github/workflows/ubuntu.yaml
+++ b/.github/workflows/ubuntu.yaml
@@ -34,6 +34,9 @@ jobs:
       run: |
        export XMAKE_ROOT="y"
        source ~/.xmake/profile
+        g++-13 -v
+        export CXX=g++-13
+        export CC=gcc-13
        xmake build -y
        xmake install -o .
        ldd ./bin/cpp-freegpt-webui
diff --git a/src/free_gpt.cpp b/src/free_gpt.cpp
index e73584b..9440118 100644
--- a/src/free_gpt.cpp
+++ b/src/free_gpt.cpp
@@ -3,8 +3,8 @@
 #include
 #include
 #include
+#include <format>
-#include
 #include
 #include
 #include
 #include
@@ -117,7 +117,7 @@ std::string encrypt(const std::string& raw_data) {
                      encrypted.size(), true);
     std::stringstream ss;
     std::transform(encrypted.begin(), encrypted.end(), std::ostream_iterator<std::string>(ss),
-                   [](unsigned char c) -> std::string { return fmt::format("{:02x}", int(c)); });
+                   [](unsigned char c) -> std::string { return std::format("{:02x}", int(c)); });
     return ss.str() + random_key_str + random_iv_str;
 }

@@ -420,7 +420,7 @@ boost::asio::awaitable<void> FreeGpt::getGpt(std::shared_ptr<Channel> ch, nlohma
             nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
             if (line_json.is_discarded()) {
                 SPDLOG_ERROR("json parse error: [{}]", fields.back());
-                ch->try_send(err, fmt::format("json parse error: [{}]", fields.back()));
+                ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
                 continue;
             }
             auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
@@ -446,21 +446,21 @@ boost::asio::awaitable<void> FreeGpt::deepAi(std::shared_ptr<Channel> ch, nlohma
     std::mt19937 mt(rd());
     std::uniform_int_distribution<uint64_t> dist(0, 100000000);
     uint64_t part1{dist(mt)};
-    auto part2 = md5(user_agent + md5(user_agent + md5(fmt::format("{}{}x", user_agent, part1))));
-    auto api_key = fmt::format("tryit-{}-{}", part1, part2);
+    auto part2 = md5(user_agent + md5(user_agent + md5(std::format("{}{}x", user_agent, part1))));
+    auto api_key = std::format("tryit-{}-{}", part1, part2);

     constexpr char CRLF[] = "\r\n";
     constexpr char MULTI_PART_BOUNDARY[] = "9bc627aea4f77e150e6057f78036e73f";
     constexpr std::string_view host{"api.deepai.org"};
     constexpr std::string_view port{"443"};

-    boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post, "/make_me_a_pizza",
-                                                                     11};
+    boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post,
+                                                                     "/make_me_a_pizza", 11};
     req.set(boost::beast::http::field::host, host);
     req.set(boost::beast::http::field::user_agent, user_agent);
     req.set("Api-Key", api_key);
     req.set(boost::beast::http::field::content_type,
-            fmt::format("multipart/form-data; boundary={}", MULTI_PART_BOUNDARY));
+            std::format("multipart/form-data; boundary={}", MULTI_PART_BOUNDARY));

     auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
     nlohmann::json request_json{{{"role", "user"}, {"content", std::move(prompt)}}};
@@ -517,7 +517,7 @@ boost::asio::awaitable<void> FreeGpt::aiTianhu(std::shared_ptr<Channel> ch, nloh
             R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36)");
     req.set(boost::beast::http::field::content_type, "application/json");
     nlohmann::json data{
-        {"prompt", fmt::format("user: {}\nassistant:", prompt)},
+        {"prompt", std::format("user: {}\nassistant:", prompt)},
         {"options", std::unordered_map<std::string, std::string>{}},
         {"systemMessage",
          "You are ChatGPT, a large language model trained by OpenAI. Follow "
@@ -577,7 +577,7 @@ boost::asio::awaitable<void> FreeGpt::aiTianhu(std::shared_ptr<Channel> ch, nloh
     nlohmann::json rsp = nlohmann::json::parse(lines.back(), nullptr, false);
     if (rsp.is_discarded()) {
         SPDLOG_ERROR("json parse error");
-        co_await ch->async_send(err, fmt::format("json parse error: {}", lines.back()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("json parse error: {}", lines.back()), use_nothrow_awaitable);
         co_return;
     }
     co_await ch->async_send(err, rsp.value("text", rsp.dump()), use_nothrow_awaitable);
@@ -613,7 +613,7 @@ boost::asio::awaitable<void> FreeGpt::aiChat(std::shared_ptr<Channel> ch, nlohma
             R"(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36)");

     nlohmann::json data{
-        {"message", fmt::format("user: {}\nassistant:", prompt)},
+        {"message", std::format("user: {}\nassistant:", prompt)},
         {"temperature", 1},
         {"presence_penalty", 0},
         {"top_p", 1},
@@ -766,7 +766,7 @@ boost::asio::awaitable<void> FreeGpt::chatGptAi(std::shared_ptr<Channel> ch, nlo
     request.set("Content-Type", "application/x-www-form-urlencoded");

     std::stringstream ss;
-    ss << "message=" << urlEncode(fmt::format("user: {}\nassistant: ", prompt)) << "&";
+    ss << "message=" << urlEncode(std::format("user: {}\nassistant: ", prompt)) << "&";
     ss << "_wpnonce=" << nonce << "&";
     ss << "post_id=" << post_id << "&";
     ss << "url=" << urlEncode("https://chatgpt.ai/gpt-4") << "&";
@@ -888,7 +888,7 @@ boost::asio::awaitable<void> FreeGpt::chatFree(std::shared_ptr<Channel> ch, nloh
             nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
             if (line_json.is_discarded()) {
                 SPDLOG_ERROR("json parse error: [{}]", fields.back());
-                ch->try_send(err, fmt::format("json parse error: [{}]", fields.back()));
+                ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
                 continue;
             }
             auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
@@ -922,7 +922,7 @@ boost::asio::awaitable<void> FreeGpt::aiService(std::shared_ptr<Channel> ch, nlo
     req.set("sec-fetch-site", "same-origin");
     req.set(boost::beast::http::field::referer, "https://aiservice.vercel.app/chat");

-    nlohmann::json data{{"input", fmt::format("user: {}\nassistant:", prompt)}};
+    nlohmann::json data{{"input", std::format("user: {}\nassistant:", prompt)}};
     req.body() = data.dump();
     req.prepare_payload();

@@ -967,7 +967,7 @@ boost::asio::awaitable<void> FreeGpt::aiService(std::shared_ptr<Channel> ch, nlo
     nlohmann::json rsp = nlohmann::json::parse(res.body(), nullptr, false);
     if (rsp.is_discarded()) {
         SPDLOG_ERROR("json parse error");
-        co_await ch->async_send(err, fmt::format("json parse error: {}", res.body()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("json parse error: {}", res.body()), use_nothrow_awaitable);
         co_return;
     }
     co_await ch->async_send(err, rsp.value("data", rsp.dump()), use_nothrow_awaitable);
@@ -997,7 +997,7 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
     auto user_id = random(16);
     auto app_id = random(31);
     auto now = std::chrono::time_point_cast<std::chrono::seconds>(std::chrono::system_clock::now());
-    auto request_date = fmt::format("{:%Y-%m-%dT%H:%M:%S.000Z}", now);
+    auto request_date = std::format("{:%Y-%m-%dT%H:%M:%S.000Z}", now);

     constexpr std::string_view host = "wewordle.org";
     constexpr std::string_view port = "443";
@@ -1048,10 +1048,10 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
     nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);

     request["user"] = user_id;
-    request["subscriber"]["originalAppUserId"] = fmt::format("$RCAnonymousID:{}", app_id);
+    request["subscriber"]["originalAppUserId"] = std::format("$RCAnonymousID:{}", app_id);
     request["subscriber"]["firstSeen"] = request_date;
     request["subscriber"]["requestDate"] = request_date;
-    request["messages"][0]["content"] = fmt::format("user: {}\nassistant:", prompt);
+    request["messages"][0]["content"] = std::format("user: {}\nassistant:", prompt);

     SPDLOG_INFO("{}", request.dump(2));

@@ -1099,12 +1099,12 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
     nlohmann::json rsp = nlohmann::json::parse(res.body(), nullptr, false);
     if (rsp.is_discarded()) {
         SPDLOG_ERROR("json parse error");
-        co_await ch->async_send(err, fmt::format("json parse error: {}", res.body()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("json parse error: {}", res.body()), use_nothrow_awaitable);
         co_return;
     }
     if (!rsp.contains("message")) {
         SPDLOG_ERROR("not contains message: {}", rsp.dump());
-        co_await ch->async_send(err, fmt::format("not contains message : {}", rsp.dump()), use_nothrow_awaitable);
+        co_await ch->async_send(err, std::format("not contains message : {}", rsp.dump()), use_nothrow_awaitable);
         co_return;
     }
     co_await ch->async_send(err, rsp["message"].value("content", rsp.dump()), use_nothrow_awaitable);
diff --git a/src/main.cpp b/src/main.cpp
index 4becabe..2156f23 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -1,3 +1,4 @@
+#include <format>
 #include
 #include
 #include
@@ -82,7 +83,7 @@ boost::asio::awaitable<void> startSession(boost::asio::ip::tcp::socket sock, Con
     if (http_path.back() == '/')
         http_path.remove_suffix(1);
     if (http_path == cfg.chat_path) {
-        auto html = createIndexHtml(fmt::format("{}/html/index.html", cfg.client_root_path), cfg);
+        auto html = createIndexHtml(std::format("{}/html/index.html", cfg.client_root_path), cfg);
         boost::beast::http::response<boost::beast::http::string_body> res{boost::beast::http::status::ok,
                                                                           request.version()};
         res.set(boost::beast::http::field::server, BOOST_BEAST_VERSION_STRING);
@@ -95,7 +96,7 @@ boost::asio::awaitable<void> startSession(boost::asio::ip::tcp::socket sock, Con
     } else if (request.target().starts_with(ASSETS_PATH)) {
         std::string req_path{request.target()};
         req_path.erase(req_path.find(ASSETS_PATH), ASSETS_PATH.length());
-        auto file = fmt::format("{}{}", cfg.client_root_path, req_path);
+        auto file = std::format("{}{}", cfg.client_root_path, req_path);
         SPDLOG_INFO("load: {}", file);
         boost::beast::error_code ec;
         boost::beast::http::file_body::value_type body;
diff --git a/xmake.lua b/xmake.lua
index a3aabd1..0a6d2be 100644
--- a/xmake.lua
+++ b/xmake.lua
@@ -5,7 +5,7 @@ set_xmakever("2.7.8")
 add_repositories("my_private_repo https://github.com/fantasy-peak/xmake-repo.git")

 add_requires("openssl", {system = false})
-add_requires("fmt", "yaml_cpp_struct", "nlohmann_json", "spdlog", "inja")
+add_requires("yaml_cpp_struct", "nlohmann_json", "spdlog", "inja")
 add_requires("boost", "plusaes", "tl_expected")

 set_languages("c++23")
@@ -16,6 +16,6 @@ add_includedirs("include")
 target("cpp-freegpt-webui")
     set_kind("binary")
     add_files("src/*.cpp")
-    add_packages("openssl", "fmt", "yaml_cpp_struct", "nlohmann_json", "spdlog", "boost", "inja", "tl_expected", "plusaes")
+    add_packages("openssl", "yaml_cpp_struct", "nlohmann_json", "spdlog", "boost", "inja", "tl_expected", "plusaes")
     add_syslinks("pthread")
 target_end()