Add ChatgptAi
fantasy-peak committed Jul 31, 2023
1 parent dea0e02 commit 723c71f
Showing 4 changed files with 207 additions and 24 deletions.
Binary file added bin/cpp-freegpt-webui
11 changes: 7 additions & 4 deletions include/free_gpt.h
@@ -31,18 +31,21 @@ class FreeGpt final {

FreeGpt(Config&);

boost::asio::awaitable<void> getgpt(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> getGpt(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle);
boost::asio::awaitable<void> deepai(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> deepAi(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle);
boost::asio::awaitable<void> aitianhu(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> aiTianhu(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle);
boost::asio::awaitable<void> aichat(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> aiChat(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle);
boost::asio::awaitable<void> chatGptAi(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle);

private:
Config& m_cfg;
211 changes: 195 additions & 16 deletions src/free_gpt.cpp
@@ -1,6 +1,7 @@
#include <iostream>
#include <random>
#include <ranges>
#include <regex>

#include <openssl/md5.h>
#include <spdlog/spdlog.h>
@@ -25,16 +26,17 @@ template <typename C>
struct to_helper {};

template <typename Container, std::ranges::range R>
requires std::convertible_to < std::ranges::range_value_t<R>,
typename Container::value_type >
Container operator|(R&& r, to_helper<Container>) {
requires std::convertible_to<std::ranges::range_value_t<R>,
typename Container::value_type>
Container operator|(R&& r, to_helper<Container>) {
return Container{r.begin(), r.end()};
}

} // namespace detail

template <std::ranges::range Container>
requires(!std::ranges::view<Container>) inline auto to() {
requires(!std::ranges::view<Container>)
inline auto to() {
return detail::to_helper<Container>{};
}

@@ -150,6 +152,19 @@ auto split_string(const std::string& input, const std::string& delimiter) {
return fields;
}

std::vector<std::string> findall(const std::string& pattern,
const std::string& text) {
std::regex re(pattern);
std::sregex_iterator it(text.begin(), text.end(), re);
std::sregex_iterator end;
std::vector<std::string> matches;
while (it != end) {
matches.push_back(it->str());
++it;
}
return matches;
}

enum class Status : uint8_t {
Ok,
Close,
@@ -241,7 +256,7 @@ boost::asio::awaitable<Status> send_recv_chunk(

FreeGpt::FreeGpt(Config& cfg) : m_cfg(cfg) {}

boost::asio::awaitable<void> FreeGpt::getgpt(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> FreeGpt::getGpt(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle) {
BOOST_SCOPE_EXIT(&ch, &handle) {
@@ -351,7 +366,7 @@ boost::asio::awaitable<void> FreeGpt::getgpt(std::shared_ptr<Channel> ch,
co_return;
}

boost::asio::awaitable<void> FreeGpt::deepai(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> FreeGpt::deepAi(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle) {
BOOST_SCOPE_EXIT(&ch, &handle) {
@@ -435,7 +450,7 @@ boost::asio::awaitable<void> FreeGpt::deepai(std::shared_ptr<Channel> ch,
co_return;
}

boost::asio::awaitable<void> FreeGpt::aitianhu(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> FreeGpt::aiTianhu(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle) {
BOOST_SCOPE_EXIT(&ch, &handle) {
@@ -535,13 +550,12 @@ boost::asio::awaitable<void> FreeGpt::aitianhu(std::shared_ptr<Channel> ch,
use_nothrow_awaitable);
co_return;
}
co_await ch->async_send(
err, rsp.value("text", std::string{"not found text from gpt response"}),
use_nothrow_awaitable);
co_await ch->async_send(err, rsp.value("text", rsp.dump()),
use_nothrow_awaitable);
co_return;
}

boost::asio::awaitable<void> FreeGpt::aichat(std::shared_ptr<Channel> ch,
boost::asio::awaitable<void> FreeGpt::aiChat(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle) {
BOOST_SCOPE_EXIT(&ch, &handle) {
@@ -639,10 +653,175 @@ boost::asio::awaitable<void> FreeGpt::aichat(std::shared_ptr<Channel> ch,
co_return;
}
SPDLOG_INFO("rsp: {}", rsp.dump());
co_await ch->async_send(
err,
rsp.value("message",
std::string{"not found message from gpt response"}),
use_nothrow_awaitable);
co_await ch->async_send(err, rsp.value("message", rsp.dump()),
use_nothrow_awaitable);
co_return;
}

boost::asio::awaitable<void> FreeGpt::chatGptAi(std::shared_ptr<Channel> ch,
nlohmann::json json,
HttpClientPool::handle handle) {
BOOST_SCOPE_EXIT(&ch, &handle) {
ch->close();
handle.recycle();
}
BOOST_SCOPE_EXIT_END
boost::system::error_code err{};

constexpr std::string_view host = "chatgpt.ai";
constexpr std::string_view port = "443";

boost::beast::http::request<boost::beast::http::string_body> req{
boost::beast::http::verb::get, "/", 11};

int recreate_num{0};
if (handle.empty()) {
create_client:
SPDLOG_INFO("create new client");
auto http_client_ptr = std::make_unique<HttpClient>();
auto client =
co_await create_http_client(*http_client_ptr->ctx, host, port);
if (!client.has_value()) {
SPDLOG_ERROR("create_http_client: {}", client.error());
co_await ch->async_send(err, client.error(), use_nothrow_awaitable);
co_return;
}
http_client_ptr->stream = std::make_shared<
boost::beast::ssl_stream<boost::beast::tcp_stream>>(
std::move(client.value()));
handle.reset(std::move(http_client_ptr));
}
auto& stream_ = *handle.get()->stream;

auto [ec, count] = co_await boost::beast::http::async_write(
stream_, req, use_nothrow_awaitable);
if (ec) {
SPDLOG_ERROR("{}", ec.message());
co_await ch->async_send(err, ec.message(), use_nothrow_awaitable);
co_return;
}
boost::beast::flat_buffer b;
boost::beast::http::response<boost::beast::http::string_body> res;
std::tie(ec, count) = co_await boost::beast::http::async_read(
stream_, b, res, use_nothrow_awaitable);
if (ec == boost::beast::http::error::end_of_stream) {
if (recreate_num == 0) {
recreate_num++;
goto create_client;
}
}
if (ec) {
SPDLOG_ERROR("{}", ec.message());
co_await ch->async_send(err, ec.message(), use_nothrow_awaitable);
co_return;
}
if (boost::beast::http::status::ok != res.result()) {
SPDLOG_ERROR("http code: {}", res.result_int());
co_await ch->async_send(err, res.reason(), use_nothrow_awaitable);
co_return;
}

static std::string pattern{
R"(data-nonce=".*"\n data-post-id=".*"\n data-url=".*"\n data-bot-id=".*"\n data-width)"};

std::vector<std::string> matches = findall(pattern, res.body());
if (matches.size() != 1) {
SPDLOG_ERROR("parsing login failed");
co_await ch->async_send(err, res.body(), use_nothrow_awaitable);
co_return;
}

std::regex reg("\"([^\"]*)\"");
std::sregex_iterator iter(matches[0].begin(), matches[0].end(), reg);
std::sregex_iterator end;
std::vector<std::string> results;
while (iter != end) {
results.emplace_back(iter->str(1));
iter++;
}
if (results.size() != 4) {
SPDLOG_ERROR("Failed to extract content");
co_await ch->async_send(err, "Failed to extract content",
use_nothrow_awaitable);
co_return;
}

auto& nonce = results[0];
auto& post_id = results[1];
auto& data_url = results[2];
auto& bot_id = results[3];

SPDLOG_INFO("data_nonce: {}", nonce);
SPDLOG_INFO("data_post_id: {}", post_id);
SPDLOG_INFO("data_url: {}", data_url);
SPDLOG_INFO("data_bot_id: {}", bot_id);

auto prompt = json.at("meta").at("content").at("parts").at(0).at("content");

boost::beast::http::request<boost::beast::http::string_body> request{
boost::beast::http::verb::post, "/gpt-4", 11};
request.set(boost::beast::http::field::host, host);
request.set("authority", "chatgpt.ai");
request.set("accept", "*/*");
request.set("accept-language",
"en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q="
"0.4,de;q=0.3");
request.set("cache-control", "no-cache");
request.set("pragma", "no-cache");
request.set(boost::beast::http::field::referer,
"https://chatgpt.ai/gpt-4/");
request.set("sec-ch-ua-mobile", "?0");
request.set(
"sec-ch-ua",
R"("Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114")");
request.set("sec-ch-ua-platform", R"("Windows")");
request.set("sec-fetch-dest", "empty");
request.set("sec-fetch-mode", "cors");
request.set("sec-fetch-site", "same-origin");
request.set(boost::beast::http::field::user_agent,
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
"(KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36");

nlohmann::json data{
{"message", fmt::format("user: {}\nassistant: ", prompt)},
{"_wpnonce", nonce},
{"post_id", post_id},
{"url", "https://chatgpt.ai/gpt-4"},
{"action", "wpaicg_chat_shortcode_message"},
{"bot_id", bot_id},
};
request.body() = data.dump();
request.prepare_payload();

std::tie(ec, count) = co_await boost::beast::http::async_write(
stream_, request, use_nothrow_awaitable);
if (ec) {
SPDLOG_ERROR("{}", ec.message());
co_await ch->async_send(err, ec.message(), use_nothrow_awaitable);
co_return;
}
boost::beast::flat_buffer buffer;
boost::beast::http::response<boost::beast::http::string_body> response;
std::tie(ec, count) = co_await boost::beast::http::async_read(
stream_, buffer, response, use_nothrow_awaitable);
if (ec) {
SPDLOG_ERROR("{}", ec.message());
co_await ch->async_send(err, ec.message(), use_nothrow_awaitable);
co_return;
}
if (boost::beast::http::status::ok != response.result()) {
SPDLOG_ERROR("http code: {}", response.result_int());
co_await ch->async_send(err, response.reason(), use_nothrow_awaitable);
co_return;
}
nlohmann::json rsp = nlohmann::json::parse(response.body(), nullptr, false);
if (rsp.is_discarded()) {
SPDLOG_ERROR("json parse error");
co_await ch->async_send(err, "json parse error", use_nothrow_awaitable);
co_return;
}
SPDLOG_INFO("rsp: {}", rsp.dump());
co_await ch->async_send(err, rsp.value("data", rsp.dump()),
use_nothrow_awaitable);
co_return;
}
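
For readers skimming the new chatGptAi handler above, here is a small standalone sketch (not part of this commit) of its HTML-scraping step: findall() grabs the data-nonce / data-post-id / data-url / data-bot-id block from the page, then a second regex pulls the four quoted values out of that block. The HTML fragment below is invented for illustration; only the two regular expressions and the extraction loop mirror the committed code.

// sketch_chatgptai_scrape.cpp -- illustrative only, not part of the commit.
#include <iostream>
#include <regex>
#include <string>
#include <vector>

// Same shape as the findall() helper added in free_gpt.cpp: return every
// whole-pattern match found in `text`.
std::vector<std::string> findall(const std::string& pattern, const std::string& text) {
    std::regex re(pattern);
    std::sregex_iterator it(text.begin(), text.end(), re), end;
    std::vector<std::string> matches;
    for (; it != end; ++it) matches.push_back(it->str());
    return matches;
}

int main() {
    // Hypothetical HTML fragment shaped like the attributes chatGptAi looks for.
    std::string body =
        "data-nonce=\"abc123\"\n data-post-id=\"42\"\n "
        "data-url=\"https://chatgpt.ai\"\n data-bot-id=\"7\"\n data-width";

    // Same pattern as in the commit: the four data-* attributes in order.
    std::string pattern{
        R"(data-nonce=".*"\n data-post-id=".*"\n data-url=".*"\n data-bot-id=".*"\n data-width)"};

    auto matches = findall(pattern, body);
    if (matches.size() != 1) {
        std::cerr << "parsing login failed\n";
        return 1;
    }

    // Second pass, as in chatGptAi: capture every quoted value inside the match.
    std::regex quoted("\"([^\"]*)\"");
    std::vector<std::string> results;
    for (std::sregex_iterator iter(matches[0].begin(), matches[0].end(), quoted), end;
         iter != end; ++iter) {
        results.emplace_back(iter->str(1));
    }
    if (results.size() != 4) {
        std::cerr << "Failed to extract content\n";
        return 1;
    }
    std::cout << "nonce=" << results[0] << "\npost_id=" << results[1]
              << "\nurl=" << results[2] << "\nbot_id=" << results[3] << "\n";
}
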
9 changes: 5 additions & 4 deletions src/main.cpp
@@ -267,10 +267,11 @@ int main(int argc, char** argv) {
SPDLOG_INFO("cfg.work_thread_num: {}", cfg.work_thread_num);
FreeGpt app{cfg};

ADD_METHOD("gpt-3.5-turbo-aitianhu", FreeGpt::aitianhu);
ADD_METHOD("gpt-3.5-turbo-aichat", FreeGpt::aichat);
ADD_METHOD("gpt-3.5-turbo-stream-deepai", FreeGpt::deepai);
ADD_METHOD("gpt-3.5-turbo-stream-getgpt", FreeGpt::getgpt);
ADD_METHOD("gpt-3.5-turbo-AItianhu", FreeGpt::aiTianhu);
ADD_METHOD("gpt-3.5-turbo-Aichat", FreeGpt::aiChat);
ADD_METHOD("gpt-4-ChatgptAi", FreeGpt::chatGptAi);
ADD_METHOD("gpt-3.5-turbo-stream-DeepAi", FreeGpt::deepAi);
ADD_METHOD("gpt-3.5-turbo-stream-GetGpt", FreeGpt::getGpt);

IoContextPool pool{cfg.work_thread_num};
pool.start();
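
A note on the registrations above: ADD_METHOD evidently binds a model name to a FreeGpt member coroutine, but the macro itself is defined elsewhere in the repository and is not part of this diff. The sketch below is only a guess at the general shape of such a name-to-handler dispatch table, with the Boost coroutine signature stripped down to a plain std::function so the example stays self-contained.

// Illustrative only: not the project's actual ADD_METHOD macro.
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>

// Stand-in for FreeGpt, with the awaitable/Channel/handle parameters dropped.
struct FakeFreeGpt {
    void chatGptAi(const std::string& prompt) { std::cout << "chatGptAi: " << prompt << "\n"; }
    void getGpt(const std::string& prompt) { std::cout << "getGpt: " << prompt << "\n"; }
};

int main() {
    FakeFreeGpt app;
    std::unordered_map<std::string, std::function<void(const std::string&)>> handlers;

    // Conceptually what ADD_METHOD("gpt-4-ChatgptAi", FreeGpt::chatGptAi) achieves:
    handlers.emplace("gpt-4-ChatgptAi", std::bind_front(&FakeFreeGpt::chatGptAi, &app));
    handlers.emplace("gpt-3.5-turbo-stream-GetGpt", std::bind_front(&FakeFreeGpt::getGpt, &app));

    // A request whose model field is "gpt-4-ChatgptAi" gets routed to the new handler.
    handlers.at("gpt-4-ChatgptAi")("hello");
}
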
