Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Dockerfile #2

Merged
merged 1 commit into from
Aug 4, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 26 additions & 11 deletions .github/workflows/ubuntu.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
name: Linux

on: [push, pull_request]
on:
push:
branches: [ "main", "dev" ]
pull_request:
branches: [ "main" ]

jobs:
build:
Expand All @@ -11,18 +15,15 @@ jobs:
os: [ubuntu-22.04]

steps:
# - uses: actions/checkout@v1
# - uses: xmake-io/github-action-setup-xmake@v1
# with:
# xmake-version: branch@master
# actions-cache-folder: '.xmake-cache'
- name: Installation
run: |
sudo apt-get update
sudo apt-get install -y libgl1-mesa-dev libglu1-mesa-dev p7zip gobjc gcc g++ wget
g++ -v
sudo apt-get install -y libgl1-mesa-dev libglu1-mesa-dev p7zip gobjc g++-13 wget
g++-13 -v
export CXX=g++-13
export CC=gcc-13
wget https://github.com/xmake-io/xmake/releases/download/v2.8.1/xmake-v2.8.1.xz.run
chmod 777 xmake-v2.8.1.xz.run
chmod 777 xmake-v2.8.1.xz.run > a.txt
./xmake-v2.8.1.xz.run
export XMAKE_ROOT="y"
source ~/.xmake/profile
Expand All @@ -33,7 +34,21 @@ jobs:
run: |
export XMAKE_ROOT="y"
source ~/.xmake/profile
g++-13 -v
export CXX=g++-13
export CC=gcc-13
xmake build -y
xmake install -o .
cd bin
ldd cpp-freegpt-webui
ldd ./bin/cpp-freegpt-webui
- name: Docker login
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Build the Docker image
run: |
pwd
ls
docker build . -t ${{ secrets.DOCKERHUB_USERNAME }}/freegpt:latest
- name: Docker image push
run: docker push ${{ secrets.DOCKERHUB_USERNAME }}/freegpt:latest
18 changes: 18 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Runtime image for the prebuilt cpp-freegpt-webui binary.
# The binary is compiled with g++-13 on the CI runner, so the base image
# must ship a libstdc++ new enough for its GLIBCXX symbol versions.
FROM ubuntu:23.04

# Debugging aids kept for reference: inspect the GLIBCXX versions
# available in the base image's libstdc++.
# RUN apt-get update -y
# RUN apt-get install -y g++-13
# RUN strings /lib/x86_64-linux-gnu/libstdc++.so.6 | grep GLIBCXX_3.4

WORKDIR /app

# COPY is preferred over ADD for plain local files/directories:
# ADD carries extra tar-auto-extraction and remote-URL semantics
# that are not wanted here.
COPY bin /app/bin
COPY cfg /app/cfg
COPY client /app/client

# Sanity-check that the expected artifacts made it into the image.
RUN ls /app/bin
RUN ls /app/cfg

WORKDIR /app/bin

# Exec form without a shell wrapper: the server runs as PID 1 and
# receives signals (SIGTERM from `docker stop`) directly, instead of
# them being swallowed by an intermediate `sh -c`.
ENTRYPOINT ["./cpp-freegpt-webui", "../cfg/cpp-free-gpt.yml"]
16 changes: 15 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,22 @@ cd bin

Access the application in your browser using the URL:
```
http://127.0.0.1:8085/chat
http://127.0.0.1:8858/chat
```

### Running with Docker
Pull the Docker image from Docker Hub:
```
docker pull fantasypeak/freegpt:latest
```

Run the application using Docker:
```
docker run --net=host -it --name freegpt fantasypeak/freegpt:latest
// OR
docker run -p 8858:8858 -it --name freegpt fantasypeak/freegpt:latest
```

### WebUI
The application interface was incorporated from the [chatgpt-clone](https://github.com/xtekky/chatgpt-clone) repository.

Expand Down
6 changes: 2 additions & 4 deletions include/cfg.h
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,9 @@ struct Config {
std::string client_root_path;
std::size_t interval{300};
std::size_t work_thread_num{8};
std::size_t max_http_client_num{2};
std::string host{"127.0.0.1"};
std::string host{"0.0.0.0"};
std::string port{"8858"};
std::string chat_path{"/chat"};
std::vector<std::string> providers;
};
YCS_ADD_STRUCT(Config, client_root_path, interval, work_thread_num, max_http_client_num, host, port, chat_path,
providers)
YCS_ADD_STRUCT(Config, client_root_path, interval, work_thread_num, host, port, chat_path, providers)
63 changes: 30 additions & 33 deletions src/free_gpt.cpp
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
#include <chrono>
#include <expected>
#include <format>
#include <iostream>
#include <random>
#include <ranges>
#include <regex>

#include <fmt/chrono.h>
#include <openssl/md5.h>
#include <spdlog/spdlog.h>
#include <boost/asio/as_tuple.hpp>
Expand All @@ -13,7 +14,6 @@
#include <boost/uuid/uuid_generators.hpp>
#include <boost/uuid/uuid_io.hpp>
#include <plusaes/plusaes.hpp>
#include <tl/expected.hpp>

#include "free_gpt.h"

Expand All @@ -27,18 +27,15 @@ template <typename C>
struct to_helper {};

template <typename Container, std::ranges::range R>
requires std::convertible_to<std::ranges::range_value_t<R>, typename Container::value_type>
Container operator|(R&& r, to_helper<Container>) {
requires std::convertible_to < std::ranges::range_value_t<R>,
typename Container::value_type > Container operator|(R&& r, to_helper<Container>) {
return Container{r.begin(), r.end()};
}

} // namespace detail

template <std::ranges::range Container>
requires(!std::ranges::view<Container>)
inline auto to() {
return detail::to_helper<Container>{};
}
requires(!std::ranges::view<Container>) inline auto to() { return detail::to_helper<Container>{}; }

std::string md5(const std::string& str, bool reverse = true) {
unsigned char hash[MD5_DIGEST_LENGTH];
Expand All @@ -57,19 +54,19 @@ std::string md5(const std::string& str, bool reverse = true) {
return md5_str;
}

boost::asio::awaitable<tl::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>> createHttpClient(
boost::asio::ssl::context& ctx, std::string_view host, std::string_view port) {
boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>
createHttpClient(boost::asio::ssl::context& ctx, std::string_view host, std::string_view port) {
boost::beast::ssl_stream<boost::beast::tcp_stream> stream_{co_await boost::asio::this_coro::executor, ctx};
boost::system::error_code err{};
if (!SSL_set_tlsext_host_name(stream_.native_handle(), host.data())) {
SPDLOG_ERROR("SSL_set_tlsext_host_name");
co_return tl::make_unexpected(std::string("SSL_set_tlsext_host_name"));
co_return std::unexpected(std::string("SSL_set_tlsext_host_name"));
}
auto resolver = boost::asio::ip::tcp::resolver(co_await boost::asio::this_coro::executor);
auto [ec, results] = co_await resolver.async_resolve(host.data(), port.data(), use_nothrow_awaitable);
if (ec) {
SPDLOG_INFO("async_resolve: {}", ec.message());
co_return tl::make_unexpected(ec.message());
co_return std::unexpected(ec.message());
}
for (auto& endpoint : results) {
std::stringstream ss;
Expand All @@ -79,13 +76,13 @@ boost::asio::awaitable<tl::expected<boost::beast::ssl_stream<boost::beast::tcp_s
boost::beast::get_lowest_layer(stream_).expires_after(std::chrono::seconds(30));
if (auto [ec, _] = co_await boost::beast::get_lowest_layer(stream_).async_connect(results, use_nothrow_awaitable);
ec) {
co_return tl::make_unexpected(ec.message());
co_return std::unexpected(ec.message());
}
boost::beast::get_lowest_layer(stream_).expires_never();
std::tie(ec) = co_await stream_.async_handshake(boost::asio::ssl::stream_base::client, use_nothrow_awaitable);
if (ec) {
SPDLOG_INFO("async_handshake: {}", ec.message());
co_return tl::make_unexpected(ec.message());
co_return std::unexpected(ec.message());
}
co_return stream_;
}
Expand Down Expand Up @@ -117,7 +114,7 @@ std::string encrypt(const std::string& raw_data) {
encrypted.size(), true);
std::stringstream ss;
std::transform(encrypted.begin(), encrypted.end(), std::ostream_iterator<std::string>(ss),
[](unsigned char c) -> std::string { return fmt::format("{:02x}", int(c)); });
[](unsigned char c) -> std::string { return std::format("{:02x}", int(c)); });
return ss.str() + random_key_str + random_iv_str;
}

Expand Down Expand Up @@ -420,7 +417,7 @@ boost::asio::awaitable<void> FreeGpt::getGpt(std::shared_ptr<Channel> ch, nlohma
nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
if (line_json.is_discarded()) {
SPDLOG_ERROR("json parse error: [{}]", fields.back());
ch->try_send(err, fmt::format("json parse error: [{}]", fields.back()));
ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
continue;
}
auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
Expand All @@ -446,21 +443,21 @@ boost::asio::awaitable<void> FreeGpt::deepAi(std::shared_ptr<Channel> ch, nlohma
std::mt19937 mt(rd());
std::uniform_int_distribution<uint64_t> dist(0, 100000000);
uint64_t part1{dist(mt)};
auto part2 = md5(user_agent + md5(user_agent + md5(fmt::format("{}{}x", user_agent, part1))));
auto api_key = fmt::format("tryit-{}-{}", part1, part2);
auto part2 = md5(user_agent + md5(user_agent + md5(std::format("{}{}x", user_agent, part1))));
auto api_key = std::format("tryit-{}-{}", part1, part2);

constexpr char CRLF[] = "\r\n";
constexpr char MULTI_PART_BOUNDARY[] = "9bc627aea4f77e150e6057f78036e73f";
constexpr std::string_view host{"api.deepai.org"};
constexpr std::string_view port{"443"};

boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post, "/make_me_a_pizza",
11};
boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post,
"/make_me_a_pizza", 11};
req.set(boost::beast::http::field::host, host);
req.set(boost::beast::http::field::user_agent, user_agent);
req.set("Api-Key", api_key);
req.set(boost::beast::http::field::content_type,
fmt::format("multipart/form-data; boundary={}", MULTI_PART_BOUNDARY));
std::format("multipart/form-data; boundary={}", MULTI_PART_BOUNDARY));

auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
nlohmann::json request_json{{{"role", "user"}, {"content", std::move(prompt)}}};
Expand Down Expand Up @@ -517,7 +514,7 @@ boost::asio::awaitable<void> FreeGpt::aiTianhu(std::shared_ptr<Channel> ch, nloh
R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36)");
req.set(boost::beast::http::field::content_type, "application/json");
nlohmann::json data{
{"prompt", fmt::format("user: {}\nassistant:", prompt)},
{"prompt", std::format("user: {}\nassistant:", prompt)},
{"options", std::unordered_map<std::string, std::string>{}},
{"systemMessage",
"You are ChatGPT, a large language model trained by OpenAI. Follow "
Expand Down Expand Up @@ -577,7 +574,7 @@ boost::asio::awaitable<void> FreeGpt::aiTianhu(std::shared_ptr<Channel> ch, nloh
nlohmann::json rsp = nlohmann::json::parse(lines.back(), nullptr, false);
if (rsp.is_discarded()) {
SPDLOG_ERROR("json parse error");
co_await ch->async_send(err, fmt::format("json parse error: {}", lines.back()), use_nothrow_awaitable);
co_await ch->async_send(err, std::format("json parse error: {}", lines.back()), use_nothrow_awaitable);
co_return;
}
co_await ch->async_send(err, rsp.value("text", rsp.dump()), use_nothrow_awaitable);
Expand Down Expand Up @@ -613,7 +610,7 @@ boost::asio::awaitable<void> FreeGpt::aiChat(std::shared_ptr<Channel> ch, nlohma
R"(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36)");

nlohmann::json data{
{"message", fmt::format("user: {}\nassistant:", prompt)},
{"message", std::format("user: {}\nassistant:", prompt)},
{"temperature", 1},
{"presence_penalty", 0},
{"top_p", 1},
Expand Down Expand Up @@ -766,7 +763,7 @@ boost::asio::awaitable<void> FreeGpt::chatGptAi(std::shared_ptr<Channel> ch, nlo
request.set("Content-Type", "application/x-www-form-urlencoded");

std::stringstream ss;
ss << "message=" << urlEncode(fmt::format("user: {}\nassistant: ", prompt)) << "&";
ss << "message=" << urlEncode(std::format("user: {}\nassistant: ", prompt)) << "&";
ss << "_wpnonce=" << nonce << "&";
ss << "post_id=" << post_id << "&";
ss << "url=" << urlEncode("https://chatgpt.ai/gpt-4") << "&";
Expand Down Expand Up @@ -888,7 +885,7 @@ boost::asio::awaitable<void> FreeGpt::chatFree(std::shared_ptr<Channel> ch, nloh
nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
if (line_json.is_discarded()) {
SPDLOG_ERROR("json parse error: [{}]", fields.back());
ch->try_send(err, fmt::format("json parse error: [{}]", fields.back()));
ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
continue;
}
auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
Expand Down Expand Up @@ -922,7 +919,7 @@ boost::asio::awaitable<void> FreeGpt::aiService(std::shared_ptr<Channel> ch, nlo
req.set("sec-fetch-site", "same-origin");
req.set(boost::beast::http::field::referer, "https://aiservice.vercel.app/chat");

nlohmann::json data{{"input", fmt::format("user: {}\nassistant:", prompt)}};
nlohmann::json data{{"input", std::format("user: {}\nassistant:", prompt)}};
req.body() = data.dump();
req.prepare_payload();

Expand Down Expand Up @@ -967,7 +964,7 @@ boost::asio::awaitable<void> FreeGpt::aiService(std::shared_ptr<Channel> ch, nlo
nlohmann::json rsp = nlohmann::json::parse(res.body(), nullptr, false);
if (rsp.is_discarded()) {
SPDLOG_ERROR("json parse error");
co_await ch->async_send(err, fmt::format("json parse error: {}", res.body()), use_nothrow_awaitable);
co_await ch->async_send(err, std::format("json parse error: {}", res.body()), use_nothrow_awaitable);
co_return;
}
co_await ch->async_send(err, rsp.value("data", rsp.dump()), use_nothrow_awaitable);
Expand Down Expand Up @@ -997,7 +994,7 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
auto user_id = random(16);
auto app_id = random(31);
auto now = std::chrono::time_point_cast<std::chrono::seconds>(std::chrono::system_clock::now());
auto request_date = fmt::format("{:%Y-%m-%dT%H:%M:%S.000Z}", now);
auto request_date = std::format("{:%Y-%m-%dT%H:%M:%S.000Z}", now);

constexpr std::string_view host = "wewordle.org";
constexpr std::string_view port = "443";
Expand Down Expand Up @@ -1048,10 +1045,10 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);

request["user"] = user_id;
request["subscriber"]["originalAppUserId"] = fmt::format("$RCAnonymousID:{}", app_id);
request["subscriber"]["originalAppUserId"] = std::format("$RCAnonymousID:{}", app_id);
request["subscriber"]["firstSeen"] = request_date;
request["subscriber"]["requestDate"] = request_date;
request["messages"][0]["content"] = fmt::format("user: {}\nassistant:", prompt);
request["messages"][0]["content"] = std::format("user: {}\nassistant:", prompt);

SPDLOG_INFO("{}", request.dump(2));

Expand Down Expand Up @@ -1099,12 +1096,12 @@ boost::asio::awaitable<void> FreeGpt::weWordle(std::shared_ptr<Channel> ch, nloh
nlohmann::json rsp = nlohmann::json::parse(res.body(), nullptr, false);
if (rsp.is_discarded()) {
SPDLOG_ERROR("json parse error");
co_await ch->async_send(err, fmt::format("json parse error: {}", res.body()), use_nothrow_awaitable);
co_await ch->async_send(err, std::format("json parse error: {}", res.body()), use_nothrow_awaitable);
co_return;
}
if (!rsp.contains("message")) {
SPDLOG_ERROR("not contains message: {}", rsp.dump());
co_await ch->async_send(err, fmt::format("not contains message : {}", rsp.dump()), use_nothrow_awaitable);
co_await ch->async_send(err, std::format("not contains message : {}", rsp.dump()), use_nothrow_awaitable);
co_return;
}
co_await ch->async_send(err, rsp["message"].value("content", rsp.dump()), use_nothrow_awaitable);
Expand Down
7 changes: 4 additions & 3 deletions src/main.cpp
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
#include <format>
#include <functional>
#include <semaphore>
#include <string>
Expand Down Expand Up @@ -82,7 +83,7 @@ boost::asio::awaitable<void> startSession(boost::asio::ip::tcp::socket sock, Con
if (http_path.back() == '/')
http_path.remove_suffix(1);
if (http_path == cfg.chat_path) {
auto html = createIndexHtml(fmt::format("{}/html/index.html", cfg.client_root_path), cfg);
auto html = createIndexHtml(std::format("{}/html/index.html", cfg.client_root_path), cfg);
boost::beast::http::response<boost::beast::http::string_body> res{boost::beast::http::status::ok,
request.version()};
res.set(boost::beast::http::field::server, BOOST_BEAST_VERSION_STRING);
Expand All @@ -95,7 +96,7 @@ boost::asio::awaitable<void> startSession(boost::asio::ip::tcp::socket sock, Con
} else if (request.target().starts_with(ASSETS_PATH)) {
std::string req_path{request.target()};
req_path.erase(req_path.find(ASSETS_PATH), ASSETS_PATH.length());
auto file = fmt::format("{}{}", cfg.client_root_path, req_path);
auto file = std::format("{}{}", cfg.client_root_path, req_path);
SPDLOG_INFO("load: {}", file);
boost::beast::error_code ec;
boost::beast::http::file_body::value_type body;
Expand Down Expand Up @@ -212,7 +213,6 @@ int main(int argc, char** argv) {
return EXIT_FAILURE;
}
auto& cfg = config.value();
SPDLOG_INFO("cfg.max_http_client_num: {}", cfg.max_http_client_num);
SPDLOG_INFO("cfg.work_thread_num: {}", cfg.work_thread_num);
FreeGpt app{cfg};

Expand Down Expand Up @@ -256,6 +256,7 @@ int main(int argc, char** argv) {
smph_signal_main_to_thread.release();
});
smph_signal_main_to_thread.acquire();
SPDLOG_INFO("stoped ...");
pool.stop();
return EXIT_SUCCESS;
}
Loading
Loading