From decc4877e7c4943e9344ebfd21ed6b71664ac991 Mon Sep 17 00:00:00 2001
From: jschang19
Date: Sun, 17 Dec 2023 03:45:16 +0800
Subject: [PATCH] feat: implement user interaction

---
 ChatGPT/include/ChatGPT.h |  2 +-
 ChatGPT/include/Game.h    | 54 ++++++++++++++++++++++++++++-------
 ChatGPT/src/ChatGPT.cpp   | 59 ++++++++++++++++++++++++++-------------
 main.cpp                  | 24 ++++++++--------
 4 files changed, 96 insertions(+), 43 deletions(-)

diff --git a/ChatGPT/include/ChatGPT.h b/ChatGPT/include/ChatGPT.h
index 380e7d9..9765881 100644
--- a/ChatGPT/include/ChatGPT.h
+++ b/ChatGPT/include/ChatGPT.h
@@ -15,7 +15,7 @@ namespace OpenAI {
     OpenAI::ChatCompletion askChatGPT(const std::string& role);
     std::string askWhisper(const std::string& audio_path);
     std::vector< Message > prompts;
-    void Add_prompt(const std::string& new_role ,const std::string& new_content);
+    void Add_prompt(const Message& prompt);

 private:
     std::string m_token;
diff --git a/ChatGPT/include/Game.h b/ChatGPT/include/Game.h
index 24b0c9b..9819ef1 100644
--- a/ChatGPT/include/Game.h
+++ b/ChatGPT/include/Game.h
@@ -1,34 +1,68 @@
 #include <string>
 #include <vector>
+#include "ChatGPT.h"
+#include <nlohmann/json.hpp>

 namespace System {
-    struct UserChoice {
-        int story_id;
-        std::string content;
+    struct Option{
+        std::string id;
+        std::string text;
+        Option(std::string id, std::string text) : id(id), text(text) {}; // constructor
     };
-
+
     class Story {
     public:
         int id;
         std::string place;
         std::string content;
-        std::vector< UserChoice > choices;
+        std::string user_choice;
+        bool is_answered = false;
+        std::vector< System::Option > choices;
         Story(int id, std::string place, std::string content) : id(id), place(place), content(content) {}
         ~Story(); // destructor
-        void addChoice(int story_id, std::string content);
     private:
     };
+
     class Game {
     public:
+        int count;
+        int current_count = 0;
+        std::vector<int> story_ids;
+        void addPrompt(OpenAI::ChatGPT& chatGpt, const int story_id){
+            OpenAI::Message prompt = this->generateStoryPrompt(story_id);
+            chatGpt.Add_prompt(prompt);
+        };
+        OpenAI::ChatCompletion sendToChatGPT(OpenAI::ChatGPT& chatGpt){
+            auto response = chatGpt.askChatGPT("user");
+            return response;
+        };
         std::vector<int> picked_story_ids;
-        Game(); // constructor
+        void parseGPTResponse(OpenAI::ChatCompletion& chatCompletion, int story_id){
+            System::Story* story_ptr = this->getStoryPtrById(story_id);
+            nlohmann::json j2;
+            try {
+                j2 = nlohmann::json::parse(chatCompletion.choices[0].message.content);
+                std::vector< System::Option > choices;
+                for (auto& choice : j2["options"]) {
+                    choices.push_back(System::Option(choice["id"], choice["text"]));
+                }
+                this->setOptions(story_id, choices);
+            }catch(std::exception& e){
+                std::cerr<<"Game.h parsing Error: "+chatCompletion.choices[0].message.content;
+            }
+        };
+        Game();
         ~Game(); // destructor
-        std::vector< Story > stories;
+        std::vector< System::Story > stories;
         std::vector< int > getRandStoryIds(int num);
-        Story* getStoryPtrById(int id);
-        std::vector< Story > readTextFile(const std::string& filename);
+        System::Story* getStoryPtrById(int id);
+        OpenAI::Message generateStoryPrompt(int id);
+        void setOptions(int id, const std::vector< System::Option > &choices);
+        void printOptions(int id);
+        bool setUserChoice(int story_id, const std::string& user_choice_id);
     private:
+        std::vector< System::Story > readTextFile(const std::string& filename);
     };
 }
\ No newline at end of file
diff --git a/ChatGPT/src/ChatGPT.cpp b/ChatGPT/src/ChatGPT.cpp
index a90e81b..c234d66 100644
--- a/ChatGPT/src/ChatGPT.cpp
+++ b/ChatGPT/src/ChatGPT.cpp
@@ -22,15 +22,32 @@
 OpenAI::ChatCompletion OpenAI::ChatGPT::askChatGPT(const std::string& role) {
     if (prompt_message==""){ //exception handling
         throw std::invalid_argument("Error:there is no prompt message, please use Add_prompts() to add prompt in Chatgpt");
     }
+    nlohmann::json j;
+    j["model"] = "gpt-4-1106-preview";
+    j["messages"] = nlohmann::json::parse("[" + prompt_message + "]");
+    j["response_format"] = {{"type", "json_object"}};
+    j["temperature"] = 1;
+    j["max_tokens"] = 1000;
+    j["n"] = 1;
+
+    std::cout<< j.dump() << std::endl;

-    auto json="{\n"
-              "  \"model\": \"gpt-3.5-turbo\",\n"
-              "  \"messages\": ["+ prompt_message +"]\n"
-              "}";
+    auto response = cpr::Post(cpr::Url{m_link},
+                              cpr::Body{j.dump()},
+                              cpr::Bearer({m_token}),
+                              cpr::Header{{"Content-Type", "application/json"}}).text;

-    auto response = cpr::Post(cpr::Url{m_link},cpr::Body{json},cpr::Bearer({m_token}),cpr::Header{{"Content-Type","application/json"}}).text;
     OpenAI::ChatCompletion chatCompletion;
-    nlohmann::json j;
+    try {
+        nlohmann::json j_response = nlohmann::json::parse(response);
+        if (!j_response.contains("error")) {
+            from_json(j_response, chatCompletion);
+        } else {
+            throw OpenAI::Error{j_response.dump()};
+        }
+    } catch (std::exception& e) {
+        std::cerr << "Error: " << e.what() << std::endl;
+    }
     try {
         j = nlohmann::json::parse(response);
     }catch(std::exception& e){
@@ -41,14 +58,15 @@ OpenAI::ChatCompletion OpenAI::ChatGPT::askChatGPT(const std::string& role) {
     }else{
         throw OpenAI::Error{j.dump()};
     }
-
-    //adding the respond to this->prompts
-    std::string bot_response_string="";
-    for(const auto& choice:chatCompletion.choices){
-        bot_response_string+=choice.message.content;
+    // load chatCompletion.content as json
+    nlohmann::json j2;
+    try {
+        j2 = nlohmann::json::parse(chatCompletion.choices[0].message.content);
+    }catch(std::exception& e){
+        std::cerr<<"parsing j2 Error: "+chatCompletion.choices[0].message.content;
     }
-    this->Add_prompt("system",bot_response_string);
-
+    // add chatCompletion.choices[0].message.content into prompts
+    // this->Add_prompt(OpenAI::Message("assistant", j2["options"]));
     return chatCompletion;
 }
@@ -75,19 +93,22 @@ std::string OpenAI::ChatGPT::askWhisper(const std::string &audio_path) {
     if(j.contains("error")) {
         throw OpenAI::Error{j.dump()};
     }
+    return j["text"];
 }

-void OpenAI::ChatGPT::Add_prompt(const std::string& new_role ,const std::string& new_content){
-    Message new_prompt(new_role, new_content);
+void OpenAI::ChatGPT::Add_prompt(const Message& new_prompt){
     this->prompts.push_back(new_prompt);
 }

 std::string OpenAI::ChatGPT::PromptsToStringContent(){
-    std :: string return_string="";
+    std::string return_string="{\"role\": \"system\", \"content\": \"You are a text game system that generates interesting options to make young college players choose and laugh. You must produce content in Traditional Chinese.\"},";
     for(int i=0; i<this->prompts.size(); i++){
-        return_string += " {\"role\": \"" + this->prompts[i].role + "\" , \"content\": \"" + this->prompts[i].content + "\" }";
-
+        return_string += " {\"role\": \"" + this->prompts[i].role + "\" , \"content\": \"" + this->prompts[i].content;
+        if (i == this->prompts.size()-1){
+            return_string += ",幫我想 4 個好笑且具有創意的遊戲情境讓我選擇,並確保選項都跟之前我的選擇行動有關,都以「 你 」當作開頭,都以句號結尾,請用一個 key 叫 options 的 JSON 物件格式回覆我,而這個 options 的 value 是一個 Array,每個陣列元素要包含 id 跟 text 兩個 key,id 代號為 a, b, c, d";
+        }
+        return_string += "\"}";
         // handle the newline separators between messages
         if (i != this->prompts.size()-1 ){
             return_string+=",\n";
@@ -95,7 +116,6 @@ std::string OpenAI::ChatGPT::PromptsToStringContent(){
             return_string+="\n";
         }
     }
-
     return return_string;
 }
diff --git a/main.cpp b/main.cpp
index 0fdc7d5..2ad6e03 100644
--- a/main.cpp
+++ b/main.cpp
@@ -1,7 +1,6 @@
 #include <iostream>
 #include <string>
 #include <vector>
-#include "ChatGPT/include/ChatGPT.h"
 #include "ChatGPT/include/Error.h"
 #include "ChatGPT/include/Game.h"
 // this is the main function
@@ -16,29 +15,30 @@ int main(int args,char** argv){
         return 0;
     }
     System::Game game;
+    game.count=STORY_NUM;
     OpenAI::ChatGPT chatGpt{argv[1]};

     // get random story ids
     std::vector<int> story_ids = game.getRandStoryIds(STORY_NUM);
+    game.story_ids = story_ids;

     // get story pointers
     std::vector<System::Story*> story_ptrs;
     try {
         for (int i=0; i
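
---
Note (reviewer sketch, not part of the patch): askChatGPT now sets "response_format": {"type", "json_object"} and the prompt built in PromptsToStringContent asks the model for an "options" array of {id, text} objects, which Game::parseGPTResponse then walks. The standalone snippet below is a minimal sketch of that expected payload shape and of how it is consumed with nlohmann::json; it assumes only the nlohmann/json library, the local Option struct is a stand-in for System::Option, and the sample option texts are invented for illustration.

#include <iostream>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>

// Minimal stand-in for System::Option (id = "a".."d", text = the option wording).
struct Option {
    std::string id;
    std::string text;
};

int main() {
    // Hypothetical model output; in the patch this string comes from
    // chatCompletion.choices[0].message.content.
    const std::string content = R"({
        "options": [
            {"id": "a", "text": "You skip class and wander into the night market."},
            {"id": "b", "text": "You stay in the dorm and wrestle with your final report."}
        ]
    })";

    std::vector<Option> choices;
    try {
        const nlohmann::json j = nlohmann::json::parse(content);
        for (const auto& c : j.at("options")) {
            // Each array element must carry the "id" and "text" keys requested in the prompt.
            choices.push_back({c.at("id").get<std::string>(),
                               c.at("text").get<std::string>()});
        }
    } catch (const std::exception& e) {
        // Same failure mode parseGPTResponse guards against: non-JSON content or missing keys.
        std::cerr << "parsing error: " << e.what() << '\n';
        return 1;
    }

    for (const auto& c : choices) {
        std::cout << c.id << ") " << c.text << '\n';
    }
    return 0;
}

A reply that ignores the requested format (plain prose instead of a JSON object with "options") ends up in the catch branch, which is why parseGPTResponse logs the raw message content before giving up.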