From 5e48db93697c4675572b7dcb96ee111f40bb5c7a Mon Sep 17 00:00:00 2001
From: BaimoQilin
Date: Fri, 26 Apr 2024 21:59:07 +0800
Subject: [PATCH] feat!: Basic features

---
 .gitignore           |   6 +++
 config.py            |  23 ++++++++
 config.yaml          |  54 +++++++++++++++++++
 console.py           |  66 +++++++++++++++++++++++
 core.py              | 121 +++++++++++++++++++++++++++++++++++++++++++
 generated/demo.schem | Bin 0 -> 323 bytes
 generated/test.schem | Bin 0 -> 279 bytes
 log_writer.py        |  26 ++++++++++
 requirements.txt     |   3 +
 9 files changed, 299 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 config.py
 create mode 100644 config.yaml
 create mode 100644 console.py
 create mode 100644 core.py
 create mode 100644 generated/demo.schem
 create mode 100644 generated/test.schem
 create mode 100644 log_writer.py
 create mode 100644 requirements.txt

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..55ef229
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,6 @@
+projects/*
+!projects/template/
+
+logs/*
+test.py
+__pycache__
\ No newline at end of file
diff --git a/config.py b/config.py
new file mode 100644
index 0000000..0a21187
--- /dev/null
+++ b/config.py
@@ -0,0 +1,23 @@
+import yaml
+from log_writer import logger
+
+with open("config.yaml", "r") as conf:
+    config_content = yaml.safe_load(conf)
+    for key, value in config_content.items():
+        if key == "GENERATE_MODEL" and value == "gpt-4":
+            value = "gpt-4-turbo-preview"  # Force gpt-4-turbo-preview when GENERATE_MODEL is set to gpt-4, because gpt-4 does not support JSON mode.
+        globals()[key] = value
+        logger(f"config: {key} -> {value}")
+
+def edit_config(key, value):
+    with open("config.yaml", "r") as file:
+        lines = file.readlines()
+
+    for i, line in enumerate(lines):
+        if f"{key}:" in line:
+            lines[i] = line.replace(line.split(":", 1)[1].strip().strip('"'), f"{value}")
+
+    with open("config.yaml", "w") as file:
+        file.writelines(lines)
+
+    logger(f"edit_config: {key} -> {value}")
\ No newline at end of file
diff --git a/config.yaml b/config.yaml
new file mode 100644
index 0000000..f3ca695
--- /dev/null
+++ b/config.yaml
@@ -0,0 +1,54 @@
+# GPT SETTINGS #
+# EDIT REQUIRED
+# Get your API key from OpenAI. Remember, Google/Bing is always your best friend.
+# Model names: gpt-4-turbo-preview, gpt-3.5-turbo, etc.
+# Recommended -> gpt-4-turbo-preview, which codes more accurately and is less likely to write bugs, but is more expensive.
+
+API_KEY: ""
+BASE_URL: "https://api.openai.com/v1" # The API root; the OpenAI client appends the /chat/completions path itself.
+GENERATE_MODEL: "gpt-4-turbo-2024-04-09" # Don't use gpt-4, because that model does not support JSON mode.
+
+# PROMPT SETTINGS #
+# If you don't know what this is, please don't touch it. Be sure to make a backup before editing.
+
+## Structure Generation ##
+SYS_GEN: |
+  You are a Minecraft structure builder bot. You should design a building or a structure based on the user's instructions.
+  Respond in JSON like this:
+  {
+    \"materials\": [
+      \"A: minecraft:air\",
+      \"S: minecraft:stone\",
+      \"G: minecraft:glass\"
+    ],
+    \"structures\": [
+      {
+        \"floor\": 0,
+        \"structure\": \"SSSSSSSS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSSSSSSSS\"
+      },
+      {
+        \"floor\": 1,
+        \"structure\": \"SSGGGGSS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSSSSSSSS\"
+      },
+      {
+        \"floor\": 2,
+        \"structure\": \"SSGGGGSS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSSSSSSSS\"
+      },
+      {
+        \"floor\": 3,
+        \"structure\": \"SSSSSSSS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSAAAAAAS\nSSSSSSSS\"
+      },
+      {
+        \"floor\": 4,
+        \"structure\": \"SSSSSSSS\nSSSSSSSS\nSSSSSSSS\nSSSSSSSS\nSSSSSSSS\nSSSSSSSS\nSSSSSSSS\nSSSSSSSS\nSSSSSSSS\nSSSSSSSS\n\"
+      }
+    ]
+  }
+  Never respond with anything else. Do not design a building that is too large (more than 10 floors). Never use markdown format. Use \n for line feeds. And, for example, if there's a "t" before \", use \\t.
+
+USR_GEN: |
+  %DESCRIPTION%
+
+# Developer Settings #
+DEBUG_MODE: False
+VERSION_NUMBER: "Alpha-1.0" # NEVER EDIT THIS IF YOU DON'T KNOW WHAT YOU ARE DOING
\ No newline at end of file
diff --git a/console.py b/console.py
new file mode 100644
index 0000000..df66807
--- /dev/null
+++ b/console.py
@@ -0,0 +1,66 @@
+import sys
+
+from log_writer import logger
+import core
+import config
+
+def generate_plugin(description):
+    response = core.askgpt(config.SYS_GEN, config.USR_GEN.replace("%DESCRIPTION%", description), config.GENERATE_MODEL)
+
+    schem = core.text_to_schem(response)
+
+    retry_times = 0
+
+    while schem is None and retry_times < 3:
+        logger("JSON syntax error. Regenerating...")
+        print("There's something wrong with the AI's response. Regenerating...")
+        schem = generate_plugin(description)
+        retry_times += 1
+
+    if retry_times == 3:
+        # If the AI fails to generate a valid JSON response three times in a row, stop and ask the user what to do.
+        logger("Too many errors. Failed to regenerate.")
+        print("Failed to generate the schematic. We recommend switching the generating model to gpt-4-turbo-preview or another smarter model.")
+
+        print("""Options:
+        1. Change the generating model to gpt-4-turbo-preview
+        2. Exit the program""")
+        option = input("Please choose an option: ")
+
+        if option == "1":
+            response = core.askgpt(config.SYS_GEN, config.USR_GEN.replace("%DESCRIPTION%", description), "gpt-4-turbo-preview")
+            schem = core.text_to_schem(response)
+            if schem is None:
+                print("Failed to generate the schematic again. This may be caused by a bug in the program or the AI model. Please report this issue at github.com/CubeGPT/BuilderGPT/issues")
+        else:
+            sys.exit(1)
+
+    return schem
+
+if __name__ == "__main__":
+    core.initialize()
+
+    print("Welcome to BuilderGPT, an open source, free, AI-powered Minecraft structure generator developed by BaimoQilin (@Zhou-Shilin). Don't forget to check out the config.yaml configuration file; you need to fill in the OpenAI API key.\n")
+
+    # Get user inputs
+    version = input("[0/2] What's your Minecraft version? (e.g. 1.20.1): ")
+    name = input("[1/2] What's the name of your structure? It will be the name of the generated *.schem file: ")
+    description = input("[2/2] What kind of structure would you like to generate? Describe it as clearly as possible: ")
+
+    # Log user inputs
+    logger(f"console: input version {version}")
+    logger(f"console: input name {name}")
+    logger(f"console: input description {description}")
+
+    print("Generating...")
+
+    schem = generate_plugin(description)
+
+    logger(f"console: Saving {name}.schem to the generated/ folder.")
+    version_tag = core.input_version_to_mcs_tag(version)
+    schem.save("generated", name, version_tag)
+
+    print("Generated. You can find your .schem file in the generated/ folder.")
+
+else:
+    print("Error: Please run console.py as the main program instead of importing it from another program.")
\ No newline at end of file
diff --git a/core.py b/core.py
new file mode 100644
index 0000000..e506dda
--- /dev/null
+++ b/core.py
@@ -0,0 +1,121 @@
+from openai import OpenAI
+import mcschematic
+import sys
+import json
+
+from log_writer import logger
+import config
+
+def initialize():
+    """
+    Initializes the software.
+
+    This function logs the software launch, including the version number and platform.
+
+    Args:
+        None
+
+    Returns:
+        None
+    """
+    logger(f"Launch. Software version {config.VERSION_NUMBER}, platform {sys.platform}")
+
+def askgpt(system_prompt: str, user_prompt: str, model_name: str):
+    """
+    Interacts with ChatGPT using the specified prompts.
+
+    Args:
+        system_prompt (str): The system prompt.
+        user_prompt (str): The user prompt.
+
+    Returns:
+        str: The response from ChatGPT.
+    """
+    client = OpenAI(api_key=config.API_KEY, base_url=config.BASE_URL)
+    logger("Initialized the OpenAI client.")
+
+    # Define the messages for the conversation
+    messages = [
+        {"role": "system", "content": system_prompt},
+        {"role": "user", "content": user_prompt}
+    ]
+
+    logger(f"askgpt: system {system_prompt}")
+    logger(f"askgpt: user {user_prompt}")
+
+    # Create a chat completion
+    response = client.chat.completions.create(
+        model=model_name,
+        response_format={"type": "json_object"},
+        messages=messages
+    )
+
+    logger(f"askgpt: response {response}")
+
+    # Extract the assistant's reply
+    assistant_reply = response.choices[0].message.content
+    logger(f"askgpt: extracted reply {assistant_reply}")
+    return assistant_reply
+
+def text_to_schem(text: str):
+    """
+    Converts a JSON string to a Minecraft schematic.
+
+    Args:
+        text (str): The JSON string to convert.
+
+    Returns:
+        mcschematic.MCSchematic: The Minecraft schematic.
+
+    """
+    try:
+        data = json.loads(text)
+        block_id_dict = {}
+        logger(f"text_to_schem: loaded JSON data {data}")
+        schematic = mcschematic.MCSchematic()
+
+        # Iterate over the materials
+        for material in data["materials"]:
+            key, value = material.split(": ")
+            block_id_dict[key.strip().strip('"')] = value.strip().strip('"')
+
+        # Iterate over the structures
+        for structure in data["structures"]:
+            floor = structure["floor"]
+            structure_data = structure["structure"]
+
+            # Iterate over the rows of the structure
+            rows = structure_data.split("\n")
+
+            for y, row in enumerate(rows):
+                # Iterate over the blocks in each row
+                for x, block_id in enumerate(row):
+                    # Get the corresponding block from the materials dictionary
+                    block = block_id_dict.get(block_id)
+                    if block:
+                        schematic.setBlock((x, floor, y), block)
+        return schematic
+
+    except (json.decoder.JSONDecodeError, KeyError, TypeError, ValueError, AttributeError, IndexError) as e:
+        logger(f"text_to_schem: failed to load JSON data. Error: {e}")
+        return None
+
+def input_version_to_mcs_tag(input_version):
+    """
+    Converts an input version string to the corresponding MCSchematic version tag.
+
+    Args:
+        input_version (str): The input version string in the format "X.Y.Z".
+
+    Returns:
+        str: The MCSchematic tag corresponding to the input version.
+
+    Example:
+        >>> input_version_to_mcs_tag("1.20.1")
+        'JE_1_20_1'
+    """
+    version = input_version.split(".")
+    return getattr(mcschematic.Version, f"JE_{version[0]}_{version[1]}_{version[2]}")
+
+if __name__ == "__main__":
+    print("This script is not meant to be run directly. Please run console.py instead.")
\ No newline at end of file
diff --git a/generated/demo.schem b/generated/demo.schem
new file mode 100644
index 0000000000000000000000000000000000000000..dd575c804ca55f8e54f1a1edb33eec1ba3901f12
GIT binary patch
literal 323
zcmV-J0lfYniwFpLQYvNw|72xtZ!U9VXk~2x%~HWm!axumXppvqc<=}I=Ee8{9#NtP
zh$JL{-iCJCPTX$I?uhai{(-;hmP$pBhJ)|2GjC_!R-mgD;@b0jNY3p{$w}bX7Aw;)i$f8gRn>JLh
EQQk5hSZB=ldHL|xWw(bQwJhyww0nXYQk%ac6*^%9O+D@^h_p2Bg$VcP?HfhYD88u
SubJMqIs=!({|~d!pR+RUI_GjI3bfu0dW_4OHO?h1`yA@j0jEft@ai}|BDDo@7R8iiwK6h
V_Zj<7BuuUu_X){|oKiId004_XoGAbR

literal 0
HcmV?d00001

diff --git a/generated/test.schem b/generated/test.schem
new file mode 100644
index 0000000000000000000000000000000000000000..b1f2fb79a38f1ca7862b6aa0a6277e16f9174351
GIT binary patch
literal 279
zcmV+y0qFi8iwFo4sVimz|8!+@bS`saXk~2xT~I+z!$1sli6Bi-PJF;OE{F$kEL3o4
zM5RhCaB~tTvDj?T#uV}de$8%ysNyy=J09;ig=AB<+>uHdLA+%*NN)k)B!crhQks
7GWe&+bTXv*e!G41HA=;wp_l)m@
z*akPImyX8E0gpm(J!;!qTuI@`jaMi-u&J@)H?|d7MgO=Z!&b%Of}Ig|KEf7iq(JjC
zcI}(O*#k?{D$G1wv2C;s@N_3tKoO*CAEU}Seflo56yjeYf~*tErK4JJNL;82F(eDq
dm#^V^!43&z2Vv=dqQb+$1>XtVjzf9@006l5f7}27

literal 0
HcmV?d00001

diff --git a/log_writer.py b/log_writer.py
new file mode 100644
index 0000000..6b522de
--- /dev/null
+++ b/log_writer.py
@@ -0,0 +1,26 @@
+import os
+from datetime import datetime
+
+first_call_time = None
+
+def get_log_filename():
+    global first_call_time
+
+    if first_call_time is None:
+        first_call_time = datetime.now()
+
+    log_filename = first_call_time.strftime("logs/%b-%d-%H-%M-%S-%Y")
+
+    return log_filename
+
+def logger(text: str):
+    log_filename = get_log_filename()
+
+    timestamp_prefix = datetime.now().strftime("[%H:%M:%S]")
+
+    log_line = f"{timestamp_prefix} {text}\n"
+
+    os.makedirs(os.path.dirname(log_filename), exist_ok=True)
+
+    with open(log_filename + ".log", "a") as log_file:
+        log_file.write(log_line)
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..6e42ce1
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+openai>=1.13.3
+pyyaml
+mcschematic
\ No newline at end of file