Press n or j to go to the next uncovered block, b, p or k for the previous block.
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 | 1x 1x 1x 1x 1x 1x 1x 15x 15x 15x 15x 15x 15x 15x 15x 15x 15x 15x 1x 1x 1x 1x 1x 1x 1x 5x 5x 5x 5x 5x 5x 5x 5x 5x 5x 5x 1x 1x 1x 1x 5x 4x 4x 4x 4x 4x 5x 2x 2x 2x 2x 2x 5x 5x 5x 5x 5x 5x 5x 5x 5x 5x 5x 4x 5x 1x 1x 1x 1x 1x 1x 1x 4x 4x 4x 4x 4x 4x 4x 5x 1x 1x 1x | "use strict";
import Message from "./message.js";
import OpenAI from "openai";
// Client class to interact with OpenAI's API
// Client wraps the OpenAI chat-completions API for in-game conversations.
class Client {
  /**
   * @param {string} apiKey - OpenAI API key.
   * @param {{model?: string, instructions?: string}} [opts] - Optional
   *   overrides for the model name and the developer-instructions prompt.
   */
  constructor(apiKey, opts) {
    // `?? {}` (instead of a default parameter) keeps the original tolerance
    // for callers that explicitly pass null.
    const { model, instructions } = opts ?? {};
    this.opts = {
      model: model || "gpt-5.2",
      instructions:
        instructions ||
        "You are a helpful assistant in a Minecraft world. Answer questions and provide information relevant to the game.",
    };
    this.openAI = new OpenAI({ apiKey });
  }

  /**
   * Send a player's chat message to the OpenAI API and return the reply.
   *
   * The player's conversation history (tracked in `memory`) is replayed in
   * the request so the model has context; on success both the user message
   * and the assistant reply are registered back into memory.
   *
   * @param {object} memory - Per-player conversation store
   *   (exists/initialize/retrieve/register).
   * @param {string} player - Key identifying the player in `memory`.
   * @param {string} message - The player's chat message.
   * @returns {Promise<string>} The assistant's reply.
   * @throws {Error} A wrapped error (original preserved as `cause`) when the
   *   OpenAI API call fails; unknown errors are rethrown unchanged.
   */
  async chat(memory, player, message) {
    // Every request starts with the developer instructions, followed by the
    // conversation history and finally the new user message.
    const params = {
      model: this.opts.model,
      messages: [{ role: "developer", content: this.opts.instructions }],
    };

    // Lazily create the player's conversation on first contact, then fetch it
    // once (the previous code duplicated retrieve() in both branches).
    if (!memory.exists(player)) {
      memory.initialize(player);
    }
    const conversation = memory.retrieve(player);

    // Loop variable deliberately NOT named `message`: the old name shadowed
    // the `message` parameter used a few lines below.
    for (const prior of conversation.getMessages()) {
      params.messages.push({
        role: prior.getRole(),
        content: prior.getContent(),
      });
    }

    const userMessage = new Message("user", message);
    params.messages.push({
      role: userMessage.getRole(),
      content: userMessage.getContent(),
    });

    let reply;
    try {
      const chatCompletion = await this.openAI.chat.completions.create(params);
      reply = chatCompletion.choices[0].message.content;
    } catch (error) {
      if (error instanceof OpenAI.APIError) {
        // Wrap API errors with a readable summary while preserving the
        // original error (and its stack trace) via `cause`.
        throw new Error(
          `An OpenAI error has occurred: ${error.status} ${error.type} ${error.code} ${error.message}`,
          { cause: error },
        );
      }
      throw error;
    }

    // Persist both sides of the exchange only after a successful API call,
    // so a failed request leaves the stored history unchanged.
    memory.register(player, userMessage);
    memory.register(player, new Message("assistant", reply));
    return reply;
  }
}
export { Client as default };
|