Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Context Templates #36

Draft
wants to merge 4 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions Inneal/Models/SwiftDataModels.swift
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,12 @@ enum Services: String, Codable, CaseIterable, Identifiable {
var id: Self { self }
}

/// The prompt-format template used when building the context sent to the model.
/// `rawValue` is the user-facing display name shown in pickers; persistence in
/// `Chat.contextTemplate` uses `storageValue` instead, where `nil` means automatic.
enum ContextTemplate: String, Codable, CaseIterable, Identifiable {
    case automatic = "Automatic"
    case llama3Instruct = "Llama 3"
    var id: Self { self }

    /// The string persisted in `Chat.contextTemplate` for this template.
    /// Returns `nil` for `.automatic` (stored as an absent value). Centralizes
    /// the "llama3" mapping otherwise duplicated at each save/load call site.
    var storageValue: String? {
        switch self {
        case .automatic:
            return nil
        case .llama3Instruct:
            return "llama3"
        }
    }

    /// Inverse of `storageValue`. Unknown or `nil` stored values fall back to
    /// `.automatic`, matching the existing load behavior in ChatSettingsView.
    init(storageValue: String?) {
        self = storageValue == "llama3" ? .llama3Instruct : .automatic
    }
}

@Model
class UserSettings {
var userCharacter: Character?
Expand Down Expand Up @@ -92,6 +98,10 @@ class Chat {
var preferredContextWindow: PreferredContextWindow = PreferredContextWindow.large
var preferredResponseSize: PreferredResponseSize = PreferredResponseSize.small

// Context

var contextTemplate: String?

// Computed Properties

var unwrappedMessages: [ChatMessage] {
Expand Down
68 changes: 44 additions & 24 deletions Inneal/Views/Chat/ChatSettingsView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ struct ChatSettingsView: View {

@State var settingsMode: SettingsMode = .basic
@State var customUserName: String = ""
@State var contextTemplate: ContextTemplate = .automatic

var body: some View {
NavigationStack {
Expand Down Expand Up @@ -255,6 +256,15 @@ struct ChatSettingsView: View {
Section(footer: Text("Whether to allow multiple lines in AI responses. Disable this if the AI starts generating rubbish.")) {
Toggle("Multiline Replies", isOn: $chat.allowMultilineReplies)
}

Section {
Picker("Context Template", selection: $contextTemplate) {
ForEach(ContextTemplate.allCases) { template in
Text(template.rawValue)
}
}
}

Section(footer: Text("Randomness of sampling. High values can increase creativity but may make text less sensible. Lower values will make text more predictable but can become repetitious.")) {
LabeledContent("Temperature", value: String(format: "%.1f", hordeParams.temperature))
Slider(value: $hordeParams.temperature, in: 0.1 ... 2, step: 0.1)
Expand Down Expand Up @@ -315,30 +325,6 @@ struct ChatSettingsView: View {
ProgressView()
ProgressView()
}
}.onAppear {
if currentHordeConfigObject == nil {
do {
let descriptor = FetchDescriptor<APIConfiguration>(predicate: #Predicate { $0.serviceName == "horde" })
let configurations = try modelContext.fetch(descriptor)
if configurations.isEmpty {
Log.debug("Configs empty, creating")
let baseHordeConfig = APIConfiguration(serviceName: "horde", configurationData: "0000000000".data(using: .utf8)!)
modelContext.insert(baseHordeConfig)
currentHordeConfigObject = baseHordeConfig
viewModel.currentKudos = nil
viewModel.currentUserName = nil
} else if let hordeConfig = configurations.first, let configData = hordeConfig.configurationData, let keyString = String(data: configData, encoding: .utf8) {
Log.debug("Found config, loading in")
viewModel.apiKey = keyString
currentHordeConfigObject = hordeConfig
viewModel.currentKudos = nil
viewModel.currentUserName = nil
}
viewModel.onAppear()
} catch {
fatalError("Unable to find or create AI Horde config")
}
}
}
Link("Visit AIHorde.net", destination: URL(string: "https://aihorde.net")!)
}
Expand Down Expand Up @@ -376,9 +362,37 @@ struct ChatSettingsView: View {
#endif
.onAppear(perform: {
customUserName = chat.userName ?? ""
if chat.contextTemplate == "llama3" {
contextTemplate = .llama3Instruct
} else {
contextTemplate = .automatic
}
Task {
await viewModel.populateModels()
}
if currentHordeConfigObject == nil {
do {
let descriptor = FetchDescriptor<APIConfiguration>(predicate: #Predicate { $0.serviceName == "horde" })
let configurations = try modelContext.fetch(descriptor)
if configurations.isEmpty {
Log.debug("Configs empty, creating")
let baseHordeConfig = APIConfiguration(serviceName: "horde", configurationData: "0000000000".data(using: .utf8)!)
modelContext.insert(baseHordeConfig)
currentHordeConfigObject = baseHordeConfig
viewModel.currentKudos = nil
viewModel.currentUserName = nil
} else if let hordeConfig = configurations.first, let configData = hordeConfig.configurationData, let keyString = String(data: configData, encoding: .utf8) {
Log.debug("Found config, loading in")
viewModel.apiKey = keyString
currentHordeConfigObject = hordeConfig
viewModel.currentKudos = nil
viewModel.currentUserName = nil
}
viewModel.onAppear()
} catch {
fatalError("Unable to find or create AI Horde config")
}
}
})
}
}
Expand All @@ -389,6 +403,12 @@ struct ChatSettingsView: View {
let hordeSettingsToSave = HordeRequest(prompt: "", params: hordeParams, models: hordeRequest.models, workers: hordeRequest.workers)
let serializedSettings = try JSONEncoder().encode(hordeSettingsToSave)
chat.hordeSettings = serializedSettings
switch contextTemplate {
case .automatic:
chat.contextTemplate = nil
case .llama3Instruct:
chat.contextTemplate = "llama3"
}
} catch {
Log.error("Unable to save settings to chat: \(error)")
}
Expand Down
145 changes: 106 additions & 39 deletions Inneal/Views/Chat/ViewModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -133,24 +133,53 @@ extension ChatView {
var eligibleModels = baseHordeRequest.models
var eligibleWorkers = baseHordeRequest.workers

var permanentPrompt = "## {{char}}\n- You're \"{{char}}\" in this never-ending roleplay with \"{{user}}\".\n### Input:\n"
if chat.unwrappedCharacters.count > 1 {
let presentCharacters = chat.unwrappedCharacters.filter { $0 != character }
let characterNames = presentCharacters.compactMap({"\"\($0.name)\""}).joined(separator: ", ")
permanentPrompt = "## {{char}}\n- You're \"{{char}}\" in this never-ending roleplay with \"{{user}}\", \(characterNames).\n### Input:\n"
var contextTemplate: ContextTemplate = .automatic
if chat.contextTemplate == "llama3" {
contextTemplate = .llama3Instruct
}
permanentPrompt += character.characterDescription.isEmpty ? "" : "\(character.characterDescription)\n"
permanentPrompt += character.personality.isEmpty ? "" : "{{char}}'s personality: \(character.personality)\n"
permanentPrompt += character.scenario.isEmpty ? "" : "Scenario: \(character.scenario)\n"

var postHistoryPrompt = character.postHistoryInstructions.isEmpty ? "" : "\(character.postHistoryInstructions.replacingOccurrences(of: "{{original}}", with: "").trimmingCharacters(in: NSCharacterSet.whitespacesAndNewlines))\n"
// MARK: - Build permanent prompt
var permanentPrompt = ""
var postHistoryPrompt = ""

if let userCharacter, userCharacter != character {
permanentPrompt += "\n"
permanentPrompt += "\(userCharacter.characterDescription.swapPlaceholders(userName: character.name, charName: userCharacter.name, userSettings: userSettings))\n"
}
if contextTemplate == .llama3Instruct {
permanentPrompt += "<|start_header_id|>system<|end_header_id|>\n\nYou are an expert actor that can fully immerse yourself into any role given. You do not break character for any reason, even if someone tries addressing you as an AI or language model. Currently your role is {{char}}, which is described in detail below. As {{char}}, continue the exchange with {{user}}.\n"
if chat.unwrappedCharacters.count > 1 {
let presentCharacters = chat.unwrappedCharacters.filter { $0 != character }
let characterNames = presentCharacters.compactMap({"\($0.name)"}).joined(separator: ", ")
permanentPrompt += "<|start_header_id|>system<|end_header_id|>\n\nYou are an expert actor that can fully immerse yourself into any role given. You do not break character for any reason, even if someone tries addressing you as an AI or language model. Currently your role is {{char}}, which is described in detail below. As {{char}}, continue the exchange with {{user}}, \(characterNames).\n"
}
permanentPrompt += character.characterDescription.isEmpty ? "" : "\(character.characterDescription)\n"
permanentPrompt += character.personality.isEmpty ? "" : "{{char}}'s personality: \(character.personality)\n"
permanentPrompt += character.scenario.isEmpty ? "" : "Scenario: \(character.scenario)\n\n"

postHistoryPrompt += character.postHistoryInstructions.isEmpty ? "" : "<|start_header_id|>[System]<|end_header_id|>\n\n\(character.postHistoryInstructions.replacingOccurrences(of: "{{original}}", with: "").trimmingCharacters(in: NSCharacterSet.whitespacesAndNewlines))<|eot_id|>"

if let userCharacter, userCharacter != character {
permanentPrompt += "[\(userCharacter.characterDescription.swapPlaceholders(userName: character.name, charName: userCharacter.name, userSettings: userSettings))]\n"
}
permanentPrompt += "<|eot_id|>"
} else {
// MARK - Default
permanentPrompt += "## {{char}}\n- You're \"{{char}}\" in this never-ending roleplay with \"{{user}}\".\n### Input:\n"
if chat.unwrappedCharacters.count > 1 {
let presentCharacters = chat.unwrappedCharacters.filter { $0 != character }
let characterNames = presentCharacters.compactMap({"\"\($0.name)\""}).joined(separator: ", ")
permanentPrompt = "## {{char}}\n- You're \"{{char}}\" in this never-ending roleplay with \"{{user}}\", \(characterNames).\n### Input:\n"
}
permanentPrompt += character.characterDescription.isEmpty ? "" : "\(character.characterDescription)\n"
permanentPrompt += character.personality.isEmpty ? "" : "{{char}}'s personality: \(character.personality)\n"
permanentPrompt += character.scenario.isEmpty ? "" : "Scenario: \(character.scenario)\n"

permanentPrompt += "### Response:\n(OOC) Understood. I will take this info into account for the roleplay. (end OOC)"
postHistoryPrompt += character.postHistoryInstructions.isEmpty ? "" : "\(character.postHistoryInstructions.replacingOccurrences(of: "{{original}}", with: "").trimmingCharacters(in: NSCharacterSet.whitespacesAndNewlines))\n"

if let userCharacter, userCharacter != character {
permanentPrompt += "\n"
permanentPrompt += "\(userCharacter.characterDescription.swapPlaceholders(userName: character.name, charName: userCharacter.name, userSettings: userSettings))\n"
}

permanentPrompt += "### Response:\n(OOC) Understood. I will take this info into account for the roleplay. (end OOC)"
}

let permanentTokens = countTokens(permanentPrompt) + countTokens(postHistoryPrompt)
Log.debug("Permanent tokens: \(permanentTokens)")
Expand Down Expand Up @@ -283,60 +312,98 @@ extension ChatView {
}

var prompt = permanentPrompt
prompt += "\n\n"
var currentTokenCount = permanentTokens
Log.debug("Permanent prompt applied, \(currentTokenCount) tokens used, \(maxContentLength - currentTokenCount) remain.")

// MARK: - Build Message History
var messageHistory = ""
if contentAlternate {
_ = history.removeFirst()
}
for m in history {
let message = "\(m.fromUser ? "{{user}}" : "{{char}}"): \(m.content)\n".swapPlaceholders(userName: userName, charName: m.character?.name, userSettings: userSettings)
let tokens = countTokens(message)
if (maxContentLength - (currentTokenCount + tokens)) >= 0 {
messageHistory = "\(message)\(messageHistory)"
currentTokenCount += tokens
if contextTemplate == .llama3Instruct {
let message = "<|start_header_id|>[\(m.fromUser ? "{{user}}" : "{{char}}")]<|end_header_id|>\n\n\(m.content)<|eot_id|>".swapPlaceholders(userName: userName, charName: m.character?.name, userSettings: userSettings)
let tokens = countTokens(message)
if (maxContentLength - (currentTokenCount + tokens)) >= 0 {
messageHistory = "\(message)\(messageHistory)"
currentTokenCount += tokens
}
} else {
let message = "\(m.fromUser ? "{{user}}" : "{{char}}"): \(m.content)\n".swapPlaceholders(userName: userName, charName: m.character?.name, userSettings: userSettings)
let tokens = countTokens(message)
if (maxContentLength - (currentTokenCount + tokens)) >= 0 {
messageHistory = "\(message)\(messageHistory)"
currentTokenCount += tokens
}
}
}
Log.debug("Built message history, \(currentTokenCount) tokens used, \(maxContentLength - currentTokenCount) remain.")

// MARK: - Example Message History
var exampleMessageHistory = ""
var exampleChats = character.exampleMessage.components(separatedBy: "<START>")
exampleChats = exampleChats.map { $0.trimmingCharacters(in: .whitespacesAndNewlines) }.filter { !$0.isEmpty }
for m in exampleChats {
let tokens = countTokens(m)
if (maxContentLength - (currentTokenCount + tokens)) >= 0 {
exampleMessageHistory.append("### New Roleplay:\n\(m)\n")
currentTokenCount += tokens

if contextTemplate != .llama3Instruct {
var exampleChats = character.exampleMessage.components(separatedBy: "<START>")
exampleChats = exampleChats.map { $0.trimmingCharacters(in: .whitespacesAndNewlines) }.filter { !$0.isEmpty }
for m in exampleChats {
let tokens = countTokens(m)
if (maxContentLength - (currentTokenCount + tokens)) >= 0 {
exampleMessageHistory.append("### New Roleplay:\n\(m)\n")
currentTokenCount += tokens
}
}
}
Log.debug("Built message examples, \(currentTokenCount) tokens used, \(maxContentLength - currentTokenCount) remain.")


if !exampleMessageHistory.isEmpty {
prompt.append(exampleMessageHistory)
prompt.append("\n")
}

prompt.append("### New Roleplay:\n")
prompt.append(messageHistory)
prompt.append(postHistoryPrompt)

if imitation {
prompt.append("{{user}}:")
if contextTemplate == .llama3Instruct {
prompt.append(messageHistory)
prompt.append(postHistoryPrompt)
if imitation {
prompt.append("<|start_header_id|>[{{user}}]<|end_header_id|>\n")
} else {
prompt.append("<|start_header_id|>[{{char}}]<|end_header_id|>\n")
}
} else {
prompt.append("{{char}}:")
prompt.append("### New Roleplay:\n")
prompt.append(messageHistory)
prompt.append(postHistoryPrompt)

if imitation {
prompt.append("{{user}}:")
} else {
prompt.append("{{char}}:")
}
}

prompt = prompt.swapPlaceholders(userName: userName, charName: character.name, userSettings: userSettings)

Log.debug("Total token count: \(countTokens(prompt))")

var stopSequence = ["{{user}}:", "\n{{user}} "]
for character in chat.characters ?? [] {
stopSequence.append("\n\(character.name): ")
character.name.components(separatedBy: .whitespaces).forEach { namePart in
stopSequence.append("\n\(namePart): ")
// MARK: - Build Stop Sequences
var stopSequence: [String] = []
if contextTemplate == .llama3Instruct {
stopSequence.append(contentsOf: ["\n{{user}}:", "\n{{user}}", "<|eot_id|>", "<|start_header_id|>[{{user}}]<|end_header_id|>", "<|start_header_id|>system<|end_header_id|>"])
for character in chat.characters ?? [] {
stopSequence.append("\n\(character.name):")
stopSequence.append("<|start_header_id|>[\(character.name)]<|end_header_id|>")
character.name.components(separatedBy: .whitespaces).forEach { namePart in
stopSequence.append("\n\(namePart): ")
stopSequence.append("<|start_header_id|>[\(namePart)]<|end_header_id|>")
}
}
} else {
stopSequence.append(contentsOf: ["{{user}}:", "\n{{user}} "])
for character in chat.characters ?? [] {
stopSequence.append("\n\(character.name): ")
character.name.components(separatedBy: .whitespaces).forEach { namePart in
stopSequence.append("\n\(namePart): ")
}
}
}

Expand Down