Skip to content

Commit

Permalink
Events tweaks (#529)
Browse files Browse the repository at this point in the history
  • Loading branch information
sceuick authored Jul 27, 2023
1 parent 0c90351 commit 47f8fa7
Show file tree
Hide file tree
Showing 56 changed files with 1,938 additions and 870 deletions.
1 change: 1 addition & 0 deletions common/adapters.ts
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,7 @@ export const SUPPORTS_INSTRUCT: { [key in AIAdapter]?: (user: AppSchema.User) =>
openai: () => true,
kobold: (opts) => opts.thirdPartyFormat !== 'kobold',
openrouter: () => true,
scale: () => true,
}

/**
Expand Down
7 changes: 5 additions & 2 deletions common/guidance/grammar.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,17 @@ Expression = body:(Variable / Text)+ {
}
Variable "variable" = "[" val:Char+ pipe:Pipe? "]" { return { kind: 'variable', name: val.join(''), pipe } }
Variable "variable" = "[" val:Char+ pipe:Pipe? tokens:TokensPipe? "]" { return { kind: 'variable', name: val.join(''), pipe, tokens } }
Pipe = _ "|" _ pipe:(WordPipe / SentencePipe) _ { return pipe }
Pipe = Sep pipe:(WordPipe / SentencePipe) _ { return pipe }
WordPipe = ("words" / "word"i) _ "=" _ value:Number { return { type: "words", value } }
SentencePipe = "sentence"i { return { type: "sentence" } }
TokensPipe "max-tokens" = Sep "tokens"i _ "=" _ value:Number { return value }
Number "number" = nums:[0-9]+ { return +nums.join('') }
Text "text" = !(Variable) ch:(.) { return ch }
Char = ch:[a-z0-9_-]i { return ch }
Sep = _ "|" _
_ "whitespace" = [ \t]*
`
61 changes: 58 additions & 3 deletions common/guidance/guidance-parser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { grammar } from './grammar'
type PNode = VarNode | TextNode

type TextNode = { kind: 'text'; text: string }
type VarNode = { kind: 'variable'; name: string; pipe?: Pipe }
type VarNode = { kind: 'variable'; name: string; pipe?: Pipe; tokens?: number }

type Pipe = { type: 'sentence' } | { type: 'words'; value: number }

Expand All @@ -15,7 +15,7 @@ const parser = peggy.generate(grammar.trim(), {
})

type GuidanceOpts = {
infer: (prompt: string) => Promise<string>
infer: (prompt: string, maxTokens?: number) => Promise<string>

/**
* Will replace {{holders}} with their corresponding values
Expand All @@ -34,6 +34,61 @@ export function guidanceParse(template: string): PNode[] {
return ast
}

/**
 * Re-infers a subset of a guidance template's variables, re-using previously
 * inferred values (`opts.previous`) for every variable NOT listed in `rerun`.
 *
 * The prompt is rebuilt left-to-right: text nodes and already-known variable
 * values are appended verbatim; each requested variable is inferred against
 * the prompt accumulated so far.
 *
 * @param template Guidance template containing `[variable]` placeholders
 * @param rerun    Names of the variables to re-infer; each must appear in the template
 * @param opts     Inference callback, placeholder values, and previous results
 * @returns `{ values }` — previous values merged with the re-inferred ones
 * @throws  When a requested variable is absent from the template, or a
 *          non-rerun variable has no previous value to substitute
 */
export async function rerunGuidanceValues<
  T extends Record<string, string> = Record<string, string>
>(template: string, rerun: string[], opts: GuidanceOpts) {
  const nodes = guidanceParse(inject(template, opts.placeholders))
  const values = opts.previous || {}

  // Validate up-front so we fail before issuing any inference calls
  for (const name of rerun) {
    const found = nodes.some((node) => node.kind === 'variable' && node.name === name)
    if (!found)
      throw new Error(`Cannot re-run guidance: Requested variable "${name}" is not in template`)
  }

  const done = new Set<string>()

  let prompt = ''
  for (const node of nodes) {
    switch (node.kind) {
      case 'text':
        prompt += node.text
        continue

      case 'variable': {
        const prev = values[node.name]
        if (!rerun.includes(node.name)) {
          if (prev === undefined) {
            throw new Error(`Cannot re-run guidance: Missing previous value "${node.name}"`)
          }
          prompt += prev
          continue
        }

        const results = await opts.infer(prompt.trim(), node.tokens)
        const value = handlePipe(results.trim(), node.pipe)
        values[node.name] = value
        // Fix: append the freshly inferred value to the running prompt so any
        // subsequent variable is inferred with this value in context — this
        // mirrors the `prompt += prev` accumulation for non-rerun variables
        // above; without it later prompts are missing the value at its slot.
        prompt += value
        done.add(node.name)

        // Early exit once every requested variable has been re-inferred
        if (done.size === rerun.length) {
          return { values: values as T }
        }

        continue
      }
    }
  }

  return { values: values as T }
}

export async function runGuidanceChain<T extends Record<string, string> = Record<string, string>>(
templates: string[],
opts: GuidanceOpts
Expand All @@ -47,7 +102,7 @@ export async function runGuidanceChain<T extends Record<string, string> = Record
Object.assign(values, result.values)
}

return values as T
return { values: values as T }
}

export async function runGuidance<T extends Record<string, string> = Record<string, string>>(
Expand Down
150 changes: 90 additions & 60 deletions common/prompt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import { buildMemoryPrompt } from './memory'
import { defaultPresets, getFallbackPreset, isDefaultPreset } from './presets'
import { parseTemplate } from './template-parser'
import { Encoder } from './tokenize'
import { elapsedSince, trimSentence } from './util'
import { elapsedSince, getBotName, trimSentence } from './util'
import { Memory } from './types'

export const SAMPLE_CHAT_MARKER = `System: New conversation started. Previous conversations are examples only.`
Expand Down Expand Up @@ -71,7 +71,7 @@ export type PromptOpts = {
userEmbeds: Memory.UserEmbed[]
}

type BuildPromptOpts = {
export type BuildPromptOpts = {
kind?: GenerateRequestV2['kind']
chat: AppSchema.Chat
char: AppSchema.Character
Expand Down Expand Up @@ -233,10 +233,12 @@ type InjectOpts = {
encoder: Encoder
}

export function injectPlaceholders(
template: string,
{ opts, parts, history: hist, encoder, ...rest }: InjectOpts
) {
export function injectPlaceholders(template: string, inject: InjectOpts) {
// if (!inject.opts.settings?.useTemplateParser) {
// return parseTemplateV1(template, inject)
// }

const { opts, parts, history: hist, encoder, ...rest } = inject
const profile = opts.members.find((mem) => mem.userId === opts.chat.userId)
const sender = opts.impersonate?.name || profile?.handle || 'You'

Expand All @@ -259,56 +261,26 @@ export function injectPlaceholders(
hist.lines = next
}

if (opts.settings?.useTemplateParser) {
try {
const lines = !hist
? []
: hist.order === 'desc'
? hist.lines.slice().reverse()
: hist.lines.slice()
const { adapter, model } = getAdapter(opts.chat, opts.user, opts.settings)

const result = parseTemplate(template, {
...opts,
sender: profile!,
parts,
lines,
...rest,
})
return result
} catch (ex) {}
}
const lines = !hist
? []
: hist.order === 'desc'
? hist.lines.slice().reverse()
: hist.lines.slice()

let prompt = template
// UJB must be first to replace placeholders within the UJB
// Note: for character post-history-instructions, this is off-spec behavior
.replace(HOLDERS.ujb, parts.ujb || '')
.replace(HOLDERS.sampleChat, newline(sampleChat))
.replace(HOLDERS.scenario, parts.scenario || '')
.replace(HOLDERS.memory, newline(parts.memory))
.replace(HOLDERS.persona, parts.persona)
.replace(HOLDERS.impersonating, parts.impersonality || '')
.replace(HOLDERS.allPersonas, parts.allPersonas?.join('\n') || '')
.replace(HOLDERS.post, parts.post.join('\n'))
.replace(HOLDERS.linebreak, '\n')
.replace(HOLDERS.chatAge, elapsedSince(opts.chat.createdAt))
.replace(HOLDERS.idleDuration, elapsedSince(rest.lastMessage || ''))
.replace(HOLDERS.chatEmbed, parts.chatEmbeds.join('\n') || '')
.replace(HOLDERS.userEmbed, parts.userEmbeds.join('\n') || '')
// system prompt should not support other placeholders
.replace(HOLDERS.systemPrompt, newline(parts.systemPrompt))
// All placeholders support {{char}} and {{user}} placeholders therefore these must be last
.replace(BOT_REPLACE, opts.replyAs.name)
.replace(SELF_REPLACE, sender)

if (hist) {
const messages = hist.order === 'asc' ? hist.lines.slice().reverse() : hist.lines.slice()
const { adapter, model } = getAdapter(opts.chat, opts.user, opts.settings)
const maxContext = getContextLimit(opts.settings, adapter, model)
const history = fillPromptWithLines(encoder, maxContext, prompt, messages).reverse()
prompt = prompt.replace(HOLDERS.history, history.join('\n'))
}

return prompt
const result = parseTemplate(template, {
...opts,
sender: profile!,
parts,
lines,
...rest,
limit: {
context: getContextLimit(opts.settings, adapter, model),
encoder,
},
})
return result
}

function removeUnusedPlaceholders(template: string, parts: PromptParts) {
Expand Down Expand Up @@ -602,11 +574,9 @@ function getLinesForPrompt(
? opts.impersonate.name
: profiles.get(msg.userId || opts.chat.userId)?.handle || 'You'

return fillPlaceholders(
msg,
opts.characters[msg.characterId!]?.name || opts.replyAs?.name || char.name,
sender
).trim()
const botName = getBotName(opts.chat, msg, opts.characters, opts.replyAs, char)

return fillPlaceholders(msg, botName, sender).trim()
}

const history = messages.slice().sort(sortMessagesDesc).map(formatMsg)
Expand Down Expand Up @@ -645,6 +615,7 @@ export function fillPromptWithLines(
function fillPlaceholders(chatMsg: AppSchema.ChatMessage, char: string, user: string): string {
const prefix = chatMsg.system ? 'System' : chatMsg.characterId ? char : user
const msg = chatMsg.msg.replace(BOT_REPLACE, char).replace(SELF_REPLACE, user)

return `${prefix}: ${msg}`
}

Expand Down Expand Up @@ -768,7 +739,7 @@ export function getAdapter(
* When we know the maximum context limit for a particular LLM, ensure that the context limit we use does not exceed it.
*/

function getContextLimit(
export function getContextLimit(
gen: Partial<AppSchema.GenSettings> | undefined,
adapter: AIAdapter,
model: string
Expand Down Expand Up @@ -861,6 +832,65 @@ export function trimTokens(opts: TrimOpts) {
return output
}

/**
 * Legacy (V1) string-replacement template renderer: fills the prompt template's
 * {{placeholders}} via sequential regex replacement rather than the parser.
 *
 * NOTE: replacement ORDER is load-bearing — {{ujb}} must be substituted first
 * (so placeholders inside the UJB text are themselves expanded), and
 * {{char}}/{{user}} must be substituted last (all other parts may contain them).
 *
 * Side effect: may mutate `hist.lines` on the caller's object when injecting
 * the sample-chat block.
 *
 * @returns the fully-rendered prompt string
 */
export function parseTemplateV1(
  template: string,
  { opts, parts, history: hist, encoder, ...rest }: InjectOpts
) {
  const profile = opts.members.find((mem) => mem.userId === opts.chat.userId)
  // Sender display name: impersonated character > user's profile handle > 'You'
  const sender = opts.impersonate?.name || profile?.handle || 'You'

  // Automatically inject example conversation if not included in the prompt
  const sampleChat = parts.sampleChat?.join('\n')
  if (!template.match(HOLDERS.sampleChat) && sampleChat && hist) {
    // Remove any previously-injected sample-chat block before re-adding it
    const next = hist.lines.filter((line) => !line.includes(SAMPLE_CHAT_MARKER))

    // NOTE(review): SUPPORTS_INSTRUCT values are predicate functions; this only
    // checks that an entry EXISTS for the service, it does not invoke the
    // predicate — confirm that is intended.
    const postSample =
      opts.settings?.service && SUPPORTS_INSTRUCT[opts.settings.service]
        ? SAMPLE_CHAT_MARKER
        : '<START>'

    const msg = `${SAMPLE_CHAT_PREAMBLE}\n${sampleChat}\n${postSample}`
      .replace(BOT_REPLACE, opts.replyAs.name)
      .replace(SELF_REPLACE, sender)
    // Place the sample chat at the chronologically oldest end of the history
    if (hist.order === 'asc') next.unshift(msg)
    else next.push(msg)

    hist.lines = next
  }

  let prompt = template
    // UJB must be first to replace placeholders within the UJB
    // Note: for character post-history-instructions, this is off-spec behavior
    .replace(HOLDERS.ujb, parts.ujb || '')
    .replace(HOLDERS.sampleChat, newline(sampleChat))
    .replace(HOLDERS.scenario, parts.scenario || '')
    .replace(HOLDERS.memory, newline(parts.memory))
    .replace(HOLDERS.persona, parts.persona)
    .replace(HOLDERS.impersonating, parts.impersonality || '')
    .replace(HOLDERS.allPersonas, parts.allPersonas?.join('\n') || '')
    .replace(HOLDERS.post, parts.post.join('\n'))
    .replace(HOLDERS.linebreak, '\n')
    .replace(HOLDERS.chatAge, elapsedSince(opts.chat.createdAt))
    .replace(HOLDERS.idleDuration, elapsedSince(rest.lastMessage || ''))
    .replace(HOLDERS.chatEmbed, parts.chatEmbeds.join('\n') || '')
    .replace(HOLDERS.userEmbed, parts.userEmbeds.join('\n') || '')
    // system prompt should not support other placeholders
    .replace(HOLDERS.systemPrompt, newline(parts.systemPrompt))
    // All placeholders support {{char}} and {{user}} placeholders therefore these must be last
    .replace(BOT_REPLACE, opts.replyAs.name)
    .replace(SELF_REPLACE, sender)

  if (hist) {
    // Fill {{history}} last: budget lines (newest-first) against the context
    // limit using the already-rendered prompt, then restore oldest-first order.
    const messages = hist.order === 'asc' ? hist.lines.slice().reverse() : hist.lines.slice()
    const { adapter, model } = getAdapter(opts.chat, opts.user, opts.settings)
    const maxContext = getContextLimit(opts.settings, adapter, model)
    const history = fillPromptWithLines(encoder, maxContext, prompt, messages).reverse()
    prompt = prompt.replace(HOLDERS.history, history.join('\n'))
  }

  return prompt
}

function newline(value: string | undefined) {
if (!value) return ''
return '\n' + value
Expand Down
Loading

0 comments on commit 47f8fa7

Please sign in to comment.