Skip to content

Commit

Permalink
feat(sense): basic llm threads support (#1316)
Browse files Browse the repository at this point in the history
Co-authored-by: Cheslav Zhuravsky <[email protected]>
  • Loading branch information
cyborgshead and happylolonly authored Oct 18, 2024
1 parent 4857bd5 commit 67d8e7b
Show file tree
Hide file tree
Showing 30 changed files with 2,302 additions and 183 deletions.
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
"@graphql-codegen/typescript-react-apollo": "^4.3.0",
"@keplr-wallet/types": "^0.11.52",
"@milkdown/kit": "^7.5.0",
"@octokit/types": "^13.6.1",
"@pmmmwh/react-refresh-webpack-plugin": "^0.5.10",
"@rjsf/core": "^3.2.1",
"@storybook/addon-essentials": "^7.2.3",
Expand Down Expand Up @@ -73,7 +74,6 @@
"constants-browserify": "^1.0.0",
"copy-webpack-plugin": "^11.0.0",
"cosmjs-types": "^0.9.0",
"@octokit/types": "^13.6.1",
"cross-env": "^6.0.3",
"css-loader": "^6.7.3",
"css-minimizer-webpack-plugin": "^4.0.0",
Expand Down Expand Up @@ -234,6 +234,7 @@
"lodash": "^4.17.21",
"long": "^5.2.0",
"node-polyfill-webpack-plugin": "^2.0.1",
"openai": "^4.67.1",
"process": "^0.11.10",
"query-string": "^6.12.1",
"ramda": "^0.29.0",
Expand Down
2 changes: 1 addition & 1 deletion src/constants/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { Networks } from 'src/types/networks';
import defaultNetworks from './defaultNetworks';

// FIXME: seems temp
function isWorker() {
export function isWorker() {
return (
typeof WorkerGlobalScope !== 'undefined' &&
self instanceof WorkerGlobalScope
Expand Down
37 changes: 24 additions & 13 deletions src/containers/Search/LLMSpark/LLMSpark.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -11,16 +11,17 @@ import Loader2 from 'src/components/ui/Loader2';
import useGetIPFSHash from 'src/features/ipfs/hooks/useGetIPFSHash';
import { isCID } from 'src/utils/ipfs/helpers';
import { isDevEnv } from 'src/utils/dev';
import AdviserHoverWrapper from 'src/features/adviser/AdviserHoverWrapper/AdviserHoverWrapper';
import { testVar } from '.';
import styles from './LLMSpark.module.scss';

// WIP

const provider = createOpenRouter({
// SECURITY(review): this ships an OpenRouter API key in client-side code —
// `testVar` is only base64-obscured and the key is trivially recoverable from
// the bundle (the 'a' + 'piK' + 'ey' split only dodges naive scanners).
// Move the key behind a backend proxy or per-user settings.
export const provider = createOpenRouter({
  ['a' + 'piK' + 'ey']: `sk-or-v1-${atob(testVar)}`,
});

const modelName = isDevEnv()
// Model routing: free Llama-3 8B tier in dev builds, GPT-4o-mini in production.
export const modelName = isDevEnv()
  ? 'meta-llama/llama-3-8b-instruct:free'
  : 'openai/gpt-4o-mini';

Expand Down Expand Up @@ -61,24 +62,34 @@ export function useIsLLMPageParam() {
return isLLM;
}

export function LLMAvatar() {
/**
 * Avatar for the currently selected LLM model: a robohash image (seeded by
 * the model name) linking to the LLM settings page, with the model id shown
 * next to it unless `onlyImg` is set.
 */
export function LLMAvatar({ onlyImg = false }: { onlyImg?: boolean }) {
  return (
    <div
      style={{
        display: 'flex',
        alignItems: 'center',
      }}
    >
      <AdviserHoverWrapper adviserContent={modelName}>
        <Link
          to={`${routes.settings.path}/llm`}
          style={{
            display: 'flex',
            alignItems: 'center',
          }}
        >
          <img
            alt={modelName}
            src={`https://robohash.org/${modelName}`}
            style={{
              width: '20px',
              height: '20px',
              borderRadius: '50%',
              marginRight: '5px',
            }}
          />
          {/* NOTE(review): `model` is defined outside this view (presumably the
              SDK model instance) — confirm it stays in sync with `modelName`. */}
          {!onlyImg && model.modelId}
        </Link>
      </AdviserHoverWrapper>
    </div>
  );
}
Expand Down
1 change: 1 addition & 0 deletions src/containers/ipfs/components/metaInfo.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ function RowItem({ text, value }) {
);
}

// TODO: seems not used and need to be removed
function MetaInfo({ data, cid }) {
try {
if (!data) {
Expand Down
124 changes: 118 additions & 6 deletions src/features/sense/redux/sense.redux.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,16 +9,17 @@ import {
SenseItemLinkMeta,
SenseListItem,
SenseListItemTransactionMeta,
SenseUnread,
} from 'src/services/backend/types/sense';
import { isParticle } from '../../particle/utils';
import { SenseItemId } from '../types/sense';
import { EntryType } from 'src/services/CozoDb/types/entities';
import {
MsgMultiSendValue,
MsgSendValue,
} from 'src/services/backend/services/indexer/types';
import { RootState } from 'src/redux/store';
import { SenseItemId } from '../types/sense';
import { isParticle } from '../../particle/utils';
import { isWorker } from 'src/constants/config';

// similar to blockchain/tx/message type
export type SenseItem = {
Expand Down Expand Up @@ -62,6 +63,11 @@ type SliceState = {
neurons: number;
};
};
llm: {
    // LLM chat state: list of conversation threads
threads: LLMThread[];
currentThreadId: string | null; // Keep track of the currently selected thread
};
};

const initialState: SliceState = {
Expand All @@ -78,6 +84,13 @@ const initialState: SliceState = {
neurons: 0,
},
},
llm: {
    // Rehydrate persisted threads; skip in workers, where localStorage is absent
threads: !isWorker()
? (JSON.parse(localStorage.getItem('llmThreads') || '[]') as LLMThread[])
: [],
currentThreadId: null, // Keep track of the currently selected thread
},
};

function formatApiData(item: SenseListItem): SenseItem {
Expand Down Expand Up @@ -137,7 +150,7 @@ function formatApiData(item: SenseListItem): SenseItem {
Object.assign(formatted, {
type: 'cyber.graph.v1beta1.MsgCyberlink',
from: meta.neuron,
meta: meta,
meta,
fromLog: true,
});

Expand Down Expand Up @@ -224,6 +237,20 @@ function checkIfMessageExists(chat: Chat, newMessage: SenseItem) {
return isMessageExists;
}

/** One chat message inside an LLM conversation thread. */
export interface LLMMessage {
  /** Raw message text (or a transient placeholder such as "waiting..."). */
  text: string;
  /** Author of the message. */
  sender: 'user' | 'llm';
  /** Unix epoch milliseconds when the message was produced. */
  timestamp: number;
}

/** A persisted LLM conversation (stored in localStorage as `llmThreads`). */
export interface LLMThread {
  /** Unique thread id (uuid). */
  id: string;
  /** Human-readable name; auto-generated ("Conversation N") when omitted. */
  title?: string;
  /** Epoch ms of the latest message/update, used for ordering. */
  dateUpdated: number;
  /** Messages in chronological order. */
  messages: LLMMessage[];
}

const slice = createSlice({
name: 'sense',
initialState,
Expand Down Expand Up @@ -332,6 +359,81 @@ const slice = createSlice({
reset() {
return initialState;
},
// LLM reducers
/**
 * Creates a new LLM thread, selects it as current, and persists the list.
 * Falls back to an auto-numbered "Conversation N" title when none is given.
 * NOTE(review): Date.now() in a reducer is impure; acceptable here but
 * consider moving timestamping into action creators.
 */
createLLMThread(
  state,
  action: PayloadAction<{ id: string; title?: string }>
) {
  const newThread: LLMThread = {
    id: action.payload.id,
    messages: [],
    dateUpdated: Date.now(),
    title:
      action.payload.title ||
      `Conversation ${state.llm.threads.length + 1}`,
  };
  state.llm.threads.push(newThread);
  state.llm.currentThreadId = action.payload.id;
  // localStorage does not exist in worker contexts — the initial-state
  // read guards with isWorker(); the write must match that guard.
  if (!isWorker()) {
    localStorage.setItem('llmThreads', JSON.stringify(state.llm.threads));
  }
},

/** Marks the given thread as the active one (selection is not persisted). */
selectLLMThread(state, action: PayloadAction<{ id: string }>) {
  const { id } = action.payload;
  state.llm.currentThreadId = id;
},

/**
 * Appends a message to the given thread, bumps the thread's dateUpdated to
 * the message timestamp, and persists. Silently no-ops when the thread id
 * is unknown.
 */
addLLMMessageToThread(
  state,
  action: PayloadAction<{ threadId: string; message: LLMMessage }>
) {
  const thread = state.llm.threads.find(
    (t) => t.id === action.payload.threadId
  );
  if (thread) {
    thread.messages.push(action.payload.message);
    thread.dateUpdated = action.payload.message.timestamp;
    // Guard: localStorage is unavailable in worker contexts.
    if (!isWorker()) {
      localStorage.setItem('llmThreads', JSON.stringify(state.llm.threads));
    }
  }
},

/**
 * Replaces the last message in a thread — used to swap the "waiting..."
 * placeholder for the real LLM response (or an error message).
 * No-ops when the thread is missing or empty.
 */
replaceLastLLMMessageInThread(
  state,
  action: PayloadAction<{ threadId: string; message: LLMMessage }>
) {
  const thread = state.llm.threads.find(
    (t) => t.id === action.payload.threadId
  );
  if (thread && thread.messages.length > 0) {
    thread.messages[thread.messages.length - 1] = action.payload.message;
    // Keep dateUpdated in sync, matching addLLMMessageToThread.
    thread.dateUpdated = action.payload.message.timestamp;
    // Guard: localStorage is unavailable in worker contexts.
    if (!isWorker()) {
      localStorage.setItem('llmThreads', JSON.stringify(state.llm.threads));
    }
  }
},

/**
 * Removes a thread by id, clears the current selection if it pointed at the
 * removed thread, and persists the remaining list.
 */
deleteLLMThread(state, action: PayloadAction<{ id: string }>) {
  state.llm.threads = state.llm.threads.filter(
    (thread) => thread.id !== action.payload.id
  );

  if (state.llm.currentThreadId === action.payload.id) {
    state.llm.currentThreadId = null;
  }

  // Guard: localStorage is unavailable in worker contexts.
  if (!isWorker()) {
    localStorage.setItem('llmThreads', JSON.stringify(state.llm.threads));
  }
},

/** Deletes all threads, clears the selection, and drops the persisted copy. */
clearLLMThreads(state) {
  state.llm.threads = [];
  state.llm.currentThreadId = null;
  // Guard: localStorage is unavailable in worker contexts.
  if (!isWorker()) {
    localStorage.removeItem('llmThreads');
  }
},
},

extraReducers: (builder) => {
Expand Down Expand Up @@ -451,8 +553,18 @@ const selectUnreadCounts = createSelector(
}
);

export const { addSenseItem, updateSenseItem, updateSenseList, reset } =
slice.actions;
// Plain (non-thunk) actions generated by the slice; thunks (getSenseList,
// getSenseChat, markAsRead) are exported separately below.
export const {
  addSenseItem,
  updateSenseItem,
  updateSenseList,
  reset,
  createLLMThread,
  deleteLLMThread,
  selectLLMThread,
  addLLMMessageToThread,
  replaceLastLLMMessageInThread,
  clearLLMThreads,
} = slice.actions;

export { getSenseList, getSenseChat, markAsRead };

Expand Down
95 changes: 95 additions & 0 deletions src/features/sense/ui/ActionBar/ActionBarLLM.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
import { useState } from 'react';
import { ActionBar, Button, Input } from 'src/components';
import { useAppDispatch, useAppSelector } from 'src/redux/hooks';
import {
createLLMThread,
addLLMMessageToThread,
replaceLastLLMMessageInThread,
} from 'src/features/sense/redux/sense.redux';
import { v4 as uuidv4 } from 'uuid';
import styles from './ActionBar.module.scss';
import { llmRequest } from '../../../../containers/Search/LLMSpark/LLMSpark';
import VoiceInteraction from '../VoiceInteraction/VoiceInteraction';

/**
 * Bottom action bar for the LLM chat: a text input, a Send button, and the
 * voice-interaction control. Sending ensures a thread exists, appends the
 * user message plus a "waiting..." placeholder, then replaces the
 * placeholder with the LLM response (or an error message on failure).
 */
function ActionBarLLM() {
  const [text, setText] = useState('');
  const currentThreadId = useAppSelector(
    (state) => state.sense.llm.currentThreadId
  );
  const dispatch = useAppDispatch();

  const sendMessage = async () => {
    // Ignore blank/whitespace-only input.
    if (!text.trim()) {
      return;
    }

    let threadId = currentThreadId;
    if (!threadId) {
      // No thread selected — start a fresh one and select it.
      threadId = uuidv4();
      dispatch(createLLMThread({ id: threadId }));
    }

    // `as const` keeps sender at the literal type required by LLMMessage
    // ('user' | 'llm') instead of widening to string.
    dispatch(
      addLLMMessageToThread({
        threadId,
        message: { text, sender: 'user' as const, timestamp: Date.now() },
      })
    );

    // Clear the input immediately for responsiveness; the closure still
    // holds the submitted text for the request below.
    setText('');

    // Placeholder shown while the request is in flight.
    dispatch(
      addLLMMessageToThread({
        threadId,
        message: {
          text: 'waiting...',
          sender: 'llm' as const,
          timestamp: Date.now(),
        },
      })
    );

    try {
      const responseText = await llmRequest(text);

      // Swap the placeholder for the actual response.
      dispatch(
        replaceLastLLMMessageInThread({
          threadId,
          message: {
            text: responseText,
            sender: 'llm' as const,
            timestamp: Date.now(),
          },
        })
      );
    } catch (error) {
      // Surface the failure in-thread instead of leaving "waiting..." forever.
      dispatch(
        replaceLastLLMMessageInThread({
          threadId,
          message: {
            text: 'Error: Failed to get response.',
            sender: 'llm' as const,
            timestamp: Date.now(),
          },
        })
      );
      console.error('LLM request failed:', error);
    }
  };

  return (
    <ActionBar>
      <Input
        placeholder="Ask the model"
        value={text}
        onChange={(e) => setText(e.target.value)}
        className={styles.input}
      />
      <Button onClick={sendMessage}>Send</Button>
      <VoiceInteraction />
    </ActionBar>
  );
}

export default ActionBarLLM;
Loading

0 comments on commit 67d8e7b

Please sign in to comment.