Merge pull request #116 from autonomys/ref/separate-timeline-response
Ref/separate timeline response
Xm0onh authored Jan 8, 2025
2 parents d9d460d + 80957b4 commit 648ca06
Showing 4 changed files with 42 additions and 16 deletions.
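The diff separates the timeline tool's single merged, time-sorted tweet list into two distinct collections. A minimal TypeScript sketch of the before/after response shape implied by the changes below; the type names and the `TweetLike` stand-in are illustrative and do not appear in the repository:

```ts
// Illustrative stand-in for the framework's Tweet type.
type TweetLike = { id: string; username: string; text: string; timeParsed?: Date };

// Before: one merged, time-sorted list.
type FetchTimelineResultBefore = { tweets: TweetLike[] };

// After: the two sources stay separate so downstream nodes can treat them differently.
type FetchTimelineResultAfter = {
  tweets: {
    timelineTweets: TweetLike[];
    followingRecents: TweetLike[];
  };
};
```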
auto-agents-framework/src/agents/tools/fetchTimelineTool.ts (13 additions, 11 deletions)
@@ -29,25 +29,27 @@ export const createFetchTimelineTool = (twitterApi: TwitterApi) =>
       numRandomFollowers: number;
     }) => {
       try {
-        const myTimelineTweets = (
-          await twitterApi.getMyTimeline(numTimelineTweets, processedIds)
-        ).slice(0, numTimelineTweets);
+        const myTimelineTweets = await twitterApi.getMyTimeline(numTimelineTweets, processedIds);
         const followingRecents = await twitterApi.getFollowingRecentTweets(
           numFollowingRecentTweets,
           numRandomFollowers,
         );
-        const tweets = new Set([...myTimelineTweets, ...followingRecents]);
-        const sortedTweets = Array.from(tweets).sort(
-          (a, b) => new Date(b.timeParsed!).getTime() - new Date(a.timeParsed!).getTime(),
-        );
-        logger.info('Timeline tweets:', { tweets: sortedTweets.length });
-        return {
-          tweets: sortedTweets,
+        const tweets = {
+          timelineTweets: myTimelineTweets,
+          followingRecents: followingRecents,
         };
+        logger.info('Timeline tweets:', {
+          timelineTweets: tweets.timelineTweets.length,
+          followingRecents: tweets.followingRecents.length,
+        });
+        return { tweets };
       } catch (error) {
         logger.error('Error in fetchTimelineTool:', error);
         return {
-          tweets: [],
+          tweets: {
+            timelineTweets: [],
+            followingRecents: [],
+          },
         };
       }
     },
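Downstream, this tool result arrives as the stringified content of the last tool message; a hedged sketch of the consumer-side read, assuming the returned object is JSON-stringified into the message content (which the `JSON.parse` call in the collect-data node below suggests). The `ToolResponse` type and function name are illustrative:

```ts
// Sketch: pull the separated timeline response out of the last tool message.
type ToolResponse = { messages: { content: string }[] };

function readTimelineResult(response: ToolResponse) {
  const last = response.messages[response.messages.length - 1];
  const parsed = JSON.parse(last.content) as {
    tweets: { timelineTweets: unknown[]; followingRecents: unknown[] };
  };
  return parsed.tweets; // { timelineTweets, followingRecents }
}
```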
@@ -9,11 +9,11 @@ export const createAnalyzeTrendNode =
   (config: WorkflowConfig) => async (state: typeof State.State) => {
     logger.info('Analyze Trend Node - Analyzing trends');

-    const tweets = Array.from(state.timelineTweets.values()).map(({ username, text }) => ({
+    const tweets = Array.from(state.trendAnalysisTweets.values()).map(({ username, text }) => ({
       username,
       text,
     }));
-    logger.info('Tweets:', { tweets: tweets.length });
+    logger.info('Tweets for trend analysis:', { tweets: tweets.length });

     const trendAnalysis = await config.prompts.trendPrompt
       .pipe(config.llms.analyze)
@@ -23,51 +23,71 @@ export const createCollectDataNode =
     ];
     logger.info('Processed IDs:', { processedIds: processedIds.length });

+    //////////MY RECENT REPLIES//////////
     const myRecentRepliesToolResponse = await invokeFetchMyRecentRepliesTool(config.toolNode, {
       maxRecentReplies: twitterConfig.MAX_MY_RECENT_REPLIES,
     });
     const myRecentRepliesContent =
       myRecentRepliesToolResponse.messages[myRecentRepliesToolResponse.messages.length - 1].content;
     const myRecentReplies = convertMessageContentToTweets(myRecentRepliesContent);

+    //////////TIMELINE & TREND//////////
     const timelineToolResponse = await invokeFetchTimelineTool(config.toolNode, {
       processedIds,
       numTimelineTweets: twitterConfig.NUM_TIMELINE_TWEETS,
       numFollowingRecentTweets: twitterConfig.NUM_FOLLOWING_RECENT_TWEETS,
       numRandomFollowers: twitterConfig.NUM_RANDOM_FOLLOWERS,
     });
-    const timelineContent =
-      timelineToolResponse.messages[timelineToolResponse.messages.length - 1].content;
-    const timelineTweets = convertMessageContentToTweets(timelineContent);
+    const timelineContent = JSON.parse(
+      timelineToolResponse.messages[timelineToolResponse.messages.length - 1].content,
+    );
+    const timelineTweetsConverted = convertMessageContentToTweets(
+      JSON.stringify({ tweets: timelineContent.tweets.timelineTweets }),
+    );
+    const followingRecentsConverted = convertMessageContentToTweets(
+      JSON.stringify({ tweets: timelineContent.tweets.followingRecents }),
+    );
+    const trendAnalysisTweets = [...timelineTweetsConverted, ...followingRecentsConverted];
+    const timelineTweets = [
+      ...Array.from(timelineTweetsConverted)
+        .sort(() => Math.random() - 0.5)
+        .slice(0, twitterConfig.NUM_TIMELINE_TWEETS),
+      ...followingRecentsConverted,
+    ];

+    //////////MENTIONS//////////
     const mentionsToolResponse = await invokeFetchMentionsTool(config.toolNode, {
       maxMentions: twitterConfig.MAX_MENTIONS,
     });
     const mentionsContent =
       mentionsToolResponse.messages[mentionsToolResponse.messages.length - 1].content;
     const mentionsTweets = convertMessageContentToTweets(mentionsContent);

+    //////////MY RECENT TWEETS//////////
     const myRecentTweetsToolResponse = await invokeFetchMyRecentTweetsTool(config.toolNode, {
       maxMyRecentTweets: twitterConfig.MAX_MY_RECENT_TWEETS,
     });
     const myRecentTweetsContent =
       myRecentTweetsToolResponse.messages[myRecentTweetsToolResponse.messages.length - 1].content;
     const myRecentTweets = convertMessageContentToTweets(myRecentTweetsContent);

+    //////////REPLIED TO TWEET IDS//////////
     const myRepliedToIds = JSON.parse(myRecentTweetsContent).repliedToTweetIds
       ? JSON.parse(myRecentTweetsContent).repliedToTweetIds
       : [];

     logger.info('Tool response received:', {
       myRecentRepliesCount: myRecentReplies.length,
       timelineMessageCount: timelineTweets.length,
+      trendAnalysisMessageCount: trendAnalysisTweets.length,
       mentionsMessageCount: mentionsTweets.length,
       myRecentTweetsCount: myRecentTweets.length,
       repliedToTweetIds: myRepliedToIds.length,
     });

     return {
       timelineTweets: new Set(timelineTweets),
+      trendAnalysisTweets: new Set(trendAnalysisTweets),
       mentionsTweets: new Set(mentionsTweets),
       myRecentTweets: new Set(myRecentTweets),
       myRecentReplies: new Set(myRecentReplies),
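The collect-data node re-stringifies each list before calling `convertMessageContentToTweets`, which suggests that helper expects a JSON string with a top-level `tweets` array. The net effect of the split: the full union feeds trend analysis, while the engagement timeline takes a random slice of the agent's own timeline plus every recent tweet from followed accounts. A standalone sketch of that selection (the `Math.random()` shuffle is taken from the diff; `TweetLike` and the function name are illustrative stand-ins):

```ts
type TweetLike = { id: string; username: string; text: string };

function splitCollectedTweets(
  timelineTweets: TweetLike[],
  followingRecents: TweetLike[],
  numTimelineTweets: number,
) {
  // Trend analysis sees everything that was fetched.
  const trendAnalysisTweets = [...timelineTweets, ...followingRecents];
  // Engagement sees a shuffled slice of the own-timeline tweets plus all following recents.
  const engagementTimeline = [
    ...[...timelineTweets].sort(() => Math.random() - 0.5).slice(0, numTimelineTweets),
    ...followingRecents,
  ];
  return { trendAnalysisTweets, engagementTimeline };
}
```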
auto-agents-framework/src/agents/workflows/kol/workflow.ts (4 additions, 0 deletions)
@@ -46,6 +46,10 @@ export const State = Annotation.Root({
     default: () => [],
     reducer: (curr, update) => update,
   }),
+  trendAnalysisTweets: Annotation<ReadonlySet<Tweet>>({
+    default: () => new Set(),
+    reducer: (curr, update) => new Set([...update]),
+  }),
   trendAnalysis: Annotation<TrendAnalysis>,
   dsnData: Annotation<Record<string, any>[]>({
     default: () => [],
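The new channel's reducer replaces the previous set with the update rather than merging them. A small runnable sketch of that behavior, using plain strings in place of `Tweet` values and leaving the `Annotation` wiring out:

```ts
// Mirrors `reducer: (curr, update) => new Set([...update])`; curr is intentionally
// ignored, exactly as in the diff's reducer, so the update wins outright.
const reducer = (curr: ReadonlySet<string>, update: ReadonlySet<string>): Set<string> =>
  new Set([...update]);

const current = new Set(['tweet-1', 'tweet-2']);
const update = new Set(['tweet-3']);
console.log([...reducer(current, update)]); // ['tweet-3']: no merge with the current value
```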
