Mirror of https://github.com/4ian/GDevelop.git (synced 2025-10-15 10:19:04 +00:00)

Compare commits: ai-ux-impr ... cursor/imp (1 commit)

Commit d8174561d2
@@ -8,6 +8,83 @@ namespace gdjs {
let _hasPlayerJustClosedLeaderboardView = false;
let _preferSendConnectedPlayerScore = true;

// Rolling-window rate limiting state (successful entries only):
// - Global: at most 12 successful entries across all leaderboards in the past minute
// - Per-leaderboard: at most 6 successful entries on the same leaderboard in the past minute
const ROLLING_WINDOW_MS = 60 * 1000;
const GLOBAL_SUCCESS_LIMIT_PER_MINUTE = 12;
const PER_LEADERBOARD_SUCCESS_LIMIT_PER_MINUTE = 6;

// Store timestamps of successful entries to implement the rolling window.
let _successfulEntriesTimestampsGlobal: number[] = [];
let _successfulEntriesTimestampsByLeaderboard: {
[leaderboardId: string]: number[];
} = {};

// Reservations to avoid concurrency overshoot (successes + reservations never exceed limits).
let _reservedSendsGlobal: number = 0;
let _reservedSendsByLeaderboard: { [leaderboardId: string]: number } = {};

const _pruneOldSuccessfulEntries = () => {
const threshold = Date.now() - ROLLING_WINDOW_MS;
// Global pruning
_successfulEntriesTimestampsGlobal = _successfulEntriesTimestampsGlobal.filter(
(ts) => ts >= threshold
);
// Per-leaderboard pruning
Object.keys(_successfulEntriesTimestampsByLeaderboard).forEach(
(leaderboardId) => {
const timestamps = _successfulEntriesTimestampsByLeaderboard[
leaderboardId
];
const pruned = timestamps.filter((ts) => ts >= threshold);
_successfulEntriesTimestampsByLeaderboard[leaderboardId] = pruned;
}
);
};

const _attemptReserveQuota = (leaderboardId: string): boolean => {
_pruneOldSuccessfulEntries();
const globalCount = _successfulEntriesTimestampsGlobal.length;
const reservedGlobal = _reservedSendsGlobal;

const perLeaderboardTimestamps =
_successfulEntriesTimestampsByLeaderboard[leaderboardId] || [];
const perLeaderboardCount = perLeaderboardTimestamps.length;
const reservedForLeaderboard =
_reservedSendsByLeaderboard[leaderboardId] || 0;

const wouldExceedGlobal =
globalCount + reservedGlobal >= GLOBAL_SUCCESS_LIMIT_PER_MINUTE;
const wouldExceedPerLeaderboard =
perLeaderboardCount + reservedForLeaderboard >=
PER_LEADERBOARD_SUCCESS_LIMIT_PER_MINUTE;

if (wouldExceedGlobal || wouldExceedPerLeaderboard) return false;

_reservedSendsGlobal += 1;
_reservedSendsByLeaderboard[leaderboardId] =
(_reservedSendsByLeaderboard[leaderboardId] || 0) + 1;
return true;
};

const _releaseReservedQuota = (leaderboardId: string): void => {
if (_reservedSendsGlobal > 0) _reservedSendsGlobal -= 1;
if ((_reservedSendsByLeaderboard[leaderboardId] || 0) > 0) {
_reservedSendsByLeaderboard[leaderboardId] -= 1;
}
};

const _recordSuccessfulEntry = (leaderboardId: string): void => {
const now = Date.now();
_successfulEntriesTimestampsGlobal.push(now);
const perLeaderboard =
_successfulEntriesTimestampsByLeaderboard[leaderboardId] || [];
perLeaderboard.push(now);
_successfulEntriesTimestampsByLeaderboard[leaderboardId] = perLeaderboard;
_pruneOldSuccessfulEntries();
};
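Below is a minimal sketch (not part of the commit) of how the helpers above are meant to compose around a send; `sendEntryToBackend` is a hypothetical stand-in for the actual network call. For example, with 11 recent successes and 1 outstanding reservation, a new reservation is refused because 11 + 1 >= 12.

// Sketch only: illustrates the reserve -> send -> record/release contract
// enforced by the helpers above. `sendEntryToBackend` is hypothetical.
const sendWithRollingWindowLimit = async (
  leaderboardId: string,
  sendEntryToBackend: () => Promise<void>
): Promise<void> => {
  // Reserve a slot first so concurrent sends cannot overshoot the limits:
  // successes + reservations never exceed 12 globally or 6 per leaderboard.
  if (!_attemptReserveQuota(leaderboardId)) {
    throw new Error('TOO_MANY_ENTRIES_IN_A_MINUTE');
  }
  try {
    await sendEntryToBackend();
    // Only successful entries count against the rolling window.
    _recordSuccessfulEntry(leaderboardId);
  } finally {
    // The reservation is always released, on success or on error.
    _releaseReservedQuota(leaderboardId);
  }
};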

gdjs.registerRuntimeScenePostEventsCallback(() => {
// Set it back to false for the next frame.
_hasPlayerJustClosedLeaderboardView = false;
@@ -35,6 +112,15 @@ namespace gdjs {

/**
* Hold the state of the save of a score for a leaderboard.
*
* Existing protections:
* - 500ms per-leaderboard throttle between save starts (error code: TOO_FAST).
* - Ignore same player+score as previous successful save (error code: SAME_AS_PREVIOUS).
* - Ignore duplicate in-flight save for same player/score.
*
* New rolling-window limits (successful entries only):
* - Global: at most 12 successful entries across all leaderboards in the past minute.
* - Per-leaderboard: at most 6 successful entries on the same leaderboard in the past minute.
*/
class ScoreSavingState {
lastScoreSavingStartedAt: number | null = null;
@@ -115,10 +201,12 @@ namespace gdjs {
}

startSaving({
leaderboardId,
playerName,
playerId,
score,
}: {
leaderboardId: string;
playerName?: string;
playerId?: string;
score: number;
@@ -154,6 +242,18 @@ namespace gdjs {
throw new Error('Ignoring this saving request.');
}

// New rolling-window limits (in addition to the existing 500ms per-leaderboard throttle above):
// - Global limit: at most 12 successful entries across all leaderboards in the past minute.
// - Per-leaderboard limit: at most 6 successful entries on the same leaderboard in the past minute.
// These are enforced by reserving a slot before sending and releasing it after completion.
if (!_attemptReserveQuota(leaderboardId)) {
logger.warn(
'Too many leaderboard entries were sent in the last minute. Ignoring this one.'
);
this._setError('TOO_MANY_ENTRIES_IN_A_MINUTE');
throw new Error('Ignoring this saving request.');
}

let resolveSavingPromise: () => void;
const savingPromise = new Promise<void>((resolve) => {
resolveSavingPromise = resolve;
@@ -174,6 +274,11 @@ namespace gdjs {
'Score saving result received, but another save was launched in the meantime - ignoring the result of this one.'
);

// Still record the successful entry for rate limiting purposes,
// then release the reserved quota taken at start.
_recordSuccessfulEntry(leaderboardId);
_releaseReservedQuota(leaderboardId);

// Still finish the promise that can be waited upon:
resolveSavingPromise();
return;
@@ -186,6 +291,10 @@ namespace gdjs {
this.lastSavedLeaderboardEntry = leaderboardEntry;
this.hasScoreBeenSaved = true;

// Record the success and release the reservation.
_recordSuccessfulEntry(leaderboardId);
_releaseReservedQuota(leaderboardId);

resolveSavingPromise();
},
closeSavingWithError: (errorCode) => {
@@ -194,12 +303,22 @@ namespace gdjs {
'Score saving result received, but another save was launched in the meantime - ignoring the result of this one.'
);

// Release the reserved quota taken at start.
_releaseReservedQuota(leaderboardId);

// Still finish the promise that can be waited upon:
resolveSavingPromise();
return;
}

this._setError(errorCode);
// If the entry was actually saved but the response couldn't be parsed,
// still count it as a success for rate limiting.
if (errorCode === 'SAVED_ENTRY_CANT_BE_READ') {
_recordSuccessfulEntry(leaderboardId);
}
// On error, release the reservation (a success is recorded only in the case above).
_releaseReservedQuota(leaderboardId);
resolveSavingPromise();
},
};
@@ -396,7 +515,7 @@ namespace gdjs {

try {
const { closeSaving, closeSavingWithError } =
scoreSavingState.startSaving({ playerName, score });
scoreSavingState.startSaving({ leaderboardId, playerName, score });

try {
const leaderboardEntry = await saveScore({
@@ -440,7 +559,7 @@ namespace gdjs {

try {
const { closeSaving, closeSavingWithError } =
scoreSavingState.startSaving({ playerId, score });
scoreSavingState.startSaving({ leaderboardId, playerId, score });

try {
const leaderboardEntryId = await saveScore({

@@ -6,7 +6,6 @@ import Text from '../../UI/Text';
import { Trans, t } from '@lingui/macro';
import {
type AiRequest,
type AiRequestUserMessage,
type AiRequestMessageAssistantFunctionCall,
} from '../../Utils/GDevelopServices/Generation';
import RaisedButton from '../../UI/RaisedButton';
@@ -44,7 +43,6 @@ import {
getDefaultAiConfigurationPresetId,
} from '../AiConfiguration';
import { AiConfigurationPresetSelector } from './AiConfigurationPresetSelector';
import { AiRequestContext } from '../AiRequestContext';

const TOO_MANY_USER_MESSAGES_WARNING_COUNT = 5;
const TOO_MANY_USER_MESSAGES_ERROR_COUNT = 10;
@@ -284,9 +282,6 @@ export const AiRequestChat = React.forwardRef<Props, AiRequestChatInterface>(
}: Props,
ref
) => {
const { aiRequestStorage } = React.useContext(AiRequestContext);
const { aiRequests } = aiRequestStorage;

// TODO: store the default mode in the user preferences?
const [newAiRequestMode, setNewAiRequestMode] = React.useState<
'chat' | 'agent'
@@ -341,11 +336,7 @@ export const AiRequestChat = React.forwardRef<Props, AiRequestChatInterface>(
userRequestTextPerAiRequestId,
setUserRequestTextPerRequestId,
] = React.useState<{ [string]: string }>({});
const [historyIndex, setHistoryIndex] = React.useState<number>(-1);
const [savedCurrentText, setSavedCurrentText] = React.useState<string>('');
const scrollViewRef = React.useRef<ScrollViewInterface | null>(null);
const textAreaRefForNewChat = React.useRef<any>(null);
const textAreaRefForExistingChat = React.useRef<any>(null);
const [shouldAutoScroll, setShouldAutoScroll] = React.useState<boolean>(
true
);
@@ -408,26 +399,13 @@ export const AiRequestChat = React.forwardRef<Props, AiRequestChatInterface>(
[newAiRequestMode, hasOpenedProject]
);

const onUserRequestTextChange = React.useCallback(
(userRequestText: string, aiRequestIdToChange: string) => {
setUserRequestTextPerRequestId(userRequestTextPerAiRequestId => ({
...userRequestTextPerAiRequestId,
[aiRequestIdToChange]: userRequestText,
}));
// Reset history navigation when field is cleared,
// so that pressing up goes to the last message again.
if (!userRequestText && historyIndex !== -1) {
setHistoryIndex(-1);
setSavedCurrentText('');
}
},
[historyIndex]
);

React.useImperativeHandle(ref, () => ({
resetUserInput: (aiRequestId: string | null) => {
const aiRequestIdToReset: string = aiRequestId || '';
onUserRequestTextChange('', aiRequestIdToReset);
setUserRequestTextPerRequestId(userRequestTextPerAiRequestId => ({
...userRequestTextPerAiRequestId,
[aiRequestIdToReset]: '',
}));

if (scrollViewRef.current) {
scrollViewRef.current.scrollToBottom({
@@ -439,109 +417,6 @@ export const AiRequestChat = React.forwardRef<Props, AiRequestChatInterface>(

const { isMobile } = useResponsiveWindowSize();

// Build history from sent user messages across all aiRequests
const requestsHistory = React.useMemo(
() => {
const history: Array<string> = [];

// Iterate through all aiRequests in reverse order (most recent first)
Object.values(aiRequests)
.reverse()
.forEach(
// $FlowFixMe - Object.values() loses the type of aiRequests.
(request: AiRequest) => {
const userMessages = request.output
.filter(
message =>
message.type === 'message' && message.role === 'user'
)
.map(
// $FlowFixMe - We filtered the type above.
(message: AiRequestUserMessage) => {
const userRequest = message.content.find(
item => item.type === 'user_request'
);
return userRequest ? userRequest.text : '';
}
)
.filter(text => text !== '');

history.push(...userMessages);
}
);

return history;
},
[aiRequests]
);
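As a rough illustration of what the memo above produces (a sketch with hypothetical, simplified data; the real `AiRequest` output entries carry a `content` array rather than a plain `text` field):

// Sketch only: the same flattening applied to simplified data, to show the
// resulting order used by the history navigation below.
const fakeAiRequests = {
  'older-request': {
    output: [{ type: 'message', role: 'user', text: 'Add coins to my game' }],
  },
  'newer-request': {
    output: [{ type: 'message', role: 'user', text: 'Add a boss fight' }],
  },
};

const history: Array<string> = [];
Object.values(fakeAiRequests)
  .reverse() // Most recently added request first.
  .forEach(request => {
    history.push(
      ...request.output
        .filter(message => message.type === 'message' && message.role === 'user')
        .map(message => message.text || '')
        .filter(text => text !== '')
    );
  });
// history is now ['Add a boss fight', 'Add coins to my game'].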

// Reset history index when aiRequest changes,
// ensuring pressing up and down doesn't depend on the previous aiRequest.
React.useEffect(
() => {
setHistoryIndex(-1);
setSavedCurrentText('');
},
[aiRequestId]
);

const handleNavigateHistory = React.useCallback(
(direction: 'up' | 'down') => {
const currentText = userRequestTextPerAiRequestId[aiRequestId] || '';
const textAreaRef = aiRequest
? textAreaRefForExistingChat
: textAreaRefForNewChat;

if (direction === 'up') {
// Save current text when starting navigation,
// so we can restore it if going back to current.
if (historyIndex === -1) {
setSavedCurrentText(currentText);
}

const newIndex = historyIndex + 1;
if (newIndex < requestsHistory.length) {
setHistoryIndex(newIndex);
const historicalText =
requestsHistory[requestsHistory.length - 1 - newIndex];
onUserRequestTextChange(historicalText, aiRequestId);

// Set cursor to start when navigating up,
// otherwise it goes to the end of the text, making it harder
// to navigate with one key press.
if (textAreaRef.current) {
// Use timeout so that the text is updated before setting the cursor position.
setTimeout(() => {
textAreaRef.current.setCursorPosition(0);
}, 0);
}
}
} else if (direction === 'down') {
const newIndex = historyIndex - 1;

if (newIndex === -1) {
// We're at the end of the history. Restore the saved current text.
setHistoryIndex(-1);
onUserRequestTextChange(savedCurrentText, aiRequestId);
} else if (newIndex >= 0) {
setHistoryIndex(newIndex);
const historicalText =
requestsHistory[requestsHistory.length - 1 - newIndex];
onUserRequestTextChange(historicalText, aiRequestId);
}
}
},
[
aiRequestId,
historyIndex,
requestsHistory,
userRequestTextPerAiRequestId,
savedCurrentText,
onUserRequestTextChange,
aiRequest,
]
);
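A small sketch of the index mapping used above, independent of React state (assumption: `requestsHistory` is ordered as built by the memo earlier; this is an illustration, not part of the commit):

// Sketch only: the same index arithmetic as handleNavigateHistory.
// Pressing "up" increments the index; "down" decrements it back towards -1 (current draft).
const textForHistoryIndex = (
  requestsHistory: Array<string>,
  historyIndex: number,
  savedCurrentText: string
): string =>
  historyIndex === -1
    ? savedCurrentText // -1 means "not navigating": show the draft the user was typing.
    : requestsHistory[requestsHistory.length - 1 - historyIndex];

// With requestsHistory = ['a', 'b', 'c']:
// index 0 -> 'c' (last element), index 1 -> 'b', index 2 -> 'a'.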

const priceText = (
<Text size="body-small" color="secondary" noMargin>
{getPriceText({
@@ -676,17 +551,20 @@ export const AiRequestChat = React.forwardRef<Props, AiRequestChatInterface>(
<Column noMargin alignItems="stretch" justifyContent="stretch">
<Spacer />
<CompactTextAreaFieldWithControls
ref={textAreaRefForNewChat}
maxLength={6000}
value={userRequestTextPerAiRequestId[''] || ''}
disabled={isSending}
hasNeonCorner
hasAnimatedNeonCorner={isSending}
errored={!!lastSendError}
onChange={userRequestText => {
onUserRequestTextChange(userRequestText, '');
}}
onNavigateHistory={handleNavigateHistory}
onChange={userRequestText =>
setUserRequestTextPerRequestId(
userRequestTextPerAiRequestId => ({
...userRequestTextPerAiRequestId,
'': userRequestText,
})
)
}
onSubmit={() => {
onStartNewAiRequest({
mode: newAiRequestMode,
@@ -1000,7 +878,6 @@ export const AiRequestChat = React.forwardRef<Props, AiRequestChatInterface>(
</Paper>
) : null}
<CompactTextAreaFieldWithControls
ref={textAreaRefForExistingChat}
maxLength={6000}
value={userRequestTextPerAiRequestId[aiRequestId] || ''}
disabled={isSending || isForAnotherProject}
@@ -1008,9 +885,13 @@ export const AiRequestChat = React.forwardRef<Props, AiRequestChatInterface>(
hasNeonCorner
hasAnimatedNeonCorner={isSending}
onChange={userRequestText =>
onUserRequestTextChange(userRequestText, aiRequestId)
setUserRequestTextPerRequestId(
userRequestTextPerAiRequestId => ({
...userRequestTextPerAiRequestId,
[aiRequestId]: userRequestText,
})
)
}
onNavigateHistory={handleNavigateHistory}
placeholder={
aiRequest.mode === 'agent'
? isForAnotherProject

@@ -5,7 +5,6 @@ import {
fetchAiSettings,
type AiRequest,
type AiSettings,
getAiRequests,
} from '../Utils/GDevelopServices/Generation';
import AuthenticatedUserContext from '../Profile/AuthenticatedUserContext';
import { type EditorFunctionCallResult } from '../EditorFunctions/EditorFunctionCallRunner';
@@ -83,11 +82,6 @@ const useEditorFunctionCallResultsStorage = (): EditorFunctionCallResultsStorage
};

type AiRequestStorage = {|
fetchAiRequests: () => Promise<void>,
onLoadMoreAiRequests: () => Promise<void>,
canLoadMore: boolean,
error: ?Error,
isLoading: boolean,
aiRequests: { [string]: AiRequest },
updateAiRequest: (aiRequestId: string, aiRequest: AiRequest) => void,
refreshAiRequest: (aiRequestId: string) => Promise<void>,
@@ -102,105 +96,20 @@ type AiRequestSendState = {|
lastSendError: ?Error,
|};

type PaginationState = {|
aiRequests: { [string]: AiRequest },
nextPageUri: ?Object,
|};

const emptyPaginationState: PaginationState = {
aiRequests: {},
nextPageUri: null,
};

export const useAiRequestsStorage = (): AiRequestStorage => {
const { profile, getAuthorizationHeader } = React.useContext(
AuthenticatedUserContext
);

const [state, setState] = React.useState<PaginationState>(
emptyPaginationState
);
const [error, setError] = React.useState<Error | null>(null);
const [isLoading, setIsLoading] = React.useState<boolean>(false);

const fetchAiRequests = React.useCallback(
async () => {
if (!profile) return;

setIsLoading(true);
setError(null);

try {
const history = await getAiRequests(getAuthorizationHeader, {
userId: profile.id,
forceUri: null, // Fetch the first page.
});
if (!history) return;
const aiRequestsById = history.aiRequests.reduce(
(accumulator, aiRequest) => {
accumulator[aiRequest.id] = aiRequest;
return accumulator;
},
{}
);
setState({
aiRequests: aiRequestsById,
nextPageUri: history.nextPageUri,
});
} catch (err) {
setError(err);
console.error('Error fetching AI requests:', err);
} finally {
setIsLoading(false);
}
},
[profile, getAuthorizationHeader]
);
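A sketch of the pure part of this pagination handling, under the assumption that pages are merged by id and `canLoadMore` is derived from the presence of `nextPageUri` (the types here are simplified stand-ins, not the module's real Flow types):

// Sketch only: how a page of results is folded into the existing state.
type FakePaginationState = {
  aiRequests: { [id: string]: { id: string } };
  nextPageUri: string | null;
};

const mergePage = (
  previous: FakePaginationState,
  pageRequests: Array<{ id: string }>,
  nextPageUri: string | null
): FakePaginationState => ({
  // New requests are keyed by id; already-loaded requests are kept as-is.
  aiRequests: pageRequests.reduce(
    (byId, request) => ({ ...byId, [request.id]: byId[request.id] || request }),
    { ...previous.aiRequests }
  ),
  // In the hook above, canLoadMore is simply !!nextPageUri.
  nextPageUri,
});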

const onLoadMoreAiRequests = React.useCallback(
async () => {
if (!profile) return;

setIsLoading(true);
setError(null);

try {
const history = await getAiRequests(getAuthorizationHeader, {
userId: profile.id,
forceUri: state.nextPageUri,
});
if (!history) return;
const newRequests = history.aiRequests;
const currentRequestsById = state.aiRequests;

newRequests.forEach(newRequest => {
// Add new requests to the state.
if (!currentRequestsById[newRequest.id]) {
currentRequestsById[newRequest.id] = newRequest;
}
});
setState({
aiRequests: currentRequestsById,
nextPageUri: history.nextPageUri,
});
} catch (err) {
setError(err);
console.error('Error fetching AI requests:', err);
} finally {
setIsLoading(false);
}
},
[profile, getAuthorizationHeader, state.nextPageUri, state.aiRequests]
const [aiRequests, setAiRequests] = React.useState<{ [string]: AiRequest }>(
{}
);

const updateAiRequest = React.useCallback(
(aiRequestId: string, aiRequest: AiRequest) => {
setState(prevState => ({
...prevState,
aiRequests: {
...(prevState.aiRequests || {}),
[aiRequestId]: aiRequest,
},
setAiRequests(aiRequests => ({
...aiRequests,
[aiRequestId]: aiRequest,
}));
},
[]
@@ -270,12 +179,7 @@ export const useAiRequestsStorage = (): AiRequestStorage => {
);

return {
fetchAiRequests,
onLoadMoreAiRequests,
canLoadMore: !!state.nextPageUri,
error,
isLoading,
aiRequests: state.aiRequests,
aiRequests,
updateAiRequest,
refreshAiRequest,
isSendingAiRequest,
@@ -291,13 +195,8 @@ type AiRequestContextState = {|
getAiSettings: () => AiSettings | null,
|};

export const initialAiRequestContextState: AiRequestContextState = {
export const AiRequestContext = React.createContext<AiRequestContextState>({
aiRequestStorage: {
fetchAiRequests: async () => {},
onLoadMoreAiRequests: async () => {},
canLoadMore: true,
error: null,
isLoading: false,
aiRequests: {},
updateAiRequest: () => {},
refreshAiRequest: async () => {},
@@ -312,10 +211,7 @@ export const initialAiRequestContextState: AiRequestContextState = {
clearEditorFunctionCallResults: () => {},
},
getAiSettings: () => null,
};
export const AiRequestContext = React.createContext<AiRequestContextState>(
initialAiRequestContextState
);
});

type AiRequestProviderProps = {|
children: React.Node,

@@ -480,9 +480,6 @@ export const AskAiEditor = React.memo<Props>(
[onOpenLayout, onCreateProject]
);

const {
aiRequestStorage: { fetchAiRequests },
} = React.useContext(AiRequestContext);
const {
selectedAiRequest,
selectedAiRequestId,
@@ -514,15 +511,6 @@ export const AskAiEditor = React.memo<Props>(
[initialMode]
);

React.useEffect(
() => {
fetchAiRequests();
},
// Only fetch once on mount (we provide a way to refresh in the history).
// eslint-disable-next-line react-hooks/exhaustive-deps
[]
);

const canStartNewChat = !!selectedAiRequestId;
const onStartOrOpenChat = React.useCallback(
({

@@ -3,10 +3,14 @@ import * as React from 'react';
import Drawer from '@material-ui/core/Drawer';
import ButtonBase from '@material-ui/core/ButtonBase';
import { Line, Column } from '../UI/Grid';
import { ColumnStackLayout, LineStackLayout } from '../UI/Layout';
import { ColumnStackLayout } from '../UI/Layout';
import Text from '../UI/Text';
import { Trans } from '@lingui/macro';
import { type AiRequest } from '../Utils/GDevelopServices/Generation';
import {
getAiRequests,
type AiRequest,
} from '../Utils/GDevelopServices/Generation';
import AuthenticatedUserContext from '../Profile/AuthenticatedUserContext';
import Paper from '../UI/Paper';
import ScrollView from '../UI/ScrollView';
import FlatButton from '../UI/FlatButton';
@@ -17,7 +21,6 @@ import formatDate from 'date-fns/format';
import DrawerTopBar from '../UI/DrawerTopBar';
import PlaceholderError from '../UI/PlaceholderError';
import { textEllipsisStyle } from '../UI/TextEllipsis';
import { AiRequestContext } from './AiRequestContext';

type Props = {|
open: boolean,
@@ -76,27 +79,23 @@ const getFirstUserRequestText = (aiRequest: AiRequest): string => {
};

type AskAiHistoryContentProps = {|
aiRequests: Array<AiRequest> | null,
isLoading: boolean,
error: ?Error,
onSelectAiRequest: (aiRequest: AiRequest) => void,
selectedAiRequestId: string | null,
onFetchAiRequests: () => Promise<void>,
|};

export const AskAiHistoryContent = ({
aiRequests,
isLoading,
error,
onSelectAiRequest,
selectedAiRequestId,
onFetchAiRequests,
}: AskAiHistoryContentProps) => {
const {
aiRequestStorage: {
aiRequests,
fetchAiRequests,
onLoadMoreAiRequests,
canLoadMore,
isLoading,
error,
},
} = React.useContext(AiRequestContext);
// $FlowFixMe - Flow loses type with Object.values
const aiRequestsArray: AiRequest[] = Object.values(aiRequests);
if (!aiRequestsArray.length && isLoading) {
if (!aiRequests && isLoading) {
return (
<Column
noMargin
@@ -112,13 +111,13 @@ export const AskAiHistoryContent = ({

if (error) {
return (
<PlaceholderError onRetry={fetchAiRequests}>
<PlaceholderError onRetry={onFetchAiRequests}>
<Trans>An error occurred while loading your AI requests.</Trans>
</PlaceholderError>
);
}

if (aiRequestsArray.length === 0) {
if (!aiRequests || aiRequests.length === 0) {
return (
<EmptyMessage>
<Trans>
@@ -131,7 +130,7 @@ export const AskAiHistoryContent = ({
return (
<ScrollView>
<ColumnStackLayout expand>
{aiRequestsArray.map(aiRequest => {
{aiRequests.map(aiRequest => {
const isSelected = selectedAiRequestId === aiRequest.id;
const userRequestText = getFirstUserRequestText(aiRequest);
const requestDate = new Date(aiRequest.createdAt);
@@ -177,20 +176,14 @@ export const AskAiHistoryContent = ({
</Paper>
);
})}
<LineStackLayout justifyContent="center">
<Line justifyContent="center">
<FlatButton
primary
label={<Trans>Refresh</Trans>}
onClick={fetchAiRequests}
onClick={onFetchAiRequests}
disabled={isLoading}
/>
<FlatButton
primary
label={<Trans>Load more</Trans>}
onClick={onLoadMoreAiRequests}
disabled={isLoading || !canLoadMore}
/>
</LineStackLayout>
</Line>
</ColumnStackLayout>
</ScrollView>
);
@@ -203,6 +196,46 @@ export const AskAiHistory = ({
selectedAiRequestId,
}: Props) => {
const { isMobile } = useResponsiveWindowSize();
const [aiRequests, setAiRequests] = React.useState<Array<AiRequest> | null>(
null
);
const [isLoading, setIsLoading] = React.useState<boolean>(false);
const [error, setError] = React.useState<Error | null>(null);

const { profile, getAuthorizationHeader } = React.useContext(
AuthenticatedUserContext
);

const fetchAiRequests = React.useCallback(
async () => {
if (!profile) return;

setIsLoading(true);
setError(null);

try {
const requests = await getAiRequests(getAuthorizationHeader, {
userId: profile.id,
});
setAiRequests(requests);
} catch (err) {
setError(err);
console.error('Error fetching AI requests:', err);
} finally {
setIsLoading(false);
}
},
[profile, getAuthorizationHeader]
);

React.useEffect(
() => {
if (open) {
fetchAiRequests();
}
},
[open, fetchAiRequests]
);

const handleSelectAiRequest = (aiRequest: AiRequest) => {
onSelectAiRequest(aiRequest);
@@ -232,8 +265,12 @@ export const AskAiHistory = ({
onClose={onClose}
/>
<AskAiHistoryContent
aiRequests={aiRequests}
isLoading={isLoading}
error={error}
onSelectAiRequest={handleSelectAiRequest}
selectedAiRequestId={selectedAiRequestId}
onFetchAiRequests={fetchAiRequests}
/>
</ColumnStackLayout>
</Drawer>

@@ -12,7 +12,6 @@ export type CompactTextAreaFieldWithControlsProps = {|
value: string,
onChange: (newValue: string) => void,
onSubmit?: () => void,
onNavigateHistory?: (direction: 'up' | 'down') => void,
id?: string,
disabled?: boolean,
errored?: boolean,
@@ -24,125 +23,60 @@ export type CompactTextAreaFieldWithControlsProps = {|
hasAnimatedNeonCorner?: boolean,
|};

export type CompactTextAreaFieldWithControlsInterface = {|
setCursorPosition: (position: number) => void,
|};
export const CompactTextAreaFieldWithControls = ({
value,
onChange,
id,
disabled,
errored,
placeholder,
rows,
maxLength,
onSubmit,
controls,
hasNeonCorner,
hasAnimatedNeonCorner,
}: CompactTextAreaFieldWithControlsProps) => {
const idToUse = React.useRef<string>(id || makeTimestampedId());

export const CompactTextAreaFieldWithControls = React.forwardRef<
CompactTextAreaFieldWithControlsProps,
CompactTextAreaFieldWithControlsInterface
>(
(
{
value,
onChange,
id,
disabled,
errored,
placeholder,
rows,
maxLength,
onSubmit,
onNavigateHistory,
controls,
hasNeonCorner,
hasAnimatedNeonCorner,
}: CompactTextAreaFieldWithControlsProps,
ref
) => {
const idToUse = React.useRef<string>(id || makeTimestampedId());
const textareaRef = React.useRef<?HTMLTextAreaElement>(null);

React.useImperativeHandle(ref, () => ({
setCursorPosition: (position: number) => {
if (textareaRef.current) {
textareaRef.current.setSelectionRange(position, position);
}
},
}));
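A sketch of how a parent is expected to use the handle exposed above (mirroring the `setTimeout` + `setCursorPosition(0)` call in AiRequestChat earlier in this diff); the ref shape here is a simplified assumption, not the real Flow type:

// Sketch only: moving the caret to the start from a parent component.
const moveCursorToStart = (textAreaRef: {
  current: { setCursorPosition: (position: number) => void } | null;
}): void => {
  if (!textAreaRef.current) return;
  // Defer so the controlled value is rendered before the selection is moved.
  setTimeout(() => {
    if (textAreaRef.current) textAreaRef.current.setCursorPosition(0);
  }, 0);
};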

const handleKeyDown = React.useCallback(
(e: SyntheticKeyboardEvent<HTMLTextAreaElement>) => {
// Handle submit first
if (onSubmit && shouldSubmit(e)) {
onSubmit();
return;
}

if (!onNavigateHistory) {
return;
}

const isArrowUp = e.key === 'ArrowUp';
const isArrowDown = e.key === 'ArrowDown';

if (!isArrowUp && !isArrowDown) {
return;
}

const textarea = e.currentTarget;
const { selectionStart, value: textValue } = textarea;

// Calculate cursor position info
const textBeforeCursor = textValue.substring(0, selectionStart);
const lines = textValue.split('\n');
const currentLineIndex = textBeforeCursor.split('\n').length - 1;
const currentLineStart = textBeforeCursor.lastIndexOf('\n') + 1;
const currentLine = lines[currentLineIndex];
const positionInLine = selectionStart - currentLineStart;

// Check if we should navigate history
const isAtFirstLineStart =
currentLineIndex === 0 && positionInLine === 0;
const isAtLastLineEnd =
currentLineIndex === lines.length - 1 &&
positionInLine === currentLine.length;

if (
(isArrowUp && isAtFirstLineStart) ||
(isArrowDown && isAtLastLineEnd)
) {
e.preventDefault();
onNavigateHistory(isArrowUp ? 'up' : 'down');
}
},
[onSubmit, onNavigateHistory]
);
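The same caret-boundary test, extracted as a pure function for clarity (a sketch, not part of the commit): ArrowUp is only forwarded to history when the caret is at the very start of the first line, ArrowDown only at the very end of the last line.

// Sketch only: the caret-boundary check used by handleKeyDown above.
const shouldNavigateHistory = (
  key: 'ArrowUp' | 'ArrowDown',
  textValue: string,
  selectionStart: number
): boolean => {
  const textBeforeCursor = textValue.substring(0, selectionStart);
  const lines = textValue.split('\n');
  const currentLineIndex = textBeforeCursor.split('\n').length - 1;
  const currentLineStart = textBeforeCursor.lastIndexOf('\n') + 1;
  const positionInLine = selectionStart - currentLineStart;
  const isAtFirstLineStart = currentLineIndex === 0 && positionInLine === 0;
  const isAtLastLineEnd =
    currentLineIndex === lines.length - 1 &&
    positionInLine === lines[currentLineIndex].length;
  return key === 'ArrowUp' ? isAtFirstLineStart : isAtLastLineEnd;
};

// shouldNavigateHistory('ArrowUp', 'hello\nworld', 0) === true
// shouldNavigateHistory('ArrowUp', 'hello\nworld', 7) === false (caret is on the second line)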

return (
<I18n>
{({ i18n }) => (
<label
return (
<I18n>
{({ i18n }) => (
<label
className={classNames({
[classes.container]: true,
[classes.disabled]: disabled,
[classes.errored]: errored,
})}
>
<div
className={classNames({
[classes.container]: true,
[classes.disabled]: disabled,
[classes.errored]: errored,
[classes.compactTextAreaField]: true,
[classes.neonCorner]: hasNeonCorner,
[classes.animatedNeonCorner]:
hasNeonCorner && hasAnimatedNeonCorner,
})}
>
<div
className={classNames({
[classes.compactTextAreaField]: true,
[classes.neonCorner]: hasNeonCorner,
[classes.animatedNeonCorner]:
hasNeonCorner && hasAnimatedNeonCorner,
})}
>
<textarea
ref={textareaRef}
id={idToUse.current}
disabled={disabled}
value={value === null ? '' : value}
onChange={e => onChange(e.currentTarget.value)}
placeholder={i18n._(placeholder)}
onKeyDown={handleKeyDown}
rows={rows || 3}
maxLength={maxLength}
/>
{controls}
</div>
</label>
)}
</I18n>
);
}
);
<textarea
id={idToUse.current}
disabled={disabled}
value={value === null ? '' : value}
onChange={e => onChange(e.currentTarget.value)}
placeholder={i18n._(placeholder)}
onKeyDown={
onSubmit
? e => {
if (shouldSubmit(e)) onSubmit();
}
: undefined
}
rows={rows || 3}
maxLength={maxLength}
/>
{controls}
</div>
</label>
)}
</I18n>
);
};

@@ -3,7 +3,6 @@ import axios from 'axios';
import { GDevelopAiCdn, GDevelopGenerationApi } from './ApiConfigs';
import { type MessageByLocale } from '../i18n/MessageByLocale';
import { getIDEVersionWithHash } from '../../Version';
import { extractNextPageUriFromLinkHeader } from './Play';

export type Environment = 'staging' | 'live';

@@ -23,43 +22,39 @@ export type AiRequestFunctionCallOutput = {
output: string,
};

export type AiRequestAssistantMessage = {
type: 'message',
status: 'completed',
role: 'assistant',
content: Array<
| {
type: 'reasoning',
status: 'completed',
summary: {
text: string,
type: 'summary_text',
},
}
| {
type: 'output_text',
export type AiRequestMessage =
| {
type: 'message',
status: 'completed',
role: 'assistant',
content: Array<
| {
type: 'reasoning',
status: 'completed',
summary: {
text: string,
type: 'summary_text',
},
}
| {
type: 'output_text',
status: 'completed',
text: string,
annotations: Array<{}>,
}
| AiRequestMessageAssistantFunctionCall
>,
}
| {
type: 'message',
status: 'completed',
role: 'user',
content: Array<{
type: 'user_request',
status: 'completed',
text: string,
annotations: Array<{}>,
}
| AiRequestMessageAssistantFunctionCall
>,
};

export type AiRequestUserMessage = {
type: 'message',
status: 'completed',
role: 'user',
content: Array<{
type: 'user_request',
status: 'completed',
text: string,
}>,
};

export type AiRequestMessage =
| AiRequestAssistantMessage
| AiRequestUserMessage
}>,
}
| AiRequestFunctionCallOutput;

export type AiConfiguration = {
@@ -171,10 +166,6 @@ export type AssetSearch = {
}> | null,
};

export const apiClient = axios.create({
baseURL: GDevelopGenerationApi.baseUrl,
});

export const getAiRequest = async (
getAuthorizationHeader: () => Promise<string>,
{
@@ -204,37 +195,23 @@ export const getAiRequests = async (
getAuthorizationHeader: () => Promise<string>,
{
userId,
forceUri,
}: {|
userId: string,
forceUri: ?string,
|}
): Promise<{
aiRequests: Array<AiRequest>,
nextPageUri: ?string,
}> => {
): Promise<Array<AiRequest>> => {
const authorizationHeader = await getAuthorizationHeader();
const uri = forceUri || '/ai-request';

// $FlowFixMe
const response = await apiClient.get(uri, {
headers: {
Authorization: authorizationHeader,
},
params: forceUri ? { userId } : { userId, perPage: 10 },
});
const nextPageUri = response.headers.link
? extractNextPageUriFromLinkHeader(response.headers.link)
: null;
const aiRequests = response.data;
if (!Array.isArray(aiRequests)) {
throw new Error('Invalid response from Ai requests API.');
}

return {
aiRequests,
nextPageUri,
};
const response = await axios.get(
`${GDevelopGenerationApi.baseUrl}/ai-request`,
{
params: {
userId,
},
headers: {
Authorization: authorizationHeader,
},
}
);
return response.data;
};
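A sketch of how a caller could walk every page with the new signature above (assuming the module's `AiRequest` type; `fetchAllAiRequests` is hypothetical and not part of the commit):

// Sketch only: follow nextPageUri until the API stops returning one.
const fetchAllAiRequests = async (
  getAuthorizationHeader: () => Promise<string>,
  userId: string
): Promise<Array<AiRequest>> => {
  const allRequests: Array<AiRequest> = [];
  let forceUri: string | null = null;
  do {
    // The first call requests '/ai-request' with perPage: 10; later calls follow
    // the nextPageUri extracted from the response's Link header.
    const page = await getAiRequests(getAuthorizationHeader, { userId, forceUri });
    allRequests.push(...page.aiRequests);
    forceUri = page.nextPageUri || null;
  } while (forceUri);
  return allRequests;
};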

export const createAiRequest = async (
@@ -275,8 +252,8 @@ export const createAiRequest = async (
|}
): Promise<AiRequest> => {
const authorizationHeader = await getAuthorizationHeader();
const response = await apiClient.post(
'/ai-request',
const response = await axios.post(
`${GDevelopGenerationApi.baseUrl}/ai-request`,
{
gdevelopVersionWithHash: getIDEVersionWithHash(),
userRequest,
@@ -329,8 +306,10 @@ export const addMessageToAiRequest = async (
|}
): Promise<AiRequest> => {
const authorizationHeader = await getAuthorizationHeader();
const response = await apiClient.post(
`/ai-request/${aiRequestId}/action/add-message`,
const response = await axios.post(
`${
GDevelopGenerationApi.baseUrl
}/ai-request/${aiRequestId}/action/add-message`,
{
gdevelopVersionWithHash: getIDEVersionWithHash(),
functionCallOutputs,
@@ -372,8 +351,10 @@ export const sendAiRequestFeedback = async (
|}
): Promise<AiRequest> => {
const authorizationHeader = await getAuthorizationHeader();
const response = await apiClient.post(
`/ai-request/${aiRequestId}/action/set-feedback`,
const response = await axios.post(
`${
GDevelopGenerationApi.baseUrl
}/ai-request/${aiRequestId}/action/set-feedback`,
{
gdevelopVersionWithHash: getIDEVersionWithHash(),
messageIndex,
@@ -434,8 +415,8 @@ export const createAiGeneratedEvent = async (
|}
): Promise<CreateAiGeneratedEventResult> => {
const authorizationHeader = await getAuthorizationHeader();
const response = await apiClient.post(
`/ai-generated-event`,
const response = await axios.post(
`${GDevelopGenerationApi.baseUrl}/ai-generated-event`,
{
gdevelopVersionWithHash: getIDEVersionWithHash(),
gameProjectJson,
@@ -493,8 +474,8 @@ export const getAiGeneratedEvent = async (
|}
): Promise<AiGeneratedEvent> => {
const authorizationHeader = await getAuthorizationHeader();
const response = await apiClient.get(
`/ai-generated-event/${aiGeneratedEventId}`,
const response = await axios.get(
`${GDevelopGenerationApi.baseUrl}/ai-generated-event/${aiGeneratedEventId}`,
{
params: {
userId,
@@ -524,8 +505,8 @@ export const createAssetSearch = async (
|}
): Promise<AssetSearch> => {
const authorizationHeader = await getAuthorizationHeader();
const response = await apiClient.post(
`/asset-search`,
const response = await axios.post(
`${GDevelopGenerationApi.baseUrl}/asset-search`,
{
gdevelopVersionWithHash: getIDEVersionWithHash(),
searchTerms,
@@ -565,8 +546,10 @@ export const createAiUserContentPresignedUrls = async (
|}
): Promise<AiUserContentPresignedUrlsResult> => {
const authorizationHeader = await getAuthorizationHeader();
const response = await apiClient.post(
`/ai-user-content/action/create-presigned-urls`,
const response = await axios.post(
`${
GDevelopGenerationApi.baseUrl
}/ai-user-content/action/create-presigned-urls`,
{
gdevelopVersionWithHash: getIDEVersionWithHash(),
gameProjectJsonHash,

@@ -3,11 +3,6 @@ import * as React from 'react';
import paperDecorator from '../../PaperDecorator';
import { AskAiHistoryContent } from '../../../AiGeneration/AskAiHistory';
import FixedHeightFlexContainer from '../../FixedHeightFlexContainer';
import {
AiRequestContext,
initialAiRequestContextState,
} from '../../../AiGeneration/AiRequestContext';
import { type AiRequest } from '../../../Utils/GDevelopServices/Generation';

// Re-use fake AI request data from AiRequestChat.stories.js
const fakeOutputWithUserRequestOnly = [
@@ -98,190 +93,183 @@ export default {
decorators: [paperDecorator],
};

const AskAIHistoryContentStoryTemplate = ({
error,
isLoading,
aiRequests,
canLoadMore,
selectedAiRequestId,
}: {|
error: ?Error,
isLoading: boolean,
aiRequests: { [string]: AiRequest },
canLoadMore: boolean,
selectedAiRequestId: string | null,
|}) => (
export const Loading = () => (
<FixedHeightFlexContainer height={500}>
<AiRequestContext.Provider
value={{
...initialAiRequestContextState,
aiRequestStorage: {
...initialAiRequestContextState.aiRequestStorage,
aiRequests,
isLoading,
error,
canLoadMore,
},
}}
>
<AskAiHistoryContent
onSelectAiRequest={() => {}}
selectedAiRequestId={selectedAiRequestId}
/>
</AiRequestContext.Provider>
<AskAiHistoryContent
aiRequests={null}
isLoading={true}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const Loading = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{}}
isLoading={true}
error={null}
selectedAiRequestId={null}
canLoadMore={false}
/>
);

export const Errored = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{}}
isLoading={false}
error={new Error('Failed to fetch AI requests')}
selectedAiRequestId={null}
canLoadMore={false}
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={null}
isLoading={false}
error={new Error('Failed to fetch AI requests')}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const Empty = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{}}
isLoading={false}
error={null}
selectedAiRequestId={null}
canLoadMore={false}
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={[]}
isLoading={false}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const SingleAiRequest = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{
'request-1': createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T10:30:00Z',
output: fakeOutputWithAiResponses,
}),
}}
isLoading={false}
error={null}
selectedAiRequestId={null}
canLoadMore={false}
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={[
createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T10:30:00Z',
output: fakeOutputWithAiResponses,
}),
]}
isLoading={false}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const MultipleAiRequests = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{
'request-1': createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T14:30:00Z',
output: fakeOutputWithAiResponses,
}),
'request-2': createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
'request-3': createFakeAiRequest({
id: 'request-3',
createdAt: '2024-03-10T16:20:00Z',
}),
}}
isLoading={false}
error={null}
selectedAiRequestId={null}
canLoadMore
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={[
createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T14:30:00Z',
output: fakeOutputWithAiResponses,
}),
createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
createFakeAiRequest({
id: 'request-3',
createdAt: '2024-03-10T16:20:00Z',
}),
]}
isLoading={false}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const WithSelectedRequest = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{
'request-1': createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T14:30:00Z',
output: fakeOutputWithAiResponses,
}),
'request-2': createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
}}
isLoading={false}
error={null}
selectedAiRequestId="request-2"
canLoadMore={false}
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={[
createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T14:30:00Z',
output: fakeOutputWithAiResponses,
}),
createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
]}
isLoading={false}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId="request-2"
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const WithWorkingRequest = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{
'request-1': createFakeAiRequest({
id: 'request-1',
status: 'working',
createdAt: '2024-03-15T14:30:00Z',
}),
'request-2': createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
}}
isLoading={false}
error={null}
selectedAiRequestId={null}
canLoadMore={false}
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={[
createFakeAiRequest({
id: 'request-1',
status: 'working',
createdAt: '2024-03-15T14:30:00Z',
}),
createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
]}
isLoading={false}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const WithErroredRequest = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{
'request-1': createFakeAiRequest({
id: 'request-1',
status: 'error',
createdAt: '2024-03-15T14:30:00Z',
error: { code: 'internal-error', message: 'Some error happened' },
}),
'request-2': createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
}}
isLoading={false}
error={null}
selectedAiRequestId={null}
canLoadMore={false}
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={[
createFakeAiRequest({
id: 'request-1',
status: 'error',
createdAt: '2024-03-15T14:30:00Z',
error: { code: 'internal-error', message: 'Some error happened' },
}),
createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
output: fakeOutputWithDifferentUserRequest,
}),
]}
isLoading={false}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);

export const RefreshingRequests = () => (
<AskAIHistoryContentStoryTemplate
aiRequests={{
'request-1': createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T14:30:00Z',
}),
'request-2': createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
}),
}}
isLoading={true}
error={null}
selectedAiRequestId={null}
canLoadMore={false}
/>
<FixedHeightFlexContainer height={500}>
<AskAiHistoryContent
aiRequests={[
createFakeAiRequest({
id: 'request-1',
createdAt: '2024-03-15T14:30:00Z',
}),
createFakeAiRequest({
id: 'request-2',
createdAt: '2024-03-14T09:45:00Z',
}),
]}
isLoading={true}
error={null}
onSelectAiRequest={() => {}}
selectedAiRequestId={null}
onFetchAiRequests={async () => {}}
/>
</FixedHeightFlexContainer>
);