import { useState, useEffect, useCallback, useRef } from 'react';
import { FeedPost } from '@/lib/db';
import * as db from '@/lib/db';
import { FEED_API_ENDPOINT, FEED_PAGE_SIZE } from '@/constants';
import { useProfiles } from '@/contexts/ProfilesContext';
import { useFeedCache } from '@/contexts/FeedCacheContext';
import { supabase } from '@/integrations/supabase/client';

export type FeedSortOption = 'latest' | 'top';

interface UseFeedDataProps {
  source?: 'home' | 'collection' | 'tag' | 'user' | 'widget';
  sourceId?: string;
  isOrgContext?: boolean;
  orgSlug?: string;
  enabled?: boolean;
  sortBy?: FeedSortOption;
  supabaseClient?: any; // Optional client override; typed as any to avoid importing the SupabaseClient type here
}
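
/**
 * Paginated feed data for home, collection, tag, user and widget views.
 * State is seeded from and persisted to FeedCacheContext, so navigating back
 * restores the previous posts and pagination without an immediate refetch.
 * Posts come from the feed API (with optional SSR hydration on the first
 * home-page load) and are re-sorted client-side according to `sortBy`.
 */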
export const useFeedData = ({
  source = 'home',
  sourceId,
  isOrgContext,
  orgSlug,
  enabled = true,
  sortBy = 'latest',
  supabaseClient
}: UseFeedDataProps) => {
  const { getCache, saveCache } = useFeedCache();
  const cacheKey = `${source}-${sourceId || ''}-${isOrgContext ? 'org' : 'personal'}-${orgSlug || ''}-${sortBy}`;

  // Initialize from cache if available
  const [posts, setPosts] = useState<FeedPost[]>(() => {
    const cached = getCache(cacheKey);
    return cached ? cached.posts : [];
  });
  const [page, setPage] = useState(() => {
    const cached = getCache(cacheKey);
    return cached ? cached.page : 0;
  });
  const [hasMore, setHasMore] = useState(() => {
    const cached = getCache(cacheKey);
    return cached !== null ? cached.hasMore : true;
  });
  const [loading, setLoading] = useState(() => {
    const cached = getCache(cacheKey);
    return !cached;
  });

  const [isFetchingMore, setIsFetchingMore] = useState(false);
  const [error, setError] = useState<Error | null>(null);
  const { fetchProfiles } = useProfiles();

  // Track if we mounted with cache to avoid double fetch
  const mountedWithCache = useRef(!!getCache(cacheKey));

  // Reset pagination when source changes (if not cached)
  useEffect(() => {
    const cached = getCache(cacheKey);
    if (cached) {
      setPosts(cached.posts);
      setPage(cached.page);
      setHasMore(cached.hasMore);
      setLoading(false);
      mountedWithCache.current = true;
    } else {
      setPosts([]);
      setPage(0);
      setHasMore(true);
      setLoading(true);
      mountedWithCache.current = false;
    }
  }, [source, sourceId, isOrgContext, orgSlug, cacheKey, getCache]);

  // Persist the data state to the cache whenever it changes. Scroll position is
  // saved by the consumer (PhotoGrid) before unmount, and saveCache overwrites the
  // whole entry, so read the current cache first to preserve any stored scrollY.
  useEffect(() => {
    if (!loading && posts.length > 0) {
      const currentCache = getCache(cacheKey);
      saveCache(cacheKey, {
        posts,
        page,
        hasMore,
        scrollY: currentCache?.scrollY || 0
      });
    }
  }, [posts, page, hasMore, loading, cacheKey, saveCache, getCache]);
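
  // Fetches one page of the feed. On the first home-page load it can consume
  // server-rendered posts from window.__INITIAL_STATE__ (skipping the network call);
  // otherwise it hits the feed API with the current session token, then augments,
  // re-sorts and merges the results into state.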
  const loadFeed = useCallback(async (currentPage: number, isLoadMore: boolean) => {
    if (!enabled) {
      setLoading(false);
      return;
    }

    if (isLoadMore) {
      setIsFetchingMore(true);
    } else {
      setLoading(true);
    }

    setError(null);
    let fetchedPosts: any[] = [];

    try {
      // 1. Hydration (Home only, first page)
      if (source === 'home' && !sourceId && currentPage === 0 && window.__INITIAL_STATE__?.feed) {
        fetchedPosts = window.__INITIAL_STATE__.feed;
        window.__INITIAL_STATE__.feed = undefined;
      }
      // 2. API fetch (primary path). The API endpoint handles the complicated
      // filtering/sorting logic server-side; skip it when hydration above already
      // provided posts.
      if (fetchedPosts.length === 0) {
        const SERVER_URL = import.meta.env.VITE_SERVER_IMAGE_API_URL || '';
        let queryParams = `?page=${currentPage}&limit=${FEED_PAGE_SIZE}&sortBy=${sortBy}`;
        if (source) queryParams += `&source=${source}`;
        if (sourceId) queryParams += `&sourceId=${sourceId}`;

        // Pass the session token from the injected client (or the default one) as an
        // Authorization header so the API can authorize the request.
        const client = supabaseClient || supabase;

        const { data: { session } } = await client.auth.getSession();
        const headers: Record<string, string> = {};
        if (session?.access_token) {
          headers['Authorization'] = `Bearer ${session.access_token}`;
        }

        const fetchUrl = SERVER_URL
          ? `${SERVER_URL}${FEED_API_ENDPOINT}${queryParams}`
          : `${FEED_API_ENDPOINT}${queryParams}`;

        const res = await fetch(fetchUrl, { headers });
        if (!res.ok) {
          // No silent DB fallback here: log and surface the error so the caller can
          // decide how to recover.
          console.warn('Feed API request failed:', res.statusText);
          throw new Error(`Feed fetch failed: ${res.statusText}`);
        } else {
          fetchedPosts = await res.json();
        }
      }
      // 3. A direct DB fallback (useful while migrating client DB queries to the API)
      // could go here, but it is currently disabled; failures above throw instead.

      if (fetchedPosts.length < FEED_PAGE_SIZE) {
        setHasMore(false);
      } else {
        setHasMore(true);
      }

      // Augment posts (ensure cover, author profiles etc are set)
      const augmentedPosts = db.augmentFeedPosts(fetchedPosts);

      // Update cover based on sort mode
      const postsWithUpdatedCovers = updatePostCovers(augmentedPosts, sortBy);

      // Apply client-side sorting
      setPosts(prev => {
        const combined = isLoadMore ? [...prev, ...postsWithUpdatedCovers] : postsWithUpdatedCovers;
        // Re-sort the entire combined list to maintain correct order across pages
        return sortFeedPosts(combined, sortBy);
      });

      // Fetch profiles via context for any users in the feed
      const userIds = Array.from(new Set(augmentedPosts.map((p: any) => p.user_id)));
      if (userIds.length > 0) {
        await fetchProfiles(userIds as string[]);
      }

    } catch (err) {
      setError(err instanceof Error ? err : new Error('Unknown error fetching feed'));
    } finally {
      setLoading(false);
      setIsFetchingMore(false);
    }
  }, [source, sourceId, isOrgContext, orgSlug, enabled, fetchProfiles, sortBy, supabaseClient]);

  // Initial Load
  useEffect(() => {
    // If we initialized from cache, don't fetch page 0 again immediately.
    if (mountedWithCache.current) {
      mountedWithCache.current = false;
      return;
    }
    loadFeed(0, false);
  }, [loadFeed]);

  const loadMore = useCallback(() => {
    if (!hasMore || isFetchingMore || loading) return;
    const nextPage = page + 1;
    setPage(nextPage);
    loadFeed(nextPage, true);
  }, [hasMore, isFetchingMore, loading, page, loadFeed]);

  return { posts, loading, error, hasMore, loadMore, isFetchingMore };
};

// Helper function to update post covers based on sort mode
const updatePostCovers = (posts: FeedPost[], sortBy: FeedSortOption): FeedPost[] => {
  return posts.map(post => {
    if (!post.pictures || post.pictures.length === 0) {
      return post;
    }

    const validPics = post.pictures.filter((p: any) => p.visible !== false);
    if (validPics.length === 0) {
      return post;
    }

    let newCover;
    if (sortBy === 'latest') {
      // For "Latest" mode: Show the newest picture (by created_at)
      newCover = validPics.reduce((newest, current) => {
        const newestDate = new Date(newest.created_at || post.created_at).getTime();
        const currentDate = new Date(current.created_at || post.created_at).getTime();
        return currentDate > newestDate ? current : newest;
      });
    } else {
      // For "Top" or default: Use first by position
      const sortedByPosition = [...validPics].sort((a, b) => (a.position || 0) - (b.position || 0));
      newCover = sortedByPosition[0];
    }

    return {
      ...post,
      cover: newCover
    };
  });
};

// Helper function to sort feed posts
const sortFeedPosts = (posts: FeedPost[], sortBy: FeedSortOption): FeedPost[] => {
  const sorted = [...posts];

  if (sortBy === 'top') {
    // Sort by likes_count descending, then by created_at descending as tiebreaker
    sorted.sort((a, b) => {
      const likesA = a.likes_count || 0;
      const likesB = b.likes_count || 0;

      if (likesB !== likesA) {
        return likesB - likesA;
      }

      // Tiebreaker: most recent first
      return new Date(b.created_at).getTime() - new Date(a.created_at).getTime();
    });
  } else {
    // 'latest' - Sort by created_at descending (most recent first)
    // Also consider the latest picture modification if pictures array has dates
    sorted.sort((a, b) => {
      const latestPicDateA = a.pictures && a.pictures.length > 0
        ? Math.max(...a.pictures.map(p => new Date(p.created_at || a.created_at).getTime()))
        : new Date(a.created_at).getTime();

      const latestPicDateB = b.pictures && b.pictures.length > 0
        ? Math.max(...b.pictures.map(p => new Date(p.created_at || b.created_at).getTime()))
        : new Date(b.created_at).getTime();

      return latestPicDateB - latestPicDateA;
    });
  }

  return sorted;
};
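
/*
 * Usage sketch (illustrative only: `FeedGrid`, `PostCard`, `Spinner` and the
 * `post.id` key are hypothetical names, not part of this module):
 *
 *   const FeedGrid = ({ tagId }: { tagId: string }) => {
 *     const { posts, loading, error, hasMore, loadMore, isFetchingMore } =
 *       useFeedData({ source: 'tag', sourceId: tagId, sortBy: 'top' });
 *
 *     if (loading) return <Spinner />;
 *     if (error) return <p>Failed to load feed: {error.message}</p>;
 *
 *     return (
 *       <>
 *         {posts.map(post => <PostCard key={post.id} post={post} />)}
 *         {hasMore && (
 *           <button onClick={loadMore} disabled={isFetchingMore}>
 *             {isFetchingMore ? 'Loading...' : 'Load more'}
 *           </button>
 *         )}
 *       </>
 *     );
 *   };
 */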