From 19c0ed2fc37c97902addbd29f06315ca83a4ef6e Mon Sep 17 00:00:00 2001
From: Etienne0101
Date: Thu, 14 Dec 2023 11:30:04 +0100
Subject: [PATCH] update catalog fetch

---
 components/docs/DocsGallery.js         |  40 +++++---
 components/docs/FilteredDocsDisplay.js |   2 +-
 components/docs/LastContent.js         |   2 +-
 pages/api/docs.js                      | 122 ++++++++++---------
 4 files changed, 76 insertions(+), 90 deletions(-)

diff --git a/components/docs/DocsGallery.js b/components/docs/DocsGallery.js
index 5115591..c422ef7 100644
--- a/components/docs/DocsGallery.js
+++ b/components/docs/DocsGallery.js
@@ -20,20 +20,34 @@ const DocsGallery = () => {
   const [docsMetadata, setDocsMetadata] = useState([]);
   const [viewMode, setViewMode] = useState('gallery');
 
+  const fetchDocsMetadata = async (range) => {
+    try {
+      const response = await fetch(`/api/docs?action=metadatalist&range=${range}`);
+      const data = await response.json();
+      return data.filter((doc) => doc.metadata.index !== 0);
+    } catch (error) {
+      console.error(
+        'Erreur lors de la récupération des métadonnées des documents',
+        error,
+      );
+      return [];
+    }
+  };
+
   useEffect(() => {
-    const fetchDocsMetadata = async () => {
-      try {
-        const response = await fetch('/api/docs?action=list');
-        const data = await response.json();
-        setDocsMetadata(data.filter((doc) => doc.metadata.index !== 0));
-      } catch (error) {
-        console.error(
-          'Erreur lors de la récupération des métadonnées des documents',
-          error,
-        );
-      }
-    };
-    fetchDocsMetadata();
+    // Fetch the first 10 documents
+    fetchDocsMetadata('1-10').then((initialDocs) => {
+      setDocsMetadata(initialDocs);
+    });
+
+    // Fetch the remaining documents after a delay
+    const timer = setTimeout(() => {
+      fetchDocsMetadata('11-1000').then((additionalDocs) => {
+        setDocsMetadata((prevDocs) => [...prevDocs, ...additionalDocs]);
+      });
+    }, 100); // 0.1 seconds delay
+
+    return () => clearTimeout(timer); // Cleanup the timer
   }, []);
 
   useEffect(() => {
diff --git a/components/docs/FilteredDocsDisplay.js b/components/docs/FilteredDocsDisplay.js
index 5cd32a6..fcb186e 100644
--- a/components/docs/FilteredDocsDisplay.js
+++ b/components/docs/FilteredDocsDisplay.js
@@ -8,7 +8,7 @@ const FilteredDocsDisplay = ({ docsList }) => {
   useEffect(() => {
     const fetchFilteredDocs = async () => {
       try {
-        const response = await fetch(`/api/docs?action=list`);
+        const response = await fetch(`/api/docs?action=metadatalist`);
         const data = await response.json();
         const filteredDocs = data.filter((doc) => docsList.includes(doc.name));
         setDocs(filteredDocs);
diff --git a/components/docs/LastContent.js b/components/docs/LastContent.js
index 7ba126c..6ee137e 100644
--- a/components/docs/LastContent.js
+++ b/components/docs/LastContent.js
@@ -7,7 +7,7 @@ const LastContent = () => {
 
   useEffect(() => {
     const fetchData = async () => {
-      const res = await fetch('/api/docs?action=list');
+      const res = await fetch('/api/docs?action=metadatalist');
       const data = await res.json();
       data.sort(
         (a, b) => new Date(b.metadata.date) - new Date(a.metadata.date),
diff --git a/pages/api/docs.js b/pages/api/docs.js
index 98f9d42..a2da9dd 100644
--- a/pages/api/docs.js
+++ b/pages/api/docs.js
@@ -1,91 +1,63 @@
-import fs from 'fs';
-import path from 'path';
-import matter from 'gray-matter';
 import fetch from 'node-fetch';
-import { supabase } from "../../utils/supabaseClient";
+import matter from 'gray-matter';
 
 export default async function handler(req, res) {
-  const { filename, action } = req.query;
+  const { action, range } = req.query;
+  const FILE_LIST_MD_URL = 'https://nextcloud.datactivist.coop/s/m68ztmJaaTiBitC/download';
+  const NEXTCLOUD_URL = 'https://nextcloud.datactivist.coop/s/Te2XrTkdnG9zgan/download?path=/&files=';
 
-  const GITHUB_TOKEN = process.env.GITHUB_TOKEN;
-  const REPO_PATH = 'datactivist/open-datactivist-private';
-  const BASE_URL = `https://api.github.com/repos/${REPO_PATH}/contents/docs`;
-
-  if (action === 'list') {
+  if (action === 'filelist') {
     try {
-      const docsPath = path.join(process.cwd(), 'posts', 'docs');
-      const files = fs.readdirSync(docsPath);
-
-      const fileNames = files.map((file) => {
-        const filePath = path.join(docsPath, file);
-        const fileContent = fs.readFileSync(filePath, 'utf8');
-        const { data } = matter(fileContent);
-        return {
-          name: file.replace(/\.md$/, ''),
-          metadata: data,
-        };
-      });
+      const fileListResponse = await fetch(FILE_LIST_MD_URL);
+      if (!fileListResponse.ok) {
+        throw new Error(`Failed to fetch file list Markdown. Status: ${fileListResponse.status}`);
+      }
 
-      res.status(200).json(fileNames);
+      const fileListMd = await fileListResponse.text();
+      const fileList = fileListMd.split(',').map(name => name.trim()).filter(name => name !== '');
+      res.status(200).json({ fileList });
     } catch (error) {
-      res
-        .status(500)
-        .json({ message: 'Erreur lors de la récupération des fichiers.' });
+      res.status(500).json({ message: `Error fetching file list Markdown: ${error.message}` });
     }
-  } else {
+  } else if (action === 'metadatalist') {
     try {
-      const filePath = path.join(
-        process.cwd(),
-        'posts',
-        'docs',
-        `${filename}.md`,
-      );
-
-      if (fs.existsSync(filePath)) {
-        const fileContent = fs.readFileSync(filePath, 'utf8');
-        const { data, content } = matter(fileContent);
-        res.status(200).json({ metadata: data, content });
-      } else {
-        // If file not found locally, try fetching from GitHub
-        const fileURL = `${BASE_URL}/${filename}.md`;
-        const response = await fetch(fileURL, {
-          headers: {
-            Authorization: `token ${GITHUB_TOKEN}`,
-            Accept: 'application/vnd.github.v3.raw',
-          },
-        });
-
-        if (!response.ok) {
-          throw new Error('Failed to fetch from GitHub API');
-        }
-
-        const content = await response.text();
-        const { data, content: parsedContent } = matter(content);
+      const fileListResponse = await fetch(FILE_LIST_MD_URL);
+      if (!fileListResponse.ok) {
+        throw new Error(`Failed to fetch file list Markdown. Status: ${fileListResponse.status}`);
+      }
 
-        // Special handling for "pv" files
-        if (filename.endsWith('pv') && data.access === 'datactivist-team') {
-          const user = req.user; // Assuming you have user in req
+      const fileListMd = await fileListResponse.text();
+      let fileList = fileListMd.split(',').map(name => name.trim()).filter(name => name !== '');
 
-          if (user && user.email) {
-            const { data: supabaseData, error } = await supabase
-              .from('docaccess-datactivist-team')
-              .select('email')
-              .eq('email', user.email);
+      // Parse the range and adjust fileList accordingly
+      if (range) {
+        const [start, end] = range.split('-').map(Number);
+        fileList = fileList.slice(start - 1, end);
+      }
 
-            if (error || !supabaseData.length) {
-              res.status(403).json({ message: 'Access denied.' });
-              return;
-            }
-          } else {
-            res.status(403).json({ message: 'Access denied. No user email found.' });
-            return;
-          }
-        }
+      const fetchPromises = fileList.map(filename => {
+        return fetch(`${NEXTCLOUD_URL}${filename}.md`)
+          .then(response => {
+            if (!response.ok) throw new Error(`Failed to fetch file ${filename}. Status: ${response.status}`);
+            return response.text();
+          })
+          .then(content => {
+            const { data } = matter(content);
+            return { name: filename.replace('.md', ''), metadata: data };
+          })
+          .catch(error => {
+            console.error(error.message);
+            return null; // Return null for failed requests
+          });
+      });
 
-        res.status(200).json({ metadata: data, content: parsedContent });
-      }
+      const metadataList = (await Promise.all(fetchPromises)).filter(item => item !== null);
+      res.status(200).json(metadataList);
     } catch (error) {
-      res.status(500).json({ message: 'Erreur lors de la récupération du contenu.' });
+
+      res.status(500).json({ message: `Error processing metadata list: ${error.message}` });
     }
+  } else {
+    res.status(400).json({ message: 'Invalid action' });
   }
-}
\ No newline at end of file
+}
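
For reference, a minimal sketch of the range semantics the new `metadatalist` action assumes: bounds are 1-based and inclusive, mirroring the `slice(start - 1, end)` call in the patch (DocsGallery relies on this when it requests '1-10' and then '11-1000'). The file names below are hypothetical, used only to illustrate the slicing.

// Illustration only: how a `range` string maps onto the comma-separated file list.
const sliceRange = (range, fileList) => {
  const [start, end] = range.split('-').map(Number); // e.g. '1-10' -> [1, 10]
  return fileList.slice(start - 1, end);             // 1-based, inclusive bounds
};

// Hypothetical file names, not taken from the real Nextcloud share.
const names = ['doc-a', 'doc-b', 'doc-c', 'doc-d'];
console.log(sliceRange('1-2', names));    // ['doc-a', 'doc-b']
console.log(sliceRange('3-1000', names)); // ['doc-c', 'doc-d'] (slice clamps an oversized end)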