jingrow/utlis/data.js

import axios from 'axios';
import fs from 'fs';
import path from 'path';
const JINGROW_SERVER_URL = process.env.JINGROW_SERVER_URL;
const PUBLIC_FILES_DIR = path.join(process.cwd(), 'public/files');
if (!fs.existsSync(PUBLIC_FILES_DIR)) {
  fs.mkdirSync(PUBLIC_FILES_DIR, { recursive: true });
}
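
/**
 * Download a remote file into public/files and return its local URL.
 * Relative URLs are resolved against JINGROW_SERVER_URL; files that already
 * exist locally are reused. On any failure the original URL is returned unchanged.
 */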
async function downloadToLocal(fileUrl) {
  if (!fileUrl) return fileUrl;
  try {
    let fullUrl = fileUrl;
    if (!/^https?:\/\//.test(fileUrl)) {
      fullUrl = `${JINGROW_SERVER_URL}${fileUrl}`;
    }
    const fileName = path.basename(fullUrl.split('?')[0]);
    const localPath = path.join(PUBLIC_FILES_DIR, fileName);
    const localUrl = `/files/${fileName}`;
    if (!fs.existsSync(localPath)) {
      const response = await axios.get(fullUrl, { responseType: 'stream' });
      await new Promise((resolve, reject) => {
        const writer = fs.createWriteStream(localPath);
        response.data.pipe(writer);
        writer.on('finish', resolve);
        writer.on('error', (error) => {
          console.error(`Error writing file ${localPath}:`, error);
          // Remove the partial file and reject so the caller falls back to the original URL
          fs.unlink(localPath, () => reject(error));
        });
      });
    }
    return localUrl;
  } catch (e) {
    console.error(`Failed to download ${fileUrl}:`, e.message);
    return fileUrl; // Return the original URL on any error
  }
}
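
/**
 * Collect the src attribute of every <img> tag in an HTML string.
 */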
function extractImageUrlsFromHtml(html) {
  if (!html) return [];
  const regex = /<img[^>]+src=["']([^"'>]+)["']/g;
  const urls = [];
  let match;
  while ((match = regex.exec(html)) !== null) {
    urls.push(match[1]);
  }
  return urls;
}
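
/**
 * Mirror every media reference on a data item (top-level fields, sub-items,
 * and <img> tags inside HTML fields) to local copies when downloadFiles is true.
 */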
async function processDataItem(item, downloadFiles) {
  if (!downloadFiles) return item;

  if (item.image) {
    item.image = await downloadToLocal(item.image);
  }
  if (item.image_1) {
    item.image_1 = await downloadToLocal(item.image_1);
  }
  if (item.image_2) {
    item.image_2 = await downloadToLocal(item.image_2);
  }
  if (item.video_src) {
    item.video_src = await downloadToLocal(item.video_src);
  }
  if (item.file_src) {
    item.file_src = await downloadToLocal(item.file_src);
  }

  if (item.items && Array.isArray(item.items)) {
    for (const subItem of item.items) {
      if (subItem.item_image) {
        subItem.item_image = await downloadToLocal(subItem.item_image);
      }
      if (subItem.item_video_src) {
        subItem.item_video_src = await downloadToLocal(subItem.item_video_src);
      }
      if (subItem.item_icon) {
        subItem.item_icon = await downloadToLocal(subItem.item_icon);
      }
    }
  }

  for (const key of ['content', 'additional_content', 'description', 'p1', 'p2', 'p3']) {
    if (item[key]) {
      const urls = extractImageUrlsFromHtml(item[key]);
      let html = item[key];
      for (const url of urls) {
        const localUrl = await downloadToLocal(url);
        html = html.replaceAll(url, localUrl);
      }
      item[key] = html;
    }
  }

  return item;
}
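
/**
 * Fetch page data for the given slug list from the Jingrow server.
 * When downloadFiles is true, referenced media is mirrored locally first.
 * Returns { data, total, page_info } on success or { error } on failure.
 */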
export async function getPageData({
  slug_list,
  page = 1,
  page_size,
  downloadFiles = false
}) {
  try {
    if (!Array.isArray(slug_list)) {
      throw new Error('slug_list must be an array');
    }
    const params = { slug_list: JSON.stringify(slug_list), page };
    if (page_size) params.page_size = page_size;
    const response = await axios.get(
      `${JINGROW_SERVER_URL}/api/method/jsite.api.v1.get_page_data`,
      { params }
    );
    const message = response.data.message;
    if (message?.error) {
      const errorMsg = typeof message.error === 'object' ? JSON.stringify(message.error) : message.error;
      throw new Error(errorMsg);
    }
    let data = message?.data;
    if (Array.isArray(data)) {
      if (downloadFiles) {
        data = await Promise.all(data.map(item => processDataItem(item, downloadFiles)));
      }
    } else if (data) {
      data = await processDataItem(data, downloadFiles);
    }
    return {
      data, // processed data (local file URLs when downloadFiles is true)
      total: message.total,
      page_info: message.page_info,
    };
  } catch (error) {
    console.error('Error in getPageData:', error);
    return { error: { message: error.message, detail: error?.response?.data || null } };
  }
}
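
/**
 * Fetch all page slugs from the Jingrow server, excluding the homepage slug
 * (handled by app/page.jsx). Returns [] on any error.
 */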
export async function getAllSlugs() {
  try {
    const response = await axios.get(
      `${JINGROW_SERVER_URL}/api/method/jsite.api.v1.get_all_slugs`
    );
    const slugs = response.data.message?.data;
    if (!Array.isArray(slugs)) {
      console.error('API did not return an array of slugs:', response.data);
      return [];
    }
    // Filter out slugs that represent the root page, as it's handled by app/page.jsx
    const filteredSlugs = slugs.filter(slug => {
      if (!Array.isArray(slug) || slug.length === 0) {
        return false;
      }
      // Exclude slugs like [''] or ['/'] which are for the homepage
      if (slug.length === 1 && (slug[0] === '' || slug[0] === '/')) {
        return false;
      }
      return true;
    });
    return filteredSlugs;
  } catch (error) {
    console.error('Error fetching slugs:', error);
    return [];
  }
}
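
// Example usage — a sketch only; the catch-all route, import alias, and page
// component below are assumptions, not part of this module:
//
//   // app/[...slug]/page.jsx
//   import { getPageData, getAllSlugs } from '@/utlis/data';
//
//   export async function generateStaticParams() {
//     const slugs = await getAllSlugs(); // each slug is an array of path segments
//     return slugs.map((slug) => ({ slug }));
//   }
//
//   export default async function Page({ params }) {
//     const { data, error } = await getPageData({ slug_list: params.slug, downloadFiles: true });
//     if (error) throw new Error(error.message);
//     return <pre>{JSON.stringify(data, null, 2)}</pre>;
//   }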