Spaces:
Sleeping
Sleeping
| import type { Dataset, DataItem, Modality, SingleComparisonResult, DatasetMetadata } from '../types'; | |
// Base URL for the backend API.
// For local development this might be 'http://localhost:8000'.
// When deployed on Hugging Face Spaces, it is the relative path '/'.
const API_BASE_URL = '/';
| /** | |
| * A helper function to handle API errors. | |
| */ | |
| const handleApiError = async (response: Response) => { | |
| if (!response.ok) { | |
| let errorMessage = `HTTP error! status: ${response.status}`; | |
| try { | |
| const errorData = await response.json(); | |
| errorMessage = errorData.detail || JSON.stringify(errorData); | |
| } catch (e) { | |
| // The response was not JSON | |
| errorMessage = await response.text(); | |
| } | |
| throw new Error(errorMessage); | |
| } | |
| return response.json(); | |
| }; | |
| // Helper to correctly encode unicode strings to base64, which is required by the backend. | |
| const unicodeToBase64 = (str: string) => { | |
| return btoa( | |
| encodeURIComponent(str).replace(/%([0-9A-F]{2})/g, (match, p1) => { | |
| return String.fromCharCode(parseInt(p1, 16)); | |
| }) | |
| ); | |
| }; | |
| const contentToBase64 = (content: string | ArrayBuffer, modality: Modality): Promise<string> => { | |
| return new Promise((resolve, reject) => { | |
| if (modality === 'text') { | |
| try { | |
| // Use helper for proper unicode support | |
| resolve(unicodeToBase64(content as string)); | |
| } catch (error) { | |
| console.error("Failed to Base64 encode text content:", error); | |
| reject(new Error("Failed to encode text. Ensure it doesn't contain unsupported characters.")); | |
| } | |
| } else if (typeof content === 'string') { | |
| // For images, content is a data URL | |
| const parts = content.split(','); | |
| resolve(parts.length > 1 ? parts[1] : content); | |
| } else if (content instanceof ArrayBuffer) { | |
| // For meshes | |
| const bytes = new Uint8Array(content); | |
| let binary = ''; | |
| for (let i = 0; i < bytes.byteLength; i++) { | |
| binary += String.fromCharCode(bytes[i]); | |
| } | |
| resolve(btoa(binary)); | |
| } else { | |
| reject(new Error('Unsupported content type for base64 conversion.')); | |
| } | |
| }); | |
| }; | |
| /** | |
| * Post-processes data received from the backend to ensure correct frontend rendering. | |
| * - Converts raw Base64 image strings to Data URLs. | |
| * - Converts raw Base64 mesh strings to ArrayBuffers. | |
| */ | |
| const postProcessApiData = <T extends { content: string | ArrayBuffer }>(item: T, modality: 'images' | 'texts' | 'meshes'): T => { | |
| if (modality === 'images' && typeof item.content === 'string' && !item.content.startsWith('data:')) { | |
| item.content = `data:image/png;base64,${item.content}`; | |
| } | |
| if (modality === 'meshes' && typeof item.content === 'string') { | |
| const binaryString = atob(item.content); | |
| const len = binaryString.length; | |
| const bytes = new Uint8Array(len); | |
| for (let i = 0; i < len; i++) { | |
| bytes[i] = binaryString.charCodeAt(i); | |
| } | |
| item.content = bytes.buffer; | |
| } | |
| return item; | |
| } | |
| /** | |
| * Starts the dataset processing on the backend by uploading a .zip file. | |
| * @param file The .zip file to upload. | |
| * @returns A promise that resolves with a job ID for polling the status. | |
| */ | |
| export const startDatasetProcessing = async (file: File): Promise<string> => { | |
| const formData = new FormData(); | |
| formData.append('file', file); | |
| const response = await fetch(`${API_BASE_URL}api/process-dataset`, { | |
| method: 'POST', | |
| body: formData, | |
| }); | |
| const { job_id } = await handleApiError(response); | |
| if (!job_id) { | |
| throw new Error("API did not return a job ID."); | |
| } | |
| return job_id; | |
| }; | |
/** Shape of the payload returned by the `/api/processing-status/{jobId}` endpoint. */
interface ProcessingStatus {
  status: 'starting' | 'processing' | 'complete' | 'error';
  stage?: string;     // Name of the current processing stage, when reported.
  progress?: number;  // assumes a 0-100 percentage — TODO confirm with backend
  message?: string;   // Status detail or error description.
  result?: Dataset;   // The processed dataset; consumed only when status is 'complete'.
}
| /** | |
| * Polls the backend for the status of a dataset processing job. | |
| * @param jobId The ID of the job to check. | |
| * @returns A promise that resolves with the current status. | |
| */ | |
| export const getProcessingStatus = async (jobId: string): Promise<ProcessingStatus> => { | |
| const response = await fetch(`${API_BASE_URL}api/processing-status/${jobId}`); | |
| const status: ProcessingStatus = await handleApiError(response); | |
| // If the job is complete, post-process the resulting dataset data | |
| if (status.status === 'complete' && status.result) { | |
| const processedDataset = status.result; | |
| // The backend returns a string for the date, convert it to a Date object. | |
| if (processedDataset.uploadDate && typeof processedDataset.uploadDate === 'string') { | |
| processedDataset.uploadDate = new Date(processedDataset.uploadDate); | |
| } | |
| // Ensure all data items have the correct format for frontend rendering. | |
| if (processedDataset.data) { | |
| if (processedDataset.data.images) { | |
| processedDataset.data.images = processedDataset.data.images.map((item: DataItem) => postProcessApiData(item, 'images')); | |
| } | |
| if (processedDataset.data.meshes) { | |
| processedDataset.data.meshes = processedDataset.data.meshes.map((item: DataItem) => postProcessApiData(item, 'meshes')); | |
| } | |
| } | |
| status.result = processedDataset; | |
| } | |
| return status; | |
| } | |
| /** | |
| * Sends a local dataset to the backend to populate its in-memory cache. | |
| * This is crucial for making comparisons after a page reload. | |
| * @param dataset The full local dataset object from IndexedDB. | |
| */ | |
| export const ensureDatasetInCache = async (dataset: Dataset): Promise<void> => { | |
| // The backend expects content as base64 or raw text, but our Mesh content is an ArrayBuffer. | |
| // We need to convert it before sending. Images are already data URLs (string). | |
| const payload = { | |
| ...dataset, | |
| data: { | |
| ...dataset.data, | |
| meshes: await Promise.all(dataset.data.meshes.map(async (mesh) => { | |
| if (mesh.content instanceof ArrayBuffer) { | |
| return { ...mesh, content: await contentToBase64(mesh.content, 'mesh') }; | |
| } | |
| return mesh; | |
| })), | |
| } | |
| }; | |
| const response = await fetch(`${API_BASE_URL}api/cache-local-dataset`, { | |
| method: 'POST', | |
| headers: { 'Content-Type': 'application/json' }, | |
| body: JSON.stringify(payload), | |
| }); | |
| await handleApiError(response); | |
| }; | |
| /** | |
| * Finds the top matches for a single item by querying the backend. | |
| * @param sourceItem The item to find matches for. | |
| * @param sourceModality The modality of the source item. | |
| * @param datasetId The ID of the dataset to search within. | |
| * @returns A promise that resolves with the comparison results. | |
| */ | |
| export const findTopMatches = async ( | |
| sourceItem: DataItem, | |
| sourceModality: Modality, | |
| datasetId: string | |
| ): Promise<SingleComparisonResult> => { | |
| const contentAsBase64 = await contentToBase64(sourceItem.content, sourceModality); | |
| const requestBody = { | |
| modality: sourceModality, | |
| content: contentAsBase64, | |
| dataset_id: datasetId, | |
| }; | |
| const response = await fetch(`${API_BASE_URL}api/find-matches`, { | |
| method: 'POST', | |
| headers: { 'Content-Type': 'application/json' }, | |
| body: JSON.stringify(requestBody), | |
| }); | |
| const result: SingleComparisonResult = await handleApiError(response); | |
| // The API returns a representation of the source item with raw base64. | |
| // We replace it with our original source item which has the correct format for rendering. | |
| result.sourceItem = sourceItem; | |
| // Post-process all returned match items to ensure they render correctly. | |
| for (const key of Object.keys(result.results)) { | |
| const modalityKey = key as 'images' | 'texts' | 'meshes'; | |
| const matches = result.results[modalityKey]; | |
| if (matches) { | |
| matches.forEach(match => { | |
| postProcessApiData(match.item, modalityKey); | |
| }); | |
| } | |
| } | |
| return result; | |
| }; | |
| // --- Service functions for SHARED datasets --- | |
| /** | |
| * Returns the metadata for all available shared datasets by querying the backend. | |
| */ | |
| export const getSharedDatasetMetadata = async (): Promise<DatasetMetadata[]> => { | |
| try { | |
| const response = await fetch('/api/shared-dataset-metadata'); | |
| const metadataList = await handleApiError(response); | |
| // The backend returns strings for dates, convert them to Date objects. | |
| return metadataList.map((meta: any) => ({ | |
| ...meta, | |
| uploadDate: new Date(meta.uploadDate), | |
| })); | |
| } catch (error) { | |
| console.error("Failed to fetch shared dataset metadata:", error); | |
| // Re-throw the error so the UI layer can handle it. | |
| throw error; | |
| } | |
| }; | |
| /** | |
| * Returns the full data structure for a specific shared dataset from the backend. | |
| * The content for each item remains null, only the URLs are provided. | |
| */ | |
| export const getSharedDataset = async (id: string): Promise<Dataset | null> => { | |
| try { | |
| const response = await fetch(`/api/shared-dataset?id=${id}`); | |
| const dataset = await handleApiError(response); | |
| // Convert date string from API to Date object | |
| dataset.uploadDate = new Date(dataset.uploadDate); | |
| return dataset; | |
| } catch (error) { | |
| console.error(`Failed to fetch shared dataset with id ${id}:`, error); | |
| // Re-throw the error so the UI layer can handle it. | |
| throw error; | |
| } | |
| }; | |