Initial release: DictIA v0.8.14-alpha (fork de Speakr, AGPL-3.0)

This commit is contained in:
InnovA AI
2026-03-16 21:47:37 +00:00
commit 42772a31ed
365 changed files with 103572 additions and 0 deletions

View File

@@ -0,0 +1,272 @@
/**
* IndexedDB Failed Uploads Storage
* Handles storing and retrying failed uploads with background sync
*/
// Database identifiers for the failed-upload retry store.
const DB_NAME = 'SpeakrFailedUploads';
const DB_VERSION = 1; // Bump when the schema created in onupgradeneeded changes.
const STORE_NAME = 'failedUploads';
// Cached open connection so repeated initDB() calls reuse a single handle.
let dbInstance = null;
/**
 * Open (or reuse) the failed-uploads database, creating the object store
 * and its indexes on first run or version upgrade.
 * @returns {Promise<IDBDatabase>} resolves with the open connection.
 */
export const initDB = () => {
  return new Promise((resolve, reject) => {
    // Fast path: a connection is already cached.
    if (dbInstance) {
      resolve(dbInstance);
      return;
    }
    const openRequest = indexedDB.open(DB_NAME, DB_VERSION);
    openRequest.onupgradeneeded = (event) => {
      const upgradedDb = event.target.result;
      // Create object store for failed uploads
      if (!upgradedDb.objectStoreNames.contains(STORE_NAME)) {
        const store = upgradedDb.createObjectStore(STORE_NAME, { keyPath: 'id', autoIncrement: true });
        store.createIndex('timestamp', 'timestamp', { unique: false });
        store.createIndex('clientId', 'clientId', { unique: false });
        console.log('[FailedUploadsDB] Object store created');
      }
    };
    openRequest.onsuccess = () => {
      dbInstance = openRequest.result;
      console.log('[FailedUploadsDB] Database opened successfully');
      resolve(dbInstance);
    };
    openRequest.onerror = () => {
      console.error('[FailedUploadsDB] Failed to open database:', openRequest.error);
      reject(openRequest.error);
    };
  });
};
/**
 * Persist a failed upload so it can be retried later.
 * @param {Object} uploadData - File (or pre-serialized fileData), notes, tags,
 *   asrOptions, retryCount and last error from the failed attempt.
 * @returns {Promise<number>} resolves with the auto-generated record id.
 * @throws rethrows any storage error after logging it.
 */
export const storeFailedUpload = async (uploadData) => {
  try {
    const db = await initDB();
    // Serialize the File BEFORE opening the transaction: IDB transactions
    // auto-close once inactive, and the async arrayBuffer() call would let
    // the transaction expire before add() runs.
    let fileData = uploadData.fileData || null;
    if (uploadData.file && !fileData) {
      fileData = await uploadData.file.arrayBuffer();
    }
    const record = {
      timestamp: Date.now(),
      clientId: uploadData.clientId || `client-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`,
      fileName: uploadData.file?.name || uploadData.fileName || 'unknown',
      fileSize: uploadData.file?.size || uploadData.fileSize || 0,
      notes: uploadData.notes || '',
      tags: uploadData.tags || [],
      asrOptions: uploadData.asrOptions || {},
      retryCount: uploadData.retryCount || 0,
      lastError: uploadData.error || '',
      fileData,
      mimeType: uploadData.file?.type || uploadData.mimeType || 'audio/webm'
    };
    const store = db.transaction([STORE_NAME], 'readwrite').objectStore(STORE_NAME);
    const addRequest = store.add(record);
    return new Promise((resolve, reject) => {
      addRequest.onsuccess = () => {
        console.log('[FailedUploadsDB] Upload stored for retry:', record.fileName);
        resolve(addRequest.result); // Returns the ID
      };
      addRequest.onerror = () => {
        console.error('[FailedUploadsDB] Failed to store upload:', addRequest.error);
        reject(addRequest.error);
      };
    });
  } catch (error) {
    console.error('[FailedUploadsDB] Error storing failed upload:', error);
    throw error;
  }
};
/**
 * Fetch every stored failed upload.
 * @returns {Promise<Array>} all records; [] when the database cannot be opened.
 */
export const getFailedUploads = async () => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readonly').objectStore(STORE_NAME);
    const getAllRequest = store.getAll();
    return new Promise((resolve, reject) => {
      getAllRequest.onsuccess = () => {
        console.log(`[FailedUploadsDB] Retrieved ${getAllRequest.result.length} failed uploads`);
        resolve(getAllRequest.result);
      };
      getAllRequest.onerror = () => {
        console.error('[FailedUploadsDB] Failed to retrieve uploads:', getAllRequest.error);
        reject(getAllRequest.error);
      };
    });
  } catch (error) {
    console.error('[FailedUploadsDB] Error getting failed uploads:', error);
    return [];
  }
};
/**
 * Look up a single failed upload by its record id.
 * @param {number} id - Auto-generated id returned by storeFailedUpload.
 * @returns {Promise<Object|null|undefined>} the record, undefined when the id
 *   is unknown, or null when the database cannot be opened.
 */
export const getFailedUpload = async (id) => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readonly').objectStore(STORE_NAME);
    const getRequest = store.get(id);
    return new Promise((resolve, reject) => {
      getRequest.onsuccess = () => resolve(getRequest.result);
      getRequest.onerror = () => {
        console.error('[FailedUploadsDB] Failed to get upload:', getRequest.error);
        reject(getRequest.error);
      };
    });
  } catch (error) {
    console.error('[FailedUploadsDB] Error getting failed upload:', error);
    return null;
  }
};
/**
 * Update retry bookkeeping (count, last attempt time, last error) for a
 * stored failed upload.
 *
 * @param {number} id - Record id returned by storeFailedUpload.
 * @param {number} retryCount - New retry count to persist.
 * @param {string|null} [error] - Optional error message from the last attempt.
 */
export const updateRetryCount = async (id, retryCount, error = null) => {
  try {
    // BUG FIX: load the record BEFORE opening the readwrite transaction.
    // The previous code opened the transaction first and then awaited
    // getFailedUpload(id) (which runs its own transaction); the pending
    // readwrite transaction auto-closed while that await was in flight, so
    // the subsequent put() always failed with a TransactionInactiveError.
    const upload = await getFailedUpload(id);
    if (!upload) {
      console.warn('[FailedUploadsDB] Upload not found for retry count update');
      return;
    }
    upload.retryCount = retryCount;
    upload.lastRetry = Date.now();
    if (error) {
      upload.lastError = error;
    }
    const db = await initDB();
    const transaction = db.transaction([STORE_NAME], 'readwrite');
    const objectStore = transaction.objectStore(STORE_NAME);
    return new Promise((resolve, reject) => {
      const request = objectStore.put(upload);
      request.onsuccess = () => {
        console.log(`[FailedUploadsDB] Updated retry count for upload ${id}: ${retryCount}`);
        resolve();
      };
      request.onerror = () => {
        console.error('[FailedUploadsDB] Failed to update retry count:', request.error);
        reject(request.error);
      };
    });
  } catch (error) {
    console.error('[FailedUploadsDB] Error updating retry count:', error);
  }
};
/**
 * Remove a failed upload record, typically after a successful retry.
 * @param {number} id - Record id to delete.
 */
export const deleteFailedUpload = async (id) => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readwrite').objectStore(STORE_NAME);
    const deleteRequest = store.delete(id);
    return new Promise((resolve, reject) => {
      deleteRequest.onsuccess = () => {
        console.log('[FailedUploadsDB] Deleted successful upload:', id);
        resolve();
      };
      deleteRequest.onerror = () => {
        console.error('[FailedUploadsDB] Failed to delete upload:', deleteRequest.error);
        reject(deleteRequest.error);
      };
    });
  } catch (error) {
    console.error('[FailedUploadsDB] Error deleting failed upload:', error);
  }
};
/**
 * Wipe the entire failed-uploads store.
 */
export const clearAllFailedUploads = async () => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readwrite').objectStore(STORE_NAME);
    const clearRequest = store.clear();
    return new Promise((resolve, reject) => {
      clearRequest.onsuccess = () => {
        console.log('[FailedUploadsDB] Cleared all failed uploads');
        resolve();
      };
      clearRequest.onerror = () => {
        console.error('[FailedUploadsDB] Failed to clear uploads:', clearRequest.error);
        reject(clearRequest.error);
      };
    });
  } catch (error) {
    console.error('[FailedUploadsDB] Error clearing failed uploads:', error);
  }
};
/**
 * Count how many failed uploads are waiting to be retried.
 * @returns {Promise<number>} record count; 0 when the database cannot be opened.
 */
export const getFailedUploadCount = async () => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readonly').objectStore(STORE_NAME);
    const countRequest = store.count();
    return new Promise((resolve, reject) => {
      countRequest.onsuccess = () => resolve(countRequest.result);
      countRequest.onerror = () => {
        console.error('[FailedUploadsDB] Failed to count uploads:', countRequest.error);
        reject(countRequest.error);
      };
    });
  } catch (error) {
    console.error('[FailedUploadsDB] Error counting failed uploads:', error);
    return 0;
  }
};

View File

@@ -0,0 +1,76 @@
/**
* Incognito Mode storage utilities
* Uses sessionStorage for temporary storage that auto-clears when tab closes
*/
// Single sessionStorage key used for the tab-scoped incognito recording.
const INCOGNITO_KEY = 'speakr_incognito_recording';
/**
 * Persist an incognito recording for the lifetime of the current tab.
 * @param {Object} data - Recording payload (transcription, summary, title, ...)
 */
export function saveIncognitoRecording(data) {
  try {
    const serialized = JSON.stringify(data);
    sessionStorage.setItem(INCOGNITO_KEY, serialized);
    console.log('[Incognito] Recording saved to sessionStorage');
  } catch (e) {
    console.error('[Incognito] Failed to save recording:', e);
  }
}
/**
 * Read the incognito recording back out of sessionStorage.
 * @returns {Object|null} Parsed recording data, or null when absent/unreadable.
 */
export function getIncognitoRecording() {
  try {
    const raw = sessionStorage.getItem(INCOGNITO_KEY);
    if (!raw) {
      return null;
    }
    return JSON.parse(raw);
  } catch (e) {
    console.error('[Incognito] Failed to retrieve recording:', e);
    return null;
  }
}
/**
 * Remove any stored incognito recording for this tab.
 */
export function clearIncognitoRecording() {
  try {
    sessionStorage.removeItem(INCOGNITO_KEY);
    console.log('[Incognito] Recording cleared from sessionStorage');
  } catch (e) {
    // sessionStorage may be unavailable (e.g. blocked storage) — log and move on.
    console.error('[Incognito] Failed to clear recording:', e);
  }
}
/**
 * Report whether an incognito recording is currently stored.
 * @returns {boolean} false when absent or when sessionStorage is unavailable.
 */
export function hasIncognitoRecording() {
  try {
    const stored = sessionStorage.getItem(INCOGNITO_KEY);
    return stored !== null;
  } catch (e) {
    return false;
  }
}
/**
 * Merge partial updates into the stored incognito recording.
 * @param {Object} updates - Fields to overwrite on the existing recording.
 * @returns {Object|null} The merged recording, or null when none exists.
 */
export function updateIncognitoRecording(updates) {
  try {
    const current = getIncognitoRecording();
    if (!current) {
      return null;
    }
    const merged = Object.assign({}, current, updates);
    saveIncognitoRecording(merged);
    return merged;
  } catch (e) {
    console.error('[Incognito] Failed to update recording:', e);
    return null;
  }
}

View File

@@ -0,0 +1,267 @@
/**
* IndexedDB Recording Persistence
* Handles saving recording chunks to IndexedDB for crash recovery
*/
// Database identifiers for crash-recovery recording persistence.
const DB_NAME = 'SpeakrRecordings';
const DB_VERSION = 1; // Bump when the schema created in onupgradeneeded changes.
const STORE_NAME = 'activeRecording';
// Cached open connection so repeated initDB() calls reuse a single handle.
let dbInstance = null;
/**
 * Adapt an event-based IDBRequest into a Promise that fulfills with the
 * request's result and rejects with its error.
 * @param {IDBRequest} request - A request whose handlers have not been set yet.
 * @returns {Promise<*>}
 */
const promisifyRequest = (request) =>
  new Promise((resolve, reject) => {
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
/**
 * Open (or reuse) the recordings database, creating the object store on
 * first run or version upgrade.
 * @returns {Promise<IDBDatabase>} resolves with the open connection.
 */
export const initDB = () => {
  return new Promise((resolve, reject) => {
    // Fast path: a connection is already cached.
    if (dbInstance) {
      resolve(dbInstance);
      return;
    }
    const openRequest = indexedDB.open(DB_NAME, DB_VERSION);
    openRequest.onupgradeneeded = (event) => {
      const upgradedDb = event.target.result;
      // Create object store for active recording
      if (!upgradedDb.objectStoreNames.contains(STORE_NAME)) {
        const store = upgradedDb.createObjectStore(STORE_NAME, { keyPath: 'id' });
        store.createIndex('timestamp', 'timestamp', { unique: false });
        console.log('[RecordingDB] Object store created');
      }
    };
    openRequest.onsuccess = () => {
      dbInstance = openRequest.result;
      console.log('[RecordingDB] Database opened successfully');
      resolve(dbInstance);
    };
    openRequest.onerror = () => {
      console.error('[RecordingDB] Failed to open database:', openRequest.error);
      reject(openRequest.error);
    };
  });
};
/**
 * Create (or overwrite) the singleton 'current' recording session record.
 * @param {Object} recordingData - mode, notes, tags, asrOptions, mimeType.
 * @returns {Promise<Object>} the stored session object.
 * @throws rethrows any storage error after logging it.
 */
export const startRecordingSession = async (recordingData) => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readwrite').objectStore(STORE_NAME);
    const session = {
      id: 'current', // fixed key: only one active recording at a time
      timestamp: Date.now(),
      startTime: new Date().toISOString(),
      mode: recordingData.mode,
      notes: recordingData.notes || '',
      tags: recordingData.tags || [],
      asrOptions: recordingData.asrOptions || {},
      chunks: [],
      mimeType: recordingData.mimeType || 'audio/webm',
      duration: 0
    };
    await promisifyRequest(store.put(session));
    console.log('[RecordingDB] Recording session started:', session.id);
    return session;
  } catch (error) {
    console.error('[RecordingDB] Failed to start session:', error);
    throw error;
  }
};
/**
 * Append one recorded audio chunk to the active 'current' session.
 *
 * Transaction liveness is delicate here: all awaits that happen before the
 * transaction is opened are safe, and once it is open the only awaited work
 * is IDB requests on that same transaction (get then put), which keep it
 * alive via their completion events. Do not insert other awaits between
 * the transaction creation and the final put.
 *
 * @param {Blob} chunkBlob - Audio data for this chunk.
 * @param {number} chunkIndex - Sequential index assigned by the recorder.
 */
export const saveChunk = async (chunkBlob, chunkIndex) => {
  try {
    // Do async prep work BEFORE creating transaction to avoid auto-close
    const db = await initDB();
    const arrayBuffer = await chunkBlob.arrayBuffer();
    // Now create transaction and do all DB operations quickly
    const transaction = db.transaction([STORE_NAME], 'readwrite');
    const objectStore = transaction.objectStore(STORE_NAME);
    // Get current session
    const session = await promisifyRequest(objectStore.get('current'));
    if (!session) {
      console.warn('[RecordingDB] No active session found');
      return;
    }
    // Add chunk to session (the chunks array grows until the session clears)
    session.chunks.push({
      index: chunkIndex,
      data: arrayBuffer,
      size: chunkBlob.size,
      timestamp: Date.now()
    });
    // Update session - must happen before transaction auto-closes
    await promisifyRequest(objectStore.put(session));
    // Chunk saved silently to avoid spam (happens every 5 seconds)
  } catch (error) {
    console.error('[RecordingDB] Failed to save chunk:', error);
    // Don't throw - recording should continue even if persistence fails
  }
};
/**
 * Merge metadata updates (notes, duration, etc.) into the active session.
 *
 * Same transaction-liveness constraint as saveChunk: after the readwrite
 * transaction is created, the only awaited work is requests on that
 * transaction (get then put), which keep it alive.
 *
 * @param {Object} updates - Fields merged over the stored session via
 *   Object.assign; existing keys are overwritten, others preserved.
 */
export const updateRecordingMetadata = async (updates) => {
  try {
    const db = await initDB();
    const transaction = db.transaction([STORE_NAME], 'readwrite');
    const objectStore = transaction.objectStore(STORE_NAME);
    const session = await promisifyRequest(objectStore.get('current'));
    if (!session) {
      console.warn('[RecordingDB] No active session to update');
      return;
    }
    // Merge updates
    Object.assign(session, updates);
    await promisifyRequest(objectStore.put(session));
    // Metadata updated silently to avoid spam (happens every 5 seconds)
  } catch (error) {
    console.error('[RecordingDB] Failed to update metadata:', error);
  }
};
/**
 * Check whether a persisted session with at least one chunk exists.
 * @returns {Promise<Object|null>} the session augmented with totalSize and an
 *   approximate duration (assumes one chunk per second), or null.
 */
export const checkForRecoverableRecording = async () => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readonly').objectStore(STORE_NAME);
    const session = await promisifyRequest(store.get('current'));
    const hasChunks = Boolean(session && session.chunks && session.chunks.length > 0);
    if (!hasChunks) {
      return null;
    }
    let totalSize = 0;
    for (const chunk of session.chunks) {
      totalSize += chunk.size;
    }
    // Approximate duration assuming 1-second chunks.
    const duration = session.chunks.length;
    console.log('[RecordingDB] Found recoverable recording:', {
      chunks: session.chunks.length,
      size: totalSize,
      duration: duration,
      startTime: session.startTime
    });
    return {
      ...session,
      totalSize,
      duration: duration
    };
  } catch (error) {
    console.error('[RecordingDB] Failed to check for recoverable recording:', error);
    return null;
  }
};
/**
 * Rebuild the persisted recording as Blobs plus its metadata.
 * @returns {Promise<{chunks: Blob[], metadata: Object}|null>} null when there
 *   is nothing to recover or the read fails.
 */
export const recoverRecording = async () => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readonly').objectStore(STORE_NAME);
    const session = await promisifyRequest(store.get('current'));
    if (!session || !session.chunks || session.chunks.length === 0) {
      console.warn('[RecordingDB] No recording to recover');
      return null;
    }
    // Re-wrap each stored ArrayBuffer in a Blob of the session's mime type.
    const chunks = session.chunks.map(
      (chunk) => new Blob([chunk.data], { type: session.mimeType })
    );
    console.log(`[RecordingDB] Recovered ${chunks.length} chunks`);
    const metadata = {
      mode: session.mode,
      notes: session.notes,
      tags: session.tags,
      asrOptions: session.asrOptions,
      mimeType: session.mimeType,
      duration: session.chunks.length,
      startTime: session.startTime
    };
    return { chunks, metadata };
  } catch (error) {
    console.error('[RecordingDB] Failed to recover recording:', error);
    return null;
  }
};
/**
 * Delete the 'current' session after a successful upload or an explicit discard.
 */
export const clearRecordingSession = async () => {
  try {
    const db = await initDB();
    const store = db.transaction([STORE_NAME], 'readwrite').objectStore(STORE_NAME);
    await promisifyRequest(store.delete('current'));
    console.log('[RecordingDB] Recording session cleared');
  } catch (error) {
    console.error('[RecordingDB] Failed to clear session:', error);
  }
};
/**
 * Report origin-wide storage usage via the StorageManager API.
 * Note: navigator.storage.estimate() covers the whole origin, not just this DB.
 * @returns {Promise<{usage: number, quota: number, percentage: string}|null>}
 *   null when the API is unavailable or the estimate fails.
 */
export const getDatabaseSize = async () => {
  try {
    const supportsEstimate = Boolean(navigator.storage && navigator.storage.estimate);
    if (!supportsEstimate) {
      return null;
    }
    const { usage, quota } = await navigator.storage.estimate();
    return {
      usage,
      quota,
      percentage: ((usage / quota) * 100).toFixed(2)
    };
  } catch (error) {
    console.error('[RecordingDB] Failed to get database size:', error);
    return null;
  }
};