// 概述CSV 常用于批量数据交换。本文以行拆分与简单分隔解析为基础,演示增量持久化路径。解析管道(基础版)

/**
 * Builds a TransformStream that converts a stream of decoded CSV text
 * chunks into one plain object per row, keyed by `headers`.
 *
 * Handles lines split across chunk boundaries, CRLF line endings, a
 * trailing line without a final newline, and skips blank lines.
 * NOTE: naive comma split — does not handle quoted fields or escaped
 * commas; use a dedicated CSV parser for complex input.
 *
 * @param {string[]} headers - column names, in file order
 * @returns {TransformStream<string, Object<string, string>>}
 */
function csvSplitter(headers) {
  // Carry-over buffer for a partial line spanning chunk boundaries.
  let buf = '';

  // Parse one complete CSV line into a row object. Missing trailing
  // columns become '' so every row has the full header set.
  const toRow = (line) => {
    const cols = line.split(',').map((s) => s.trim());
    return Object.fromEntries(headers.map((h, i) => [h, cols[i] ?? '']));
  };

  // Remove the '\r' that '\n'-splitting leaves behind on CRLF input.
  const stripCR = (line) => (line.endsWith('\r') ? line.slice(0, -1) : line);

  return new TransformStream({
    transform(chunk, controller) {
      buf += chunk;
      const lines = buf.split('\n');
      // The last element may be an incomplete line; hold it for the
      // next chunk (or for flush()).
      buf = lines.pop();
      for (const raw of lines) {
        const line = stripCR(raw);
        // Skip blank lines so empty rows are not emitted.
        if (line !== '') controller.enqueue(toRow(line));
      }
    },
    flush(controller) {
      // Emit a final row whose line had no trailing newline.
      const line = stripCR(buf);
      if (line !== '') controller.enqueue(toRow(line));
    },
  });
}
// 增量写入 IndexedDB

/**
 * Opens (creating on first use) an IndexedDB database at version 1 and
 * resolves with the database handle.
 *
 * On upgrade, ensures a 'rows' object store keyed by 'id' exists.
 *
 * @param {string} name - database name
 * @returns {Promise<IDBDatabase>} resolves with the open handle,
 *   rejects with the request's error on failure
 */
function openDB(name) {
  return new Promise((resolve, reject) => {
    const request = indexedDB.open(name, 1);
    request.onupgradeneeded = () => {
      const db = request.result;
      if (!db.objectStoreNames.contains('rows')) {
        db.createObjectStore('rows', { keyPath: 'id' });
      }
    };
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
}
/**
 * Fetches a CSV resource, streams it through `csvSplitter`, and writes
 * each row into the 'rows' store of the 'csvdb' IndexedDB database,
 * assigning sequential integer ids starting at 1.
 *
 * Each row is committed in its own transaction (incremental
 * persistence: a failure mid-import keeps already-committed rows).
 *
 * @param {string} url - location of the CSV resource
 * @param {string[]} headers - column names passed to csvSplitter
 * @throws {Error} when the HTTP response is not ok, or when a
 *   transaction fails
 */
async function importCSV(url, headers) {
  const res = await fetch(url);
  // Without this check a 404/500 HTML body would be parsed as CSV.
  if (!res.ok) throw new Error(`fetch ${url} failed: ${res.status}`);
  const rows = res.body
    .pipeThrough(new TextDecoderStream())
    .pipeThrough(csvSplitter(headers));
  const reader = rows.getReader();
  const db = await openDB('csvdb');
  try {
    let id = 1;
    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      const tx = db.transaction('rows', 'readwrite');
      tx.objectStore('rows').put({ id: id++, ...value });
      // Wait for the per-row transaction to commit before reading the
      // next row, so backpressure propagates up the stream.
      await new Promise((resolve, reject) => {
        tx.oncomplete = resolve;
        tx.onerror = () => reject(tx.error);
      });
    }
  } finally {
    // Release the stream lock and close the DB even when a read or
    // transaction fails (the original leaked the handle on error).
    reader.releaseLock();
    db.close();
  }
}
注意事项:基础版不处理引号与转义,复杂 CSV 请引入专用解析器或增强解析管道;导入过程建议增加数据校验与失败重试机制。

发表评论 取消回复