Banafo committed on
Commit
e3c8414
·
1 Parent(s): 2f14680

Add caching support

Browse files
Files changed (8) hide show
  1. app-asr.js +11 -5
  2. model.js → de.js +191 -26
  3. de.json +1 -1
  4. en.js +374 -0
  5. en.json +1 -1
  6. fr.js +374 -0
  7. fr.json +1 -1
  8. index.html +0 -2
app-asr.js CHANGED
@@ -45,11 +45,8 @@ function getDisplayResult() {
45
 
46
  Module = {};
47
  Module.locateFile = function(path, scriptDirectory = '') {
48
- if (path === 'model.data') {
49
- return scriptDirectory + lang + '.data';
50
- }
51
- if (path === 'model.js.metadata') {
52
- return scriptDirectory + lang + '.json';
53
  }
54
  return scriptDirectory + path;
55
  };
@@ -78,6 +75,15 @@ Module.onRuntimeInitialized = function() {
78
  console.log('recognizer is created!', recognizer);
79
  };
80
 
 
 
 
 
 
 
 
 
 
81
  let audioCtx;
82
  let mediaStream;
83
 
 
45
 
46
  Module = {};
47
  Module.locateFile = function(path, scriptDirectory = '') {
48
+ if (path.endsWith('.js.metadata')) {
49
+ return scriptDirectory + path.replace('.js.metadata', '.json');
 
 
 
50
  }
51
  return scriptDirectory + path;
52
  };
 
75
  console.log('recognizer is created!', recognizer);
76
  };
77
 
78
+ function loadScript(src) {
79
+ const scriptElement = document.createElement('script');
80
+ scriptElement.src = src;
81
+ document.body.append(scriptElement);
82
+ }
83
+
84
+ loadScript('./' + lang + '.js');
85
+ loadScript('./sherpa-onnx-wasm-main-asr.js');
86
+
87
  let audioCtx;
88
  let mediaStream;
89
 
model.js → de.js RENAMED
@@ -18,8 +18,8 @@
18
  // web worker
19
  PACKAGE_PATH = encodeURIComponent(location.pathname.substring(0, location.pathname.lastIndexOf('/')) + '/');
20
  }
21
- var PACKAGE_NAME = 'model.data';
22
- var REMOTE_PACKAGE_BASE = 'model.data';
23
  var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
24
  var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
25
 
@@ -92,18 +92,6 @@ var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
92
  console.error('package error:', error);
93
  };
94
 
95
- var fetchedCallback = null;
96
- var fetched = Module['getPreloadedPackage'] ? Module['getPreloadedPackage'](REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE) : null;
97
-
98
- if (!fetched) fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, (data) => {
99
- if (fetchedCallback) {
100
- fetchedCallback(data);
101
- fetchedCallback = null;
102
- } else {
103
- fetched = data;
104
- }
105
- }, handleError);
106
-
107
  function runWithFS(Module) {
108
 
109
  function assert(check, msg) {
@@ -142,6 +130,163 @@ var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
142
  new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
143
  }
144
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
145
  function processPackageData(arrayBuffer) {
146
  assert(arrayBuffer, 'Loading data file failed.');
147
  assert(arrayBuffer.constructor.name === ArrayBuffer.name, 'bad input to processPackageData');
@@ -152,20 +297,40 @@ var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
152
  var files = metadata['files'];
153
  for (var i = 0; i < files.length; ++i) {
154
  DataRequest.prototype.requests[files[i].filename].onload();
155
- } Module['removeRunDependency']('datafile_model.data');
156
 
157
  };
158
- Module['addRunDependency']('datafile_model.data');
159
 
160
  Module['preloadResults'] ??= {};
161
 
162
- Module['preloadResults'][PACKAGE_NAME] = {fromCache: false};
163
- if (fetched) {
164
- processPackageData(fetched);
165
- fetched = null;
166
- } else {
167
- fetchedCallback = processPackageData;
168
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
169
 
170
  }
171
  if (Module['calledRun']) {
@@ -174,12 +339,12 @@ var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
174
  (Module['preRun'] ??= []).push(runWithFS); // FS is not initialized yet, wait for it
175
  }
176
 
177
- Module['removeRunDependency']('model.js.metadata');
178
  }
179
 
180
  function runMetaWithFS() {
181
- Module['addRunDependency']('model.js.metadata');
182
- var metadataUrl = Module['locateFile'] ? Module['locateFile']('model.js.metadata', '') : 'model.js.metadata';
183
  if (isNode) {
184
  require('fs').readFile(metadataUrl, 'utf8', (err, contents) => {
185
  if (err) {
 
18
  // web worker
19
  PACKAGE_PATH = encodeURIComponent(location.pathname.substring(0, location.pathname.lastIndexOf('/')) + '/');
20
  }
21
+ var PACKAGE_NAME = 'de.data';
22
+ var REMOTE_PACKAGE_BASE = 'de.data';
23
  var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
24
  var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
25
 
 
92
  console.error('package error:', error);
93
  };
94
 
 
 
 
 
 
 
 
 
 
 
 
 
95
  function runWithFS(Module) {
96
 
97
  function assert(check, msg) {
 
130
  new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
131
  }
132
 
133
+ var PACKAGE_UUID = metadata['package_uuid'];
134
+ var IDB_RO = "readonly";
135
+ var IDB_RW = "readwrite";
136
+ var DB_NAME = "EM_PRELOAD_CACHE";
137
+ var DB_VERSION = 1;
138
+ var METADATA_STORE_NAME = 'METADATA';
139
+ var PACKAGE_STORE_NAME = 'PACKAGES';
140
+ function openDatabase(callback, errback) {
141
+ if (isNode) {
142
+ return errback();
143
+ }
144
+ var indexedDB;
145
+ if (typeof window === 'object') {
146
+ indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
147
+ } else if (typeof location !== 'undefined') {
148
+ // worker
149
+ indexedDB = self.indexedDB;
150
+ } else {
151
+ throw 'using IndexedDB to cache data can only be done on a web page or in a web worker';
152
+ }
153
+ try {
154
+ var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
155
+ } catch (e) {
156
+ return errback(e);
157
+ }
158
+ openRequest.onupgradeneeded = (event) => {
159
+ var db = /** @type {IDBDatabase} */ (event.target.result);
160
+
161
+ if (db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
162
+ db.deleteObjectStore(PACKAGE_STORE_NAME);
163
+ }
164
+ var packages = db.createObjectStore(PACKAGE_STORE_NAME);
165
+
166
+ if (db.objectStoreNames.contains(METADATA_STORE_NAME)) {
167
+ db.deleteObjectStore(METADATA_STORE_NAME);
168
+ }
169
+ var metadata = db.createObjectStore(METADATA_STORE_NAME);
170
+ };
171
+ openRequest.onsuccess = (event) => {
172
+ var db = /** @type {IDBDatabase} */ (event.target.result);
173
+ callback(db);
174
+ };
175
+ openRequest.onerror = (error) => errback(error);
176
+ };
177
+
178
+ // This is needed as chromium has a limit on per-entry files in IndexedDB
179
+ // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
180
+ // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
181
+ // We set the chunk size to 64MB to stay well-below the limit
182
+ var CHUNK_SIZE = 64 * 1024 * 1024;
183
+
184
+ function cacheRemotePackage(
185
+ db,
186
+ packageName,
187
+ packageData,
188
+ packageMeta,
189
+ callback,
190
+ errback
191
+ ) {
192
+ var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
193
+ var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
194
+ var chunkSliceStart = 0;
195
+ var nextChunkSliceStart = 0;
196
+ var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
197
+ var finishedChunks = 0;
198
+ for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
199
+ nextChunkSliceStart += CHUNK_SIZE;
200
+ var putPackageRequest = packages.put(
201
+ packageData.slice(chunkSliceStart, nextChunkSliceStart),
202
+ `package/${packageName}/${chunkId}`
203
+ );
204
+ chunkSliceStart = nextChunkSliceStart;
205
+ putPackageRequest.onsuccess = (event) => {
206
+ finishedChunks++;
207
+ if (finishedChunks == chunkCount) {
208
+ var transaction_metadata = db.transaction(
209
+ [METADATA_STORE_NAME],
210
+ IDB_RW
211
+ );
212
+ var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
213
+ var putMetadataRequest = metadata.put(
214
+ {
215
+ 'uuid': packageMeta.uuid,
216
+ 'chunkCount': chunkCount
217
+ },
218
+ `metadata/${packageName}`
219
+ );
220
+ putMetadataRequest.onsuccess = (event) => callback(packageData);
221
+ putMetadataRequest.onerror = (error) => errback(error);
222
+ }
223
+ };
224
+ putPackageRequest.onerror = (error) => errback(error);
225
+ }
226
+ }
227
+
228
+ /* Check if there's a cached package, and if so whether it's the latest available */
229
+ function checkCachedPackage(db, packageName, callback, errback) {
230
+ var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
231
+ var metadata = transaction.objectStore(METADATA_STORE_NAME);
232
+ var getRequest = metadata.get(`metadata/${packageName}`);
233
+ getRequest.onsuccess = (event) => {
234
+ var result = event.target.result;
235
+ if (!result) {
236
+ return callback(false, null);
237
+ } else {
238
+ return callback(PACKAGE_UUID === result['uuid'], result);
239
+ }
240
+ };
241
+ getRequest.onerror = (error) => errback(error);
242
+ }
243
+
244
+ function fetchCachedPackage(db, packageName, metadata, callback, errback) {
245
+ var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
246
+ var packages = transaction.objectStore(PACKAGE_STORE_NAME);
247
+
248
+ var chunksDone = 0;
249
+ var totalSize = 0;
250
+ var chunkCount = metadata['chunkCount'];
251
+ var chunks = new Array(chunkCount);
252
+
253
+ for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
254
+ var getRequest = packages.get(`package/${packageName}/${chunkId}`);
255
+ getRequest.onsuccess = (event) => {
256
+ if (!event.target.result) {
257
+ errback(new Error(`CachedPackageNotFound for: ${packageName}`));
258
+ return;
259
+ }
260
+ // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
261
+ if (chunkCount == 1) {
262
+ callback(event.target.result);
263
+ } else {
264
+ chunksDone++;
265
+ totalSize += event.target.result.byteLength;
266
+ chunks.push(event.target.result);
267
+ if (chunksDone == chunkCount) {
268
+ if (chunksDone == 1) {
269
+ callback(event.target.result);
270
+ } else {
271
+ var tempTyped = new Uint8Array(totalSize);
272
+ var byteOffset = 0;
273
+ for (var chunkId in chunks) {
274
+ var buffer = chunks[chunkId];
275
+ tempTyped.set(new Uint8Array(buffer), byteOffset);
276
+ byteOffset += buffer.byteLength;
277
+ buffer = undefined;
278
+ }
279
+ chunks = undefined;
280
+ callback(tempTyped.buffer);
281
+ tempTyped = undefined;
282
+ }
283
+ }
284
+ }
285
+ };
286
+ getRequest.onerror = (error) => errback(error);
287
+ }
288
+ }
289
+
290
  function processPackageData(arrayBuffer) {
291
  assert(arrayBuffer, 'Loading data file failed.');
292
  assert(arrayBuffer.constructor.name === ArrayBuffer.name, 'bad input to processPackageData');
 
297
  var files = metadata['files'];
298
  for (var i = 0; i < files.length; ++i) {
299
  DataRequest.prototype.requests[files[i].filename].onload();
300
+ } Module['removeRunDependency']('datafile_de.data');
301
 
302
  };
303
+ Module['addRunDependency']('datafile_de.data');
304
 
305
  Module['preloadResults'] ??= {};
306
 
307
+ function preloadFallback(error) {
308
+ console.error(error);
309
+ console.error('falling back to default preload behavior');
310
+ fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
311
+ };
312
+
313
+ openDatabase(
314
+ (db) => checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
315
+ (useCached, metadata) => {
316
+ Module['preloadResults'][PACKAGE_NAME] = {fromCache: useCached};
317
+ if (useCached) {
318
+ fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
319
+ } else {
320
+ fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
321
+ (packageData) => {
322
+ cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
323
+ (error) => {
324
+ console.error(error);
325
+ processPackageData(packageData);
326
+ });
327
+ }
328
+ , preloadFallback);
329
+ }
330
+ }, preloadFallback)
331
+ , preloadFallback);
332
+
333
+ Module['setStatus']?.('Downloading...');
334
 
335
  }
336
  if (Module['calledRun']) {
 
339
  (Module['preRun'] ??= []).push(runWithFS); // FS is not initialized yet, wait for it
340
  }
341
 
342
+ Module['removeRunDependency']('de.js.metadata');
343
  }
344
 
345
  function runMetaWithFS() {
346
+ Module['addRunDependency']('de.js.metadata');
347
+ var metadataUrl = Module['locateFile'] ? Module['locateFile']('de.js.metadata', '') : 'de.js.metadata';
348
  if (isNode) {
349
  require('fs').readFile(metadataUrl, 'utf8', (err, contents) => {
350
  if (err) {
de.json CHANGED
@@ -1 +1 @@
1
- {"files":[{"filename":"/.gitignore","start":0,"end":0},{"filename":"/README.md","start":0,"end":3029},{"filename":"/decoder.onnx","start":3029,"end":620518},{"filename":"/encoder.onnx","start":620518,"end":70712075},{"filename":"/joiner.onnx","start":70712075,"end":71048892},{"filename":"/tokens.txt","start":71048892,"end":71054498}],"remote_package_size":71054498}
 
1
+ {"files":[{"filename":"/.gitignore","start":0,"end":0},{"filename":"/README.md","start":0,"end":3029},{"filename":"/decoder.onnx","start":3029,"end":620518},{"filename":"/encoder.onnx","start":620518,"end":70712075},{"filename":"/joiner.onnx","start":70712075,"end":71048892},{"filename":"/tokens.txt","start":71048892,"end":71054498}],"remote_package_size":71054498,"package_uuid":"sha256-18d2e7f52fb932018b08f444f4f5fbbdba4404454c023d5d443e13ac6eb4110b"}
en.js ADDED
@@ -0,0 +1,374 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ var Module = typeof Module != 'undefined' ? Module : {};
3
+
4
+ Module['expectedDataFileDownloads'] ??= 0;
5
+ Module['expectedDataFileDownloads']++;
6
+ (() => {
7
+ // Do not attempt to redownload the virtual filesystem data when in a pthread or a Wasm Worker context.
8
+ var isPthread = typeof ENVIRONMENT_IS_PTHREAD != 'undefined' && ENVIRONMENT_IS_PTHREAD;
9
+ var isWasmWorker = typeof ENVIRONMENT_IS_WASM_WORKER != 'undefined' && ENVIRONMENT_IS_WASM_WORKER;
10
+ if (isPthread || isWasmWorker) return;
11
+ var isNode = typeof process === 'object' && typeof process.versions === 'object' && typeof process.versions.node === 'string';
12
+ function loadPackage(metadata) {
13
+
14
+ var PACKAGE_PATH = '';
15
+ if (typeof window === 'object') {
16
+ PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.substring(0, window.location.pathname.lastIndexOf('/')) + '/');
17
+ } else if (typeof process === 'undefined' && typeof location !== 'undefined') {
18
+ // web worker
19
+ PACKAGE_PATH = encodeURIComponent(location.pathname.substring(0, location.pathname.lastIndexOf('/')) + '/');
20
+ }
21
+ var PACKAGE_NAME = 'en.data';
22
+ var REMOTE_PACKAGE_BASE = 'en.data';
23
+ var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
24
+ var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
25
+
26
+ function fetchRemotePackage(packageName, packageSize, callback, errback) {
27
+ if (isNode) {
28
+ require('fs').readFile(packageName, (err, contents) => {
29
+ if (err) {
30
+ errback(err);
31
+ } else {
32
+ callback(contents.buffer);
33
+ }
34
+ });
35
+ return;
36
+ }
37
+ Module['dataFileDownloads'] ??= {};
38
+ fetch(packageName)
39
+ .catch((cause) => Promise.reject(new Error(`Network Error: ${packageName}`, {cause}))) // If fetch fails, rewrite the error to include the failing URL & the cause.
40
+ .then((response) => {
41
+ if (!response.ok) {
42
+ return Promise.reject(new Error(`${response.status}: ${response.url}`));
43
+ }
44
+
45
+ if (!response.body && response.arrayBuffer) { // If we're using the polyfill, readers won't be available...
46
+ return response.arrayBuffer().then(callback);
47
+ }
48
+
49
+ const reader = response.body.getReader();
50
+ const iterate = () => reader.read().then(handleChunk).catch((cause) => {
51
+ return Promise.reject(new Error(`Unexpected error while handling : ${response.url} ${cause}`, {cause}));
52
+ });
53
+
54
+ const chunks = [];
55
+ const headers = response.headers;
56
+ const total = Number(headers.get('Content-Length') ?? packageSize);
57
+ let loaded = 0;
58
+
59
+ const handleChunk = ({done, value}) => {
60
+ if (!done) {
61
+ chunks.push(value);
62
+ loaded += value.length;
63
+ Module['dataFileDownloads'][packageName] = {loaded, total};
64
+
65
+ let totalLoaded = 0;
66
+ let totalSize = 0;
67
+
68
+ for (const download of Object.values(Module['dataFileDownloads'])) {
69
+ totalLoaded += download.loaded;
70
+ totalSize += download.total;
71
+ }
72
+
73
+ Module['setStatus']?.(`Downloading data... (${totalLoaded}/${totalSize})`);
74
+ return iterate();
75
+ } else {
76
+ const packageData = new Uint8Array(chunks.map((c) => c.length).reduce((a, b) => a + b, 0));
77
+ let offset = 0;
78
+ for (const chunk of chunks) {
79
+ packageData.set(chunk, offset);
80
+ offset += chunk.length;
81
+ }
82
+ callback(packageData.buffer);
83
+ }
84
+ };
85
+
86
+ Module['setStatus']?.('Downloading data...');
87
+ return iterate();
88
+ });
89
+ };
90
+
91
+ function handleError(error) {
92
+ console.error('package error:', error);
93
+ };
94
+
95
+ function runWithFS(Module) {
96
+
97
+ function assert(check, msg) {
98
+ if (!check) throw msg + new Error().stack;
99
+ }
100
+
101
+ /** @constructor */
102
+ function DataRequest(start, end, audio) {
103
+ this.start = start;
104
+ this.end = end;
105
+ this.audio = audio;
106
+ }
107
+ DataRequest.prototype = {
108
+ requests: {},
109
+ open: function(mode, name) {
110
+ this.name = name;
111
+ this.requests[name] = this;
112
+ Module['addRunDependency'](`fp ${this.name}`);
113
+ },
114
+ send: function() {},
115
+ onload: function() {
116
+ var byteArray = this.byteArray.subarray(this.start, this.end);
117
+ this.finish(byteArray);
118
+ },
119
+ finish: function(byteArray) {
120
+ var that = this;
121
+ // canOwn this data in the filesystem, it is a slide into the heap that will never change
122
+ Module['FS_createDataFile'](this.name, null, byteArray, true, true, true);
123
+ Module['removeRunDependency'](`fp ${that.name}`);
124
+ this.requests[this.name] = null;
125
+ }
126
+ };
127
+
128
+ var files = metadata['files'];
129
+ for (var i = 0; i < files.length; ++i) {
130
+ new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
131
+ }
132
+
133
+ var PACKAGE_UUID = metadata['package_uuid'];
134
+ var IDB_RO = "readonly";
135
+ var IDB_RW = "readwrite";
136
+ var DB_NAME = "EM_PRELOAD_CACHE";
137
+ var DB_VERSION = 1;
138
+ var METADATA_STORE_NAME = 'METADATA';
139
+ var PACKAGE_STORE_NAME = 'PACKAGES';
140
+ function openDatabase(callback, errback) {
141
+ if (isNode) {
142
+ return errback();
143
+ }
144
+ var indexedDB;
145
+ if (typeof window === 'object') {
146
+ indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
147
+ } else if (typeof location !== 'undefined') {
148
+ // worker
149
+ indexedDB = self.indexedDB;
150
+ } else {
151
+ throw 'using IndexedDB to cache data can only be done on a web page or in a web worker';
152
+ }
153
+ try {
154
+ var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
155
+ } catch (e) {
156
+ return errback(e);
157
+ }
158
+ openRequest.onupgradeneeded = (event) => {
159
+ var db = /** @type {IDBDatabase} */ (event.target.result);
160
+
161
+ if (db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
162
+ db.deleteObjectStore(PACKAGE_STORE_NAME);
163
+ }
164
+ var packages = db.createObjectStore(PACKAGE_STORE_NAME);
165
+
166
+ if (db.objectStoreNames.contains(METADATA_STORE_NAME)) {
167
+ db.deleteObjectStore(METADATA_STORE_NAME);
168
+ }
169
+ var metadata = db.createObjectStore(METADATA_STORE_NAME);
170
+ };
171
+ openRequest.onsuccess = (event) => {
172
+ var db = /** @type {IDBDatabase} */ (event.target.result);
173
+ callback(db);
174
+ };
175
+ openRequest.onerror = (error) => errback(error);
176
+ };
177
+
178
+ // This is needed as chromium has a limit on per-entry files in IndexedDB
179
+ // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
180
+ // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
181
+ // We set the chunk size to 64MB to stay well-below the limit
182
+ var CHUNK_SIZE = 64 * 1024 * 1024;
183
+
184
+ function cacheRemotePackage(
185
+ db,
186
+ packageName,
187
+ packageData,
188
+ packageMeta,
189
+ callback,
190
+ errback
191
+ ) {
192
+ var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
193
+ var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
194
+ var chunkSliceStart = 0;
195
+ var nextChunkSliceStart = 0;
196
+ var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
197
+ var finishedChunks = 0;
198
+ for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
199
+ nextChunkSliceStart += CHUNK_SIZE;
200
+ var putPackageRequest = packages.put(
201
+ packageData.slice(chunkSliceStart, nextChunkSliceStart),
202
+ `package/${packageName}/${chunkId}`
203
+ );
204
+ chunkSliceStart = nextChunkSliceStart;
205
+ putPackageRequest.onsuccess = (event) => {
206
+ finishedChunks++;
207
+ if (finishedChunks == chunkCount) {
208
+ var transaction_metadata = db.transaction(
209
+ [METADATA_STORE_NAME],
210
+ IDB_RW
211
+ );
212
+ var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
213
+ var putMetadataRequest = metadata.put(
214
+ {
215
+ 'uuid': packageMeta.uuid,
216
+ 'chunkCount': chunkCount
217
+ },
218
+ `metadata/${packageName}`
219
+ );
220
+ putMetadataRequest.onsuccess = (event) => callback(packageData);
221
+ putMetadataRequest.onerror = (error) => errback(error);
222
+ }
223
+ };
224
+ putPackageRequest.onerror = (error) => errback(error);
225
+ }
226
+ }
227
+
228
+ /* Check if there's a cached package, and if so whether it's the latest available */
229
+ function checkCachedPackage(db, packageName, callback, errback) {
230
+ var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
231
+ var metadata = transaction.objectStore(METADATA_STORE_NAME);
232
+ var getRequest = metadata.get(`metadata/${packageName}`);
233
+ getRequest.onsuccess = (event) => {
234
+ var result = event.target.result;
235
+ if (!result) {
236
+ return callback(false, null);
237
+ } else {
238
+ return callback(PACKAGE_UUID === result['uuid'], result);
239
+ }
240
+ };
241
+ getRequest.onerror = (error) => errback(error);
242
+ }
243
+
244
+ function fetchCachedPackage(db, packageName, metadata, callback, errback) {
245
+ var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
246
+ var packages = transaction.objectStore(PACKAGE_STORE_NAME);
247
+
248
+ var chunksDone = 0;
249
+ var totalSize = 0;
250
+ var chunkCount = metadata['chunkCount'];
251
+ var chunks = new Array(chunkCount);
252
+
253
+ for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
254
+ var getRequest = packages.get(`package/${packageName}/${chunkId}`);
255
+ getRequest.onsuccess = (event) => {
256
+ if (!event.target.result) {
257
+ errback(new Error(`CachedPackageNotFound for: ${packageName}`));
258
+ return;
259
+ }
260
+ // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
261
+ if (chunkCount == 1) {
262
+ callback(event.target.result);
263
+ } else {
264
+ chunksDone++;
265
+ totalSize += event.target.result.byteLength;
266
+ chunks.push(event.target.result);
267
+ if (chunksDone == chunkCount) {
268
+ if (chunksDone == 1) {
269
+ callback(event.target.result);
270
+ } else {
271
+ var tempTyped = new Uint8Array(totalSize);
272
+ var byteOffset = 0;
273
+ for (var chunkId in chunks) {
274
+ var buffer = chunks[chunkId];
275
+ tempTyped.set(new Uint8Array(buffer), byteOffset);
276
+ byteOffset += buffer.byteLength;
277
+ buffer = undefined;
278
+ }
279
+ chunks = undefined;
280
+ callback(tempTyped.buffer);
281
+ tempTyped = undefined;
282
+ }
283
+ }
284
+ }
285
+ };
286
+ getRequest.onerror = (error) => errback(error);
287
+ }
288
+ }
289
+
290
+ function processPackageData(arrayBuffer) {
291
+ assert(arrayBuffer, 'Loading data file failed.');
292
+ assert(arrayBuffer.constructor.name === ArrayBuffer.name, 'bad input to processPackageData');
293
+ var byteArray = new Uint8Array(arrayBuffer);
294
+ var curr;
295
+ // Reuse the bytearray from the XHR as the source for file reads.
296
+ DataRequest.prototype.byteArray = byteArray;
297
+ var files = metadata['files'];
298
+ for (var i = 0; i < files.length; ++i) {
299
+ DataRequest.prototype.requests[files[i].filename].onload();
300
+ } Module['removeRunDependency']('datafile_en.data');
301
+
302
+ };
303
+ Module['addRunDependency']('datafile_en.data');
304
+
305
+ Module['preloadResults'] ??= {};
306
+
307
+ function preloadFallback(error) {
308
+ console.error(error);
309
+ console.error('falling back to default preload behavior');
310
+ fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
311
+ };
312
+
313
+ openDatabase(
314
+ (db) => checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
315
+ (useCached, metadata) => {
316
+ Module['preloadResults'][PACKAGE_NAME] = {fromCache: useCached};
317
+ if (useCached) {
318
+ fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
319
+ } else {
320
+ fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
321
+ (packageData) => {
322
+ cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
323
+ (error) => {
324
+ console.error(error);
325
+ processPackageData(packageData);
326
+ });
327
+ }
328
+ , preloadFallback);
329
+ }
330
+ }, preloadFallback)
331
+ , preloadFallback);
332
+
333
+ Module['setStatus']?.('Downloading...');
334
+
335
+ }
336
+ if (Module['calledRun']) {
337
+ runWithFS(Module);
338
+ } else {
339
+ (Module['preRun'] ??= []).push(runWithFS); // FS is not initialized yet, wait for it
340
+ }
341
+
342
+ Module['removeRunDependency']('en.js.metadata');
343
+ }
344
+
345
+ function runMetaWithFS() {
346
+ Module['addRunDependency']('en.js.metadata');
347
+ var metadataUrl = Module['locateFile'] ? Module['locateFile']('en.js.metadata', '') : 'en.js.metadata';
348
+ if (isNode) {
349
+ require('fs').readFile(metadataUrl, 'utf8', (err, contents) => {
350
+ if (err) {
351
+ return Promise.reject(err);
352
+ } else {
353
+ loadPackage(JSON.parse(contents));
354
+ }
355
+ });
356
+ return;
357
+ }
358
+ fetch(metadataUrl)
359
+ .then((response) => {
360
+ if (response.ok) {
361
+ return response.json();
362
+ }
363
+ return Promise.reject(new Error(`${response.status}: ${response.url}`));
364
+ })
365
+ .then(loadPackage);
366
+ }
367
+
368
+ if (Module['calledRun']) {
369
+ runMetaWithFS();
370
+ } else {
371
+ (Module['preRun'] ??= []).push(runMetaWithFS);
372
+ }
373
+
374
+ })();
en.json CHANGED
@@ -1 +1 @@
1
- {"files":[{"filename":"/.gitignore","start":0,"end":0},{"filename":"/README.md","start":0,"end":3029},{"filename":"/decoder.onnx","start":3029,"end":620517},{"filename":"/encoder.onnx","start":620517,"end":70713116},{"filename":"/joiner.onnx","start":70713116,"end":71049933},{"filename":"/tokens.txt","start":71049933,"end":71056243}],"remote_package_size":71056243}
 
1
+ {"files":[{"filename":"/.gitignore","start":0,"end":0},{"filename":"/README.md","start":0,"end":3029},{"filename":"/decoder.onnx","start":3029,"end":620517},{"filename":"/encoder.onnx","start":620517,"end":70713116},{"filename":"/joiner.onnx","start":70713116,"end":71049933},{"filename":"/tokens.txt","start":71049933,"end":71056243}],"remote_package_size":71056243,"package_uuid":"sha256-1838b94374576507f0efaaf3aa91bb921072cdf46c8802a7c12acc527b7e1f0e"}
fr.js ADDED
@@ -0,0 +1,374 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ var Module = typeof Module != 'undefined' ? Module : {};
3
+
4
+ Module['expectedDataFileDownloads'] ??= 0;
5
+ Module['expectedDataFileDownloads']++;
6
+ (() => {
7
+ // Do not attempt to redownload the virtual filesystem data when in a pthread or a Wasm Worker context.
8
+ var isPthread = typeof ENVIRONMENT_IS_PTHREAD != 'undefined' && ENVIRONMENT_IS_PTHREAD;
9
+ var isWasmWorker = typeof ENVIRONMENT_IS_WASM_WORKER != 'undefined' && ENVIRONMENT_IS_WASM_WORKER;
10
+ if (isPthread || isWasmWorker) return;
11
+ var isNode = typeof process === 'object' && typeof process.versions === 'object' && typeof process.versions.node === 'string';
12
+ function loadPackage(metadata) {
13
+
14
+ var PACKAGE_PATH = '';
15
+ if (typeof window === 'object') {
16
+ PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.substring(0, window.location.pathname.lastIndexOf('/')) + '/');
17
+ } else if (typeof process === 'undefined' && typeof location !== 'undefined') {
18
+ // web worker
19
+ PACKAGE_PATH = encodeURIComponent(location.pathname.substring(0, location.pathname.lastIndexOf('/')) + '/');
20
+ }
21
+ var PACKAGE_NAME = 'fr.data';
22
+ var REMOTE_PACKAGE_BASE = 'fr.data';
23
+ var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
24
+ var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
25
+
26
+ function fetchRemotePackage(packageName, packageSize, callback, errback) {
27
+ if (isNode) {
28
+ require('fs').readFile(packageName, (err, contents) => {
29
+ if (err) {
30
+ errback(err);
31
+ } else {
32
+ callback(contents.buffer);
33
+ }
34
+ });
35
+ return;
36
+ }
37
+ Module['dataFileDownloads'] ??= {};
38
+ fetch(packageName)
39
+ .catch((cause) => Promise.reject(new Error(`Network Error: ${packageName}`, {cause}))) // If fetch fails, rewrite the error to include the failing URL & the cause.
40
+ .then((response) => {
41
+ if (!response.ok) {
42
+ return Promise.reject(new Error(`${response.status}: ${response.url}`));
43
+ }
44
+
45
+ if (!response.body && response.arrayBuffer) { // If we're using the polyfill, readers won't be available...
46
+ return response.arrayBuffer().then(callback);
47
+ }
48
+
49
+ const reader = response.body.getReader();
50
+ const iterate = () => reader.read().then(handleChunk).catch((cause) => {
51
+ return Promise.reject(new Error(`Unexpected error while handling : ${response.url} ${cause}`, {cause}));
52
+ });
53
+
54
+ const chunks = [];
55
+ const headers = response.headers;
56
+ const total = Number(headers.get('Content-Length') ?? packageSize);
57
+ let loaded = 0;
58
+
59
+ const handleChunk = ({done, value}) => {
60
+ if (!done) {
61
+ chunks.push(value);
62
+ loaded += value.length;
63
+ Module['dataFileDownloads'][packageName] = {loaded, total};
64
+
65
+ let totalLoaded = 0;
66
+ let totalSize = 0;
67
+
68
+ for (const download of Object.values(Module['dataFileDownloads'])) {
69
+ totalLoaded += download.loaded;
70
+ totalSize += download.total;
71
+ }
72
+
73
+ Module['setStatus']?.(`Downloading data... (${totalLoaded}/${totalSize})`);
74
+ return iterate();
75
+ } else {
76
+ const packageData = new Uint8Array(chunks.map((c) => c.length).reduce((a, b) => a + b, 0));
77
+ let offset = 0;
78
+ for (const chunk of chunks) {
79
+ packageData.set(chunk, offset);
80
+ offset += chunk.length;
81
+ }
82
+ callback(packageData.buffer);
83
+ }
84
+ };
85
+
86
+ Module['setStatus']?.('Downloading data...');
87
+ return iterate();
88
+ });
89
+ };
90
+
91
+ function handleError(error) {
92
+ console.error('package error:', error);
93
+ };
94
+
95
+ function runWithFS(Module) {
96
+
97
+ function assert(check, msg) {
98
+ if (!check) throw msg + new Error().stack;
99
+ }
100
+
101
+ /** @constructor */
102
+ function DataRequest(start, end, audio) {
103
+ this.start = start;
104
+ this.end = end;
105
+ this.audio = audio;
106
+ }
107
+ DataRequest.prototype = {
108
+ requests: {},
109
+ open: function(mode, name) {
110
+ this.name = name;
111
+ this.requests[name] = this;
112
+ Module['addRunDependency'](`fp ${this.name}`);
113
+ },
114
+ send: function() {},
115
+ onload: function() {
116
+ var byteArray = this.byteArray.subarray(this.start, this.end);
117
+ this.finish(byteArray);
118
+ },
119
+ finish: function(byteArray) {
120
+ var that = this;
121
+ // canOwn this data in the filesystem, it is a slide into the heap that will never change
122
+ Module['FS_createDataFile'](this.name, null, byteArray, true, true, true);
123
+ Module['removeRunDependency'](`fp ${that.name}`);
124
+ this.requests[this.name] = null;
125
+ }
126
+ };
127
+
128
+ var files = metadata['files'];
129
+ for (var i = 0; i < files.length; ++i) {
130
+ new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio'] || 0).open('GET', files[i]['filename']);
131
+ }
132
+
133
+ var PACKAGE_UUID = metadata['package_uuid'];
134
+ var IDB_RO = "readonly";
135
+ var IDB_RW = "readwrite";
136
+ var DB_NAME = "EM_PRELOAD_CACHE";
137
+ var DB_VERSION = 1;
138
+ var METADATA_STORE_NAME = 'METADATA';
139
+ var PACKAGE_STORE_NAME = 'PACKAGES';
140
+ function openDatabase(callback, errback) {
141
+ if (isNode) {
142
+ return errback();
143
+ }
144
+ var indexedDB;
145
+ if (typeof window === 'object') {
146
+ indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
147
+ } else if (typeof location !== 'undefined') {
148
+ // worker
149
+ indexedDB = self.indexedDB;
150
+ } else {
151
+ throw 'using IndexedDB to cache data can only be done on a web page or in a web worker';
152
+ }
153
+ try {
154
+ var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
155
+ } catch (e) {
156
+ return errback(e);
157
+ }
158
+ openRequest.onupgradeneeded = (event) => {
159
+ var db = /** @type {IDBDatabase} */ (event.target.result);
160
+
161
+ if (db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
162
+ db.deleteObjectStore(PACKAGE_STORE_NAME);
163
+ }
164
+ var packages = db.createObjectStore(PACKAGE_STORE_NAME);
165
+
166
+ if (db.objectStoreNames.contains(METADATA_STORE_NAME)) {
167
+ db.deleteObjectStore(METADATA_STORE_NAME);
168
+ }
169
+ var metadata = db.createObjectStore(METADATA_STORE_NAME);
170
+ };
171
+ openRequest.onsuccess = (event) => {
172
+ var db = /** @type {IDBDatabase} */ (event.target.result);
173
+ callback(db);
174
+ };
175
+ openRequest.onerror = (error) => errback(error);
176
+ };
177
+
178
+ // This is needed as chromium has a limit on per-entry files in IndexedDB
179
+ // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
180
+ // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
181
+ // We set the chunk size to 64MB to stay well-below the limit
182
+ var CHUNK_SIZE = 64 * 1024 * 1024;
183
+
184
+ function cacheRemotePackage(
185
+ db,
186
+ packageName,
187
+ packageData,
188
+ packageMeta,
189
+ callback,
190
+ errback
191
+ ) {
192
+ var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
193
+ var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
194
+ var chunkSliceStart = 0;
195
+ var nextChunkSliceStart = 0;
196
+ var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
197
+ var finishedChunks = 0;
198
+ for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
199
+ nextChunkSliceStart += CHUNK_SIZE;
200
+ var putPackageRequest = packages.put(
201
+ packageData.slice(chunkSliceStart, nextChunkSliceStart),
202
+ `package/${packageName}/${chunkId}`
203
+ );
204
+ chunkSliceStart = nextChunkSliceStart;
205
+ putPackageRequest.onsuccess = (event) => {
206
+ finishedChunks++;
207
+ if (finishedChunks == chunkCount) {
208
+ var transaction_metadata = db.transaction(
209
+ [METADATA_STORE_NAME],
210
+ IDB_RW
211
+ );
212
+ var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
213
+ var putMetadataRequest = metadata.put(
214
+ {
215
+ 'uuid': packageMeta.uuid,
216
+ 'chunkCount': chunkCount
217
+ },
218
+ `metadata/${packageName}`
219
+ );
220
+ putMetadataRequest.onsuccess = (event) => callback(packageData);
221
+ putMetadataRequest.onerror = (error) => errback(error);
222
+ }
223
+ };
224
+ putPackageRequest.onerror = (error) => errback(error);
225
+ }
226
+ }
227
+
228
+ /* Check if there's a cached package, and if so whether it's the latest available */
229
+ function checkCachedPackage(db, packageName, callback, errback) {
230
+ var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
231
+ var metadata = transaction.objectStore(METADATA_STORE_NAME);
232
+ var getRequest = metadata.get(`metadata/${packageName}`);
233
+ getRequest.onsuccess = (event) => {
234
+ var result = event.target.result;
235
+ if (!result) {
236
+ return callback(false, null);
237
+ } else {
238
+ return callback(PACKAGE_UUID === result['uuid'], result);
239
+ }
240
+ };
241
+ getRequest.onerror = (error) => errback(error);
242
+ }
243
+
244
+ function fetchCachedPackage(db, packageName, metadata, callback, errback) {
245
+ var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
246
+ var packages = transaction.objectStore(PACKAGE_STORE_NAME);
247
+
248
+ var chunksDone = 0;
249
+ var totalSize = 0;
250
+ var chunkCount = metadata['chunkCount'];
251
+ var chunks = new Array(chunkCount);
252
+
253
+ for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
254
+ var getRequest = packages.get(`package/${packageName}/${chunkId}`);
255
+ getRequest.onsuccess = (event) => {
256
+ if (!event.target.result) {
257
+ errback(new Error(`CachedPackageNotFound for: ${packageName}`));
258
+ return;
259
+ }
260
+ // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
261
+ if (chunkCount == 1) {
262
+ callback(event.target.result);
263
+ } else {
264
+ chunksDone++;
265
+ totalSize += event.target.result.byteLength;
266
+ chunks.push(event.target.result);
267
+ if (chunksDone == chunkCount) {
268
+ if (chunksDone == 1) {
269
+ callback(event.target.result);
270
+ } else {
271
+ var tempTyped = new Uint8Array(totalSize);
272
+ var byteOffset = 0;
273
+ for (var chunkId in chunks) {
274
+ var buffer = chunks[chunkId];
275
+ tempTyped.set(new Uint8Array(buffer), byteOffset);
276
+ byteOffset += buffer.byteLength;
277
+ buffer = undefined;
278
+ }
279
+ chunks = undefined;
280
+ callback(tempTyped.buffer);
281
+ tempTyped = undefined;
282
+ }
283
+ }
284
+ }
285
+ };
286
+ getRequest.onerror = (error) => errback(error);
287
+ }
288
+ }
289
+
290
+ function processPackageData(arrayBuffer) {
291
+ assert(arrayBuffer, 'Loading data file failed.');
292
+ assert(arrayBuffer.constructor.name === ArrayBuffer.name, 'bad input to processPackageData');
293
+ var byteArray = new Uint8Array(arrayBuffer);
294
+ var curr;
295
+ // Reuse the bytearray from the XHR as the source for file reads.
296
+ DataRequest.prototype.byteArray = byteArray;
297
+ var files = metadata['files'];
298
+ for (var i = 0; i < files.length; ++i) {
299
+ DataRequest.prototype.requests[files[i].filename].onload();
300
+ } Module['removeRunDependency']('datafile_fr.data');
301
+
302
+ };
303
+ Module['addRunDependency']('datafile_fr.data');
304
+
305
+ Module['preloadResults'] ??= {};
306
+
307
+ function preloadFallback(error) {
308
+ console.error(error);
309
+ console.error('falling back to default preload behavior');
310
+ fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
311
+ };
312
+
313
+ openDatabase(
314
+ (db) => checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
315
+ (useCached, metadata) => {
316
+ Module['preloadResults'][PACKAGE_NAME] = {fromCache: useCached};
317
+ if (useCached) {
318
+ fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
319
+ } else {
320
+ fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
321
+ (packageData) => {
322
+ cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
323
+ (error) => {
324
+ console.error(error);
325
+ processPackageData(packageData);
326
+ });
327
+ }
328
+ , preloadFallback);
329
+ }
330
+ }, preloadFallback)
331
+ , preloadFallback);
332
+
333
+ Module['setStatus']?.('Downloading...');
334
+
335
+ }
336
+ if (Module['calledRun']) {
337
+ runWithFS(Module);
338
+ } else {
339
+ (Module['preRun'] ??= []).push(runWithFS); // FS is not initialized yet, wait for it
340
+ }
341
+
342
+ Module['removeRunDependency']('fr.js.metadata');
343
+ }
344
+
345
+ function runMetaWithFS() {
346
+ Module['addRunDependency']('fr.js.metadata');
347
+ var metadataUrl = Module['locateFile'] ? Module['locateFile']('fr.js.metadata', '') : 'fr.js.metadata';
348
+ if (isNode) {
349
+ require('fs').readFile(metadataUrl, 'utf8', (err, contents) => {
350
+ if (err) {
351
+ return Promise.reject(err);
352
+ } else {
353
+ loadPackage(JSON.parse(contents));
354
+ }
355
+ });
356
+ return;
357
+ }
358
+ fetch(metadataUrl)
359
+ .then((response) => {
360
+ if (response.ok) {
361
+ return response.json();
362
+ }
363
+ return Promise.reject(new Error(`${response.status}: ${response.url}`));
364
+ })
365
+ .then(loadPackage);
366
+ }
367
+
368
+ if (Module['calledRun']) {
369
+ runMetaWithFS();
370
+ } else {
371
+ (Module['preRun'] ??= []).push(runMetaWithFS);
372
+ }
373
+
374
+ })();
fr.json CHANGED
@@ -1 +1 @@
1
- {"files":[{"filename":"/.gitignore","start":0,"end":0},{"filename":"/README.md","start":0,"end":3029},{"filename":"/decoder.onnx","start":3029,"end":620517},{"filename":"/encoder.onnx","start":620517,"end":70713116},{"filename":"/joiner.onnx","start":70713116,"end":71049933},{"filename":"/tokens.txt","start":71049933,"end":71055348}],"remote_package_size":71055348}
 
1
+ {"files":[{"filename":"/.gitignore","start":0,"end":0},{"filename":"/README.md","start":0,"end":3029},{"filename":"/decoder.onnx","start":3029,"end":620517},{"filename":"/encoder.onnx","start":620517,"end":70713116},{"filename":"/joiner.onnx","start":70713116,"end":71049933},{"filename":"/tokens.txt","start":71049933,"end":71055348}],"remote_package_size":71055348,"package_uuid":"sha256-97d77f564f21cad9487ace162b7c4f4f42d2077f0a9bb0bc87d0f4964b5b59fc"}
index.html CHANGED
@@ -140,7 +140,5 @@
140
 
141
  <script src="./sherpa-onnx-asr.js"></script>
142
  <script src="./app-asr.js"></script>
143
- <script src="./model.js"></script>
144
- <script src="./sherpa-onnx-wasm-main-asr.js"></script>
145
  </body>
146
  </html>
 
140
 
141
  <script src="./sherpa-onnx-asr.js"></script>
142
  <script src="./app-asr.js"></script>
 
 
143
  </body>
144
  </html>