/**
 * This file is part of Haketilo.
 *
 * Function: Facilitate use of IndexedDB within Haketilo.
 *
 * Copyright (C) 2021, 2022 Wojtek Kosior <koszko@koszko.org>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * As additional permission under GNU GPL version 3 section 7, you
 * may distribute forms of that code without the copy of the GNU
 * GPL normally required by section 4, provided you include this
 * license notice and, in case of non-source distribution, a URL
 * through which recipients can access the Corresponding Source.
 * If you modify file(s) with this exception, you may extend this
 * exception to your version of the file(s), but you are not
 * obligated to do so. If you do not wish to do so, delete this
 * exception statement from your version.
 *
 * As a special exception to the GPL, any HTML file which merely
 * makes function calls to this code, and for that purpose
 * includes it by reference shall be deemed a separate work for
 * copyright law purposes. If you modify this code, you may extend
 * this exception to your version of the code, but you are not
 * obligated to do so. If you do not wish to do so, delete this
 * exception statement from your version.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 *
 * I, Wojtek Kosior, thereby promise not to sue for violation of this file's
 * license. Although I request that you do not make use of this code in a
 * proprietary program, I am not going to enforce this in court.
 */

#IMPORT common/entities.js
#IMPORT common/broadcast.js

let initial_data = (
#IF UNIT_TEST
{}
#ELSE
#INCLUDE default_settings.json
#ENDIF
);

/* Update when the database schema changes. Must have 3 elements. */
const db_version = [1, 0, 1];

const nr_reductor = ([i, s], num) => [i - 1, s + num * 1024 ** i];
const version_nr = ver => ver.slice(0, 3).reduce(nr_reductor, [2, 0])[1];

let db_version_nr = version_nr(db_version);
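
/*
 * Example: version_nr() packs the three version components into a single
 * integer in base 1024, so for the current schema version:
 *
 *     version_nr([1, 0, 1]) === 1 * 1024 ** 2 + 0 * 1024 + 1 === 1048577
 *
 * This packed number is what gets passed to indexedDB.open() below.
 */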

const stores = [
    ["file",      {keyPath: "sha256"}],
    ["file_uses", {keyPath: "sha256"}],
    ["resource",  {keyPath: "identifier"}],
    ["mapping",   {keyPath: "identifier"}],
    ["setting",   {keyPath: "name"}],
    ["blocking",  {keyPath: "pattern"}],
    ["repo",      {keyPath: "url"}]
];

let db = null;

/* Generate a Promise that resolves when an IndexedDB request succeeds. */
async function wait_request(idb_request)
{
    let resolve, reject;
    const waiter = new Promise((...cbs) => [resolve, reject] = cbs);
    [idb_request.onsuccess, idb_request.onerror] = [resolve, reject];
    return waiter;
}

/* asynchronous wrapper for IDBObjectStore's get() method. */
async function idb_get(transaction, store_name, key)
{
    const req = transaction.objectStore(store_name).get(key);
    return (await wait_request(req)).target.result;
}
#EXPORT idb_get

/* asynchronous wrapper for IDBObjectStore's put() method. */
async function idb_put(transaction, store_name, object)
{
    return wait_request(transaction.objectStore(store_name).put(object));
}

/* asynchronous wrapper for IDBObjectStore's delete() method. */
async function idb_del(transaction, store_name, key)
{
    return wait_request(transaction.objectStore(store_name).delete(key));
}

/*
 * Handler for the open request's "upgradeneeded" event: create object stores
 * on first run, migrate data between schema versions and populate a fresh
 * database with initial_data.
 */
async function perform_upgrade(event) {
    if (event.oldVersion > db_version_nr)
        throw "bad db version: " + event.oldVersion;

    const opened_db = event.target.result;
    const transaction = event.target.transaction;

    if (event.oldVersion == 0) {
        for (const [store_name, key_mode] of stores)
            opened_db.createObjectStore(store_name, key_mode);
    }

    /* Upgrading from an older schema version: replace the Hydrilla v1 API
       repo entry with its v2 counterpart. */
    if (event.oldVersion > 0 && event.oldVersion < db_version_nr) {
        const v1_url = "https://hydrilla.koszko.org/api_v1/";
        const v1_entry = await idb_get(transaction, "repo", v1_url);

        if (v1_entry) {
            const v2_url = "https://hydrilla.koszko.org/api_v2/";

            await idb_del(transaction, "repo", v1_url);
            await idb_put(transaction, "repo", {url: v2_url});
        }
    }

    if (event.oldVersion == 0) {
        const ctx = make_context(transaction, initial_data.file);
        await _save_items(initial_data.resource, initial_data.mapping,
                          initial_data.repo || [], ctx);
    } else {
        /* For non-initial upgrades, just wait for the upgrade transaction to
           complete. */
        await new Promise(
            (...cbs) => [transaction.oncomplete, transaction.onerror] = cbs
        );
    }

    return opened_db;
}

/* Open the haketilo database, asynchronously return an IDBDatabase object. */
async function get_db() {
    if (db)
        return db;

    let resolve, reject;
    const waiter = new Promise((...cbs) => [resolve, reject] = cbs);

    const request = indexedDB.open("haketilo", db_version_nr);
    request.onsuccess = ev => resolve(ev.target.result);
    request.onerror = ev => reject("db error: " + ev.target.errorCode);
    request.onupgradeneeded = ev => perform_upgrade(ev).then(resolve, reject);

    const opened_db = await waiter;

    /* If a concurrent get_db() call won the race, keep its handle and close
       the duplicate one. */
    if (db)
        opened_db.close();
    else
        db = opened_db;

    return db;
}
#EXPORT get_db AS get

/* Helper used by make_context(): discard the context's prepared broadcasts,
   close its sender connection and reject its result promise. */
function reject_discard(context)
{
    broadcast.discard(context.sender);
    broadcast.close(context.sender);
    context.reject();
}

/* Helper used by make_context(): close the context's sender connection and
   resolve its result promise. */
function resolve_flush(context)
{
    broadcast.close(context.sender);
    context.resolve();
}

/* Helper function used by start_items_transaction() and get_db(). */
function make_context(transaction, files)
{
    const sender = broadcast.sender_connection();

    files = files || {};
    let resolve, reject;
    const result = new Promise((...cbs) => [resolve, reject] = cbs);

    const context =
        {sender, transaction, resolve, reject, result, files, file_uses: {}};

    transaction.oncomplete = () => resolve_flush(context);
    transaction.onerror = () => reject_discard(context);

    return context;
}

/*
 * item_store_names should be an array containing the string "mapping", the
 * string "resource", or both. files should be an object with a "sha256"
 * property whose value is another object mapping hexadecimal representations
 * of files' SHA256 sums to the contents of the files that might need to be
 * saved in this transaction.
 *
 * Returned is a context object wrapping the transaction and handling the
 * counting of file references in IndexedDB.
 */
async function start_items_transaction(item_store_names, files)
{
    const db = await get_db();
    const scope = [...item_store_names, "file", "file_uses"];
    return make_context(db.transaction(scope, "readwrite"), files);
}
#EXPORT start_items_transaction
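
/*
 * Illustrative use of the items transaction API (a sketch only; the resource
 * definition and file contents below are made-up placeholders):
 *
 *     const files = {sha256: {
 *         "f9444510dc7403e41049deb133f6892aa6a63c05591b2b59e4ee5b234d7bbd99":
 *             "console.log(\"hello\");\n"
 *     }};
 *     const ctx = await start_items_transaction(["resource"], files);
 *     await save_item(some_resource_definition, ctx);
 *     await finalize_transaction(ctx);
 *
 * save_items() below wraps this sequence.
 */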

/* Load a file's use counter into the context (if not cached yet) and change it
   by `by`. The counters only get written back in finalize_transaction(). */
async function incr_file_uses(context, file_ref, by=1)
{
    const sha256 = file_ref.sha256;
    let uses = context.file_uses[sha256];
    if (uses === undefined) {
        uses = await idb_get(context.transaction, "file_uses", sha256);
        if (uses)
            [uses.new, uses.initial] = [false, uses.uses];
        else
            uses = {sha256, uses: 0, new: true, initial: 0};

        context.file_uses[sha256] = uses;
    }

    uses.uses = uses.uses + by;
}

const decr_file_uses = (ctx, file_ref) => incr_file_uses(ctx, file_ref, -1);

/*
 * Flush the file use counters accumulated in the context: write new files and
 * updated counters to IndexedDB, delete files that are no longer referenced
 * and return the context's result promise (settled when the underlying
 * transaction completes or fails).
 */
async function finalize_transaction(context)
{
    for (const uses of Object.values(context.file_uses)) {
        if (uses.uses < 0)
            console.error("Haketilo: internal error: uses < 0 for file " + uses.sha256);

        const is_new = uses.new;
        const initial_uses = uses.initial;
        const sha256 = uses.sha256;

        delete uses.new;
        delete uses.initial;

        if (uses.uses < 1) {
            /* Last reference gone: remove the file and its counter (unless
               they were never stored in the first place). */
            if (!is_new) {
                idb_del(context.transaction, "file_uses", sha256);
                idb_del(context.transaction, "file", sha256);
            }

            continue;
        }

        if (uses.uses === initial_uses)
            continue;

        idb_put(context.transaction, "file_uses", uses);

        if (initial_uses > 0)
            continue;

        /* The file was not in IndexedDB before; its contents must have been
           supplied through the context's `files` object. */
        const file = context.files.sha256[sha256];
        if (file === undefined) {
            context.transaction.abort();
            throw "file not present: " + sha256;
        }

        idb_put(context.transaction, "file", {sha256, contents: file});
    }

    return context.result;
}
#EXPORT finalize_transaction

/*
 * How a sample `data` argument to the function below might look:
 *
 * data = {
 *     resource: {
 *         "resource1": {
 *             "1": {
 *                 // some stuff
 *             },
 *             "1.1": {
 *                 // some stuff
 *             }
 *         },
 *         "resource2": {
 *             "0.4.3": {
 *                 // some stuff
 *             }
 *         },
 *     },
 *     mapping: {
 *         "mapping1": {
 *             "2": {
 *                 // some stuff
 *             }
 *         },
 *         "mapping2": {
 *             "0.1": {
 *                 // some stuff
 *             }
 *         },
 *     },
 *     file: {
 *         sha256: {
 *             "f9444510dc7403e41049deb133f6892aa6a63c05591b2b59e4ee5b234d7bbd99": "console.log(\"hello\");\n",
 *             "b857cd521cc82fff30f0d316deba38b980d66db29a5388eb6004579cf743c6fd": "console.log(\"bye\");"
 *         }
 *     }
 * }
 */
async function save_items(data)
{
    const item_store_names = ["resource", "mapping"];
    if ("repo" in data)
        item_store_names.push("repo");

    const context = await start_items_transaction(item_store_names, data.file);

    return _save_items(data.resource, data.mapping, data.repo || [], context);
}
#EXPORT save_items
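
/*
 * With `data` shaped as in the comment above, a caller simply does:
 *
 *     await save_items(data);
 *
 * The items transaction is started and finalized internally.
 */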

/* Common backend of save_items() above and of the initial-data population in
   perform_upgrade(). */
async function _save_items(resources, mappings, repos, context)
{
    resources = Object.values(resources || {}).map(entities.get_newest);
    mappings = Object.values(mappings || {}).map(entities.get_newest);

    for (const item of resources.concat(mappings))
        await save_item(item, context);

    for (const repo_url of repos) {
        broadcast.prepare(context.sender, "idb_changes_repo", repo_url);
        await idb_put(context.transaction, "repo", {url: repo_url});
    }

    await finalize_transaction(context);
}

/*
 * Save given definition of a resource/mapping to IndexedDB. If the definition
 * (passed as `item`) references files that are not already present in
 * IndexedDB, those files should be provided as values of the `files' object
 * used to create the transaction context.
 *
 * context should be one returned from start_items_transaction() and should be
 * later passed to finalize_transaction() so that files depended on are added to
 * IndexedDB and files that are no longer depended on after this operation are
 * removed from IndexedDB.
 */
async function save_item(item, context)
{
    for (const file_ref of entities.get_files(item))
        await incr_file_uses(context, file_ref);

    broadcast.prepare(context.sender, `idb_changes_${item.type}`,
                      item.identifier);
    await _remove_item(item.type, item.identifier, context);
    await idb_put(context.transaction, item.type, item);
}
#EXPORT save_item

/* Helper function used by remove_item() and save_item(). */
async function _remove_item(store_name, identifier, context)
{
    const item = await idb_get(context.transaction, store_name, identifier);
    if (item !== undefined) {
        for (const file_ref of entities.get_files(item))
            await decr_file_uses(context, file_ref);
    }
}

/*
 * Remove definition of a resource/mapping from IndexedDB.
 *
 * context should be one returned from start_items_transaction() and should be
 * later passed to finalize_transaction() so that files depended on are added to
 * IndexedDB and files that are no longer depended on after this operation are
 * removed from IndexedDB.
 */
async function remove_item(store_name, identifier, context)
{
    broadcast.prepare(context.sender, `idb_changes_${store_name}`, identifier);
    await _remove_item(store_name, identifier, context);
    await idb_del(context.transaction, store_name, identifier);
}

const remove_resource = (id, ctx) => remove_item("resource", id, ctx);
#EXPORT remove_resource

const remove_mapping = (id, ctx) => remove_item("mapping", id, ctx);
#EXPORT remove_mapping

/* Function to retrieve all items from a given store. */
async function get_all(store_name)
{
    const transaction = (await get_db()).transaction([store_name]);
    const all_req = transaction.objectStore(store_name).getAll();

    return (await wait_request(all_req)).target.result;
}
#EXPORT get_all

/*
 * A simplified kind of transaction for modifying stores without special
 * inter-store integrity constraints ("setting", "blocking", "repo").
 */
async function start_simple_transaction(store_name)
{
    const db = await get_db();
    return make_context(db.transaction(store_name, "readwrite"), {});
}

/* Functions to access the "setting" store. */
async function set_setting(name, value)
{
    const context = await start_simple_transaction("setting");
    broadcast.prepare(context.sender, "idb_changes_setting", name);
    await idb_put(context.transaction, "setting", {name, value});
    return finalize_transaction(context);
}
#EXPORT set_setting

async function get_setting(name)
{
    const transaction = (await get_db()).transaction("setting");
    return ((await idb_get(transaction, "setting", name)) || {}).value;
}
#EXPORT get_setting
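
/*
 * Example round trip (the setting name is a made-up placeholder):
 *
 *     await set_setting("example_option", 42);
 *     await get_setting("example_option");    // -> 42
 *     await get_setting("never_set_option");  // -> undefined
 *
 * Note that get_setting() returns the bare value, not the {name, value}
 * record stored in IndexedDB.
 */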

/* Functions to access the "blocking" store. */
async function set_allowed(pattern, allow=true)
{
    const context = await start_simple_transaction("blocking");
    broadcast.prepare(context.sender, "idb_changes_blocking", pattern);
    if (allow === null)
        await idb_del(context.transaction, "blocking", pattern);
    else
        await idb_put(context.transaction, "blocking", {pattern, allow});
    return finalize_transaction(context);
}
#EXPORT set_allowed

const set_disallowed = pattern => set_allowed(pattern, false);
#EXPORT set_disallowed

const set_default_allowing = pattern => set_allowed(pattern, null);
#EXPORT set_default_allowing

async function get_allowing(pattern)
{
    const transaction = (await get_db()).transaction("blocking");
    return ((await idb_get(transaction, "blocking", pattern)) || {}).allow;
}
#EXPORT get_allowing
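
/*
 * The blocking setting for a pattern is tri-state (the pattern string below
 * is just an illustration):
 *
 *     await set_allowed("https://example.com/*");          // allow: true
 *     await set_disallowed("https://example.com/*");       // allow: false
 *     await set_default_allowing("https://example.com/*"); // entry deleted
 *
 * get_allowing() correspondingly resolves to true, false or undefined.
 */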

/* Functions to access the "repo" store. */
async function set_repo(url, remove=false)
{
    const context = await start_simple_transaction("repo");
    broadcast.prepare(context.sender, "idb_changes_repo", url);
    if (remove)
        await idb_del(context.transaction, "repo", url);
    else
        await idb_put(context.transaction, "repo", {url});
    return finalize_transaction(context);
}
#EXPORT set_repo

const del_repo = url => set_repo(url, true);
#EXPORT del_repo

const get_repos = () => get_all("repo").then(list => list.map(obj => obj.url));
#EXPORT get_repos

/* Callback used when listening to broadcasts while tracking db changes. */
async function track_change(tracking, key)
{
    const transaction = (await get_db()).transaction([tracking.store_name]);
    const new_val = await idb_get(transaction, tracking.store_name, key);

    tracking.onchange({key, new_val});
}

/*
 * Monitor changes to the `store_name` IndexedDB object store.
 *
 * `store_name` should be either "resource", "mapping", "setting", "blocking"
 * or "repo".
 *
 * `onchange` should be a callback that will be called when an item is added
 * to, modified in or removed from the store. The callback will be passed an
 * object representing the change as its first argument. This object will have
 * the form:
 * {
 *     key: "the identifier of the modified resource/mapping or settings key",
 *     new_val: undefined // `undefined` if item removed, item object otherwise
 * }
 *
 * Returns a [tracking, all_current_items] array where `tracking` is an object
 * that can be later passed to untrack() to stop tracking changes and
 * `all_current_items` is an array of items currently present in the object
 * store.
 *
 * It is possible that `onchange` gets spuriously fired even when an item is
 * not actually modified, or that it only gets called once after multiple quick
 * changes to an item.
 */
async function start_tracking(store_name, onchange)
{
    const tracking = {store_name, onchange};
    tracking.listener =
        broadcast.listener_connection(msg => track_change(tracking, msg[1]));
    broadcast.subscribe(tracking.listener, `idb_changes_${store_name}`);

    return [tracking, await get_all(store_name)];
}

const track = {};
const trackable = ["resource", "mapping", "setting", "blocking", "repo"];
for (const store_name of trackable)
    track[store_name] = onchange => start_tracking(store_name, onchange);
#EXPORT track

const untrack = tracking => broadcast.close(tracking.listener);
#EXPORT untrack
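
/*
 * Example of tracking the "setting" store (the callback body is illustrative):
 *
 *     const [tracking, current_settings] =
 *         await track.setting(change => console.log(change.key,
 *                                                   change.new_val));
 *     // ... later, when updates are no longer needed:
 *     untrack(tracking);
 */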