haketilo / common / indexeddb.js @ 5ed09841

/**
 * This file is part of Haketilo.
 *
 * Function: Facilitate use of IndexedDB within Haketilo.
 *
 * Copyright (C) 2021, 2022 Wojtek Kosior <koszko@koszko.org>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * As additional permission under GNU GPL version 3 section 7, you
 * may distribute forms of that code without the copy of the GNU
 * GPL normally required by section 4, provided you include this
 * license notice and, in case of non-source distribution, a URL
 * through which recipients can access the Corresponding Source.
 * If you modify file(s) with this exception, you may extend this
 * exception to your version of the file(s), but you are not
 * obligated to do so. If you do not wish to do so, delete this
 * exception statement from your version.
 *
 * As a special exception to the GPL, any HTML file which merely
 * makes function calls to this code, and for that purpose
 * includes it by reference shall be deemed a separate work for
 * copyright law purposes. If you modify this code, you may extend
 * this exception to your version of the code, but you are not
 * obligated to do so. If you do not wish to do so, delete this
 * exception statement from your version.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>.
 *
 * I, Wojtek Kosior, thereby promise not to sue for violation of this file's
 * license. Although I request that you do not make use of this code in a
 * proprietary program, I am not going to enforce this in court.
 */

#IMPORT common/entities.js
#IMPORT common/broadcast.js

let initial_data = (
#IF UNIT_TEST
    {}
#ELSE
#INCLUDE default_settings.json
#ENDIF
);

/* Update when changes are made to the database schema. Must have 3 elements. */
const db_version = [1, 0, 0];

const nr_reductor = ([i, s], num) => [i - 1, s + num * 1024 ** i];
const version_nr = ver => ver.slice(0, 3).reduce(nr_reductor, [2, 0])[1];

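/*
 * For illustration: version_nr() folds the first 3 elements of a version
 * array into a single integer using base 1024, e.g.
 *
 *     version_nr([1, 0, 0])  ->  1 * 1024 ** 2 + 0 * 1024 + 0  ->  1048576
 *     version_nr([1, 2, 3])  ->  1 * 1024 ** 2 + 2 * 1024 + 3  ->  1050627
 *
 * This is the integer later passed to indexedDB.open() in get_db().
 */
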
const stores = [
    ["file",      {keyPath: "sha256"}],
    ["file_uses", {keyPath: "sha256"}],
    ["resource",  {keyPath: "identifier"}],
    ["mapping",   {keyPath: "identifier"}],
    ["setting",   {keyPath: "name"}],
    ["blocking",  {keyPath: "pattern"}],
    ["repo",      {keyPath: "url"}]
];

let db = null;

/* Generate a Promise that resolves when an IndexedDB request succeeds and
 * rejects when it errors. */
async function wait_request(idb_request)
{
    let resolve, reject;
    const waiter = new Promise((...cbs) => [resolve, reject] = cbs);
    [idb_request.onsuccess, idb_request.onerror] = [resolve, reject];
    return waiter;
}

/* Asynchronous wrapper for IDBObjectStore's get() method. */
async function idb_get(transaction, store_name, key)
{
    const req = transaction.objectStore(store_name).get(key);
    return (await wait_request(req)).target.result;
}
#EXPORT idb_get

/* Asynchronous wrapper for IDBObjectStore's put() method. */
async function idb_put(transaction, store_name, object)
{
    return wait_request(transaction.objectStore(store_name).put(object));
}

/* Asynchronous wrapper for IDBObjectStore's delete() method. */
async function idb_del(transaction, store_name, key)
{
    return wait_request(transaction.objectStore(store_name).delete(key));
}

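/*
 * Example (illustrative sketch): the wrappers above operate on an explicitly
 * passed transaction, so several operations can share one. The setting name
 * used here is hypothetical:
 *
 *     const transaction = (await get_db()).transaction("setting", "readwrite");
 *     await idb_put(transaction, "setting", {name: "option1", value: true});
 *     const stored = await idb_get(transaction, "setting", "option1");
 */
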
async function perform_upgrade(event) {
    const opened_db = event.target.result;

    /* When we move to a new database schema, we will add upgrade logic here. */
    if (event.oldVersion > 0)
        throw "bad db version: " + event.oldVersion;

    let store;
    for (const [store_name, key_mode] of stores)
        store = opened_db.createObjectStore(store_name, key_mode);

    const ctx = make_context(store.transaction, initial_data.file);
    await _save_items(initial_data.resource, initial_data.mapping,
                      initial_data.repo || [], ctx);

    return opened_db;
}

/* Open the haketilo database, asynchronously return an IDBDatabase object. */
async function get_db() {
    if (db)
        return db;

    let resolve, reject;
    const waiter = new Promise((...cbs) => [resolve, reject] = cbs);

    const request = indexedDB.open("haketilo", version_nr(db_version));
    request.onsuccess       = ev => resolve(ev.target.result);
    request.onerror         = ev => reject("db error: " + ev.target.errorCode);
    request.onupgradeneeded = ev => perform_upgrade(ev).then(resolve, reject);

    const opened_db = await waiter;

    if (db)
        opened_db.close();
    else
        db = opened_db;

    return db;
}
#EXPORT get_db AS get

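/*
 * Example (illustrative sketch) of a direct read using the opened database;
 * the mapping identifier shown is hypothetical:
 *
 *     const transaction = (await get_db()).transaction("mapping");
 *     const mapping = await idb_get(transaction, "mapping", "example-mapping");
 */
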
/* Helper function used by make_context(). */
function reject_discard(context)
{
    broadcast.discard(context.sender);
    broadcast.close(context.sender);
    context.reject();
}

/* Helper function used by make_context(). */
function resolve_flush(context)
{
    broadcast.close(context.sender);
    context.resolve();
}

/* Helper function used by start_items_transaction(), start_simple_transaction()
 * and perform_upgrade(). */
function make_context(transaction, files)
{
    const sender = broadcast.sender_connection();

    files = files || {};
    let resolve, reject;
    const result = new Promise((...cbs) => [resolve, reject] = cbs);

    const context =
          {sender, transaction, resolve, reject, result, files, file_uses: {}};

    transaction.oncomplete = () => resolve_flush(context);
    transaction.onerror = () => reject_discard(context);

    return context;
}

/*
 * item_store_names should be an array containing the string "mapping", the
 * string "resource" or both. files should be an object with an "sha256"
 * property whose value is another object mapping hexadecimal representations
 * of files' SHA256 sums to the contents of the files that might get saved in
 * this transaction.
 *
 * The return value is a context object that wraps the transaction and handles
 * the counting of file references in IndexedDB.
 */
async function start_items_transaction(item_store_names, files)
{
    const db = await get_db();
    const scope = [...item_store_names, "file", "file_uses"];
    return make_context(db.transaction(scope, "readwrite"), files);
}
#EXPORT start_items_transaction

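/*
 * Example (illustrative sketch) of the `files` argument format described
 * above; the SHA256 key is shortened and hypothetical:
 *
 *     const files = {sha256: {"f944...d99": "console.log(\"hello\");\n"}};
 *     const ctx = await start_items_transaction(["resource"], files);
 *     // ... save_item()/remove_item() calls using `ctx` go here ...
 *     await finalize_transaction(ctx);
 */
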
async function incr_file_uses(context, file_ref, by=1)
{
    const sha256 = file_ref.sha256;
    let uses = context.file_uses[sha256];
    if (uses === undefined) {
        uses = await idb_get(context.transaction, "file_uses", sha256);
        if (uses)
            [uses.new, uses.initial] = [false, uses.uses];
        else
            uses = {sha256, uses: 0, new: true, initial: 0};

        context.file_uses[sha256] = uses;
    }

    uses.uses = uses.uses + by;
}

const decr_file_uses = (ctx, file_ref) => incr_file_uses(ctx, file_ref, -1);

async function finalize_transaction(context)
{
    for (const uses of Object.values(context.file_uses)) {
        if (uses.uses < 0)
            console.error("internal error: uses < 0 for file " + uses.sha256);

        const is_new       = uses.new;
        const initial_uses = uses.initial;
        const sha256       = uses.sha256;

        delete uses.new;
        delete uses.initial;

        if (uses.uses < 1) {
            if (!is_new) {
                idb_del(context.transaction, "file_uses", sha256);
                idb_del(context.transaction, "file",      sha256);
            }

            continue;
        }

        if (uses.uses === initial_uses)
            continue;

        idb_put(context.transaction, "file_uses", uses);

        if (initial_uses > 0)
            continue;

        const file = context.files.sha256[sha256];
        if (file === undefined) {
            context.transaction.abort();
            throw "file not present: " + sha256;
        }

        idb_put(context.transaction, "file", {sha256, contents: file});
    }

    return context.result;
}
#EXPORT finalize_transaction

/*
 * A sample data argument to the function below might look like this:
 *
 * data = {
 *     resource: {
 *         "resource1": {
 *             "1": {
 *                 // some stuff
 *             },
 *             "1.1": {
 *                 // some stuff
 *             }
 *         },
 *         "resource2": {
 *             "0.4.3": {
 *                 // some stuff
 *             }
 *         },
 *     },
 *     mapping: {
 *         "mapping1": {
 *             "2": {
 *                 // some stuff
 *             }
 *         },
 *         "mapping2": {
 *             "0.1": {
 *                 // some stuff
 *             }
 *         },
 *     },
 *     file: {
 *         sha256: {
 *             "f9444510dc7403e41049deb133f6892aa6a63c05591b2b59e4ee5b234d7bbd99": "console.log(\"hello\");\n",
 *             "b857cd521cc82fff30f0d316deba38b980d66db29a5388eb6004579cf743c6fd": "console.log(\"bye\");"
 *         }
 *     }
 * }
 */
async function save_items(data)
{
    const item_store_names = ["resource", "mapping"];
    if ("repo" in data)
        item_store_names.push("repo");

    const context = await start_items_transaction(item_store_names, data.file);

    return _save_items(data.resource, data.mapping, data.repo || [], context);
}
#EXPORT save_items

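/*
 * Example (illustrative sketch): assuming `sample_data` follows the format
 * documented above, a complete import is a single call:
 *
 *     await save_items(sample_data);
 *
 * The underlying transaction is started and finalized internally.
 */
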
async function _save_items(resources, mappings, repos, context)
{
    resources = Object.values(resources || {}).map(entities.get_newest);
    mappings  = Object.values(mappings  || {}).map(entities.get_newest);

    for (const item of resources.concat(mappings))
        await save_item(item, context);

    for (const repo_url of repos) {
        broadcast.prepare(context.sender, "idb_changes_repo", repo_url);
        await idb_put(context.transaction, "repo", {url: repo_url});
    }

    await finalize_transaction(context);
}

/*
 * Save the given definition of a resource/mapping to IndexedDB. If the
 * definition (passed as `item`) references files that are not already present
 * in IndexedDB, those files should be provided as values of the `files` object
 * used to create the transaction context.
 *
 * context should be one returned from start_items_transaction() and should
 * later be passed to finalize_transaction() so that files depended on are
 * added to IndexedDB and files that are no longer depended on after this
 * operation are removed from IndexedDB.
 */
async function save_item(item, context)
{
    for (const file_ref of entities.get_files(item))
        await incr_file_uses(context, file_ref);

    broadcast.prepare(context.sender, `idb_changes_${item.type}`,
                      item.identifier);
    await _remove_item(item.type, item.identifier, context, false);
    await idb_put(context.transaction, item.type, item);
}
#EXPORT save_item

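/*
 * Example (illustrative sketch) of the save flow described above;
 * `some_resource` stands for a resource definition object and `files` for the
 * contents of any new files it references:
 *
 *     const ctx = await start_items_transaction(["resource"], files);
 *     await save_item(some_resource, ctx);
 *     await finalize_transaction(ctx);
 */
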
/* Helper function used by remove_item() and save_item(). */
async function _remove_item(store_name, identifier, context)
{
    const item = await idb_get(context.transaction, store_name, identifier);
    if (item !== undefined) {
        for (const file_ref of entities.get_files(item))
            await decr_file_uses(context, file_ref);
    }
}

/*
 * Remove the definition of a resource/mapping from IndexedDB.
 *
 * context should be one returned from start_items_transaction() and should
 * later be passed to finalize_transaction() so that files depended on are
 * added to IndexedDB and files that are no longer depended on after this
 * operation are removed from IndexedDB.
 */
async function remove_item(store_name, identifier, context)
{
    broadcast.prepare(context.sender, `idb_changes_${store_name}`, identifier);
    await _remove_item(store_name, identifier, context);
    await idb_del(context.transaction, store_name, identifier);
}

const remove_resource = (id, ctx) => remove_item("resource", id, ctx);
#EXPORT remove_resource

const remove_mapping = (id, ctx) => remove_item("mapping",  id, ctx);
#EXPORT remove_mapping

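/*
 * Example (illustrative sketch): removing a mapping with a hypothetical
 * identifier; no new files are involved, so the files argument is omitted:
 *
 *     const ctx = await start_items_transaction(["mapping"]);
 *     await remove_mapping("example-mapping", ctx);
 *     await finalize_transaction(ctx);
 */
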
/* Function to retrieve all items from a given store. */
async function get_all(store_name)
{
    const transaction = (await get_db()).transaction([store_name]);
    const all_req = transaction.objectStore(store_name).getAll();

    return (await wait_request(all_req)).target.result;
}
#EXPORT get_all

/*
 * A simplified kind of transaction for modifying stores without special
 * inter-store integrity constraints ("setting", "blocking", "repo").
 */
async function start_simple_transaction(store_name)
{
    const db = await get_db();
    return make_context(db.transaction(store_name, "readwrite"), {});
}

/* Functions to access the "setting" store. */
async function set_setting(name, value)
{
    const context = await start_simple_transaction("setting");
    broadcast.prepare(context.sender, "idb_changes_setting", name);
    await idb_put(context.transaction, "setting", {name, value});
    return finalize_transaction(context);
}
#EXPORT set_setting

async function get_setting(name)
{
    const transaction = (await get_db()).transaction("setting");
    return ((await idb_get(transaction, "setting", name)) || {}).value;
}
#EXPORT get_setting

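/*
 * Example (illustrative sketch); the setting name is hypothetical:
 *
 *     await set_setting("option1", true);
 *     const value = await get_setting("option1");  // -> true
 */
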
/* Functions to access the "blocking" store. */
async function set_allowed(pattern, allow=true)
{
    const context = await start_simple_transaction("blocking");
    broadcast.prepare(context.sender, "idb_changes_blocking", pattern);
    if (allow === null)
        await idb_del(context.transaction, "blocking", pattern);
    else
        await idb_put(context.transaction, "blocking", {pattern, allow});
    return finalize_transaction(context);
}
#EXPORT set_allowed

const set_disallowed = pattern => set_allowed(pattern, false);
#EXPORT set_disallowed

const set_default_allowing = pattern => set_allowed(pattern, null);
#EXPORT set_default_allowing

async function get_allowing(pattern)
{
    const transaction = (await get_db()).transaction("blocking");
    return ((await idb_get(transaction, "blocking", pattern)) || {}).allow;
}
#EXPORT get_allowing

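/*
 * Example (illustrative sketch); the URL pattern shown is hypothetical:
 *
 *     await set_allowed("https://example.com/*");           // allow
 *     await set_disallowed("https://example.com/*");        // block
 *     await set_default_allowing("https://example.com/*");  // remove the entry
 *     const allow = await get_allowing("https://example.com/*");
 *     // -> true, false or undefined when no entry exists
 */
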
/* Functions to access the "repo" store. */
async function set_repo(url, remove=false)
{
    const context = await start_simple_transaction("repo");
    broadcast.prepare(context.sender, "idb_changes_repo", url);
    if (remove)
        await idb_del(context.transaction, "repo", url);
    else
        await idb_put(context.transaction, "repo", {url});
    return finalize_transaction(context);
}
#EXPORT set_repo

const del_repo = url => set_repo(url, true);
#EXPORT del_repo

const get_repos = () => get_all("repo").then(list => list.map(obj => obj.url));
#EXPORT get_repos

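/*
 * Example (illustrative sketch); the repository URL is hypothetical:
 *
 *     await set_repo("https://hydrilla.example.com/");
 *     const repo_urls = await get_repos();
 *     await del_repo("https://hydrilla.example.com/");
 */
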
/* Callback used when listening to broadcasts while tracking db changes. */
async function track_change(tracking, key)
{
    const transaction = (await get_db()).transaction([tracking.store_name]);
    const new_val = await idb_get(transaction, tracking.store_name, key);

    tracking.onchange({key, new_val});
}

/*
 * Monitor changes to the `store_name` IndexedDB object store.
 *
 * `store_name` should be either "resource", "mapping", "setting", "blocking"
 * or "repo".
 *
 * `onchange` should be a callback that will be called when an item is added
 * to, modified in or removed from the store. The callback will be passed an
 * object representing the change as its first argument. This object will have
 * the form:
 * {
 *     key: "key of the modified item (e.g. resource/mapping identifier)",
 *     new_val: undefined // `undefined` if item removed, item object otherwise
 * }
 *
 * Returns a [tracking, all_current_items] array where `tracking` is an object
 * that can later be passed to untrack() to stop tracking changes and
 * `all_current_items` is an array of items currently present in the object
 * store.
 *
 * It is possible that `onchange` gets fired spuriously even when an item has
 * not actually been modified, or that it gets called only once after multiple
 * quick changes to an item.
 */
async function start_tracking(store_name, onchange)
{
    const tracking = {store_name, onchange};
    tracking.listener =
        broadcast.listener_connection(msg => track_change(tracking, msg[1]));
    broadcast.subscribe(tracking.listener, `idb_changes_${store_name}`);

    return [tracking, await get_all(store_name)];
}

const track = {};
const trackable = ["resource", "mapping", "setting", "blocking", "repo"];
for (const store_name of trackable)
    track[store_name] = onchange => start_tracking(store_name, onchange);
#EXPORT track

const untrack = tracking => broadcast.close(tracking.listener);
#EXPORT untrack
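
/*
 * Example (illustrative sketch) of the tracking API; `on_change` is a
 * hypothetical callback:
 *
 *     const on_change = change => console.log(change.key, change.new_val);
 *     const [tracking, current_settings] = await track.setting(on_change);
 *     // ... later, when updates are no longer of interest:
 *     untrack(tracking);
 */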