Revision 96068ada
Added by koszko almost 2 years ago
content/main.js | ||
---|---|---|
11 | 11 |
/* |
12 | 12 |
* IMPORTS_START |
13 | 13 |
* IMPORT handle_page_actions |
14 |
* IMPORT extract_signed |
|
15 |
* IMPORT sign_data |
|
16 | 14 |
* IMPORT gen_nonce |
17 | 15 |
* IMPORT is_privileged_url |
16 |
* IMPORT browser |
|
18 | 17 |
* IMPORT is_chrome |
19 | 18 |
* IMPORT is_mozilla |
20 | 19 |
* IMPORT start_activity_info_server |
21 | 20 |
* IMPORT make_csp_rule |
22 | 21 |
* IMPORT csp_header_regex |
22 |
* IMPORT report_settings |
|
23 | 23 |
* IMPORTS_END |
24 | 24 |
*/ |
25 | 25 |
|
... | ... | |
29 | 29 |
|
30 | 30 |
wait_loaded(document).then(() => document.content_loaded = true); |
31 | 31 |
|
32 |
/*
 * Scan `cookie` for haketilo-* entries carrying signed policies.
 * Returns a two-element array: the newest still-valid policy matching
 * the current document's URL (or null when none qualifies) and the
 * list of signature names that decoded successfully, so the caller
 * can expire the corresponding cookies.
 */
function extract_cookie_policy(cookie, min_time)
{
    let chosen_policy = null;
    let chosen_time = -1;
    const found_signatures = [];

    for (const entry of cookie.matchAll(/haketilo-(\w*)=([^;]*)/g)) {
	const candidate = extract_signed(entry[1], entry[2]);
	if (candidate.fail)
	    continue;

	found_signatures.push(entry[1]);

	/* Keep only fresh-enough signatures, newest one winning. */
	if (candidate.time >= min_time && candidate.time >= chosen_time) {
	    /* This should succeed - it's our self-produced valid JSON. */
	    const parsed = JSON.parse(decodeURIComponent(candidate.data));
	    if (parsed.url === document.URL) {
		chosen_time = candidate.time;
		chosen_policy = parsed;
	    }
	}
    }

    return [chosen_policy, found_signatures];
}
|
59 |
|
|
60 |
/*
 * Extract a signed policy smuggled inside `url`'s fragment identifier.
 * Returns [policy, original_url]: the parsed policy (or null when the
 * payload is absent, fails signature verification, is older than
 * min_time, or was produced for a different URL) and the URL with the
 * smuggled payload stripped, suitable for history.replaceState().
 */
function extract_url_policy(url, min_time)
{
    const [base_url, payload, anchor] =
	  /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).slice(1, 4);

    const match = /^haketilo_([^_]+)_(.*)$/.exec(payload);
    if (!match)
	return [null, url];

    const result = extract_signed(...match.slice(1, 3));
    if (result.fail)
	return [null, url];

    const original_url = base_url + anchor;

    /* Reject signatures older than min_time (replay protection). */
    const policy = result.time < min_time ? null :
	  JSON.parse(decodeURIComponent(result.data));

    /*
     * `policy` is null for expired signatures; guard the dereference
     * (an unconditional `policy.url` here used to throw a TypeError).
     */
    return [policy !== null && policy.url === original_url ? policy : null,
	    original_url];
}
|
79 |
|
|
80 |
/*
 * On non-HTTP(S) pages the policy cannot travel via cookies; instead,
 * stamp it with a fresh nonce, sign it and smuggle it inside the URL
 * fragment, then reload so it gets picked up on the next page load.
 * Policies that do not allow script execution need no smuggling.
 */
function employ_nonhttp_policy(policy)
{
    if (!policy.allow)
	return;

    policy.nonce = gen_nonce();

    const url_parts = /^([^#]*)(#?.*)$/.exec(policy.url);
    const page_url = url_parts[1];
    const fragment = url_parts[2];

    const serialized_policy = encodeURIComponent(JSON.stringify(policy));
    const signed_parts = sign_data(serialized_policy, new Date().getTime());
    const smuggled_payload = `haketilo_${signed_parts.join("_")}`;

    location.href = `${page_url}#${smuggled_payload}${fragment}`;
    location.reload();
}
|
94 |
|
|
95 | 32 |
/* |
96 | 33 |
* In the case of HTML documents: |
97 | 34 |
* 1. When injecting some payload we need to sanitize <meta> CSP tags before |
... | ... | |
306 | 243 |
start_data_urls_sanitizing(doc); |
307 | 244 |
} |
308 | 245 |
|
309 |
async function disable_service_workers() |
|
246 |
async function _disable_service_workers()
|
|
310 | 247 |
{ |
311 | 248 |
if (!navigator.serviceWorker) |
312 | 249 |
return; |
... | ... | |
315 | 252 |
if (registrations.length === 0) |
316 | 253 |
return; |
317 | 254 |
|
318 |
console.warn("Service Workers detected on this page! Unregistering and reloading"); |
|
255 |
console.warn("Service Workers detected on this page! Unregistering and reloading.");
|
|
319 | 256 |
|
320 | 257 |
try { |
321 | 258 |
await Promise.all(registrations.map(r => r.unregister())); |
... | ... | |
327 | 264 |
return new Promise(() => 0); |
328 | 265 |
} |
329 | 266 |
|
330 |
if (!is_privileged_url(document.URL)) { |
|
331 |
let policy_received_callback = () => undefined; |
|
332 |
let policy; |
|
333 |
|
|
334 |
/* Signature valid for half an hour. */ |
|
335 |
const min_time = new Date().getTime() - 1800 * 1000; |
|
336 |
|
|
337 |
if (/^https?:/.test(document.URL)) { |
|
338 |
let signatures; |
|
339 |
[policy, signatures] = extract_cookie_policy(document.cookie, min_time); |
|
340 |
for (const signature of signatures) |
|
341 |
document.cookie = `haketilo-${signature}=; Max-Age=-1;`; |
|
342 |
} else { |
|
343 |
const scheme = /^([^:]*)/.exec(document.URL)[1]; |
|
344 |
const known_scheme = ["file", "ftp"].includes(scheme); |
|
345 |
|
|
346 |
if (!known_scheme) |
|
347 |
console.warn(`Unknown url scheme: \`${scheme}'!`); |
|
348 |
|
|
349 |
let original_url; |
|
350 |
[policy, original_url] = extract_url_policy(document.URL, min_time); |
|
351 |
history.replaceState(null, "", original_url); |
|
352 |
|
|
353 |
if (known_scheme && !policy) |
|
354 |
policy_received_callback = employ_nonhttp_policy; |
|
267 |
/*
 * Touching service worker APIs can itself throw an exception, for
 * example inside a non-HTML document. For that reason the function
 * doing the actual work, _disable_service_workers(), is only ever
 * invoked through this exception-swallowing wrapper.
 */
async function disable_service_workers()
{
    let worker_error = null;
    try {
	await _disable_service_workers();
    } catch (e) {
	worker_error = e;
    }
    if (worker_error !== null)
	console.debug("Exception thrown during an attempt to detect and disable service workers.", worker_error);
}
|
356 | 280 |
|
357 |
if (!policy) { |
|
358 |
console.debug("Using fallback policy!"); |
|
359 |
policy = {allow: false, nonce: gen_nonce()}; |
|
281 |
/*
 * Synchronously fetch the policy for `url` from the background script.
 * A blocking XMLHttpRequest to the extension's dummy resource gets
 * redirected by the background script to a URL whose ?settings= query
 * carries the JSON-encoded policy. Returns the parsed policy object,
 * or a {allow: false} fallback when the request fails or the response
 * URL does not carry the expected query part.
 */
function synchronously_get_policy(url)
{
    const encoded_url = encodeURIComponent(url);
    const request_url = `${browser.runtime.getURL("dummy")}?url=${encoded_url}`;

    /* Declared outside the try block (was a hoisted `var` before). */
    let xhttp;
    try {
	xhttp = new XMLHttpRequest();
	xhttp.open("GET", request_url, false);
	xhttp.send();
    } catch(e) {
	console.error("Failure to synchronously fetch policy for url.", e);
	return {allow: false};
    }

    /*
     * Guard against a response URL without the ?settings= query; the
     * previous unguarded exec(...)[1] threw a TypeError in that case.
     */
    const match = /^[^?]*\?settings=(.*)$/.exec(xhttp.responseURL);
    if (!match) {
	console.error("Failure to synchronously fetch policy for url.");
	return {allow: false};
    }

    return JSON.parse(decodeURIComponent(match[1]));
}
|
298 |
|
|
299 |
if (!is_privileged_url(document.URL)) { |
|
300 |
const policy = synchronously_get_policy(document.URL); |
|
301 |
|
|
362 | 302 |
if (!(document instanceof HTMLDocument)) |
363 |
policy.has_payload = false;
|
|
303 |
delete policy.payload;
|
|
364 | 304 |
|
365 | 305 |
console.debug("current policy", policy); |
366 | 306 |
|
307 |
report_settings(policy); |
|
308 |
|
|
309 |
policy.nonce = gen_nonce(); |
|
310 |
|
|
367 | 311 |
const doc_ready = Promise.all([ |
368 | 312 |
policy.allow ? Promise.resolve() : sanitize_document(document, policy), |
369 | 313 |
policy.allow ? Promise.resolve() : disable_service_workers(), |
370 | 314 |
wait_loaded(document) |
371 | 315 |
]); |
372 | 316 |
|
373 |
handle_page_actions(policy.nonce, policy_received_callback, doc_ready);
|
|
317 |
handle_page_actions(policy, doc_ready); |
|
374 | 318 |
|
375 | 319 |
start_activity_info_server(); |
376 | 320 |
} |
Also available in: Unified diff
Replace cookies with synchronous XMLHttpRequest as the policy smuggling method.
Note: this breaks the Mozilla port of Haketilo, as synchronous XMLHttpRequest doesn't work as well there. This will be fixed with dynamically-registered content scripts later.