Skip to content

Commit

Permalink
update vendor/loadScene
Browse files Browse the repository at this point in the history
  • Loading branch information
dai-shi committed Jun 10, 2022
1 parent 1c6687d commit 7bdb18a
Show file tree
Hide file tree
Showing 3 changed files with 223 additions and 51 deletions.
4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,14 @@
"module": "./dist/library.js",
"types": "./dist/library.d.ts",
"dependencies": {
"@excalidraw/excalidraw": "0.11.0"
"@excalidraw/excalidraw": "0.11.0",
"pako": "1.0.11"
},
"devDependencies": {
"@testing-library/jest-dom": "^5.16.2",
"@testing-library/react": "^12.1.3",
"@types/dom-mediacapture-record": "^1.0.11",
"@types/pako": "^1.0.3",
"@types/react": "^17.0.39",
"@types/react-dom": "^17.0.11",
"browser-fs-access": "^0.24.0",
Expand Down
260 changes: 210 additions & 50 deletions src/vendor/loadScene.ts
Original file line number Diff line number Diff line change
@@ -1,15 +1,94 @@
import { inflate } from "pako";
import { restore } from "@excalidraw/excalidraw";
import type {
ImportedDataState
} from "@excalidraw/excalidraw/types/data/types";
import type { ImportedDataState } from "@excalidraw/excalidraw/types/data/types";

const t = (s: string) => s;

const BACKEND_GET = "https://json.excalidraw.com/api/v1/";
// const BACKEND_GET = "https://json.excalidraw.com/api/v1/";
const BACKEND_V2_GET = "https://json.excalidraw.com/api/v2/";
const IV_LENGTH_BYTES = 12; // 96 bits
const ENCRYPTION_KEY_BITS = 128;
const CONCAT_BUFFERS_VERSION = 1;
const VERSION_DATAVIEW_BYTES = 4;
const NEXT_CHUNK_SIZE_DATAVIEW_BYTES = 4;
const DATA_VIEW_BITS_MAP = { 1: 8, 2: 16, 4: 32 } as const;

const getImportedKey = (key: string, usage: KeyUsage) =>
function dataView(buffer: Uint8Array, bytes: 1 | 2 | 4, offset: number): number;
function dataView(
buffer: Uint8Array,
bytes: 1 | 2 | 4,
offset: number,
value: number
): Uint8Array;
function dataView(
buffer: Uint8Array,
bytes: 1 | 2 | 4,
offset: number,
value?: number
): Uint8Array | number {
if (value != null) {
if (value > Math.pow(2, DATA_VIEW_BITS_MAP[bytes]) - 1) {
throw new Error(
`attempting to set value higher than the allocated bytes (value: ${value}, bytes: ${bytes})`
);
}
const method = `setUint${DATA_VIEW_BITS_MAP[bytes]}` as const;
new DataView(buffer.buffer)[method](offset, value);
return buffer;
}
const method = `getUint${DATA_VIEW_BITS_MAP[bytes]}` as const;
return new DataView(buffer.buffer)[method](offset);
}

/**
 * Splits a concatenated buffer back into its individual chunks.
 * Layout: [version][chunkSize][chunk][chunkSize][chunk]...
 *
 * @throws when the leading version field is newer than we support — this
 *   usually means the buffer wasn't encoded using this API, so we'd only
 *   waste compute.
 */
const splitBuffers = (concatenatedBuffer: Uint8Array) => {
  // first chunk is the version
  const version = dataView(concatenatedBuffer, NEXT_CHUNK_SIZE_DATAVIEW_BYTES, 0);
  if (version > CONCAT_BUFFERS_VERSION) {
    throw new Error(`invalid version ${version}`);
  }

  const chunks: Uint8Array[] = [];
  let position = VERSION_DATAVIEW_BYTES;

  // every chunk is prefixed with its byte length; walk until the buffer is
  // fully consumed (at least one chunk is always read, as before)
  do {
    const size = dataView(
      concatenatedBuffer,
      NEXT_CHUNK_SIZE_DATAVIEW_BYTES,
      position
    );
    position += NEXT_CHUNK_SIZE_DATAVIEW_BYTES;
    chunks.push(concatenatedBuffer.slice(position, position + size));
    position += size;
  } while (position < concatenatedBuffer.byteLength);

  return chunks;
};

type FileEncodingInfo = {
  /* version 2 is the version we're shipping the initial image support with.
     version 1 was a PR version that a lot of people were using anyway.
     Thus, if there are issues we can check whether they're using the
     unofficial version */
  version: 1 | 2;
  // compression applied to the payload; null when stored uncompressed
  compression: "pako@1" | null;
  // encryption applied to the payload; null when stored in plaintext
  encryption: "AES-GCM" | null;
};

const getCryptoKey = (key: string, usage: KeyUsage) =>
window.crypto.subtle.importKey(
"jwk",
{
Expand All @@ -21,72 +100,153 @@ const getImportedKey = (key: string, usage: KeyUsage) =>
},
{
name: "AES-GCM",
length: 128,
length: ENCRYPTION_KEY_BITS,
},
false, // extractable
[usage],
[usage]
);

const decryptImported = async (
iv: ArrayBuffer | Uint8Array,
encrypted: ArrayBuffer,
privateKey: string,
const decryptData = async (
iv: Uint8Array,
encrypted: Uint8Array | ArrayBuffer,
privateKey: string
): Promise<ArrayBuffer> => {
const key = await getImportedKey(privateKey, "decrypt");
const key = await getCryptoKey(privateKey, "decrypt");
return window.crypto.subtle.decrypt(
{
name: "AES-GCM",
iv,
},
key,
encrypted,
encrypted
);
};

const _decryptAndDecompress = async (
iv: Uint8Array,
decryptedBuffer: Uint8Array,
decryptionKey: string,
isCompressed: boolean
) => {
decryptedBuffer = new Uint8Array(
await decryptData(iv, decryptedBuffer, decryptionKey)
);

if (isCompressed) {
return inflate(decryptedBuffer);
}

return decryptedBuffer;
};

/**
 * Decodes a concat-buffers payload: splits it into
 * [encoding metadata, iv, contents], decrypts (and, per the metadata's
 * compression flag, decompresses) the contents, then splits those into their
 * own JSON metadata + raw data chunks.
 *
 * @param bufferView the full encoded payload
 * @param options.decryptionKey key used for the AES-GCM decryption
 * @throws rethrows any decryption/decompression error after logging context
 */
const decompressData = async <T extends Record<string, any>>(
  bufferView: Uint8Array,
  options: { decryptionKey: string }
) => {
  // first chunk describes how the rest was encoded (its `compression` flag
  // drives the decompression step below)
  const [encodingMetadataBuffer, iv, buffer] = splitBuffers(bufferView);

  const encodingMetadata: FileEncodingInfo = JSON.parse(
    new TextDecoder().decode(encodingMetadataBuffer)
  );

  try {
    const [contentsMetadataBuffer, contentsBuffer] = splitBuffers(
      await _decryptAndDecompress(
        iv,
        buffer,
        options.decryptionKey,
        !!encodingMetadata.compression
      )
    );

    const metadata = JSON.parse(
      new TextDecoder().decode(contentsMetadataBuffer)
    ) as T;

    return {
      /** metadata source is always JSON so we can decode it here */
      metadata,
      /** data can be anything so the caller must decode it */
      data: contentsBuffer,
    };
  } catch (error: any) {
    console.error(
      `Error during decompressing and decrypting the file.`,
      encodingMetadata
    );
    throw error;
  }
};

/**
 * Decodes a share-link payload stored in the pre-concat-buffers format:
 * [IV][ciphertext], falling back to an all-zero IV for even older payloads.
 */
const legacy_decodeFromBackend = async ({
  buffer,
  decryptionKey,
}: {
  buffer: ArrayBuffer;
  decryptionKey: string;
}) => {
  let decrypted: ArrayBuffer;

  try {
    // Buffer should contain both the IV (fixed length) and encrypted data
    const ivSlice = new Uint8Array(buffer.slice(0, IV_LENGTH_BYTES));
    const ciphertext = buffer.slice(IV_LENGTH_BYTES, buffer.byteLength);
    decrypted = await decryptData(ivSlice, ciphertext, decryptionKey);
  } catch {
    // Fixed IV (old format, backward compatibility)
    decrypted = await decryptData(
      new Uint8Array(IV_LENGTH_BYTES),
      buffer,
      decryptionKey
    );
  }

  // We need to convert the decrypted array buffer to a string
  const json = new window.TextDecoder("utf-8").decode(new Uint8Array(decrypted));
  const parsed: ImportedDataState = JSON.parse(json);

  return {
    elements: parsed.elements || null,
    appState: parsed.appState || null,
  };
};

const importFromBackend = async (
id: string | null,
privateKey?: string | null,
id: string,
decryptionKey: string
): Promise<ImportedDataState> => {
try {
const response = await fetch(
privateKey ? `${BACKEND_V2_GET}${id}` : `${BACKEND_GET}${id}.json`,
);
const response = await fetch(`${BACKEND_V2_GET}${id}`);

if (!response.ok) {
window.alert(t("alerts.importBackendFailed"));
return {};
}
let data: ImportedDataState;
if (privateKey) {
const buffer = await response.arrayBuffer();

let decrypted: ArrayBuffer;
try {
// Buffer should contain both the IV (fixed length) and encrypted data
const iv = buffer.slice(0, IV_LENGTH_BYTES);
const encrypted = buffer.slice(IV_LENGTH_BYTES, buffer.byteLength);
decrypted = await decryptImported(iv, encrypted, privateKey);
} catch (error) {
// Fixed IV (old format, backward compatibility)
const fixedIv = new Uint8Array(IV_LENGTH_BYTES);
decrypted = await decryptImported(fixedIv, buffer, privateKey);
}

// We need to convert the decrypted array buffer to a string
const string = new window.TextDecoder("utf-8").decode(
new Uint8Array(decrypted),
const buffer = await response.arrayBuffer();

try {
const { data: decodedBuffer } = await decompressData(
new Uint8Array(buffer),
{
decryptionKey,
}
);
const data: ImportedDataState = JSON.parse(
new TextDecoder().decode(decodedBuffer)
);
data = JSON.parse(string);
} else {
// Legacy format
data = await response.json();
}

return {
elements: data.elements || null,
appState: data.appState || null,
};
} catch (error) {
return {
elements: data.elements || null,
appState: data.appState || null,
};
} catch (error: any) {
console.warn(
"error when decoding shareLink data using the new format:",
error
);
return legacy_decodeFromBackend({ buffer, decryptionKey });
}
} catch (error: any) {
window.alert(t("alerts.importBackendFailed"));
console.error(error);
return {};
Expand All @@ -99,16 +259,16 @@ export const loadScene = async (
// Supply local state even if importing from backend to ensure we restore
// localStorage user settings which we do not persist on server.
// Non-optional so we don't forget to pass it even if `undefined`.
localDataState: ImportedDataState | undefined | null,
localDataState: ImportedDataState | undefined | null
) => {
let data;
if (id != null) {
if (id != null && privateKey != null) {
// the private key is used to decrypt the content from the server, take
// extra care not to leak it
data = restore(
await importFromBackend(id, privateKey),
localDataState?.appState,
localDataState?.elements,
localDataState?.elements
);
} else {
data = restore(localDataState || null, null, null);
Expand Down
10 changes: 10 additions & 0 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -1817,6 +1817,11 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.18.tgz#3b4fed5cfb58010e3a2be4b6e74615e4847f1074"
integrity sha512-eKj4f/BsN/qcculZiRSujogjvp5O/k4lOW5m35NopjZM/QwLOR075a8pJW5hD+Rtdm2DaCVPENS6KtSQnUD6BA==

"@types/pako@^1.0.3":
version "1.0.4"
resolved "https://registry.yarnpkg.com/@types/pako/-/pako-1.0.4.tgz#b4262aef92680a9331fcdb8420c69cf3dd98d3f3"
integrity sha512-Z+5bJSm28EXBSUJEgx29ioWeEEHUh6TiMkZHDhLwjc9wVFH+ressbkmX6waUZc5R3Gobn4Qu5llGxaoflZ+yhA==

"@types/parse-json@^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0"
Expand Down Expand Up @@ -6233,6 +6238,11 @@ p-try@^2.0.0:
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==

[email protected]:
version "1.0.11"
resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf"
integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==

param-case@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5"
Expand Down

0 comments on commit 7bdb18a

Please sign in to comment.