Mirror of https://github.com/bitwarden/browser, synced 2025-12-20 02:03:39 +00:00
move state provider extensions to tools folder (#9436)
@@ -4,12 +4,12 @@ import { CryptoService } from "../../../platform/abstractions/crypto.service";
import { EncryptService } from "../../../platform/abstractions/encrypt.service";
import { SingleUserState, StateProvider } from "../../../platform/state";
import { UserId } from "../../../types/guid";
import { BufferedState } from "../../state/buffered-state";
import { PaddedDataPacker } from "../../state/padded-data-packer";
import { SecretState } from "../../state/secret-state";
import { UserKeyEncryptor } from "../../state/user-key-encryptor";
import { GeneratorHistoryService } from "../abstractions/generator-history.abstraction";
import { GENERATOR_HISTORY, GENERATOR_HISTORY_BUFFER } from "../key-definitions";
import { BufferedState } from "../state/buffered-state";
import { PaddedDataPacker } from "../state/padded-data-packer";
import { SecretState } from "../state/secret-state";
import { UserKeyEncryptor } from "../state/user-key-encryptor";

import { GeneratedCredential } from "./generated-credential";
import { LegacyPasswordHistoryDecryptor } from "./legacy-password-history-decryptor";

@@ -1,6 +1,9 @@
import { Jsonify } from "type-fest";

import { GENERATOR_DISK, UserKeyDefinition } from "../../platform/state";
import { BufferedKeyDefinition } from "../state/buffered-key-definition";
import { SecretClassifier } from "../state/secret-classifier";
import { SecretKeyDefinition } from "../state/secret-key-definition";

import { GeneratedCredential } from "./history/generated-credential";
import { LegacyPasswordHistoryDecryptor } from "./history/legacy-password-history-decryptor";
@@ -8,9 +11,6 @@ import { GeneratorNavigation } from "./navigation/generator-navigation";
import { PassphraseGenerationOptions } from "./passphrase/passphrase-generation-options";
import { GeneratedPasswordHistory } from "./password/generated-password-history";
import { PasswordGenerationOptions } from "./password/password-generation-options";
import { BufferedKeyDefinition } from "./state/buffered-key-definition";
import { SecretClassifier } from "./state/secret-classifier";
import { SecretKeyDefinition } from "./state/secret-key-definition";
import { CatchallGenerationOptions } from "./username/catchall-generator-options";
import { EffUsernameGenerationOptions } from "./username/eff-username-generator-options";
import {

@@ -1,119 +0,0 @@
import { GENERATOR_DISK, UserKeyDefinition } from "../../../platform/state";

import { BufferedKeyDefinition } from "./buffered-key-definition";

describe("BufferedKeyDefinition", () => {
  const deserializer = (jsonValue: number) => jsonValue + 1;

  describe("toKeyDefinition", () => {
    it("should create a key definition", () => {
      const key = new BufferedKeyDefinition(GENERATOR_DISK, "test", {
        deserializer,
        cleanupDelayMs: 5,
        clearOn: [],
      });

      const result = key.toKeyDefinition();

      expect(result).toBeInstanceOf(UserKeyDefinition);
      expect(result.stateDefinition).toBe(GENERATOR_DISK);
      expect(result.key).toBe("test");
      expect(result.deserializer(1)).toEqual(2);
      expect(result.cleanupDelayMs).toEqual(5);
    });
  });

  describe("shouldOverwrite", () => {
    it("should call the shouldOverwrite function when it's defined", async () => {
      const shouldOverwrite = jest.fn(() => true);
      const key = new BufferedKeyDefinition(GENERATOR_DISK, "test", {
        deserializer,
        shouldOverwrite,
        clearOn: [],
      });

      const result = await key.shouldOverwrite(true);

      expect(shouldOverwrite).toHaveBeenCalledWith(true);
      expect(result).toStrictEqual(true);
    });

    it("should return true when shouldOverwrite is not defined and the input is truthy", async () => {
      const key = new BufferedKeyDefinition<number, number, number>(GENERATOR_DISK, "test", {
        deserializer,
        clearOn: [],
      });

      const result = await key.shouldOverwrite(1);

      expect(result).toStrictEqual(true);
    });

    it("should return false when shouldOverwrite is not defined and the input is falsy", async () => {
      const key = new BufferedKeyDefinition<number, number, number>(GENERATOR_DISK, "test", {
        deserializer,
        clearOn: [],
      });

      const result = await key.shouldOverwrite(0);

      expect(result).toStrictEqual(false);
    });
  });

  describe("map", () => {
    it("should call the map function when it's defined", async () => {
      const map = jest.fn((value: number) => Promise.resolve(`${value}`));
      const key = new BufferedKeyDefinition(GENERATOR_DISK, "test", {
        deserializer,
        map,
        clearOn: [],
      });

      const result = await key.map(1, true);

      expect(map).toHaveBeenCalledWith(1, true);
      expect(result).toStrictEqual("1");
    });

    it("should fall back to an identity function when map is not defined", async () => {
      const key = new BufferedKeyDefinition(GENERATOR_DISK, "test", { deserializer, clearOn: [] });

      const result = await key.map(1, null);

      expect(result).toStrictEqual(1);
    });
  });

  describe("isValid", () => {
    it("should call the isValid function when it's defined", async () => {
      const isValid = jest.fn(() => Promise.resolve(true));
      const key = new BufferedKeyDefinition(GENERATOR_DISK, "test", {
        deserializer,
        isValid,
        clearOn: [],
      });

      const result = await key.isValid(1, true);

      expect(isValid).toHaveBeenCalledWith(1, true);
      expect(result).toStrictEqual(true);
    });

    it("should return true when isValid is not defined and the input is truthy", async () => {
      const key = new BufferedKeyDefinition(GENERATOR_DISK, "test", { deserializer, clearOn: [] });

      const result = await key.isValid(1, null);

      expect(result).toStrictEqual(true);
    });

    it("should return false when isValid is not defined and the input is falsy", async () => {
      const key = new BufferedKeyDefinition(GENERATOR_DISK, "test", { deserializer, clearOn: [] });

      const result = await key.isValid(0, null);

      expect(result).toStrictEqual(false);
    });
  });
});

@@ -1,104 +0,0 @@
import { UserKeyDefinition, UserKeyDefinitionOptions } from "../../../platform/state";
// eslint-disable-next-line -- `StateDefinition` used as an argument
import { StateDefinition } from "../../../platform/state/state-definition";

/** A set of options for customizing the behavior of a {@link BufferedKeyDefinition}
 */
export type BufferedKeyDefinitionOptions<Input, Output, Dependency> =
  UserKeyDefinitionOptions<Input> & {
    /** Checks whether the input type can be converted to the output type.
     * @param input the data that is rolling over.
     * @returns `true` if the definition is valid, otherwise `false`. If this
     * function is not specified, any truthy input is valid.
     *
     * @remarks this is intended for cases where you're working with validated or
     * signed data. It should be used to prevent data from being "laundered" through
     * synchronized state.
     */
    isValid?: (input: Input, dependency: Dependency) => Promise<boolean>;

    /** Transforms the input data format to its output format.
     * @param input the data that is rolling over.
     * @returns the converted value. If this function is not specified, the value
     * is asserted as the output type.
     *
     * @remarks This is intended for converting between, say, a replication format
     * and a disk format or rotating encryption keys.
     */
    map?: (input: Input, dependency: Dependency) => Promise<Output>;

    /** Checks whether an overwrite should occur
     * @param dependency the latest value from the dependency observable provided
     * to the buffered state.
     * @returns `true` if an overwrite should occur, otherwise `false`. If this
     * function is not specified, overwrites occur when the dependency is truthy.
     *
     * @remarks This is intended for waiting to overwrite until a dependency becomes
     * available (e.g. an encryption key or a user confirmation).
     */
    shouldOverwrite?: (dependency: Dependency) => boolean;
  };

/** Storage and mapping settings for data stored by a `BufferedState`.
 */
export class BufferedKeyDefinition<Input, Output = Input, Dependency = true> {
  /**
   * Defines a buffered state
   * @param stateDefinition The domain of the buffer
   * @param key Domain key that identifies the buffered value. This key must
   * not be reused in any capacity.
   * @param options Configures the operation of the buffered state.
   */
  constructor(
    readonly stateDefinition: StateDefinition,
    readonly key: string,
    readonly options: BufferedKeyDefinitionOptions<Input, Output, Dependency>,
  ) {}

  /** Converts the buffered key definition to a state provider
   * key definition
   */
  toKeyDefinition() {
    const bufferedKey = new UserKeyDefinition<Input>(this.stateDefinition, this.key, this.options);

    return bufferedKey;
  }

  /** Checks whether the dependency triggers an overwrite. */
  shouldOverwrite(dependency: Dependency) {
    const shouldOverwrite = this.options?.shouldOverwrite;
    if (shouldOverwrite) {
      return shouldOverwrite(dependency);
    }

    return dependency ? true : false;
  }

  /** Converts the input data format to its output format.
   * @returns the converted value.
   */
  map(input: Input, dependency: Dependency) {
    const map = this.options?.map;
    if (map) {
      return map(input, dependency);
    }

    return Promise.resolve(input as unknown as Output);
  }

  /** Checks whether the input type can be converted to the output type.
   * @returns `true` if the definition is defined and valid, otherwise `false`.
   */
  isValid(input: Input, dependency: Dependency) {
    if (input === null) {
      return Promise.resolve(false);
    }

    const isValid = this.options?.isValid;
    if (isValid) {
      return isValid(input, dependency);
    }

    return Promise.resolve(input ? true : false);
  }
}

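For orientation, a minimal sketch of how the options above compose. It is not part of the diff; the key name, generic arguments, and numeric example are illustrative, and GENERATOR_DISK is assumed to be in scope as in the specs above.

const EXAMPLE_BUFFER = new BufferedKeyDefinition<number, string, boolean>(GENERATOR_DISK, "exampleBuffer", {
  deserializer: (jsonValue) => jsonValue,
  clearOn: ["logout"],
  // only roll over positive values
  isValid: (input) => Promise.resolve(input > 0),
  // convert the buffered number into the output format
  map: (input) => Promise.resolve(`${input}`),
  // hold the overwrite until the dependency (e.g. an unlocked vault) is truthy
  shouldOverwrite: (unlocked) => unlocked,
});

Calling EXAMPLE_BUFFER.toKeyDefinition() then yields the UserKeyDefinition that a BufferedState uses for the buffer itself.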
@@ -1,381 +0,0 @@
import { BehaviorSubject, firstValueFrom, of } from "rxjs";

import {
  mockAccountServiceWith,
  FakeStateProvider,
  awaitAsync,
  trackEmissions,
} from "../../../../spec";
import { GENERATOR_DISK, KeyDefinition } from "../../../platform/state";
import { UserId } from "../../../types/guid";

import { BufferedKeyDefinition } from "./buffered-key-definition";
import { BufferedState } from "./buffered-state";

const SomeUser = "SomeUser" as UserId;
const accountService = mockAccountServiceWith(SomeUser);
type SomeType = { foo: boolean; bar: boolean };

const SOME_KEY = new KeyDefinition<SomeType>(GENERATOR_DISK, "fooBar", {
  deserializer: (jsonValue) => jsonValue as SomeType,
});
const BUFFER_KEY = new BufferedKeyDefinition<SomeType>(GENERATOR_DISK, "fooBar_buffer", {
  deserializer: (jsonValue) => jsonValue as SomeType,
  clearOn: [],
});

describe("BufferedState", () => {
  describe("state$", function () {
    it("reads from the output state", async () => {
      const provider = new FakeStateProvider(accountService);
      const value = { foo: true, bar: false };
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      await outputState.update(() => value);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);

      const result = await firstValueFrom(bufferedState.state$);

      expect(result).toEqual(value);
    });

    it("updates when the output state updates", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      const secondValue = { foo: true, bar: true };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);

      const result = trackEmissions(bufferedState.state$);
      await outputState.update(() => secondValue);
      await awaitAsync();

      expect(result).toEqual([firstValue, secondValue]);
    });

    // this test is important for data migrations, which set
    // the buffered state without using the `BufferedState` abstraction.
    it.each([[null], [undefined]])(
      "reads from the output state when the buffered state is '%p'",
      async (bufferValue) => {
        const provider = new FakeStateProvider(accountService);
        const outputState = provider.getUser(SomeUser, SOME_KEY);
        const firstValue = { foo: true, bar: false };
        await outputState.update(() => firstValue);
        const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);
        await provider.setUserState(BUFFER_KEY.toKeyDefinition(), bufferValue, SomeUser);

        const result = await firstValueFrom(bufferedState.state$);

        expect(result).toEqual(firstValue);
      },
    );

    // also important for data migrations
    it("rolls over pending values from the buffered state immediately by default", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const initialValue = { foo: true, bar: false };
      await outputState.update(() => initialValue);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);
      const bufferedValue = { foo: true, bar: true };
      await provider.setUserState(BUFFER_KEY.toKeyDefinition(), bufferedValue, SomeUser);

      const result = await trackEmissions(bufferedState.state$);
      await awaitAsync();

      expect(result).toEqual([initialValue, bufferedValue]);
    });

    // also important for data migrations
    it("reads from the output state when its dependency is false", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const value = { foo: true, bar: false };
      await outputState.update(() => value);
      const dependency = new BehaviorSubject<boolean>(false).asObservable();
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState, dependency);
      await provider.setUserState(BUFFER_KEY.toKeyDefinition(), { foo: true, bar: true }, SomeUser);

      const result = await firstValueFrom(bufferedState.state$);

      expect(result).toEqual(value);
    });

    // also important for data migrations
    it("overwrites the output state when its dependency emits a truthy value", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const dependency = new BehaviorSubject<boolean>(false);
      const bufferedState = new BufferedState(
        provider,
        BUFFER_KEY,
        outputState,
        dependency.asObservable(),
      );
      const bufferedValue = { foo: true, bar: true };
      await provider.setUserState(BUFFER_KEY.toKeyDefinition(), bufferedValue, SomeUser);

      const result = trackEmissions(bufferedState.state$);
      dependency.next(true);
      await awaitAsync();

      expect(result).toEqual([firstValue, bufferedValue]);
    });

    it("overwrites the output state when shouldOverwrite returns a truthy value", async () => {
      const bufferedKey = new BufferedKeyDefinition<SomeType>(GENERATOR_DISK, "fooBar_buffer", {
        deserializer: (jsonValue) => jsonValue as SomeType,
        shouldOverwrite: () => true,
        clearOn: [],
      });
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const initialValue = { foo: true, bar: false };
      await outputState.update(() => initialValue);
      const bufferedState = new BufferedState(provider, bufferedKey, outputState);
      const bufferedValue = { foo: true, bar: true };
      await provider.setUserState(bufferedKey.toKeyDefinition(), bufferedValue, SomeUser);

      const result = await trackEmissions(bufferedState.state$);
      await awaitAsync();

      expect(result).toEqual([initialValue, bufferedValue]);
    });

    it("reads from the output state when shouldOverwrite returns a falsy value", async () => {
      const bufferedKey = new BufferedKeyDefinition<SomeType>(GENERATOR_DISK, "fooBar_buffer", {
        deserializer: (jsonValue) => jsonValue as SomeType,
        shouldOverwrite: () => false,
        clearOn: [],
      });
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const value = { foo: true, bar: false };
      await outputState.update(() => value);
      const bufferedState = new BufferedState(provider, bufferedKey, outputState);
      await provider.setUserState(
        bufferedKey.toKeyDefinition(),
        { foo: true, bar: true },
        SomeUser,
      );

      const result = await firstValueFrom(bufferedState.state$);

      expect(result).toEqual(value);
    });

    it("replaces the output state when shouldOverwrite transforms its dependency to a truthy value", async () => {
      const bufferedKey = new BufferedKeyDefinition<SomeType>(GENERATOR_DISK, "fooBar_buffer", {
        deserializer: (jsonValue) => jsonValue as SomeType,
        shouldOverwrite: (dependency) => !dependency,
        clearOn: [],
      });
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const dependency = new BehaviorSubject<boolean>(true);
      const bufferedState = new BufferedState(
        provider,
        bufferedKey,
        outputState,
        dependency.asObservable(),
      );
      const bufferedValue = { foo: true, bar: true };
      await provider.setUserState(bufferedKey.toKeyDefinition(), bufferedValue, SomeUser);

      const result = trackEmissions(bufferedState.state$);
      dependency.next(false);
      await awaitAsync();

      expect(result).toEqual([firstValue, bufferedValue]);
    });
  });

  describe("userId", () => {
    const AnotherUser = "anotherUser" as UserId;

    it.each([[SomeUser], [AnotherUser]])("gets the userId", (userId) => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(userId, SOME_KEY);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);

      const result = bufferedState.userId;

      expect(result).toEqual(userId);
    });
  });

  describe("update", () => {
    it("updates state$", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      const secondValue = { foo: true, bar: true };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.update(() => secondValue);
      await awaitAsync();

      expect(result).toEqual([firstValue, secondValue]);
    });

    it("respects update options", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      const secondValue = { foo: true, bar: true };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.update(() => secondValue, {
        shouldUpdate: (_, latest) => latest,
        combineLatestWith: of(false),
      });
      await awaitAsync();

      expect(result).toEqual([firstValue]);
    });
  });

  describe("buffer", () => {
    it("updates state$ once per overwrite", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      const secondValue = { foo: true, bar: true };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.buffer(secondValue);
      await awaitAsync();

      expect(result).toEqual([firstValue, secondValue]);
    });

    it("emits the output state when its dependency is false", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const dependency = new BehaviorSubject<boolean>(false);
      const bufferedState = new BufferedState(
        provider,
        BUFFER_KEY,
        outputState,
        dependency.asObservable(),
      );
      const bufferedValue = { foo: true, bar: true };

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.buffer(bufferedValue);
      await awaitAsync();

      expect(result).toEqual([firstValue]);
    });

    it("replaces the output state when its dependency becomes true", async () => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const dependency = new BehaviorSubject<boolean>(false);
      const bufferedState = new BufferedState(
        provider,
        BUFFER_KEY,
        outputState,
        dependency.asObservable(),
      );
      const bufferedValue = { foo: true, bar: true };

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.buffer(bufferedValue);
      dependency.next(true);
      await awaitAsync();

      expect(result).toEqual([firstValue, bufferedValue]);
    });

    it.each([[null], [undefined]])("ignores `%p`", async (bufferedValue) => {
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState);

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.buffer(bufferedValue);
      await awaitAsync();

      expect(result).toEqual([firstValue]);
    });

    it("discards the buffered data when isValid returns false", async () => {
      const bufferedKey = new BufferedKeyDefinition<SomeType>(GENERATOR_DISK, "fooBar_buffer", {
        deserializer: (jsonValue) => jsonValue as SomeType,
        isValid: () => Promise.resolve(false),
        clearOn: [],
      });
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, bufferedKey, outputState);

      const stateResult = trackEmissions(bufferedState.state$);
      await bufferedState.buffer({ foo: true, bar: true });
      await awaitAsync();
      const bufferedResult = await firstValueFrom(bufferedState.bufferedState$);

      expect(stateResult).toEqual([firstValue]);
      expect(bufferedResult).toBeNull();
    });

    it("overwrites the output when isValid returns true", async () => {
      const bufferedKey = new BufferedKeyDefinition<SomeType>(GENERATOR_DISK, "fooBar_buffer", {
        deserializer: (jsonValue) => jsonValue as SomeType,
        isValid: () => Promise.resolve(true),
        clearOn: [],
      });
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, bufferedKey, outputState);
      const bufferedValue = { foo: true, bar: true };

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.buffer(bufferedValue);
      await awaitAsync();

      expect(result).toEqual([firstValue, bufferedValue]);
    });

    it("maps the buffered data when it overwrites the state", async () => {
      const mappedValue = { foo: true, bar: true };
      const bufferedKey = new BufferedKeyDefinition<SomeType>(GENERATOR_DISK, "fooBar_buffer", {
        deserializer: (jsonValue) => jsonValue as SomeType,
        map: () => Promise.resolve(mappedValue),
        clearOn: [],
      });
      const provider = new FakeStateProvider(accountService);
      const outputState = provider.getUser(SomeUser, SOME_KEY);
      const firstValue = { foo: true, bar: false };
      await outputState.update(() => firstValue);
      const bufferedState = new BufferedState(provider, bufferedKey, outputState);

      const result = trackEmissions(bufferedState.state$);
      await bufferedState.buffer({ foo: false, bar: false });
      await awaitAsync();

      expect(result).toEqual([firstValue, mappedValue]);
    });
  });
});

@@ -1,129 +0,0 @@
import { Observable, combineLatest, concatMap, filter, map, of, concat, merge } from "rxjs";

import {
  StateProvider,
  SingleUserState,
  CombinedState,
  StateUpdateOptions,
} from "../../../platform/state";

import { BufferedKeyDefinition } from "./buffered-key-definition";

/** Stateful storage that overwrites one state with a buffered state.
 * When an overwrite occurs, the input state is automatically deleted.
 * @remarks The buffered state can only overwrite non-nullish values. If the
 * buffer key contains `null` or `undefined`, it will do nothing.
 */
export class BufferedState<Input, Output, Dependency> implements SingleUserState<Output> {
  /**
   * Instantiate a buffered state
   * @param provider constructs the buffer.
   * @param key defines the buffer location.
   * @param output updates when an overwrite occurs
   * @param dependency$ provides data the buffer depends upon to evaluate and
   * transform its data. If this is omitted, then `true` is injected as
   * a dependency, which with a default output will trigger an overwrite immediately.
   *
   * @remarks `dependency$` enables overwrite control during dynamic circumstances,
   * such as when an overwrite should occur only if a user key is available.
   */
  constructor(
    provider: StateProvider,
    private key: BufferedKeyDefinition<Input, Output, Dependency>,
    private output: SingleUserState<Output>,
    dependency$: Observable<Dependency> = null,
  ) {
    this.bufferedState = provider.getUser(output.userId, key.toKeyDefinition());

    // overwrite the output value
    const hasValue$ = concat(of(null), this.bufferedState.state$).pipe(
      map((buffer) => (buffer ?? null) !== null),
    );
    const overwriteDependency$ = (dependency$ ?? of(true as unknown as Dependency)).pipe(
      map((dependency) => [key.shouldOverwrite(dependency), dependency] as const),
    );
    const overwrite$ = combineLatest([hasValue$, overwriteDependency$]).pipe(
      concatMap(async ([hasValue, [shouldOverwrite, dependency]]) => {
        if (hasValue && shouldOverwrite) {
          await this.overwriteOutput(dependency);
        }
        return [false, null] as const;
      }),
    );

    // drive overwrites only when there's a subscription;
    // the output state determines when emissions occur
    const output$ = this.output.state$.pipe(map((output) => [true, output] as const));
    this.state$ = merge(overwrite$, output$).pipe(
      filter(([emit]) => emit),
      map(([, output]) => output),
    );

    this.combinedState$ = this.state$.pipe(map((state) => [this.output.userId, state]));

    this.bufferedState$ = this.bufferedState.state$;
  }

  private bufferedState: SingleUserState<Input>;

  private async overwriteOutput(dependency: Dependency) {
    // take the latest value from the buffer
    let buffered: Input;
    await this.bufferedState.update((state) => {
      buffered = state ?? null;
      return null;
    });

    // update the output state
    const isValid = await this.key.isValid(buffered, dependency);
    if (isValid) {
      const output = await this.key.map(buffered, dependency);
      await this.output.update(() => output);
    }
  }

  /** {@link SingleUserState.userId} */
  get userId() {
    return this.output.userId;
  }

  /** Observes changes to the output state. This updates when the output
   * state updates, when the buffer is moved to the output, and when `BufferedState.buffer`
   * is invoked.
   */
  readonly state$: Observable<Output>;

  /** {@link SingleUserState.combinedState$} */
  readonly combinedState$: Observable<CombinedState<Output>>;

  /** Buffers a value state. The buffered state overwrites the output
   * state when a subscription occurs.
   * @param value the state to roll over. Setting this to `null` or `undefined`
   * has no effect.
   */
  async buffer(value: Input): Promise<void> {
    const normalized = value ?? null;
    if (normalized !== null) {
      await this.bufferedState.update(() => normalized);
    }
  }

  /** The data presently being buffered. This emits the pending value each time
   * new buffer data is provided. It emits null when the buffer is empty.
   */
  readonly bufferedState$: Observable<Input>;

  /** Updates the output state.
   * @param configureState a callback that returns an updated output
   * state. The callback receives the state's present value as its
   * first argument and the dependencies listed in `options.combineLatestWith`
   * as its second argument.
   * @param options configures how the update is applied. See {@link StateUpdateOptions}.
   */
  update<TCombine>(
    configureState: (state: Output, dependencies: TCombine) => Output,
    options: StateUpdateOptions<Output, TCombine> = null,
  ): Promise<Output> {
    return this.output.update(configureState, options);
  }
}

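A minimal wiring sketch, mirroring the spec file above. The provider, the output state, and the userKeyAvailable$ observable are assumptions standing in for whatever the caller already has; BUFFER_KEY is the definition from the spec.

// `provider` is a StateProvider, `outputState` a SingleUserState<SomeType>,
// and `userKeyAvailable$` an Observable<boolean> used as the overwrite dependency.
const bufferedState = new BufferedState(provider, BUFFER_KEY, outputState, userKeyAvailable$);

// queue a pending value; it overwrites `outputState` once the dependency emits a truthy value
await bufferedState.buffer({ foo: true, bar: true });

// subscribers first see the current output state, then the buffered value after the overwrite
bufferedState.state$.subscribe((value) => console.log(value));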
@@ -1,19 +0,0 @@
import { Jsonify } from "type-fest";

/** Describes the structure of data stored by the SecretState's
 * encrypted state. Notably, this interface ensures that `Disclosed`
 * round trips through JSON serialization. It also preserves the
 * Id.
 */
export type ClassifiedFormat<Id, Disclosed> = {
  /** Identifies records. `null` when storing a `value` */
  readonly id: Id | null;
  /** Serialized {@link EncString} of the secret state's
   * secret-level classified data.
   */
  readonly secret: string;
  /** Serialized representation of the secret state's
   * disclosed-level classified data.
   */
  readonly disclosed: Jsonify<Disclosed>;
};

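As a concrete illustration (not from the diff), one stored record in this shape might look like the following; the field values are made up placeholders.

const example: ClassifiedFormat<string, { website: string }> = {
  id: "credential-1", // null when the key stores a single value rather than a record
  secret: "<serialized EncString of the secret-classified fields>",
  disclosed: { website: "https://example.com" },
};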
@@ -1,21 +0,0 @@
import { Jsonify } from "type-fest";

/** A packing strategy that packs data into a string.
 */
export abstract class DataPacker {
  /**
   * Packs value into a string format.
   * @type {Data} is the type of data being protected.
   * @param value is packed into the string
   * @returns the packed string
   */
  abstract pack<Data>(value: Jsonify<Data>): string;

  /** Unpacks a string produced by pack.
   * @param packedValue is the string to unpack
   * @type {Data} is the type of data being protected.
   * @returns the data stored within the secret.
   * @throws when `packedValue` has an invalid format.
   */
  abstract unpack<Data>(packedValue: string): Jsonify<Data>;
}

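To illustrate the contract, a bare-bones packer that round-trips values without any padding. It is purely illustrative and not part of this commit; the packer the commit actually ships is the PaddedDataPacker below.

class PassthroughDataPacker extends DataPacker {
  // pack: serialize the JSON-safe value directly
  pack<Data>(value: Jsonify<Data>): string {
    return JSON.stringify(value);
  }

  // unpack: parse it back; JSON.parse throws on malformed input, satisfying @throws
  unpack<Data>(packedValue: string): Jsonify<Data> {
    return JSON.parse(packedValue) as Jsonify<Data>;
  }
}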
@@ -1,91 +0,0 @@
import { PaddedDataPacker } from "./padded-data-packer";

describe("PaddedDataPacker", () => {
  describe("pack", () => {
    it("should pack a stringified value", () => {
      const dataPacker = new PaddedDataPacker(32);

      const packed = dataPacker.pack({ foo: true });

      expect(packed).toEqual("32|eyJmb28iOnRydWV9|000000000000");
    });

    it("should pad to a multiple of the frame size", () => {
      const dataPacker = new PaddedDataPacker(8);

      const packed = dataPacker.pack({ foo: true });
      expect(packed.length).toEqual(24);
    });

    it("should pad to a multiple of the frame size", () => {
      const dataPacker = new PaddedDataPacker(8);

      const packed = dataPacker.pack({ foo: true });
      expect(packed.length).toEqual(24);
    });
  });

  describe("unpack", () => {
    it("should unpack a value with the same frame size", () => {
      const dataPacker = new PaddedDataPacker(32);

      const unpacked = dataPacker.unpack("32|eyJmb28iOnRydWV9|000000000000");

      expect(unpacked).toEqual({ foo: true });
    });

    it("should unpack a value with a different frame size", () => {
      const dataPacker = new PaddedDataPacker(32);

      const unpacked = dataPacker.unpack("24|eyJmb28iOnRydWV9|0000");

      expect(unpacked).toEqual({ foo: true });
    });

    it("should unpack a value whose length is a multiple of the frame size", () => {
      const dataPacker = new PaddedDataPacker(32);

      const unpacked = dataPacker.unpack("16|eyJmb28iOnRydWV9|000000000000");

      expect(unpacked).toEqual({ foo: true });
    });

    it("should throw an error when the frame size is missing", () => {
      const dataPacker = new PaddedDataPacker(512);
      const packed = `|eyJmb28iOnRydWV9|${"0".repeat(16)}`;

      expect(() => dataPacker.unpack(packed)).toThrow("missing frame size");
    });

    it("should throw an error when the length is not a multiple of the frame size", () => {
      const dataPacker = new PaddedDataPacker(16);
      const packed = "16|eyJmb28iOnRydWV9|0";

      expect(() => dataPacker.unpack(packed)).toThrow("invalid length");
    });

    it("should throw an error when the padding divider is missing", () => {
      const dataPacker = new PaddedDataPacker(16);
      const packed = "16|eyJmb28iOnRydWV90000000000000";

      expect(() => dataPacker.unpack(packed)).toThrow("missing json object");
    });

    it("should throw an error when the padding contains a non-0 character", () => {
      const dataPacker = new PaddedDataPacker(16);
      const packed = "16|eyJmb28iOnRydWV9|000000000001";

      expect(() => dataPacker.unpack(packed)).toThrow("invalid padding");
    });
  });

  it("should unpack a packed JSON-literal value", () => {
    const dataPacker = new PaddedDataPacker(8);
    const input = { foo: true };

    const packed = dataPacker.pack(input);
    const unpacked = dataPacker.unpack(packed);

    expect(unpacked).toEqual(input);
  });
});

@@ -1,95 +0,0 @@
import { Jsonify } from "type-fest";

import { Utils } from "../../../platform/misc/utils";

import { DataPacker as DataPackerAbstraction } from "./data-packer.abstraction";

const DATA_PACKING = Object.freeze({
  /** The character to use for padding. */
  padding: "0",

  /** The character dividing packed data. */
  divider: "|",

  /** A regular expression for detecting invalid padding. When the character
   * changes, this should be updated to include the new padding pattern.
   */
  hasInvalidPadding: /[^0]/,
});

/** A packing strategy that conceals the length of secret data by padding it
 * to a multiple of the frame size.
 * @example
 * // packed === "24|e2Zvbzp0cnVlfQ==|0000"
 * const packer = new PaddedDataPacker(24);
 * const packed = packer.pack({ foo: true });
 */
export class PaddedDataPacker extends DataPackerAbstraction {
  /** Instantiates the padded data packer
   * @param frameSize The size of the dataframe used to pad encrypted values.
   */
  constructor(private readonly frameSize: number) {
    super();
  }

  /**
   * Packs value into a string format that conceals the length by obscuring it
   * with the frameSize.
   * @see {@link DataPackerAbstraction.unpack}
   */
  pack<Secret>(value: Jsonify<Secret>) {
    // encode the value
    const json = JSON.stringify(value);
    const b64 = Utils.fromUtf8ToB64(json);

    // calculate packing metadata
    const frameSize = JSON.stringify(this.frameSize);
    const separatorLength = 2 * DATA_PACKING.divider.length; // there are 2 separators
    const payloadLength = b64.length + frameSize.length + separatorLength;
    const paddingLength = this.frameSize - (payloadLength % this.frameSize);

    // pack the data, thereby concealing its length
    const padding = DATA_PACKING.padding.repeat(paddingLength);
    const packed = `${frameSize}|${b64}|${padding}`;

    return packed;
  }

  /** {@link DataPackerAbstraction.unpack} */
  unpack<Secret>(secret: string): Jsonify<Secret> {
    // frame size is stored before the JSON payload in base 10
    const frameEndIndex = secret.indexOf(DATA_PACKING.divider);
    if (frameEndIndex < 1) {
      throw new Error("missing frame size");
    }
    const frameSize = parseInt(secret.slice(0, frameEndIndex), 10);
    const dataStartIndex = frameEndIndex + 1;

    // The decrypted string should be a multiple of the frame length
    if (secret.length % frameSize > 0) {
      throw new Error("invalid length");
    }

    // encoded data terminates with the divider, followed by the padding character
    const dataEndIndex = secret.lastIndexOf(DATA_PACKING.divider);
    if (dataEndIndex == frameEndIndex) {
      throw new Error("missing json object");
    }
    const paddingStartIndex = dataEndIndex + 1;

    // If the padding contains invalid padding characters then the padding could be used
    // as a side channel for arbitrary data.
    if (secret.slice(paddingStartIndex).match(DATA_PACKING.hasInvalidPadding)) {
      throw new Error("invalid padding");
    }

    // remove frame size and padding
    const b64 = secret.slice(dataStartIndex, dataEndIndex);

    // unpack the stored data
    const json = Utils.fromB64ToUtf8(b64);
    const unpacked = JSON.parse(json);

    return unpacked;
  }
}

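Working through the arithmetic in pack() for the frame size the spec above uses:

// pack({ foo: true }) with frameSize = 32
// json    = '{"foo":true}'  ->  b64 "eyJmb28iOnRydWV9" (16 chars)
// payload = 16 (b64) + 2 ("32") + 2 (two "|" dividers) = 20
// padding = 32 - (20 % 32) = 12 zero characters
// packed  = "32|eyJmb28iOnRydWV9|000000000000"  (length 32, a multiple of the frame size)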
@@ -1,177 +0,0 @@
import { SecretClassifier } from "./secret-classifier";

describe("SecretClassifier", () => {
  describe("forSecret", () => {
    it("classifies a property as secret by default", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean }>();

      expect(classifier.disclosed).toEqual([]);
      expect(classifier.excluded).toEqual([]);
    });
  });

  describe("disclose", () => {
    it("adds a property to the disclosed list", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean }>();

      const withDisclosedFoo = classifier.disclose("foo");

      expect(withDisclosedFoo.disclosed).toEqual(["foo"]);
      expect(withDisclosedFoo.excluded).toEqual([]);
    });

    it("chains calls with excluded", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>();

      const withDisclosedFoo = classifier.disclose("foo").exclude("bar");

      expect(withDisclosedFoo.disclosed).toEqual(["foo"]);
      expect(withDisclosedFoo.excluded).toEqual(["bar"]);
    });

    it("returns a new classifier", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean }>();

      const withDisclosedFoo = classifier.disclose("foo");

      expect(withDisclosedFoo).not.toBe(classifier);
    });
  });

  describe("exclude", () => {
    it("adds a property to the excluded list", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean }>();

      const withExcludedFoo = classifier.exclude("foo");

      expect(withExcludedFoo.disclosed).toEqual([]);
      expect(withExcludedFoo.excluded).toEqual(["foo"]);
    });

    it("chains calls with disclose", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>();

      const withExcludedFoo = classifier.exclude("foo").disclose("bar");

      expect(withExcludedFoo.disclosed).toEqual(["bar"]);
      expect(withExcludedFoo.excluded).toEqual(["foo"]);
    });

    it("returns a new classifier", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean }>();

      const withExcludedFoo = classifier.exclude("foo");

      expect(withExcludedFoo).not.toBe(classifier);
    });
  });

  describe("classify", () => {
    it("partitions disclosed properties into the disclosed member", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>().disclose(
        "foo",
      );

      const classified = classifier.classify({ foo: true, bar: false });

      expect(classified.disclosed).toEqual({ foo: true });
    });

    it("jsonifies its outputs", () => {
      const classifier = SecretClassifier.allSecret<{ foo: Date; bar: Date }>().disclose("foo");

      const classified = classifier.classify({ foo: new Date(100), bar: new Date(100) });

      expect(classified.disclosed).toEqual({ foo: "1970-01-01T00:00:00.100Z" });
      expect(classified.secret).toEqual({ bar: "1970-01-01T00:00:00.100Z" });
    });

    it("deletes disclosed properties from the secret member", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>().disclose(
        "foo",
      );

      const classified = classifier.classify({ foo: true, bar: false });

      expect(classified.secret).toEqual({ bar: false });
    });

    it("deletes excluded properties from the secret member", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>().exclude(
        "foo",
      );

      const classified = classifier.classify({ foo: true, bar: false });

      expect(classified.secret).toEqual({ bar: false });
    });

    it("excludes excluded properties from the disclosed member", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>().exclude(
        "foo",
      );

      const classified = classifier.classify({ foo: true, bar: false });

      expect(classified.disclosed).toEqual({});
    });
  });

  describe("declassify", () => {
    it("merges disclosed and secret members", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>().disclose(
        "foo",
      );

      const declassified = classifier.declassify({ foo: true }, { bar: false });

      expect(declassified).toEqual({ foo: true, bar: false });
    });

    it("omits unknown disclosed members", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean }>().disclose("foo");

      // `any` is required here because Typescript knows `bar` is not a disclosed member,
      // but the feature assumes the disclosed data bypassed the typechecker (e.g. someone
      // is trying to clobber secret data.)
      const declassified = classifier.declassify({ foo: true, bar: false } as any, {});

      expect(declassified).toEqual({ foo: true });
    });

    it("clobbers disclosed members with secret members", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>().disclose(
        "foo",
      );

      // `any` is required here because `declassify` knows `bar` is supposed to be public,
      // but the feature assumes the secret data bypassed the typechecker (e.g. migrated data)
      const declassified = classifier.declassify({ foo: true }, { foo: false, bar: false } as any);

      expect(declassified).toEqual({ foo: false, bar: false });
    });

    it("omits excluded secret members", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean; bar: boolean }>().exclude(
        "foo",
      );

      // `any` is required here because `declassify` knows `bar` isn't allowed, but the
      // feature assumes the data bypassed the typechecker (e.g. omitted legacy data).
      const declassified = classifier.declassify({}, { foo: false, bar: false } as any);

      expect(declassified).toEqual({ bar: false });
    });

    it("returns a new object", () => {
      const classifier = SecretClassifier.allSecret<{ foo: boolean }>();

      const disclosed = {};
      const secret = { foo: false };
      const declassified = classifier.declassify(disclosed, secret);

      expect(declassified).not.toBe(disclosed);
      expect(declassified).not.toBe(secret);
    });
  });
});

@@ -1,139 +0,0 @@
import { Jsonify } from "type-fest";

/** Classifies an object's JSON-serializable data by property into
 * 3 categories:
 * * Disclosed data MAY be stored in plaintext.
 * * Excluded data MUST NOT be saved.
 * * The remaining data is secret and MUST be stored using encryption.
 *
 * This type should not be used to classify functions.
 * Data that cannot be serialized by JSON.stringify() should
 * be excluded.
 */
export class SecretClassifier<Plaintext extends object, Disclosed, Secret> {
  private constructor(
    disclosed: readonly (keyof Jsonify<Disclosed> & keyof Jsonify<Plaintext>)[],
    excluded: readonly (keyof Plaintext)[],
  ) {
    this.disclosed = disclosed;
    this.excluded = excluded;
  }

  /** lists the disclosed properties. */
  readonly disclosed: readonly (keyof Jsonify<Disclosed> & keyof Jsonify<Plaintext>)[];

  /** lists the excluded properties. */
  readonly excluded: readonly (keyof Plaintext)[];

  /** Creates a classifier where all properties are secret.
   * @type {T} The type of secret being classified.
   */
  static allSecret<T extends object>() {
    const disclosed = Object.freeze([]);
    const excluded = Object.freeze([]);
    return new SecretClassifier<T, Record<keyof T, never>, T>(disclosed, excluded);
  }

  /** Classify a property as disclosed.
   * @type {PropertyName} Available secrets to disclose.
   * @param disclose The property name to disclose.
   * @returns a new classifier
   */
  disclose<const PropertyName extends keyof Jsonify<Secret>>(disclose: PropertyName) {
    // move the property from the secret type to the disclose type
    type NewDisclosed = Disclosed | Record<PropertyName, Jsonify<Secret>[PropertyName]>;
    type NewSecret = Omit<Secret, PropertyName>;

    // update the fluent interface
    const newDisclosed = [...this.disclosed, disclose] as (keyof Jsonify<NewDisclosed> &
      keyof Jsonify<Plaintext>)[];
    const classifier = new SecretClassifier<Plaintext, NewDisclosed, NewSecret>(
      // since `NewDisclosed` is opaque to the type checker, it's necessary
      // to assert the type of the array here.
      Object.freeze(newDisclosed),
      this.excluded,
    );

    return classifier;
  }

  /** Classify a property as excluded.
   * @type {PropertyName} Available secrets to exclude.
   * @param exclude The property name to exclude.
   * @returns a new classifier
   */
  exclude<const PropertyName extends keyof Secret>(excludedPropertyName: PropertyName) {
    // remove the property from the secret type
    type NewConfidential = Omit<Secret, PropertyName>;

    // update the fluent interface
    const newExcluded = [...this.excluded, excludedPropertyName] as (keyof Plaintext)[];
    const classifier = new SecretClassifier<Plaintext, Disclosed, NewConfidential>(
      this.disclosed,
      Object.freeze(newExcluded),
    );

    return classifier;
  }

  /** Partitions `secret` into its disclosed properties and secret properties.
   * @param value The object to partition
   * @returns an object that classifies secrets.
   * The `disclosed` member is new and contains disclosed properties.
   * The `secret` member is a copy of the secret parameter, including its
   * prototype, with all disclosed and excluded properties deleted.
   */
  classify(value: Plaintext): { disclosed: Jsonify<Disclosed>; secret: Jsonify<Secret> } {
    // need to JSONify during classification because the prototype is almost guaranteed
    // to be invalid when this method deletes arbitrary properties.
    const secret = JSON.parse(JSON.stringify(value)) as Record<keyof Plaintext, unknown>;

    for (const excludedProp of this.excluded) {
      delete secret[excludedProp];
    }

    const disclosed: Record<PropertyKey, unknown> = {};
    for (const disclosedProp of this.disclosed) {
      // disclosedProp is known to be a subset of the keys of `Plaintext`, so these
      // type assertions are accurate.
      // FIXME: prove it to the compiler
      disclosed[disclosedProp] = secret[disclosedProp as keyof Plaintext];
      delete secret[disclosedProp as keyof Plaintext];
    }

    return {
      disclosed: disclosed as Jsonify<Disclosed>,
      secret: secret as Jsonify<Secret>,
    };
  }

  /** Merges the properties of `secret` and `disclosed`. When `secret` and
   * `disclosed` contain the same property, the `secret` property overrides
   * the `disclosed` property.
   * @param disclosed an object whose disclosed properties are merged into
   * the output. Unknown properties are ignored.
   * @param secret an object whose properties are merged into the output.
   * Excluded properties are ignored. Unknown properties are retained.
   * @returns a new object containing the merged data.
   */
  // Declassified data is always jsonified--the purpose of classifying it is to Jsonify it,
  // which causes type conversions.
  declassify(disclosed: Jsonify<Disclosed>, secret: Jsonify<Secret>): Jsonify<Plaintext> {
    // remove unknown keys from `disclosed` to prevent any old edit
    // of plaintext data from being laundered through declassification.
    const cleaned = {} as Partial<Jsonify<Disclosed>>;
    for (const disclosedProp of this.disclosed) {
      cleaned[disclosedProp] = disclosed[disclosedProp];
    }

    // merge decrypted into cleaned so that secret data clobbers public data
    const merged: any = Object.assign(cleaned, secret);

    // delete excluded props
    for (const excludedProp of this.excluded) {
      delete merged[excludedProp];
    }

    return merged as Jsonify<Plaintext>;
  }
}

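A short sketch of the fluent API above, using made-up credential fields; the type and values are illustrative, not from the diff.

type GeneratedEntry = { password: string; website: string; traceId: string };

// everything is secret unless disclosed; traceId is never persisted
const entryClassifier = SecretClassifier.allSecret<GeneratedEntry>()
  .disclose("website")
  .exclude("traceId");

const { disclosed, secret } = entryClassifier.classify({
  password: "hunter2",
  website: "https://example.com",
  traceId: "abc123",
});
// disclosed -> { website: "https://example.com" }  (may be stored in plaintext)
// secret    -> { password: "hunter2" }             (must be stored encrypted)

// declassify reverses the split once the secret half has been decrypted
const roundTripped = entryClassifier.declassify(disclosed, secret);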
@@ -1,210 +0,0 @@
import { GENERATOR_DISK, UserKeyDefinitionOptions } from "../../../platform/state";

import { SecretClassifier } from "./secret-classifier";
import { SecretKeyDefinition } from "./secret-key-definition";

describe("SecretKeyDefinition", () => {
  const classifier = SecretClassifier.allSecret<{ foo: boolean }>();
  const options: UserKeyDefinitionOptions<any> = { deserializer: (v: any) => v, clearOn: [] };

  it("toEncryptedStateKey returns a key", () => {
    const expectedOptions: UserKeyDefinitionOptions<any> = {
      deserializer: (v: any) => v,
      cleanupDelayMs: 100,
      clearOn: ["logout", "lock"],
    };
    const definition = SecretKeyDefinition.value(
      GENERATOR_DISK,
      "key",
      classifier,
      expectedOptions,
    );
    const expectedDeserializerResult = {} as any;

    const result = definition.toEncryptedStateKey();
    const deserializerResult = result.deserializer(expectedDeserializerResult);

    expect(result.stateDefinition).toEqual(GENERATOR_DISK);
    expect(result.key).toBe("key");
    expect(result.cleanupDelayMs).toBe(expectedOptions.cleanupDelayMs);
    expect(result.clearOn).toEqual(expectedOptions.clearOn);
    expect(deserializerResult).toBe(expectedDeserializerResult);
  });

  describe("value", () => {
    it("returns an initialized SecretKeyDefinition", () => {
      const definition = SecretKeyDefinition.value(GENERATOR_DISK, "key", classifier, options);

      expect(definition).toBeInstanceOf(SecretKeyDefinition);
      expect(definition.stateDefinition).toBe(GENERATOR_DISK);
      expect(definition.key).toBe("key");
      expect(definition.classifier).toBe(classifier);
    });

    it("deconstruct returns an array with a single item", () => {
      const definition = SecretKeyDefinition.value(GENERATOR_DISK, "key", classifier, options);
      const value = { foo: true };

      const result = definition.deconstruct(value);

      expect(result).toEqual([[null, value]]);
    });

    it("reconstruct returns the inner value", () => {
      const definition = SecretKeyDefinition.value(GENERATOR_DISK, "key", classifier, options);
      const value = { foo: true };

      const result = definition.reconstruct([[null, value]]);

      expect(result).toBe(value);
    });
  });

  describe("array", () => {
    it("returns an initialized SecretKeyDefinition", () => {
      const definition = SecretKeyDefinition.array(GENERATOR_DISK, "key", classifier, options);

      expect(definition).toBeInstanceOf(SecretKeyDefinition);
      expect(definition.stateDefinition).toBe(GENERATOR_DISK);
      expect(definition.key).toBe("key");
      expect(definition.classifier).toBe(classifier);
    });

    describe("deconstruct", () => {
      it("over a 0-length array returns an empty array", () => {
        const definition = SecretKeyDefinition.array(GENERATOR_DISK, "key", classifier, options);
        const value: { foo: boolean }[] = [];

        const result = definition.deconstruct(value);

        expect(result).toStrictEqual([]);
      });

      it("over a 1-length array returns a pair of indices and values", () => {
        const definition = SecretKeyDefinition.array(GENERATOR_DISK, "key", classifier, options);
        const value = [{ foo: true }];

        const result = definition.deconstruct(value);

        expect(result).toStrictEqual([[0, value[0]]]);
      });

      it("over an n-length array returns n pairs of indices and values", () => {
        const definition = SecretKeyDefinition.array(GENERATOR_DISK, "key", classifier, options);
        const value = [{ foo: true }, { foo: false }];

        const result = definition.deconstruct(value);

        expect(result).toStrictEqual([
          [0, value[0]],
          [1, value[1]],
        ]);
      });
    });

    describe("reconstruct", () => {
      it("over a 0-length array of entries returns an empty array", () => {
        const definition = SecretKeyDefinition.array(GENERATOR_DISK, "key", classifier, options);

        const result = definition.reconstruct([]);

        expect(result).toStrictEqual([]);
      });

      it("over a 1-length array of entries returns a 1-length array", () => {
        const definition = SecretKeyDefinition.array(GENERATOR_DISK, "key", classifier, options);
        const value = [{ foo: true }];

        const result = definition.reconstruct([[0, value[0]]]);

        expect(result).toStrictEqual(value);
      });

      it("over an n-length array of entries returns an n-length array", () => {
        const definition = SecretKeyDefinition.array(GENERATOR_DISK, "key", classifier, options);
        const value = [{ foo: true }, { foo: false }];

        const result = definition.reconstruct([
          [0, value[0]],
          [1, value[1]],
        ]);

        expect(result).toStrictEqual(value);
      });
    });
  });

  describe("record", () => {
    it("returns an initialized SecretKeyDefinition", () => {
      const definition = SecretKeyDefinition.record(GENERATOR_DISK, "key", classifier, options);

      expect(definition).toBeInstanceOf(SecretKeyDefinition);
      expect(definition.stateDefinition).toBe(GENERATOR_DISK);
      expect(definition.key).toBe("key");
      expect(definition.classifier).toBe(classifier);
    });

    describe("deconstruct", () => {
      it("over a 0-key record returns an empty array", () => {
        const definition = SecretKeyDefinition.record(GENERATOR_DISK, "key", classifier, options);
        const value: Record<string, { foo: boolean }> = {};

        const result = definition.deconstruct(value);

        expect(result).toStrictEqual([]);
      });

      it("over a 1-key record returns a pair of indices and values", () => {
        const definition = SecretKeyDefinition.record(GENERATOR_DISK, "key", classifier, options);
        const value = { foo: { foo: true } };

        const result = definition.deconstruct(value);

        expect(result).toStrictEqual([["foo", value["foo"]]]);
      });

      it("over an n-key record returns n pairs of indices and values", () => {
        const definition = SecretKeyDefinition.record(GENERATOR_DISK, "key", classifier, options);
        const value = { foo: { foo: true }, bar: { foo: false } };

        const result = definition.deconstruct(value);

        expect(result).toStrictEqual([
          ["foo", value["foo"]],
          ["bar", value["bar"]],
        ]);
      });
    });

    describe("reconstruct", () => {
      it("over a 0-key record of entries returns an empty record", () => {
        const definition = SecretKeyDefinition.record(GENERATOR_DISK, "key", classifier, options);

        const result = definition.reconstruct([]);

        expect(result).toStrictEqual({});
      });

      it("over a 1-key record of entries returns a 1-length record", () => {
        const definition = SecretKeyDefinition.record(GENERATOR_DISK, "key", classifier, options);
        const value = { foo: { foo: true } };

        const result = definition.reconstruct([["foo", value["foo"]]]);

        expect(result).toStrictEqual(value);
      });

      it("over an n-key record of entries returns an n-length record", () => {
        const definition = SecretKeyDefinition.record(GENERATOR_DISK, "key", classifier, options);
        const value = { foo: { foo: true }, bar: { foo: false } };

        const result = definition.reconstruct([
          ["foo", value["foo"]],
          ["bar", value["bar"]],
        ]);

        expect(result).toStrictEqual(value);
      });
    });
  });
});
@@ -1,109 +0,0 @@
import { UserKeyDefinitionOptions, UserKeyDefinition } from "../../../platform/state";
// eslint-disable-next-line -- `StateDefinition` used as an argument
import { StateDefinition } from "../../../platform/state/state-definition";
import { ClassifiedFormat } from "./classified-format";
import { SecretClassifier } from "./secret-classifier";

/** Encryption and storage settings for data stored by a `SecretState`.
 */
export class SecretKeyDefinition<Outer, Id, Inner extends object, Disclosed, Secret> {
  private constructor(
    readonly stateDefinition: StateDefinition,
    readonly key: string,
    readonly classifier: SecretClassifier<Inner, Disclosed, Secret>,
    readonly options: UserKeyDefinitionOptions<Inner>,
    // type erasure is necessary here because typescript doesn't support
    // higher kinded types that generalize over collections. The invariants
    // needed to make this typesafe are maintained by the static factories.
    readonly deconstruct: (value: any) => [Id, any][],
    readonly reconstruct: ([inners, ids]: (readonly [Id, any])[]) => Outer,
  ) {}

  /** Converts the secret key to the `KeyDefinition` used for secret storage. */
  toEncryptedStateKey() {
    const secretKey = new UserKeyDefinition<ClassifiedFormat<Id, Disclosed>[]>(
      this.stateDefinition,
      this.key,
      {
        cleanupDelayMs: this.options.cleanupDelayMs,
        deserializer: (jsonValue) => jsonValue as ClassifiedFormat<Id, Disclosed>[],
        // Clear encrypted state on logout
        clearOn: this.options.clearOn,
      },
    );

    return secretKey;
  }

  /**
   * Define a secret state for a single value
   * @param stateDefinition The domain of the secret's durable state.
   * @param key Domain key that identifies the stored value. This key must not be reused
   *    in any capacity.
   * @param classifier Partitions the value into encrypted, discarded, and public data.
   * @param options Configures the operation of the secret state.
   */
  static value<Value extends object, Disclosed, Secret>(
    stateDefinition: StateDefinition,
    key: string,
    classifier: SecretClassifier<Value, Disclosed, Secret>,
    options: UserKeyDefinitionOptions<Value>,
  ) {
    return new SecretKeyDefinition<Value, void, Value, Disclosed, Secret>(
      stateDefinition,
      key,
      classifier,
      options,
      (value) => [[null, value]],
      ([[, inner]]) => inner,
    );
  }

  /**
   * Define a secret state for an array of values. Each item is encrypted separately.
   * @param stateDefinition The domain of the secret's durable state.
   * @param key Domain key that identifies the stored items. This key must not be reused
   *    in any capacity.
   * @param classifier Partitions each item into encrypted, discarded, and public data.
   * @param options Configures the operation of the secret state.
   */
  static array<Item extends object, Disclosed, Secret>(
    stateDefinition: StateDefinition,
    key: string,
    classifier: SecretClassifier<Item, Disclosed, Secret>,
    options: UserKeyDefinitionOptions<Item>,
  ) {
    return new SecretKeyDefinition<Item[], number, Item, Disclosed, Secret>(
      stateDefinition,
      key,
      classifier,
      options,
      (value) => value.map((v: any, id: number) => [id, v]),
      (values) => values.map(([, v]) => v),
    );
  }

  /**
   * Define a secret state for a record. Each property is encrypted separately.
   * @param stateDefinition The domain of the secret's durable state.
   * @param key Domain key that identifies the stored properties. This key must not be reused
   *    in any capacity.
   * @param classifier Partitions each property into encrypted, discarded, and public data.
   * @param options Configures the operation of the secret state.
   */
  static record<Item extends object, Disclosed, Secret, Id extends string | number>(
    stateDefinition: StateDefinition,
    key: string,
    classifier: SecretClassifier<Item, Disclosed, Secret>,
    options: UserKeyDefinitionOptions<Item>,
  ) {
    return new SecretKeyDefinition<Record<Id, Item>, Id, Item, Disclosed, Secret>(
      stateDefinition,
      key,
      classifier,
      options,
      (value) => Object.entries(value) as [Id, Item][],
      (values) => Object.fromEntries(values) as Record<Id, Item>,
    );
  }
}
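As an illustrative sketch (the `EXAMPLE_SECRETS` name, the "exampleSecrets" key, and the option values are hypothetical, not part of this changeset), a record definition that encrypts each property of a keyed collection separately might look like:

const EXAMPLE_SECRETS = SecretKeyDefinition.record(
  GENERATOR_DISK,
  "exampleSecrets",
  SecretClassifier.allSecret<{ token: string }>(),
  {
    deserializer: (value) => value,
    clearOn: ["logout"],
  },
);
// EXAMPLE_SECRETS.toEncryptedStateKey() produces the UserKeyDefinition that the
// backing encrypted state is stored under.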
@@ -1,269 +0,0 @@
import { mock } from "jest-mock-extended";
import { firstValueFrom, from } from "rxjs";
import { Jsonify } from "type-fest";

import {
  FakeStateProvider,
  makeEncString,
  mockAccountServiceWith,
  awaitAsync,
} from "../../../../spec";
import { EncString } from "../../../platform/models/domain/enc-string";
import { GENERATOR_DISK } from "../../../platform/state";
import { UserId } from "../../../types/guid";

import { SecretClassifier } from "./secret-classifier";
import { SecretKeyDefinition } from "./secret-key-definition";
import { SecretState } from "./secret-state";
import { UserEncryptor } from "./user-encryptor.abstraction";

type FooBar = { foo: boolean; bar: boolean; date?: Date };
const classifier = SecretClassifier.allSecret<FooBar>();
const options: any = {
  deserializer: (fb: FooBar) => {
    const result: FooBar = { foo: fb.foo, bar: fb.bar };

    if (fb.date) {
      result.date = new Date(fb.date);
    }

    return result;
  },
};
const FOOBAR_VALUE = SecretKeyDefinition.value(GENERATOR_DISK, "fooBar", classifier, options);
const FOOBAR_ARRAY = SecretKeyDefinition.array(GENERATOR_DISK, "fooBar", classifier, options);
const FOOBAR_RECORD = SecretKeyDefinition.record(GENERATOR_DISK, "fooBar", classifier, options);

const SomeUser = "some user" as UserId;

function mockEncryptor<T>(fooBar: T[] = []): UserEncryptor {
  // stores "encrypted values" so that they can be "decrypted" later
  // while allowing the operations to be interleaved.
  const encrypted = new Map<string, Jsonify<FooBar>>(
    fooBar.map((fb) => [toKey(fb as any).encryptedString, toValue(fb)] as const),
  );

  const result = mock<UserEncryptor>({
    encrypt<T>(value: Jsonify<T>, user: UserId) {
      const encString = toKey(value as any);
      encrypted.set(encString.encryptedString, toValue(value));
      return Promise.resolve(encString);
    },
    decrypt(secret: EncString, userId: UserId) {
      const decValue = encrypted.get(secret.encryptedString);
      return Promise.resolve(decValue as any);
    },
  });

  function toKey(value: Jsonify<T>) {
    // `stringify` is only relevant for its uniqueness as a key
    // to `encrypted`.
    return makeEncString(JSON.stringify(value));
  }

  function toValue(value: any) {
    // replace toJSON types with their round-trip equivalents
    return JSON.parse(JSON.stringify(value));
  }

  // typescript pops a false positive about missing `encrypt` and `decrypt`
  // functions, so assert the type manually.
  return result as unknown as UserEncryptor;
}

async function fakeStateProvider() {
  const accountService = mockAccountServiceWith(SomeUser);
  const stateProvider = new FakeStateProvider(accountService);
  return stateProvider;
}

describe("SecretState", () => {
  describe("from", () => {
    it("returns a state store", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();

      const result = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);

      expect(result).toBeInstanceOf(SecretState);
    });
  });

  describe("instance", () => {
    it("userId outputs the user input during construction", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();

      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);

      expect(state.userId).toEqual(SomeUser);
    });

    it("state$ gets a set value", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const value = { foo: true, bar: false };

      await state.update(() => value);
      await awaitAsync();
      const result = await firstValueFrom(state.state$);

      expect(result).toEqual(value);
    });

    it("round-trips json-serializable values", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const value = { foo: true, bar: true, date: new Date(1) };

      await state.update(() => value);
      await awaitAsync();
      const result = await firstValueFrom(state.state$);

      expect(result).toEqual(value);
    });

    it("state$ gets a set array", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_ARRAY, provider, encryptor);
      const array = [
        { foo: true, bar: false, date: new Date(1) },
        { foo: false, bar: true },
      ];

      await state.update(() => array);
      await awaitAsync();
      const result = await firstValueFrom(state.state$);

      expect(result).toStrictEqual(array);
    });

    it("state$ gets a set record", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_RECORD, provider, encryptor);
      const record = {
        baz: { foo: true, bar: false, date: new Date(1) },
        biz: { foo: false, bar: true },
      };

      await state.update(() => record);
      await awaitAsync();
      const result = await firstValueFrom(state.state$);

      expect(result).toStrictEqual(record);
    });

    it("combinedState$ gets a set value with the userId", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const value = { foo: true, bar: false };

      await state.update(() => value);
      await awaitAsync();
      const [userId, result] = await firstValueFrom(state.combinedState$);

      expect(result).toEqual(value);
      expect(userId).toEqual(SomeUser);
    });

    it("gets the last set value", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const initialValue = { foo: true, bar: false };
      const replacementValue = { foo: false, bar: false };

      await state.update(() => initialValue);
      await state.update(() => replacementValue);
      await awaitAsync();
      const result = await firstValueFrom(state.state$);

      expect(result).toEqual(replacementValue);
    });

    it("interprets shouldUpdate option", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const initialValue = { foo: true, bar: false };
      const replacementValue = { foo: false, bar: false };

      await state.update(() => initialValue, { shouldUpdate: () => true });
      await state.update(() => replacementValue, { shouldUpdate: () => false });
      const result = await firstValueFrom(state.state$);

      expect(result).toEqual(initialValue);
    });

    it("sets the state to `null` when `update` returns `null`", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const value = { foo: true, bar: false };

      await state.update(() => value);
      await state.update(() => null);
      await awaitAsync();
      const result = await firstValueFrom(state.state$);

      expect(result).toEqual(null);
    });

    it("sets the state to `null` when `update` returns `undefined`", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const value = { foo: true, bar: false };

      await state.update(() => value);
      await state.update(() => undefined);
      await awaitAsync();
      const result = await firstValueFrom(state.state$);

      expect(result).toEqual(null);
    });

    it("sends rxjs observables into the shouldUpdate method", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const combinedWith$ = from([1]);
      let combinedShouldUpdate = 0;

      await state.update((value) => value, {
        shouldUpdate: (_, combined) => {
          combinedShouldUpdate = combined;
          return true;
        },
        combineLatestWith: combinedWith$,
      });

      expect(combinedShouldUpdate).toEqual(1);
    });

    it("sends rxjs observables into the update method", async () => {
      const provider = await fakeStateProvider();
      const encryptor = mockEncryptor();
      const state = SecretState.from(SomeUser, FOOBAR_VALUE, provider, encryptor);
      const combinedWith$ = from([1]);
      let combinedUpdate = 0;

      await state.update(
        (value, combined) => {
          combinedUpdate = combined;
          return value;
        },
        {
          combineLatestWith: combinedWith$,
        },
      );

      expect(combinedUpdate).toEqual(1);
    });
  });
});
@@ -1,196 +0,0 @@
import { Observable, map, concatMap, share, ReplaySubject, timer } from "rxjs";

import { EncString } from "../../../platform/models/domain/enc-string";
import {
  SingleUserState,
  StateProvider,
  StateUpdateOptions,
  CombinedState,
} from "../../../platform/state";
import { UserId } from "../../../types/guid";

import { ClassifiedFormat } from "./classified-format";
import { SecretKeyDefinition } from "./secret-key-definition";
import { UserEncryptor } from "./user-encryptor.abstraction";

const ONE_MINUTE = 1000 * 60;

/** Stores account-specific secrets protected by a UserKeyEncryptor.
 *
 * @remarks This state store changes the structure of `Plaintext` during
 * storage, and requires user keys to operate. It is incompatible with sync,
 * which expects the disk storage format to be identical to the sync format.
 *
 * DO NOT USE THIS for synchronized data.
 */
export class SecretState<Outer, Id, Plaintext extends object, Disclosed, Secret>
  implements SingleUserState<Outer>
{
  // The constructor is private to avoid creating a circular dependency when
  // wiring the derived and secret states together.
  private constructor(
    private readonly key: SecretKeyDefinition<Outer, Id, Plaintext, Disclosed, Secret>,
    private readonly encryptor: UserEncryptor,
    userId: UserId,
    provider: StateProvider,
  ) {
    // construct the backing store
    this.encryptedState = provider.getUser(userId, key.toEncryptedStateKey());

    // cache plaintext
    this.combinedState$ = this.encryptedState.combinedState$.pipe(
      concatMap(
        async ([userId, state]) => [userId, await this.declassifyAll(state)] as [UserId, Outer],
      ),
      share({
        connector: () => {
          return new ReplaySubject<[UserId, Outer]>(1);
        },
        resetOnRefCountZero: () => timer(key.options.cleanupDelayMs ?? ONE_MINUTE),
      }),
    );

    this.state$ = this.combinedState$.pipe(map(([, state]) => state));
  }

  private readonly encryptedState: SingleUserState<ClassifiedFormat<Id, Disclosed>[]>;

  /** {@link SingleUserState.userId} */
  get userId() {
    return this.encryptedState.userId;
  }

  /** Observes changes to the decrypted secret state. The observer
   * updates after the secret has been recorded to state storage.
   * @returns `undefined` when the account is locked.
   */
  readonly state$: Observable<Outer>;

  /** {@link SingleUserState.combinedState$} */
  readonly combinedState$: Observable<CombinedState<Outer>>;

  /** Creates a secret state bound to an account encryptor. The account must be unlocked
   * when this method is called.
   * @param userId the user to which the secret state is bound.
   * @param key Converts between a declassified secret and its formal type.
   * @param provider constructs state objects.
   * @param encryptor protects `Secret` data.
   * @throws when `key.stateDefinition` is backed by memory storage.
   * @remarks Secrets are written to a secret store as a named tuple. Data classification is
   * determined by the key's classifier. Secret-classification data is jsonified,
   * encrypted, and stored in a `secret` property. Disclosed-classification data is stored
   * in a `disclosed` property. Omitted-classification data is not stored.
   */
  static from<Outer, Id, TFrom extends object, Disclosed, Secret>(
    userId: UserId,
    key: SecretKeyDefinition<Outer, Id, TFrom, Disclosed, Secret>,
    provider: StateProvider,
    encryptor: UserEncryptor,
  ) {
    const secretState = new SecretState(key, encryptor, userId, provider);
    return secretState;
  }

  private async declassifyItem({ id, secret, disclosed }: ClassifiedFormat<Id, Disclosed>) {
    const encrypted = EncString.fromJSON(secret);
    const decrypted = await this.encryptor.decrypt(encrypted, this.encryptedState.userId);

    const declassified = this.key.classifier.declassify(disclosed, decrypted);
    const result = [id, this.key.options.deserializer(declassified)] as const;

    return result;
  }

  private async declassifyAll(data: ClassifiedFormat<Id, Disclosed>[]) {
    // fail fast if there's no value
    if (data === null || data === undefined) {
      return null;
    }

    // decrypt each item
    const decryptTasks = data.map(async (item) => this.declassifyItem(item));

    // reconstruct expected type
    const results = await Promise.all(decryptTasks);
    const result = this.key.reconstruct(results);

    return result;
  }

  private async classifyItem([id, item]: [Id, Plaintext]) {
    const classified = this.key.classifier.classify(item);
    const encrypted = await this.encryptor.encrypt(classified.secret, this.encryptedState.userId);

    // the deserializer in the plaintextState's `derive` configuration always runs, but
    // `encryptedState` is not guaranteed to serialize the data, so it's necessary to
    // round-trip `encrypted` proactively.
    const serialized = {
      id,
      secret: JSON.parse(JSON.stringify(encrypted)),
      disclosed: classified.disclosed,
    } as ClassifiedFormat<Id, Disclosed>;

    return serialized;
  }

  private async classifyAll(data: Outer) {
    // fail fast if there's no value
    if (data === null || data === undefined) {
      return null;
    }

    // convert the object to a list format so that all encrypt and decrypt
    // operations are self-similar
    const deconstructed = this.key.deconstruct(data);

    // encrypt each value individually
    const classifyTasks = deconstructed.map(async (item) => this.classifyItem(item));
    const classified = await Promise.all(classifyTasks);

    return classified;
  }

  /** Updates the secret stored by this state.
   * @param configureState a callback that returns an updated decrypted
   * secret state. The callback receives the state's present value as its
   * first argument and the dependencies listed in `options.combineLatestWith`
   * as its second argument.
   * @param options configures how the update is applied. See {@link StateUpdateOptions}.
   * @returns a promise that resolves with the updated value read from the state.
   * The round-trip encrypts, decrypts, and deserializes the data, producing a new
   * object.
   * @remarks `configureState` must return a JSON-serializable object.
   * If there are properties of your class which are not JSON-serializable,
   * they can be lost when the secret state updates its backing store.
   */
  async update<TCombine>(
    configureState: (state: Outer, dependencies: TCombine) => Outer,
    options: StateUpdateOptions<Outer, TCombine> = null,
  ): Promise<Outer> {
    // read the backing store
    let latestClassified: ClassifiedFormat<Id, Disclosed>[];
    let latestCombined: TCombine;
    await this.encryptedState.update((c) => c, {
      shouldUpdate: (latest, combined) => {
        latestClassified = latest;
        latestCombined = combined;
        return false;
      },
      combineLatestWith: options?.combineLatestWith,
    });

    // exit early if there's no update to apply
    const latestDeclassified = await this.declassifyAll(latestClassified);
    const shouldUpdate = options?.shouldUpdate?.(latestDeclassified, latestCombined) ?? true;
    if (!shouldUpdate) {
      return latestDeclassified;
    }

    // apply the update
    const updatedDeclassified = configureState(latestDeclassified, latestCombined);
    const updatedClassified = await this.classifyAll(updatedDeclassified);
    await this.encryptedState.update(() => updatedClassified);

    return updatedDeclassified;
  }
}
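A minimal usage sketch (illustrative only; `EXAMPLE_SECRETS` is the hypothetical definition sketched earlier, `firstValueFrom` comes from rxjs, and `userId`, `provider`, and `encryptor` are assumed to be supplied by the caller):

async function writeExampleSecret(
  userId: UserId,
  provider: StateProvider,
  encryptor: UserEncryptor,
) {
  const state = SecretState.from(userId, EXAMPLE_SECRETS, provider, encryptor);

  // `update` reads the current decrypted value, applies the callback, then
  // classifies, encrypts, and persists the result.
  await state.update(() => ({ apiToken: { token: "example" } }));

  // `state$` emits the decrypted value after it has been recorded to storage.
  return await firstValueFrom(state.state$);
}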
@@ -1,34 +0,0 @@
import { Jsonify } from "type-fest";

import { EncString } from "../../../platform/models/domain/enc-string";
import { UserId } from "../../../types/guid";

/** A classification strategy that protects a type's secrets with
 * user-specific information. The specific kind of information is
 * determined by the classification strategy.
 */
export abstract class UserEncryptor {
  /** Protects secrets in `secret` with a user-specific key.
   * @param secret the object to protect. This object is mutated during encryption.
   * @param userId identifies the user-specific information used to protect
   * the secret.
   * @returns a promise that resolves to the encrypted secret.
   * @throws If `secret` or `userId` is `null` or `undefined`, the promise rejects with an error.
   */
  abstract encrypt<Secret>(secret: Jsonify<Secret>, userId: UserId): Promise<EncString>;

  /** Combines protected secrets and disclosed data into a type that can be
   * rehydrated into a domain object.
   * @param secret an encrypted JSON payload containing encrypted secrets.
   * @param userId identifies the user-specific information used to protect
   * the secret.
   * @returns a promise that resolves to the raw state. This state *is not* a
   * class. It contains only data that can be round-tripped through JSON,
   * and lacks members such as a prototype or bound functions.
   * @throws If `secret` or `userId` is `null` or `undefined`, the promise
   * rejects with an error.
   */
  abstract decrypt<Secret>(secret: EncString, userId: UserId): Promise<Jsonify<Secret>>;
}
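A round-trip sketch of this contract (illustrative; `encryptor` and `userId` are assumed to come from the caller):

async function roundTrip(encryptor: UserEncryptor, userId: UserId) {
  type Payload = { token: string };
  const payload: Jsonify<Payload> = { token: "example" };

  // implementations are expected to round-trip JSON-safe data for the same user
  const encrypted = await encryptor.encrypt<Payload>(payload, userId);
  const decrypted = await encryptor.decrypt<Payload>(encrypted, userId);

  return decrypted; // deep-equals `payload`
}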
@@ -1,111 +0,0 @@
import { mock } from "jest-mock-extended";

import { CryptoService } from "../../../platform/abstractions/crypto.service";
import { EncryptService } from "../../../platform/abstractions/encrypt.service";
import { EncString } from "../../../platform/models/domain/enc-string";
import { SymmetricCryptoKey } from "../../../platform/models/domain/symmetric-crypto-key";
import { CsprngArray } from "../../../types/csprng";
import { UserId } from "../../../types/guid";
import { UserKey } from "../../../types/key";

import { DataPacker } from "./data-packer.abstraction";
import { UserKeyEncryptor } from "./user-key-encryptor";

describe("UserKeyEncryptor", () => {
  const encryptService = mock<EncryptService>();
  const keyService = mock<CryptoService>();
  const dataPacker = mock<DataPacker>();
  const userKey = new SymmetricCryptoKey(new Uint8Array(64) as CsprngArray) as UserKey;
  const anyUserId = "foo" as UserId;

  beforeEach(() => {
    // The UserKeyEncryptor is, in large part, a facade coordinating a handful of worker
    // objects, so its tests focus on how data flows between components. The defaults rely
    // on this property--that the facade treats its data like opaque objects--to trace
    // the data through several function calls. Should the encryptor interact with the
    // objects themselves, it will break.
    encryptService.encrypt.mockImplementation((p) => Promise.resolve(p as unknown as EncString));
    encryptService.decryptToUtf8.mockImplementation((c) => Promise.resolve(c as unknown as string));
    keyService.getUserKey.mockImplementation(() => Promise.resolve(userKey));
    dataPacker.pack.mockImplementation((v) => v as string);
    dataPacker.unpack.mockImplementation(<T>(v: string) => v as T);
  });

  afterEach(() => {
    jest.resetAllMocks();
  });

  describe("encrypt", () => {
    it("should throw if value was not supplied", async () => {
      const encryptor = new UserKeyEncryptor(encryptService, keyService, dataPacker);

      await expect(encryptor.encrypt<Record<string, never>>(null, anyUserId)).rejects.toThrow(
        "secret cannot be null or undefined",
      );
      await expect(encryptor.encrypt<Record<string, never>>(undefined, anyUserId)).rejects.toThrow(
        "secret cannot be null or undefined",
      );
    });

    it("should throw if userId was not supplied", async () => {
      const encryptor = new UserKeyEncryptor(encryptService, keyService, dataPacker);

      await expect(encryptor.encrypt({}, null)).rejects.toThrow(
        "userId cannot be null or undefined",
      );
      await expect(encryptor.encrypt({}, undefined)).rejects.toThrow(
        "userId cannot be null or undefined",
      );
    });

    it("should encrypt a packed value using the user's key", async () => {
      const encryptor = new UserKeyEncryptor(encryptService, keyService, dataPacker);
      const value = { foo: true };

      const result = await encryptor.encrypt(value, anyUserId);

      // these are data flow expectations; the operations are all pass-through mocks
      expect(keyService.getUserKey).toHaveBeenCalledWith(anyUserId);
      expect(dataPacker.pack).toHaveBeenCalledWith(value);
      expect(encryptService.encrypt).toHaveBeenCalledWith(value, userKey);
      expect(result).toBe(value);
    });
  });

  describe("decrypt", () => {
    it("should throw if secret was not supplied", async () => {
      const encryptor = new UserKeyEncryptor(encryptService, keyService, dataPacker);

      await expect(encryptor.decrypt(null, anyUserId)).rejects.toThrow(
        "secret cannot be null or undefined",
      );
      await expect(encryptor.decrypt(undefined, anyUserId)).rejects.toThrow(
        "secret cannot be null or undefined",
      );
    });

    it("should throw if userId was not supplied", async () => {
      const encryptor = new UserKeyEncryptor(encryptService, keyService, dataPacker);

      await expect(encryptor.decrypt({} as any, null)).rejects.toThrow(
        "userId cannot be null or undefined",
      );
      await expect(encryptor.decrypt({} as any, undefined)).rejects.toThrow(
        "userId cannot be null or undefined",
      );
    });

    it("should declassify a decrypted packed value using the user's key", async () => {
      const encryptor = new UserKeyEncryptor(encryptService, keyService, dataPacker);
      const secret = "encrypted" as any;

      const result = await encryptor.decrypt(secret, anyUserId);

      // these are data flow expectations; the operations are all pass-through mocks
      expect(keyService.getUserKey).toHaveBeenCalledWith(anyUserId);
      expect(encryptService.decryptToUtf8).toHaveBeenCalledWith(secret, userKey);
      expect(dataPacker.unpack).toHaveBeenCalledWith(secret);
      expect(result).toBe(secret);
    });
  });
});
@@ -1,66 +0,0 @@
import { Jsonify } from "type-fest";

import { CryptoService } from "../../../platform/abstractions/crypto.service";
import { EncryptService } from "../../../platform/abstractions/encrypt.service";
import { EncString } from "../../../platform/models/domain/enc-string";
import { UserId } from "../../../types/guid";

import { DataPacker } from "./data-packer.abstraction";
import { UserEncryptor } from "./user-encryptor.abstraction";

/** A classification strategy that protects a type's secrets by encrypting them
 * with a `UserKey`
 */
export class UserKeyEncryptor extends UserEncryptor {
  /** Instantiates the encryptor
   * @param encryptService protects properties of `Secret`.
   * @param keyService looks up the user key when protecting data.
   * @param dataPacker packs and unpacks data classified as secrets.
   */
  constructor(
    private readonly encryptService: EncryptService,
    private readonly keyService: CryptoService,
    private readonly dataPacker: DataPacker,
  ) {
    super();
  }

  /** {@link UserEncryptor.encrypt} */
  async encrypt<Secret>(secret: Jsonify<Secret>, userId: UserId): Promise<EncString> {
    this.assertHasValue("secret", secret);
    this.assertHasValue("userId", userId);

    let packed = this.dataPacker.pack(secret);

    // encrypt the data and drop the key
    let key = await this.keyService.getUserKey(userId);
    const encrypted = await this.encryptService.encrypt(packed, key);
    packed = null;
    key = null;

    return encrypted;
  }

  /** {@link UserEncryptor.decrypt} */
  async decrypt<Secret>(secret: EncString, userId: UserId): Promise<Jsonify<Secret>> {
    this.assertHasValue("secret", secret);
    this.assertHasValue("userId", userId);

    // decrypt the data and drop the key
    let key = await this.keyService.getUserKey(userId);
    let decrypted = await this.encryptService.decryptToUtf8(secret, key);
    key = null;

    // reconstruct TFrom's data
    const unpacked = this.dataPacker.unpack<Secret>(decrypted);
    decrypted = null;

    return unpacked;
  }

  private assertHasValue(name: string, value: any) {
    if (value === undefined || value === null) {
      throw new Error(`${name} cannot be null or undefined`);
    }
  }
}
@@ -11,9 +11,9 @@ import { EncryptService } from "../../../platform/abstractions/encrypt.service";
import { StateProvider } from "../../../platform/state";
import { UserId } from "../../../types/guid";
import { UserKey } from "../../../types/key";
import { BufferedState } from "../../state/buffered-state";
import { DefaultPolicyEvaluator } from "../default-policy-evaluator";
import { DUCK_DUCK_GO_FORWARDER, DUCK_DUCK_GO_BUFFER } from "../key-definitions";
import { BufferedState } from "../state/buffered-state";

import { ForwarderGeneratorStrategy } from "./forwarder-generator-strategy";
import { DefaultDuckDuckGoOptions } from "./forwarders/duck-duck-go";

@@ -5,16 +5,16 @@ import { CryptoService } from "../../../platform/abstractions/crypto.service";
import { EncryptService } from "../../../platform/abstractions/encrypt.service";
import { SingleUserState, StateProvider, UserKeyDefinition } from "../../../platform/state";
import { UserId } from "../../../types/guid";
import { BufferedKeyDefinition } from "../../state/buffered-key-definition";
import { BufferedState } from "../../state/buffered-state";
import { PaddedDataPacker } from "../../state/padded-data-packer";
import { SecretClassifier } from "../../state/secret-classifier";
import { SecretKeyDefinition } from "../../state/secret-key-definition";
import { SecretState } from "../../state/secret-state";
import { UserKeyEncryptor } from "../../state/user-key-encryptor";
import { GeneratorStrategy } from "../abstractions";
import { DefaultPolicyEvaluator } from "../default-policy-evaluator";
import { NoPolicy } from "../no-policy";
import { BufferedKeyDefinition } from "../state/buffered-key-definition";
import { BufferedState } from "../state/buffered-state";
import { PaddedDataPacker } from "../state/padded-data-packer";
import { SecretClassifier } from "../state/secret-classifier";
import { SecretKeyDefinition } from "../state/secret-key-definition";
import { SecretState } from "../state/secret-state";
import { UserKeyEncryptor } from "../state/user-key-encryptor";

import { ApiOptions } from "./options/forwarder-options";