diff --git a/jslib/common/src/abstractions/api.service.ts b/jslib/common/src/abstractions/api.service.ts
index e091a7aa..4ebbc550 100644
--- a/jslib/common/src/abstractions/api.service.ts
+++ b/jslib/common/src/abstractions/api.service.ts
@@ -2,9 +2,9 @@ import { ApiTokenRequest } from "../models/request/identityToken/apiTokenRequest
 import { PasswordTokenRequest } from "../models/request/identityToken/passwordTokenRequest";
 import { SsoTokenRequest } from "../models/request/identityToken/ssoTokenRequest";
 import { OrganizationImportRequest } from "../models/request/organizationImportRequest";
-import { IdentityCaptchaResponse } from '../models/response/identityCaptchaResponse';
-import { IdentityTokenResponse } from '../models/response/identityTokenResponse';
-import { IdentityTwoFactorResponse } from '../models/response/identityTwoFactorResponse';
+import { IdentityCaptchaResponse } from "../models/response/identityCaptchaResponse";
+import { IdentityTokenResponse } from "../models/response/identityTokenResponse";
+import { IdentityTwoFactorResponse } from "../models/response/identityTwoFactorResponse";

 export abstract class ApiService {
   postIdentityToken: (
diff --git a/src/abstractions/directory-factory.service.ts b/src/abstractions/directory-factory.service.ts
new file mode 100644
index 00000000..90d2f674
--- /dev/null
+++ b/src/abstractions/directory-factory.service.ts
@@ -0,0 +1,6 @@
+import { DirectoryType } from "@/src/enums/directoryType";
+import { IDirectoryService } from "@/src/services/directory.service";
+
+export abstract class DirectoryFactoryService {
+  abstract createService(type: DirectoryType): IDirectoryService;
+}
diff --git a/src/abstractions/request-builder.service.ts b/src/abstractions/request-builder.service.ts
new file mode 100644
index 00000000..1edce99b
--- /dev/null
+++ b/src/abstractions/request-builder.service.ts
@@ -0,0 +1,13 @@
+import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
+
+import { GroupEntry } from "@/src/models/groupEntry";
+import { UserEntry } from "@/src/models/userEntry";
+
+export abstract class RequestBuilder {
+  buildRequest: (
+    groups: GroupEntry[],
+    users: UserEntry[],
+    removeDisabled: boolean,
+    overwriteExisting: boolean,
+  ) => OrganizationImportRequest[];
+}
diff --git a/src/app/services/services.module.ts b/src/app/services/services.module.ts
index a3b219b2..9457980e 100644
--- a/src/app/services/services.module.ts
+++ b/src/app/services/services.module.ts
@@ -25,6 +25,11 @@ import { ElectronRendererStorageService } from "@/jslib/electron/src/services/el
 import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
 import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";

+import { DirectoryFactoryService } from "@/src/abstractions/directory-factory.service";
+import { BatchRequestBuilder } from "@/src/services/batch-request-builder";
+import { DefaultDirectoryFactoryService } from "@/src/services/directory-factory.service";
+import { SingleRequestBuilder } from "@/src/services/single-request-builder";
+
 import { AuthService as AuthServiceAbstraction } from "../../abstractions/auth.service";
 import { StateService as StateServiceAbstraction } from "../../abstractions/state.service";
 import { Account } from "../../models/account";
@@ -168,13 +173,15 @@ export function initFactory(
       provide: SyncService,
       useClass: SyncService,
       deps: [
-        LogServiceAbstraction,
         CryptoFunctionServiceAbstraction,
         ApiServiceAbstraction,
         MessagingServiceAbstraction,
         I18nServiceAbstraction,
         EnvironmentServiceAbstraction,
         StateServiceAbstraction,
+        BatchRequestBuilder,
+        SingleRequestBuilder,
+        DirectoryFactoryService,
       ],
     }),
     safeProvider(AuthGuardService),
@@ -215,6 +222,19 @@ export function initFactory(
       StateMigrationServiceAbstraction,
     ],
   }),
+    safeProvider({
+      provide: SingleRequestBuilder,
+      deps: [],
+    }),
+    safeProvider({
+      provide: BatchRequestBuilder,
+      deps: [],
+    }),
+    safeProvider({
+      provide: DirectoryFactoryService,
+      useClass: DefaultDirectoryFactoryService,
+      deps: [LogServiceAbstraction, I18nServiceAbstraction, StateServiceAbstraction],
+    }),
   ] satisfies SafeProvider[],
 })
 export class ServicesModule {}
diff --git a/src/bwdc.ts b/src/bwdc.ts
index 84abf65b..afa06430 100644
--- a/src/bwdc.ts
+++ b/src/bwdc.ts
@@ -17,12 +17,16 @@ import { ConsoleLogService } from "@/jslib/node/src/cli/services/consoleLog.serv
 import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
 import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";

+import { DirectoryFactoryService } from "./abstractions/directory-factory.service";
 import { Account } from "./models/account";
 import { Program } from "./program";
 import { AuthService } from "./services/auth.service";
+import { BatchRequestBuilder } from "./services/batch-request-builder";
+import { DefaultDirectoryFactoryService } from "./services/directory-factory.service";
 import { I18nService } from "./services/i18n.service";
 import { KeytarSecureStorageService } from "./services/keytarSecureStorage.service";
 import { LowdbStorageService } from "./services/lowdbStorage.service";
+import { SingleRequestBuilder } from "./services/single-request-builder";
 import { StateService } from "./services/state.service";
 import { StateMigrationService } from "./services/stateMigration.service";
 import { SyncService } from "./services/sync.service";
@@ -51,6 +55,9 @@ export class Main {
   syncService: SyncService;
   stateService: StateService;
   stateMigrationService: StateMigrationService;
+  directoryFactoryService: DirectoryFactoryService;
+  batchRequestBuilder: BatchRequestBuilder;
+  singleRequestBuilder: SingleRequestBuilder;

   constructor() {
     const applicationName = "Bitwarden Directory Connector";
@@ -146,14 +153,25 @@ export class Main {
       this.stateService,
     );

-    this.syncService = new SyncService(
+    this.directoryFactoryService = new DefaultDirectoryFactoryService(
       this.logService,
+      this.i18nService,
+      this.stateService,
+    );
+
+    this.batchRequestBuilder = new BatchRequestBuilder();
+    this.singleRequestBuilder = new SingleRequestBuilder();
+
+    this.syncService = new SyncService(
       this.cryptoFunctionService,
       this.apiService,
       this.messagingService,
       this.i18nService,
       this.environmentService,
       this.stateService,
+      this.batchRequestBuilder,
+      this.singleRequestBuilder,
+      this.directoryFactoryService,
     );

     this.program = new Program(this);
diff --git a/src/services/batch-request-builder.ts b/src/services/batch-request-builder.ts
new file mode 100644
index 00000000..7034642d
--- /dev/null
+++ b/src/services/batch-request-builder.ts
@@ -0,0 +1,70 @@
+import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
+
+import { GroupEntry } from "@/src/models/groupEntry";
+import { UserEntry } from "@/src/models/userEntry";
+
+import { RequestBuilder } from "../abstractions/request-builder.service";
+
+import { batchSize } from "./sync.service";
+
+/**
+ * This class is responsible for batching large sync requests (>2k users) into multiple smaller
+ * requests to the /import endpoint. This is done to ensure we stay under the default
+ * maximum packet size for NGINX web servers and avoid the request potentially timing out.
+ * */
+export class BatchRequestBuilder implements RequestBuilder {
+  buildRequest(
+    groups: GroupEntry[],
+    users: UserEntry[],
+    removeDisabled: boolean,
+    overwriteExisting: boolean,
+  ): OrganizationImportRequest[] {
+    const requests: OrganizationImportRequest[] = [];
+
+    if (users.length > 0) {
+      const usersRequest = users.map((u) => {
+        return {
+          email: u.email,
+          externalId: u.externalId,
+          deleted: u.deleted || (removeDisabled && u.disabled),
+        };
+      });
+
+      // Partition users
+      for (let i = 0; i < usersRequest.length; i += batchSize) {
+        const u = usersRequest.slice(i, i + batchSize);
+        const req = new OrganizationImportRequest({
+          groups: [],
+          users: u,
+          largeImport: true,
+          overwriteExisting,
+        });
+        requests.push(req);
+      }
+    }
+
+    if (groups.length > 0) {
+      const groupRequest = groups.map((g) => {
+        return {
+          name: g.name,
+          externalId: g.externalId,
+          memberExternalIds: Array.from(g.userMemberExternalIds),
+        };
+      });
+
+      // Partition groups
+      for (let i = 0; i < groupRequest.length; i += batchSize) {
+        const g = groupRequest.slice(i, i + batchSize);
+        const req = new OrganizationImportRequest({
+          groups: g,
+          users: [],
+          largeImport: true,
+          overwriteExisting,
+        });
+        requests.push(req);
+      }
+    }
+
+    return requests;
+  }
+}
diff --git a/src/services/batch-requests-builder.spec.ts b/src/services/batch-requests-builder.spec.ts
new file mode 100644
index 00000000..4cd0c1c8
--- /dev/null
+++ b/src/services/batch-requests-builder.spec.ts
@@ -0,0 +1,47 @@
+import { GroupEntry } from "@/src/models/groupEntry";
+import { UserEntry } from "@/src/models/userEntry";
+
+import { BatchRequestBuilder } from "./batch-request-builder";
+import { SingleRequestBuilder } from "./single-request-builder";
+
+describe("BatchRequestBuilder", () => {
+  let batchRequestBuilder: BatchRequestBuilder;
+  let singleRequestBuilder: SingleRequestBuilder;
+
+  function userSimulator(userCount: number) {
+    return Array(userCount).fill(new UserEntry());
+  }
+
+  function groupSimulator(groupCount: number) {
+    return Array(groupCount).fill(new GroupEntry());
+  }
+
+  beforeEach(async () => {
+    batchRequestBuilder = new BatchRequestBuilder();
+    singleRequestBuilder = new SingleRequestBuilder();
+  });
+
+  it("BatchRequestBuilder batches requests for > 2000 users", () => {
+    const mockGroups = groupSimulator(11000);
+    const mockUsers = userSimulator(11000);
+
+    const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, true, true);
+
+    expect(requests.length).toEqual(12);
+  });
+
+  it("SingleRequestBuilder returns single request for 200 users", () => {
+    const mockGroups = groupSimulator(200);
+    const mockUsers = userSimulator(200);
+
+    const requests = singleRequestBuilder.buildRequest(mockGroups, mockUsers, true, true);
+
+    expect(requests.length).toEqual(1);
+  });
+
+  it("BatchRequestBuilder returns an empty array when there are no users or groups", () => {
+    const requests = batchRequestBuilder.buildRequest([], [], true, true);
+
+    expect(requests).toEqual([]);
+  });
+});
diff --git a/src/services/directory-factory.service.ts b/src/services/directory-factory.service.ts
new file mode 100644
index 00000000..a7f34840
--- /dev/null
+++ b/src/services/directory-factory.service.ts
@@ -0,0 +1,37 @@
+import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
+import { LogService } from "@/jslib/common/src/abstractions/log.service";
from "@/jslib/common/src/abstractions/log.service"; + +import { DirectoryFactoryService } from "../abstractions/directory-factory.service"; +import { StateService } from "../abstractions/state.service"; +import { DirectoryType } from "../enums/directoryType"; + +import { AzureDirectoryService } from "./azure-directory.service"; +import { GSuiteDirectoryService } from "./gsuite-directory.service"; +import { LdapDirectoryService } from "./ldap-directory.service"; +import { OktaDirectoryService } from "./okta-directory.service"; +import { OneLoginDirectoryService } from "./onelogin-directory.service"; + +export class DefaultDirectoryFactoryService implements DirectoryFactoryService { + constructor( + private logService: LogService, + private i18nService: I18nService, + private stateService: StateService, + ) {} + + createService(directoryType: DirectoryType) { + switch (directoryType) { + case DirectoryType.GSuite: + return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService); + case DirectoryType.AzureActiveDirectory: + return new AzureDirectoryService(this.logService, this.i18nService, this.stateService); + case DirectoryType.Ldap: + return new LdapDirectoryService(this.logService, this.i18nService, this.stateService); + case DirectoryType.Okta: + return new OktaDirectoryService(this.logService, this.i18nService, this.stateService); + case DirectoryType.OneLogin: + return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService); + default: + throw new Error("Invalid Directory Type"); + } + } +} diff --git a/src/services/ldap-directory.service.integration.spec.ts b/src/services/ldap-directory.service.integration.spec.ts index af06fb5c..7cbf9252 100644 --- a/src/services/ldap-directory.service.integration.spec.ts +++ b/src/services/ldap-directory.service.integration.spec.ts @@ -5,8 +5,7 @@ import { LogService } from "../../jslib/common/src/abstractions/log.service"; import { groupFixtures } from "../../openldap/group-fixtures"; import { userFixtures } from "../../openldap/user-fixtures"; import { DirectoryType } from "../enums/directoryType"; -import { LdapConfiguration } from "../models/ldapConfiguration"; -import { SyncConfiguration } from "../models/syncConfiguration"; +import { getLdapConfiguration, getSyncConfiguration } from "../utils/test-fixtures"; import { LdapDirectoryService } from "./ldap-directory.service"; import { StateService } from "./state.service"; @@ -154,54 +153,3 @@ describe("ldapDirectoryService", () => { }); }); }); - -/** - * @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration. - */ -const getLdapConfiguration = (config?: Partial): LdapConfiguration => ({ - ssl: false, - startTls: false, - tlsCaPath: null, - sslAllowUnauthorized: false, - sslCertPath: null, - sslKeyPath: null, - sslCaPath: null, - hostname: "localhost", - port: 1389, - domain: null, - rootPath: "dc=bitwarden,dc=com", - currentUser: false, - username: "cn=admin,dc=bitwarden,dc=com", - password: "admin", - ad: false, - pagedSearch: false, - ...(config ?? {}), -}); - -/** - * @returns a basic sync configuration. Can be overridden by passing in a partial configuration. 
- */ -const getSyncConfiguration = (config?: Partial): SyncConfiguration => ({ - users: false, - groups: false, - interval: 5, - userFilter: null, - groupFilter: null, - removeDisabled: false, - overwriteExisting: false, - largeImport: false, - // Ldap properties - groupObjectClass: "posixGroup", - userObjectClass: "person", - groupPath: null, - userPath: null, - groupNameAttribute: "cn", - userEmailAttribute: "mail", - memberAttribute: "memberUid", - useEmailPrefixSuffix: false, - emailPrefixAttribute: "sAMAccountName", - emailSuffix: null, - creationDateAttribute: "whenCreated", - revisionDateAttribute: "whenChanged", - ...(config ?? {}), -}); diff --git a/src/services/single-request-builder.ts b/src/services/single-request-builder.ts new file mode 100644 index 00000000..4026d93e --- /dev/null +++ b/src/services/single-request-builder.ts @@ -0,0 +1,42 @@ +import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest"; + +import { GroupEntry } from "@/src/models/groupEntry"; +import { UserEntry } from "@/src/models/userEntry"; + +import { RequestBuilder } from "../abstractions/request-builder.service"; + +/** + * This class is responsible for building small (<2k users) syncs as a single + * request to the /import endpoint. This is done to be backwards compatible with + * existing functionality for sync requests that are sufficiently small enough to not + * exceed default maximum packet size limits on NGINX web servers. + * */ +export class SingleRequestBuilder implements RequestBuilder { + buildRequest( + groups: GroupEntry[], + users: UserEntry[], + removeDisabled: boolean, + overwriteExisting: boolean, + ): OrganizationImportRequest[] { + return [ + new OrganizationImportRequest({ + groups: (groups ?? []).map((g) => { + return { + name: g.name, + externalId: g.externalId, + memberExternalIds: Array.from(g.userMemberExternalIds), + }; + }), + users: (users ?? 
+          return {
+            email: u.email,
+            externalId: u.externalId,
+            deleted: u.deleted || (removeDisabled && u.disabled),
+          };
+        }),
+        overwriteExisting: overwriteExisting,
+        largeImport: false,
+      }),
+    ];
+  }
+}
diff --git a/src/services/sync.service.spec.ts b/src/services/sync.service.spec.ts
new file mode 100644
index 00000000..5f67346c
--- /dev/null
+++ b/src/services/sync.service.spec.ts
@@ -0,0 +1,132 @@
+import { mock, MockProxy } from "jest-mock-extended";
+
+import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
+import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
+import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
+import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
+import { ApiService } from "@/jslib/common/src/services/api.service";
+
+import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
+import { DirectoryType } from "../enums/directoryType";
+import { getSyncConfiguration } from "../utils/test-fixtures";
+
+import { BatchRequestBuilder } from "./batch-request-builder";
+import { I18nService } from "./i18n.service";
+import { LdapDirectoryService } from "./ldap-directory.service";
+import { SingleRequestBuilder } from "./single-request-builder";
+import { StateService } from "./state.service";
+import { SyncService } from "./sync.service";
+import * as constants from "./sync.service";
+
+import { groupFixtures } from "@/openldap/group-fixtures";
+import { userFixtures } from "@/openldap/user-fixtures";
+
+describe("SyncService", () => {
+  let cryptoFunctionService: MockProxy<CryptoFunctionService>;
+  let apiService: MockProxy<ApiService>;
+  let messagingService: MockProxy<MessagingService>;
+  let i18nService: MockProxy<I18nService>;
+  let environmentService: MockProxy<EnvironmentService>;
+  let stateService: MockProxy<StateService>;
+  let directoryFactory: MockProxy<DirectoryFactoryService>;
+  let batchRequestBuilder: MockProxy<BatchRequestBuilder>;
+  let singleRequestBuilder: MockProxy<SingleRequestBuilder>;
+
+  let syncService: SyncService;
+
+  beforeEach(() => {
+    cryptoFunctionService = mock<CryptoFunctionService>();
+    apiService = mock<ApiService>();
+    messagingService = mock<MessagingService>();
+    i18nService = mock<I18nService>();
+    environmentService = mock<EnvironmentService>();
+    stateService = mock<StateService>();
+    directoryFactory = mock<DirectoryFactoryService>();
+    batchRequestBuilder = mock<BatchRequestBuilder>();
+    singleRequestBuilder = mock<SingleRequestBuilder>();
+
+    stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
+    stateService.getOrganizationId.mockResolvedValue("fakeId");
+    const mockDirectoryService = mock<LdapDirectoryService>();
+    mockDirectoryService.getEntries.mockResolvedValue([groupFixtures, userFixtures]);
+    directoryFactory.createService.mockReturnValue(mockDirectoryService);
+
+    syncService = new SyncService(
+      cryptoFunctionService,
+      apiService,
+      messagingService,
+      i18nService,
+      environmentService,
+      stateService,
+      batchRequestBuilder,
+      singleRequestBuilder,
+      directoryFactory,
+    );
+  });
+
+  it("Sync posts single request successfully for unique hashes", async () => {
+    stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
+    cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
+    // This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
+    stateService.getLastSyncHash.mockResolvedValue("unique hash");
+
+    const mockRequest: OrganizationImportRequest[] = [
+      {
+        members: [],
+        groups: [],
+        overwriteExisting: true,
+        largeImport: true,
+      },
+    ];
+
+    singleRequestBuilder.buildRequest.mockReturnValue(mockRequest);
+
+    await syncService.sync(true, false);
+
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
+  });
+
+  it("Sync posts multiple requests successfully for unique hashes", async () => {
+    stateService.getSync.mockResolvedValue(
+      getSyncConfiguration({ groups: true, users: true, largeImport: true }),
+    );
+    cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
+    // This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
+    stateService.getLastSyncHash.mockResolvedValue("unique hash");
+
+    // @ts-expect-error This is a workaround to make the batch size smaller to trigger the batching logic since it's a const.
+    constants.batchSize = 4;
+
+    const mockRequests = new Array(6).fill({
+      members: [],
+      groups: [],
+      overwriteExisting: true,
+      largeImport: true,
+    });
+
+    batchRequestBuilder.buildRequest.mockReturnValue(mockRequests);
+
+    await syncService.sync(true, false);
+
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[0]);
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[1]);
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[2]);
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[3]);
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[4]);
+    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[5]);
+  });
+
+  it("does not post for the same hash", async () => {
+    // @ts-expect-error this sets the batch size back to its expected value for this test.
+    constants.batchSize = 2000;
+    stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
+    cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
+    // This arranges the last hash to be the same as the ArrayBuffer after it is converted to b64
+    stateService.getLastSyncHash.mockResolvedValue("AA==");
+
+    await syncService.sync(true, false);
+
+    expect(apiService.postPublicImportDirectory).not.toHaveBeenCalled();
+  });
+});
diff --git a/src/services/sync.service.ts b/src/services/sync.service.ts
index db966d0a..ca46c5f0 100644
--- a/src/services/sync.service.ts
+++ b/src/services/sync.service.ts
@@ -2,35 +2,40 @@ import { ApiService } from "@/jslib/common/src/abstractions/api.service";
 import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
 import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
 import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
-import { LogService } from "@/jslib/common/src/abstractions/log.service";
 import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
 import { Utils } from "@/jslib/common/src/misc/utils";
 import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";

+import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
 import { StateService } from "../abstractions/state.service";
 import { DirectoryType } from "../enums/directoryType";
 import { GroupEntry } from "../models/groupEntry";
 import { SyncConfiguration } from "../models/syncConfiguration";
 import { UserEntry } from "../models/userEntry";

-import { AzureDirectoryService } from "./azure-directory.service";
-import { IDirectoryService } from "./directory.service";
-import { GSuiteDirectoryService } from "./gsuite-directory.service";
-import { LdapDirectoryService } from "./ldap-directory.service";
-import { OktaDirectoryService } from "./okta-directory.service";
-import { OneLoginDirectoryService } from "./onelogin-directory.service";
+import { BatchRequestBuilder } from "./batch-request-builder";
+import { SingleRequestBuilder } from "./single-request-builder";
+
+export interface HashResult {
+  hash: string;
+  hashLegacy: string;
+}
+
+export const batchSize = 2000;

 export class SyncService {
   private dirType: DirectoryType;

   constructor(
-    private logService: LogService,
     private cryptoFunctionService: CryptoFunctionService,
     private apiService: ApiService,
     private messagingService: MessagingService,
     private i18nService: I18nService,
     private environmentService: EnvironmentService,
     private stateService: StateService,
+    private batchRequestBuilder: BatchRequestBuilder,
+    private singleRequestBuilder: SingleRequestBuilder,
+    private directoryFactory: DirectoryFactoryService,
   ) {}

   async sync(force: boolean, test: boolean): Promise<[GroupEntry[], UserEntry[]]> {
@@ -39,7 +44,7 @@ export class SyncService {
       throw new Error("No directory configured.");
     }

-    const directoryService = this.getDirectoryService();
+    const directoryService = this.directoryFactory.createService(this.dirType);
     if (directoryService == null) {
       throw new Error("Cannot load directory service.");
     }
@@ -78,42 +83,21 @@ export class SyncService {
       return [groups, users];
     }

-    const req = this.buildRequest(
+    const reqs = this.buildRequest(
       groups,
       users,
       syncConfig.removeDisabled,
       syncConfig.overwriteExisting,
       syncConfig.largeImport,
     );

-    const reqJson = JSON.stringify(req);
-    const orgId = await this.stateService.getOrganizationId();
-    if (orgId == null) {
-      throw new Error("Organization not set.");
-    }
+    const result: HashResult = await this.generateHash(reqs);

-    // TODO: Remove hashLegacy once we're sure clients have had time to sync new hashes
-    let hashLegacy: string = null;
-    const hashBuffLegacy = await this.cryptoFunctionService.hash(
-      this.environmentService.getApiUrl() + reqJson,
-      "sha256",
-    );
-    if (hashBuffLegacy != null) {
-      hashLegacy = Utils.fromBufferToB64(hashBuffLegacy);
-    }
-    let hash: string = null;
-    const hashBuff = await this.cryptoFunctionService.hash(
-      this.environmentService.getApiUrl() + orgId + reqJson,
-      "sha256",
-    );
-    if (hashBuff != null) {
-      hash = Utils.fromBufferToB64(hashBuff);
-    }
-    const lastHash = await this.stateService.getLastSyncHash();
-
-    if (lastHash == null || (hash !== lastHash && hashLegacy !== lastHash)) {
-      await this.apiService.postPublicImportDirectory(req);
-      await this.stateService.setLastSyncHash(hash);
+    if (result.hash && (await this.isNewHash(result))) {
+      for (const req of reqs) {
+        await this.apiService.postPublicImportDirectory(req);
+      }
+      await this.stateService.setLastSyncHash(result.hash);
     } else {
       groups = null;
       users = null;
@@ -133,6 +117,40 @@ export class SyncService {
     }
   }
+  async generateHash(reqs: OrganizationImportRequest[]): Promise<HashResult> {
+    const reqJson = JSON.stringify(reqs?.length === 1 ? reqs[0] : reqs);
+    const orgId = await this.stateService.getOrganizationId();
+    if (orgId == null) {
+      throw new Error("Organization not set.");
+    }
+
+    // TODO: Remove hashLegacy once we're sure clients have had time to sync new hashes
+    let hashLegacy: string = null;
+    const hashBuffLegacy = await this.cryptoFunctionService.hash(
+      this.environmentService.getApiUrl() + reqJson,
+      "sha256",
+    );
+    if (hashBuffLegacy != null) {
+      hashLegacy = Utils.fromBufferToB64(hashBuffLegacy);
+    }
+    let hash: string = null;
+    const hashBuff = await this.cryptoFunctionService.hash(
+      this.environmentService.getApiUrl() + orgId + reqJson,
+      "sha256",
+    );
+    if (hashBuff != null) {
+      hash = Utils.fromBufferToB64(hashBuff);
+    }
+
+    return { hash, hashLegacy };
+  }
+
+  async isNewHash(hashResult: HashResult): Promise<boolean> {
+    const lastHash = await this.stateService.getLastSyncHash();
+
+    return lastHash == null || (hashResult.hash !== lastHash && hashResult.hashLegacy !== lastHash);
+  }
+
   private removeDuplicateUsers(users: UserEntry[]) {
     if (users == null) {
       return null;
     }
@@ -198,48 +216,28 @@ export class SyncService {
     return allUsers;
   }

-  private getDirectoryService(): IDirectoryService {
-    switch (this.dirType) {
-      case DirectoryType.GSuite:
-        return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService);
-      case DirectoryType.AzureActiveDirectory:
-        return new AzureDirectoryService(this.logService, this.i18nService, this.stateService);
-      case DirectoryType.Ldap:
-        return new LdapDirectoryService(this.logService, this.i18nService, this.stateService);
-      case DirectoryType.Okta:
-        return new OktaDirectoryService(this.logService, this.i18nService, this.stateService);
-      case DirectoryType.OneLogin:
-        return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService);
-      default:
-        return null;
-    }
-  }
-
   private buildRequest(
     groups: GroupEntry[],
     users: UserEntry[],
     removeDisabled: boolean,
     overwriteExisting: boolean,
     largeImport = false,
-  ) {
-    return new OrganizationImportRequest({
-      groups: (groups ?? []).map((g) => {
-        return {
-          name: g.name,
-          externalId: g.externalId,
-          memberExternalIds: Array.from(g.userMemberExternalIds),
-        };
-      }),
-      users: (users ?? []).map((u) => {
-        return {
-          email: u.email,
-          externalId: u.externalId,
-          deleted: u.deleted || (removeDisabled && u.disabled),
-        };
-      }),
-      overwriteExisting: overwriteExisting,
-      largeImport: largeImport,
-    });
+  ): OrganizationImportRequest[] {
+    if (largeImport && groups.length + users.length > batchSize) {
+      return this.batchRequestBuilder.buildRequest(
+        groups,
+        users,
+        removeDisabled,
+        overwriteExisting,
+      );
+    } else {
+      return this.singleRequestBuilder.buildRequest(
+        groups,
+        users,
+        removeDisabled,
+        overwriteExisting,
+      );
+    }
   }

   private async saveSyncTimes(syncConfig: SyncConfiguration, time: Date) {
diff --git a/src/utils/test-fixtures.ts b/src/utils/test-fixtures.ts
new file mode 100644
index 00000000..d611e3e8
--- /dev/null
+++ b/src/utils/test-fixtures.ts
@@ -0,0 +1,53 @@
+import { LdapConfiguration } from "../models/ldapConfiguration";
+import { SyncConfiguration } from "../models/syncConfiguration";
+
+/**
+ * @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration.
+ */
+export const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({
+  ssl: false,
+  startTls: false,
+  tlsCaPath: null,
+  sslAllowUnauthorized: false,
+  sslCertPath: null,
+  sslKeyPath: null,
+  sslCaPath: null,
+  hostname: "localhost",
+  port: 1389,
+  domain: null,
+  rootPath: "dc=bitwarden,dc=com",
+  currentUser: false,
+  username: "cn=admin,dc=bitwarden,dc=com",
+  password: "admin",
+  ad: false,
+  pagedSearch: false,
+  ...(config ?? {}),
+});
+
+/**
+ * @returns a basic sync configuration. Can be overridden by passing in a partial configuration.
+ */
+export const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({
+  users: false,
+  groups: false,
+  interval: 5,
+  userFilter: null,
+  groupFilter: null,
+  removeDisabled: false,
+  overwriteExisting: false,
+  largeImport: false,
+  // Ldap properties
+  groupObjectClass: "posixGroup",
+  userObjectClass: "person",
+  groupPath: null,
+  userPath: null,
+  groupNameAttribute: "cn",
+  userEmailAttribute: "mail",
+  memberAttribute: "memberUid",
+  useEmailPrefixSuffix: false,
+  emailPrefixAttribute: "sAMAccountName",
+  emailSuffix: null,
+  creationDateAttribute: "whenCreated",
+  revisionDateAttribute: "whenChanged",
+  ...(config ?? {}),
+});
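Usage sketch (editor's addition, not part of the diff): the snippet below illustrates how the two request builders introduced above are expected to compose, mirroring the size check in SyncService.buildRequest and the entry counts used in batch-requests-builder.spec.ts. It is a minimal sketch under those assumptions, not part of the change itself; in the real service the batch path is only taken when the sync configuration also enables largeImport.

// Illustrative only — assumes the modules added in this diff and their "@/src" path alias.
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
import { BatchRequestBuilder } from "@/src/services/batch-request-builder";
import { SingleRequestBuilder } from "@/src/services/single-request-builder";
import { batchSize } from "@/src/services/sync.service";

// Simulate a large directory the same way the spec does, with default entries.
const groups: GroupEntry[] = Array(11000).fill(new GroupEntry());
const users: UserEntry[] = Array(11000).fill(new UserEntry());

// Choose a builder from the combined entry count, as SyncService.buildRequest does
// once largeImport is enabled.
const builder =
  groups.length + users.length > batchSize ? new BatchRequestBuilder() : new SingleRequestBuilder();

// removeDisabled = false, overwriteExisting = true
const requests = builder.buildRequest(groups, users, false, true);

// With batchSize = 2000, 11000 users and 11000 groups partition into 6 + 6 = 12 requests;
// a small sync would instead collapse into a single OrganizationImportRequest.
console.log(requests.length);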