mirror of https://github.com/bitwarden/directory-connector synced 2025-12-05 23:53:21 +00:00

Compare commits


1 commit

Author: Jimmy Vo
SHA1: 599b6e6058
Message: [PM-15283] [PM-15284] [PM-15286] Upgrade Angular to 19.
Date: 2024-12-09 14:10:39 -05:00
38 changed files with 4864 additions and 345606 deletions

View File

@@ -7,12 +7,6 @@
"groupName": "gh minor",
"matchManagers": ["github-actions"],
"matchUpdateTypes": ["minor", "patch"]
},
{
"groupName": "Google Libraries",
"matchPackagePatterns": ["google-auth-library", "googleapis"],
"matchManagers": ["npm"],
"groupSlug": "google-libraries"
}
]
}

View File

@@ -5,20 +5,15 @@ on:
push:
branches:
- "main"
- "rc"
- "hotfix-rc"
workflow_dispatch: {}
permissions:
contents: read
jobs:
cloc:
name: CLOC
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up CLOC
run: |
@@ -36,7 +31,7 @@ jobs:
package_version: ${{ steps.retrieve-version.outputs.package_version }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Get Package Version
id: retrieve-version
@@ -55,10 +50,10 @@ jobs:
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -98,6 +93,11 @@ jobs:
- name: Zip
run: zip -j dist-cli/bwdc-linux-$_PACKAGE_VERSION.zip dist-cli/linux/bwdc keytar/linux/build/Release/keytar.node
- name: Create checksums
run: |
shasum -a 256 dist-cli/bwdc-linux-$_PACKAGE_VERSION.zip | \
cut -d " " -f 1 > dist-cli/bwdc-linux-sha256-$_PACKAGE_VERSION.txt
- name: Version Test
run: |
sudo apt-get update
@@ -121,12 +121,19 @@ jobs:
fi
- name: Upload Linux Zip to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
- name: Upload Linux checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-linux-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-linux-sha256-${{ env._PACKAGE_VERSION }}.txt
if-no-files-found: error
macos-cli:
name: Build Mac CLI
@@ -138,10 +145,10 @@ jobs:
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -181,6 +188,11 @@ jobs:
- name: Zip
run: zip -j dist-cli/bwdc-macos-$_PACKAGE_VERSION.zip dist-cli/macos/bwdc keytar/macos/build/Release/keytar.node
- name: Create checksums
run: |
shasum -a 256 dist-cli/bwdc-macos-$_PACKAGE_VERSION.zip | \
cut -d " " -f 1 > dist-cli/bwdc-macos-sha256-$_PACKAGE_VERSION.txt
- name: Version Test
run: |
mkdir -p test/macos
@@ -197,12 +209,18 @@ jobs:
fi
- name: Upload Mac Zip to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
- name: Upload Mac checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-macos-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-macos-sha256-${{ env._PACKAGE_VERSION }}.txt
if-no-files-found: error
windows-cli:
name: Build Windows CLI
@@ -214,7 +232,7 @@ jobs:
_WIN_PKG_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Setup Windows builder
run: |
@@ -222,7 +240,7 @@ jobs:
choco install reshack --no-progress
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -329,13 +347,25 @@ jobs:
Throw "Version test failed."
}
- name: Create checksums
run: |
checksum -f="./dist-cli/bwdc-windows-${env:_PACKAGE_VERSION}.zip" `
-t sha256 | Out-File ./dist-cli/bwdc-windows-sha256-${env:_PACKAGE_VERSION}.txt
- name: Upload Windows Zip to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
- name: Upload Windows checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-windows-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-windows-sha256-${{ env._PACKAGE_VERSION }}.txt
if-no-files-found: error
windows-gui:
name: Build Windows GUI
@@ -347,10 +377,10 @@ jobs:
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -372,55 +402,39 @@ jobs:
- name: Install Node dependencies
run: npm install
- name: Login to Azure
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
- name: Retrieve secrets
id: retrieve-secrets
uses: bitwarden/gh-actions/get-keyvault-secrets@main
with:
keyvault: "bitwarden-ci"
secrets: "code-signing-vault-url,
code-signing-client-id,
code-signing-tenant-id,
code-signing-client-secret,
code-signing-cert-name"
- name: Build & Sign
run: npm run dist:win
env:
ELECTRON_BUILDER_SIGN: 1
SIGNING_VAULT_URL: ${{ steps.retrieve-secrets.outputs.code-signing-vault-url }}
SIGNING_CLIENT_ID: ${{ steps.retrieve-secrets.outputs.code-signing-client-id }}
SIGNING_TENANT_ID: ${{ steps.retrieve-secrets.outputs.code-signing-tenant-id }}
SIGNING_CLIENT_SECRET: ${{ steps.retrieve-secrets.outputs.code-signing-client-secret }}
SIGNING_CERT_NAME: ${{ steps.retrieve-secrets.outputs.code-signing-cert-name }}
SIGNING_VAULT_URL: ${{ secrets.SIGNING_VAULT_URL }}
SIGNING_CLIENT_ID: ${{ secrets.SIGNING_CLIENT_ID }}
SIGNING_TENANT_ID: ${{ secrets.SIGNING_TENANT_ID }}
SIGNING_CLIENT_SECRET: ${{ secrets.SIGNING_CLIENT_SECRET }}
SIGNING_CERT_NAME: ${{ secrets.SIGNING_CERT_NAME }}
- name: Upload Portable Executable to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error
- name: Upload Installer Executable to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error
- name: Upload Installer Executable Blockmap to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: latest.yml
path: ./dist/latest.yml
@@ -437,10 +451,10 @@ jobs:
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -467,14 +481,14 @@ jobs:
run: npm run dist:lin
- name: Upload AppImage
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: latest-linux.yml
path: ./dist/latest-linux.yml
@@ -491,10 +505,10 @@ jobs:
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -561,7 +575,7 @@ jobs:
- name: Install Node dependencies
run: npm install
- name: Set up private auth key
run: |
mkdir ~/private_keys
@@ -578,28 +592,28 @@ jobs:
CSC_FOR_PULL_REQUEST: true
- name: Upload .zip artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
if-no-files-found: error
- name: Upload .dmg artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
if-no-files-found: error
- name: Upload .dmg Blockmap artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: latest-mac.yml
path: ./dist/latest-mac.yml
@@ -620,11 +634,7 @@ jobs:
- macos-gui
steps:
- name: Check if any job failed
if: |
(github.ref == 'refs/heads/main'
|| github.ref == 'refs/heads/rc'
|| github.ref == 'refs/heads/hotfix-rc')
&& contains(needs.*.result, 'failure')
if: github.ref == 'refs/heads/main' && contains(needs.*.result, 'failure')
run: exit 1
- name: Login to Azure - CI subscription

View File

@@ -3,9 +3,6 @@ name: Enforce PR labels
on:
pull_request:
types: [labeled, unlabeled, opened, edited, synchronize]
permissions:
contents: read
pull-requests: read
jobs:
enforce-label:
name: EnforceLabel

View File

@@ -8,21 +8,39 @@ on:
paths:
- ".github/workflows/integration-test.yml" # this file
- "src/services/ldap-directory.service*" # we only have integration for LDAP testing at the moment
- "./openldap/**/*" # any change to test fixtures
- "./openldap*" # any change to test fixtures
- "./docker-compose.yml" # any change to Docker configuration
pull_request:
paths:
- ".github/workflows/integration-test.yml" # this file
- "src/services/ldap-directory.service*" # we only have integration for LDAP testing at the moment
- "./openldap/**/*" # any change to test fixtures
- "./openldap*" # any change to test fixtures
- "./docker-compose.yml" # any change to Docker configuration
jobs:
check-test-secrets:
name: Check for test secrets
runs-on: ubuntu-22.04
outputs:
available: ${{ steps.check-test-secrets.outputs.available }}
permissions:
contents: read
steps:
- name: Check
id: check-test-secrets
run: |
if [ "${{ secrets.CODECOV_TOKEN }}" != '' ]; then
echo "available=true" >> $GITHUB_OUTPUT;
else
echo "available=false" >> $GITHUB_OUTPUT;
fi
testing:
name: Run tests
if: ${{ startsWith(github.head_ref, 'version_bump_') == false }}
runs-on: ubuntu-22.04
needs: check-test-secrets
permissions:
checks: write
contents: read
@@ -30,7 +48,7 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Get Node version
id: retrieve-node-version
@@ -40,7 +58,7 @@ jobs:
echo "node_version=$NODE_VERSION" >> $GITHUB_OUTPUT
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -62,7 +80,7 @@ jobs:
- name: Report test results
uses: dorny/test-reporter@31a54ee7ebcacc03a09ea97a7e5465a47b84aea5 # v1.9.1
if: ${{ github.event.pull_request.head.repo.full_name == github.repository && !cancelled() }}
if: ${{ needs.check-test-secrets.outputs.available == 'true' && !cancelled() }}
with:
name: Test Results
path: "junit.xml"
@@ -70,7 +88,13 @@ jobs:
fail-on-error: true
- name: Upload coverage to codecov.io
uses: codecov/codecov-action@5a605bd92782ce0810fa3b8acc235c921b497052 # v5.2.0
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Upload results to codecov.io
uses: codecov/test-results-action@4e79e65778be1cecd5df25e14af1eafb6df80ea9 # v1.0.2
uses: codecov/test-results-action@1b5b448b98e58ba90d1a1a1d9fcb72ca2263be46 # v1.0.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -13,25 +13,22 @@ on:
- Redeploy
- Dry Run
permissions:
contents: read
jobs:
setup:
name: Setup
runs-on: ubuntu-24.04
outputs:
release_version: ${{ steps.version.outputs.version }}
release-version: ${{ steps.version.outputs.version }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Branch check
if: ${{ inputs.release_type != 'Dry Run' }}
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
run: |
if [[ "$GITHUB_REF" != "refs/heads/rc" ]] && [[ "$GITHUB_REF" != "refs/heads/hotfix-rc" ]]; then
if [[ "$GITHUB_REF" != "refs/heads/main" ]]; then
echo "==================================="
echo "[!] Can only release from the 'rc' or 'hotfix-rc' branches"
echo "[!] Can only release from the 'main' branch"
echo "==================================="
exit 1
fi
@@ -40,7 +37,7 @@ jobs:
id: version
uses: bitwarden/gh-actions/release-version-check@main
with:
release-type: ${{ inputs.release_type }}
release-type: ${{ github.event.inputs.release_type }}
project-type: ts
file: package.json
@@ -50,7 +47,7 @@ jobs:
needs: setup
steps:
- name: Download all artifacts
if: ${{ inputs.release_type != 'Dry Run' }}
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build.yml
@@ -58,7 +55,7 @@ jobs:
branch: ${{ github.ref_name }}
- name: Dry Run - Download all artifacts
if: ${{ inputs.release_type == 'Dry Run' }}
if: ${{ github.event.inputs.release_type == 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build.yml
@@ -66,14 +63,17 @@ jobs:
branch: main
- name: Create release
if: ${{ inputs.release_type != 'Dry Run' }}
uses: ncipollo/release-action@cdcc88a9acf3ca41c16c37bb7d21b9ad48560d87 # v1.15.0
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
uses: ncipollo/release-action@2c591bcc8ecdcd2db72b97d6147f871fcd833ba5 # v1.14.0
env:
PKG_VERSION: ${{ needs.setup.outputs.release_version }}
PKG_VERSION: ${{ needs.setup.outputs.release-version }}
with:
artifacts: "./bwdc-windows-${{ env.PKG_VERSION }}.zip,
./bwdc-macos-${{ env.PKG_VERSION }}.zip,
./bwdc-linux-${{ env.PKG_VERSION }}.zip,
./bwdc-windows-sha256-${{ env.PKG_VERSION }}.txt,
./bwdc-macos-sha256-${{ env.PKG_VERSION }}.txt,
./bwdc-linux-sha256-${{ env.PKG_VERSION }}.txt,
./Bitwarden-Connector-Portable-${{ env.PKG_VERSION }}.exe,
./Bitwarden-Connector-Installer-${{ env.PKG_VERSION }}.exe,
./Bitwarden-Connector-Installer-${{ env.PKG_VERSION }}.exe.blockmap,

View File

@@ -5,23 +5,13 @@ on:
push:
branches:
- "main"
pull_request:
types: [opened, synchronize, reopened]
branches-ignore:
- main
pull_request_target:
types: [opened, synchronize, reopened]
branches:
- "main"
permissions: {}
types: [opened, synchronize]
jobs:
check-run:
name: Check PR run
uses: bitwarden/gh-actions/.github/workflows/check-run.yml@main
permissions:
contents: read
sast:
name: SAST scan
@@ -34,12 +24,12 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Scan with Checkmarx
uses: checkmarx/ast-github-action@184bf2f64f55d1c93fd6636d539edf274703e434 # 2.0.41
uses: checkmarx/ast-github-action@ed196cdaec9cd1bc5aacac4ca2010dd773b20893 # 2.0.35
env:
INCREMENTAL: "${{ contains(github.event_name, 'pull_request') && '--sast-incremental' || '' }}"
with:
@@ -54,11 +44,9 @@ jobs:
--output-path . ${{ env.INCREMENTAL }}
- name: Upload Checkmarx results to GitHub
uses: github/codeql-action/upload-sarif@dd196fa9ce80b6bacc74ca1c32bd5b0ba22efca7 # v3.28.3
uses: github/codeql-action/upload-sarif@e2b3eafc8d227b0241d48be5f425d47c2d750a13 # v3.26.10
with:
sarif_file: cx_result.sarif
sha: ${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.sha || github.sha }}
ref: ${{ contains(github.event_name, 'pull_request') && format('refs/pull/{0}/head', github.event.pull_request.number) || github.ref }}
quality:
name: Quality scan
@@ -70,15 +58,16 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Scan with SonarCloud
uses: sonarsource/sonarqube-scan-action@2500896589ef8f7247069a56136f8dc177c27ccf # v5.2.0
uses: sonarsource/sonarcloud-github-action@eb211723266fe8e83102bac7361f0a05c3ac1d1b # v3.0.0
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
args: >
-Dsonar.organization=${{ github.repository_owner }}
@@ -87,4 +76,3 @@ jobs:
-Dsonar.sources=.
-Dsonar.test.inclusions=**/*.spec.ts
-Dsonar.exclusions=**/*.spec.ts
-Dsonar.pullrequest.key=${{ github.event.pull_request.number }}

View File

@@ -5,16 +5,32 @@ on:
push:
branches:
- "main"
- "rc"
- "hotfix-rc"
pull_request:
jobs:
check-test-secrets:
name: Check for test secrets
runs-on: ubuntu-24.04
outputs:
available: ${{ steps.check-test-secrets.outputs.available }}
permissions:
contents: read
steps:
- name: Check
id: check-test-secrets
run: |
if [ "${{ secrets.CODECOV_TOKEN }}" != '' ]; then
echo "available=true" >> $GITHUB_OUTPUT;
else
echo "available=false" >> $GITHUB_OUTPUT;
fi
testing:
name: Run tests
if: ${{ startsWith(github.head_ref, 'version_bump_') == false }}
runs-on: ubuntu-24.04
needs: check-test-secrets
permissions:
checks: write
contents: read
@@ -22,7 +38,7 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Get Node version
id: retrieve-node-version
@@ -32,7 +48,7 @@ jobs:
echo "node_version=$NODE_VERSION" >> $GITHUB_OUTPUT
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -52,7 +68,7 @@ jobs:
- name: Report test results
uses: dorny/test-reporter@31a54ee7ebcacc03a09ea97a7e5465a47b84aea5 # v1.9.1
if: ${{ github.event.pull_request.head.repo.full_name == github.repository && !cancelled() }}
if: ${{ needs.check-test-secrets.outputs.available == 'true' && !cancelled() }}
with:
name: Test Results
path: "junit.xml"
@@ -60,7 +76,13 @@ jobs:
fail-on-error: true
- name: Upload coverage to codecov.io
uses: codecov/codecov-action@5a605bd92782ce0810fa3b8acc235c921b497052 # v5.2.0
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4.6.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Upload results to codecov.io
uses: codecov/test-results-action@4e79e65778be1cecd5df25e14af1eafb6df80ea9 # v1.0.2
uses: codecov/test-results-action@1b5b448b98e58ba90d1a1a1d9fcb72ca2263be46 # v1.0.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -20,14 +20,14 @@ jobs:
version: ${{ inputs.version_number_override }}
- name: Generate GH App token
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69 # v1.11.0
id: app-token
with:
app-id: ${{ secrets.BW_GHAPP_ID }}
private-key: ${{ secrets.BW_GHAPP_KEY }}
- name: Checkout Branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
token: ${{ steps.app-token.outputs.token }}

View File

@@ -2,9 +2,9 @@ import { ApiTokenRequest } from "../models/request/identityToken/apiTokenRequest
import { PasswordTokenRequest } from "../models/request/identityToken/passwordTokenRequest";
import { SsoTokenRequest } from "../models/request/identityToken/ssoTokenRequest";
import { OrganizationImportRequest } from "../models/request/organizationImportRequest";
import { IdentityCaptchaResponse } from "../models/response/identityCaptchaResponse";
import { IdentityTokenResponse } from "../models/response/identityTokenResponse";
import { IdentityTwoFactorResponse } from "../models/response/identityTwoFactorResponse";
import { IdentityCaptchaResponse } from '../models/response/identityCaptchaResponse';
import { IdentityTokenResponse } from '../models/response/identityTokenResponse';
import { IdentityTwoFactorResponse } from '../models/response/identityTwoFactorResponse';
export abstract class ApiService {
postIdentityToken: (

View File

@@ -8,12 +8,16 @@ export class OrganizationImportRequest {
overwriteExisting = false;
largeImport = false;
constructor(model: {
groups: Required<OrganizationImportGroupRequest>[];
users: Required<OrganizationImportMemberRequest>[];
overwriteExisting: boolean;
largeImport: boolean;
}) {
constructor(
model:
| {
groups: Required<OrganizationImportGroupRequest>[];
users: Required<OrganizationImportMemberRequest>[];
overwriteExisting: boolean;
largeImport: boolean;
}
| ImportDirectoryRequest,
) {
if (model instanceof ImportDirectoryRequest) {
this.groups = model.groups.map((g) => new OrganizationImportGroupRequest(g));
this.members = model.users.map((u) => new OrganizationImportMemberRequest(u));
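A minimal usage sketch of the widened constructor (not part of the diff): it now accepts either the plain model object used previously or an ImportDirectoryRequest instance. The member shape { email, externalId, deleted } matches the request builders elsewhere in this change; existingImportDirectoryRequest is a placeholder variable, and the ImportDirectoryRequest import is omitted because its path is not shown here.

import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";

// Shape 1: plain model object, as before.
const fromModel = new OrganizationImportRequest({
  groups: [],
  users: [{ email: "jane@example.com", externalId: "jane-ext", deleted: false }],
  overwriteExisting: false,
  largeImport: false,
});

// Shape 2: an ImportDirectoryRequest instance; its groups/users are mapped internally
// to OrganizationImportGroupRequest / OrganizationImportMemberRequest.
const fromImport = new OrganizationImportRequest(existingImportDirectoryRequest);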

View File

@@ -60,8 +60,9 @@ export class TrayMain {
}
setupWindowListeners(win: BrowserWindow) {
win.on("minimize", async () => {
win.on("minimize", async (e: Event) => {
if (await this.stateService.getEnableMinimizeToTray()) {
e.preventDefault();
this.hideToTray();
}
});

File diff suppressed because it is too large.

View File

@@ -6,5 +6,5 @@ fi
mkcert -install
mkdir -p ./openldap/certs
cp "$(mkcert -CAROOT)/rootCA.pem" ./openldap/certs/rootCA.pem
cp $(mkcert -CAROOT)/rootCA.pem ./openldap/certs/rootCA.pem
mkcert -key-file ./openldap/certs/openldap-key.pem -cert-file ./openldap/certs/openldap.pem localhost openldap

package-lock.json (generated, 8403 changed lines)

File diff suppressed because it is too large.

View File

@@ -2,7 +2,7 @@
"name": "@bitwarden/directory-connector",
"productName": "Bitwarden Directory Connector",
"description": "Sync your user directory to your Bitwarden organization.",
"version": "2025.6.0",
"version": "2024.10.0",
"keywords": [
"bitwarden",
"password",
@@ -73,108 +73,107 @@
"test:types": "npx tsc --noEmit"
},
"devDependencies": {
"@angular-devkit/build-angular": "17.3.17",
"@angular-devkit/build-angular": "19.0.3",
"@angular-eslint/eslint-plugin-template": "17.5.3",
"@angular-eslint/template-parser": "17.5.3",
"@angular/compiler-cli": "17.3.12",
"@angular/compiler-cli": "19.0.3",
"@electron/notarize": "2.5.0",
"@electron/rebuild": "3.7.2",
"@electron/rebuild": "3.7.1",
"@fluffy-spoon/substitute": "1.208.0",
"@microsoft/microsoft-graph-types": "2.40.0",
"@ngtools/webpack": "17.3.17",
"@ngtools/webpack": "19.0.0",
"@types/inquirer": "8.2.10",
"@types/jest": "29.5.14",
"@types/lowdb": "1.0.15",
"@types/node": "22.13.1",
"@types/node": "20.17.9",
"@types/node-fetch": "2.6.12",
"@types/node-forge": "1.3.11",
"@types/proper-lockfile": "4.1.4",
"@types/tldjs": "2.3.4",
"@typescript-eslint/eslint-plugin": "8.32.1",
"@typescript-eslint/parser": "8.32.1",
"@typescript-eslint/eslint-plugin": "5.62.0",
"@typescript-eslint/parser": "5.62.0",
"clean-webpack-plugin": "4.0.0",
"concurrently": "9.1.2",
"concurrently": "9.1.0",
"copy-webpack-plugin": "12.0.2",
"cross-env": "7.0.3",
"css-loader": "7.1.2",
"dotenv": "16.5.0",
"electron": "34.1.1",
"dotenv": "16.4.5",
"electron": "28.3.3",
"electron-builder": "24.13.3",
"electron-log": "5.4.1",
"electron-log": "5.2.4",
"electron-reload": "2.0.0-alpha.1",
"electron-store": "8.2.0",
"electron-updater": "6.6.2",
"electron-updater": "6.3.9",
"eslint": "8.57.1",
"eslint-config-prettier": "10.1.5",
"eslint-import-resolver-typescript": "3.7.0",
"eslint-config-prettier": "9.1.0",
"eslint-import-resolver-typescript": "3.6.3",
"eslint-plugin-import": "2.31.0",
"eslint-plugin-rxjs": "5.0.3",
"eslint-plugin-rxjs-angular": "2.0.1",
"form-data": "4.0.3",
"form-data": "4.0.1",
"html-loader": "5.1.0",
"html-webpack-plugin": "5.6.3",
"husky": "9.1.7",
"jest": "29.7.0",
"jest-junit": "16.0.0",
"jest-mock-extended": "3.0.7",
"jest-preset-angular": "14.5.5",
"lint-staged": "15.5.2",
"jest-preset-angular": "14.4.2",
"lint-staged": "15.2.10",
"mini-css-extract-plugin": "2.9.2",
"node-abi": "3.75.0",
"node-forge": "1.3.1",
"node-loader": "2.1.0",
"pkg": "5.8.1",
"prettier": "3.5.3",
"rimraf": "6.0.1",
"rxjs": "7.8.2",
"prettier": "3.3.3",
"rimraf": "5.0.10",
"rxjs": "7.8.1",
"sass": "1.79.4",
"sass-loader": "16.0.5",
"ts-jest": "29.4.0",
"ts-loader": "9.5.2",
"sass-loader": "16.0.4",
"ts-jest": "29.2.5",
"ts-loader": "9.5.1",
"tsconfig-paths-webpack-plugin": "4.2.0",
"type-fest": "4.41.0",
"typescript": "5.4.5",
"webpack": "5.97.1",
"webpack-cli": "6.0.1",
"type-fest": "4.30.0",
"typescript": "5.5.4",
"typescript-transform-paths": "3.5.2",
"webpack": "5.95.0",
"webpack-cli": "5.1.4",
"webpack-merge": "6.0.1",
"webpack-node-externals": "3.0.0",
"zone.js": "0.14.10"
"zone.js": "0.15.0",
"webpack-node-externals": "3.0.0"
},
"dependencies": {
"@angular/animations": "17.3.12",
"@angular/cdk": "17.3.10",
"@angular/common": "17.3.12",
"@angular/compiler": "17.3.12",
"@angular/core": "17.3.12",
"@angular/forms": "17.3.12",
"@angular/platform-browser": "17.3.12",
"@angular/platform-browser-dynamic": "17.3.12",
"@angular/router": "17.3.12",
"@angular/animations": "19.0.3",
"@angular/cdk": "19.0.2",
"@angular/common": "19.0.3",
"@angular/compiler": "19.0.3",
"@angular/core": "19.0.3",
"@angular/forms": "19.0.3",
"@angular/platform-browser": "19.0.3",
"@angular/platform-browser-dynamic": "19.0.3",
"@angular/router": "19.0.3",
"@microsoft/microsoft-graph-client": "3.0.7",
"big-integer": "1.6.52",
"bootstrap": "5.3.3",
"browser-hrtime": "1.1.8",
"chalk": "4.1.2",
"commander": "13.1.0",
"core-js": "3.42.0",
"form-data": "4.0.3",
"google-auth-library": "9.15.1",
"googleapis": "144.0.0",
"https-proxy-agent": "7.0.6",
"commander": "12.1.0",
"core-js": "3.38.1",
"form-data": "4.0.1",
"google-auth-library": "7.14.1",
"googleapis": "73.0.0",
"https-proxy-agent": "7.0.5",
"inquirer": "8.2.6",
"keytar": "7.9.0",
"ldapts": "7.4.0",
"ldapts": "7.2.1",
"lowdb": "1.0.0",
"ngx-toastr": "19.0.0",
"ngx-toastr": "17.0.2",
"node-fetch": "2.7.0",
"proper-lockfile": "4.1.2",
"rxjs": "7.8.2",
"rxjs": "7.8.1",
"tldjs": "2.3.1",
"zone.js": "0.14.10",
"parse5": "7.2.1"
"zone.js": "0.15.0"
},
"engines": {
"node": "~22.13.0",
"node": "~20.18.0",
"npm": "~10"
},
"lint-staged": {

View File

@@ -1,7 +1,7 @@
{
"name": "@bitwarden/directory-connector",
"version": "2.9.5",
"lockfileVersion": 3,
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {

View File

@@ -1,6 +0,0 @@
import { DirectoryType } from "@/src/enums/directoryType";
import { IDirectoryService } from "@/src/services/directory.service";
export abstract class DirectoryFactoryService {
abstract createService(type: DirectoryType): IDirectoryService;
}

View File

@@ -1,17 +0,0 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
export interface RequestBuilderOptions {
removeDisabled: boolean;
overwriteExisting: boolean;
}
export abstract class RequestBuilder {
buildRequest: (
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
) => OrganizationImportRequest[];
}

View File

@@ -25,11 +25,6 @@ import { ElectronRendererStorageService } from "@/jslib/electron/src/services/el
import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";
import { DirectoryFactoryService } from "@/src/abstractions/directory-factory.service";
import { BatchRequestBuilder } from "@/src/services/batch-request-builder";
import { DefaultDirectoryFactoryService } from "@/src/services/directory-factory.service";
import { SingleRequestBuilder } from "@/src/services/single-request-builder";
import { AuthService as AuthServiceAbstraction } from "../../abstractions/auth.service";
import { StateService as StateServiceAbstraction } from "../../abstractions/state.service";
import { Account } from "../../models/account";
@@ -173,15 +168,13 @@ export function initFactory(
provide: SyncService,
useClass: SyncService,
deps: [
LogServiceAbstraction,
CryptoFunctionServiceAbstraction,
ApiServiceAbstraction,
MessagingServiceAbstraction,
I18nServiceAbstraction,
EnvironmentServiceAbstraction,
StateServiceAbstraction,
BatchRequestBuilder,
SingleRequestBuilder,
DirectoryFactoryService,
],
}),
safeProvider(AuthGuardService),
@@ -222,19 +215,6 @@ export function initFactory(
StateMigrationServiceAbstraction,
],
}),
safeProvider({
provide: SingleRequestBuilder,
deps: [],
}),
safeProvider({
provide: BatchRequestBuilder,
deps: [],
}),
safeProvider({
provide: DirectoryFactoryService,
useClass: DefaultDirectoryFactoryService,
deps: [LogServiceAbstraction, I18nServiceAbstraction, StateServiceAbstraction],
}),
] satisfies SafeProvider[],
})
export class ServicesModule {}

View File

@@ -22,15 +22,18 @@
class="btn btn-primary"
[disabled]="startForm.loading"
>
<i class="bwi bwi-play bwi-fw" [hidden]="startForm.loading"></i>
<i class="bwi bwi-spinner bwi-fw bwi-spin" [hidden]="!startForm.loading"></i>
{{ "startSync" | i18n }}
</button>
</form>
<button type="button" (click)="stop()" class="btn btn-danger text-white">
<button type="button" (click)="stop()" class="btn btn-primary">
<i class="bwi bwi-stop bwi-fw"></i>
{{ "stopSync" | i18n }}
</button>
<form #syncForm [appApiAction]="syncPromise" class="d-inline">
<button type="button" (click)="sync()" class="btn btn-primary" [disabled]="syncForm.loading">
<i class="bwi bwi-refresh bwi-fw" [ngClass]="{ 'bwi-spin': syncForm.loading }"></i>
{{ "syncNow" | i18n }}
</button>
</form>
@@ -48,6 +51,7 @@
[disabled]="simForm.loading"
>
<i class="bwi bwi-spinner bwi-fw bwi-spin" [hidden]="!simForm.loading"></i>
<i class="bwi bwi-bug bwi-fw" [hidden]="simForm.loading"></i>
{{ "testNow" | i18n }}
</button>
</form>

View File

@@ -614,7 +614,7 @@
{{ "ex" | i18n }} exclude:joe&#64;company.com | profile.firstName eq "John"
</div>
<div class="form-text" *ngIf="directory === directoryType.GSuite">
{{ "ex" | i18n }} exclude:joe&#64;company.com | orgUnitPath=/Engineering
{{ "ex" | i18n }} exclude:joe&#64;company.com | orgName=Engineering
</div>
</div>
<div class="mb-3" [hidden]="directory != directoryType.Ldap">

View File

@@ -2,16 +2,19 @@
<ul class="nav nav-tabs mb-3">
<li class="nav-item">
<a class="nav-link" routerLink="dashboard" routerLinkActive="active">
<i class="bwi bwi-dashboard"></i>
{{ "dashboard" | i18n }}
</a>
</li>
<li class="nav-item">
<a class="nav-link" routerLink="settings" routerLinkActive="active">
<i class="bwi bwi-cogs"></i>
{{ "settings" | i18n }}
</a>
</li>
<li class="nav-item">
<a class="nav-link" routerLink="more" routerLinkActive="active">
<i class="bwi bwi-sliders"></i>
{{ "more" | i18n }}
</a>
</li>

View File

@@ -17,16 +17,12 @@ import { ConsoleLogService } from "@/jslib/node/src/cli/services/consoleLog.serv
import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";
import { DirectoryFactoryService } from "./abstractions/directory-factory.service";
import { Account } from "./models/account";
import { Program } from "./program";
import { AuthService } from "./services/auth.service";
import { BatchRequestBuilder } from "./services/batch-request-builder";
import { DefaultDirectoryFactoryService } from "./services/directory-factory.service";
import { I18nService } from "./services/i18n.service";
import { KeytarSecureStorageService } from "./services/keytarSecureStorage.service";
import { LowdbStorageService } from "./services/lowdbStorage.service";
import { SingleRequestBuilder } from "./services/single-request-builder";
import { StateService } from "./services/state.service";
import { StateMigrationService } from "./services/stateMigration.service";
import { SyncService } from "./services/sync.service";
@@ -55,9 +51,6 @@ export class Main {
syncService: SyncService;
stateService: StateService;
stateMigrationService: StateMigrationService;
directoryFactoryService: DirectoryFactoryService;
batchRequestBuilder: BatchRequestBuilder;
singleRequestBuilder: SingleRequestBuilder;
constructor() {
const applicationName = "Bitwarden Directory Connector";
@@ -153,25 +146,14 @@ export class Main {
this.stateService,
);
this.directoryFactoryService = new DefaultDirectoryFactoryService(
this.logService,
this.i18nService,
this.stateService,
);
this.batchRequestBuilder = new BatchRequestBuilder();
this.singleRequestBuilder = new SingleRequestBuilder();
this.syncService = new SyncService(
this.logService,
this.cryptoFunctionService,
this.apiService,
this.messagingService,
this.i18nService,
this.environmentService,
this.stateService,
this.batchRequestBuilder,
this.singleRequestBuilder,
this.directoryFactoryService,
);
this.program = new Program(this);

View File

@@ -18,9 +18,7 @@ import { BaseDirectoryService } from "./baseDirectory.service";
import { IDirectoryService } from "./directory.service";
const AzurePublicIdentityAuhtority = "login.microsoftonline.com";
const AzurePublicGraphEndpoint = "https://graph.microsoft.com";
const AzureGovermentIdentityAuhtority = "login.microsoftonline.us";
const AzureGovernmentGraphEndpoint = "https://graph.microsoft.us";
const NextLink = "@odata.nextLink";
const DeltaLink = "@odata.deltaLink";
@@ -209,7 +207,7 @@ export class AzureDirectoryService extends BaseDirectoryService implements IDire
if (keyword === "excludeadministrativeunit" || keyword === "includeadministrativeunit") {
for (const p of pieces) {
let auMembers = await this.client
.api(`${this.getGraphApiEndpoint()}/v1.0/directory/administrativeUnits/${p}/members`)
.api(`https://graph.microsoft.com/v1.0/directory/administrativeUnits/${p}/members`)
.get();
// eslint-disable-next-line
while (true) {
@@ -480,7 +478,7 @@ export class AzureDirectoryService extends BaseDirectoryService implements IDire
client_id: this.dirConfig.applicationId,
client_secret: this.dirConfig.key,
grant_type: "client_credentials",
scope: `${this.getGraphApiEndpoint()}/.default`,
scope: "https://graph.microsoft.com/.default",
});
const req = https
@@ -544,10 +542,4 @@ export class AzureDirectoryService extends BaseDirectoryService implements IDire
exp.setSeconds(exp.getSeconds() + expSeconds);
this.accessTokenExpiration = exp;
}
private getGraphApiEndpoint(): string {
return this.dirConfig.identityAuthority === AzureGovermentIdentityAuhtority
? AzureGovernmentGraphEndpoint
: AzurePublicGraphEndpoint;
}
}

View File

@@ -1,75 +0,0 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilder, RequestBuilderOptions } from "../abstractions/request-builder.service";
import { batchSize } from "./sync.service";
/**
* This class is responsible for batching large sync requests (>2k users) into multiple smaller
* requests to the /import endpoint. This is done to ensure we are under the default
* maximum packet size for NGINX web servers to avoid the request potentially timing out
* */
export class BatchRequestBuilder implements RequestBuilder {
buildRequest(
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
): OrganizationImportRequest[] {
if (options.overwriteExisting) {
throw new Error(
"You cannot use the 'Remove and re-add organization users during the next sync' option with large imports.",
);
}
const requests: OrganizationImportRequest[] = [];
if (users?.length > 0) {
const usersRequest = users.map((u) => {
return {
email: u.email,
externalId: u.externalId,
deleted: u.deleted || (options.removeDisabled && u.disabled),
};
});
// Partition users
for (let i = 0; i < usersRequest.length; i += batchSize) {
const u = usersRequest.slice(i, i + batchSize);
const req = new OrganizationImportRequest({
groups: [],
users: u,
largeImport: true,
overwriteExisting: false,
});
requests.push(req);
}
}
if (groups?.length > 0) {
const groupRequest = groups.map((g) => {
return {
name: g.name,
externalId: g.externalId,
memberExternalIds: Array.from(g.userMemberExternalIds),
};
});
// Partition groups
for (let i = 0; i < groupRequest.length; i += batchSize) {
const g = groupRequest.slice(i, i + batchSize);
const req = new OrganizationImportRequest({
groups: g,
users: [],
largeImport: true,
overwriteExisting: false,
});
requests.push(req);
}
}
return requests;
}
}
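A usage sketch (not from the diff), assuming batchSize is 2000 — consistent with the ">2k users" comment above and the 12-request expectation in the spec that follows; groups and users are placeholder arrays of GroupEntry / UserEntry.

import { BatchRequestBuilder } from "@/src/services/batch-request-builder";

const builder = new BatchRequestBuilder();

// 11000 users and 11000 groups -> ceil(11000 / 2000) = 6 user requests plus
// 6 group requests, 12 in total, each flagged largeImport: true.
const requests = builder.buildRequest(groups, users, {
  removeDisabled: false,
  overwriteExisting: false, // must stay false; true throws for large imports
});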

View File

@@ -1,75 +0,0 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilderOptions } from "../abstractions/request-builder.service";
import { groupSimulator, userSimulator } from "../utils/request-builder-helper";
import { BatchRequestBuilder } from "./batch-request-builder";
describe("BatchRequestBuilder", () => {
let batchRequestBuilder: BatchRequestBuilder;
beforeEach(async () => {
batchRequestBuilder = new BatchRequestBuilder();
});
const defaultOptions: RequestBuilderOptions = Object.freeze({
overwriteExisting: false,
removeDisabled: false,
});
it("BatchRequestBuilder batches requests for > 2000 users", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, defaultOptions);
expect(requests.length).toEqual(12);
});
it("BatchRequestBuilder throws error when overwriteExisting is true", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const options = { ...defaultOptions, overwriteExisting: true };
const r = () => batchRequestBuilder.buildRequest(mockGroups, mockUsers, options);
expect(r).toThrow(
"You cannot use the 'Remove and re-add organization users during the next sync' option with large imports.",
);
});
it("BatchRequestBuilder returns requests with deleted users when removeDisabled is true", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const disabledUser1 = new UserEntry();
const disabledUserEmail1 = GetUniqueString() + "@email.com";
const disabledUser2 = new UserEntry();
const disabledUserEmail2 = GetUniqueString() + "@email.com";
disabledUser1.disabled = true;
disabledUser1.email = disabledUserEmail1;
disabledUser2.disabled = true;
disabledUser2.email = disabledUserEmail2;
mockUsers[0] = disabledUser1;
mockUsers.push(disabledUser2);
const options = { ...defaultOptions, removeDisabled: true };
const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, options);
expect(requests[0].members).toContainEqual({ email: disabledUserEmail1, deleted: true });
expect(requests[1].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[3].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[4].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[5].members).toContainEqual({ email: disabledUserEmail2, deleted: true });
});
it("BatchRequestBuilder retuns an empty array when there are no users or groups", () => {
const requests = batchRequestBuilder.buildRequest([], [], defaultOptions);
expect(requests).toEqual([]);
});
});

View File

@@ -1,37 +0,0 @@
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
import { LogService } from "@/jslib/common/src/abstractions/log.service";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { StateService } from "../abstractions/state.service";
import { DirectoryType } from "../enums/directoryType";
import { AzureDirectoryService } from "./azure-directory.service";
import { GSuiteDirectoryService } from "./gsuite-directory.service";
import { LdapDirectoryService } from "./ldap-directory.service";
import { OktaDirectoryService } from "./okta-directory.service";
import { OneLoginDirectoryService } from "./onelogin-directory.service";
export class DefaultDirectoryFactoryService implements DirectoryFactoryService {
constructor(
private logService: LogService,
private i18nService: I18nService,
private stateService: StateService,
) {}
createService(directoryType: DirectoryType) {
switch (directoryType) {
case DirectoryType.GSuite:
return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.AzureActiveDirectory:
return new AzureDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Ldap:
return new LdapDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Okta:
return new OktaDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.OneLogin:
return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService);
default:
throw new Error("Invalid Directory Type");
}
}
}
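A minimal usage sketch (placeholder service instances; the dependencies are the three shown in the constructor above):

import { DirectoryType } from "@/src/enums/directoryType";
import { DefaultDirectoryFactoryService } from "@/src/services/directory-factory.service";

const factory = new DefaultDirectoryFactoryService(logService, i18nService, stateService);
const ldap = factory.createService(DirectoryType.Ldap); // returns an LdapDirectoryService
const gsuite = factory.createService(DirectoryType.GSuite); // returns a GSuiteDirectoryService
// Any unrecognized value throws "Invalid Directory Type".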

View File

@@ -141,7 +141,7 @@ export class GSuiteDirectoryService extends BaseDirectoryService implements IDir
entry.referenceId = user.id;
entry.externalId = user.id;
entry.email = user.primaryEmail != null ? user.primaryEmail.trim().toLowerCase() : null;
entry.disabled = user.suspended || user.archived || false;
entry.disabled = user.suspended || false;
entry.deleted = deleted;
return entry;
}

View File

@@ -5,7 +5,8 @@ import { LogService } from "../../jslib/common/src/abstractions/log.service";
import { groupFixtures } from "../../openldap/group-fixtures";
import { userFixtures } from "../../openldap/user-fixtures";
import { DirectoryType } from "../enums/directoryType";
import { getLdapConfiguration, getSyncConfiguration } from "../utils/test-fixtures";
import { LdapConfiguration } from "../models/ldapConfiguration";
import { SyncConfiguration } from "../models/syncConfiguration";
import { LdapDirectoryService } from "./ldap-directory.service";
import { StateService } from "./state.service";
@@ -153,3 +154,54 @@ describe("ldapDirectoryService", () => {
});
});
});
/**
* @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration.
*/
const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({
ssl: false,
startTls: false,
tlsCaPath: null,
sslAllowUnauthorized: false,
sslCertPath: null,
sslKeyPath: null,
sslCaPath: null,
hostname: "localhost",
port: 1389,
domain: null,
rootPath: "dc=bitwarden,dc=com",
currentUser: false,
username: "cn=admin,dc=bitwarden,dc=com",
password: "admin",
ad: false,
pagedSearch: false,
...(config ?? {}),
});
/**
* @returns a basic sync configuration. Can be overridden by passing in a partial configuration.
*/
const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({
users: false,
groups: false,
interval: 5,
userFilter: null,
groupFilter: null,
removeDisabled: false,
overwriteExisting: false,
largeImport: false,
// Ldap properties
groupObjectClass: "posixGroup",
userObjectClass: "person",
groupPath: null,
userPath: null,
groupNameAttribute: "cn",
userEmailAttribute: "mail",
memberAttribute: "memberUid",
useEmailPrefixSuffix: false,
emailPrefixAttribute: "sAMAccountName",
emailSuffix: null,
creationDateAttribute: "whenCreated",
revisionDateAttribute: "whenChanged",
...(config ?? {}),
});
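A small example of overriding the fixtures (hypothetical values): any subset of fields can be replaced through the partial-config parameter.

const ldapConfig = getLdapConfiguration({ startTls: true, hostname: "ldap.internal.example" });
const syncConfig = getSyncConfiguration({ users: true, groups: true, largeImport: true });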

View File

@@ -18,11 +18,6 @@ import { IDirectoryService } from "./directory.service";
const UserControlAccountDisabled = 2;
/**
* The attribute name for the unique identifier used by Active Directory.
*/
const ActiveDirectoryExternalId = "objectGUID";
export class LdapDirectoryService implements IDirectoryService {
private client: ldapts.Client;
private dirConfig: LdapConfiguration;
@@ -245,7 +240,7 @@ export class LdapDirectoryService implements IDirectoryService {
* otherwise it falls back to the provided referenceId.
*/
private getExternalId(searchEntry: ldapts.Entry, referenceId: string) {
const attr = this.getAttr<Buffer>(searchEntry, ActiveDirectoryExternalId);
const attr = this.getAttr<Buffer>(searchEntry, "objectGUID");
if (attr != null) {
return this.bufToGuid(attr);
} else {
@@ -363,9 +358,6 @@ export class LdapDirectoryService implements IDirectoryService {
filter: filter,
scope: "sub",
paged: this.dirConfig.pagedSearch,
// We need to expressly tell ldapts what attributes to return as Buffer objects,
// otherwise they are returned as strings
explicitBufferAttributes: [ActiveDirectoryExternalId],
};
const { searchEntries } = await this.client.search(path, options, controls);
return searchEntries.map((e) => processEntry(e)).filter((e) => e != null);

View File

@@ -1,79 +0,0 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilderOptions } from "../abstractions/request-builder.service";
import { groupSimulator, userSimulator } from "../utils/request-builder-helper";
import { SingleRequestBuilder } from "./single-request-builder";
describe("SingleRequestBuilder", () => {
let singleRequestBuilder: SingleRequestBuilder;
beforeEach(async () => {
singleRequestBuilder = new SingleRequestBuilder();
});
const defaultOptions: RequestBuilderOptions = Object.freeze({
overwriteExisting: false,
removeDisabled: false,
});
it("SingleRequestBuilder returns single request for 200 users", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const requests = singleRequestBuilder.buildRequest(mockGroups, mockUsers, defaultOptions);
expect(requests.length).toEqual(1);
});
it("SingleRequestBuilder returns request with overwriteExisting enabled", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const options = { ...defaultOptions, overwriteExisting: true };
const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];
expect(request.overwriteExisting).toBe(true);
});
it("SingleRequestBuilder returns request with deleted user when removeDisabled is true", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const disabledUser = new UserEntry();
const disabledUserEmail = GetUniqueString() + "@example.com";
disabledUser.disabled = true;
disabledUser.email = disabledUserEmail;
mockUsers.push(disabledUser);
const options = { ...defaultOptions, removeDisabled: true };
const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];
expect(request.members.length).toEqual(201);
expect(request.members.pop()).toEqual(
expect.objectContaining({ email: disabledUserEmail, deleted: true }),
);
expect(request.overwriteExisting).toBe(false);
});
it("SingleRequestBuilder returns request with deleted user and overwriteExisting enabled when overwriteExisting and removeDisabled are true", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const disabledUser = new UserEntry();
const disabledUserEmail = GetUniqueString() + "@example.com";
disabledUser.disabled = true;
disabledUser.email = disabledUserEmail;
mockUsers.push(disabledUser);
const options = { overwriteExisting: true, removeDisabled: true };
const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];
expect(request.members.pop()).toEqual(
expect.objectContaining({ email: disabledUserEmail, deleted: true }),
);
expect(request.overwriteExisting).toBe(true);
});
});

View File

@@ -1,41 +0,0 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilder, RequestBuilderOptions } from "../abstractions/request-builder.service";
/**
* This class is responsible for building small (<2k users) syncs as a single
* request to the /import endpoint. This is done to be backwards compatible with
* existing functionality for sync requests that are sufficiently small enough to not
* exceed default maximum packet size limits on NGINX web servers.
* */
export class SingleRequestBuilder implements RequestBuilder {
buildRequest(
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
): OrganizationImportRequest[] {
return [
new OrganizationImportRequest({
groups: (groups ?? []).map((g) => {
return {
name: g.name,
externalId: g.externalId,
memberExternalIds: Array.from(g.userMemberExternalIds),
};
}),
users: (users ?? []).map((u) => {
return {
email: u.email,
externalId: u.externalId,
deleted: u.deleted || (options.removeDisabled && u.disabled),
};
}),
overwriteExisting: options.overwriteExisting,
largeImport: false,
}),
];
}
}
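A usage sketch (placeholder groups/users arrays): the single-request path wraps everything in one OrganizationImportRequest and forwards overwriteExisting unchanged.

import { SingleRequestBuilder } from "@/src/services/single-request-builder";

const builder = new SingleRequestBuilder();
const [request] = builder.buildRequest(groups, users, {
  removeDisabled: true, // disabled users come through with deleted: true
  overwriteExisting: true, // forwarded as-is
});
// request.largeImport === false; every user appears exactly once in request.members.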

View File

@@ -1,132 +0,0 @@
import { mock, MockProxy } from "jest-mock-extended";
import { ApiService } from "@/jslib/common/src/abstractions/api.service";
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
import { EnvironmentService } from "@/jslib/common/src/services/environment.service";
import { I18nService } from "../../jslib/common/src/abstractions/i18n.service";
import { LogService } from "../../jslib/common/src/abstractions/log.service";
import { groupFixtures } from "../../openldap/group-fixtures";
import { userFixtures } from "../../openldap/user-fixtures";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { DirectoryType } from "../enums/directoryType";
import { getLdapConfiguration, getSyncConfiguration } from "../utils/test-fixtures";
import { BatchRequestBuilder } from "./batch-request-builder";
import { LdapDirectoryService } from "./ldap-directory.service";
import { SingleRequestBuilder } from "./single-request-builder";
import { StateService } from "./state.service";
import { SyncService } from "./sync.service";
import * as constants from "./sync.service";
describe("SyncService", () => {
let logService: MockProxy<LogService>;
let i18nService: MockProxy<I18nService>;
let stateService: MockProxy<StateService>;
let cryptoFunctionService: MockProxy<CryptoFunctionService>;
let apiService: MockProxy<ApiService>;
let messagingService: MockProxy<MessagingService>;
let environmentService: MockProxy<EnvironmentService>;
let directoryFactory: MockProxy<DirectoryFactoryService>;
let batchRequestBuilder: BatchRequestBuilder;
let singleRequestBuilder: SingleRequestBuilder;
let syncService: SyncService;
let directoryService: LdapDirectoryService;
const originalBatchSize = constants.batchSize;
beforeEach(() => {
logService = mock();
i18nService = mock();
stateService = mock();
cryptoFunctionService = mock();
apiService = mock();
messagingService = mock();
environmentService = mock();
directoryFactory = mock();
stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
stateService.getOrganizationId.mockResolvedValue("fakeId");
directoryService = new LdapDirectoryService(logService, i18nService, stateService);
directoryFactory.createService.mockReturnValue(directoryService);
batchRequestBuilder = new BatchRequestBuilder();
singleRequestBuilder = new SingleRequestBuilder();
syncService = new SyncService(
cryptoFunctionService,
apiService,
messagingService,
i18nService,
environmentService,
stateService,
batchRequestBuilder,
singleRequestBuilder,
directoryFactory,
);
});
describe("OpenLdap integration: ", () => {
it("with largeImport disabled matches directory fixture data", async () => {
stateService.getDirectory
.calledWith(DirectoryType.Ldap)
.mockResolvedValue(getLdapConfiguration());
stateService.getSync.mockResolvedValue(
getSyncConfiguration({
users: true,
groups: true,
largeImport: false,
overwriteExisting: false,
}),
);
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be differet from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
const syncResult = await syncService.sync(false, false);
expect(syncResult).toEqual([groupFixtures, userFixtures]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(
expect.objectContaining({ overwriteExisting: false }),
);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
});
it("with largeImport enabled matches directory fixture data", async () => {
stateService.getDirectory
.calledWith(DirectoryType.Ldap)
.mockResolvedValue(getLdapConfiguration());
stateService.getSync.mockResolvedValue(
getSyncConfiguration({
users: true,
groups: true,
largeImport: true,
overwriteExisting: false,
}),
);
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be differet from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
// @ts-expect-error This is a workaround to make the batchsize smaller to trigger the batching logic since its a const.
constants.batchSize = 4;
const syncResult = await syncService.sync(false, false);
expect(syncResult).toEqual([groupFixtures, userFixtures]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(
expect.objectContaining({ overwriteExisting: false }),
);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
// @ts-expect-error Reset batch size to original state.
constants.batchSize = originalBatchSize;
});
});
});

View File

@@ -1,135 +0,0 @@
import { mock, MockProxy } from "jest-mock-extended";
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { ApiService } from "@/jslib/common/src/services/api.service";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { DirectoryType } from "../enums/directoryType";
import { getSyncConfiguration } from "../utils/test-fixtures";
import { BatchRequestBuilder } from "./batch-request-builder";
import { I18nService } from "./i18n.service";
import { LdapDirectoryService } from "./ldap-directory.service";
import { SingleRequestBuilder } from "./single-request-builder";
import { StateService } from "./state.service";
import { SyncService } from "./sync.service";
import * as constants from "./sync.service";
import { groupFixtures } from "@/openldap/group-fixtures";
import { userFixtures } from "@/openldap/user-fixtures";
describe("SyncService", () => {
let cryptoFunctionService: MockProxy<CryptoFunctionService>;
let apiService: MockProxy<ApiService>;
let messagingService: MockProxy<MessagingService>;
let i18nService: MockProxy<I18nService>;
let environmentService: MockProxy<EnvironmentService>;
let stateService: MockProxy<StateService>;
let directoryFactory: MockProxy<DirectoryFactoryService>;
let batchRequestBuilder: MockProxy<BatchRequestBuilder>;
let singleRequestBuilder: MockProxy<SingleRequestBuilder>;
let syncService: SyncService;
const originalBatchSize = constants.batchSize;
beforeEach(() => {
cryptoFunctionService = mock();
apiService = mock();
messagingService = mock();
i18nService = mock();
environmentService = mock();
stateService = mock();
directoryFactory = mock();
batchRequestBuilder = mock();
singleRequestBuilder = mock();
stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
stateService.getOrganizationId.mockResolvedValue("fakeId");
const mockDirectoryService = mock<LdapDirectoryService>();
mockDirectoryService.getEntries.mockResolvedValue([groupFixtures, userFixtures]);
directoryFactory.createService.mockReturnValue(mockDirectoryService);
syncService = new SyncService(
cryptoFunctionService,
apiService,
messagingService,
i18nService,
environmentService,
stateService,
batchRequestBuilder,
singleRequestBuilder,
directoryFactory,
);
});
it("Sync posts single request successfully for unique hashes", async () => {
stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
const mockRequest: OrganizationImportRequest[] = [
{
members: [],
groups: [],
overwriteExisting: true,
largeImport: true,
},
];
singleRequestBuilder.buildRequest.mockReturnValue(mockRequest);
await syncService.sync(true, false);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
});
it("Sync posts multiple request successfully for unique hashes", async () => {
stateService.getSync.mockResolvedValue(
getSyncConfiguration({ groups: true, users: true, largeImport: true }),
);
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
// @ts-expect-error This is a workaround to make batchSize smaller to trigger the batching logic, since it is a const.
constants.batchSize = 4;
const mockRequests = new Array(6).fill({
members: [],
groups: [],
overwriteExisting: true,
largeImport: true,
});
batchRequestBuilder.buildRequest.mockReturnValue(mockRequests);
await syncService.sync(true, false);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[0]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[1]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[2]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[3]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[4]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[5]);
// @ts-expect-error Reset batch size back to original value.
constants.batchSize = originalBatchSize;
});
it("does not post for the same hash", async () => {
stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be the same as the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("AA==");
await syncService.sync(true, false);
expect(apiService.postPublicImportDirectory).not.toHaveBeenCalled();
});
});

View File

@@ -2,40 +2,35 @@ import { ApiService } from "@/jslib/common/src/abstractions/api.service";
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
import { LogService } from "@/jslib/common/src/abstractions/log.service";
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
import { Utils } from "@/jslib/common/src/misc/utils";
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { StateService } from "../abstractions/state.service";
import { DirectoryType } from "../enums/directoryType";
import { GroupEntry } from "../models/groupEntry";
import { SyncConfiguration } from "../models/syncConfiguration";
import { UserEntry } from "../models/userEntry";
import { BatchRequestBuilder } from "./batch-request-builder";
import { SingleRequestBuilder } from "./single-request-builder";
export interface HashResult {
hash: string;
hashLegacy: string;
}
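// Combined group + user count above which a large import is split into batched requests (see buildRequest).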
export const batchSize = 2000;
import { AzureDirectoryService } from "./azure-directory.service";
import { IDirectoryService } from "./directory.service";
import { GSuiteDirectoryService } from "./gsuite-directory.service";
import { LdapDirectoryService } from "./ldap-directory.service";
import { OktaDirectoryService } from "./okta-directory.service";
import { OneLoginDirectoryService } from "./onelogin-directory.service";
export class SyncService {
private dirType: DirectoryType;
constructor(
private logService: LogService,
private cryptoFunctionService: CryptoFunctionService,
private apiService: ApiService,
private messagingService: MessagingService,
private i18nService: I18nService,
private environmentService: EnvironmentService,
private stateService: StateService,
private batchRequestBuilder: BatchRequestBuilder,
private singleRequestBuilder: SingleRequestBuilder,
private directoryFactory: DirectoryFactoryService,
) {}
async sync(force: boolean, test: boolean): Promise<[GroupEntry[], UserEntry[]]> {
@@ -44,7 +39,7 @@ export class SyncService {
throw new Error("No directory configured.");
}
const directoryService = this.directoryFactory.createService(this.dirType);
const directoryService = this.getDirectoryService();
if (directoryService == null) {
throw new Error("Cannot load directory service.");
}
@@ -83,15 +78,42 @@ export class SyncService {
return [groups, users];
}
const reqs = this.buildRequest(groups, users, syncConfig);
const req = this.buildRequest(
groups,
users,
syncConfig.removeDisabled,
syncConfig.overwriteExisting,
syncConfig.largeImport,
);
const reqJson = JSON.stringify(req);
const result: HashResult = await this.generateHash(reqs);
const orgId = await this.stateService.getOrganizationId();
if (orgId == null) {
throw new Error("Organization not set.");
}
if (result.hash && (await this.isNewHash(result))) {
for (const req of reqs) {
await this.apiService.postPublicImportDirectory(req);
}
await this.stateService.setLastSyncHash(result.hash);
// TODO: Remove hashLegacy once we're sure clients have had time to sync new hashes
let hashLegacy: string = null;
const hashBuffLegacy = await this.cryptoFunctionService.hash(
this.environmentService.getApiUrl() + reqJson,
"sha256",
);
if (hashBuffLegacy != null) {
hashLegacy = Utils.fromBufferToB64(hashBuffLegacy);
}
let hash: string = null;
const hashBuff = await this.cryptoFunctionService.hash(
this.environmentService.getApiUrl() + orgId + reqJson,
"sha256",
);
if (hashBuff != null) {
hash = Utils.fromBufferToB64(hashBuff);
}
const lastHash = await this.stateService.getLastSyncHash();
if (lastHash == null || (hash !== lastHash && hashLegacy !== lastHash)) {
await this.apiService.postPublicImportDirectory(req);
await this.stateService.setLastSyncHash(hash);
} else {
groups = null;
users = null;
@@ -111,40 +133,6 @@ export class SyncService {
}
}
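// Hashes the serialized request(s) with SHA-256 and base64-encodes the result: the current hash covers the
// API URL, organization id, and request JSON; the legacy hash omits the organization id.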
async generateHash(reqs: OrganizationImportRequest[]): Promise<HashResult> {
const reqJson = JSON.stringify(reqs?.length === 1 ? reqs[0] : reqs);
const orgId = await this.stateService.getOrganizationId();
if (orgId == null) {
throw new Error("Organization not set.");
}
// TODO: Remove hashLegacy once we're sure clients have had time to sync new hashes
let hashLegacy: string = null;
const hashBuffLegacy = await this.cryptoFunctionService.hash(
this.environmentService.getApiUrl() + reqJson,
"sha256",
);
if (hashBuffLegacy != null) {
hashLegacy = Utils.fromBufferToB64(hashBuffLegacy);
}
let hash: string = null;
const hashBuff = await this.cryptoFunctionService.hash(
this.environmentService.getApiUrl() + orgId + reqJson,
"sha256",
);
if (hashBuff != null) {
hash = Utils.fromBufferToB64(hashBuff);
}
return { hash, hashLegacy };
}
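// A sync is posted only when the hash stored from the previous sync matches neither the current nor the legacy hash.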
async isNewHash(hashResult: HashResult): Promise<boolean> {
const lastHash = await this.stateService.getLastSyncHash();
return lastHash == null || (hashResult.hash !== lastHash && hashResult.hashLegacy !== lastHash);
}
private removeDuplicateUsers(users: UserEntry[]) {
if (users == null) {
return null;
@@ -210,16 +198,48 @@ export class SyncService {
return allUsers;
}
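// Instantiates the directory service implementation for the configured directory type, or returns null when the type is unsupported.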
private getDirectoryService(): IDirectoryService {
switch (this.dirType) {
case DirectoryType.GSuite:
return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.AzureActiveDirectory:
return new AzureDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Ldap:
return new LdapDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Okta:
return new OktaDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.OneLogin:
return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService);
default:
return null;
}
}
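// Batched variant: when largeImport is enabled and the combined group + user count exceeds batchSize,
// the batch request builder splits the sync into multiple requests; otherwise a single request is built.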
private buildRequest(
groups: GroupEntry[],
users: UserEntry[],
syncConfig: SyncConfiguration,
): OrganizationImportRequest[] {
if (syncConfig.largeImport && (groups?.length ?? 0) + (users?.length ?? 0) > batchSize) {
return this.batchRequestBuilder.buildRequest(groups, users, syncConfig);
} else {
return this.singleRequestBuilder.buildRequest(groups, users, syncConfig);
}
removeDisabled: boolean,
overwriteExisting: boolean,
largeImport = false,
) {
return new OrganizationImportRequest({
groups: (groups ?? []).map((g) => {
return {
name: g.name,
externalId: g.externalId,
memberExternalIds: Array.from(g.userMemberExternalIds),
};
}),
users: (users ?? []).map((u) => {
return {
email: u.email,
externalId: u.externalId,
deleted: u.deleted || (removeDisabled && u.disabled),
};
}),
overwriteExisting: overwriteExisting,
largeImport: largeImport,
});
}
private async saveSyncTimes(syncConfig: SyncConfiguration, time: Date) {

View File

@@ -1,26 +0,0 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";
import { GroupEntry } from "../models/groupEntry";
import { UserEntry } from "../models/userEntry";
export function userSimulator(userCount: number): UserEntry[] {
const users: UserEntry[] = [];
while (userCount > 0) {
const userEntry = new UserEntry();
userEntry.email = GetUniqueString() + "@example.com";
users.push(userEntry);
userCount--;
}
return users;
}
export function groupSimulator(groupCount: number): GroupEntry[] {
const groups: GroupEntry[] = [];
while (groupCount > 0) {
const groupEntry = new GroupEntry();
groupEntry.name = GetUniqueString();
groups.push(groupEntry);
groupCount--;
}
return groups;
}
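// Illustrative usage, e.g. to generate enough entries to exercise batching in tests:
//   const users = userSimulator(5000);
//   const groups = groupSimulator(100);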

View File

@@ -1,53 +0,0 @@
import { LdapConfiguration } from "../models/ldapConfiguration";
import { SyncConfiguration } from "../models/syncConfiguration";
/**
* @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration.
*/
export const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({
ssl: false,
startTls: false,
tlsCaPath: null,
sslAllowUnauthorized: false,
sslCertPath: null,
sslKeyPath: null,
sslCaPath: null,
hostname: "localhost",
port: 1389,
domain: null,
rootPath: "dc=bitwarden,dc=com",
currentUser: false,
username: "cn=admin,dc=bitwarden,dc=com",
password: "admin",
ad: false,
pagedSearch: false,
...(config ?? {}),
});
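// Illustrative override: getLdapConfiguration({ ssl: true, port: 636 })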
/**
* @returns a basic sync configuration. Can be overridden by passing in a partial configuration.
*/
export const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({
users: false,
groups: false,
interval: 5,
userFilter: null,
groupFilter: null,
removeDisabled: false,
overwriteExisting: false,
largeImport: false,
// Ldap properties
groupObjectClass: "posixGroup",
userObjectClass: "person",
groupPath: null,
userPath: null,
groupNameAttribute: "cn",
userEmailAttribute: "mail",
memberAttribute: "memberUid",
useEmailPrefixSuffix: false,
emailPrefixAttribute: "sAMAccountName",
emailSuffix: null,
creationDateAttribute: "whenCreated",
revisionDateAttribute: "whenChanged",
...(config ?? {}),
});
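// Illustrative override: getSyncConfiguration({ users: true, groups: true, largeImport: true })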

View File

@@ -17,7 +17,12 @@
"paths": {
"tldjs": ["@/jslib/src/misc/tldjs.noop"],
"@/*": ["./*"]
}
},
"plugins": [
{
"transform": "typescript-transform-paths"
}
]
},
"include": ["src", "jslib", "scripts", "./*.ts"]
}