mirror of https://github.com/bitwarden/directory-connector
synced 2025-12-05 23:53:21 +00:00

Compare commits: v2024.10.0 ... v2025.6.0 (95 commits)
| SHA1 |
|---|
| ed58d7c758 |
| cd6bbd792a |
| 3b3ea8ac47 |
| 5f9adf9ab7 |
| 1deb22a446 |
| 115a60316d |
| e11225b2ce |
| 4909d306ba |
| caa8c4d070 |
| ed1d941282 |
| f6f874360f |
| 18b110e70d |
| 83c42cec73 |
| 2d80fceb8c |
| 0489f0cbe9 |
| c5d4cb9fb6 |
| 16d6647090 |
| a08673917b |
| 27e1ab9bcf |
| 3573e201a6 |
| 23d285a9f6 |
| 527d2cb75d |
| 42efd689e3 |
| 2fe980dea6 |
| 9446eedec7 |
| 41ee0d82d5 |
| 40a85bb875 |
| 50be1218e2 |
| e4abb2c751 |
| 23c591f903 |
| 2ea2fd701c |
| 3b74be446e |
| 2651a53f27 |
| 09ed8326c3 |
| c5a65a85ad |
| 3ae90cbb4c |
| 99dbb3162e |
| f146d41b66 |
| b35cf8e995 |
| f7ee5dcd92 |
| 61bbff771e |
| 2047b6644e |
| 26dd9662cf |
| 70073fb570 |
| 8642b9d7aa |
| d77b50c540 |
| ed935d998a |
| 682da52040 |
| 531619af1d |
| cf54858cc5 |
| 6cc022b135 |
| a8a4390624 |
| f9d817f0b1 |
| 112bda1137 |
| 23713d92fa |
| 6ebc9631aa |
| e8579f11d3 |
| 6b2c7a5f00 |
| 2a1a5bf064 |
| 1464d72b27 |
| f5cbd8f03d |
| fdbbef68c1 |
| efb412684d |
| 79f7a2b495 |
| 4342734412 |
| 62f14e5043 |
| c2b22518fe |
| 37c992f16b |
| 69156677ac |
| aaed7b13ea |
| 096d2a03ab |
| bd5bcbebd9 |
| bb9ece6078 |
| 40de47e6e3 |
| 094ed57e03 |
| 96a38e2d76 |
| 9e200c8705 |
| ca945318ed |
| 04abed9251 |
| 9b08ca6db8 |
| 0cbe6e9d33 |
| dda6dd99ed |
| 5492466276 |
| ef571ec0c3 |
| f2bea1b6d7 |
| 07a1ae6dea |
| f23997dd72 |
| 18547d6eaa |
| c3a4f25160 |
| e57a52e483 |
| ff1380ee67 |
| 2269b82e7e |
| 8ab3516377 |
| 91dfd7e0b7 |
| 6db28408e6 |
.github/renovate.json (vendored, 6 changed lines)
@@ -7,6 +7,12 @@
"groupName": "gh minor",
"matchManagers": ["github-actions"],
"matchUpdateTypes": ["minor", "patch"]
},
{
"groupName": "Google Libraries",
"matchPackagePatterns": ["google-auth-library", "googleapis"],
"matchManagers": ["npm"],
"groupSlug": "google-libraries"
}
]
}
.github/workflows/build.yml (vendored, 96 changed lines)
@@ -5,6 +5,8 @@ on:
push:
branches:
- "main"
- "rc"
- "hotfix-rc"
workflow_dispatch: {}

jobs:
@@ -13,7 +15,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Set up CLOC
run: |
@@ -31,7 +33,7 @@ jobs:
package_version: ${{ steps.retrieve-version.outputs.package_version }}
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Get Package Version
id: retrieve-version
@@ -50,10 +52,10 @@ jobs:
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -121,14 +123,14 @@ jobs:
fi

- name: Upload Linux Zip to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error

- name: Upload Linux checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-linux-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-linux-sha256-${{ env._PACKAGE_VERSION }}.txt
@@ -145,10 +147,10 @@ jobs:
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -209,14 +211,14 @@ jobs:
fi

- name: Upload Mac Zip to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error

- name: Upload Mac checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-macos-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-macos-sha256-${{ env._PACKAGE_VERSION }}.txt
@@ -232,7 +234,7 @@ jobs:
_WIN_PKG_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Setup Windows builder
run: |
@@ -240,7 +242,7 @@ jobs:
choco install reshack --no-progress

- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -353,14 +355,14 @@ jobs:
-t sha256 | Out-File ./dist-cli/bwdc-windows-sha256-${env:_PACKAGE_VERSION}.txt

- name: Upload Windows Zip to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error

- name: Upload Windows checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-windows-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-windows-sha256-${{ env._PACKAGE_VERSION }}.txt
@@ -377,10 +379,10 @@ jobs:
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -402,39 +404,55 @@ jobs:
- name: Install Node dependencies
run: npm install

- name: Login to Azure
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}

- name: Retrieve secrets
id: retrieve-secrets
uses: bitwarden/gh-actions/get-keyvault-secrets@main
with:
keyvault: "bitwarden-ci"
secrets: "code-signing-vault-url,
code-signing-client-id,
code-signing-tenant-id,
code-signing-client-secret,
code-signing-cert-name"

- name: Build & Sign
run: npm run dist:win
env:
ELECTRON_BUILDER_SIGN: 1
SIGNING_VAULT_URL: ${{ secrets.SIGNING_VAULT_URL }}
SIGNING_CLIENT_ID: ${{ secrets.SIGNING_CLIENT_ID }}
SIGNING_TENANT_ID: ${{ secrets.SIGNING_TENANT_ID }}
SIGNING_CLIENT_SECRET: ${{ secrets.SIGNING_CLIENT_SECRET }}
SIGNING_CERT_NAME: ${{ secrets.SIGNING_CERT_NAME }}
SIGNING_VAULT_URL: ${{ steps.retrieve-secrets.outputs.code-signing-vault-url }}
SIGNING_CLIENT_ID: ${{ steps.retrieve-secrets.outputs.code-signing-client-id }}
SIGNING_TENANT_ID: ${{ steps.retrieve-secrets.outputs.code-signing-tenant-id }}
SIGNING_CLIENT_SECRET: ${{ steps.retrieve-secrets.outputs.code-signing-client-secret }}
SIGNING_CERT_NAME: ${{ steps.retrieve-secrets.outputs.code-signing-cert-name }}

- name: Upload Portable Executable to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error

- name: Upload Installer Executable to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error

- name: Upload Installer Executable Blockmap to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
if-no-files-found: error

- name: Upload latest auto-update artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: latest.yml
path: ./dist/latest.yml
@@ -451,10 +469,10 @@ jobs:
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -481,14 +499,14 @@ jobs:
run: npm run dist:lin

- name: Upload AppImage
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
if-no-files-found: error

- name: Upload latest auto-update artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: latest-linux.yml
path: ./dist/latest-linux.yml
@@ -505,10 +523,10 @@ jobs:
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -575,7 +593,7 @@ jobs:

- name: Install Node dependencies
run: npm install

- name: Set up private auth key
run: |
mkdir ~/private_keys
@@ -592,28 +610,28 @@ jobs:
CSC_FOR_PULL_REQUEST: true

- name: Upload .zip artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
if-no-files-found: error

- name: Upload .dmg artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
if-no-files-found: error

- name: Upload .dmg Blockmap artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
if-no-files-found: error

- name: Upload latest auto-update artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: latest-mac.yml
path: ./dist/latest-mac.yml
@@ -634,7 +652,11 @@ jobs:
- macos-gui
steps:
- name: Check if any job failed
if: github.ref == 'refs/heads/main' && contains(needs.*.result, 'failure')
if: |
(github.ref == 'refs/heads/main'
|| github.ref == 'refs/heads/rc'
|| github.ref == 'refs/heads/hotfix-rc')
&& contains(needs.*.result, 'failure')
run: exit 1

- name: Login to Azure - CI subscription
.github/workflows/integration-test.yml (vendored, 38 changed lines)
@@ -8,39 +8,21 @@ on:
paths:
- ".github/workflows/integration-test.yml" # this file
- "src/services/ldap-directory.service*" # we only have integration for LDAP testing at the moment
- "./openldap*" # any change to test fixtures
- "./openldap/**/*" # any change to test fixtures
- "./docker-compose.yml" # any change to Docker configuration
pull_request:
paths:
- ".github/workflows/integration-test.yml" # this file
- "src/services/ldap-directory.service*" # we only have integration for LDAP testing at the moment
- "./openldap*" # any change to test fixtures
- "./openldap/**/*" # any change to test fixtures
- "./docker-compose.yml" # any change to Docker configuration

jobs:
check-test-secrets:
name: Check for test secrets
runs-on: ubuntu-22.04
outputs:
available: ${{ steps.check-test-secrets.outputs.available }}
permissions:
contents: read

steps:
- name: Check
id: check-test-secrets
run: |
if [ "${{ secrets.CODECOV_TOKEN }}" != '' ]; then
echo "available=true" >> $GITHUB_OUTPUT;
else
echo "available=false" >> $GITHUB_OUTPUT;
fi

testing:
name: Run tests
if: ${{ startsWith(github.head_ref, 'version_bump_') == false }}
runs-on: ubuntu-22.04
needs: check-test-secrets
permissions:
checks: write
contents: read
@@ -48,7 +30,7 @@ jobs:

steps:
- name: Check out repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Get Node version
id: retrieve-node-version
@@ -58,7 +40,7 @@ jobs:
echo "node_version=$NODE_VERSION" >> $GITHUB_OUTPUT

- name: Set up Node
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -80,7 +62,7 @@ jobs:

- name: Report test results
uses: dorny/test-reporter@31a54ee7ebcacc03a09ea97a7e5465a47b84aea5 # v1.9.1
if: ${{ needs.check-test-secrets.outputs.available == 'true' && !cancelled() }}
if: ${{ github.event.pull_request.head.repo.full_name == github.repository && !cancelled() }}
with:
name: Test Results
path: "junit.xml"
@@ -88,13 +70,7 @@ jobs:
fail-on-error: true

- name: Upload coverage to codecov.io
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
uses: codecov/codecov-action@5a605bd92782ce0810fa3b8acc235c921b497052 # v5.2.0

- name: Upload results to codecov.io
uses: codecov/test-results-action@1b5b448b98e58ba90d1a1a1d9fcb72ca2263be46 # v1.0.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
uses: codecov/test-results-action@4e79e65778be1cecd5df25e14af1eafb6df80ea9 # v1.0.2
.github/workflows/release.yml (vendored, 22 changed lines)
@@ -18,17 +18,17 @@ jobs:
name: Setup
runs-on: ubuntu-24.04
outputs:
release-version: ${{ steps.version.outputs.version }}
release_version: ${{ steps.version.outputs.version }}
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Branch check
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
if: ${{ inputs.release_type != 'Dry Run' }}
run: |
if [[ "$GITHUB_REF" != "refs/heads/main" ]]; then
if [[ "$GITHUB_REF" != "refs/heads/rc" ]] && [[ "$GITHUB_REF" != "refs/heads/hotfix-rc" ]]; then
echo "==================================="
echo "[!] Can only release from the 'main' branch"
echo "[!] Can only release from the 'rc' or 'hotfix-rc' branches"
echo "==================================="
exit 1
fi
@@ -37,7 +37,7 @@ jobs:
id: version
uses: bitwarden/gh-actions/release-version-check@main
with:
release-type: ${{ github.event.inputs.release_type }}
release-type: ${{ inputs.release_type }}
project-type: ts
file: package.json

@@ -47,7 +47,7 @@ jobs:
needs: setup
steps:
- name: Download all artifacts
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
if: ${{ inputs.release_type != 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build.yml
@@ -55,7 +55,7 @@ jobs:
branch: ${{ github.ref_name }}

- name: Dry Run - Download all artifacts
if: ${{ github.event.inputs.release_type == 'Dry Run' }}
if: ${{ inputs.release_type == 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build.yml
@@ -63,10 +63,10 @@ jobs:
branch: main

- name: Create release
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
uses: ncipollo/release-action@2c591bcc8ecdcd2db72b97d6147f871fcd833ba5 # v1.14.0
if: ${{ inputs.release_type != 'Dry Run' }}
uses: ncipollo/release-action@cdcc88a9acf3ca41c16c37bb7d21b9ad48560d87 # v1.15.0
env:
PKG_VERSION: ${{ needs.setup.outputs.release-version }}
PKG_VERSION: ${{ needs.setup.outputs.release_version }}
with:
artifacts: "./bwdc-windows-${{ env.PKG_VERSION }}.zip,
./bwdc-macos-${{ env.PKG_VERSION }}.zip,
.github/workflows/scan.yml (vendored, 22 changed lines)
@@ -5,8 +5,14 @@ on:
push:
branches:
- "main"
pull_request:
types: [opened, synchronize, reopened]
branches-ignore:
- main
pull_request_target:
types: [opened, synchronize]
types: [opened, synchronize, reopened]
branches:
- "main"

jobs:
check-run:
@@ -24,12 +30,12 @@ jobs:

steps:
- name: Check out repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}

- name: Scan with Checkmarx
uses: checkmarx/ast-github-action@ed196cdaec9cd1bc5aacac4ca2010dd773b20893 # 2.0.35
uses: checkmarx/ast-github-action@184bf2f64f55d1c93fd6636d539edf274703e434 # 2.0.41
env:
INCREMENTAL: "${{ contains(github.event_name, 'pull_request') && '--sast-incremental' || '' }}"
with:
@@ -44,9 +50,11 @@ jobs:
--output-path . ${{ env.INCREMENTAL }}

- name: Upload Checkmarx results to GitHub
uses: github/codeql-action/upload-sarif@e2b3eafc8d227b0241d48be5f425d47c2d750a13 # v3.26.10
uses: github/codeql-action/upload-sarif@dd196fa9ce80b6bacc74ca1c32bd5b0ba22efca7 # v3.28.3
with:
sarif_file: cx_result.sarif
sha: ${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.sha || github.sha }}
ref: ${{ contains(github.event_name, 'pull_request') && format('refs/pull/{0}/head', github.event.pull_request.number) || github.ref }}

quality:
name: Quality scan
@@ -58,16 +66,15 @@ jobs:

steps:
- name: Check out repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}

- name: Scan with SonarCloud
uses: sonarsource/sonarcloud-github-action@eb211723266fe8e83102bac7361f0a05c3ac1d1b # v3.0.0
uses: sonarsource/sonarqube-scan-action@bfd4e558cda28cda6b5defafb9232d191be8c203 # v4.2.1
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
args: >
-Dsonar.organization=${{ github.repository_owner }}
@@ -76,3 +83,4 @@ jobs:
-Dsonar.sources=.
-Dsonar.test.inclusions=**/*.spec.ts
-Dsonar.exclusions=**/*.spec.ts
-Dsonar.pullrequest.key=${{ github.event.pull_request.number }}
.github/workflows/test.yml (vendored, 36 changed lines)
@@ -5,32 +5,16 @@ on:
push:
branches:
- "main"
- "rc"
- "hotfix-rc"
pull_request:

jobs:
check-test-secrets:
name: Check for test secrets
runs-on: ubuntu-24.04
outputs:
available: ${{ steps.check-test-secrets.outputs.available }}
permissions:
contents: read

steps:
- name: Check
id: check-test-secrets
run: |
if [ "${{ secrets.CODECOV_TOKEN }}" != '' ]; then
echo "available=true" >> $GITHUB_OUTPUT;
else
echo "available=false" >> $GITHUB_OUTPUT;
fi

testing:
name: Run tests
if: ${{ startsWith(github.head_ref, 'version_bump_') == false }}
runs-on: ubuntu-24.04
needs: check-test-secrets
permissions:
checks: write
contents: read
@@ -38,7 +22,7 @@ jobs:

steps:
- name: Check out repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Get Node version
id: retrieve-node-version
@@ -48,7 +32,7 @@ jobs:
echo "node_version=$NODE_VERSION" >> $GITHUB_OUTPUT

- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -68,7 +52,7 @@ jobs:

- name: Report test results
uses: dorny/test-reporter@31a54ee7ebcacc03a09ea97a7e5465a47b84aea5 # v1.9.1
if: ${{ needs.check-test-secrets.outputs.available == 'true' && !cancelled() }}
if: ${{ github.event.pull_request.head.repo.full_name == github.repository && !cancelled() }}
with:
name: Test Results
path: "junit.xml"
@@ -76,13 +60,7 @@ jobs:
fail-on-error: true

- name: Upload coverage to codecov.io
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4.6.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
uses: codecov/codecov-action@5a605bd92782ce0810fa3b8acc235c921b497052 # v5.2.0

- name: Upload results to codecov.io
uses: codecov/test-results-action@1b5b448b98e58ba90d1a1a1d9fcb72ca2263be46 # v1.0.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
uses: codecov/test-results-action@4e79e65778be1cecd5df25e14af1eafb6df80ea9 # v1.0.2
.github/workflows/version-bump.yml (vendored, 112 changed lines)
@@ -7,17 +7,11 @@ on:
description: "New version override (leave blank for automatic calculation, example: '2024.1.0')"
required: false
type: string
enable_slack_notification:
description: "Enable Slack notifications for upcoming release?"
default: false
type: boolean

jobs:
bump_version:
name: Bump Version
runs-on: ubuntu-24.04
outputs:
version: ${{ steps.set-final-version-output.outputs.version }}
steps:
- name: Validate version input
if: ${{ inputs.version_number_override != '' }}
@@ -25,49 +19,22 @@ jobs:
with:
version: ${{ inputs.version_number_override }}

- name: Slack Notification Check
run: |
if [[ "${{ inputs.enable_slack_notification }}" == true ]]; then
echo "Slack notifications enabled."
else
echo "Slack notifications disabled."
fi
- name: Generate GH App token
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
id: app-token
with:
app-id: ${{ secrets.BW_GHAPP_ID }}
private-key: ${{ secrets.BW_GHAPP_KEY }}

- name: Checkout Branch
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0

- name: Login to Azure - CI Subscription
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}

- name: Retrieve secrets
id: retrieve-secrets
uses: bitwarden/gh-actions/get-keyvault-secrets@main
with:
keyvault: "bitwarden-ci"
secrets: "github-gpg-private-key,
github-gpg-private-key-passphrase"

- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@01dd5d3ca463c7f10f7f4f7b4f177225ac661ee4 # v6.1.0
with:
gpg_private_key: ${{ steps.retrieve-secrets.outputs.github-gpg-private-key }}
passphrase: ${{ steps.retrieve-secrets.outputs.github-gpg-private-key-passphrase }}
git_user_signingkey: true
git_commit_gpgsign: true
token: ${{ steps.app-token.outputs.token }}

- name: Setup git
run: |
git config --local user.email "106330231+bitwarden-devops-bot@users.noreply.github.com"
git config --local user.name "bitwarden-devops-bot"

- name: Create Version Branch
id: create-branch
run: |
NAME=version_bump_${{ github.ref_name }}_$(date +"%Y-%m-%d")
git switch -c $NAME
echo "name=$NAME" >> $GITHUB_OUTPUT
git config user.name github-actions
git config user.email github-actions@github.com

- name: Get current version
id: current-version
@@ -144,61 +111,4 @@ jobs:

- name: Push changes
if: ${{ steps.version-changed.outputs.changes_to_commit == 'TRUE' }}
env:
PR_BRANCH: ${{ steps.create-branch.outputs.name }}
run: git push -u origin $PR_BRANCH

- name: Generate GH App token
uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69 # v1.11.0
id: app-token
with:
app-id: ${{ secrets.BW_GHAPP_ID }}
private-key: ${{ secrets.BW_GHAPP_KEY }}
owner: ${{ github.repository_owner }}

- name: Create Version PR
if: ${{ steps.version-changed.outputs.changes_to_commit == 'TRUE' }}
id: create-pr
env:
GH_TOKEN: ${{ steps.app-token.outputs.token }}
PR_BRANCH: ${{ steps.create-branch.outputs.name }}
TITLE: "Bump version to ${{ steps.set-final-version-output.outputs.version }}"
run: |
PR_URL=$(gh pr create --title "$TITLE" \
--base "main" \
--head "$PR_BRANCH" \
--label "version update" \
--label "automated pr" \
--body "
## Type of change
- [ ] Bug fix
- [ ] New feature development
- [ ] Tech debt (refactoring, code cleanup, dependency upgrades, etc)
- [ ] Build/deploy pipeline (DevOps)
- [X] Other

## Objective
Automated version bump to ${{ steps.set-final-version-output.outputs.version }}")
echo "pr_number=${PR_URL##*/}" >> $GITHUB_OUTPUT

- name: Approve PR
if: ${{ steps.version-changed.outputs.changes_to_commit == 'TRUE' }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ steps.create-pr.outputs.pr_number }}
run: gh pr review $PR_NUMBER --approve

- name: Merge PR
if: ${{ steps.version-changed.outputs.changes_to_commit == 'TRUE' }}
env:
GH_TOKEN: ${{ steps.app-token.outputs.token }}
PR_NUMBER: ${{ steps.create-pr.outputs.pr_number }}
run: gh pr merge $PR_NUMBER --squash --auto --delete-branch

- name: Report upcoming release version to Slack
if: ${{ steps.version-changed.outputs.changes_to_commit == 'TRUE' && inputs.enable_slack_notification == true }}
uses: bitwarden/gh-actions/report-upcoming-release-version@main
with:
version: ${{ steps.set-final-version-output.outputs.version }}
project: ${{ github.repository }}
AZURE_KV_CI_SERVICE_PRINCIPAL: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
run: git push
@@ -3,7 +3,7 @@

# Bitwarden Directory Connector

The Bitwarden Directory Connector is a a desktop application used to sync your Bitwarden enterprise organization to an existing directory of users and groups.
The Bitwarden Directory Connector is a desktop application used to sync your Bitwarden enterprise organization to an existing directory of users and groups.

Supported directories:
@@ -2,9 +2,9 @@ import { ApiTokenRequest } from "../models/request/identityToken/apiTokenRequest
import { PasswordTokenRequest } from "../models/request/identityToken/passwordTokenRequest";
import { SsoTokenRequest } from "../models/request/identityToken/ssoTokenRequest";
import { OrganizationImportRequest } from "../models/request/organizationImportRequest";
import { IdentityCaptchaResponse } from '../models/response/identityCaptchaResponse';
import { IdentityTokenResponse } from '../models/response/identityTokenResponse';
import { IdentityTwoFactorResponse } from '../models/response/identityTwoFactorResponse';
import { IdentityCaptchaResponse } from "../models/response/identityCaptchaResponse";
import { IdentityTokenResponse } from "../models/response/identityTokenResponse";
import { IdentityTwoFactorResponse } from "../models/response/identityTwoFactorResponse";

export abstract class ApiService {
postIdentityToken: (
@@ -8,16 +8,12 @@ export class OrganizationImportRequest {
overwriteExisting = false;
largeImport = false;

constructor(
model:
| {
groups: Required<OrganizationImportGroupRequest>[];
users: Required<OrganizationImportMemberRequest>[];
overwriteExisting: boolean;
largeImport: boolean;
}
| ImportDirectoryRequest,
) {
constructor(model: {
groups: Required<OrganizationImportGroupRequest>[];
users: Required<OrganizationImportMemberRequest>[];
overwriteExisting: boolean;
largeImport: boolean;
}) {
if (model instanceof ImportDirectoryRequest) {
this.groups = model.groups.map((g) => new OrganizationImportGroupRequest(g));
this.members = model.users.map((u) => new OrganizationImportMemberRequest(u));
@@ -60,9 +60,8 @@ export class TrayMain {
}

setupWindowListeners(win: BrowserWindow) {
win.on("minimize", async (e: Event) => {
win.on("minimize", async () => {
if (await this.stateService.getEnableMinimizeToTray()) {
e.preventDefault();
this.hideToTray();
}
});
openldap/example-ldifs/directory-11000.ldif (new file, 340708 lines)
File diff suppressed because it is too large
@@ -6,5 +6,5 @@ fi

mkcert -install
mkdir -p ./openldap/certs
cp $(mkcert -CAROOT)/rootCA.pem ./openldap/certs/rootCA.pem
cp "$(mkcert -CAROOT)/rootCA.pem" ./openldap/certs/rootCA.pem
mkcert -key-file ./openldap/certs/openldap-key.pem -cert-file ./openldap/certs/openldap.pem localhost openldap
package-lock.json (generated, 6558 changed lines)
File diff suppressed because it is too large
package.json (95 changed lines)
@@ -2,7 +2,7 @@
"name": "@bitwarden/directory-connector",
"productName": "Bitwarden Directory Connector",
"description": "Sync your user directory to your Bitwarden organization.",
"version": "2024.10.0",
"version": "2025.6.0",
"keywords": [
"bitwarden",
"password",
@@ -73,73 +73,72 @@
"test:types": "npx tsc --noEmit"
},
"devDependencies": {
"@angular-devkit/build-angular": "17.3.10",
"@angular-devkit/build-angular": "17.3.17",
"@angular-eslint/eslint-plugin-template": "17.5.3",
"@angular-eslint/template-parser": "17.5.3",
"@angular/compiler-cli": "17.3.12",
"@electron/notarize": "2.2.1",
"@electron/rebuild": "3.6.0",
"@electron/notarize": "2.5.0",
"@electron/rebuild": "3.7.1",
"@fluffy-spoon/substitute": "1.208.0",
"@microsoft/microsoft-graph-types": "2.40.0",
"@ngtools/webpack": "17.3.10",
"@ngtools/webpack": "17.3.17",
"@types/inquirer": "8.2.10",
"@types/jest": "29.5.13",
"@types/ldapjs": "2.2.5",
"@types/jest": "29.5.14",
"@types/lowdb": "1.0.15",
"@types/node": "20.16.10",
"@types/node-fetch": "2.6.11",
"@types/node": "22.13.1",
"@types/node-fetch": "2.6.12",
"@types/node-forge": "1.3.11",
"@types/proper-lockfile": "4.1.4",
"@types/tldjs": "2.3.4",
"@typescript-eslint/eslint-plugin": "5.62.0",
"@typescript-eslint/parser": "5.62.0",
"@typescript-eslint/eslint-plugin": "8.32.1",
"@typescript-eslint/parser": "8.32.1",
"clean-webpack-plugin": "4.0.0",
"concurrently": "9.0.1",
"concurrently": "9.1.2",
"copy-webpack-plugin": "12.0.2",
"cross-env": "7.0.3",
"css-loader": "7.1.2",
"dotenv": "16.4.5",
"electron": "28.3.3",
"dotenv": "16.5.0",
"electron": "34.1.1",
"electron-builder": "24.13.3",
"electron-log": "5.2.0",
"electron-log": "5.2.4",
"electron-reload": "2.0.0-alpha.1",
"electron-store": "8.2.0",
"electron-updater": "6.3.4",
"electron-updater": "6.6.2",
"eslint": "8.57.1",
"eslint-config-prettier": "9.1.0",
"eslint-import-resolver-typescript": "3.6.3",
"eslint-plugin-import": "2.30.0",
"eslint-config-prettier": "10.1.5",
"eslint-import-resolver-typescript": "3.7.0",
"eslint-plugin-import": "2.31.0",
"eslint-plugin-rxjs": "5.0.3",
"eslint-plugin-rxjs-angular": "2.0.1",
"form-data": "4.0.0",
"form-data": "4.0.1",
"html-loader": "5.1.0",
"html-webpack-plugin": "5.6.0",
"husky": "9.1.6",
"html-webpack-plugin": "5.6.3",
"husky": "9.1.7",
"jest": "29.7.0",
"jest-junit": "16.0.0",
"jest-mock-extended": "3.0.7",
"jest-preset-angular": "14.2.4",
"lint-staged": "15.2.10",
"mini-css-extract-plugin": "2.9.1",
"jest-preset-angular": "14.5.5",
"lint-staged": "15.5.2",
"mini-css-extract-plugin": "2.9.2",
"node-abi": "3.75.0",
"node-forge": "1.3.1",
"node-loader": "2.0.0",
"node-loader": "2.1.0",
"pkg": "5.8.1",
"prettier": "3.3.3",
"rimraf": "5.0.10",
"rxjs": "7.8.1",
"prettier": "3.5.3",
"rimraf": "6.0.1",
"rxjs": "7.8.2",
"sass": "1.79.4",
"sass-loader": "16.0.2",
"sass-loader": "16.0.4",
"ts-jest": "29.2.5",
"ts-loader": "9.5.1",
"tsconfig-paths-webpack-plugin": "4.1.0",
"type-fest": "4.26.1",
"ts-loader": "9.5.2",
"tsconfig-paths-webpack-plugin": "4.2.0",
"type-fest": "4.41.0",
"typescript": "5.4.5",
"typescript-transform-paths": "3.5.1",
"webpack": "5.95.0",
"webpack-cli": "5.1.4",
"webpack": "5.97.1",
"webpack-cli": "6.0.1",
"webpack-merge": "6.0.1",
"zone.js": "0.14.10",
"webpack-node-externals": "3.0.0"
"webpack-node-externals": "3.0.0",
"zone.js": "0.14.10"
},
"dependencies": {
"@angular/animations": "17.3.12",
@@ -156,25 +155,25 @@
"bootstrap": "5.3.3",
"browser-hrtime": "1.1.8",
"chalk": "4.1.2",
"commander": "12.1.0",
"core-js": "3.38.1",
"form-data": "4.0.0",
"google-auth-library": "7.14.1",
"googleapis": "73.0.0",
"https-proxy-agent": "7.0.5",
"commander": "13.1.0",
"core-js": "3.42.0",
"form-data": "4.0.1",
"google-auth-library": "9.15.1",
"googleapis": "144.0.0",
"https-proxy-agent": "7.0.6",
"inquirer": "8.2.6",
"keytar": "7.9.0",
"ldapjs": "2.3.3",
"ldapts": "7.4.0",
"lowdb": "1.0.0",
"ngx-toastr": "17.0.2",
"ngx-toastr": "19.0.0",
"node-fetch": "2.7.0",
"proper-lockfile": "4.1.2",
"rxjs": "7.8.1",
"rxjs": "7.8.2",
"tldjs": "2.3.1",
"zone.js": "0.14.10"
},
"engines": {
"node": "~20.18.0",
"node": "~22.13.0",
"npm": "~10"
},
"lint-staged": {
src-cli/package-lock.json (generated, 2 changed lines)
@@ -1,7 +1,7 @@
{
"name": "@bitwarden/directory-connector",
"version": "2.9.5",
"lockfileVersion": 2,
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
src/abstractions/directory-factory.service.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
import { DirectoryType } from "@/src/enums/directoryType";
import { IDirectoryService } from "@/src/services/directory.service";

export abstract class DirectoryFactoryService {
abstract createService(type: DirectoryType): IDirectoryService;
}
src/abstractions/request-builder.service.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";

import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";

export interface RequestBuilderOptions {
removeDisabled: boolean;
overwriteExisting: boolean;
}

export abstract class RequestBuilder {
buildRequest: (
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
) => OrganizationImportRequest[];
}
@@ -25,6 +25,11 @@ import { ElectronRendererStorageService } from "@/jslib/electron/src/services/el
import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";

import { DirectoryFactoryService } from "@/src/abstractions/directory-factory.service";
import { BatchRequestBuilder } from "@/src/services/batch-request-builder";
import { DefaultDirectoryFactoryService } from "@/src/services/directory-factory.service";
import { SingleRequestBuilder } from "@/src/services/single-request-builder";

import { AuthService as AuthServiceAbstraction } from "../../abstractions/auth.service";
import { StateService as StateServiceAbstraction } from "../../abstractions/state.service";
import { Account } from "../../models/account";
@@ -168,13 +173,15 @@ export function initFactory(
provide: SyncService,
useClass: SyncService,
deps: [
LogServiceAbstraction,
CryptoFunctionServiceAbstraction,
ApiServiceAbstraction,
MessagingServiceAbstraction,
I18nServiceAbstraction,
EnvironmentServiceAbstraction,
StateServiceAbstraction,
BatchRequestBuilder,
SingleRequestBuilder,
DirectoryFactoryService,
],
}),
safeProvider(AuthGuardService),
@@ -215,6 +222,19 @@ export function initFactory(
StateMigrationServiceAbstraction,
],
}),
safeProvider({
provide: SingleRequestBuilder,
deps: [],
}),
safeProvider({
provide: BatchRequestBuilder,
deps: [],
}),
safeProvider({
provide: DirectoryFactoryService,
useClass: DefaultDirectoryFactoryService,
deps: [LogServiceAbstraction, I18nServiceAbstraction, StateServiceAbstraction],
}),
] satisfies SafeProvider[],
})
export class ServicesModule {}
@@ -22,18 +22,15 @@
class="btn btn-primary"
[disabled]="startForm.loading"
>
<i class="bwi bwi-play bwi-fw" [hidden]="startForm.loading"></i>
<i class="bwi bwi-spinner bwi-fw bwi-spin" [hidden]="!startForm.loading"></i>
{{ "startSync" | i18n }}
</button>
</form>
<button type="button" (click)="stop()" class="btn btn-primary">
<i class="bwi bwi-stop bwi-fw"></i>
<button type="button" (click)="stop()" class="btn btn-danger text-white">
{{ "stopSync" | i18n }}
</button>
<form #syncForm [appApiAction]="syncPromise" class="d-inline">
<button type="button" (click)="sync()" class="btn btn-primary" [disabled]="syncForm.loading">
<i class="bwi bwi-refresh bwi-fw" [ngClass]="{ 'bwi-spin': syncForm.loading }"></i>
{{ "syncNow" | i18n }}
</button>
</form>
@@ -51,7 +48,6 @@
[disabled]="simForm.loading"
>
<i class="bwi bwi-spinner bwi-fw bwi-spin" [hidden]="!simForm.loading"></i>
<i class="bwi bwi-bug bwi-fw" [hidden]="simForm.loading"></i>
{{ "testNow" | i18n }}
</button>
</form>
@@ -614,7 +614,7 @@
{{ "ex" | i18n }} exclude:joe@company.com | profile.firstName eq "John"
</div>
<div class="form-text" *ngIf="directory === directoryType.GSuite">
{{ "ex" | i18n }} exclude:joe@company.com | orgName=Engineering
{{ "ex" | i18n }} exclude:joe@company.com | orgUnitPath=/Engineering
</div>
</div>
<div class="mb-3" [hidden]="directory != directoryType.Ldap">
@@ -2,19 +2,16 @@
<ul class="nav nav-tabs mb-3">
<li class="nav-item">
<a class="nav-link" routerLink="dashboard" routerLinkActive="active">
<i class="bwi bwi-dashboard"></i>
{{ "dashboard" | i18n }}
</a>
</li>
<li class="nav-item">
<a class="nav-link" routerLink="settings" routerLinkActive="active">
<i class="bwi bwi-cogs"></i>
{{ "settings" | i18n }}
</a>
</li>
<li class="nav-item">
<a class="nav-link" routerLink="more" routerLinkActive="active">
<i class="bwi bwi-sliders"></i>
{{ "more" | i18n }}
</a>
</li>
src/bwdc.ts (20 changed lines)
@@ -17,12 +17,16 @@ import { ConsoleLogService } from "@/jslib/node/src/cli/services/consoleLog.serv
import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";

import { DirectoryFactoryService } from "./abstractions/directory-factory.service";
import { Account } from "./models/account";
import { Program } from "./program";
import { AuthService } from "./services/auth.service";
import { BatchRequestBuilder } from "./services/batch-request-builder";
import { DefaultDirectoryFactoryService } from "./services/directory-factory.service";
import { I18nService } from "./services/i18n.service";
import { KeytarSecureStorageService } from "./services/keytarSecureStorage.service";
import { LowdbStorageService } from "./services/lowdbStorage.service";
import { SingleRequestBuilder } from "./services/single-request-builder";
import { StateService } from "./services/state.service";
import { StateMigrationService } from "./services/stateMigration.service";
import { SyncService } from "./services/sync.service";
@@ -51,6 +55,9 @@ export class Main {
syncService: SyncService;
stateService: StateService;
stateMigrationService: StateMigrationService;
directoryFactoryService: DirectoryFactoryService;
batchRequestBuilder: BatchRequestBuilder;
singleRequestBuilder: SingleRequestBuilder;

constructor() {
const applicationName = "Bitwarden Directory Connector";
@@ -146,14 +153,25 @@ export class Main {
this.stateService,
);

this.syncService = new SyncService(
this.directoryFactoryService = new DefaultDirectoryFactoryService(
this.logService,
this.i18nService,
this.stateService,
);

this.batchRequestBuilder = new BatchRequestBuilder();
this.singleRequestBuilder = new SingleRequestBuilder();

this.syncService = new SyncService(
this.cryptoFunctionService,
this.apiService,
this.messagingService,
this.i18nService,
this.environmentService,
this.stateService,
this.batchRequestBuilder,
this.singleRequestBuilder,
this.directoryFactoryService,
);

this.program = new Program(this);
@@ -18,7 +18,9 @@ import { BaseDirectoryService } from "./baseDirectory.service";
import { IDirectoryService } from "./directory.service";

const AzurePublicIdentityAuhtority = "login.microsoftonline.com";
const AzurePublicGraphEndpoint = "https://graph.microsoft.com";
const AzureGovermentIdentityAuhtority = "login.microsoftonline.us";
const AzureGovernmentGraphEndpoint = "https://graph.microsoft.us";

const NextLink = "@odata.nextLink";
const DeltaLink = "@odata.deltaLink";
@@ -207,7 +209,7 @@ export class AzureDirectoryService extends BaseDirectoryService implements IDire
if (keyword === "excludeadministrativeunit" || keyword === "includeadministrativeunit") {
for (const p of pieces) {
let auMembers = await this.client
.api(`https://graph.microsoft.com/v1.0/directory/administrativeUnits/${p}/members`)
.api(`${this.getGraphApiEndpoint()}/v1.0/directory/administrativeUnits/${p}/members`)
.get();
// eslint-disable-next-line
while (true) {
@@ -478,7 +480,7 @@ export class AzureDirectoryService extends BaseDirectoryService implements IDire
client_id: this.dirConfig.applicationId,
client_secret: this.dirConfig.key,
grant_type: "client_credentials",
scope: "https://graph.microsoft.com/.default",
scope: `${this.getGraphApiEndpoint()}/.default`,
});

const req = https
@@ -542,4 +544,10 @@ export class AzureDirectoryService extends BaseDirectoryService implements IDire
exp.setSeconds(exp.getSeconds() + expSeconds);
this.accessTokenExpiration = exp;
}

private getGraphApiEndpoint(): string {
return this.dirConfig.identityAuthority === AzureGovermentIdentityAuhtority
? AzureGovernmentGraphEndpoint
: AzurePublicGraphEndpoint;
}
}
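The change above routes both the administrative-unit query and the OAuth token scope through a single endpoint lookup keyed off the configured identity authority. A standalone sketch of that selection, using only the constants and values visible in the diff (not part of the change set itself):

```typescript
// Constant names mirror the diff above, including their original spellings.
const AzureGovermentIdentityAuhtority = "login.microsoftonline.us";
const AzurePublicGraphEndpoint = "https://graph.microsoft.com";
const AzureGovernmentGraphEndpoint = "https://graph.microsoft.us";

// Pick the Graph endpoint that matches the identity authority in the directory config.
function graphApiEndpointFor(identityAuthority: string): string {
  return identityAuthority === AzureGovermentIdentityAuhtority
    ? AzureGovernmentGraphEndpoint
    : AzurePublicGraphEndpoint;
}

// The token scope and member queries are both derived from the selected endpoint.
console.log(`${graphApiEndpointFor("login.microsoftonline.us")}/.default`); // https://graph.microsoft.us/.default
console.log(`${graphApiEndpointFor("login.microsoftonline.com")}/.default`); // https://graph.microsoft.com/.default
```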
src/services/batch-request-builder.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";

import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";

import { RequestBuilder, RequestBuilderOptions } from "../abstractions/request-builder.service";

import { batchSize } from "./sync.service";

/**
 * This class is responsible for batching large sync requests (>2k users) into multiple smaller
 * requests to the /import endpoint. This is done to ensure we are under the default
 * maximum packet size for NGINX web servers to avoid the request potentially timing out
 * */
export class BatchRequestBuilder implements RequestBuilder {
buildRequest(
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
): OrganizationImportRequest[] {
if (options.overwriteExisting) {
throw new Error(
"You cannot use the 'Remove and re-add organization users during the next sync' option with large imports.",
);
}

const requests: OrganizationImportRequest[] = [];

if (users?.length > 0) {
const usersRequest = users.map((u) => {
return {
email: u.email,
externalId: u.externalId,
deleted: u.deleted || (options.removeDisabled && u.disabled),
};
});

// Partition users
for (let i = 0; i < usersRequest.length; i += batchSize) {
const u = usersRequest.slice(i, i + batchSize);
const req = new OrganizationImportRequest({
groups: [],
users: u,
largeImport: true,
overwriteExisting: false,
});
requests.push(req);
}
}

if (groups?.length > 0) {
const groupRequest = groups.map((g) => {
return {
name: g.name,
externalId: g.externalId,
memberExternalIds: Array.from(g.userMemberExternalIds),
};
});

// Partition groups
for (let i = 0; i < groupRequest.length; i += batchSize) {
const g = groupRequest.slice(i, i + batchSize);
const req = new OrganizationImportRequest({
groups: g,
users: [],
largeImport: true,
overwriteExisting: false,
});
requests.push(req);
}
}

return requests;
}
}
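The 12-request expectation in the spec that follows pins down the batch size the builder imports from sync.service: with a batch size of 2,000, 11,000 users and 11,000 groups each partition into ceil(11000 / 2000) = 6 requests, for 12 in total. A minimal usage sketch under that assumption (not part of the change set; only `email` is populated on each entry, and the import paths follow the diffs above):

```typescript
import { UserEntry } from "@/src/models/userEntry";

import { BatchRequestBuilder } from "./batch-request-builder";

// Build 5,000 minimal user entries; the remaining UserEntry fields keep their defaults.
const users: UserEntry[] = [];
for (let i = 0; i < 5000; i++) {
  const u = new UserEntry();
  u.email = `user${i}@example.com`;
  users.push(u);
}

const builder = new BatchRequestBuilder();
const requests = builder.buildRequest([], users, {
  removeDisabled: false,
  overwriteExisting: false, // true would throw, as the builder above shows
});

// With a batch size of 2,000, 5,000 users partition into ceil(5000 / 2000) = 3
// OrganizationImportRequest objects, each flagged largeImport: true.
console.log(requests.length); // 3
```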
src/services/batch-requests-builder.spec.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";

import { UserEntry } from "@/src/models/userEntry";

import { RequestBuilderOptions } from "../abstractions/request-builder.service";
import { groupSimulator, userSimulator } from "../utils/request-builder-helper";

import { BatchRequestBuilder } from "./batch-request-builder";

describe("BatchRequestBuilder", () => {
let batchRequestBuilder: BatchRequestBuilder;

beforeEach(async () => {
batchRequestBuilder = new BatchRequestBuilder();
});

const defaultOptions: RequestBuilderOptions = Object.freeze({
overwriteExisting: false,
removeDisabled: false,
});

it("BatchRequestBuilder batches requests for > 2000 users", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, defaultOptions);

expect(requests.length).toEqual(12);
});

it("BatchRequestBuilder throws error when overwriteExisting is true", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const options = { ...defaultOptions, overwriteExisting: true };

const r = () => batchRequestBuilder.buildRequest(mockGroups, mockUsers, options);

expect(r).toThrow(
"You cannot use the 'Remove and re-add organization users during the next sync' option with large imports.",
);
});

it("BatchRequestBuilder returns requests with deleted users when removeDisabled is true", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);

const disabledUser1 = new UserEntry();
const disabledUserEmail1 = GetUniqueString() + "@email.com";

const disabledUser2 = new UserEntry();
const disabledUserEmail2 = GetUniqueString() + "@email.com";

disabledUser1.disabled = true;
disabledUser1.email = disabledUserEmail1;
disabledUser2.disabled = true;
disabledUser2.email = disabledUserEmail2;

mockUsers[0] = disabledUser1;
mockUsers.push(disabledUser2);

const options = { ...defaultOptions, removeDisabled: true };
const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, options);

expect(requests[0].members).toContainEqual({ email: disabledUserEmail1, deleted: true });
expect(requests[1].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[3].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[4].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[5].members).toContainEqual({ email: disabledUserEmail2, deleted: true });
});

it("BatchRequestBuilder retuns an empty array when there are no users or groups", () => {
const requests = batchRequestBuilder.buildRequest([], [], defaultOptions);

expect(requests).toEqual([]);
});
});
37
src/services/directory-factory.service.ts
Normal file
@@ -0,0 +1,37 @@
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
import { LogService } from "@/jslib/common/src/abstractions/log.service";

import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { StateService } from "../abstractions/state.service";
import { DirectoryType } from "../enums/directoryType";

import { AzureDirectoryService } from "./azure-directory.service";
import { GSuiteDirectoryService } from "./gsuite-directory.service";
import { LdapDirectoryService } from "./ldap-directory.service";
import { OktaDirectoryService } from "./okta-directory.service";
import { OneLoginDirectoryService } from "./onelogin-directory.service";

export class DefaultDirectoryFactoryService implements DirectoryFactoryService {
  constructor(
    private logService: LogService,
    private i18nService: I18nService,
    private stateService: StateService,
  ) {}

  createService(directoryType: DirectoryType) {
    switch (directoryType) {
      case DirectoryType.GSuite:
        return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService);
      case DirectoryType.AzureActiveDirectory:
        return new AzureDirectoryService(this.logService, this.i18nService, this.stateService);
      case DirectoryType.Ldap:
        return new LdapDirectoryService(this.logService, this.i18nService, this.stateService);
      case DirectoryType.Okta:
        return new OktaDirectoryService(this.logService, this.i18nService, this.stateService);
      case DirectoryType.OneLogin:
        return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService);
      default:
        throw new Error("Invalid Directory Type");
    }
  }
}
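A brief usage sketch (not part of the diff) of the factory above; the mocked dependencies come from jest-mock-extended, which the new specs in this PR already rely on, and are placeholders only:

import { mock } from "jest-mock-extended";

const factory = new DefaultDirectoryFactoryService(mock(), mock(), mock());
const ldapDirectory = factory.createService(DirectoryType.Ldap); // an LdapDirectoryService instance
// Any unmapped DirectoryType value makes createService throw "Invalid Directory Type"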
@@ -141,7 +141,7 @@ export class GSuiteDirectoryService extends BaseDirectoryService implements IDir
     entry.referenceId = user.id;
     entry.externalId = user.id;
     entry.email = user.primaryEmail != null ? user.primaryEmail.trim().toLowerCase() : null;
-    entry.disabled = user.suspended || false;
+    entry.disabled = user.suspended || user.archived || false;
     entry.deleted = deleted;
     return entry;
   }

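To illustrate the one-line change above (not part of the diff): archived Google Workspace users are now treated as disabled alongside suspended ones. The user literal below is a fabricated stand-in for the googleapis directory user object:

const user = { suspended: false, archived: true }; // fabricated example user
const disabled = user.suspended || user.archived || false;
// Previously false (archived was ignored); true after this change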
@@ -5,8 +5,7 @@ import { LogService } from "../../jslib/common/src/abstractions/log.service";
|
||||
import { groupFixtures } from "../../openldap/group-fixtures";
|
||||
import { userFixtures } from "../../openldap/user-fixtures";
|
||||
import { DirectoryType } from "../enums/directoryType";
|
||||
import { LdapConfiguration } from "../models/ldapConfiguration";
|
||||
import { SyncConfiguration } from "../models/syncConfiguration";
|
||||
import { getLdapConfiguration, getSyncConfiguration } from "../utils/test-fixtures";
|
||||
|
||||
import { LdapDirectoryService } from "./ldap-directory.service";
|
||||
import { StateService } from "./state.service";
|
||||
@@ -154,54 +153,3 @@ describe("ldapDirectoryService", () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration.
|
||||
*/
|
||||
const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({
|
||||
ssl: false,
|
||||
startTls: false,
|
||||
tlsCaPath: null,
|
||||
sslAllowUnauthorized: false,
|
||||
sslCertPath: null,
|
||||
sslKeyPath: null,
|
||||
sslCaPath: null,
|
||||
hostname: "localhost",
|
||||
port: 1389,
|
||||
domain: null,
|
||||
rootPath: "dc=bitwarden,dc=com",
|
||||
currentUser: false,
|
||||
username: "cn=admin,dc=bitwarden,dc=com",
|
||||
password: "admin",
|
||||
ad: false,
|
||||
pagedSearch: false,
|
||||
...(config ?? {}),
|
||||
});
|
||||
|
||||
/**
|
||||
* @returns a basic sync configuration. Can be overridden by passing in a partial configuration.
|
||||
*/
|
||||
const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({
|
||||
users: false,
|
||||
groups: false,
|
||||
interval: 5,
|
||||
userFilter: null,
|
||||
groupFilter: null,
|
||||
removeDisabled: false,
|
||||
overwriteExisting: false,
|
||||
largeImport: false,
|
||||
// Ldap properties
|
||||
groupObjectClass: "posixGroup",
|
||||
userObjectClass: "person",
|
||||
groupPath: null,
|
||||
userPath: null,
|
||||
groupNameAttribute: "cn",
|
||||
userEmailAttribute: "mail",
|
||||
memberAttribute: "memberUid",
|
||||
useEmailPrefixSuffix: false,
|
||||
emailPrefixAttribute: "sAMAccountName",
|
||||
emailSuffix: null,
|
||||
creationDateAttribute: "whenCreated",
|
||||
revisionDateAttribute: "whenChanged",
|
||||
...(config ?? {}),
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import * as fs from "fs";
|
||||
import { checkServerIdentity, PeerCertificate } from "tls";
|
||||
import * as tls from "tls";
|
||||
|
||||
import * as ldap from "ldapjs";
|
||||
import * as ldapts from "ldapts";
|
||||
|
||||
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
|
||||
import { LogService } from "@/jslib/common/src/abstractions/log.service";
|
||||
@@ -18,8 +18,13 @@ import { IDirectoryService } from "./directory.service";
|
||||
|
||||
const UserControlAccountDisabled = 2;
|
||||
|
||||
/**
|
||||
* The attribute name for the unique identifier used by Active Directory.
|
||||
*/
|
||||
const ActiveDirectoryExternalId = "objectGUID";
|
||||
|
||||
export class LdapDirectoryService implements IDirectoryService {
|
||||
private client: ldap.Client;
|
||||
private client: ldapts.Client;
|
||||
private dirConfig: LdapConfiguration;
|
||||
private syncConfig: SyncConfiguration;
|
||||
|
||||
@@ -48,21 +53,25 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
await this.bind();
|
||||
|
||||
let users: UserEntry[];
|
||||
if (this.syncConfig.users) {
|
||||
users = await this.getUsers(force, test);
|
||||
}
|
||||
|
||||
let groups: GroupEntry[];
|
||||
if (this.syncConfig.groups) {
|
||||
let groupForce = force;
|
||||
if (!groupForce && users != null) {
|
||||
const activeUsers = users.filter((u) => !u.deleted && !u.disabled);
|
||||
groupForce = activeUsers.length > 0;
|
||||
|
||||
try {
|
||||
if (this.syncConfig.users) {
|
||||
users = await this.getUsers(force, test);
|
||||
}
|
||||
groups = await this.getGroups(groupForce);
|
||||
|
||||
if (this.syncConfig.groups) {
|
||||
let groupForce = force;
|
||||
if (!groupForce && users != null) {
|
||||
const activeUsers = users.filter((u) => !u.deleted && !u.disabled);
|
||||
groupForce = activeUsers.length > 0;
|
||||
}
|
||||
groups = await this.getGroups(groupForce);
|
||||
}
|
||||
} finally {
|
||||
await this.client.unbind();
|
||||
}
|
||||
|
||||
await this.unbind();
|
||||
return [groups, users];
|
||||
}
|
||||
|
||||
@@ -101,10 +110,7 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
const deletedPath = this.makeSearchPath("CN=Deleted Objects");
|
||||
this.logService.info("Deleted user search: " + deletedPath + " => " + deletedFilter);
|
||||
|
||||
const delControl = new (ldap as any).Control({
|
||||
type: "1.2.840.113556.1.4.417",
|
||||
criticality: true,
|
||||
});
|
||||
const delControl = new ldapts.Control("1.2.840.113556.1.4.417", { critical: true });
|
||||
const deletedUsers = await this.search<UserEntry>(
|
||||
deletedPath,
|
||||
deletedFilter,
|
||||
@@ -120,7 +126,7 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
|
||||
private buildUser(searchEntry: any, deleted: boolean): UserEntry {
|
||||
const user = new UserEntry();
|
||||
user.referenceId = searchEntry.objectName;
|
||||
user.referenceId = this.getReferenceId(searchEntry);
|
||||
user.deleted = deleted;
|
||||
|
||||
if (user.referenceId == null) {
|
||||
@@ -172,7 +178,7 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
let groupSearchEntries: any[] = [];
|
||||
const initialSearchGroupIds = await this.search<string>(path, filter, (se: any) => {
|
||||
groupSearchEntries.push(se);
|
||||
return se.objectName;
|
||||
return this.getReferenceId(se);
|
||||
});
|
||||
|
||||
if (searchSinceRevision && initialSearchGroupIds.length === 0) {
|
||||
@@ -188,7 +194,7 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
const userPath = this.makeSearchPath(this.syncConfig.userPath);
|
||||
const userIdMap = new Map<string, string>();
|
||||
await this.search<string>(userPath, userFilter, (se: any) => {
|
||||
userIdMap.set(se.objectName, this.getExternalId(se, se.objectName));
|
||||
userIdMap.set(this.getReferenceId(se), this.getExternalId(se, this.getReferenceId(se)));
|
||||
return se;
|
||||
});
|
||||
|
||||
@@ -204,7 +210,7 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
|
||||
private buildGroup(searchEntry: any, userMap: Map<string, string>) {
|
||||
const group = new GroupEntry();
|
||||
group.referenceId = searchEntry.objectName;
|
||||
group.referenceId = this.getReferenceId(searchEntry);
|
||||
if (group.referenceId == null) {
|
||||
return null;
|
||||
}
|
||||
@@ -220,7 +226,7 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
return null;
|
||||
}
|
||||
|
||||
const members = this.getAttrVals(searchEntry, this.syncConfig.memberAttribute);
|
||||
const members = this.getAttrVals<string>(searchEntry, this.syncConfig.memberAttribute);
|
||||
if (members != null) {
|
||||
for (const memDn of members) {
|
||||
if (userMap.has(memDn) && !group.userMemberExternalIds.has(userMap.get(memDn))) {
|
||||
@@ -234,15 +240,26 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
return group;
|
||||
}
|
||||
|
||||
private getExternalId(searchEntry: any, referenceId: string) {
|
||||
const attrObj = this.getAttrObj(searchEntry, "objectGUID");
|
||||
if (attrObj != null && attrObj._vals != null && attrObj._vals.length > 0) {
|
||||
return this.bufToGuid(attrObj._vals[0]);
|
||||
/**
|
||||
* The externalId is the "objectGUID" property if present (a unique identifier used by Active Directory),
|
||||
* otherwise it falls back to the provided referenceId.
|
||||
*/
|
||||
private getExternalId(searchEntry: ldapts.Entry, referenceId: string) {
|
||||
const attr = this.getAttr<Buffer>(searchEntry, ActiveDirectoryExternalId);
|
||||
if (attr != null) {
|
||||
return this.bufToGuid(attr);
|
||||
} else {
|
||||
return referenceId;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the object's reference id (dn)
|
||||
*/
|
||||
private getReferenceId(entry: ldapts.Entry): string {
|
||||
return entry.dn;
|
||||
}
|
||||
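A small sketch (not part of the diff) of how the two identifiers above relate for an Active Directory entry; the entry literal is fabricated, and it assumes objectGUID arrives as a Buffer because the search options in this service list it in explicitBufferAttributes:

const entry: ldapts.Entry = { dn: "cn=jdoe,dc=bitwarden,dc=com", objectGUID: Buffer.alloc(16) };
const referenceId = entry.dn; // getReferenceId: always the distinguished name
// getExternalId: bufToGuid(objectGUID) when the attribute is present, otherwise it falls back to referenceId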
|
||||
private buildBaseFilter(objectClass: string, subFilter: string): string {
|
||||
let filter = this.buildObjectClassFilter(objectClass);
|
||||
if (subFilter != null && subFilter.trim() !== "") {
|
||||
@@ -281,42 +298,48 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
return null;
|
||||
}
|
||||
|
||||
private getAttrObj(searchEntry: any, attr: string): any {
|
||||
if (searchEntry == null || searchEntry.attributes == null) {
|
||||
/**
|
||||
*/
|
||||
|
||||
/**
|
||||
* Get all values for an ldap attribute
|
||||
* @param searchEntry The ldap entry
|
||||
* @param attr An attribute name on the ldap entry
|
||||
* @returns An array containing all values of the attribute, or null if there are no values
|
||||
*/
|
||||
private getAttrVals<T extends string | Buffer>(
|
||||
searchEntry: ldapts.Entry,
|
||||
attr: string,
|
||||
): T[] | null {
|
||||
if (searchEntry == null || searchEntry[attr] == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const attrs = searchEntry.attributes.filter((a: any) => a.type === attr);
|
||||
if (
|
||||
attrs == null ||
|
||||
attrs.length === 0 ||
|
||||
attrs[0].vals == null ||
|
||||
attrs[0].vals.length === 0
|
||||
) {
|
||||
return null;
|
||||
const vals = searchEntry[attr];
|
||||
if (!Array.isArray(vals)) {
|
||||
return [vals] as T[];
|
||||
}
|
||||
|
||||
return attrs[0];
|
||||
return vals as T[];
|
||||
}
|
||||
|
||||
private getAttrVals(searchEntry: any, attr: string): string[] {
|
||||
const obj = this.getAttrObj(searchEntry, attr);
|
||||
if (obj == null) {
|
||||
return null;
|
||||
}
|
||||
return obj.vals;
|
||||
}
|
||||
|
||||
private getAttr(searchEntry: any, attr: string): string {
|
||||
/**
|
||||
* Get the first value for an ldap attribute
|
||||
* @param searchEntry The ldap entry
|
||||
* @param attr An attribute name on the ldap entry
|
||||
* @returns The first value of the attribute, or null if there is not at least 1 value
|
||||
*/
|
||||
private getAttr<T extends string | Buffer>(searchEntry: ldapts.Entry, attr: string): T {
|
||||
const vals = this.getAttrVals(searchEntry, attr);
|
||||
if (vals == null) {
|
||||
if (vals == null || vals.length < 1) {
|
||||
return null;
|
||||
}
|
||||
return vals[0];
|
||||
|
||||
return vals[0] as T;
|
||||
}
|
||||
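A short sketch (not part of the diff) of the ldapts attribute shape these helpers normalize: each attribute is a plain property on the entry holding either a single value or an array. The entry literal is fabricated for illustration:

const groupEntry: ldapts.Entry = {
  dn: "cn=admins,dc=bitwarden,dc=com",
  cn: "admins",
  memberUid: ["jdoe", "asmith"],
};
const memberVals = Array.isArray(groupEntry.memberUid) ? groupEntry.memberUid : [groupEntry.memberUid];
// getAttrVals<string> would return ["jdoe", "asmith"]; getAttr<string> would return "jdoe"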
|
||||
private entryDisabled(searchEntry: any): boolean {
|
||||
const c = this.getAttr(searchEntry, "userAccountControl");
|
||||
const c = this.getAttr<string>(searchEntry, "userAccountControl");
|
||||
if (c != null) {
|
||||
try {
|
||||
const control = parseInt(c, null);
|
||||
@@ -333,145 +356,106 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
private async search<T>(
|
||||
path: string,
|
||||
filter: string,
|
||||
processEntry: (searchEntry: any) => T,
|
||||
controls: ldap.Control[] = [],
|
||||
processEntry: (searchEntry: ldapts.Entry) => T,
|
||||
controls: ldapts.Control[] = [],
|
||||
): Promise<T[]> {
|
||||
const options: ldap.SearchOptions = {
|
||||
const options: ldapts.SearchOptions = {
|
||||
filter: filter,
|
||||
scope: "sub",
|
||||
paged: this.dirConfig.pagedSearch,
|
||||
// We need to expressly tell ldapts what attributes to return as Buffer objects,
|
||||
// otherwise they are returned as strings
|
||||
explicitBufferAttributes: [ActiveDirectoryExternalId],
|
||||
};
|
||||
const entries: T[] = [];
|
||||
return new Promise<T[]>((resolve, reject) => {
|
||||
this.client.search(path, options, controls, (err, res) => {
|
||||
if (err != null) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
res.on("error", (resErr) => {
|
||||
reject(resErr);
|
||||
});
|
||||
|
||||
res.on("searchEntry", (entry) => {
|
||||
const e = processEntry(entry);
|
||||
if (e != null) {
|
||||
entries.push(e);
|
||||
}
|
||||
});
|
||||
|
||||
res.on("end", (result) => {
|
||||
resolve(entries);
|
||||
});
|
||||
});
|
||||
});
|
||||
const { searchEntries } = await this.client.search(path, options, controls);
|
||||
return searchEntries.map((e) => processEntry(e)).filter((e) => e != null);
|
||||
}
|
||||
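A standalone sketch (not part of the diff) of the promise-based ldapts search that replaces the event-driven ldapjs callbacks above; the URL, credentials, and filter are placeholders, and explicitBufferAttributes mirrors the option used in this method:

const client = new ldapts.Client({ url: "ldap://localhost:1389" });
await client.bind("cn=admin,dc=bitwarden,dc=com", "admin");
const { searchEntries } = await client.search("dc=bitwarden,dc=com", {
  filter: "(objectClass=person)",
  scope: "sub",
  explicitBufferAttributes: ["objectGUID"], // keep the AD GUID as a Buffer instead of a string
});
await client.unbind();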
|
||||
private async bind(): Promise<any> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
if (this.dirConfig.hostname == null || this.dirConfig.port == null) {
|
||||
reject(this.i18nService.t("dirConfigIncomplete"));
|
||||
return;
|
||||
}
|
||||
const protocol = "ldap" + (this.dirConfig.ssl && !this.dirConfig.startTls ? "s" : "");
|
||||
const url = protocol + "://" + this.dirConfig.hostname + ":" + this.dirConfig.port;
|
||||
const options: ldap.ClientOptions = {
|
||||
url: url.trim().toLowerCase(),
|
||||
};
|
||||
if (this.dirConfig.hostname == null || this.dirConfig.port == null) {
|
||||
throw new Error(this.i18nService.t("dirConfigIncomplete"));
|
||||
}
|
||||
|
||||
const tlsOptions: any = {};
|
||||
if (this.dirConfig.ssl) {
|
||||
if (this.dirConfig.sslAllowUnauthorized) {
|
||||
tlsOptions.rejectUnauthorized = !this.dirConfig.sslAllowUnauthorized;
|
||||
}
|
||||
if (!this.dirConfig.startTls) {
|
||||
if (
|
||||
this.dirConfig.sslCaPath != null &&
|
||||
this.dirConfig.sslCaPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.sslCaPath)
|
||||
) {
|
||||
tlsOptions.ca = [fs.readFileSync(this.dirConfig.sslCaPath)];
|
||||
}
|
||||
if (
|
||||
this.dirConfig.sslCertPath != null &&
|
||||
this.dirConfig.sslCertPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.sslCertPath)
|
||||
) {
|
||||
tlsOptions.cert = fs.readFileSync(this.dirConfig.sslCertPath);
|
||||
}
|
||||
if (
|
||||
this.dirConfig.sslKeyPath != null &&
|
||||
this.dirConfig.sslKeyPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.sslKeyPath)
|
||||
) {
|
||||
tlsOptions.key = fs.readFileSync(this.dirConfig.sslKeyPath);
|
||||
}
|
||||
} else {
|
||||
if (
|
||||
this.dirConfig.tlsCaPath != null &&
|
||||
this.dirConfig.tlsCaPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.tlsCaPath)
|
||||
) {
|
||||
tlsOptions.ca = [fs.readFileSync(this.dirConfig.tlsCaPath)];
|
||||
}
|
||||
}
|
||||
}
|
||||
const protocol = this.dirConfig.ssl && !this.dirConfig.startTls ? "ldaps" : "ldap";
|
||||
|
||||
tlsOptions.checkServerIdentity = this.checkServerIdentityAltNames;
|
||||
options.tlsOptions = tlsOptions;
|
||||
const url = protocol + "://" + this.dirConfig.hostname + ":" + this.dirConfig.port;
|
||||
const options: ldapts.ClientOptions = {
|
||||
url: url.trim().toLowerCase(),
|
||||
};
|
||||
|
||||
this.client = ldap.createClient(options);
|
||||
// If using ldaps, TLS options are given to the client constructor
|
||||
if (protocol === "ldaps") {
|
||||
options.tlsOptions = this.buildTlsOptions();
|
||||
}
|
||||
|
||||
const user =
|
||||
this.dirConfig.username == null || this.dirConfig.username.trim() === ""
|
||||
? null
|
||||
: this.dirConfig.username;
|
||||
const pass =
|
||||
this.dirConfig.password == null || this.dirConfig.password.trim() === ""
|
||||
? null
|
||||
: this.dirConfig.password;
|
||||
this.client = new ldapts.Client(options);
|
||||
|
||||
if (user == null || pass == null) {
|
||||
reject(this.i18nService.t("usernamePasswordNotConfigured"));
|
||||
return;
|
||||
}
|
||||
const user =
|
||||
this.dirConfig.username == null || this.dirConfig.username.trim() === ""
|
||||
? null
|
||||
: this.dirConfig.username;
|
||||
const pass =
|
||||
this.dirConfig.password == null || this.dirConfig.password.trim() === ""
|
||||
? null
|
||||
: this.dirConfig.password;
|
||||
|
||||
if (this.dirConfig.startTls && this.dirConfig.ssl) {
|
||||
this.client.starttls(options.tlsOptions, undefined, (err, res) => {
|
||||
if (err != null) {
|
||||
reject(err.message);
|
||||
} else {
|
||||
this.client.bind(user, pass, (err2) => {
|
||||
if (err2 != null) {
|
||||
reject(err2.message);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
} else {
|
||||
this.client.bind(user, pass, (err) => {
|
||||
if (err != null) {
|
||||
reject(err.message);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
if (user == null || pass == null) {
|
||||
throw new Error(this.i18nService.t("usernamePasswordNotConfigured"));
|
||||
}
|
||||
|
||||
// If using StartTLS, TLS options are given to the StartTLS call
|
||||
if (this.dirConfig.startTls && this.dirConfig.ssl) {
|
||||
await this.client.startTLS(this.buildTlsOptions());
|
||||
}
|
||||
|
||||
try {
|
||||
await this.client.bind(user, pass);
|
||||
} catch {
|
||||
await this.client.unbind();
|
||||
}
|
||||
}
|
||||
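For orientation, a condensed sketch (not part of the diff) of the connection sequence the reworked bind() follows, with placeholder connection details; with StartTLS the plain connection is upgraded before binding, while plain ldaps passes TLS options to the client constructor instead:

const client = new ldapts.Client({ url: "ldap://ldap.example.com:389" }); // placeholder host
await client.startTLS({ rejectUnauthorized: true }); // only when StartTLS is configured
await client.bind("cn=admin,dc=bitwarden,dc=com", "admin");
// ...search for users and groups...
await client.unbind();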
|
||||
private async unbind(): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.client.unbind((err) => {
|
||||
if (err != null) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
private buildTlsOptions(): tls.ConnectionOptions {
|
||||
const tlsOptions: tls.ConnectionOptions = {};
|
||||
|
||||
if (this.dirConfig.sslAllowUnauthorized) {
|
||||
tlsOptions.rejectUnauthorized = !this.dirConfig.sslAllowUnauthorized;
|
||||
}
|
||||
if (!this.dirConfig.startTls) {
|
||||
if (
|
||||
this.dirConfig.sslCaPath != null &&
|
||||
this.dirConfig.sslCaPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.sslCaPath)
|
||||
) {
|
||||
tlsOptions.ca = [fs.readFileSync(this.dirConfig.sslCaPath)];
|
||||
}
|
||||
if (
|
||||
this.dirConfig.sslCertPath != null &&
|
||||
this.dirConfig.sslCertPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.sslCertPath)
|
||||
) {
|
||||
tlsOptions.cert = fs.readFileSync(this.dirConfig.sslCertPath);
|
||||
}
|
||||
if (
|
||||
this.dirConfig.sslKeyPath != null &&
|
||||
this.dirConfig.sslKeyPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.sslKeyPath)
|
||||
) {
|
||||
tlsOptions.key = fs.readFileSync(this.dirConfig.sslKeyPath);
|
||||
}
|
||||
} else {
|
||||
if (
|
||||
this.dirConfig.tlsCaPath != null &&
|
||||
this.dirConfig.tlsCaPath !== "" &&
|
||||
fs.existsSync(this.dirConfig.tlsCaPath)
|
||||
) {
|
||||
tlsOptions.ca = [fs.readFileSync(this.dirConfig.tlsCaPath)];
|
||||
}
|
||||
}
|
||||
|
||||
tlsOptions.checkServerIdentity = this.checkServerIdentityAltNames;
|
||||
|
||||
return tlsOptions;
|
||||
}
|
||||
|
||||
private bufToGuid(buf: Buffer) {
|
||||
@@ -494,7 +478,7 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
return guid.toLowerCase();
|
||||
}
|
||||
|
||||
private checkServerIdentityAltNames(host: string, cert: PeerCertificate) {
|
||||
private checkServerIdentityAltNames(host: string, cert: tls.PeerCertificate) {
|
||||
// Fixes the cert representation when subject is empty and altNames are present
|
||||
// Required for node versions < 12.14.1 (which could be used for bwdc cli)
|
||||
// Adapted from: https://github.com/auth0/ad-ldap-connector/commit/1f4dd2be6ed93dda591dd31ed5483a9b452a8d2a
|
||||
@@ -510,6 +494,6 @@ export class LdapDirectoryService implements IDirectoryService {
|
||||
};
|
||||
}
|
||||
|
||||
return checkServerIdentity(host, cert);
|
||||
return tls.checkServerIdentity(host, cert);
|
||||
}
|
||||
}
|
||||
|
||||
79
src/services/single-request-builder.spec.ts
Normal file
@@ -0,0 +1,79 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";

import { UserEntry } from "@/src/models/userEntry";

import { RequestBuilderOptions } from "../abstractions/request-builder.service";
import { groupSimulator, userSimulator } from "../utils/request-builder-helper";

import { SingleRequestBuilder } from "./single-request-builder";

describe("SingleRequestBuilder", () => {
  let singleRequestBuilder: SingleRequestBuilder;

  beforeEach(async () => {
    singleRequestBuilder = new SingleRequestBuilder();
  });

  const defaultOptions: RequestBuilderOptions = Object.freeze({
    overwriteExisting: false,
    removeDisabled: false,
  });

  it("SingleRequestBuilder returns single request for 200 users", () => {
    const mockGroups = groupSimulator(200);
    const mockUsers = userSimulator(200);

    const requests = singleRequestBuilder.buildRequest(mockGroups, mockUsers, defaultOptions);

    expect(requests.length).toEqual(1);
  });

  it("SingleRequestBuilder returns request with overwriteExisting enabled", () => {
    const mockGroups = groupSimulator(200);
    const mockUsers = userSimulator(200);

    const options = { ...defaultOptions, overwriteExisting: true };
    const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];

    expect(request.overwriteExisting).toBe(true);
  });

  it("SingleRequestBuilder returns request with deleted user when removeDisabled is true", () => {
    const mockGroups = groupSimulator(200);
    const mockUsers = userSimulator(200);

    const disabledUser = new UserEntry();
    const disabledUserEmail = GetUniqueString() + "@example.com";
    disabledUser.disabled = true;
    disabledUser.email = disabledUserEmail;
    mockUsers.push(disabledUser);

    const options = { ...defaultOptions, removeDisabled: true };
    const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];

    expect(request.members.length).toEqual(201);
    expect(request.members.pop()).toEqual(
      expect.objectContaining({ email: disabledUserEmail, deleted: true }),
    );
    expect(request.overwriteExisting).toBe(false);
  });

  it("SingleRequestBuilder returns request with deleted user and overwriteExisting enabled when overwriteExisting and removeDisabled are true", () => {
    const mockGroups = groupSimulator(200);
    const mockUsers = userSimulator(200);

    const disabledUser = new UserEntry();
    const disabledUserEmail = GetUniqueString() + "@example.com";
    disabledUser.disabled = true;
    disabledUser.email = disabledUserEmail;
    mockUsers.push(disabledUser);

    const options = { overwriteExisting: true, removeDisabled: true };
    const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];

    expect(request.members.pop()).toEqual(
      expect.objectContaining({ email: disabledUserEmail, deleted: true }),
    );
    expect(request.overwriteExisting).toBe(true);
  });
});
41
src/services/single-request-builder.ts
Normal file
@@ -0,0 +1,41 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";

import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";

import { RequestBuilder, RequestBuilderOptions } from "../abstractions/request-builder.service";

/**
 * This class is responsible for building small (<2k users) syncs as a single
 * request to the /import endpoint. This is done to be backwards compatible with
 * existing functionality for sync requests that are small enough not to
 * exceed default maximum packet size limits on NGINX web servers.
 * */
export class SingleRequestBuilder implements RequestBuilder {
  buildRequest(
    groups: GroupEntry[],
    users: UserEntry[],
    options: RequestBuilderOptions,
  ): OrganizationImportRequest[] {
    return [
      new OrganizationImportRequest({
        groups: (groups ?? []).map((g) => {
          return {
            name: g.name,
            externalId: g.externalId,
            memberExternalIds: Array.from(g.userMemberExternalIds),
          };
        }),
        users: (users ?? []).map((u) => {
          return {
            email: u.email,
            externalId: u.externalId,
            deleted: u.deleted || (options.removeDisabled && u.disabled),
          };
        }),
        overwriteExisting: options.overwriteExisting,
        largeImport: false,
      }),
    ];
  }
}
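A minimal sketch (not part of the diff) of the single-request path, reusing the simulator helpers added in this PR; the option values are illustrative:

const [request] = new SingleRequestBuilder().buildRequest(groupSimulator(10), userSimulator(200), {
  overwriteExisting: false,
  removeDisabled: false,
});
// One OrganizationImportRequest covering all 200 users and 10 groups, with largeImport: false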
132
src/services/sync.service.integration.spec.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { mock, MockProxy } from "jest-mock-extended";
|
||||
|
||||
import { ApiService } from "@/jslib/common/src/abstractions/api.service";
|
||||
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
|
||||
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
|
||||
import { EnvironmentService } from "@/jslib/common/src/services/environment.service";
|
||||
|
||||
import { I18nService } from "../../jslib/common/src/abstractions/i18n.service";
|
||||
import { LogService } from "../../jslib/common/src/abstractions/log.service";
|
||||
import { groupFixtures } from "../../openldap/group-fixtures";
|
||||
import { userFixtures } from "../../openldap/user-fixtures";
|
||||
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
|
||||
import { DirectoryType } from "../enums/directoryType";
|
||||
import { getLdapConfiguration, getSyncConfiguration } from "../utils/test-fixtures";
|
||||
|
||||
import { BatchRequestBuilder } from "./batch-request-builder";
|
||||
import { LdapDirectoryService } from "./ldap-directory.service";
|
||||
import { SingleRequestBuilder } from "./single-request-builder";
|
||||
import { StateService } from "./state.service";
|
||||
import { SyncService } from "./sync.service";
|
||||
import * as constants from "./sync.service";
|
||||
|
||||
describe("SyncService", () => {
|
||||
let logService: MockProxy<LogService>;
|
||||
let i18nService: MockProxy<I18nService>;
|
||||
let stateService: MockProxy<StateService>;
|
||||
let cryptoFunctionService: MockProxy<CryptoFunctionService>;
|
||||
let apiService: MockProxy<ApiService>;
|
||||
let messagingService: MockProxy<MessagingService>;
|
||||
let environmentService: MockProxy<EnvironmentService>;
|
||||
let directoryFactory: MockProxy<DirectoryFactoryService>;
|
||||
|
||||
let batchRequestBuilder: BatchRequestBuilder;
|
||||
let singleRequestBuilder: SingleRequestBuilder;
|
||||
let syncService: SyncService;
|
||||
let directoryService: LdapDirectoryService;
|
||||
|
||||
const originalBatchSize = constants.batchSize;
|
||||
|
||||
beforeEach(() => {
|
||||
logService = mock();
|
||||
i18nService = mock();
|
||||
stateService = mock();
|
||||
cryptoFunctionService = mock();
|
||||
apiService = mock();
|
||||
messagingService = mock();
|
||||
environmentService = mock();
|
||||
directoryFactory = mock();
|
||||
|
||||
stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
|
||||
stateService.getOrganizationId.mockResolvedValue("fakeId");
|
||||
|
||||
directoryService = new LdapDirectoryService(logService, i18nService, stateService);
|
||||
directoryFactory.createService.mockReturnValue(directoryService);
|
||||
|
||||
batchRequestBuilder = new BatchRequestBuilder();
|
||||
singleRequestBuilder = new SingleRequestBuilder();
|
||||
|
||||
syncService = new SyncService(
|
||||
cryptoFunctionService,
|
||||
apiService,
|
||||
messagingService,
|
||||
i18nService,
|
||||
environmentService,
|
||||
stateService,
|
||||
batchRequestBuilder,
|
||||
singleRequestBuilder,
|
||||
directoryFactory,
|
||||
);
|
||||
});
|
||||
|
||||
describe("OpenLdap integration: ", () => {
|
||||
it("with largeImport disabled matches directory fixture data", async () => {
|
||||
stateService.getDirectory
|
||||
.calledWith(DirectoryType.Ldap)
|
||||
.mockResolvedValue(getLdapConfiguration());
|
||||
stateService.getSync.mockResolvedValue(
|
||||
getSyncConfiguration({
|
||||
users: true,
|
||||
groups: true,
|
||||
largeImport: false,
|
||||
overwriteExisting: false,
|
||||
}),
|
||||
);
|
||||
|
||||
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
|
||||
// This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
|
||||
stateService.getLastSyncHash.mockResolvedValue("unique hash");
|
||||
|
||||
const syncResult = await syncService.sync(false, false);
|
||||
|
||||
expect(syncResult).toEqual([groupFixtures, userFixtures]);
|
||||
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ overwriteExisting: false }),
|
||||
);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("with largeImport enabled matches directory fixture data", async () => {
|
||||
stateService.getDirectory
|
||||
.calledWith(DirectoryType.Ldap)
|
||||
.mockResolvedValue(getLdapConfiguration());
|
||||
stateService.getSync.mockResolvedValue(
|
||||
getSyncConfiguration({
|
||||
users: true,
|
||||
groups: true,
|
||||
largeImport: true,
|
||||
overwriteExisting: false,
|
||||
}),
|
||||
);
|
||||
|
||||
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
|
||||
// This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
|
||||
stateService.getLastSyncHash.mockResolvedValue("unique hash");
|
||||
|
||||
// @ts-expect-error This is a workaround to make batchSize smaller to trigger the batching logic, since it's a const.
|
||||
constants.batchSize = 4;
|
||||
|
||||
const syncResult = await syncService.sync(false, false);
|
||||
|
||||
expect(syncResult).toEqual([groupFixtures, userFixtures]);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ overwriteExisting: false }),
|
||||
);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
|
||||
|
||||
// @ts-expect-error Reset batch size to original state.
|
||||
constants.batchSize = originalBatchSize;
|
||||
});
|
||||
});
|
||||
});
|
||||
135
src/services/sync.service.spec.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import { mock, MockProxy } from "jest-mock-extended";
|
||||
|
||||
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
|
||||
import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
|
||||
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
|
||||
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
|
||||
import { ApiService } from "@/jslib/common/src/services/api.service";
|
||||
|
||||
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
|
||||
import { DirectoryType } from "../enums/directoryType";
|
||||
import { getSyncConfiguration } from "../utils/test-fixtures";
|
||||
|
||||
import { BatchRequestBuilder } from "./batch-request-builder";
|
||||
import { I18nService } from "./i18n.service";
|
||||
import { LdapDirectoryService } from "./ldap-directory.service";
|
||||
import { SingleRequestBuilder } from "./single-request-builder";
|
||||
import { StateService } from "./state.service";
|
||||
import { SyncService } from "./sync.service";
|
||||
import * as constants from "./sync.service";
|
||||
|
||||
import { groupFixtures } from "@/openldap/group-fixtures";
|
||||
import { userFixtures } from "@/openldap/user-fixtures";
|
||||
|
||||
describe("SyncService", () => {
|
||||
let cryptoFunctionService: MockProxy<CryptoFunctionService>;
|
||||
let apiService: MockProxy<ApiService>;
|
||||
let messagingService: MockProxy<MessagingService>;
|
||||
let i18nService: MockProxy<I18nService>;
|
||||
let environmentService: MockProxy<EnvironmentService>;
|
||||
let stateService: MockProxy<StateService>;
|
||||
let directoryFactory: MockProxy<DirectoryFactoryService>;
|
||||
let batchRequestBuilder: MockProxy<BatchRequestBuilder>;
|
||||
let singleRequestBuilder: MockProxy<SingleRequestBuilder>;
|
||||
|
||||
let syncService: SyncService;
|
||||
|
||||
const originalBatchSize = constants.batchSize;
|
||||
|
||||
beforeEach(() => {
|
||||
cryptoFunctionService = mock();
|
||||
apiService = mock();
|
||||
messagingService = mock();
|
||||
i18nService = mock();
|
||||
environmentService = mock();
|
||||
stateService = mock();
|
||||
directoryFactory = mock();
|
||||
batchRequestBuilder = mock();
|
||||
singleRequestBuilder = mock();
|
||||
|
||||
stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
|
||||
stateService.getOrganizationId.mockResolvedValue("fakeId");
|
||||
const mockDirectoryService = mock<LdapDirectoryService>();
|
||||
mockDirectoryService.getEntries.mockResolvedValue([groupFixtures, userFixtures]);
|
||||
directoryFactory.createService.mockReturnValue(mockDirectoryService);
|
||||
|
||||
syncService = new SyncService(
|
||||
cryptoFunctionService,
|
||||
apiService,
|
||||
messagingService,
|
||||
i18nService,
|
||||
environmentService,
|
||||
stateService,
|
||||
batchRequestBuilder,
|
||||
singleRequestBuilder,
|
||||
directoryFactory,
|
||||
);
|
||||
});
|
||||
|
||||
it("Sync posts single request successfully for unique hashes", async () => {
|
||||
stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
|
||||
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
|
||||
// This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
|
||||
stateService.getLastSyncHash.mockResolvedValue("unique hash");
|
||||
|
||||
const mockRequest: OrganizationImportRequest[] = [
|
||||
{
|
||||
members: [],
|
||||
groups: [],
|
||||
overwriteExisting: true,
|
||||
largeImport: true,
|
||||
},
|
||||
];
|
||||
|
||||
singleRequestBuilder.buildRequest.mockReturnValue(mockRequest);
|
||||
|
||||
await syncService.sync(true, false);
|
||||
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("Sync posts multiple request successfully for unique hashes", async () => {
|
||||
stateService.getSync.mockResolvedValue(
|
||||
getSyncConfiguration({ groups: true, users: true, largeImport: true }),
|
||||
);
|
||||
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
|
||||
// This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
|
||||
stateService.getLastSyncHash.mockResolvedValue("unique hash");
|
||||
|
||||
// @ts-expect-error This is a workaround to make batchSize smaller to trigger the batching logic, since it's a const.
|
||||
constants.batchSize = 4;
|
||||
|
||||
const mockRequests = new Array(6).fill({
|
||||
members: [],
|
||||
groups: [],
|
||||
overwriteExisting: true,
|
||||
largeImport: true,
|
||||
});
|
||||
|
||||
batchRequestBuilder.buildRequest.mockReturnValue(mockRequests);
|
||||
|
||||
await syncService.sync(true, false);
|
||||
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[0]);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[1]);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[2]);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[3]);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[4]);
|
||||
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[5]);
|
||||
|
||||
// @ts-expect-error Reset batch size back to original value.
|
||||
constants.batchSize = originalBatchSize;
|
||||
});
|
||||
|
||||
it("does not post for the same hash", async () => {
|
||||
stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
|
||||
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
|
||||
// This arranges the last hash to be the same as the ArrayBuffer after it is converted to b64
|
||||
stateService.getLastSyncHash.mockResolvedValue("AA==");
|
||||
|
||||
await syncService.sync(true, false);
|
||||
|
||||
expect(apiService.postPublicImportDirectory).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -2,35 +2,40 @@ import { ApiService } from "@/jslib/common/src/abstractions/api.service";
|
||||
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
|
||||
import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
|
||||
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
|
||||
import { LogService } from "@/jslib/common/src/abstractions/log.service";
|
||||
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
|
||||
import { Utils } from "@/jslib/common/src/misc/utils";
|
||||
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
|
||||
|
||||
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
|
||||
import { StateService } from "../abstractions/state.service";
|
||||
import { DirectoryType } from "../enums/directoryType";
|
||||
import { GroupEntry } from "../models/groupEntry";
|
||||
import { SyncConfiguration } from "../models/syncConfiguration";
|
||||
import { UserEntry } from "../models/userEntry";
|
||||
|
||||
import { AzureDirectoryService } from "./azure-directory.service";
|
||||
import { IDirectoryService } from "./directory.service";
|
||||
import { GSuiteDirectoryService } from "./gsuite-directory.service";
|
||||
import { LdapDirectoryService } from "./ldap-directory.service";
|
||||
import { OktaDirectoryService } from "./okta-directory.service";
|
||||
import { OneLoginDirectoryService } from "./onelogin-directory.service";
|
||||
import { BatchRequestBuilder } from "./batch-request-builder";
|
||||
import { SingleRequestBuilder } from "./single-request-builder";
|
||||
|
||||
export interface HashResult {
|
||||
hash: string;
|
||||
hashLegacy: string;
|
||||
}
|
||||
|
||||
export const batchSize = 2000;
|
||||
|
||||
export class SyncService {
|
||||
private dirType: DirectoryType;
|
||||
|
||||
constructor(
|
||||
private logService: LogService,
|
||||
private cryptoFunctionService: CryptoFunctionService,
|
||||
private apiService: ApiService,
|
||||
private messagingService: MessagingService,
|
||||
private i18nService: I18nService,
|
||||
private environmentService: EnvironmentService,
|
||||
private stateService: StateService,
|
||||
private batchRequestBuilder: BatchRequestBuilder,
|
||||
private singleRequestBuilder: SingleRequestBuilder,
|
||||
private directoryFactory: DirectoryFactoryService,
|
||||
) {}
|
||||
|
||||
async sync(force: boolean, test: boolean): Promise<[GroupEntry[], UserEntry[]]> {
|
||||
@@ -39,7 +44,7 @@ export class SyncService {
|
||||
throw new Error("No directory configured.");
|
||||
}
|
||||
|
||||
const directoryService = this.getDirectoryService();
|
||||
const directoryService = this.directoryFactory.createService(this.dirType);
|
||||
if (directoryService == null) {
|
||||
throw new Error("Cannot load directory service.");
|
||||
}
|
||||
@@ -78,42 +83,15 @@ export class SyncService {
|
||||
return [groups, users];
|
||||
}
|
||||
|
||||
const req = this.buildRequest(
|
||||
groups,
|
||||
users,
|
||||
syncConfig.removeDisabled,
|
||||
syncConfig.overwriteExisting,
|
||||
syncConfig.largeImport,
|
||||
);
|
||||
const reqJson = JSON.stringify(req);
|
||||
const reqs = this.buildRequest(groups, users, syncConfig);
|
||||
|
||||
const orgId = await this.stateService.getOrganizationId();
|
||||
if (orgId == null) {
|
||||
throw new Error("Organization not set.");
|
||||
}
|
||||
const result: HashResult = await this.generateHash(reqs);
|
||||
|
||||
// TODO: Remove hashLegacy once we're sure clients have had time to sync new hashes
|
||||
let hashLegacy: string = null;
|
||||
const hashBuffLegacy = await this.cryptoFunctionService.hash(
|
||||
this.environmentService.getApiUrl() + reqJson,
|
||||
"sha256",
|
||||
);
|
||||
if (hashBuffLegacy != null) {
|
||||
hashLegacy = Utils.fromBufferToB64(hashBuffLegacy);
|
||||
}
|
||||
let hash: string = null;
|
||||
const hashBuff = await this.cryptoFunctionService.hash(
|
||||
this.environmentService.getApiUrl() + orgId + reqJson,
|
||||
"sha256",
|
||||
);
|
||||
if (hashBuff != null) {
|
||||
hash = Utils.fromBufferToB64(hashBuff);
|
||||
}
|
||||
const lastHash = await this.stateService.getLastSyncHash();
|
||||
|
||||
if (lastHash == null || (hash !== lastHash && hashLegacy !== lastHash)) {
|
||||
await this.apiService.postPublicImportDirectory(req);
|
||||
await this.stateService.setLastSyncHash(hash);
|
||||
if (result.hash && (await this.isNewHash(result))) {
|
||||
for (const req of reqs) {
|
||||
await this.apiService.postPublicImportDirectory(req);
|
||||
}
|
||||
await this.stateService.setLastSyncHash(result.hash);
|
||||
} else {
|
||||
groups = null;
|
||||
users = null;
|
||||
@@ -133,6 +111,40 @@ export class SyncService {
|
||||
}
|
||||
}
|
||||
|
||||
async generateHash(reqs: OrganizationImportRequest[]): Promise<HashResult> {
|
||||
const reqJson = JSON.stringify(reqs?.length === 1 ? reqs[0] : reqs);
|
||||
const orgId = await this.stateService.getOrganizationId();
|
||||
if (orgId == null) {
|
||||
throw new Error("Organization not set.");
|
||||
}
|
||||
|
||||
// TODO: Remove hashLegacy once we're sure clients have had time to sync new hashes
|
||||
let hashLegacy: string = null;
|
||||
const hashBuffLegacy = await this.cryptoFunctionService.hash(
|
||||
this.environmentService.getApiUrl() + reqJson,
|
||||
"sha256",
|
||||
);
|
||||
if (hashBuffLegacy != null) {
|
||||
hashLegacy = Utils.fromBufferToB64(hashBuffLegacy);
|
||||
}
|
||||
let hash: string = null;
|
||||
const hashBuff = await this.cryptoFunctionService.hash(
|
||||
this.environmentService.getApiUrl() + orgId + reqJson,
|
||||
"sha256",
|
||||
);
|
||||
if (hashBuff != null) {
|
||||
hash = Utils.fromBufferToB64(hashBuff);
|
||||
}
|
||||
|
||||
return { hash, hashLegacy };
|
||||
}
|
||||
|
||||
async isNewHash(hashResult: HashResult): Promise<boolean> {
|
||||
const lastHash = await this.stateService.getLastSyncHash();
|
||||
|
||||
return lastHash == null || (hashResult.hash !== lastHash && hashResult.hashLegacy !== lastHash);
|
||||
}
|
||||
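A short sketch (not part of the diff) of how the two helpers above combine inside sync(); syncService and reqs are assumed to already exist:

const result = await syncService.generateHash(reqs);
if (result.hash && (await syncService.isNewHash(result))) {
  // the directory contents changed since the last successful sync:
  // post each request, then persist result.hash via setLastSyncHash
}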
|
||||
private removeDuplicateUsers(users: UserEntry[]) {
|
||||
if (users == null) {
|
||||
return null;
|
||||
@@ -198,48 +210,16 @@ export class SyncService {
|
||||
return allUsers;
|
||||
}
|
||||
|
||||
private getDirectoryService(): IDirectoryService {
|
||||
switch (this.dirType) {
|
||||
case DirectoryType.GSuite:
|
||||
return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService);
|
||||
case DirectoryType.AzureActiveDirectory:
|
||||
return new AzureDirectoryService(this.logService, this.i18nService, this.stateService);
|
||||
case DirectoryType.Ldap:
|
||||
return new LdapDirectoryService(this.logService, this.i18nService, this.stateService);
|
||||
case DirectoryType.Okta:
|
||||
return new OktaDirectoryService(this.logService, this.i18nService, this.stateService);
|
||||
case DirectoryType.OneLogin:
|
||||
return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService);
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private buildRequest(
|
||||
groups: GroupEntry[],
|
||||
users: UserEntry[],
|
||||
removeDisabled: boolean,
|
||||
overwriteExisting: boolean,
|
||||
largeImport = false,
|
||||
) {
|
||||
return new OrganizationImportRequest({
|
||||
groups: (groups ?? []).map((g) => {
|
||||
return {
|
||||
name: g.name,
|
||||
externalId: g.externalId,
|
||||
memberExternalIds: Array.from(g.userMemberExternalIds),
|
||||
};
|
||||
}),
|
||||
users: (users ?? []).map((u) => {
|
||||
return {
|
||||
email: u.email,
|
||||
externalId: u.externalId,
|
||||
deleted: u.deleted || (removeDisabled && u.disabled),
|
||||
};
|
||||
}),
|
||||
overwriteExisting: overwriteExisting,
|
||||
largeImport: largeImport,
|
||||
});
|
||||
syncConfig: SyncConfiguration,
|
||||
): OrganizationImportRequest[] {
|
||||
if (syncConfig.largeImport && (groups?.length ?? 0) + (users?.length ?? 0) > batchSize) {
|
||||
return this.batchRequestBuilder.buildRequest(groups, users, syncConfig);
|
||||
} else {
|
||||
return this.singleRequestBuilder.buildRequest(groups, users, syncConfig);
|
||||
}
|
||||
}
|
||||
|
||||
private async saveSyncTimes(syncConfig: SyncConfiguration, time: Date) {
|
||||
|
||||
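A compact sketch (not part of the diff) restating the routing rule in the reworked buildRequest above, with the names taken from this file:

const total = (groups?.length ?? 0) + (users?.length ?? 0);
const requests =
  syncConfig.largeImport && total > batchSize
    ? this.batchRequestBuilder.buildRequest(groups, users, syncConfig) // several <= 2000-entry requests
    : this.singleRequestBuilder.buildRequest(groups, users, syncConfig); // one request, largeImport: false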
26
src/utils/request-builder-helper.ts
Normal file
@@ -0,0 +1,26 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";

import { GroupEntry } from "../models/groupEntry";
import { UserEntry } from "../models/userEntry";

export function userSimulator(userCount: number): UserEntry[] {
  const users: UserEntry[] = [];
  while (userCount > 0) {
    const userEntry = new UserEntry();
    userEntry.email = GetUniqueString() + "@example.com";
    users.push(userEntry);
    userCount--;
  }
  return users;
}

export function groupSimulator(groupCount: number): GroupEntry[] {
  const groups: GroupEntry[] = [];
  while (groupCount > 0) {
    const groupEntry = new GroupEntry();
    groupEntry.name = GetUniqueString();
    groups.push(groupEntry);
    groupCount--;
  }
  return groups;
}
53
src/utils/test-fixtures.ts
Normal file
@@ -0,0 +1,53 @@
import { LdapConfiguration } from "../models/ldapConfiguration";
import { SyncConfiguration } from "../models/syncConfiguration";

/**
 * @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration.
 */
export const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({
  ssl: false,
  startTls: false,
  tlsCaPath: null,
  sslAllowUnauthorized: false,
  sslCertPath: null,
  sslKeyPath: null,
  sslCaPath: null,
  hostname: "localhost",
  port: 1389,
  domain: null,
  rootPath: "dc=bitwarden,dc=com",
  currentUser: false,
  username: "cn=admin,dc=bitwarden,dc=com",
  password: "admin",
  ad: false,
  pagedSearch: false,
  ...(config ?? {}),
});

/**
 * @returns a basic sync configuration. Can be overridden by passing in a partial configuration.
 */
export const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({
  users: false,
  groups: false,
  interval: 5,
  userFilter: null,
  groupFilter: null,
  removeDisabled: false,
  overwriteExisting: false,
  largeImport: false,
  // Ldap properties
  groupObjectClass: "posixGroup",
  userObjectClass: "person",
  groupPath: null,
  userPath: null,
  groupNameAttribute: "cn",
  userEmailAttribute: "mail",
  memberAttribute: "memberUid",
  useEmailPrefixSuffix: false,
  emailPrefixAttribute: "sAMAccountName",
  emailSuffix: null,
  creationDateAttribute: "whenCreated",
  revisionDateAttribute: "whenChanged",
  ...(config ?? {}),
});
@@ -17,12 +17,7 @@
"paths": {
  "tldjs": ["@/jslib/src/misc/tldjs.noop"],
  "@/*": ["./*"]
},
"plugins": [
  {
    "transform": "typescript-transform-paths"
  }
]
}
},
"include": ["src", "jslib", "scripts", "./*.ts"]
}