Mirror of https://github.com/bitwarden/directory-connector, synced 2025-12-05 23:53:21 +00:00

Compare commits


1 Commit

Author: Jimmy Vo
SHA1: 599b6e6058
Message: [PM-15283] [PM-15284] [PM-15286] Upgrade Angular to 19.
Date: 2024-12-09 14:10:39 -05:00
51 changed files with 5537 additions and 346612 deletions

View File

@@ -7,12 +7,6 @@
"groupName": "gh minor",
"matchManagers": ["github-actions"],
"matchUpdateTypes": ["minor", "patch"]
},
{
"groupName": "Google Libraries",
"matchPackagePatterns": ["google-auth-library", "googleapis"],
"matchManagers": ["npm"],
"groupSlug": "google-libraries"
}
]
}

View File

@@ -1,377 +0,0 @@
name: Build CLI
on:
pull_request: {}
push:
branches:
- "main"
- "rc"
- "hotfix-rc"
workflow_dispatch: {}
permissions:
contents: read
jobs:
cloc:
name: CLOC
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up CLOC
run: |
sudo apt update
sudo apt -y install cloc
- name: Print lines of code
run: cloc --include-lang TypeScript,JavaScript,HTML,Sass,CSS --vcs git
setup:
name: Setup
runs-on: ubuntu-24.04
outputs:
package_version: ${{ steps.retrieve-version.outputs.package_version }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get Package Version
id: retrieve-version
run: |
PKG_VERSION=$(jq -r .version package.json)
echo "package_version=$PKG_VERSION" >> $GITHUB_OUTPUT
linux-cli:
name: Build Linux CLI
runs-on: ubuntu-24.04
needs: setup
env:
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
_PKG_FETCH_NODE_VERSION: 18.5.0
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Get pkg-fetch
run: |
cd $HOME
fetchedUrl="https://github.com/vercel/pkg-fetch/releases/download/v$_PKG_FETCH_VERSION/node-v$_PKG_FETCH_NODE_VERSION-linux-x64"
mkdir -p .pkg-cache/v$_PKG_FETCH_VERSION
wget $fetchedUrl -O "./.pkg-cache/v$_PKG_FETCH_VERSION/fetched-v$_PKG_FETCH_NODE_VERSION-linux-x64"
- name: Keytar
run: |
keytarVersion=$(cat package.json | jq -r '.dependencies.keytar')
keytarTar="keytar-v$keytarVersion-napi-v3-linux-x64.tar"
keytarTarGz="$keytarTar.gz"
keytarUrl="https://github.com/atom/node-keytar/releases/download/v$keytarVersion/$keytarTarGz"
mkdir -p ./keytar/linux
wget $keytarUrl -O ./keytar/linux/$keytarTarGz
tar -xvf ./keytar/linux/$keytarTarGz -C ./keytar/linux
- name: Install
run: npm install
- name: Package CLI
run: npm run dist:cli:lin
- name: Zip
run: zip -j dist-cli/bwdc-linux-$_PACKAGE_VERSION.zip dist-cli/linux/bwdc keytar/linux/build/Release/keytar.node
- name: Version Test
run: |
sudo apt-get update
sudo apt install libsecret-1-0 dbus-x11 gnome-keyring
eval $(dbus-launch --sh-syntax)
eval $(echo -n "" | /usr/bin/gnome-keyring-daemon --login)
eval $(/usr/bin/gnome-keyring-daemon --components=secrets --start)
mkdir -p test/linux
unzip ./dist-cli/bwdc-linux-$_PACKAGE_VERSION.zip -d ./test/linux
testVersion=$(./test/linux/bwdc -v)
echo "version: $_PACKAGE_VERSION"
echo "testVersion: $testVersion"
if [ "$testVersion" != "$_PACKAGE_VERSION" ]; then
echo "Version test failed."
exit 1
fi
- name: Upload Linux Zip to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
macos-cli:
name: Build Mac CLI
runs-on: macos-13
needs: setup
env:
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
_PKG_FETCH_NODE_VERSION: 18.5.0
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Get pkg-fetch
run: |
cd $HOME
fetchedUrl="https://github.com/vercel/pkg-fetch/releases/download/v$_PKG_FETCH_VERSION/node-v$_PKG_FETCH_NODE_VERSION-macos-x64"
mkdir -p .pkg-cache/v$_PKG_FETCH_VERSION
wget $fetchedUrl -O "./.pkg-cache/v$_PKG_FETCH_VERSION/fetched-v$_PKG_FETCH_NODE_VERSION-macos-x64"
- name: Keytar
run: |
keytarVersion=$(cat package.json | jq -r '.dependencies.keytar')
keytarTar="keytar-v$keytarVersion-napi-v3-darwin-x64.tar"
keytarTarGz="$keytarTar.gz"
keytarUrl="https://github.com/atom/node-keytar/releases/download/v$keytarVersion/$keytarTarGz"
mkdir -p ./keytar/macos
wget $keytarUrl -O ./keytar/macos/$keytarTarGz
tar -xvf ./keytar/macos/$keytarTarGz -C ./keytar/macos
- name: Install
run: npm install
- name: Package CLI
run: npm run dist:cli:mac
- name: Zip
run: zip -j dist-cli/bwdc-macos-$_PACKAGE_VERSION.zip dist-cli/macos/bwdc keytar/macos/build/Release/keytar.node
- name: Version Test
run: |
mkdir -p test/macos
unzip ./dist-cli/bwdc-macos-$_PACKAGE_VERSION.zip -d ./test/macos
testVersion=$(./test/macos/bwdc -v)
echo "version: $_PACKAGE_VERSION"
echo "testVersion: $testVersion"
if [ "$testVersion" != "$_PACKAGE_VERSION" ]; then
echo "Version test failed."
exit 1
fi
- name: Upload Mac Zip to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
windows-cli:
name: Build Windows CLI
runs-on: windows-2022
needs: setup
env:
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
_WIN_PKG_FETCH_VERSION: 18.5.0
_WIN_PKG_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup Windows builder
run: |
choco install checksum --no-progress
choco install reshack --no-progress
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Get pkg-fetch
shell: pwsh
run: |
cd $HOME
$fetchedUrl = "https://github.com/vercel/pkg-fetch/releases/download/v$env:_WIN_PKG_VERSION/node-v$env:_WIN_PKG_FETCH_VERSION-win-x64"
New-Item -ItemType directory -Path ./.pkg-cache
New-Item -ItemType directory -Path ./.pkg-cache/v$env:_WIN_PKG_VERSION
Invoke-RestMethod -Uri $fetchedUrl `
-OutFile "./.pkg-cache/v$env:_WIN_PKG_VERSION/fetched-v$env:_WIN_PKG_FETCH_VERSION-win-x64"
- name: Keytar
shell: pwsh
run: |
$keytarVersion = (Get-Content -Raw -Path ./package.json | ConvertFrom-Json).dependencies.keytar
$keytarTar = "keytar-v${keytarVersion}-napi-v3-{0}-x64.tar"
$keytarTarGz = "${keytarTar}.gz"
$keytarUrl = "https://github.com/atom/node-keytar/releases/download/v${keytarVersion}/${keytarTarGz}"
New-Item -ItemType directory -Path ./keytar/windows | Out-Null
Invoke-RestMethod -Uri $($keytarUrl -f "win32") -OutFile "./keytar/windows/$($keytarTarGz -f "win32")"
7z e "./keytar/windows/$($keytarTarGz -f "win32")" -o"./keytar/windows"
7z e "./keytar/windows/$($keytarTar -f "win32")" -o"./keytar/windows"
- name: Setup Version Info
shell: pwsh
run: |
$major, $minor, $patch = $env:_PACKAGE_VERSION.split('.')
$versionInfo = @"
1 VERSIONINFO
FILEVERSION $major,$minor,$patch,0
PRODUCTVERSION $major,$minor,$patch,0
FILEOS 0x40004
FILETYPE 0x1
{
BLOCK "StringFileInfo"
{
BLOCK "040904b0"
{
VALUE "CompanyName", "Bitwarden Inc."
VALUE "ProductName", "Bitwarden"
VALUE "FileDescription", "Bitwarden Directory Connector CLI"
VALUE "FileVersion", "$env:_PACKAGE_VERSION"
VALUE "ProductVersion", "$env:_PACKAGE_VERSION"
VALUE "OriginalFilename", "bwdc.exe"
VALUE "InternalName", "bwdc"
VALUE "LegalCopyright", "Copyright Bitwarden Inc."
}
}
BLOCK "VarFileInfo"
{
VALUE "Translation", 0x0409 0x04B0
}
}
"@
$versionInfo | Out-File ./version-info.rc
- name: Resource Hacker
shell: cmd
run: |
set PATH=%PATH%;C:\Program Files (x86)\Resource Hacker
set WIN_PKG=C:\Users\runneradmin\.pkg-cache\v%_WIN_PKG_VERSION%\fetched-v%_WIN_PKG_FETCH_VERSION%-win-x64
set WIN_PKG_BUILT=C:\Users\runneradmin\.pkg-cache\v%_WIN_PKG_VERSION%\built-v%_WIN_PKG_FETCH_VERSION%-win-x64
ResourceHacker -open %WIN_PKG% -save %WIN_PKG% -action delete -mask ICONGROUP,1,
ResourceHacker -open version-info.rc -save version-info.res -action compile
ResourceHacker -open %WIN_PKG% -save %WIN_PKG% -action addoverwrite -resource version-info.res
- name: Install
run: npm install
- name: Package CLI
run: npm run dist:cli:win
- name: Zip
shell: cmd
run: 7z a .\dist-cli\bwdc-windows-%_PACKAGE_VERSION%.zip .\dist-cli\windows\bwdc.exe .\keytar\windows\keytar.node
- name: Version Test
shell: pwsh
run: |
Expand-Archive -Path "dist-cli\bwdc-windows-${{ env._PACKAGE_VERSION }}.zip" -DestinationPath "test\windows"
$testVersion = Invoke-Expression '& .\test\windows\bwdc.exe -v'
echo "version: ${env:_PACKAGE_VERSION}"
echo "testVersion: $testVersion"
if ($testVersion -ne ${env:_PACKAGE_VERSION}) {
Throw "Version test failed."
}
- name: Upload Windows Zip to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
check-failures:
name: Check for failures
runs-on: ubuntu-24.04
needs:
- cloc
- setup
- linux-cli
- macos-cli
- windows-cli
steps:
- name: Check if any job failed
if: |
(github.ref == 'refs/heads/main'
|| github.ref == 'refs/heads/rc'
|| github.ref == 'refs/heads/hotfix-rc')
&& contains(needs.*.result, 'failure')
run: exit 1
- name: Login to Azure - CI subscription
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
if: failure()
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
- name: Retrieve secrets
id: retrieve-secrets
uses: bitwarden/gh-actions/get-keyvault-secrets@main
if: failure()
with:
keyvault: "bitwarden-ci"
secrets: "devops-alerts-slack-webhook-url"
- name: Notify Slack on failure
uses: act10ns/slack@44541246747a30eb3102d87f7a4cc5471b0ffb7d # v2.1.0
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ steps.retrieve-secrets.outputs.devops-alerts-slack-webhook-url }}
with:
status: ${{ job.status }}

View File

@@ -1,354 +0,0 @@
name: Build GUI
on:
pull_request: {}
push:
branches:
- "main"
- "rc"
- "hotfix-rc"
workflow_dispatch: {}
permissions:
contents: read
jobs:
cloc:
name: CLOC
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up CLOC
run: |
sudo apt update
sudo apt -y install cloc
- name: Print lines of code
run: cloc --include-lang TypeScript,JavaScript,HTML,Sass,CSS --vcs git
setup:
name: Setup
runs-on: ubuntu-24.04
outputs:
package_version: ${{ steps.retrieve-version.outputs.package_version }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get Package Version
id: retrieve-version
run: |
PKG_VERSION=$(jq -r .version package.json)
echo "package_version=$PKG_VERSION" >> $GITHUB_OUTPUT
windows-gui:
name: Build Windows GUI
runs-on: windows-2022
needs: setup
env:
NODE_OPTIONS: --max_old_space_size=4096
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Print environment
run: |
node --version
npm --version
- name: Install AST
run: dotnet tool install --global AzureSignTool --version 4.0.1
- name: Install Node dependencies
run: npm install
- name: Login to Azure
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
- name: Retrieve secrets
id: retrieve-secrets
uses: bitwarden/gh-actions/get-keyvault-secrets@main
with:
keyvault: "bitwarden-ci"
secrets: "code-signing-vault-url,
code-signing-client-id,
code-signing-tenant-id,
code-signing-client-secret,
code-signing-cert-name"
- name: Build & Sign
run: npm run dist:win
env:
ELECTRON_BUILDER_SIGN: 1
SIGNING_VAULT_URL: ${{ steps.retrieve-secrets.outputs.code-signing-vault-url }}
SIGNING_CLIENT_ID: ${{ steps.retrieve-secrets.outputs.code-signing-client-id }}
SIGNING_TENANT_ID: ${{ steps.retrieve-secrets.outputs.code-signing-tenant-id }}
SIGNING_CLIENT_SECRET: ${{ steps.retrieve-secrets.outputs.code-signing-client-secret }}
SIGNING_CERT_NAME: ${{ steps.retrieve-secrets.outputs.code-signing-cert-name }}
- name: Upload Portable Executable to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error
- name: Upload Installer Executable to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error
- name: Upload Installer Executable Blockmap to GitHub
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: latest.yml
path: ./dist/latest.yml
if-no-files-found: error
linux-gui:
name: Build Linux GUI
runs-on: ubuntu-24.04
needs: setup
env:
NODE_OPTIONS: --max_old_space_size=4096
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Set up environment
run: |
sudo apt-get update
sudo apt-get -y install pkg-config libxss-dev libsecret-1-dev
sudo apt-get -y install rpm
- name: NPM Install
run: npm install
- name: NPM Rebuild
run: npm run rebuild
- name: NPM Package
run: npm run dist:lin
- name: Upload AppImage
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: latest-linux.yml
path: ./dist/latest-linux.yml
if-no-files-found: error
macos-gui:
name: Build MacOS GUI
runs-on: macos-13
needs: setup
env:
NODE_OPTIONS: --max_old_space_size=4096
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Print environment
run: |
node --version
npm --version
echo "GitHub ref: $GITHUB_REF"
echo "GitHub event: $GITHUB_EVENT"
- name: Login to Azure
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
- name: Get certificates
run: |
mkdir -p $HOME/certificates
az keyvault secret show --id https://bitwarden-ci.vault.azure.net/certificates/devid-app-cert |
jq -r .value | base64 -d > $HOME/certificates/devid-app-cert.p12
az keyvault secret show --id https://bitwarden-ci.vault.azure.net/certificates/devid-installer-cert |
jq -r .value | base64 -d > $HOME/certificates/devid-installer-cert.p12
az keyvault secret show --id https://bitwarden-ci.vault.azure.net/certificates/macdev-cert |
jq -r .value | base64 -d > $HOME/certificates/macdev-cert.p12
- name: Set up keychain
env:
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
security create-keychain -p $KEYCHAIN_PASSWORD build.keychain
security default-keychain -s build.keychain
security unlock-keychain -p $KEYCHAIN_PASSWORD build.keychain
security set-keychain-settings -lut 1200 build.keychain
security import "$HOME/certificates/devid-app-cert.p12" -k build.keychain -P "" \
-T /usr/bin/codesign -T /usr/bin/security -T /usr/bin/productbuild
security import "$HOME/certificates/devid-installer-cert.p12" -k build.keychain -P "" \
-T /usr/bin/codesign -T /usr/bin/security -T /usr/bin/productbuild
security import "$HOME/certificates/macdev-cert.p12" -k build.keychain -P "" \
-T /usr/bin/codesign -T /usr/bin/security -T /usr/bin/productbuild
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $KEYCHAIN_PASSWORD build.keychain
- name: Load package version
run: |
$rootPath = $env:GITHUB_WORKSPACE;
$packageVersion = (Get-Content -Raw -Path $rootPath\package.json | ConvertFrom-Json).version;
Write-Output "Setting package version to $packageVersion";
Write-Output "PACKAGE_VERSION=$packageVersion" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append;
shell: pwsh
- name: Install Node dependencies
run: npm install
- name: Set up private auth key
run: |
mkdir ~/private_keys
cat << EOF > ~/private_keys/AuthKey_UFD296548T.p8
${{ secrets.APP_STORE_CONNECT_AUTH_KEY }}
EOF
- name: Build application
run: npm run dist:mac
env:
APP_STORE_CONNECT_TEAM_ISSUER: ${{ secrets.APP_STORE_CONNECT_TEAM_ISSUER }}
APP_STORE_CONNECT_AUTH_KEY: UFD296548T
APP_STORE_CONNECT_AUTH_KEY_PATH: ~/private_keys/AuthKey_UFD296548T.p8
CSC_FOR_PULL_REQUEST: true
- name: Upload .zip artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
if-no-files-found: error
- name: Upload .dmg artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
if-no-files-found: error
- name: Upload .dmg Blockmap artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: latest-mac.yml
path: ./dist/latest-mac.yml
if-no-files-found: error
check-failures:
name: Check for failures
runs-on: ubuntu-24.04
needs:
- cloc
- setup
- windows-gui
- linux-gui
- macos-gui
steps:
- name: Check if any job failed
if: |
(github.ref == 'refs/heads/main'
|| github.ref == 'refs/heads/rc'
|| github.ref == 'refs/heads/hotfix-rc')
&& contains(needs.*.result, 'failure')
run: exit 1
- name: Login to Azure - CI subscription
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
if: failure()
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
- name: Retrieve secrets
id: retrieve-secrets
uses: bitwarden/gh-actions/get-keyvault-secrets@main
if: failure()
with:
keyvault: "bitwarden-ci"
secrets: "devops-alerts-slack-webhook-url"
- name: Notify Slack on failure
uses: act10ns/slack@44541246747a30eb3102d87f7a4cc5471b0ffb7d # v2.1.0
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ steps.retrieve-secrets.outputs.devops-alerts-slack-webhook-url }}
with:
status: ${{ job.status }}

.github/workflows/build.yml (new file, 660 lines)
View File

@@ -0,0 +1,660 @@
name: Build
on:
pull_request: {}
push:
branches:
- "main"
workflow_dispatch: {}
jobs:
cloc:
name: CLOC
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up CLOC
run: |
sudo apt update
sudo apt -y install cloc
- name: Print lines of code
run: cloc --include-lang TypeScript,JavaScript,HTML,Sass,CSS --vcs git
setup:
name: Setup
runs-on: ubuntu-24.04
outputs:
package_version: ${{ steps.retrieve-version.outputs.package_version }}
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Get Package Version
id: retrieve-version
run: |
PKG_VERSION=$(jq -r .version package.json)
echo "package_version=$PKG_VERSION" >> $GITHUB_OUTPUT
linux-cli:
name: Build Linux CLI
runs-on: ubuntu-24.04
needs: setup
env:
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
_PKG_FETCH_NODE_VERSION: 18.5.0
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Get pkg-fetch
run: |
cd $HOME
fetchedUrl="https://github.com/vercel/pkg-fetch/releases/download/v$_PKG_FETCH_VERSION/node-v$_PKG_FETCH_NODE_VERSION-linux-x64"
mkdir -p .pkg-cache/v$_PKG_FETCH_VERSION
wget $fetchedUrl -O "./.pkg-cache/v$_PKG_FETCH_VERSION/fetched-v$_PKG_FETCH_NODE_VERSION-linux-x64"
- name: Keytar
run: |
keytarVersion=$(cat package.json | jq -r '.dependencies.keytar')
keytarTar="keytar-v$keytarVersion-napi-v3-linux-x64.tar"
keytarTarGz="$keytarTar.gz"
keytarUrl="https://github.com/atom/node-keytar/releases/download/v$keytarVersion/$keytarTarGz"
mkdir -p ./keytar/linux
wget $keytarUrl -O ./keytar/linux/$keytarTarGz
tar -xvf ./keytar/linux/$keytarTarGz -C ./keytar/linux
- name: Install
run: npm install
- name: Package CLI
run: npm run dist:cli:lin
- name: Zip
run: zip -j dist-cli/bwdc-linux-$_PACKAGE_VERSION.zip dist-cli/linux/bwdc keytar/linux/build/Release/keytar.node
- name: Create checksums
run: |
shasum -a 256 dist-cli/bwdc-linux-$_PACKAGE_VERSION.zip | \
cut -d " " -f 1 > dist-cli/bwdc-linux-sha256-$_PACKAGE_VERSION.txt
- name: Version Test
run: |
sudo apt-get update
sudo apt install libsecret-1-0 dbus-x11 gnome-keyring
eval $(dbus-launch --sh-syntax)
eval $(echo -n "" | /usr/bin/gnome-keyring-daemon --login)
eval $(/usr/bin/gnome-keyring-daemon --components=secrets --start)
mkdir -p test/linux
unzip ./dist-cli/bwdc-linux-$_PACKAGE_VERSION.zip -d ./test/linux
testVersion=$(./test/linux/bwdc -v)
echo "version: $_PACKAGE_VERSION"
echo "testVersion: $testVersion"
if [ "$testVersion" != "$_PACKAGE_VERSION" ]; then
echo "Version test failed."
exit 1
fi
- name: Upload Linux Zip to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-linux-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
- name: Upload Linux checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-linux-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-linux-sha256-${{ env._PACKAGE_VERSION }}.txt
if-no-files-found: error
macos-cli:
name: Build Mac CLI
runs-on: macos-13
needs: setup
env:
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
_PKG_FETCH_NODE_VERSION: 18.5.0
_PKG_FETCH_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Get pkg-fetch
run: |
cd $HOME
fetchedUrl="https://github.com/vercel/pkg-fetch/releases/download/v$_PKG_FETCH_VERSION/node-v$_PKG_FETCH_NODE_VERSION-macos-x64"
mkdir -p .pkg-cache/v$_PKG_FETCH_VERSION
wget $fetchedUrl -O "./.pkg-cache/v$_PKG_FETCH_VERSION/fetched-v$_PKG_FETCH_NODE_VERSION-macos-x64"
- name: Keytar
run: |
keytarVersion=$(cat package.json | jq -r '.dependencies.keytar')
keytarTar="keytar-v$keytarVersion-napi-v3-darwin-x64.tar"
keytarTarGz="$keytarTar.gz"
keytarUrl="https://github.com/atom/node-keytar/releases/download/v$keytarVersion/$keytarTarGz"
mkdir -p ./keytar/macos
wget $keytarUrl -O ./keytar/macos/$keytarTarGz
tar -xvf ./keytar/macos/$keytarTarGz -C ./keytar/macos
- name: Install
run: npm install
- name: Package CLI
run: npm run dist:cli:mac
- name: Zip
run: zip -j dist-cli/bwdc-macos-$_PACKAGE_VERSION.zip dist-cli/macos/bwdc keytar/macos/build/Release/keytar.node
- name: Create checksums
run: |
shasum -a 256 dist-cli/bwdc-macos-$_PACKAGE_VERSION.zip | \
cut -d " " -f 1 > dist-cli/bwdc-macos-sha256-$_PACKAGE_VERSION.txt
- name: Version Test
run: |
mkdir -p test/macos
unzip ./dist-cli/bwdc-macos-$_PACKAGE_VERSION.zip -d ./test/macos
testVersion=$(./test/macos/bwdc -v)
echo "version: $_PACKAGE_VERSION"
echo "testVersion: $testVersion"
if [ "$testVersion" != "$_PACKAGE_VERSION" ]; then
echo "Version test failed."
exit 1
fi
- name: Upload Mac Zip to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-macos-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
- name: Upload Mac checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-macos-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-macos-sha256-${{ env._PACKAGE_VERSION }}.txt
if-no-files-found: error
windows-cli:
name: Build Windows CLI
runs-on: windows-2022
needs: setup
env:
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
_WIN_PKG_FETCH_VERSION: 18.5.0
_WIN_PKG_VERSION: 3.4
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Setup Windows builder
run: |
choco install checksum --no-progress
choco install reshack --no-progress
- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Get pkg-fetch
shell: pwsh
run: |
cd $HOME
$fetchedUrl = "https://github.com/vercel/pkg-fetch/releases/download/v$env:_WIN_PKG_VERSION/node-v$env:_WIN_PKG_FETCH_VERSION-win-x64"
New-Item -ItemType directory -Path ./.pkg-cache
New-Item -ItemType directory -Path ./.pkg-cache/v$env:_WIN_PKG_VERSION
Invoke-RestMethod -Uri $fetchedUrl `
-OutFile "./.pkg-cache/v$env:_WIN_PKG_VERSION/fetched-v$env:_WIN_PKG_FETCH_VERSION-win-x64"
- name: Keytar
shell: pwsh
run: |
$keytarVersion = (Get-Content -Raw -Path ./package.json | ConvertFrom-Json).dependencies.keytar
$keytarTar = "keytar-v${keytarVersion}-napi-v3-{0}-x64.tar"
$keytarTarGz = "${keytarTar}.gz"
$keytarUrl = "https://github.com/atom/node-keytar/releases/download/v${keytarVersion}/${keytarTarGz}"
New-Item -ItemType directory -Path ./keytar/windows | Out-Null
Invoke-RestMethod -Uri $($keytarUrl -f "win32") -OutFile "./keytar/windows/$($keytarTarGz -f "win32")"
7z e "./keytar/windows/$($keytarTarGz -f "win32")" -o"./keytar/windows"
7z e "./keytar/windows/$($keytarTar -f "win32")" -o"./keytar/windows"
- name: Setup Version Info
shell: pwsh
run: |
$major, $minor, $patch = $env:_PACKAGE_VERSION.split('.')
$versionInfo = @"
1 VERSIONINFO
FILEVERSION $major,$minor,$patch,0
PRODUCTVERSION $major,$minor,$patch,0
FILEOS 0x40004
FILETYPE 0x1
{
BLOCK "StringFileInfo"
{
BLOCK "040904b0"
{
VALUE "CompanyName", "Bitwarden Inc."
VALUE "ProductName", "Bitwarden"
VALUE "FileDescription", "Bitwarden Directory Connector CLI"
VALUE "FileVersion", "$env:_PACKAGE_VERSION"
VALUE "ProductVersion", "$env:_PACKAGE_VERSION"
VALUE "OriginalFilename", "bwdc.exe"
VALUE "InternalName", "bwdc"
VALUE "LegalCopyright", "Copyright Bitwarden Inc."
}
}
BLOCK "VarFileInfo"
{
VALUE "Translation", 0x0409 0x04B0
}
}
"@
$versionInfo | Out-File ./version-info.rc
- name: Resource Hacker
shell: cmd
run: |
set PATH=%PATH%;C:\Program Files (x86)\Resource Hacker
set WIN_PKG=C:\Users\runneradmin\.pkg-cache\v%_WIN_PKG_VERSION%\fetched-v%_WIN_PKG_FETCH_VERSION%-win-x64
set WIN_PKG_BUILT=C:\Users\runneradmin\.pkg-cache\v%_WIN_PKG_VERSION%\built-v%_WIN_PKG_FETCH_VERSION%-win-x64
ResourceHacker -open %WIN_PKG% -save %WIN_PKG% -action delete -mask ICONGROUP,1,
ResourceHacker -open version-info.rc -save version-info.res -action compile
ResourceHacker -open %WIN_PKG% -save %WIN_PKG% -action addoverwrite -resource version-info.res
- name: Install
run: npm install
- name: Package CLI
run: npm run dist:cli:win
- name: Zip
shell: cmd
run: 7z a .\dist-cli\bwdc-windows-%_PACKAGE_VERSION%.zip .\dist-cli\windows\bwdc.exe .\keytar\windows\keytar.node
- name: Version Test
shell: pwsh
run: |
Expand-Archive -Path "dist-cli\bwdc-windows-${{ env._PACKAGE_VERSION }}.zip" -DestinationPath "test\windows"
$testVersion = Invoke-Expression '& .\test\windows\bwdc.exe -v'
echo "version: ${env:_PACKAGE_VERSION}"
echo "testVersion: $testVersion"
if ($testVersion -ne ${env:_PACKAGE_VERSION}) {
Throw "Version test failed."
}
- name: Create checksums
run: |
checksum -f="./dist-cli/bwdc-windows-${env:_PACKAGE_VERSION}.zip" `
-t sha256 | Out-File ./dist-cli/bwdc-windows-sha256-${env:_PACKAGE_VERSION}.txt
- name: Upload Windows Zip to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
path: ./dist-cli/bwdc-windows-${{ env._PACKAGE_VERSION }}.zip
if-no-files-found: error
- name: Upload Windows checksum to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: bwdc-windows-sha256-${{ env._PACKAGE_VERSION }}.txt
path: ./dist-cli/bwdc-windows-sha256-${{ env._PACKAGE_VERSION }}.txt
if-no-files-found: error
windows-gui:
name: Build Windows GUI
runs-on: windows-2022
needs: setup
env:
NODE_OPTIONS: --max_old_space_size=4096
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Print environment
run: |
node --version
npm --version
- name: Install AST
run: dotnet tool install --global AzureSignTool --version 4.0.1
- name: Install Node dependencies
run: npm install
- name: Build & Sign
run: npm run dist:win
env:
ELECTRON_BUILDER_SIGN: 1
SIGNING_VAULT_URL: ${{ secrets.SIGNING_VAULT_URL }}
SIGNING_CLIENT_ID: ${{ secrets.SIGNING_CLIENT_ID }}
SIGNING_TENANT_ID: ${{ secrets.SIGNING_TENANT_ID }}
SIGNING_CLIENT_SECRET: ${{ secrets.SIGNING_CLIENT_SECRET }}
SIGNING_CERT_NAME: ${{ secrets.SIGNING_CERT_NAME }}
- name: Upload Portable Executable to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Portable-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error
- name: Upload Installer Executable to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe
if-no-files-found: error
- name: Upload Installer Executable Blockmap to GitHub
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
path: ./dist/Bitwarden-Connector-Installer-${{ env._PACKAGE_VERSION }}.exe.blockmap
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: latest.yml
path: ./dist/latest.yml
if-no-files-found: error
linux-gui:
name: Build Linux GUI
runs-on: ubuntu-24.04
needs: setup
env:
NODE_OPTIONS: --max_old_space_size=4096
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Set up environment
run: |
sudo apt-get update
sudo apt-get -y install pkg-config libxss-dev libsecret-1-dev
sudo apt-get -y install rpm
- name: NPM Install
run: npm install
- name: NPM Rebuild
run: npm run rebuild
- name: NPM Package
run: npm run dist:lin
- name: Upload AppImage
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-x86_64.AppImage
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: latest-linux.yml
path: ./dist/latest-linux.yml
if-no-files-found: error
macos-gui:
name: Build MacOS GUI
runs-on: macos-13
needs: setup
env:
NODE_OPTIONS: --max_old_space_size=4096
_PACKAGE_VERSION: ${{ needs.setup.outputs.package_version }}
HUSKY: 0
steps:
- name: Checkout repo
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Set up Node
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
node-version: '18'
- name: Update NPM
run: |
npm install -g node-gyp
node-gyp install $(node -v)
- name: Print environment
run: |
node --version
npm --version
echo "GitHub ref: $GITHUB_REF"
echo "GitHub event: $GITHUB_EVENT"
- name: Login to Azure
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
- name: Get certificates
run: |
mkdir -p $HOME/certificates
az keyvault secret show --id https://bitwarden-ci.vault.azure.net/certificates/devid-app-cert |
jq -r .value | base64 -d > $HOME/certificates/devid-app-cert.p12
az keyvault secret show --id https://bitwarden-ci.vault.azure.net/certificates/devid-installer-cert |
jq -r .value | base64 -d > $HOME/certificates/devid-installer-cert.p12
az keyvault secret show --id https://bitwarden-ci.vault.azure.net/certificates/macdev-cert |
jq -r .value | base64 -d > $HOME/certificates/macdev-cert.p12
- name: Set up keychain
env:
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
security create-keychain -p $KEYCHAIN_PASSWORD build.keychain
security default-keychain -s build.keychain
security unlock-keychain -p $KEYCHAIN_PASSWORD build.keychain
security set-keychain-settings -lut 1200 build.keychain
security import "$HOME/certificates/devid-app-cert.p12" -k build.keychain -P "" \
-T /usr/bin/codesign -T /usr/bin/security -T /usr/bin/productbuild
security import "$HOME/certificates/devid-installer-cert.p12" -k build.keychain -P "" \
-T /usr/bin/codesign -T /usr/bin/security -T /usr/bin/productbuild
security import "$HOME/certificates/macdev-cert.p12" -k build.keychain -P "" \
-T /usr/bin/codesign -T /usr/bin/security -T /usr/bin/productbuild
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $KEYCHAIN_PASSWORD build.keychain
- name: Load package version
run: |
$rootPath = $env:GITHUB_WORKSPACE;
$packageVersion = (Get-Content -Raw -Path $rootPath\package.json | ConvertFrom-Json).version;
Write-Output "Setting package version to $packageVersion";
Write-Output "PACKAGE_VERSION=$packageVersion" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append;
shell: pwsh
- name: Install Node dependencies
run: npm install
- name: Set up private auth key
run: |
mkdir ~/private_keys
cat << EOF > ~/private_keys/AuthKey_UFD296548T.p8
${{ secrets.APP_STORE_CONNECT_AUTH_KEY }}
EOF
- name: Build application
run: npm run dist:mac
env:
APP_STORE_CONNECT_TEAM_ISSUER: ${{ secrets.APP_STORE_CONNECT_TEAM_ISSUER }}
APP_STORE_CONNECT_AUTH_KEY: UFD296548T
APP_STORE_CONNECT_AUTH_KEY_PATH: ~/private_keys/AuthKey_UFD296548T.p8
CSC_FOR_PULL_REQUEST: true
- name: Upload .zip artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}-mac.zip
if-no-files-found: error
- name: Upload .dmg artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg
if-no-files-found: error
- name: Upload .dmg Blockmap artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
path: ./dist/Bitwarden-Connector-${{ env._PACKAGE_VERSION }}.dmg.blockmap
if-no-files-found: error
- name: Upload latest auto-update artifact
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: latest-mac.yml
path: ./dist/latest-mac.yml
if-no-files-found: error
check-failures:
name: Check for failures
runs-on: ubuntu-24.04
needs:
- cloc
- setup
- linux-cli
- macos-cli
- windows-cli
- windows-gui
- linux-gui
- macos-gui
steps:
- name: Check if any job failed
if: github.ref == 'refs/heads/main' && contains(needs.*.result, 'failure')
run: exit 1
- name: Login to Azure - CI subscription
uses: Azure/login@e15b166166a8746d1a47596803bd8c1b595455cf # v1.6.0
if: failure()
with:
creds: ${{ secrets.AZURE_KV_CI_SERVICE_PRINCIPAL }}
- name: Retrieve secrets
id: retrieve-secrets
uses: bitwarden/gh-actions/get-keyvault-secrets@main
if: failure()
with:
keyvault: "bitwarden-ci"
secrets: "devops-alerts-slack-webhook-url"
- name: Notify Slack on failure
uses: act10ns/slack@44541246747a30eb3102d87f7a4cc5471b0ffb7d # v2.1.0
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ steps.retrieve-secrets.outputs.devops-alerts-slack-webhook-url }}
with:
status: ${{ job.status }}

View File

@@ -3,9 +3,6 @@ name: Enforce PR labels
on:
pull_request:
types: [labeled, unlabeled, opened, edited, synchronize]
permissions:
contents: read
pull-requests: read
jobs:
enforce-label:
name: EnforceLabel

View File

@@ -8,21 +8,39 @@ on:
paths:
- ".github/workflows/integration-test.yml" # this file
- "src/services/ldap-directory.service*" # we only have integration for LDAP testing at the moment
- "./openldap/**/*" # any change to test fixtures
- "./openldap*" # any change to test fixtures
- "./docker-compose.yml" # any change to Docker configuration
pull_request:
paths:
- ".github/workflows/integration-test.yml" # this file
- "src/services/ldap-directory.service*" # we only have integration for LDAP testing at the moment
- "./openldap/**/*" # any change to test fixtures
- "./openldap*" # any change to test fixtures
- "./docker-compose.yml" # any change to Docker configuration
jobs:
check-test-secrets:
name: Check for test secrets
runs-on: ubuntu-22.04
outputs:
available: ${{ steps.check-test-secrets.outputs.available }}
permissions:
contents: read
steps:
- name: Check
id: check-test-secrets
run: |
if [ "${{ secrets.CODECOV_TOKEN }}" != '' ]; then
echo "available=true" >> $GITHUB_OUTPUT;
else
echo "available=false" >> $GITHUB_OUTPUT;
fi
testing:
name: Run tests
if: ${{ startsWith(github.head_ref, 'version_bump_') == false }}
runs-on: ubuntu-22.04
needs: check-test-secrets
permissions:
checks: write
contents: read
@@ -30,7 +48,7 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Get Node version
id: retrieve-node-version
@@ -40,7 +58,7 @@ jobs:
echo "node_version=$NODE_VERSION" >> $GITHUB_OUTPUT
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -62,7 +80,7 @@ jobs:
- name: Report test results
uses: dorny/test-reporter@31a54ee7ebcacc03a09ea97a7e5465a47b84aea5 # v1.9.1
if: ${{ github.event.pull_request.head.repo.full_name == github.repository && !cancelled() }}
if: ${{ needs.check-test-secrets.outputs.available == 'true' && !cancelled() }}
with:
name: Test Results
path: "junit.xml"
@@ -70,7 +88,13 @@ jobs:
fail-on-error: true
- name: Upload coverage to codecov.io
uses: codecov/codecov-action@5a605bd92782ce0810fa3b8acc235c921b497052 # v5.2.0
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Upload results to codecov.io
uses: codecov/test-results-action@4e79e65778be1cecd5df25e14af1eafb6df80ea9 # v1.0.2
uses: codecov/test-results-action@1b5b448b98e58ba90d1a1a1d9fcb72ca2263be46 # v1.0.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,83 +0,0 @@
name: Release
on:
workflow_dispatch:
inputs:
release_type:
description: 'Release Options'
required: true
default: 'Initial Release'
type: choice
options:
- Initial Release
- Redeploy
- Dry Run
permissions:
contents: read
jobs:
setup:
name: Setup
runs-on: ubuntu-24.04
outputs:
release_version: ${{ steps.version.outputs.version }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Branch check
if: ${{ inputs.release_type != 'Dry Run' }}
run: |
if [[ "$GITHUB_REF" != "refs/heads/rc" ]] && [[ "$GITHUB_REF" != "refs/heads/hotfix-rc" ]]; then
echo "==================================="
echo "[!] Can only release from the 'rc' or 'hotfix-rc' branches"
echo "==================================="
exit 1
fi
- name: Check Release Version
id: version
uses: bitwarden/gh-actions/release-version-check@main
with:
release-type: ${{ inputs.release_type }}
project-type: ts
file: package.json
release:
name: Release
runs-on: ubuntu-24.04
needs: setup
steps:
- name: Download all artifacts
if: ${{ inputs.release_type != 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build-cli.yml
workflow_conclusion: success
branch: ${{ github.ref_name }}
- name: Dry Run - Download all artifacts
if: ${{ inputs.release_type == 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build-cli.yml
workflow_conclusion: success
branch: main
- name: Create release
if: ${{ inputs.release_type != 'Dry Run' }}
uses: ncipollo/release-action@cdcc88a9acf3ca41c16c37bb7d21b9ad48560d87 # v1.15.0
env:
PKG_VERSION: ${{ needs.setup.outputs.release_version }}
with:
artifacts: "./bwdc-windows-${{ env.PKG_VERSION }}.zip,
./bwdc-macos-${{ env.PKG_VERSION }}.zip,
./bwdc-linux-${{ env.PKG_VERSION }}.zip"
commit: ${{ github.sha }}
tag: v${{ env.PKG_VERSION }}
name: Version ${{ env.PKG_VERSION }}
body: "<insert release notes here>"
token: ${{ secrets.GITHUB_TOKEN }}
draft: true

View File

@@ -1,4 +1,4 @@
name: Release GUI
name: Release
on:
workflow_dispatch:
@@ -13,25 +13,22 @@ on:
- Redeploy
- Dry Run
permissions:
contents: read
jobs:
setup:
name: Setup
runs-on: ubuntu-24.04
outputs:
release_version: ${{ steps.version.outputs.version }}
release-version: ${{ steps.version.outputs.version }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Branch check
if: ${{ inputs.release_type != 'Dry Run' }}
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
run: |
if [[ "$GITHUB_REF" != "refs/heads/rc" ]] && [[ "$GITHUB_REF" != "refs/heads/hotfix-rc" ]]; then
if [[ "$GITHUB_REF" != "refs/heads/main" ]]; then
echo "==================================="
echo "[!] Can only release from the 'rc' or 'hotfix-rc' branches"
echo "[!] Can only release from the 'main' branch"
echo "==================================="
exit 1
fi
@@ -40,7 +37,7 @@ jobs:
id: version
uses: bitwarden/gh-actions/release-version-check@main
with:
release-type: ${{ inputs.release_type }}
release-type: ${{ github.event.inputs.release_type }}
project-type: ts
file: package.json
@@ -50,28 +47,34 @@ jobs:
needs: setup
steps:
- name: Download all artifacts
if: ${{ inputs.release_type != 'Dry Run' }}
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build-gui.yml
workflow: build.yml
workflow_conclusion: success
branch: ${{ github.ref_name }}
- name: Dry Run - Download all artifacts
if: ${{ inputs.release_type == 'Dry Run' }}
if: ${{ github.event.inputs.release_type == 'Dry Run' }}
uses: bitwarden/gh-actions/download-artifacts@main
with:
workflow: build-gui.yml
workflow: build.yml
workflow_conclusion: success
branch: main
- name: Create release
if: ${{ inputs.release_type != 'Dry Run' }}
uses: ncipollo/release-action@cdcc88a9acf3ca41c16c37bb7d21b9ad48560d87 # v1.15.0
if: ${{ github.event.inputs.release_type != 'Dry Run' }}
uses: ncipollo/release-action@2c591bcc8ecdcd2db72b97d6147f871fcd833ba5 # v1.14.0
env:
PKG_VERSION: ${{ needs.setup.outputs.release_version }}
PKG_VERSION: ${{ needs.setup.outputs.release-version }}
with:
artifacts: " ./Bitwarden-Connector-Portable-${{ env.PKG_VERSION }}.exe,
artifacts: "./bwdc-windows-${{ env.PKG_VERSION }}.zip,
./bwdc-macos-${{ env.PKG_VERSION }}.zip,
./bwdc-linux-${{ env.PKG_VERSION }}.zip,
./bwdc-windows-sha256-${{ env.PKG_VERSION }}.txt,
./bwdc-macos-sha256-${{ env.PKG_VERSION }}.txt,
./bwdc-linux-sha256-${{ env.PKG_VERSION }}.txt,
./Bitwarden-Connector-Portable-${{ env.PKG_VERSION }}.exe,
./Bitwarden-Connector-Installer-${{ env.PKG_VERSION }}.exe,
./Bitwarden-Connector-Installer-${{ env.PKG_VERSION }}.exe.blockmap,
./Bitwarden-Connector-${{ env.PKG_VERSION }}-x86_64.AppImage,

View File

@@ -5,23 +5,13 @@ on:
push:
branches:
- "main"
pull_request:
types: [opened, synchronize, reopened]
branches-ignore:
- main
pull_request_target:
types: [opened, synchronize, reopened]
branches:
- "main"
permissions: {}
types: [opened, synchronize]
jobs:
check-run:
name: Check PR run
uses: bitwarden/gh-actions/.github/workflows/check-run.yml@main
permissions:
contents: read
sast:
name: SAST scan
@@ -34,12 +24,12 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Scan with Checkmarx
uses: checkmarx/ast-github-action@184bf2f64f55d1c93fd6636d539edf274703e434 # 2.0.41
uses: checkmarx/ast-github-action@ed196cdaec9cd1bc5aacac4ca2010dd773b20893 # 2.0.35
env:
INCREMENTAL: "${{ contains(github.event_name, 'pull_request') && '--sast-incremental' || '' }}"
with:
@@ -54,11 +44,9 @@ jobs:
--output-path . ${{ env.INCREMENTAL }}
- name: Upload Checkmarx results to GitHub
uses: github/codeql-action/upload-sarif@dd196fa9ce80b6bacc74ca1c32bd5b0ba22efca7 # v3.28.3
uses: github/codeql-action/upload-sarif@e2b3eafc8d227b0241d48be5f425d47c2d750a13 # v3.26.10
with:
sarif_file: cx_result.sarif
sha: ${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.sha || github.sha }}
ref: ${{ contains(github.event_name, 'pull_request') && format('refs/pull/{0}/head', github.event.pull_request.number) || github.ref }}
quality:
name: Quality scan
@@ -70,15 +58,16 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha }}
- name: Scan with SonarCloud
uses: sonarsource/sonarqube-scan-action@2500896589ef8f7247069a56136f8dc177c27ccf # v5.2.0
uses: sonarsource/sonarcloud-github-action@eb211723266fe8e83102bac7361f0a05c3ac1d1b # v3.0.0
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
args: >
-Dsonar.organization=${{ github.repository_owner }}
@@ -87,4 +76,3 @@ jobs:
-Dsonar.sources=.
-Dsonar.test.inclusions=**/*.spec.ts
-Dsonar.exclusions=**/*.spec.ts
-Dsonar.pullrequest.key=${{ github.event.pull_request.number }}

View File

@@ -5,16 +5,32 @@ on:
push:
branches:
- "main"
- "rc"
- "hotfix-rc"
pull_request:
jobs:
check-test-secrets:
name: Check for test secrets
runs-on: ubuntu-24.04
outputs:
available: ${{ steps.check-test-secrets.outputs.available }}
permissions:
contents: read
steps:
- name: Check
id: check-test-secrets
run: |
if [ "${{ secrets.CODECOV_TOKEN }}" != '' ]; then
echo "available=true" >> $GITHUB_OUTPUT;
else
echo "available=false" >> $GITHUB_OUTPUT;
fi
testing:
name: Run tests
if: ${{ startsWith(github.head_ref, 'version_bump_') == false }}
runs-on: ubuntu-24.04
needs: check-test-secrets
permissions:
checks: write
contents: read
@@ -22,7 +38,7 @@ jobs:
steps:
- name: Check out repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Get Node version
id: retrieve-node-version
@@ -32,7 +48,7 @@ jobs:
echo "node_version=$NODE_VERSION" >> $GITHUB_OUTPUT
- name: Set up Node
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
with:
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
@@ -52,7 +68,7 @@ jobs:
- name: Report test results
uses: dorny/test-reporter@31a54ee7ebcacc03a09ea97a7e5465a47b84aea5 # v1.9.1
if: ${{ github.event.pull_request.head.repo.full_name == github.repository && !cancelled() }}
if: ${{ needs.check-test-secrets.outputs.available == 'true' && !cancelled() }}
with:
name: Test Results
path: "junit.xml"
@@ -60,7 +76,13 @@ jobs:
fail-on-error: true
- name: Upload coverage to codecov.io
uses: codecov/codecov-action@5a605bd92782ce0810fa3b8acc235c921b497052 # v5.2.0
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4.6.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
- name: Upload results to codecov.io
uses: codecov/test-results-action@4e79e65778be1cecd5df25e14af1eafb6df80ea9 # v1.0.2
uses: codecov/test-results-action@1b5b448b98e58ba90d1a1a1d9fcb72ca2263be46 # v1.0.0
if: ${{ needs.check-test-secrets.outputs.available == 'true' }}
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -20,14 +20,14 @@ jobs:
version: ${{ inputs.version_number_override }}
- name: Generate GH App token
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69 # v1.11.0
id: app-token
with:
app-id: ${{ secrets.BW_GHAPP_ID }}
private-key: ${{ secrets.BW_GHAPP_KEY }}
- name: Checkout Branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
token: ${{ steps.app-token.outputs.token }}

View File

@@ -9,7 +9,7 @@ Supported directories:
- Active Directory
- Any other LDAP-based directory
- Microsoft Entra ID
- Azure Active Directory
- G Suite (Google)
- Okta

View File

@@ -2,9 +2,9 @@ import { ApiTokenRequest } from "../models/request/identityToken/apiTokenRequest
import { PasswordTokenRequest } from "../models/request/identityToken/passwordTokenRequest";
import { SsoTokenRequest } from "../models/request/identityToken/ssoTokenRequest";
import { OrganizationImportRequest } from "../models/request/organizationImportRequest";
import { IdentityCaptchaResponse } from "../models/response/identityCaptchaResponse";
import { IdentityTokenResponse } from "../models/response/identityTokenResponse";
import { IdentityTwoFactorResponse } from "../models/response/identityTwoFactorResponse";
import { IdentityCaptchaResponse } from '../models/response/identityCaptchaResponse';
import { IdentityTokenResponse } from '../models/response/identityTokenResponse';
import { IdentityTwoFactorResponse } from '../models/response/identityTwoFactorResponse';
export abstract class ApiService {
postIdentityToken: (

View File

@@ -8,12 +8,16 @@ export class OrganizationImportRequest {
overwriteExisting = false;
largeImport = false;
constructor(model: {
constructor(
model:
| {
groups: Required<OrganizationImportGroupRequest>[];
users: Required<OrganizationImportMemberRequest>[];
overwriteExisting: boolean;
largeImport: boolean;
}) {
}
| ImportDirectoryRequest,
) {
if (model instanceof ImportDirectoryRequest) {
this.groups = model.groups.map((g) => new OrganizationImportGroupRequest(g));
this.members = model.users.map((u) => new OrganizationImportMemberRequest(u));

View File

@@ -60,8 +60,9 @@ export class TrayMain {
}
setupWindowListeners(win: BrowserWindow) {
win.on("minimize", async () => {
win.on("minimize", async (e: Event) => {
if (await this.stateService.getEnableMinimizeToTray()) {
e.preventDefault();
this.hideToTray();
}
});

File diff suppressed because it is too large.

View File

@@ -6,5 +6,5 @@ fi
mkcert -install
mkdir -p ./openldap/certs
cp "$(mkcert -CAROOT)/rootCA.pem" ./openldap/certs/rootCA.pem
cp $(mkcert -CAROOT)/rootCA.pem ./openldap/certs/rootCA.pem
mkcert -key-file ./openldap/certs/openldap-key.pem -cert-file ./openldap/certs/openldap.pem localhost openldap

package-lock.json (generated, 8532 changed lines)

File diff suppressed because it is too large.

View File

@@ -2,7 +2,7 @@
"name": "@bitwarden/directory-connector",
"productName": "Bitwarden Directory Connector",
"description": "Sync your user directory to your Bitwarden organization.",
"version": "2025.6.1",
"version": "2024.10.0",
"keywords": [
"bitwarden",
"password",
@@ -73,108 +73,107 @@
"test:types": "npx tsc --noEmit"
},
"devDependencies": {
"@angular-devkit/build-angular": "17.3.17",
"@angular-devkit/build-angular": "19.0.3",
"@angular-eslint/eslint-plugin-template": "17.5.3",
"@angular-eslint/template-parser": "17.5.3",
"@angular/compiler-cli": "17.3.12",
"@angular/compiler-cli": "19.0.3",
"@electron/notarize": "2.5.0",
"@electron/rebuild": "3.7.2",
"@electron/rebuild": "3.7.1",
"@fluffy-spoon/substitute": "1.208.0",
"@microsoft/microsoft-graph-types": "2.40.0",
"@ngtools/webpack": "17.3.17",
"@ngtools/webpack": "19.0.0",
"@types/inquirer": "8.2.10",
"@types/jest": "29.5.14",
"@types/lowdb": "1.0.15",
"@types/node": "22.13.1",
"@types/node": "20.17.9",
"@types/node-fetch": "2.6.12",
"@types/node-forge": "1.3.11",
"@types/proper-lockfile": "4.1.4",
"@types/tldjs": "2.3.4",
"@typescript-eslint/eslint-plugin": "8.35.0",
"@typescript-eslint/parser": "8.35.0",
"@typescript-eslint/eslint-plugin": "5.62.0",
"@typescript-eslint/parser": "5.62.0",
"clean-webpack-plugin": "4.0.0",
"concurrently": "9.1.2",
"copy-webpack-plugin": "13.0.0",
"concurrently": "9.1.0",
"copy-webpack-plugin": "12.0.2",
"cross-env": "7.0.3",
"css-loader": "7.1.2",
"dotenv": "16.5.0",
"electron": "34.5.8",
"dotenv": "16.4.5",
"electron": "28.3.3",
"electron-builder": "24.13.3",
"electron-log": "5.4.1",
"electron-log": "5.2.4",
"electron-reload": "2.0.0-alpha.1",
"electron-store": "8.2.0",
"electron-updater": "6.6.2",
"electron-updater": "6.3.9",
"eslint": "8.57.1",
"eslint-config-prettier": "10.1.5",
"eslint-import-resolver-typescript": "3.7.0",
"eslint-config-prettier": "9.1.0",
"eslint-import-resolver-typescript": "3.6.3",
"eslint-plugin-import": "2.31.0",
"eslint-plugin-rxjs": "5.0.3",
"eslint-plugin-rxjs-angular": "2.0.1",
"form-data": "4.0.3",
"form-data": "4.0.1",
"html-loader": "5.1.0",
"html-webpack-plugin": "5.6.3",
"husky": "9.1.7",
"jest": "29.7.0",
"jest-junit": "16.0.0",
"jest-mock-extended": "4.0.0",
"jest-preset-angular": "14.6.0",
"lint-staged": "16.1.2",
"jest-mock-extended": "3.0.7",
"jest-preset-angular": "14.4.2",
"lint-staged": "15.2.10",
"mini-css-extract-plugin": "2.9.2",
"node-abi": "3.75.0",
"node-forge": "1.3.1",
"node-loader": "2.1.0",
"pkg": "5.8.1",
"prettier": "3.5.3",
"rimraf": "6.0.1",
"rxjs": "7.8.2",
"sass": "1.89.2",
"sass-loader": "16.0.5",
"ts-jest": "29.4.0",
"ts-loader": "9.5.2",
"prettier": "3.3.3",
"rimraf": "5.0.10",
"rxjs": "7.8.1",
"sass": "1.79.4",
"sass-loader": "16.0.4",
"ts-jest": "29.2.5",
"ts-loader": "9.5.1",
"tsconfig-paths-webpack-plugin": "4.2.0",
"type-fest": "4.41.0",
"typescript": "5.4.5",
"webpack": "5.99.9",
"webpack-cli": "6.0.1",
"type-fest": "4.30.0",
"typescript": "5.5.4",
"typescript-transform-paths": "3.5.2",
"webpack": "5.95.0",
"webpack-cli": "5.1.4",
"webpack-merge": "6.0.1",
"webpack-node-externals": "3.0.0",
"zone.js": "0.14.10"
"zone.js": "0.15.0",
"webpack-node-externals": "3.0.0"
},
"dependencies": {
"@angular/animations": "17.3.12",
"@angular/cdk": "17.3.10",
"@angular/common": "17.3.12",
"@angular/compiler": "17.3.12",
"@angular/core": "17.3.12",
"@angular/forms": "17.3.12",
"@angular/platform-browser": "17.3.12",
"@angular/platform-browser-dynamic": "17.3.12",
"@angular/router": "17.3.12",
"@angular/animations": "19.0.3",
"@angular/cdk": "19.0.2",
"@angular/common": "19.0.3",
"@angular/compiler": "19.0.3",
"@angular/core": "19.0.3",
"@angular/forms": "19.0.3",
"@angular/platform-browser": "19.0.3",
"@angular/platform-browser-dynamic": "19.0.3",
"@angular/router": "19.0.3",
"@microsoft/microsoft-graph-client": "3.0.7",
"big-integer": "1.6.52",
"bootstrap": "5.3.7",
"bootstrap": "5.3.3",
"browser-hrtime": "1.1.8",
"chalk": "4.1.2",
"commander": "14.0.0",
"core-js": "3.43.0",
"form-data": "4.0.3",
"google-auth-library": "9.15.1",
"googleapis": "144.0.0",
"https-proxy-agent": "7.0.6",
"commander": "12.1.0",
"core-js": "3.38.1",
"form-data": "4.0.1",
"google-auth-library": "7.14.1",
"googleapis": "73.0.0",
"https-proxy-agent": "7.0.5",
"inquirer": "8.2.6",
"keytar": "7.9.0",
"ldapts": "8.0.1",
"ldapts": "7.2.1",
"lowdb": "1.0.0",
"ngx-toastr": "19.0.0",
"ngx-toastr": "17.0.2",
"node-fetch": "2.7.0",
"proper-lockfile": "4.1.2",
"rxjs": "7.8.2",
"rxjs": "7.8.1",
"tldjs": "2.3.1",
"zone.js": "0.14.10",
"parse5": "7.3.0"
"zone.js": "0.15.0"
},
"engines": {
"node": "~22.13.0",
"node": "~20.18.0",
"npm": "~10"
},
"lint-staged": {

View File

@@ -1,7 +1,7 @@
{
"name": "@bitwarden/directory-connector",
"version": "2.9.5",
"lockfileVersion": 3,
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {

View File

@@ -1,6 +0,0 @@
import { DirectoryType } from "@/src/enums/directoryType";
import { IDirectoryService } from "@/src/services/directory.service";
export abstract class DirectoryFactoryService {
abstract createService(type: DirectoryType): IDirectoryService;
}

View File

@@ -1,17 +0,0 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
export interface RequestBuilderOptions {
removeDisabled: boolean;
overwriteExisting: boolean;
}
export abstract class RequestBuilder {
buildRequest: (
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
) => OrganizationImportRequest[];
}
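As a rough sketch of how this abstraction is consumed by the SyncService shown later in this diff: a concrete builder is injected, and the sync configuration decides which one produces the request list. The selection condition below (largeImport) is an assumption based on the spec expectations further down, not a verbatim copy of the service code; import paths follow the aliases used elsewhere in this diff.

import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilder, RequestBuilderOptions } from "@/src/abstractions/request-builder.service";
import { BatchRequestBuilder } from "@/src/services/batch-request-builder";
import { SingleRequestBuilder } from "@/src/services/single-request-builder";

function buildRequests(
  groups: GroupEntry[],
  users: UserEntry[],
  syncConfig: { largeImport: boolean; removeDisabled: boolean; overwriteExisting: boolean },
): OrganizationImportRequest[] {
  const options: RequestBuilderOptions = {
    removeDisabled: syncConfig.removeDisabled,
    overwriteExisting: syncConfig.overwriteExisting,
  };
  // Large directories are split into multiple requests; small ones go out as a single request.
  const builder: RequestBuilder = syncConfig.largeImport
    ? new BatchRequestBuilder()
    : new SingleRequestBuilder();
  return builder.buildRequest(groups, users, options);
}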

View File

@@ -3,7 +3,7 @@ import { StorageOptions } from "@/jslib/common/src/models/domain/storageOptions"
import { DirectoryType } from "@/src/enums/directoryType";
import { Account } from "@/src/models/account";
import { EntraIdConfiguration } from "@/src/models/entraIdConfiguration";
import { AzureConfiguration } from "@/src/models/azureConfiguration";
import { GSuiteConfiguration } from "@/src/models/gsuiteConfiguration";
import { LdapConfiguration } from "@/src/models/ldapConfiguration";
import { OktaConfiguration } from "@/src/models/oktaConfiguration";
@@ -17,7 +17,7 @@ export abstract class StateService extends BaseStateServiceAbstraction<Account>
config:
| LdapConfiguration
| GSuiteConfiguration
| EntraIdConfiguration
| AzureConfiguration
| OktaConfiguration
| OneLoginConfiguration,
) => Promise<any>;
@@ -25,8 +25,8 @@ export abstract class StateService extends BaseStateServiceAbstraction<Account>
setLdapConfiguration: (value: LdapConfiguration, options?: StorageOptions) => Promise<void>;
getGsuiteConfiguration: (options?: StorageOptions) => Promise<GSuiteConfiguration>;
setGsuiteConfiguration: (value: GSuiteConfiguration, options?: StorageOptions) => Promise<void>;
getEntraConfiguration: (options?: StorageOptions) => Promise<EntraIdConfiguration>;
setEntraConfiguration: (value: EntraIdConfiguration, options?: StorageOptions) => Promise<void>;
getAzureConfiguration: (options?: StorageOptions) => Promise<AzureConfiguration>;
setAzureConfiguration: (value: AzureConfiguration, options?: StorageOptions) => Promise<void>;
getOktaConfiguration: (options?: StorageOptions) => Promise<OktaConfiguration>;
setOktaConfiguration: (value: OktaConfiguration, options?: StorageOptions) => Promise<void>;
getOneLoginConfiguration: (options?: StorageOptions) => Promise<OneLoginConfiguration>;

View File

@@ -25,11 +25,6 @@ import { ElectronRendererStorageService } from "@/jslib/electron/src/services/el
import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";
import { DirectoryFactoryService } from "@/src/abstractions/directory-factory.service";
import { BatchRequestBuilder } from "@/src/services/batch-request-builder";
import { DefaultDirectoryFactoryService } from "@/src/services/directory-factory.service";
import { SingleRequestBuilder } from "@/src/services/single-request-builder";
import { AuthService as AuthServiceAbstraction } from "../../abstractions/auth.service";
import { StateService as StateServiceAbstraction } from "../../abstractions/state.service";
import { Account } from "../../models/account";
@@ -173,15 +168,13 @@ export function initFactory(
provide: SyncService,
useClass: SyncService,
deps: [
LogServiceAbstraction,
CryptoFunctionServiceAbstraction,
ApiServiceAbstraction,
MessagingServiceAbstraction,
I18nServiceAbstraction,
EnvironmentServiceAbstraction,
StateServiceAbstraction,
BatchRequestBuilder,
SingleRequestBuilder,
DirectoryFactoryService,
],
}),
safeProvider(AuthGuardService),
@@ -222,19 +215,6 @@ export function initFactory(
StateMigrationServiceAbstraction,
],
}),
safeProvider({
provide: SingleRequestBuilder,
deps: [],
}),
safeProvider({
provide: BatchRequestBuilder,
deps: [],
}),
safeProvider({
provide: DirectoryFactoryService,
useClass: DefaultDirectoryFactoryService,
deps: [LogServiceAbstraction, I18nServiceAbstraction, StateServiceAbstraction],
}),
] satisfies SafeProvider[],
})
export class ServicesModule {}

View File

@@ -22,15 +22,18 @@
class="btn btn-primary"
[disabled]="startForm.loading"
>
<i class="bwi bwi-play bwi-fw" [hidden]="startForm.loading"></i>
<i class="bwi bwi-spinner bwi-fw bwi-spin" [hidden]="!startForm.loading"></i>
{{ "startSync" | i18n }}
</button>
</form>
<button type="button" (click)="stop()" class="btn btn-danger text-white">
<button type="button" (click)="stop()" class="btn btn-primary">
<i class="bwi bwi-stop bwi-fw"></i>
{{ "stopSync" | i18n }}
</button>
<form #syncForm [appApiAction]="syncPromise" class="d-inline">
<button type="button" (click)="sync()" class="btn btn-primary" [disabled]="syncForm.loading">
<i class="bwi bwi-refresh bwi-fw" [ngClass]="{ 'bwi-spin': syncForm.loading }"></i>
{{ "syncNow" | i18n }}
</button>
</form>
@@ -48,6 +51,7 @@
[disabled]="simForm.loading"
>
<i class="bwi bwi-spinner bwi-fw bwi-spin" [hidden]="!simForm.loading"></i>
<i class="bwi bwi-bug bwi-fw" [hidden]="simForm.loading"></i>
{{ "testNow" | i18n }}
</button>
</form>

View File

@@ -242,7 +242,7 @@
</div>
</div>
</div>
<div [hidden]="directory != directoryType.EntraID">
<div [hidden]="directory != directoryType.AzureActiveDirectory">
<div class="mb-3">
<label for="identityAuthority" class="form-label">{{
"identityAuthority" | i18n
@@ -251,10 +251,10 @@
class="form-select"
id="identityAuthority"
name="IdentityAuthority"
[(ngModel)]="entra.identityAuthority"
[(ngModel)]="azure.identityAuthority"
>
<option value="login.microsoftonline.com">Entra Id Public</option>
<option value="login.microsoftonline.us">Entra Id Government</option>
<option value="login.microsoftonline.com">Azure AD Public</option>
<option value="login.microsoftonline.us">Azure AD Government</option>
</select>
</div>
<div class="mb-3">
@@ -264,7 +264,7 @@
class="form-control"
id="tenant"
name="Tenant"
[(ngModel)]="entra.tenant"
[(ngModel)]="azure.tenant"
/>
<div class="form-text">{{ "ex" | i18n }} companyad.onmicrosoft.com</div>
</div>
@@ -275,29 +275,29 @@
class="form-control"
id="applicationId"
name="ApplicationId"
[(ngModel)]="entra.applicationId"
[(ngModel)]="azure.applicationId"
/>
</div>
<div class="mb-3">
<label for="secretKey" class="form-label">{{ "secretKey" | i18n }}</label>
<div class="input-group">
<input
type="{{ showEntraKey ? 'text' : 'password' }}"
type="{{ showAzureKey ? 'text' : 'password' }}"
class="form-control"
id="secretKey"
name="SecretKey"
[(ngModel)]="entra.key"
[(ngModel)]="azure.key"
/>
<button
type="button"
class="btn btn-outline-secondary"
appA11yTitle="{{ 'toggleVisibility' | i18n }}"
(click)="toggleEntraKey()"
(click)="toggleAzureKey()"
>
<i
class="bwi bwi-lg"
aria-hidden="true"
[ngClass]="showEntraKey ? 'bwi-eye-slash' : 'bwi-eye'"
[ngClass]="showAzureKey ? 'bwi-eye-slash' : 'bwi-eye'"
></i>
</button>
</div>
@@ -607,14 +607,14 @@
<div class="form-text" *ngIf="directory === directoryType.Ldap">
{{ "ex" | i18n }} (&amp;(givenName=John)(|(l=Dallas)(l=Austin)))
</div>
<div class="form-text" *ngIf="directory === directoryType.EntraID">
<div class="form-text" *ngIf="directory === directoryType.AzureActiveDirectory">
{{ "ex" | i18n }} exclude:joe&#64;company.com
</div>
<div class="form-text" *ngIf="directory === directoryType.Okta">
{{ "ex" | i18n }} exclude:joe&#64;company.com | profile.firstName eq "John"
</div>
<div class="form-text" *ngIf="directory === directoryType.GSuite">
{{ "ex" | i18n }} exclude:joe&#64;company.com | orgUnitPath=/Engineering
{{ "ex" | i18n }} exclude:joe&#64;company.com | orgName=Engineering
</div>
</div>
<div class="mb-3" [hidden]="directory != directoryType.Ldap">
@@ -684,7 +684,7 @@
<div class="form-text" *ngIf="directory === directoryType.Ldap">
{{ "ex" | i18n }} (&amp;(objectClass=group)(!(cn=Sales*))(!(cn=IT*)))
</div>
<div class="form-text" *ngIf="directory === directoryType.EntraID">
<div class="form-text" *ngIf="directory === directoryType.AzureActiveDirectory">
{{ "ex" | i18n }} include:Sales,IT
</div>
<div class="form-text" *ngIf="directory === directoryType.Okta">

View File

@@ -5,7 +5,7 @@ import { LogService } from "@/jslib/common/src/abstractions/log.service";
import { StateService } from "../../abstractions/state.service";
import { DirectoryType } from "../../enums/directoryType";
import { EntraIdConfiguration } from "../../models/entraIdConfiguration";
import { AzureConfiguration } from "../../models/azureConfiguration";
import { GSuiteConfiguration } from "../../models/gsuiteConfiguration";
import { LdapConfiguration } from "../../models/ldapConfiguration";
import { OktaConfiguration } from "../../models/oktaConfiguration";
@@ -22,13 +22,13 @@ export class SettingsComponent implements OnInit, OnDestroy {
directoryType = DirectoryType;
ldap = new LdapConfiguration();
gsuite = new GSuiteConfiguration();
entra = new EntraIdConfiguration();
azure = new AzureConfiguration();
okta = new OktaConfiguration();
oneLogin = new OneLoginConfiguration();
sync = new SyncConfiguration();
directoryOptions: any[];
showLdapPassword = false;
showEntraKey = false;
showAzureKey = false;
showOktaKey = false;
showOneLoginSecret = false;
@@ -42,7 +42,7 @@ export class SettingsComponent implements OnInit, OnDestroy {
this.directoryOptions = [
{ name: this.i18nService.t("select"), value: null },
{ name: "Active Directory / LDAP", value: DirectoryType.Ldap },
{ name: "Entra ID", value: DirectoryType.EntraID },
{ name: "Azure Active Directory", value: DirectoryType.AzureActiveDirectory },
{ name: "G Suite (Google)", value: DirectoryType.GSuite },
{ name: "Okta", value: DirectoryType.Okta },
{ name: "OneLogin", value: DirectoryType.OneLogin },
@@ -56,9 +56,10 @@ export class SettingsComponent implements OnInit, OnDestroy {
this.gsuite =
(await this.stateService.getDirectory<GSuiteConfiguration>(DirectoryType.GSuite)) ||
this.gsuite;
this.entra =
(await this.stateService.getDirectory<EntraIdConfiguration>(DirectoryType.EntraID)) ||
this.entra;
this.azure =
(await this.stateService.getDirectory<AzureConfiguration>(
DirectoryType.AzureActiveDirectory,
)) || this.azure;
this.okta =
(await this.stateService.getDirectory<OktaConfiguration>(DirectoryType.Okta)) || this.okta;
this.oneLogin =
@@ -79,7 +80,7 @@ export class SettingsComponent implements OnInit, OnDestroy {
await this.stateService.setDirectoryType(this.directory);
await this.stateService.setDirectory(DirectoryType.Ldap, this.ldap);
await this.stateService.setDirectory(DirectoryType.GSuite, this.gsuite);
await this.stateService.setDirectory(DirectoryType.EntraID, this.entra);
await this.stateService.setDirectory(DirectoryType.AzureActiveDirectory, this.azure);
await this.stateService.setDirectory(DirectoryType.Okta, this.okta);
await this.stateService.setDirectory(DirectoryType.OneLogin, this.oneLogin);
await this.stateService.setSync(this.sync);
@@ -134,8 +135,8 @@ export class SettingsComponent implements OnInit, OnDestroy {
document.getElementById("password").focus();
}
toggleEntraKey() {
this.showEntraKey = !this.showEntraKey;
toggleAzureKey() {
this.showAzureKey = !this.showAzureKey;
document.getElementById("secretKey").focus();
}

View File

@@ -2,16 +2,19 @@
<ul class="nav nav-tabs mb-3">
<li class="nav-item">
<a class="nav-link" routerLink="dashboard" routerLinkActive="active">
<i class="bwi bwi-dashboard"></i>
{{ "dashboard" | i18n }}
</a>
</li>
<li class="nav-item">
<a class="nav-link" routerLink="settings" routerLinkActive="active">
<i class="bwi bwi-cogs"></i>
{{ "settings" | i18n }}
</a>
</li>
<li class="nav-item">
<a class="nav-link" routerLink="more" routerLinkActive="active">
<i class="bwi bwi-sliders"></i>
{{ "more" | i18n }}
</a>
</li>

View File

@@ -17,16 +17,12 @@ import { ConsoleLogService } from "@/jslib/node/src/cli/services/consoleLog.serv
import { NodeApiService } from "@/jslib/node/src/services/nodeApi.service";
import { NodeCryptoFunctionService } from "@/jslib/node/src/services/nodeCryptoFunction.service";
import { DirectoryFactoryService } from "./abstractions/directory-factory.service";
import { Account } from "./models/account";
import { Program } from "./program";
import { AuthService } from "./services/auth.service";
import { BatchRequestBuilder } from "./services/batch-request-builder";
import { DefaultDirectoryFactoryService } from "./services/directory-factory.service";
import { I18nService } from "./services/i18n.service";
import { KeytarSecureStorageService } from "./services/keytarSecureStorage.service";
import { LowdbStorageService } from "./services/lowdbStorage.service";
import { SingleRequestBuilder } from "./services/single-request-builder";
import { StateService } from "./services/state.service";
import { StateMigrationService } from "./services/stateMigration.service";
import { SyncService } from "./services/sync.service";
@@ -55,9 +51,6 @@ export class Main {
syncService: SyncService;
stateService: StateService;
stateMigrationService: StateMigrationService;
directoryFactoryService: DirectoryFactoryService;
batchRequestBuilder: BatchRequestBuilder;
singleRequestBuilder: SingleRequestBuilder;
constructor() {
const applicationName = "Bitwarden Directory Connector";
@@ -153,25 +146,14 @@ export class Main {
this.stateService,
);
this.directoryFactoryService = new DefaultDirectoryFactoryService(
this.logService,
this.i18nService,
this.stateService,
);
this.batchRequestBuilder = new BatchRequestBuilder();
this.singleRequestBuilder = new SingleRequestBuilder();
this.syncService = new SyncService(
this.logService,
this.cryptoFunctionService,
this.apiService,
this.messagingService,
this.i18nService,
this.environmentService,
this.stateService,
this.batchRequestBuilder,
this.singleRequestBuilder,
this.directoryFactoryService,
);
this.program = new Program(this);

View File

@@ -8,7 +8,7 @@ import { MessageResponse } from "@/jslib/node/src/cli/models/response/messageRes
import { StateService } from "../abstractions/state.service";
import { DirectoryType } from "../enums/directoryType";
import { EntraIdConfiguration } from "../models/entraIdConfiguration";
import { AzureConfiguration } from "../models/azureConfiguration";
import { GSuiteConfiguration } from "../models/gsuiteConfiguration";
import { LdapConfiguration } from "../models/ldapConfiguration";
import { OktaConfiguration } from "../models/oktaConfiguration";
@@ -20,7 +20,7 @@ export class ConfigCommand {
private directory: DirectoryType;
private ldap = new LdapConfiguration();
private gsuite = new GSuiteConfiguration();
private entra = new EntraIdConfiguration();
private azure = new AzureConfiguration();
private okta = new OktaConfiguration();
private oneLogin = new OneLoginConfiguration();
private sync = new SyncConfiguration();
@@ -54,11 +54,8 @@ export class ConfigCommand {
case "gsuite.key":
await this.setGSuiteKey(value);
break;
// Azure Active Directory was renamed to Entra ID, but we've kept the old key name
// to be backwards compatible with existing configurations.
case "azure.key":
case "entra.key":
await this.setEntraIdKey(value);
await this.setAzureKey(value);
break;
case "okta.token":
await this.setOktaToken(value);
@@ -105,9 +102,9 @@ export class ConfigCommand {
await this.saveConfig();
}
private async setEntraIdKey(key: string) {
private async setAzureKey(key: string) {
await this.loadConfig();
this.entra.key = key;
this.azure.key = key;
await this.saveConfig();
}
@@ -130,9 +127,10 @@ export class ConfigCommand {
this.gsuite =
(await this.stateService.getDirectory<GSuiteConfiguration>(DirectoryType.GSuite)) ||
this.gsuite;
this.entra =
(await this.stateService.getDirectory<EntraIdConfiguration>(DirectoryType.EntraID)) ||
this.entra;
this.azure =
(await this.stateService.getDirectory<AzureConfiguration>(
DirectoryType.AzureActiveDirectory,
)) || this.azure;
this.okta =
(await this.stateService.getDirectory<OktaConfiguration>(DirectoryType.Okta)) || this.okta;
this.oneLogin =
@@ -146,7 +144,7 @@ export class ConfigCommand {
await this.stateService.setDirectoryType(this.directory);
await this.stateService.setDirectory(DirectoryType.Ldap, this.ldap);
await this.stateService.setDirectory(DirectoryType.GSuite, this.gsuite);
await this.stateService.setDirectory(DirectoryType.EntraID, this.entra);
await this.stateService.setDirectory(DirectoryType.AzureActiveDirectory, this.azure);
await this.stateService.setDirectory(DirectoryType.Okta, this.okta);
await this.stateService.setDirectory(DirectoryType.OneLogin, this.oneLogin);
await this.stateService.setSync(this.sync);

View File

@@ -1,6 +1,6 @@
export enum DirectoryType {
Ldap = 0,
EntraID = 1,
AzureActiveDirectory = 1,
GSuite = 2,
Okta = 3,
OneLogin = 4,
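One practical note on this rename: both member names map to the same underlying value (1), so a persisted directoryType is read identically on either side of this change; only the label differs. A trivial check, assuming the enum above:

import { DirectoryType } from "@/src/enums/directoryType";

// A stored value of 1 resolves to the Microsoft directory type under either name.
const stored = 1 as DirectoryType;
const isMicrosoftDirectory = stored === DirectoryType.AzureActiveDirectory; // named EntraID on the other side of this diff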

View File

@@ -2,7 +2,7 @@ import { Account as BaseAccount } from "@/jslib/common/src/models/domain/account
import { DirectoryType } from "@/src/enums/directoryType";
import { EntraIdConfiguration } from "./entraIdConfiguration";
import { AzureConfiguration } from "./azureConfiguration";
import { GSuiteConfiguration } from "./gsuiteConfiguration";
import { LdapConfiguration } from "./ldapConfiguration";
import { OktaConfiguration } from "./oktaConfiguration";
@@ -29,10 +29,7 @@ export class ClientKeys {
export class DirectoryConfigurations {
ldap: LdapConfiguration;
gsuite: GSuiteConfiguration;
entra: EntraIdConfiguration;
// Azure Active Directory was renamed to Entra ID, but we've kept the old account property name
// to be backwards compatible with existing configurations.
azure: EntraIdConfiguration;
azure: AzureConfiguration;
okta: OktaConfiguration;
oneLogin: OneLoginConfiguration;
}

View File

@@ -1,6 +1,6 @@
import { IConfiguration } from "./IConfiguration";
export class EntraIdConfiguration implements IConfiguration {
export class AzureConfiguration implements IConfiguration {
identityAuthority: string;
tenant: string;
applicationId: string;

View File

@@ -190,7 +190,7 @@ export class Program extends BaseProgram {
writeLn(" server - On-premise hosted installation URL.");
writeLn(" directory - The type of directory to use.");
writeLn(" ldap.password - The password for connection to this LDAP server.");
writeLn(" entra.key - The Entra Id secret key.");
writeLn(" azure.key - The Azure AD secret key.");
writeLn(" gsuite.key - The G Suite private key.");
writeLn(" okta.token - The Okta token.");
writeLn(" onelogin.secret - The OneLogin client secret.");
@@ -202,7 +202,7 @@ export class Program extends BaseProgram {
writeLn(" bwdc config directory 1");
writeLn(" bwdc config ldap.password <password>");
writeLn(" bwdc config ldap.password --secretenv LDAP_PWD");
writeLn(" bwdc config entra.key <key>");
writeLn(" bwdc config azure.key <key>");
writeLn(" bwdc config gsuite.key <key>");
writeLn(" bwdc config okta.token <token>");
writeLn(" bwdc config onelogin.secret <secret>");

View File

@@ -9,7 +9,7 @@ import { LogService } from "@/jslib/common/src/abstractions/log.service";
import { StateService } from "../abstractions/state.service";
import { DirectoryType } from "../enums/directoryType";
import { EntraIdConfiguration } from "../models/entraIdConfiguration";
import { AzureConfiguration } from "../models/azureConfiguration";
import { GroupEntry } from "../models/groupEntry";
import { SyncConfiguration } from "../models/syncConfiguration";
import { UserEntry } from "../models/userEntry";
@@ -17,10 +17,8 @@ import { UserEntry } from "../models/userEntry";
import { BaseDirectoryService } from "./baseDirectory.service";
import { IDirectoryService } from "./directory.service";
const EntraIdPublicIdentityAuthority = "login.microsoftonline.com";
const EntraIdPublicGraphEndpoint = "https://graph.microsoft.com";
const EntraIdGovernmentIdentityAuthority = "login.microsoftonline.us";
const EntraIdGovernmentGraphEndpoint = "https://graph.microsoft.us";
const AzurePublicIdentityAuhtority = "login.microsoftonline.com";
const AzureGovermentIdentityAuhtority = "login.microsoftonline.us";
const NextLink = "@odata.nextLink";
const DeltaLink = "@odata.deltaLink";
@@ -34,9 +32,9 @@ enum UserSetType {
ExcludeGroup,
}
export class EntraIdDirectoryService extends BaseDirectoryService implements IDirectoryService {
export class AzureDirectoryService extends BaseDirectoryService implements IDirectoryService {
private client: graph.Client;
private dirConfig: EntraIdConfiguration;
private dirConfig: AzureConfiguration;
private syncConfig: SyncConfiguration;
private accessToken: string;
private accessTokenExpiration: Date;
@@ -52,12 +50,12 @@ export class EntraIdDirectoryService extends BaseDirectoryService implements IDi
async getEntries(force: boolean, test: boolean): Promise<[GroupEntry[], UserEntry[]]> {
const type = await this.stateService.getDirectoryType();
if (type !== DirectoryType.EntraID) {
if (type !== DirectoryType.AzureActiveDirectory) {
return;
}
this.dirConfig = await this.stateService.getDirectory<EntraIdConfiguration>(
DirectoryType.EntraID,
this.dirConfig = await this.stateService.getDirectory<AzureConfiguration>(
DirectoryType.AzureActiveDirectory,
);
if (this.dirConfig == null) {
return;
@@ -209,7 +207,7 @@ export class EntraIdDirectoryService extends BaseDirectoryService implements IDi
if (keyword === "excludeadministrativeunit" || keyword === "includeadministrativeunit") {
for (const p of pieces) {
let auMembers = await this.client
.api(`${this.getGraphApiEndpoint()}/v1.0/directory/administrativeUnits/${p}/members`)
.api(`https://graph.microsoft.com/v1.0/directory/administrativeUnits/${p}/members`)
.get();
// eslint-disable-next-line
while (true) {
@@ -459,10 +457,10 @@ export class EntraIdDirectoryService extends BaseDirectoryService implements IDi
const identityAuthority =
this.dirConfig.identityAuthority != null
? this.dirConfig.identityAuthority
: EntraIdPublicIdentityAuthority;
: AzurePublicIdentityAuhtority;
if (
identityAuthority !== EntraIdPublicIdentityAuthority &&
identityAuthority !== EntraIdGovernmentIdentityAuthority
identityAuthority !== AzurePublicIdentityAuhtority &&
identityAuthority !== AzureGovermentIdentityAuhtority
) {
done(new Error(this.i18nService.t("dirConfigIncomplete")), null);
return;
@@ -480,7 +478,7 @@ export class EntraIdDirectoryService extends BaseDirectoryService implements IDi
client_id: this.dirConfig.applicationId,
client_secret: this.dirConfig.key,
grant_type: "client_credentials",
scope: `${this.getGraphApiEndpoint()}/.default`,
scope: "https://graph.microsoft.com/.default",
});
const req = https
@@ -544,10 +542,4 @@ export class EntraIdDirectoryService extends BaseDirectoryService implements IDi
exp.setSeconds(exp.getSeconds() + expSeconds);
this.accessTokenExpiration = exp;
}
private getGraphApiEndpoint(): string {
return this.dirConfig.identityAuthority === EntraIdGovernmentIdentityAuthority
? EntraIdGovernmentGraphEndpoint
: EntraIdPublicGraphEndpoint;
}
}

View File

@@ -1,75 +0,0 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilder, RequestBuilderOptions } from "../abstractions/request-builder.service";
import { batchSize } from "./sync.service";
/**
* This class is responsible for batching large sync requests (>2k users) into multiple smaller
* requests to the /import endpoint. This is done to ensure we are under the default
* maximum packet size for NGINX web servers to avoid the request potentially timing out
* */
export class BatchRequestBuilder implements RequestBuilder {
buildRequest(
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
): OrganizationImportRequest[] {
if (options.overwriteExisting) {
throw new Error(
"You cannot use the 'Remove and re-add organization users during the next sync' option with large imports.",
);
}
const requests: OrganizationImportRequest[] = [];
if (users?.length > 0) {
const usersRequest = users.map((u) => {
return {
email: u.email,
externalId: u.externalId,
deleted: u.deleted || (options.removeDisabled && u.disabled),
};
});
// Partition users
for (let i = 0; i < usersRequest.length; i += batchSize) {
const u = usersRequest.slice(i, i + batchSize);
const req = new OrganizationImportRequest({
groups: [],
users: u,
largeImport: true,
overwriteExisting: false,
});
requests.push(req);
}
}
if (groups?.length > 0) {
const groupRequest = groups.map((g) => {
return {
name: g.name,
externalId: g.externalId,
memberExternalIds: Array.from(g.userMemberExternalIds),
};
});
// Partition groups
for (let i = 0; i < groupRequest.length; i += batchSize) {
const g = groupRequest.slice(i, i + batchSize);
const req = new OrganizationImportRequest({
groups: g,
users: [],
largeImport: true,
overwriteExisting: false,
});
requests.push(req);
}
}
return requests;
}
}
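To connect this implementation with the spec that follows: with the exported batchSize of 2000, a simulated directory of 11000 users and 11000 groups partitions into 6 user-only requests plus 6 group-only requests. A minimal sketch, with import paths following the aliases used elsewhere in this diff:

import { BatchRequestBuilder } from "@/src/services/batch-request-builder";
import { groupSimulator, userSimulator } from "@/src/utils/request-builder-helper";

const requests = new BatchRequestBuilder().buildRequest(
  groupSimulator(11000),
  userSimulator(11000),
  { overwriteExisting: false, removeDisabled: false },
);
// 11000 users  -> ceil(11000 / 2000) = 6 user requests, each flagged largeImport: true
// 11000 groups -> another 6 group requests
// requests.length === 12, which is the count the spec below asserts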

View File

@@ -1,75 +0,0 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilderOptions } from "../abstractions/request-builder.service";
import { groupSimulator, userSimulator } from "../utils/request-builder-helper";
import { BatchRequestBuilder } from "./batch-request-builder";
describe("BatchRequestBuilder", () => {
let batchRequestBuilder: BatchRequestBuilder;
beforeEach(async () => {
batchRequestBuilder = new BatchRequestBuilder();
});
const defaultOptions: RequestBuilderOptions = Object.freeze({
overwriteExisting: false,
removeDisabled: false,
});
it("BatchRequestBuilder batches requests for > 2000 users", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, defaultOptions);
expect(requests.length).toEqual(12);
});
it("BatchRequestBuilder throws error when overwriteExisting is true", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const options = { ...defaultOptions, overwriteExisting: true };
const r = () => batchRequestBuilder.buildRequest(mockGroups, mockUsers, options);
expect(r).toThrow(
"You cannot use the 'Remove and re-add organization users during the next sync' option with large imports.",
);
});
it("BatchRequestBuilder returns requests with deleted users when removeDisabled is true", () => {
const mockGroups = groupSimulator(11000);
const mockUsers = userSimulator(11000);
const disabledUser1 = new UserEntry();
const disabledUserEmail1 = GetUniqueString() + "@email.com";
const disabledUser2 = new UserEntry();
const disabledUserEmail2 = GetUniqueString() + "@email.com";
disabledUser1.disabled = true;
disabledUser1.email = disabledUserEmail1;
disabledUser2.disabled = true;
disabledUser2.email = disabledUserEmail2;
mockUsers[0] = disabledUser1;
mockUsers.push(disabledUser2);
const options = { ...defaultOptions, removeDisabled: true };
const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, options);
expect(requests[0].members).toContainEqual({ email: disabledUserEmail1, deleted: true });
expect(requests[1].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[3].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[4].members.find((m) => m.deleted)).toBeUndefined();
expect(requests[5].members).toContainEqual({ email: disabledUserEmail2, deleted: true });
});
it("BatchRequestBuilder retuns an empty array when there are no users or groups", () => {
const requests = batchRequestBuilder.buildRequest([], [], defaultOptions);
expect(requests).toEqual([]);
});
});

View File

@@ -1,37 +0,0 @@
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
import { LogService } from "@/jslib/common/src/abstractions/log.service";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { StateService } from "../abstractions/state.service";
import { DirectoryType } from "../enums/directoryType";
import { EntraIdDirectoryService } from "./entra-id-directory.service";
import { GSuiteDirectoryService } from "./gsuite-directory.service";
import { LdapDirectoryService } from "./ldap-directory.service";
import { OktaDirectoryService } from "./okta-directory.service";
import { OneLoginDirectoryService } from "./onelogin-directory.service";
export class DefaultDirectoryFactoryService implements DirectoryFactoryService {
constructor(
private logService: LogService,
private i18nService: I18nService,
private stateService: StateService,
) {}
createService(directoryType: DirectoryType) {
switch (directoryType) {
case DirectoryType.GSuite:
return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.EntraID:
return new EntraIdDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Ldap:
return new LdapDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Okta:
return new OktaDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.OneLogin:
return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService);
default:
throw new Error("Invalid Directory Type");
}
}
}
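A brief usage sketch of this factory as it is wired in main.ts earlier in this diff; the logService, i18nService, and stateService instances are assumed to be constructed already:

import { DirectoryType } from "@/src/enums/directoryType";
import { DefaultDirectoryFactoryService } from "@/src/services/directory-factory.service";

const directoryFactory = new DefaultDirectoryFactoryService(logService, i18nService, stateService);

// SyncService resolves the concrete directory service from the configured type:
const directoryService = directoryFactory.createService(DirectoryType.Ldap);
const [groups, users] = await directoryService.getEntries(/* force */ true, /* test */ false);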

View File

@@ -141,7 +141,7 @@ export class GSuiteDirectoryService extends BaseDirectoryService implements IDir
entry.referenceId = user.id;
entry.externalId = user.id;
entry.email = user.primaryEmail != null ? user.primaryEmail.trim().toLowerCase() : null;
entry.disabled = user.suspended || user.archived || false;
entry.disabled = user.suspended || false;
entry.deleted = deleted;
return entry;
}

View File

@@ -5,7 +5,8 @@ import { LogService } from "../../jslib/common/src/abstractions/log.service";
import { groupFixtures } from "../../openldap/group-fixtures";
import { userFixtures } from "../../openldap/user-fixtures";
import { DirectoryType } from "../enums/directoryType";
import { getLdapConfiguration, getSyncConfiguration } from "../utils/test-fixtures";
import { LdapConfiguration } from "../models/ldapConfiguration";
import { SyncConfiguration } from "../models/syncConfiguration";
import { LdapDirectoryService } from "./ldap-directory.service";
import { StateService } from "./state.service";
@@ -153,3 +154,54 @@ describe("ldapDirectoryService", () => {
});
});
});
/**
* @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration.
*/
const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({
ssl: false,
startTls: false,
tlsCaPath: null,
sslAllowUnauthorized: false,
sslCertPath: null,
sslKeyPath: null,
sslCaPath: null,
hostname: "localhost",
port: 1389,
domain: null,
rootPath: "dc=bitwarden,dc=com",
currentUser: false,
username: "cn=admin,dc=bitwarden,dc=com",
password: "admin",
ad: false,
pagedSearch: false,
...(config ?? {}),
});
/**
* @returns a basic sync configuration. Can be overridden by passing in a partial configuration.
*/
const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({
users: false,
groups: false,
interval: 5,
userFilter: null,
groupFilter: null,
removeDisabled: false,
overwriteExisting: false,
largeImport: false,
// Ldap properties
groupObjectClass: "posixGroup",
userObjectClass: "person",
groupPath: null,
userPath: null,
groupNameAttribute: "cn",
userEmailAttribute: "mail",
memberAttribute: "memberUid",
useEmailPrefixSuffix: false,
emailPrefixAttribute: "sAMAccountName",
emailSuffix: null,
creationDateAttribute: "whenCreated",
revisionDateAttribute: "whenChanged",
...(config ?? {}),
});
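For orientation, both helpers take a Partial of the respective configuration, so individual tests only spell out the values that differ from the defaults above. Two illustrative overrides (the specific values are examples, not taken from an existing test):

// Same defaults, but connect with STARTTLS and trust the local test CA:
const tlsConfig = getLdapConfiguration({ startTls: true, tlsCaPath: "./openldap/certs/rootCA.pem" });

// Sync both users and groups, overriding the interval:
const intervalConfig = getSyncConfiguration({ users: true, groups: true, interval: 10 });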

View File

@@ -18,11 +18,6 @@ import { IDirectoryService } from "./directory.service";
const UserControlAccountDisabled = 2;
/**
* The attribute name for the unique identifier used by Active Directory.
*/
const ActiveDirectoryExternalId = "objectGUID";
export class LdapDirectoryService implements IDirectoryService {
private client: ldapts.Client;
private dirConfig: LdapConfiguration;
@@ -245,7 +240,7 @@ export class LdapDirectoryService implements IDirectoryService {
* otherwise it falls back to the provided referenceId.
*/
private getExternalId(searchEntry: ldapts.Entry, referenceId: string) {
const attr = this.getAttr<Buffer>(searchEntry, ActiveDirectoryExternalId);
const attr = this.getAttr<Buffer>(searchEntry, "objectGUID");
if (attr != null) {
return this.bufToGuid(attr);
} else {
@@ -363,9 +358,6 @@ export class LdapDirectoryService implements IDirectoryService {
filter: filter,
scope: "sub",
paged: this.dirConfig.pagedSearch,
// We need to expressly tell ldapts what attributes to return as Buffer objects,
// otherwise they are returned as strings
explicitBufferAttributes: [ActiveDirectoryExternalId],
};
const { searchEntries } = await this.client.search(path, options, controls);
return searchEntries.map((e) => processEntry(e)).filter((e) => e != null);

View File

@@ -1,79 +0,0 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilderOptions } from "../abstractions/request-builder.service";
import { groupSimulator, userSimulator } from "../utils/request-builder-helper";
import { SingleRequestBuilder } from "./single-request-builder";
describe("SingleRequestBuilder", () => {
let singleRequestBuilder: SingleRequestBuilder;
beforeEach(async () => {
singleRequestBuilder = new SingleRequestBuilder();
});
const defaultOptions: RequestBuilderOptions = Object.freeze({
overwriteExisting: false,
removeDisabled: false,
});
it("SingleRequestBuilder returns single request for 200 users", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const requests = singleRequestBuilder.buildRequest(mockGroups, mockUsers, defaultOptions);
expect(requests.length).toEqual(1);
});
it("SingleRequestBuilder returns request with overwriteExisting enabled", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const options = { ...defaultOptions, overwriteExisting: true };
const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];
expect(request.overwriteExisting).toBe(true);
});
it("SingleRequestBuilder returns request with deleted user when removeDisabled is true", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const disabledUser = new UserEntry();
const disabledUserEmail = GetUniqueString() + "@example.com";
disabledUser.disabled = true;
disabledUser.email = disabledUserEmail;
mockUsers.push(disabledUser);
const options = { ...defaultOptions, removeDisabled: true };
const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];
expect(request.members.length).toEqual(201);
expect(request.members.pop()).toEqual(
expect.objectContaining({ email: disabledUserEmail, deleted: true }),
);
expect(request.overwriteExisting).toBe(false);
});
it("SingleRequestBuilder returns request with deleted user and overwriteExisting enabled when overwriteExisting and removeDisabled are true", () => {
const mockGroups = groupSimulator(200);
const mockUsers = userSimulator(200);
const disabledUser = new UserEntry();
const disabledUserEmail = GetUniqueString() + "@example.com";
disabledUser.disabled = true;
disabledUser.email = disabledUserEmail;
mockUsers.push(disabledUser);
const options = { overwriteExisting: true, removeDisabled: true };
const request = singleRequestBuilder.buildRequest(mockGroups, mockUsers, options)[0];
expect(request.members.pop()).toEqual(
expect.objectContaining({ email: disabledUserEmail, deleted: true }),
);
expect(request.overwriteExisting).toBe(true);
});
});

View File

@@ -1,41 +0,0 @@
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { GroupEntry } from "@/src/models/groupEntry";
import { UserEntry } from "@/src/models/userEntry";
import { RequestBuilder, RequestBuilderOptions } from "../abstractions/request-builder.service";
/**
* This class is responsible for building small (<2k users) syncs as a single
* request to the /import endpoint. This is done to be backwards compatible with
* existing functionality for sync requests that are sufficiently small enough to not
* exceed default maximum packet size limits on NGINX web servers.
* */
export class SingleRequestBuilder implements RequestBuilder {
buildRequest(
groups: GroupEntry[],
users: UserEntry[],
options: RequestBuilderOptions,
): OrganizationImportRequest[] {
return [
new OrganizationImportRequest({
groups: (groups ?? []).map((g) => {
return {
name: g.name,
externalId: g.externalId,
memberExternalIds: Array.from(g.userMemberExternalIds),
};
}),
users: (users ?? []).map((u) => {
return {
email: u.email,
externalId: u.externalId,
deleted: u.deleted || (options.removeDisabled && u.disabled),
};
}),
overwriteExisting: options.overwriteExisting,
largeImport: false,
}),
];
}
}

View File

@@ -11,7 +11,7 @@ import { StateService as StateServiceAbstraction } from "@/src/abstractions/stat
import { DirectoryType } from "@/src/enums/directoryType";
import { IConfiguration } from "@/src/models/IConfiguration";
import { Account } from "@/src/models/account";
import { EntraIdConfiguration } from "@/src/models/entraIdConfiguration";
import { AzureConfiguration } from "@/src/models/azureConfiguration";
import { GSuiteConfiguration } from "@/src/models/gsuiteConfiguration";
import { LdapConfiguration } from "@/src/models/ldapConfiguration";
import { OktaConfiguration } from "@/src/models/oktaConfiguration";
@@ -21,10 +21,7 @@ import { SyncConfiguration } from "@/src/models/syncConfiguration";
const SecureStorageKeys = {
ldap: "ldapPassword",
gsuite: "gsuitePrivateKey",
// Azure Active Directory was renamed to Entra ID, but we've kept the old property name
// to be backwards compatible with existing configurations.
azure: "azureKey",
entra: "entraKey",
okta: "oktaToken",
oneLogin: "oneLoginClientSecret",
userDelta: "userDeltaToken",
@@ -71,8 +68,8 @@ export class StateService
case DirectoryType.Ldap:
(configWithSecrets as any).password = await this.getLdapKey();
break;
case DirectoryType.EntraID:
(configWithSecrets as any).key = await this.getEntraKey();
case DirectoryType.AzureActiveDirectory:
(configWithSecrets as any).key = await this.getAzureKey();
break;
case DirectoryType.Okta:
(configWithSecrets as any).token = await this.getOktaKey();
@@ -96,7 +93,7 @@ export class StateService
config:
| LdapConfiguration
| GSuiteConfiguration
| EntraIdConfiguration
| AzureConfiguration
| OktaConfiguration
| OneLoginConfiguration,
): Promise<any> {
@@ -109,11 +106,11 @@ export class StateService
await this.setLdapConfiguration(ldapConfig);
break;
}
case DirectoryType.EntraID: {
const entraConfig = config as EntraIdConfiguration;
await this.setEntraKey(entraConfig.key);
entraConfig.key = StoredSecurely;
await this.setEntraConfiguration(entraConfig);
case DirectoryType.AzureActiveDirectory: {
const azureConfig = config as AzureConfiguration;
await this.setAzureKey(azureConfig.key);
azureConfig.key = StoredSecurely;
await this.setAzureConfiguration(azureConfig);
break;
}
case DirectoryType.Okta: {
@@ -190,32 +187,23 @@ export class StateService
);
}
private async getEntraKey(options?: StorageOptions): Promise<string> {
private async getAzureKey(options?: StorageOptions): Promise<string> {
options = this.reconcileOptions(options, await this.defaultSecureStorageOptions());
if (options?.userId == null) {
return null;
}
const entraKey = await this.secureStorageService.get<string>(
`${options.userId}_${SecureStorageKeys.entra}`,
);
if (entraKey != null) {
return entraKey;
}
return await this.secureStorageService.get<string>(
`${options.userId}_${SecureStorageKeys.azure}`,
);
}
private async setEntraKey(value: string, options?: StorageOptions): Promise<void> {
private async setAzureKey(value: string, options?: StorageOptions): Promise<void> {
options = this.reconcileOptions(options, await this.defaultSecureStorageOptions());
if (options?.userId == null) {
return;
}
await this.secureStorageService.save(
`${options.userId}_${SecureStorageKeys.entra}`,
`${options.userId}_${SecureStorageKeys.azure}`,
value,
options,
);
@@ -271,8 +259,8 @@ export class StateService
return await this.getLdapConfiguration();
case DirectoryType.GSuite:
return await this.getGsuiteConfiguration();
case DirectoryType.EntraID:
return await this.getEntraConfiguration();
case DirectoryType.AzureActiveDirectory:
return await this.getAzureConfiguration();
case DirectoryType.Okta:
return await this.getOktaConfiguration();
case DirectoryType.OneLogin:
@@ -317,28 +305,17 @@ export class StateService
);
}
async getEntraConfiguration(options?: StorageOptions): Promise<EntraIdConfiguration> {
const entraConfig = (
await this.getAccount(this.reconcileOptions(options, await this.defaultOnDiskOptions()))
)?.directoryConfigurations?.entra;
if (entraConfig != null) {
return entraConfig;
}
async getAzureConfiguration(options?: StorageOptions): Promise<AzureConfiguration> {
return (
await this.getAccount(this.reconcileOptions(options, await this.defaultOnDiskOptions()))
)?.directoryConfigurations?.azure;
}
async setEntraConfiguration(
value: EntraIdConfiguration,
options?: StorageOptions,
): Promise<void> {
async setAzureConfiguration(value: AzureConfiguration, options?: StorageOptions): Promise<void> {
const account = await this.getAccount(
this.reconcileOptions(options, await this.defaultOnDiskOptions()),
);
account.directoryConfigurations.entra = value;
account.directoryConfigurations.azure = value;
await this.saveAccount(
account,
this.reconcileOptions(options, await this.defaultOnDiskOptions()),

View File

@@ -3,7 +3,7 @@ import { StateMigrationService as BaseStateMigrationService } from "@/jslib/comm
import { DirectoryType } from "@/src/enums/directoryType";
import { Account, DirectoryConfigurations, DirectorySettings } from "@/src/models/account";
import { EntraIdConfiguration } from "@/src/models/entraIdConfiguration";
import { AzureConfiguration } from "@/src/models/azureConfiguration";
import { GSuiteConfiguration } from "@/src/models/gsuiteConfiguration";
import { LdapConfiguration } from "@/src/models/ldapConfiguration";
import { OktaConfiguration } from "@/src/models/oktaConfiguration";
@@ -14,7 +14,6 @@ const SecureStorageKeys: { [key: string]: any } = {
ldap: "ldapPassword",
gsuite: "gsuitePrivateKey",
azure: "azureKey",
entra: "entraIdKey",
okta: "oktaToken",
oneLogin: "oneLoginClientSecret",
directoryConfigPrefix: "directoryConfig_",
@@ -105,16 +104,13 @@ export class StateMigrationService extends BaseStateMigrationService {
}
};
// Initialize typed objects from key/value pairs in storage to either be saved temporarily until an account is authed or applied to the active account
// Initilize typed objects from key/value pairs in storage to either be saved temporarily until an account is authed or applied to the active account
const getDirectoryConfig = async <T>(type: DirectoryType) =>
await this.get<T>(SecureStorageKeys.directoryConfigPrefix + type);
const directoryConfigs: DirectoryConfigurations = {
ldap: await getDirectoryConfig<LdapConfiguration>(DirectoryType.Ldap),
gsuite: await getDirectoryConfig<GSuiteConfiguration>(DirectoryType.GSuite),
// Azure Active Directory was renamed to Entra ID, but we've kept the old property name
// to be backwards compatible with existing configurations.
azure: await getDirectoryConfig<EntraIdConfiguration>(DirectoryType.EntraID),
entra: await getDirectoryConfig<EntraIdConfiguration>(DirectoryType.EntraID),
azure: await getDirectoryConfig<AzureConfiguration>(DirectoryType.AzureActiveDirectory),
okta: await getDirectoryConfig<OktaConfiguration>(DirectoryType.Okta),
oneLogin: await getDirectoryConfig<OneLoginConfiguration>(DirectoryType.OneLogin),
};

View File

@@ -1,132 +0,0 @@
import { mock, MockProxy } from "jest-mock-extended";
import { ApiService } from "@/jslib/common/src/abstractions/api.service";
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
import { EnvironmentService } from "@/jslib/common/src/services/environment.service";
import { I18nService } from "../../jslib/common/src/abstractions/i18n.service";
import { LogService } from "../../jslib/common/src/abstractions/log.service";
import { groupFixtures } from "../../openldap/group-fixtures";
import { userFixtures } from "../../openldap/user-fixtures";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { DirectoryType } from "../enums/directoryType";
import { getLdapConfiguration, getSyncConfiguration } from "../utils/test-fixtures";
import { BatchRequestBuilder } from "./batch-request-builder";
import { LdapDirectoryService } from "./ldap-directory.service";
import { SingleRequestBuilder } from "./single-request-builder";
import { StateService } from "./state.service";
import { SyncService } from "./sync.service";
import * as constants from "./sync.service";
describe("SyncService", () => {
let logService: MockProxy<LogService>;
let i18nService: MockProxy<I18nService>;
let stateService: MockProxy<StateService>;
let cryptoFunctionService: MockProxy<CryptoFunctionService>;
let apiService: MockProxy<ApiService>;
let messagingService: MockProxy<MessagingService>;
let environmentService: MockProxy<EnvironmentService>;
let directoryFactory: MockProxy<DirectoryFactoryService>;
let batchRequestBuilder: BatchRequestBuilder;
let singleRequestBuilder: SingleRequestBuilder;
let syncService: SyncService;
let directoryService: LdapDirectoryService;
const originalBatchSize = constants.batchSize;
beforeEach(() => {
logService = mock();
i18nService = mock();
stateService = mock();
cryptoFunctionService = mock();
apiService = mock();
messagingService = mock();
environmentService = mock();
directoryFactory = mock();
stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
stateService.getOrganizationId.mockResolvedValue("fakeId");
directoryService = new LdapDirectoryService(logService, i18nService, stateService);
directoryFactory.createService.mockReturnValue(directoryService);
batchRequestBuilder = new BatchRequestBuilder();
singleRequestBuilder = new SingleRequestBuilder();
syncService = new SyncService(
cryptoFunctionService,
apiService,
messagingService,
i18nService,
environmentService,
stateService,
batchRequestBuilder,
singleRequestBuilder,
directoryFactory,
);
});
describe("OpenLdap integration: ", () => {
it("with largeImport disabled matches directory fixture data", async () => {
stateService.getDirectory
.calledWith(DirectoryType.Ldap)
.mockResolvedValue(getLdapConfiguration());
stateService.getSync.mockResolvedValue(
getSyncConfiguration({
users: true,
groups: true,
largeImport: false,
overwriteExisting: false,
}),
);
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be differet from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
const syncResult = await syncService.sync(false, false);
expect(syncResult).toEqual([groupFixtures, userFixtures]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(
expect.objectContaining({ overwriteExisting: false }),
);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
});
it("with largeImport enabled matches directory fixture data", async () => {
stateService.getDirectory
.calledWith(DirectoryType.Ldap)
.mockResolvedValue(getLdapConfiguration());
stateService.getSync.mockResolvedValue(
getSyncConfiguration({
users: true,
groups: true,
largeImport: true,
overwriteExisting: false,
}),
);
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be differet from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
// @ts-expect-error This is a workaround to make the batchsize smaller to trigger the batching logic since its a const.
constants.batchSize = 4;
const syncResult = await syncService.sync(false, false);
expect(syncResult).toEqual([groupFixtures, userFixtures]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(
expect.objectContaining({ overwriteExisting: false }),
);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
// @ts-expect-error Reset batch size to original state.
constants.batchSize = originalBatchSize;
});
});
});

View File

@@ -1,135 +0,0 @@
import { mock, MockProxy } from "jest-mock-extended";
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { ApiService } from "@/jslib/common/src/services/api.service";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { DirectoryType } from "../enums/directoryType";
import { getSyncConfiguration } from "../utils/test-fixtures";
import { BatchRequestBuilder } from "./batch-request-builder";
import { I18nService } from "./i18n.service";
import { LdapDirectoryService } from "./ldap-directory.service";
import { SingleRequestBuilder } from "./single-request-builder";
import { StateService } from "./state.service";
import { SyncService } from "./sync.service";
import * as constants from "./sync.service";
import { groupFixtures } from "@/openldap/group-fixtures";
import { userFixtures } from "@/openldap/user-fixtures";
describe("SyncService", () => {
let cryptoFunctionService: MockProxy<CryptoFunctionService>;
let apiService: MockProxy<ApiService>;
let messagingService: MockProxy<MessagingService>;
let i18nService: MockProxy<I18nService>;
let environmentService: MockProxy<EnvironmentService>;
let stateService: MockProxy<StateService>;
let directoryFactory: MockProxy<DirectoryFactoryService>;
let batchRequestBuilder: MockProxy<BatchRequestBuilder>;
let singleRequestBuilder: MockProxy<SingleRequestBuilder>;
let syncService: SyncService;
const originalBatchSize = constants.batchSize;
beforeEach(() => {
cryptoFunctionService = mock();
apiService = mock();
messagingService = mock();
i18nService = mock();
environmentService = mock();
stateService = mock();
directoryFactory = mock();
batchRequestBuilder = mock();
singleRequestBuilder = mock();
stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
stateService.getOrganizationId.mockResolvedValue("fakeId");
const mockDirectoryService = mock<LdapDirectoryService>();
mockDirectoryService.getEntries.mockResolvedValue([groupFixtures, userFixtures]);
directoryFactory.createService.mockReturnValue(mockDirectoryService);
syncService = new SyncService(
cryptoFunctionService,
apiService,
messagingService,
i18nService,
environmentService,
stateService,
batchRequestBuilder,
singleRequestBuilder,
directoryFactory,
);
});
it("Sync posts single request successfully for unique hashes", async () => {
stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be differet from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
const mockRequest: OrganizationImportRequest[] = [
{
members: [],
groups: [],
overwriteExisting: true,
largeImport: true,
},
];
singleRequestBuilder.buildRequest.mockReturnValue(mockRequest);
await syncService.sync(true, false);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
});
it("Sync posts multiple request successfully for unique hashes", async () => {
stateService.getSync.mockResolvedValue(
getSyncConfiguration({ groups: true, users: true, largeImport: true }),
);
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be differet from the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("unique hash");
// @ts-expect-error This is a workaround to make the batchsize smaller to trigger the batching logic since its a const.
constants.batchSize = 4;
const mockRequests = new Array(6).fill({
members: [],
groups: [],
overwriteExisting: true,
largeImport: true,
});
batchRequestBuilder.buildRequest.mockReturnValue(mockRequests);
await syncService.sync(true, false);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[0]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[1]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[2]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[3]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[4]);
expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[5]);
// @ts-expect-error Reset batch size back to original value.
constants.batchSize = originalBatchSize;
});
it("does not post for the same hash", async () => {
stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
// This arranges the last hash to be the same as the ArrayBuffer after it is converted to b64
stateService.getLastSyncHash.mockResolvedValue("AA==");
await syncService.sync(true, false);
expect(apiService.postPublicImportDirectory).not.toHaveBeenCalled();
});
});

View File

@@ -2,40 +2,35 @@ import { ApiService } from "@/jslib/common/src/abstractions/api.service";
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service";
import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service";
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service";
import { LogService } from "@/jslib/common/src/abstractions/log.service";
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service";
import { Utils } from "@/jslib/common/src/misc/utils";
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest";
import { DirectoryFactoryService } from "../abstractions/directory-factory.service";
import { StateService } from "../abstractions/state.service";
import { DirectoryType } from "../enums/directoryType";
import { GroupEntry } from "../models/groupEntry";
import { SyncConfiguration } from "../models/syncConfiguration";
import { UserEntry } from "../models/userEntry";
import { BatchRequestBuilder } from "./batch-request-builder";
import { SingleRequestBuilder } from "./single-request-builder";
export interface HashResult {
hash: string;
hashLegacy: string;
}
export const batchSize = 2000;
import { AzureDirectoryService } from "./azure-directory.service";
import { IDirectoryService } from "./directory.service";
import { GSuiteDirectoryService } from "./gsuite-directory.service";
import { LdapDirectoryService } from "./ldap-directory.service";
import { OktaDirectoryService } from "./okta-directory.service";
import { OneLoginDirectoryService } from "./onelogin-directory.service";
export class SyncService {
private dirType: DirectoryType;
constructor(
private logService: LogService,
private cryptoFunctionService: CryptoFunctionService,
private apiService: ApiService,
private messagingService: MessagingService,
private i18nService: I18nService,
private environmentService: EnvironmentService,
private stateService: StateService,
private batchRequestBuilder: BatchRequestBuilder,
private singleRequestBuilder: SingleRequestBuilder,
private directoryFactory: DirectoryFactoryService,
) {}
async sync(force: boolean, test: boolean): Promise<[GroupEntry[], UserEntry[]]> {
@@ -44,7 +39,7 @@ export class SyncService {
throw new Error("No directory configured.");
}
const directoryService = this.directoryFactory.createService(this.dirType);
const directoryService = this.getDirectoryService();
if (directoryService == null) {
throw new Error("Cannot load directory service.");
}
@@ -83,36 +78,15 @@ export class SyncService {
return [groups, users];
}
const reqs = this.buildRequest(groups, users, syncConfig);
const req = this.buildRequest(
groups,
users,
syncConfig.removeDisabled,
syncConfig.overwriteExisting,
syncConfig.largeImport,
);
const reqJson = JSON.stringify(req);
const result: HashResult = await this.generateHash(reqs);
if (result.hash && (await this.isNewHash(result))) {
for (const req of reqs) {
await this.apiService.postPublicImportDirectory(req);
}
await this.stateService.setLastSyncHash(result.hash);
} else {
groups = null;
users = null;
}
await this.saveSyncTimes(syncConfig, now);
this.messagingService.send("dirSyncCompleted", { successfully: true });
return [groups, users];
} catch (e) {
if (!test) {
await this.stateService.setGroupDelta(startingGroupDelta);
await this.stateService.setUserDelta(startingUserDelta);
}
this.messagingService.send("dirSyncCompleted", { successfully: false });
throw e;
}
}
async generateHash(reqs: OrganizationImportRequest[]): Promise<HashResult> {
const reqJson = JSON.stringify(reqs?.length === 1 ? reqs[0] : reqs);
const orgId = await this.stateService.getOrganizationId();
if (orgId == null) {
throw new Error("Organization not set.");
@@ -135,14 +109,28 @@ export class SyncService {
if (hashBuff != null) {
hash = Utils.fromBufferToB64(hashBuff);
}
return { hash, hashLegacy };
}
async isNewHash(hashResult: HashResult): Promise<boolean> {
const lastHash = await this.stateService.getLastSyncHash();
return lastHash == null || (hashResult.hash !== lastHash && hashResult.hashLegacy !== lastHash);
if (lastHash == null || (hash !== lastHash && hashLegacy !== lastHash)) {
await this.apiService.postPublicImportDirectory(req);
await this.stateService.setLastSyncHash(hash);
} else {
groups = null;
users = null;
}
await this.saveSyncTimes(syncConfig, now);
this.messagingService.send("dirSyncCompleted", { successfully: true });
return [groups, users];
} catch (e) {
if (!test) {
await this.stateService.setGroupDelta(startingGroupDelta);
await this.stateService.setUserDelta(startingUserDelta);
}
this.messagingService.send("dirSyncCompleted", { successfully: false });
throw e;
}
}
private removeDuplicateUsers(users: UserEntry[]) {
@@ -210,16 +198,48 @@ export class SyncService {
return allUsers;
}
private getDirectoryService(): IDirectoryService {
switch (this.dirType) {
case DirectoryType.GSuite:
return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.AzureActiveDirectory:
return new AzureDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Ldap:
return new LdapDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.Okta:
return new OktaDirectoryService(this.logService, this.i18nService, this.stateService);
case DirectoryType.OneLogin:
return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService);
default:
return null;
}
}
private buildRequest(
groups: GroupEntry[],
users: UserEntry[],
syncConfig: SyncConfiguration,
): OrganizationImportRequest[] {
if (syncConfig.largeImport && (groups?.length ?? 0) + (users?.length ?? 0) > batchSize) {
return this.batchRequestBuilder.buildRequest(groups, users, syncConfig);
} else {
return this.singleRequestBuilder.buildRequest(groups, users, syncConfig);
}
removeDisabled: boolean,
overwriteExisting: boolean,
largeImport = false,
) {
return new OrganizationImportRequest({
groups: (groups ?? []).map((g) => {
return {
name: g.name,
externalId: g.externalId,
memberExternalIds: Array.from(g.userMemberExternalIds),
};
}),
users: (users ?? []).map((u) => {
return {
email: u.email,
externalId: u.externalId,
deleted: u.deleted || (removeDisabled && u.disabled),
};
}),
overwriteExisting: overwriteExisting,
largeImport: largeImport,
});
}
private async saveSyncTimes(syncConfig: SyncConfiguration, time: Date) {
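
The hunk above swaps the old inline request construction for a dispatch on the exported batchSize constant: a single request when the import is small, and BatchRequestBuilder output when largeImport is set and the combined group/user count exceeds 2000. The repository's actual BatchRequestBuilder is not part of this diff, so the chunking below is only a sketch of the idea; chunkUsers and its request shape (mirroring the mocks in the spec above) are assumptions for illustration.

const batchSize = 2000; // same threshold sync.service.ts exports in this diff

interface ImportMember {
  email: string;
  externalId: string;
  deleted: boolean;
}

interface ImportRequest {
  members: ImportMember[];
  groups: unknown[];
  overwriteExisting: boolean;
  largeImport: boolean;
}

// Hypothetical chunking: split a large user list into batchSize-sized requests
// so each POST to the public import endpoint stays bounded.
function chunkUsers(users: ImportMember[], overwriteExisting: boolean): ImportRequest[] {
  const requests: ImportRequest[] = [];
  for (let i = 0; i < users.length; i += batchSize) {
    requests.push({
      members: users.slice(i, i + batchSize),
      groups: [],
      overwriteExisting,
      largeImport: true,
    });
  }
  return requests;
}

// 6,001 users would yield Math.ceil(6001 / 2000) = 4 requests, and SyncService
// then posts each element of the returned array in turn (one
// postPublicImportDirectory call per chunk), which is the loop the
// multiple-requests test above asserts against with its six mocked requests.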

View File

@@ -1,26 +0,0 @@
import { GetUniqueString } from "@/jslib/common/spec/utils";
import { GroupEntry } from "../models/groupEntry";
import { UserEntry } from "../models/userEntry";
export function userSimulator(userCount: number): UserEntry[] {
const users: UserEntry[] = [];
while (userCount > 0) {
const userEntry = new UserEntry();
userEntry.email = GetUniqueString() + "@example.com";
users.push(userEntry);
userCount--;
}
return users;
}
export function groupSimulator(groupCount: number): GroupEntry[] {
const groups: GroupEntry[] = [];
while (groupCount > 0) {
const groupEntry = new GroupEntry();
groupEntry.name = GetUniqueString();
groups.push(groupEntry);
groupCount--;
}
return groups;
}
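
These deleted helpers generated bulk fixture data for exercising the batching path. A short sketch of how they would typically be called before their removal in this diff (the import path and the expected batch count are illustrative, assuming the 2000-entry batchSize above):

import { userSimulator, groupSimulator } from "./utils/simulators"; // illustrative path

// Generate enough entries to force largeImport batching (> batchSize total).
const users = userSimulator(4500); // 4500 users with unique @example.com addresses
const groups = groupSimulator(10); // 10 uniquely named groups

// With batchSize = 2000, a batch request builder would be expected to emit
// Math.ceil(4500 / 2000) = 3 user batches, plus however it handles the groups.
console.log(users.length, groups.length); // 4500 10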

View File

@@ -1,53 +0,0 @@
import { LdapConfiguration } from "../models/ldapConfiguration";
import { SyncConfiguration } from "../models/syncConfiguration";
/**
 * @returns a basic LDAP configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration.
*/
export const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({
ssl: false,
startTls: false,
tlsCaPath: null,
sslAllowUnauthorized: false,
sslCertPath: null,
sslKeyPath: null,
sslCaPath: null,
hostname: "localhost",
port: 1389,
domain: null,
rootPath: "dc=bitwarden,dc=com",
currentUser: false,
username: "cn=admin,dc=bitwarden,dc=com",
password: "admin",
ad: false,
pagedSearch: false,
...(config ?? {}),
});
/**
* @returns a basic sync configuration. Can be overridden by passing in a partial configuration.
*/
export const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({
users: false,
groups: false,
interval: 5,
userFilter: null,
groupFilter: null,
removeDisabled: false,
overwriteExisting: false,
largeImport: false,
// Ldap properties
groupObjectClass: "posixGroup",
userObjectClass: "person",
groupPath: null,
userPath: null,
groupNameAttribute: "cn",
userEmailAttribute: "mail",
memberAttribute: "memberUid",
useEmailPrefixSuffix: false,
emailPrefixAttribute: "sAMAccountName",
emailSuffix: null,
creationDateAttribute: "whenCreated",
revisionDateAttribute: "whenChanged",
...(config ?? {}),
});
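
Because both builders accept a Partial override and spread it last, a test only has to name the fields it changes. A brief hedged example (the CA path is a placeholder, not a real fixture in this repo):

// STARTTLS variant of the default local LDAP configuration.
const tlsConfig = getLdapConfiguration({
  startTls: true,
  tlsCaPath: "./spec/fixtures/ca.pem", // placeholder path
});

// Large-import sync, matching the configuration used in the batching spec above.
const largeSync = getSyncConfiguration({ groups: true, users: true, largeImport: true });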

View File

@@ -17,7 +17,12 @@
"paths": {
"tldjs": ["@/jslib/src/misc/tldjs.noop"],
"@/*": ["./*"]
},
"plugins": [
{
"transform": "typescript-transform-paths"
}
]
},
"include": ["src", "jslib", "scripts", "./*.ts"]
}
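
The added plugins entry registers typescript-transform-paths, which rewrites the "@/*" aliases declared under "paths" into relative specifiers in the compiler's output. Plain tsc ignores the plugins field, so this normally takes effect through a patched compiler (ts-patch or ttypescript) or an equivalent build-time hook; how this repository invokes it is not shown in this hunk. Illustratively:

// Source file, using the "@/*" alias declared under "paths":
import { Utils } from "@/jslib/common/src/misc/utils";

// With the transform applied, the emitted JavaScript refers to a relative path
// instead (exact output depends on where the importing file lives), e.g.:
// const utils_1 = require("../../jslib/common/src/misc/utils");
export const toB64 = (buffer: ArrayBuffer) => Utils.fromBufferToB64(buffer);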