Send searches through query parser.
This starts implementing a new search service -- renamed to filter service.
libs/common/src/vault/search/.eslintrc.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "ignorePatterns": [
    "bitwarden-query-grammar.ts" // generated grammar file
  ]
}
libs/common/src/vault/search/ast.ts (new file, 173 lines)
@@ -0,0 +1,173 @@
export const AstNodeTypeNames = [
  "search",
  "not",
  "parentheses",
  "and",
  "or",
  "term",
  "field term",
  "hasAttachment",
  "hasUri",
  "hasFolder",
  "hasCollection",
  "inFolder",
  "inCollection",
  "inOrg",
  "isFavorite",
] as const;
export type AstNodeType = (typeof AstNodeTypeNames)[number];
export type AstNode =
  | Search
  | Not
  | Parentheses
  | And
  | Or
  | Term
  | FieldTerm
  | HasAttachment
  | HasUri
  | HasFolder
  | HasCollection
  | InFolder
  | InCollection
  | InOrg
  | IsFavorite;

type AstNodeBase = {
  type: AstNodeType;
  start: number;
  end: number;
  length: number;
};

export type Search = AstNodeBase & {
  type: "search";
  d: Or;
};

export function isSearch(x: AstNode): x is Search {
  return x.type === "search";
}

export type Not = AstNodeBase & {
  type: "not";
  value: Parentheses;
};

export function isNot(x: AstNode): x is Not {
  return x.type === "not";
}

export type Parentheses = AstNodeBase & {
  type: "parentheses";
  inner: Or;
};

export function isParentheses(x: AstNode): x is Parentheses {
  return x.type === "parentheses";
}

export type And = AstNodeBase & {
  type: "and";
  left: And | Parentheses;
  right: Parentheses;
};

export function isAnd(x: AstNode): x is And {
  return x.type === "and";
}

export type Or = AstNodeBase & {
  type: "or";
  left: Or | And;
  right: And;
};

export function isOr(x: AstNode): x is Or {
  return x.type === "or";
}

export type Term = AstNodeBase & {
  type: "term";
  value: string;
};

export function isTerm(x: AstNode): x is Term {
  return x.type === "term";
}

export type FieldTerm = AstNodeBase & {
  type: "field term";
  field: string;
  term: string;
};

export function isFieldTerm(x: AstNode): x is FieldTerm {
  return x.type === "field term";
}

export type HasAttachment = AstNodeBase & {
  type: "hasAttachment";
};

export function isHasAttachment(x: AstNode): x is HasAttachment {
  return x.type === "hasAttachment";
}

export type HasUri = AstNodeBase & {
  type: "hasUri";
};

export function isHasUri(x: AstNode): x is HasUri {
  return x.type === "hasUri";
}

export type HasFolder = AstNodeBase & {
  type: "hasFolder";
};

export function isHasFolder(x: AstNode): x is HasFolder {
  return x.type === "hasFolder";
}

export type HasCollection = AstNodeBase & {
  type: "hasCollection";
};

export function isHasCollection(x: AstNode): x is HasCollection {
  return x.type === "hasCollection";
}

export type InFolder = AstNodeBase & {
  type: "inFolder";
  folder: string;
};

export function isInFolder(x: AstNode): x is InFolder {
  return x.type === "inFolder";
}

export type InCollection = AstNodeBase & {
  type: "inCollection";
  collection: string;
};

export function isInCollection(x: AstNode): x is InCollection {
  return x.type === "inCollection";
}

export type InOrg = AstNodeBase & {
  type: "inOrg";
  org: string;
};

export function isInOrg(x: AstNode): x is InOrg {
  return x.type === "inOrg";
}

export type IsFavorite = AstNodeBase & {
  type: "isFavorite";
};

export function isIsFavorite(x: AstNode): x is IsFavorite {
  return x.type === "isFavorite";
}
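Reviewer note: a minimal sketch (hypothetical usage, not part of this commit) of how the type guards above narrow an AstNode during a tree walk. The discriminated union means each `is*` check gives the compiler the exact node shape.

import { AstNode, isTerm, isFieldTerm, isNot } from "./ast";

// Pretty-print a node, assuming only the shapes defined above.
function describe(node: AstNode): string {
  if (isTerm(node)) {
    return `term "${node.value}"`; // narrowed to Term, so .value is available
  }
  if (isFieldTerm(node)) {
    return `field "${node.field}" = "${node.term}"`; // narrowed to FieldTerm
  }
  if (isNot(node)) {
    return `not(${describe(node.value)})`; // narrowed to Not
  }
  return node.type; // remaining nodes are identified by their discriminant
}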
libs/common/src/vault/search/bitwarden-query-grammar.ne (new file, 63 lines)
@@ -0,0 +1,63 @@
@preprocessor typescript
@{%
const moo = require("moo");

let lexer = moo.compile({
  // Logical operators
  NOT: 'NOT', // Right associative unary not
  AND: 'AND', // Left associative and
  OR: 'OR', // Left associative or
  WS: /[ \t]+/, // Whitespace
  lparen: '(', // Left parenthesis
  rparen: ')', // Right parenthesis
  // Special search functions
  // Note, there have been issues with reserved words in the past, so we're using a prefix
  func_has: 'has:',
  func_in: 'in:',
  func_is: 'is:',
  // function parameter separator
  access: ':',
  // string match, includes quoted strings with escaped quotes and backslashes
  string: /(?:"(?:\\["\\]|[^\n"\\])*"|(?:\\["\\]|[^\s\(\):])+)/,
})
%}

@lexer lexer

search -> _ OR _ {% function(d) { return { type: 'search', d: d[1], start: d[1].start, end: d[1].end, length: d[1].length } } %}

PARENTHESES -> %lparen _ OR _ %rparen {% function(d) { const start = d[0].offset; const end = d[4].offset; return { type: 'parentheses', inner: d[2], d: d, start, end, length: end - start + 1 } } %}
  | TERM {% id %}

AND -> AND _ %AND _ PARENTHESES {% function(d) { return { type: 'and', left: d[0], right: d[4], d: d, start: d[0].start, end: d[4].end, length: d[4].end - d[0].start + 1 } } %}
  | AND _ PARENTHESES {% function(d) { return { type: 'and', left: d[0], right: d[2], d: d, start: d[0].start, end: d[2].end, length: d[2].end - d[0].start + 1 } } %}
  | PARENTHESES {% id %}

OR -> OR _ %OR _ AND {% function(d) { return { type: 'or', left: d[0], right: d[4], d: d, start: d[0].start, end: d[4].end, length: d[4].end - d[0].start + 1 } } %}
  | AND {% id %}

TERM ->
  # naked string search term, search all fields
  %string {% function(d) { const start = d[0].offset; const end = d[0].offset + d[0].value.length; return { type: 'term', value: d[0].value, d: d[0], start, end, length: d[0].value.length } } %}
  # specified field search term
  | %string %access %string {% function(d) { const start = d[0].offset; const end = d[2].offset + d[2].value.length; return { type: 'field term', field: d[0], term: d[2], d: d, start, end, length: end - start + 1 } } %}
  # only items with attachments
  | %func_has "attachment" {% function(d) { const start = d[0].offset; const length = 14; return { type: 'hasAttachment', d: d, start, end: d[0].offset + length, length } } %}
  # only items with URIs
  | %func_has "uri" {% function(d) { const start = d[0].offset; const length = 7; return { type: 'hasUri', d: d, start, end: d[0].offset + length, length } } %}
  # only items assigned to a folder
  | %func_has "folder" {% function(d) { const start = d[0].offset; const length = 10; return { type: 'hasFolder', d: d, start, end: d[0].offset + length, length } } %}
  # only items assigned to a collection
  | %func_has "collection" {% function(d) { const start = d[0].offset; const length = 14; return { type: 'hasCollection', d: d, start, end: d[0].offset + length, length } } %}
  # only items assigned to a specified folder
  | %func_in "folder" %access %string {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inFolder', folder: d[3], d: d, start, end, length: end - start } } %}
  # only items assigned to a specified collection
  | %func_in "collection" %access %string {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inCollection', collection: d[3], d: d, start, end, length: end - start + 1 } } %}
  # only items assigned to a specified organization
  | %func_in "org" %access %string {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inOrg', org: d[3], d: d, start, end, length: end - start + 1 } } %}
  # only items marked as favorites
  | %func_is "favorite" {% function(d) { const start = d[0].offset; const length = 11; return { type: 'isFavorite', d: d, start, end: d[0].offset + length, length } } %}
  # Boolean NOT operator
  | %NOT _ PARENTHESES {% function(d) { const start = d[0].offset; return { type: 'not', value: d[2], d: d, start, end: d[2].end, length: d[2].end - d[0].offset + 1 } } %}

_ -> %WS:* {% function(d) { return null } %}
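Reviewer note: reading the rules above, OR has the lowest precedence, AND binds tighter (and is also implied by plain juxtaposition via the `AND _ PARENTHESES` production), and NOT applies to a single term or a parenthesized group. The checked-in bitwarden-query-grammar.ts below is the generated output of this file. A few inputs the grammar should accept, as a hedged illustration (exact tokenization depends on the moo lexer above):

// Hypothetical sample inputs, not part of this commit:
const sampleQueries = [
  "github",                          // bare term searched across all fields
  "name:github",                     // field-scoped term (string access string)
  "has:attachment AND is:favorite",  // explicit AND
  "has:uri in:folder:Work",          // implicit AND by juxtaposition
  "NOT (is:favorite OR has:folder)", // NOT over a parenthesized group
];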
libs/common/src/vault/search/bitwarden-query-grammar.ts (new file, 297 lines)
@@ -0,0 +1,297 @@
// Generated automatically by nearley, version 2.20.1
// http://github.com/Hardmath123/nearley
// Bypasses TS6133. Allow declared but unused functions.
// @ts-ignore
function id(d: any[]): any {
  return d[0];
}
declare var lparen: any;
declare var rparen: any;
declare var AND: any;
declare var OR: any;
declare var string: any;
declare var access: any;
declare var func_has: any;
declare var func_in: any;
declare var func_is: any;
declare var NOT: any;
declare var WS: any;

const moo = require("moo");

let lexer = moo.compile({
  // Logical operators
  NOT: "NOT", // Right associative unary not
  AND: "AND", // Left associative and
  OR: "OR", // Left associative or
  WS: /[ \t]+/, // Whitespace
  lparen: "(", // Left parenthesis
  rparen: ")", // Right parenthesis
  // Special search functions
  // Note, there have been issues with reserved words in the past, so we're using a prefix
  func_has: "has:",
  func_in: "in:",
  func_is: "is:",
  // function parameter separator
  access: ":",
  // string match, includes quoted strings with escaped quotes and backslashes
  string: /(?:"(?:\\["\\]|[^\n"\\])*"|(?:\\["\\]|[^\s\(\):])+)/,
});

interface NearleyToken {
  value: any;
  [key: string]: any;
}

interface NearleyLexer {
  reset: (chunk: string, info: any) => void;
  next: () => NearleyToken | undefined;
  save: () => any;
  formatError: (token: never) => string;
  has: (tokenType: string) => boolean;
}

interface NearleyRule {
  name: string;
  symbols: NearleySymbol[];
  postprocess?: (d: any[], loc?: number, reject?: {}) => any;
}

type NearleySymbol = string | { literal: any } | { test: (token: any) => boolean };

interface Grammar {
  Lexer: NearleyLexer | undefined;
  ParserRules: NearleyRule[];
  ParserStart: string;
}

const grammar: Grammar = {
  Lexer: lexer,
  ParserRules: [
    {
      name: "search",
      symbols: ["_", "OR", "_"],
      postprocess: function (d) {
        return { type: "search", d: d[1], start: d[1].start, end: d[1].end, length: d[1].length };
      },
    },
    {
      name: "PARENTHESES",
      symbols: [
        lexer.has("lparen") ? { type: "lparen" } : lparen,
        "_",
        "OR",
        "_",
        lexer.has("rparen") ? { type: "rparen" } : rparen,
      ],
      postprocess: function (d) {
        const start = d[0].offset;
        const end = d[4].offset;
        return { type: "parentheses", inner: d[2], d: d, start, end, length: end - start + 1 };
      },
    },
    { name: "PARENTHESES", symbols: ["TERM"], postprocess: id },
    {
      name: "AND",
      symbols: ["AND", "_", lexer.has("AND") ? { type: "AND" } : AND, "_", "PARENTHESES"],
      postprocess: function (d) {
        return {
          type: "and",
          left: d[0],
          right: d[4],
          d: d,
          start: d[0].start,
          end: d[4].end,
          length: d[4].end - d[0].start + 1,
        };
      },
    },
    {
      name: "AND",
      symbols: ["AND", "_", "PARENTHESES"],
      postprocess: function (d) {
        return {
          type: "and",
          left: d[0],
          right: d[2],
          d: d,
          start: d[0].start,
          end: d[2].end,
          length: d[2].end - d[0].start + 1,
        };
      },
    },
    { name: "AND", symbols: ["PARENTHESES"], postprocess: id },
    {
      name: "OR",
      symbols: ["OR", "_", lexer.has("OR") ? { type: "OR" } : OR, "_", "AND"],
      postprocess: function (d) {
        return {
          type: "or",
          left: d[0],
          right: d[4],
          d: d,
          start: d[0].start,
          end: d[4].end,
          length: d[4].end - d[0].start + 1,
        };
      },
    },
    { name: "OR", symbols: ["AND"], postprocess: id },
    {
      name: "TERM",
      symbols: [lexer.has("string") ? { type: "string" } : string],
      postprocess: function (d) {
        const start = d[0].offset;
        const end = d[0].offset + d[0].value.length;
        return { type: "term", value: d[0].value, d: d[0], start, end, length: d[0].value.length };
      },
    },
    {
      name: "TERM",
      symbols: [
        lexer.has("string") ? { type: "string" } : string,
        lexer.has("access") ? { type: "access" } : access,
        lexer.has("string") ? { type: "string" } : string,
      ],
      postprocess: function (d) {
        const start = d[0].offset;
        const end = d[2].offset + d[2].value.length;
        return {
          type: "field term",
          field: d[0],
          term: d[2],
          d: d,
          start,
          end,
          length: end - start + 1,
        };
      },
    },
    {
      name: "TERM",
      symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "attachment" }],
      postprocess: function (d) {
        const start = d[0].offset;
        const length = 14;
        return { type: "hasAttachment", d: d, start, end: d[0].offset + length, length };
      },
    },
    {
      name: "TERM",
      symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "uri" }],
      postprocess: function (d) {
        const start = d[0].offset;
        const length = 7;
        return { type: "hasUri", d: d, start, end: d[0].offset + length, length };
      },
    },
    {
      name: "TERM",
      symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "folder" }],
      postprocess: function (d) {
        const start = d[0].offset;
        const length = 10;
        return { type: "hasFolder", d: d, start, end: d[0].offset + length, length };
      },
    },
    {
      name: "TERM",
      symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "collection" }],
      postprocess: function (d) {
        const start = d[0].offset;
        const length = 14;
        return { type: "hasCollection", d: d, start, end: d[0].offset + length, length };
      },
    },
    {
      name: "TERM",
      symbols: [
        lexer.has("func_in") ? { type: "func_in" } : func_in,
        { literal: "folder" },
        lexer.has("access") ? { type: "access" } : access,
        lexer.has("string") ? { type: "string" } : string,
      ],
      postprocess: function (d) {
        const start = d[0].offset;
        const end = d[3].offset + d[3].value.length;
        return { type: "inFolder", folder: d[3], d: d, start, end, length: end - start };
      },
    },
    {
      name: "TERM",
      symbols: [
        lexer.has("func_in") ? { type: "func_in" } : func_in,
        { literal: "collection" },
        lexer.has("access") ? { type: "access" } : access,
        lexer.has("string") ? { type: "string" } : string,
      ],
      postprocess: function (d) {
        const start = d[0].offset;
        const end = d[3].offset + d[3].value.length;
        return {
          type: "inCollection",
          collection: d[3],
          d: d,
          start,
          end,
          length: end - start + 1,
        };
      },
    },
    {
      name: "TERM",
      symbols: [
        lexer.has("func_in") ? { type: "func_in" } : func_in,
        { literal: "org" },
        lexer.has("access") ? { type: "access" } : access,
        lexer.has("string") ? { type: "string" } : string,
      ],
      postprocess: function (d) {
        const start = d[0].offset;
        const end = d[3].offset + d[3].value.length;
        return { type: "inOrg", org: d[3], d: d, start, end, length: end - start + 1 };
      },
    },
    {
      name: "TERM",
      symbols: [lexer.has("func_is") ? { type: "func_is" } : func_is, { literal: "favorite" }],
      postprocess: function (d) {
        const start = d[0].offset;
        const length = 11;
        return { type: "isFavorite", d: d, start, end: d[0].offset + length, length };
      },
    },
    {
      name: "TERM",
      symbols: [lexer.has("NOT") ? { type: "NOT" } : NOT, "_", "PARENTHESES"],
      postprocess: function (d) {
        const start = d[0].offset;
        return {
          type: "not",
          value: d[2],
          d: d,
          start,
          end: d[2].end,
          length: d[2].end - d[0].offset + 1,
        };
      },
    },
    { name: "_$ebnf$1", symbols: [] },
    {
      name: "_$ebnf$1",
      symbols: ["_$ebnf$1", lexer.has("WS") ? { type: "WS" } : WS],
      postprocess: (d) => d[0].concat([d[1]]),
    },
    {
      name: "_",
      symbols: ["_$ebnf$1"],
      postprocess: function (d) {
        return null;
      },
    },
  ],
  ParserStart: "search",
};

export default grammar;
libs/common/src/vault/search/filter.service.ts (new file, 207 lines)
@@ -0,0 +1,207 @@
import * as lunr from "lunr";
import {
  combineLatest,
  combineLatestWith,
  map,
  Observable,
  of,
  OperatorFunction,
  pipe,
  switchMap,
} from "rxjs";

import { CollectionService } from "../../../../admin-console/src/common/collections/abstractions";
import { OrganizationService } from "../../admin-console/abstractions/organization/organization.service.abstraction";
import { UriMatchStrategy } from "../../models/domain/domain-service";
import { CipherService } from "../abstractions/cipher.service";
import { FolderService } from "../abstractions/folder/folder.service.abstraction";
import { CipherType, FieldType } from "../enums";
import { CipherView } from "../models/view/cipher.view";

import { parseQuery } from "./parse";
import { ProcessInstructions, SearchContext } from "./query.types";

export class FilterService {
  private static registeredPipeline = false;
  searchContext$: Observable<SearchContext>;
  constructor(
    cipherService: CipherService,
    organizationService: OrganizationService,
    folderService: FolderService,
    collectionService: CollectionService,
  ) {
    const viewsAndIndex$ = cipherService.ciphers$.pipe(
      switchMap((_) => cipherService.getAllDecrypted()),
      switchMap((views) => of([views, this.buildIndex(views)] as const)),
    );

    this.searchContext$ = combineLatest([
      viewsAndIndex$,
      organizationService.organizations$,
      folderService.folderViews$,
      collectionService.decryptedCollections$,
    ]).pipe(
      switchMap(([[ciphers, index], organizations, folders, collections]) => {
        return of({
          ciphers,
          index,
          organizations,
          folders,
          collections,
        });
      }),
    );

    // Currently we have to ensure this is only done a single time. Lunr allows you to register a
    // function multiple times but it will add a warning message to the console. The way it does
    // that breaks when run in a service worker.
    if (!FilterService.registeredPipeline) {
      FilterService.registeredPipeline = true;
      // register lunr pipeline function
      lunr.Pipeline.registerFunction(this.normalizeAccentsPipelineFunction, "normalizeAccents");
    }
  }

  parse = map((query: string) => parseQuery(query));

  filter(): OperatorFunction<ProcessInstructions, CipherView[]> {
    return pipe(
      combineLatestWith(this.searchContext$),
      map(([instructions, context]) => {
        return instructions.filter(context).ciphers;
      }),
    );
  }

  private buildIndex(ciphers: CipherView[]) {
    const builder = new lunr.Builder();
    builder.pipeline.add(this.normalizeAccentsPipelineFunction);
    builder.ref("id");
    builder.field("shortid", { boost: 100, extractor: (c: CipherView) => c.id.substring(0, 8) });
    builder.field("name", { boost: 10 });
    builder.field("subtitle", {
      boost: 5,
      extractor: (c: CipherView) => {
        if (c.subTitle != null && c.type === CipherType.Card) {
          return c.subTitle.replace(/\*/g, "");
        }
        return c.subTitle;
      },
    });
    builder.field("notes");
    builder.field("login.username", {
      extractor: (c: CipherView) => (c.type === CipherType.Login ? c.login?.username : null),
    });
    builder.field("login.uris", {
      boost: 2,
      extractor: (c: CipherView) => this.uriExtractor(c),
    });
    builder.field("fields", { extractor: (c: CipherView) => this.fieldExtractor(c, false) });
    builder.field("fields_joined", { extractor: (c: CipherView) => this.fieldExtractor(c, true) });
    builder.field("attachments", {
      extractor: (c: CipherView) => this.attachmentExtractor(c, false),
    });
    builder.field("attachments_joined", {
      extractor: (c: CipherView) => this.attachmentExtractor(c, true),
    });
    builder.field("organizationid", { extractor: (c: CipherView) => c.organizationId });
    return lunr(function () {
      this.ref("id");
      this.field("name");
      this.field("notes");
      this.field("login.username");
      this.field("login.uris");
      this.field("login.password");
      this.field("login.totp");
      this.field("login.passwordRevisionDate");
      this.field("login.passwordHistory");
      this.field("login.passwordHistory.password");
    });
  }

  private normalizeAccentsPipelineFunction(token: lunr.Token): any {
    const searchableFields = ["name", "login.username", "subtitle", "notes"];
    const fields = (token as any).metadata["fields"];
    const checkFields = fields.every((i: any) => searchableFields.includes(i));

    if (checkFields) {
      return FilterService.normalizeSearchQuery(token.toString());
    }

    return token;
  }

  private fieldExtractor(c: CipherView, joined: boolean) {
    if (!c.hasFields) {
      return null;
    }
    let fields: string[] = [];
    c.fields.forEach((f) => {
      if (f.name != null) {
        fields.push(f.name);
      }
      if (f.type === FieldType.Text && f.value != null) {
        fields.push(f.value);
      }
    });
    fields = fields.filter((f) => f.trim() !== "");
    if (fields.length === 0) {
      return null;
    }
    return joined ? fields.join(" ") : fields;
  }

  private attachmentExtractor(c: CipherView, joined: boolean) {
    if (!c.hasAttachments) {
      return null;
    }
    let attachments: string[] = [];
    c.attachments.forEach((a) => {
      if (a != null && a.fileName != null) {
        if (joined && a.fileName.indexOf(".") > -1) {
          attachments.push(a.fileName.substr(0, a.fileName.lastIndexOf(".")));
        } else {
          attachments.push(a.fileName);
        }
      }
    });
    attachments = attachments.filter((f) => f.trim() !== "");
    if (attachments.length === 0) {
      return null;
    }
    return joined ? attachments.join(" ") : attachments;
  }

  private uriExtractor(c: CipherView) {
    if (c.type !== CipherType.Login || c.login == null || !c.login.hasUris) {
      return null;
    }
    const uris: string[] = [];
    c.login.uris.forEach((u) => {
      if (u.uri == null || u.uri === "") {
        return;
      }
      if (u.hostname != null) {
        uris.push(u.hostname);
        return;
      }
      let uri = u.uri;
      if (u.match !== UriMatchStrategy.RegularExpression) {
        const protocolIndex = uri.indexOf("://");
        if (protocolIndex > -1) {
          uri = uri.substr(protocolIndex + 3);
        }
        const queryIndex = uri.search(/\?|&|#/);
        if (queryIndex > -1) {
          uri = uri.substring(0, queryIndex);
        }
      }
      uris.push(uri);
    });
    return uris.length > 0 ? uris : null;
  }

  // Remove accent/diacritic characters from text. This regex is equivalent to the Diacritic
  // unicode property escape, i.e. it will match all diacritic characters.
  private static normalizeSearchQuery(query: string): string {
    return query?.normalize("NFD").replace(/[\u0300-\u036f]/g, "");
  }
}
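Reviewer note: to make the intended composition of the two operators concrete, here is a hedged usage sketch. The service instance and the query stream are assumptions for illustration, not part of this commit.

import { Subject } from "rxjs";

import { FilterService } from "./filter.service";

declare const filterService: FilterService; // assume constructed with its four dependencies

const query$ = new Subject<string>();
const results$ = query$.pipe(
  filterService.parse,    // OperatorFunction<string, ProcessInstructions>
  filterService.filter(), // OperatorFunction<ProcessInstructions, CipherView[]>
);
results$.subscribe((ciphers) => {
  // render the matching CipherViews
});
query$.next("is:favorite AND github");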
libs/common/src/vault/search/parse.ts (new file, 276 lines)
@@ -0,0 +1,276 @@
import { Parser, Grammar } from "nearley";

import {
  AstNode,
  isAnd,
  isFieldTerm,
  isHasAttachment,
  isHasFolder,
  isHasUri,
  isInCollection,
  isInFolder,
  isInOrg,
  isIsFavorite,
  isNot,
  isOr,
  isParentheses,
  isSearch,
  isTerm,
} from "./ast";
import grammar from "./bitwarden-query-grammar";
import { ProcessInstructions } from "./query.types";

export function parseQuery(query: string): ProcessInstructions {
  const parser = new Parser(Grammar.fromCompiled(grammar));
  parser.feed(query);
  if (!parser.results || parser.results.length === 0) {
    // TODO: Better error handling
    // there should be some invalid token information
    throw new Error("Invalid search query");
  }

  const result = parser.results[0] as AstNode;

  return handleNode(result);
}

function handleNode(node: AstNode): ProcessInstructions {
  if (isSearch(node)) {
    return handleNode(node.d);
  } else if (isOr(node)) {
    const left = handleNode(node.left);
    const right = handleNode(node.right);
    return {
      filter: (context) => {
        const leftFilteredContext = left.filter(context);
        const rightFilteredContext = right.filter(context);
        return {
          ...context,
          ciphers: leftFilteredContext.ciphers.concat(rightFilteredContext.ciphers),
        };
      },
      sections: left.sections.concat(right.sections).concat([
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ]),
    };
  } else if (isNot(node)) {
    const negate = handleNode(node.value);
    return {
      filter: (context) => {
        const filteredContext = negate.filter(context);
        return {
          ...context,
          ciphers: context.ciphers.filter((cipher) => !filteredContext.ciphers.includes(cipher)),
        };
      },
      sections: negate.sections.concat([
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ]),
    };
  } else if (isAnd(node)) {
    const left = handleNode(node.left);
    const right = handleNode(node.right);
    return {
      filter: (context) => {
        const leftFilteredContext = left.filter(context);
        return right.filter(leftFilteredContext);
      },
      sections: left.sections.concat(right.sections).concat([
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ]),
    };
  } else if (isParentheses(node)) {
    const inner = handleNode(node.inner);
    return {
      filter: inner.filter,
      sections: inner.sections.concat([
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ]),
    };
  } else if (isTerm(node)) {
    return {
      filter: (context) => {
        const filteredCipherIds = context.index.search(node.value).map((r) => r.ref);
        return {
          ...context,
          ciphers: context.ciphers.filter((cipher) => filteredCipherIds.includes(cipher.id)),
        };
      },
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isFieldTerm(node)) {
    return {
      filter: (context) => {
        const filteredCipherIds = context.index
          .search(`${node.field}:${node.term}`)
          .map((r) => r.ref);
        return {
          ...context,
          ciphers: context.ciphers.filter((cipher) => filteredCipherIds.includes(cipher.id)),
        };
      },
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isHasAttachment(node)) {
    return {
      filter: (context) => ({
        ...context,
        ciphers: context.ciphers.filter(
          (cipher) => !!cipher.attachments && cipher.attachments.length > 0,
        ),
      }),
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isHasUri(node)) {
    return {
      filter: (context) => ({
        ...context,
        ciphers: context.ciphers.filter(
          (cipher) => !!cipher?.login?.uris && cipher.login.uris.length > 0,
        ),
      }),
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isHasFolder(node)) {
    return {
      filter: (context) => ({
        ...context,
        ciphers: context.ciphers.filter((cipher) => !!cipher.folderId),
      }),
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isInFolder(node)) {
    // TODO: There is currently no folder name information in a cipher view
    return {
      filter: (context) => {
        const folderId = context.folders.find((folder) => folder.name === node.folder)?.id;
        return {
          ...context,
          ciphers:
            folderId == null
              ? // Folder not found, no matches
                // TODO: should this be an error?
                []
              : context.ciphers.filter((cipher) => cipher.folderId === folderId),
        };
      },
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isInCollection(node)) {
    // TODO: There is currently no collection name information in a cipher view
    return {
      filter: (context) => {
        const collectionId = context.collections.find(
          (collection) => collection.name === node.collection,
        )?.id;
        return {
          ...context,
          ciphers:
            collectionId == null
              ? // Collection not found, no matches
                // TODO: should this be an error?
                []
              : context.ciphers.filter((cipher) => cipher.collectionIds.includes(collectionId)),
        };
      },
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isInOrg(node)) {
    // TODO: There is currently no organization name information in a cipher view
    return {
      filter: (context) => {
        const organizationId = context.organizations.find((org) => org.name === node.org)?.id;
        return {
          ...context,
          ciphers:
            organizationId == null
              ? // Organization not found, no matches
                // TODO: This should be an error
                []
              : context.ciphers.filter((cipher) => cipher.organizationId === organizationId),
        };
      },
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else if (isIsFavorite(node)) {
    return {
      filter: (context) => ({
        ...context,
        ciphers: context.ciphers.filter((cipher) => cipher.favorite),
      }),
      sections: [
        {
          start: node.start,
          end: node.end,
          type: node.type,
        },
      ],
    };
  } else {
    throw new Error("Invalid node\n" + JSON.stringify(node, null, 2));
  }
}
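Reviewer note: an illustration of the return shape (the values below are hypothetical; exact offsets come from the grammar's start/end bookkeeping).

import { parseQuery } from "./parse";

const instructions = parseQuery("is:favorite AND github");
// instructions.sections lists where each clause sits in the input, e.g.
// [{ start: 0, end: 11, type: "isFavorite" }, { start: 16, end: 22, type: "term" }, ...]
// instructions.filter takes a SearchContext and returns it with ciphers narrowed;
// FilterService supplies that context (ciphers, folders, collections, orgs, lunr index).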
libs/common/src/vault/search/query.types.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import lunr from "lunr";

import { CollectionView } from "@bitwarden/admin-console/common";

import { Organization } from "../../admin-console/models/domain/organization";
import { CipherView } from "../models/view/cipher.view";
import { FolderView } from "../models/view/folder.view";

import { AstNodeType } from "./ast";

export type ProcessInstructions = {
  filter: (context: SearchContext) => SearchContext;
  sections: { start: number; end: number; type: AstNodeType }[];
};

export type SearchContext = {
  ciphers: CipherView[];
  folders: FolderView[];
  collections: CollectionView[];
  organizations: Organization[];
  index: lunr.Index;
};
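Reviewer note: a minimal hand-written ProcessInstructions, to make the contract concrete. This is a sketch; parseQuery normally builds these from the AST, and the section offsets here are stand-ins.

import { ProcessInstructions, SearchContext } from "./query.types";

const onlyFavorites: ProcessInstructions = {
  // Narrow the context to favorite ciphers, leaving everything else untouched.
  filter: (context: SearchContext) => ({
    ...context,
    ciphers: context.ciphers.filter((cipher) => cipher.favorite),
  }),
  // One highlight section covering a hypothetical "is:favorite" at offset 0.
  sections: [{ start: 0, end: 11, type: "isFavorite" }],
};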