mirror of https://github.com/bitwarden/browser synced 2026-02-10 05:30:01 +00:00

revert grammar updates

we don't want the contains: operator, and would rather default to wildcards on either side

adhere to new single eslint config file.
Matt Gibson
2025-03-11 08:08:52 -07:00
parent c3c1383af6
commit a137c99797
4 changed files with 56 additions and 83 deletions
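
The commit message above ("would rather default to wildcards on either side") amounts to treating every plain search term as a substring match. A rough TypeScript sketch of that behavior, not part of this commit; the helper name and the field list are hypothetical:

    type ParsedTerm = { type: "term"; value: string };

    // Hypothetical helper: a naked term behaves like *value*, i.e. a
    // case-insensitive substring match, so no explicit contains: operator is needed.
    function fieldsMatchTerm(fields: string[], term: ParsedTerm): boolean {
      const needle = term.value.toLowerCase();
      return fields.some((field) => field.toLowerCase().includes(needle));
    }

    // "git" matches "github.com" the same way contains:git previously would have.
    fieldsMatchTerm(["github.com", "My Login"], { type: "term", value: "git" });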

View File

@@ -362,6 +362,7 @@ export default tseslint.config(
"libs/components/tailwind.config.js",
"scripts/*.js",
"**/*grammar.ts",
],
},
);
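
For orientation, a minimal flat-config sketch of the pattern this hunk extends (not part of the commit); the surrounding keys are outside the diff context, so the ignores placement and the file name are assumptions:

    // eslint.config.mjs (name assumed): generated grammar output is excluded once,
    // globally, instead of through per-directory ignore files like the one deleted below.
    import tseslint from "typescript-eslint";

    export default tseslint.config({
      ignores: [
        "libs/components/tailwind.config.js",
        "scripts/*.js",
        "**/*grammar.ts", // generated nearley grammar files
      ],
    });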

View File

@@ -1,5 +0,0 @@
{
"ignorePatterns": [
"bitwarden-query-grammar.ts" // generated grammar file
]
}

View File

@@ -17,7 +17,6 @@ let lexer = moo.compile({
func_is: 'is:',
// function parameter separator
access: ':',
contains_string: /contains:(?:"(?:\\["\\]|[^\n"\\])*"|(?:\\["\\]|[^\s\(\):])+)/,
// string match, includes quoted strings with escaped quotes and backslashes
string: /(?:"(?:\\["\\]|[^\n"\\])*"|(?:\\["\\]|[^\s\(\):])+)/,
})
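
As a standalone illustration of how these moo tokens behave, here is a trimmed-down sketch (not part of the commit); only a few of the grammar's tokens are reproduced, and the WS pattern is an assumption:

    const moo = require("moo");

    // Rule order matters: the function keywords come before the generic
    // string pattern, exactly as in the lexer above.
    const lexer = moo.compile({
      WS: /[ \t]+/, // assumed whitespace pattern
      func_is: "is:",
      access: ":",
      string: /(?:"(?:\\["\\]|[^\n"\\])*"|(?:\\["\\]|[^\s\(\):])+)/,
    });

    lexer.reset('is:favorite name:"My Login"');
    for (const token of lexer) {
      console.log(token.type, JSON.stringify(token.value));
    }
    // Roughly: func_is "is:", string "favorite", WS " ", string "name",
    //          access ":", string "\"My Login\""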
@@ -25,44 +24,40 @@ let lexer = moo.compile({
@lexer lexer
search -> _ OR _ {% function(d) { return { type: 'search', d: d[1], start: d[1].start, end: d[1].end, length: d[1].length } } %}
search -> _ OR _ {% function(d) { return { type: 'search', d: d, contents: d[1], start: d[1].start, end: d[1].end, length: d[1].length } } %}
PARENTHESES -> %lparen _ OR _ %rparen {% function(d) { const start = d[0].offset; const end = d[4].offset; return { type: 'parentheses', inner: d[2], d:d, start, end, length: end - start + 1 } } %}
| EXPRESSION {% id %}
PARENTHESES -> %lparen _ OR _ %rparen {% function(d) { const start = d[0].offset; const end = d[4].offset; return { type: 'parentheses', inner: d[2], d: d, start, end, length: end - start + 1 } } %}
| TERM {% id %}
AND -> AND _ %AND _ PARENTHESES {% function(d) { return { type: 'and', left: d[0], right: d[4], d:d, start: d[0].start, end: d[4].end, length: d[4].end - d[0].start + 1 } } %}
| AND _ PARENTHESES {% function(d) { return { type: 'and', left: d[0], right: d[2], d:d, start: d[0].start, end: d[2].end, length: d[2].end - d[0].start + 1 }} %}
AND -> AND _ %AND _ PARENTHESES {% function(d) { return { type: 'and', left: d[0], right: d[4], d: d, start: d[0].start, end: d[4].end, length: d[4].end - d[0].start + 1 } } %}
| AND _ PARENTHESES {% function(d) { return { type: 'and', left: d[0], right: d[2], d: d, start: d[0].start, end: d[2].end, length: d[2].end - d[0].start + 1 }} %}
| PARENTHESES {% id %}
OR -> OR _ %OR _ AND {% function(d) { return { type: 'or', left: d[0], right: d[4], d:d, start: d[0].start, end: d[4].end, length: d[4].end - d[0].start + 1 } } %}
OR -> OR _ %OR _ AND {% function(d) { return { type: 'or', left: d[0], right: d[4], d: d, start: d[0].start, end: d[4].end, length: d[4].end - d[0].start + 1 } } %}
| AND {% id %}
EXPRESSION -> TERM
TERM ->
# naked string search term, search all fields
%string {% function(d) { const start = d[0].offset; const end = d[0].offset + d[0].value.length; return { type: 'term', value: d[0].value, d: d[0], start, end, length: d[0].value.length } } %}
# specified field search term
| TERM %access TERM {% function(d) { const start = d[0].offset; const end = d[2].offset + d[2].value.length; return { type: 'field term', field: d[0], term: d[2], d: d, start, end, length: end - start + 1 } } %}
| %string %access %string {% function(d) { const start = d[0].offset; const end = d[2].offset + d[2].value.length; return { type: 'field term', field: d[0], term: d[2], d: d, start, end, length: end - start + 1 } } %}
# only items with attachments
| %func_has "attachment" {% function(d) { const start = d[0].offset; const length = 14; return { type: 'hasAttachment', d: d, start, end: d[0].offset + length, length } } %}
# only items with URIs
| %func_has "uri" {% function(d) { const start = d[0].offset; const length = 7; return { type: 'hasUri', d:d, start, end: d[0].offset + length, length } } %}
| %func_has "uri" {% function(d) { const start = d[0].offset; const length = 7; return { type: 'hasUri', d: d, start, end: d[0].offset + length, length } } %}
# only items assigned to a folder
| %func_has "folder" {% function(d) { const start = d[0].offset; const length = 10; return { type: 'hasFolder', d:d, start, end: d[0].offset + length, length } } %}
| %func_has "folder" {% function(d) { const start = d[0].offset; const length = 10; return { type: 'hasFolder', d: d, start, end: d[0].offset + length, length } } %}
# only items assigned to a collection
| %func_has "collection" {% function(d) { const start = d[0].offset; const length = 14; return { type: 'hasCollection', d:d, start, end: d[0].offset + length, length } } %}
| %func_has "collection" {% function(d) { const start = d[0].offset; const length = 14; return { type: 'hasCollection', d: d, start, end: d[0].offset + length, length } } %}
# only items assigned to a specified folder
| %func_in "folder" %access TERM {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inFolder', folder: d[3], d:d, start, end, length: end - start } } %}
| %func_in "folder" %access %string {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inFolder', folder: d[3], d: d, start, end, length: end - start } } %}
# only items assigned to a specified collection
| %func_in "collection" %access TERM {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inCollection', collection: d[3], d:d, start, end, length: end - start + 1 } } %}
| %func_in "collection" %access %string {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inCollection', collection: d[3], d: d, start, end, length: end - start + 1 } } %}
# only items assigned to a specified organization
| %func_in "org" %access TERM {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inOrg', org: d[3], d:d, start, end, length: end - start + 1 } } %}
| %func_in "org" %access %string {% function(d) { const start = d[0].offset; const end = d[3].offset + d[3].value.length; return { type: 'inOrg', org: d[3], d: d, start, end, length: end - start + 1 } } %}
# only items marked as favorites
| %func_is "favorite" {% function(d) { const start = d[0].offset; const length = 11; return { type: 'isFavorite', d:d, start, end: d[0].offset + length, length } } %}
| %func_is "favorite" {% function(d) { const start = d[0].offset; const length = 11; return { type: 'isFavorite', d: d, start, end: d[0].offset + length, length } } %}
# Boolean NOT operator
| %NOT _ PARENTHESES {% function(d) { const start = d[0].offset; return { type: 'not', value: d[2], d:d, start, end: d[2].end, length: d[2].end - d[0].offset + 1 } } %}
TERM ->
# naked string search term, search all fields
%string {% function(d) { const start = d[0].offset; const end = d[0].offset + d[0].value.length; return { type: 'term', value: d[0].value.replace(/^"/,"").replace(/"$/,"").replace(/\"/,'"'), d: d[0], start, end, length: d[0].value.length } } %}
| %contains_string {% function(d) { const start = d[0].offset; const end = d[0].offset + d[0].value.length; return { type: 'term', value: d[0].value.replace(/^contains:"?/,"*").replace(/"?$/,"*").replace(/\"/,'"'), d: d[0], start, end, length: d[0].value.length } } %}
| %NOT _ PARENTHESES {% function(d) { const start = d[0].offset; return { type: 'not', value: d[2], d: d, start, end: d[2].end, length: d[2].end - d[0].offset + 1 } } %}
_ -> %WS:* {% function(d) {return null } %}
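
A sketch of how the compiled output of this grammar is typically driven with nearley (not part of the commit; the module path, export shape, and sample query are assumptions, since the operator tokens are defined in parts of the lexer outside this hunk):

    const nearley = require("nearley");
    // Generated from the .ne source above; exact path and export shape assumed.
    const grammar = require("./bitwarden-query-grammar");

    const parser = new nearley.Parser(nearley.Grammar.fromCompiled(grammar));
    parser.feed('is:favorite name:github'); // juxtaposition parses as an implicit AND

    // For an unambiguous parse, results[0] is the { type: 'search', ... } node
    // built by the postprocessors above.
    console.log(JSON.stringify(parser.results[0], null, 2));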

View File

@@ -9,13 +9,12 @@ declare var lparen: any;
declare var rparen: any;
declare var AND: any;
declare var OR: any;
declare var string: any;
declare var access: any;
declare var func_has: any;
declare var func_in: any;
declare var func_is: any;
declare var NOT: any;
declare var string: any;
declare var contains_string: any;
declare var WS: any;
const moo = require("moo");
@@ -35,7 +34,6 @@ let lexer = moo.compile({
func_is: "is:",
// function parameter separator
access: ":",
contains_string: /contains:(?:"(?:\\["\\]|[^\n"\\])*"|(?:\\["\\]|[^\s\(\):])+)/,
// string match, includes quoted strings with escaped quotes and backslashes
string: /(?:"(?:\\["\\]|[^\n"\\])*"|(?:\\["\\]|[^\s\(\):])+)/,
});
@@ -74,7 +72,14 @@ const grammar: Grammar = {
name: "search",
symbols: ["_", "OR", "_"],
postprocess: function (d) {
return { type: "search", d: d[1], start: d[1].start, end: d[1].end, length: d[1].length };
return {
type: "search",
d: d,
contents: d[1],
start: d[1].start,
end: d[1].end,
length: d[1].length,
};
},
},
{
@@ -92,7 +97,7 @@ const grammar: Grammar = {
return { type: "parentheses", inner: d[2], d: d, start, end, length: end - start + 1 };
},
},
{ name: "PARENTHESES", symbols: ["EXPRESSION"], postprocess: id },
{ name: "PARENTHESES", symbols: ["TERM"], postprocess: id },
{
name: "AND",
symbols: ["AND", "_", lexer.has("AND") ? { type: "AND" } : AND, "_", "PARENTHESES"],
@@ -140,10 +145,22 @@ const grammar: Grammar = {
},
},
{ name: "OR", symbols: ["AND"], postprocess: id },
{ name: "EXPRESSION", symbols: ["TERM"] },
{
name: "EXPRESSION",
symbols: ["TERM", lexer.has("access") ? { type: "access" } : access, "TERM"],
name: "TERM",
symbols: [lexer.has("string") ? { type: "string" } : string],
postprocess: function (d) {
const start = d[0].offset;
const end = d[0].offset + d[0].value.length;
return { type: "term", value: d[0].value, d: d[0], start, end, length: d[0].value.length };
},
},
{
name: "TERM",
symbols: [
lexer.has("string") ? { type: "string" } : string,
lexer.has("access") ? { type: "access" } : access,
lexer.has("string") ? { type: "string" } : string,
],
postprocess: function (d) {
const start = d[0].offset;
const end = d[2].offset + d[2].value.length;
@@ -159,7 +176,7 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "attachment" }],
postprocess: function (d) {
const start = d[0].offset;
@@ -168,7 +185,7 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "uri" }],
postprocess: function (d) {
const start = d[0].offset;
@@ -177,7 +194,7 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "folder" }],
postprocess: function (d) {
const start = d[0].offset;
@@ -186,7 +203,7 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [lexer.has("func_has") ? { type: "func_has" } : func_has, { literal: "collection" }],
postprocess: function (d) {
const start = d[0].offset;
@@ -195,12 +212,12 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [
lexer.has("func_in") ? { type: "func_in" } : func_in,
{ literal: "folder" },
lexer.has("access") ? { type: "access" } : access,
"TERM",
lexer.has("string") ? { type: "string" } : string,
],
postprocess: function (d) {
const start = d[0].offset;
@@ -209,12 +226,12 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [
lexer.has("func_in") ? { type: "func_in" } : func_in,
{ literal: "collection" },
lexer.has("access") ? { type: "access" } : access,
"TERM",
lexer.has("string") ? { type: "string" } : string,
],
postprocess: function (d) {
const start = d[0].offset;
@@ -230,12 +247,12 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [
lexer.has("func_in") ? { type: "func_in" } : func_in,
{ literal: "org" },
lexer.has("access") ? { type: "access" } : access,
"TERM",
lexer.has("string") ? { type: "string" } : string,
],
postprocess: function (d) {
const start = d[0].offset;
@@ -244,7 +261,7 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [lexer.has("func_is") ? { type: "func_is" } : func_is, { literal: "favorite" }],
postprocess: function (d) {
const start = d[0].offset;
@@ -253,7 +270,7 @@ const grammar: Grammar = {
},
},
{
name: "EXPRESSION",
name: "TERM",
symbols: [lexer.has("NOT") ? { type: "NOT" } : NOT, "_", "PARENTHESES"],
postprocess: function (d) {
const start = d[0].offset;
@@ -267,41 +284,6 @@ const grammar: Grammar = {
};
},
},
{
name: "TERM",
symbols: [lexer.has("string") ? { type: "string" } : string],
postprocess: function (d) {
const start = d[0].offset;
const end = d[0].offset + d[0].value.length;
return {
type: "term",
value: d[0].value.replace(/^"/, "").replace(/"$/, "").replace(/\"/, '"'),
d: d[0],
start,
end,
length: d[0].value.length,
};
},
},
{
name: "TERM",
symbols: [lexer.has("contains_string") ? { type: "contains_string" } : contains_string],
postprocess: function (d) {
const start = d[0].offset;
const end = d[0].offset + d[0].value.length;
return {
type: "term",
value: d[0].value
.replace(/^contains:"?/, "*")
.replace(/"?$/, "*")
.replace(/\"/, '"'),
d: d[0],
start,
end,
length: d[0].value.length,
};
},
},
{ name: "_$ebnf$1", symbols: [] },
{
name: "_$ebnf$1",