From 200c75c9258fbcbd00522e7d1601f459d3325908 Mon Sep 17 00:00:00 2001 From: liuyi Date: Mon, 16 Oct 2023 17:54:25 +0800 Subject: [PATCH] test: accurate description --- test/parser/hive/suggestion/tokenSuggestion.test.ts | 2 +- test/parser/spark/suggestion/tokenSuggestion.test.ts | 2 +- test/parser/spark/syntax/addStatement.test.ts | 2 +- test/parser/spark/syntax/alert.test.ts | 2 +- test/parser/spark/syntax/analyzeTableStatement.test.ts | 2 +- test/parser/spark/syntax/cacheStatement.test.ts | 2 +- test/parser/spark/syntax/create.test.ts | 2 +- test/parser/spark/syntax/describeStatement.test.ts | 2 +- test/parser/spark/syntax/drop.test.ts | 2 +- test/parser/spark/syntax/kwMultipleValues.test.ts | 2 +- test/parser/spark/syntax/listStatement.test.ts | 2 +- test/parser/spark/syntax/loadStatement.test.ts | 2 +- test/parser/spark/syntax/refreshStatement.test.ts | 2 +- test/parser/spark/syntax/resetStatement.test.ts | 2 +- test/parser/spark/syntax/selectStatement.test.ts | 2 +- test/parser/spark/syntax/setStatement.test.ts | 2 +- test/parser/spark/syntax/showStatement.test.ts | 2 +- test/parser/spark/syntax/table.test.ts | 2 +- test/parser/spark/syntax/useDatabase.test.ts | 2 +- 19 files changed, 19 insertions(+), 19 deletions(-) diff --git a/test/parser/hive/suggestion/tokenSuggestion.test.ts b/test/parser/hive/suggestion/tokenSuggestion.test.ts index 66110765..7b7782d5 100644 --- a/test/parser/hive/suggestion/tokenSuggestion.test.ts +++ b/test/parser/hive/suggestion/tokenSuggestion.test.ts @@ -5,7 +5,7 @@ import HiveSQL from '../../../../src/parser/hive'; const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8'); -describe('Hive SQL Syntax Suggestion', () => { +describe('Hive SQL Token Suggestion', () => { const parser = new HiveSQL(); test('After ALTER', () => { diff --git a/test/parser/spark/suggestion/tokenSuggestion.test.ts b/test/parser/spark/suggestion/tokenSuggestion.test.ts index 0b6afed8..d18709f5 100644 --- 
a/test/parser/spark/suggestion/tokenSuggestion.test.ts +++ b/test/parser/spark/suggestion/tokenSuggestion.test.ts @@ -5,7 +5,7 @@ import SparkSQL from '../../../../src/parser/spark'; const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8'); -describe('Spark SQL Syntax Suggestion', () => { +describe('Spark SQL Token Suggestion', () => { const parser = new SparkSQL(); test('After ALTER', () => { diff --git a/test/parser/spark/syntax/addStatement.test.ts b/test/parser/spark/syntax/addStatement.test.ts index a7ed31a3..5209fdc9 100644 --- a/test/parser/spark/syntax/addStatement.test.ts +++ b/test/parser/spark/syntax/addStatement.test.ts @@ -7,7 +7,7 @@ const features = { add: readSQL(__dirname, 'add.sql'), }; -describe('Spark add Syntax Tests', () => { +describe('Spark Add Syntax Tests', () => { features.add.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/alert.test.ts b/test/parser/spark/syntax/alert.test.ts index c0c7a29e..2799ad43 100644 --- a/test/parser/spark/syntax/alert.test.ts +++ b/test/parser/spark/syntax/alert.test.ts @@ -9,7 +9,7 @@ const features = { alertView: readSQL(__dirname, 'alertView.sql'), }; -describe('SparkSQL Insert Syntax Tests', () => { +describe('SparkSQL Alter Syntax Tests', () => { Object.keys(features).forEach((key) => { features[key].forEach((sql) => { it(sql, () => { diff --git a/test/parser/spark/syntax/analyzeTableStatement.test.ts b/test/parser/spark/syntax/analyzeTableStatement.test.ts index 88d8ce58..8d20759a 100644 --- a/test/parser/spark/syntax/analyzeTableStatement.test.ts +++ b/test/parser/spark/syntax/analyzeTableStatement.test.ts @@ -7,7 +7,7 @@ const features = { analyzeTable: readSQL(__dirname, 'analyzeTable.sql'), }; -describe('Spark analyzeTable Syntax Tests', () => { +describe('Spark Analyze Table Syntax Tests', () => { features.analyzeTable.forEach((itemSql) => { it(itemSql, () => {
expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/cacheStatement.test.ts b/test/parser/spark/syntax/cacheStatement.test.ts index 989d05d3..a5e88061 100644 --- a/test/parser/spark/syntax/cacheStatement.test.ts +++ b/test/parser/spark/syntax/cacheStatement.test.ts @@ -7,7 +7,7 @@ const features = { cache: readSQL(__dirname, 'cache.sql'), }; -describe('Spark cache Syntax Tests', () => { +describe('Spark Cache Syntax Tests', () => { features.cache.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/create.test.ts b/test/parser/spark/syntax/create.test.ts index da58b5d4..7cf32515 100644 --- a/test/parser/spark/syntax/create.test.ts +++ b/test/parser/spark/syntax/create.test.ts @@ -9,7 +9,7 @@ const features = { createView: readSQL(__dirname, 'createView.sql'), }; -describe('SparkSQL Insert Syntax Tests', () => { +describe('SparkSQL Create Syntax Tests', () => { Object.keys(features).forEach((key) => { features[key].forEach((sql) => { it(sql, () => { diff --git a/test/parser/spark/syntax/describeStatement.test.ts b/test/parser/spark/syntax/describeStatement.test.ts index db050d4a..85bd5341 100644 --- a/test/parser/spark/syntax/describeStatement.test.ts +++ b/test/parser/spark/syntax/describeStatement.test.ts @@ -7,7 +7,7 @@ const features = { describe: readSQL(__dirname, 'describe.sql'), }; -describe('Spark describe Syntax Tests', () => { +describe('Spark Describe Syntax Tests', () => { features.describe.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/drop.test.ts b/test/parser/spark/syntax/drop.test.ts index 399b96d1..a0209dcf 100644 --- a/test/parser/spark/syntax/drop.test.ts +++ b/test/parser/spark/syntax/drop.test.ts @@ -10,7 +10,7 @@ const features = { dropView: readSQL(__dirname, 'dropView.sql'), }; -describe('SparkSQL Insert Syntax Tests', () => { +describe('SparkSQL 
Drop Syntax Tests', () => { Object.keys(features).forEach((key) => { features[key].forEach((sql) => { it(sql, () => { diff --git a/test/parser/spark/syntax/kwMultipleValues.test.ts b/test/parser/spark/syntax/kwMultipleValues.test.ts index 584d9a15..d3fef2ef 100644 --- a/test/parser/spark/syntax/kwMultipleValues.test.ts +++ b/test/parser/spark/syntax/kwMultipleValues.test.ts @@ -12,7 +12,7 @@ const features = { kwMultipleValues: readSQL(__dirname, 'kwMultipleValues.sql'), }; -describe('SparkSQL Insert Syntax Tests', () => { +describe('SparkSQL Keyword Has Multiple Values Syntax Tests', () => { Object.keys(features).forEach((key) => { features[key].forEach((sql) => { it(sql, () => { diff --git a/test/parser/spark/syntax/listStatement.test.ts b/test/parser/spark/syntax/listStatement.test.ts index 67f8381c..515651bf 100644 --- a/test/parser/spark/syntax/listStatement.test.ts +++ b/test/parser/spark/syntax/listStatement.test.ts @@ -7,7 +7,7 @@ const features = { list: readSQL(__dirname, 'list.sql'), }; -describe('Spark list Syntax Tests', () => { +describe('Spark List Syntax Tests', () => { features.list.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/loadStatement.test.ts b/test/parser/spark/syntax/loadStatement.test.ts index b24244be..15040b7e 100644 --- a/test/parser/spark/syntax/loadStatement.test.ts +++ b/test/parser/spark/syntax/loadStatement.test.ts @@ -7,7 +7,7 @@ const features = { loadData: readSQL(__dirname, 'loadData.sql'), }; -describe('SparkSQL Insert Syntax Tests', () => { +describe('SparkSQL Load Syntax Tests', () => { Object.keys(features).forEach((key) => { features[key].forEach((sql) => { it(sql, () => { diff --git a/test/parser/spark/syntax/refreshStatement.test.ts b/test/parser/spark/syntax/refreshStatement.test.ts index 4f95cc79..854d87a7 100644 --- a/test/parser/spark/syntax/refreshStatement.test.ts +++ b/test/parser/spark/syntax/refreshStatement.test.ts @@ -7,7 +7,7 
@@ const features = { refresh: readSQL(__dirname, 'refresh.sql'), }; -describe('Spark refresh Syntax Tests', () => { +describe('Spark Refresh Syntax Tests', () => { features.refresh.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/resetStatement.test.ts b/test/parser/spark/syntax/resetStatement.test.ts index 5630625f..a778abca 100644 --- a/test/parser/spark/syntax/resetStatement.test.ts +++ b/test/parser/spark/syntax/resetStatement.test.ts @@ -7,7 +7,7 @@ const features = { reset: readSQL(__dirname, 'reset.sql'), }; -describe('Spark reset Syntax Tests', () => { +describe('Spark Reset Syntax Tests', () => { features.reset.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/selectStatement.test.ts b/test/parser/spark/syntax/selectStatement.test.ts index a19166e8..d39788ec 100644 --- a/test/parser/spark/syntax/selectStatement.test.ts +++ b/test/parser/spark/syntax/selectStatement.test.ts @@ -31,7 +31,7 @@ const features = { selectDistributeBy: readSQL(__dirname, 'selectDistributeBy.sql'), selectClusterBy: readSQL(__dirname, 'selectClusterBy.sql'), }; -describe('Spark select Syntax Tests', () => { +describe('Spark Select Syntax Tests', () => { features.selectAggregateFn.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/setStatement.test.ts b/test/parser/spark/syntax/setStatement.test.ts index ce330db8..d97a8e31 100644 --- a/test/parser/spark/syntax/setStatement.test.ts +++ b/test/parser/spark/syntax/setStatement.test.ts @@ -7,7 +7,7 @@ const features = { set: readSQL(__dirname, 'set.sql'), }; -describe('Spark set Syntax Tests', () => { +describe('Spark Set Syntax Tests', () => { features.set.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git 
a/test/parser/spark/syntax/showStatement.test.ts b/test/parser/spark/syntax/showStatement.test.ts index 2c392e5f..b6b6cefa 100644 --- a/test/parser/spark/syntax/showStatement.test.ts +++ b/test/parser/spark/syntax/showStatement.test.ts @@ -7,7 +7,7 @@ const features = { show: readSQL(__dirname, 'show.sql'), }; -describe('Spark show Syntax Tests', () => { +describe('Spark Show Syntax Tests', () => { features.show.forEach((itemSql) => { it(itemSql, () => { expect(parser.validate(itemSql).length).toBe(0); diff --git a/test/parser/spark/syntax/table.test.ts b/test/parser/spark/syntax/table.test.ts index 513d8a4f..0de8b302 100644 --- a/test/parser/spark/syntax/table.test.ts +++ b/test/parser/spark/syntax/table.test.ts @@ -12,7 +12,7 @@ const features = { truncateTable: readSQL(__dirname, 'truncateTable.sql'), }; -describe('SparkSQL Insert Syntax Tests', () => { +describe('SparkSQL About Table Syntax Tests', () => { Object.keys(features).forEach((key) => { features[key].forEach((sql) => { it(sql, () => { diff --git a/test/parser/spark/syntax/useDatabase.test.ts b/test/parser/spark/syntax/useDatabase.test.ts index 23c72336..717d06c3 100644 --- a/test/parser/spark/syntax/useDatabase.test.ts +++ b/test/parser/spark/syntax/useDatabase.test.ts @@ -7,7 +7,7 @@ const features = { useDatabase: readSQL(__dirname, 'useDatabase.sql'), }; -describe('SparkSQL Insert Syntax Tests', () => { +describe('SparkSQL Use Database Syntax Tests', () => { Object.keys(features).forEach((key) => { features[key].forEach((sql) => { it(sql, () => {