[regression-test](framework) disable defining global variable in test… #45840

Merged (2 commits) on Dec 24, 2024
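This PR hardens the regression-test framework so that test suites can no longer define global variables. In Groovy, assigning to an undeclared name inside a script does not create a local variable: the assignment is routed through `setProperty` into the script's `Binding`, where the value leaks across suites and can cause flaky, order-dependent failures. The framework change below makes such assignments throw, and the rest of the diff adds `def` to every suite variable that previously relied on the binding (the first hunk appears to be an updated expected-output file).

A minimal standalone sketch (not framework code) of the behavior being banned:

```groovy
// Without `def`, an assignment in a Groovy script lands in the Binding,
// effectively a global that outlives the statement that wrote it.
def shell = new groovy.lang.GroovyShell()
def script = shell.parse("result = 42")       // note: no `def`
script.run()
assert script.binding.hasVariable("result")   // "result" leaked into the Binding
```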
@@ -11,6 +11,9 @@
-- !sc --
3

-- !sc --
3 2017-10-01 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 1 32 20 2

-- !sc --
4 2017-10-01 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 1 32 20 2

@@ -34,7 +34,14 @@ class GroovyFileSource implements ScriptSource {

@Override
SuiteScript toScript(ScriptContext scriptContext, GroovyShell shell) {
- SuiteScript suiteScript = shell.parse(file) as SuiteScript
+ def setPropertyFunction = '''
+ \nvoid setProperty(String key, value) {
+     throw new IllegalArgumentException("defined global variables in script are not allowed: ${key}")
+ }
+ '''
+ def scriptContent = file.text
+ scriptContent = scriptContent + setPropertyFunction
+ SuiteScript suiteScript = shell.parse(scriptContent, file.getName()) as SuiteScript
suiteScript.init(scriptContext)
return suiteScript
}
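The guard works by appending a `setProperty(String, value)` override to each suite's source before parsing. Groovy dispatches assignments to undeclared names through the script's `setProperty`, so they now throw instead of silently landing in the `Binding`; `shell.parse(scriptContent, file.getName())` keeps the original file name so error messages still point at the real suite file. A self-contained sketch of the same trick (names assumed, not the framework's API):

```groovy
// Appending a setProperty override makes every undeclared assignment throw.
// ${key} stays literal inside the triple-single-quoted string and is only
// interpolated when the generated script runs.
def guard = '''
void setProperty(String key, value) {
    throw new IllegalArgumentException("defined global variables in script are not allowed: ${key}")
}
'''
def shell = new groovy.lang.GroovyShell()
def script = shell.parse('x = 1' + guard)     // "x" is undeclared, a would-be global
try {
    script.run()
    assert false, 'expected the guard to reject the global assignment'
} catch (IllegalArgumentException e) {
    println e.message   // defined global variables in script are not allowed: x
}
```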
@@ -81,7 +81,7 @@ suite("test_backup_restore_db", "backup_restore") {
syncer.waitAllRestoreFinish(dbName)

for (def tableName in tables) {
- result = sql "SELECT * FROM ${dbName}.${tableName}"
+ def result = sql "SELECT * FROM ${dbName}.${tableName}"
assertEquals(result.size(), numRows);
sql "DROP TABLE ${dbName}.${tableName} FORCE"
}
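The suite-side fix is the same throughout: declare the variable with `def`, which makes it a scope-local instead of a binding write, so the guard never fires. Sketch:

```groovy
def result = sql "SELECT 1"   // local: allowed under the new rule
// result = sql "SELECT 1"    // undeclared: now throws IllegalArgumentException
```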
@@ -88,7 +88,7 @@ suite("test_backup_restore_exclude", "backup_restore") {

qt_select "SELECT * FROM ${dbName}.${backupExcludeTable} ORDER BY id"
for (def tableName in tables) {
- result = sql "SELECT * FROM ${dbName}.${tableName}"
+ def result = sql "SELECT * FROM ${dbName}.${tableName}"
assertEquals(result.size(), numRows);
sql "DROP TABLE ${dbName}.${tableName} FORCE"
}
@@ -84,7 +84,7 @@ suite("test_backup_restore_multi_tables", "backup_restore") {
syncer.waitAllRestoreFinish(dbName)

for (def tableName in tables) {
- result = sql "SELECT * FROM ${dbName}.${tableName}"
+ def result = sql "SELECT * FROM ${dbName}.${tableName}"
assertEquals(result.size(), numRows);
sql "DROP TABLE ${dbName}.${tableName} FORCE"
}
@@ -86,7 +86,7 @@ suite("test_backup_restore_multi_tables_overwrite", "backup_restore") {

qt_select "SELECT * FROM ${dbName}.${firstTableName} ORDER BY id"
for (def tableName in tables) {
- result = sql "SELECT * FROM ${dbName}.${tableName}"
+ def result = sql "SELECT * FROM ${dbName}.${tableName}"
assertEquals(result.size(), numRows);
sql "DROP TABLE ${dbName}.${tableName} FORCE"
}
@@ -57,13 +57,13 @@ suite("test_backup_restore_mv", "backup_restore") {
"""

def alter_finished = false
- for (i = 0; i < 60 && !alter_finished; i++) {
+ for (int i = 0; i < 60 && !alter_finished; i++) {
result = sql_return_maparray "SHOW ALTER TABLE MATERIALIZED VIEW FROM ${dbName}"
logger.info("result: ${result}")
- for (int i = 0; i < result.size(); i++) {
-     if (result[i]['TableName'] == "${tableName}" &&
-         result[i]['RollupIndexName'] == "${mvName}" &&
-         result[i]['State'] == 'FINISHED') {
+ for (int j = 0; j < result.size(); j++) {
+     if (result[j]['TableName'] == "${tableName}" &&
+         result[j]['RollupIndexName'] == "${mvName}" &&
+         result[j]['State'] == 'FINISHED') {
alter_finished = true
break
}
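Besides the `def` cleanup, this hunk fixes latent shadowing: the inner loop reused `i` for a different purpose. Once the outer counter becomes a declared local (`int i`), Groovy rejects redeclaring `i` in the nested scope, hence the rename to `j`. A sketch of the error the rename avoids (assumed values):

```groovy
for (int i = 0; i < 2; i++) {
    // for (int i = 0; i < 3; i++) { }   // compile error: the current scope already contains a variable named i
}
```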
@@ -84,7 +84,7 @@ suite("test_restore_mix_exists_and_new_table", "backup_restore") {
syncer.waitAllRestoreFinish(dbName)

for (def tableName in tables) {
- result = sql "SELECT * FROM ${dbName}.${tableName}"
+ def result = sql "SELECT * FROM ${dbName}.${tableName}"
assertEquals(result.size(), numRows);
sql "DROP TABLE ${dbName}.${tableName} FORCE"
}
@@ -78,7 +78,7 @@ suite("test_mow_ingest_binlog") {
}

target_sql " sync "
- res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
+ def res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
assertEquals(res.size(), insert_num)


@@ -152,7 +152,7 @@ suite("test_ingest_binlog_index") {
}

target_sql " sync "
- res = target_sql """SELECT * FROM ${tableName}"""
+ def res = target_sql """SELECT * FROM ${tableName}"""
if (tableName.contains("mow")) {
assertEquals(res.size(), insert_data(tableName).size() / 2 as Integer)
} else {
@@ -77,7 +77,7 @@ suite("test_ingest_binlog") {
}

target_sql " sync "
- res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
+ def res = target_sql """SELECT * FROM ${tableName} WHERE test=${test_num}"""
assertEquals(res.size(), insert_num)


@@ -22,6 +22,7 @@ suite("test_disable_revoke_admin_auth", "cloud_auth") {
sql """create user ${user} identified by 'Cloud12345' default role 'admin'"""

sql "sync"
+ def result

try {
result = sql """revoke 'admin' from 'admin'""";
@@ -120,7 +120,7 @@ suite("test_full_compaction") {
for (def tablet in tablets) {
String tablet_id = tablet.TabletId
backend_id = tablet.BackendId
- times = 1
+ def times = 1

do{
(code, out, err) = be_run_full_compaction(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id), tablet_id)
@@ -68,6 +68,6 @@ suite("test_trim_new_parameters") {
rtrim = sql "select rtrim('bcTTTabcabc','abc')"
assertEquals(rtrim[0][0], 'bcTTT')

- trim_one = sql "select trim('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaabcTTTabcabcaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa','a')"
+ def trim_one = sql "select trim('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaabcTTTabcabcaaaaaaaaaaaaaaaaaaaaaaaaaabaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa','a')"
assertEquals(trim_one[0][0], 'baaaaaaaaaaabcTTTabcabcaaaaaaaaaaaaaaaaaaaaaaaaaab')
}
@@ -336,7 +336,7 @@ suite("test_create_table_properties") {
)
"""
sql """ insert into ${bool_tab} values (1, '2020-12-12 12:12:12', '2000-01-01 12:12:12.123456'), (0, '20201212 121212', '2000-01-01'), (1, '20201212121212', '2000-01-01'), (0, 'AaA', '2000-01-01') """
- result = sql "show partitions from ${bool_tab}"
+ def result = sql "show partitions from ${bool_tab}"
logger.info("${result}")
assertEquals(result.size(), 2)

6 changes: 3 additions & 3 deletions regression-test/suites/export/test_array_export.groovy
@@ -136,7 +136,7 @@ suite("test_array_export", "export") {
def check_export_result = {checklabel->
max_try_milli_secs = 15000
while(max_try_milli_secs) {
- result = sql "show export where label='${checklabel}'"
+ def result = sql "show export where label='${checklabel}'"
if(result[0][2] == "FINISHED") {
break
} else {
@@ -171,7 +171,7 @@
} else {
throw new IllegalStateException("""${outFilePath} already exists! """)
}
- result = sql """
+ def result = sql """
SELECT * FROM ${tableName} t ORDER BY k1 INTO OUTFILE "file://${outFile}/";
"""
def url = result[0][3]
@@ -203,7 +203,7 @@
path.delete();
}
if (csvFiles != "") {
- cmd = "rm -rf ${csvFiles}"
+ def cmd = "rm -rf ${csvFiles}"
sshExec("root", urlHost, cmd)
}
}
4 changes: 2 additions & 2 deletions regression-test/suites/export/test_map_export.groovy
@@ -98,7 +98,7 @@ suite("test_map_export", "export") {
def result = sql """
SELECT * FROM ${testTable} ORDER BY id INTO OUTFILE "file://${outFile}/";
"""
- url = result[0][3]
+ def url = result[0][3]
urlHost = url.substring(8, url.indexOf("${outFile}"))
if (backends.size() > 1) {
// custer will scp files
@@ -146,7 +146,7 @@
path.delete();
}
if (csvFiles != "") {
- cmd = "rm -rf ${csvFiles}"
+ def cmd = "rm -rf ${csvFiles}"
sshExec("root", urlHost, cmd)
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/export/test_struct_export.groovy
@@ -151,7 +151,7 @@ suite("test_struct_export", "export") {
path.delete();
}
if (csvFiles != "") {
- cmd = "rm -rf ${csvFiles}"
+ def cmd = "rm -rf ${csvFiles}"
sshExec("root", urlHost, cmd)
}
}
@@ -45,7 +45,7 @@ suite("test_outfile_empty_data", "external,hive,tvf,external_docker") {
String ak = getS3AK()
String sk = getS3SK()
String s3_endpoint = getS3Endpoint()
- String region = region = getS3Region()
+ String region = getS3Region()
String bucket = context.config.otherConfigs.get("s3BucketName");

// broker
@@ -67,8 +67,8 @@
// select ... into outfile ...
def uuid = UUID.randomUUID().toString()

- hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
- uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"
+ def hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
+ def uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"

def res = sql """
SELECT * FROM ${export_table_name} t ORDER BY user_id
@@ -87,8 +87,8 @@
// select ... into outfile ...
def uuid = UUID.randomUUID().toString()

- hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
- uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"
+ def hdfs_outfile_path = "/user/doris/tmp_data/${uuid}"
+ def uri = "${defaultFS}" + "${hdfs_outfile_path}/exp_"

def res = sql """
SELECT * FROM ${export_table_name} t ORDER BY user_id
@@ -106,8 +106,8 @@

def outfile_to_S3_directly = {
// select ... into outfile ...
- s3_outfile_path = "${bucket}/outfile/csv/test-outfile-empty/"
- uri = "s3://${s3_outfile_path}/exp_"
+ def s3_outfile_path = "${bucket}/outfile/csv/test-outfile-empty/"
+ def uri = "s3://${s3_outfile_path}/exp_"

def res = sql """
SELECT * FROM ${export_table_name} t ORDER BY user_id
@@ -129,7 +129,7 @@ suite("test_outfile_expr_generate_col_name", "p0") {
"s3.access_key" = "${ak}"
);
"""
- outfile_url = res[0][3]
+ def outfile_url = res[0][3]

check_outfile_data(outfile_url, outfile_format)
check_outfile_column_name(outfile_url, outfile_format)
@@ -150,7 +150,7 @@
"s3.access_key" = "${ak}"
);
"""
- outfile_url = res[0][3]
+ def outfile_url = res[0][3]

check_outfile_data(outfile_url, outfile_format)
check_outfile_column_name(outfile_url, outfile_format)
@@ -171,7 +171,7 @@
"s3.access_key" = "${ak}"
);
"""
- outfile_url = res[0][3]
+ def outfile_url = res[0][3]

check_outfile_data(outfile_url, outfile_format)
check_outfile_column_name(outfile_url, outfile_format)
@@ -211,7 +211,7 @@
"s3.access_key" = "${ak}"
);
"""
- outfile_url = res[0][3]
+ def outfile_url = res[0][3]

check_outfile_data(outfile_url, outfile_format)
check_outfile_column_name(outfile_url, outfile_format)
@@ -235,7 +235,7 @@
"s3.access_key" = "${ak}"
);
"""
- outfile_url = res[0][3]
+ def outfile_url = res[0][3]

check_outfile_data(outfile_url, outfile_format)
check_outfile_column_name(outfile_url, outfile_format)
8 changes: 4 additions & 4 deletions regression-test/suites/export_p0/test_export_basic.groovy
@@ -414,11 +414,11 @@ suite("test_export_basic", "p0") {
}

// 5. test order by and limit clause
- uuid1 = UUID.randomUUID().toString()
+ def uuid1 = UUID.randomUUID().toString()
outFilePath = """${outfile_path_prefix}_${uuid1}"""
- label1 = "label_${uuid1}"
- uuid2 = UUID.randomUUID().toString()
- label2 = "label_${uuid2}"
+ def label1 = "label_${uuid1}"
+ def uuid2 = UUID.randomUUID().toString()
+ def label2 = "label_${uuid2}"
try {
// check export path
check_path_exists.call("${outFilePath}")
@@ -42,7 +42,7 @@ suite("test_outfile_file_suffix", "p0") {

def outFilePath = """s3://${bucket}/outfile_"""
def csv_suffix_result = { file_suffix, file_format ->
- result = sql """
+ def result = sql """
select * from ${table_name}
into outfile "${outFilePath}"
FORMAT AS ${file_format}
@@ -18,7 +18,7 @@
suite("test_show_create_database", "p0,external,hive,external_docker,external_docker_hive") {

sql """create database if not exists db_test"""
- result = sql """show create database db_test"""
+ def result = sql """show create database db_test"""
assertEquals(result.size(), 1)
assertEquals(result[0][1], "CREATE DATABASE `db_test`")

@@ -20,15 +20,15 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc

String enabled = context.config.otherConfigs.get("enableHiveTest")
if (enabled != null && enabled.equalsIgnoreCase("true")) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
- hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
- externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
+ def hdfs_port = context.config.otherConfigs.get("hive2HdfsPort")
+ def externalEnvIp = context.config.otherConfigs.get("externalEnvIp")

def test_dir = "user/doris/preinstalled_data/data_case/autoinc"

- def load_from_hdfs = {columns, testTable, label, testFile, format, brokerName, hdfsUser, hdfsPasswd ->
+ def load_from_hdfs = {columns, testTable, label, testFile, format ->
def result1= sql """ LOAD LABEL ${label} (
DATA INFILE("hdfs://${externalEnvIp}:${hdfs_port}/${test_dir}/${testFile}")
INTO TABLE ${testTable}
@@ -46,9 +46,9 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
}

def wait_for_load_result = {checklabel, testTable ->
- max_try_milli_secs = 10000
+ def max_try_milli_secs = 10000
while(max_try_milli_secs) {
- result = sql "show load where label = '${checklabel}'"
+ def result = sql "show load where label = '${checklabel}'"
if(result[0][2] == "FINISHED") {
break
} else {
@@ -61,7 +61,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
}
}

- table = "test_autoinc_broker_load"
+ def table = "test_autoinc_broker_load"
sql "drop table if exists ${table}"
sql """ CREATE TABLE IF NOT EXISTS `${table}` (
`id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "用户 ID",
Expand All @@ -78,7 +78,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
"enable_unique_key_merge_on_write" = "true") """

def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
- load_from_hdfs("name, value", table, test_load_label, "auto_inc_basic.csv", "csv", brokerName, hdfsUser, hdfsPasswd)
+ load_from_hdfs("name, value", table, test_load_label, "auto_inc_basic.csv", "csv")
wait_for_load_result(test_load_label, table)
qt_sql "select * from ${table};"
sql """ insert into ${table} values(0, "Bob", 123), (2, "Tom", 323), (4, "Carter", 523);"""
@@ -102,7 +102,7 @@ suite("test_autoinc_broker_load", "p0,external,hive,external_docker,external_doc
"storage_format" = "V2",
"enable_unique_key_merge_on_write" = "true");"""
test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
- load_from_hdfs("id, name, value", table, test_load_label, "auto_inc_with_null.csv", "csv", brokerName, hdfsUser, hdfsPasswd)
+ load_from_hdfs("id, name, value", table, test_load_label, "auto_inc_with_null.csv", "csv")
wait_for_load_result(test_load_label, table)
sql "sync"
qt_sql "select * from ${table};"
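Note the `load_from_hdfs` closure drops `brokerName`, `hdfsUser`, and `hdfsPasswd` from its parameter list: now that they are `def` locals in the enclosing suite body, the closure captures them directly. A sketch of that capture (assumed values):

```groovy
def hdfsUser = "doris"
def load = { file -> "loading ${file} as ${hdfsUser}" }   // captures the enclosing local
assert load("a.csv") == "loading a.csv as doris"
```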
@@ -43,7 +43,7 @@ suite("test_hive_parquet_alter_column", "p0,external,hive,extern



- types = ["int","smallint","tinyint","bigint","float","double","boolean","string","char","varchar","date","timestamp","decimal"]
+ def types = ["int","smallint","tinyint","bigint","float","double","boolean","string","char","varchar","date","timestamp","decimal"]

for( String type1 in types) {
qt_desc """ desc parquet_alter_column_to_${type1} ; """