Commit

fix
yagagagaga committed Feb 22, 2025
1 parent 95bb92b commit 3b0569d
Showing 4 changed files with 18 additions and 18 deletions.
@@ -271,7 +271,7 @@ suite("add_drop_partition") {
DROP TABLE ${tableName}
"""

sql """
try_sql """
drop storage policy add_policy;
"""

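Note on this hunk (and the identical one in add_drop_partition_by_hdfs below): the switch from sql to try_sql looks like cleanup hardening. Assuming try_sql logs the failure and carries on where sql would fail the suite, the teardown no longer aborts when add_policy cannot be dropped, for example because it is still referenced by a table or was never created in an earlier failed run. A minimal sketch of the cleanup pattern, under that assumed behavior:

    // Cleanup sketch, not part of the commit: try_sql is assumed to swallow
    // the error and return instead of failing the whole suite the way sql would.
    try_sql """
        drop storage policy add_policy;
    """
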
@@ -267,7 +267,7 @@ suite("add_drop_partition_by_hdfs") {
DROP TABLE ${tableName}
"""

sql """
try_sql """
drop storage policy add_policy;
"""

@@ -32,9 +32,9 @@ suite("create_table_use_policy") {
}
}
// data_sizes is an ArrayList<Long>, t is a tablet
- def fetchDataSize = { data_sizes, t ->
-     def tabletId = t[0]
-     String meta_url = t[17]
+ def fetchDataSize = {List<Long> data_sizes, Map<String, Object> t ->
+     def tabletId = t.TabletId
+     String meta_url = t.MetaUrl
def clos = { respCode, body ->
logger.info("test ttl expired resp Code {}", "${respCode}".toString())
assertEquals("${respCode}".toString(), "200")
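
The change above replaces positional indexing into a SHOW TABLETS row (t[0] for TabletId, t[17] for MetaUrl) with access by column name, which only works because the hunks below switch the query from sql to sql_return_maparray. A minimal sketch of the assumed difference, with TabletId and MetaUrl taken from the column names used in the new code:

    // Sketch, not part of the commit. Assumption: sql returns each row as a
    // List addressed by position, while sql_return_maparray returns each row
    // as a Map keyed by column name, so a column reorder no longer breaks t[17].
    def rows = sql_return_maparray """ SHOW TABLETS FROM ${tableName} """
    def first = rows[0]
    def tabletId = first.TabletId   // previously first[0]
    String metaUrl = first.MetaUrl  // previously first[17]
    logger.info("tablet {} meta url {}", tabletId, metaUrl)
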
@@ -189,7 +189,7 @@ suite("create_table_use_policy") {
load_lineitem_table()

// show tablets from table, get LocalDataSize1 of the first tablet
- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
log.info( "test tablets not empty")
@@ -201,15 +201,15 @@ suite("create_table_use_policy") {
sleep(600000)


- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
log.info( "test tablets not empty")
fetchDataSize(sizes, tablets[0])
while (sizes[1] == 0) {
log.info( "test remote size is zero, sleep 10s")
sleep(10000)
- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
fetchDataSize(sizes, tablets[0])
@@ -256,7 +256,7 @@ suite("create_table_use_policy") {
load_lineitem_table()

// show tablets from table, get LocalDataSize1 of the first tablet
- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
log.info( "test tablets not empty")
@@ -267,15 +267,15 @@ suite("create_table_use_policy") {
sleep(600000)


- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
log.info( "test tablets not empty")
fetchDataSize(sizes, tablets[0])
while (sizes[1] == 0) {
log.info( "test remote size is zero, sleep 10s")
sleep(10000)
- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
fetchDataSize(sizes, tablets[0])
@@ -293,4 +293,4 @@ suite("create_table_use_policy") {



- }
+ }
@@ -36,9 +36,9 @@ suite("load_colddata_to_hdfs") {
}
}
// data_sizes is an ArrayList<Long>, t is a tablet
- def fetchDataSize = { data_sizes, t ->
-     def tabletId = t[0]
-     String meta_url = t[17]
+ def fetchDataSize = {List<Long> data_sizes, Map<String, Object> t ->
+     def tabletId = t.TabletId
+     String meta_url = t.MetaUrl
def clos = { respCode, body ->
logger.info("test ttl expired resp Code {}", "${respCode}".toString())
assertEquals("${respCode}".toString(), "200")
@@ -191,7 +191,7 @@ suite("load_colddata_to_hdfs") {
load_lineitem_table()

// show tablets from table, get LocalDataSize1 of the first tablet
- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
log.info( "test tablets not empty")
@@ -203,15 +203,15 @@ suite("load_colddata_to_hdfs") {
sleep(600000)


- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
log.info( "test tablets not empty")
fetchDataSize(sizes, tablets[0])
while (sizes[1] == 0) {
log.info( "test remote size is zero, sleep 10s")
sleep(10000)
- tablets = sql """
+ tablets = sql_return_maparray """
SHOW TABLETS FROM ${tableName}
"""
fetchDataSize(sizes, tablets[0])
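
For context on the hunks that touch the retry loop: both create_table_use_policy and load_colddata_to_hdfs poll SHOW TABLETS until the remote (cold) data size of the first tablet becomes non-zero, so every refresh of tablets inside the loop had to move to sql_return_maparray as well. A sketch of that polling shape, assuming sizes holds [localDataSize, remoteDataSize] and is filled in by fetchDataSize:

    // Polling sketch assembled from the hunks above; the sizes initialization
    // is an assumption, since it sits outside the changed lines.
    List<Long> sizes = [-1L, -1L]
    def tablets = sql_return_maparray """ SHOW TABLETS FROM ${tableName} """
    fetchDataSize(sizes, tablets[0])
    while (sizes[1] == 0) {
        log.info("test remote size is zero, sleep 10s")
        sleep(10000)
        tablets = sql_return_maparray """ SHOW TABLETS FROM ${tableName} """
        fetchDataSize(sizes, tablets[0])
    }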
