Skip to content

Commit cf86eb8

Browse files
authored
[test](migrate) move test_hive_text_complex_type from p2 to p0 (apache#37007) (apache#37123)
bp: apache#37007
1 parent d0eea38 commit cf86eb8

File tree

23 files changed

+334
-6
lines changed

23 files changed

+334
-6
lines changed
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
-- Regression-test fixture: text-format (LazySimpleSerDe) Hive table covering
-- map<k,v> with every primitive value type, plus a struct of all primitives.
-- Data files are pre-loaded at LOCATION by the accompanying run script.
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `multi_catalog.hive_text_complex_type`(
  `column1` int,
  `column2` map<int,boolean>,
  `column3` map<int,tinyint>,
  `column4` map<string,smallint>,
  `column5` map<string,int>,
  `column6` map<string,bigint>,
  `column7` map<string,float>,
  `column8` map<string,double>,
  `column9` map<int,string>,
  `column10` map<string,timestamp>,
  `column11` map<string,date>,
  `column12` struct<field1:boolean,field2:tinyint,field3:smallint,field4:int,field5:bigint,field6:float,field7:double,field8:string,field9:timestamp,field10:date>)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type'
TBLPROPERTIES (
  'transient_lastDdlTime'='1690518015');

-- Pick up the data files already sitting under the table LOCATION.
msck repair table hive_text_complex_type;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Unpack the test dataset, push it to HDFS, then create the Hive table
# via the sibling create_table.hql. Intended to be idempotent per suite dir.
set -x

# Directory containing this script, resolved regardless of invocation cwd.
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
-- Regression-test fixture: text-format Hive table exercising NESTED complex
-- types (map-of-map, array-of-array-of-map, struct containing map/struct/array).
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `multi_catalog.hive_text_complex_type2`(
  `id` int,
  `col1` map<int,map<string,int>>,
  `col2` array<array<map<int,boolean>>>,
  `col3` struct<field1:int,field2:map<int,string>,field3:struct<sub_field1:boolean,sub_field2:boolean,sub_field3:int>,field4:array<int>>,
  `col4` map<int,map<int,array<boolean>>>,
  `col5` map<int,struct<sub_field1:boolean,sub_field2:string>>)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type2'
TBLPROPERTIES (
  'transient_lastDdlTime'='1692719086');

-- Pick up the data files already sitting under the table LOCATION.
msck repair table hive_text_complex_type2;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Unpack the test dataset, push it to HDFS, then create the Hive table
# via the sibling create_table.hql. Intended to be idempotent per suite dir.
set -x

# Directory containing this script, resolved regardless of invocation cwd.
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
-- Regression-test fixture: text-format Hive table with EXTREMELY deep type
-- nesting (up to ~10 levels). Requires extended nesting-level support in
-- LazySimpleSerDe, enabled via SERDEPROPERTIES below.
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `multi_catalog.hive_text_complex_type3`(
  `id` int,
  `column1` map<int,struct<a:int,b:int,c:array<map<string,array<array<array<array<struct<aa:int,bb:string,cc:boolean>>>>>>>>>,
  `column2` array<struct<a:int,b:array<map<string,map<int,map<string,array<struct<aaa:struct<aa:int,bb:string,cc:boolean>,bbb:boolean,ccc:string,ddd:date>>>>>>,c:int>>,
  `column3` struct<a:int,b:struct<a:array<map<string,array<map<int,map<boolean,array<struct<aa:int,bb:string,cc:boolean>>>>>>>>,c:map<int,string>,d:array<int>>,
  `column4` map<int,map<date,map<int,map<double,map<string,map<int,map<string,map<int,map<int,map<int,boolean>>>>>>>>>>,
  `column5` array<array<array<array<array<array<array<array<array<array<int>>>>>>>>>>,
  `column6` struct<a:map<int,map<int,map<string,string>>>,b:struct<aa:struct<aaa:struct<aaaa:struct<aaaaa:struct<aaaaaa:struct<aaaaaaa:struct<aaaaaaaa:struct<a1:int,a2:string>,bbbbbbbb:map<int,int>>,bbbbbbb:array<string>>>>,bbbb:map<int,double>>>,bb:date>,c:date>)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
  -- Text serde caps nesting depth by default; this lifts the limit.
  'hive.serialization.extend.nesting.levels'='true')
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type3'
TBLPROPERTIES (
  'transient_lastDdlTime'='1693389680');

-- Pick up the data files already sitting under the table LOCATION.
msck repair table hive_text_complex_type3;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Unpack the test dataset, push it to HDFS, then create the Hive table
# via the sibling create_table.hql. Intended to be idempotent per suite dir.
set -x

# Directory containing this script, resolved regardless of invocation cwd.
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
-- Regression-test fixture: same schema as hive_text_complex_type, but with
-- explicit custom delimiters declared in SERDEPROPERTIES, to verify the
-- reader honors non-default field/collection/map-key separators.
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `multi_catalog.hive_text_complex_type_delimiter`(
  `column1` int,
  `column2` map<int,boolean>,
  `column3` map<int,tinyint>,
  `column4` map<string,smallint>,
  `column5` map<string,int>,
  `column6` map<string,bigint>,
  `column7` map<string,float>,
  `column8` map<string,double>,
  `column9` map<int,string>,
  `column10` map<string,timestamp>,
  `column11` map<string,date>,
  `column12` struct<field1:boolean,field2:tinyint,field3:smallint,field4:int,field5:bigint,field6:float,field7:double,field8:string,field9:timestamp,field10:date>)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
  -- NOTE(review): 'colelction.delim' is Hive's historical misspelling
  -- (HIVE-16922) as emitted by SHOW CREATE TABLE; older LazySimpleSerDe
  -- only reads the typo'd key, so it is kept verbatim on purpose —
  -- confirm against the target Hive version before "correcting" it.
  'colelction.delim'='|',
  'field.delim'=',',
  'line.delim'='\n',
  'mapkey.delim'=':',
  'serialization.format'=',')
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type_delimiter'
TBLPROPERTIES (
  'transient_lastDdlTime'='1690517298');

-- Pick up the data files already sitting under the table LOCATION.
msck repair table hive_text_complex_type_delimiter;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Unpack the test dataset, push it to HDFS, then create the Hive table
# via the sibling create_table.hql. Intended to be idempotent per suite dir.
set -x

# Directory containing this script, resolved regardless of invocation cwd.
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
-- Regression-test fixture: same nested-type schema as hive_text_complex_type2,
-- but with explicit tab-separated fields and custom collection/map delimiters.
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `multi_catalog.hive_text_complex_type_delimiter2`(
  `id` int,
  `col1` map<int,map<string,int>>,
  `col2` array<array<map<int,boolean>>>,
  `col3` struct<field1:int,field2:map<int,string>,field3:struct<sub_field1:boolean,sub_field2:boolean,sub_field3:int>,field4:array<int>>,
  `col4` map<int,map<int,array<boolean>>>,
  `col5` map<int,struct<sub_field1:boolean,sub_field2:string>>)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
  -- NOTE(review): 'colelction.delim' is Hive's historical misspelling
  -- (HIVE-16922) as emitted by SHOW CREATE TABLE; older LazySimpleSerDe
  -- only reads the typo'd key, so it is kept verbatim on purpose —
  -- confirm against the target Hive version before "correcting" it.
  'colelction.delim'=',',
  'field.delim'='\t',
  'line.delim'='\n',
  'mapkey.delim'=':',
  'serialization.format'='\t')
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type_delimiter2'
TBLPROPERTIES (
  'transient_lastDdlTime'='1692719456');

-- Pick up the data files already sitting under the table LOCATION.
msck repair table hive_text_complex_type_delimiter2;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Unpack the test dataset, push it to HDFS, then create the Hive table
# via the sibling create_table.hql. Intended to be idempotent per suite dir.
set -x

# Directory containing this script, resolved regardless of invocation cwd.
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
-- Regression-test fixture: deeply nested schema of hive_text_complex_type3,
-- combined with custom collection/map-key delimiters and extended nesting
-- support. Note this file uses the correctly-spelled 'collection.delim'
-- (unlike the delimiter/delimiter2 fixtures, which use Hive's historical
-- 'colelction.delim' misspelling) — the two spellings are exercised on purpose.
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `multi_catalog.hive_text_complex_type_delimiter3`(
  `id` int,
  `column1` map<int,struct<a:int,b:int,c:array<map<string,array<array<array<array<struct<aa:int,bb:string,cc:boolean>>>>>>>>>,
  `column2` array<struct<a:int,b:array<map<string,map<int,map<string,array<struct<aaa:struct<aa:int,bb:string,cc:boolean>,bbb:boolean,ccc:string,ddd:date>>>>>>,c:int>>,
  `column3` struct<a:int,b:struct<a:array<map<string,array<map<int,map<boolean,array<struct<aa:int,bb:string,cc:boolean>>>>>>>>,c:map<int,string>,d:array<int>>,
  `column4` map<int,map<date,map<int,map<double,map<string,map<int,map<string,map<int,map<int,map<int,boolean>>>>>>>>>>,
  `column5` array<array<array<array<array<array<array<array<array<array<int>>>>>>>>>>,
  `column6` struct<a:map<int,map<int,map<string,string>>>,b:struct<aa:struct<aaa:struct<aaaa:struct<aaaaa:struct<aaaaaa:struct<aaaaaaa:struct<aaaaaaaa:struct<a1:int,a2:string>,bbbbbbbb:map<int,int>>,bbbbbbb:array<string>>>>,bbbb:map<int,double>>>,bb:date>,c:date>)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
  'collection.delim'=',',
  -- Text serde caps nesting depth by default; this lifts the limit.
  'hive.serialization.extend.nesting.levels'='true',
  'mapkey.delim'=':')
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/hive_text_complex_type_delimiter3'
TBLPROPERTIES (
  'transient_lastDdlTime'='1693390056');

-- Pick up the data files already sitting under the table LOCATION.
msck repair table hive_text_complex_type_delimiter3;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Unpack the test dataset, push it to HDFS, then create the Hive table
# via the sibling create_table.hql. Intended to be idempotent per suite dir.
set -x

# Directory containing this script, resolved regardless of invocation cwd.
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
-- Regression-test fixture: Parquet-backed Hive table with a struct column,
-- used to test predicate pushdown on primitive and struct-adjacent columns.
CREATE DATABASE IF NOT EXISTS multi_catalog;
USE multi_catalog;

CREATE TABLE `multi_catalog.parquet_predicate_table`(
  `column_primitive_integer` int,
  `column1_struct` struct<field0:bigint,field1:bigint>,
  `column_primitive_bigint` bigint)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
LOCATION '/user/doris/suites/multi_catalog/parquet_predicate_table'
TBLPROPERTIES (
  'transient_lastDdlTime'='1692368377');

-- Pick up the data files already sitting under the table LOCATION.
msck repair table parquet_predicate_table;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Unpack the test dataset, push it to HDFS, then create the Hive table
# via the sibling create_table.hql. Intended to be idempotent per suite dir.
set -x

# Directory containing this script, resolved regardless of invocation cwd.
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

## mkdir and put data to hdfs
cd "${CUR_DIR}" && rm -rf data/ && tar xzf data.tar.gz
hadoop fs -mkdir -p /user/doris/suites/multi_catalog/
hadoop fs -put "${CUR_DIR}"/data/* /user/doris/suites/multi_catalog/

# create table
hive -f "${CUR_DIR}/create_table.hql"

0 commit comments

Comments
 (0)