Skip to content
This repository was archived by the owner on Mar 24, 2022. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 17 additions & 12 deletions hawq/hawq_tables/create_hawq_tables.sql
Original file line number Diff line number Diff line change
@@ -1,15 +1,14 @@
-- Rebuild the demo schema from scratch: CASCADE drops every object it
-- contains, so this script is safely re-runnable end to end.
DROP SCHEMA IF EXISTS retail_demo CASCADE;
CREATE SCHEMA retail_demo;

-- 1. HAWQ table; load via COPY
-- 1. HAWQ table: product-category lookup dimension.
--    Append-only quicklz-compressed storage; random distribution is fine
--    for a small dimension table with no natural distribution key.
DROP TABLE IF EXISTS retail_demo.categories_dim_hawq;
CREATE TABLE retail_demo.categories_dim_hawq (
    category_id   integer                NOT NULL,
    category_name character varying(400) NOT NULL
)
WITH (appendonly=true, compresstype=quicklz)
DISTRIBUTED RANDOMLY;

-- 2. HAWQ table; load via COPY
-- 2. HAWQ table
DROP TABLE IF EXISTS retail_demo.customers_dim_hawq;
CREATE TABLE retail_demo.customers_dim_hawq
(
customer_id TEXT,
Expand All @@ -19,7 +18,8 @@ CREATE TABLE retail_demo.customers_dim_hawq
)
WITH (appendonly=true, compresstype=quicklz) DISTRIBUTED RANDOMLY;

-- 3. HAWQ table; load via COPY
-- 3. HAWQ table
DROP TABLE IF EXISTS retail_demo.order_lineitems_hawq;
CREATE TABLE retail_demo.order_lineitems_hawq
(
order_id TEXT,
Expand Down Expand Up @@ -57,7 +57,8 @@ CREATE TABLE retail_demo.order_lineitems_hawq
)
WITH (appendonly=true, compresstype=quicklz) DISTRIBUTED RANDOMLY;

-- 4. HAWQ table; load via COPY
-- 4. HAWQ table
DROP TABLE IF EXISTS retail_demo.orders_hawq;
CREATE TABLE retail_demo.orders_hawq
(
order_id TEXT,
Expand Down Expand Up @@ -94,7 +95,8 @@ CREATE TABLE retail_demo.orders_hawq
)
WITH (appendonly=true, compresstype=quicklz) DISTRIBUTED RANDOMLY;

-- 5. HAWQ table; load via COPY
-- 5. HAWQ table
DROP TABLE IF EXISTS retail_demo.customer_addresses_dim_hawq;
CREATE TABLE retail_demo.customer_addresses_dim_hawq
(
customer_address_id TEXT,
Expand All @@ -113,7 +115,8 @@ CREATE TABLE retail_demo.customer_addresses_dim_hawq
)
WITH (appendonly=true, compresstype=quicklz) DISTRIBUTED RANDOMLY;

-- 6. HAWQ table; load via COPY
-- 6. HAWQ table
DROP TABLE IF EXISTS retail_demo.date_dim_hawq;
CREATE TABLE retail_demo.date_dim_hawq
(
calendar_day date,
Expand All @@ -126,6 +129,7 @@ CREATE TABLE retail_demo.date_dim_hawq
WITH (appendonly=true) DISTRIBUTED RANDOMLY;

-- 7. HAWQ table
DROP TABLE IF EXISTS retail_demo.email_addresses_dim_hawq;
CREATE TABLE retail_demo.email_addresses_dim_hawq
(
customer_id TEXT,
Expand All @@ -134,16 +138,17 @@ CREATE TABLE retail_demo.email_addresses_dim_hawq
WITH (appendonly=true, compresstype=quicklz) DISTRIBUTED RANDOMLY;


-- 8. HAWQ table; load via COPY
-- 8. HAWQ table: payment-method code lookup.
--    Same append-only/quicklz layout as the other dimension tables.
DROP TABLE IF EXISTS retail_demo.payment_methods_hawq;
CREATE TABLE retail_demo.payment_methods_hawq (
    payment_method_id   smallint,
    payment_method_code character varying(20)
)
WITH (appendonly=true, compresstype=quicklz)
DISTRIBUTED RANDOMLY;
-- Hand ownership to the cluster admin role (assumes role gpadmin exists
-- -- NOTE(review): the other tables in this script do not set an owner;
-- confirm whether that asymmetry is intentional).
ALTER TABLE retail_demo.payment_methods_hawq OWNER TO gpadmin;

-- 9. HAWQ table; load via COPY
-- 9. HAWQ table
DROP TABLE IF EXISTS retail_demo.products_dim_hawq;
CREATE TABLE retail_demo.products_dim_hawq
(
product_id TEXT,
Expand Down
16 changes: 4 additions & 12 deletions hawq/hawq_tables/load_data_to_HDFS.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,9 @@ base_dir="/retail_demo"

# Clean up any previous load
echo "hadoop fs -rm -r -skipTrash $base_dir"
hadoop fs -rm -r -skipTrash $base_dir
hadoop fs -rm -r -skipTrash $base_dir

echo "hadoop fs -mkdir $base_dir"
hadoop fs -mkdir $base_dir

for file in *.tsv.gz
do
dir=`echo $file | perl -ne 's/^(.+?)\..+$/$1/;print;'`
echo "hadoop fs -mkdir $base_dir/$dir"
hadoop fs -mkdir $base_dir/$dir
echo "hadoop fs -put $file $base_dir/$dir/"
hadoop fs -put $file $base_dir/$dir/
done
# Copy the data directory, recursively, into HDFS root
echo "hadoop fs -put /retail_demo /"
hadoop fs -put /retail_demo /

16 changes: 0 additions & 16 deletions hawq/hawq_tables/load_hawq_tables_perl.sh

This file was deleted.