Home | About | Sematext search-lucene.com search-hadoop.com
NEW: Monitor These Apps!
elasticsearch, apache solr, apache hbase, hadoop, redis, cassandra, amazon cloudwatch, mysql, memcached, apache kafka, apache zookeeper, apache storm, ubuntu, centOS, red hat, debian, puppet labs, java, senseiDB
 Search Hadoop and all its subprojects:

Switch to Plain View
Hive >> mail # user >> Hive insert into RCFILE issue with timestamp columns


+
Dileep Kumar 2013-03-05, 01:37
Copy link to this message
-
Re: Hive insert into RCFILE issue with timestamp columns
Hi Dileep,
Are you able to run this query successfully?

select d_date_sk, d_date_id, d_date, d_month_seq, d_week_seq,
d_quarter_seq, d_dow, d_moy, d_dom, d_qoy, d_fy_year,
d_fy_quarter_seq, d_fy_week_seq, d_day_name, d_quarter_name,
d_holiday, d_weekend, d_following_holiday, d_first_dom, d_last_dom,
d_same_day_ly, d_same_day_lq, d_current_day, d_current_week,
d_current_month, d_current_quarter, d_current_year, d_year
from date_dim

On Mon, Mar 4, 2013 at 5:37 PM, Dileep Kumar <[EMAIL PROTECTED]> wrote:
> Hi All,
>
> I am using the schema in the Impala VM and trying to create a dynamic
> partitioned table on date_dim.
> New table is called date_dim_i and schema for that is defined as:
> create table date_dim_i
> (
>     d_date_sk                 int,
>     d_date_id                 string,
>     d_date                    timestamp,
>     d_month_seq               int,
>     d_week_seq                int,
>     d_quarter_seq             int,
>     d_dow                     int,
>     d_moy                     int,
>     d_dom                     int,
>     d_qoy                     int,
>     d_fy_year                 int,
>     d_fy_quarter_seq          int,
>     d_fy_week_seq             int,
>     d_day_name                string,
>     d_quarter_name            string,
>     d_holiday                 string,
>     d_weekend                 string,
>     d_following_holiday       string,
>     d_first_dom               int,
>     d_last_dom                int,
>     d_same_day_ly             int,
>     d_same_day_lq             int,
>     d_current_day             string,
>     d_current_week            string,
>     d_current_month           string,
>     d_current_quarter         string,
>     d_current_year            string
> )
> PARTITIONED BY (d_year int)
> stored as RCFILE;
>
> Then I do insert overwrite as:
> insert overwrite table date_dim_i
> PARTITION (d_year)
> select d_date_sk, d_date_id, d_date, d_month_seq, d_week_seq, d_quarter_seq,
> d_dow, d_moy, d_dom, d_qoy, d_fy_year, d_fy_quarter_seq, d_fy_week_seq,
> d_day_name, d_quarter_name, d_holiday, d_weekend, d_following_holiday,
> d_first_dom, d_last_dom, d_same_day_ly, d_same_day_lq, d_current_day,
> d_current_week, d_current_month, d_current_quarter, d_current_year, d_year
> from date_dim;
>
> The date_dim table schema is as :
> create external table date_dim
> (
>     d_date_sk                 int,
>     d_date_id                 string,
>     d_date                    timestamp,
>     d_month_seq               int,
>     d_week_seq                int,
>     d_quarter_seq             int,
>     d_year                    int,
>     d_dow                     int,
>     d_moy                     int,
>     d_dom                     int,
>     d_qoy                     int,
>     d_fy_year                 int,
>     d_fy_quarter_seq          int,
>     d_fy_week_seq             int,
>     d_day_name                string,
>     d_quarter_name            string,
>     d_holiday                 string,
>     d_weekend                 string,
>     d_following_holiday       string,
>     d_first_dom               int,
>     d_last_dom                int,
>     d_same_day_ly             int,
>     d_same_day_lq             int,
>     d_current_day             string,
>     d_current_week            string,
>     d_current_month           string,
>     d_current_quarter         string,
>     d_current_year            string
> )
> row format delimited fields terminated by '|'
> location '/hive/tpcds/date_dim';
>
>
>
>
>
> It fails with following exception:
>
> Error: java.lang.RuntimeException:
> org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while
> processing row
> {"d_date_sk":2415022,"d_date_id":"AAAAAAAAOKJNECAA","d_date":"1969-12-31
> 19:00:00","d_month_seq":0,"d_week_seq":1,"d_quarter_seq":1,"d_year":1900,"d_dow":1,"d_moy":1,"d_dom":2,"d_qoy":1,"d_fy_year":1900,"d_fy_quarter_seq":1,"d_fy_week_seq":1,"d_day_name":"Monday","d_quarter_name":"1900Q1","d_holiday":"N","d_weekend":"N","d_following_holiday":"Y","d_first_dom":2415021,"d_last_dom":2415020,"d_same_day_ly":2414657,"d_same_day_lq":2414930,"d_current_day":"N","d_current_week":"N","d_current_month":"N","d_current_quarter":"N","d_current_year":"N"}
+
Dileep Kumar 2013-03-05, 02:00
+
Dileep Kumar 2013-03-05, 22:56
+
Prasad Mujumdar 2013-03-06, 08:28
+
Sékine Coulibaly 2013-03-06, 09:58
+
Mark Grover 2013-03-05, 23:28
NEW: Monitor These Apps!
elasticsearch, apache solr, apache hbase, hadoop, redis, cassandra, amazon cloudwatch, mysql, memcached, apache kafka, apache zookeeper, apache storm, ubuntu, centOS, red hat, debian, puppet labs, java, senseiDB