Re: [Error]Finding average using hbase hadoop
Hi Manish,

First, instead of "stocks".getBytes() you need to use
Bytes.toBytes("stocks"): Bytes.toBytes always encodes as UTF-8, while
String.getBytes() uses the platform's default charset. Same for the
other strings.
Second, in your map task you create two byte arrays for those strings
on every call. You don't want that in a production environment: move
them outside of the map method and make them static final.
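
For example, here is a minimal sketch of the mapper with both changes
applied (the constant names are illustrative):

    public static class map extends TableMapper<ImmutableBytesWritable, FloatWritable>
    {
        // Built once when the class loads, instead of on every map() call
        private static final byte[] FAMILY = Bytes.toBytes("stocks");
        private static final byte[] OPEN = Bytes.toBytes("open");
        private static final ImmutableBytesWritable STOCK_SYMBOL =
                new ImmutableBytesWritable(Bytes.toBytes("symbol"));

        @Override
        public void map(ImmutableBytesWritable row, Result value, Context context)
                throws IOException
        {
            // Note: Bytes.toFloat expects the cell to hold a 4-byte
            // serialized float, i.e. one written with Bytes.toBytes(float)
            byte[] val = value.getValue(FAMILY, OPEN);
            try
            {
                context.write(STOCK_SYMBOL, new FloatWritable(Bytes.toFloat(val)));
            }
            catch (InterruptedException e)
            {
                throw new IOException(e);
            }
        }
    }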

JM

2013/8/16 manish dunani <[EMAIL PROTECTED]>

> Hello,
>
> I am using Apache Hadoop 1.1.2 and HBase 0.94.9 in pseudo-distributed mode.
>
> I am trying to find the average open stock values.
>
> *sample dataset in hbase:* *(table name: nyse4)*
>
>
>  2010-02-04    column=stocks:open,   timestamp=1376567559424, value=2.5
>  2010-02-04    column=stocks:symbol, timestamp=1376567559424, value=QXM
>  2010-02-05    column=stocks:open,   timestamp=1376567559429, value=2.42
>  2010-02-05    column=stocks:symbol, timestamp=1376567559429, value=QXM
>  2010-02-08    column=stocks:open,   timestamp=1376567559431, value=2.33
>  2010-02-08    column=stocks:symbol, timestamp=1376567559431, value=QXM
>
> *code:* (please ignore the commented-out lines)
>
>
> > package com.maddy;
> >
> > import java.io.IOException;
> >
> > import org.apache.hadoop.conf.Configuration;
> > import org.apache.hadoop.fs.Path;
> > import org.apache.hadoop.hbase.HBaseConfiguration;
> > import org.apache.hadoop.hbase.client.Put;
> > import org.apache.hadoop.hbase.client.Result;
> > import org.apache.hadoop.hbase.client.Scan;
> > import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
> > import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
> > import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
> > import org.apache.hadoop.hbase.mapreduce.TableMapper;
> > import org.apache.hadoop.hbase.mapreduce.TableReducer;
> > import org.apache.hadoop.hbase.util.Bytes;
> > //import org.apache.hadoop.io.DoubleWritable;
> > import org.apache.hadoop.io.FloatWritable;
> > import org.apache.hadoop.mapreduce.Job;
> > import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
> >
> >
> > public class openaveragestock
> > {
> >     public static class map extends TableMapper<ImmutableBytesWritable,FloatWritable>
> >     {
> >         @Override
> >
> >         public void map(ImmutableBytesWritable row,Result value,Context context) throws IOException
> >         {
> >
> >             byte[] val=(value.getValue("stocks".getBytes(),"open".getBytes()));
> >             //byte[] val1=(value.getValue("stocks".getBytes(),"symbol".getBytes()));
> >
> >
> >             ImmutableBytesWritable stock_symbol=new ImmutableBytesWritable("symbol".getBytes());
> >
> >
> >             try
> >             {
> >                 context.write(stock_symbol,new FloatWritable(Bytes.toFloat(val)));
> >             }
> >             catch(InterruptedException e)
> >             {
> >                  throw new IOException(e);
> >             }
> >
> >
> >         }
> >
> >
> >     }
> >
> >
> >     public static class reduce extends TableReducer<ImmutableBytesWritable,FloatWritable,ImmutableBytesWritable>
> >     {
> >
> >         @Override
> >         public void reduce(ImmutableBytesWritable key,Iterable<FloatWritable> values,Context context) throws IOException, InterruptedException
> >         {
> >             float sum=0;
> >             int count=0;
> >           //  float average=0;
> >             for(FloatWritable val:values)
> >             {
> >                 sum+=val.get();
> >                 count++;
> >             }
> >             //average=(sum/count);
> >             Put put=new Put(key.get());
> >
> >
> >             put.add(Bytes.toBytes("stocks_output"),Bytes.toBytes("average"),Bytes.toBytes(sum/count));
> >             System.out.println("For\t"+count+"\t average is:"+(sum/count));
> >             context.write(key,put);
> >
> >         }
> >
> >     }
> >
> >     public static void main(String args[]) throws IOException, ClassNotFoundException, InterruptedException
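
The quoted code is cut off at this point in the archive. For reference,
a driver for this kind of job would typically look roughly like the
sketch below (not the original poster's code; the output table name
"nyse4output" is illustrative, and that table must already exist with a
"stocks_output" column family to match the reducer's Put):

    public static void main(String[] args) throws IOException,
            ClassNotFoundException, InterruptedException
    {
        Configuration config = HBaseConfiguration.create();
        Job job = new Job(config, "openaveragestock");
        job.setJarByClass(openaveragestock.class);

        // Full scan over the input table; turning block caching off is the
        // usual recommendation for MapReduce scans
        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        // Read from the "nyse4" table with the mapper above
        TableMapReduceUtil.initTableMapperJob("nyse4", scan, map.class,
                ImmutableBytesWritable.class, FloatWritable.class, job);

        // Write the Puts emitted by the reducer into the output table
        TableMapReduceUtil.initTableReducerJob("nyse4output", reduce.class, job);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }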