Re: Error Running MAP REDUCE Job
Hi Nick,

Are you really using HBase 0.94.0? We are at 0.94.10, and .0 is a pretty
old version. Any chance you can upgrade?

Also, what is the output of this command?
ls -hal /opt/hbase/

Thanks,

JM

2013/8/15 Nick <[EMAIL PROTECTED]>

> Hi,
>
> I am trying to run a standalone MapReduce job, but I am getting a
> ClassNotFoundException.
> I am able to connect to the shell and create tables, and I am also able
> to scan a table from a standalone program.
> I am not sure exactly where I am missing a classpath setting.
> I would appreciate any advice.
>
> export HADOOP_CLASSPATH=/opt/hbase/bin:/opt/hbase/conf:/opt/hbase/hbase-0.94.0.jar:/opt/hbase/hbase-0.94.0-tests.jar
>
> /opt/hadoop/bin/hadoop -jar hbase-example-1.0.jar
> [EMAIL PROTECTED]
>
> Exception in thread "main" java.lang.NoClassDefFoundError:
> org/apache/hadoop/hbase/util/Bytes
>         at client.MapperTest.<clinit>(MapperTest.java:29)
> Caused by: java.lang.ClassNotFoundException:
> org.apache.hadoop.hbase.util.Bytes
>         at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
>         at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
>         ... 1 more
>
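This NoClassDefFoundError happens at launch, in the client JVM that bin/hadoop
starts, before the job is even submitted (note the failure in
MapperTest.<clinit>). Two things worth checking: that HADOOP_CLASSPATH is
exported in the same shell that runs the job, and that the jar is launched
with "hadoop jar" rather than "hadoop -jar". A minimal sketch of a launch that
picks up every HBase dependency at once, assuming a 0.94-era install where
bin/hbase supports the classpath subcommand:

export HADOOP_CLASSPATH=$(/opt/hbase/bin/hbase classpath)
/opt/hadoop/bin/hadoop jar hbase-example-1.0.jar client.MapperTest

This only fixes the launcher side; shipping the jars to the map tasks is a
separate concern (see the note after the program below).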
> My program is like the following:
>
> package client;
>
>
> import java.io.IOException;
>
>
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.fs.Path;
> import org.apache.hadoop.hbase.KeyValue;
> import org.apache.hadoop.hbase.client.Result;
> import org.apache.hadoop.hbase.client.Scan;
> import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
> import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
> import org.apache.hadoop.hbase.mapreduce.TableMapper;
> import org.apache.hadoop.hbase.util.Bytes;
> import org.apache.hadoop.io.NullWritable;
> import org.apache.hadoop.io.Text;
> import org.apache.hadoop.mapreduce.Job;
> import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
> import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
>
> import util.HBaseHelper;
>
> public class MapperTest {
>
>
>         public static String TABLE = "30000001_registration";
>         public static final String COOKIE_ID_QUAL_LABEL = "ci";
>         public static final String REGISTRATION_ID_QUAL_LABEL = "r";
>         public static final byte[] COOKIE_ID_QUAL = Bytes.toBytes(COOKIE_ID_QUAL_LABEL);
>         public static final byte[] REGISTRATION_ID_QUAL = Bytes.toBytes(REGISTRATION_ID_QUAL_LABEL);
>         public static final String NAME = "MapperTest";
>
>
>           public static void main(String[] args) throws Exception {
>
>                 try{
>                         Configuration conf = HBaseHelper.getProdQa();
>                         Job job = new Job(conf, "Analyze data in " + TABLE);
>                     job.setJarByClass(MapperTest.class);
>                     Scan scan = new Scan();
>                     scan.setCaching(5);
>                     scan.setCacheBlocks(false);
>
>                     TableMapReduceUtil.initTableMapperJob(TABLE.getBytes(), scan,
>                             MyMapper.class, Text.class, NullWritable.class, job);
>                     //job.setOutputKeyClass(Text.class);
>                     //job.setOutputValueClass(NullWritable.class);
>                     job.setNumReduceTasks(0);
>                     FileOutputFormat.setOutputPath(job, new Path("/tmp/nick"));
>                     job.setOutputFormatClass(TextOutputFormat.class);
>                     System.exit(job.waitForCompletion(true) ? 0 : 1);
>                 }catch(Exception e){
>                         System.out.println("There is something wrong");
>                         e.printStackTrace();
>                 }
>
>
>         }
>
>         static class MyMapper extends TableMapper<Text, NullWritable>{
>
>                 @Override
>                 protected void setup(Context context) throws IOException,
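The archived message is cut off here, so the rest of the mapper is unknown.
For reference, a minimal sketch of what the truncated class might have looked
like, assuming it simply emits each row key as text (which matches the
Text/NullWritable types the job is configured with, and relies on the same
imports as the program above):

        static class MyMapper extends TableMapper<Text, NullWritable> {

                @Override
                public void map(ImmutableBytesWritable row, Result result,
                                Context context)
                                throws IOException, InterruptedException {
                        // Hypothetical body: emit the row key as a string.
                        // The original code was lost in the truncation.
                        context.write(new Text(Bytes.toString(row.get())),
                                        NullWritable.get());
                }
        }

On the task-side classpath: TableMapReduceUtil.addDependencyJars(job) ships
the HBase jars to the task nodes via the distributed cache, and the default
initTableMapperJob overloads in 0.94 should already call it, so once the
client-side classpath is fixed the map tasks should find
org.apache.hadoop.hbase.util.Bytes as well.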