Home | About | Sematext search-lucene.com search-hadoop.com
NEW: Monitor These Apps!
elasticsearch, apache solr, apache hbase, hadoop, redis, cassandra, amazon cloudwatch, mysql, memcached, apache kafka, apache zookeeper, apache storm, ubuntu, CentOS, red hat, debian, puppet labs, java, senseiDB
 Search Hadoop and all its subprojects:

Switch to Threaded View
HBase >> mail # user >> Exception

Dear all,
I ran this program, but it threw the following exception:
/** * Copyright 2008 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements.  See the NOTICE file * distributed with this work for additional information * regarding copyright ownership.  The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License.  You may obtain a copy of the License at * *     http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */package hbasetest;
import java.io.UnsupportedEncodingException;import java.nio.ByteBuffer;import java.nio.charset.CharacterCodingException;import java.nio.charset.Charset;import java.nio.charset.CharsetDecoder;import java.text.NumberFormat;import java.util.ArrayList;import java.util.List;import java.util.Map;import java.util.TreeMap;import java.util.SortedMap;
import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;import org.apache.hadoop.hbase.thrift.generated.Hbase;import org.apache.hadoop.hbase.thrift.generated.IOError;import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;import org.apache.hadoop.hbase.thrift.generated.Mutation;import org.apache.hadoop.hbase.thrift.generated.NotFound;import org.apache.hadoop.hbase.thrift.generated.TCell;import org.apache.hadoop.hbase.thrift.generated.TRowResult;
//import com.facebook.thrift.TException;//import com.facebook.thrift.protocol.TBinaryProtocol;//import com.facebook.thrift.protocol.TProtocol;//import com.facebook.thrift.transport.TSocket;//import com.facebook.thrift.transport.TTransport;
//import org.apache.hadoop.hive.service.ThriftHive;import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;/* * Instructions: * 1. Run Thrift to generate the java module HBase *    thrift --gen java ../../../src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift * 2. Acquire a jar of compiled Thrift java classes.  As of this writing, HBase ships  *    with this jar (libthrift-[VERSION].jar).  If this jar is not present, or it is  *    out-of-date with your current version of thrift, you can compile the jar  *    yourself by executing {ant} in {$THRIFT_HOME}/lib/java. * 3. Compile and execute this file with both the libthrift jar and the gen-java/  *    directory in the classpath.  This can be done on the command-line with the  *    following lines: (from the directory containing this file and gen-java/) *     *    javac -cp /path/to/libthrift/jar.jar:gen-java/ DemoClient.java *    mv DemoClient.class gen-java/org/apache/hadoop/hbase/thrift/ *    java -cp /path/to/libthrift/jar.jar:gen-java/ org.apache.hadoop.hbase.thri
 ft.DemoClient *
 */
// Demo client that exercises an HBase Thrift gateway: scans/creates/deletes a
// demo table. NOTE(review): the class body continues beyond this excerpt.
public class HbaseTestClient {

  // Port of the HBase Thrift gateway this client connects to (9090 is the
  // conventional default for the HBase Thrift server).
  protected int port = 9090;

  // UTF-8 decoder used by utf8() to render raw HBase byte[] values as
  // strings; initialized in the constructor.
  CharsetDecoder decoder = null;
  public static void main(String[] args)   throws IOError, TException, UnsupportedEncodingException, IllegalArgument, AlreadyExists {    HbaseTestClient client = new HbaseTestClient();    client.run();  }
  // Initializes the shared UTF-8 decoder used by utf8().
  HbaseTestClient() {    decoder = Charset.forName("UTF-8").newDecoder();  }

  // Helper to translate byte[]'s to UTF8 strings; returns a placeholder
  // instead of throwing when the bytes are not valid UTF-8.
  private String utf8(byte[] buf) {
    try {
      return decoder.decode(ByteBuffer.wrap(buf)).toString();
    } catch (CharacterCodingException e) {
      return "[INVALID UTF-8]";
    }
  }

  // Helper to translate strings to UTF8 bytes.
  // NOTE(review): UnsupportedEncodingException can never fire for "UTF-8";
  // the null return path is effectively dead but callers never check for it —
  // consider StandardCharsets.UTF_8 to drop the checked exception entirely.
  private byte[] bytes(String s) {
    try {
      return s.getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
      e.printStackTrace();
      return null;
    }
  }

  // Drives the demo: deletes any existing "demo_table", then recreates it
  // with two column families. NOTE(review): this method is truncated in the
  // visible excerpt; it continues beyond this chunk.
  private void run() throws IOError, TException, IllegalArgument,
      AlreadyExists {
    // NOTE(review): empty hostname — TSocket("", port) will not reach a
    // gateway; presumably this should be "localhost" or a configured host.
    TTransport transport = new TSocket("", port);
    // strictRead/strictWrite both true for the binary protocol.
    TProtocol protocol = new TBinaryProtocol(transport, true, true);
    Hbase.Client client = new Hbase.Client(protocol);

    byte[] t = bytes("demo_table");
    byte[] t1 = bytes("test");  // NOTE(review): t1 is never used below.

    //
    // Scan all tables, look for the demo table and delete it.
    //
    System.out.println("scanning tables...");
    for (byte[] name : client.getTableNames()) {
      System.out.println("  found: " + utf8(name));
      if (utf8(name).equals(utf8(t))) {
        // A table must be disabled before it can be deleted.
        if (client.isTableEnabled(name)) {
          System.out.println("    disabling table: " + utf8(name));
          client.disableTable(name);
        }
        System.out.println("    deleting table: " + utf8(name));
        client.deleteTable(name);
      }
    }

    //
    // Create the demo table with two column families, entry: and unused:
    //
    ArrayList<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>();
    ColumnDescriptor col = null;
    col = new ColumnDescriptor();
    col.name = bytes("entry:");
    col.maxVersions = 10;  // keep up to 10 versions per cell in entry:
    columns.add(col);
    col = new ColumnDescriptor();
    col.name = bytes("unused:");  // defaults apply (maxVersions etc.)
    columns.add(col);

    System.out.println("creating table: " + utf8(t));
    try {
      // client.deleteTable("test".getBytes());
      client.createTable(t, columns);
    } catch (AlreadyExists ae) {
      System.out.println("WARN: " + ae.message);
    }
    // NOTE(review): catching broad Exception and printing it swallows
    // failures silently; consider narrowing or rethrowing.
    catch(Exception ex){
        System.out.println(ex);
    }
    // NOTE(review): this exit makes everything below unreachable — looks
    // like leftover debugging; remove to let the demo continue.
    System.exit(0);
    System.out.println("column families in " + utf8(t) + ": ");
    Map<byte[], ColumnDescriptor> columnMap = client.getColumnDescriptors(t);
    for (ColumnDescriptor col2 : columnMap.values()) {
      System.out.println("  column: " + utf8(col2.na
NEW: Monitor These Apps!
elasticsearch, apache solr, apache hbase, hadoop, redis, cassandra, amazon cloudwatch, mysql, memcached, apache kafka, apache zookeeper, apache storm, ubuntu, CentOS, red hat, debian, puppet labs, java, senseiDB