

Re: Connection refused while Job Submission - Sqoop Client API
Hi Abraham,

That is another problem: I cannot use the commands you suggested, because when I
start the client on the same machine (CentOS, which is remote for me) on which the
server is running, the server automatically stops.
However, when I start the server on CentOS (which I access remotely through PuTTY)
and run the Sqoop Client API Java program on my own system (Windows 7), the server
does not stop and the connection is established successfully.
Also, the MySQL server is installed on my system (Windows 7) and I have granted
remote access to the IP address of the CentOS system; that is why the connection
gets established in the first step.
I am attaching the source and the output I am getting; please have a look.
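
A minimal standalone JDBC check along the following lines (just a sketch; the class
name is only illustrative, it assumes the same host, database and masked credentials
as in the program below, and it needs the MySQL JDBC driver on the classpath) can be
run from the CentOS machine to confirm that the remote MySQL grant works:

import java.sql.Connection;
import java.sql.DriverManager;

public class MySqlGrantCheck {
  public static void main(String[] args) throws Exception {
    // Same JDBC URL as in TestSqoopClient below; user/password are masked placeholders.
    String url = "jdbc:mysql://10.10.10.103:3306/mydb";
    try (Connection con = DriverManager.getConnection(url, "********", "********")) {
      // If the grant for this host is in place, this prints the MySQL server version.
      System.out.println("Connected to MySQL " + con.getMetaData().getDatabaseProductVersion());
    }
  }
}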

*Program:*
// imports for the Sqoop 2 (1.99.x) client API used below
import java.util.List;

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MConnection;
import org.apache.sqoop.model.MConnectionForms;
import org.apache.sqoop.model.MForm;
import org.apache.sqoop.model.MInput;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MJobForms;
import org.apache.sqoop.model.MSubmission;
import org.apache.sqoop.submission.counter.Counter;
import org.apache.sqoop.submission.counter.CounterGroup;
import org.apache.sqoop.submission.counter.Counters;
import org.apache.sqoop.validation.Status;

public class TestSqoopClient {
 private static void printMessage(List<MForm> formList) {
  for(MForm form : formList) {
    List<MInput<?>> inputlist = form.getInputs();
    if (form.getValidationMessage() != null) {
      System.out.println("Form message: " + form.getValidationMessage());
    }
    for (MInput minput : inputlist) {
      if (minput.getValidationStatus() == Status.ACCEPTABLE) {
        System.out.println("Warning:" + minput.getValidationMessage());
      } else if (minput.getValidationStatus() == Status.UNACCEPTABLE) {
        System.out.println("Error:" + minput.getValidationMessage());
      }
    }
  }
}

public static void main(String[] args) {
 try
{
String url = "http://192.168.3.156:12000/sqoop/";
SqoopClient client = new SqoopClient(url);
 /*********Connection**********/
//Dummy connection object
MConnection newCon = client.newConnection(1);

//Get connection and framework forms. Set name for connection
MConnectionForms conForms = newCon.getConnectorPart();
MConnectionForms frameworkForms = newCon.getFrameworkPart();
newCon.setName("MyConnection");
 /* sample for the code below:
  * MStringInput m = conForms.getStringInput("connection.connectionString");
  * m.setValue("");
  */
 //Set connection forms values
conForms.getStringInput("connection.connectionString").setValue("jdbc:mysql://10.10.10.103:3306/mydb");
conForms.getStringInput("connection.jdbcDriver").setValue("com.mysql.jdbc.Driver");
conForms.getStringInput("connection.username").setValue("********");   //masked
conForms.getStringInput("connection.password").setValue("********");   //masked

frameworkForms.getIntegerInput("security.maxConnections").setValue(0);

Status status = client.createConnection(newCon);
if(status.canProceed()) {
  System.out.println("Created. New Connection ID : " + newCon.getPersistenceId());
} else {
  System.out.println("Check for status and forms error ");
}
 //for checking the errors/warnings
printMessage(newCon.getConnectorPart().getForms());
printMessage(newCon.getFrameworkPart().getForms());
 /*********Job**********/
//Creating dummy job object
MJob newjob = client.newJob(1, org.apache.sqoop.model.MJob.Type.IMPORT);
MJobForms connectorForm = newjob.getConnectorPart();
MJobForms frameworkForm = newjob.getFrameworkPart();
 newjob.setName("ImportJob");
//Database configuration
connectorForm.getStringInput("table.schemaName").setValue("");
 //Input either table name or sql
connectorForm.getStringInput("table.tableName").setValue("student");
//connectorForm.getStringInput("table.sql").setValue("select id,name from student");
 connectorForm.getStringInput("table.columns").setValue("id,name");
connectorForm.getStringInput("table.partitionColumn").setValue("id");
 //Set boundary value only if required
//connectorForm.getStringInput("table.boundaryQuery").setValue("");

//Output configurations
frameworkForm.getEnumInput("output.storageType").setValue("HDFS");
frameworkForm.getEnumInput("output.outputFormat").setValue("TEXT_FILE");//Other
option: SEQUENCE_FILE/TEXT_FILE
frameworkForm.getStringInput("output.outputDirectory").setValue("/home/hadoop/tmp/hdfs/data");

//Job resources
frameworkForm.getIntegerInput("throttling.extractors").setValue(1);
frameworkForm.getIntegerInput("throttling.loaders").setValue(1);

Status status1 = client.createJob(newjob);
if(status1.canProceed()) {
 System.out.println("New Job ID: "+ newjob.getPersistenceId());
} else {
 System.out.println("Check for status and forms error ");
}
 printMessage(newjob.getConnectorPart().getForms());
printMessage(newjob.getFrameworkPart().getForms());
 /*****Job Submission******/
//Job submission start
long jid = newjob.getPersistenceId();
System.out.println(jid);
 MSubmission submission = client.startSubmission(jid);
 System.out.println("Status : " + submission.getStatus());
 if(submission.getStatus().isRunning() && submission.getProgress() != -1) {
  System.out.println("Progress : " + String.format("%.2f %%", submission.getProgress() * 100));
}
 System.out.println("Hadoop job id :" + submission.getExternalId());
System.out.println("Job link : " + submission.getExternalLink());
 Counters counters = submission.getCounters();
 if(counters != null) {
  System.out.println("Counters:");
  for(CounterGroup group : counters) {
    System.out.print("\t");
    System.out.println(group.getName());
    for(Counter counter : group) {
      System.out.print("\t\t");
      System.out.print(counter.getName());
      System.out.print(": ");
      System.out.println(counter.getValue());
    }
  }
}
if(submission.getExceptionInfo() != null) {
  System.out.println("Exception info : " +submission.getExceptionInfo());
}
//Check job status (use the id of the job submitted above)
MSubmission submission1 = client.getSubmissionStatus(jid);
if(submission1.getStatus().isRunning() && submission1.getProgress() != -1) {
  System.out.println("Progress : " + String.format("%.2f %%", submission1.getProgress() * 100));
}

//Stop a running job
//client.stopSubmission(jid);

}
catch(Exception e)
{
e.printStackTrace();
}
 }
}
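
Since startSubmission() starts the job asynchronously and the server reports failures
after the fact, a small polling helper roughly like the one below could be added to
TestSqoopClient to wait for a final state. This is only a sketch (the method name is
illustrative); it reuses the same client and jid values as in the program above:

// Hypothetical helper: poll the server until the submission is no longer running,
// then report its final status and any exception info.
private static void waitForSubmission(SqoopClient client, long jid) throws InterruptedException {
  MSubmission s = client.getSubmissionStatus(jid);
  while (s.getStatus().isRunning()) {
    System.out.println(String.format("Progress : %.2f %%", s.getProgress() * 100));
    Thread.sleep(5000); // poll every 5 seconds
    s = client.getSubmissionStatus(jid);
  }
  System.out.println("Final status : " + s.getStatus());
  if (s.getExceptionInfo() != null) {
    System.out.println("Exception info : " + s.getExceptionInfo());
  }
}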

*Output:*

Created. New Connection ID : 33
New Job ID: 11
11
org.apache.sqoop.common.SqoopException: CLIENT_0006:Server has returned exception
	at org.apache.sqoop.client.request.Request$ServerExceptionFilter.handle(Request.java:102)
	at com.sun.jersey.api.client.WebResource.handle(WebResource.