...

From the above page, click the Gateway client binary archive link, or just use the one here.

Unzip this file into your preferred location. This will result in a knoxshell-0.14.0 directory, which we will refer to as {GATEWAY_HOME}.
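For example, a minimal sketch, assuming the archive was downloaded to your home directory as knoxshell-0.14.0.zip (the exact archive name depends on the release you downloaded):

cd ~
unzip knoxshell-0.14.0.zip
export GATEWAY_HOME=~/knoxshell-0.14.0

The GATEWAY_HOME variable is only a convenience for following along; any other way of remembering the install location works just as well.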

...

  • if you have access to the server, you may use the command
    • knoxcli.sh export-cert --type JKS
  • copy the resulting gateway-client-identity.jks to your user home directory
  • you may also ask your Knox administrator to provide you with the public cert for the gateway and create your own truststore within your user home directory (see the keytool sketch after this list)
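As a rough sketch of that last option: assuming the administrator provides the gateway's public certificate as a PEM file named gateway-identity.pem (the file name and alias here are illustrative), the JDK keytool can build a truststore that matches the defaults used by the hive2.groovy script later in this article:

keytool -importcert -noprompt -alias gateway-identity -file gateway-identity.pem -keystore ~/gateway-client-trust.jks -storepass changeit

The ~/gateway-client-trust.jks path and the changeit password line up with the trustStore and trustStorePassword defaults in the script below.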

...

and save the following to another script in the bin directory called hive2.groovy:


/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.sql.DriverManager
import java.sql.SQLException
import org.apache.hadoop.gateway.shell.Credentials

gatewayHost = "localhost";
gatewayPort = 8443;
trustStore = System.getProperty('user.home') + "/gateway-client-trust.jks";
trustStorePassword = "changeit";
contextPath = "gateway/sandbox/hive";
sql = ""

if (args.length == 0) {
  // accept defaults
  System.out.println(String.format("\nDefault connection args: %s, %d, %s, %s, %s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath))
} else if (args[0] == "?" || args[0] == "help") {
  System.out.println("\nExpected arguments: {host, port, truststore, truststore-pass, context-path}\n")
  System.exit(0);
} else if (args.length == 5) {
  gatewayHost = args[0];
  gatewayPort = args[1].toInteger();
  trustStore = args[2];
  trustStorePassword = args[3];
  contextPath = args[4];
  System.out.println(String.format("\nProvided connection args: %s, %d, %s, %s, %s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath))
} else if (args.length > 0) {
  System.out.println("\nERROR: Expected arguments: NONE for defaults or {host, port, truststore, truststore-pass, context-path}\n")
  System.exit(1);
}

connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );

System.out.println("<======KnoxLine======>");
System.out.println("powered by Apache Knox");
System.out.println("");

credentials = new Credentials()
credentials.add("ClearInput", "Enter username: ", "user")
  .add("HiddenInput", "Enter pas" + "sword: ", "pass")
credentials.collect()

user = credentials.get("user").string()
pass = credentials.get("pass").string()

// Load Hive JDBC Driver
Class.forName( "org.apache.hive.jdbc.HiveDriver" );

// Configure JDBC connection
connection = DriverManager.getConnection( connectionString, user, pass );

while(true) {
  def sql = System.console().readLine 'knoxline> '
  if (!sql.equals("")) {
    System.out.println(sql)

    statement = connection.createStatement();
    try {
      if (statement.execute( sql )) {
        resultSet = statement.getResultSet()
        int colcount = resultSet.getMetaData().getColumnCount();
        row = 0
        header = "| "
        while ( resultSet.next() ) {
          line = "| "
          for (int i = 1; i <= colcount; i++) {
            colvalue = resultSet.getString( i )
            if (colvalue == null) colvalue = ""
            colsize = colvalue.length()
            headerSize = resultSet.getMetaData().getColumnLabel( i ).length()
            if (headerSize > colsize) colsize = headerSize
            if (row == 0) {
              header += resultSet.getMetaData().getColumnLabel( i ).center(colsize) + " | ";
            }
            line += colvalue.center(colsize) + " | ";
          }
          if (row == 0) {
            System.out.println("".padLeft(header.length()-1, "="))
            System.out.println(header);
            System.out.println("".padLeft(header.length()-1, "="))
          }
          System.out.println(line);
          row++
        }
        System.out.println("\nRows: " + row + "\n");
        resultSet.close();
      }
    }
    catch(SQLException e) {
      System.out.println("SQL Exception encountered... " + e.getMessage())
      // if the connection to the gateway was dropped, attempt to reconnect
      if (e.getMessage().contains("org.apache.thrift.transport.TTransportException")) {
        System.out.println("reconnecting... ")
        connection = DriverManager.getConnection( connectionString, user, pass );
      }
    }
    statement.close();
  }
}
connection.close();

Execute SQL Commands using KnoxLine

Enter the knoxline SQL client at the command line:

I will use the defaults for the arguments in the script:

Default connection args: localhost, 8443, /Users/larry/gateway-client-trust.jks, changeit, gateway/sandbox/hive

Depending on your deployment, you may need to override the above arguments on the command line shown below:

knoxshell-0.14.0 larry$ bin/knoxline.sh
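To point at a different deployment, the same five positional arguments described in the script can be passed on the command line, assuming your knoxline.sh wrapper forwards its arguments through to hive2.groovy (the gateway host name here is hypothetical):

knoxshell-0.14.0 larry$ bin/knoxline.sh knoxhost.example.com 8443 /Users/larry/gateway-client-trust.jks changeit gateway/sandbox/hive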

...

Load the data from the sample.log file in /tmp (copy the sample.log file from ./samples/hive/sample.log to the /tmp directory on your HiveServer2 host; see the scp sketch after the command below):

knoxline> LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs
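If sample.log is not already on the HiveServer2 host, one way to stage it there is a straightforward copy over ssh (the user and host below are hypothetical):

scp ./samples/hive/sample.log guest@hiveserver2.example.com:/tmp/sample.log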

...