...

NOTE: If you see errors related to SSL and PKIX, your truststore is not properly set up.
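
As a quick sanity check, here is a minimal Groovy sketch (assuming the default truststore path and password used by the sample script below) that loads the truststore and lists the certificates it contains. If the gateway's identity certificate is not listed, PKIX failures are expected.

import java.security.KeyStore

trustStorePath = System.getProperty('user.home') + "/gateway-client-trust.jks"
trustStorePassword = "changeit"

// Load the JKS truststore and list the certificates it holds
keyStore = KeyStore.getInstance("JKS")
new File(trustStorePath).withInputStream { input ->
  keyStore.load(input, trustStorePassword.toCharArray())
}
keyStore.aliases().each { alias ->
  println "${alias} -> ${keyStore.getCertificate(alias).type}"
}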

 

Add Hive Client Libraries

 

To add the client libraries that provide the HiveDriver and related classes, we will add another directory to the structure above.
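
Once the Hive client jars are in place, a quick way to confirm they are visible to knoxshell is to try resolving the driver class from a Groovy shell. This is only a sanity-check sketch; the class name is the standard Hive JDBC driver used by the sample script below.

// Attempt to load the Hive JDBC driver; failure means the client jars are not on the classpath yet
try {
  Class.forName("org.apache.hive.jdbc.HiveDriver")
  println "HiveDriver is available on the classpath"
} catch (ClassNotFoundException e) {
  println "HiveDriver not found - verify the Hive client libraries were added"
}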

...

/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.sql.DriverManager
import org.apache.hadoop.gateway.shell.Credentials

gatewayHost = "localhost";
gatewayPort = 8443;
trustStore = System.getProperty('user.home') + "/gateway-client-trust.jks";
trustStorePassword = "changeit";
contextPath = "gateway/sandbox/hive";

if (args.length == 0) {
  // accept defaults
} else if (args[0] == "?" || args[0] == "help") {
  System.out.println("\nExpected arguments: {host, port, truststore, truststore-pass, context-path}\n")
  System.exit(0);
} else if (args.length == 5) {
  gatewayHost = args[0];
  gatewayPort = args[1].toInteger();
  trustStore = args[2];
  trustStorePassword = args[3];
  contextPath = args[4];
} else if (args.length > 0) {
  System.out.println("\nERROR: Expected arguments: NONE for defaults or {host, port, truststore, truststore-pass, context-path}\n")
  System.exit(1);
}

connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );

credentials = new Credentials()
credentials.add("ClearInput", "Enter username: ", "user")
credentials.add("HiddenInput", "Enter pas" + "sword: ", "pass")
credentials.collect()

user = credentials.get("user").string()
pass = credentials.get("pass").string()

// Load Hive JDBC Driver
Class.forName( "org.apache.hive.jdbc.HiveDriver" );

// Configure JDBC connection
connection = DriverManager.getConnection( connectionString, user, pass );

while (true) {
  def sql = System.console().readLine 'knoxline> '

  // Exit cleanly on EOF (e.g. Ctrl-D)
  if (sql == null) break

  if (!sql.equals("")) {
    System.out.println(sql)

    statement = connection.createStatement();
    try {
      if (statement.execute( sql )) {
        resultSet = statement.getResultSet()
        int colcount = resultSet.getMetaData().getColumnCount();
        row = 0
        header = "| "
        while ( resultSet.next() ) {
          line = "| "
          for (int i = 1; i <= colcount; i++) {
            colvalue = resultSet.getString( i )
            if (colvalue == null) colvalue = ""
            colsize = colvalue.length()
            headerSize = resultSet.getMetaData().getColumnLabel( i ).length()
            if (headerSize > colsize) colsize = headerSize
            if (row == 0) {
              header += resultSet.getMetaData().getColumnLabel( i ).center(colsize) + " | ";
            }
            line += colvalue.center(colsize) + " | ";
          }
          if (row == 0) {
            System.out.println("".padLeft(header.length(), "="))
            System.out.println(header);
            System.out.println("".padLeft(header.length(), "="))
          }
          System.out.println(line);
          row++
        }
        System.out.println("\nRows: " + row + "\n");
        resultSet.close();
      }
    }
    catch(Exception e) {
      e.printStackTrace()
      connection = DriverManager.getConnection( connectionString, user, pass );
    }
    statement.close();
  }
}
connection.close();


Execute SQL Commands Using KnoxLine

...

 

Start the knoxline SQL client from the command line:

knoxshell-0.14.0 larry$ bin/knoxline.sh

Let's check for existing tables:

knoxline> show tables

 

 

Let's create a table to hold data that we will load from a file on the local disk of the cluster machine:

knoxline> CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '

Show the created table:

knoxline> show tables

Show the table description:

knoxline> desc logs

Load the data from the sample.log file in /tmp (copy the sample.log file from ./samples/hive/sample.log to the /tmp directory on your hiveserver2 host):

knoxline> LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs

Do a select from the table:

knoxline> select * from logs where column2='20:11:56' and column4='[TRACE]'

...