Operating a Hive database and creating tables from Java

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.3.4</version>
</dependency>

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-auth</artifactId>
    <version>3.3.4</version>
</dependency>

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>3.1.3</version> <!-- Check whether a newer version is available -->
    <exclusions>
        <exclusion>
            <groupId>org.eclipse.jetty</groupId>
            <artifactId>jetty-runner</artifactId>
        </exclusion>
    </exclusions>
</dependency>
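
These dependencies pull in the Hive JDBC driver (org.apache.hive.jdbc.HiveDriver). The dbUrl used throughout the examples below follows the standard HiveServer2 JDBC form; the host, port, and database here are placeholders for your own environment:

// Placeholder HiveServer2 JDBC URL; adjust host, port, and database
String dbUrl = "jdbc:hive2://hiveserver2-host:10000/default";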

The following Kerberos login utility can be used to authenticate against secured services such as Hive and HDFS.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.HadoopKerberosName;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;

public class KerberosUtil {

    private static final Logger logger = LoggerFactory.getLogger(KerberosUtil.class);

    public static boolean loginKerberos(
            org.apache.hadoop.conf.Configuration config,
            String keytabPrincipal,
            File keytabFile,
            File krb5Conf,
            File coreSiteConfig,
            File hdfsSiteConfig) {

        config.addResource(new Path(coreSiteConfig.getPath()));
        config.addResource(new Path(hdfsSiteConfig.getPath()));

        // Kerberos: point the JVM at krb5.conf and record the principal
        // and keytab path in the Hadoop configuration
        System.setProperty("java.security.krb5.conf", krb5Conf.getPath());
        config.set("kerberos.principal", keytabPrincipal);
        config.set("keytab.file", keytabFile.getPath());

        UserGroupInformation.setConfiguration(config);
        try {
            UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabFile.getPath());
            logger.info("UserGroupInformation LoginUser: {}", UserGroupInformation.getLoginUser().getUserName());
            return true;
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
        return false;
    }

    // Call periodically from long-running services to renew the TGT before it expires
    public static void reLoginKerberos() throws IOException {
        UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
    }

    public static void reset(){
        UserGroupInformation.setLoginUser(null);
        HadoopKerberosName.setRules(null);
    }

}
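
A minimal usage sketch for KerberosUtil; every path and the principal below are placeholders, so substitute the values for your own cluster (Configuration is org.apache.hadoop.conf.Configuration):

public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Placeholder principal, keytab, and config file paths
    boolean ok = KerberosUtil.loginKerberos(
            conf,
            "hive/host.example.com@EXAMPLE.COM",
            new File("/etc/security/keytabs/hive.keytab"),
            new File("/etc/krb5.conf"),
            new File("/etc/hadoop/conf/core-site.xml"),
            new File("/etc/hadoop/conf/hdfs-site.xml"));
    System.out.println("kerberos auth result: " + ok);
    // Long-running services should call this periodically to renew the TGT
    KerberosUtil.reLoginKerberos();
}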

Connecting to Hive without authentication

public String createHiveTable(String[] fields, String dbUrl, DbSchema schema, String tableName, String location, String delimiter) {
    try {
        // Load the Hive JDBC driver
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Open a connection to Hive (empty user name and password on an unsecured cluster)
        Connection connection = DriverManager.getConnection(dbUrl, "", "");
        System.out.println("Connected to Hive");

        // Create a Statement object for executing SQL
        Statement stmt = connection.createStatement();

        // Check whether the table already exists
        String checkTableSql = "SHOW TABLES IN " + schema.getSchemaName() + " LIKE '" + tableName + "'";
        ResultSet resultSet = stmt.executeQuery(checkTableSql);
        boolean tableExists = resultSet.next();

        if (!tableExists) {
            // Build the column list; every field becomes a string column
            String sql = "";
            for (int i = 0; i < fields.length; i++) {
                if (!StringUtils.isEmpty(fields[i])) {
                    if (i != fields.length - 1) {
                        sql += " `" + fields[i] + "` string,\n";
                    } else {
                        sql += " `" + fields[i] + "` string";
                    }
                }
            }
            // Optional LOCATION clause for an external storage path
            String locate = "";
            if (!StringUtils.isEmpty(location)) {
                locate = "LOCATION\n'" + location + "'";
            }
            // The table does not exist, so create it
            String createTableSql = "CREATE TABLE `" + schema.getSchemaName() + "`.`" + tableName + "`(" +
                    sql +
                    ")\n" +
                    "PARTITIONED BY (\n" +
                    " `day` string)\n" +
                    "ROW FORMAT SERDE\n" +
                    " 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'\n" +
                    "WITH SERDEPROPERTIES (\n" +
                    " 'field.delim'='" + delimiter + "',\n" +
                    " 'serialization.format'='" + delimiter + "',\n" +
                    " 'serialization.null.format'='')\n" +
                    "STORED AS INPUTFORMAT\n" +
                    " 'org.apache.hadoop.mapred.TextInputFormat'\n" +
                    "OUTPUTFORMAT\n" +
                    " 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" +
                    locate;
            System.out.println(createTableSql);
            // Adjust the statement above to the table structure you need
            stmt.executeUpdate(createTableSql);
            System.out.println("Table created successfully");

            // Pre-create partitions: format each date as a yyyyMMdd day string
            LocalDate currentDate = LocalDate.now();
            DateTimeFormatter dayFormatter = DateTimeFormatter.ofPattern("yyyyMMdd");
            // Date range: from yesterday through seven days from now
            LocalDate startDate = currentDate.minus(Period.ofDays(1));
            LocalDate endDate = currentDate.plus(Period.ofDays(7));

            // Add one partition per day in the range, endDate inclusive
            for (LocalDate date = startDate; date.isBefore(endDate.plusDays(1)); date = date.plusDays(1)) {
                String dayString = date.format(dayFormatter);
                String createPartitionSql = "ALTER TABLE `" + schema.getSchemaName() + "`.`" + tableName +
                        "` ADD IF NOT EXISTS PARTITION(day='" + dayString + "')";
                stmt.executeUpdate(createPartitionSql);
            }

        } else {
            System.out.println(tableName + " already exists");
            // Release resources before the early return
            resultSet.close();
            stmt.close();
            connection.close();
            return "tableExist";
        }

        // Close the result set, statement, and connection
        resultSet.close();
        stmt.close();
        connection.close();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
        System.out.println("HiveDriver not found");
        return "failed";
    } catch (SQLException e) {
        e.printStackTrace();
        return "failed";
    }

    return "success";
}
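
A sketch of how the method might be called; the URL, field names, table name, location, and the DbSchema constructor are all illustrative assumptions, so adapt them to your own code:

// Illustrative arguments; adapt to your environment
String[] fields = {"id", "name", "age"};
String dbUrl = "jdbc:hive2://hiveserver2-host:10000/default";
DbSchema schema = new DbSchema("default"); // hypothetical constructor for your DbSchema class
String result = createHiveTable(fields, dbUrl, schema, "user_info", "/warehouse/demo/user_info", "\t");
System.out.println(result); // "success", "tableExist", or "failed"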

Connecting to Hive with Kerberos authentication

public String createHiveTable(String[] fields, String dbUrl, DbSchema schema, String tableName, String location, String delimiter) {
    // Authenticate with Kerberos before opening the JDBC connection
    File coreSite = new File("/dataload/collect/core-site.xml");
    File hdfsSite = new File("/dataload/collect/hdfs-site.xml");
    File keytab = new File("/dataload/collect/xxx.keytab");
    File krb5 = new File("/dataload/collect/krb5.conf");
    Configuration conf = new Configuration();
    boolean result = KerberosUtil.loginKerberos(conf, "[email protected]", keytab, krb5, coreSite, hdfsSite);
    System.out.println("kerberos auth result: " + result);

    try {
        // Load the Hive JDBC driver
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Open a connection to Hive; with Kerberos the user name and password
        // are ignored and the ticket obtained by loginKerberos is used instead
        Connection connection = DriverManager.getConnection(dbUrl, "", "");
        System.out.println("Connected to Hive");

        // Create a Statement object for executing SQL
        Statement stmt = connection.createStatement();

        // Check whether the table already exists
        String checkTableSql = "SHOW TABLES IN " + schema.getSchemaName() + " LIKE '" + tableName + "'";
        ResultSet resultSet = stmt.executeQuery(checkTableSql);
        boolean tableExists = resultSet.next();

        if (!tableExists) {
            // Build the column list; every field becomes a string column
            String sql = "";
            for (int i = 0; i < fields.length; i++) {
                if (!StringUtils.isEmpty(fields[i])) {
                    if (i != fields.length - 1) {
                        sql += " `" + fields[i] + "` string,\n";
                    } else {
                        sql += " `" + fields[i] + "` string";
                    }
                }
            }
            // Optional LOCATION clause for an external storage path
            String locate = "";
            if (!StringUtils.isEmpty(location)) {
                locate = "LOCATION\n'" + location + "'";
            }
            // The table does not exist, so create it
            String createTableSql = "CREATE TABLE `" + schema.getSchemaName() + "`.`" + tableName + "`(" +
                    sql +
                    ")\n" +
                    "PARTITIONED BY (\n" +
                    " `day` string)\n" +
                    "ROW FORMAT SERDE\n" +
                    " 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'\n" +
                    "WITH SERDEPROPERTIES (\n" +
                    " 'field.delim'='" + delimiter + "',\n" +
                    " 'serialization.format'='" + delimiter + "',\n" +
                    " 'serialization.null.format'='')\n" +
                    "STORED AS INPUTFORMAT\n" +
                    " 'org.apache.hadoop.mapred.TextInputFormat'\n" +
                    "OUTPUTFORMAT\n" +
                    " 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" +
                    locate;
            System.out.println(createTableSql);
            // Adjust the statement above to the table structure you need
            stmt.executeUpdate(createTableSql);
            System.out.println("Table created successfully");

            // Pre-create partitions: format each date as a yyyyMMdd day string
            LocalDate currentDate = LocalDate.now();
            DateTimeFormatter dayFormatter = DateTimeFormatter.ofPattern("yyyyMMdd");
            // Date range: from yesterday through seven days from now
            LocalDate startDate = currentDate.minus(Period.ofDays(1));
            LocalDate endDate = currentDate.plus(Period.ofDays(7));

            // Add one partition per day in the range, endDate inclusive
            for (LocalDate date = startDate; date.isBefore(endDate.plusDays(1)); date = date.plusDays(1)) {
                String dayString = date.format(dayFormatter);
                String createPartitionSql = "ALTER TABLE `" + schema.getSchemaName() + "`.`" + tableName +
                        "` ADD IF NOT EXISTS PARTITION(day='" + dayString + "')";
                stmt.executeUpdate(createPartitionSql);
            }

        } else {
            System.out.println(tableName + " already exists");
            // Release resources before the early return
            resultSet.close();
            stmt.close();
            connection.close();
            return "tableExist";
        }

        // Close the result set, statement, and connection
        resultSet.close();
        stmt.close();
        connection.close();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
        System.out.println("HiveDriver not found");
        return "failed";
    } catch (SQLException e) {
        e.printStackTrace();
        return "failed";
    }

    return "success";
}
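
One detail worth calling out: when HiveServer2 itself runs with Kerberos, the JDBC URL passed in as dbUrl must also carry the HiveServer2 service principal, otherwise the connection fails even after a successful keytab login. Host and realm below are placeholders:

// Kerberized HiveServer2 URL; _HOST is expanded to the server's host name
String dbUrl = "jdbc:hive2://hiveserver2-host:10000/default;principal=hive/_HOST@EXAMPLE.COM";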