3737import org .apache .phoenix .query .ConfigurationFactory ;
3838import org .apache .phoenix .util .InstanceResolver ;
3939import org .apache .phoenix .util .PhoenixRuntime ;
40+ import org .slf4j .Logger ;
41+ import org .slf4j .LoggerFactory ;
4042
4143import java .io .File ;
4244import java .io .IOException ;
4648import java .util .ArrayList ;
4749import java .util .List ;
4850import java .util .Map ;
51+ import java .util .Properties ;
4952import java .util .UUID ;
5053
5154import static org .apache .hadoop .hbase .HConstants .HBASE_DIR ;
@@ -80,7 +83,7 @@ public class PhoenixEnvironment {
8083 static {
8184 try {
8285 // krb5 debug output is enabled below to diagnose KDC/login failures; disable for normal runs
83- // System.setProperty("sun.security.krb5.debug", "true");
86+ System .setProperty ("sun.security.krb5.debug" , "true" );
8487 LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME = InetAddress .getByName ("127.0.0.1" ).getCanonicalHostName ();
8588 String userName = System .getProperty ("user.name" );
8689 LOGIN_USER = userName != null ? userName : "securecluster" ;
@@ -100,6 +103,7 @@ public class PhoenixEnvironment {
100103 private int numCreatedUsers ;
101104
102105 private final String phoenixUrl ;
106+ private static final Logger logger = LoggerFactory .getLogger (PhoenixEnvironment .class );
103107
104108 private static Configuration conf () {
105109 Configuration configuration = HBaseConfiguration .create ();
@@ -206,49 +210,97 @@ public PhoenixEnvironment(final Configuration confIn, int numberOfUsers, boolean
206210 ensureIsEmptyDirectory (tempDir );
207211 ensureIsEmptyDirectory (keytabDir );
208212 keytab = new File (keytabDir , "test.keytab" );
213+
209214 // Start a MiniKDC
210- kdc = util .setupMiniKdc (keytab );
211- // Create a service principal and spnego principal in one keytab
212- // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
213- // use separate identies for HBase and HDFS results in a GSS initiate error. The quick
214- // solution is to just use a single "service" principal instead of "hbase" and "hdfs"
215- // (or "dn" and "nn") per usual.
216- kdc .createPrincipal (keytab , SPNEGO_PRINCIPAL , PQS_PRINCIPAL , SERVICE_PRINCIPAL );
217- // Start ZK by hand
215+ File kdcWorkDir = new File (new File (getTempDir ()), "kdc-" + System .currentTimeMillis ());
216+ ensureIsEmptyDirectory (kdcWorkDir );
217+
218+ Properties kdcConf = org .apache .hadoop .minikdc .MiniKdc .createConf ();
219+ kdcConf .setProperty (org .apache .hadoop .minikdc .MiniKdc .KDC_BIND_ADDRESS , "127.0.0.1" );
220+ kdcConf .setProperty ("kdc.tcp.port" , "0" );
221+ kdcConf .setProperty ("kdc.allow_udp" , "false" );
222+ kdcConf .setProperty ("kdc.encryption.types" , "aes128-cts-hmac-sha1-96" );
223+ kdcConf .setProperty ("kdc.fast.enabled" , "false" );
224+ kdcConf .setProperty ("kdc.preauth.required" , "true" );
225+ kdcConf .setProperty ("kdc.allowable.clockskew" , "300000" ); // 5m
226+ kdcConf .setProperty (org .apache .hadoop .minikdc .MiniKdc .DEBUG , "true" );
227+
228+ kdc = new org .apache .hadoop .minikdc .MiniKdc (kdcConf , kdcWorkDir );
229+ kdc .start ();
230+
231+ // Write krb5.conf that disables referrals/canonicalization
232+ File krb5File = new File (kdcWorkDir , "krb5.conf" );
233+ writeKrb5Conf (krb5File .toPath (), kdc .getRealm (), "127.0.0.1" , kdc .getPort ());
234+ System .setProperty ("java.security.krb5.conf" , krb5File .getAbsolutePath ());
235+ System .setProperty ("sun.security.krb5.allowUdp" , "false" );
236+ System .setProperty ("sun.security.krb5.disableReferrals" , "true" );
237+ System .setProperty ("java.net.preferIPv4Stack" , "true" );
238+ System .setProperty ("sun.security.krb5.debug" , "true" );
239+ System .clearProperty ("java.security.krb5.realm" ); // avoid env overrides
240+ System .clearProperty ("java.security.krb5.kdc" );
241+
242+ // Fresh keytab every run; create principals in one shot
243+ if (keytab .exists () && !keytab .delete ()) {
244+ throw new IOException ("Couldn't delete old keytab: " + keytab );
245+ }
246+ keytab .getParentFile ().mkdirs ();
247+
248+ // Use a conventional service principal to avoid canonicalization surprises
249+ final String SERVICE_PRINCIPAL_LOCAL = "hbase/localhost" ;
250+ final String SPNEGO_PRINCIPAL_LOCAL = "HTTP/localhost" ;
251+ final String PQS_PRINCIPAL_LOCAL = "phoenixqs/localhost" ;
252+
253+ kdc .createPrincipal (
254+ keytab ,
255+ SPNEGO_PRINCIPAL_LOCAL ,
256+ PQS_PRINCIPAL_LOCAL ,
257+ SERVICE_PRINCIPAL_LOCAL
258+ );
259+ // --- End explicit MiniKDC setup ---
260+
261+ // Start ZK by hand
218262 util .startMiniZKCluster ();
219263
220264 // Create a number of unprivileged users
221265 createUsers (numberOfUsers );
222266
223- // Set configuration for HBase
224- HBaseKerberosUtils .setPrincipalForTesting (SERVICE_PRINCIPAL + "@" + kdc .getRealm ());
267+ // HBase ↔ Kerberos wiring: set creds BEFORE setSecuredConfiguration
268+ final String servicePrincipal = "hbase/localhost@" + kdc .getRealm ();
269+
270+ conf .set ("hadoop.security.authentication" , "kerberos" );
271+ conf .set ("hbase.security.authentication" , "kerberos" );
272+
273+ conf .set ("hbase.master.keytab.file" , keytab .getAbsolutePath ());
274+ conf .set ("hbase.regionserver.keytab.file" , keytab .getAbsolutePath ());
275+ conf .set ("hbase.master.kerberos.principal" , servicePrincipal );
276+ conf .set ("hbase.regionserver.kerberos.principal" , servicePrincipal );
277+
278+ // Make HBase copy its secured defaults *after* we have principals/keytab in conf
279+ HBaseKerberosUtils .setPrincipalForTesting (servicePrincipal );
280+ HBaseKerberosUtils .setKeytabFileForTesting (keytab .getAbsolutePath ());
225281 HBaseKerberosUtils .setSecuredConfiguration (conf );
282+
283+ // HDFS side
226284 setHdfsSecuredConfiguration (conf );
285+
286+ // UGI must see kerberos
287+ UserGroupInformation .setConfiguration (conf );
288+
289+ // Preflight: prove the keytab/KDC works *before* we start HBase
290+ UserGroupInformation .loginUserFromKeytab (servicePrincipal , keytab .getAbsolutePath ());
291+ logger .info ("UGI login OK for {}" , servicePrincipal );
292+
227293 UserGroupInformation .setConfiguration (conf );
294+
228295 conf .setInt (HConstants .MASTER_PORT , 0 );
229296 conf .setInt (HConstants .MASTER_INFO_PORT , 0 );
230297 conf .setInt (HConstants .REGIONSERVER_PORT , 0 );
231298 conf .setInt (HConstants .REGIONSERVER_INFO_PORT , 0 );
232299
233- conf .set ("hadoop.security.authentication" , "kerberos" );
234- conf .set ("hbase.security.authentication" , "kerberos" );
235-
236- File serviceKeytab = getServiceKeytab (); // already generated by your MiniKdc setup
237- String servicePrincipal = HBaseKerberosUtils .getPrincipalForTesting ();
238- if (serviceKeytab == null || servicePrincipal == null ) {
239- throw new IllegalStateException ("MiniKdc did not provide service keytab/principal" );
240- }
241-
242- conf .set ("hbase.master.keytab.file" , serviceKeytab .getAbsolutePath ());
243- conf .set ("hbase.regionserver.keytab.file" , serviceKeytab .getAbsolutePath ());
244- conf .set ("hbase.master.kerberos.principal" , servicePrincipal );
245- conf .set ("hbase.regionserver.kerberos.principal" , servicePrincipal );
246-
247300 // Coprocessors, proxy user configs, etc. (whatever you already have)
248301 conf .setStrings (CoprocessorHost .MASTER_COPROCESSOR_CONF_KEY , AccessController .class .getName ());
249302 conf .setStrings (CoprocessorHost .REGIONSERVER_COPROCESSOR_CONF_KEY , AccessController .class .getName ());
250- conf .setStrings (CoprocessorHost .REGION_COPROCESSOR_CONF_KEY ,
251- AccessController .class .getName (), TokenProvider .class .getName ());
303+ conf .setStrings (CoprocessorHost .REGION_COPROCESSOR_CONF_KEY , AccessController .class .getName (), TokenProvider .class .getName ());
252304
253305 // Clear the cached singletons so we can inject our own.
254306 InstanceResolver .clearSingletons ();
@@ -280,10 +332,47 @@ public Configuration getConfiguration(Configuration confToClone) {
280332 phoenixUrl = PhoenixRuntime .JDBC_PROTOCOL + ":localhost:" + getZookeeperPort ();
281333 }
282334
335+ private static void writeKrb5Conf (java .nio .file .Path path , String realm , String host , int port ) throws Exception {
336+ String cfg =
337+ "[libdefaults]\n " +
338+ " default_realm = " + realm + "\n " +
339+ " dns_lookup_kdc = false\n " +
340+ " dns_lookup_realm = false\n " +
341+ " dns_canonicalize_hostname = false\n " +
342+ " rdns = false\n " +
343+ " udp_preference_limit = 1\n " +
344+ " default_tkt_enctypes = aes128-cts-hmac-sha1-96\n " +
345+ " default_tgs_enctypes = aes128-cts-hmac-sha1-96\n " +
346+ " permitted_enctypes = aes128-cts-hmac-sha1-96\n " +
347+ "\n " +
348+ "[realms]\n " +
349+ " " + realm + " = {\n " +
350+ " kdc = " + host + ":" + port + "\n " +
351+ " admin_server = " + host + ":" + port + "\n " +
352+ " }\n " ;
353+ java .nio .file .Files .createDirectories (path .getParent ());
354+ java .nio .file .Files .write (path , cfg .getBytes (java .nio .charset .StandardCharsets .UTF_8 ));
355+ }
356+
357+
283358 public int getZookeeperPort () {
284359 return util .getConfiguration ().getInt (HConstants .ZOOKEEPER_CLIENT_PORT , 2181 );
285360 }
286361
362+ private static void createPrincipalIfAbsent (MiniKdc kdc , File keytab , String principal ) throws Exception {
363+ try {
364+ kdc .createPrincipal (keytab , principal );
365+ } catch (org .apache .kerby .kerberos .kerb .KrbException e ) {
366+ String msg = e .getMessage ();
367+ if (msg != null && msg .contains ("already exists" )) {
368+ // Principal is already in the KDC; fine to proceed.
369+ // (Keys were generated when it was first created.)
370+ return ;
371+ }
372+ throw e ;
373+ }
374+ }
375+
287376 public void stop () throws Exception {
288377 // Remove our custom ConfigurationFactory for future tests
289378 InstanceResolver .clearSingletons ();
0 commit comments