/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.provider.example;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import org.apache.hadoop.hbase.security.token.TokenProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

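/**
 * Test of the example "Shade" SASL authentication provider, a simple username/password scheme
 * layered on top of a Kerberos-secured mini cluster. The cluster is started with the Shade
 * client/server providers registered via configuration and a password file written to the test
 * filesystem; the tests then verify that a client presenting the correct password can read data
 * while a client presenting a wrong password is rejected.
 */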
@Category({ MediumTests.class, SecurityTests.class })
public class TestShadeSaslAuthenticationProvider {
  private static final Logger LOG =
    LoggerFactory.getLogger(TestShadeSaslAuthenticationProvider.class);

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestShadeSaslAuthenticationProvider.class);

  private static final char[] USER1_PASSWORD = "foobarbaz".toCharArray();

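  /**
   * Secures the test configuration with Kerberos, starts the mini ZooKeeper and DFS clusters,
   * writes the Shade password file for the given users, and returns a (not yet started)
   * single-node {@link LocalHBaseCluster}.
   */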
  static LocalHBaseCluster createCluster(HBaseTestingUtility util, File keytabFile, MiniKdc kdc,
    Map<String, char[]> userDatabase) throws Exception {
    String servicePrincipal = "hbase/localhost";
    String spnegoPrincipal = "HTTP/localhost";
    kdc.createPrincipal(keytabFile, servicePrincipal);
    util.startMiniZKCluster();

    HBaseKerberosUtils.setSecuredConfiguration(util.getConfiguration(),
      servicePrincipal + "@" + kdc.getRealm(), spnegoPrincipal + "@" + kdc.getRealm());
    HBaseKerberosUtils.setSSLConfiguration(util, TestShadeSaslAuthenticationProvider.class);

    util.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
      TokenProvider.class.getName());
    util.startMiniDFSCluster(1);
    Path testDir = util.getDataTestDirOnTestFS("TestShadeSaslAuthenticationProvider");
    USER_DATABASE_FILE = new Path(testDir, "user-db.txt");

    createUserDBFile(USER_DATABASE_FILE.getFileSystem(CONF), USER_DATABASE_FILE, userDatabase);
    CONF.set(ShadeSaslServerAuthenticationProvider.PASSWORD_FILE_KEY,
      USER_DATABASE_FILE.toString());

    Path rootdir = new Path(testDir, "hbase-root");
    CommonFSUtils.setRootDir(CONF, rootdir);
    LocalHBaseCluster cluster = new LocalHBaseCluster(CONF, 1);
    return cluster;
  }

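  /**
   * Writes the user database consumed by {@link ShadeSaslServerAuthenticationProvider}: one
   * user, separator, and password entry per line, replacing any existing file.
   */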
  static void createUserDBFile(FileSystem fs, Path p, Map<String, char[]> userDatabase)
    throws IOException {
    if (fs.exists(p)) {
      fs.delete(p, true);
    }
    try (FSDataOutputStream out = fs.create(p); BufferedWriter writer =
      new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
      for (Entry<String, char[]> e : userDatabase.entrySet()) {
        writer.write(e.getKey());
        writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);
        writer.write(e.getValue());
        writer.newLine();
      }
    }
  }

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final Configuration CONF = UTIL.getConfiguration();
  private static LocalHBaseCluster CLUSTER;
  private static File KEYTAB_FILE;
  private static Path USER_DATABASE_FILE;

  @BeforeClass
  public static void setupCluster() throws Exception {
    KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
    final MiniKdc kdc = UTIL.setupMiniKdc(KEYTAB_FILE);

    // Register our test implementations via configuration rather than through service loader
    // entries, which might inadvertently get them loaded on a real cluster.
    CONF.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY,
      ShadeSaslClientAuthenticationProvider.class.getName());
    CONF.setStrings(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY,
      ShadeSaslServerAuthenticationProvider.class.getName());
    CONF.set(SaslClientAuthenticationProviders.SELECTOR_KEY,
      ShadeProviderSelector.class.getName());

    CLUSTER =
      createCluster(UTIL, KEYTAB_FILE, kdc, Collections.singletonMap("user1", USER1_PASSWORD));
    CLUSTER.startup();
  }

  @AfterClass
  public static void teardownCluster() throws Exception {
    if (CLUSTER != null) {
      CLUSTER.shutdown();
      CLUSTER = null;
    }
    UTIL.shutdownMiniZKCluster();
  }

  @Rule
  public TestName name = new TestName();
  TableName tableName;
  String clusterId;

  @Before
  public void createTable() throws Exception {
    tableName = TableName.valueOf(name.getMethodName());

    // Create a table and write a record as the service user (hbase)
    UserGroupInformation serviceUgi = UserGroupInformation
      .loginUserFromKeytabAndReturnUGI("hbase/localhost", KEYTAB_FILE.getAbsolutePath());
    clusterId = serviceUgi.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(CONF);
          Admin admin = conn.getAdmin()) {
          admin.createTable(TableDescriptorBuilder.newBuilder(tableName)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build());

          UTIL.waitTableAvailable(tableName);

          try (Table t = conn.getTable(tableName)) {
            Put p = new Put(Bytes.toBytes("r1"));
            p.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("q1"), Bytes.toBytes("1"));
            t.put(p);
          }

          return admin.getClusterMetrics().getClusterId();
        }
      }
    });

    assertNotNull(clusterId);
  }

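  /**
   * Obtains a Shade token for "user1" with the correct password and verifies that the record
   * written by the service user in {@link #createTable()} can be read back.
   */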
  @Test
  public void testPositiveAuthentication() throws Exception {
    final Configuration clientConf = new Configuration(CONF);
    try (Connection conn1 = ConnectionFactory.createConnection(clientConf)) {
      UserGroupInformation user1 =
        UserGroupInformation.createUserForTesting("user1", new String[0]);
      user1.addToken(ShadeClientTokenUtil.obtainToken(conn1, "user1", USER1_PASSWORD));
      user1.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
            try (Table t = conn.getTable(tableName)) {
              Result r = t.get(new Get(Bytes.toBytes("r1")));
              assertNotNull(r);
              assertFalse("Should have read a non-empty Result", r.isEmpty());
              final Cell cell = r.getColumnLatestCell(Bytes.toBytes("f1"), Bytes.toBytes("q1"));
              assertTrue("Unexpected value", CellUtil.matchingValue(cell, Bytes.toBytes("1")));

              return null;
            }
          }
        }
      });
    }
  }

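  /**
   * Obtains a Shade token for "user1" with an incorrect password and verifies that both the
   * Master and RegionServer RPCs fail with the expected exception for each client registry.
   */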
  @Test
  public void testNegativeAuthentication() throws Exception {
    List<Pair<String, Class<? extends Exception>>> params = new ArrayList<>();
    // ZK based connection will fail on the master RPC
    params.add(new Pair<String, Class<? extends Exception>>(
      // ZKConnectionRegistry is package-private
      HConstants.ZK_CONNECTION_REGISTRY_CLASS, RetriesExhaustedException.class));

    params.forEach((pair) -> {
      LOG.info("Running negative authentication test for client registry {}, expecting {}",
        pair.getFirst(), pair.getSecond().getName());
      // Configure a client with a small retry count and the given connection registry, then
      // obtain a token using a wrong password.
      final Configuration clientConf = new Configuration(CONF);
      clientConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
      clientConf.set(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, pair.getFirst());
      try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
        UserGroupInformation user1 =
          UserGroupInformation.createUserForTesting("user1", new String[0]);
        user1.addToken(
          ShadeClientTokenUtil.obtainToken(conn, "user1", "not a real password".toCharArray()));

        LOG.info("Executing request to HBase Master which should fail");
        user1.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            try (Connection conn = ConnectionFactory.createConnection(clientConf)) {
              conn.getAdmin().listTableDescriptors();
              fail("Should not successfully authenticate with HBase");
            } catch (Exception e) {
              LOG.info("Caught exception in negative Master connectivity test", e);
              assertEquals("Found unexpected exception", pair.getSecond(), e.getClass());
            }
            return null;
          }
        });

        LOG.info("Executing request to HBase RegionServer which should fail");
        user1.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            try (Connection conn = ConnectionFactory.createConnection(clientConf);
              Table t = conn.getTable(tableName)) {
              t.get(new Get(Bytes.toBytes("r1")));
              fail("Should not successfully authenticate with HBase");
            } catch (Exception e) {
              LOG.info("Caught exception in negative RegionServer connectivity test", e);
              assertEquals("Found unexpected exception", pair.getSecond(), e.getClass());
            }
            return null;
          }
        });
      } catch (InterruptedException e) {
        LOG.error("Caught interrupted exception", e);
        Thread.currentThread().interrupt();
        return;
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    });
  }
}