001 /** 002 * Licensed to the Apache Software Foundation (ASF) under one 003 * or more contributor license agreements. See the NOTICE file 004 * distributed with this work for additional information 005 * regarding copyright ownership. The ASF licenses this file 006 * to you under the Apache License, Version 2.0 (the 007 * "License"); you may not use this file except in compliance 008 * with the License. You may obtain a copy of the License at 009 * 010 * http://www.apache.org/licenses/LICENSE-2.0 011 * 012 * Unless required by applicable law or agreed to in writing, software 013 * distributed under the License is distributed on an "AS IS" BASIS, 014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 015 * See the License for the specific language governing permissions and 016 * limitations under the License. 017 */ 018 019 package org.apache.hadoop.hdfs; 020 021 import java.io.FileInputStream; 022 import java.io.IOException; 023 import java.net.HttpURLConnection; 024 import java.net.InetSocketAddress; 025 import java.net.URI; 026 import java.net.URL; 027 import java.security.KeyStore; 028 import java.security.cert.X509Certificate; 029 030 import javax.net.ssl.HostnameVerifier; 031 import javax.net.ssl.HttpsURLConnection; 032 import javax.net.ssl.KeyManager; 033 import javax.net.ssl.KeyManagerFactory; 034 import javax.net.ssl.SSLContext; 035 import javax.net.ssl.SSLSession; 036 import javax.net.ssl.TrustManager; 037 import javax.net.ssl.TrustManagerFactory; 038 import javax.net.ssl.X509TrustManager; 039 040 import org.apache.hadoop.classification.InterfaceAudience; 041 import org.apache.hadoop.classification.InterfaceStability; 042 import org.apache.hadoop.conf.Configuration; 043 import org.apache.hadoop.hdfs.web.URLUtils; 044 import org.apache.hadoop.util.Time; 045 046 /** 047 * An implementation of a protocol for accessing filesystems over HTTPS. 
The
 * following implementation provides a limited, read-only interface to a
 * filesystem over HTTPS.
 *
 * @see org.apache.hadoop.hdfs.server.namenode.ListPathsServlet
 * @see org.apache.hadoop.hdfs.server.namenode.FileDataServlet
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class HsftpFileSystem extends HftpFileSystem {

  /** Milliseconds in one day, used for certificate-expiration arithmetic. */
  private static final long MILLISECONDS_PER_DAY = 1000L * 60 * 60 * 24;

  /**
   * Number of days ahead of client-certificate expiration at which a warning
   * is logged. Reset to zero after the first connection so the check is
   * performed at most once per filesystem instance.
   */
  private volatile int expWarnDays = 0;

  /**
   * Return the protocol scheme for the FileSystem.
   * <p/>
   *
   * @return <code>hsftp</code>
   */
  @Override
  public String getScheme() {
    return "hsftp";
  }

  /**
   * Return the underlying protocol that is used to talk to the namenode.
   *
   * @return <code>https</code>
   */
  @Override
  protected String getUnderlyingProtocol() {
    return "https";
  }

  /**
   * Initialize the filesystem: install the process-wide SSL socket factory
   * from the client SSL configuration, then read the expiration-warning
   * window (default 30 days).
   *
   * @param name URI naming this filesystem instance
   * @param conf configuration to use
   * @throws IOException if SSL resources cannot be set up
   */
  @Override
  public void initialize(URI name, Configuration conf) throws IOException {
    super.initialize(name, conf);
    setupSsl(conf);
    expWarnDays = conf.getInt("ssl.expiration.warn.days", 30);
  }

  /**
   * Set up SSL resources: build an {@link SSLContext} from the keystore and
   * truststore named in the SSL client configuration resource, and install
   * its socket factory as the JVM-wide default for HTTPS connections.
   *
   * @param conf configuration naming the SSL client resource file
   * @throws IOException if a store cannot be loaded or the context cannot be
   *           initialized (the original cause is always preserved)
   */
  private static void setupSsl(Configuration conf) throws IOException {
    Configuration sslConf = new HdfsConfiguration(false);
    sslConf.addResource(conf.get(
        DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
        DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
    FileInputStream fis = null;
    try {
      // Use "TLS" rather than the obsolete "SSL": SSLv3 is subject to
      // protocol attacks (e.g. POODLE), and a TLS context still negotiates
      // the best protocol version both endpoints support.
      SSLContext sc = SSLContext.getInstance("TLS");
      KeyManager[] kms = null;
      TrustManager[] tms = null;
      if (sslConf.get("ssl.client.keystore.location") != null) {
        // initialize default key manager with keystore file and pass
        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
        KeyStore ks = KeyStore.getInstance(sslConf.get(
            "ssl.client.keystore.type", "JKS"));
        char[] ksPass = sslConf.get("ssl.client.keystore.password", "changeit")
            .toCharArray();
        fis = new FileInputStream(sslConf.get("ssl.client.keystore.location",
            "keystore.jks"));
        ks.load(fis, ksPass);
        kmf.init(ks, sslConf.get("ssl.client.keystore.keypassword", "changeit")
            .toCharArray());
        kms = kmf.getKeyManagers();
        fis.close();
        fis = null;
      }
      // initialize default trust manager with truststore file and pass
      if (sslConf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
        // bypass trustmanager validation entirely (accepts any server cert)
        tms = new DummyTrustManager[] { new DummyTrustManager() };
      } else {
        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
        KeyStore ts = KeyStore.getInstance(sslConf.get(
            "ssl.client.truststore.type", "JKS"));
        char[] tsPass = sslConf.get("ssl.client.truststore.password",
            "changeit").toCharArray();
        fis = new FileInputStream(sslConf.get("ssl.client.truststore.location",
            "truststore.jks"));
        ts.load(fis, tsPass);
        tmf.init(ts);
        tms = tmf.getTrustManagers();
      }
      sc.init(kms, tms, new java.security.SecureRandom());
      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
    } catch (Exception e) {
      // The store/SSL APIs throw many checked exception types; all are
      // fatal here, so wrap them while preserving the cause.
      throw new IOException("Could not initialize SSLContext", e);
    } finally {
      if (fis != null) {
        try {
          fis.close();
        } catch (IOException ignored) {
          // best-effort close: do not mask an exception already in flight
        }
      }
    }
  }

  /**
   * @return the configured namenode HTTPS port, or the HDFS default.
   */
  @Override
  protected int getDefaultPort() {
    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
        DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
  }

  /**
   * Open an HTTPS GET connection to the namenode for the given path/query,
   * bypassing hostname verification. On the first connection only, log a
   * warning for any local client certificate expiring within the configured
   * window.
   *
   * @param path servlet path to request
   * @param query query string (a delegation-token parameter is appended)
   * @return the opened, connected HTTP connection
   * @throws IOException if the connection cannot be established
   */
  @Override
  protected HttpURLConnection openConnection(String path, String query)
      throws IOException {
    query = addDelegationTokenParam(query);
    final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(),
        nnUri.getPort(), path + '?' + query);
    HttpsURLConnection conn = (HttpsURLConnection) URLUtils.openConnection(url);
    // bypass hostname verification
    conn.setHostnameVerifier(new DummyHostnameVerifier());
    conn.setRequestMethod("GET");
    conn.connect();

    // check cert expiration date
    final int warnDays = expWarnDays;
    if (warnDays > 0) { // make sure only check once
      expWarnDays = 0;
      long expTimeThreshold = warnDays * MILLISECONDS_PER_DAY + Time.now();
      X509Certificate[] clientCerts = (X509Certificate[]) conn
          .getLocalCertificates();
      if (clientCerts != null) {
        for (X509Certificate cert : clientCerts) {
          long expTime = cert.getNotAfter().getTime();
          if (expTime < expTimeThreshold) {
            int dayOffSet =
                (int) ((expTime - Time.now()) / MILLISECONDS_PER_DAY);
            // chained appends instead of string concatenation inside
            // StringBuilder#append; message text unchanged
            StringBuilder sb = new StringBuilder();
            sb.append("\n Client certificate ")
                .append(cert.getSubjectX500Principal().getName())
                .append(" have ").append(dayOffSet)
                .append(" days to expire");
            LOG.warn(sb.toString());
          }
        }
      }
    }
    // conn is already an HttpURLConnection; no cast needed
    return conn;
  }

  /**
   * Dummy hostname verifier that is used to bypass hostname checking
   */
  protected static class DummyHostnameVerifier implements HostnameVerifier {
    @Override
    public boolean verify(String hostname, SSLSession session) {
      return true;
    }
  }

  /**
   * Dummy trustmanager that is used to trust all server certificates
   */
  protected static class DummyTrustManager implements X509TrustManager {
    @Override
    public void checkClientTrusted(X509Certificate[] chain, String authType) {
      // intentionally trust every client certificate
    }

    @Override
    public void checkServerTrusted(X509Certificate[] chain, String authType) {
      // intentionally trust every server certificate
    }

    @Override
    public X509Certificate[] getAcceptedIssuers() {
      // The X509TrustManager contract requires a non-null (possibly empty)
      // array; returning null risks NullPointerExceptions in callers.
      return new X509Certificate[0];
    }
  }

}