/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.tools;

import java.io.PrintStream;
import java.util.Arrays;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ha.HAAdmin;
import org.apache.hadoop.ha.HAServiceTarget;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.util.ToolRunner;

/**
 * Class to extend HAAdmin to do a little bit of HDFS-specific configuration.
 */
public class DFSHAAdmin extends HAAdmin {

  private static final Log LOG = LogFactory.getLog(DFSHAAdmin.class);

  private String nameserviceId;

  protected void setErrOut(PrintStream errOut) {
    this.errOut = errOut;
  }

  @Override
  public void setConf(Configuration conf) {
    if (conf != null) {
      conf = addSecurityConfiguration(conf);
    }
    super.setConf(conf);
  }

  /**
   * Add the requisite security principal settings to the given Configuration,
   * returning a copy.
   * @param conf the original config
   * @return a copy with the security settings added
   */
  public static Configuration addSecurityConfiguration(Configuration conf) {
    // Make a copy so we don't mutate it. Also use an HdfsConfiguration to
    // force loading of hdfs-site.xml.
    conf = new HdfsConfiguration(conf);
    String nameNodePrincipal = conf.get(
        DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "");
    if (LOG.isDebugEnabled()) {
      LOG.debug("Using NN principal: " + nameNodePrincipal);
    }

    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY,
        nameNodePrincipal);
    return conf;
  }

  /**
   * Try to map the given namenode ID to its service address.
   */
  @Override
  protected HAServiceTarget resolveTarget(String nnId) {
    HdfsConfiguration conf = (HdfsConfiguration)getConf();
    return new NNHAServiceTarget(conf, nameserviceId, nnId);
  }

  @Override
  protected String getUsageString() {
    return "Usage: DFSHAAdmin [-ns <nameserviceId>]";
  }

  @Override
  protected int runCmd(String[] argv) throws Exception {
    if (argv.length < 1) {
      printUsage(errOut);
      return -1;
    }

    int i = 0;
    String cmd = argv[i++];

    if ("-ns".equals(cmd)) {
      if (i == argv.length) {
        errOut.println("Missing nameservice ID");
        printUsage(errOut);
        return -1;
      }
      nameserviceId = argv[i++];
      if (i >= argv.length) {
        errOut.println("Missing command");
        printUsage(errOut);
        return -1;
      }
      argv = Arrays.copyOfRange(argv, i, argv.length);
    }

    return super.runCmd(argv);
  }

  public static void main(String[] argv) throws Exception {
    int res = ToolRunner.run(new DFSHAAdmin(), argv);
    System.exit(res);
  }
}
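
// Usage sketch (not part of the original class). The nameservice ID
// "mycluster" and namenode ID "nn1" below are hypothetical examples, and the
// -getServiceState subcommand is inherited from HAAdmin, so the exact set of
// available subcommands depends on the Hadoop version in use.
//
// From the shell, via the hdfs wrapper script:
//
//   hdfs haadmin -ns mycluster -getServiceState nn1
//
// Programmatically, mirroring main():
//
//   int rc = ToolRunner.run(new DFSHAAdmin(),
//       new String[] { "-ns", "mycluster", "-getServiceState", "nn1" });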