/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.tools;

import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Tool for getting configuration information from a configuration file.
 *
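 * <p>Typical usage (the configuration key shown is illustrative):
 * <pre>
 * $ hdfs getconf -namenodes
 * $ hdfs getconf -confKey dfs.blocksize
 * </pre>
 *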
 * Adding more options:
 * <ul>
 * <li>
 * To add a simple option that prints the value corresponding to a key in
 * the configuration, use the regular {@link GetConf.CommandHandler}.
 * See {@link GetConf.Command#EXCLUDE_FILE} for an example.
 * </li>
 * <li>
 * To add an option that does not simply return the value for a key, add a
 * subclass of {@link GetConf.CommandHandler} and register it in
 * {@link GetConf.Command}.
 * See {@link GetConf.Command#NAMENODE} for an example.
 * </li>
 * </ul>
 * In both cases, add a map entry for the new option with its corresponding
 * {@link GetConf.CommandHandler}, as in the sketch below.
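 *
 * <p>For example, a hypothetical {@code -journalDir} option (the option name
 * and configuration key below are illustrative, not part of this tool) would
 * add:
 * <pre>
 * // in the Command enum:
 * JOURNAL_DIR("-journalDir", "gets the journal directory."),
 *
 * // in the static initializer:
 * map.put(JOURNAL_DIR.getName().toLowerCase(),
 *     new CommandHandler("dfs.example.journal.dir"));
 * </pre>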
 */
public class GetConf extends Configured implements Tool {
  private static final String DESCRIPTION = "hdfs getconf is a utility for "
      + "getting configuration information from the config file.\n";

  enum Command {
    NAMENODE("-namenodes", "gets list of namenodes in the cluster."),
    SECONDARY("-secondaryNameNodes",
        "gets list of secondary namenodes in the cluster."),
    BACKUP("-backupNodes", "gets list of backup nodes in the cluster."),
    INCLUDE_FILE("-includeFile",
        "gets the include file path that defines the datanodes " +
        "that can join the cluster."),
    EXCLUDE_FILE("-excludeFile",
        "gets the exclude file path that defines the datanodes " +
        "that need to be decommissioned."),
    NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses."),
    CONFKEY("-confKey [key]", "gets a specific key from the configuration.");

    private static Map<String, CommandHandler> map;
    static {
      map = new HashMap<String, CommandHandler>();
      map.put(NAMENODE.getName().toLowerCase(),
          new NameNodesCommandHandler());
      map.put(SECONDARY.getName().toLowerCase(),
          new SecondaryNameNodesCommandHandler());
      map.put(BACKUP.getName().toLowerCase(),
          new BackupNodesCommandHandler());
      map.put(INCLUDE_FILE.getName().toLowerCase(),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS));
      map.put(EXCLUDE_FILE.getName().toLowerCase(),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
      map.put(NNRPCADDRESSES.getName().toLowerCase(),
          new NNRpcAddressesCommandHandler());
      map.put(CONFKEY.getName().toLowerCase(),
          new PrintConfKeyCommandHandler());
    }

    private final String cmd;
    private final String description;

    Command(String cmd, String description) {
      this.cmd = cmd;
      this.description = description;
    }

    public String getName() {
      return cmd.split(" ")[0];
    }

    public String getUsage() {
      return cmd;
    }

    public String getDescription() {
      return description;
    }

    public static CommandHandler getHandler(String cmd) {
      return map.get(cmd.toLowerCase());
    }
  }

  static final String USAGE;
  static {
    HdfsConfiguration.init();

    /* Initialize USAGE based on Command values */
    StringBuilder usage = new StringBuilder(DESCRIPTION);
    usage.append("\nhdfs getconf \n");
    for (Command cmd : Command.values()) {
      usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription()
          + "\n");
    }
    USAGE = usage.toString();
  }

  /**
   * Handler that, by default, prints the value of the configuration key
   * corresponding to the {@link Command}.
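   *
   * <p>For example, a handler that prints the exclude file path can be
   * constructed as {@code new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE)},
   * mirroring the static initializer above.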
   */
  static class CommandHandler {
    String key; // Configuration key to lookup

    CommandHandler() {
      this(null);
    }

    CommandHandler(String key) {
      this.key = key;
    }

    final int doWork(GetConf tool, String[] args) {
      try {
        checkArgs(args);
        return doWorkInternal(tool, args);
      } catch (Exception e) {
        tool.printError(e.getMessage());
      }
      return -1;
    }

    protected void checkArgs(String[] args) {
      if (args.length > 0) {
        throw new HadoopIllegalArgumentException(
            "Did not expect argument: " + args[0]);
      }
    }

    /**
     * Method to be overridden by subclasses for command-specific behavior.
     * The default implementation prints the value of {@link #key} from the
     * configuration.
     * @param tool the tool instance used for configuration and output
     * @param args remaining command-line arguments
     */
    int doWorkInternal(GetConf tool, String[] args) throws Exception {
      String value = tool.getConf().getTrimmed(key);
      if (value != null) {
        tool.printOut(value);
        return 0;
      }
      tool.printError("Configuration " + key + " is missing.");
      return -1;
    }
  }

  /**
   * Handler for {@link Command#NAMENODE}
   */
  static class NameNodesCommandHandler extends CommandHandler {
    @Override
    int doWorkInternal(GetConf tool, String[] args) throws IOException {
      tool.printMap(DFSUtil.getNNServiceRpcAddresses(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#BACKUP}
   */
  static class BackupNodesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool, String[] args) throws IOException {
      tool.printMap(DFSUtil.getBackupNodeAddresses(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#SECONDARY}
   */
  static class SecondaryNameNodesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool, String[] args) throws IOException {
      tool.printMap(DFSUtil.getSecondaryNameNodeAddresses(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#NNRPCADDRESSES}.
   * If namenode RPC addresses are defined in the configuration, prints one
   * {@code host:port} pair per line; otherwise prints an error and returns
   * a non-zero status.
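   *
   * <p>Sample output (hostnames and port are illustrative):
   * <pre>
   * nn1.example.com:8020
   * nn2.example.com:8020
   * </pre>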
   */
  static class NNRpcAddressesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool, String[] args) throws IOException {
      Configuration config = tool.getConf();
      List<ConfiguredNNAddress> cnnlist = DFSUtil.flattenAddressMap(
          DFSUtil.getNNServiceRpcAddresses(config));
      if (!cnnlist.isEmpty()) {
        for (ConfiguredNNAddress cnn : cnnlist) {
          InetSocketAddress rpc = cnn.getAddress();
          tool.printOut(rpc.getHostName() + ":" + rpc.getPort());
        }
        return 0;
      }
      tool.printError("Did not get namenode service rpc addresses.");
      return -1;
    }
  }

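  /**
   * Handler for {@link Command#CONFKEY}
   */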
  static class PrintConfKeyCommandHandler extends CommandHandler {
    @Override
    protected void checkArgs(String[] args) {
      if (args.length != 1) {
        throw new HadoopIllegalArgumentException(
            "usage: " + Command.CONFKEY.getUsage());
      }
    }

    @Override
    int doWorkInternal(GetConf tool, String[] args) throws Exception {
      this.key = args[0];
      return super.doWorkInternal(tool, args);
    }
  }

  private final PrintStream out; // Stream for printing command output
  private final PrintStream err; // Stream for printing error

  GetConf(Configuration conf) {
    this(conf, System.out, System.err);
  }

  GetConf(Configuration conf, PrintStream out, PrintStream err) {
    super(conf);
    this.out = out;
    this.err = err;
  }

  void printError(String message) {
    err.println(message);
  }

  void printOut(String message) {
    out.println(message);
  }

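  /**
   * Prints the hostnames of the given configured namenode addresses,
   * space-separated on a single line.
   */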
  void printMap(Map<String, Map<String, InetSocketAddress>> map) {
    StringBuilder buffer = new StringBuilder();

    List<ConfiguredNNAddress> cnns = DFSUtil.flattenAddressMap(map);
    for (ConfiguredNNAddress cnn : cnns) {
      InetSocketAddress address = cnn.getAddress();
      if (buffer.length() > 0) {
        buffer.append(" ");
      }
      buffer.append(address.getHostName());
    }
    printOut(buffer.toString());
  }

  private void printUsage() {
    printError(USAGE);
  }

  /**
   * Runs the handler for the command specified by the given arguments.
   * @param args arguments, beginning with the command name
   * @return exit status of the command
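   *
   * <p>For example, {@code doWork(new String[] {"-confKey", "dfs.blocksize"})}
   * dispatches to {@link PrintConfKeyCommandHandler} with the single
   * remaining argument {@code dfs.blocksize} (an illustrative key).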
   */
  private int doWork(String[] args) {
    if (args.length >= 1) {
      CommandHandler handler = Command.getHandler(args[0]);
      if (handler != null) {
        return handler.doWork(this,
            Arrays.copyOfRange(args, 1, args.length));
      }
    }
    printUsage();
    return -1;
  }

  @Override
  public int run(final String[] args) throws Exception {
    try {
      return UserGroupInformation.getCurrentUser().doAs(
          new PrivilegedExceptionAction<Integer>() {
            @Override
            public Integer run() throws Exception {
              return doWork(args);
            }
          });
    } catch (InterruptedException e) {
      throw new IOException(e);
    }
  }

  public static void main(String[] args) throws Exception {
    if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
      System.exit(0);
    }

    int res = ToolRunner.run(new GetConf(new HdfsConfiguration()), args);
    System.exit(res);
  }
}