001    /**
002     * Licensed to the Apache Software Foundation (ASF) under one
003     * or more contributor license agreements.  See the NOTICE file
004     * distributed with this work for additional information
005     * regarding copyright ownership.  The ASF licenses this file
006     * to you under the Apache License, Version 2.0 (the
007     * "License"); you may not use this file except in compliance
008     * with the License.  You may obtain a copy of the License at
009     *
010     *     http://www.apache.org/licenses/LICENSE-2.0
011     *
012     * Unless required by applicable law or agreed to in writing, software
013     * distributed under the License is distributed on an "AS IS" BASIS,
014     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015     * See the License for the specific language governing permissions and
016     * limitations under the License.
017     */
018    package org.apache.hadoop.hdfs.tools;
019    
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
039    
040    /**
041     * Tool for getting configuration information from a configuration file.
042     * 
043     * Adding more options:
044     * <ul>
045     * <li>
046     * If adding a simple option to get a value corresponding to a key in the 
047     * configuration, use regular {@link GetConf.CommandHandler}. 
048     * See {@link GetConf.Command#EXCLUDE_FILE} example.
049     * </li>
050     * <li>
 * If adding an option that does not return a value for a key, add
052     * a subclass of {@link GetConf.CommandHandler} and set it up in 
053     * {@link GetConf.Command}.
054     * 
055     * See {@link GetConf.Command#NAMENODE} for example.
056     * 
 * For the new option added, add a map entry with the corresponding
 * {@link GetConf.CommandHandler}.
059     * </ul>
060     */
061    public class GetConf extends Configured implements Tool {
062      private static final String DESCRIPTION = "hdfs getconf is utility for "
063          + "getting configuration information from the config file.\n";
064    
065      enum Command {
066        NAMENODE("-namenodes", "gets list of namenodes in the cluster."),
067        SECONDARY("-secondaryNameNodes", 
068            "gets list of secondary namenodes in the cluster."),
069        BACKUP("-backupNodes", "gets list of backup nodes in the cluster."),
070        INCLUDE_FILE("-includeFile",
071            "gets the include file path that defines the datanodes " +
072            "that can join the cluster."),
073        EXCLUDE_FILE("-excludeFile",
074            "gets the exclude file path that defines the datanodes " +
075            "that need to decommissioned."),
076        NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses"),
077        CONFKEY("-confKey [key]", "gets a specific key from the configuration");
078    
079        private static final Map<String, CommandHandler> map;
080        static  {
081          map = new HashMap<String, CommandHandler>();
082          map.put(NAMENODE.getName().toLowerCase(), 
083              new NameNodesCommandHandler());
084          map.put(SECONDARY.getName().toLowerCase(),
085              new SecondaryNameNodesCommandHandler());
086          map.put(BACKUP.getName().toLowerCase(), 
087              new BackupNodesCommandHandler());
088          map.put(INCLUDE_FILE.getName().toLowerCase(), 
089              new CommandHandler(DFSConfigKeys.DFS_HOSTS));
090          map.put(EXCLUDE_FILE.getName().toLowerCase(),
091              new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
092          map.put(NNRPCADDRESSES.getName().toLowerCase(),
093              new NNRpcAddressesCommandHandler());
094          map.put(CONFKEY.getName().toLowerCase(),
095              new PrintConfKeyCommandHandler());
096        }
097        
098        private final String cmd;
099        private final String description;
100    
101        Command(String cmd, String description) {
102          this.cmd = cmd;
103          this.description = description;
104        }
105    
106        public String getName() {
107          return cmd.split(" ")[0];
108        }
109        
110        public String getUsage() {
111          return cmd;
112        }
113        
114        public String getDescription() {
115          return description;
116        }
117        
118        public static CommandHandler getHandler(String cmd) {
119          return map.get(cmd.toLowerCase());
120        }
121      }
122      
123      static final String USAGE;
124      static {
125        HdfsConfiguration.init();
126        
127        /* Initialize USAGE based on Command values */
128        StringBuilder usage = new StringBuilder(DESCRIPTION);
129        usage.append("\nhadoop getconf \n");
130        for (Command cmd : Command.values()) {
131          usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription()
132              + "\n");
133        }
134        USAGE = usage.toString();
135      }
136      
137      /** 
138       * Handler to return value for key corresponding to the {@link Command}
139       */
140      static class CommandHandler {
141        String key; // Configuration key to lookup
142        
143        CommandHandler() {
144          this(null);
145        }
146        
147        CommandHandler(String key) {
148          this.key = key;
149        }
150    
151        final int doWork(GetConf tool, String[] args) {
152          try {
153            checkArgs(args);
154    
155            return doWorkInternal(tool, args);
156          } catch (Exception e) {
157            tool.printError(e.getMessage());
158          }
159          return -1;
160        }
161    
162        protected void checkArgs(String args[]) {
163          if (args.length > 0) {
164            throw new HadoopIllegalArgumentException(
165                "Did not expect argument: " + args[0]);
166          }
167        }
168    
169        
170        /** Method to be overridden by sub classes for specific behavior 
171         * @param args */
172        int doWorkInternal(GetConf tool, String[] args) throws Exception {
173    
174          String value = tool.getConf().getTrimmed(key);
175          if (value != null) {
176            tool.printOut(value);
177            return 0;
178          }
179          tool.printError("Configuration " + key + " is missing.");
180          return -1;
181        }
182      }
183      
184      /**
185       * Handler for {@link Command#NAMENODE}
186       */
187      static class NameNodesCommandHandler extends CommandHandler {
188        @Override
189        int doWorkInternal(GetConf tool, String []args) throws IOException {
190          tool.printMap(DFSUtil.getNNServiceRpcAddresses(tool.getConf()));
191          return 0;
192        }
193      }
194      
195      /**
196       * Handler for {@link Command#BACKUP}
197       */
198      static class BackupNodesCommandHandler extends CommandHandler {
199        @Override
200        public int doWorkInternal(GetConf tool, String []args) throws IOException {
201          tool.printMap(DFSUtil.getBackupNodeAddresses(tool.getConf()));
202          return 0;
203        }
204      }
205      
206      /**
207       * Handler for {@link Command#SECONDARY}
208       */
209      static class SecondaryNameNodesCommandHandler extends CommandHandler {
210        @Override
211        public int doWorkInternal(GetConf tool, String []args) throws IOException {
212          tool.printMap(DFSUtil.getSecondaryNameNodeAddresses(tool.getConf()));
213          return 0;
214        }
215      }
216      
217      /**
218       * Handler for {@link Command#NNRPCADDRESSES}
219       * If rpc addresses are defined in configuration, we return them. Otherwise, 
220       * return empty string.
221       */
222      static class NNRpcAddressesCommandHandler extends CommandHandler {
223        @Override
224        public int doWorkInternal(GetConf tool, String []args) throws IOException {
225          Configuration config = tool.getConf();
226          List<ConfiguredNNAddress> cnnlist = DFSUtil.flattenAddressMap(
227              DFSUtil.getNNServiceRpcAddresses(config));
228          if (!cnnlist.isEmpty()) {
229            for (ConfiguredNNAddress cnn : cnnlist) {
230              InetSocketAddress rpc = cnn.getAddress();
231              tool.printOut(rpc.getHostName()+":"+rpc.getPort());
232            }
233            return 0;
234          }
235          tool.printError("Did not get namenode service rpc addresses.");
236          return -1;
237        }
238      }
239      
  /**
   * Handler for {@link Command#CONFKEY}: prints the value of the single
   * configuration key supplied on the command line.
   */
  static class PrintConfKeyCommandHandler extends CommandHandler {
    // Requires exactly one argument: the configuration key to look up.
    @Override
    protected void checkArgs(String[] args) {
      if (args.length != 1) {
        throw new HadoopIllegalArgumentException(
            "usage: " + Command.CONFKEY.getUsage());
      }
    }

    @Override
    int doWorkInternal(GetConf tool, String[] args) throws Exception {
      // NOTE(review): stores the requested key into this handler's field
      // before delegating to the base-class lookup. Handlers are shared via
      // the static Command map, so this mutation is only safe because the
      // CLI executes a single command per process invocation.
      this.key = args[0];
      return super.doWorkInternal(tool, args);
    }
  }
255      
  private final PrintStream out; // Stream for printing command output
  private final PrintStream err; // Stream for printing error

  /** Creates the tool writing to {@code System.out} and {@code System.err}. */
  GetConf(Configuration conf) {
    this(conf, System.out, System.err);
  }

  /**
   * Creates the tool with explicit output and error streams.
   *
   * @param conf configuration to query
   * @param out stream that receives command output
   * @param err stream that receives error messages
   */
  GetConf(Configuration conf, PrintStream out, PrintStream err) {
    super(conf);
    this.out = out;
    this.err = err;
  }
268    
  /** Prints a message, followed by a newline, to the error stream. */
  void printError(String message) {
    err.println(message);
  }

  /** Prints a message, followed by a newline, to the output stream. */
  void printOut(String message) {
    out.println(message);
  }
276      
277      void printMap(Map<String, Map<String, InetSocketAddress>> map) {
278        StringBuilder buffer = new StringBuilder();
279    
280        List<ConfiguredNNAddress> cnns = DFSUtil.flattenAddressMap(map);
281        for (ConfiguredNNAddress cnn : cnns) {
282          InetSocketAddress address = cnn.getAddress();
283          if (buffer.length() > 0) {
284            buffer.append(" ");
285          }
286          buffer.append(address.getHostName());
287        }
288        printOut(buffer.toString());
289      }
290    
  /** Prints the full usage text to the error stream. */
  private void printUsage() {
    printError(USAGE);
  }
294    
295      /**
296       * Main method that runs the tool for given arguments.
297       * @param args arguments
298       * @return return status of the command
299       */
300      private int doWork(String[] args) {
301        if (args.length >= 1) {
302          CommandHandler handler = Command.getHandler(args[0]);
303          if (handler != null) {
304            return handler.doWork(this,
305                Arrays.copyOfRange(args, 1, args.length));
306          }
307        }
308        printUsage();
309        return -1;
310      }
311    
312      @Override
313      public int run(final String[] args) throws Exception {
314        try {
315          return UserGroupInformation.getCurrentUser().doAs(
316              new PrivilegedExceptionAction<Integer>() {
317                @Override
318                public Integer run() throws Exception {
319                  return doWork(args);
320                }
321              });
322        } catch (InterruptedException e) {
323          throw new IOException(e);
324        }
325      }
326    
327      public static void main(String[] args) throws Exception {
328        if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
329          System.exit(0);
330        }
331        
332        int res = ToolRunner.run(new GetConf(new HdfsConfiguration()), args);
333        System.exit(res);
334      }
335    }