001/**
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.hdfs.tools;
019
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
039
040/**
041 * Tool for getting configuration information from a configuration file.
042 * 
043 * Adding more options:
044 * <ul>
045 * <li>
046 * If adding a simple option to get a value corresponding to a key in the 
047 * configuration, use regular {@link GetConf.CommandHandler}. 
048 * See {@link GetConf.Command#EXCLUDE_FILE} example.
049 * </li>
050 * <li>
 * If adding an option that does not return a value for a key, add
052 * a subclass of {@link GetConf.CommandHandler} and set it up in 
053 * {@link GetConf.Command}.
054 * 
055 * See {@link GetConf.Command#NAMENODE} for example.
056 * 
 * For the newly added option, add a map entry with the corresponding
 * {@link GetConf.CommandHandler}.
059 * </ul>
060 */
061public class GetConf extends Configured implements Tool {
  /** Header text prepended to the USAGE message built in the static block below. */
  private static final String DESCRIPTION = "hdfs getconf is utility for "
      + "getting configuration information from the config file.\n";
064
065  enum Command {
066    NAMENODE("-namenodes", "gets list of namenodes in the cluster."),
067    SECONDARY("-secondaryNameNodes", 
068        "gets list of secondary namenodes in the cluster."),
069    BACKUP("-backupNodes", "gets list of backup nodes in the cluster."),
070    INCLUDE_FILE("-includeFile",
071        "gets the include file path that defines the datanodes " +
072        "that can join the cluster."),
073    EXCLUDE_FILE("-excludeFile",
074        "gets the exclude file path that defines the datanodes " +
075        "that need to decommissioned."),
076    NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses"),
077    CONFKEY("-confKey [key]", "gets a specific key from the configuration");
078
079    private static final Map<String, CommandHandler> map;
080    static  {
081      map = new HashMap<String, CommandHandler>();
082      map.put(NAMENODE.getName().toLowerCase(), 
083          new NameNodesCommandHandler());
084      map.put(SECONDARY.getName().toLowerCase(),
085          new SecondaryNameNodesCommandHandler());
086      map.put(BACKUP.getName().toLowerCase(), 
087          new BackupNodesCommandHandler());
088      map.put(INCLUDE_FILE.getName().toLowerCase(), 
089          new CommandHandler(DFSConfigKeys.DFS_HOSTS));
090      map.put(EXCLUDE_FILE.getName().toLowerCase(),
091          new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
092      map.put(NNRPCADDRESSES.getName().toLowerCase(),
093          new NNRpcAddressesCommandHandler());
094      map.put(CONFKEY.getName().toLowerCase(),
095          new PrintConfKeyCommandHandler());
096    }
097    
098    private final String cmd;
099    private final String description;
100
101    Command(String cmd, String description) {
102      this.cmd = cmd;
103      this.description = description;
104    }
105
106    public String getName() {
107      return cmd.split(" ")[0];
108    }
109    
110    public String getUsage() {
111      return cmd;
112    }
113    
114    public String getDescription() {
115      return description;
116    }
117    
118    public static CommandHandler getHandler(String cmd) {
119      return map.get(cmd.toLowerCase());
120    }
121  }
122  
123  static final String USAGE;
124  static {
125    HdfsConfiguration.init();
126    
127    /* Initialize USAGE based on Command values */
128    StringBuilder usage = new StringBuilder(DESCRIPTION);
129    usage.append("\nhadoop getconf \n");
130    for (Command cmd : Command.values()) {
131      usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription()
132          + "\n");
133    }
134    USAGE = usage.toString();
135  }
136  
137  /** 
138   * Handler to return value for key corresponding to the {@link Command}
139   */
140  static class CommandHandler {
141    String key; // Configuration key to lookup
142    
143    CommandHandler() {
144      this(null);
145    }
146    
147    CommandHandler(String key) {
148      this.key = key;
149    }
150
151    final int doWork(GetConf tool, String[] args) {
152      try {
153        checkArgs(args);
154
155        return doWorkInternal(tool, args);
156      } catch (Exception e) {
157        tool.printError(e.getMessage());
158      }
159      return -1;
160    }
161
162    protected void checkArgs(String args[]) {
163      if (args.length > 0) {
164        throw new HadoopIllegalArgumentException(
165            "Did not expect argument: " + args[0]);
166      }
167    }
168
169    
170    /** Method to be overridden by sub classes for specific behavior */
171    int doWorkInternal(GetConf tool, String[] args) throws Exception {
172
173      String value = tool.getConf().getTrimmed(key);
174      if (value != null) {
175        tool.printOut(value);
176        return 0;
177      }
178      tool.printError("Configuration " + key + " is missing.");
179      return -1;
180    }
181  }
182  
183  /**
184   * Handler for {@link Command#NAMENODE}
185   */
186  static class NameNodesCommandHandler extends CommandHandler {
187    @Override
188    int doWorkInternal(GetConf tool, String []args) throws IOException {
189      tool.printMap(DFSUtil.getNNServiceRpcAddressesForCluster(tool.getConf()));
190      return 0;
191    }
192  }
193  
194  /**
195   * Handler for {@link Command#BACKUP}
196   */
197  static class BackupNodesCommandHandler extends CommandHandler {
198    @Override
199    public int doWorkInternal(GetConf tool, String []args) throws IOException {
200      tool.printMap(DFSUtil.getBackupNodeAddresses(tool.getConf()));
201      return 0;
202    }
203  }
204  
205  /**
206   * Handler for {@link Command#SECONDARY}
207   */
208  static class SecondaryNameNodesCommandHandler extends CommandHandler {
209    @Override
210    public int doWorkInternal(GetConf tool, String []args) throws IOException {
211      tool.printMap(DFSUtil.getSecondaryNameNodeAddresses(tool.getConf()));
212      return 0;
213    }
214  }
215  
216  /**
217   * Handler for {@link Command#NNRPCADDRESSES}
218   * If rpc addresses are defined in configuration, we return them. Otherwise, 
219   * return empty string.
220   */
221  static class NNRpcAddressesCommandHandler extends CommandHandler {
222    @Override
223    public int doWorkInternal(GetConf tool, String []args) throws IOException {
224      Configuration config = tool.getConf();
225      List<ConfiguredNNAddress> cnnlist = DFSUtil.flattenAddressMap(
226          DFSUtil.getNNServiceRpcAddressesForCluster(config));
227      if (!cnnlist.isEmpty()) {
228        for (ConfiguredNNAddress cnn : cnnlist) {
229          InetSocketAddress rpc = cnn.getAddress();
230          tool.printOut(rpc.getHostName()+":"+rpc.getPort());
231        }
232        return 0;
233      }
234      tool.printError("Did not get namenode service rpc addresses.");
235      return -1;
236    }
237  }
238  
239  static class PrintConfKeyCommandHandler extends CommandHandler {
240    @Override
241    protected void checkArgs(String[] args) {
242      if (args.length != 1) {
243        throw new HadoopIllegalArgumentException(
244            "usage: " + Command.CONFKEY.getUsage());
245      }
246    }
247
248    @Override
249    int doWorkInternal(GetConf tool, String[] args) throws Exception {
250      this.key = args[0];
251      return super.doWorkInternal(tool, args);
252    }
253  }
254  
  private final PrintStream out; // Stream for printing command output
  private final PrintStream err; // Stream for printing error messages
257
  /**
   * Creates the tool writing to {@link System#out} and {@link System#err}.
   * @param conf configuration to query
   */
  GetConf(Configuration conf) {
    this(conf, System.out, System.err);
  }
261
  /**
   * Creates the tool with explicit output and error streams.
   * @param conf configuration to query
   * @param out stream for command output
   * @param err stream for error messages
   */
  GetConf(Configuration conf, PrintStream out, PrintStream err) {
    super(conf);
    this.out = out;
    this.err = err;
  }
267
  /** Writes {@code message} to the error stream. */
  void printError(String message) {
    err.println(message);
  }
271
  /** Writes {@code message} to the output stream. */
  void printOut(String message) {
    out.println(message);
  }
275  
276  void printMap(Map<String, Map<String, InetSocketAddress>> map) {
277    StringBuilder buffer = new StringBuilder();
278
279    List<ConfiguredNNAddress> cnns = DFSUtil.flattenAddressMap(map);
280    for (ConfiguredNNAddress cnn : cnns) {
281      InetSocketAddress address = cnn.getAddress();
282      if (buffer.length() > 0) {
283        buffer.append(" ");
284      }
285      buffer.append(address.getHostName());
286    }
287    printOut(buffer.toString());
288  }
289
  /** Prints the usage message to the error stream. */
  private void printUsage() {
    printError(USAGE);
  }
293
294  /**
295   * Main method that runs the tool for given arguments.
296   * @param args arguments
297   * @return return status of the command
298   */
299  private int doWork(String[] args) {
300    if (args.length >= 1) {
301      CommandHandler handler = Command.getHandler(args[0]);
302      if (handler != null) {
303        return handler.doWork(this,
304            Arrays.copyOfRange(args, 1, args.length));
305      }
306    }
307    printUsage();
308    return -1;
309  }
310
311  @Override
312  public int run(final String[] args) throws Exception {
313    try {
314      return UserGroupInformation.getCurrentUser().doAs(
315          new PrivilegedExceptionAction<Integer>() {
316            @Override
317            public Integer run() throws Exception {
318              return doWork(args);
319            }
320          });
321    } catch (InterruptedException e) {
322      throw new IOException(e);
323    }
324  }
325
  /**
   * Command line entry point. Prints usage and exits 0 when a help argument
   * is given; otherwise runs the tool via ToolRunner and exits with its
   * return status.
   */
  public static void main(String[] args) throws Exception {
    if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
      System.exit(0);
    }
    
    int res = ToolRunner.run(new GetConf(new HdfsConfiguration()), args);
    System.exit(res);
  }
334}