/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.tools;

import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Tool for getting configuration information from a configuration file.
 *
 * Adding more options:
 * <ul>
 * <li>
 * If adding a simple option to get a value corresponding to a key in the
 * configuration, use the regular {@link GetConf.CommandHandler}.
 * See {@link GetConf.Command#EXCLUDE_FILE} for an example.
 * </li>
 * <li>
 * If adding an option that does not return a value for a key, add
 * a subclass of {@link GetConf.CommandHandler} and set it up in
 * {@link GetConf.Command}.
 *
 * See {@link GetConf.Command#NAMENODE} for an example.
 *
 * For the new option, add a map entry with the corresponding
 * {@link GetConf.CommandHandler} (see the illustrative snippet below).
 * </li>
 * </ul>
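 * <p>
 * For illustration only (the option name and configuration key below are
 * hypothetical), a simple value-returning option could be wired up as follows:
 * <pre>
 *   // New enum constant:
 *   FOO_FILE("-fooFile", "gets the foo file path."),
 *
 *   // Entry in the static handler map, using the plain CommandHandler
 *   // for a straight configuration key lookup:
 *   map.put(FOO_FILE.getName().toLowerCase(),
 *       new CommandHandler(DFSConfigKeys.DFS_FOO_FILE_KEY));
 * </pre>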
 */
public class GetConf extends Configured implements Tool {
  private static final String DESCRIPTION = "hdfs getconf is a utility for "
      + "getting configuration information from the config file.\n";

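  /**
   * Commands supported by the tool. Each command is identified by its
   * command-line option name and is mapped to a {@link CommandHandler}.
   */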
  enum Command {
    NAMENODE("-namenodes", "gets list of namenodes in the cluster."),
    SECONDARY("-secondaryNameNodes",
        "gets list of secondary namenodes in the cluster."),
    BACKUP("-backupNodes", "gets list of backup nodes in the cluster."),
    INCLUDE_FILE("-includeFile",
        "gets the include file path that defines the datanodes " +
        "that can join the cluster."),
    EXCLUDE_FILE("-excludeFile",
        "gets the exclude file path that defines the datanodes " +
        "that need to be decommissioned."),
    NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses.");

    // Lookup table from lower-cased option name to its handler
    private static Map<String, CommandHandler> map;
    static {
      map = new HashMap<String, CommandHandler>();
      map.put(NAMENODE.getName().toLowerCase(),
          new NameNodesCommandHandler());
      map.put(SECONDARY.getName().toLowerCase(),
          new SecondaryNameNodesCommandHandler());
      map.put(BACKUP.getName().toLowerCase(),
          new BackupNodesCommandHandler());
      map.put(INCLUDE_FILE.getName().toLowerCase(),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS));
      map.put(EXCLUDE_FILE.getName().toLowerCase(),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
      map.put(NNRPCADDRESSES.getName().toLowerCase(),
          new NNRpcAddressesCommandHandler());
    }

    private final String cmd;
    private final String description;

    Command(String cmd, String description) {
      this.cmd = cmd;
      this.description = description;
    }

    public String getName() {
      return cmd;
    }

    public String getDescription() {
      return description;
    }

    public static CommandHandler getHandler(String name) {
      return map.get(name.toLowerCase());
    }
  }

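  /** Usage summary for the tool, built from the {@link Command} values. */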
  static final String USAGE;
  static {
    HdfsConfiguration.init();

    /* Initialize USAGE based on Command values */
    StringBuilder usage = new StringBuilder(DESCRIPTION);
    usage.append("\nhdfs getconf \n");
    for (Command cmd : Command.values()) {
      usage.append("\t[" + cmd.getName() + "]\t\t\t" + cmd.getDescription()
          + "\n");
    }
    USAGE = usage.toString();
  }

  /**
   * Handler that returns the value of the configuration key corresponding
   * to a {@link Command}.
   */
  static class CommandHandler {
    final String key; // Configuration key to lookup

    CommandHandler() {
      this(null);
    }

    CommandHandler(String key) {
      this.key = key;
    }

    final int doWork(GetConf tool) {
      try {
        return doWorkInternal(tool);
      } catch (Exception e) {
        tool.printError(e.getMessage());
      }
      return -1;
    }

    /** Method to be overridden by subclasses for command-specific behavior */
    int doWorkInternal(GetConf tool) throws Exception {
      String value = tool.getConf().get(key);
      if (value != null) {
        tool.printOut(value);
        return 0;
      }
      tool.printError("Configuration " + key + " is missing.");
      return -1;
    }
  }

  /**
   * Handler for {@link Command#NAMENODE}
   */
  static class NameNodesCommandHandler extends CommandHandler {
    @Override
    int doWorkInternal(GetConf tool) throws IOException {
      tool.printList(DFSUtil.getNNServiceRpcAddresses(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#BACKUP}
   */
  static class BackupNodesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool) throws IOException {
      tool.printList(DFSUtil.getBackupNodeAddresses(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#SECONDARY}
   */
  static class SecondaryNameNodesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool) throws IOException {
      tool.printList(DFSUtil.getSecondaryNameNodeAddresses(tool.getConf()));
      return 0;
    }
  }


  /**
   * Handler for {@link Command#NNRPCADDRESSES}.
   * If namenode rpc addresses are defined in the configuration, print them;
   * otherwise print an error and return a non-zero status.
   */
  static class NNRpcAddressesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool) throws IOException {
      Configuration config = tool.getConf();
      List<InetSocketAddress> rpclist = DFSUtil.getNNServiceRpcAddresses(config);
      if (rpclist != null) {
        for (InetSocketAddress rpc : rpclist) {
          tool.printOut(rpc.getHostName() + ":" + rpc.getPort());
        }
        return 0;
      }
      tool.printError("Did not get namenode service rpc addresses.");
      return -1;
    }
  }

  private final PrintStream out; // Stream for printing command output
  private final PrintStream err; // Stream for printing error messages

  GetConf(Configuration conf) {
    this(conf, System.out, System.err);
  }

  GetConf(Configuration conf, PrintStream out, PrintStream err) {
    super(conf);
    this.out = out;
    this.err = err;
  }

  void printError(String message) {
    err.println(message);
  }

  void printOut(String message) {
    out.println(message);
  }

  void printList(List<InetSocketAddress> list) {
    StringBuilder buffer = new StringBuilder();
    for (InetSocketAddress address : list) {
      if (buffer.length() > 0) {
        buffer.append(" ");
      }
      buffer.append(address.getHostName());
    }
    printOut(buffer.toString());
  }

  private void printUsage() {
    printError(USAGE);
  }

  /**
   * Runs the {@link CommandHandler} for the given arguments.
   * @param args arguments
   * @return exit status of the command
   */
  private int doWork(String[] args) {
    if (args.length == 1) {
      CommandHandler handler = Command.getHandler(args[0]);
      if (handler != null) {
        return handler.doWork(this);
      }
    }
    printUsage();
    return -1;
  }

  @Override
  public int run(final String[] args) throws Exception {
    try {
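      // Execute the command in the security context of the current user.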
      return UserGroupInformation.getCurrentUser().doAs(
          new PrivilegedExceptionAction<Integer>() {
            @Override
            public Integer run() throws Exception {
              return doWork(args);
            }
          });
    } catch (InterruptedException e) {
      throw new IOException(e);
    }
  }

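  /**
   * Entry point for the tool, e.g. {@code hdfs getconf -namenodes}.
   */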
  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new GetConf(new HdfsConfiguration()), args);
    System.exit(res);
  }
}