/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.client;

import java.io.IOException;
import java.net.URI;
import java.util.EnumSet;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.tools.DFSAdmin;

/**
 * The public API for performing administrative functions on HDFS. Those writing
 * applications against HDFS should prefer this interface to directly accessing
 * functionality in DistributedFileSystem or DFSClient.
 *
 * Note that this is distinct from the similarly-named {@link DFSAdmin}, which
 * provides the functionality for the {@code hdfs dfsadmin ...} CLI commands.
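 *
 * <p>
 * A minimal usage sketch (the NameNode URI and path below are illustrative
 * placeholders, not values defined by this class):
 * <pre>{@code
 * Configuration conf = new Configuration();
 * HdfsAdmin admin = new HdfsAdmin(URI.create("hdfs://nn.example.com:8020"), conf);
 * // Limit /user/alice to at most 100,000 files, directories, and symlinks.
 * admin.setQuota(new Path("/user/alice"), 100000);
 * }</pre>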
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class HdfsAdmin {

  private DistributedFileSystem dfs;

  /**
   * Create a new HdfsAdmin client.
   *
   * @param uri the unique URI of the HDFS file system to administer
   * @param conf the configuration to use
   * @throws IOException in the event the file system could not be created
   * @throws IllegalArgumentException if the URI does not refer to an HDFS
   *           file system
   */
  public HdfsAdmin(URI uri, Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(uri, conf);
    if (!(fs instanceof DistributedFileSystem)) {
      throw new IllegalArgumentException("'" + uri + "' is not an HDFS URI.");
    } else {
      dfs = (DistributedFileSystem) fs;
    }
  }

  /**
   * Set the namespace quota (count of files, directories, and sym links) for a
   * directory.
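   *
   * <p>
   * A minimal sketch of setting and later clearing a namespace quota (the path
   * and limit are illustrative):
   * <pre>{@code
   * // Allow at most 10,000 files, directories, and symlinks under /data.
   * admin.setQuota(new Path("/data"), 10000);
   * // Remove the namespace quota again.
   * admin.clearQuota(new Path("/data"));
   * }</pre>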
   *
   * @param src the path to set the quota for
   * @param quota the value to set for the quota
   * @throws IOException in the event of error
   */
  public void setQuota(Path src, long quota) throws IOException {
    dfs.setQuota(src, quota, HdfsConstants.QUOTA_DONT_SET);
  }

  /**
   * Clear the namespace quota (count of files, directories, and sym links) for
   * a directory.
   *
   * @param src the path to clear the quota of
   * @throws IOException in the event of error
   */
  public void clearQuota(Path src) throws IOException {
    dfs.setQuota(src, HdfsConstants.QUOTA_RESET, HdfsConstants.QUOTA_DONT_SET);
  }

  /**
   * Set the disk space quota (size of files) for a directory. Note that
   * directories and sym links do not occupy disk space.
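   *
   * <p>
   * A minimal sketch of setting and later clearing a space quota (the path and
   * size are illustrative):
   * <pre>{@code
   * // Limit the space quota for /data to 10 gigabytes.
   * admin.setSpaceQuota(new Path("/data"), 10L * 1024 * 1024 * 1024);
   * // Remove the space quota again.
   * admin.clearSpaceQuota(new Path("/data"));
   * }</pre>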
   *
   * @param src the path to set the space quota of
   * @param spaceQuota the value to set for the space quota
   * @throws IOException in the event of error
   */
  public void setSpaceQuota(Path src, long spaceQuota) throws IOException {
    dfs.setQuota(src, HdfsConstants.QUOTA_DONT_SET, spaceQuota);
  }

  /**
   * Clear the disk space quota (size of files) for a directory. Note that
   * directories and sym links do not occupy disk space.
   *
   * @param src the path to clear the space quota of
   * @throws IOException in the event of error
   */
  public void clearSpaceQuota(Path src) throws IOException {
    dfs.setQuota(src, HdfsConstants.QUOTA_DONT_SET, HdfsConstants.QUOTA_RESET);
  }

  /**
   * Allow snapshot on a directory.
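   *
   * <p>
   * A minimal sketch (the path is illustrative; creating the snapshot itself is
   * done through FileSystem#createSnapshot, not this class):
   * <pre>{@code
   * admin.allowSnapshot(new Path("/data"));
   * // ... snapshots may now be taken under /data ...
   * admin.disallowSnapshot(new Path("/data"));
   * }</pre>
   *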
   * @param path The path of the directory where snapshots will be taken.
   * @throws IOException in the event of error
   */
  public void allowSnapshot(Path path) throws IOException {
    dfs.allowSnapshot(path);
  }

  /**
   * Disallow snapshot on a directory.
   * @param path The path of the snapshottable directory.
   * @throws IOException in the event of error
   */
  public void disallowSnapshot(Path path) throws IOException {
    dfs.disallowSnapshot(path);
  }

  /**
   * Add a new CacheDirectiveInfo.
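   *
   * <p>
   * A minimal sketch (the path and pool name are illustrative, and the pool is
   * assumed to already exist):
   * <pre>{@code
   * CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
   *     .setPath(new Path("/data/hot"))
   *     .setPool("hot-pool")
   *     .setReplication((short) 2)
   *     .build();
   * long id = admin.addCacheDirective(directive, EnumSet.noneOf(CacheFlag.class));
   * }</pre>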
   *
   * @param info Information about a directive to add.
   * @param flags {@link CacheFlag}s to use for this operation.
   * @return the ID of the directive that was created.
   * @throws IOException if the directive could not be added
   */
  public long addCacheDirective(CacheDirectiveInfo info,
      EnumSet<CacheFlag> flags) throws IOException {
    return dfs.addCacheDirective(info, flags);
  }

  /**
   * Modify a CacheDirective.
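   *
   * <p>
   * A minimal sketch, assuming {@code id} was returned by a previous call to
   * {@link #addCacheDirective}:
   * <pre>{@code
   * CacheDirectiveInfo update = new CacheDirectiveInfo.Builder()
   *     .setId(id)
   *     .setReplication((short) 3)
   *     .build();
   * admin.modifyCacheDirective(update, EnumSet.noneOf(CacheFlag.class));
   * }</pre>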
   *
   * @param info Information about the directive to modify. You must set the ID
   *          to indicate which CacheDirective you want to modify.
   * @param flags {@link CacheFlag}s to use for this operation.
   * @throws IOException if the directive could not be modified
   */
  public void modifyCacheDirective(CacheDirectiveInfo info,
      EnumSet<CacheFlag> flags) throws IOException {
    dfs.modifyCacheDirective(info, flags);
  }

  /**
   * Remove a CacheDirective.
   *
   * @param id identifier of the CacheDirectiveInfo to remove
   * @throws IOException if the directive could not be removed
   */
  public void removeCacheDirective(long id)
      throws IOException {
    dfs.removeCacheDirective(id);
  }

  /**
   * List cache directives. Incrementally fetches results from the server.
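   *
   * <p>
   * A minimal sketch of listing every directive visible to the caller:
   * <pre>{@code
   * RemoteIterator<CacheDirectiveEntry> it = admin.listCacheDirectives(null);
   * while (it.hasNext()) {
   *   CacheDirectiveEntry entry = it.next();
   *   System.out.println(entry.getInfo().getPath());
   * }
   * }</pre>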
   *
   * @param filter Filter parameters to use when listing the directives, null to
   *               list all directives visible to us.
   * @return A RemoteIterator which returns CacheDirectiveEntry objects.
   * @throws IOException if the directives could not be listed
   */
  public RemoteIterator<CacheDirectiveEntry> listCacheDirectives(
      CacheDirectiveInfo filter) throws IOException {
    return dfs.listCacheDirectives(filter);
  }

  /**
   * Add a cache pool.
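   *
   * <p>
   * A minimal sketch (the pool name and limit are illustrative):
   * <pre>{@code
   * CachePoolInfo pool = new CachePoolInfo("hot-pool")
   *     .setLimit(1024L * 1024 * 1024);
   * admin.addCachePool(pool);
   * }</pre>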
   *
   * @param info
   *          The request to add a cache pool.
   * @throws IOException
   *          If the request could not be completed.
   */
  public void addCachePool(CachePoolInfo info) throws IOException {
    dfs.addCachePool(info);
  }

  /**
   * Modify an existing cache pool.
   *
   * @param info
   *          The request to modify a cache pool.
   * @throws IOException
   *          If the request could not be completed.
   */
  public void modifyCachePool(CachePoolInfo info) throws IOException {
    dfs.modifyCachePool(info);
  }

  /**
   * Remove a cache pool.
   *
   * @param poolName
   *          Name of the cache pool to remove.
   * @throws IOException
   *          if the cache pool did not exist, or could not be removed.
   */
  public void removeCachePool(String poolName) throws IOException {
    dfs.removeCachePool(poolName);
  }

  /**
   * List all cache pools.
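   *
   * <p>
   * A minimal sketch of printing the name of every cache pool:
   * <pre>{@code
   * RemoteIterator<CachePoolEntry> pools = admin.listCachePools();
   * while (pools.hasNext()) {
   *   System.out.println(pools.next().getInfo().getPoolName());
   * }
   * }</pre>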
   *
   * @return A remote iterator from which you can get CachePoolEntry objects.
   *          Requests will be made as needed.
   * @throws IOException
   *          If there was an error listing cache pools.
   */
  public RemoteIterator<CachePoolEntry> listCachePools() throws IOException {
    return dfs.listCachePools();
  }
}