Skip to content

Commit 04fd201

Browse files
committed
Merge r1609845 through r1611528 from trunk.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-6584@1611531 13f79535-47bb-0310-9956-ffa450edef68
2 parents 38af610 + 403ec8e commit 04fd201

File tree

131 files changed

+4550
-686
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

131 files changed

+4550
-686
lines changed

hadoop-common-project/hadoop-auth/pom.xml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -139,6 +139,17 @@
139139
<attach>true</attach>
140140
</configuration>
141141
</plugin>
142+
<plugin>
143+
<groupId>org.apache.maven.plugins</groupId>
144+
<artifactId>maven-jar-plugin</artifactId>
145+
<executions>
146+
<execution>
147+
<goals>
148+
<goal>test-jar</goal>
149+
</goals>
150+
</execution>
151+
</executions>
152+
</plugin>
142153
</plugins>
143154
</build>
144155

hadoop-common-project/hadoop-common/CHANGES.txt

Lines changed: 42 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -36,10 +36,6 @@ Trunk (Unreleased)
3636

3737
HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
3838

39-
HADOOP-7664. Remove warnings when overriding final parameter configuration
40-
if the override value is same as the final parameter value.
41-
(Ravi Prakash via suresh)
42-
4339
HADOOP-8078. Add capability to turn on security in unit tests. (Jaimin
4440
Jetly via jitendra)
4541

@@ -162,9 +158,6 @@ Trunk (Unreleased)
162158

163159
HADOOP-10485. Remove dead classes in hadoop-streaming. (wheat9)
164160

165-
HADOOP-10607. Create API to separate credential/password storage from
166-
applications. (Larry McCay via omalley)
167-
168161
HADOOP-10696. Add optional attributes to KeyProvider Options and Metadata.
169162
(tucu)
170163

@@ -182,6 +175,8 @@ Trunk (Unreleased)
182175

183176
HADOOP-10736. Add key attributes to the key shell. (Mike Yoder via wang)
184177

178+
HADOOP-10824. Refactor KMSACLs to avoid locking. (Benoy Antony via umamahesh)
179+
185180
BUG FIXES
186181

187182
HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -379,6 +374,16 @@ Trunk (Unreleased)
379374
NativeAzureFileSystem#NativeAzureFsInputStream#close().
380375
(Chen He via cnauroth)
381376

377+
HADOOP-10831. UserProvider is not thread safe. (Benoy Antony via umamahesh)
378+
379+
HADOOP-10834. Typo in CredentialShell usage. (Benoy Antony via umamahesh)
380+
381+
HADOOP-10816. KeyShell returns -1 on error to the shell, should be 1.
382+
(Mike Yoder via wang)
383+
384+
HADOOP-10840. Fix OutOfMemoryError caused by metrics system in Azure File
385+
System. (Shanyu Zhao via cnauroth)
386+
382387
OPTIMIZATIONS
383388

384389
HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -397,6 +402,30 @@ Release 2.6.0 - UNRELEASED
397402

398403
HADOOP-10815. Implement Windows equivalent of mlock. (cnauroth)
399404

405+
HADOOP-7664. Remove warnings when overriding final parameter configuration
406+
if the override value is same as the final parameter value.
407+
(Ravi Prakash via suresh)
408+
409+
HADOOP-10673. Update rpc metrics when the call throws an exception. (Ming Ma
410+
via jing9)
411+
412+
HADOOP-10845. Add common tests for ACLs in combination with viewfs.
413+
(Stephen Chu via cnauroth)
414+
415+
HADOOP-10839. Add unregisterSource() to MetricsSystem API.
416+
(Shanyu Zhao via cnauroth)
417+
418+
HADOOP-10607. Create an API to separate credentials/password storage
419+
from applications (Larry McCay via omalley)
420+
421+
HADOOP-10732. Fix locking in credential update. (Ted Yu via omalley)
422+
423+
HADOOP-10733. Fix potential null dereference in CredShell. (Ted Yu via
424+
omalley)
425+
426+
HADOOP-10610. Upgrade S3n s3.fs.buffer.dir to support multi directories.
427+
(Ted Malaska via atm)
428+
400429
OPTIMIZATIONS
401430

402431
BUG FIXES
@@ -412,6 +441,12 @@ Release 2.6.0 - UNRELEASED
412441

413442
HADOOP-10810. Clean up native code compilation warnings. (cnauroth)
414443

444+
HADOOP-9921. daemon scripts should remove pid file on stop call after stop
445+
or process is found not running ( vinayakumarb )
446+
447+
HADOOP-10591. Compression codecs must used pooled direct buffers or
448+
deallocate direct buffers when stream is closed (cmccabe)
449+
415450
Release 2.5.0 - UNRELEASED
416451

417452
INCOMPATIBLE CHANGES

hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -198,6 +198,7 @@ case $startStop in
198198
else
199199
echo no $command to stop
200200
fi
201+
rm -f $pid
201202
else
202203
echo no $command to stop
203204
fi

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java

Lines changed: 26 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,16 @@ public class KeyShell extends Configured implements Tool {
5757

5858
private boolean userSuppliedProvider = false;
5959

60+
/**
61+
* Primary entry point for the KeyShell; called via main().
62+
*
63+
* @param args Command line arguments.
64+
* @return 0 on success and 1 on failure. This value is passed back to
65+
* the unix shell, so we must follow shell return code conventions:
66+
* the return code is an unsigned character, and 0 means success, and
67+
* small positive integers mean failure.
68+
* @throws Exception
69+
*/
6070
@Override
6171
public int run(String[] args) throws Exception {
6272
int exitCode = 0;
@@ -68,11 +78,11 @@ public int run(String[] args) throws Exception {
6878
if (command.validate()) {
6979
command.execute();
7080
} else {
71-
exitCode = -1;
81+
exitCode = 1;
7282
}
7383
} catch (Exception e) {
7484
e.printStackTrace(err);
75-
return -1;
85+
return 1;
7686
}
7787
return exitCode;
7888
}
@@ -86,8 +96,8 @@ public int run(String[] args) throws Exception {
8696
* % hadoop key list [-provider providerPath]
8797
* % hadoop key delete keyName [--provider providerPath] [-i]
8898
* </pre>
89-
* @param args
90-
* @return
99+
* @param args Command line arguments.
100+
* @return 0 on success, 1 on failure.
91101
* @throws IOException
92102
*/
93103
private int init(String[] args) throws IOException {
@@ -105,7 +115,7 @@ private int init(String[] args) throws IOException {
105115
command = new CreateCommand(keyName, options);
106116
if ("--help".equals(keyName)) {
107117
printKeyShellUsage();
108-
return -1;
118+
return 1;
109119
}
110120
} else if (args[i].equals("delete")) {
111121
String keyName = "--help";
@@ -116,7 +126,7 @@ private int init(String[] args) throws IOException {
116126
command = new DeleteCommand(keyName);
117127
if ("--help".equals(keyName)) {
118128
printKeyShellUsage();
119-
return -1;
129+
return 1;
120130
}
121131
} else if (args[i].equals("roll")) {
122132
String keyName = "--help";
@@ -127,7 +137,7 @@ private int init(String[] args) throws IOException {
127137
command = new RollCommand(keyName);
128138
if ("--help".equals(keyName)) {
129139
printKeyShellUsage();
130-
return -1;
140+
return 1;
131141
}
132142
} else if ("list".equals(args[i])) {
133143
command = new ListCommand();
@@ -145,13 +155,13 @@ private int init(String[] args) throws IOException {
145155
out.println("\nAttributes must be in attribute=value form, " +
146156
"or quoted\nlike \"attribute = value\"\n");
147157
printKeyShellUsage();
148-
return -1;
158+
return 1;
149159
}
150160
if (attributes.containsKey(attr)) {
151161
out.println("\nEach attribute must correspond to only one value:\n" +
152162
"atttribute \"" + attr + "\" was repeated\n" );
153163
printKeyShellUsage();
154-
return -1;
164+
return 1;
155165
}
156166
attributes.put(attr, val);
157167
} else if ("--provider".equals(args[i]) && moreTokens) {
@@ -163,17 +173,17 @@ private int init(String[] args) throws IOException {
163173
interactive = true;
164174
} else if ("--help".equals(args[i])) {
165175
printKeyShellUsage();
166-
return -1;
176+
return 1;
167177
} else {
168178
printKeyShellUsage();
169179
ToolRunner.printGenericCommandUsage(System.err);
170-
return -1;
180+
return 1;
171181
}
172182
}
173183

174184
if (command == null) {
175185
printKeyShellUsage();
176-
return -1;
186+
return 1;
177187
}
178188

179189
if (!attributes.isEmpty()) {
@@ -491,10 +501,11 @@ public String getUsage() {
491501
}
492502

493503
/**
494-
* Main program.
504+
* main() entry point for the KeyShell. While strictly speaking the
505+
* return is void, it will System.exit() with a return code: 0 is for
506+
* success and 1 for failure.
495507
*
496-
* @param args
497-
* Command line arguments
508+
* @param args Command line arguments.
498509
* @throws Exception
499510
*/
500511
public static void main(String[] args) throws Exception {

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@
5050
import org.apache.hadoop.fs.FileAlreadyExistsException;
5151
import org.apache.hadoop.fs.FileStatus;
5252
import org.apache.hadoop.fs.FileSystem;
53+
import org.apache.hadoop.fs.LocalDirAllocator;
5354
import org.apache.hadoop.fs.Path;
5455
import org.apache.hadoop.fs.permission.FsPermission;
5556
import org.apache.hadoop.fs.s3.S3Exception;
@@ -225,6 +226,7 @@ private class NativeS3FsOutputStream extends OutputStream {
225226
private OutputStream backupStream;
226227
private MessageDigest digest;
227228
private boolean closed;
229+
private LocalDirAllocator lDirAlloc;
228230

229231
public NativeS3FsOutputStream(Configuration conf,
230232
NativeFileSystemStore store, String key, Progressable progress,
@@ -246,11 +248,10 @@ public NativeS3FsOutputStream(Configuration conf,
246248
}
247249

248250
private File newBackupFile() throws IOException {
249-
File dir = new File(conf.get("fs.s3.buffer.dir"));
250-
if (!dir.mkdirs() && !dir.exists()) {
251-
throw new IOException("Cannot create S3 buffer directory: " + dir);
251+
if (lDirAlloc == null) {
252+
lDirAlloc = new LocalDirAllocator("fs.s3.buffer.dir");
252253
}
253-
File result = File.createTempFile("output-", ".tmp", dir);
254+
File result = lDirAlloc.createTmpFileForWrite("output-", LocalDirAllocator.SIZE_UNKNOWN, conf);
254255
result.deleteOnExit();
255256
return result;
256257
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,8 @@
3737
import org.apache.hadoop.fs.Options.ChecksumOpt;
3838
import org.apache.hadoop.fs.Path;
3939
import org.apache.hadoop.fs.UnresolvedLinkException;
40+
import org.apache.hadoop.fs.permission.AclEntry;
41+
import org.apache.hadoop.fs.permission.AclStatus;
4042
import org.apache.hadoop.fs.permission.FsPermission;
4143
import org.apache.hadoop.security.token.Token;
4244
import org.apache.hadoop.util.Progressable;
@@ -279,6 +281,38 @@ public void setTimes(final Path f, final long mtime, final long atime)
279281
myFs.setTimes(fullPath(f), mtime, atime);
280282
}
281283

284+
@Override
285+
public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
286+
throws IOException {
287+
myFs.modifyAclEntries(fullPath(path), aclSpec);
288+
}
289+
290+
@Override
291+
public void removeAclEntries(Path path, List<AclEntry> aclSpec)
292+
throws IOException {
293+
myFs.removeAclEntries(fullPath(path), aclSpec);
294+
}
295+
296+
@Override
297+
public void removeDefaultAcl(Path path) throws IOException {
298+
myFs.removeDefaultAcl(fullPath(path));
299+
}
300+
301+
@Override
302+
public void removeAcl(Path path) throws IOException {
303+
myFs.removeAcl(fullPath(path));
304+
}
305+
306+
@Override
307+
public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
308+
myFs.setAcl(fullPath(path), aclSpec);
309+
}
310+
311+
@Override
312+
public AclStatus getAclStatus(Path path) throws IOException {
313+
return myFs.getAclStatus(fullPath(path));
314+
}
315+
282316
@Override
283317
public void setVerifyChecksum(final boolean verifyChecksum)
284318
throws IOException, UnresolvedLinkException {

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@
5050
import org.apache.hadoop.fs.XAttrSetFlag;
5151
import org.apache.hadoop.fs.permission.AclEntry;
5252
import org.apache.hadoop.fs.permission.AclStatus;
53+
import org.apache.hadoop.fs.permission.AclUtil;
5354
import org.apache.hadoop.fs.permission.FsPermission;
5455
import org.apache.hadoop.fs.viewfs.InodeTree.INode;
5556
import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink;
@@ -871,5 +872,46 @@ public long getDefaultBlockSize(Path f) {
871872
public short getDefaultReplication(Path f) {
872873
throw new NotInMountpointException(f, "getDefaultReplication");
873874
}
875+
876+
@Override
877+
public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
878+
throws IOException {
879+
checkPathIsSlash(path);
880+
throw readOnlyMountTable("modifyAclEntries", path);
881+
}
882+
883+
@Override
884+
public void removeAclEntries(Path path, List<AclEntry> aclSpec)
885+
throws IOException {
886+
checkPathIsSlash(path);
887+
throw readOnlyMountTable("removeAclEntries", path);
888+
}
889+
890+
@Override
891+
public void removeDefaultAcl(Path path) throws IOException {
892+
checkPathIsSlash(path);
893+
throw readOnlyMountTable("removeDefaultAcl", path);
894+
}
895+
896+
@Override
897+
public void removeAcl(Path path) throws IOException {
898+
checkPathIsSlash(path);
899+
throw readOnlyMountTable("removeAcl", path);
900+
}
901+
902+
@Override
903+
public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
904+
checkPathIsSlash(path);
905+
throw readOnlyMountTable("setAcl", path);
906+
}
907+
908+
@Override
909+
public AclStatus getAclStatus(Path path) throws IOException {
910+
checkPathIsSlash(path);
911+
return new AclStatus.Builder().owner(ugi.getUserName())
912+
.group(ugi.getGroupNames()[0])
913+
.addEntries(AclUtil.getMinimalAcl(PERMISSION_555))
914+
.stickyBit(false).build();
915+
}
874916
}
875917
}

0 commit comments

Comments
 (0)