author	Adam Kawa <kawa.adam@gmail.com>	2014-03-13 11:07:55 -0700
committer	Konstantin Boudnik <cos@apache.org>	2014-03-13 11:07:55 -0700
commit	01ac55b5ff06118119113b8b27c448598650a13e (patch)
tree	cf450bca9659da46664cf90c8ec45e03d125f553 /bigtop-tests/test-artifacts/httpfs
parent	06a0b005ed3b8e6b386f3784b1c988fc8ac260c0 (diff)
BIGTOP-1246. Fix the shell-object bug in HttpFs smoke tests
Diffstat (limited to 'bigtop-tests/test-artifacts/httpfs')
-rw-r--r--	bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy | 32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
index 0aa9a45e..9cd40ebc 100644
--- a/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
+++ b/bigtop-tests/test-artifacts/httpfs/src/main/groovy/org/apache/bigtop/itest/httpfs/TestHttpFs.groovy
@@ -46,9 +46,9 @@ public class TestHttpFs {
private static String testHttpFsFolder = "/tmp/httpfssmoke-" + (new Date().getTime());
private static String testHttpFsFolderRenamed = "$testHttpFsFolder-renamed";
- private static Shell shHDFS = new Shell("/bin/bash", USERNAME);
private static Shell sh = new Shell("/bin/bash");
-
+ // it will be used to clean up directories, as they are created via curl with user.name=$USERNAME
+ private static Shell shUSERNAME = new Shell("/bin/bash", USERNAME);
@BeforeClass
public static void setUp() {
@@ -56,18 +56,18 @@ public class TestHttpFs {
@AfterClass
public static void tearDown() {
- // clean up of existing folders
- shHDFS.exec("hadoop fs -test -e $testHttpFsFolder");
- if (shHDFS.getRet() == 0) {
- shHDFS.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
+ // clean up existing folders as USERNAME, the user who created them via curl
+ shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolder");
+ if (shUSERNAME.getRet() == 0) {
+ shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolder");
assertTrue("Deletion of previous testHttpFsFolder from HDFS failed",
- shHDFS.getRet() == 0);
+ shUSERNAME.getRet() == 0);
}
- shHDFS.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
- if (shHDFS.getRet() == 0) {
- shHDFS.exec("hadoop fs -rmr -skipTrash $testHttpFsFolderRenamed");
+ shUSERNAME.exec("hadoop fs -test -e $testHttpFsFolderRenamed");
+ if (shUSERNAME.getRet() == 0) {
+ shUSERNAME.exec("hadoop fs -rmr -skipTrash $testHttpFsFolderRenamed");
assertTrue("Deletion of previous testHttpFsFolderRenamed from HDFS failed",
- shHDFS.getRet() == 0);
+ shUSERNAME.getRet() == 0);
}
}
@@ -98,7 +98,7 @@ public class TestHttpFs {
assertTrue("curl command to create a dir failed", sh.getRet() == 0);
assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=RENAME&destination=$testHttpFsFolderRenamed'");
- assertTrue("curl command to rename a dir failed", shHDFS.getRet() == 0);
+ assertTrue("curl command to rename a dir failed", sh.getRet() == 0);
assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
}
@@ -108,7 +108,7 @@ public class TestHttpFs {
assertTrue("curl command to create a dir failed", sh.getRet() == 0);
assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
sh.exec("curl -i -X DELETE '$HTTPFS_PREFIX$testHttpFsFolder?user.name=$USERNAME&op=DELETE'");
- assertTrue("curl command to delete a dir failed", shHDFS.getRet() == 0);
+ assertTrue("curl command to delete a dir failed", sh.getRet() == 0);
assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
}
@@ -131,7 +131,7 @@ public class TestHttpFs {
createDir(testHttpFsFolder);
assertTrue("curl command to create a dir failed", sh.getRet() == 0);
sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=CREATE'");
- assertTrue("curl command to create a file failed", shHDFS.getRet() == 0);
+ assertTrue("curl command to create a file failed", sh.getRet() == 0);
String datanodeLocation = null;
sh.getOut().each {
if (it.startsWith("Location:")) {
@@ -142,10 +142,10 @@ public class TestHttpFs {
LOG.debug("Datanode location: $datanodeLocation");
assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
sh.exec("curl -i -T $DATA_DIR/$filename '$datanodeLocation' --header 'Content-Type:application/octet-stream'");
- assertTrue("curl command to create a file failed", shHDFS.getRet() == 0);
+ assertTrue("curl command to create a file failed", sh.getRet() == 0);
assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
sh.exec("curl -i -L '$HTTPFS_PREFIX$testHttpFsFolder/$filename?user.name=$USERNAME&op=OPEN'");
- assertTrue("curl command to create a file failed", shHDFS.getRet() == 0);
+ assertTrue("curl command to create a file failed", sh.getRet() == 0);
assertValueExists(sh.getOut(), HTTPFS_SUCCESS);
assertValueExists(sh.getOut(), filenameContent);
}
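
For reference, a minimal sketch of the corrected shell-object pattern (not taken from this commit): curl calls and their return-code assertions go through the same plain Shell object, while HDFS cleanup goes through a second Shell created as USERNAME, since the directories were created via curl with user.name=$USERNAME. The endpoint, user, and folder names below are assumed placeholders.

// Minimal sketch, assuming Bigtop's org.apache.bigtop.itest.shell.Shell helper;
// HTTPFS_PREFIX, USERNAME and the folder name are illustrative placeholders.
import org.apache.bigtop.itest.shell.Shell

def USERNAME = System.getProperty("user.name")
def HTTPFS_PREFIX = "http://localhost:14000/webhdfs/v1"   // assumed HttpFS endpoint
def folder = "/tmp/httpfssmoke-sketch"

// curl requests run through the plain shell object ...
Shell sh = new Shell("/bin/bash")
// ... while cleanup runs as USERNAME, because HttpFS created the directories
// on behalf of user.name=$USERNAME.
Shell shUSERNAME = new Shell("/bin/bash", USERNAME)

sh.exec("curl -i -X PUT '$HTTPFS_PREFIX$folder?user.name=$USERNAME&op=MKDIRS'")
// Assert on the object that actually ran the command, not on a different Shell.
assert sh.getRet() == 0 : "curl command to create a dir failed"

shUSERNAME.exec("hadoop fs -test -e $folder")
if (shUSERNAME.getRet() == 0) {
  shUSERNAME.exec("hadoop fs -rmr -skipTrash $folder")
  assert shUSERNAME.getRet() == 0 : "cleanup of $folder failed"
}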