author     Venkata Sairam <venkatasairam.lanka@gmail.com>  2017-08-23 15:58:01 +0530
committer  Venkata Sairam <venkatasairam.lanka@gmail.com>  2017-08-23 16:00:35 +0530
commit     9ba009b5a69b640e68db3c92aaf117bbbb92ab8d (patch)
tree       8d96ff03952b186cdbd825778ea3cd7035d6a50a
parent     589d967638bc72ea1596e60bb0c1b358e888ea09 (diff)
AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running queries from Hive View
-rw-r--r--  contrib/views/hive20/src/main/resources/ui/yarn.lock                              |  2
-rw-r--r--  contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java  | 19
-rw-r--r--  contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java | 17
3 files changed, 30 insertions(+), 8 deletions(-)
diff --git a/contrib/views/hive20/src/main/resources/ui/yarn.lock b/contrib/views/hive20/src/main/resources/ui/yarn.lock
index 477a15cb88..607cf81d25 100644
--- a/contrib/views/hive20/src/main/resources/ui/yarn.lock
+++ b/contrib/views/hive20/src/main/resources/ui/yarn.lock
@@ -569,7 +569,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0:
   dependencies:
     babel-runtime "^6.22.0"
 
-babel-plugin-transform-es2015-block-scoping@^6.23.0:
+babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1:
   version "6.24.1"
   resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576"
   dependencies:
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 90fa483ef7..5bce7ba3f1 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -386,7 +386,21 @@ public class HdfsApi {
    * @throws IOException
    * @throws InterruptedException
    */
-  public <T> T execute(PrivilegedExceptionAction<T> action)
+  public <T> T execute(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException {
+    return this.execute(action, false);
+  }
+
+
+  /**
+   * Executes action on HDFS using doAs
+   * @param action strategy object
+   * @param alwaysRetry whether to retry the action on any IOException
+   * @param <T> result type
+   * @return result of operation
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
       throws IOException, InterruptedException {
     T result = null;
@@ -401,7 +415,7 @@
         result = ugi.doAs(action);
         succeeded = true;
       } catch (IOException ex) {
-        if (!ex.getMessage().contains("Cannot obtain block length for")) {
+        if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) {
           throw ex;
         }
         if (tryNumber >= 3) {
@@ -409,6 +423,7 @@
         }
         LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " +
             "Retrying... Try #" + (tryNumber + 1));
+        LOG.error("Retrying: " + ex.getMessage(), ex);
         Thread.sleep(1000); //retry after 1 second
       }
     } while (!succeeded);
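
Read together, the hunks above give the shape of the new retry loop. The sketch below reassembles it for reference; it is not the verbatim file: the ugi and LOG fields and the initialization of tryNumber and succeeded sit outside the hunks and are assumed from context.

    public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
        throws IOException, InterruptedException {
      T result = null;
      boolean succeeded = false;
      int tryNumber = 0;               // assumed initialization; not shown in the hunks
      do {
        tryNumber += 1;
        try {
          result = ugi.doAs(action);   // run the action as the proxied HDFS user
          succeeded = true;
        } catch (IOException ex) {
          // Before this patch only the "Cannot obtain block length for" race was
          // retried; alwaysRetry widens the retry to any IOException, which is
          // what the HdfsUtil.putStringToFile() change below relies on.
          if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) {
            throw ex;
          }
          if (tryNumber >= 3) {
            throw ex;                  // give up after three attempts
          }
          LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " +
              "Retrying... Try #" + (tryNumber + 1));
          LOG.error("Retrying: " + ex.getMessage(), ex);
          Thread.sleep(1000);          // back off for one second between attempts
        }
      } while (!succeeded);
      return result;                   // assumed; the hunks end at the loop
    }

One caveat visible even in this sketch: ex.getMessage() can be null for some IOExceptions, and the contains() check would then throw a NullPointerException; callers passing alwaysRetry=true skip that check entirely thanks to short-circuit evaluation.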
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
index 0670f1ae3d..39958c323e 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
@@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Map;
+import java.security.PrivilegedExceptionAction;
 
 public class HdfsUtil {
   private final static Logger LOG =
@@ -38,19 +39,25 @@ public class HdfsUtil {
    * @param filePath path to file
    * @param content new content of file
    */
-  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException {
-    FSDataOutputStream stream;
+  public static void putStringToFile(final HdfsApi hdfs, final String filePath, final String content) throws HdfsApiException {
     try {
       synchronized (hdfs) {
-        stream = hdfs.create(filePath, true);
-        stream.write(content.getBytes());
-        stream.close();
+        hdfs.execute(new PrivilegedExceptionAction<Void>() {
+          @Override
+          public Void run() throws Exception {
+            // try-with-resources closes the stream even if write() fails
+            try (FSDataOutputStream stream = hdfs.create(filePath, true)) {
+              stream.write(content.getBytes());
+            }
+            return null;
+          }
+        }, true);
       }
     } catch (IOException e) {
       throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
     } catch (InterruptedException e) {
       throw new HdfsApiException("HDFS021 Could not write file " + filePath, e);
     }
   }
 
   /**
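
For context, a hypothetical caller is sketched below; the SaveQueryExample class and saveQuery() method are illustrative and not part of this patch. Only the HdfsUtil.putStringToFile() signature comes from the code above.

    import org.apache.ambari.view.utils.hdfs.HdfsApi;
    import org.apache.ambari.view.utils.hdfs.HdfsApiException;
    import org.apache.ambari.view.utils.hdfs.HdfsUtil;

    // Hypothetical caller, e.g. a view persisting a Hive query file to HDFS.
    public class SaveQueryExample {
      // "api" is assumed to be an HdfsApi instance already set up for the user.
      static void saveQuery(HdfsApi api, String path, String sql) {
        try {
          // After this patch the write runs under ugi.doAs with alwaysRetry=true,
          // so HDFS020/HDFS021 surface only after all three attempts fail.
          HdfsUtil.putStringToFile(api, path, sql);
        } catch (HdfsApiException e) {
          throw new RuntimeException("Could not save query to " + path, e);
        }
      }
    }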