/********************************************************************
 * Copyright (c) 2013 - 2014, Pivotal Inc.
 * All rights reserved.
 *
 * Author: Zhanwei Wang
 ********************************************************************/
/********************************************************************
 * 2014 -
 * open source under Apache License Version 2.0
 ********************************************************************/
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "Exception.h"

#include <sstream>

namespace Hdfs {

// Each ReflexName holds the fully qualified class name of the corresponding
// Java exception, so that exception class names reported by the Hadoop
// server can be mapped back to these C++ exception types.
const char * HdfsIOException::ReflexName = "java.io.IOException";

const char * AlreadyBeingCreatedException::ReflexName =
    "org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException";

const char * AccessControlException::ReflexName =
    "org.apache.hadoop.security.AccessControlException";

const char * FileAlreadyExistsException::ReflexName =
    "org.apache.hadoop.fs.FileAlreadyExistsException";

const char * DSQuotaExceededException::ReflexName =
    "org.apache.hadoop.hdfs.protocol.DSQuotaExceededException";

const char * NSQuotaExceededException::ReflexName =
    "org.apache.hadoop.hdfs.protocol.NSQuotaExceededException";

const char * ParentNotDirectoryException::ReflexName =
    "org.apache.hadoop.fs.ParentNotDirectoryException";

const char * SafeModeException::ReflexName =
    "org.apache.hadoop.hdfs.server.namenode.SafeModeException";

const char * NotReplicatedYetException::ReflexName =
    "org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException";

const char * FileNotFoundException::ReflexName = "java.io.FileNotFoundException";

const char * UnresolvedLinkException::ReflexName =
    "org.apache.hadoop.fs.UnresolvedLinkException";

const char * UnsupportedOperationException::ReflexName =
    "java.lang.UnsupportedOperationException";

const char * ReplicaNotFoundException::ReflexName =
    "org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException";

const char * NameNodeStandbyException::ReflexName =
    "org.apache.hadoop.ipc.StandbyException";

const char * HdfsInvalidBlockToken::ReflexName =
    "org.apache.hadoop.security.token.SecretManager$InvalidToken";

const char * SaslException::ReflexName = "javax.security.sasl.SaslException";

const char * RpcNoSuchMethodException::ReflexName =
    "org.apache.hadoop.ipc.RpcNoSuchMethodException";

const char * InvalidParameter::ReflexName = "java.lang.IllegalArgumentException";

const char * HadoopIllegalArgumentException::ReflexName =
    "org.apache.hadoop.HadoopIllegalArgumentException";

const char * RecoveryInProgressException::ReflexName =
    "org.apache.hadoop.hdfs.protocol.RecoveryInProgressException";

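// Illustrative only (not part of this library's API): one way the ReflexName
// strings above could be used to rethrow a remote exception reported by the
// server as the matching C++ type. The function name rethrowRemoteException is
// hypothetical, and the sketch assumes the subclasses declared in Exception.h
// accept the same (message, file, line, stack) constructor arguments as
// HdfsException below.
//
//   static void rethrowRemoteException(const std::string & javaClassName,
//                                      const std::string & message,
//                                      const char * file, int line,
//                                      const char * stack) {
//       if (javaClassName == AccessControlException::ReflexName) {
//           throw AccessControlException(message, file, line, stack);
//       } else if (javaClassName == FileNotFoundException::ReflexName) {
//           throw FileNotFoundException(message, file, line, stack);
//       } else {
//           // Unknown class names fall back to the generic I/O exception.
//           throw HdfsIOException(message, file, line, stack);
//       }
//   }
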
HdfsException::HdfsException(const std::string & arg, const char * file,
                             int line, const char * stack) :
    std::runtime_error(arg) {
    // Build the detailed message: source location, the original message, and
    // the captured stack trace.
    std::ostringstream ss;
    ss << file << ": " << line << ": " << arg << std::endl << stack;
    detail = ss.str();
}
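
// Illustrative only: a minimal sketch of how an exception defined here might
// be thrown and caught, assuming HdfsIOException derives from HdfsException
// and takes the same (message, file, line, stack) constructor arguments as
// declared in Exception.h. The stack string is a placeholder; real call sites
// would supply a captured stack trace. (The sketch needs <iostream>.)
//
//   try {
//       throw HdfsIOException("connection reset by peer",
//                             __FILE__, __LINE__, "<stack trace>");
//   } catch (const HdfsException & e) {
//       // what() comes from std::runtime_error and returns the message only;
//       // the full "file: line: message\nstack" text is stored in detail.
//       std::cerr << e.what() << std::endl;
//   }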
}