Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,9 @@ private void configureConnection(Utils.JdbcConnectionParams connParams)
} else {
// for remote JDBC client, try to set the conf var using 'set foo=bar'
Statement stmt = createStatement();

stmt.execute("use " + connParams.getDbName());

for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
stmt.close();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -401,9 +401,15 @@ public static String unescapeIdentifier(String val) {
if (val == null) {
return null;
}

if (val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`') {
val = val.substring(1, val.length() - 1);
}

if (val.charAt(0) == '"' && val.charAt(val.length() - 1) == '"') {
val = val.substring(1, val.length() - 1);
}

return val;
}

Expand Down
13 changes: 11 additions & 2 deletions ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
Original file line number Diff line number Diff line change
Expand Up @@ -172,8 +172,8 @@ tableSample
tableSource
@init { gParent.msgs.push("table source"); }
@after { gParent.msgs.pop(); }
: tabname=tableName (ts=tableSample)? (alias=identifier)?
-> ^(TOK_TABREF $tabname $ts? $alias?)
: tabname=tableName (props=tableProperties)? (ts=tableSample)? (KW_AS? alias=Identifier)?
-> ^(TOK_TABREF $tabname $props? $ts? $alias?)
;

tableName
Expand All @@ -183,6 +183,15 @@ tableName
db=identifier DOT tab=identifier
-> ^(TOK_TABNAME $db $tab)
|
db=identifier DOT tab2=StringLiteral
-> ^(TOK_TABNAME $db Identifier[$tab2.text.substring(1,$tab2.text.length()-1)])
|
db2=StringLiteral DOT tab=identifier
-> ^(TOK_TABNAME Identifier[$db2.text.substring(1,$db2.text.length()-1)] $tab)
|
db2=StringLiteral DOT tab2=StringLiteral
-> ^(TOK_TABNAME Identifier[$db2.text.substring(1,$db2.text.length()-1)] Identifier[$tab2.text.substring(1,$tab2.text.length()-1)])
|
tab=identifier
-> ^(TOK_TABNAME $tab)
;
Expand Down
1 change: 1 addition & 0 deletions ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
Original file line number Diff line number Diff line change
Expand Up @@ -295,6 +295,7 @@ BITWISEOR : '|';
BITWISEXOR : '^';
QUESTION : '?';
DOLLAR : '$';
DOUBLEQUOTE : '\"'

// LITERALS
fragment
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2619,9 +2619,11 @@ && isRegex(unescapeIdentifier(expr.getChild(0).getText()))) {
pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
null, expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
} else if (expr.getType() == HiveParser.DOT
&& expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
&& ((expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
&& inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
.getChild(0).getText().toLowerCase())) && !hasAsClause
.getChild(0).getText().toLowerCase()))) || (expr.getChild(0).getType() == HiveParser.StringLiteral
&& inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0).getText().toLowerCase()))) )
&& !hasAsClause
&& !inputRR.getIsExprResolver()
&& isRegex(unescapeIdentifier(expr.getChild(1).getText()))) {
// In case the expression is TABLE.COL (col can be regex).
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -956,6 +956,27 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
.getIsVirtualCol());
}

if (expr.getType() == HiveParser.DOT
&& expr.getChild(0).getType() == HiveParser.StringLiteral
&& nodeOutputs[0] != null) {

RowResolver input = ctx.getInputRR();
String tableAlias = BaseSemanticAnalyzer.unescapeIdentifier(expr
.getChild(0).getText());
// NOTE: tableAlias must be a valid non-ambiguous table alias,
// because we've checked that in TOK_TABLE_OR_COL's process method.
ColumnInfo colInfo = input.get(tableAlias,
((ExprNodeConstantDesc) nodeOutputs[1]).getValue().toString());

if (colInfo == null) {
ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)), expr);
return null;
}
return new ExprNodeColumnDesc(colInfo.getType(), colInfo
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol());
}

// Return nulls for conversion operators
if (conversionFunctionTextHashMap.keySet().contains(expr.getType())
|| specialFunctionTextHashMap.keySet().contains(expr.getType())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@

package org.apache.hive.service.cli.operation;

import java.util.List;

import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
Expand Down Expand Up @@ -46,6 +48,20 @@ protected GetCatalogsOperation(HiveSession parentSession) {
/**
 * Fills the result set with one row per database name obtained from the
 * metastore (databases are surfaced as JDBC catalogs here), then moves the
 * operation to FINISHED. Any metastore failure flips the state to ERROR
 * and is rethrown wrapped in a HiveSQLException.
 *
 * @throws HiveSQLException if the metastore lookup fails
 */
@Override
public void run() throws HiveSQLException {
  setState(OperationState.RUNNING);
  try {
    List<String> dbNames = getParentSession().getMetaStoreClient().getAllDatabases();
    if (dbNames != null && !dbNames.isEmpty()) {
      for (String dbName : dbNames) {
        rowSet.addRow(RESULT_SET_SCHEMA, new Object[] { dbName });
      }
    }
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException(e);
  }
  setState(OperationState.FINISHED);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ public void run() throws HiveSQLException {
continue;
}
Object[] rowData = new Object[] {
null, // TABLE_CAT
table.getDbName(), // TABLE_CAT
table.getDbName(), // TABLE_SCHEM
table.getTableName(), // TABLE_NAME
column.getName(), // COLUMN_NAME
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.HashMap;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
Expand Down Expand Up @@ -50,6 +52,16 @@ public class GetTablesOperation extends MetadataOperation {
.addStringColumn("TABLE_NAME", "Table name.")
.addStringColumn("TABLE_TYPE", "The table type, e.g. \"TABLE\", \"VIEW\", etc.")
.addStringColumn("REMARKS", "Comments about the table.");

// Translates metastore table-type names into the JDBC-style names returned
// to DatabaseMetaData.getTables() clients. Types with no mapping are passed
// through unchanged at the point of use. Declared final and initialized at
// declaration: the map is populated once in the static initializer and never
// reassigned, so a mutable non-final static reference was unnecessary.
private static final Map<String, String> typeMap = new HashMap<String, String>();

static {
  typeMap.put("MANAGED_TABLE", "TABLE");
  typeMap.put("EXTERNAL_TABLE", "TABLE");
  typeMap.put("MANAGED_VIEW", "VIEW");
}

protected GetTablesOperation(HiveSession parentSession,
String catalogName, String schemaName, String tableName,
Expand Down Expand Up @@ -77,13 +89,14 @@ public void run() throws HiveSQLException {
List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
for (Table table : metastoreClient.getTableObjectsByName(dbName, tableNames)) {
Object[] rowData = new Object[] {
DEFAULT_HIVE_CATALOG,
dbName,
table.getDbName(),
table.getTableName(),
table.getTableType(),
typeMap.containsKey(table.getTableType())? typeMap.get(table.getTableType()) : table.getTableType(),
table.getParameters().get("comment")
};
if (tableTypes.isEmpty() || tableTypes.contains(table.getTableType())) {

if (tableTypes.isEmpty() || tableTypes.contains( (typeMap.containsKey(table.getTableType())? typeMap.get(table.getTableType()) : table.getTableType()) )) {
rowSet.addRow(RESULT_SET_SCHEMA, rowData);
}
}
Expand Down