This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.QueryPlan. If you have been wondering what the QueryPlan class does, how to use it, or where to find working examples, the curated code samples below may help.
The QueryPlan class belongs to the org.apache.hadoop.hive.ql package. Five code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
Example 1: setWorkflowAdjacencies
Upvotes: 3
import org.apache.hadoop.hive.ql.QueryPlan; // import the required package/class
public static void setWorkflowAdjacencies(Configuration conf, QueryPlan plan) {
  try {
    Graph stageGraph = plan.getQueryPlan().getStageGraph();
    if (stageGraph == null) {
      return;
    }
    List<Adjacency> adjList = stageGraph.getAdjacencyList();
    if (adjList == null) {
      return;
    }
    for (Adjacency adj : adjList) {
      List<String> children = adj.getChildren();
      if (children == null || children.isEmpty()) {
        return;
      }
      // Expose each stage's children under mapreduce.workflow.adjacency.<node>
      // so workflow tools can reconstruct the stage DAG from the job configuration.
      conf.setStrings("mapreduce.workflow.adjacency." + adj.getNode(),
          children.toArray(new String[children.size()]));
    }
  } catch (IOException e) {
    // Ignored: workflow adjacency information is best-effort metadata.
  }
}
Contributor: mini666 | Project: hive-phoenix-handler | Lines: 22 | Source: Utilities.java
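For context, here is a minimal sketch of how a helper like this might be invoked from a Hive pre-execution hook. The hook class name WorkflowAdjacencyHook is hypothetical, and the sketch assumes the Utilities class above is on the classpath and that the hook is registered via hive.exec.pre.hooks.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;

// Hypothetical hook class; register it via hive.exec.pre.hooks.
public class WorkflowAdjacencyHook implements ExecuteWithHookContext {
  @Override
  public void run(HookContext hookContext) throws Exception {
    QueryPlan plan = hookContext.getQueryPlan();     // plan of the query about to run
    Configuration jobConf = hookContext.getConf();   // configuration propagated to the jobs
    Utilities.setWorkflowAdjacencies(jobConf, plan); // record stage adjacency for workflow tools
  }
}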
Example 2: SessionStateLite
Upvotes: 2
import org.apache.hadoop.hive.ql.QueryPlan; // import the required package/class
/**
 * Creates a lightweight representation of the session state.
 *
 * @param plan The Hive query plan
 */
public SessionStateLite(QueryPlan plan) {
  SessionState sessionState = SessionState.get();
  this.conf = new HiveConf(sessionState.getConf());
  this.cmd = plan.getQueryStr();
  this.commandType = plan.getOperationName();
  this.queryId = plan.getQueryId();
  this.mapRedStats = new HashMap<>(sessionState.getMapRedStats());
}
Contributor: airbnb | Project: reair | Lines: 16 | Source: SessionStateLite.java
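A sketch of where a constructor like this is typically called, assuming a post-execution hook; the AuditHook class name and the downstream consumer are illustrative assumptions, not part of the project above.

import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;

// Hypothetical post-execution hook; register it via hive.exec.post.hooks.
public class AuditHook implements ExecuteWithHookContext {
  @Override
  public void run(HookContext hookContext) throws Exception {
    QueryPlan plan = hookContext.getQueryPlan();
    // Snapshot the thread-local SessionState while still on the query thread,
    // so the data can safely be handed to another thread (e.g. an async audit writer).
    SessionStateLite snapshot = new SessionStateLite(plan);
    // ... pass 'snapshot' to whatever logging or replication component needs it ...
  }
}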
Example 3: execute
Upvotes: 2
import org.apache.hadoop.hive.ql.QueryPlan; // import the required package/class
/**
 * Process a <code>statement</code>, making <code>hivevar</code> substitutions
 * from <code>parameters</code> and applying session settings from
 * <code>configuration</code>.
 *
 * @return {@link List} of {@link String} results; a statement that produces no
 *         result is indicated by a single-element {@link List} containing an
 *         empty {@link String}
 */
public List<String> execute(String statement, Map<String, String> parameters, Map<String, String> configuration, int maxResults,
    boolean quiet) throws Exception {
  long time = System.currentTimeMillis();
  if (!quiet) {
    log(LOG, "execute", true);
  }
  List<String> results = new ArrayList<>();
  try {
    HiveConf confSession = new HiveConf((HiveConf) getConf());
    confSession.set(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId());
    for (String key : configuration.keySet()) {
      confSession.set(key, configuration.get(key));
    }
    statement = new StrSubstitutor(parameters, "${hivevar:", "}")
        .replace(statement.trim())
        .replaceAll("(?i)LOCATION '/", "LOCATION '" + DfsServer.getInstance().getPathUri("/"));
    CommandProcessor commandProcessor = CommandProcessorFactory.getForHiveCommand(
        statement.split("\\s+"), confSession);
    if (commandProcessor == null) {
      ((Driver) (commandProcessor = new Driver(confSession))).setMaxRows(maxResults);
    }
    if (!quiet) {
      log(LOG, "execute", "statement:\n" + statement, true);
    }
    String responseErrorMessage = null;
    int responseCode = commandProcessor.run(statement).getResponseCode();
    if (commandProcessor instanceof Driver) {
      ((Driver) commandProcessor).getResults(results);
      responseErrorMessage = ((Driver) commandProcessor).getErrorMsg();
    }
    if (!quiet) {
      if (responseCode != 0 || responseErrorMessage != null) {
        log(LOG, "execute",
            "error code [" + responseCode + "]" + (responseErrorMessage != null ? " message [" + responseErrorMessage + " ]" : ""), true);
      } else {
        log(LOG, "execute", "results count [" + results.size() + (results.size() == maxResults ? " (MAX)" : "") + "]:\n"
            + StringUtils.join(results.toArray(), "\n"), true);
      }
      log(LOG, "execute", "finished in [" + (System.currentTimeMillis() - time) + "] ms", true);
    }
    if (responseCode != 0 || responseErrorMessage != null) {
      throw new SQLException("Statement executed with error response code [" + responseCode + "]"
          + (responseErrorMessage != null ? " and error message [" + responseErrorMessage + " ]" : ""));
    }
  } finally {
    if (SessionState.get().getSparkSession() != null) {
      SessionState.get().getSparkSession().close();
    }
  }
  return results;
}
Contributor: ggear | Project: cloudera-framework | Lines: 59 | Source: HiveServer.java
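A hypothetical call to this method might look as follows; the instance name hiveServer, the table, the hivevar value, and the session setting are all assumptions for illustration, not part of the project above.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

Map<String, String> hivevars = new HashMap<>();
hivevars.put("year", "2024");                                 // substituted for ${hivevar:year}

Map<String, String> session = new HashMap<>();
session.put("hive.exec.dynamic.partition.mode", "nonstrict"); // applied to the session HiveConf

List<String> rows = hiveServer.execute(
    "SELECT COUNT(*) FROM events WHERE year = ${hivevar:year}",
    hivevars, session, 100 /* maxResults */, false /* quiet */);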
Example 4: getPlan
Upvotes: 2
import org.apache.hadoop.hive.ql.QueryPlan; // import the required package/class
/**
 * @return The current query plan associated with this Driver, if any.
 */
public QueryPlan getPlan() {
  return plan;
}
Contributor: adrian-wang | Project: project-panthera-skin | Lines: 7 | Source: SkinDriver.java
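A small sketch of how the accessor might be used after a statement has been compiled; the skinDriver variable and the console output are illustrative assumptions.

QueryPlan plan = skinDriver.getPlan();
if (plan != null) {
  String queryId = plan.getQueryId();           // id Hive assigned to the query
  String operation = plan.getOperationName();   // e.g. QUERY, CREATETABLE, DROPTABLE
  System.out.println("query " + queryId + " runs operation " + operation);
}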
Example 5: initialize
Upvotes: 2
import org.apache.hadoop.hive.ql.QueryPlan; // import the required package/class
@Override
public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) {
  super.initialize(conf, queryPlan, driverContext);
  this.conf = conf;
}
Contributor: apache | Project: incubator-sentry | Lines: 6 | Source: SentryGrantRevokeTask.java