本文整理了 Java 中 org.apache.hadoop.hive.metastore.api.Function.getResourceUris() 方法的一些代码示例，展示了 Function.getResourceUris() 的具体用法。这些代码示例主要来源于 GitHub、StackOverflow、Maven 等平台，是从一些精选项目中提取出来的代码，具有较强的参考意义，能在一定程度上帮助到你。Function.getResourceUris() 方法的具体详情如下：
包路径:org.apache.hadoop.hive.metastore.api.Function
类名称:Function
方法名:getResourceUris
暂无
代码示例来源:origin: apache/hive
/**
 * Fetches a function from the metastore for replication.
 *
 * @return the function tuple, or {@code null} when the function should be skipped:
 *         either it has no resources (created without a USING clause) or it was
 *         dropped concurrently by another user.
 */
private HiveWrapper.Tuple<Function> functionTuple(String functionName, String dbName, Hive hiveDb) {
  try {
    HiveWrapper.Tuple<Function> tuple = new HiveWrapper(hiveDb, dbName).function(functionName);
    // getResourceUris() may be null as well as empty for functions created without a
    // USING clause (see other call sites that check null || isEmpty); guard both to
    // avoid an NPE here.
    List<ResourceUri> resourceUris = tuple.object.getResourceUris();
    if (resourceUris == null || resourceUris.isEmpty()) {
      LOG.warn("Not replicating function: " + functionName + " as it seems to have been created "
          + "without USING clause");
      return null;
    }
    return tuple;
  } catch (HiveException e) {
    // This can happen as we are querying the getFunctions before we are getting the actual
    // function; in between there can be a drop function by a user, in which case our call
    // will fail.
    LOG.info("Function " + functionName
        + " could not be found, we are ignoring it as it can be a valid state ", e);
    return null;
  }
}
代码示例来源:origin: apache/hive
// Builds the CreateFunctionDesc used to replay a CREATE FUNCTION event on the replica,
// also (re)populating replCopyTasks with the copy tasks for the function's resources.
// NOTE(review): statement order matters here — the ImmutableList.copyOf must run before
// replCopyTasks is read, because the transformation populates it as a side effect.
private CreateFunctionDesc build() throws SemanticException {
// Reset any copy tasks accumulated by a previous build() invocation.
replCopyTasks.clear();
PrimaryToReplicaResourceFunction conversionFunction =
new PrimaryToReplicaResourceFunction(context, metadata, destinationDbName);
// We explicitly create immutable lists here as it forces the guava lib to run the transformations
// and not do them lazily. The reason being the function class used for transformations additionally
// also creates the corresponding replCopyTasks, which cannot be evaluated lazily. since the query
// plan needs to be complete before we execute and not modify it while execution in the driver.
List<ResourceUri> transformedUris = ImmutableList.copyOf(
Lists.transform(metadata.function.getResourceUris(), conversionFunction)
);
// Collect the copy tasks the eager transformation above just created.
replCopyTasks.addAll(conversionFunction.replCopyTasks);
String fullQualifiedFunctionName = FunctionUtils.qualifyFunctionName(
metadata.function.getFunctionName(), destinationDbName
);
// For bootstrap load, the create function should be always performed.
// Only for incremental load, need to validate if event is newer than the database.
ReplicationSpec replSpec = (context.dmd == null) ? null : context.eventOnlyReplicationSpec();
return new CreateFunctionDesc(
fullQualifiedFunctionName, false, metadata.function.getClassName(),
transformedUris, replSpec
);
}
}
代码示例来源:origin: apache/hive
List<ResourceUri> resources = fn.getResourceUris();
if (resources == null || resources.isEmpty()) {
LOG.warn("Missing resources for " + fqfn);
代码示例来源:origin: apache/hive
/**
 * Resolves a permanent UDF from the metastore and registers it with the local function
 * registry. This is called outside of the lock. Some of the methods that are called
 * transitively by this (e.g. addFunction) will take the lock again and then release it,
 * which is ok.
 *
 * @return the registered FunctionInfo, or {@code null} if the function does not exist,
 *         is not a valid UDF class, or the lookup fails for any reason (best-effort).
 */
private FunctionInfo getFunctionInfoFromMetastoreNoLock(String functionName, HiveConf conf) {
  try {
    String[] qualifiedParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
    String dbName = qualifiedParts[0].toLowerCase();
    Function metastoreFunc = Hive.get(conf).getFunction(dbName, qualifiedParts[1]);
    if (metastoreFunc == null) {
      return null;
    }
    // Found UDF in metastore - now add it to the function registry.
    FunctionInfo info = registerPermanentFunction(functionName, metastoreFunc.getClassName(),
        true, FunctionTask.toFunctionResource(metastoreFunc.getResourceUris()));
    if (info != null) {
      return info;
    }
    LOG.error(metastoreFunc.getClassName() + " is not a valid UDF class and was not registered");
  } catch (Throwable e) {
    // Best-effort lookup: any failure is treated as "function not found".
    LOG.info("Unable to look up " + functionName + " in metastore", e);
  }
  return null;
}
}
代码示例来源:origin: apache/hive
/**
 * Synchronizes the local function registry with the metastore: registers every persistent
 * function found in the metastore, then unregisters local permanent functions that no
 * longer exist there. Per-function failures are logged and skipped (best-effort).
 *
 * @throws HiveException if the function list cannot be fetched from the metastore
 */
public void reloadFunctions() throws HiveException {
  // Snapshot of currently registered qualified ("db.func") permanent function names;
  // whatever remains after the registration loop is stale and gets unregistered below.
  HashSet<String> registryFunctions = new HashSet<String>(
      FunctionRegistry.getFunctionNames(".+\\..+"));
  for (Function function : getAllFunctions()) {
    String functionName = function.getFunctionName();
    try {
      LOG.info("Registering function " + functionName + " " + function.getClassName());
      String qualFunc = FunctionUtils.qualifyFunctionName(functionName, function.getDbName());
      FunctionRegistry.registerPermanentFunction(qualFunc, function.getClassName(), false,
          FunctionTask.toFunctionResource(function.getResourceUris()));
      registryFunctions.remove(qualFunc);
    } catch (Exception e) {
      // Attach the exception so the failure cause is not silently dropped.
      LOG.warn("Failed to register persistent function " +
          functionName + ":" + function.getClassName() + ". Ignore and continue.", e);
    }
  }
  // unregister functions from local system registry that are not in getAllFunctions()
  for (String functionName : registryFunctions) {
    try {
      FunctionRegistry.unregisterPermanentFunction(functionName);
    } catch (Exception e) {
      // Fixed the missing space before "on reload" and attach the exception for diagnosis.
      LOG.warn("Failed to unregister persistent function " +
          functionName + " on reload. Ignore and continue.", e);
    }
  }
}
代码示例来源:origin: apache/hive
TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
List<ResourceUri> resourceUris = new ArrayList<>();
for (ResourceUri uri : function.getResourceUris()) {
Path inputPath = new Path(uri.getUri());
if ("hdfs".equals(inputPath.toUri().getScheme())) {
assert resourceUris.size() == this.function.getResourceUris().size();
copyObj.setResourceUris(resourceUris);
代码示例来源:origin: apache/hive
/**
 * Queues localization work for every metastore function that has resources, followed by
 * a classloader refresh task.
 *
 * @throws HiveException if the metastore client cannot be obtained
 */
public void startLocalizeAllFunctions() throws HiveException {
  Hive hive = Hive.get(false);
  // Do not allow embedded metastore in LLAP unless we are in test.
  try {
    hive.getMSC(HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST), true);
  } catch (MetaException e) {
    throw new HiveException(e);
  }
  for (Function fn : hive.getAllFunctions()) {
    List<ResourceUri> uris = fn.getResourceUris();
    if (uris == null || uris.isEmpty()) {
      continue; // Nothing to localize.
    }
    String qualifiedName = fn.getDbName() + "." + fn.getFunctionName();
    FnResources collected = new FnResources();
    resourcesByFn.put(qualifiedName, collected);
    workQueue.add(new LocalizeFn(qualifiedName, uris, collected, fn.getClassName(), false));
  }
  workQueue.add(new RefreshClassloader());
}
代码示例来源:origin: apache/drill
/**
 * Resolves a permanent UDF from the metastore and registers it with the local
 * function registry. Returns {@code null} if the function does not exist, is not a
 * valid UDF class, or the lookup fails for any reason (best-effort lookup).
 *
 * This is called outside of the lock. Some of the methods that are called transitively by
 * this (e.g. addFunction) will take the lock again and then release it, which is ok.
 */
private FunctionInfo getFunctionInfoFromMetastoreNoLock(String functionName, HiveConf conf) {
try {
// parts[0] = database name (lowercased for the metastore lookup), parts[1] = function name.
String[] parts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
Function func = Hive.get(conf).getFunction(parts[0].toLowerCase(), parts[1]);
if (func == null) {
return null;
}
// Found UDF in metastore - now add it to the function registry.
FunctionInfo fi = registerPermanentFunction(functionName, func.getClassName(), true,
FunctionTask.toFunctionResource(func.getResourceUris()));
if (fi == null) {
LOG.error(func.getClassName() + " is not a valid UDF class and was not registered");
return null;
}
return fi;
} catch (Throwable e) {
// Deliberately broad: any failure here is treated as "function not found".
LOG.info("Unable to look up " + functionName + " in metastore", e);
}
return null;
}
}
代码示例来源:origin: apache/drill
/**
 * Synchronizes the local function registry with the metastore: registers every persistent
 * function found in the metastore, then unregisters local permanent functions that no
 * longer exist there. Per-function failures are logged and skipped (best-effort).
 *
 * @throws HiveException if the function list cannot be fetched from the metastore
 */
public void reloadFunctions() throws HiveException {
  // Snapshot of currently registered qualified ("db.func") permanent function names;
  // whatever remains after the registration loop is stale and gets unregistered below.
  HashSet<String> registryFunctions = new HashSet<String>(
      FunctionRegistry.getFunctionNames(".+\\..+"));
  for (Function function : getAllFunctions()) {
    String functionName = function.getFunctionName();
    try {
      LOG.info("Registering function " + functionName + " " + function.getClassName());
      String qualFunc = FunctionUtils.qualifyFunctionName(functionName, function.getDbName());
      FunctionRegistry.registerPermanentFunction(qualFunc, function.getClassName(), false,
          FunctionTask.toFunctionResource(function.getResourceUris()));
      registryFunctions.remove(qualFunc);
    } catch (Exception e) {
      // Attach the exception so the failure cause is not silently dropped.
      LOG.warn("Failed to register persistent function " +
          functionName + ":" + function.getClassName() + ". Ignore and continue.", e);
    }
  }
  // unregister functions from local system registry that are not in getAllFunctions()
  for (String functionName : registryFunctions) {
    try {
      FunctionRegistry.unregisterPermanentFunction(functionName);
    } catch (Exception e) {
      // Fixed the missing space before "on reload" and attach the exception for diagnosis.
      LOG.warn("Failed to unregister persistent function " +
          functionName + " on reload. Ignore and continue.", e);
    }
  }
}
代码示例来源:origin: apache/hive
if (func.getResourceUris() != null && !func.getResourceUris().isEmpty()) {
for (ResourceUri uri : func.getResourceUris()) {
if (uri.getUri().toLowerCase().startsWith("hdfs:") && isSourceOfReplication) {
wh.addToChangeManagement(new Path(uri.getUri()));
代码示例来源:origin: apache/hive
// Thrift-generated accessor: returns the value of the requested field as an Object.
// Primitive-valued fields (e.g. createTime) are autoboxed by the return statement.
public Object getFieldValue(_Fields field) {
switch (field) {
case FUNCTION_NAME:
return getFunctionName();
case DB_NAME:
return getDbName();
case CLASS_NAME:
return getClassName();
case OWNER_NAME:
return getOwnerName();
case OWNER_TYPE:
return getOwnerType();
case CREATE_TIME:
return getCreateTime();
case FUNCTION_TYPE:
return getFunctionType();
case RESOURCE_URIS:
return getResourceUris();
case CAT_NAME:
return getCatName();
}
// Unreachable unless a new _Fields constant is added without updating this switch.
throw new IllegalStateException();
}
代码示例来源:origin: apache/hive
/**
 * Converts a Thrift {@code Function} into its JDO model counterpart.
 *
 * @param func the Thrift function; may be {@code null}, in which case {@code null} is returned
 * @return the corresponding MFunction, or {@code null} for a null input
 * @throws InvalidObjectException if the function's database does not exist
 */
private MFunction convertToMFunction(Function func) throws InvalidObjectException {
  if (func == null) {
    return null;
  }
  // Fall back to the configured default catalog when the function carries none.
  String catName = func.isSetCatName() ? func.getCatName() : getDefaultCatalog(conf);
  MDatabase database = null;
  try {
    database = getMDatabase(catName, func.getDbName());
  } catch (NoSuchObjectException e) {
    LOG.error("Database does not exist", e);
    throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist.");
  }
  return new MFunction(func.getFunctionName(),
      database,
      func.getClassName(),
      func.getOwnerName(),
      func.getOwnerType().name(),
      func.getCreateTime(),
      func.getFunctionType().getValue(),
      convertToMResourceUriList(func.getResourceUris()));
}
代码示例来源:origin: apache/hive
assertEquals("function owner type", PrincipalType.USER, func.getOwnerType());
assertEquals("function type", funcType, func.getFunctionType());
List<ResourceUri> resources = func.getResourceUris();
assertEquals("Resource list size", resList.size(), resources.size());
for (ResourceUri res : resources) {
代码示例来源:origin: apache/hive
/**
 * Verifies the defaults applied when a function is created with only the required
 * attributes set: no owner name, an empty resource list, and a populated create time.
 */
@Test
public void testCreateFunctionDefaultValues() throws Exception {
  Function newFunction = new Function();
  newFunction.setDbName(OTHER_DATABASE);
  newFunction.setFunctionName("test_function");
  newFunction.setClassName(TEST_FUNCTION_CLASS);
  newFunction.setOwnerType(PrincipalType.USER);
  newFunction.setFunctionType(FunctionType.JAVA);
  client.createFunction(newFunction);

  Function created =
      client.getFunction(newFunction.getDbName(), newFunction.getFunctionName());
  Assert.assertNull("Comparing OwnerName", created.getOwnerName());
  Assert.assertEquals("Comparing ResourceUris", 0, created.getResourceUris().size());
  // The create time is set
  Assert.assertNotEquals("Comparing CreateTime", 0, created.getCreateTime());
}
代码示例来源:origin: apache/hive
assertEquals("function owner type", PrincipalType.USER, func.getOwnerType());
assertEquals("function type", funcType, func.getFunctionType());
List<ResourceUri> resources = func.getResourceUris();
assertTrue("function resources", resources == null || resources.size() == 0);
代码示例来源:origin: apache/hive
Assert.assertEquals(function.getOwnerType(), createdFunction.getOwnerType());
Assert.assertEquals(function.getFunctionType(), createdFunction.getFunctionType());
Assert.assertEquals(function.getResourceUris(), createdFunction.getResourceUris());
代码示例来源:origin: org.spark-project.hive/hive-metastore
// Thrift-generated accessor (older generator variant without CAT_NAME): returns the
// value of the requested field as an Object, boxing createTime explicitly.
public Object getFieldValue(_Fields field) {
switch (field) {
case FUNCTION_NAME:
return getFunctionName();
case DB_NAME:
return getDbName();
case CLASS_NAME:
return getClassName();
case OWNER_NAME:
return getOwnerName();
case OWNER_TYPE:
return getOwnerType();
case CREATE_TIME:
return Integer.valueOf(getCreateTime());
case FUNCTION_TYPE:
return getFunctionType();
case RESOURCE_URIS:
return getResourceUris();
}
// Unreachable unless a new _Fields constant is added without updating this switch.
throw new IllegalStateException();
}
代码示例来源:origin: org.apache.hive/hive-llap-server
/**
 * Queues localization work for every metastore function that has resources, followed by
 * a classloader refresh task.
 *
 * @throws HiveException if the metastore client cannot be obtained
 */
public void startLocalizeAllFunctions() throws HiveException {
Hive hive = Hive.get(false);
// Do not allow embedded metastore in LLAP unless we are in test.
try {
hive.getMSC(HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST), true);
} catch (MetaException e) {
throw new HiveException(e);
}
List<Function> fns = hive.getAllFunctions();
for (Function fn : fns) {
// Qualified "db.function" name used as the key for the function's resources.
String fqfn = fn.getDbName() + "." + fn.getFunctionName();
// getResourceUris() may be null as well as empty; both mean no resources.
List<ResourceUri> resources = fn.getResourceUris();
if (resources == null || resources.isEmpty()) continue; // Nothing to localize.
FnResources result = new FnResources();
resourcesByFn.put(fqfn, result);
workQueue.add(new LocalizeFn(fqfn, resources, result, fn.getClassName(), false));
}
// Refresh the classloader once after all localization tasks have been queued.
workQueue.add(new RefreshClassloader());
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Re-registers every persistent function from every database in the metastore with the
 * local function registry. Per-function registration failures are logged and skipped.
 *
 * @throws HiveException if databases or functions cannot be listed
 */
public static void reloadFunctions() throws HiveException {
  Hive db = Hive.get();
  for (String dbName : db.getAllDatabases()) {
    for (String functionName : db.getFunctions(dbName, "*")) {
      Function function = db.getFunction(dbName, functionName);
      try {
        FunctionRegistry.registerPermanentFunction(
            FunctionUtils.qualifyFunctionName(functionName, dbName), function.getClassName(),
            false, FunctionTask.toFunctionResource(function.getResourceUris()));
      } catch (Exception e) {
        // Attach the exception so the failure cause is not silently dropped.
        LOG.warn("Failed to register persistent function " +
            functionName + ":" + function.getClassName() + ". Ignore and continue.", e);
      }
    }
  }
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Converts a Thrift {@code Function} into its JDO model counterpart (pre-catalog variant).
 *
 * @param func the Thrift function; may be {@code null}, in which case {@code null} is returned
 * @return the corresponding MFunction, or {@code null} for a null input
 * @throws InvalidObjectException if the function's database does not exist
 */
private MFunction convertToMFunction(Function func) throws InvalidObjectException {
if (func == null) {
return null;
}
MDatabase mdb = null;
try {
mdb = getMDatabase(func.getDbName());
} catch (NoSuchObjectException e) {
LOG.error(StringUtils.stringifyException(e));
throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist.");
}
MFunction mfunc = new MFunction(func.getFunctionName(),
mdb,
func.getClassName(),
func.getOwnerName(),
func.getOwnerType().name(),
func.getCreateTime(),
func.getFunctionType().getValue(),
convertToMResourceUriList(func.getResourceUris()));
return mfunc;
}
内容来源于网络,如有侵权,请联系作者删除!