本文整理了 Java 中 org.molgenis.data.Repository.getName 方法的一些代码示例,展示了 Repository.getName 的具体用法。这些代码示例主要来源于 Github/Stackoverflow/Maven 等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Repository.getName 方法的具体详情如下:
包路径:org.molgenis.data.Repository
类名称:Repository
方法名:getName

方法介绍:暂无
代码示例来源:origin: org.molgenis/molgenis-data-merge
/**
 * Builds the display label for a merged attribute: the original attribute label
 * suffixed with the name of the repository it came from, in parentheses.
 */
private String getMergedAttributeLabel(Repository repository, String attributeLabel)
{
	StringBuilder mergedLabel = new StringBuilder(attributeLabel);
	mergedLabel.append('(').append(repository.getName()).append(')');
	return mergedLabel.toString();
}
代码示例来源:origin: org.molgenis/molgenis-data-merge
/**
 * Builds the name for a merged attribute: the originating repository's name and
 * the original attribute name, joined by an underscore.
 */
private String getMergedAttributeName(Repository repository, String attributeName)
{
	return String.join("_", repository.getName(), attributeName);
}
代码示例来源:origin: org.molgenis/molgenis-data-merge
/**
 * Merges a list of repositories into one. Metadata is merged on the common
 * attributes, which stay at root level; each repository's remaining attributes
 * are grouped into one compound attribute per repository. Data rows are then
 * joined on the common columns.
 *
 * @param repositoryList
 *            the repositories to merge
 * @param commonAttributes
 *            the attributes used as the join/merge key
 * @param mergedRepository
 *            the repository that receives the merged result
 * @param batchSize
 *            number of records to buffer before each add/update round-trip
 * @return mergedRepository containing the merged data
 */
public Repository merge(List<Repository> repositoryList, List<AttributeMetaData> commonAttributes,
		Repository mergedRepository, int batchSize)
{
	// Resolve the target through the data service so writes go through the usual decorators.
	Repository mergeTarget = dataService.getRepository(mergedRepository.getName());
	mergeData(repositoryList, mergeTarget, commonAttributes, batchSize);
	return mergedRepository;
}
代码示例来源:origin: org.molgenis/molgenis-data-annotators
entityTypeId = repository.getName();
代码示例来源:origin: org.molgenis/molgenis-ontology
/**
 * Registers the input repository's entity type with the metadata service and
 * copies its rows, in batches of {@code BATCH_SIZE}, into the newly registered
 * repository.
 */
private void createInputRepository(Repository<Entity> inputRepository) {
  // Register the original input dataset's metadata first so a backing repository exists.
  dataService.getMeta().addEntityType(inputRepository.getEntityType());
  Repository<Entity> persisted = dataService.getRepository(inputRepository.getName());
  inputRepository.forEachBatched(batch -> persisted.add(batch.stream()), BATCH_SIZE);
}
代码示例来源:origin: org.molgenis/molgenis-omx-importer
if (repository.getName().toLowerCase().startsWith(DATASET_PREFIX + "_"))
String identifier = repository.getName().substring((DATASET_PREFIX + "_").length());
代码示例来源:origin: org.molgenis/molgenis-data-rest
new EntityTypeIdentity(repositoryToCopyFrom.getName()), EntityTypePermission.READ_DATA);
if (!readPermission)
throw new EntityTypePermissionDeniedException(EntityTypePermission.READ_DATA, entityTypeId);
.getCapabilities(repositoryToCopyFrom.getName())
.contains(RepositoryCapability.WRITABLE);
if (!writableCapabilities) {
throw new RepositoryNotCapableException(
repositoryToCopyFrom.getName(), RepositoryCapability.WRITABLE);
"Location", Href.concatMetaEntityHrefV2(RestControllerV2.BASE_URI, repository.getName()));
response.setStatus(HttpServletResponse.SC_CREATED);
return repository.getName();
代码示例来源:origin: org.molgenis/molgenis-data-cache
/**
 * Builds the per-repository query cache: bounded in size, expiring entries ten
 * minutes after last access, with hit/miss stats exported to the meter registry
 * under the "l3." prefix.
 */
private LoadingCache<Query<Entity>, List<Object>> createQueryCache(
    Repository<Entity> repository) {
  LOG.trace("Creating Query cache for repository {}", repository.getName());
  Caffeine<Object, Object> cacheSpec =
      Caffeine.newBuilder()
          .recordStats()
          .maximumSize(MAX_CACHE_SIZE_PER_QUERY)
          .expireAfterAccess(10, MINUTES);
  LoadingCache<Query<Entity>, List<Object>> queryCache =
      CaffeinatedGuava.build(cacheSpec, createCacheLoader(repository));
  GuavaCacheMetrics.monitor(
      meterRegistry, queryCache, "l3." + repository.getEntityType().getId());
  return queryCache;
}
代码示例来源:origin: org.molgenis/molgenis-data-cache
/**
 * Creates a cache loader that resolves a {@link Query} to the list of entity
 * identifiers matching it in the given repository.
 *
 * @return the {@link CacheLoader}
 */
private CacheLoader<Query<Entity>, List<Object>> createCacheLoader(
    final Repository<Entity> repository) {
  final String repoName = repository.getName();
  // Restrict the fetch to the id attribute: the cache stores identifiers, not full entities.
  final Fetch idOnlyFetch =
      new Fetch().field(repository.getEntityType().getIdAttribute().getName());
  return new CacheLoader<Query<Entity>, List<Object>>() {
    /**
     * Loads {@link Entity} identifiers for a {@link Query}
     *
     * @param query the cache key to load
     * @return {@link List} of identifier {@link Object}s
     */
    @Override
    public List<Object> load(@Nonnull Query<Entity> query) {
      LOG.trace("Loading identifiers from repository {} for query {}", repoName, query);
      Query<Entity> idQuery = new QueryImpl<>(query).fetch(idOnlyFetch);
      return repository.findAll(idQuery).map(Entity::getIdValue).collect(toList());
    }
  };
}
代码示例来源:origin: org.molgenis/molgenis-omx-importer
if (repository.getName().startsWith(DATASET_SHEET_PREFIX))
String identifier = repository.getName().substring(DATASET_SHEET_PREFIX.length());
violation.setImportInfo(String.format("Sheet: '%s', row: %d", repository.getName(),
violation.getRownr() + 1));
violation.setImportInfo(String.format("Sheet: '%s'", repository.getName()));
代码示例来源:origin: org.molgenis/molgenis-data-annotators
EntityType entityType = dataService.getMeta().getEntityType(repository.getName());
annotator)));
iterateOverEntitiesAndAnnotate(dataService.getRepository(repository.getName()), annotator, action);
代码示例来源:origin: org.molgenis/molgenis-data-merge
DefaultAttributeMetaData repositoryCompoundAttribute = new DefaultAttributeMetaData(repository.getName(),
MolgenisFieldTypes.FieldTypeEnum.COMPOUND);
List<AttributeMetaData> attributeParts = new ArrayList<>();
代码示例来源:origin: org.molgenis/molgenis-ontology
/**
 * Creates and configures a {@link SortaJobExecution} for matching the given
 * input data against an ontology. Generates a fresh result-entity name,
 * persists the input data and an empty result repository (as system), and
 * grants the current user write-meta permission on both entity types.
 *
 * @param inputData the repository holding the terms to match
 * @param jobName human-readable job name
 * @param ontologyIri IRI of the ontology to match against
 * @return the configured job execution (not yet started)
 */
private SortaJobExecution createJobExecution(
    Repository<Entity> inputData, String jobName, String ontologyIri) {
  String resultEntityName = idGenerator.generateId();
  SortaJobExecution sortaJobExecution = sortaJobExecutionFactory.create();
  sortaJobExecution.setIdentifier(resultEntityName);
  sortaJobExecution.setName(jobName);
  // NOTE(review): the result of getCurrentUser() was assigned to an unused local;
  // the call is kept in case it validates the session — TODO confirm it is side-effect free.
  userAccountService.getCurrentUser();
  sortaJobExecution.setSourceEntityName(inputData.getName());
  sortaJobExecution.setDeleteUrl(getSortaServiceMenuUrl() + "/delete/" + resultEntityName);
  sortaJobExecution.setResultEntityName(resultEntityName);
  sortaJobExecution.setThreshold(DEFAULT_THRESHOLD);
  sortaJobExecution.setOntologyIri(ontologyIri);
  // Repository creation needs system privileges; the invoking user may lack them.
  RunAsSystemAspect.runAsSystem(
      () -> {
        createInputRepository(inputData);
        createEmptyResultRepository(jobName, resultEntityName, inputData.getEntityType());
      });
  EntityType resultEntityType = entityTypeFactory.create(resultEntityName);
  permissionSystemService.giveUserWriteMetaPermissions(
      asList(inputData.getEntityType(), resultEntityType));
  return sortaJobExecution;
}
内容来源于网络,如有侵权,请联系作者删除!