This article collects typical usage examples of the Java class org.apache.clerezza.rdf.ontologies.RDFS. If you are wondering what the RDFS class does, how to use it, or are simply looking for working examples, the curated code samples below may help.
The RDFS class belongs to the org.apache.clerezza.rdf.ontologies package. Ten code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
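As quick orientation before the examples: RDFS is a generated vocabulary class whose static fields are IRI constants for the RDF Schema terms (RDFS.label and RDFS.comment appear throughout the examples below). A minimal sketch of the typical pattern, assuming the newer Clerezza commons-rdf API used in Examples 2 and 8; the exact import paths can differ between Clerezza versions, and the resource IRI is hypothetical:

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
import org.apache.clerezza.rdf.ontologies.RDFS;

Graph graph = new SimpleGraph();
IRI thing = new IRI("http://example.org/thing"); // hypothetical resource
// RDFS.label and RDFS.comment are static IRI constants on the vocabulary class
graph.add(new TripleImpl(thing, RDFS.label, new PlainLiteralImpl("A thing")));
graph.add(new TripleImpl(thing, RDFS.comment, new PlainLiteralImpl("An example resource")));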
Example 1: ContentStoreImpl
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
public ContentStoreImpl() {
    final List<IRI> subjectLabelPath = new ArrayList<IRI>();
    subjectLabelPath.add(DC.subject);
    subjectLabelPath.add(RDFS.label);
    subjectLabel = new PathVirtualProperty(subjectLabelPath, false);
    List<VirtualProperty> joinedProperties = new ArrayList<VirtualProperty>();
    joinedProperties.add(contentProperty);
    joinedProperties.add(subjectLabel);
    labelsAndContent = new JoinVirtualProperty(joinedProperties, false);
}
Developer: fusepool, Project: fusepool-ecs, Lines: 11, Source: ContentStoreImpl.java
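For context: PathVirtualProperty builds a virtual property that follows a chain of predicates (here dc:subject, then rdfs:label), and JoinVirtualProperty concatenates several properties into one. A hypothetical variation using the same constructor shapes, resolving creator names instead; DC.creator is assumed to exist alongside DC.subject in Clerezza's DC vocabulary class, and the boolean flag is copied from the example above rather than interpreted:

List<IRI> creatorNamePath = new ArrayList<IRI>();
creatorNamePath.add(DC.creator);  // assumed constant, analogous to DC.subject
creatorNamePath.add(RDFS.label);
VirtualProperty creatorName = new PathVirtualProperty(creatorNamePath, false);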
Example 2: getEntity
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
@GET
@Path("entity")
public RdfViewable getEntity(@QueryParam("uri") IRI entityUri) {
    final Graph resultGraph = new SimpleGraph();
    addResourceDescription(entityUri, resultGraph);
    final GraphNode resultNode = new GraphNode(entityUri, resultGraph);
    resultNode.addPropertyValue(RDFS.comment, "here you go");
    //TODO use own rendering spec
    return new RdfViewable("ContentStoreView", resultNode, ContentStoreImpl.class);
}
Developer: fusepool, Project: fusepool-ecs, Lines: 12, Source: ContentStoreImpl.java
Example 3: addRelevantDescription
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
private void addRelevantDescription(GraphNode cgContent, Graph resultGraph, boolean withContent) {
    Lock l = cgContent.readLock();
    l.lock();
    try {
        Iterator<Literal> valueIter = cgContent.getLiterals(SIOC.content);
        //if (!withContent) {
        while (valueIter.hasNext()) {
            final Literal valueLit = valueIter.next();
            final String textualContent = valueLit.getLexicalForm();
            final String preview = textualContent.substring(
                    0, Math.min(PREVIEW_LENGTH, textualContent.length()))
                    .replace('\n', ' ')
                    .replace("\r", "");
            Language language = valueLit.getLanguage();
            resultGraph.add(new TripleImpl((BlankNodeOrIRI) cgContent.getNode(), ECS.textPreview,
                    new PlainLiteralImpl(preview, language)));
        }
        //}
        copyProperties(cgContent, resultGraph, DCTERMS.title, DCTERMS.abstract_,
                RDFS.comment, DC.description, MEDIA_TITLE);
        if (withContent) {
            copyProperties(cgContent, resultGraph, SIOC.content);
        }
    } finally {
        l.unlock();
    }
}
Developer: fusepool, Project: fusepool-ecs, Lines: 28, Source: ContentStoreImpl.java
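copyProperties itself is not shown on this page. A plausible sketch of what it does, assuming only GraphNode methods already used in these examples (getNode(), getGraph()) and that each listed predicate's triples are copied verbatim into the target graph:

private void copyProperties(GraphNode source, Graph target, IRI... properties) {
    for (IRI property : properties) {
        // copy every triple with this subject and predicate into the result graph
        Iterator<Triple> triples = source.getGraph().filter(
                (BlankNodeOrIRI) source.getNode(), property, null);
        while (triples.hasNext()) {
            target.add(triples.next());
        }
    }
}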
Example 4: getNode
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
GraphNode getNode() {
    MGraph base = new IndexedMGraph();
    GraphNode result = new GraphNode(uriRef, base);
    result.addPropertyValue(DCTERMS.dateSubmitted, dateSubmitted);
    if (startDate != null) {
        result.addPropertyValue(DCTERMS.dateAccepted, startDate);
    }
    if (endDate != null) {
        result.addPropertyValue(DLC.endDate, endDate);
    }
    result.addPropertyValue(RDFS.comment, messageStringWriter.toString());
    return result;
}
Developer: fusepool, Project: datalifecycle, Lines: 14, Source: Task.java
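A hypothetical way to read the task log back from the returned node, using GraphNode.getLiterals as Examples 3 and 6 do; task is an assumed instance of the surrounding Task class:

GraphNode node = task.getNode();
Iterator<Literal> comments = node.getLiterals(RDFS.comment);
while (comments.hasNext()) {
    System.out.println(comments.next().getLexicalForm());
}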
Example 5: updateDatasetStatus
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
/**
 * Updates the status of a dataset to unpublished.
 * @param datasetUri
 */
private void updateDatasetStatus(final UriRef datasetUri) {
    final LockableMGraph dlcGraph = dlcGraphProvider.getDlcGraph();
    final UriRef statusRef = new UriRef(datasetUri.getUnicodeString() + "/Status");
    dlcGraph.remove(new TripleImpl(statusRef, RDF.type, DLC.Published));
    dlcGraph.remove(new TripleImpl(statusRef, RDFS.label, new PlainLiteralImpl("Published")));
    dlcGraph.add(new TripleImpl(statusRef, RDF.type, DLC.Unpublished));
    dlcGraph.add(new TripleImpl(statusRef, RDFS.label, new PlainLiteralImpl("Unpublished")));
}
Developer: fusepool, Project: datalifecycle, Lines: 13, Source: PipesAdmin.java
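Examples 5 and 7 are mirror images of the same remove-then-add idiom: delete the old rdf:type and rdfs:label triples of the status resource, then assert the new ones. A generic helper, sketched with the same types the two examples use (a hypothetical refactoring, not code from the project):

private void setStatus(LockableMGraph dlcGraph, UriRef statusRef,
        UriRef oldType, String oldLabel, UriRef newType, String newLabel) {
    // remove the previous status triples...
    dlcGraph.remove(new TripleImpl(statusRef, RDF.type, oldType));
    dlcGraph.remove(new TripleImpl(statusRef, RDFS.label, new PlainLiteralImpl(oldLabel)));
    // ...and assert the new ones
    dlcGraph.add(new TripleImpl(statusRef, RDF.type, newType));
    dlcGraph.add(new TripleImpl(statusRef, RDFS.label, new PlainLiteralImpl(newLabel)));
}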
Example 6: getValidDatasetName
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
/**
 * Checks whether a label can be used as a dataset name. To be a valid name a
 * label must be: 1) not null and at least one character long, 2) without
 * white space, 3) unique (no two datasets can have the same name).
 *
 * @return String
 */
private String getValidDatasetName(String label) {
    String newDatasetName = null;
    //check validity
    if (label == null || "".equals(label)) {
        return null;
    }
    // replace white space if present
    newDatasetName = label.replace(' ', '-');
    //check uniqueness of name
    Lock rl = dlcGraphProvider.getDlcGraph().getLock().readLock();
    rl.lock();
    try {
        Iterator<Triple> idatasets = dlcGraphProvider.getDlcGraph().filter(null, RDF.type, DLC.Pipe);
        while (idatasets.hasNext()) {
            GraphNode datasetNode = new GraphNode((UriRef) idatasets.next().getSubject(), dlcGraphProvider.getDlcGraph());
            String datasetName = datasetNode.getLiterals(RDFS.label).next().getLexicalForm();
            if (newDatasetName.equals(datasetName)) {
                return null;
            }
        }
    } finally {
        rl.unlock();
    }
    return newDatasetName;
}
Developer: fusepool, Project: datalifecycle, Lines: 36, Source: SourcingAdmin.java
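The expected behavior, illustrated (calls shown as if the private method were accessible; the last case depends on which rdfs:label values already exist on DLC.Pipe resources in the DLC graph):

getValidDatasetName(null);            // null: invalid
getValidDatasetName("");              // null: invalid
getValidDatasetName("my data set");   // "my-data-set", provided no existing dataset has that name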
Example 7: updateDatasetStatus
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
/**
 * Updates the dataset status to published in the DLC meta graph.
 *
 * @param dataSet
 */
private void updateDatasetStatus(DataSet dataSet) {
    UriRef statusRef = new UriRef(dataSet.getUri().getUnicodeString() + "/Status");
    dlcGraphProvider.getDlcGraph().remove(new TripleImpl(statusRef, RDF.type, DLC.Unpublished));
    dlcGraphProvider.getDlcGraph().remove(new TripleImpl(statusRef, RDFS.label, new PlainLiteralImpl("Unpublished")));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(statusRef, RDF.type, DLC.Published));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(statusRef, RDFS.label, new PlainLiteralImpl("Published")));
}
Developer: fusepool, Project: datalifecycle, Lines: 13, Source: SourcingAdmin.java
Example 8: findEntity
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
@POST
@Path("/{id}/find")
public Response findEntity(@PathParam(value = "id") String id,
        @FormParam(value = "name") String name,
        @FormParam(value = "field") String parsedField,
        @FormParam(value = "lang") String language,
        // @FormParam(value="select") String select,
        @FormParam(value = "limit") Integer limit,
        @FormParam(value = "offset") Integer offset,
        @FormParam(value = "ldpath") String ldpath,
        @Context HttpHeaders headers) {
    String ontologyURI = uriInfo.getBaseUri() + uriInfo.getPath().replaceAll("\\/find$", "");
    Graph g = tcManager.getMGraph(new IRI("ontonethub-graph"));
    Iterator<Triple> tripleIt = g.filter(new IRI(ontologyURI),
            RDFS.label,
            null);
    String ontologyName = null;
    if (tripleIt.hasNext()) {
        ontologyName = ((Literal) tripleIt.next().getObject()).getLexicalForm();
    }
    if (ontologyName == null) return Response.status(Status.NOT_FOUND).build();
    else {
        Site site = getSite(ontologyName);
        log.debug("site/{}/find Request", site.getId());
        Collection<String> supported = new HashSet<String>(JerseyUtils.QUERY_RESULT_SUPPORTED_MEDIA_TYPES);
        supported.add(TEXT_HTML);
        final MediaType acceptedMediaType = getAcceptableMediaType(
                headers, supported, MediaType.APPLICATION_JSON_TYPE);
        if (name == null || name.isEmpty()) {
            if (MediaType.TEXT_HTML_TYPE.isCompatible(acceptedMediaType)) {
                ResponseBuilder rb = Response.ok(new Viewable("find", new SiteResultData(site)));
                rb.header(HttpHeaders.CONTENT_TYPE, TEXT_HTML + "; charset=utf-8");
                //addCORSOrigin(servletContext, rb, headers);
                return rb.build();
            } else {
                return Response.status(Status.BAD_REQUEST)
                        .entity("The name must not be null nor empty for find requests. Missing parameter name.\n")
                        .header(HttpHeaders.ACCEPT, acceptedMediaType).build();
            }
        }
        final String property;
        if (parsedField == null) {
            property = DEFAULT_FIND_FIELD;
        } else {
            parsedField = parsedField.trim();
            if (parsedField.isEmpty()) {
                property = DEFAULT_FIND_FIELD;
            } else {
                property = nsPrefixService.getFullName(parsedField);
                if (property == null) {
                    String message = String.format("The prefix '%s' of the parsed field '%s' is not "
                            + "mapped to any namespace. Please parse the full URI instead!\n",
                            NamespaceMappingUtils.getPrefix(parsedField), parsedField);
                    return Response.status(Status.BAD_REQUEST)
                            .entity(message)
                            .header(HttpHeaders.ACCEPT, acceptedMediaType).build();
                }
            }
        }
        return executeQuery(site, createFieldQueryForFindRequest(
                name, DEFAULT_SELECTED_FIELD, property, language,
                limit == null || limit < 1 ? DEFAULT_FIND_RESULT_LIMIT : limit,
                offset, ldpath),
                headers);
    }
}
Developer: teamdigitale, Project: ontonethub, Lines: 69, Source: OntonethubIndexingResource.java
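The field lookup above relies on nsPrefixService to expand prefixed names to full IRIs and return null for unknown prefixes, which the method maps to a 400 Bad Request. Illustrated with the standard rdfs prefix, assuming it is registered with the service:

String property = nsPrefixService.getFullName("rdfs:label");
// expected: "http://www.w3.org/2000/01/rdf-schema#label"
// an unregistered prefix would yield null and trigger the BAD_REQUEST branch above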
Example 9: queryNearby
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
/**
 * Searches for points of interest within a circle of a given radius.
 * The data used is stored in a named graph.
 * @param point
 * @param graphName
 * @param radius
 * @return
 */
public TripleCollection queryNearby(WGS84Point point, String graphName, double radius) {
    TripleCollection resultGraph = new SimpleMGraph();
    log.info("queryNearby()");
    long startTime = System.nanoTime();
    String pre = StrUtils.strjoinNL("PREFIX spatial: <http://jena.apache.org/spatial#>",
            "PREFIX geo: <http://www.w3.org/2003/01/geo/wgs84_pos#>",
            "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>",
            "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>");
    String qs = StrUtils.strjoinNL("SELECT * ",
            "FROM NAMED <" + graphName + ">",
            "WHERE { ",
            "GRAPH <" + graphName + "> ",
            " { ?s spatial:nearby (" + point.getLat() + " " + point.getLong() + " " + radius + " 'm') ;",
            "      rdf:type ?type ; ",
            "      geo:lat ?lat ;",
            "      geo:long ?lon ; ",
            "      rdfs:label ?label .",
            " }",
            "}");
    log.info(pre + "\n" + qs);
    spatialDataset.begin(ReadWrite.READ);
    int poiCounter = 0;
    try {
        Query q = QueryFactory.create(pre + "\n" + qs);
        QueryExecution qexec = QueryExecutionFactory.create(q, spatialDataset);
        ResultSet results = qexec.execSelect();
        while (results.hasNext()) {
            QuerySolution solution = results.nextSolution();
            String poiUri = solution.getResource("s").getURI();
            String poiName = checkUriName(poiUri);
            String poiType = checkUriName(solution.getResource("type").getURI());
            String poiLabel = solution.getLiteral("label").getString();
            String poiLatitude = solution.getLiteral("lat").getString();
            String poiLongitude = solution.getLiteral("lon").getString();
            log.info("poi name: " + poiName + " label = " + poiLabel);
            UriRef poiRef = new UriRef(poiName);
            String positionUri = checkUriName(point.getUriName());
            resultGraph.add(new TripleImpl(poiRef, schema_containedIn, new UriRef(positionUri)));
            resultGraph.add(new TripleImpl(poiRef, RDFS.label, new PlainLiteralImpl(poiLabel)));
            resultGraph.add(new TripleImpl(poiRef, RDF.type, new UriRef(poiType)));
            resultGraph.add(new TripleImpl(poiRef, geo_lat, new TypedLiteralImpl(poiLatitude, XSD.float_)));
            resultGraph.add(new TripleImpl(poiRef, geo_long, new TypedLiteralImpl(poiLongitude, XSD.float_)));
            poiCounter++;
        }
    } finally {
        spatialDataset.end();
    }
    long finishTime = System.nanoTime();
    double time = (finishTime - startTime) / 1.0e6;
    log.info(String.format("FINISH - %.2fms", time));
    log.info("Found " + poiCounter + " points of interest.");
    return resultGraph;
}
Developer: fusepoolP3, Project: p3-geo-enriching-transformer, Lines: 67, Source: SpatialDataEnhancer.java
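For concrete values (latitude 47.3769, longitude 8.5417, radius 500.0, a hypothetical graph name), the string concatenation above assembles a query body like the following; the radius unit is meters, per the 'm' token in the spatial:nearby expression:

SELECT *
FROM NAMED <http://example.org/graphs/geo>
WHERE {
GRAPH <http://example.org/graphs/geo>
 { ?s spatial:nearby (47.3769 8.5417 500.0 'm') ;
      rdf:type ?type ;
      geo:lat ?lat ;
      geo:long ?lon ;
      rdfs:label ?label .
 }
}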
Example 10: initialize
import org.apache.clerezza.rdf.ontologies.RDFS; //import the required package/class
void initialize(final String datasetName) {
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, RDF.type, DLC.Pipe));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, RDFS.label, new PlainLiteralImpl(datasetName)));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(DlcGraphProvider.DATA_LIFECYCLE_GRAPH_REFERENCE, DLC.pipe, dataSetUri));
    /* what are tasks, and what are these triples for? */
    // create tasks
    // rdf task
    UriRef rdfTaskRef = new UriRef(dataSetUri.getUnicodeString() + "/rdf");
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, DLC.creates, rdfTaskRef));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(rdfTaskRef, RDF.type, DLC.RdfTask));
    // digest task
    UriRef digestTaskRef = new UriRef(dataSetUri.getUnicodeString() + "/digest");
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, DLC.creates, digestTaskRef));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(digestTaskRef, RDF.type, DLC.DigestTask));
    // enhance task
    UriRef enhanceTaskRef = new UriRef(dataSetUri.getUnicodeString() + "/enhance");
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, DLC.creates, enhanceTaskRef));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(enhanceTaskRef, RDF.type, DLC.EnhanceTask));
    // interlink task
    UriRef interlinkTaskRef = new UriRef(dataSetUri.getUnicodeString() + "/interlink");
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, DLC.creates, interlinkTaskRef));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(interlinkTaskRef, RDF.type, DLC.InterlinkTask));
    // smush task
    UriRef smushTaskRef = new UriRef(dataSetUri.getUnicodeString() + "/smush");
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, DLC.creates, smushTaskRef));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(smushTaskRef, RDF.type, DLC.SmushTask));
    // publish task
    UriRef publishTaskRef = new UriRef(dataSetUri.getUnicodeString() + "/publish");
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, DLC.creates, publishTaskRef));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(publishTaskRef, RDF.type, DLC.PublishTask));
    // create the source graph for the dataset (result of the transformation into RDF)
    tcManager.createMGraph(getSourceGraphRef());
    //GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph());
    //dlcGraphNode.addProperty(DCTERMS.hasPart, graphRef);
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(rdfTaskRef, DLC.deliverable, getSourceGraphRef()));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(getSourceGraphRef(), RDF.type, DLC.Dataset));
    // create the graph to store text fields extracted from properties in the source RDF
    tcManager.createMGraph(getDigestGraphRef());
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(enhanceTaskRef, DLC.deliverable, getDigestGraphRef()));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(getDigestGraphRef(), RDFS.label, new PlainLiteralImpl("Contains a sioc:content property with text "
            + "for indexing and references to entities found in the text by NLP enhancement engines")));
    // create the graph to store enhancements found by NLP engines in the digest
    tcManager.createMGraph(getEnhancementsGraphRef());
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(enhanceTaskRef, DLC.deliverable, getEnhancementsGraphRef()));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(getEnhancementsGraphRef(), RDFS.label, new PlainLiteralImpl("Contains entities found "
            + "in digest by NLP enhancement engines")));
    // create the graph to store the result of the interlinking task
    tcManager.createMGraph(getInterlinksGraphRef());
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(interlinkTaskRef, DLC.deliverable, getInterlinksGraphRef()));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(getInterlinksGraphRef(), RDF.type, DLC.Linkset));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(getInterlinksGraphRef(), DLC.subjectsTarget, getSourceGraphRef()));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(getInterlinksGraphRef(), DLC.linkPredicate, OWL.sameAs));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(getInterlinksGraphRef(), RDFS.label, new PlainLiteralImpl("Contains equivalence links")));
    // create the graph to store the result of the smushing task
    tcManager.createMGraph(getSmushGraphRef());
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(smushTaskRef, DLC.deliverable, getSmushGraphRef()));
    // create the graph to store the result of the publishing task
    tcManager.createMGraph(getPublishGraphRef());
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(publishTaskRef, DLC.deliverable, getPublishGraphRef()));
    // set the initial dataset status as unpublished
    UriRef statusRef = new UriRef(dataSetUri.getUnicodeString() + "/Status");
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(dataSetUri, DLC.status, statusRef));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(statusRef, RDF.type, DLC.Unpublished));
    dlcGraphProvider.getDlcGraph().add(new TripleImpl(statusRef, RDFS.label, new PlainLiteralImpl("Unpublished")));
}
Developer: fusepool, Project: datalifecycle, Lines: 64, Source: DataSetFactory.java
Note: the org.apache.clerezza.rdf.ontologies.RDFS examples in this article were collected from GitHub, MSDocs, and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by their developers; copyright of the source code remains with the original authors, and distribution and use are governed by the corresponding project's license. Do not reproduce without permission.