本文整理了Java中com.hp.hpl.jena.query.ResultSet
类的一些代码示例,展示了ResultSet
类的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。ResultSet
类的具体详情如下:
包路径:com.hp.hpl.jena.query.ResultSet
类名称:ResultSet
[英]Results from a query in a table-like manner for SELECT queries. Each row corresponds to a set of bindings which fulfil the conditions of the query. Access to the results is by variable name.
[中]对于SELECT查询,以类似表格的方式从查询中获取结果。每一行对应一组满足查询条件的绑定。通过变量名访问结果。
代码示例来源:origin: bio2rdf/bio2rdf-scripts
/**
 * Counts how many instances each rdf:type class has in the model.
 *
 * @param model the Jena model to query
 * @return map from class URI to number of instances of that class
 */
public Map<String, Double> getClassCounts(Model model) {
    QueryExecution execution = QueryExecutionFactory
            .create("select ?c ?i where { ?i <" + RDF.type + "> ?c }", model);
    try {
        ResultSet result = execution.execSelect();
        Map<String, Double> stats = new HashMap<String, Double>();
        while (result.hasNext()) {
            QuerySolution solution = result.next();
            String className = solution.getResource("c").getURI();
            // single lookup instead of the containsKey/get/put dance
            Double previous = stats.get(className);
            stats.put(className, previous == null ? 1d : previous + 1);
        }
        return stats;
    } finally {
        // the original leaked the QueryExecution; always release its resources
        execution.close();
    }
}
代码示例来源:origin: paulhoule/infovore
/**
 * Runs a two-variable SELECT query and returns a map from the first result
 * variable's binding to the second's, one entry per solution row.
 * Later rows overwrite earlier rows that share the same key node.
 */
public static Map<RDFNode,RDFNode> fetchMap(Dataset m,Query query,QuerySolution bindings) throws Exception {
    QueryExecution execution = QueryExecutionFactory.create(query, m);
    try {
        ResultSet resultSet = execution.execSelect();
        List<String> variableNames = resultSet.getResultVars();
        String keyVariable = variableNames.get(0);
        String valueVariable = variableNames.get(1);
        Map<RDFNode,RDFNode> pairs = Maps.newHashMap();
        while (resultSet.hasNext()) {
            QuerySolution row = resultSet.nextSolution();
            pairs.put(row.get(keyVariable), row.get(valueVariable));
        }
        return pairs;
    } finally {
        execution.close();
    }
}
代码示例来源:origin: com.hp.hpl.jena/arq
/**
 * Builds a sorted, in-memory copy of a result set: every binding is drained
 * from {@code rs} into a TreeSet ordered by {@code comparator}, then wrapped
 * back up as a query iterator. Exhausts the source result set.
 */
private SortedResultSet(ResultSet rs, Comparator<Binding> comparator)
{
    model = rs.getResourceModel() ;
    // Put straight into a sorted structure
    SortedSet<Binding> ordered = new TreeSet<Binding>(comparator) ;
    while ( rs.hasNext() )
    {
        ordered.add(rs.nextBinding()) ;
    }
    qIter = new QueryIterPlainWrapper(ordered.iterator()) ;
    resultVars = rs.getResultVars() ;
    //resultSet = new ResultSetStream(rs.getResultVars(), null, qIter) ;
}
代码示例来源:origin: com.github.ansell.pellet/pellet-query
public SlicedResultSet( ResultSet results, long offset, long limit ) {
this.results = results;
this.row = 0;
this.limit = limit;
for( int i = 0; i < offset && results.hasNext(); i++ ) {
results.next();
}
}
代码示例来源:origin: org.apache.clerezza/rdf.jena.sparql
/**
 * Eagerly copies every solution out of the (one-shot) Jena result set so the
 * wrapper can iterate independently of the source. Exhausts the source.
 */
public ResultSetWrapper(final ResultSet jenaResultSet) {
    final List<QuerySolution> allSolutions = new ArrayList<QuerySolution>();
    for ( ; jenaResultSet.hasNext() ; ) {
        allSolutions.add(jenaResultSet.nextSolution());
    }
    solutionsIter = allSolutions.iterator();
    resultVars = jenaResultSet.getResultVars();
}
代码示例来源:origin: epimorphics/elda
/**
 * Drains the result set, storing the string form of each row's
 * {@code e.outName} binding into result[0]; only the last row's value
 * survives after the loop completes.
 */
@Override public void consume(ResultSet rs) {
    for ( ; rs.hasNext() ; ) {
        QuerySolution row = rs.next();
        result[0] = row.get(e.outName).toString();
    }
}
代码示例来源:origin: de.unibonn.iai.eis/luzzu-semantics
/**
 * Finds a property whose rdfs:range is the given resource in the internal
 * flat model. If several properties match, the last row returned by the
 * query wins; returns null when none match.
 *
 * @param uri the resource expected as the property's rdfs:range
 * @return a matching property resource, or null if there is no match
 */
public static Resource getPropertyResource(Resource uri){
    // NOTE(review): no explicit separator between the range term and the
    // object term — presumably SPARQLHelper.toSPARQL() emits surrounding
    // whitespace itself; verify against SPARQLHelper.
    String whereClause = "?prop " + " " + SPARQLHelper.toSPARQL(RDFS.range) + SPARQLHelper.toSPARQL(uri) + " . ";
    Model m = InternalModelConf.getFlatModel();
    String query = SPARQLHelper.SELECT_STATEMENT.replace("[variables]", "?prop").replace("[whereClauses]", whereClause);
    Resource r = null;
    Query qry = QueryFactory.create(query);
    QueryExecution qe = QueryExecutionFactory.create(qry, m);
    try {
        ResultSet rs = qe.execSelect();
        while (rs.hasNext()){
            r = rs.next().get("prop").asResource();
        }
    } finally {
        // the original never closed the QueryExecution; release its resources
        qe.close();
    }
    return r;
}
代码示例来源:origin: usc-isi-i2/Web-Karma
// NOTE(review): truncated excerpt — the enclosing method signature is not
// shown and the braces do not balance; treat as illustrative only.
QueryExecution qexec = QueryExecutionFactory.create(query, jenaModel);
ResultSet results = qexec.execSelect() ;
// bail out early when the SELECT produced no rows at all
if (!results.hasNext()) {
logger.info("query does not return any answer.");
return null;
// NOTE(review): a closing '}' for the if-block appears to be missing here
for ( ; results.hasNext() ; )
QuerySolution soln = results.nextSolution() ;
Map<String, String> attValues =
new HashMap<>();
// 'arg' is a variable name defined outside this excerpt
RDFNode argNode = soln.get(arg) ;
if (argNode != null) {
String value = argNode.toString();
attValues.put(arg, value);
return null;
// release the query execution even on the early returns above
} finally {
qexec.close() ;
代码示例来源:origin: com.hp.hpl.jena/arq
/**
 * Runs an ARQ SELECT (with common RDF/RDFS/XSD/OWL/SKOS prefixes prepended)
 * against the model and accumulates each (?x, ?y) row into the multimap:
 * every ?x node maps to the list of its ?y nodes.
 *
 * @param qs       SELECT query body binding variables ?x and ?y
 * @param model    the model to query
 * @param multimap out-parameter collecting x -&gt; [y...] associations
 */
private static void exec(String qs, Model model, Map<Node, List<Node>> multimap)
{
    String preamble = StrUtils.strjoinNL("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>",
                                         "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>",
                                         "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>",
                                         "PREFIX owl: <http://www.w3.org/2002/07/owl#>",
                                         "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>") ;
    Query query = QueryFactory.create(preamble+"\n"+qs, Syntax.syntaxARQ) ;
    QueryExecution qexec = QueryExecutionFactory.create(query, model) ;
    try {
        ResultSet rs = qexec.execSelect() ;
        while ( rs.hasNext() )
        {
            QuerySolution soln = rs.next() ;
            Node x = soln.get("x").asNode() ;
            Node y = soln.get("y").asNode() ;
            if ( ! multimap.containsKey(x) )
                multimap.put(x, new ArrayList<Node>()) ;
            multimap.get(x).add(y) ;
        }
    } finally {
        // the original never closed the QueryExecution; release its resources
        qexec.close() ;
    }
}
}
代码示例来源:origin: fr.lirmm.graphik/graal-store-jenaTDB
/**
 * Collects every predicate URI stored in the TDB dataset, inside a READ
 * transaction, modelling each as an arity-2 Predicate. The TreeSet keeps
 * the predicates in their natural order.
 */
@Override
public Set<Predicate> getPredicates() {
    Set<Predicate> found = new TreeSet<Predicate>();
    dataset.begin(ReadWrite.READ);
    QueryExecution execution = null;
    try {
        execution = QueryExecutionFactory.create(SELECT_PREDICATES_QUERY, dataset);
        for (ResultSet rs = execution.execSelect(); rs.hasNext(); ) {
            String predicateUri = rs.next().get("?p").toString();
            found.add(new Predicate(predicateUri, 2));
        }
    } finally {
        if (execution != null) {
            execution.close();
        }
        // always end the transaction, even if the query failed
        dataset.end();
    }
    return found;
}
代码示例来源:origin: com.hp.hpl.jena/arq
/** Execute, expecting the result to be one row, one column.
 * Return that one RDFNode, or null when the result set is empty.
 * Throws an ARQException if the query yields more than one row.
 * Always closes the supplied QueryExecution before returning.
 *
 * @param qExec   the prepared (not yet executed) SELECT execution
 * @param varname name of the single result variable to read
 */
public static RDFNode getOne(QueryExecution qExec, String varname)
{
try {
ResultSet rs = qExec.execSelect() ;
// empty result set: nothing to return
if ( ! rs.hasNext() )
return null ;
QuerySolution qs = rs.nextSolution() ;
RDFNode r = qs.get(varname) ;
// a second row violates the "exactly one" contract
if ( rs.hasNext() )
throw new ARQException("More than one: var ?"+varname) ;
return r ;
} finally { qExec.close() ; }
}
代码示例来源:origin: org.apache.stanbol/org.apache.stanbol.entityhub.indexing.source.jenatdb
/**
 * Logs every (subject, predicate, object) triple in the indexing dataset at
 * DEBUG level via a generated SELECT over all statements. Debug aid only.
 */
public void debug(){
    String entityVar = "s";
    String fieldVar = "p";
    String valueVar = "o";
    StringBuilder qb = new StringBuilder();
    qb.append(String.format("SELECT ?%s ?%s ?%s \n",
        entityVar,fieldVar,valueVar)); //for the select
    qb.append("{ \n");
    qb.append(String.format(" ?%s ?%s ?%s . \n",
        entityVar,fieldVar,valueVar)); //for the where
    qb.append("} \n");
    log.debug("EntityDataIterator Query: \n"+qb.toString());
    Query q = QueryFactory.create(qb.toString(), Syntax.syntaxARQ);
    // keep a handle on the execution so it can be closed (original leaked it)
    QueryExecution qe = QueryExecutionFactory.create(q, indexingDataset.toDataset());
    try {
        ResultSet rs = qe.execSelect();
        Var s = Var.alloc(entityVar);
        Var p = Var.alloc(fieldVar);
        Var o = Var.alloc(valueVar);
        while (rs.hasNext()){
            Binding b = rs.nextBinding();
            log.debug("{} {} {}",new Object[]{b.get(s),b.get(p),b.get(o)});
        }
    } finally {
        qe.close();
    }
}
代码示例来源:origin: DSpace/DSpace
/**
 * Lists the URIs of all named graphs in the configured SPARQL endpoint,
 * authenticating when both SPARQL login and password are configured.
 *
 * @return a synchronized list of named-graph URIs (possibly empty)
 */
@Override
public List<String> getAllStoredGraphs() {
    String queryString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }";
    QueryExecution qexec;
    if (configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY)
            && configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY)) {
        HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY),
                // FIX: read the SPARQL password key — the original read
                // STORAGE_GRAPHSTORE_PASSWORD_KEY, a different endpoint's key,
                // which NPEs here when only the SPARQL credentials are set.
                configurationService.getProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY).toCharArray());
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(),
                queryString, httpAuthenticator);
    } else {
        qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(),
                queryString);
    }
    try {
        ResultSet rs = qexec.execSelect();
        List<String> graphs = Collections.synchronizedList(new ArrayList<String>());
        while (rs.hasNext()) {
            QuerySolution solution = rs.next();
            if (solution.contains("g")) {
                graphs.add(solution.get("g").asResource().getURI());
            }
        }
        return graphs;
    } finally {
        // close even when execSelect/iteration throws (original closed only on success)
        qexec.close();
    }
}
代码示例来源:origin: uk.ac.open.kmi.iserve/iserve-sparql-text-search
/**
 * Runs a free-text SPARQL search against the remote query endpoint and maps
 * every URI-resource ?s binding to a FreeTextMatchResult for it. Rows whose
 * ?s is not a URI resource are skipped; URI syntax errors are logged and the
 * offending row is dropped.
 *
 * @param textquery   the raw text query (recorded in the match-result URI)
 * @param sparqlQuery the full SPARQL query to execute
 * @return insertion-ordered map from matched resource URI to its result
 */
private Map<URI, MatchResult> search(String textquery, Query sparqlQuery) {
    logger.debug("Executing SPARQL query: {}", sparqlQuery);
    QueryExecution qexec = QueryExecutionFactory.sparqlService(queryEndpoint.toString(), sparqlQuery);
    Map<URI, MatchResult> r = Maps.newLinkedHashMap();
    try {
        ResultSet resultSet = qexec.execSelect();
        while (resultSet.hasNext()) {
            QuerySolution solution = resultSet.next();
            RDFNode s = solution.get("s");
            if (s.isURIResource()) {
                try {
                    String resource = s.asResource().getURI();
                    FreeTextMatchResult result = new FreeTextMatchResult(new URI(searchProperty + "?q=" + textquery), new URI(resource));
                    r.put(new URI(resource), result);
                } catch (URISyntaxException e) {
                    e.printStackTrace();
                }
            }
        }
    } finally {
        // the original never closed the remote query execution; release it
        qexec.close();
    }
    return r;
}
代码示例来源:origin: com.hp.hpl.jena/sdb
/**
 * Loads the model stored in file {@code fn} and evaluates the class-level
 * query over it, returning one (label, description URI) pair per row.
 */
private static List<Pair<String, String>> storesByQuery(String fn)
{
    Model model = FileManager.get().loadModel(fn) ;
    List<Pair<String, String>> data = new ArrayList<Pair<String, String>>();
    Query query = QueryFactory.create(queryString) ;
    QueryExecution qExec = QueryExecutionFactory.create(query, model) ;
    try {
        ResultSet rs = qExec.execSelect() ;
        while ( rs.hasNext() )
        {
            QuerySolution row = rs.nextSolution() ;
            String storeLabel = row.getLiteral("label").getLexicalForm() ;
            String storeDesc = row.getResource("desc").getURI() ;
            data.add(new Pair<String, String>(storeLabel, storeDesc)) ;
        }
    } finally { qExec.close() ; }
    return data ;
}
}
代码示例来源:origin: org.apache.clerezza.ext/org.apache.jena.jena-arq
/**
 * Streams the result set through the processor: start/finish callbacks
 * bracket the whole set and each row, and binding() fires once per result
 * variable per row. Unbound variables are reported with a null node.
 */
public void apply()
{
    proc.start(rs) ;
    while ( rs.hasNext() )
    {
        QuerySolution row = rs.next() ;
        proc.start(row) ;
        for ( String variable : rs.getResultVars() )
        {
            // the bound node may be null for this row
            proc.binding(variable, row.get(variable)) ;
        }
        proc.finish(row) ;
    }
    proc.finish(rs) ;
}
代码示例来源:origin: org.wso2.carbon.data/org.wso2.carbon.dataservices.core
/**
 * Converts the next solution row of the result set into a DataEntry, adding
 * one ParamValue per result variable. Literal bindings are rendered via
 * convertRSToString; resource bindings contribute their URI. Entries are
 * keyed by variable name, or by 1-based column number when
 * isUsingColumnNumbers() is set.
 */
private DataEntry getDataEntryFromRS(ResultSet rs) {
    DataEntry dataEntry = new DataEntry();
    QuerySolution solution = rs.nextSolution();
    boolean useColumnNumbers = this.isUsingColumnNumbers();
    /* for each column get the colName and colValue and add to the data entry */
    for (int i = 0; i < rs.getResultVars().size(); i++) {
        String colName = rs.getResultVars().get(i);
        RDFNode node = solution.get(colName);
        String value;
        if (node.isLiteral()) {
            value = convertRSToString(solution, colName);
        } else {
            value = solution.getResource(colName).getURI();
        }
        String key = useColumnNumbers ? Integer.toString(i + 1) : colName;
        dataEntry.addValue(key, new ParamValue(value));
    }
    return dataEntry;
}
代码示例来源:origin: com.hp.hpl.jena/arq
/** This operation faithfully walks the results but does nothing with them.
 * Fetching each variable's node forces it to be materialized.
 * @return The count of the number of solutions.
 */
public static int consume(ResultSet resultSet)
{
    int rows = 0 ;
    while ( resultSet.hasNext() )
    {
        QuerySolution solution = resultSet.nextSolution() ;
        Iterator<String> names = solution.varNames() ;
        while ( names.hasNext() )
        {
            // the value itself is discarded; the get() is the point
            RDFNode ignored = solution.get(names.next()) ;
        }
        rows++ ;
    }
    return rows ;
}
代码示例来源:origin: fr.inria.eventcloud/eventcloud-core
// NOTE(review): truncated excerpt — qExec's declaration, the assignment of
// this create() call, and the enclosing try statement are all outside this
// view; treat as illustrative only.
QueryExecutionFactory.create(
"SELECT (COUNT(*) as ?count) { GRAPH ?g { ?s ?p ?o } } ",
txnGraph.getUnderlyingDataset());
ResultSet rs = qExec.execSelect();
try {
// read the aggregate COUNT(*) binding straight off the first row
result =
(Integer) rs.nextBinding()
.get(Var.alloc("count"))
.getLiteralValue();
} finally {
qExec.close();
代码示例来源:origin: org.apache.clerezza.ext/org.apache.jena.jena-arq
/**
 * Extracts a List filled with the binding of selectElement variable for each
 * query solution as RDFNodes (Resources or Literals).
 * Exhausts the result set. Create a rewindable one to use multiple times.
 *
 * @see com.hp.hpl.jena.query.ResultSetFactory
 */
public static List<RDFNode> resultSetToList(ResultSet rs, String selectElement)
{
    // feature suggested by James Howison
    List<RDFNode> nodes = new ArrayList<RDFNode>() ;
    for ( ; rs.hasNext() ; )
    {
        QuerySolution solution = rs.nextSolution() ;
        nodes.add(solution.get(selectElement)) ;
    }
    return nodes ;
}
内容来源于网络,如有侵权,请联系作者删除!