Merge pull request #654 from afs/rdfconnection-query-type

Enum QueryType; use with RDFConnection
diff --git a/jena-arq/src/main/java/org/apache/jena/query/Query.java b/jena-arq/src/main/java/org/apache/jena/query/Query.java
index 5e384db..de78da0 100644
--- a/jena-arq/src/main/java/org/apache/jena/query/Query.java
+++ b/jena-arq/src/main/java/org/apache/jena/query/Query.java
@@ -51,63 +51,66 @@
  *
  * Once a query is built, it can be passed to the QueryFactory to produce a query execution engine.
  * @see QueryExecutionFactory
- * @see ResultSet */
+ * @see ResultSet
+ */
 
 public class Query extends Prologue implements Cloneable, Printable
 {
     static { JenaSystem.init() ; /* Ensure everything has started properly */ }
-    
+
+    // Old constants. Retain for compatibility.
     public static final int QueryTypeUnknown    = -123 ;
     public static final int QueryTypeSelect     = 111 ;
     public static final int QueryTypeConstruct  = 222 ;
     public static final int QueryTypeDescribe   = 333 ;
     public static final int QueryTypeAsk        = 444 ;
     public static final int QueryTypeJson       = 555 ;
-    int queryType = QueryTypeUnknown ; 
-    
+
+    private QueryType queryType = QueryType.UNKNOWN;
+
     // If no model is provided explicitly, the query engine will load
     // a model from the URL.  Never a list of zero items.
-    
+
     private List<String> graphURIs = new ArrayList<>() ;
     private List<String> namedGraphURIs = new ArrayList<>() ;
-    
+
     // The WHERE clause
     private Element queryPattern = null ;
-    
+
     // Query syntax
     private Syntax syntax = Syntax.syntaxSPARQL ; // Default
-    
+
     // LIMIT/OFFSET
     public static final long  NOLIMIT = Long.MIN_VALUE ;
     private long resultLimit   = NOLIMIT ;
     private long resultOffset  = NOLIMIT ;
-    
+
     // ORDER BY
     private List<SortCondition> orderBy       = null ;
-    public static final int ORDER_ASCENDING           = 1 ; 
+    public static final int ORDER_ASCENDING           = 1 ;
     public static final int ORDER_DESCENDING          = -1 ;
-    public static final int ORDER_DEFAULT             = -2 ;    // Not explicitly given. 
-    public static final int ORDER_UNKNOW              = -3 ; 
+    public static final int ORDER_DEFAULT             = -2 ;    // Not explicitly given.
+    public static final int ORDER_UNKNOW              = -3 ;
 
     // VALUES trailing clause
     protected TableData valuesDataBlock = null ;
-    
+
     protected boolean strictQuery = true ;
-    
+
     // SELECT * seen
     protected boolean queryResultStar        = false ;
-    
+
     protected boolean distinct               = false ;
     protected boolean reduced                = false ;
-    
+
     // CONSTRUCT
     protected Template constructTemplate  = null ;
-    
+
     // DESCRIBE
     // Any URIs/QNames in the DESCRIBE clause
     // Also uses resultVars
     protected List<Node> resultNodes               = new ArrayList<>() ;     // Type in list: Node
-    
+
     /**
      * Creates a new empty query
      */
@@ -115,7 +118,7 @@
     {
         syntax = Syntax.syntaxSPARQL ;
     }
-    
+
     /**
      * Creates a new empty query with the given prologue
      */
@@ -124,50 +127,70 @@
         this() ;
         usePrologueFrom(prologue) ;
     }
-    
+
     // Allocate variables that are unique to this query.
     private VarAlloc varAlloc = new VarAlloc(ARQConstants.allocVarMarker) ;
     private Var allocInternVar() { return varAlloc.allocVar() ; }
-    
+
     //private VarAlloc varAnonAlloc = new VarAlloc(ARQConstants.allocVarAnonMarker) ;
     //public Var allocVarAnon() { return varAnonAlloc.allocVar() ; }
-    
-    public void setQuerySelectType()            { queryType = QueryTypeSelect ; }
-    public void setQueryConstructType()         { queryType = QueryTypeConstruct ; queryResultStar = true ; }
-    public void setQueryDescribeType()          { queryType = QueryTypeDescribe ; }
-    public void setQueryAskType()               { queryType = QueryTypeAsk ; }
-    public void setQueryJsonType()              { queryType = QueryTypeJson ; }
-    
-    public int getQueryType()                   { return queryType ; }
-    
-    public boolean isSelectType()               { return queryType == QueryTypeSelect ; }
 
-    public boolean isConstructType()            { return queryType == QueryTypeConstruct ; }
+    public void setQuerySelectType()            { queryType = QueryType.SELECT ; }
+    public void setQueryConstructType()         { queryType = QueryType.CONSTRUCT ; queryResultStar = true ; }
+    public void setQueryDescribeType()          { queryType = QueryType.DESCRIBE; }
+    public void setQueryAskType()               { queryType = QueryType.ASK; }
+    public void setQueryJsonType()              { queryType = QueryType.CONSTRUCT_JSON; }
 
-    public boolean isDescribeType()             { return queryType == QueryTypeDescribe ; }
+    /** Return the {@link QueryType} */
+    public QueryType queryType()                       { return queryType ; }
 
-    public boolean isAskType()                  { return queryType == QueryTypeAsk ; }
+    /** @deprecated Use {@link #queryType()}, which returns a {@link QueryType}. */
+    @Deprecated
+    public int getQueryType() {
+        // Old constants.
+        switch(queryType) {
+            case SELECT :           return QueryTypeSelect;
+            case ASK :              return QueryTypeAsk;
+            case CONSTRUCT :        return QueryTypeConstruct;
+            case CONSTRUCT_JSON :   return QueryTypeJson;
+            case CONSTRUCT_QUADS :  return QueryTypeConstruct;
+            case DESCRIBE :         return QueryTypeDescribe;
+            default :               return QueryTypeUnknown;
+        }
+    }
 
-    public boolean isJsonType()                 { return queryType == QueryTypeJson ; }
+    public boolean isSelectType()               { return queryType == QueryType.SELECT; }
 
-    public boolean isUnknownType()              { return queryType == QueryTypeUnknown ; }
-    
-    public boolean isConstructQuad()            { return isConstructType() && constructTemplate.containsRealQuad() ; }
+    public boolean isConstructType()            { return queryType == QueryType.CONSTRUCT ; }
+
+    public boolean isDescribeType()             { return queryType == QueryType.DESCRIBE ; }
+
+    public boolean isAskType()                  { return queryType == QueryType.ASK ; }
+
+    public boolean isJsonType()                 { return queryType == QueryType.CONSTRUCT_JSON ; }
+
+    public boolean isUnknownType()              { return queryType == QueryType.UNKNOWN ; }
+
+    public boolean isConstructQuad() {
+        return (isConstructType() && constructTemplate.containsRealQuad())
+            || queryType == QueryType.CONSTRUCT_QUADS;
+    }
+
     // It was a mistake to extend Prologue ... but what is done is done.
     public Prologue getPrologue()               { return this ; }
-    
+
     public void setStrict(boolean isStrict)
-    { 
+    {
         strictQuery = isStrict ;
-        
+
         if ( strictQuery )
             initStrict() ;
         else
             initLax() ;
     }
-    
+
     public boolean isStrict()                { return strictQuery ; }
-    
+
     private void initStrict()
     {
 //        if ( prefixMap.getGlobalPrefixMapping() == globalPrefixMap )
@@ -179,38 +202,38 @@
 //        if ( prefixMap.getGlobalPrefixMapping() == null )
 //            prefixMap.setGlobalPrefixMapping(globalPrefixMap) ;
     }
-    
+
     public void setDistinct(boolean b) { distinct = b ; }
     public boolean isDistinct()        { return distinct ; }
-    
+
     public void setReduced(boolean b) { reduced = b ; }
     public boolean isReduced()        { return reduced ; }
-    
+
     /** @return Returns the syntax. */
     public Syntax getSyntax()         { return syntax ; }
 
     /** @param syntax The syntax to set. */
     public void setSyntax(Syntax syntax)
-    { 
+    {
         this.syntax = syntax ;
         if ( syntax != Syntax.syntaxSPARQL )
             strictQuery = false ;
     }
 
     // ---- Limit/offset
-    
-    public long getLimit()             { return resultLimit ; } 
+
+    public long getLimit()             { return resultLimit ; }
     public void setLimit(long limit)   { resultLimit = limit ; }
     public boolean hasLimit()          { return resultLimit != NOLIMIT ; }
-    
-    public long getOffset()            { return resultOffset ; } 
+
+    public long getOffset()            { return resultOffset ; }
     public void setOffset(long offset) { resultOffset = offset ; }
     public boolean hasOffset()         { return resultOffset != NOLIMIT ; }
-    
+
     // ---- Order By
-    
+
     public boolean hasOrderBy()        { return orderBy != null && orderBy.size() > 0 ; }
-    
+
     public boolean isOrdered()         { return hasOrderBy() ; }
 
     public void addOrderBy(SortCondition condition)
@@ -225,49 +248,49 @@
         SortCondition sc = new SortCondition(expr, direction) ;
         addOrderBy(sc) ;
     }
-    
+
     public void addOrderBy(Node var, int direction)
-    { 
+    {
         if ( ! var.isVariable() )
             throw new QueryException("Not a variable: "+var) ;
         SortCondition sc = new SortCondition(var, direction) ;
         addOrderBy(sc) ;
     }
-    
+
     public void addOrderBy(String varName, int direction)
-    { 
+    {
         varName = Var.canonical(varName) ;
         SortCondition sc = new SortCondition(new ExprVar(varName), direction) ;
         addOrderBy(sc) ;
     }
 
     public List<SortCondition> getOrderBy()           { return orderBy ; }
-    
-    // ---- 
-    
+
+    // ----
+
     /** Answer whether the query had SELECT/DESCRIBE/CONSTRUCT *
      * @return boolean as to whether a * result form was seen
-     */ 
+     */
     public boolean isQueryResultStar() { return queryResultStar ; }
 
     /** Set whether the query had SELECT/DESCRIBE *
-     * 
-     * @param isQueryStar 
+     *
+     * @param isQueryStar
      */
     public void setQueryResultStar(boolean isQueryStar)
     {
         queryResultStar = isQueryStar ;
-        if ( isQueryStar ) 
+        if ( isQueryStar )
             resultVarsSet = false ;
     }
-    
+
     public void setQueryPattern(Element elt)
     {
         queryPattern = elt ;
     }
-    
+
     public Element getQueryPattern() { return queryPattern ; }
-    
+
      /** Location of the source for the data.  If the model is not set,
      *  then the QueryEngine will attempt to load the data from these URIs
      *  into the default (unamed) graph.
@@ -292,41 +315,41 @@
         else
             namedGraphURIs.add(uri) ;
     }
-    
+
     /** Return the list of URIs (strings) for the unnamed graph
-     * 
+     *
      * @return List of strings
      */
-    
+
     public List<String> getGraphURIs() { return graphURIs ; }
 
-    /** Test whether the query mentions a URI in forming the default graph (FROM clause) 
-     * 
+    /** Test whether the query mentions a URI in forming the default graph (FROM clause)
+     *
      * @param uri
-     * @return boolean  True if the URI used in a FROM clause 
+     * @return boolean  True if the URI used in a FROM clause
      */
     public boolean usesGraphURI(String uri) { return graphURIs.contains(uri) ; }
-    
+
     /** Return the list of URIs (strings) for the named graphs (FROM NAMED clause)
-     * 
+     *
      * @return List of strings
      */
-    
+
     public List<String> getNamedGraphURIs() { return namedGraphURIs ; }
 
-    /** Test whether the query mentions a URI for a named graph. 
-     * 
+    /** Test whether the query mentions a URI for a named graph.
+     *
      * @param uri
-     * @return True if the URI used in a FROM NAMED clause 
+     * @return True if the URI used in a FROM NAMED clause
      */
     public boolean usesNamedGraphURI(String uri) { return namedGraphURIs.contains(uri) ; }
-    
+
     /** Return true if the query has either some graph
      * URIs or some named graph URIs in its description.
      * This does not mean these URIs will be used - just that
-     * they are noted as part of the query. 
-     */ 
-    
+     * they are noted as part of the query.
+     */
+
     public boolean hasDatasetDescription()
     {
         if ( getGraphURIs() != null && getGraphURIs().size() > 0 )
@@ -335,45 +358,45 @@
             return true ;
         return false ;
     }
-    
-    /** Return a dataset description (FROM/FROM NAMED clauses) for the query. */  
+
+    /** Return a dataset description (FROM/FROM NAMED clauses) for the query. */
     public DatasetDescription getDatasetDescription()
     {
         if ( ! hasDatasetDescription() )
             return null;
-        
+
         DatasetDescription description = new DatasetDescription() ;
-        
+
         description.addAllDefaultGraphURIs(getGraphURIs()) ;
         description.addAllNamedGraphURIs(getNamedGraphURIs()) ;
         return description ;
     }
-    
+
     // ---- SELECT
 
     protected VarExprList projectVars = new VarExprList() ;
 
     /** Return a list of the variables requested (SELECT) */
     public List<String> getResultVars()
-    { 
+    {
         // Ensure "SELECT *" processed
         setResultVars() ;
         return Var.varNames(projectVars.getVars()) ;
     }
-    
+
     /** Return a list of the variables requested (SELECT) */
     public List<Var> getProjectVars()
-    { 
+    {
         // Ensure "SELECT *" processed
         setResultVars() ;
         return projectVars.getVars() ;
     }
-    
+
     public VarExprList getProject()
     {
         return projectVars ;
     }
-    
+
     /** Add a collection of projection variables to a SELECT query */
     public void addProjectVars(Collection<?> vars)
     {
@@ -394,7 +417,7 @@
         resultVarsSet = true ;
     }
 
-    
+
     /** Add a projection variable to a SELECT query */
     public void addResultVar(String varName)
     {
@@ -408,10 +431,10 @@
             throw new QueryException("Not a variable: "+v) ;
         _addResultVar(v.getName()) ;
     }
-    
+
     public void addResultVar(Node v, Expr expr)
     {
-        Var var = null ; 
+        Var var = null ;
         if ( v == null )
             var = allocInternVar() ;
         else
@@ -422,7 +445,7 @@
         }
         _addVarExpr(projectVars, var, expr) ;
     }
-    
+
     /** Add an to a SELECT query (a name will be created for it) */
     public void addResultVar(Expr expr)
     {
@@ -432,7 +455,7 @@
     /** Add a named expression to a SELECT query */
     public void addResultVar(String varName, Expr expr)
     {
-        Var var = null ; 
+        Var var = null ;
         if ( varName == null )
             var = allocInternVar() ;
         else
@@ -457,7 +480,7 @@
         {
             Expr expr = varExprList.getExpr(v) ;
             if ( expr != null )
-                
+
                 // SELECT (?a+?b AS ?x) ?x
                 throw new QueryBuildException("Duplicate variable (had an expression) in result projection '"+v+"'") ;
             // SELECT ?x ?x
@@ -473,20 +496,20 @@
         if ( varExprList.contains(v) )
             // SELECT ?x (?a+?b AS ?x)
             // SELECT (2*?a AS ?x) (?a+?b AS ?x)
-            throw new QueryBuildException("Duplicate variable in result projection '"+v+"'") ;  
+            throw new QueryBuildException("Duplicate variable in result projection '"+v+"'") ;
         varExprList.add(v, expr) ;
     }
 
     protected VarExprList groupVars = new VarExprList() ;
     protected List<Expr> havingExprs = new ArrayList<>() ;  // Expressions : Make an ExprList?
-    
+
     public boolean hasGroupBy()     { return ! groupVars.isEmpty() || getAggregators().size() > 0 ; }
     public boolean hasHaving()      { return havingExprs != null && havingExprs.size() > 0 ; }
-    
+
     public VarExprList getGroupBy()      { return groupVars ; }
-    
+
     public List<Expr> getHavingExprs()    { return havingExprs ; }
-    
+
     public void addGroupBy(String varName)
     {
         varName = Var.canonical(varName) ;
@@ -499,19 +522,19 @@
     }
 
     public void addGroupBy(Expr expr) { addGroupBy(null, expr) ; }
-    
+
     public void addGroupBy(Var v, Expr expr)
     {
         if ( v == null )
             v = allocInternVar() ;
-        
+
         if ( expr.isVariable() && v.isAllocVar() )
         {
             // It was (?x) with no AS - keep the name by adding by variable.
             addGroupBy(expr.asVar()) ;
             return ;
         }
-        
+
         groupVars.add(v, expr) ;
     }
 
@@ -535,34 +558,34 @@
     // ---- Aggregates
 
     // Record allocated aggregations.
-    // Later: The same aggregation expression used in a query 
+    // Later: The same aggregation expression used in a query
     // will always lead to the same aggregator.
     // For now, allocate a fresh one each time (cause the calculation
-    // to be done multiple times but (1) it's unusual to have repeated 
-    // aggregators normally and (2) the actual calculation is cheap. 
-        
+    // to be done multiple times but (1) it's unusual to have repeated
+    // aggregators normally and (2) the actual calculation is cheap.
+
     // Unlike SELECT expressions, here the expression itself (E_Aggregator) knows its variable
     // Commonality?
-    
+
     private List<ExprAggregator> aggregators = new ArrayList<>() ;
     private Map<Var, ExprAggregator> aggregatorsMap = new HashMap<>() ;
-    
+
     // Note any E_Aggregator created for reuse.
     private Map<String, Var> aggregatorsAllocated = new HashMap<>() ;
-    
+
     public boolean hasAggregators() { return aggregators.size() != 0  ; }
     public List<ExprAggregator> getAggregators() { return aggregators ; }
-    
+
     public Expr allocAggregate(Aggregator agg)
     {
         // We need to track the aggregators in case one aggregator is used twice, e.g. in HAVING and in SELECT expression
         // (is that much harm to do twice?  Yes, if distinct.)
         String key = agg.key() ;
-        
-        Var v = aggregatorsAllocated.get(key); 
+
+        Var v = aggregatorsAllocated.get(key);
         if ( v != null )
         {
-            ExprAggregator eAgg = aggregatorsMap.get(v) ; 
+            ExprAggregator eAgg = aggregatorsMap.get(v) ;
             if ( ! agg.equals(eAgg.getAggregator()) )
                 Log.warn(Query.class, "Internal inconsistency: Aggregator: "+agg) ;
             return eAgg ;
@@ -575,16 +598,16 @@
         aggregators.add(aggExpr) ;
         return aggExpr ;
     }
-    
+
     // ---- VALUES
-    
+
     /** Does the query have a VALUES trailing block? */
     public boolean hasValues()                { return valuesDataBlock != null ; }
-    
+
     /** Variables from a VALUES trailing block */
     public List<Var> getValuesVariables()     { return valuesDataBlock==null ? null : valuesDataBlock.getVars() ; }
-    
-    /** Data from a VALUES trailing block. null for a Node means undef */ 
+
+    /** Data from a VALUES trailing block. null for a Node means undef */
     public List<Binding> getValuesData()      { return valuesDataBlock==null ? null : valuesDataBlock.getRows() ; }
 
     public void setValuesDataBlock(List<Var> variables, List<Binding> values)
@@ -592,7 +615,7 @@
         checkDataBlock(variables, values) ;
         valuesDataBlock = new TableData(variables, values) ;
     }
-    
+
     private static void checkDataBlock(List<Var> variables, List<Binding> values)
     {
         // Check.
@@ -601,27 +624,27 @@
         {
             Iterator<Var> iter= valueRow.vars() ;
             for ( ; iter.hasNext() ; )
-            { 
+            {
                 Var v = iter.next() ;
                 if ( ! variables.contains(v) )
                     throw new QueryBuildException("Variable "+v+" not found in "+variables) ;
             }
         }
     }
-    
-    // ---- CONSTRUCT 
-    
-    /** Get the template pattern for a construct query */ 
-    public Template getConstructTemplate() 
-    { 
+
+    // ---- CONSTRUCT
+
+    /** Get the template pattern for a construct query */
+    public Template getConstructTemplate()
+    {
         return constructTemplate ;
     }
-    
-    /** Set triple patterns for a construct query */ 
+
+    /** Set triple patterns for a construct query */
     public void setConstructTemplate(Template templ)  { constructTemplate = templ ; }
 
     // ---- DESCRIBE
-    
+
     public void addDescribeNode(Node node)
     {
         if ( node.isVariable() ) { addResultVar(node) ; return ; }
@@ -636,12 +659,12 @@
         throw new QueryException("Result node not recognized: "+node) ;
     }
 
-    
+
     /** Get the result list (things wanted - not the results themselves)
-     *  of a DESCRIBE query. */ 
+     *  of a DESCRIBE query. */
     public List<Node> getResultURIs() { return resultNodes ; }
-    
-    private boolean resultVarsSet = false ; 
+
+    private boolean resultVarsSet = false ;
     /** Fix up when the query has "*" (when SELECT * or DESCRIBE *)
      *  and for a construct query.  This operation is idempotent.
      */
@@ -650,28 +673,28 @@
         if ( resultVarsSet )
             return ;
         resultVarsSet = true ;
-        
+
         if ( getQueryPattern() == null )
         {
             if ( ! this.isDescribeType() )
                 Log.warn(this, "setResultVars(): no query pattern") ;
             return ;
         }
-        
+
         if ( isSelectType() )
         {
             if ( isQueryResultStar() )
                 findAndAddNamedVars() ;
             return ;
         }
-        
+
         if ( isConstructType() )
         {
             // All named variables are in-scope
             findAndAddNamedVars() ;
             return ;
         }
-        
+
         if ( isDescribeType() )
         {
             if ( isQueryResultStar() )
@@ -682,7 +705,7 @@
 //        if ( isAskType() )
 //        {}
     }
-    
+
     private void findAndAddNamedVars()
     {
         Iterator<Var> varIter = null ;
@@ -699,9 +722,9 @@
 //                queryVars.addAll(getValuesVariables()) ;
             varIter = queryVars.iterator() ;
         }
-        
+
         // All query variables, including ones from bNodes in the query.
-        
+
         for ( ; varIter.hasNext() ; )
         {
             Object obj = varIter.next() ;
@@ -740,7 +763,7 @@
 
     @Override
     public Object clone() { return cloneQuery() ; }
-    
+
     /**
      * Makes a copy of this query.  Copies by parsing a query from the serialized form of this query
      * @return Copy of this query
@@ -751,25 +774,25 @@
         String qs = this.toString();
         return QueryFactory.create(qs, getSyntax()) ;
     }
-    
+
     // ---- Query canonical syntax
-    
+
     // Reverse of parsing : should produce a string that parses to an equivalent query
-    // "Equivalent" => gives the same results on any model  
+    // "Equivalent" => gives the same results on any model
     @Override
     public String toString()
     { return serialize() ; }
-    
+
     public String toString(Syntax syntax)
     { return serialize(syntax) ; }
 
-    
+
     /** Must align with .equals */
     private int hashcode = -1 ;
-    
+
     @Override
     public int hashCode()
-    { 
+    {
         if ( hashcode == -1 )
         {
             hashcode = QueryHashCode.calc(this) ;
@@ -778,29 +801,29 @@
         }
         return hashcode ;
     }
-    
+
     /** Are two queries equals - tests shape and details.
      * Equality means that the queries do the same thing, including
      * same variables, in the same places.  Being unequals does
-     * <b>not</b> mean the queries do different things.  
-     * 
+     * <b>not</b> mean the queries do different things.
+     *
      * For example, reordering a group or union
      * means that a query is different.
-     *  
-     * Two instances of a query parsed from the same string are equal. 
+     *
+     * Two instances of a query parsed from the same string are equal.
      */
-    
+
     @Override
     public boolean equals(Object other)
-    { 
+    {
         if ( ! ( other instanceof Query ) )
             return false ;
         if ( this == other ) return true ;
         return QueryCompare.equals(this, (Query)other) ;
     }
-    
+
 //    public static boolean sameAs(Query query1, Query query2)
-//    { return query1.sameAs(query2) ; }  
+//    { return query1.sameAs(query2) ; }
 
     @Override
     public void output(IndentedWriter out)
@@ -809,18 +832,18 @@
     }
 
     /** Convert the query to a string */
-    
+
     public String serialize()
     {
         IndentedLineBuffer buff = new IndentedLineBuffer() ;
         serialize(buff) ;
         return buff.toString();
     }
-    
+
     /** Convert the query to a string in the given syntax
      * @param syntax
      */
-    
+
     public String serialize(Syntax syntax)
     {
         IndentedLineBuffer buff = new IndentedLineBuffer() ;
@@ -832,54 +855,54 @@
      * @param out  OutputStream
      */
     public void serialize(OutputStream out) { serialize(out, syntax); }
-    
+
     /** Output the query
-     * 
+     *
      * @param out     OutputStream
      * @param syntax  Syntax URI
      */
-    
-    public void serialize(OutputStream out, Syntax syntax) { 
+
+    public void serialize(OutputStream out, Syntax syntax) {
         IndentedWriter writer = new IndentedWriter(out) ;
         serialize(writer, syntax) ;
         writer.flush() ;
-        try { out.flush() ; } catch (Exception ex) { } 
+        try { out.flush() ; } catch (Exception ex) { }
     }
 
     /** Format the query into the buffer
-     * 
+     *
      * @param buff    IndentedLineBuffer
      */
-    
-    public void serialize(IndentedLineBuffer buff) { 
-        serialize(buff, syntax); 
+
+    public void serialize(IndentedLineBuffer buff) {
+        serialize(buff, syntax);
     }
-    
+
     /** Format the query
-     * 
+     *
      * @param buff       IndentedLineBuffer in which to place the unparsed query
      * @param outSyntax  Syntax URI
      */
-    
-    public void serialize(IndentedLineBuffer buff, Syntax outSyntax) { 
+
+    public void serialize(IndentedLineBuffer buff, Syntax outSyntax) {
         serialize((IndentedWriter)buff, outSyntax);
     }
 
     /** Format the query
-     * 
+     *
      * @param writer  IndentedWriter
      */
-    
-    public void serialize(IndentedWriter writer) { 
-        serialize(writer, syntax); 
+
+    public void serialize(IndentedWriter writer) {
+        serialize(writer, syntax);
     }
 
     /** Format the query
-     * 
+     *
      * @param writer     IndentedWriter
      * @param outSyntax  Syntax URI
      */
-    
+
     public void serialize(IndentedWriter writer, Syntax outSyntax)
     {
         // Try to use a serializer factory if available
diff --git a/jena-arq/src/main/java/org/apache/jena/query/QueryType.java b/jena-arq/src/main/java/org/apache/jena/query/QueryType.java
new file mode 100644
index 0000000..15ecb7d
--- /dev/null
+++ b/jena-arq/src/main/java/org/apache/jena/query/QueryType.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.query;
+
+/**
+ * QueryType: one of the different kinds of query.
+ */
+public enum QueryType {
+    UNKNOWN,
+    // SPARQL 1.1
+    SELECT, CONSTRUCT, ASK, DESCRIBE,
+    // ARQ special formats.
+    CONSTRUCT_JSON, CONSTRUCT_QUADS
+}
\ No newline at end of file
diff --git a/jena-arq/src/main/java/org/apache/jena/riot/thrift/Binding2Thrift.java b/jena-arq/src/main/java/org/apache/jena/riot/thrift/Binding2Thrift.java
index 8a95acf..4ef2b72 100644
--- a/jena-arq/src/main/java/org/apache/jena/riot/thrift/Binding2Thrift.java
+++ b/jena-arq/src/main/java/org/apache/jena/riot/thrift/Binding2Thrift.java
@@ -19,6 +19,7 @@
 package org.apache.jena.riot.thrift;
 
 import java.io.OutputStream ;
+import java.util.ArrayList;
 import java.util.Collection ;
 import java.util.Iterator ;
 
@@ -51,13 +52,7 @@
     }
 
     private void varsRow() {
-        RDF_VarTuple vrow = new RDF_VarTuple() ;
-        // ** Java8
-//        vars.iterator().forEachRemaining( v -> {
-//            RDF_VAR rv = new RDF_VAR() ;
-//            rv.setName(v.getName()) ;
-//            vrow.addToVars(rv) ;
-//        }) ;
+        RDF_VarTuple vrow = new RDF_VarTuple(new ArrayList<>(vars.size())) ;
         for ( Var v : vars ) {
             RDF_VAR rv = new RDF_VAR() ;
             rv.setName(v.getName()) ;
diff --git a/jena-arq/src/main/java/org/apache/jena/riot/thrift/Thift2Binding.java b/jena-arq/src/main/java/org/apache/jena/riot/thrift/Thift2Binding.java
index cfcb874..6ce84f3 100644
--- a/jena-arq/src/main/java/org/apache/jena/riot/thrift/Thift2Binding.java
+++ b/jena-arq/src/main/java/org/apache/jena/riot/thrift/Thift2Binding.java
@@ -64,16 +64,13 @@
         RDF_VarTuple vrow = new RDF_VarTuple() ;
         try { vrow.read(protocol) ; }
         catch (TException e) { TRDF.exception(e) ; }
-        // ** Java8
-//        vrow.getVars().forEach(rv -> {
-//            String vn = rv.getName() ;
-//            Var v = Var.alloc(rv.getName()) ;
-//            varNames.add(vn) ;
-//        }) ;
-        for ( RDF_VAR rv : vrow.getVars() ) {
-            String vn = rv.getName() ;
-            Var v = Var.alloc(rv.getName()) ;
-            varNames.add(vn) ;
+        if ( vrow.getVars() != null ) {
+            // It can be null if there are no variables and both the encoder
+            // and the allocation above used RDF_VarTuple().
+            for ( RDF_VAR rv : vrow.getVars() ) {
+                String vn = rv.getName() ;
+                varNames.add(vn) ;
+            }
         }
         vars = Var.varList(varNames) ;
     }
@@ -81,7 +78,6 @@
     public List<Var> getVars()              { return vars ; }
 
     public List<String> getVarNames()       { return varNames ; }
-
     
     @Override
     protected Binding moveToNext() {
diff --git a/jena-arq/src/main/java/org/apache/jena/sparql/core/QueryCompare.java b/jena-arq/src/main/java/org/apache/jena/sparql/core/QueryCompare.java
index 7a6df46..b29f586 100644
--- a/jena-arq/src/main/java/org/apache/jena/sparql/core/QueryCompare.java
+++ b/jena-arq/src/main/java/org/apache/jena/sparql/core/QueryCompare.java
@@ -61,7 +61,7 @@
 
     @Override
     public void visitResultForm(Query query1)
-    { check("Query result form", query1.getQueryType() == query2.getQueryType()) ; }
+    { check("Query result form", query1.queryType() == query2.queryType()) ; }
 
     @Override
     public void visitPrologue(Prologue query1)
diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnection.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnection.java
index b131830..17fa4dc 100644
--- a/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnection.java
+++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnection.java
@@ -31,12 +31,12 @@
 /**
  * Interface for SPARQL operations on a datasets, whether local or remote.
  * Operations can performed via this interface or via the various
- * interfaces for a subset of the operations.  
- * 
+ * interfaces for a subset of the operations.
+ *
  * <ul>
- * <li>query ({@link SparqlQueryConnection}) 
+ * <li>query ({@link SparqlQueryConnection})
  * <li>update ({@link SparqlUpdateConnection})
- * <li>graph store protocol ({@link RDFDatasetConnection}). 
+ * <li>graph store protocol ({@link RDFDatasetConnection}).
  * </ul>
  *
  * For remote operations, the
@@ -45,60 +45,59 @@
  * <a href="http://www.w3.org/TR/sparql11-http-rdf-update/">SPARQL Graph Store
  * Protocol</a> for the graph operations and in addition, there are analogous
  * operations on datasets (fetch, load, put; but not delete).
- * 
+ *
  * {@code RDFConnection} provides transaction boundaries. If not in a
  * transaction, an implicit transactional wrapper is applied ("autocommit").
- * 
+ *
  * Remote SPARQL operations are atomic but without additional capabilities from
  * the remote server, multiple operations are not combined into a single
  * transaction.
- * 
- * Not all implementations may implement all operations. 
+ *
+ * Not all implementations may implement all operations.
  * See the implementation notes for details.
- * 
+ *
  * @see RDFConnectionFactory
  * @see RDFConnectionLocal
  * @see RDFConnectionRemote
  * @see SparqlQueryConnection
  * @see SparqlUpdateConnection
  * @see RDFDatasetConnection
- * 
- */  
+ */
+
 public interface RDFConnection extends
         SparqlQueryConnection, SparqlUpdateConnection, RDFDatasetConnection,
-        Transactional, AutoCloseable 
+        Transactional, AutoCloseable
  {
     // Default implementations could be pushed up but then they can't be mentioned here
-    // so that the Javadoc for RDFConnection is not in one place.
+    // and the javadoc for RDFConnection is not in one place.
     // Inheriting interfaces and re-mentioning gets the javadoc in one place.
-    
 
     // ---- SparqlQueryConnection
-    // Where the argument is a query string, this code avoid simply parsing it and calling
+    // Where the argument is a query string, this code avoids simply parsing it and calling
     // the Query object form. This allows RDFConnectionRemote to pass the query string
     // untouched to the connection depending in the internal setting to parse/check
     // queries.
-    // Java9 introduces private methods for interfaces which could clear the duplication up by passing in a Creator<QueryExecution>. 
-    // Alternatively, RDFConnectionBase with protected query(String, Query)
-    // See RDFConnectionRemote
-    
+    // Java9 introduces private methods for interfaces which could clear the duplication up by passing in a Creator<QueryExecution>.
+     * (Alternatively, add RDFConnectionBase with protected query(String, Query).)
+    // See RDFConnectionRemote.
+
     /**
-     * Execute a SELECT query and process the ResultSet with the handler code.  
-     * @param query
+     * Execute a SELECT query and process the ResultSet with the handler code.
+     * @param queryString
      * @param resultSetAction
      */
     @Override
-    public default void queryResultSet(String query, Consumer<ResultSet> resultSetAction) {
-        Txn.executeRead(this, ()->{ 
-            try ( QueryExecution qExec = query(query) ) {
+    public default void queryResultSet(String queryString, Consumer<ResultSet> resultSetAction) {
+        Txn.executeRead(this, ()->{
+            try ( QueryExecution qExec = query(queryString) ) {
                 ResultSet rs = qExec.execSelect();
                 resultSetAction.accept(rs);
             }
         } );
     }
-    
+
     /**
-     * Execute a SELECT query and process the ResultSet with the handler code.  
+     * Execute a SELECT query and process the ResultSet with the handler code.
      * @param query
      * @param resultSetAction
      */
@@ -106,30 +105,30 @@
     public default void queryResultSet(Query query, Consumer<ResultSet> resultSetAction) {
         if ( ! query.isSelectType() )
             throw new JenaConnectionException("Query is not a SELECT query");
-        Txn.executeRead(this, ()->{ 
+        Txn.executeRead(this, ()->{
             try ( QueryExecution qExec = query(query) ) {
                 ResultSet rs = qExec.execSelect();
                 resultSetAction.accept(rs);
             }
-        } ); 
+        } );
     }
 
     /**
-     * Execute a SELECT query and process the rows of the results with the handler code.  
-     * @param query
+     * Execute a SELECT query and process the rows of the results with the handler code.
+     * @param queryString
      * @param rowAction
      */
     @Override
-    public default void querySelect(String query, Consumer<QuerySolution> rowAction) {
-        Txn.executeRead(this, ()->{ 
-            try ( QueryExecution qExec = query(query) ) {
+    public default void querySelect(String queryString, Consumer<QuerySolution> rowAction) {
+        Txn.executeRead(this, ()->{
+            try ( QueryExecution qExec = query(queryString) ) {
                 qExec.execSelect().forEachRemaining(rowAction);
             }
-        } ); 
+        } );
     }
-    
+
     /**
-     * Execute a SELECT query and process the rows of the results with the handler code.  
+     * Execute a SELECT query and process the rows of the results with the handler code.
      * @param query
      * @param rowAction
      */
@@ -137,63 +136,63 @@
     public default void querySelect(Query query, Consumer<QuerySolution> rowAction) {
         if ( ! query.isSelectType() )
             throw new JenaConnectionException("Query is not a SELECT query");
-        Txn.executeRead(this, ()->{ 
+        Txn.executeRead(this, ()->{
             try ( QueryExecution qExec = query(query) ) {
                 qExec.execSelect().forEachRemaining(rowAction);
             }
-        } ); 
+        } );
     }
 
     /** Execute a CONSTRUCT query and return as a Model */
     @Override
-    public default Model queryConstruct(String query) {
-        return 
-            Txn.calculateRead(this, ()->{ 
-                try ( QueryExecution qExec = query(query) ) {
+    public default Model queryConstruct(String queryString) {
+        return
+            Txn.calculateRead(this, ()->{
+                try ( QueryExecution qExec = query(queryString) ) {
                     return qExec.execConstruct();
                 }
-            } ); 
+            } );
     }
-    
+
     /** Execute a CONSTRUCT query and return as a Model */
     @Override
     public default Model queryConstruct(Query query) {
-        return 
-            Txn.calculateRead(this, ()->{ 
+        return
+            Txn.calculateRead(this, ()->{
                 try ( QueryExecution qExec = query(query) ) {
                     return qExec.execConstruct();
                 }
-            } ); 
+            } );
     }
 
     /** Execute a DESCRIBE query and return as a Model */
     @Override
-    public default Model queryDescribe(String query) {
-        return 
-            Txn.calculateRead(this, ()->{ 
-                try ( QueryExecution qExec = query(query) ) {
+    public default Model queryDescribe(String queryString) {
+        return
+            Txn.calculateRead(this, ()->{
+                try ( QueryExecution qExec = query(queryString) ) {
                     return qExec.execDescribe();
                 }
             } );
     }
-    
+
     /** Execute a DESCRIBE query and return as a Model */
     @Override
     public default Model queryDescribe(Query query) {
-        return 
-            Txn.calculateRead(this, ()->{ 
+        return
+            Txn.calculateRead(this, ()->{
                 try ( QueryExecution qExec = query(query) ) {
                     return qExec.execDescribe();
                 }
             } );
     }
-    
+
     /** Execute a ASK query and return a boolean */
     @Override
-    public default boolean queryAsk(String query) {
-        return 
-            Txn.calculateRead(this, ()->{ 
-                try ( QueryExecution qExec = query(query) ) {
+    public default boolean queryAsk(String queryString) {
+        return
+            Txn.calculateRead(this, ()->{
+                try ( QueryExecution qExec = query(queryString) ) {
                     return qExec.execAsk();
                 }
             } );
@@ -202,20 +201,20 @@
     /** Execute a ASK query and return a boolean */
     @Override
     public default boolean queryAsk(Query query) {
-        return 
-            Txn.calculateRead(this, ()->{ 
+        return
+            Txn.calculateRead(this, ()->{
                 try ( QueryExecution qExec = query(query) ) {
                     return qExec.execAsk();
                 }
-            } ); 
+            } );
     }
 
     /** Setup a SPARQL query execution.
-     * 
-     *  See also {@link #querySelect(Query, Consumer)}, {@link #queryConstruct(Query)}, 
+     *
+     *  See also {@link #querySelect(Query, Consumer)}, {@link #queryConstruct(Query)},
      *  {@link #queryDescribe(Query)}, {@link #queryAsk(Query)}
      *  for ways to execute queries for of a specific form.
-     * 
+     *
      * @param query
      * @return QueryExecution
      */
@@ -223,23 +222,27 @@
     public QueryExecution query(Query query);
 
     /** Setup a SPARQL query execution.
-     * 
-     *  See also {@link #querySelect(String, Consumer)}, {@link #queryConstruct(String)}, 
+     * This is a low-level operation.
+     * Handling the {@link QueryExecution} should be done with try-resource.
+     * Some {@link QueryExecution QueryExecutions}, such as ones connecting to a remote server,
+     * need to be properly closed to release system resources.
+     *
+     *  See also {@link #querySelect(String, Consumer)}, {@link #queryConstruct(String)},
      *  {@link #queryDescribe(String)}, {@link #queryAsk(String)}
-     *  for ways to execute queries for of a specific form.
-     * 
-     * @param queryString 
+     *  for ways to execute queries of a specific form.
+     *
+     * @param queryString
      * @return QueryExecution
      */
     @Override
     public default QueryExecution query(String queryString) {
         return query(QueryFactory.create(queryString));
     }
-    
+
     // ---- SparqlUpdateConnection
-    
+
     /** Execute a SPARQL Update.
-     * 
+     *
      * @param update
      */
     @Override
@@ -248,99 +251,99 @@
     }
 
     /** Execute a SPARQL Update.
-     * 
+     *
      * @param update
      */
     @Override
-    public void update(UpdateRequest update); 
-    
+    public void update(UpdateRequest update);
+
     /** Execute a SPARQL Update.
-     * 
+     *
      * @param updateString
      */
     @Override
     public default void update(String updateString) {
         update(UpdateFactory.create(updateString));
     }
-    
+
     // ---- RDFDatasetConnection
-    
+
     /** Load (add, append) RDF into a named graph in a dataset.
-     * This is SPARQL Graph Store Protocol HTTP POST or equivalent. 
-     * 
+     * This is SPARQL Graph Store Protocol HTTP POST or equivalent.
+     *
      * @param graphName Graph name (null or "default" for the default graph)
      * @param file File of the data.
      */
     @Override
     public void load(String graphName, String file);
-    
+
     /** Load (add, append) RDF into the default graph of a dataset.
-     * This is SPARQL Graph Store Protocol HTTP POST or equivalent. 
-     * 
+     * This is SPARQL Graph Store Protocol HTTP POST or equivalent.
+     *
      * @param file File of the data.
      */
     @Override
     public void load(String file);
 
     /** Load (add, append) RDF into a named graph in a dataset.
-     * This is SPARQL Graph Store Protocol HTTP POST or equivalent. 
-     * 
+     * This is SPARQL Graph Store Protocol HTTP POST or equivalent.
+     *
      * @param graphName Graph name (null or "default" for the default graph)
      * @param model Data.
      */
     @Override
     public void load(String graphName, Model model);
-    
+
     /** Load (add, append) RDF into the default graph of a dataset.
-     * This is SPARQL Graph Store Protocol HTTP POST or equivalent. 
-     * 
+     * This is SPARQL Graph Store Protocol HTTP POST or equivalent.
+     *
      * @param model Data.
      */
     @Override
     public void load(Model model);
 
     /** Set the contents of a named graph of a dataset.
-     * Any existing data is lost. 
-     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent. 
+     * Any existing data is lost.
+     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent.
      *
      * @param graphName Graph name (null or "default" for the default graph)
      * @param file File of the data.
      */
     @Override
     public void put(String graphName, String file);
-    
+
     /** Set the contents of the default graph of a dataset.
-     * Any existing data is lost. 
-     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent. 
-     * 
+     * Any existing data is lost.
+     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent.
+     *
      * @param file File of the data.
      */
     @Override
     public void put(String file);
-        
+
     /** Set the contents of a named graph of a dataset.
-     * Any existing data is lost. 
-     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent. 
+     * Any existing data is lost.
+     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent.
      *
      * @param graphName Graph name (null or "default" for the default graph)
      * @param model Data.
      */
     @Override
     public void put(String graphName, Model model);
-    
+
     /** Set the contents of the default graph of a dataset.
-     * Any existing data is lost. 
-     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent. 
-     * 
+     * Any existing data is lost.
+     * This is SPARQL Graph Store Protocol HTTP PUT or equivalent.
+     *
      * @param model Data.
      */
     @Override
     public void put( Model model);
-        
+
     /**
      * Delete a graph from the dataset.
      * Null or "default" means the default graph, which is cleared, not removed.
-     * 
+     *
      * @param graphName
      */
     @Override
@@ -348,10 +351,10 @@
 
     /**
      * Remove all data from the default graph.
-     */ 
+     */
     @Override
     public void delete();
-    
+
     /* Load (add, append) RDF triple or quad data into a dataset. Triples wil go into the default graph.
      * This is not a SPARQL Graph Store Protocol operation.
      * It is an HTTP POST equivalent to the dataset.
@@ -373,7 +376,7 @@
      */
     @Override
     public void putDataset(String file);
-    
+
     /* Set RDF triple or quad data as the dataset contents.
      * Triples will go into the default graph, quads in named graphs.
      * This is not a SPARQL Graph Store Protocol operation.
@@ -384,14 +387,13 @@
 
     //    /** Clear the dataset - remove all named graphs, clear the default graph. */
     //    public void clearDataset();
-    
-    
+
     /** Test whether this connection is closed or not */
     @Override
     public boolean isClosed();
-    
-    /** Close this connection.  Use with try-resource. */ 
-    @Override 
+
+    /** Close this connection.  Use with try-resource. */
+    @Override
     public void close();
 }
 
diff --git a/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnectionRemote.java b/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnectionRemote.java
index fa76242..1447d1b 100644
--- a/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnectionRemote.java
+++ b/jena-rdfconnection/src/main/java/org/apache/jena/rdfconnection/RDFConnectionRemote.java
@@ -21,6 +21,7 @@
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.util.Objects;
+import java.util.function.Consumer;
 import java.util.function.Supplier;
 
 import org.apache.http.HttpEntity;
@@ -78,9 +79,10 @@
     protected final RDFFormat outputTriples;
     protected final String acceptGraph;
     protected final String acceptDataset;
-    protected final String acceptSparqlResults;
     protected final String acceptSelectResult;
     protected final String acceptAskResult;
+    // All-purpose SPARQL results header, used if the specific cases above do not apply.
+    protected final String acceptSparqlResults;
 
     // Whether to check SPARQL queries given as strings by parsing them.
     protected final boolean parseCheckQueries;
@@ -142,19 +144,99 @@
         return destination;
     }
 
+    // For custom content negotiation.
+
+    // This class overrides each of these to pass down the query type as well.
+    // Then we can derive the accept header if customized without needing to parse
+    // the query. This allows an arbitrary string for a query and allows the remote
+    // server to have custom syntax extensions or interpretations of comments.
+
+    /**
+     * Execute a SELECT query and process the ResultSet with the handler code.
+     * @param queryString
+     * @param resultSetAction
+     */
+    @Override
+    public void queryResultSet(String queryString, Consumer<ResultSet> resultSetAction) {
+        Txn.executeRead(this, ()->{
+            try ( QueryExecution qExec = query(queryString, QueryType.SELECT) ) {
+                ResultSet rs = qExec.execSelect();
+                resultSetAction.accept(rs);
+            }
+        } );
+    }
+
+    /**
+     * Execute a SELECT query and process the rows of the results with the handler code.
+     * @param queryString
+     * @param rowAction
+     */
+    @Override
+    public void querySelect(String queryString, Consumer<QuerySolution> rowAction) {
+        Txn.executeRead(this, ()->{
+            try ( QueryExecution qExec = query(queryString, QueryType.SELECT) ) {
+                qExec.execSelect().forEachRemaining(rowAction);
+            }
+        } );
+    }
+
+    /** Execute a CONSTRUCT query and return as a Model */
+    @Override
+    public Model queryConstruct(String queryString) {
+        return
+            Txn.calculateRead(this, ()->{
+                try ( QueryExecution qExec = query(queryString, QueryType.CONSTRUCT) ) {
+                    return qExec.execConstruct();
+                }
+            } );
+    }
+
+    /** Execute a DESCRIBE query and return as a Model */
+    @Override
+    public Model queryDescribe(String queryString) {
+        return
+            Txn.calculateRead(this, ()->{
+                try ( QueryExecution qExec = query(queryString, QueryType.DESCRIBE) ) {
+                    return qExec.execDescribe();
+                }
+            } );
+    }
+
+    /** Execute an ASK query and return a boolean */
+    @Override
+    public boolean queryAsk(String queryString) {
+        return
+            Txn.calculateRead(this, ()->{
+                try ( QueryExecution qExec = query(queryString, QueryType.ASK) ) {
+                    return qExec.execAsk();
+                }
+            } );
+    }
+
+    /**
+     * Operation that passes down the query type so the accept header can be set without parsing the query string.
+     * @param queryString
+     * @param queryType
+     * @return QueryExecution
+     */
+    protected QueryExecution query(String queryString, QueryType queryType) {
+        Objects.requireNonNull(queryString);
+        return queryExec(null, queryString, queryType);
+    }
+
     @Override
     public QueryExecution query(String queryString) {
         Objects.requireNonNull(queryString);
-        return queryExec(null, queryString);
+        return queryExec(null, queryString, null);
     }
 
     @Override
     public QueryExecution query(Query query) {
         Objects.requireNonNull(query);
-        return queryExec(query, null);
+        return queryExec(query, null, null);
     }
 
-    private QueryExecution queryExec(Query query, String queryString) {
+    private QueryExecution queryExec(Query query, String queryString, QueryType queryType) {
         checkQuery();
         if ( query == null && queryString == null )
             throw new InternalErrorException("Both query and query string are null");
@@ -164,34 +246,59 @@
         }
 
         // Use the query string as provided if possible, otherwise serialize the query.
-        String queryStringToSend = ( queryString != null ) ?  queryString : query.toString();
-        return exec(()-> createQueryExecution(query, queryStringToSend));
+        String queryStringToSend = ( queryString != null ) ? queryString : query.toString();
+        return exec(()-> createQueryExecution(query, queryStringToSend, queryType));
     }
 
     // Create the QueryExecution
-    private QueryExecution createQueryExecution(Query query, String queryStringToSend) {
+    private QueryExecution createQueryExecution(Query query, String queryStringToSend, QueryType queryType) {
         QueryExecution qExec = new QueryEngineHTTP(svcQuery, queryStringToSend, httpClient, httpContext);
         QueryEngineHTTP qEngine = (QueryEngineHTTP)qExec;
+        QueryType qt = queryType;
+        if ( query != null && qt == null )
+            qt = query.queryType();
+        if ( qt == null )
+            qt = QueryType.UNKNOWN;
         // Set the accept header - use the most specific method.
-        if ( query != null ) {
-            if ( query.isSelectType() && acceptSelectResult != null )
-                qEngine.setAcceptHeader(acceptSelectResult);
-            if ( query.isAskType() && acceptAskResult != null )
-                qEngine.setAcceptHeader(acceptAskResult);
-            if ( ( query.isConstructType() || query.isDescribeType() ) && acceptGraph != null )
-                qEngine.setAcceptHeader(acceptGraph);
-            if ( query.isConstructQuad() )
-                qEngine.setDatasetContentType(acceptDataset);
+        switch(qt) {
+            case SELECT :
+                if ( acceptSelectResult != null )
+                    qEngine.setAcceptHeader(acceptSelectResult);
+                break;
+            case ASK :
+                if ( acceptAskResult != null )
+                    qEngine.setAcceptHeader(acceptAskResult);
+                break;
+            case DESCRIBE :
+            case CONSTRUCT :
+                if ( acceptGraph != null )
+                    qEngine.setAcceptHeader(acceptGraph);
+                break;
+            case UNKNOWN:
+                // All-purpose content type.
+                if ( acceptSparqlResults != null )
+                    qEngine.setAcceptHeader(acceptSparqlResults);
+                else
+                    // No idea! Set an "anything" and hope.
+                    // (Reasonable chance this is going to end up as HTML though.)
+                    qEngine.setAcceptHeader("*/*");
+            default :
+                break;
         }
-        // Use the general one.
-        if ( qEngine.getAcceptHeader() == null && acceptSparqlResults != null )
-            qEngine.setAcceptHeader(acceptSparqlResults);
         // Make sure it was set somehow.
         if ( qEngine.getAcceptHeader() == null )
             throw new JenaConnectionException("No Accept header");
         return qExec ;
     }
 
+    private void acc(StringBuilder sBuff, String acceptString) {
+        if ( acceptString == null )
+            return;
+        if ( sBuff.length() != 0 )
+            sBuff.append(", ");
+        sBuff.append(acceptString);
+    }
+
     @Override
     public void update(String updateString) {
         Objects.requireNonNull(updateString);
@@ -427,7 +534,7 @@
         });
     }
 
-    /** Do a PUT or POST to a dataset, sending the contents of a daatsets.
+    /** Do a PUT or POST to a dataset, sending the contents of a dataset.
      * The Content-Type is {@code application/n-quads}.
      * <p>
      * "Replace" implies PUT, otherwise a POST is used.
@@ -497,13 +604,13 @@
 
     /** Create an HttpEntity for the graph. */
     protected HttpEntity graphToHttpEntity(Graph graph, RDFFormat syntax) {
-        // Length - leaves connection reusable. 
+        // Length - leaves connection reusable.
         return graphToHttpEntityWithLength(graph, syntax);
     }
-    
-    /** 
+
+    /**
      * Create an HttpEntity for the graph. The HTTP entity will have the length but this
-     * requires serialising the graph at the point when this function is called.  
+     * requires serialising the graph at the point when this function is called.
      */
     private HttpEntity graphToHttpEntityWithLength(Graph graph, RDFFormat syntax) {
         String ct = syntax.getLang().getContentType().getContentType();
@@ -535,10 +642,10 @@
 
     /** Create an HttpEntity for the dataset */
     protected HttpEntity datasetToHttpEntity(DatasetGraph dataset, RDFFormat syntax) {
-        // Length - leaves connection reusable. 
+        // Length - leaves connection reusable.
         return datasetToHttpEntityWithLength(dataset, syntax);
     }
-        
+
     private HttpEntity datasetToHttpEntityWithLength(DatasetGraph dataset, RDFFormat syntax) {
         String ct = syntax.getLang().getContentType().getContentType();
         ByteArrayOutputStream out = new ByteArrayOutputStream(128*1024);
diff --git a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/AbstractTestRDFConnection.java b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/AbstractTestRDFConnection.java
index 0482e9c..91971f3 100644
--- a/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/AbstractTestRDFConnection.java
+++ b/jena-rdfconnection/src/test/java/org/apache/jena/rdfconnection/AbstractTestRDFConnection.java
@@ -23,12 +23,9 @@
 import org.apache.jena.atlas.iterator.Iter;
 import org.apache.jena.atlas.junit.BaseTest;
 import org.apache.jena.atlas.lib.StrUtils;
-import org.apache.jena.query.Dataset;
-import org.apache.jena.query.DatasetFactory;
-import org.apache.jena.query.ReadWrite;
+import org.apache.jena.query.*;
 import org.apache.jena.rdf.model.Model;
 import org.apache.jena.rdf.model.ModelFactory;
-import org.apache.jena.rdfconnection.RDFConnection;
 import org.apache.jena.riot.RDFDataMgr;
 import org.apache.jena.sparql.core.DatasetGraph;
 import org.apache.jena.sparql.sse.SSE;
@@ -41,10 +38,10 @@
 public abstract class AbstractTestRDFConnection extends BaseTest {
     // Testing data.
     static String DIR = "testing/RDFConnection/";
-    
+
     protected abstract RDFConnection connection();
     // Not all connection types support abort.
-    protected abstract boolean supportsAbort(); 
+    protected abstract boolean supportsAbort();
 
     // ---- Data
     static String dsgdata = StrUtils.strjoinNL
@@ -54,7 +51,7 @@
         ,"  (graph :g2 (:s :p :o) (:s2 :p2 :o))"
         ,")"
         );
-    
+
     static String dsgdata2 = StrUtils.strjoinNL
         ("(dataset"
         ,"  (graph (:x :y :z))"
@@ -62,7 +59,7 @@
         ,")"
         );
 
-    
+
     static String graph1 = StrUtils.strjoinNL
         ("(graph (:s :p :o) (:s1 :p1 :o))"
         );
@@ -70,7 +67,7 @@
     static String graph2 = StrUtils.strjoinNL
         ("(graph (:s :p :o) (:s2 :p2 :o))"
         );
-    
+
     static DatasetGraph dsg        = SSE.parseDatasetGraph(dsgdata);
     static Dataset      dataset    = DatasetFactory.wrap(dsg);
     static DatasetGraph dsg2       = SSE.parseDatasetGraph(dsgdata2);
@@ -91,9 +88,9 @@
         // Allow multiple close()
         conn.close();
     }
-    
+
     @Test public void dataset_load_1() {
-        String testDataFile = DIR+"data.trig"; 
+        String testDataFile = DIR+"data.trig";
         try ( RDFConnection conn = connection() ) {
             conn.loadDataset(testDataFile);
             Dataset ds0 = RDFDataMgr.loadDataset(testDataFile);
@@ -104,7 +101,7 @@
 
     @Test public void dataset_put_1() {
         try ( RDFConnection conn = connection() ) {
-            conn.putDataset(dataset); 
+            conn.putDataset(dataset);
             Dataset ds1 = conn.fetchDataset();
             assertTrue("Datasets not isomorphic", isomorphic(dataset, ds1));
         }
@@ -112,7 +109,7 @@
 
     @Test public void dataset_put_2() {
         try ( RDFConnection conn = connection() ) {
-            conn.putDataset(dataset); 
+            conn.putDataset(dataset);
             conn.putDataset(dataset2);
             Dataset ds1 = conn.fetchDataset();
             assertTrue("Datasets not isomorphic", isomorphic(dataset2, ds1));
@@ -126,7 +123,7 @@
             assertTrue("Datasets not isomorphic", isomorphic(dataset, ds1));
         }
     }
-    
+
     @Test public void dataset_post_2() {
         try ( RDFConnection conn = connection() ) {
             conn.loadDataset(dataset);
@@ -140,9 +137,9 @@
     }
 
     // Default graph
-    
+
     @Test public void graph_load_1() {
-        String testDataFile = DIR+"data.ttl"; 
+        String testDataFile = DIR+"data.ttl";
         Model m0 = RDFDataMgr.loadModel(testDataFile);
         try ( RDFConnection conn = connection() ) {
             conn.load(testDataFile);
@@ -153,7 +150,7 @@
 
     @Test public void graph_put_1() {
         try ( RDFConnection conn = connection() ) {
-            conn.put(model1); 
+            conn.put(model1);
             Dataset ds1 = conn.fetchDataset();
             Model m0 = conn.fetch();
             assertTrue("Models not isomorphic", isomorphic(model1, ds1.getDefaultModel()));
@@ -164,7 +161,7 @@
 
     @Test public void graph_put_2() {
         try ( RDFConnection conn = connection() ) {
-            conn.put(model1); 
+            conn.put(model1);
             conn.put(model2);
             Model m = conn.fetch();
             assertTrue("Models not isomorphic", isomorphic(m, model2));
@@ -179,7 +176,7 @@
             assertTrue("Models not isomorphic", isomorphic(m, model1));
         }
     }
-    
+
     @Test public void graph_post_2() {
         try ( RDFConnection conn = connection() ) {
             conn.load(model1);
@@ -191,11 +188,11 @@
     }
 
     // DELETE
-    
+
     // Named graphs
-    
+
     @Test public void named_graph_load_1() {
-        String testDataFile = DIR+"data.ttl"; 
+        String testDataFile = DIR+"data.ttl";
         Model m0 = RDFDataMgr.loadModel(testDataFile);
         try ( RDFConnection conn = connection() ) {
             conn.load(graphName, testDataFile);
@@ -208,7 +205,7 @@
 
     @Test public void named_graph_put_1() {
         try ( RDFConnection conn = connection() ) {
-            conn.put(graphName, model1); 
+            conn.put(graphName, model1);
             Dataset ds1 = conn.fetchDataset();
             Model m0 = conn.fetch(graphName);
             assertTrue("Models not isomorphic", isomorphic(model1, ds1.getNamedModel(graphName)));
@@ -219,7 +216,7 @@
 
     @Test public void named_graph_put_2() {
         try ( RDFConnection conn = connection() ) {
-            conn.put(graphName, model1); 
+            conn.put(graphName, model1);
             conn.put(graphName, model2);
             Model m = conn.fetch(graphName);
             assertTrue("Models not isomorphic", isomorphic(m, model2));
@@ -229,7 +226,7 @@
 
     @Test public void named_graph_put_2_different() {
         try ( RDFConnection conn = connection() ) {
-            conn.put(graphName, model1); 
+            conn.put(graphName, model1);
             conn.put(graphName2, model2);
             Model m1 = conn.fetch(graphName);
             Model m2 = conn.fetch(graphName2);
@@ -245,7 +242,7 @@
             assertTrue("Models not isomorphic", isomorphic(m, model1));
         }
     }
-    
+
     @Test public void named_graph_post_2() {
         try ( RDFConnection conn = connection() ) {
             conn.load(graphName, model1);
@@ -257,18 +254,39 @@
     }
 
     // DELETE
-    
-    // Remote connections don't support transactions fully.  
-    //@Test public void transaction_01() 
+
+    // Remote connections don't support transactions fully.
+    //@Test public void transaction_01()
 
     private static boolean isomorphic(Dataset ds1, Dataset ds2) {
         return IsoMatcher.isomorphic(ds1.asDatasetGraph(), ds2.asDatasetGraph());
     }
-    
+
     private static boolean isomorphic(Model model1, Model model2) {
         return model1.isIsomorphicWith(model2);
     }
-    
+
+    @Test public void query_01() {
+        try ( RDFConnection conn = connection() ) {
+            Txn.executeRead(conn, ()->{
+                try ( QueryExecution qExec = conn.query("SELECT ?x {}") ) {
+                    ResultSet rs = qExec.execSelect();
+                    ResultSetFormatter.consume(rs);
+                }
+            });
+        }
+    }
+
+    @Test public void query_02() {
+        try ( RDFConnection conn = connection() ) {
+            Txn.executeRead(conn, ()->{
+                try ( QueryExecution qExec = conn.query("ASK{}") ) {
+                    boolean b = qExec.execAsk();
+                    assertTrue(b);
+                }
+            });
+        }
+    }
 
     @Test public void query_ask_01() {
         try ( RDFConnection conn = connection() ) {
@@ -322,7 +340,7 @@
             assertEquals(2, m.size());
         }
     }
-    
+
     @Test public void update_01() {
         try ( RDFConnection conn = connection() ) {
             conn.update("INSERT DATA { <urn:x:s> <urn:x:p> <urn:x:o>}");
@@ -352,32 +370,32 @@
     }
     // Not all Transactional support abort.
     @Test public void transaction_commit_read_01() {
-        String testDataFile = DIR+"data.trig"; 
+        String testDataFile = DIR+"data.trig";
         try ( RDFConnection conn = connection() ) {
 
             conn.begin(ReadWrite.WRITE);
             conn.loadDataset(dataset);
             conn.commit();
             conn.end();
-            
+
             conn.begin(ReadWrite.READ);
             Model m = conn.fetch();
             assertTrue(isomorphic(m, dataset.getDefaultModel()));
             conn.end();
         }
     }
-    
+
     // Not all RDFConnections support abort.
     @Test public void transaction_abort_read02() {
         Assume.assumeTrue(supportsAbort());
-        
-        String testDataFile = DIR+"data.trig"; 
+
+        String testDataFile = DIR+"data.trig";
         try ( RDFConnection conn = connection() ) {
             conn.begin(ReadWrite.WRITE);
             conn.loadDataset(testDataFile);
             conn.abort();
             conn.end();
-            
+
             conn.begin(ReadWrite.READ);
             Model m = conn.fetch();
             assertTrue(m.isEmpty());