From 4cbea5440045751adbe68dde4e67ae663ca234b7 Mon Sep 17 00:00:00 2001 From: Wenxin Hu Date: Thu, 15 Aug 2019 11:45:23 +0200 Subject: [PATCH 1/6] add changes to make quitstore working with eccenca DataPlatform sorry, all in one ... --- quit/application.py | 6 ++- quit/conf.py | 2 +- quit/core.py | 84 +++++++++++++++++++++++++---- quit/git.py | 3 +- quit/helpers.py | 96 ++++++++++++++++++++++++++++++--- quit/tools/algebra.py | 4 +- quit/tools/evaluate.py | 6 +-- quit/tools/update.py | 16 +++++- quit/web/modules/application.py | 2 +- quit/web/modules/endpoint.py | 24 ++++++--- quit/web/templates/sparql.html | 2 +- 11 files changed, 210 insertions(+), 35 deletions(-) diff --git a/quit/application.py b/quit/application.py index 4722ec7a..780f4097 100644 --- a/quit/application.py +++ b/quit/application.py @@ -209,6 +209,7 @@ def parseArgs(args): configfile_default = "config.ttl" oauthclientid_default = None oauthclientsecret_default = None + feature_default = Feature.Unknown if 'QUIT_PORT' in os.environ: port_default = os.environ['QUIT_PORT'] @@ -234,6 +235,9 @@ def parseArgs(args): if 'QUIT_OAUTH_SECRET' in os.environ: oauthclientsecret_default = os.environ['QUIT_OAUTH_SECRET'] + if 'FEATURE' in os.environ: + feature_default = os.environ['FEATURE'] + parser = argparse.ArgumentParser() parser.add_argument('-b', '--basepath', type=str, default=basepath_default, help=basepathhelp) parser.add_argument( @@ -248,7 +252,7 @@ def parseArgs(args): parser.add_argument('--flask-debug', action='store_true') parser.add_argument('--defaultgraph-union', action='store_true') parser.add_argument('-f', '--features', nargs='*', action=FeaturesAction, - default=Feature.Unknown, + default=feature_default, help=featurehelp) parser.add_argument('-p', '--port', default=port_default, type=int) parser.add_argument('--host', default='::', type=str) diff --git a/quit/conf.py b/quit/conf.py index 2a7155d1..75972f5e 100644 --- a/quit/conf.py +++ b/quit/conf.py @@ -110,7 +110,7 @@ def __initstoreconfig(self, namespace, upstream, targetdir, configfile): return def hasFeature(self, flags): - return flags == (self.features & flags) + return flags == (self.features and flags) def getBindings(self): q = """SELECT DISTINCT ?prefix ?namespace WHERE {{ diff --git a/quit/core.py b/quit/core.py index d32dd64f..dbed24f4 100644 --- a/quit/core.py +++ b/quit/core.py @@ -415,14 +415,44 @@ def getFileReferenceAndContext(self, blob, commit): return self._blobs.get(blob) def applyQueryOnCommit(self, parsedQuery, parent_commit_ref, target_ref, query=None, - default_graph=[], named_graph=[]): + default_graph=[], named_graph=[], queryType=None, comment=None): """Apply an update query on the graph and the git repository.""" graph, commitid = self.instance(parent_commit_ref) resultingChanges, exception = graph.update(parsedQuery) if exception: # TODO need to revert or invalidate the graph at this point. 
pass - oid = self.commit(graph, resultingChanges, 'New Commit from QuitStore', parent_commit_ref, + + graphuri = None + print(resultingChanges) + print("comment: ") + print(comment) + + if comment is not None: + queryType = comment + elif len(resultingChanges) > 1: + queryType = 'Edit resource in' + for entry in resultingChanges: + if "delta" in entry: + for x in entry["delta"]: + graphuri = str(x) + if queryType == 'Modify': + ls = entry["delta"][x] + if len(ls) == 1 and "removals" in ls[0]: + queryType = 'Remove resource in' + elif len(ls) == 1 and "additions" in ls[0]: + queryType = 'Add resource in' + + if queryType is not None and graphuri is not None: + if queryType == 'InsertData' or queryType == 'Load': + message = 'Insert data into Graph <' + graphuri + '>' + elif queryType == 'DeleteData' or queryType == 'DeleteWhere': + message = 'Delete data from Graph <' + graphuri + '>' + else: + message = queryType + ' Graph <' + graphuri + '>' + else: + message = 'New Commit from QuitStore' + oid = self.commit(graph, resultingChanges, message, parent_commit_ref, target_ref, query=query, default_graph=default_graph, named_graph=named_graph) if exception: @@ -464,6 +494,8 @@ def commit(self, graph, delta, message, parent_commit_ref, target_ref, query=Non parent_commit_id = parent_commit.id try: blobs = self.getFilesForCommit(parent_commit) + print("blobs: ") + print(blobs) except KeyError: pass index = self.repository.index(parent_commit_id) @@ -473,8 +505,13 @@ def commit(self, graph, delta, message, parent_commit_ref, target_ref, query=Non graphconfig = self._graphconfigs.get(parent_commit_id) known_files = graphconfig.getfiles().keys() + print("knownfiles: ") + print(known_files) - blobs_new = self._applyKnownGraphs(delta, blobs, parent_commit, index) + blobs_new = self._applyKnownGraphs(delta, blobs, parent_commit, index, graphconfig) + print("blobs_new: ") + print(blobs_new) + print(graphconfig.getfiles().keys()) new_contexts = self._applyUnknownGraphs(delta, known_files) new_config = copy(graphconfig) @@ -498,12 +535,15 @@ def commit(self, graph, delta, message, parent_commit_ref, target_ref, query=Non author = self.repository._repository.default_signature oid = index.commit(message, author.name, author.email, ref=target_ref) + print("oid: ") + print(oid) if self.config.hasFeature(Feature.GarbageCollection): self.garbagecollection() if oid: self._commits.set(oid.hex, blobs_new) + print(oid.hex) commit = self.repository.revision(oid.hex) self.syncSingle(commit) @@ -536,25 +576,45 @@ def _build_message(self, message, query, result, default_graph, named_graph, **k out.append('{}: "{}"'.format(k, v.replace('"', "\\\""))) return "\n".join(out) - def _applyKnownGraphs(self, delta, blobs, parent_commit, index): + def _applyKnownGraphs(self, delta, blobs, parent_commit, index, graphconfig): blobs_new = set() for blob in blobs: (fileName, oid) = blob + type = None + try: file_reference, context = self.getFileReferenceAndContext(blob, parent_commit) + print(file_reference) + print(context.identifier) for entry in delta: + changeset = entry['delta'].get(context.identifier, None) if changeset: - applyChangeset(file_reference, changeset, context.identifier) - del(entry['delta'][context.identifier]) - - index.add(file_reference.path, file_reference.content) + type = entry['type'] + print("type: ") + print(type) + if type == 'DROP': + index.remove(file_reference.path) + index.remove(file_reference.path + '.graph') + graphconfig.removegraph(context.identifier) + del (entry['delta'][context.identifier]) 
+ else: + applyChangeset(file_reference, changeset, context.identifier) + del (entry['delta'][context.identifier]) self._blobs.remove(blob) - blob = fileName, index.stash[file_reference.path][0] - self._blobs.set(blob, (file_reference, context)) - blobs_new.add(blob) + + if type == 'DROP': + pass + else: + index.add(file_reference.path, file_reference.content) + blob = fileName, index.stash[file_reference.path][0] + self._blobs.set(blob, (file_reference, context)) + print("addToNew") + blobs_new.add(blob) + print(blobs_new) + except KeyError: pass return blobs_new @@ -576,8 +636,10 @@ def _applyUnknownGraphs(self, delta, known_blobs): int(m.group(1)) for b in known_blobs for m in [reg.search(b)] if m ] + [0] fileName = '{}_{}.nt'.format(iri_to_name(identifier), max(n)+1) + print("created a new file with known name") new_contexts[identifier] = FileReference(fileName, '') + print("created a new file with unknown name") fileReference = new_contexts[identifier] applyChangeset(fileReference, changeset, identifier) diff --git a/quit/git.py b/quit/git.py index d3d95bf4..6943f699 100644 --- a/quit/git.py +++ b/quit/git.py @@ -658,7 +658,8 @@ def commit(self, message, author_name, author_email, **kwargs): commiter = pygit2.Signature(commiter_name, commiter_email) # Sort index items - items = sorted(self.stash.items(), key=lambda x: (x[1][0], x[0])) + #items = sorted(self.stash.items(), key=lambda x: (x[1][0], x[0])) + items = list(self.stash.items()) # Create tree tree = IndexTree(self) diff --git a/quit/helpers.py b/quit/helpers.py index 467782b9..43caecbd 100644 --- a/quit/helpers.py +++ b/quit/helpers.py @@ -1,12 +1,20 @@ #!/usr/bin/env python3 +import cgi import logging import os +from pprint import pprint +from xml.dom.minidom import parse + +import uwsgi from pyparsing import ParseException +from rdflib import Graph +from werkzeug.wsgi import make_chunk_iter + from quit.exceptions import UnSupportedQuery, SparqlProtocolError, NonAbsoluteBaseError -from rdflib.term import URIRef +from rdflib.term import URIRef, Variable from rdflib.plugins.sparql.parserutils import CompValue, plist -from rdflib.plugins.sparql.parser import parseQuery, parseUpdate -from quit.tools.algebra import translateQuery, translateUpdate +from rdflib.plugins.sparql.parser import parseQuery, parseUpdate, Query +from quit.tools.algebra import translateQuery, translateUpdate, pprintAlgebra from rdflib.plugins.serializers.nt import _nt_row as _nt from rdflib.plugins.sparql import parser, algebra from rdflib.plugins import sparql @@ -153,6 +161,7 @@ def configure_query_dataset(parsed_query, default_graphs, named_graphs): default_graphs: a list of uri strings for default graphs named_graphs: a list of uri strings for named graphs """ + if not isinstance(default_graphs, list) or not isinstance(named_graphs, list): return parsed_query @@ -167,7 +176,8 @@ def configure_query_dataset(parsed_query, default_graphs, named_graphs): for uri in default_graphs: parsed_query[1]['datasetClause'].append(CompValue('DatasetClause', default=URIRef(uri))) for uri in named_graphs: - parsed_query[1]['datasetClause'].append(CompValue('DatasetClause', named=URIRef(uri))) + if uri not in default_graphs: + parsed_query[1]['datasetClause'].append(CompValue('DatasetClause', named=URIRef(uri))) return parsed_query @@ -210,8 +220,11 @@ def parse_query_type(query, base=None, default_graph=[], named_graph=[]): """Parse a query and add default and named graph uri if possible.""" try: parsed_query = parseQuery(query) + parsed_query = 
parse_named_graph_query(parsed_query) parsed_query = configure_query_dataset(parsed_query, default_graph, named_graph) translated_query = translateQuery(parsed_query, base=base) + + except ParseException: raise UnSupportedQuery() except SparqlProtocolError as e: @@ -287,6 +300,7 @@ def parse_sparql_request(request): default_graph = [] named_graph = [] accept_header = None + comment = None if request.method == "GET": default_graph = request.args.getlist('default-graph-uri') @@ -296,6 +310,7 @@ def parse_sparql_request(request): elif request.method == "POST": if 'Content-Type' in request.headers: content_mimetype, options = parse_options_header(request.headers['Content-Type']) + if content_mimetype == "application/x-www-form-urlencoded": if 'query' in request.form: default_graph = request.form.getlist('default-graph-uri') @@ -317,5 +332,74 @@ def parse_sparql_request(request): named_graph = request.args.getlist('using-named-graph-uri') query = request.data.decode("utf-8") type = 'update' - - return query, type, default_graph, named_graph + elif content_mimetype == "application/rdf+xml": + default_graph = request.args.getlist('default-graph-uri') + named_graph = request.args.getlist('named-graph-uri') + graph = request.args.get('graph') + data = request.data.decode("utf-8") + g = Graph() + g.parse(data=data, format='application/rdf+xml') + query = 'INSERT DATA { GRAPH <' + graph + '> { ' + g.serialize(format="nt").decode("utf-8") + ' } }' + type = 'update' + elif request.method == "PUT": + if 'Content-Type' in request.headers: + content_mimetype, options = parse_options_header(request.headers['Content-Type']) + default_graph = request.args.getlist('default-graph-uri') + named_graph = request.args.getlist('named-graph-uri') + graph = request.args.get('graph') + data = request.input_stream.read() + g = Graph() + if content_mimetype is not None: + print("content type not none") + g.parse(data=data, format=content_mimetype) + else: + g.parse(data=data, format='application/rdf+xml') + query = 'WITH <' + graph + '> DELETE { ?s ?p ?o . } INSERT { ' + g.serialize(format="nt").decode("utf-8") + ' } WHERE { ?s ?p ?o .}' + type = 'update' + comment = 'Replace' + + return query, type, default_graph, named_graph, comment + + +def parse_named_graph_query(query): + + datasetClause = query[1].datasetClause + + if datasetClause is not None: + default_list = [] + named_list = [] + for d in datasetClause: + if d.default: + default_list.append(d.default) + + for d in datasetClause: + if d.named: + if d.named in default_list: + query[1].datasetClause.remove(d) + print("namedGraph") + print(d) + print("was removed") + else: + named_list.append(d.named) + + if len(named_list) > 0: + q = "SELECT * WHERE{ FILTER ( ?" 
+ for t in query[1].where.part: + try: + term = t.term + except ParseException: + raise UnSupportedQuery() + q = q + term + " IN (<" + '>,<'.join(named_list) + ">))}" + + parsedFilter = Query.parseString(q, parseAll=True)[1].where.part[0] + query[1].where.part.append(parsedFilter) + else: + if 'graph' in query[1].where.part[0]: + pass + else: + print("no graph defined") + graphValue = query[1].where + whereValue = CompValue('GroupGraphPatternSub', part=[CompValue('GraphGraphPattern', term=Variable('selfDefinedGraphVariable'), graph=graphValue)]) + query[1].where = whereValue + + return query \ No newline at end of file diff --git a/quit/tools/algebra.py b/quit/tools/algebra.py index 5ce5364c..3ccd94e9 100644 --- a/quit/tools/algebra.py +++ b/quit/tools/algebra.py @@ -747,6 +747,8 @@ def translateQuery(q, base=None, initNs=None): q[1], visitPost=functools.partial(translatePName, prologue=prologue)) P, PV = translate(q[1]) + if Variable('selfDefinedGraphVariable') in PV: + PV.remove(Variable('selfDefinedGraphVariable')) datasetClause = q[1].datasetClause if q[1].name == 'ConstructQuery': @@ -777,7 +779,7 @@ def pp(p, ind=" "): return print("%s(" % (p.name, )) for k in p: - print("%s%s =" % (ind, k,), end=' ') + #print("%s%s =" % (ind, k,), end=' ') pp(p[k], ind + " ") print("%s)" % ind) diff --git a/quit/tools/evaluate.py b/quit/tools/evaluate.py index 0323217a..6e24184a 100644 --- a/quit/tools/evaluate.py +++ b/quit/tools/evaluate.py @@ -483,8 +483,8 @@ def evalQuery(graph, query, initBindings, base=None): # TODO re-enable original behaviour if FROM NAMED works with named graphs # https://github.com/AKSW/QuitStore/issues/144 elif d.named: - raise FromNamedError - # g = d.named - # ctx.load(g, default=False) + # raise FromNamedError + g = d.named + ctx.load(g, default=False) return evalPart(ctx, main) diff --git a/quit/tools/update.py b/quit/tools/update.py index 9dc63b46..9a17ac57 100644 --- a/quit/tools/update.py +++ b/quit/tools/update.py @@ -109,12 +109,23 @@ def evalDrop(ctx, u): """ http://www.w3.org/TR/sparql11-update/#drop """ + + res = {} + res["type"] = "DROP" + res["delta"] = {} + if ctx.dataset.store.graph_aware: for g in _graphAll(ctx, u.graphiri): + _append(res["delta"], u.graphiri, 'removals', g) ctx.dataset.store.remove_graph(g) + graph = ctx.dataset.get_context(u.graphiri) + graph -= g else: + _append(res["delta"], u.graphiri, 'removals', list(u.triples)) evalClear(ctx, u) + return res + def evalInsertData(ctx, u): """ @@ -390,7 +401,9 @@ def evalUpdate(graph, update, initBindings=None, actionLog=False): elif u.name == 'Clear': evalClear(ctx, u) elif u.name == 'Drop': - evalDrop(ctx, u) + result = evalDrop(ctx, u) + if result: + res.append(result) elif u.name == 'Create': evalCreate(ctx, u) elif u.name == 'Add': @@ -422,4 +435,5 @@ def evalUpdate(graph, update, initBindings=None, actionLog=False): except Exception: if not u.silent: raise + return res, None diff --git a/quit/web/modules/application.py b/quit/web/modules/application.py index ee854867..a02fd5b9 100644 --- a/quit/web/modules/application.py +++ b/quit/web/modules/application.py @@ -18,7 +18,7 @@ def login(): else: state = session["state"] logger.debug("request url: {}".format(request.url)) - redirect_uri = request.url + redirect_uri = 'http://docker.local/quitstore/login' authorizeEndpoint = "https://github.com/login/oauth/authorize" tokenEndpoint = "https://github.com/login/oauth/access_token" diff --git a/quit/web/modules/endpoint.py b/quit/web/modules/endpoint.py index 9783cb8c..43a399ff 100644 --- 
a/quit/web/modules/endpoint.py +++ b/quit/web/modules/endpoint.py @@ -1,11 +1,13 @@ import traceback import logging + from flask import Blueprint, request, current_app, make_response from rdflib import ConjunctiveGraph from quit.conf import Feature from quit import helpers as helpers from quit.helpers import parse_sparql_request, parse_query_type +from quit.tools.algebra import pprintAlgebra from quit.web.app import render_template, feature_required from quit.exceptions import UnSupportedQuery, SparqlProtocolError, NonAbsoluteBaseError from quit.exceptions import FromNamedError, QuitMergeConflict, RevisionNotFound @@ -33,8 +35,8 @@ 'application/json'] -@endpoint.route("/sparql", defaults={'branch_or_ref': None}, methods=['POST', 'GET']) -@endpoint.route("/sparql/", methods=['POST', 'GET']) +@endpoint.route("/sparql", defaults={'branch_or_ref': None}, methods=['POST', 'GET', 'PUT']) +@endpoint.route("/sparql/", methods=['POST', 'GET', 'PUT']) def sparql(branch_or_ref): """Process a SPARQL query (Select or Update). @@ -51,7 +53,7 @@ def sparql(branch_or_ref): logger.debug("Request method: {}".format(request.method)) - query, type, default_graph, named_graph = parse_sparql_request(request) + query, type, default_graph, named_graph, comment = parse_sparql_request(request) if query is None: if request.accept_mimetypes.best_match(['text/html']) == 'text/html': @@ -62,14 +64,20 @@ def sparql(branch_or_ref): 'to the SPARQL 1.1 standard', 400) else: # TODO allow USING NAMED when fixed in rdflib - if len(named_graph) > 0: - return make_response('FROM NAMED and USING NAMED not supported, yet', 400) + #if len(named_graph) > 0: + #print('namedGraph exists') + #return make_response('FROM NAMED and USING NAMED not supported, yet', 400) parse_type = getattr(helpers, 'parse_' + type + '_type') + try: queryType, parsedQuery = parse_type( query, quit.config.namespace, default_graph, named_graph) + if queryType != 'AskQuery': + print(queryType) + print("query:") + print(query) except UnSupportedQuery: return make_response('Unsupported Query', 400) except NonAbsoluteBaseError: @@ -77,7 +85,7 @@ def sparql(branch_or_ref): except SparqlProtocolError: return make_response('Sparql Protocol Error', 400) - if queryType in ['InsertData', 'DeleteData', 'Modify', 'DeleteWhere', 'Load']: + if queryType in ['InsertData', 'DeleteData', 'Modify', 'DeleteWhere', 'Load', 'Drop']: if branch_or_ref: commit_id = quit.repository.revision(branch_or_ref).id else: @@ -107,7 +115,7 @@ def sparql(branch_or_ref): logger.debug("target ref is: {}".format(target_ref)) oid = quit.applyQueryOnCommit(parsedQuery, parent_commit_id, target_ref, query=query, default_graph=default_graph, - named_graph=named_graph) + named_graph=named_graph, queryType=queryType, comment=comment) if resolution_method == "merge": logger.debug(("going to merge update into {} because it is at {} but {} was " @@ -137,7 +145,7 @@ def sparql(branch_or_ref): try: oid = quit.applyQueryOnCommit(parsedQuery, branch_or_ref, target_ref, query=query, default_graph=default_graph, - named_graph=named_graph) + named_graph=named_graph, queryType=queryType, comment=comment) response = make_response('', 200) response.headers["X-CurrentBranch"] = target_head if oid is not None: diff --git a/quit/web/templates/sparql.html b/quit/web/templates/sparql.html index d29c83d0..69fa2c66 100644 --- a/quit/web/templates/sparql.html +++ b/quit/web/templates/sparql.html @@ -28,7 +28,7 @@ value: '{{ config["defaultQuery"] | safe }}', sparql: { showQueryButton: true, - endpoint: '{{ request.url 
}}' + endpoint: '{{ url_for('endpoint.sparql') }}' } }); var yasr = YASR(document.getElementById("yasr"), { From ec109cda5ca300bcb6f5cb5347bdde1f6e765ecc Mon Sep 17 00:00:00 2001 From: Natanael Arndt Date: Tue, 20 Aug 2019 17:47:04 +0200 Subject: [PATCH 2/6] Remove some print() calls --- quit/core.py | 21 --------------------- quit/helpers.py | 7 +------ 2 files changed, 1 insertion(+), 27 deletions(-) diff --git a/quit/core.py b/quit/core.py index dbed24f4..8559fcba 100644 --- a/quit/core.py +++ b/quit/core.py @@ -424,9 +424,6 @@ def applyQueryOnCommit(self, parsedQuery, parent_commit_ref, target_ref, query=N pass graphuri = None - print(resultingChanges) - print("comment: ") - print(comment) if comment is not None: queryType = comment @@ -494,8 +491,6 @@ def commit(self, graph, delta, message, parent_commit_ref, target_ref, query=Non parent_commit_id = parent_commit.id try: blobs = self.getFilesForCommit(parent_commit) - print("blobs: ") - print(blobs) except KeyError: pass index = self.repository.index(parent_commit_id) @@ -505,13 +500,8 @@ def commit(self, graph, delta, message, parent_commit_ref, target_ref, query=Non graphconfig = self._graphconfigs.get(parent_commit_id) known_files = graphconfig.getfiles().keys() - print("knownfiles: ") - print(known_files) blobs_new = self._applyKnownGraphs(delta, blobs, parent_commit, index, graphconfig) - print("blobs_new: ") - print(blobs_new) - print(graphconfig.getfiles().keys()) new_contexts = self._applyUnknownGraphs(delta, known_files) new_config = copy(graphconfig) @@ -535,15 +525,12 @@ def commit(self, graph, delta, message, parent_commit_ref, target_ref, query=Non author = self.repository._repository.default_signature oid = index.commit(message, author.name, author.email, ref=target_ref) - print("oid: ") - print(oid) if self.config.hasFeature(Feature.GarbageCollection): self.garbagecollection() if oid: self._commits.set(oid.hex, blobs_new) - print(oid.hex) commit = self.repository.revision(oid.hex) self.syncSingle(commit) @@ -584,16 +571,12 @@ def _applyKnownGraphs(self, delta, blobs, parent_commit, index, graphconfig): try: file_reference, context = self.getFileReferenceAndContext(blob, parent_commit) - print(file_reference) - print(context.identifier) for entry in delta: changeset = entry['delta'].get(context.identifier, None) if changeset: type = entry['type'] - print("type: ") - print(type) if type == 'DROP': index.remove(file_reference.path) index.remove(file_reference.path + '.graph') @@ -611,9 +594,7 @@ def _applyKnownGraphs(self, delta, blobs, parent_commit, index, graphconfig): index.add(file_reference.path, file_reference.content) blob = fileName, index.stash[file_reference.path][0] self._blobs.set(blob, (file_reference, context)) - print("addToNew") blobs_new.add(blob) - print(blobs_new) except KeyError: pass @@ -636,10 +617,8 @@ def _applyUnknownGraphs(self, delta, known_blobs): int(m.group(1)) for b in known_blobs for m in [reg.search(b)] if m ] + [0] fileName = '{}_{}.nt'.format(iri_to_name(identifier), max(n)+1) - print("created a new file with known name") new_contexts[identifier] = FileReference(fileName, '') - print("created a new file with unknown name") fileReference = new_contexts[identifier] applyChangeset(fileReference, changeset, identifier) diff --git a/quit/helpers.py b/quit/helpers.py index 43caecbd..7f84d1b6 100644 --- a/quit/helpers.py +++ b/quit/helpers.py @@ -350,7 +350,6 @@ def parse_sparql_request(request): data = request.input_stream.read() g = Graph() if content_mimetype is not None: - print("content 
type not none") g.parse(data=data, format=content_mimetype) else: g.parse(data=data, format='application/rdf+xml') @@ -376,9 +375,6 @@ def parse_named_graph_query(query): if d.named: if d.named in default_list: query[1].datasetClause.remove(d) - print("namedGraph") - print(d) - print("was removed") else: named_list.append(d.named) @@ -397,9 +393,8 @@ def parse_named_graph_query(query): if 'graph' in query[1].where.part[0]: pass else: - print("no graph defined") graphValue = query[1].where whereValue = CompValue('GroupGraphPatternSub', part=[CompValue('GraphGraphPattern', term=Variable('selfDefinedGraphVariable'), graph=graphValue)]) query[1].where = whereValue - return query \ No newline at end of file + return query From ed77fb986390c4676a8ca35f659c27e8a4551740 Mon Sep 17 00:00:00 2001 From: Natanael Arndt Date: Wed, 21 Aug 2019 14:00:37 +0200 Subject: [PATCH 3/6] Remove white space changes and out commented code --- quit/helpers.py | 3 --- quit/tools/update.py | 3 --- quit/web/modules/endpoint.py | 5 ----- 3 files changed, 11 deletions(-) diff --git a/quit/helpers.py b/quit/helpers.py index 7f84d1b6..caf00646 100644 --- a/quit/helpers.py +++ b/quit/helpers.py @@ -223,8 +223,6 @@ def parse_query_type(query, base=None, default_graph=[], named_graph=[]): parsed_query = parse_named_graph_query(parsed_query) parsed_query = configure_query_dataset(parsed_query, default_graph, named_graph) translated_query = translateQuery(parsed_query, base=base) - - except ParseException: raise UnSupportedQuery() except SparqlProtocolError as e: @@ -310,7 +308,6 @@ def parse_sparql_request(request): elif request.method == "POST": if 'Content-Type' in request.headers: content_mimetype, options = parse_options_header(request.headers['Content-Type']) - if content_mimetype == "application/x-www-form-urlencoded": if 'query' in request.form: default_graph = request.form.getlist('default-graph-uri') diff --git a/quit/tools/update.py b/quit/tools/update.py index 9a17ac57..e9cf10e7 100644 --- a/quit/tools/update.py +++ b/quit/tools/update.py @@ -109,11 +109,9 @@ def evalDrop(ctx, u): """ http://www.w3.org/TR/sparql11-update/#drop """ - res = {} res["type"] = "DROP" res["delta"] = {} - if ctx.dataset.store.graph_aware: for g in _graphAll(ctx, u.graphiri): _append(res["delta"], u.graphiri, 'removals', g) @@ -435,5 +433,4 @@ def evalUpdate(graph, update, initBindings=None, actionLog=False): except Exception: if not u.silent: raise - return res, None diff --git a/quit/web/modules/endpoint.py b/quit/web/modules/endpoint.py index 43a399ff..0e11cf6e 100644 --- a/quit/web/modules/endpoint.py +++ b/quit/web/modules/endpoint.py @@ -1,7 +1,6 @@ import traceback import logging - from flask import Blueprint, request, current_app, make_response from rdflib import ConjunctiveGraph from quit.conf import Feature @@ -64,13 +63,9 @@ def sparql(branch_or_ref): 'to the SPARQL 1.1 standard', 400) else: # TODO allow USING NAMED when fixed in rdflib - #if len(named_graph) > 0: - #print('namedGraph exists') - #return make_response('FROM NAMED and USING NAMED not supported, yet', 400) parse_type = getattr(helpers, 'parse_' + type + '_type') - try: queryType, parsedQuery = parse_type( query, quit.config.namespace, default_graph, named_graph) From 0db436022b846be144da7b6d5938c1de02a31c7e Mon Sep 17 00:00:00 2001 From: Natanael Arndt Date: Wed, 21 Aug 2019 14:03:40 +0200 Subject: [PATCH 4/6] Remove one more white space line --- quit/helpers.py | 1 - 1 file changed, 1 deletion(-) diff --git a/quit/helpers.py b/quit/helpers.py index 
caf00646..455a9d1f 100644 --- a/quit/helpers.py +++ b/quit/helpers.py @@ -161,7 +161,6 @@ def configure_query_dataset(parsed_query, default_graphs, named_graphs): default_graphs: a list of uri strings for default graphs named_graphs: a list of uri strings for named graphs """ - if not isinstance(default_graphs, list) or not isinstance(named_graphs, list): return parsed_query From c9c3234c42169a53d9f58938405a710c63098323 Mon Sep 17 00:00:00 2001 From: Natanael Arndt Date: Wed, 21 Aug 2019 16:13:19 +0200 Subject: [PATCH 5/6] Revert changes regarding feature selection with env variables --- quit/application.py | 6 +----- quit/conf.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/quit/application.py b/quit/application.py index 780f4097..4722ec7a 100644 --- a/quit/application.py +++ b/quit/application.py @@ -209,7 +209,6 @@ def parseArgs(args): configfile_default = "config.ttl" oauthclientid_default = None oauthclientsecret_default = None - feature_default = Feature.Unknown if 'QUIT_PORT' in os.environ: port_default = os.environ['QUIT_PORT'] @@ -235,9 +234,6 @@ def parseArgs(args): if 'QUIT_OAUTH_SECRET' in os.environ: oauthclientsecret_default = os.environ['QUIT_OAUTH_SECRET'] - if 'FEATURE' in os.environ: - feature_default = os.environ['FEATURE'] - parser = argparse.ArgumentParser() parser.add_argument('-b', '--basepath', type=str, default=basepath_default, help=basepathhelp) parser.add_argument( @@ -252,7 +248,7 @@ def parseArgs(args): parser.add_argument('--flask-debug', action='store_true') parser.add_argument('--defaultgraph-union', action='store_true') parser.add_argument('-f', '--features', nargs='*', action=FeaturesAction, - default=feature_default, + default=Feature.Unknown, help=featurehelp) parser.add_argument('-p', '--port', default=port_default, type=int) parser.add_argument('--host', default='::', type=str) diff --git a/quit/conf.py b/quit/conf.py index d67ee1bc..2aa95211 100644 --- a/quit/conf.py +++ b/quit/conf.py @@ -110,7 +110,7 @@ def __initstoreconfig(self, namespace, upstream, targetdir, configfile): return def hasFeature(self, flags): - return flags == (self.features and flags) + return flags == (self.features & flags) def getBindings(self): q = """SELECT DISTINCT ?prefix ?namespace WHERE {{ From a83d34f1febe58bddc7f285fc141db182436cb06 Mon Sep 17 00:00:00 2001 From: Natanael Arndt Date: Wed, 21 Aug 2019 16:24:27 +0200 Subject: [PATCH 6/6] Fix pylava --- quit/helpers.py | 11 ++++++++--- quit/web/modules/endpoint.py | 6 ++++-- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/quit/helpers.py b/quit/helpers.py index 455a9d1f..e573ceeb 100644 --- a/quit/helpers.py +++ b/quit/helpers.py @@ -335,7 +335,9 @@ def parse_sparql_request(request): data = request.data.decode("utf-8") g = Graph() g.parse(data=data, format='application/rdf+xml') - query = 'INSERT DATA { GRAPH <' + graph + '> { ' + g.serialize(format="nt").decode("utf-8") + ' } }' + query = ('INSERT DATA {{ GRAPH <{graph}> ' + '{{ {data} }} }}').format(graph=graph, + data=g.serialize(format="nt").decode("utf-8")) type = 'update' elif request.method == "PUT": if 'Content-Type' in request.headers: @@ -349,7 +351,9 @@ def parse_sparql_request(request): g.parse(data=data, format=content_mimetype) else: g.parse(data=data, format='application/rdf+xml') - query = 'WITH <' + graph + '> DELETE { ?s ?p ?o . 
} INSERT { ' + g.serialize(format="nt").decode("utf-8") + ' } WHERE { ?s ?p ?o .}' + query = ('WITH <{graph}> DELETE {{ ?s ?p ?o }} INSERT {{ {data} }} ' + 'WHERE {{ ?s ?p ?o }}').format(graph=graph, + data=g.serialize(format="nt").decode("utf-8")) type = 'update' comment = 'Replace' @@ -390,7 +394,8 @@ def parse_named_graph_query(query): pass else: graphValue = query[1].where - whereValue = CompValue('GroupGraphPatternSub', part=[CompValue('GraphGraphPattern', term=Variable('selfDefinedGraphVariable'), graph=graphValue)]) + whereValue = CompValue('GroupGraphPatternSub', part=[CompValue('GraphGraphPattern', + term=Variable('selfDefinedGraphVariable'), graph=graphValue)]) query[1].where = whereValue return query diff --git a/quit/web/modules/endpoint.py b/quit/web/modules/endpoint.py index 0e11cf6e..517ab068 100644 --- a/quit/web/modules/endpoint.py +++ b/quit/web/modules/endpoint.py @@ -110,7 +110,8 @@ def sparql(branch_or_ref): logger.debug("target ref is: {}".format(target_ref)) oid = quit.applyQueryOnCommit(parsedQuery, parent_commit_id, target_ref, query=query, default_graph=default_graph, - named_graph=named_graph, queryType=queryType, comment=comment) + named_graph=named_graph, queryType=queryType, + comment=comment) if resolution_method == "merge": logger.debug(("going to merge update into {} because it is at {} but {} was " @@ -140,7 +141,8 @@ def sparql(branch_or_ref): try: oid = quit.applyQueryOnCommit(parsedQuery, branch_or_ref, target_ref, query=query, default_graph=default_graph, - named_graph=named_graph, queryType=queryType, comment=comment) + named_graph=named_graph, queryType=queryType, + comment=comment) response = make_response('', 200) response.headers["X-CurrentBranch"] = target_head if oid is not None:
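
Taken together, the PUT handling added in PATCH 1/6 and cleaned up in PATCH 6/6 lets a client replace the complete content of a named graph with a single HTTP PUT against the SPARQL endpoint: parse_sparql_request parses the request body into an rdflib Graph, rewrites it as a WITH <graph> DELETE { ?s ?p ?o } INSERT { ... } WHERE { ?s ?p ?o } update, and passes comment='Replace' so that applyQueryOnCommit records a "Replace Graph <...>" commit message. A minimal client-side sketch of that interaction, assuming a QuitStore instance at http://localhost:5000 (host, port, and the example resource IRIs are illustrative, not taken from the patch):

import requests

# Assumed endpoint of a locally running QuitStore; adjust to your deployment.
STORE = "http://localhost:5000/sparql"
GRAPH = "http://example.org/graph/people"

# RDF/XML payload that should become the entire new content of the graph.
# RDF/XML is the format the PUT branch falls back to and also accepts explicitly.
payload = """<?xml version="1.0"?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
         xmlns:foaf="http://xmlns.com/foaf/0.1/">
  <rdf:Description rdf:about="http://example.org/alice">
    <foaf:name>Alice</foaf:name>
  </rdf:Description>
</rdf:RDF>"""

# The store rewrites this request into
#   WITH <GRAPH> DELETE { ?s ?p ?o } INSERT { <payload as N-Triples> } WHERE { ?s ?p ?o }
# and commits the result with a "Replace Graph <...>" message.
response = requests.put(
    STORE,
    params={"graph": GRAPH},
    data=payload.encode("utf-8"),
    headers={"Content-Type": "application/rdf+xml"},
)
print(response.status_code)

Sent with POST and the same Content-Type, the body instead goes through the INSERT DATA path added alongside it, so the triples are appended to the named graph rather than replacing it.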