From f86c42a40a889f8b7affa67f99013210ee405ff0 Mon Sep 17 00:00:00 2001
From: Paulo Villegas
Date: Tue, 28 Aug 2018 20:53:37 +0200
Subject: [PATCH] improved PEP8 compliance

---
 CHANGES.txt                |   2 +
 sparqlkernel/connection.py | 319 ++++++++++++++++++-------------------
 sparqlkernel/kernel.py     | 126 +++++++--------
 3 files changed, 223 insertions(+), 224 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index 24f96b0..4421c2e 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,6 +1,8 @@
 v. 1.2.0
 * added %header magic (by gpotdevin)
 * fixed JSON format for raw format (reported by BoPeng)
+* full output file path for magic message
+* improved PEP8 compliance

 v. 1.1.0
 * bugfix: `%lang all` magic didn't work
diff --git a/sparqlkernel/connection.py b/sparqlkernel/connection.py
index 7b1b039..39edfa3 100644
--- a/sparqlkernel/connection.py
+++ b/sparqlkernel/connection.py
@@ -2,6 +2,7 @@
 The class used to manage the connection to SPARQL endpoint: send queries and
 format results for notebook display. Also process all the defined magics
 """
+
 from __future__ import print_function

 import sys
@@ -10,11 +11,9 @@ import json
 import datetime
 import logging
-import pprint
 import os.path

 from IPython.utils.tokenutil import token_at_cursor, line_at_cursor
-from ipykernel.kernelbase import Kernel
 from traitlets import List

 import SPARQLWrapper
@@ -34,55 +33,55 @@
     unicode = str
     touc = str
 else:
-    touc = lambda x : str(x).decode('utf-8','replace')
+    touc = lambda x : str(x).decode('utf-8', 'replace')

 # Valid mime types in the SPARQL response (depending on what we requested)
-mime_type = { SPARQLWrapper.JSON : set(['application/sparql-results+json',
+mime_type = { SPARQLWrapper.JSON: set(['application/sparql-results+json',
                                         'text/javascript']),
-              SPARQLWrapper.N3 : set(['text/rdf+n3', 'text/turtle',
+              SPARQLWrapper.N3: set(['text/rdf+n3', 'text/turtle',
                                       'application/x-turtle', 'application/rdf+xml']),
-              SPARQLWrapper.RDF : set(['text/rdf', 'application/rdf+xml']),
+              SPARQLWrapper.RDF: set(['text/rdf', 'application/rdf+xml']),
               SPARQLWrapper.TURTLE: set(['text/turtle', 'application/x-turtle']),
-              SPARQLWrapper.XML : set(['application/sparql-results+xml']),
+              SPARQLWrapper.XML: set(['application/sparql-results+xml']),
             }

 # ----------------------------------------------------------------------

 # The list of implemented magics with their help, as a pair [param,help-text]
-magics = {
-    '%lsmagics' : [ '', 'list all magics'],
-    '%endpoint' : [ '', 'set SPARQL endpoint. **REQUIRED**'],
-    '%auth': ['(basic|digest|none) ', 'send HTTP authentication'],
-    '%qparam' : [ ' []', 'add (or delete) a persistent custom parameter to the endpoint query'],
-    '%header' : [ ' | OFF', 'add a persistent header line before the query, or delete all defined headers'],
-    '%prefix' : [ ' []', 'set (or delete) a persistent URI prefix for all queries'],
-    '%graph' : [ '', 'set default graph for the queries' ],
-    '%format' : [ 'JSON | N3 | XML | any | default', 'set requested result format' ],
-    '%display' : [ 'raw | table [withtypes] | diagram [svg|png] [withliterals]',
-                   'set display format' ],
-    '%lang' : [ ' [...] | default | all',
-                'language(s) preferred for labels' ],
-    '%show' : [ ' | all',
-                'maximum number of shown results' ],
-    '%outfile' : [ ' | NONE', 'save raw output to a file (use "%d" in name to add cell number, "NONE" to cancel saving)'],
-    '%log' : [ 'critical | error | warning | info | debug',
-               'set logging level'],
+magics = {
+    '%lsmagics': ['', 'list all magics'],
+    '%endpoint': ['', 'set SPARQL endpoint. 
**REQUIRED**'], + '%auth': ['(basic|digest|none) ', 'send HTTP authentication'], + '%qparam': [' []', 'add (or delete) a persistent custom parameter to the endpoint query'], + '%header': [' | OFF', 'add a persistent header line before the query, or delete all defined headers'], + '%prefix': [' []', 'set (or delete) a persistent URI prefix for all queries'], + '%graph': ['', 'set default graph for the queries'], + '%format': ['JSON | N3 | XML | any | default', 'set requested result format'], + '%display': ['raw | table [withtypes] | diagram [svg|png] [withliterals]', + 'set display format'], + '%lang': [' [...] | default | all', + 'language(s) preferred for labels'], + '%show': [' | all', + 'maximum number of shown results'], + '%outfile': [' | NONE', 'save raw output to a file (use "%d" in name to add cell number, "NONE" to cancel saving)'], + '%log': ['critical | error | warning | info | debug', + 'set logging level'], } # The full list of all magics -magic_help = ('Available magics:\n' + - ' '.join( sorted(magics.keys()) ) + +magic_help = ('Available magics:\n' + + ' '.join(sorted(magics.keys())) + '\n\n' + - '\n'.join( ('{0} {1} : {2}'.format(k,*magics[k]) - for k in sorted(magics) ) ) ) + '\n'.join(('{0} {1} : {2}'.format(k, *magics[k]) + for k in sorted(magics)))) # ---------------------------------------------------------------------- -def html_elem( e, ct, withtype=False ): +def html_elem(e, ct, withtype=False): """ Format a result element as an HTML table cell. @param e (list): a pair \c (value,type) @@ -93,17 +92,17 @@ def html_elem( e, ct, withtype=False ): if ct == 'th': return '{0}{1}'.format(*e) if withtype else '{}'.format(e) # Content cell - if e[1] in ('uri','URIRef'): - html = u'<{0} class=val>{2}'.format(ct,e[0],escape(e[0])) + if e[1] in ('uri', 'URIRef'): + html = u'<{0} class=val>{2}'.format(ct, e[0], escape(e[0])) else: - html = u'<{0} class=val>{1}'.format(ct,escape(e[0])) + html = u'<{0} class=val>{1}'.format(ct, escape(e[0])) # Create the optional cell for the type if withtype: - html += u'<{0} class=typ>{1}'.format(ct,e[1]) + html += u'<{0} class=typ>{1}'.format(ct, e[1]) return html -def html_table( data, header=True, limit=None, withtype=False ): +def html_table(data, header=True, limit=None, withtype=False): """ Return a double iterable as an HTML table @param data (iterable): the data to format @@ -131,7 +130,7 @@ def html_table( data, header=True, limit=None, withtype=False ): rn = -1 for rn, row in enumerate(data): html += u''.format(rc) - html += '\n'.join( (html_elem(c,ct,withtype) for c in row) ) + html += '\n'.join((html_elem(c, ct, withtype) for c in row)) html += u'' rc = 'even' if rc == 'odd' else 'odd' ct = 'td' @@ -144,15 +143,15 @@ def html_table( data, header=True, limit=None, withtype=False ): # ---------------------------------------------------------------------- -def jtype( c ): +def jtype(c): """ Return the a string with the data type of a value, for JSON data """ ct = c['type'] - return ct if ct != 'literal' else '{}, {}'.format(ct,c.get('xml:lang')) + return ct if ct != 'literal' else '{}, {}'.format(ct, c.get('xml:lang')) -def gtype( n ): +def gtype(n): """ Return the a string with the data type of a value, for Graph data """ @@ -160,20 +159,20 @@ def gtype( n ): return str(t) if t != 'Literal' else 'Literal, {}'.format(n.language) -def lang_match_json( row, hdr, accepted_languages ): +def lang_match_json(row, hdr, accepted_languages): '''Find if the JSON row contains acceptable language data''' if not accepted_languages: return True - 
languages = set( [ row[c].get('xml:lang') for c in hdr - if c in row and row[c]['type'] == 'literal' ] ) + languages = set([row[c].get('xml:lang') for c in hdr + if c in row and row[c]['type'] == 'literal']) return (not languages) or (languages & accepted_languages) -def lang_match_rdf( triple, accepted_languages ): +def lang_match_rdf(triple, accepted_languages): '''Find if the RDF triple contains acceptable language data''' if not accepted_languages: return True - languages = set( [ n.language for n in triple if isinstance(n,Literal) ] ) + languages = set([n.language for n in triple if isinstance(n, Literal)]) return (not languages) or (languages & accepted_languages) @@ -197,13 +196,13 @@ def json_iterator(hdr, rowlist, lang, add_vtype=False): Optionally add element type, and filter triples by language (on literals) """ # Return the header row - yield hdr if not add_vtype else ( (h, 'type') for h in hdr ) + yield hdr if not add_vtype else ((h, 'type') for h in hdr) # Now the data rows for row in rowlist: - if lang and not lang_match_json( row, hdr, lang ): + if lang and not lang_match_json(row, hdr, lang): continue - yield ( (row[c]['value'], jtype(row[c])) if c in row else ('','') - for c in hdr ) + yield ((row[c]['value'], jtype(row[c])) if c in row else ('', '') + for c in hdr) def rdf_iterator(graph, lang, add_vtype=False): @@ -212,44 +211,44 @@ def rdf_iterator(graph, lang, add_vtype=False): Optionally add element type, and filter triples by language (on literals) """ # Return the header row - hdr = ('subject','predicate','object') - yield hdr if not add_vtype else ( (h, 'type') for h in hdr ) + hdr = ('subject', 'predicate', 'object') + yield hdr if not add_vtype else ((h, 'type') for h in hdr) # Now the data rows for row in graph: - if lang and not lang_match_rdf( row, lang ): + if lang and not lang_match_rdf(row, lang): continue - yield ( (unicode(c), gtype(c)) for c in row) + yield ((unicode(c), gtype(c)) for c in row) -def render_json( result, cfg, **kwargs ): +def render_json(result, cfg, **kwargs): """ Render to output a result in JSON format """ - result = json.loads( result.decode('utf-8') ) + result = json.loads(result.decode('utf-8')) head = result['head'] if 'results' not in result: if 'boolean' in result: r = u'Result: {}'.format(result['boolean']) else: - r = u'Unsupported result: \n' + unicode( result ) - return { 'data' : { 'text/plain' : r }, - 'metadata' : {} } + r = u'Unsupported result: \n' + unicode(result) + return {'data': {'text/plain': r}, + 'metadata': {}} vars = head['vars'] - nrow = len( result['results']['bindings'] ) + nrow = len(result['results']['bindings']) if cfg.dis == 'table': j = json_iterator(vars, result['results']['bindings'], set(cfg.lan), add_vtype=cfg.typ) - n, data = html_table( j, limit=cfg.lmt, withtype=cfg.typ ) - data += div( 'Total: {}, Shown: {}', nrow, n, css="tinfo" ) - data = {'text/html' : div(data) } + n, data = html_table(j, limit=cfg.lmt, withtype=cfg.typ) + data += div('Total: {}, Shown: {}', nrow, n, css="tinfo") + data = {'text/html': div(data)} else: - result = json.dumps( result, - ensure_ascii=False, indent=2, sort_keys=True) - data = {'text/plain' : unicode(result) } - - return { 'data': data , - 'metadata' : {} } + result = json.dumps(result, + ensure_ascii=False, indent=2, sort_keys=True) + data = {'text/plain': unicode(result)} + + return {'data': data, + 'metadata': {}} def xml_row(row, lang): @@ -308,54 +307,54 @@ def render_xml(result, cfg, **kwargs): 'metadata': {}} -def render_graph( result, cfg, **kwargs ): 
+def render_graph(result, cfg, **kwargs): """ Render to output a result that can be parsed as an RDF graph """ # Mapping from MIME types to formats accepted by RDFlib - rdflib_formats = { 'text/rdf+n3' : 'n3', - 'text/turtle' : 'turtle', - 'application/x-turtle' : 'turtle', - 'text/turtle' : 'turtle', - 'application/rdf+xml' : 'xml', - 'text/rdf' : 'xml', - 'application/rdf+xml' : 'xml', - } + rdflib_formats = {'text/rdf+n3': 'n3', + 'text/turtle': 'turtle', + 'application/x-turtle': 'turtle', + 'text/turtle': 'turtle', + 'application/rdf+xml': 'xml', + 'text/rdf': 'xml', + 'application/rdf+xml': 'xml'} + try: - got = kwargs.get('format','text/rdf+n3') + got = kwargs.get('format', 'text/rdf+n3') fmt = rdflib_formats[got] except KeyError: - raise KrnlException( 'Unsupported format for graph processing: {!s}', got ) + raise KrnlException('Unsupported format for graph processing: {!s}', got) g = ConjunctiveGraph() - g.load( StringInputSource(result), format=fmt ) + g.load(StringInputSource(result), format=fmt) display = cfg.dis[0] if is_collection(cfg.dis) else cfg.dis - if display in ('png','svg') : + if display in ('png', 'svg'): try: literal = len(cfg.dis) > 1 and cfg.dis[1].startswith('withlit') - opt = { 'lang' : cfg.lan, 'literal' : literal, 'graphviz' : [] } - data, metadata = draw_graph(g,fmt=display,options=opt) - return { 'data' : data, - 'metadata' : metadata } + opt = {'lang': cfg.lan, 'literal': literal, 'graphviz': []} + data, metadata = draw_graph(g, fmt=display, options=opt) + return {'data': data, + 'metadata': metadata} except Exception as e: - raise KrnlException( 'Exception while drawing graph: {!r}', e ) + raise KrnlException('Exception while drawing graph: {!r}', e) elif display == 'table': it = rdf_iterator(g, set(cfg.lan), add_vtype=cfg.typ) - n, data = html_table(it,limit=cfg.lmt, withtype=cfg.typ) - data += div( 'Shown: {}, Total rows: {}', n if cfg.lmt else 'all', - len(g), css="tinfo" ) - data = {'text/html' : div(data) } + n, data = html_table(it, limit=cfg.lmt, withtype=cfg.typ) + data += div('Shown: {}, Total rows: {}', n if cfg.lmt else 'all', + len(g), css="tinfo") + data = {'text/html': div(data)} elif len(g) == 0: - data = { 'text/html' : div( div('empty graph',css='krn-warn') ) } + data = {'text/html': div(div('empty graph', css='krn-warn'))} else: - data = { 'text/plain' : g.serialize(format='nt').decode('utf-8') } + data = {'text/plain': g.serialize(format='nt').decode('utf-8')} + + return {'data': data, + 'metadata': {}} - return { 'data': data, - 'metadata' : {} } - # ---------------------------------------------------------------------- @@ -363,25 +362,25 @@ class CfgStruct: """ A simple class containing a bunch of fields """ - def __init__(self, **entries): + def __init__(self, **entries): self.__dict__.update(entries) # ---------------------------------------------------------------------- -class SparqlConnection( object ): +class SparqlConnection(object): - def __init__( self, logger=None ): + def __init__(self, logger=None): """ Initialize an empty configuration """ self.log = logger or logging.getLogger(__name__) self.srv = None - self.log.info( "START" ) - self.cfg = CfgStruct( hdr=[], pfx={}, lmt=20, fmt=None, out=None, aut=None, - grh=None, dis='table', typ=False, lan=[], par={} ) + self.log.info("START") + self.cfg = CfgStruct(hdr=[], pfx={}, lmt=20, fmt=None, out=None, aut=None, + grh=None, dis='table', typ=False, lan=[], par={}) - def magic( self, line ): + def magic(self, line): """ Read and process magics @param line (str): the full line 
containing a magic @@ -390,20 +389,20 @@ def magic( self, line ): a Python format string and its arguments) """ # The %lsmagic has no parameters - if line.startswith( '%lsmagic' ): + if line.startswith('%lsmagic'): return magic_help, 'magic-help' # Split line into command & parameters try: cmd, param = line.split(None, 1) except ValueError: - raise KrnlException( "invalid magic: {}", line ) + raise KrnlException("invalid magic: {}", line) cmd = cmd[1:].lower() # Process each magic if cmd == 'endpoint': - self.srv = SPARQLWrapper.SPARQLWrapper( param ) + self.srv = SPARQLWrapper.SPARQLWrapper(param) return ['Endpoint set to: {}', param], 'magic' elif cmd == 'auth': @@ -421,7 +420,7 @@ def magic( self, line ): v = param.split(None, 1) if len(v) == 0: - raise KrnlException( "missing %qparam name" ) + raise KrnlException("missing %qparam name") elif len(v) == 1: self.cfg.par.pop(v[0],None) return ['Param deleted: {}', v[0]] @@ -433,13 +432,13 @@ def magic( self, line ): v = param.split(None, 1) if len(v) == 0: - raise KrnlException( "missing %prefix value" ) + raise KrnlException("missing %prefix value") elif len(v) == 1: - self.cfg.pfx.pop(v[0],None) + self.cfg.pfx.pop(v[0], None) return ['Prefix deleted: {}', v[0]], 'magic' else: self.cfg.pfx[v[0]] = v[1] - return ['Prefix set: {} = {}'] + v, 'magic' + return ['Prefix set: {} = {}'] + v, 'magic' elif cmd == 'show': @@ -449,22 +448,22 @@ def magic( self, line ): try: self.cfg.lmt = int(param) except ValueError as e: - raise KrnlException( "invalid result limit: {}", e ) - l = self.cfg.lmt if self.cfg.lmt is not None else 'unlimited' - return ['Result maximum size: {}', l], 'magic' + raise KrnlException("invalid result limit: {}", e) + sz = self.cfg.lmt if self.cfg.lmt is not None else 'unlimited' + return ['Result maximum size: {}', sz], 'magic' elif cmd == 'format': - fmt_list = { 'JSON' : SPARQLWrapper.JSON, - 'N3' : SPARQLWrapper.N3, - 'XML' : SPARQLWrapper.XML, - 'DEFAULT' : None, - 'ANY' : False } + fmt_list = {'JSON': SPARQLWrapper.JSON, + 'N3': SPARQLWrapper.N3, + 'XML': SPARQLWrapper.XML, + 'DEFAULT': None, + 'ANY': False} try: fmt = param.upper() self.cfg.fmt = fmt_list[fmt] except KeyError: - raise KrnlException( 'unsupported format: {}\nSupported formats are: {!s}', param, list(fmt_list.keys()) ) + raise KrnlException('unsupported format: {}\nSupported formats are: {!s}', param, list(fmt_list.keys())) return ['Return format: {}', fmt], 'magic' elif cmd == 'lang': @@ -475,55 +474,54 @@ def magic( self, line ): elif cmd in 'graph': self.cfg.grh = param if param else None - return [ 'Default graph: {}', param if param else 'None' ], 'magic' + return ['Default graph: {}', param if param else 'None'], 'magic' elif cmd == 'display': - v = param.lower().split(None, 2) - if len(v) == 0 or v[0] not in ('table','raw','graph','diagram'): - raise KrnlException( 'invalid %display command: {}', param ) + v = param.lower().split(None, 2) + if len(v) == 0 or v[0] not in ('table', 'raw', 'graph', 'diagram'): + raise KrnlException('invalid %display command: {}', param) msg_extra = '' - if v[0] not in ('diagram','graph'): + if v[0] not in ('diagram', 'graph'): self.cfg.dis = v[0] - self.cfg.typ = len(v)>1 and v[1].startswith('withtype') + self.cfg.typ = len(v) > 1 and v[1].startswith('withtype') if self.cfg.typ and self.cfg.dis == 'table': msg_extra = '\nShow Types: on' elif len(v) == 1: # graph format, defaults self.cfg.dis = ['svg'] - else: # graph format, with options - if v[1] not in ('png','svg'): - raise KrnlException( 'invalid graph format: 
{}', param ) + else: # graph format, with options + if v[1] not in ('png', 'svg'): + raise KrnlException('invalid graph format: {}', param) if len(v) > 2: if not v[2].startswith('withlit'): - raise KrnlException( 'invalid graph option: {}',param) + raise KrnlException('invalid graph option: {}', param) msg_extra = '\nShow literals: on' self.cfg.dis = v[1:3] display = self.cfg.dis[0] if is_collection(self.cfg.dis) else self.cfg.dis - return [ 'Display: {}{}', display, msg_extra ], 'magic' + return ['Display: {}{}', display, msg_extra], 'magic' elif cmd == 'outfile': if param == 'NONE': self.cfg.out = None - return [ 'no output file' ], 'magic' + return ['no output file'], 'magic' else: self.cfg.out = param - return [ 'Output file: {}', os.path.abspath(param) ], 'magic' + return ['Output file: {}', os.path.abspath(param)], 'magic' elif cmd == 'log': if not param: - raise KrnlException( 'missing log level' ) + raise KrnlException('missing log level') try: - l = param.upper() - parent_logger = logging.getLogger( __name__.rsplit('.',1)[0] ) - #parent_logger.error( '[%s][%s]', __name__, __name__.rsplit('.',1)[0] ) - parent_logger.setLevel( l ) - return ("Logging set to {}", l), 'magic' + lev = param.upper() + parent_logger = logging.getLogger(__name__.rsplit('.', 1)[0]) + parent_logger.setLevel(lev) + return ("Logging set to {}", lev), 'magic' except ValueError: - raise KrnlException( 'unknown log level: {}', param ) + raise KrnlException('unknown log level: {}', param) elif cmd == 'header': @@ -538,21 +536,21 @@ def magic( self, line ): return ['Header added: {}', param], 'magic' else: - raise KrnlException( "magic not found: {}", cmd ) + raise KrnlException("magic not found: {}", cmd) - def query( self, query, num=0, silent=False ): + def query(self, query, num=0, silent=False): """ Launch an SPARQL query, process & convert results and return them """ if self.srv is None: - raise KrnlException('no endpoint defined') + raise KrnlException('no endpoint defined') # Add to the query all predefined SPARQL prefixes if self.cfg.pfx: - prefix = '\n'.join( ( 'PREFIX {} {}'.format(*v) - for v in self.cfg.pfx.items() ) ) - query = prefix + '\n' + query + prefix = '\n'.join(('PREFIX {} {}'.format(*v) + for v in self.cfg.pfx.items())) + query = prefix + '\n' + query # Prepend to the query all predefined Header entries # The header should be before the prefix and other sparql commands @@ -560,14 +558,14 @@ def query( self, query, num=0, silent=False ): query = '\n'.join(self.cfg.hdr) + '\n' + query if self.log.isEnabledFor(logging.DEBUG): - self.log.debug( "\n%50s%s", query, '...' if len(query)>50 else '' ) + self.log.debug("\n%50s%s", query, '...' 
if len(query) > 50 else '') # Select requested format if self.cfg.fmt is not None: fmt_req = self.cfg.fmt - elif re.search(r'\bselect\b',query,re.I): + elif re.search(r'\bselect\b', query, re.I): fmt_req = SPARQLWrapper.JSON - elif re.search(r'\b(?:describe|construct)\b',query,re.I): + elif re.search(r'\b(?:describe|construct)\b', query, re.I): fmt_req = SPARQLWrapper.N3 else: fmt_req = False @@ -581,12 +579,12 @@ def query( self, query, num=0, silent=False ): self.srv.setCredentials(None, None) self.log.debug(u'request-format: %s display: %s', fmt_req, self.cfg.dis) if fmt_req: - self.srv.setReturnFormat( fmt_req ) + self.srv.setReturnFormat(fmt_req) if self.cfg.grh: - self.srv.addParameter("default-graph-uri",self.cfg.grh) + self.srv.addParameter("default-graph-uri", self.cfg.grh) for p in self.cfg.par.items(): - self.srv.addParameter( *p ) - self.srv.setQuery( query ) + self.srv.addParameter(*p) + self.srv.setQuery(query) if not silent or self.cfg.out: try: @@ -594,12 +592,12 @@ def query( self, query, num=0, silent=False ): start = datetime.datetime.utcnow() res = self.srv.query() now = datetime.datetime.utcnow() - self.log.debug( u'response elapsed=%s', now-start ) + self.log.debug(u'response elapsed=%s', now-start) start = now # See what we got info = res.info() - self.log.debug( u'response info: %s', info ) + self.log.debug(u'response info: %s', info) fmt_got = info['content-type'].split(';')[0] if 'content-type' in info else None # Check we received a MIME type according to what we requested @@ -607,14 +605,14 @@ def query( self, query, num=0, silent=False ): raise KrnlException(u'Unexpected response format: {} (requested: {})', fmt_got, fmt_req) # Get the result - data = b''.join( (line for line in res) ) + data = b''.join((line for line in res)) except KrnlException: raise except SPARQLWrapperException as e: - raise KrnlException( u'SPARQL error: {}', touc(e) ) + raise KrnlException(u'SPARQL error: {}', touc(e)) except Exception as e: - raise KrnlException( u'Query processing error: {!s}', e ) + raise KrnlException(u'Query processing error: {!s}', e) # Write the raw result to a file if self.cfg.out: @@ -622,8 +620,8 @@ def query( self, query, num=0, silent=False ): outname = self.cfg.out % num except TypeError: outname = self.cfg.out - with io.open(outname,'wb') as f: - f.write( data ) + with io.open(outname, 'wb') as f: + f.write(data) # Render the result into the desired display format try: @@ -643,12 +641,11 @@ def query( self, query, num=0, silent=False ): r = {'data': {fmt: out}, 'metadata': {}} else: f = render_json if fmt == SPARQLWrapper.JSON else render_xml if fmt == SPARQLWrapper.XML else render_graph - r = f( data, self.cfg, format=fmt_got ) + r = f(data, self.cfg, format=fmt_got) now = datetime.datetime.utcnow() - self.log.debug( u'response formatted=%s', now-start ) + self.log.debug(u'response formatted=%s', now-start) if not silent: return r except Exception as e: - raise KrnlException( u'Response processing error: {}', touc(e) ) - + raise KrnlException(u'Response processing error: {}', touc(e)) diff --git a/sparqlkernel/kernel.py b/sparqlkernel/kernel.py index f978eb5..b0e1641 100644 --- a/sparqlkernel/kernel.py +++ b/sparqlkernel/kernel.py @@ -22,15 +22,15 @@ # ----------------------------------------------------------------------- -def is_magic( token, token_start, buf ): +def is_magic(token, token_start, buf): """ Detect if the passed token corresponds to a magic command: starts with a percent, and it's at the beginning of a line """ - return token[0] == '%' and 
(token_start==0 or buf[token_start-1] == '\n') + return token[0] == '%' and (token_start == 0 or buf[token_start-1] == '\n') -def token_at_cursor( code, pos=0 ): +def token_at_cursor(code, pos=0): """ Find the token present at the passed position in the code buffer :return (tuple): a pair (token, start_position) @@ -38,13 +38,13 @@ def token_at_cursor( code, pos=0 ): l = len(code) end = start = pos # Go forwards while we get alphanumeric chars - while end0 and code[start-1].isalpha(): - start-=1 + while start > 0 and code[start-1].isalpha(): + start -= 1 # If previous character is a %, add it (potential magic) - if start>0 and code[start-1] == '%': + if start > 0 and code[start-1] == '%': start -= 1 return code[start:end], start @@ -62,11 +62,11 @@ class SparqlKernel(Kernel): banner = "SPARQL kernel" language = LANGUAGE language_version = '1.1' - language_info = { 'name': 'sparql', - 'mimetype': 'application/sparql-query', - 'codemirror_mode': { "name": "sparql" }, - 'pygments_lexer' : 'sparql-nb', - } + language_info = {'name': 'sparql', + 'mimetype': 'application/sparql-query', + 'codemirror_mode': {"name": "sparql"}, + 'pygments_lexer': 'sparql-nb'} + # Add some items to notebook help menu help_links = List([ @@ -81,7 +81,7 @@ class SparqlKernel(Kernel): { 'text': "SPARQL Tutorial", 'url': "https://jena.apache.org/tutorials/sparql.html", - },]) + }, ]) # ----------------------------------------------------------------- @@ -91,19 +91,19 @@ def __init__(self, **kwargs): Initialize the object """ # Define logging status before calling parent constructor - set_logging( level='WARN' ) + set_logging(level='WARN') # Initialize parent class super(SparqlKernel, self).__init__(**kwargs) # Define our own logger, different from parent's (i.e. self.log) - self._klog = logging.getLogger( __name__ ) - self._klog.info( 'START' ) + self._klog = logging.getLogger(__name__) + self._klog.info('START') # Create the object holding the SPARQL connections self._k = SparqlConnection() # ----------------------------------------------------------------- - def _send( self, data, msg_type='ok', silent=False ): + def _send(self, data, msg_type='ok', silent=False): """ Send a response to the frontend and return an execute message @param data: response to send @@ -115,36 +115,37 @@ def _send( self, data, msg_type='ok', silent=False ): if data is not None: # log the message try: - self._klog.debug(u"msg to frontend (%d): %.160s...",silent,data) + self._klog.debug(u"msg to frontend (%d): %.160s...", silent, data) except Exception as e: - self._klog.warn(u"can't log response: %s",e) + self._klog.warn(u"can't log response: %s", e) # send it to the frontend if not silent: if msg_type != 'raw': - data = data_msg( data, mtype=msg_type ) + data = data_msg(data, mtype=msg_type) self.send_response(self.iopub_socket, 'display_data', data) # Result message return {'status': 'error' if msg_type == 'error' else 'ok', # The base class will increment the execution count 'execution_count': self.execution_count, - 'payload' : [], - 'user_expressions': {}, - } + 'payload': [], + 'user_expressions': {} + } - def do_execute( self, code, silent, store_history=True, - user_expressions=None, allow_stdin=False ): + + def do_execute(self, code, silent, store_history=True, + user_expressions=None, allow_stdin=False): """ Method called to execute a cell """ - self._klog.info( "[%.30s] [%d] [%s]", code, silent, user_expressions ) + self._klog.info("[%.30s] [%d] [%s]", code, silent, user_expressions) - # Split lines and remove empty lines & 
comments
-        code_noc = [ line.strip() for line in code.split('\n')
-                     if line and line[0] != '#' ]
+        # Split lines and remove empty lines & comments
+        code_noc = [line.strip() for line in code.split('\n')
+                    if line and line[0] != '#']
         if not code_noc:
-            return self._send( None )
+            return self._send(None)

         # Process
         try:
@@ -153,22 +154,22 @@ def do_execute( self, code, silent, store_history=True,
             for line in code_noc:
                 if line[0] != '%':
                     break
-                magic_lines.append( line )
+                magic_lines.append(line)

             # Process magics. Once done, remove them from the query buffer
             if magic_lines:
-                out = [ self._k.magic(line) for line in magic_lines ]
-                self._send( out, 'multi', silent=silent )
-                code = '\n'.join( code_noc[len(magic_lines):] )
+                out = [self._k.magic(line) for line in magic_lines]
+                self._send(out, 'multi', silent=silent)
+                code = '\n'.join(code_noc[len(magic_lines):])

             # If we have a regular SPARQL query, process it now
-            result = self._k.query( code, num=self.execution_count ) if code else None
+            result = self._k.query(code, num=self.execution_count) if code else None

             # Return the result
-            return self._send( result, 'raw', silent=silent )
+            return self._send(result, 'raw', silent=silent)

         except Exception as e:
-            return self._send( e, 'error', silent=silent )
+            return self._send(e, 'error', silent=silent)


     # -----------------------------------------------------------------

@@ -177,54 +178,53 @@ def do_inspect(self, code, cursor_pos, detail_level=0):
         """
         Method called on help requests
         """
-        self._klog.info( "{%s}", code[cursor_pos:cursor_pos+10] )
+        self._klog.info("{%s}", code[cursor_pos:cursor_pos+10])

         # Find the token for which help is requested
-        token, start = token_at_cursor( code, cursor_pos )
-        self._klog.debug( "token={%s} {%d}", token, detail_level )
+        token, start = token_at_cursor(code, cursor_pos)
+        self._klog.debug("token={%s} {%d}", token, detail_level)

         # Find the help for this token
-        if not is_magic( token, start, code ):
-            info = sparql_help.get( token.upper(), None )
+        if not is_magic(token, start, code):
+            info = sparql_help.get(token.upper(), None)
         elif token == '%':
             info = magic_help
         else:
-            info = magics.get( token, None )
-            if info:
-                info = '{} {}\n\n{}'.format(token,*info)
+            info = magics.get(token, None)
+            if info:
+                info = '{} {}\n\n{}'.format(token, *info)

-        return { 'status' : 'ok',
-                 'data' : { 'text/plain' : info },
-                 'metadata' : {},
-                 'found' : info is not None
+        return {'status': 'ok',
+                'data': {'text/plain': info},
+                'metadata': {},
+                'found': info is not None
                 }


     # -----------------------------------------------------------------

-    def do_complete(self, code, cursor_pos ):
+    def do_complete(self, code, cursor_pos):
         """
         Method called on autocompletion requests
         """
-        self._klog.info( "{%s}", code[cursor_pos:cursor_pos+10] )
+        self._klog.info("{%s}", code[cursor_pos:cursor_pos+10])

-        token, start = token_at_cursor( code, cursor_pos )
+        token, start = token_at_cursor(code, cursor_pos)
         tkn_low = token.lower()
-        if is_magic( token, start, code ):
-            matches = [ k for k in magics.keys() if k.startswith(tkn_low) ]
+        if is_magic(token, start, code):
+            matches = [k for k in magics.keys() if k.startswith(tkn_low)]
         else:
-            matches = [ sparql_names[k] for k in sparql_names
-                        if k.startswith(tkn_low) ]
-        self._klog.debug( "token={%s} matches={%r}", token, matches )
+            matches = [sparql_names[k] for k in sparql_names
+                       if k.startswith(tkn_low)]
+        self._klog.debug("token={%s} matches={%r}", token, matches)

         if matches:
-            return {'status': 'ok',
-                    'cursor_start' : start,
-                    'cursor_end': start+len(token),
-                    'matches' : matches }
-
+            return {'status': 'ok',
+                    'cursor_start': start,
+                    'cursor_end': start+len(token),
+                    'matches': matches}


 # -----------------------------------------------------------------

 if __name__ == '__main__':
     from ipykernel.kernelapp import IPKernelApp
-    IPKernelApp.launch_instance( kernel_class=SparqlKernel )
+    IPKernelApp.launch_instance(kernel_class=SparqlKernel)
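
A quick way to verify the effect of this patch is to run a PEP8 checker over the two modified modules. The snippet below is only a sketch, not part of the patch: it assumes the pycodestyle package is installed and that it is run from the repository root, and long lines (E501) may still be reported since this commit focuses on spacing and layout rather than line length.

    import pycodestyle

    # Run the pycodestyle (PEP8) checks on the two files touched by this commit
    style = pycodestyle.StyleGuide()
    report = style.check_files(['sparqlkernel/connection.py', 'sparqlkernel/kernel.py'])
    print('remaining PEP8 issues:', report.total_errors)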