diff --git a/.gitignore b/.gitignore
index ae5fe72..6c4644e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,7 @@
**/*.pyc
.vscode/
lib/*
-!lib/.gitkeep
\ No newline at end of file
+!lib/.gitkeep
+
+.config
+.irb_history
\ No newline at end of file
diff --git a/github.py b/github.py
index 5099a83..e7c82dd 100644
--- a/github.py
+++ b/github.py
@@ -1,7 +1,10 @@
# encoding: utf-8
import sys
import re
-from workflow import Workflow3, ICON_WEB, web, ICON_ERROR
+from workflow import Workflow3, ICON_ERROR
+from urllib.request import Request
+from urllib import request
+from json.decoder import JSONDecoder
log = None
@@ -16,14 +19,18 @@ def get_token(wf):
)
wf.send_feedback()
sys.exit(1)
- return token
+ return token.decode('utf-8')
-def request(token, url='https://api.github.com/user/repos'):
- r = web.get(url, headers={'Authorization': 'token %s' % token})
- r.raise_for_status()
- return r
+def repos_request(token, url='https://api.github.com/user/repos'):
+ request_with_header = Request(url, headers={'Authorization': 'token %s' % token})
+ return request.urlopen(request_with_header)
+def decoded_repos_request(token, url='https://api.github.com/user/repos'):
+ request_with_header = Request(url, headers={'Authorization': 'token %s' % token})
+ f = request.urlopen(request_with_header)
+
+ return f.read().decode('utf-8')
def get_last(r):
link = r.headers.get('link')
@@ -42,22 +49,22 @@ def get_all_urls(r):
inclusive_last = last + 1
return ['https://api.github.com/user/repos?page=%d' % page for page in range(2, inclusive_last)]
-
def get_all(token):
from concurrent.futures import ThreadPoolExecutor, as_completed
- r = request(token)
- repos = r.json()
+ r = repos_request(token)
+ repos = JSONDecoder().decode(r.read().decode('utf-8'))
urls = get_all_urls(r)
pool = ThreadPoolExecutor(20)
- futures = [pool.submit(request,token,url) for url in urls]
+ futures = [pool.submit(decoded_repos_request,token,url) for url in urls]
results = [r.result() for r in as_completed(futures)]
for result in results:
- repos += result.json()
+ repos += JSONDecoder().decode(result)
return repos
def get_cached_repos(wf):
- return wf.stored_data('repos')
+ repos = wf.stored_data('repos')
+ return repos
def load_repos(wf, token):
@@ -79,7 +86,7 @@ def main(wf):
# TODO provide helper to take them to documentation to get api token
# configured correctly
token = args[1]
- wf.store_data('token', token)
+ wf.store_data('token', token.encode())
load_repos(wf, token)
return
@@ -91,7 +98,7 @@ def main(wf):
repos = get_repos(wf, token)
if args:
- repos = wf.filter(args[0], repos, lambda repo: repo['full_name'])
+ repos = wf.filter(args[0], repos, lambda repo: repo.get('full_name'))
if not repos:
wf.warn_empty('No repos found. Refresh repos or try a different query.')
diff --git a/info.plist b/info.plist
index b111890..7050209 100644
--- a/info.plist
+++ b/info.plist
@@ -3,7 +3,7 @@
bundleid
- com.matthewmcg.github
+ com.ramsey.github
connections
089FEA1F-952B-4635-91C6-24861402B336
@@ -132,7 +132,7 @@
createdby
- Matthew McGarvey
+ Ramsey Solutions
description
disabled
@@ -148,6 +148,8 @@
alfredfiltersresultsmatchmode
0
+ argumenttreatemptyqueryasnil
+
argumenttrimmode
0
argumenttype
@@ -167,7 +169,7 @@
runningsubtext
Github repos are being loaded...
script
- python github.py {query}
+ /usr/bin/python3 github.py {query}
scriptargtype
0
scriptfile
@@ -186,7 +188,7 @@
uid
F32B16BE-138F-4523-960F-775878AA1D76
version
- 2
+ 3
config
@@ -274,7 +276,7 @@
escaping
102
script
- python github.py --auth {query}
+ /usr/bin/python3 github.py --auth {query}
scriptargtype
0
scriptfile
@@ -379,7 +381,7 @@
escaping
102
script
- python github.py --refresh
+ /usr/bin/python3 github.py --refresh
scriptargtype
1
scriptfile
@@ -532,6 +534,6 @@
webaddress
- https://github.com/matthewmcgarvey/alfred-github-workflow
+ https://github.com/lampo/alfred-github-workflow
diff --git a/install-libraries.sh b/install-libraries.sh
index 462fa4f..3f146c3 100755
--- a/install-libraries.sh
+++ b/install-libraries.sh
@@ -1,3 +1,3 @@
#!/usr/bin/env bash
-pip install -r requirements.txt --target=./lib
\ No newline at end of file
+pip3 install -r requirements.txt --target=./lib
\ No newline at end of file
diff --git a/workflow/background.py b/workflow/background.py
index cd5400b..82bcfe6 100644
--- a/workflow/background.py
+++ b/workflow/background.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2014 deanishe@deanishe.net
@@ -8,8 +7,8 @@
# Created on 2014-04-06
#
-"""
-This module provides an API to run commands in background processes.
+"""This module provides an API to run commands in background processes.
+
Combine with the :ref:`caching API ` to work from cached data
while you fetch fresh data in the background.
@@ -17,17 +16,16 @@
and examples.
"""
-from __future__ import print_function, unicode_literals
-import signal
-import sys
import os
-import subprocess
import pickle
+import signal
+import subprocess
+import sys
from workflow import Workflow
-__all__ = ['is_running', 'run_in_background']
+__all__ = ["is_running", "run_in_background"]
_wf = None
@@ -52,7 +50,7 @@ def _arg_cache(name):
:rtype: ``unicode`` filepath
"""
- return wf().cachefile(name + '.argcache')
+ return wf().cachefile(name + ".argcache")
def _pid_file(name):
@@ -64,7 +62,7 @@ def _pid_file(name):
:rtype: ``unicode`` filepath
"""
- return wf().cachefile(name + '.pid')
+ return wf().cachefile(name + ".pid")
def _process_exists(pid):
@@ -96,16 +94,16 @@ def _job_pid(name):
if not os.path.exists(pidfile):
return
- with open(pidfile, 'rb') as fp:
- pid = int(fp.read())
+ with open(pidfile, "rb") as fp:
+ read = fp.read()
+ # print(str(read))
+ pid = int.from_bytes(read, sys.byteorder)
+ # print(pid)
if _process_exists(pid):
return pid
- try:
- os.unlink(pidfile)
- except Exception: # pragma: no cover
- pass
+ os.unlink(pidfile)
def is_running(name):
@@ -123,8 +121,9 @@ def is_running(name):
return False
-def _background(pidfile, stdin='/dev/null', stdout='/dev/null',
- stderr='/dev/null'): # pragma: no cover
+def _background(
+ pidfile, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"
+): # pragma: no cover
"""Fork the current process into a background daemon.
:param pidfile: file to write PID of daemon process to.
@@ -137,42 +136,43 @@ def _background(pidfile, stdin='/dev/null', stdout='/dev/null',
:type stderr: filepath
"""
+
def _fork_and_exit_parent(errmsg, wait=False, write=False):
try:
pid = os.fork()
if pid > 0:
if write: # write PID of child process to `pidfile`
- tmp = pidfile + '.tmp'
- with open(tmp, 'wb') as fp:
- fp.write(str(pid))
+ tmp = pidfile + ".tmp"
+ with open(tmp, "wb") as fp:
+ fp.write(pid.to_bytes(4, sys.byteorder))
os.rename(tmp, pidfile)
if wait: # wait for child process to exit
os.waitpid(pid, 0)
os._exit(0)
except OSError as err:
- _log().critical('%s: (%d) %s', errmsg, err.errno, err.strerror)
+ _log().critical("%s: (%d) %s", errmsg, err.errno, err.strerror)
raise err
# Do first fork and wait for second fork to finish.
- _fork_and_exit_parent('fork #1 failed', wait=True)
+ _fork_and_exit_parent("fork #1 failed", wait=True)
# Decouple from parent environment.
os.chdir(wf().workflowdir)
os.setsid()
# Do second fork and write PID to pidfile.
- _fork_and_exit_parent('fork #2 failed', write=True)
+ _fork_and_exit_parent("fork #2 failed", write=True)
# Now I am a daemon!
# Redirect standard file descriptors.
- si = open(stdin, 'r', 0)
- so = open(stdout, 'a+', 0)
- se = open(stderr, 'a+', 0)
- if hasattr(sys.stdin, 'fileno'):
+ si = open(stdin, "r", 1)
+ so = open(stdout, "a+", 1)
+ se = open(stderr, "a+", 1)
+ if hasattr(sys.stdin, "fileno"):
os.dup2(si.fileno(), sys.stdin.fileno())
- if hasattr(sys.stdout, 'fileno'):
+ if hasattr(sys.stdout, "fileno"):
os.dup2(so.fileno(), sys.stdout.fileno())
- if hasattr(sys.stderr, 'fileno'):
+ if hasattr(sys.stderr, "fileno"):
os.dup2(se.fileno(), sys.stderr.fileno())
@@ -222,25 +222,25 @@ def run_in_background(name, args, **kwargs):
"""
if is_running(name):
- _log().info('[%s] job already running', name)
+ _log().info("[%s] job already running", name)
return
argcache = _arg_cache(name)
# Cache arguments
- with open(argcache, 'wb') as fp:
- pickle.dump({'args': args, 'kwargs': kwargs}, fp)
- _log().debug('[%s] command cached: %s', name, argcache)
+ with open(argcache, "wb") as fp:
+ pickle.dump({"args": args, "kwargs": kwargs}, fp)
+ _log().debug("[%s] command cached: %s", name, argcache)
# Call this script
- cmd = ['/usr/bin/python', __file__, name]
- _log().debug('[%s] passing job to background runner: %r', name, cmd)
+ cmd = [sys.executable, "-m", "workflow.background", name]
+ _log().debug("[%s] passing job to background runner: %r", name, cmd)
retcode = subprocess.call(cmd)
if retcode: # pragma: no cover
- _log().error('[%s] background runner failed with %d', name, retcode)
+ _log().error("[%s] background runner failed with %d", name, retcode)
else:
- _log().debug('[%s] background job started', name)
+ _log().debug("[%s] background job started", name)
return retcode
@@ -256,7 +256,7 @@ def main(wf): # pragma: no cover
name = wf.args[0]
argcache = _arg_cache(name)
if not os.path.exists(argcache):
- msg = '[{0}] command cache not found: {1}'.format(name, argcache)
+ msg = "[{0}] command cache not found: {1}".format(name, argcache)
log.critical(msg)
raise IOError(msg)
@@ -265,29 +265,29 @@ def main(wf): # pragma: no cover
_background(pidfile)
# Load cached arguments
- with open(argcache, 'rb') as fp:
+ with open(argcache, "rb") as fp:
data = pickle.load(fp)
# Cached arguments
- args = data['args']
- kwargs = data['kwargs']
+ args = data["args"]
+ kwargs = data["kwargs"]
# Delete argument cache file
os.unlink(argcache)
try:
# Run the command
- log.debug('[%s] running command: %r', name, args)
+ log.debug("[%s] running command: %r", name, args)
retcode = subprocess.call(args, **kwargs)
if retcode:
- log.error('[%s] command failed with status %d', name, retcode)
+ log.error("[%s] command failed with status %d", name, retcode)
finally:
os.unlink(pidfile)
- log.debug('[%s] job complete', name)
+ log.debug("[%s] job complete", name)
-if __name__ == '__main__': # pragma: no cover
- wf().run(main)
+if __name__ == "__main__": # pragma: no cover
+ wf().run(main)
\ No newline at end of file
diff --git a/workflow/web.py b/workflow/web.py
deleted file mode 100644
index d64bb6f..0000000
--- a/workflow/web.py
+++ /dev/null
@@ -1,678 +0,0 @@
-# encoding: utf-8
-#
-# Copyright (c) 2014 Dean Jackson
-#
-# MIT Licence. See http://opensource.org/licenses/MIT
-#
-# Created on 2014-02-15
-#
-
-"""Lightweight HTTP library with a requests-like interface."""
-
-import codecs
-import json
-import mimetypes
-import os
-import random
-import re
-import socket
-import string
-import unicodedata
-import urllib
-import urllib2
-import urlparse
-import zlib
-
-
-USER_AGENT = u'Alfred-Workflow/1.19 (+http://www.deanishe.net/alfred-workflow)'
-
-# Valid characters for multipart form data boundaries
-BOUNDARY_CHARS = string.digits + string.ascii_letters
-
-# HTTP response codes
-RESPONSES = {
- 100: 'Continue',
- 101: 'Switching Protocols',
- 200: 'OK',
- 201: 'Created',
- 202: 'Accepted',
- 203: 'Non-Authoritative Information',
- 204: 'No Content',
- 205: 'Reset Content',
- 206: 'Partial Content',
- 300: 'Multiple Choices',
- 301: 'Moved Permanently',
- 302: 'Found',
- 303: 'See Other',
- 304: 'Not Modified',
- 305: 'Use Proxy',
- 307: 'Temporary Redirect',
- 400: 'Bad Request',
- 401: 'Unauthorized',
- 402: 'Payment Required',
- 403: 'Forbidden',
- 404: 'Not Found',
- 405: 'Method Not Allowed',
- 406: 'Not Acceptable',
- 407: 'Proxy Authentication Required',
- 408: 'Request Timeout',
- 409: 'Conflict',
- 410: 'Gone',
- 411: 'Length Required',
- 412: 'Precondition Failed',
- 413: 'Request Entity Too Large',
- 414: 'Request-URI Too Long',
- 415: 'Unsupported Media Type',
- 416: 'Requested Range Not Satisfiable',
- 417: 'Expectation Failed',
- 500: 'Internal Server Error',
- 501: 'Not Implemented',
- 502: 'Bad Gateway',
- 503: 'Service Unavailable',
- 504: 'Gateway Timeout',
- 505: 'HTTP Version Not Supported'
-}
-
-
-def str_dict(dic):
- """Convert keys and values in ``dic`` into UTF-8-encoded :class:`str`.
-
- :param dic: Mapping of Unicode strings
- :type dic: dict
- :returns: Dictionary containing only UTF-8 strings
- :rtype: dict
-
- """
- if isinstance(dic, CaseInsensitiveDictionary):
- dic2 = CaseInsensitiveDictionary()
- else:
- dic2 = {}
- for k, v in dic.items():
- if isinstance(k, unicode):
- k = k.encode('utf-8')
- if isinstance(v, unicode):
- v = v.encode('utf-8')
- dic2[k] = v
- return dic2
-
-
-class NoRedirectHandler(urllib2.HTTPRedirectHandler):
- """Prevent redirections."""
-
- def redirect_request(self, *args):
- return None
-
-
-# Adapted from https://gist.github.com/babakness/3901174
-class CaseInsensitiveDictionary(dict):
- """Dictionary with caseless key search.
-
- Enables case insensitive searching while preserving case sensitivity
- when keys are listed, ie, via keys() or items() methods.
-
- Works by storing a lowercase version of the key as the new key and
- stores the original key-value pair as the key's value
- (values become dictionaries).
-
- """
-
- def __init__(self, initval=None):
- """Create new case-insensitive dictionary."""
- if isinstance(initval, dict):
- for key, value in initval.iteritems():
- self.__setitem__(key, value)
-
- elif isinstance(initval, list):
- for (key, value) in initval:
- self.__setitem__(key, value)
-
- def __contains__(self, key):
- return dict.__contains__(self, key.lower())
-
- def __getitem__(self, key):
- return dict.__getitem__(self, key.lower())['val']
-
- def __setitem__(self, key, value):
- return dict.__setitem__(self, key.lower(), {'key': key, 'val': value})
-
- def get(self, key, default=None):
- try:
- v = dict.__getitem__(self, key.lower())
- except KeyError:
- return default
- else:
- return v['val']
-
- def update(self, other):
- for k, v in other.items():
- self[k] = v
-
- def items(self):
- return [(v['key'], v['val']) for v in dict.itervalues(self)]
-
- def keys(self):
- return [v['key'] for v in dict.itervalues(self)]
-
- def values(self):
- return [v['val'] for v in dict.itervalues(self)]
-
- def iteritems(self):
- for v in dict.itervalues(self):
- yield v['key'], v['val']
-
- def iterkeys(self):
- for v in dict.itervalues(self):
- yield v['key']
-
- def itervalues(self):
- for v in dict.itervalues(self):
- yield v['val']
-
-
-class Response(object):
- """
- Returned by :func:`request` / :func:`get` / :func:`post` functions.
-
- Simplified version of the ``Response`` object in the ``requests`` library.
-
- >>> r = request('http://www.google.com')
- >>> r.status_code
- 200
- >>> r.encoding
- ISO-8859-1
- >>> r.content # bytes
- ...
- >>> r.text # unicode, decoded according to charset in HTTP header/meta tag
- u' ...'
- >>> r.json() # content parsed as JSON
-
- """
-
- def __init__(self, request, stream=False):
- """Call `request` with :mod:`urllib2` and process results.
-
- :param request: :class:`urllib2.Request` instance
- :param stream: Whether to stream response or retrieve it all at once
- :type stream: bool
-
- """
- self.request = request
- self._stream = stream
- self.url = None
- self.raw = None
- self._encoding = None
- self.error = None
- self.status_code = None
- self.reason = None
- self.headers = CaseInsensitiveDictionary()
- self._content = None
- self._content_loaded = False
- self._gzipped = False
-
- # Execute query
- try:
- self.raw = urllib2.urlopen(request)
- except urllib2.HTTPError as err:
- self.error = err
- try:
- self.url = err.geturl()
- # sometimes (e.g. when authentication fails)
- # urllib can't get a URL from an HTTPError
- # This behaviour changes across Python versions,
- # so no test cover (it isn't important).
- except AttributeError: # pragma: no cover
- pass
- self.status_code = err.code
- else:
- self.status_code = self.raw.getcode()
- self.url = self.raw.geturl()
- self.reason = RESPONSES.get(self.status_code)
-
- # Parse additional info if request succeeded
- if not self.error:
- headers = self.raw.info()
- self.transfer_encoding = headers.getencoding()
- self.mimetype = headers.gettype()
- for key in headers.keys():
- self.headers[key.lower()] = headers.get(key)
-
- # Is content gzipped?
- # Transfer-Encoding appears to not be used in the wild
- # (contrary to the HTTP standard), but no harm in testing
- # for it
- if ('gzip' in headers.get('content-encoding', '') or
- 'gzip' in headers.get('transfer-encoding', '')):
- self._gzipped = True
-
- @property
- def stream(self):
- """Whether response is streamed.
-
- Returns:
- bool: `True` if response is streamed.
- """
- return self._stream
-
- @stream.setter
- def stream(self, value):
- if self._content_loaded:
- raise RuntimeError("`content` has already been read from "
- "this Response.")
-
- self._stream = value
-
- def json(self):
- """Decode response contents as JSON.
-
- :returns: object decoded from JSON
- :rtype: list, dict or unicode
-
- """
- return json.loads(self.content, self.encoding or 'utf-8')
-
- @property
- def encoding(self):
- """Text encoding of document or ``None``.
-
- :returns: Text encoding if found.
- :rtype: str or ``None``
-
- """
- if not self._encoding:
- self._encoding = self._get_encoding()
-
- return self._encoding
-
- @property
- def content(self):
- """Raw content of response (i.e. bytes).
-
- :returns: Body of HTTP response
- :rtype: str
-
- """
- if not self._content:
-
- # Decompress gzipped content
- if self._gzipped:
- decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
- self._content = decoder.decompress(self.raw.read())
-
- else:
- self._content = self.raw.read()
-
- self._content_loaded = True
-
- return self._content
-
- @property
- def text(self):
- """Unicode-decoded content of response body.
-
- If no encoding can be determined from HTTP headers or the content
- itself, the encoded response body will be returned instead.
-
- :returns: Body of HTTP response
- :rtype: unicode or str
-
- """
- if self.encoding:
- return unicodedata.normalize('NFC', unicode(self.content,
- self.encoding))
- return self.content
-
- def iter_content(self, chunk_size=4096, decode_unicode=False):
- """Iterate over response data.
-
- .. versionadded:: 1.6
-
- :param chunk_size: Number of bytes to read into memory
- :type chunk_size: int
- :param decode_unicode: Decode to Unicode using detected encoding
- :type decode_unicode: bool
- :returns: iterator
-
- """
- if not self.stream:
- raise RuntimeError("You cannot call `iter_content` on a "
- "Response unless you passed `stream=True`"
- " to `get()`/`post()`/`request()`.")
-
- if self._content_loaded:
- raise RuntimeError(
- "`content` has already been read from this Response.")
-
- def decode_stream(iterator, r):
-
- decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
-
- for chunk in iterator:
- data = decoder.decode(chunk)
- if data:
- yield data
-
- data = decoder.decode(b'', final=True)
- if data: # pragma: no cover
- yield data
-
- def generate():
-
- if self._gzipped:
- decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
-
- while True:
- chunk = self.raw.read(chunk_size)
- if not chunk:
- break
-
- if self._gzipped:
- chunk = decoder.decompress(chunk)
-
- yield chunk
-
- chunks = generate()
-
- if decode_unicode and self.encoding:
- chunks = decode_stream(chunks, self)
-
- return chunks
-
- def save_to_path(self, filepath):
- """Save retrieved data to file at ``filepath``.
-
- .. versionadded: 1.9.6
-
- :param filepath: Path to save retrieved data.
-
- """
- filepath = os.path.abspath(filepath)
- dirname = os.path.dirname(filepath)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
-
- self.stream = True
-
- with open(filepath, 'wb') as fileobj:
- for data in self.iter_content():
- fileobj.write(data)
-
- def raise_for_status(self):
- """Raise stored error if one occurred.
-
- error will be instance of :class:`urllib2.HTTPError`
- """
- if self.error is not None:
- raise self.error
- return
-
- def _get_encoding(self):
- """Get encoding from HTTP headers or content.
-
- :returns: encoding or `None`
- :rtype: unicode or ``None``
-
- """
- headers = self.raw.info()
- encoding = None
-
- if headers.getparam('charset'):
- encoding = headers.getparam('charset')
-
- # HTTP Content-Type header
- for param in headers.getplist():
- if param.startswith('charset='):
- encoding = param[8:]
- break
-
- if not self.stream: # Try sniffing response content
- # Encoding declared in document should override HTTP headers
- if self.mimetype == 'text/html': # sniff HTML headers
- m = re.search("""""",
- self.content)
- if m:
- encoding = m.group(1)
-
- elif ((self.mimetype.startswith('application/') or
- self.mimetype.startswith('text/')) and
- 'xml' in self.mimetype):
- m = re.search("""]*\?>""",
- self.content)
- if m:
- encoding = m.group(1)
-
- # Format defaults
- if self.mimetype == 'application/json' and not encoding:
- # The default encoding for JSON
- encoding = 'utf-8'
-
- elif self.mimetype == 'application/xml' and not encoding:
- # The default for 'application/xml'
- encoding = 'utf-8'
-
- if encoding:
- encoding = encoding.lower()
-
- return encoding
-
-
-def request(method, url, params=None, data=None, headers=None, cookies=None,
- files=None, auth=None, timeout=60, allow_redirects=False,
- stream=False):
- """Initiate an HTTP(S) request. Returns :class:`Response` object.
-
- :param method: 'GET' or 'POST'
- :type method: unicode
- :param url: URL to open
- :type url: unicode
- :param params: mapping of URL parameters
- :type params: dict
- :param data: mapping of form data ``{'field_name': 'value'}`` or
- :class:`str`
- :type data: dict or str
- :param headers: HTTP headers
- :type headers: dict
- :param cookies: cookies to send to server
- :type cookies: dict
- :param files: files to upload (see below).
- :type files: dict
- :param auth: username, password
- :type auth: tuple
- :param timeout: connection timeout limit in seconds
- :type timeout: int
- :param allow_redirects: follow redirections
- :type allow_redirects: bool
- :param stream: Stream content instead of fetching it all at once.
- :type stream: bool
- :returns: Response object
- :rtype: :class:`Response`
-
-
- The ``files`` argument is a dictionary::
-
- {'fieldname' : { 'filename': 'blah.txt',
- 'content': '',
- 'mimetype': 'text/plain'}
- }
-
- * ``fieldname`` is the name of the field in the HTML form.
- * ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
- be used to guess the mimetype, or ``application/octet-stream``
- will be used.
-
- """
- # TODO: cookies
- socket.setdefaulttimeout(timeout)
-
- # Default handlers
- openers = []
-
- if not allow_redirects:
- openers.append(NoRedirectHandler())
-
- if auth is not None: # Add authorisation handler
- username, password = auth
- password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
- password_manager.add_password(None, url, username, password)
- auth_manager = urllib2.HTTPBasicAuthHandler(password_manager)
- openers.append(auth_manager)
-
- # Install our custom chain of openers
- opener = urllib2.build_opener(*openers)
- urllib2.install_opener(opener)
-
- if not headers:
- headers = CaseInsensitiveDictionary()
- else:
- headers = CaseInsensitiveDictionary(headers)
-
- if 'user-agent' not in headers:
- headers['user-agent'] = USER_AGENT
-
- # Accept gzip-encoded content
- encodings = [s.strip() for s in
- headers.get('accept-encoding', '').split(',')]
- if 'gzip' not in encodings:
- encodings.append('gzip')
-
- headers['accept-encoding'] = ', '.join(encodings)
-
- # Force POST by providing an empty data string
- if method == 'POST' and not data:
- data = ''
-
- if files:
- if not data:
- data = {}
- new_headers, data = encode_multipart_formdata(data, files)
- headers.update(new_headers)
- elif data and isinstance(data, dict):
- data = urllib.urlencode(str_dict(data))
-
- # Make sure everything is encoded text
- headers = str_dict(headers)
-
- if isinstance(url, unicode):
- url = url.encode('utf-8')
-
- if params: # GET args (POST args are handled in encode_multipart_formdata)
-
- scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
-
- if query: # Combine query string and `params`
- url_params = urlparse.parse_qs(query)
- # `params` take precedence over URL query string
- url_params.update(params)
- params = url_params
-
- query = urllib.urlencode(str_dict(params), doseq=True)
- url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))
-
- req = urllib2.Request(url, data, headers)
- return Response(req, stream)
-
-
-def get(url, params=None, headers=None, cookies=None, auth=None,
- timeout=60, allow_redirects=True, stream=False):
- """Initiate a GET request. Arguments as for :func:`request`.
-
- :returns: :class:`Response` instance
-
- """
- return request('GET', url, params, headers=headers, cookies=cookies,
- auth=auth, timeout=timeout, allow_redirects=allow_redirects,
- stream=stream)
-
-
-def post(url, params=None, data=None, headers=None, cookies=None, files=None,
- auth=None, timeout=60, allow_redirects=False, stream=False):
- """Initiate a POST request. Arguments as for :func:`request`.
-
- :returns: :class:`Response` instance
-
- """
- return request('POST', url, params, data, headers, cookies, files, auth,
- timeout, allow_redirects, stream)
-
-
-def encode_multipart_formdata(fields, files):
- """Encode form data (``fields``) and ``files`` for POST request.
-
- :param fields: mapping of ``{name : value}`` pairs for normal form fields.
- :type fields: dict
- :param files: dictionary of fieldnames/files elements for file data.
- See below for details.
- :type files: dict of :class:`dict`
- :returns: ``(headers, body)`` ``headers`` is a
- :class:`dict` of HTTP headers
- :rtype: 2-tuple ``(dict, str)``
-
- The ``files`` argument is a dictionary::
-
- {'fieldname' : { 'filename': 'blah.txt',
- 'content': '',
- 'mimetype': 'text/plain'}
- }
-
- - ``fieldname`` is the name of the field in the HTML form.
- - ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
- be used to guess the mimetype, or ``application/octet-stream``
- will be used.
-
- """
- def get_content_type(filename):
- """Return or guess mimetype of ``filename``.
-
- :param filename: filename of file
- :type filename: unicode/str
- :returns: mime-type, e.g. ``text/html``
- :rtype: str
-
- """
-
- return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
-
- boundary = '-----' + ''.join(random.choice(BOUNDARY_CHARS)
- for i in range(30))
- CRLF = '\r\n'
- output = []
-
- # Normal form fields
- for (name, value) in fields.items():
- if isinstance(name, unicode):
- name = name.encode('utf-8')
- if isinstance(value, unicode):
- value = value.encode('utf-8')
- output.append('--' + boundary)
- output.append('Content-Disposition: form-data; name="%s"' % name)
- output.append('')
- output.append(value)
-
- # Files to upload
- for name, d in files.items():
- filename = d[u'filename']
- content = d[u'content']
- if u'mimetype' in d:
- mimetype = d[u'mimetype']
- else:
- mimetype = get_content_type(filename)
- if isinstance(name, unicode):
- name = name.encode('utf-8')
- if isinstance(filename, unicode):
- filename = filename.encode('utf-8')
- if isinstance(mimetype, unicode):
- mimetype = mimetype.encode('utf-8')
- output.append('--' + boundary)
- output.append('Content-Disposition: form-data; '
- 'name="%s"; filename="%s"' % (name, filename))
- output.append('Content-Type: %s' % mimetype)
- output.append('')
- output.append(content)
-
- output.append('--' + boundary + '--')
- output.append('')
- body = CRLF.join(output)
- headers = {
- 'Content-Type': 'multipart/form-data; boundary=%s' % boundary,
- 'Content-Length': str(len(body)),
- }
- return (headers, body)
diff --git a/workflow/workflow.py b/workflow/workflow.py
index c2c1616..1909eca 100644
--- a/workflow/workflow.py
+++ b/workflow/workflow.py
@@ -19,11 +19,8 @@
"""
-from __future__ import print_function, unicode_literals
import binascii
-import cPickle
-from copy import deepcopy
import json
import logging
import logging.handlers
@@ -37,18 +34,20 @@
import sys
import time
import unicodedata
+from contextlib import contextmanager
+from copy import deepcopy
+from typing import Optional
try:
import xml.etree.cElementTree as ET
except ImportError: # pragma: no cover
import xml.etree.ElementTree as ET
-from util import (
- AcquisitionError, # imported to maintain API
- atomic_writer,
- LockFile,
- uninterruptible,
-)
+# imported to maintain API
+from workflow.util import AcquisitionError # noqa: F401
+from workflow.util import LockFile, atomic_writer, uninterruptible
+
+assert sys.version_info[0] == 3
#: Sentinel for properties that haven't been set yet (that might
#: correctly have the value ``None``)
@@ -67,32 +66,32 @@
# The system icons are all in this directory. There are many more than
# are listed here
-ICON_ROOT = '/System/Library/CoreServices/CoreTypes.bundle/Contents/Resources'
+ICON_ROOT = "/System/Library/CoreServices/CoreTypes.bundle/Contents/Resources"
-ICON_ACCOUNT = os.path.join(ICON_ROOT, 'Accounts.icns')
-ICON_BURN = os.path.join(ICON_ROOT, 'BurningIcon.icns')
-ICON_CLOCK = os.path.join(ICON_ROOT, 'Clock.icns')
-ICON_COLOR = os.path.join(ICON_ROOT, 'ProfileBackgroundColor.icns')
+ICON_ACCOUNT = os.path.join(ICON_ROOT, "Accounts.icns")
+ICON_BURN = os.path.join(ICON_ROOT, "BurningIcon.icns")
+ICON_CLOCK = os.path.join(ICON_ROOT, "Clock.icns")
+ICON_COLOR = os.path.join(ICON_ROOT, "ProfileBackgroundColor.icns")
ICON_COLOUR = ICON_COLOR # Queen's English, if you please
-ICON_EJECT = os.path.join(ICON_ROOT, 'EjectMediaIcon.icns')
+ICON_EJECT = os.path.join(ICON_ROOT, "EjectMediaIcon.icns")
# Shown when a workflow throws an error
-ICON_ERROR = os.path.join(ICON_ROOT, 'AlertStopIcon.icns')
-ICON_FAVORITE = os.path.join(ICON_ROOT, 'ToolbarFavoritesIcon.icns')
+ICON_ERROR = os.path.join(ICON_ROOT, "AlertStopIcon.icns")
+ICON_FAVORITE = os.path.join(ICON_ROOT, "ToolbarFavoritesIcon.icns")
ICON_FAVOURITE = ICON_FAVORITE
-ICON_GROUP = os.path.join(ICON_ROOT, 'GroupIcon.icns')
-ICON_HELP = os.path.join(ICON_ROOT, 'HelpIcon.icns')
-ICON_HOME = os.path.join(ICON_ROOT, 'HomeFolderIcon.icns')
-ICON_INFO = os.path.join(ICON_ROOT, 'ToolbarInfo.icns')
-ICON_NETWORK = os.path.join(ICON_ROOT, 'GenericNetworkIcon.icns')
-ICON_NOTE = os.path.join(ICON_ROOT, 'AlertNoteIcon.icns')
-ICON_SETTINGS = os.path.join(ICON_ROOT, 'ToolbarAdvanced.icns')
-ICON_SWIRL = os.path.join(ICON_ROOT, 'ErasingIcon.icns')
-ICON_SWITCH = os.path.join(ICON_ROOT, 'General.icns')
-ICON_SYNC = os.path.join(ICON_ROOT, 'Sync.icns')
-ICON_TRASH = os.path.join(ICON_ROOT, 'TrashIcon.icns')
-ICON_USER = os.path.join(ICON_ROOT, 'UserIcon.icns')
-ICON_WARNING = os.path.join(ICON_ROOT, 'AlertCautionIcon.icns')
-ICON_WEB = os.path.join(ICON_ROOT, 'BookmarkIcon.icns')
+ICON_GROUP = os.path.join(ICON_ROOT, "GroupIcon.icns")
+ICON_HELP = os.path.join(ICON_ROOT, "HelpIcon.icns")
+ICON_HOME = os.path.join(ICON_ROOT, "HomeFolderIcon.icns")
+ICON_INFO = os.path.join(ICON_ROOT, "ToolbarInfo.icns")
+ICON_NETWORK = os.path.join(ICON_ROOT, "GenericNetworkIcon.icns")
+ICON_NOTE = os.path.join(ICON_ROOT, "AlertNoteIcon.icns")
+ICON_SETTINGS = os.path.join(ICON_ROOT, "ToolbarAdvanced.icns")
+ICON_SWIRL = os.path.join(ICON_ROOT, "ErasingIcon.icns")
+ICON_SWITCH = os.path.join(ICON_ROOT, "General.icns")
+ICON_SYNC = os.path.join(ICON_ROOT, "Sync.icns")
+ICON_TRASH = os.path.join(ICON_ROOT, "TrashIcon.icns")
+ICON_USER = os.path.join(ICON_ROOT, "UserIcon.icns")
+ICON_WARNING = os.path.join(ICON_ROOT, "AlertCautionIcon.icns")
+ICON_WEB = os.path.join(ICON_ROOT, "BookmarkIcon.icns")
####################################################################
# non-ASCII to ASCII diacritic folding.
@@ -100,241 +99,241 @@
####################################################################
ASCII_REPLACEMENTS = {
- 'À': 'A',
- 'Á': 'A',
- 'Â': 'A',
- 'Ã': 'A',
- 'Ä': 'A',
- 'Å': 'A',
- 'Æ': 'AE',
- 'Ç': 'C',
- 'È': 'E',
- 'É': 'E',
- 'Ê': 'E',
- 'Ë': 'E',
- 'Ì': 'I',
- 'Í': 'I',
- 'Î': 'I',
- 'Ï': 'I',
- 'Ð': 'D',
- 'Ñ': 'N',
- 'Ò': 'O',
- 'Ó': 'O',
- 'Ô': 'O',
- 'Õ': 'O',
- 'Ö': 'O',
- 'Ø': 'O',
- 'Ù': 'U',
- 'Ú': 'U',
- 'Û': 'U',
- 'Ü': 'U',
- 'Ý': 'Y',
- 'Þ': 'Th',
- 'ß': 'ss',
- 'à': 'a',
- 'á': 'a',
- 'â': 'a',
- 'ã': 'a',
- 'ä': 'a',
- 'å': 'a',
- 'æ': 'ae',
- 'ç': 'c',
- 'è': 'e',
- 'é': 'e',
- 'ê': 'e',
- 'ë': 'e',
- 'ì': 'i',
- 'í': 'i',
- 'î': 'i',
- 'ï': 'i',
- 'ð': 'd',
- 'ñ': 'n',
- 'ò': 'o',
- 'ó': 'o',
- 'ô': 'o',
- 'õ': 'o',
- 'ö': 'o',
- 'ø': 'o',
- 'ù': 'u',
- 'ú': 'u',
- 'û': 'u',
- 'ü': 'u',
- 'ý': 'y',
- 'þ': 'th',
- 'ÿ': 'y',
- 'Ł': 'L',
- 'ł': 'l',
- 'Ń': 'N',
- 'ń': 'n',
- 'Ņ': 'N',
- 'ņ': 'n',
- 'Ň': 'N',
- 'ň': 'n',
- 'Ŋ': 'ng',
- 'ŋ': 'NG',
- 'Ō': 'O',
- 'ō': 'o',
- 'Ŏ': 'O',
- 'ŏ': 'o',
- 'Ő': 'O',
- 'ő': 'o',
- 'Œ': 'OE',
- 'œ': 'oe',
- 'Ŕ': 'R',
- 'ŕ': 'r',
- 'Ŗ': 'R',
- 'ŗ': 'r',
- 'Ř': 'R',
- 'ř': 'r',
- 'Ś': 'S',
- 'ś': 's',
- 'Ŝ': 'S',
- 'ŝ': 's',
- 'Ş': 'S',
- 'ş': 's',
- 'Š': 'S',
- 'š': 's',
- 'Ţ': 'T',
- 'ţ': 't',
- 'Ť': 'T',
- 'ť': 't',
- 'Ŧ': 'T',
- 'ŧ': 't',
- 'Ũ': 'U',
- 'ũ': 'u',
- 'Ū': 'U',
- 'ū': 'u',
- 'Ŭ': 'U',
- 'ŭ': 'u',
- 'Ů': 'U',
- 'ů': 'u',
- 'Ű': 'U',
- 'ű': 'u',
- 'Ŵ': 'W',
- 'ŵ': 'w',
- 'Ŷ': 'Y',
- 'ŷ': 'y',
- 'Ÿ': 'Y',
- 'Ź': 'Z',
- 'ź': 'z',
- 'Ż': 'Z',
- 'ż': 'z',
- 'Ž': 'Z',
- 'ž': 'z',
- 'ſ': 's',
- 'Α': 'A',
- 'Β': 'B',
- 'Γ': 'G',
- 'Δ': 'D',
- 'Ε': 'E',
- 'Ζ': 'Z',
- 'Η': 'E',
- 'Θ': 'Th',
- 'Ι': 'I',
- 'Κ': 'K',
- 'Λ': 'L',
- 'Μ': 'M',
- 'Ν': 'N',
- 'Ξ': 'Ks',
- 'Ο': 'O',
- 'Π': 'P',
- 'Ρ': 'R',
- 'Σ': 'S',
- 'Τ': 'T',
- 'Υ': 'U',
- 'Φ': 'Ph',
- 'Χ': 'Kh',
- 'Ψ': 'Ps',
- 'Ω': 'O',
- 'α': 'a',
- 'β': 'b',
- 'γ': 'g',
- 'δ': 'd',
- 'ε': 'e',
- 'ζ': 'z',
- 'η': 'e',
- 'θ': 'th',
- 'ι': 'i',
- 'κ': 'k',
- 'λ': 'l',
- 'μ': 'm',
- 'ν': 'n',
- 'ξ': 'x',
- 'ο': 'o',
- 'π': 'p',
- 'ρ': 'r',
- 'ς': 's',
- 'σ': 's',
- 'τ': 't',
- 'υ': 'u',
- 'φ': 'ph',
- 'χ': 'kh',
- 'ψ': 'ps',
- 'ω': 'o',
- 'А': 'A',
- 'Б': 'B',
- 'В': 'V',
- 'Г': 'G',
- 'Д': 'D',
- 'Е': 'E',
- 'Ж': 'Zh',
- 'З': 'Z',
- 'И': 'I',
- 'Й': 'I',
- 'К': 'K',
- 'Л': 'L',
- 'М': 'M',
- 'Н': 'N',
- 'О': 'O',
- 'П': 'P',
- 'Р': 'R',
- 'С': 'S',
- 'Т': 'T',
- 'У': 'U',
- 'Ф': 'F',
- 'Х': 'Kh',
- 'Ц': 'Ts',
- 'Ч': 'Ch',
- 'Ш': 'Sh',
- 'Щ': 'Shch',
- 'Ъ': "'",
- 'Ы': 'Y',
- 'Ь': "'",
- 'Э': 'E',
- 'Ю': 'Iu',
- 'Я': 'Ia',
- 'а': 'a',
- 'б': 'b',
- 'в': 'v',
- 'г': 'g',
- 'д': 'd',
- 'е': 'e',
- 'ж': 'zh',
- 'з': 'z',
- 'и': 'i',
- 'й': 'i',
- 'к': 'k',
- 'л': 'l',
- 'м': 'm',
- 'н': 'n',
- 'о': 'o',
- 'п': 'p',
- 'р': 'r',
- 'с': 's',
- 'т': 't',
- 'у': 'u',
- 'ф': 'f',
- 'х': 'kh',
- 'ц': 'ts',
- 'ч': 'ch',
- 'ш': 'sh',
- 'щ': 'shch',
- 'ъ': "'",
- 'ы': 'y',
- 'ь': "'",
- 'э': 'e',
- 'ю': 'iu',
- 'я': 'ia',
+ "À": "A",
+ "Á": "A",
+ "Â": "A",
+ "Ã": "A",
+ "Ä": "A",
+ "Å": "A",
+ "Æ": "AE",
+ "Ç": "C",
+ "È": "E",
+ "É": "E",
+ "Ê": "E",
+ "Ë": "E",
+ "Ì": "I",
+ "Í": "I",
+ "Î": "I",
+ "Ï": "I",
+ "Ð": "D",
+ "Ñ": "N",
+ "Ò": "O",
+ "Ó": "O",
+ "Ô": "O",
+ "Õ": "O",
+ "Ö": "O",
+ "Ø": "O",
+ "Ù": "U",
+ "Ú": "U",
+ "Û": "U",
+ "Ü": "U",
+ "Ý": "Y",
+ "Þ": "Th",
+ "ß": "ss",
+ "à": "a",
+ "á": "a",
+ "â": "a",
+ "ã": "a",
+ "ä": "a",
+ "å": "a",
+ "æ": "ae",
+ "ç": "c",
+ "è": "e",
+ "é": "e",
+ "ê": "e",
+ "ë": "e",
+ "ì": "i",
+ "í": "i",
+ "î": "i",
+ "ï": "i",
+ "ð": "d",
+ "ñ": "n",
+ "ò": "o",
+ "ó": "o",
+ "ô": "o",
+ "õ": "o",
+ "ö": "o",
+ "ø": "o",
+ "ù": "u",
+ "ú": "u",
+ "û": "u",
+ "ü": "u",
+ "ý": "y",
+ "þ": "th",
+ "ÿ": "y",
+ "Ł": "L",
+ "ł": "l",
+ "Ń": "N",
+ "ń": "n",
+ "Ņ": "N",
+ "ņ": "n",
+ "Ň": "N",
+ "ň": "n",
+ "Ŋ": "ng",
+ "ŋ": "NG",
+ "Ō": "O",
+ "ō": "o",
+ "Ŏ": "O",
+ "ŏ": "o",
+ "Ő": "O",
+ "ő": "o",
+ "Œ": "OE",
+ "œ": "oe",
+ "Ŕ": "R",
+ "ŕ": "r",
+ "Ŗ": "R",
+ "ŗ": "r",
+ "Ř": "R",
+ "ř": "r",
+ "Ś": "S",
+ "ś": "s",
+ "Ŝ": "S",
+ "ŝ": "s",
+ "Ş": "S",
+ "ş": "s",
+ "Š": "S",
+ "š": "s",
+ "Ţ": "T",
+ "ţ": "t",
+ "Ť": "T",
+ "ť": "t",
+ "Ŧ": "T",
+ "ŧ": "t",
+ "Ũ": "U",
+ "ũ": "u",
+ "Ū": "U",
+ "ū": "u",
+ "Ŭ": "U",
+ "ŭ": "u",
+ "Ů": "U",
+ "ů": "u",
+ "Ű": "U",
+ "ű": "u",
+ "Ŵ": "W",
+ "ŵ": "w",
+ "Ŷ": "Y",
+ "ŷ": "y",
+ "Ÿ": "Y",
+ "Ź": "Z",
+ "ź": "z",
+ "Ż": "Z",
+ "ż": "z",
+ "Ž": "Z",
+ "ž": "z",
+ "ſ": "s",
+ "Α": "A",
+ "Β": "B",
+ "Γ": "G",
+ "Δ": "D",
+ "Ε": "E",
+ "Ζ": "Z",
+ "Η": "E",
+ "Θ": "Th",
+ "Ι": "I",
+ "Κ": "K",
+ "Λ": "L",
+ "Μ": "M",
+ "Ν": "N",
+ "Ξ": "Ks",
+ "Ο": "O",
+ "Π": "P",
+ "Ρ": "R",
+ "Σ": "S",
+ "Τ": "T",
+ "Υ": "U",
+ "Φ": "Ph",
+ "Χ": "Kh",
+ "Ψ": "Ps",
+ "Ω": "O",
+ "α": "a",
+ "β": "b",
+ "γ": "g",
+ "δ": "d",
+ "ε": "e",
+ "ζ": "z",
+ "η": "e",
+ "θ": "th",
+ "ι": "i",
+ "κ": "k",
+ "λ": "l",
+ "μ": "m",
+ "ν": "n",
+ "ξ": "x",
+ "ο": "o",
+ "π": "p",
+ "ρ": "r",
+ "ς": "s",
+ "σ": "s",
+ "τ": "t",
+ "υ": "u",
+ "φ": "ph",
+ "χ": "kh",
+ "ψ": "ps",
+ "ω": "o",
+ "А": "A",
+ "Б": "B",
+ "В": "V",
+ "Г": "G",
+ "Д": "D",
+ "Е": "E",
+ "Ж": "Zh",
+ "З": "Z",
+ "И": "I",
+ "Й": "I",
+ "К": "K",
+ "Л": "L",
+ "М": "M",
+ "Н": "N",
+ "О": "O",
+ "П": "P",
+ "Р": "R",
+ "С": "S",
+ "Т": "T",
+ "У": "U",
+ "Ф": "F",
+ "Х": "Kh",
+ "Ц": "Ts",
+ "Ч": "Ch",
+ "Ш": "Sh",
+ "Щ": "Shch",
+ "Ъ": "'",
+ "Ы": "Y",
+ "Ь": "'",
+ "Э": "E",
+ "Ю": "Iu",
+ "Я": "Ia",
+ "а": "a",
+ "б": "b",
+ "в": "v",
+ "г": "g",
+ "д": "d",
+ "е": "e",
+ "ж": "zh",
+ "з": "z",
+ "и": "i",
+ "й": "i",
+ "к": "k",
+ "л": "l",
+ "м": "m",
+ "н": "n",
+ "о": "o",
+ "п": "p",
+ "р": "r",
+ "с": "s",
+ "т": "t",
+ "у": "u",
+ "ф": "f",
+ "х": "kh",
+ "ц": "ts",
+ "ч": "ch",
+ "ш": "sh",
+ "щ": "shch",
+ "ъ": "'",
+ "ы": "y",
+ "ь": "'",
+ "э": "e",
+ "ю": "iu",
+ "я": "ia",
# 'ᴀ': '',
# 'ᴁ': '',
# 'ᴂ': '',
@@ -373,18 +372,18 @@
# 'ᴣ': '',
# 'ᴤ': '',
# 'ᴥ': '',
- 'ᴦ': 'G',
- 'ᴧ': 'L',
- 'ᴨ': 'P',
- 'ᴩ': 'R',
- 'ᴪ': 'PS',
- 'ẞ': 'Ss',
- 'Ỳ': 'Y',
- 'ỳ': 'y',
- 'Ỵ': 'Y',
- 'ỵ': 'y',
- 'Ỹ': 'Y',
- 'ỹ': 'y',
+ "ᴦ": "G",
+ "ᴧ": "L",
+ "ᴨ": "P",
+ "ᴩ": "R",
+ "ᴪ": "PS",
+ "ẞ": "Ss",
+ "Ỳ": "Y",
+ "ỳ": "y",
+ "Ỵ": "Y",
+ "ỵ": "y",
+ "Ỹ": "Y",
+ "ỹ": "y",
}
####################################################################
@@ -392,14 +391,14 @@
####################################################################
DUMB_PUNCTUATION = {
- '‘': "'",
- '’': "'",
- '‚': "'",
- '“': '"',
- '”': '"',
- '„': '"',
- '–': '-',
- '—': '-'
+ "‘": "'",
+ "’": "'",
+ "‚": "'",
+ "“": '"',
+ "”": '"',
+ "„": '"',
+ "–": "-",
+ "—": "-",
}
@@ -412,7 +411,7 @@
INITIALS = string.ascii_uppercase + string.digits
#: Split on non-letters, numbers
-split_on_delimiters = re.compile('[^a-zA-Z0-9]').split
+split_on_delimiters = re.compile("[^a-zA-Z0-9]").split
# Match filter flags
#: Match items that start with ``query``
@@ -482,6 +481,7 @@ class PasswordExists(KeychainError):
# Helper functions
####################################################################
+
def isascii(text):
"""Test if ``text`` contains only ASCII characters.
@@ -492,7 +492,7 @@ def isascii(text):
"""
try:
- text.encode('ascii')
+ text.encode("ascii")
except UnicodeEncodeError:
return False
return True
@@ -502,6 +502,7 @@ def isascii(text):
# Implementation classes
####################################################################
+
class SerializerManager(object):
"""Contains registered serializers.
@@ -539,8 +540,8 @@ def register(self, name, serializer):
"""
# Basic validation
- getattr(serializer, 'load')
- getattr(serializer, 'dump')
+ serializer.load
+ serializer.dump
self._serializers[name] = serializer
@@ -567,8 +568,7 @@ def unregister(self, name):
"""
if name not in self._serializers:
- raise ValueError('No such serializer registered : {0}'.format(
- name))
+ raise ValueError("No such serializer registered : {0}".format(name))
serializer = self._serializers[name]
del self._serializers[name]
@@ -581,86 +581,72 @@ def serializers(self):
return sorted(self._serializers.keys())
-class JSONSerializer(object):
- """Wrapper around :mod:`json`. Sets ``indent`` and ``encoding``.
-
- .. versionadded:: 1.8
-
- Use this serializer if you need readable data files. JSON doesn't
- support Python objects as well as ``cPickle``/``pickle``, so be
- careful which data you try to serialize as JSON.
-
- """
+class BaseSerializer:
+ is_binary: Optional[bool] = None
@classmethod
- def load(cls, file_obj):
- """Load serialized object from open JSON file.
-
- .. versionadded:: 1.8
-
- :param file_obj: file handle
- :type file_obj: ``file`` object
- :returns: object loaded from JSON file
- :rtype: object
-
- """
- return json.load(file_obj)
+ def binary_mode(cls):
+ return "b" if cls.is_binary else ""
@classmethod
- def dump(cls, obj, file_obj):
- """Serialize object ``obj`` to open JSON file.
+ def _opener(cls, opener, path, mode="r"):
+ with opener(path, mode + cls.binary_mode()) as fp:
+ yield fp
- .. versionadded:: 1.8
-
- :param obj: Python object to serialize
- :type obj: JSON-serializable data structure
- :param file_obj: file handle
- :type file_obj: ``file`` object
+ @classmethod
+ @contextmanager
+ def atomic_writer(cls, path, mode):
+ yield from cls._opener(atomic_writer, path, mode)
- """
- return json.dump(obj, file_obj, indent=2, encoding='utf-8')
+ @classmethod
+ @contextmanager
+ def open(cls, path, mode):
+ yield from cls._opener(open, path, mode)
-class CPickleSerializer(object):
- """Wrapper around :mod:`cPickle`. Sets ``protocol``.
+class JSONSerializer(BaseSerializer):
+ """Wrapper around :mod:`json`. Sets ``indent`` and ``encoding``.
.. versionadded:: 1.8
- This is the default serializer and the best combination of speed and
- flexibility.
+ Use this serializer if you need readable data files. JSON doesn't
+ support Python objects as well as ``pickle``, so be
+ careful which data you try to serialize as JSON.
"""
+ is_binary = False
+
@classmethod
def load(cls, file_obj):
- """Load serialized object from open pickle file.
+ """Load serialized object from open JSON file.
.. versionadded:: 1.8
:param file_obj: file handle
:type file_obj: ``file`` object
- :returns: object loaded from pickle file
+ :returns: object loaded from JSON file
:rtype: object
"""
- return cPickle.load(file_obj)
+ return json.load(file_obj)
@classmethod
def dump(cls, obj, file_obj):
- """Serialize object ``obj`` to open pickle file.
+ """Serialize object ``obj`` to open JSON file.
.. versionadded:: 1.8
:param obj: Python object to serialize
- :type obj: Python object
+ :type obj: JSON-serializable data structure
:param file_obj: file handle
:type file_obj: ``file`` object
"""
- return cPickle.dump(obj, file_obj, protocol=-1)
+ return json.dump(obj, file_obj, indent=2)
-class PickleSerializer(object):
+class PickleSerializer(BaseSerializer):
"""Wrapper around :mod:`pickle`. Sets ``protocol``.
.. versionadded:: 1.8
@@ -669,6 +655,8 @@ class PickleSerializer(object):
"""
+ is_binary = True
+
@classmethod
def load(cls, file_obj):
"""Load serialized object from open pickle file.
@@ -700,9 +688,8 @@ def dump(cls, obj, file_obj):
# Set up default manager and register built-in serializers
manager = SerializerManager()
-manager.register('cpickle', CPickleSerializer)
-manager.register('pickle', PickleSerializer)
-manager.register('json', JSONSerializer)
+manager.register("pickle", PickleSerializer)
+manager.register("json", JSONSerializer)
class Item(object):
@@ -716,10 +703,22 @@ class Item(object):
"""
- def __init__(self, title, subtitle='', modifier_subtitles=None,
- arg=None, autocomplete=None, valid=False, uid=None,
- icon=None, icontype=None, type=None, largetext=None,
- copytext=None, quicklookurl=None):
+ def __init__(
+ self,
+ title,
+ subtitle="",
+ modifier_subtitles=None,
+ arg=None,
+ autocomplete=None,
+ valid=False,
+ uid=None,
+ icon=None,
+ icontype=None,
+ type=None,
+ largetext=None,
+ copytext=None,
+ quicklookurl=None,
+ ):
"""Same arguments as :meth:`Workflow.add_item`."""
self.title = title
self.subtitle = subtitle
@@ -746,35 +745,36 @@ def elem(self):
# Attributes on <item> element
attr = {}
if self.valid:
- attr['valid'] = 'yes'
+ attr["valid"] = "yes"
else:
- attr['valid'] = 'no'
+ attr["valid"] = "no"
# Allow empty string for autocomplete. This is a useful value,
# as TABing the result will revert the query back to just the
# keyword
if self.autocomplete is not None:
- attr['autocomplete'] = self.autocomplete
+ attr["autocomplete"] = self.autocomplete
# Optional attributes
- for name in ('uid', 'type'):
+ for name in ("uid", "type"):
value = getattr(self, name, None)
if value:
attr[name] = value
- root = ET.Element('item', attr)
- ET.SubElement(root, 'title').text = self.title
- ET.SubElement(root, 'subtitle').text = self.subtitle
+ root = ET.Element("item", attr)
+ ET.SubElement(root, "title").text = self.title
+ ET.SubElement(root, "subtitle").text = self.subtitle
# Add modifier subtitles
- for mod in ('cmd', 'ctrl', 'alt', 'shift', 'fn'):
+ for mod in ("cmd", "ctrl", "alt", "shift", "fn"):
if mod in self.modifier_subtitles:
- ET.SubElement(root, 'subtitle',
- {'mod': mod}).text = self.modifier_subtitles[mod]
+ ET.SubElement(
+ root, "subtitle", {"mod": mod}
+ ).text = self.modifier_subtitles[mod]
# Add arg as element instead of attribute on <item>, as it's more
# flexible (newlines aren't allowed in attributes)
if self.arg:
- ET.SubElement(root, 'arg').text = self.arg
+ ET.SubElement(root, "arg").text = self.arg
# Add icon if there is one
if self.icon:
@@ -782,18 +782,16 @@ def elem(self):
attr = dict(type=self.icontype)
else:
attr = {}
- ET.SubElement(root, 'icon', attr).text = self.icon
+ ET.SubElement(root, "icon", attr).text = self.icon
if self.largetext:
- ET.SubElement(root, 'text',
- {'type': 'largetype'}).text = self.largetext
+ ET.SubElement(root, "text", {"type": "largetype"}).text = self.largetext
if self.copytext:
- ET.SubElement(root, 'text',
- {'type': 'copy'}).text = self.copytext
+ ET.SubElement(root, "text", {"type": "copy"}).text = self.copytext
if self.quicklookurl:
- ET.SubElement(root, 'quicklookurl').text = self.quicklookurl
+ ET.SubElement(root, "quicklookurl").text = self.quicklookurl
return root
@@ -825,7 +823,7 @@ def __init__(self, filepath, defaults=None):
if os.path.exists(self._filepath):
self._load()
elif defaults:
- for key, val in defaults.items():
+ for key, val in list(defaults.items()):
self[key] = val
self.save() # save default settings
@@ -833,7 +831,7 @@ def _load(self):
"""Load cached settings from JSON file `self._filepath`."""
data = {}
with LockFile(self._filepath, 0.5):
- with open(self._filepath, 'rb') as fp:
+ with open(self._filepath, "r") as fp:
data.update(json.load(fp))
self._original = deepcopy(data)
@@ -857,9 +855,8 @@ def save(self):
data.update(self)
with LockFile(self._filepath, 0.5):
- with atomic_writer(self._filepath, 'wb') as fp:
- json.dump(data, fp, sort_keys=True, indent=2,
- encoding='utf-8')
+ with atomic_writer(self._filepath, "w") as fp:
+ json.dump(data, fp, sort_keys=True, indent=2)
# dict methods
def __setitem__(self, key, value):
@@ -892,9 +889,9 @@ class Workflow(object):
storing & caching data, using Keychain, and generating Script
Filter feedback.
- ``Workflow`` is compatible with both Alfred 2 and 3. The
- :class:`~workflow.Workflow3` subclass provides additional,
- Alfred 3-only features, such as workflow variables.
+ ``Workflow`` is compatible with Alfred 2+. Subclass
+ :class:`~workflow.Workflow3` provides additional features,
+ only available in Alfred 3+, such as workflow variables.
:param default_settings: default workflow settings. If no settings file
exists, :class:`Workflow.settings` will be pre-populated with
@@ -935,11 +932,20 @@ class Workflow(object):
# won't want to change this
item_class = Item
- def __init__(self, default_settings=None, update_settings=None,
- input_encoding='utf-8', normalization='NFC',
- capture_args=True, libraries=None,
- help_url=None):
+ def __init__(
+ self,
+ default_settings=None,
+ update_settings=None,
+ input_encoding="utf-8",
+ normalization="NFC",
+ capture_args=True,
+ libraries=None,
+ help_url=None,
+ ):
"""Create new :class:`Workflow` object."""
+
+ seralizer = "pickle"
+
self._default_settings = default_settings or {}
self._update_settings = update_settings or {}
self._input_encoding = input_encoding
@@ -952,8 +958,8 @@ def __init__(self, default_settings=None, update_settings=None,
self._bundleid = None
self._debugging = None
self._name = None
- self._cache_serializer = 'cpickle'
- self._data_serializer = 'cpickle'
+ self._cache_serializer = seralizer
+ self._data_serializer = seralizer
self._info = None
self._info_loaded = False
self._logger = None
@@ -965,9 +971,10 @@ def __init__(self, default_settings=None, update_settings=None,
self._last_version_run = UNSET
# Cache for regex patterns created for filter keys
self._search_pattern_cache = {}
- # Magic arguments
- #: The prefix for all magic arguments. Default is ``workflow:``
- self.magic_prefix = 'workflow:'
+ #: Prefix for all magic arguments.
+ #: The default value is ``workflow:`` so keyword
+ #: ``config`` would match user query ``workflow:config``.
+ self.magic_prefix = "workflow:"
#: Mapping of available magic arguments. The built-in magic
#: arguments are registered by default. To add your own magic arguments
#: (or override built-ins), add a key:value pair where the key is
@@ -994,8 +1001,9 @@ def __init__(self, default_settings=None, update_settings=None,
@property
def alfred_version(self):
"""Alfred version as :class:`~workflow.update.Version` object."""
- from update import Version
- return Version(self.alfred_env.get('version'))
+ from .update import Version
+
+ return Version(self.alfred_env.get("version"))
@property
def alfred_env(self):
@@ -1050,31 +1058,34 @@ def alfred_env(self):
data = {}
for key in (
- 'alfred_debug',
- 'alfred_preferences',
- 'alfred_preferences_localhash',
- 'alfred_theme',
- 'alfred_theme_background',
- 'alfred_theme_subtext',
- 'alfred_version',
- 'alfred_version_build',
- 'alfred_workflow_bundleid',
- 'alfred_workflow_cache',
- 'alfred_workflow_data',
- 'alfred_workflow_name',
- 'alfred_workflow_uid',
- 'alfred_workflow_version'):
-
- value = os.getenv(key)
-
- if isinstance(value, str):
- if key in ('alfred_debug', 'alfred_version_build',
- 'alfred_theme_subtext'):
- value = int(value)
+ "debug",
+ "preferences",
+ "preferences_localhash",
+ "theme",
+ "theme_background",
+ "theme_subtext",
+ "version",
+ "version_build",
+ "workflow_bundleid",
+ "workflow_cache",
+ "workflow_data",
+ "workflow_name",
+ "workflow_uid",
+ "workflow_version",
+ ):
+
+ value = os.getenv("alfred_" + key, "")
+
+ if value:
+ if key in ("debug", "version_build", "theme_subtext"):
+ if value.isdigit():
+ value = int(value)
+ else:
+ value = False
else:
value = self.decode(value)
- data[key[7:]] = value
+ data[key] = value
self._alfred_env = data
@@ -1096,10 +1107,10 @@ def bundleid(self):
"""
if not self._bundleid:
- if self.alfred_env.get('workflow_bundleid'):
- self._bundleid = self.alfred_env.get('workflow_bundleid')
+ if self.alfred_env.get("workflow_bundleid"):
+ self._bundleid = self.alfred_env.get("workflow_bundleid")
else:
- self._bundleid = unicode(self.info['bundleid'], 'utf-8')
+ self._bundleid = self.info["bundleid"]
return self._bundleid
@@ -1111,12 +1122,9 @@ def debugging(self):
:rtype: ``bool``
"""
- if self._debugging is None:
- if self.alfred_env.get('debug') == 1:
- self._debugging = True
- else:
- self._debugging = False
- return self._debugging
+ return bool(
+ self.alfred_env.get("debug") == 1 or os.environ.get("PYTEST_RUNNING")
+ )
@property
def name(self):
@@ -1127,10 +1135,10 @@ def name(self):
"""
if not self._name:
- if self.alfred_env.get('workflow_name'):
- self._name = self.decode(self.alfred_env.get('workflow_name'))
+ if self.alfred_env.get("workflow_name"):
+ self._name = self.decode(self.alfred_env.get("workflow_name"))
else:
- self._name = self.decode(self.info['name'])
+ self._name = self.decode(self.info["name"])
return self._name
@@ -1155,27 +1163,28 @@ def version(self):
version = None
# environment variable has priority
- if self.alfred_env.get('workflow_version'):
- version = self.alfred_env['workflow_version']
+ if self.alfred_env.get("workflow_version"):
+ version = self.alfred_env["workflow_version"]
# Try `update_settings`
elif self._update_settings:
- version = self._update_settings.get('version')
+ version = self._update_settings.get("version")
# `version` file
if not version:
- filepath = self.workflowfile('version')
+ filepath = self.workflowfile("version")
if os.path.exists(filepath):
- with open(filepath, 'rb') as fileobj:
+ with open(filepath, "r") as fileobj:
version = fileobj.read()
# info.plist
if not version:
- version = self.info.get('version')
+ version = self.info.get("version")
if version:
- from update import Version
+ from .update import Version
+
version = Version(version)
self._version = version
@@ -1208,7 +1217,7 @@ def args(self):
# Handle magic args
if len(args) and self._capture_args:
for name in self.magic_arguments:
- key = '{0}{1}'.format(self.magic_prefix, name)
+ key = "{0}{1}".format(self.magic_prefix, name)
if key in args:
msg = self.magic_arguments[name]()
@@ -1225,18 +1234,22 @@ def cachedir(self):
"""Path to workflow's cache directory.
The cache directory is a subdirectory of Alfred's own cache directory
- in ``~/Library/Caches``. The full path is:
+ in ``~/Library/Caches``. The full path in Alfred 4+ is:
+
+ ``~/Library/Caches/com.runningwithcrayons.Alfred/Workflow Data/``
+
+ For earlier versions:
``~/Library/Caches/com.runningwithcrayons.Alfred-X/Workflow Data/``
- ``Alfred-X`` may be ``Alfred-2`` or ``Alfred-3``.
+ where ``Alfred-X`` may be ``Alfred-2`` or ``Alfred-3``.
- :returns: full path to workflow's cache directory
- :rtype: ``unicode``
+ Returns:
+ unicode: full path to workflow's cache directory
"""
- if self.alfred_env.get('workflow_cache'):
- dirpath = self.alfred_env.get('workflow_cache')
+ if self.alfred_env.get("workflow_cache"):
+ dirpath = self.alfred_env.get("workflow_cache")
else:
dirpath = self._default_cachedir
@@ -1248,25 +1261,32 @@ def _default_cachedir(self):
"""Alfred 2's default cache directory."""
return os.path.join(
os.path.expanduser(
- '~/Library/Caches/com.runningwithcrayons.Alfred-2/'
- 'Workflow Data/'),
- self.bundleid)
+ "~/Library/Caches/com.runningwithcrayons.Alfred-2/" "Workflow Data/"
+ ),
+ self.bundleid,
+ )
@property
def datadir(self):
"""Path to workflow's data directory.
The data directory is a subdirectory of Alfred's own data directory in
- ``~/Library/Application Support``. The full path is:
+ ``~/Library/Application Support``. The full path for Alfred 4+ is:
- ``~/Library/Application Support/Alfred 2/Workflow Data/``
+ ``~/Library/Application Support/Alfred/Workflow Data/``
- :returns: full path to workflow data directory
- :rtype: ``unicode``
+ For earlier versions, the path is:
+
+ ``~/Library/Application Support/Alfred X/Workflow Data/``
+
+ where ``Alfred X`` is ``Alfred 2`` or ``Alfred 3``.
+
+ Returns:
+ unicode: full path to workflow data directory
"""
- if self.alfred_env.get('workflow_data'):
- dirpath = self.alfred_env.get('workflow_data')
+ if self.alfred_env.get("workflow_data"):
+ dirpath = self.alfred_env.get("workflow_data")
else:
dirpath = self._default_datadir
@@ -1276,16 +1296,17 @@ def datadir(self):
@property
def _default_datadir(self):
"""Alfred 2's default data directory."""
- return os.path.join(os.path.expanduser(
- '~/Library/Application Support/Alfred 2/Workflow Data/'),
- self.bundleid)
+ return os.path.join(
+ os.path.expanduser("~/Library/Application Support/Alfred 2/Workflow Data/"),
+ self.bundleid,
+ )
@property
def workflowdir(self):
"""Path to workflow's root directory (where ``info.plist`` is).
- :returns: full path to workflow root directory
- :rtype: ``unicode``
+ Returns:
+ unicode: full path to workflow root directory
"""
if not self._workflowdir:
@@ -1293,8 +1314,9 @@ def workflowdir(self):
# the library is in. CWD will be the workflow root if
# a workflow is being run in Alfred
candidates = [
- os.path.abspath(os.getcwdu()),
- os.path.dirname(os.path.abspath(os.path.dirname(__file__)))]
+ os.path.abspath(os.getcwd()),
+ os.path.dirname(os.path.abspath(os.path.dirname(__file__))),
+ ]
# climb the directory tree until we find `info.plist`
for dirpath in candidates:
@@ -1303,11 +1325,11 @@ def workflowdir(self):
dirpath = self.decode(dirpath)
while True:
- if os.path.exists(os.path.join(dirpath, 'info.plist')):
+ if os.path.exists(os.path.join(dirpath, "info.plist")):
self._workflowdir = dirpath
break
- elif dirpath == '/':
+ elif dirpath == "/":
# no `info.plist` found
break
@@ -1370,7 +1392,7 @@ def logfile(self):
:rtype: ``unicode``
"""
- return self.cachefile('%s.log' % self.bundleid)
+ return self.cachefile("%s.log" % self.bundleid)
@property
def logger(self):
@@ -1388,7 +1410,7 @@ def logger(self):
return self._logger
# Initialise new logger and optionally handlers
- logger = logging.getLogger('')
+ logger = logging.getLogger("")
# Only add one set of handlers
# Exclude from coverage, as pytest will have configured the
@@ -1396,14 +1418,13 @@ def logger(self):
if not len(logger.handlers): # pragma: no cover
fmt = logging.Formatter(
- '%(asctime)s %(filename)s:%(lineno)s'
- ' %(levelname)-8s %(message)s',
- datefmt='%H:%M:%S')
+ "%(asctime)s %(filename)s:%(lineno)s" " %(levelname)-8s %(message)s",
+ datefmt="%H:%M:%S",
+ )
logfile = logging.handlers.RotatingFileHandler(
- self.logfile,
- maxBytes=1024 * 1024,
- backupCount=1)
+ self.logfile, maxBytes=1024 * 1024, backupCount=1
+ )
logfile.setFormatter(fmt)
logger.addHandler(logfile)
@@ -1439,7 +1460,7 @@ def settings_path(self):
"""
if not self._settings_path:
- self._settings_path = self.datafile('settings.json')
+ self._settings_path = self.datafile("settings.json")
return self._settings_path
@property
@@ -1459,9 +1480,8 @@ def settings(self):
"""
if not self._settings:
- self.logger.debug('reading settings from %s', self.settings_path)
- self._settings = Settings(self.settings_path,
- self._default_settings)
+ self.logger.debug("reading settings from %s", self.settings_path)
+ self._settings = Settings(self.settings_path, self._default_settings)
return self._settings
@property
@@ -1500,10 +1520,11 @@ def cache_serializer(self, serializer_name):
"""
if manager.serializer(serializer_name) is None:
raise ValueError(
- 'Unknown serializer : `{0}`. Register your serializer '
- 'with `manager` first.'.format(serializer_name))
+ "Unknown serializer : `{0}`. Register your serializer "
+ "with `manager` first.".format(serializer_name)
+ )
- self.logger.debug('default cache serializer: %s', serializer_name)
+ self.logger.debug("default cache serializer: %s", serializer_name)
self._cache_serializer = serializer_name
@@ -1542,10 +1563,11 @@ def data_serializer(self, serializer_name):
"""
if manager.serializer(serializer_name) is None:
raise ValueError(
- 'Unknown serializer : `{0}`. Register your serializer '
- 'with `manager` first.'.format(serializer_name))
+ "Unknown serializer : `{0}`. Register your serializer "
+ "with `manager` first.".format(serializer_name)
+ )
- self.logger.debug('default data serializer: %s', serializer_name)
+ self.logger.debug("default data serializer: %s", serializer_name)
self._data_serializer = serializer_name
@@ -1559,39 +1581,40 @@ def stored_data(self, name):
:param name: name of datastore
"""
- metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
+ metadata_path = self.datafile(".{0}.alfred-workflow".format(name))
if not os.path.exists(metadata_path):
- self.logger.debug('no data stored for `%s`', name)
+ self.logger.debug("no data stored for `%s`", name)
return None
- with open(metadata_path, 'rb') as file_obj:
+ with open(metadata_path, "r") as file_obj:
serializer_name = file_obj.read().strip()
serializer = manager.serializer(serializer_name)
if serializer is None:
raise ValueError(
- 'Unknown serializer `{0}`. Register a corresponding '
- 'serializer with `manager.register()` '
- 'to load this data.'.format(serializer_name))
+ "Unknown serializer `{0}`. Register a corresponding "
+ "serializer with `manager.register()` "
+ "to load this data.".format(serializer_name)
+ )
- self.logger.debug('data `%s` stored as `%s`', name, serializer_name)
+ self.logger.debug("data `%s` stored as `%s`", name, serializer_name)
- filename = '{0}.{1}'.format(name, serializer_name)
+ filename = "{0}.{1}".format(name, serializer_name)
data_path = self.datafile(filename)
if not os.path.exists(data_path):
- self.logger.debug('no data stored: %s', name)
+ self.logger.debug("no data stored: %s", name)
if os.path.exists(metadata_path):
os.unlink(metadata_path)
return None
- with open(data_path, 'rb') as file_obj:
+ with open(data_path, "rb") as file_obj:
data = serializer.load(file_obj)
- self.logger.debug('stored data loaded: %s', data_path)
+ self.logger.debug("stored data loaded: %s", data_path)
return data
@@ -1620,47 +1643,52 @@ def delete_paths(paths):
for path in paths:
if os.path.exists(path):
os.unlink(path)
- self.logger.debug('deleted data file: %s', path)
+ self.logger.debug("deleted data file: %s", path)
serializer_name = serializer or self.data_serializer
# In order for `stored_data()` to be able to load data stored with
# an arbitrary serializer, yet still have meaningful file extensions,
# the format (i.e. extension) is saved to an accompanying file
- metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
- filename = '{0}.{1}'.format(name, serializer_name)
+ metadata_path = self.datafile(".{0}.alfred-workflow".format(name))
+ filename = "{0}.{1}".format(name, serializer_name)
data_path = self.datafile(filename)
if data_path == self.settings_path:
raise ValueError(
- 'Cannot save data to' +
- '`{0}` with format `{1}`. '.format(name, serializer_name) +
- "This would overwrite Alfred-Workflow's settings file.")
+ "Cannot save data to"
+ + "`{0}` with format `{1}`. ".format(name, serializer_name)
+ + "This would overwrite Alfred-Workflow's settings file."
+ )
serializer = manager.serializer(serializer_name)
if serializer is None:
raise ValueError(
- 'Invalid serializer `{0}`. Register your serializer with '
- '`manager.register()` first.'.format(serializer_name))
+ "Invalid serializer `{0}`. Register your serializer with "
+ "`manager.register()` first.".format(serializer_name)
+ )
if data is None: # Delete cached data
delete_paths((metadata_path, data_path))
return
+ if isinstance(data, str):
+ data = bytearray(data)
+
# Ensure write is not interrupted by SIGTERM
@uninterruptible
def _store():
# Save file extension
- with atomic_writer(metadata_path, 'wb') as file_obj:
+ with atomic_writer(metadata_path, "w") as file_obj:
file_obj.write(serializer_name)
- with atomic_writer(data_path, 'wb') as file_obj:
+ with serializer.atomic_writer(data_path, "w") as file_obj:
serializer.dump(data, file_obj)
_store()
- self.logger.debug('saved data: %s', data_path)
+ self.logger.debug("saved data: %s", data_path)
def cached_data(self, name, data_func=None, max_age=60):
"""Return cached data if younger than ``max_age`` seconds.
@@ -1680,13 +1708,13 @@ def cached_data(self, name, data_func=None, max_age=60):
"""
serializer = manager.serializer(self.cache_serializer)
- cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
+ cache_path = self.cachefile("%s.%s" % (name, self.cache_serializer))
age = self.cached_data_age(name)
if (age < max_age or max_age == 0) and os.path.exists(cache_path):
- with open(cache_path, 'rb') as file_obj:
- self.logger.debug('loading cached data: %s', cache_path)
+ with open(cache_path, "rb") as file_obj:
+ self.logger.debug("loading cached data: %s", cache_path)
return serializer.load(file_obj)
if not data_func:
@@ -1710,18 +1738,18 @@ def cache_data(self, name, data):
"""
serializer = manager.serializer(self.cache_serializer)
- cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
+ cache_path = self.cachefile("%s.%s" % (name, self.cache_serializer))
if data is None:
if os.path.exists(cache_path):
os.unlink(cache_path)
- self.logger.debug('deleted cache file: %s', cache_path)
+ self.logger.debug("deleted cache file: %s", cache_path)
return
- with atomic_writer(cache_path, 'wb') as file_obj:
+ with serializer.atomic_writer(cache_path, "w") as file_obj:
serializer.dump(data, file_obj)
- self.logger.debug('cached data: %s', cache_path)
+ self.logger.debug("cached data: %s", cache_path)
def cached_data_fresh(self, name, max_age):
"""Whether cache `name` is less than `max_age` seconds old.
@@ -1749,16 +1777,25 @@ def cached_data_age(self, name):
:rtype: ``int``
"""
- cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
+ cache_path = self.cachefile("%s.%s" % (name, self.cache_serializer))
if not os.path.exists(cache_path):
return 0
return time.time() - os.stat(cache_path).st_mtime
- def filter(self, query, items, key=lambda x: x, ascending=False,
- include_score=False, min_score=0, max_results=0,
- match_on=MATCH_ALL, fold_diacritics=True):
+ def filter(
+ self,
+ query,
+ items,
+ key=lambda x: x,
+ ascending=False,
+ include_score=False,
+ min_score=0,
+ max_results=0,
+ match_on=MATCH_ALL,
+ fold_diacritics=True,
+ ):
"""Fuzzy search filter. Returns list of ``items`` that match ``query``.
``query`` is case-insensitive. Any item that does not contain the
@@ -1867,23 +1904,25 @@ def filter(self, query, items, key=lambda x: x, ascending=False,
return items
# Use user override if there is one
- fold_diacritics = self.settings.get('__workflow_diacritic_folding',
- fold_diacritics)
+ fold_diacritics = self.settings.get(
+ "__workflow_diacritic_folding", fold_diacritics
+ )
results = []
for item in items:
skip = False
score = 0
- words = [s.strip() for s in query.split(' ')]
+ words = [s.strip() for s in query.split(" ")]
+ # print("ere")
+ # print(key(item).strip())
value = key(item).strip()
- if value == '':
+ if value == "":
continue
for word in words:
- if word == '':
+ if word == "":
continue
- s, rule = self._filter_item(value, word, match_on,
- fold_diacritics)
+ s, rule = self._filter_item(value, word, match_on, fold_diacritics)
if not s: # Skip items that don't match part of the query
skip = True
@@ -1896,8 +1935,9 @@ def filter(self, query, items, key=lambda x: x, ascending=False,
# use "reversed" `score` (i.e. highest becomes lowest) and
# `value` as sort key. This means items with the same score
# will be sorted in alphabetical not reverse alphabetical order
- results.append(((100.0 / score, value.lower(), score),
- (item, score, rule)))
+ results.append(
+ ((100.0 / score, value.lower(), score), (item, score, rule))
+ )
# sort on keys, then discard the keys
results.sort(reverse=ascending)
@@ -1944,7 +1984,7 @@ def _filter_item(self, value, query, match_on, fold_diacritics):
# query matches capitalised letters in item,
# e.g. of = OmniFocus
if match_on & MATCH_CAPITALS:
- initials = ''.join([c for c in value if c in INITIALS])
+ initials = "".join([c for c in value if c in INITIALS])
if initials.lower().startswith(query):
score = 100.0 - (len(initials) / len(query))
@@ -1952,13 +1992,15 @@ def _filter_item(self, value, query, match_on, fold_diacritics):
# split the item into "atoms", i.e. words separated by
# spaces or other non-word characters
- if (match_on & MATCH_ATOM or
- match_on & MATCH_INITIALS_CONTAIN or
- match_on & MATCH_INITIALS_STARTSWITH):
+ if (
+ match_on & MATCH_ATOM
+ or match_on & MATCH_INITIALS_CONTAIN
+ or match_on & MATCH_INITIALS_STARTSWITH
+ ):
atoms = [s.lower() for s in split_on_delimiters(value)]
# print('atoms : %s --> %s' % (value, atoms))
# initials of the atoms
- initials = ''.join([s[0] for s in atoms if s])
+ initials = "".join([s[0] for s in atoms if s])
if match_on & MATCH_ATOM:
# is `query` one of the atoms in item?
@@ -1973,16 +2015,14 @@ def _filter_item(self, value, query, match_on, fold_diacritics):
# atoms, e.g. ``himym`` matches "How I Met Your Mother"
# *and* "how i met your mother" (the ``capitals`` rule only
# matches the former)
- if (match_on & MATCH_INITIALS_STARTSWITH and
- initials.startswith(query)):
+ if match_on & MATCH_INITIALS_STARTSWITH and initials.startswith(query):
score = 100.0 - (len(initials) / len(query))
return (score, MATCH_INITIALS_STARTSWITH)
# `query` is a substring of initials, e.g. ``doh`` matches
# "The Dukes of Hazzard"
- elif (match_on & MATCH_INITIALS_CONTAIN and
- query in initials):
+ elif match_on & MATCH_INITIALS_CONTAIN and query in initials:
score = 95.0 - (len(initials) / len(query))
return (score, MATCH_INITIALS_CONTAIN)
@@ -1999,8 +2039,9 @@ def _filter_item(self, value, query, match_on, fold_diacritics):
search = self._search_for_query(query)
match = search(value)
if match:
- score = 100.0 / ((1 + match.start()) *
- (match.end() - match.start() + 1))
+ score = 100.0 / (
+ (1 + match.start()) * (match.end() - match.start() + 1)
+ )
return (score, MATCH_ALLCHARS)
@@ -2015,8 +2056,8 @@ def _search_for_query(self, query):
pattern = []
for c in query:
# pattern.append('[^{0}]*{0}'.format(re.escape(c)))
- pattern.append('.*?{0}'.format(re.escape(c)))
- pattern = ''.join(pattern)
+ pattern.append(".*?{0}".format(re.escape(c)))
+ pattern = "".join(pattern)
search = re.compile(pattern, re.IGNORECASE).search
self._search_pattern_cache[query] = search
@@ -2045,16 +2086,17 @@ def run(self, func, text_errors=False):
start = time.time()
# Write to debugger to ensure "real" output starts on a new line
- print('.', file=sys.stderr)
+ print(".", file=sys.stderr)
# Call workflow's entry function/method within a try-except block
# to catch any errors and display an error message in Alfred
try:
if self.version:
- self.logger.debug('---------- %s (%s) ----------',
- self.name, self.version)
+ self.logger.debug(
+ "---------- %s (%s) ----------", self.name, self.version
+ )
else:
- self.logger.debug('---------- %s ----------', self.name)
+ self.logger.debug("---------- %s ----------", self.name)
# Run update check if configured for self-updates.
# This call has to go in the `run` try-except block, as it will
@@ -2073,11 +2115,11 @@ def run(self, func, text_errors=False):
except Exception as err:
self.logger.exception(err)
if self.help_url:
- self.logger.info('for assistance, see: %s', self.help_url)
+ self.logger.info("for assistance, see: %s", self.help_url)
if not sys.stdout.isatty(): # Show error in Alfred
if text_errors:
- print(unicode(err).encode('utf-8'), end='')
+ print(str(err), end="")
else:
self._items = []
if self._name:
@@ -2086,24 +2128,37 @@ def run(self, func, text_errors=False):
name = self._bundleid
else: # pragma: no cover
name = os.path.dirname(__file__)
- self.add_item("Error in workflow '%s'" % name,
- unicode(err),
- icon=ICON_ERROR)
+ self.add_item(
+ "Error in workflow '%s'" % name, str(err), icon=ICON_ERROR
+ )
self.send_feedback()
return 1
finally:
- self.logger.debug('---------- finished in %0.3fs ----------',
- time.time() - start)
+ self.logger.debug(
+ "---------- finished in %0.3fs ----------", time.time() - start
+ )
return 0
# Alfred feedback methods ------------------------------------------
- def add_item(self, title, subtitle='', modifier_subtitles=None, arg=None,
- autocomplete=None, valid=False, uid=None, icon=None,
- icontype=None, type=None, largetext=None, copytext=None,
- quicklookurl=None):
+ def add_item(
+ self,
+ title,
+ subtitle="",
+ modifier_subtitles=None,
+ arg=None,
+ autocomplete=None,
+ valid=False,
+ uid=None,
+ icon=None,
+ icontype=None,
+ type=None,
+ largetext=None,
+ copytext=None,
+ quicklookurl=None,
+ ):
"""Add an item to be output to Alfred.
:param title: Title shown in Alfred
@@ -2161,19 +2216,31 @@ def add_item(self, title, subtitle='', modifier_subtitles=None, arg=None,
edit it or do something with it other than send it to Alfred.
"""
- item = self.item_class(title, subtitle, modifier_subtitles, arg,
- autocomplete, valid, uid, icon, icontype, type,
- largetext, copytext, quicklookurl)
+ item = self.item_class(
+ title,
+ subtitle,
+ modifier_subtitles,
+ arg,
+ autocomplete,
+ valid,
+ uid,
+ icon,
+ icontype,
+ type,
+ largetext,
+ copytext,
+ quicklookurl,
+ )
self._items.append(item)
return item
def send_feedback(self):
"""Print stored items to console/Alfred as XML."""
- root = ET.Element('items')
+ root = ET.Element("items")
for item in self._items:
root.append(item.elem)
sys.stdout.write('\n')
- sys.stdout.write(ET.tostring(root).encode('utf-8'))
+ sys.stdout.write(ET.tostring(root, encoding="unicode"))
sys.stdout.flush()
####################################################################
@@ -2190,7 +2257,7 @@ def first_run(self):
"""
if not self.version:
- raise ValueError('No workflow version set')
+ raise ValueError("No workflow version set")
if not self.last_version_run:
return True
@@ -2209,14 +2276,15 @@ def last_version_run(self):
"""
if self._last_version_run is UNSET:
- version = self.settings.get('__workflow_last_version')
+ version = self.settings.get("__workflow_last_version")
if version:
- from update import Version
+ from .update import Version
+
version = Version(version)
self._last_version_run = version
- self.logger.debug('last run version: %s', self._last_version_run)
+ self.logger.debug("last run version: %s", self._last_version_run)
return self._last_version_run
@@ -2233,19 +2301,19 @@ def set_last_version(self, version=None):
"""
if not version:
if not self.version:
- self.logger.warning(
- "Can't save last version: workflow has no version")
+ self.logger.warning("Can't save last version: workflow has no version")
return False
version = self.version
- if isinstance(version, basestring):
- from update import Version
+ if isinstance(version, str):
+ from .update import Version
+
version = Version(version)
- self.settings['__workflow_last_version'] = str(version)
+ self.settings["__workflow_last_version"] = str(version)
- self.logger.debug('set last run version: %s', version)
+ self.logger.debug("set last run version: %s", version)
return True
@@ -2261,17 +2329,16 @@ def update_available(self):
:returns: ``True`` if an update is available, else ``False``
"""
+ key = "__workflow_latest_version"
# Create a new workflow object to ensure standard serialiser
# is used (update.py is called without the user's settings)
- update_data = Workflow().cached_data('__workflow_update_status',
- max_age=0)
-
- self.logger.debug('update_data: %r', update_data)
+ status = Workflow().cached_data(key, max_age=0)
- if not update_data or not update_data.get('available'):
+ # self.logger.debug('update status: %r', status)
+ if not status or not status.get("available"):
return False
- return update_data['available']
+ return status["available"]
@property
def prereleases(self):
@@ -2284,10 +2351,10 @@ def prereleases(self):
``False``.
"""
- if self._update_settings.get('prereleases'):
+ if self._update_settings.get("prereleases"):
return True
- return self.settings.get('__workflow_prereleases') or False
+ return self.settings.get("__workflow_prereleases") or False
def check_update(self, force=False):
"""Call update script if it's time to check for a new release.
@@ -2304,39 +2371,34 @@ def check_update(self, force=False):
:type force: ``Boolean``
"""
- frequency = self._update_settings.get('frequency',
- DEFAULT_UPDATE_FREQUENCY)
+ key = "__workflow_latest_version"
+ frequency = self._update_settings.get("frequency", DEFAULT_UPDATE_FREQUENCY)
- if not force and not self.settings.get('__workflow_autoupdate', True):
- self.logger.debug('Auto update turned off by user')
+ if not force and not self.settings.get("__workflow_autoupdate", True):
+ self.logger.debug("Auto update turned off by user")
return
# Check for new version if it's time
- if (force or not self.cached_data_fresh(
- '__workflow_update_status', frequency * 86400)):
-
- github_slug = self._update_settings['github_slug']
+ if force or not self.cached_data_fresh(key, frequency * 86400):
+ repo = self._update_settings["github_slug"]
# version = self._update_settings['version']
version = str(self.version)
- from background import run_in_background
+ from .background import run_in_background
# update.py is adjacent to this file
- update_script = os.path.join(os.path.dirname(__file__),
- b'update.py')
-
- cmd = ['/usr/bin/python', update_script, 'check', github_slug,
- version]
+ update_script = os.path.join(os.path.dirname(__file__), "update.py")
+ cmd = [sys.executable, update_script, "check", repo, version]
if self.prereleases:
- cmd.append('--prereleases')
+ cmd.append("--prereleases")
- self.logger.info('checking for update ...')
+ self.logger.info("checking for update ...")
- run_in_background('__workflow_update_check', cmd)
+ run_in_background("__workflow_update_check", cmd)
else:
- self.logger.debug('update check not due')
+ self.logger.debug("update check not due")
def start_update(self):
"""Check for update and download and install new workflow file.
@@ -2350,29 +2412,27 @@ def start_update(self):
installed, else ``False``
"""
- import update
+ from . import update
- github_slug = self._update_settings['github_slug']
+ repo = self._update_settings["github_slug"]
# version = self._update_settings['version']
version = str(self.version)
- if not update.check_update(github_slug, version, self.prereleases):
+ if not update.check_update(repo, version, self.prereleases):
return False
- from background import run_in_background
+ from .background import run_in_background
# update.py is adjacent to this file
- update_script = os.path.join(os.path.dirname(__file__),
- b'update.py')
+ update_script = os.path.join(os.path.dirname(__file__), "update.py")
- cmd = ['/usr/bin/python', update_script, 'install', github_slug,
- version]
+ cmd = [sys.executable, update_script, "install", repo, version]
if self.prereleases:
- cmd.append('--prereleases')
+ cmd.append("--prereleases")
- self.logger.debug('downloading update ...')
- run_in_background('__workflow_update_install', cmd)
+ self.logger.debug("downloading update ...")
+ run_in_background("__workflow_update_install", cmd)
return True
@@ -2403,22 +2463,24 @@ def save_password(self, account, password, service=None):
service = self.bundleid
try:
- self._call_security('add-generic-password', service, account,
- '-w', password)
- self.logger.debug('saved password : %s:%s', service, account)
+ self._call_security(
+ "add-generic-password", service, account, "-w", password
+ )
+ self.logger.debug("saved password : %s:%s", service, account)
except PasswordExists:
- self.logger.debug('password exists : %s:%s', service, account)
+ self.logger.debug("password exists : %s:%s", service, account)
current_password = self.get_password(account, service)
if current_password == password:
- self.logger.debug('password unchanged')
+ self.logger.debug("password unchanged")
else:
self.delete_password(account, service)
- self._call_security('add-generic-password', service,
- account, '-w', password)
- self.logger.debug('save_password : %s:%s', service, account)
+ self._call_security(
+ "add-generic-password", service, account, "-w", password
+ )
+ self.logger.debug("save_password : %s:%s", service, account)
def get_password(self, account, service=None):
"""Retrieve the password saved at ``service/account``.
@@ -2438,24 +2500,23 @@ def get_password(self, account, service=None):
if not service:
service = self.bundleid
- output = self._call_security('find-generic-password', service,
- account, '-g')
+ output = self._call_security("find-generic-password", service, account, "-g")
# Parsing of `security` output is adapted from python-keyring
# by Jason R. Coombs
# https://pypi.python.org/pypi/keyring
m = re.search(
- r'password:\s*(?:0x(?P<hex>[0-9A-F]+)\s*)?(?:"(?P<pw>.*)")?',
- output)
+ r'password:\s*(?:0x(?P<hex>[0-9A-F]+)\s*)?(?:"(?P<pw>.*)")?', output
+ )
if m:
groups = m.groupdict()
- h = groups.get('hex')
- password = groups.get('pw')
+ h = groups.get("hex")
+ password = groups.get("pw")
if h:
- password = unicode(binascii.unhexlify(h), 'utf-8')
+ password = str(binascii.unhexlify(h), "utf-8")
- self.logger.debug('got password : %s:%s', service, account)
+ self.logger.debug("got password : %s:%s", service, account)
return password
@@ -2475,15 +2536,15 @@ def delete_password(self, account, service=None):
if not service:
service = self.bundleid
- self._call_security('delete-generic-password', service, account)
+ self._call_security("delete-generic-password", service, account)
- self.logger.debug('deleted password : %s:%s', service, account)
+ self.logger.debug("deleted password : %s:%s", service, account)
####################################################################
# Methods for workflow:* magic args
####################################################################
- def _register_default_magic(self):
+ def _register_default_magic(self): # noqa: C901
"""Register the built-in magic arguments."""
# TODO: refactor & simplify
# Wrap callback and message with callable
@@ -2494,91 +2555,98 @@ def wrapper():
return wrapper
- self.magic_arguments['delcache'] = callback(self.clear_cache,
- 'Deleted workflow cache')
- self.magic_arguments['deldata'] = callback(self.clear_data,
- 'Deleted workflow data')
- self.magic_arguments['delsettings'] = callback(
- self.clear_settings, 'Deleted workflow settings')
- self.magic_arguments['reset'] = callback(self.reset,
- 'Reset workflow')
- self.magic_arguments['openlog'] = callback(self.open_log,
- 'Opening workflow log file')
- self.magic_arguments['opencache'] = callback(
- self.open_cachedir, 'Opening workflow cache directory')
- self.magic_arguments['opendata'] = callback(
- self.open_datadir, 'Opening workflow data directory')
- self.magic_arguments['openworkflow'] = callback(
- self.open_workflowdir, 'Opening workflow directory')
- self.magic_arguments['openterm'] = callback(
- self.open_terminal, 'Opening workflow root directory in Terminal')
+ self.magic_arguments["delcache"] = callback(
+ self.clear_cache, "Deleted workflow cache"
+ )
+ self.magic_arguments["deldata"] = callback(
+ self.clear_data, "Deleted workflow data"
+ )
+ self.magic_arguments["delsettings"] = callback(
+ self.clear_settings, "Deleted workflow settings"
+ )
+ self.magic_arguments["reset"] = callback(self.reset, "Reset workflow")
+ self.magic_arguments["openlog"] = callback(
+ self.open_log, "Opening workflow log file"
+ )
+ self.magic_arguments["opencache"] = callback(
+ self.open_cachedir, "Opening workflow cache directory"
+ )
+ self.magic_arguments["opendata"] = callback(
+ self.open_datadir, "Opening workflow data directory"
+ )
+ self.magic_arguments["openworkflow"] = callback(
+ self.open_workflowdir, "Opening workflow directory"
+ )
+ self.magic_arguments["openterm"] = callback(
+ self.open_terminal, "Opening workflow root directory in Terminal"
+ )
# Diacritic folding
def fold_on():
- self.settings['__workflow_diacritic_folding'] = True
- return 'Diacritics will always be folded'
+ self.settings["__workflow_diacritic_folding"] = True
+ return "Diacritics will always be folded"
def fold_off():
- self.settings['__workflow_diacritic_folding'] = False
- return 'Diacritics will never be folded'
+ self.settings["__workflow_diacritic_folding"] = False
+ return "Diacritics will never be folded"
def fold_default():
- if '__workflow_diacritic_folding' in self.settings:
- del self.settings['__workflow_diacritic_folding']
- return 'Diacritics folding reset'
+ if "__workflow_diacritic_folding" in self.settings:
+ del self.settings["__workflow_diacritic_folding"]
+ return "Diacritics folding reset"
- self.magic_arguments['foldingon'] = fold_on
- self.magic_arguments['foldingoff'] = fold_off
- self.magic_arguments['foldingdefault'] = fold_default
+ self.magic_arguments["foldingon"] = fold_on
+ self.magic_arguments["foldingoff"] = fold_off
+ self.magic_arguments["foldingdefault"] = fold_default
# Updates
def update_on():
- self.settings['__workflow_autoupdate'] = True
- return 'Auto update turned on'
+ self.settings["__workflow_autoupdate"] = True
+ return "Auto update turned on"
def update_off():
- self.settings['__workflow_autoupdate'] = False
- return 'Auto update turned off'
+ self.settings["__workflow_autoupdate"] = False
+ return "Auto update turned off"
def prereleases_on():
- self.settings['__workflow_prereleases'] = True
- return 'Prerelease updates turned on'
+ self.settings["__workflow_prereleases"] = True
+ return "Prerelease updates turned on"
def prereleases_off():
- self.settings['__workflow_prereleases'] = False
- return 'Prerelease updates turned off'
+ self.settings["__workflow_prereleases"] = False
+ return "Prerelease updates turned off"
def do_update():
if self.start_update():
- return 'Downloading and installing update ...'
+ return "Downloading and installing update ..."
else:
- return 'No update available'
+ return "No update available"
- self.magic_arguments['autoupdate'] = update_on
- self.magic_arguments['noautoupdate'] = update_off
- self.magic_arguments['prereleases'] = prereleases_on
- self.magic_arguments['noprereleases'] = prereleases_off
- self.magic_arguments['update'] = do_update
+ self.magic_arguments["autoupdate"] = update_on
+ self.magic_arguments["noautoupdate"] = update_off
+ self.magic_arguments["prereleases"] = prereleases_on
+ self.magic_arguments["noprereleases"] = prereleases_off
+ self.magic_arguments["update"] = do_update
# Help
def do_help():
if self.help_url:
self.open_help()
- return 'Opening workflow help URL in browser'
+ return "Opening workflow help URL in browser"
else:
- return 'Workflow has no help URL'
+ return "Workflow has no help URL"
def show_version():
if self.version:
- return 'Version: {0}'.format(self.version)
+ return "Version: {0}".format(self.version)
else:
- return 'This workflow has no version number'
+ return "This workflow has no version number"
def list_magic():
"""Display all available magic args in Alfred."""
isatty = sys.stderr.isatty()
for name in sorted(self.magic_arguments.keys()):
- if name == 'magic':
+ if name == "magic":
continue
arg = self.magic_prefix + name
self.logger.debug(arg)
@@ -2589,9 +2657,9 @@ def list_magic():
if not isatty:
self.send_feedback()
- self.magic_arguments['help'] = do_help
- self.magic_arguments['magic'] = list_magic
- self.magic_arguments['version'] = show_version
+ self.magic_arguments["help"] = do_help
+ self.magic_arguments["magic"] = list_magic
+ self.magic_arguments["version"] = show_version
def clear_cache(self, filter_func=lambda f: True):
"""Delete all files in workflow's :attr:`cachedir`.
@@ -2621,7 +2689,7 @@ def clear_settings(self):
"""Delete workflow's :attr:`settings_path`."""
if os.path.exists(self.settings_path):
os.unlink(self.settings_path)
- self.logger.debug('deleted : %r', self.settings_path)
+ self.logger.debug("deleted : %r", self.settings_path)
def reset(self):
"""Delete workflow settings, cache and data.
@@ -2636,30 +2704,29 @@ def reset(self):
def open_log(self):
"""Open :attr:`logfile` in default app (usually Console.app)."""
- subprocess.call(['open', self.logfile])
+ subprocess.call(["open", self.logfile]) # nosec
def open_cachedir(self):
"""Open the workflow's :attr:`cachedir` in Finder."""
- subprocess.call(['open', self.cachedir])
+ subprocess.call(["open", self.cachedir]) # nosec
def open_datadir(self):
"""Open the workflow's :attr:`datadir` in Finder."""
- subprocess.call(['open', self.datadir])
+ subprocess.call(["open", self.datadir]) # nosec
def open_workflowdir(self):
"""Open the workflow's :attr:`workflowdir` in Finder."""
- subprocess.call(['open', self.workflowdir])
+ subprocess.call(["open", self.workflowdir]) # nosec
def open_terminal(self):
"""Open a Terminal window at workflow's :attr:`workflowdir`."""
- subprocess.call(['open', '-a', 'Terminal',
- self.workflowdir])
+ subprocess.call(["open", "-a", "Terminal", self.workflowdir]) # nosec
def open_help(self):
"""Open :attr:`help_url` in default browser."""
- subprocess.call(['open', self.help_url])
+ subprocess.call(["open", self.help_url]) # nosec
- return 'Opening workflow help URL in browser'
+ return "Opening workflow help URL in browser"
####################################################################
# Helper methods
@@ -2695,8 +2762,8 @@ def decode(self, text, encoding=None, normalization=None):
"""
encoding = encoding or self._input_encoding
normalization = normalization or self._normalizsation
- if not isinstance(text, unicode):
- text = unicode(text, encoding)
+ if not isinstance(text, str):
+ text = str(text, encoding)
return unicodedata.normalize(normalization, text)
def fold_to_ascii(self, text):
@@ -2714,9 +2781,8 @@ def fold_to_ascii(self, text):
"""
if isascii(text):
return text
- text = ''.join([ASCII_REPLACEMENTS.get(c, c) for c in text])
- return unicode(unicodedata.normalize('NFKD',
- text).encode('ascii', 'ignore'))
+ text = "".join([ASCII_REPLACEMENTS.get(c, c) for c in text])
+ return unicodedata.normalize("NFKD", text).encode("ascii", "ignore").decode("ascii")
def dumbify_punctuation(self, text):
"""Convert non-ASCII punctuation to closest ASCII equivalent.
@@ -2736,7 +2802,7 @@ def dumbify_punctuation(self, text):
if isascii(text):
return text
- text = ''.join([DUMB_PUNCTUATION.get(c, c) for c in text])
+ text = "".join([DUMB_PUNCTUATION.get(c, c) for c in text])
return text
def _delete_directory_contents(self, dirpath, filter_func):
@@ -2758,12 +2824,13 @@ def _delete_directory_contents(self, dirpath, filter_func):
shutil.rmtree(path)
else:
os.unlink(path)
- self.logger.debug('deleted : %r', path)
+ self.logger.debug("deleted : %r", path)
def _load_info_plist(self):
"""Load workflow info from ``info.plist``."""
# info.plist should be in the directory above this one
- self._info = plistlib.readPlist(self.workflowfile('info.plist'))
+ with open(self.workflowfile("info.plist"), "rb") as file_obj:
+ self._info = plistlib.load(file_obj)
self._info_loaded = True
def _create(self, dirpath):
@@ -2803,16 +2870,15 @@ def _call_security(self, action, service, account, *args):
:rtype: `tuple` (`int`, ``unicode``)
"""
- cmd = ['security', action, '-s', service, '-a', account] + list(args)
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ cmd = ["security", action, "-s", service, "-a", account] + list(args)
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
if p.returncode == 44: # password does not exist
raise PasswordNotFound()
elif p.returncode == 45: # password already exists
raise PasswordExists()
elif p.returncode > 0:
- err = KeychainError('Unknown Keychain error : %s' % stdout)
+ err = KeychainError("Unknown Keychain error : %s" % stdout)
err.retcode = p.returncode
raise err
- return stdout.strip().decode('utf-8')
+ return stdout.strip().decode("utf-8")
\ No newline at end of file
diff --git a/workflow/workflow3.py b/workflow/workflow3.py
index a6c07c9..ce565c2 100644
--- a/workflow/workflow3.py
+++ b/workflow/workflow3.py
@@ -7,11 +7,11 @@
# Created on 2016-06-25
#
-"""An Alfred 3-only version of :class:`~workflow.Workflow`.
+"""An Alfred 3+ version of :class:`~workflow.Workflow`.
-:class:`~workflow.Workflow3` supports Alfred 3's new features, such as
+:class:`~workflow.Workflow3` supports new features, such as
setting :ref:`workflow-variables` and
-:class:`the more advanced modifiers <Modifier>` supported by Alfred 3.
+:class:`the more advanced modifiers <Modifier>` supported by Alfred 3+.
In order for the feedback mechanism to work correctly, it's important
to create :class:`Item3` and :class:`Modifier` objects via the
@@ -23,7 +23,6 @@
"""
-from __future__ import print_function, unicode_literals, absolute_import
import json
import os
@@ -50,12 +49,16 @@ class Variables(dict):
information.
Args:
- arg (unicode, optional): Main output/``{query}``.
+ arg (unicode or list, optional): Main output/``{query}``.
**variables: Workflow variables to set.
+ In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
+ :class:`list` or :class:`tuple`.
Attributes:
- arg (unicode): Output value (``{query}``).
+ arg (unicode or list): Output value (``{query}``).
+ In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
+ :class:`list` or :class:`tuple`.
config (dict): Configuration for downstream workflow element.
"""
@@ -68,23 +71,23 @@ def __init__(self, arg=None, **variables):
@property
def obj(self):
- """Return ``alfredworkflow`` `dict`."""
+ """``alfredworkflow`` :class:`dict`."""
o = {}
if self:
d2 = {}
- for k, v in self.items():
+ for k, v in list(self.items()):
d2[k] = v
- o['variables'] = d2
+ o["variables"] = d2
if self.config:
- o['config'] = self.config
+ o["config"] = self.config
if self.arg is not None:
- o['arg'] = self.arg
+ o["arg"] = self.arg
- return {'alfredworkflow': o}
+ return {"alfredworkflow": o}
- def __unicode__(self):
+ def __str__(self):
"""Convert to ``alfredworkflow`` JSON object.
Returns:
@@ -92,22 +95,13 @@ def __unicode__(self):
"""
if not self and not self.config:
- if self.arg:
+ if not self.arg:
+ return ""
+ if isinstance(self.arg, str):
return self.arg
- else:
- return u''
return json.dumps(self.obj)
- def __str__(self):
- """Convert to ``alfredworkflow`` JSON object.
-
- Returns:
- str: UTF-8 encoded ``alfredworkflow`` JSON object
-
- """
- return unicode(self).encode('utf-8')
-
class Modifier(object):
"""Modify :class:`Item3` arg/icon/variables when modifier key is pressed.
@@ -149,8 +143,9 @@ class Modifier(object):
"""
- def __init__(self, key, subtitle=None, arg=None, valid=None, icon=None,
- icontype=None):
+ def __init__(
+ self, key, subtitle=None, arg=None, valid=None, icon=None, icontype=None
+ ):
"""Create a new :class:`Modifier`.
Don't use this class directly (as it won't be associated with any
@@ -212,23 +207,23 @@ def obj(self):
o = {}
if self.subtitle is not None:
- o['subtitle'] = self.subtitle
+ o["subtitle"] = self.subtitle
if self.arg is not None:
- o['arg'] = self.arg
+ o["arg"] = self.arg
if self.valid is not None:
- o['valid'] = self.valid
+ o["valid"] = self.valid
if self.variables:
- o['variables'] = self.variables
+ o["variables"] = self.variables
if self.config:
- o['config'] = self.config
+ o["config"] = self.config
icon = self._icon()
if icon:
- o['icon'] = icon
+ o["icon"] = icon
return o
@@ -241,16 +236,16 @@ def _icon(self):
"""
icon = {}
if self.icon is not None:
- icon['path'] = self.icon
+ icon["path"] = self.icon
if self.icontype is not None:
- icon['type'] = self.icontype
+ icon["type"] = self.icontype
return icon
class Item3(object):
- """Represents a feedback item for Alfred 3.
+ """Represents a feedback item for Alfred 3+.
Generates Alfred-compliant JSON for a single item.
@@ -261,9 +256,22 @@ class Item3(object):
"""
- def __init__(self, title, subtitle='', arg=None, autocomplete=None,
- match=None, valid=False, uid=None, icon=None, icontype=None,
- type=None, largetext=None, copytext=None, quicklookurl=None):
+ def __init__(
+ self,
+ title,
+ subtitle="",
+ arg=None,
+ autocomplete=None,
+ match=None,
+ valid=False,
+ uid=None,
+ icon=None,
+ icontype=None,
+ type=None,
+ largetext=None,
+ copytext=None,
+ quicklookurl=None,
+ ):
"""Create a new :class:`Item3` object.
Use same arguments as for
@@ -314,8 +322,9 @@ def getvar(self, name, default=None):
"""
return self.variables.get(name, default)
- def add_modifier(self, key, subtitle=None, arg=None, valid=None, icon=None,
- icontype=None):
+ def add_modifier(
+ self, key, subtitle=None, arg=None, valid=None, icon=None, icontype=None
+ ):
"""Add alternative values for a modifier key.
Args:
@@ -328,6 +337,9 @@ def add_modifier(self, key, subtitle=None, arg=None, valid=None, icon=None,
:meth:`Workflow.add_item() `
for valid values.
+ In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
+ :class:`list` or :class:`tuple`.
+
Returns:
Modifier: Configured :class:`Modifier`.
@@ -350,50 +362,46 @@ def obj(self):
"""
# Required values
- o = {
- 'title': self.title,
- 'subtitle': self.subtitle,
- 'valid': self.valid,
- }
+ o = {"title": self.title, "subtitle": self.subtitle, "valid": self.valid}
# Optional values
if self.arg is not None:
- o['arg'] = self.arg
+ o["arg"] = self.arg
if self.autocomplete is not None:
- o['autocomplete'] = self.autocomplete
+ o["autocomplete"] = self.autocomplete
if self.match is not None:
- o['match'] = self.match
+ o["match"] = self.match
if self.uid is not None:
- o['uid'] = self.uid
+ o["uid"] = self.uid
if self.type is not None:
- o['type'] = self.type
+ o["type"] = self.type
if self.quicklookurl is not None:
- o['quicklookurl'] = self.quicklookurl
+ o["quicklookurl"] = self.quicklookurl
if self.variables:
- o['variables'] = self.variables
+ o["variables"] = self.variables
if self.config:
- o['config'] = self.config
+ o["config"] = self.config
# Largetype and copytext
text = self._text()
if text:
- o['text'] = text
+ o["text"] = text
icon = self._icon()
if icon:
- o['icon'] = icon
+ o["icon"] = icon
# Modifiers
mods = self._modifiers()
if mods:
- o['mods'] = mods
+ o["mods"] = mods
return o
@@ -406,10 +414,10 @@ def _icon(self):
"""
icon = {}
if self.icon is not None:
- icon['path'] = self.icon
+ icon["path"] = self.icon
if self.icontype is not None:
- icon['type'] = self.icontype
+ icon["type"] = self.icontype
return icon
@@ -422,10 +430,10 @@ def _text(self):
"""
text = {}
if self.largetext is not None:
- text['largetype'] = self.largetext
+ text["largetype"] = self.largetext
if self.copytext is not None:
- text['copy'] = self.copytext
+ text["copy"] = self.copytext
return text
@@ -438,7 +446,7 @@ def _modifiers(self):
"""
if self.modifiers:
mods = {}
- for k, mod in self.modifiers.items():
+ for k, mod in list(self.modifiers.items()):
mods[k] = mod.obj
return mods
@@ -447,7 +455,7 @@ def _modifiers(self):
class Workflow3(Workflow):
- """Workflow class that generates Alfred 3 feedback.
+ """Workflow class that generates Alfred 3+ feedback.
It is a subclass of :class:`~workflow.Workflow` and most of its
methods are documented there.
@@ -470,25 +478,27 @@ def __init__(self, **kwargs):
self.variables = {}
self._rerun = 0
# Get session ID from environment if present
- self._session_id = os.getenv('_WF_SESSION_ID') or None
+ self._session_id = os.getenv("_WF_SESSION_ID") or None
if self._session_id:
- self.setvar('_WF_SESSION_ID', self._session_id)
+ self.setvar("_WF_SESSION_ID", self._session_id)
@property
def _default_cachedir(self):
- """Alfred 3's default cache directory."""
+ """Alfred 4's default cache directory."""
return os.path.join(
os.path.expanduser(
- '~/Library/Caches/com.runningwithcrayons.Alfred-3/'
- 'Workflow Data/'),
- self.bundleid)
+ "~/Library/Caches/com.runningwithcrayons.Alfred/" "Workflow Data/"
+ ),
+ self.bundleid,
+ )
@property
def _default_datadir(self):
- """Alfred 3's default data directory."""
- return os.path.join(os.path.expanduser(
- '~/Library/Application Support/Alfred 3/Workflow Data/'),
- self.bundleid)
+ """Alfred 4's default data directory."""
+ return os.path.join(
+ os.path.expanduser("~/Library/Application Support/Alfred/Workflow Data/"),
+ self.bundleid,
+ )
@property
def rerun(self):
@@ -517,8 +527,9 @@ def session_id(self):
"""
if not self._session_id:
from uuid import uuid4
+
self._session_id = uuid4().hex
- self.setvar('_WF_SESSION_ID', self._session_id)
+ self.setvar("_WF_SESSION_ID", self._session_id)
return self._session_id
@@ -541,9 +552,11 @@ def setvar(self, name, value, persist=False):
self.variables[name] = value
if persist:
from .util import set_config
+
set_config(name, value, self.bundleid)
- self.logger.debug('saved variable %r with value %r to info.plist',
- name, value)
+ self.logger.debug(
+ "saved variable %r with value %r to info.plist", name, value
+ )
def getvar(self, name, default=None):
"""Return value of workflow variable for ``name`` or ``default``.
@@ -558,9 +571,22 @@ def getvar(self, name, default=None):
"""
return self.variables.get(name, default)
- def add_item(self, title, subtitle='', arg=None, autocomplete=None,
- valid=False, uid=None, icon=None, icontype=None, type=None,
- largetext=None, copytext=None, quicklookurl=None, match=None):
+ def add_item(
+ self,
+ title,
+ subtitle="",
+ arg=None,
+ autocomplete=None,
+ valid=False,
+ uid=None,
+ icon=None,
+ icontype=None,
+ type=None,
+ largetext=None,
+ copytext=None,
+ quicklookurl=None,
+ match=None,
+ ):
"""Add an item to be output to Alfred.
Args:
@@ -568,6 +594,9 @@ def add_item(self, title, subtitle='', arg=None, autocomplete=None,
turned on for your Script Filter, Alfred (version 3.5 and
above) will filter against this field, not ``title``.
+ In Alfred 4.1+ and Alfred-Workflow 1.40+, ``arg`` may also be a
+ :class:`list` or :class:`tuple`.
+
See :meth:`Workflow.add_item() ` for
the main documentation and other parameters.
@@ -579,9 +608,21 @@ def add_item(self, title, subtitle='', arg=None, autocomplete=None,
Item3: Alfred feedback item.
"""
- item = self.item_class(title, subtitle, arg, autocomplete,
- match, valid, uid, icon, icontype, type,
- largetext, copytext, quicklookurl)
+ item = self.item_class(
+ title,
+ subtitle,
+ arg,
+ autocomplete,
+ match,
+ valid,
+ uid,
+ icon,
+ icontype,
+ type,
+ largetext,
+ copytext,
+ quicklookurl,
+ )
# Add variables to child item
item.variables.update(self.variables)
@@ -592,7 +633,7 @@ def add_item(self, title, subtitle='', arg=None, autocomplete=None,
@property
def _session_prefix(self):
"""Filename prefix for current session."""
- return '_wfsess-{0}-'.format(self.session_id)
+ return "_wfsess-{0}-".format(self.session_id)
def _mk_session_name(self, name):
"""New cache name/key based on session ID."""
@@ -662,11 +703,13 @@ def clear_session_cache(self, current=False):
current session.
"""
+
def _is_session_file(filename):
if current:
- return filename.startswith('_wfsess-')
- return filename.startswith('_wfsess-') \
- and not filename.startswith(self._session_prefix)
+ return filename.startswith("_wfsess-")
+ return filename.startswith("_wfsess-") and not filename.startswith(
+ self._session_prefix
+ )
self.clear_cache(_is_session_file)
@@ -682,14 +725,14 @@ def obj(self):
for item in self._items:
items.append(item.obj)
- o = {'items': items}
+ o = {"items": items}
if self.variables:
- o['variables'] = self.variables
+ o["variables"] = self.variables
if self.rerun:
- o['rerun'] = self.rerun
+ o["rerun"] = self.rerun
return o
- def warn_empty(self, title, subtitle=u'', icon=None):
+ def warn_empty(self, title, subtitle="", icon=None):
"""Add a warning to feedback if there are no items.
.. versionadded:: 1.31
@@ -707,6 +750,7 @@ def warn_empty(self, title, subtitle=u'', icon=None):
Returns:
Item3: Newly-created item.
+
"""
if len(self._items):
return
@@ -716,5 +760,8 @@ def warn_empty(self, title, subtitle=u'', icon=None):
def send_feedback(self):
"""Print stored items to console/Alfred as JSON."""
- json.dump(self.obj, sys.stdout)
- sys.stdout.flush()
+ if self.debugging:
+ json.dump(self.obj, sys.stdout, indent=2, separators=(",", ": "))
+ else:
+ json.dump(self.obj, sys.stdout)
+ sys.stdout.flush()
\ No newline at end of file