Fix deprecation warnings #38

Merged · 18 commits · Aug 10, 2018
2 changes: 1 addition & 1 deletion setup.cfg
@@ -4,7 +4,7 @@ ignore =
bootstrap.py

[flake8]
ignore = C901,N801,N802,N803,N805,N806,N812,E301
ignore = E301
exclude = bootstrap.py

[bdist_wheel]
21 changes: 11 additions & 10 deletions src/Products/PluginIndexes/BooleanIndex/BooleanIndex.py
@@ -46,7 +46,7 @@ class BooleanIndex(UnIndex):
has a roughly equal 50/50 split.
"""

meta_type = "BooleanIndex"
meta_type = 'BooleanIndex'

manage_options = (
{'label': 'Settings',
@@ -55,7 +55,7 @@ class BooleanIndex(UnIndex):
'action': 'manage_browse'},
)

query_options = ["query"]
query_options = ['query']

manage = manage_main = DTMLFile('dtml/manageBooleanIndex', globals())
manage_main._setName('manage_main')
@@ -136,12 +136,13 @@ def removeForwardIndexEntry(self, entry, documentId, check=True):
except ConflictError:
raise
except Exception:
LOG.exception(
'%s: unindex_object could not remove documentId %s '
'from index %s. This should not happen.' % (
self.__class__.__name__,
str(documentId),
str(self.id)))
LOG.exception('%(context)s: unindex_object could not '
'remove documentId %(doc_id)s from '
'index %(index)r. This should not '
'happen.', dict(
context=self.__class__.__name__,
doc_id=documentId,
index=self.id))
elif check:
# is the index (after removing the current entry) larger than
# 60% of the total length? than switch the indexed value
@@ -174,7 +175,7 @@ def _index_object(self, documentId, obj, threshold=None, attr=''):
raise
except Exception:
LOG.error('Should not happen: oldDatum was there, now '
'its not, for document with id %s' %
'its not, for document with id %s',
documentId)

if datum is not _marker:
@@ -203,7 +204,7 @@ def unindex_object(self, documentId):
raise
except Exception:
LOG.debug('Attempt to unindex nonexistent document'
' with id %s' % documentId, exc_info=True)
' with id %s', documentId, exc_info=True)

def query_index(self, record, resultset=None):
index = self._index
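As background for the logging changes in this file: passing the arguments to the logging call (instead of pre-formatting with `%`) defers string interpolation until a handler actually emits the record, and a single mapping argument enables the `%(name)s` style seen above. A small standalone sketch with made-up values, not code from this repository:

```python
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('example')

doc_id, index_id = 42, 'my_index'

# Pre-formatted: the message string is always built, even if DEBUG is disabled.
log.debug('could not remove documentId %s from index %s' % (doc_id, index_id))

# Lazy: interpolation only happens if the record is actually emitted.
log.debug('could not remove documentId %s from index %s', doc_id, index_id)

# A single dict argument supports named %(key)s placeholders, as in the diff.
log.debug('%(context)s: could not remove documentId %(doc_id)s '
          'from index %(index)r',
          dict(context='BooleanIndex', doc_id=doc_id, index=index_id))
```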
28 changes: 16 additions & 12 deletions src/Products/PluginIndexes/CompositeIndex/CompositeIndex.py
@@ -14,6 +14,7 @@
import logging
from itertools import product
from itertools import combinations
from six.moves import urllib
import time
import transaction

@@ -142,8 +143,8 @@ def rawAttributes(self):
return self._attributes

def __repr__(self):
return "<id: %s; metatype: %s; attributes: %s>" % \
(self.id, self.meta_type, self.attributes)
return ('<id: {0.id}; metatype: {0.meta_type}; '
'attributes: {0.attributes}>').format(self)
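
A side note on the `{0.id}` style used in the new `__repr__`: `str.format` can access attributes of a positional argument directly, so a single object can supply several fields. A tiny standalone illustration with a made-up class:

```python
class Dummy(object):
    id = 'comp01'
    meta_type = 'Component'
    attributes = ('foo', 'bar')


print('<id: {0.id}; metatype: {0.meta_type}; '
      'attributes: {0.attributes}>'.format(Dummy()))
# <id: comp01; metatype: Component; attributes: ('foo', 'bar')>
```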


@implementer(ITransposeQuery)
@@ -153,7 +154,7 @@ class CompositeIndex(KeywordIndex):
or sequences of items
"""

meta_type = "CompositeIndex"
meta_type = 'CompositeIndex'

manage_options = (
{'label': 'Settings',
@@ -162,7 +163,7 @@ class CompositeIndex(KeywordIndex):
'action': 'manage_browse'},
)

query_options = ("query", "operator")
query_options = ('query', 'operator')

def __init__(self, id, ignore_ex=None, call_methods=None,
extra=None, caller=None):
@@ -304,8 +305,8 @@ def make_query(self, query):
zc = aq_parent(aq_parent(self))
skip = zc.getProperty('skip_compositeindex', False)
if skip:
LOG.debug('%s: skip composite query build %r' %
(self.__class__.__name__, zc))
LOG.debug('%(context)s: skip composite query build '
'for %(zcatalog)r', dict(
context=self.__class__.__name__,
zcatalog=zc))
return query
except AttributeError:
pass
@@ -377,7 +380,7 @@ def addComponent(self, c_id, c_meta_type, c_attributes):
# Add a component object by 'c_id'.
if c_id in self._components:
raise KeyError('A component with this '
'name already exists: %s' % c_id)
'name already exists: {0}'.format(c_id))

self._components[c_id] = Component(c_id,
c_meta_type,
@@ -387,7 +390,7 @@ def delComponent(self, c_id):
def delComponent(self, c_id):
# Delete the component object specified by 'c_id'.
if c_id not in self._components:
raise KeyError('no such Component: %s' % c_id)
raise KeyError('no such Component: {0}'.format(c_id))

del self._components[c_id]

@@ -486,10 +489,11 @@ def manage_fastBuild(self, threshold=None, URL1=None,
ct = time.clock() - ct

if RESPONSE:
RESPONSE.redirect(URL1 + '/manage_main?'
'manage_tabs_message=ComponentIndex%%20fast%%20'
'reindexed%%20in%%20%.3f%%20'
'seconds%%20(%.3f%%20cpu)' % (tt, ct))
msg = ('ComponentIndex fast reindexed '
'in {0:.3f}s ({1:.3f}s cpu time)').format(tt, ct)
param = urllib.parse.urlencode({'manage_tabs_message': msg})

RESPONSE.redirect(URL1 + '/manage_main?' + param)

manage = manage_main = DTMLFile('dtml/manageCompositeIndex', globals())
manage_main._setName('manage_main')
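As background on the redirect change in `manage_fastBuild` above: `six.moves.urllib` exposes `urllib.parse.urlencode` on both Python 2 and 3, so the status message can be percent-encoded properly instead of hand-escaping `%%20` sequences. A small standalone sketch, with a made-up base URL standing in for `URL1`:

```python
from six.moves import urllib

msg = 'ComponentIndex fast reindexed in 0.123s (0.045s cpu time)'
param = urllib.parse.urlencode({'manage_tabs_message': msg})

# The base URL is illustrative only; in the index code URL1 plays this role.
print('http://localhost/app/manage_main?' + param)
# http://localhost/app/manage_main?manage_tabs_message=ComponentIndex+fast+reindexed+in+0.123s+%280.045s+cpu+time%29
```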
@@ -132,8 +132,8 @@ def defaultSearch(self, req, expectedValues=None, verbose=False):
break

if verbose and (index.id in req):
logger.info("index %s: %s hits in %3.2fms" %
(index.id, r and len(r) or 0, duration))
logger.info('index %s: %s hits in %3.2fms',
index.id, r and len(r) or 0, duration)

if not rs:
return set()
@@ -148,7 +148,7 @@ def compositeSearch(self, req, expectedValues=None, verbose=False):
query = comp_index.make_query(req)

# catch successful?
self.assertTrue('comp01' in query)
self.assertIn('comp01', query)

return self.defaultSearch(query,
expectedValues=expectedValues,
@@ -168,8 +168,8 @@ def info(index):
n_obj = index.numObjects()
ratio = float(size) / float(n_obj)
logger.info('<id: %15s unique keys: '
'%3s length: %5s ratio: %6.3f pm>' %
(index.id, size, n_obj, ratio * 1000))
'%3s length: %5s ratio: %6.3f pm>',
index.id, size, n_obj, ratio * 1000)
return ratio

for index in self._indexes:
@@ -256,26 +256,27 @@ def profileSearch(query, warmup=False, verbose=False):
duration1 = (time() - st) * 1000

if verbose:
logger.info("atomic: %s hits in %3.2fms" %
(len(res1), duration1))
logger.info('atomic: %s hits in %3.2fms',
len(res1), duration1)

st = time()
res2 = self.compositeSearch(query, verbose=False)
duration2 = (time() - st) * 1000

if verbose:
logger.info("composite: %s hits in %3.2fms" %
(len(res2), duration2))
logger.info('composite: %s hits in %3.2fms',
len(res2), duration2)

if verbose:
logger.info('[composite/atomic] factor %3.2f' %
(duration1 / duration2,))
logger.info('[composite/atomic] factor %3.2f',
duration1 / duration2,)

if not warmup:
# if length of result is greater than zero composite
# search must be roughly faster than default search
if res1 and res2:
assert 0.5 * duration2 < duration1, (duration2, duration1)
self.assertLess(
0.5 * duration2, duration1, (duration2, duration1))

# is result identical?
self.assertEqual(len(res1), len(res2), '%s != %s for %s' %
@@ -285,7 +286,7 @@ def profileSearch(query, warmup=False, verbose=False):
for l in lengths:
self.clearIndexes()
logger.info('************************************\n'
'indexing %s objects' % l)
'indexing %s objects', l)

for i in range(l):
name = '%s' % i
@@ -299,14 +300,14 @@ def profileSearch(query, warmup=False, verbose=False):
logger.info('\nstart queries')

# warming up indexes
logger.info("warming up indexes")
logger.info('warming up indexes')
for name, query in queries:
profileSearch(query, warmup=True)

# in memory measure
logger.info("in memory measure")
logger.info('in memory measure')
for name, query in queries:
logger.info("\nquery: %s" % name)
logger.info('\nquery: %s', name)
profileSearch(query, verbose=True)

logger.info('\nqueries finished')
11 changes: 6 additions & 5 deletions src/Products/PluginIndexes/DateIndex/DateIndex.py
@@ -142,9 +142,9 @@ def index_object(self, documentId, obj, threshold=None):
except ConflictError:
raise
except Exception:
LOG.error("Should not happen: ConvertedDate was there,"
" now it's not, for document with id %s" %
documentId)
LOG.error('Should not happen: ConvertedDate was there,'
' now it\'s not, for document'
' with id %s', documentId)

if ConvertedDate is not _marker:
self.insertForwardIndexEntry(ConvertedDate, documentId)
@@ -193,8 +193,9 @@ def _convert(self, value, default=None):
if t_val > MAX32:
# t_val must be integer fitting in the 32bit range
raise OverflowError(
"%s is not within the range of indexable dates (index: %s)"
% (value, self.id))
('{0} is not within the range of'
' indexable dates (index: {1})'.format(
value, self.id)))
return t_val


34 changes: 17 additions & 17 deletions src/Products/PluginIndexes/DateRangeIndex/DateRangeIndex.py
@@ -61,7 +61,7 @@ class DateRangeIndex(UnIndex):

security = ClassSecurityInfo()

meta_type = "DateRangeIndex"
meta_type = 'DateRangeIndex'
query_options = ('query', )

manage_options = ({'label': 'Properties',
@@ -93,46 +93,46 @@ def __init__(self, id, since_field=None, until_field=None,
ceiling_value, precision_value)
self.clear()

security.declareProtected(view, 'getSinceField')
@security.protected(view)
Member

I like this change!

def getSinceField(self):
"""Get the name of the attribute indexed as start date.
"""
return self._since_field
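
Regarding the switch from `security.declareProtected(...)` to `@security.protected(view)` commented on above, here is a minimal sketch contrasting the two spellings. It is not code from this repository and assumes a recent AccessControl release where the decorator API is available:

```python
from AccessControl import ClassSecurityInfo
from AccessControl.class_init import InitializeClass
from AccessControl.Permissions import view


class ExampleIndex(object):
    security = ClassSecurityInfo()

    # Old spelling, replaced throughout this PR: a separate imperative
    # declaration that names the method as a string.
    security.declareProtected(view, 'getSinceFieldOld')

    def getSinceFieldOld(self):
        return getattr(self, '_since_field', None)

    # New spelling: the decorator attaches the same protection directly
    # to the method it decorates.
    @security.protected(view)
    def getSinceField(self):
        return getattr(self, '_since_field', None)


# Applies the collected security declarations, as Zope does at startup.
InitializeClass(ExampleIndex)
```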

security.declareProtected(view, 'getUntilField')
@security.protected(view)
def getUntilField(self):
"""Get the name of the attribute indexed as end date.
"""
return self._until_field

security.declareProtected(view, 'getFloorValue')
@security.protected(view)
def getFloorValue(self):
""" """
return self.floor_value

security.declareProtected(view, 'getCeilingValue')
@security.protected(view)
def getCeilingValue(self):
""" """
return self.ceiling_value

security.declareProtected(view, 'getPrecisionValue')
@security.protected(view)
def getPrecisionValue(self):
""" """
return self.precision_value

manage_indexProperties = DTMLFile('manageDateRangeIndex', _dtmldir)

security.declareProtected(manage_zcatalog_indexes, 'manage_edit')
@security.protected(manage_zcatalog_indexes)
def manage_edit(self, since_field, until_field, floor_value,
ceiling_value, precision_value, REQUEST):
""" """
self._edit(since_field, until_field, floor_value, ceiling_value,
precision_value)
REQUEST['RESPONSE'].redirect('%s/manage_main'
'?manage_tabs_message=Updated'
% REQUEST.get('URL2'))
REQUEST['RESPONSE'].redirect('{0}/manage_main'
'?manage_tabs_message=Updated'.format(
REQUEST.get('URL2')))

security.declarePrivate('_edit')
@security.private
def _edit(self, since_field, until_field, floor_value=None,
ceiling_value=None, precision_value=None):
"""Update the fields used to compute the range.
Expand All @@ -146,7 +146,7 @@ def _edit(self, since_field, until_field, floor_value=None,
if precision_value not in (None, ''):
self.precision_value = int(precision_value)

security.declareProtected(manage_zcatalog_indexes, 'clear')
@security.protected(manage_zcatalog_indexes)
def clear(self):
"""Start over fresh."""
self._always = IITreeSet()
@@ -224,7 +224,7 @@ def uniqueValues(self, name=None, withLengths=0):
the form '(value, length)'.
"""
if name not in (self._since_field, self._until_field):
raise StopIteration
return
Member

Why is this change needed?

Member Author

I may have ignored backward compatibility, but see https://docs.python.org/3.3/reference/simple_stmts.html#the-return-statement

Member

TIL
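
For context on the `return` vs. `raise StopIteration` exchange: inside a generator function (which `uniqueValues` appears to be), a bare `return` ends iteration cleanly on every Python version, while an explicit `raise StopIteration` is converted into a `RuntimeError` once PEP 479 is in effect (the default from Python 3.7). A generic sketch, not code from this PR:

```python
def values_old(allowed):
    if not allowed:
        # Ends iteration on Python 2, but under PEP 479 this StopIteration
        # is turned into a RuntimeError instead of stopping the generator.
        raise StopIteration
    yield 1
    yield 2


def values_new(allowed):
    if not allowed:
        return  # a bare return simply ends the generator
    yield 1
    yield 2


print(list(values_new(False)))  # []
print(list(values_new(True)))   # [1, 2]
```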


if name == self._since_field:
sets = (self._since, self._since_only)
@@ -248,13 +248,13 @@ def getRequestCacheKey(self, record, resultset=None):
tid = str(term)

# unique index identifier
iid = '_%s_%s_%s' % (self.__class__.__name__,
self.id, self.getCounter())
iid = '_{0}_{1}_{2}'.format(self.__class__.__name__,
self.id, self.getCounter())
# record identifier
if resultset is None:
rid = '_%s' % (tid, )
rid = '_{0}'.format(tid)
else:
rid = '_inverse_%s' % (tid, )
rid = '_inverse_{0}'.format(tid)

return (iid, rid)
