|
# coding: utf8
"""
    Speed tests
    -----------

    Note: this file is not named test_*.py as it is not part of the
    test suite run by pytest.

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""
| 12 | + |
| 13 | + |
from __future__ import unicode_literals, division

# Standard library.
import sys
import os.path
import contextlib
import timeit
import functools

# Third-party reference implementation to benchmark against.
from cssutils import parseString

# Local package: the two tokenizer implementations and the CSS 2.1 parser.
from .. import tokenizer
from ..css21 import CSS21Parser
from ..parsing import remove_whitespace
| 27 | + |
| 28 | + |
# Benchmark tuning knobs.
CSS_REPEAT = 4      # the sample stylesheet is concatenated this many times
TIMEIT_REPEAT = 3   # timeit .repeat() count; the best of these is reported
TIMEIT_NUMBER = 20  # number of calls per repeat
| 32 | + |
| 33 | + |
def load_css():
    """Read the sample stylesheet from disk and return it as bytes,
    concatenated ``CSS_REPEAT`` times to get a large enough input.
    """
    path = os.path.join(
        os.path.dirname(__file__),
        '..', '..', 'docs', '_static', 'custom.css')
    with open(path, 'rb') as stylesheet:
        source = stylesheet.read()
    return b'\n'.join([source] * CSS_REPEAT)
| 39 | + |
| 40 | + |
# Pre-load the stylesheet bytes once at import time so that file I/O is
# not included in the timings measured below.
CSS = load_css()
| 43 | + |
| 44 | + |
@contextlib.contextmanager
def install_tokenizer(name):
    """Context manager that monkey-patches ``tokenizer.tokenize_flat``
    with the implementation attribute named *name*, and restores the
    previous implementation on exit (even on error).
    """
    previous = tokenizer.tokenize_flat
    try:
        tokenizer.tokenize_flat = getattr(tokenizer, name)
        yield
    finally:
        tokenizer.tokenize_flat = previous
| 53 | + |
| 54 | + |
def parse(tokenizer_name):
    """Parse the pre-loaded CSS with the named tokenizer implementation.

    Returns a list of ``(selector_text, declarations)`` pairs, where
    *declarations* is a list of ``(property_name, value_token_count)``,
    in a shape comparable across parser implementations.
    """
    with install_tokenizer(tokenizer_name):
        stylesheet = CSS21Parser().parse_stylesheet_bytes(CSS)
    return [
        (
            ''.join(token.as_css for token in rule.selector),
            [(declaration.name,
              len(list(remove_whitespace(declaration.value))))
             for declaration in rule.declarations],
        )
        for rule in stylesheet.rules]
| 66 | + |
# Benchmark entry points: the same parse with each tokenizer implementation.
parse_cython = functools.partial(parse, 'cython_tokenize_flat')
parse_python = functools.partial(parse, 'python_tokenize_flat')
| 69 | + |
| 70 | + |
def parse_cssutils():
    """Parse the same CSS with cssutils, producing data in the same
    ``(selector_text, declarations)`` shape as :func:`parse` so the two
    results can be compared directly.
    """
    stylesheet = parseString(CSS)
    summary = []
    for rule in stylesheet.cssRules:
        properties = [
            (prop.name, len(list(prop.propertyValue)))
            for prop in rule.style.getProperties(all=True)]
        summary.append((rule.selectorText, properties))
    return summary
| 81 | + |
| 82 | + |
def check_consistency():
    """Assert that all three parsers produce the same summary.

    Raises AssertionError if any implementation disagrees, or if the
    sample stylesheet parsed to zero rules (which would make the
    equality checks vacuous).
    """
    result = parse_cython()
    assert len(result) > 0
    assert parse_python() == result
    assert parse_cssutils() == result
    version = '.'.join(map(str, sys.version_info[:3]))
    print('Python{}, consistency OK.'.format(version))
| 92 | + |
| 93 | + |
def time(function):
    """Benchmark *function* with timeit and return the best-of-repeats
    duration as a whole number of milliseconds.
    """
    timings = timeit.Timer(function).repeat(TIMEIT_REPEAT, TIMEIT_NUMBER)
    return int(min(timings) * 1000)
| 98 | + |
| 99 | + |
def run():
    """Time each parser and print timings relative to the first one."""
    # (label, callable) pairs; the first entry is the reference timing.
    data_set = [
        ('tinycss + speedups ', parse_cython),
        ('tinycss WITHOUT speedups', parse_python),
#        ('cssutils ', parse_cssutils),
    ]
    # Time the reference implementation first ...
    label, function = data_set.pop(0)
    ref = time(function)
    print('{}{} ms'.format(label, ref))
    # ... then the rest, each reported as a slowdown factor vs. the reference.
    for label, function in data_set:
        result = time(function)
        print('{}{} ms{:.2f}x'.format(label, result, result / ref))
| 112 | + |
| 113 | + |
# Verify that all implementations agree before timing them.
if __name__ == '__main__':
    check_consistency()
    run()
0 commit comments