From 2463d0e13140d08437d43e720f40374290198bbc Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Sat, 1 Mar 2014 16:19:03 -0800 Subject: [PATCH 001/291] Prepare 0.10.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2556009..6bdd370 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.9.0", + "version": "0.10.0", "engines": { "atom": "*", "node": "*" From c1ddc26253f7ee258ebfeeaa7179e8098cb84ee0 Mon Sep 17 00:00:00 2001 From: Varun S Lingaraju Date: Sat, 1 Mar 2014 16:57:49 -0800 Subject: [PATCH 002/291] Fix defs snippet --- snippets/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index c84dc7f..83ba24c 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -22,7 +22,7 @@ 'body': 'class ${1:ClassName}(${2:object}):\n\t${3/.+/"""/}${3:docstring for $1}${3/.+/"""\\n/}${3/.+/\\t/}def __init__(self${4/([^,])?(.*)/(?1:, )/}${4:arg}):\n\t\t${5:super($1, self).__init__()}\n${4/(\\A\\s*,\\s*\\Z)|,?\\s*([A-Za-z_][a-zA-Z0-9_]*)\\s*(=[^,]*)?(,\\s*|$)/(?2:\\t\\tself.$2 = $2\\n)/g}\t\t$0' 'New Method': 'prefix': 'defs' - 'body': 'def ${1:mname}(self${2/([^,])?.*/(?1:, )/}${2:arg}):\n\t${3:pass}' + 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' 'New Property': 'prefix': 'property' 'body': 'def ${1:foo}():\n doc = "${2:The $1 property.}"\n def fget(self):\n ${3:return self._$1}\n def fset(self, value):\n ${4:self._$1 = value}\n def fdel(self):\n ${5:del self._$1}\n return locals()\n$1 = property(**$1())$0' From be8702e46157be7a9ca2fe92cb63759e998452a3 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Sat, 1 Mar 2014 17:02:25 -0800 Subject: [PATCH 003/291] Prepare 0.11.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6bdd370..76b67fd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.10.0", + "version": "0.11.0", "engines": { "atom": "*", "node": "*" From 18355c60e346e572f1c722ad9e75572c4afbf199 Mon Sep 17 00:00:00 2001 From: Ankush Agarwal Date: Tue, 4 Mar 2014 11:42:53 -0800 Subject: [PATCH 004/291] Add snippet for defining functions The 'defs' snippet defines only methods. We need a simple 'def' snippet which expands to a simple one argument function. 
--- snippets/language-python.cson | 3 +++ 1 file changed, 3 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 53fdda0..db6c2a8 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -26,6 +26,9 @@ 'New Method': 'prefix': 'defs' 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' + 'New Function': + 'prefix': 'def' + 'body': 'def ${1:fname}(${2:arg}):\n\t${3:pass}' 'New Property': 'prefix': 'property' 'body': 'def ${1:foo}():\n doc = "${2:The $1 property.}"\n def fget(self):\n ${3:return self._$1}\n def fset(self, value):\n ${4:self._$1 = value}\n def fdel(self):\n ${5:del self._$1}\n return locals()\n$1 = property(**$1())$0' From 2b7290e79733131ff8d3873719d06aaea3dc350f Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Wed, 5 Mar 2014 17:44:40 -0800 Subject: [PATCH 005/291] Prepare 0.12.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 76b67fd..3c1746c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.11.0", + "version": "0.12.0", "engines": { "atom": "*", "node": "*" From c9bcd8e6667717f786a55f52bbe0f5316dd014f0 Mon Sep 17 00:00:00 2001 From: Varun S Lingaraju Date: Fri, 21 Mar 2014 11:20:25 -0700 Subject: [PATCH 006/291] Add snippet for pdb usage --- snippets/language-python.cson | 3 +++ 1 file changed, 3 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index db6c2a8..5b24fea 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -62,6 +62,9 @@ 'Dictionary Comprehension': 'prefix': 'dc' 'body': '{${1:key}: ${2:value} for ${3:key}, ${4:value} in ${5:variable}}' + 'PDB set trace': + 'prefix': 'pdb' + 'body': 'import pdb; pdb.set_trace()\n' '__magic__': 'prefix': '__' 'body': '__${1:init}__' From 2756f12a645373650f4e1024852e6ae5b3574142 Mon Sep 17 00:00:00 2001 From: Mathias Bynens Date: Fri, 21 Mar 2014 09:43:00 +0100 Subject: [PATCH 007/291] Shorten the `enc` and `env` snippets --- snippets/language-python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index db6c2a8..93474fc 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -2,9 +2,9 @@ '#!/usr/bin/env python': 'prefix': 'env' 'body': '#!/usr/bin/env python\n' - '# -*- coding: utf-8 -*-': + '# coding=utf-8': 'prefix': 'enc' - 'body': '# -*- coding: utf-8 -*-\n' + 'body': '# coding=utf-8\n' 'Assert Equal': 'prefix': 'ase' 'body': 'self.assertEqual(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' From b4f7ff3a51da9d503f7b867b09d9935ccf77b2a1 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Thu, 27 Mar 2014 10:25:12 -0700 Subject: [PATCH 008/291] Prepare 0.13.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3c1746c..48c3d75 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.12.0", + "version": "0.13.0", "engines": { "atom": "*", "node": "*" From 5ea2161f2532d36e91bada13ee295fa25bd45ee4 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Wed, 2 Apr 2014 14:15:25 -0700 Subject: [PATCH 009/291] Remove unused properties --- grammars/python.cson | 2 -- 1 file changed, 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 5e284f9..d420260 100644 --- a/grammars/python.cson +++ 
b/grammars/python.cson @@ -1,5 +1,3 @@ -'bundleUUID': 'E3BADC20-6B0E-11D9-9DC9-000D93589AF6' -'comment': '\n\ttodo:\n\t\tlist comprehension / generator comprehension scope.\n\t\t\n\t' 'fileTypes': [ 'py' 'rpy' From cb0294cb0a916bc6b5626a9f06196621185292cb Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Wed, 2 Apr 2014 14:15:40 -0700 Subject: [PATCH 010/291] Move scopeName to the top --- grammars/python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index d420260..9a4c32d 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1,3 +1,5 @@ +'scopeName': 'source.python' +'name': 'Python' 'fileTypes': [ 'py' 'rpy' @@ -11,7 +13,6 @@ 'gypi' ] 'firstLineMatch': '^#!/.*\\bpython\\b' -'name': 'Python' 'patterns': [ { 'begin': '(^[ \\t]+)?(?=#)' @@ -1607,4 +1608,3 @@ 'include': '#string_quoted_single' } ] -'scopeName': 'source.python' From 247baf90f908807186b399e37ea0e9e7766eb3fb Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Wed, 2 Apr 2014 14:15:45 -0700 Subject: [PATCH 011/291] Prepare 0.14.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 48c3d75..7850fb6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.13.0", + "version": "0.14.0", "engines": { "atom": "*", "node": "*" From 96be4de8a389f316fb09f78ad0e1e3ecab7c6bec Mon Sep 17 00:00:00 2001 From: Arturo Fernandez Date: Mon, 7 Apr 2014 20:49:48 +0200 Subject: [PATCH 012/291] Adding ipdb snippet --- snippets/language-python.cson | 3 +++ 1 file changed, 3 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 5ebccef..a18aace 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -65,6 +65,9 @@ 'PDB set trace': 'prefix': 'pdb' 'body': 'import pdb; pdb.set_trace()\n' + 'iPDB set trace': + 'prefix': 'ipdb' + 'body': 'import ipdb; ipdb.set_trace()\n' '__magic__': 'prefix': '__' 'body': '__${1:init}__' From 9fb3dd8da9e167685f40bcb691568721d30e8c73 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 7 Apr 2014 11:53:56 -0700 Subject: [PATCH 013/291] Prepare 0.15.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 7850fb6..d85b331 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.14.0", + "version": "0.15.0", "engines": { "atom": "*", "node": "*" From 456d35fb80ed551b0bd7f1a45a53299f306c9114 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Fri, 16 May 2014 10:36:25 -0700 Subject: [PATCH 014/291] Properly match python length modifiers Previously h and l were incorrectly matched to include a trailing % Closes atom/atom#2262 --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 9a4c32d..7664c78 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -635,7 +635,7 @@ 'match': '(?x)\\b(\n\t\t\t\tbasestring|bool|buffer|bytearray|bytes|complex|dict|float|frozenset|int|\n\t\t\t\tlist|long|memoryview|object|range|set|slice|str|tuple|unicode|xrange\n\t\t\t)\\b' 'name': 'support.type.python' 'constant_placeholder': - 'match': '(?i:(%(\\([a-z_]+\\))?#?0?\\-?[ ]?\\+?([0-9]*|\\*)(\\.([0-9]*|\\*))?[hL]?[a-z%])|(\\{([!\\[\\].:\\w ]+)?\\}))' + 'match': '(?i:(%(\\([a-z_]+\\))?#?0?\\-?[ ]?\\+?([0-9]*|\\*)(\\.([0-9]*|\\*))?([hL][a-z]|[a-z%]))|(\\{([!\\[\\].:\\w 
]+)?\\}))' 'name': 'constant.other.placeholder.python' 'docstrings': 'patterns': [ From c7f989b1fc685d17d0f5570f851f0cd66d02a22e Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Fri, 16 May 2014 10:38:30 -0700 Subject: [PATCH 015/291] Prepare 0.16.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d85b331..c14a61c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.15.0", + "version": "0.16.0", "engines": { "atom": "*", "node": "*" From 3a106c5703ee731f945f2e153ed4f0843ac59fae Mon Sep 17 00:00:00 2001 From: Justin Noah Date: Sat, 17 May 2014 01:17:20 -0700 Subject: [PATCH 016/291] Break down flow into categories Add condition, exception, repeat, and statement as subcategories to keyword.control.flow to allow for some vim-like theme features. --- grammars/python.cson | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 7664c78..667aeab 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -87,8 +87,15 @@ } { 'comment': 'keywords that delimit flow blocks or alter flow from within a block' - 'match': '\\b(elif|else|except|finally|for|if|try|while|with|break|continue|pass|raise|return|yield)\\b' - 'name': 'keyword.control.flow.python' + '1': + 'name': 'keyword.control.flow.condition.python' + '2': + 'name': 'keyword.control.flow.exception.python' + '3': + 'name': 'keyword.control.flow.repeat.python' + '4': + 'name': 'keyword.control.flow.statement.python' + 'match': '\\b(elif|else|if)|(except|finally|try|raise)|(for|while)|(with|break|continue|pass|return|yield)\\b' } { 'comment': 'keyword operators that evaluate to True or False' From 87205b39f09e91742becfde5db5c8108f11b251c Mon Sep 17 00:00:00 2001 From: Justin Noah Date: Sat, 17 May 2014 01:26:36 -0700 Subject: [PATCH 017/291] Rename a keyword.control.flow subcat to match vim condition -> conditional --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 667aeab..f2eeb9b 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -88,7 +88,7 @@ { 'comment': 'keywords that delimit flow blocks or alter flow from within a block' '1': - 'name': 'keyword.control.flow.condition.python' + 'name': 'keyword.control.flow.conditional.python' '2': 'name': 'keyword.control.flow.exception.python' '3': From b005313b35e6853e367aa86737328403470a8c58 Mon Sep 17 00:00:00 2001 From: Justin Noah Date: Mon, 19 May 2014 12:39:25 -0700 Subject: [PATCH 018/291] separate match patterns --- grammars/python.cson | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index f2eeb9b..965081d 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -86,16 +86,24 @@ 'match': '\\b(?:(import)|(from))\\b' } { - 'comment': 'keywords that delimit flow blocks or alter flow from within a block' - '1': - 'name': 'keyword.control.flow.conditional.python' - '2': - 'name': 'keyword.control.flow.exception.python' - '3': - 'name': 'keyword.control.flow.repeat.python' - '4': - 'name': 'keyword.control.flow.statement.python' - 'match': '\\b(elif|else|if)|(except|finally|try|raise)|(for|while)|(with|break|continue|pass|return|yield)\\b' + 'comment': 'keywords that delimit flow conditionals' + 'name': 'keyword.control.flow.conditional.pytho' + 'match': '\\b(if|elif|else)\\b' + } + { + 'comment': 
'keywords that delimit an exception' + 'name': 'keyword.control.flow.exception.pytho' + 'match': '\\b(except|finally|try|raise)\\b' + } + { + 'comment': 'keywords that delimit loops' + 'name': 'keyword.control.flow.repeat.python' + 'match': '\\b(for|while)\\b' + } + { + 'comment': 'keywords that alter flow from within a block' + 'name': 'keyword.control.flow.statement.python' + 'match': '\\b(with|break|continue|pass|return|yield)\\b' } { 'comment': 'keyword operators that evaluate to True or False' From d7509038b5bb3320a6673881d12a883da77eb4b2 Mon Sep 17 00:00:00 2001 From: Justin Noah Date: Mon, 19 May 2014 13:03:41 -0700 Subject: [PATCH 019/291] Add missing 'n' --- grammars/python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 965081d..2ba613b 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -87,12 +87,12 @@ } { 'comment': 'keywords that delimit flow conditionals' - 'name': 'keyword.control.flow.conditional.pytho' + 'name': 'keyword.control.flow.conditional.python' 'match': '\\b(if|elif|else)\\b' } { 'comment': 'keywords that delimit an exception' - 'name': 'keyword.control.flow.exception.pytho' + 'name': 'keyword.control.flow.exception.python' 'match': '\\b(except|finally|try|raise)\\b' } { From 7961cd4a8510f07165fd78552162a1cff79696c1 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 19 May 2014 13:24:50 -0700 Subject: [PATCH 020/291] Prepare 0.17.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c14a61c..c349e57 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.16.0", + "version": "0.17.0", "engines": { "atom": "*", "node": "*" From f8d642136622b51c6cf0ff05aef9f3a4a1244e48 Mon Sep 17 00:00:00 2001 From: Justin Noah Date: Tue, 3 Jun 2014 21:47:38 +0000 Subject: [PATCH 021/291] align flow control with other languages --- grammars/python.cson | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 2ba613b..ce38718 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -87,22 +87,22 @@ } { 'comment': 'keywords that delimit flow conditionals' - 'name': 'keyword.control.flow.conditional.python' + 'name': 'keyword.control.conditional.python' 'match': '\\b(if|elif|else)\\b' } { 'comment': 'keywords that delimit an exception' - 'name': 'keyword.control.flow.exception.python' + 'name': 'keyword.control.exception.python' 'match': '\\b(except|finally|try|raise)\\b' } { 'comment': 'keywords that delimit loops' - 'name': 'keyword.control.flow.repeat.python' + 'name': 'keyword.control.repeat.python' 'match': '\\b(for|while)\\b' } { 'comment': 'keywords that alter flow from within a block' - 'name': 'keyword.control.flow.statement.python' + 'name': 'keyword.control.statement.python' 'match': '\\b(with|break|continue|pass|return|yield)\\b' } { From e8976b0755167b19b61e54ffb7b6f03fd706c38c Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Wed, 4 Jun 2014 14:49:29 -0700 Subject: [PATCH 022/291] Prepare 0.18.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c349e57..c81e1df 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.17.0", + "version": "0.18.0", "engines": { "atom": "*", "node": "*" From fd2147af31eea97e361eeae085b3cbc23492e957 Mon Sep 17 00:00:00 2001 From: 
probablycorey Date: Wed, 25 Jun 2014 14:49:34 -0700 Subject: [PATCH 023/291] Add multiline string spec --- spec/python-spec.coffee | 42 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 spec/python-spec.coffee diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee new file mode 100644 index 0000000..7633450 --- /dev/null +++ b/spec/python-spec.coffee @@ -0,0 +1,42 @@ +describe "Python grammar", -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-python") + + runs -> + grammar = atom.syntax.grammarForScopeName("source.python") + + it "parses the grammar", -> + expect(grammar).toBeDefined() + expect(grammar.scopeName).toBe "source.python" + + it "tokenizes multi-line strings", -> + tokens = grammar.tokenizeLines('"1\\\n2"') + + # Line 0 + expect(tokens[0][0]).toEqual + value: '"' + scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + + expect(tokens[0][1]).toEqual + value: '1' + scopes: ['source.python', 'string.quoted.double.single-line.python'] + + expect(tokens[0][2]).toEqual + value: '\\' + scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.newline.python'] + + expect(tokens[0][3]).not.toBeDefined() + + # Line 1 + expect(tokens[1][0]).toEqual + value: '2' + scopes: ['source.python', 'string.quoted.double.single-line.python'] + + expect(tokens[1][1]).toEqual + value: '"' + scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + expect(tokens[1][2]).not.toBeDefined() From 74bb81ed8dcd279fc783f06b7a927571e5838a16 Mon Sep 17 00:00:00 2001 From: probablycorey Date: Wed, 25 Jun 2014 14:53:01 -0700 Subject: [PATCH 024/291] :lipstick: --- spec/python-spec.coffee | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 7633450..18fc213 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -16,27 +16,22 @@ describe "Python grammar", -> tokens = grammar.tokenizeLines('"1\\\n2"') # Line 0 - expect(tokens[0][0]).toEqual - value: '"' - scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][0].value).toBe '"' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][1]).toEqual - value: '1' - scopes: ['source.python', 'string.quoted.double.single-line.python'] + expect(tokens[0][1].value).toBe '1' + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python'] - expect(tokens[0][2]).toEqual - value: '\\' - scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.newline.python'] + expect(tokens[0][2].value).toBe '\\' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.newline.python'] expect(tokens[0][3]).not.toBeDefined() # Line 1 - expect(tokens[1][0]).toEqual - value: '2' - scopes: ['source.python', 'string.quoted.double.single-line.python'] + expect(tokens[1][0].value).toBe '2' + expect(tokens[1][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python'] - expect(tokens[1][1]).toEqual - value: '"' - scopes: ['source.python', 'string.quoted.double.single-line.python', 
'punctuation.definition.string.end.python'] + expect(tokens[1][1].value).toBe '"' + expect(tokens[1][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] expect(tokens[1][2]).not.toBeDefined() From 53be31020be039759799766380c9702b373f7ba4 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Wed, 13 Aug 2014 14:44:45 -0700 Subject: [PATCH 025/291] Add initial Travis configuration --- .travis.yml | 8 ++++++++ README.md | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..31be69d --- /dev/null +++ b/.travis.yml @@ -0,0 +1,8 @@ +language: objective-c + +notifications: + email: + on_success: never + on_failure: change + +script: 'curl -s https://raw.githubusercontent.com/atom/ci/master/build-package.sh | sh' diff --git a/README.md b/README.md index 1f661db..6e7daf9 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Python language support in Atom +# Python language support in Atom [![Build Status](https://travis-ci.org/atom/language-python.svg?branch=master)](https://travis-ci.org/atom/language-python) Adds syntax highlighting and snippets to Python files in Atom. From 4c0803020de631052c22dbb6e18551b37fa33e5b Mon Sep 17 00:00:00 2001 From: Peter Dave Hello Date: Thu, 21 Aug 2014 00:16:49 +0800 Subject: [PATCH 026/291] make CI build faster --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 31be69d..d73c8e2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,3 +6,6 @@ notifications: on_failure: change script: 'curl -s https://raw.githubusercontent.com/atom/ci/master/build-package.sh | sh' + +git: + depth: 10 From 713bd81bb6e36d5ad97bd9dce64a0cd5bff84342 Mon Sep 17 00:00:00 2001 From: syndg Date: Mon, 1 Sep 2014 00:25:54 +0300 Subject: [PATCH 027/291] Added more assert snippets in language-python.cson --- snippets/language-python.cson | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index a18aace..c66dd71 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -14,6 +14,30 @@ 'Assert Raises': 'prefix': 'asr' 'body': 'self.assertRaises(${1:exception}, ${2:callable})$0' + 'Assert True': + 'prefix': 'ast' + 'body': 'self.assertTrue(${1:actual}${2:, \'${3:message}\'})$0' + 'Assert False': + 'prefix': 'asf' + 'body': 'self.assertFalse(${1:actual}${2:, \'${3:message}\'})$0' + 'Assert Is': + 'prefix': 'asi' + 'body': 'self.assertIs(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' + 'Assert Is Not': + 'prefix': 'asint' + 'body': 'self.assertIsNot(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' + 'Assert Is None': + 'prefix': 'asino' + 'body': 'self.assertIsNone(${1:actual}${2:, \'${3:message}\'})$0' + 'Assert Is Not None': + 'prefix': 'asinno' + 'body': 'self.assertIsNotNone(${1:actual}${2:, \'${3:message}\'})$0' + 'Assert In': + 'prefix': 'asin' + 'body': 'self.assertIn(${1:needle}, ${2:haystack}${3:, \'${4:message}\'})$0' + 'Assert Not In': + 'prefix': 'asni' + 'body': 'self.assertNotIn(${1:needle}, ${2:haystack}${3:, \'${4:message}\'})$0' 'Assert': 'prefix': 'as' 'body': 'self.assert_(${1:boolean expression}${2:, \'${3:message}\'})$0' From 82986511f0ca2912afa41e923ad4aaf849e68a97 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Tue, 2 Sep 2014 08:55:32 -0700 Subject: [PATCH 028/291] Use multiple cursors in class snippet Closes #32 --- 
snippets/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index c66dd71..54ea5d1 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -46,7 +46,7 @@ 'body': 'self.fail(\'${1:message}\')$0' 'New Class': 'prefix': 'class' - 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for ClassName}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super(ClassName, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' + 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' 'New Method': 'prefix': 'defs' 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' From abb20e2e77b0802df0217f9858a0bbb391d66966 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Tue, 2 Sep 2014 08:55:51 -0700 Subject: [PATCH 029/291] Prepare 0.19.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c81e1df..c311ead 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.18.0", + "version": "0.19.0", "engines": { "atom": "*", "node": "*" From bfef6b8cffee8998953d5e11110faccf7ba28fca Mon Sep 17 00:00:00 2001 From: Dral Date: Mon, 13 Oct 2014 14:05:56 +0200 Subject: [PATCH 030/291] Highlight .wsgi files using this syntax Attempt to fix atom/atom#3807 --- grammars/python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/python.cson b/grammars/python.cson index ce38718..f6f6903 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -11,6 +11,7 @@ 'SConscript' 'gyp' 'gypi' + 'wsgi' ] 'firstLineMatch': '^#!/.*\\bpython\\b' 'patterns': [ From eab2b6239c1ea50ba8b5eba1027070f30e13d33f Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 13 Oct 2014 10:12:08 -0400 Subject: [PATCH 031/291] :lipstick: Sort file types --- grammars/python.cson | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index f6f6903..df8bf91 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1,16 +1,16 @@ 'scopeName': 'source.python' 'name': 'Python' 'fileTypes': [ + 'cpy' + 'gyp' + 'gypi' 'py' - 'rpy' 'pyw' - 'cpy' + 'rpy' + 'SConscript' 'SConstruct' 'Sconstruct' 'sconstruct' - 'SConscript' - 'gyp' - 'gypi' 'wsgi' ] 'firstLineMatch': '^#!/.*\\bpython\\b' From d15295f214f0b94be84b202ae470096d60191f87 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 13 Oct 2014 10:12:12 -0400 Subject: [PATCH 032/291] Prepare 0.20.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c311ead..de3e70c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.19.0", + "version": "0.20.0", "engines": { "atom": "*", "node": "*" From 47ac14ae8b4343c3f2785670a312dd0ec8a71187 Mon Sep 17 00:00:00 2001 From: Ben Ogle Date: Thu, 16 Oct 2014 13:20:03 -0700 Subject: [PATCH 033/291] Default tab length in python is 4 spaces --- scoped-properties/language-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/scoped-properties/language-python.cson b/scoped-properties/language-python.cson index b14da2d..68e85e7 100644 --- a/scoped-properties/language-python.cson +++ b/scoped-properties/language-python.cson @@ -1,5 +1,6 @@ '.source.python': 'editor': + 'tabLength': 4 'foldEndPattern': 
'^\\s*"""\\s*$' 'commentStart': '# ' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while)\\b.*:\\s*$' From 03501b6a8eab0341fbaeb188f6876c4dd8c74cb2 Mon Sep 17 00:00:00 2001 From: Ben Ogle Date: Thu, 16 Oct 2014 13:21:14 -0700 Subject: [PATCH 034/291] Prepare 0.21.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index de3e70c..9e1cf50 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.20.0", + "version": "0.21.0", "engines": { "atom": "*", "node": "*" From 35f85f6f10755f1e01d8b5cb8428592ae16972da Mon Sep 17 00:00:00 2001 From: Adam Roben Date: Wed, 5 Nov 2014 14:59:43 -0500 Subject: [PATCH 035/291] Add a grammar for Python Traceback (.pytb) files --- grammars/python traceback.cson | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 grammars/python traceback.cson diff --git a/grammars/python traceback.cson b/grammars/python traceback.cson new file mode 100644 index 0000000..a73e873 --- /dev/null +++ b/grammars/python traceback.cson @@ -0,0 +1,34 @@ +'scopeName': 'text.python.traceback' +'name': 'Python Traceback' +'fileTypes': [ + 'pytb' +] +'patterns': [ + { + 'match': '^ File ("[^"]+"), line (\\d+)(?:, in (.+))?$' + 'captures': { + '1': + 'name': 'string.python.traceback' + '2': + 'name': 'constant.numeric.python.traceback' + '3': + 'name': 'entity.name.function.python.traceback' + } + } + { + 'match': '^ (.+)$' + 'captures': + '1': + 'patterns': [ + 'include': 'source.python' + ] + } + { + 'match': '^([^\\s:]+):(?: (.+))?$' + 'captures': + '1': + 'name': 'entity.name.type.class.python.traceback' + '2': + 'name': 'string.python.traceback' + } +] From e399332ff0c85d81c43b15720652156d7514d821 Mon Sep 17 00:00:00 2001 From: Adam Roben Date: Wed, 5 Nov 2014 15:43:53 -0500 Subject: [PATCH 036/291] Rename python traceback.cson to python-traceback.cson --- grammars/{python traceback.cson => python-traceback.cson} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename grammars/{python traceback.cson => python-traceback.cson} (100%) diff --git a/grammars/python traceback.cson b/grammars/python-traceback.cson similarity index 100% rename from grammars/python traceback.cson rename to grammars/python-traceback.cson From acd4ffa0390739678b7bb2c2b3d633af085bc04e Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Wed, 5 Nov 2014 12:48:01 -0800 Subject: [PATCH 037/291] Prepare 0.22.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9e1cf50..17608eb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.21.0", + "version": "0.22.0", "engines": { "atom": "*", "node": "*" From b0b3ca86ffbaa80f32bcd406e04b2c82dd918c50 Mon Sep 17 00:00:00 2001 From: Adam Roben Date: Thu, 6 Nov 2014 11:15:33 -0500 Subject: [PATCH 038/291] Fix parsing of raw strings containing unclosed parentheses and brackets Strings like r'[' and r'(' were causing the rest of the document to be parsed as if it were inside the regex. Now we terminate the regex at the closing quote in all cases. 
This is a backport of https://github.com/textmate/python.tmbundle/commit/2567262452b389f7042ef3026e0443f6336d1735 --- grammars/python.cson | 128 ++++++++++++----------------------- spec/python-spec.coffee | 144 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 186 insertions(+), 86 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index df8bf91..10c4a5e 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -994,36 +994,23 @@ ] } { - 'begin': '([uU]r)(")' - 'beginCaptures': + 'captures': '1': 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'double-quoted raw string' - 'end': '((?<=")(")|")|(\\n)' - 'endCaptures': - '1': - 'name': 'punctuation.definition.string.end.python' - '2': - 'name': 'meta.empty-string.double.python' '3': - 'name': 'invalid.illegal.unclosed-string.python' + 'patterns': [ + 'include': '#constant_placeholder' + 'include': '#escaped_unicode_char' + 'include': '#escaped_char' + 'include': '#regular_expressions' + ] + '4': + 'name': 'punctuation.definition.string.end.python' + 'comment': 'double-quoted raw string' + 'match': '([uU]r)(")((?:[^"\\\\]|\\\\.)*)(")' 'name': 'string.quoted.double.single-line.unicode-raw-regex.python' - 'patterns': [ - { - 'include': '#constant_placeholder' - } - { - 'include': '#escaped_unicode_char' - } - { - 'include': '#escaped_char' - } - { - 'include': '#regular_expressions' - } - ] } { 'begin': '([uU]R)(")' @@ -1055,33 +1042,22 @@ ] } { - 'begin': '(r)(")' - 'beginCaptures': + 'captures': '1': 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'double-quoted raw string' - 'end': '((?<=")(")|")|(\\n)' - 'endCaptures': - '1': - 'name': 'punctuation.definition.string.end.python' - '2': - 'name': 'meta.empty-string.double.python' '3': - 'name': 'invalid.illegal.unclosed-string.python' + 'patterns': [ + 'include': '#constant_placeholder' + 'include': '#escaped_char' + 'include': '#regular_expressions' + ] + '4': + 'name': 'punctuation.definition.string.end.python' + 'comment': 'double-quoted raw string' + 'match': '(r)(")((?:[^"\\\\]|\\\\.)*)(")' 'name': 'string.quoted.double.single-line.raw-regex.python' - 'patterns': [ - { - 'include': '#constant_placeholder' - } - { - 'include': '#escaped_char' - } - { - 'include': '#regular_expressions' - } - ] } { 'begin': '(R)(")' @@ -1386,34 +1362,23 @@ ] } { - 'begin': '([uU]r)(\')' - 'beginCaptures': + 'captures': '1': 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'single quoted raw string' - 'end': '(\')|(\\n)' - 'endCaptures': - '1': + '3': + 'patterns': [ + 'include': '#constant_placeholder' + 'include': '#escaped_unicode_char' + 'include': '#escaped_char' + 'include': '#regular_expressions' + ] + '4': 'name': 'punctuation.definition.string.end.python' - '2': - 'name': 'invalid.illegal.unclosed-string.python' + 'comment': 'single quoted raw string' + 'match': '([uU]r)(\')((?:[^\'\\\\]|\\\\.)*)(\')' 'name': 'string.quoted.single.single-line.unicode-raw-regex.python' - 'patterns': [ - { - 'include': '#constant_placeholder' - } - { - 'include': '#escaped_unicode_char' - } - { - 'include': '#escaped_char' - } - { - 'include': '#regular_expressions' - } - ] } { 'begin': '([uU]R)(\')' @@ -1443,31 +1408,22 @@ ] } { - 'begin': '(r)(\')' - 'beginCaptures': + 'captures': '1': 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'single quoted 
raw string' - 'end': '(\')|(\\n)' - 'endCaptures': - '1': + '3': + 'patterns': [ + 'include': '#constant_placeholder' + 'include': '#escaped_char' + 'include': '#regular_expressions' + ] + '4': 'name': 'punctuation.definition.string.end.python' - '2': - 'name': 'invalid.illegal.unclosed-string.python' + 'comment': 'single quoted raw string' + 'match': '(r)(\')((?:[^\'\\\\]|\\\\.)*)(\')' 'name': 'string.quoted.single.single-line.raw-regex.python' - 'patterns': [ - { - 'include': '#constant_placeholder' - } - { - 'include': '#escaped_char' - } - { - 'include': '#regular_expressions' - } - ] } { 'begin': '(R)(\')' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 18fc213..4467286 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -35,3 +35,147 @@ describe "Python grammar", -> expect(tokens[1][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] expect(tokens[1][2]).not.toBeDefined() + + it "terminates a single-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines("r'(' #foo") + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '(' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][3].value).toBe "'" + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates a single-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines("r'[' #foo") + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '[' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].value).toBe "'" + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 
'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates a double-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines('r"(" #foo') + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '(' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][3].value).toBe '"' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates a double-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines('r"[" #foo') + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '[' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].value).toBe '"' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates a unicode single-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines("ur'(' #foo") + + expect(tokens[0][0].value).toBe 'ur' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '(' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][3].value).toBe "'" + 
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates a unicode single-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines("ur'[' #foo") + + expect(tokens[0][0].value).toBe 'ur' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '[' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].value).toBe "'" + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates a unicode double-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines('ur"(" #foo') + + expect(tokens[0][0].value).toBe 'ur' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '(' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][3].value).toBe '"' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates a unicode double-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines('ur"[" #foo') + + expect(tokens[0][0].value).toBe 'ur' + expect(tokens[0][0].scopes).toEqual ['source.python', 
'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][2].value).toBe '[' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].value).toBe '"' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][4].value).toBe ' ' + expect(tokens[0][4].scopes).toEqual ['source.python'] + expect(tokens[0][5].value).toBe '#' + expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][6].value).toBe 'foo' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] From c2120ec667049b07940a667ecbc02be4785fb714 Mon Sep 17 00:00:00 2001 From: Adam Roben Date: Thu, 6 Nov 2014 12:40:32 -0500 Subject: [PATCH 039/291] Make sure things other than regexes get parsed inside raw strings My CSON formatting was bad, which caused only the regular expression grammar to get parsed inside of raw strings. --- grammars/python.cson | 28 +++--- spec/python-spec.coffee | 192 ++++++++++++++++++++++------------------ 2 files changed, 118 insertions(+), 102 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 10c4a5e..c0757fa 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1001,10 +1001,10 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - 'include': '#constant_placeholder' - 'include': '#escaped_unicode_char' - 'include': '#escaped_char' - 'include': '#regular_expressions' + {'include': '#constant_placeholder'} + {'include': '#escaped_unicode_char'} + {'include': '#escaped_char'} + {'include': '#regular_expressions'} ] '4': 'name': 'punctuation.definition.string.end.python' @@ -1049,9 +1049,9 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - 'include': '#constant_placeholder' - 'include': '#escaped_char' - 'include': '#regular_expressions' + {'include': '#constant_placeholder'} + {'include': '#escaped_char'} + {'include': '#regular_expressions'} ] '4': 'name': 'punctuation.definition.string.end.python' @@ -1369,10 +1369,10 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - 'include': '#constant_placeholder' - 'include': '#escaped_unicode_char' - 'include': '#escaped_char' - 'include': '#regular_expressions' + {'include': '#constant_placeholder'} + {'include': '#escaped_unicode_char'} + {'include': '#escaped_char'} + {'include': '#regular_expressions'} ] '4': 'name': 'punctuation.definition.string.end.python' @@ -1415,9 +1415,9 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - 'include': '#constant_placeholder' - 'include': '#escaped_char' - 'include': '#regular_expressions' + {'include': '#constant_placeholder'} + {'include': '#escaped_char'} + {'include': '#regular_expressions'} ] '4': 'name': 'punctuation.definition.string.end.python' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 4467286..28f1b5d 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -37,145 +37,161 @@ describe 
"Python grammar", -> expect(tokens[1][2]).not.toBeDefined() it "terminates a single-quoted raw string containing opening parenthesis at closing quote", -> - tokens = grammar.tokenizeLines("r'(' #foo") + tokens = grammar.tokenizeLines("r'%d(' #foo") expect(tokens[0][0].value).toBe 'r' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe "'" expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '(' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] - expect(tokens[0][3].value).toBe "'" - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] - expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] it "terminates a single-quoted raw string containing opening bracket at closing quote", -> - tokens = grammar.tokenizeLines("r'[' #foo") + tokens = grammar.tokenizeLines("r'%d[' #foo") expect(tokens[0][0].value).toBe 'r' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe "'" expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '[' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] - expect(tokens[0][3].value).toBe "'" - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] - expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual ['source.python', 
'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] it "terminates a double-quoted raw string containing opening parenthesis at closing quote", -> - tokens = grammar.tokenizeLines('r"(" #foo') + tokens = grammar.tokenizeLines('r"%d(" #foo') expect(tokens[0][0].value).toBe 'r' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe '"' expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '(' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] - expect(tokens[0][3].value).toBe '"' - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] - expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] it 
"terminates a double-quoted raw string containing opening bracket at closing quote", -> - tokens = grammar.tokenizeLines('r"[" #foo') + tokens = grammar.tokenizeLines('r"%d[" #foo') expect(tokens[0][0].value).toBe 'r' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe '"' expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '[' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] - expect(tokens[0][3].value).toBe '"' - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] - expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] it "terminates a unicode single-quoted raw string containing opening parenthesis at closing quote", -> - tokens = grammar.tokenizeLines("ur'(' #foo") + tokens = grammar.tokenizeLines("ur'%d(' #foo") expect(tokens[0][0].value).toBe 'ur' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe "'" expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '(' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] - expect(tokens[0][3].value).toBe "'" - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] - expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual 
['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] it "terminates a unicode single-quoted raw string containing opening bracket at closing quote", -> - tokens = grammar.tokenizeLines("ur'[' #foo") + tokens = grammar.tokenizeLines("ur'%d[' #foo") expect(tokens[0][0].value).toBe 'ur' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe "'" expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '[' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] - expect(tokens[0][3].value).toBe "'" - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] - expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + 
expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] it "terminates a unicode double-quoted raw string containing opening parenthesis at closing quote", -> - tokens = grammar.tokenizeLines('ur"(" #foo') + tokens = grammar.tokenizeLines('ur"%d(" #foo') expect(tokens[0][0].value).toBe 'ur' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe '"' expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '(' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] - expect(tokens[0][3].value).toBe '"' - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] - expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] it "terminates a unicode double-quoted raw string containing opening bracket at closing quote", -> - tokens = grammar.tokenizeLines('ur"[" #foo') + tokens = grammar.tokenizeLines('ur"%d[" #foo') expect(tokens[0][0].value).toBe 'ur' expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python'] expect(tokens[0][1].value).toBe '"' expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][2].value).toBe '[' - expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] - expect(tokens[0][3].value).toBe '"' - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] - 
expect(tokens[0][4].value).toBe ' ' - expect(tokens[0][4].scopes).toEqual ['source.python'] - expect(tokens[0][5].value).toBe '#' - expect(tokens[0][5].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][6].value).toBe 'foo' - expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] From 4aaa3d99c724f0a8298cc2bff05fe422984e6538 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Fri, 7 Nov 2014 09:48:01 -0800 Subject: [PATCH 040/291] Prepare 0.23.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 17608eb..8e14747 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.22.0", + "version": "0.23.0", "engines": { "atom": "*", "node": "*" From 84d3bce9a92c0b86bc970adcbef80516e6bb79b7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 18 Nov 2014 10:58:06 -0800 Subject: [PATCH 041/291] Disable auto-indent on paste --- scoped-properties/language-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/scoped-properties/language-python.cson b/scoped-properties/language-python.cson index 68e85e7..6e39060 100644 --- a/scoped-properties/language-python.cson +++ b/scoped-properties/language-python.cson @@ -1,5 +1,6 @@ '.source.python': 'editor': + 'autoIndentOnPaste': false 'tabLength': 4 'foldEndPattern': '^\\s*"""\\s*$' 'commentStart': '# ' From 343931c16f3d401687fd22db013e525e9e7e13a6 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 18 Nov 2014 11:02:55 -0800 Subject: [PATCH 042/291] Prepare 0.24.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8e14747..c49510f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.23.0", + "version": "0.24.0", "engines": { "atom": "*", "node": "*" From 32fa50edf093595f13657caf0d371d2569efe2b0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sat, 29 Nov 2014 09:52:28 -0800 Subject: [PATCH 043/291] Use atom.gramamrs in specs --- spec/python-spec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 28f1b5d..f2532b4 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -6,7 +6,7 @@ describe "Python grammar", -> atom.packages.activatePackage("language-python") runs -> - grammar = 
atom.syntax.grammarForScopeName("source.python") + grammar = atom.grammars.grammarForScopeName("source.python") it "parses the grammar", -> expect(grammar).toBeDefined() From e53896c152ddec33e12b211f5e5bc2e1c917ad65 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sat, 29 Nov 2014 09:52:40 -0800 Subject: [PATCH 044/291] Prepare 0.24.1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c49510f..00eca5d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.24.0", + "version": "0.24.1", "engines": { "atom": "*", "node": "*" From ef6a611ef50f55bfb48090bc2a4da65c697a7bba Mon Sep 17 00:00:00 2001 From: Karel Brinda Date: Fri, 5 Dec 2014 15:14:50 +0100 Subject: [PATCH 045/291] Python syntax also for Snakefile (=Snakemake files) --- grammars/python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/python.cson b/grammars/python.cson index c0757fa..a4b90b6 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -11,6 +11,7 @@ 'SConstruct' 'Sconstruct' 'sconstruct' + 'Snakefile' 'wsgi' ] 'firstLineMatch': '^#!/.*\\bpython\\b' From 516d4c1423afcc5ddb15c76ac4ec581b48c01068 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Fri, 5 Dec 2014 09:28:50 -0800 Subject: [PATCH 046/291] Prepare 0.25.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 00eca5d..40e3b25 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.24.1", + "version": "0.25.0", "engines": { "atom": "*", "node": "*" From 546a70adf4135721070bb8dbc241c97bad2f154d Mon Sep 17 00:00:00 2001 From: Krzysztof Jeziorny Date: Sat, 6 Dec 2014 16:56:00 +0100 Subject: [PATCH 047/291] kv file format Support for kv file extension, construction language in Kivy (http://kivy.org) --- grammars/python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/python.cson b/grammars/python.cson index a4b90b6..f7cea24 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -4,6 +4,7 @@ 'cpy' 'gyp' 'gypi' + 'kv' 'py' 'pyw' 'rpy' From 476a353595caca2105c5984466390333036d0f11 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 8 Dec 2014 09:47:44 -0800 Subject: [PATCH 048/291] Prepare 0.26.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 40e3b25..c9778da 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.25.0", + "version": "0.26.0", "engines": { "atom": "*", "node": "*" From e8741ed2f576c9f7fb807177e107b6e431c2a863 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 22 Dec 2014 10:50:43 -0800 Subject: [PATCH 049/291] Add pattern for nonlocal keyword Closes #47 --- grammars/python.cson | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index f7cea24..aea9fb1 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -80,6 +80,12 @@ 'name': 'storage.modifier.global.python' 'match': '\\b(global)\\b' } + { + 'captures': + '1': + 'name': 'storage.modifier.nonlocal.python' + 'match': '\\b(nonlocal)\\b' + } { 'captures': '1': @@ -814,7 +820,7 @@ 'generic_names': 'match': '[A-Za-z_][A-Za-z0-9_]*' 'illegal_names': - 'match': 
'\\b(and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|not|or|pass|print|raise|return|try|while|with|yield)\\b' + 'match': '\\b(and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield)\\b' 'name': 'invalid.illegal.name.python' 'keyword_arguments': 'begin': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(=)(?!=)' From cbf6af800cd7fe8668e74d3498a80826ea18fcbf Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 22 Dec 2014 10:51:26 -0800 Subject: [PATCH 050/291] Prepare 0.27.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c9778da..d01afd7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.26.0", + "version": "0.27.0", "engines": { "atom": "*", "node": "*" From 68ffd7f681195ad8b2690afe3c76865e32767faf Mon Sep 17 00:00:00 2001 From: Marc Abramowitz Date: Fri, 2 Jan 2015 23:19:38 -0800 Subject: [PATCH 051/291] Add highlighting for a Python console session or doctest with ">>>" and "..." at beginning of lines. This is the format that was supported by the `pycon` lexer in Pygments. As a bonus, I also added support for IPython-style prompts (`In \\[\\d+\\]:`) Refs: https://github.com/github/linguist/issues/1939 --- grammars/python-console.cson | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 grammars/python-console.cson diff --git a/grammars/python-console.cson b/grammars/python-console.cson new file mode 100644 index 0000000..ca4630f --- /dev/null +++ b/grammars/python-console.cson @@ -0,0 +1,17 @@ +'scopeName': 'text.python.console' +'name': 'Python Console' +'fileTypes': [ + 'pycon' +] +'patterns': [ + { + 'match': '^(>{3}|\\.{3}|In \\[\\d+\\]:) (.+)$' + 'captures': + '1': + 'name': 'punctuation.separator.prompt.python-session' + '2': + 'patterns': [ + 'include': 'source.python' + ] + } +] From 8221fbc00feaedc25122876487cdd58b8a9d9e95 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 5 Jan 2015 17:51:37 -0800 Subject: [PATCH 052/291] Use trailing scope name --- grammars/python-console.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python-console.cson b/grammars/python-console.cson index ca4630f..66dd315 100644 --- a/grammars/python-console.cson +++ b/grammars/python-console.cson @@ -8,7 +8,7 @@ 'match': '^(>{3}|\\.{3}|In \\[\\d+\\]:) (.+)$' 'captures': '1': - 'name': 'punctuation.separator.prompt.python-session' + 'name': 'punctuation.separator.prompt.python.console' '2': 'patterns': [ 'include': 'source.python' From f518e495f6bb13124d44cfc2366c424dc58859ac Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 5 Jan 2015 17:55:31 -0800 Subject: [PATCH 053/291] Prepare 0.28.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d01afd7..cb78b54 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.27.0", + "version": "0.28.0", "engines": { "atom": "*", "node": "*" From 0b1190b6573675713ff1b8a23957cb41fa90bb80 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 6 Jan 2015 10:15:59 -0700 Subject: [PATCH 054/291] Rename scoped-properties/ to settings/ --- {scoped-properties => settings}/language-python.cson | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {scoped-properties => 
settings}/language-python.cson (100%) diff --git a/scoped-properties/language-python.cson b/settings/language-python.cson similarity index 100% rename from scoped-properties/language-python.cson rename to settings/language-python.cson From 46072e32e3060eb8e2fea98a106a86db89acc842 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 6 Jan 2015 10:16:03 -0700 Subject: [PATCH 055/291] Prepare 0.29.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index cb78b54..8a4247e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.28.0", + "version": "0.29.0", "engines": { "atom": "*", "node": "*" From 93a19b9466c5369cc020118452a22778d3dfbf6c Mon Sep 17 00:00:00 2001 From: Marc Abramowitz Date: Sun, 11 Jan 2015 08:28:28 -0800 Subject: [PATCH 056/291] Make python-console handle doctest filetype too This is commonly used in docs as well. E.g.: https://github.com/hynek/characteristic/pull/26#issuecomment-69324256 --- grammars/python-console.cson | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/grammars/python-console.cson b/grammars/python-console.cson index 66dd315..f53bc92 100644 --- a/grammars/python-console.cson +++ b/grammars/python-console.cson @@ -1,7 +1,8 @@ 'scopeName': 'text.python.console' 'name': 'Python Console' 'fileTypes': [ - 'pycon' + 'pycon', + 'doctest' ] 'patterns': [ { From 0dc712f198f6e220d47ce01eca1937214d7da9d8 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 12 Jan 2015 09:17:55 -0800 Subject: [PATCH 057/291] :lipstick: Sort file types --- grammars/python-console.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python-console.cson b/grammars/python-console.cson index f53bc92..f85fc4e 100644 --- a/grammars/python-console.cson +++ b/grammars/python-console.cson @@ -1,8 +1,8 @@ 'scopeName': 'text.python.console' 'name': 'Python Console' 'fileTypes': [ - 'pycon', 'doctest' + 'pycon' ] 'patterns': [ { From b3d05594074784ad44a710e376538b70ecdba2e2 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 12 Jan 2015 09:18:03 -0800 Subject: [PATCH 058/291] Prepare 0.30.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8a4247e..3d8d3dd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.29.0", + "version": "0.30.0", "engines": { "atom": "*", "node": "*" From a53286c241004f74c5798ff120c6fd0535e36cc1 Mon Sep 17 00:00:00 2001 From: syndbg Date: Thu, 15 Jan 2015 22:31:10 +0200 Subject: [PATCH 059/291] Added .tac to python.cson's filetypes. .tac is a twisted application configuration, using Python syntax. 
--- grammars/python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/python.cson b/grammars/python.cson index aea9fb1..622fce1 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -13,6 +13,7 @@ 'Sconstruct' 'sconstruct' 'Snakefile' + 'tac' 'wsgi' ] 'firstLineMatch': '^#!/.*\\bpython\\b' From 67eb1e80a2ebed733ca458a91e4498d2d53cc6cb Mon Sep 17 00:00:00 2001 From: Nick Timkovich Date: Tue, 10 Feb 2015 00:38:19 -0600 Subject: [PATCH 060/291] Match Python 3 octal literals PEP-3127 for full integer literal syntax --- grammars/python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 622fce1..969578b 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -44,11 +44,11 @@ 'name': 'constant.numeric.integer.hexadecimal.python' } { - 'match': '\\b(?i:(0[0-7]+)L)' + 'match': '\\b(?i:(0[oO]?[0-7]+)L)' 'name': 'constant.numeric.integer.long.octal.python' } { - 'match': '\\b(0[0-7]+)' + 'match': '\\b(0[oO]?[0-7]+)' 'name': 'constant.numeric.integer.octal.python' } { From a8376a52d1c2d2c34c0c450132635c5eeaf26a9e Mon Sep 17 00:00:00 2001 From: Daniel Hengeveld Date: Tue, 10 Feb 2015 09:34:49 -0800 Subject: [PATCH 061/291] Prepare 0.31.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3d8d3dd..af909b4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.30.0", + "version": "0.31.0", "engines": { "atom": "*", "node": "*" From b958f852e32cf80eb28ca6a0201cc83ead8fab8c Mon Sep 17 00:00:00 2001 From: Nick Timkovich Date: Tue, 10 Feb 2015 14:32:23 -0600 Subject: [PATCH 062/291] Add binary literals More PEP-3127! --- grammars/python.cson | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 969578b..b1e429b 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -44,13 +44,21 @@ 'name': 'constant.numeric.integer.hexadecimal.python' } { - 'match': '\\b(?i:(0[oO]?[0-7]+)L)' + 'match': '\\b(?i:(0o?[0-7]+)L)' 'name': 'constant.numeric.integer.long.octal.python' } { - 'match': '\\b(0[oO]?[0-7]+)' + 'match': '\\b(?i:(0o?[0-7]+))' 'name': 'constant.numeric.integer.octal.python' } + { + 'match': '\\b(?i:(0b[01]+)L)', + 'name': 'constant.numeric.integer.long.binary.python' + } + { + 'match': '\\b(?i:(0b[01]+))', + 'name': 'constant.numeric.integer.binary.python' + } { 'match': '\\b(?i:(((\\d+(\\.(?=[^a-zA-Z_])\\d*)?|(?<=[^0-9a-zA-Z_])\\.\\d+)(e[\\-\\+]?\\d+)?))J)' 'name': 'constant.numeric.complex.python' From 8daa10089a14bacf2346bb16f31d1189489696e0 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Tue, 10 Feb 2015 12:45:23 -0800 Subject: [PATCH 063/291] Prepare 0.32.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index af909b4..37ffefc 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.31.0", + "version": "0.32.0", "engines": { "atom": "*", "node": "*" From ac7fb31121838e75327fd1940a338188b981ad71 Mon Sep 17 00:00:00 2001 From: Kevin Cuzner Date: Fri, 27 Mar 2015 23:28:32 -0600 Subject: [PATCH 064/291] Add support for version numbers in the first line for python For example, the following will now match: /usr/bin/python3 /usr/bin/env python3 /usr/bin/env python3.4 --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson 
b/grammars/python.cson index b1e429b..0fae944 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -16,7 +16,7 @@ 'tac' 'wsgi' ] -'firstLineMatch': '^#!/.*\\bpython\\b' +'firstLineMatch': '^#!/.*\\bpython[\\d\\.]*\\b' 'patterns': [ { 'begin': '(^[ \\t]+)?(?=#)' From f65cc95f0bd24ec04e6be39377c1b04b9d0188d6 Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 30 Mar 2015 10:03:30 -0700 Subject: [PATCH 065/291] Prepare 0.33.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 37ffefc..3250cd7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.32.0", + "version": "0.33.0", "engines": { "atom": "*", "node": "*" From 11d98b1b2c3341be5b625c48f795a4cf07d36d4e Mon Sep 17 00:00:00 2001 From: Nick Timkovich Date: Tue, 7 Apr 2015 18:34:55 -0500 Subject: [PATCH 066/291] Add more magic Add highlighting support for additional magic attributes, variables, and method names. See https://docs.python.org/dev/reference/datamodel.html --- grammars/python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 0fae944..186b61e 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -859,11 +859,11 @@ 'match': '(\\\\)(.*)$\\n?' 'magic_function_names': 'comment': 'these methods have magic interpretation by python and are generally called indirectly through syntactic constructs' - 'match': '(?x)\\b(__(?:\n\t\t\t\t\t\tabs|add|and|call|cmp|coerce|complex|contains|del|delattr|\n\t\t\t\t\t\tdelete|delitem|delslice|div|divmod|enter|eq|exit|float|\n\t\t\t\t\t\tfloordiv|ge|get|getattr|getattribute|getitem|getslice|gt|\n\t\t\t\t\t\thash|hex|iadd|iand|idiv|ifloordiv|ilshift|imod|imul|init|\n\t\t\t\t\t\tint|invert|ior|ipow|irshift|isub|iter|itruediv|ixor|le|len|\n\t\t\t\t\t\tlong|lshift|lt|mod|mul|ne|neg|new|nonzero|oct|or|pos|pow|\n\t\t\t\t\t\tradd|rand|rdiv|rdivmod|repr|rfloordiv|rlshift|rmod|rmul|ror|\n\t\t\t\t\t\trpow|rrshift|rshift|rsub|rtruediv|rxor|set|setattr|setitem|\n\t\t\t\t\t\tsetslice|str|sub|truediv|unicode|xor\n\t\t\t\t\t)__)\\b' + 'match': '(?x)\\b(__(?:abs|add|and|bool|bytes|call|cmp|coerce|complex|contains|del|delattr|delete|delitem|delslice|dir|div|divmod|enter|eq|exit|float|floordiv|format|ge|get|getattr|getattribute|getitem|getslice|gt|hash|hex|iadd|iand|idiv|ifloordiv|ilshift|imatmul|imod|imul|index|init|instancecheck|int|invert|ior|ipow|irshift|isub|iter|itruediv|ixor|le|len|length_hint|long|lshift|lt|matmul|missing|mod|mul|ne|neg|next|new|nonzero|oct|or|pos|pow|radd|rand|rdiv|rdivmod|repr|reversed|rfloordiv|rlshift|rmatmul|rmod|rmul|ror|round|rpow|rrshift|rshift|rsub|rtruediv|rxor|set|setattr|setitem|setslice|str|sub|subclasscheck|truediv|unicode|xor)__)\\b' 'name': 'support.function.magic.python' 'magic_variable_names': 'comment': 'magic variables which a class/module may have.' - 'match': '\\b__(all|bases|class|debug|dict|doc|file|members|metaclass|methods|name|slots|weakref)__\\b' + 'match': '\\b__(all|annotations|bases|class|closure|code|debug|dict|doc|file|func|globals|kwdefaults|members|metaclass|methods|module|name|qualname|self|slots|weakref)__\\b' 'name': 'support.variable.magic.python' 'regular_expressions': 'comment': 'Changed disabled to 1 to turn off syntax highlighting in “r” strings.' 
From 00529e691575496eb82e5a154b592b5a2616e57f Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Thu, 9 Apr 2015 16:36:42 -0700 Subject: [PATCH 067/291] Prepare 0.34.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3250cd7..7f0ab7a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.33.0", + "version": "0.34.0", "engines": { "atom": "*", "node": "*" From f9f9947153362aa39d9cdcd1267ea3d20714cebe Mon Sep 17 00:00:00 2001 From: Thomas Johansen Date: Sat, 16 May 2015 09:22:03 +0200 Subject: [PATCH 068/291] Ensure soft tabs is used by default PEP8 clearly states that 4 spaces should be used for indentation, and this change ensures that regardless of whether the user has configured hard tabs as their global default. Refs #62 --- settings/language-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/settings/language-python.cson b/settings/language-python.cson index 6e39060..be04d31 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -1,6 +1,7 @@ '.source.python': 'editor': 'autoIndentOnPaste': false + 'softTabs': true 'tabLength': 4 'foldEndPattern': '^\\s*"""\\s*$' 'commentStart': '# ' From 6d0272312012174b62b3331c84eed87370bdc819 Mon Sep 17 00:00:00 2001 From: Thomas Johansen Date: Sat, 16 May 2015 09:27:44 +0200 Subject: [PATCH 069/291] Prepare 0.35.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 7f0ab7a..94e0f38 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.34.0", + "version": "0.35.0", "engines": { "atom": "*", "node": "*" From 72d06b50b9facec9a3c475295ceeb2fe34e975ee Mon Sep 17 00:00:00 2001 From: Matthew Rees Date: Wed, 27 May 2015 08:46:52 +0200 Subject: [PATCH 070/291] Fix for issue #60, match one or more whitespace instead of zero or more --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 186b61e..7593f86 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -503,7 +503,7 @@ ] } { - 'begin': '(?<=\\)|\\])\\s*(\\[)' + 'begin': '(?<=\\)|\\])\\s+(\\[)' 'beginCaptures': '1': 'name': 'punctuation.definition.arguments.begin.python' From 59aafc7aa0e46794afad20b9bfe5fdc2f61eae1b Mon Sep 17 00:00:00 2001 From: Matthew Rees Date: Sat, 30 May 2015 13:16:37 +0200 Subject: [PATCH 071/291] Added a test for the changes in PR 65 --- spec/python-spec.coffee | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index f2532b4..9dd19ed 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -195,3 +195,33 @@ describe "Python grammar", -> expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] expect(tokens[0][7].value).toBe 'foo' expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python'] + + it "terminates referencing an item in a list variable after a sequence of a closing and opening bracket", -> + tokens = grammar.tokenizeLines('foo[i[0]][j[0]]') + + expect(tokens[0][0].value).toBe 'foo' + expect(tokens[0][0].scopes).toEqual ['source.python', 'meta.item-access.python'] + expect(tokens[0][1].value).toBe '[' + expect(tokens[0][1].scopes).toEqual ['source.python', 'meta.item-access.python', 
'punctuation.definition.arguments.begin.python'] + expect(tokens[0][2].value).toBe 'i' + expect(tokens[0][2].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[0][4].value).toBe '0' + expect(tokens[0][4].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[0][5].value).toBe ']' + expect(tokens[0][5].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[0][6].value).toBe ']' + expect(tokens[0][6].scopes).toEqual ['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[0][7].value).toBe '[' + expect(tokens[0][7].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.begin.python'] + expect(tokens[0][8].value).toBe 'j' + expect(tokens[0][8].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python'] + expect(tokens[0][9].value).toBe '[' + expect(tokens[0][9].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[0][10].value).toBe '0' + expect(tokens[0][10].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[0][11].value).toBe ']' + expect(tokens[0][11].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[0][12].value).toBe ']' + expect(tokens[0][12].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python'] From 799142271cc9d9115fdad02c95a8b545bcfb45c1 Mon Sep 17 00:00:00 2001 From: Machiste Quintana Date: Tue, 2 Jun 2015 20:31:54 -0400 Subject: [PATCH 072/291] Add coffeelint support --- .coffeelintignore | 1 + .gitignore | 1 + coffeelint.json | 37 +++++++++++++++++++++++++++++++++++++ package.json | 3 +++ 4 files changed, 42 insertions(+) create mode 100644 .coffeelintignore create mode 100644 .gitignore create mode 100644 coffeelint.json diff --git a/.coffeelintignore b/.coffeelintignore new file mode 100644 index 0000000..1db51fe --- /dev/null +++ b/.coffeelintignore @@ -0,0 +1 @@ +spec/fixtures diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/coffeelint.json b/coffeelint.json new file mode 100644 index 0000000..a5dd715 --- /dev/null +++ b/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + 
"prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/package.json b/package.json index 94e0f38..d4ad531 100644 --- a/package.json +++ b/package.json @@ -14,5 +14,8 @@ "license": "MIT", "bugs": { "url": "https://github.com/atom/language-python/issues" + }, + "devDependencies": { + "coffeelint": "^1.10.1" } } From db14372b475bf41f0f01032076ba53b3940fb80a Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Mon, 8 Jun 2015 11:13:32 -0700 Subject: [PATCH 073/291] Prepare 0.36.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d4ad531..9cdf481 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.35.0", + "version": "0.36.0", "engines": { "atom": "*", "node": "*" From 3bbf9e1bb1d439d6c912c2d9d986b292643ee620 Mon Sep 17 00:00:00 2001 From: Akihiro Kameda Date: Fri, 26 Jun 2015 14:20:14 +0900 Subject: [PATCH 074/291] update builtin functions --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 7593f86..7b24369 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -662,7 +662,7 @@ 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' 'name': 'support.type.exception.python' 'builtin_functions': - 'match': '(?x)\\b(\n\t\t\t\t__import__|all|abs|any|apply|ascii|bin|callable|chr|classmethod|cmp|coerce|\n\t\t\t\tcompile|delattr|dir|divmod|enumerate|eval|execfile|filter|format|getattr|\n\t\t\t\tglobals|hasattr|hash|help|hex|id|input|intern|isinstance|issubclass|iter|\n\t\t\t\tlen|locals|map|max|min|next|oct|open|ord|pow|print|property|range|\n\t\t\t\traw_input|reduce|reload|repr|reversed|round|setattr|sorted|staticmethod|\n\t\t\t\tsum|super|type|unichr|vars|zip\n\t\t\t)\\b' + 'match': '(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip)\\b' 'name': 'support.function.builtin.python' 'builtin_types': 'match': '(?x)\\b(\n\t\t\t\tbasestring|bool|buffer|bytearray|bytes|complex|dict|float|frozenset|int|\n\t\t\t\tlist|long|memoryview|object|range|set|slice|str|tuple|unicode|xrange\n\t\t\t)\\b' From 
114876a394b9f9467bdd0994bd5d804cc0344dfd Mon Sep 17 00:00:00 2001 From: Akihiro Kameda Date: Fri, 26 Jun 2015 14:35:08 +0900 Subject: [PATCH 075/291] update builtin functions (added that of 2.7) --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 7b24369..03d1166 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -662,7 +662,7 @@ 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' 'name': 'support.type.exception.python' 'builtin_functions': - 'match': '(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip)\\b' + 'match': '(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|file|long|raw_input|reduce|reload|unichr|unicode|xrange|apply|buffer|coerce|intern)\\b' 'name': 'support.function.builtin.python' 'builtin_types': 'match': '(?x)\\b(\n\t\t\t\tbasestring|bool|buffer|bytearray|bytes|complex|dict|float|frozenset|int|\n\t\t\t\tlist|long|memoryview|object|range|set|slice|str|tuple|unicode|xrange\n\t\t\t)\\b' From 7271315c858451ad302ebce2c5db2eee528013cc Mon Sep 17 00:00:00 2001 From: Kevin Sawicki Date: Fri, 26 Jun 2015 09:30:52 -0500 Subject: [PATCH 076/291] Prepare 0.37.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9cdf481..a9a8ae2 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.36.0", + "version": "0.37.0", "engines": { "atom": "*", "node": "*" From a5bd7e0bad9c5f126e62177a948c4307b3380db5 Mon Sep 17 00:00:00 2001 From: James Gill Date: Wed, 22 Jul 2015 00:36:09 -0700 Subject: [PATCH 077/291] :bug: Correctly tokenize 'self.foo'. This fixes #77. 
--- grammars/python.cson | 6 +++--- spec/python-spec.coffee | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 03d1166..35a11ca 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -534,9 +534,6 @@ { 'include': '#line_continuation' } - { - 'include': '#language_variables' - } { 'match': '\\b(None|True|False|Ellipsis|NotImplemented)\\b' 'name': 'constant.language.python' @@ -550,6 +547,9 @@ { 'include': '#dotted_name' } + { + 'include': '#language_variables' + } { 'begin': '(\\()' 'end': '(\\))' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 9dd19ed..59d22ba 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -225,3 +225,23 @@ describe "Python grammar", -> expect(tokens[0][11].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python'] expect(tokens[0][12].value).toBe ']' expect(tokens[0][12].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python'] + + it "tokenizes properties of self as variables", -> + tokens = grammar.tokenizeLines('self.foo') + expect(tokens[0].length).toBe 3 + expect(tokens[0][0].value).toBe 'self' + expect(tokens[0][0].scopes).toEqual ['source.python', 'variable.language.python'] + expect(tokens[0][1].value).toBe '.' + expect(tokens[0][1].scopes).toEqual ['source.python'] + expect(tokens[0][2].value).toBe 'foo' + expect(tokens[0][2].scopes).toEqual ['source.python'] + + it "tokenizes properties of a variable as variables", -> + tokens = grammar.tokenizeLines('bar.foo') + expect(tokens[0].length).toBe 3 + expect(tokens[0][0].value).toBe 'bar' + expect(tokens[0][0].scopes).toEqual ['source.python'] + expect(tokens[0][1].value).toBe '.' + expect(tokens[0][1].scopes).toEqual ['source.python'] + expect(tokens[0][2].value).toBe 'foo' + expect(tokens[0][2].scopes).toEqual ['source.python'] From 80b45a839ea86cacbec178d4e41b84a9b505012a Mon Sep 17 00:00:00 2001 From: Wliu Date: Fri, 24 Jul 2015 09:38:25 -0400 Subject: [PATCH 078/291] Prepare 0.38.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index a9a8ae2..e2f13cd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.37.0", + "version": "0.38.0", "engines": { "atom": "*", "node": "*" From 7d38baf80b89c1dc58e77f8a12763c5d18d5e1e5 Mon Sep 17 00:00:00 2001 From: Machiste Quintana Date: Tue, 4 Aug 2015 07:48:45 -0400 Subject: [PATCH 079/291] Only build pushes on Travis for master branch [ci skip] --- .travis.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index d73c8e2..20cfe51 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,3 +9,7 @@ script: 'curl -s https://raw.githubusercontent.com/atom/ci/master/build-package. git: depth: 10 + +branches: + only: + - master From 94c5edf2ee149f3f320da20759a60f03bae5df4d Mon Sep 17 00:00:00 2001 From: Gregorio Robles Date: Sat, 15 Aug 2015 02:06:46 +0200 Subject: [PATCH 080/291] :bug: several Try/Except snippets with same prefix In that case, only the last will have an effect. 
New prefixes: Try/Except/Else/Finally (tryef) Try/Except/Else (trye) Try/Except/Finally (tryf) Try/Except (try) --- snippets/language-python.cson | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 54ea5d1..50daa9d 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -66,13 +66,13 @@ 'prefix': 'while' 'body': 'while ${1:condition}:\n\t${2:pass}' 'Try/Except/Else/Finally': - 'prefix': 'try' + 'prefix': 'tryef' 'body': 'try:\n\t${1:pass}\nexcept${2: ${3:Exception} as ${4:e}}:\n\t${5:raise}\nelse:\n\t${6:pass}\nfinally:\n\t${7:pass}' 'Try/Except/Else': - 'prefix': 'try' + 'prefix': 'trye' 'body': 'try:\n\t${1:pass}\nexcept ${2:Exception} as ${3:e}:\n\t${4:raise $3}\nelse:\n\t${5:pass}' 'Try/Except/Finally': - 'prefix': 'try' + 'prefix': 'tryf' 'body': 'try:\n\t${1:pass}\nexcept ${2:Exception} as ${3:e}:\n\t${4:raise $3}\nfinally:\n\t${5:pass}' 'Try/Except': 'prefix': 'try' From 3079785430a35df44c45f99d02fc7653a989e3cb Mon Sep 17 00:00:00 2001 From: Ivan Zuzak Date: Thu, 20 Aug 2015 19:03:04 +0200 Subject: [PATCH 081/291] Prepare 0.39.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index e2f13cd..0c22d02 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.38.0", + "version": "0.39.0", "engines": { "atom": "*", "node": "*" From 150847ccb0853a485085def5958ae7dec2df4940 Mon Sep 17 00:00:00 2001 From: Paul Chaignon Date: Tue, 25 Aug 2015 09:58:04 +0200 Subject: [PATCH 082/291] Line comments as a repository entry --- grammars/python.cson | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 35a11ca..9cc18f0 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -19,21 +19,7 @@ 'firstLineMatch': '^#!/.*\\bpython[\\d\\.]*\\b' 'patterns': [ { - 'begin': '(^[ \\t]+)?(?=#)' - 'beginCaptures': - '1': - 'name': 'punctuation.whitespace.comment.leading.python' - 'end': '(?!\\G)' - 'patterns': [ - { - 'begin': '#' - 'beginCaptures': - '0': - 'name': 'punctuation.definition.comment.python' - 'end': '\\n' - 'name': 'comment.line.number-sign.python' - } - ] + 'include': '#line_comments' } { 'match': '\\b(?i:(0x\\h*)L)' @@ -693,6 +679,22 @@ ] } ] + 'line_comments': + 'begin': '(^[ \\t]+)?(?=#)' + 'beginCaptures': + '1': + 'name': 'punctuation.whitespace.comment.leading.python' + 'end': '(?!\\G)' + 'patterns': [ + { + 'begin': '#' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.python' + 'end': '\\n' + 'name': 'comment.line.number-sign.python' + } + ] 'dotted_name': 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*)' 'end': '(?![A-Za-z0-9_\\.])' From deb68c308c6e3a1f49dbc295d50b5d7b61581206 Mon Sep 17 00:00:00 2001 From: Paul Chaignon Date: Tue, 25 Aug 2015 09:59:46 +0200 Subject: [PATCH 083/291] Function parameters can contain comments Fixes #81 --- grammars/python.cson | 3 +++ 1 file changed, 3 insertions(+) diff --git a/grammars/python.cson b/grammars/python.cson index 9cc18f0..9427c5e 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -266,6 +266,9 @@ 'contentName': 'meta.function.parameters.python' 'end': '(?=\\)\\s*\\:)' 'patterns': [ + { + 'include': '#line_comments' + } { 'include': '#keyword_arguments' } From 08861f076c544e78c2483a875836de2ca321d391 Mon Sep 17 00:00:00 2001 From: Paul Chaignon Date: Tue, 25 Aug 
2015 17:16:19 +0200 Subject: [PATCH 084/291] Test for comments inside function parameters --- spec/python-spec.coffee | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 59d22ba..a707d67 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -245,3 +245,21 @@ describe "Python grammar", -> expect(tokens[0][1].scopes).toEqual ['source.python'] expect(tokens[0][2].value).toBe 'foo' expect(tokens[0][2].scopes).toEqual ['source.python'] + + it "tokenizes comments inside function parameters", -> + tokens = grammar.tokenizeLines('def test(arg, # comment') + expect(tokens[0].length).toBe 10 + expect(tokens[0][0].value).toBe 'def' + expect(tokens[0][0].scopes).toEqual ['source.python', 'meta.function.python', 'storage.type.function.python'] + expect(tokens[0][2].value).toBe 'test' + expect(tokens[0][2].scopes).toEqual ['source.python', 'meta.function.python', 'entity.name.function.python'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] + expect(tokens[0][4].value).toBe 'arg' + expect(tokens[0][4].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[0][5].value).toBe ',' + expect(tokens[0][5].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] + expect(tokens[0][7].value).toBe '#' + expect(tokens[0][7].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[0][8].value).toBe ' comment' + expect(tokens[0][8].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python'] From df10cf87084450fe08f787d3107f7ab96e8a9395 Mon Sep 17 00:00:00 2001 From: Paul Chaignon Date: Wed, 26 Aug 2015 20:25:02 +0200 Subject: [PATCH 085/291] Refactor test + multiline test --- spec/python-spec.coffee | 46 +++++++++++++++++++++++++++-------------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index a707d67..9942282 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -247,19 +247,33 @@ describe "Python grammar", -> expect(tokens[0][2].scopes).toEqual ['source.python'] it "tokenizes comments inside function parameters", -> - tokens = grammar.tokenizeLines('def test(arg, # comment') - expect(tokens[0].length).toBe 10 - expect(tokens[0][0].value).toBe 'def' - expect(tokens[0][0].scopes).toEqual ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[0][2].value).toBe 'test' - expect(tokens[0][2].scopes).toEqual ['source.python', 'meta.function.python', 'entity.name.function.python'] - expect(tokens[0][3].value).toBe '(' - expect(tokens[0][3].scopes).toEqual ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] - expect(tokens[0][4].value).toBe 'arg' - expect(tokens[0][4].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[0][5].value).toBe ',' - expect(tokens[0][5].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] - 
expect(tokens[0][7].value).toBe '#' - expect(tokens[0][7].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[0][8].value).toBe ' comment' - expect(tokens[0][8].scopes).toEqual ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python'] + {tokens} = grammar.tokenizeLine('def test(arg, # comment') + expect(tokens.length).toBe 10 + expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] + expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] + expect(tokens[4]).toEqual value: 'arg', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[5]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] + expect(tokens[7]).toEqual value: '#', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[8]).toEqual value: ' comment', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python'] + + tokens = grammar.tokenizeLines(""" + def __init__( + self, + codec, # comment + config + ): + """) + expect(tokens[0][0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] + expect(tokens[0][2]).toEqual value: '__init__', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python', 'support.function.magic.python'] + expect(tokens[0][3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] + expect(tokens[1][1]).toEqual value: 'self', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[1][2]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] + expect(tokens[2][1]).toEqual value: 'codec', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[2][2]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] + expect(tokens[2][4]).toEqual value: '#', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] + expect(tokens[2][5]).toEqual value: ' comment', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python'] + expect(tokens[3][1]).toEqual value: 'config', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] + expect(tokens[4][1]).toEqual value: ':', 
scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python'] + From d9128ac87ca50aefffa49c35d4e05a6c8570fc9d Mon Sep 17 00:00:00 2001 From: Paul Chaignon Date: Wed, 26 Aug 2015 20:49:30 +0200 Subject: [PATCH 086/291] Remove unnecessary test for number of tokens --- spec/python-spec.coffee | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 9942282..78842c6 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -228,7 +228,7 @@ describe "Python grammar", -> it "tokenizes properties of self as variables", -> tokens = grammar.tokenizeLines('self.foo') - expect(tokens[0].length).toBe 3 + expect(tokens[0][0].value).toBe 'self' expect(tokens[0][0].scopes).toEqual ['source.python', 'variable.language.python'] expect(tokens[0][1].value).toBe '.' @@ -238,7 +238,7 @@ describe "Python grammar", -> it "tokenizes properties of a variable as variables", -> tokens = grammar.tokenizeLines('bar.foo') - expect(tokens[0].length).toBe 3 + expect(tokens[0][0].value).toBe 'bar' expect(tokens[0][0].scopes).toEqual ['source.python'] expect(tokens[0][1].value).toBe '.' @@ -248,7 +248,7 @@ describe "Python grammar", -> it "tokenizes comments inside function parameters", -> {tokens} = grammar.tokenizeLine('def test(arg, # comment') - expect(tokens.length).toBe 10 + expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] expect(tokens[3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] @@ -264,6 +264,7 @@ describe "Python grammar", -> config ): """) + expect(tokens[0][0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] expect(tokens[0][2]).toEqual value: '__init__', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python', 'support.function.magic.python'] expect(tokens[0][3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] From 802aa701b541cb3b34d27b4050d966a09dd6ba1b Mon Sep 17 00:00:00 2001 From: Michael Aquilina Date: Thu, 3 Sep 2015 23:15:39 +0100 Subject: [PATCH 087/291] Fix inline SQL for block strings with starting new lines --- grammars/python.cson | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 9427c5e..300c837 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1137,7 +1137,7 @@ ] } { - 'begin': '(""")(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'begin': '(""")' 'beginCaptures': '1': 'name': 'punctuation.definition.string.begin.python' @@ -1151,13 +1151,19 @@ 'name': 'string.quoted.double.block.sql.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'end': '(?=\\s*""")' + 'patterns': [ + { + 'include': 'source.sql' + } + ] } { - 'include': '#escaped_char' + 'include': '#constant_placeholder' } { - 'include': 'source.sql' + 'include': '#escaped_char' } ] } @@ -1499,7 +1505,7 @@ ] } { - 'begin': '(\'\'\')(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'begin': '(\'\'\')' 'beginCaptures': '1': 'name': 'punctuation.definition.string.begin.python' @@ -1510,16 
+1516,22 @@ 'name': 'punctuation.definition.string.end.python' '2': 'name': 'meta.empty-string.single.python' - 'name': 'string.quoted.single.block.python' + 'name': 'string.quoted.single.block.sql.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'end': '(?=\\s*\'\'\')' + 'patterns': [ + { + 'include': 'source.sql' + } + ] } { - 'include': '#escaped_char' + 'include': '#constant_placeholder' } { - 'include': 'source.sql' + 'include': '#escaped_char' } ] } From cc0f3826a3c95d7678d25e08254bbf8165df1d30 Mon Sep 17 00:00:00 2001 From: Michael Aquilina Date: Sat, 5 Sep 2015 18:31:21 +0100 Subject: [PATCH 088/291] Add specs for inline SQL --- spec/python-spec.coffee | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 78842c6..01256ad 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -278,3 +278,28 @@ describe "Python grammar", -> expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python'] + + it "correctly enables SQL inline highlighting on blocks", -> + tokens = grammar.tokenizeLines(''' + """ + SELECT bar + FROM foo + """ + ''') + + expect(tokens[0][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.sql.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', 'string.quoted.double.block.sql.python'] + expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', 'string.quoted.double.block.sql.python'] + expect(tokens[3][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.sql.python', 'punctuation.definition.string.end.python'] + + tokens = grammar.tokenizeLines(""" + ''' + SELECT bar + FROM foo + ''' + """) + + expect(tokens[0][0]).toEqual value: '\'\'\'', scopes: ['source.python', 'string.quoted.single.block.sql.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', 'string.quoted.single.block.sql.python'] + expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', 'string.quoted.single.block.sql.python'] + expect(tokens[3][0]).toEqual value: '\'\'\'', scopes: ['source.python', 'string.quoted.single.block.sql.python', 'punctuation.definition.string.end.python'] From 5d13f9e90c87ab72322fc5a034b769f9f100b20e Mon Sep 17 00:00:00 2001 From: Michael Aquilina Date: Sat, 5 Sep 2015 19:17:08 +0100 Subject: [PATCH 089/291] enables->tokenizes --- spec/python-spec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 01256ad..3e469dd 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -279,7 +279,7 @@ describe "Python grammar", -> expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python'] - it "correctly enables SQL inline highlighting on blocks", -> + it "tokenizes SQL inline highlighting on blocks", -> tokens = grammar.tokenizeLines(''' """ SELECT bar From 3fd32899dad1721a1c7d204d0c8374f2015c536c Mon Sep 17 00:00:00 2001 From: Michael Aquilina Date: Sat, 5 Sep 2015 19:26:46 +0100 Subject: [PATCH 090/291] 
Use for loop --- spec/python-spec.coffee | 53 +++++++++++++++++++++++------------------ 1 file changed, 30 insertions(+), 23 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 3e469dd..602bf44 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -280,26 +280,33 @@ describe "Python grammar", -> it "tokenizes SQL inline highlighting on blocks", -> - tokens = grammar.tokenizeLines(''' - """ - SELECT bar - FROM foo - """ - ''') - - expect(tokens[0][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.sql.python', 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', 'string.quoted.double.block.sql.python'] - expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', 'string.quoted.double.block.sql.python'] - expect(tokens[3][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.sql.python', 'punctuation.definition.string.end.python'] - - tokens = grammar.tokenizeLines(""" - ''' - SELECT bar - FROM foo - ''' - """) - - expect(tokens[0][0]).toEqual value: '\'\'\'', scopes: ['source.python', 'string.quoted.single.block.sql.python', 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', 'string.quoted.single.block.sql.python'] - expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', 'string.quoted.single.block.sql.python'] - expect(tokens[3][0]).toEqual value: '\'\'\'', scopes: ['source.python', 'string.quoted.single.block.sql.python', 'punctuation.definition.string.end.python'] + testCases = [ + { + text: ''' + """ + SELECT bar + FROM foo + """ + ''' + quotes: '"""' + scope: 'string.quoted.double.block.sql.python' + } + { + text: """ + ''' + SELECT bar + FROM foo + ''' + """ + quotes: "'''" + scope: 'string.quoted.single.block.sql.python' + } + ] + + for test in testCases + tokens = grammar.tokenizeLines(test['text']) + + expect(tokens[0][0]).toEqual value: test['quotes'], scopes: ['source.python', test['scope'], 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', test['scope']] + expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', test['scope']] + expect(tokens[3][0]).toEqual value: test['quotes'], scopes: ['source.python', test['scope'], 'punctuation.definition.string.end.python'] From 0854f32acdb2b97c6c851c49443d3f101410af33 Mon Sep 17 00:00:00 2001 From: Michael Aquilina Date: Sat, 5 Sep 2015 19:36:43 +0100 Subject: [PATCH 091/291] Better naming of keys --- spec/python-spec.coffee | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 602bf44..6e4b5dd 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -289,7 +289,7 @@ describe "Python grammar", -> """ ''' quotes: '"""' - scope: 'string.quoted.double.block.sql.python' + blockScope: 'string.quoted.double.block.sql.python' } { text: """ @@ -299,14 +299,14 @@ describe "Python grammar", -> ''' """ quotes: "'''" - scope: 'string.quoted.single.block.sql.python' + blockScope: 'string.quoted.single.block.sql.python' } ] for test in testCases tokens = grammar.tokenizeLines(test['text']) - expect(tokens[0][0]).toEqual value: test['quotes'], scopes: ['source.python', test['scope'], 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: 
['source.python', test['scope']] - expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', test['scope']] - expect(tokens[3][0]).toEqual value: test['quotes'], scopes: ['source.python', test['scope'], 'punctuation.definition.string.end.python'] + expect(tokens[0][0]).toEqual value: test['quotes'], scopes: ['source.python', test['blockScope'], 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', test['blockScope']] + expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', test['blockScope']] + expect(tokens[3][0]).toEqual value: test['quotes'], scopes: ['source.python', test['blockScope'], 'punctuation.definition.string.end.python'] From ac72a24c47ce43051071050ed634c69453bee0d8 Mon Sep 17 00:00:00 2001 From: Michael Aquilina Date: Sat, 5 Sep 2015 19:42:36 +0100 Subject: [PATCH 092/291] Simplify --- spec/python-spec.coffee | 46 ++++++++++++++--------------------------- 1 file changed, 16 insertions(+), 30 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 6e4b5dd..85669e3 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -280,33 +280,19 @@ describe "Python grammar", -> it "tokenizes SQL inline highlighting on blocks", -> - testCases = [ - { - text: ''' - """ - SELECT bar - FROM foo - """ - ''' - quotes: '"""' - blockScope: 'string.quoted.double.block.sql.python' - } - { - text: """ - ''' - SELECT bar - FROM foo - ''' - """ - quotes: "'''" - blockScope: 'string.quoted.single.block.sql.python' - } - ] - - for test in testCases - tokens = grammar.tokenizeLines(test['text']) - - expect(tokens[0][0]).toEqual value: test['quotes'], scopes: ['source.python', test['blockScope'], 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', test['blockScope']] - expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', test['blockScope']] - expect(tokens[3][0]).toEqual value: test['quotes'], scopes: ['source.python', test['blockScope'], 'punctuation.definition.string.end.python'] + delimsByScope = + "string.quoted.double.block.sql.python": '"""' + "string.quoted.single.block.sql.python": "'''" + + for scope, delim in delimsByScope + tokens = grammar.tokenizeLines( + delim + + 'SELECT bar + FROM foo' + + delim + ) + + expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] + expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] + expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] From c25946435bf5f5043fc0e24f0fec24c96e140ba0 Mon Sep 17 00:00:00 2001 From: Michael Aquilina Date: Sat, 5 Sep 2015 20:00:29 +0100 Subject: [PATCH 093/291] Remove extra whitespace --- spec/python-spec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 85669e3..d586be2 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -285,7 +285,7 @@ describe "Python grammar", -> "string.quoted.single.block.sql.python": "'''" for scope, delim in delimsByScope - tokens = grammar.tokenizeLines( + tokens = grammar.tokenizeLines( delim + 'SELECT bar FROM foo' From 44e1d1836675a9b19928a5b2cfce83613fd5b706 Mon Sep 17 00:00:00 2001 From: Wliu Date: Sat, 5 Sep 2015 15:02:53 -0400 
Subject: [PATCH 094/291] Prepare 0.40.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 0c22d02..31d7655 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.39.0", + "version": "0.40.0", "engines": { "atom": "*", "node": "*" From 09c67de34d672ddcc5678e6ff2a21012ad882847 Mon Sep 17 00:00:00 2001 From: foresmac Date: Fri, 9 Oct 2015 13:54:29 -0500 Subject: [PATCH 095/291] Add support for CTEs in embedded SQL --- grammars/python.cson | 6 +++--- spec/python-spec.coffee | 25 +++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 300c837..80d726a 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1151,7 +1151,7 @@ 'name': 'string.quoted.double.block.sql.python' 'patterns': [ { - 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' 'end': '(?=\\s*""")' 'patterns': [ { @@ -1168,7 +1168,7 @@ ] } { - 'begin': '(")(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'begin': '(")(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' 'beginCaptures': '1': 'name': 'punctuation.definition.string.begin.python' @@ -1519,7 +1519,7 @@ 'name': 'string.quoted.single.block.sql.python' 'patterns': [ { - 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' 'end': '(?=\\s*\'\'\')' 'patterns': [ { diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index d586be2..82548a4 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -296,3 +296,28 @@ describe "Python grammar", -> expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] + + + it "tokenizes SQL inline highlighting on blocks with a CTE", -> + delimsByScope = + "string.quoted.double.block.sql.python": '"""' + "string.quoted.single.block.sql.python": "'''" + + for scope, delim in delimsByScope + tokens = grammar.tokenizeLines( + delim + + 'WITH example_cte AS ( + SELECT bar + FROM foo + GROUP BY bar + ) + + SELECT COUNT(*) + FROM example_cte' + + delim + ) + print(tokens) + expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] + expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] + expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] From c4be8884aead7db523476f994fd4329943611c8b Mon Sep 17 00:00:00 2001 From: foresmac Date: Fri, 9 Oct 2015 13:55:48 -0500 Subject: [PATCH 096/291] Remove print --- spec/python-spec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 82548a4..b03eedd 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -316,7 +316,7 @@ describe "Python grammar", -> FROM example_cte' + delim ) - print(tokens) + expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] 
expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] From 40df23723fa00dd3ebdfcc8c58ff426d77045e32 Mon Sep 17 00:00:00 2001 From: foresmac Date: Fri, 9 Oct 2015 16:04:31 -0500 Subject: [PATCH 097/291] Update test to actually run Thanks @50Wliu! --- spec/python-spec.coffee | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index b03eedd..b58ab93 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -303,21 +303,27 @@ describe "Python grammar", -> "string.quoted.double.block.sql.python": '"""' "string.quoted.single.block.sql.python": "'''" - for scope, delim in delimsByScope - tokens = grammar.tokenizeLines( - delim + - 'WITH example_cte AS ( - SELECT bar - FROM foo - GROUP BY bar + for scope, delim of delimsByScope + tokens = grammar.tokenizeLines(""" + #{delim} + WITH example_cte AS ( + SELECT bar + FROM foo + GROUP BY bar ) - SELECT COUNT(*) - FROM example_cte' - + delim - ) + SELECT COUNT(*) + FROM example_cte + #{delim} + """) expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] - expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] - expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] + expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (', scopes: ['source.python', scope] + expect(tokens[2][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] + expect(tokens[3][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] + expect(tokens[4][0]).toEqual value: 'GROUP BY bar', scopes: ['source.python', scope] + expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope] + expect(tokens[6][0]).toEqual value: '', scopes: ['source.python', scope] + expect(tokens[7][0]).toEqual value: 'SELECT COUNT(*)', scopes: ['source.python', scope] + expect(tokens[8][0]).toEqual value: 'FROM example_cte', scopes: ['source.python', scope] + expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] From 3546ef7c6fd7da91620f8119f058623a300bac67 Mon Sep 17 00:00:00 2001 From: Bill Glover Date: Sat, 10 Oct 2015 19:18:52 +0100 Subject: [PATCH 098/291] Add `execfile` to the list of built in functions This commit adds `execfile` to the list of built in functions. The documentation provided by The Python Foundation confirms that this is indeed a built in function. 
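For illustration, a Python 2 call such as the one below is now scoped as `support.function.builtin.python` (the file name here is purely an example, not part of this change):

```
# Python 2 only: write a throwaway script, then run it in the current namespace.
with open('example_settings.py', 'w') as f:
    f.write('GREETING = "hello"\n')

execfile('example_settings.py')  # now highlighted as a builtin
print GREETING                   # prints: hello
```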
https://docs.python.org/2/library/functions.html#execfile --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 300c837..da3b6de 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -651,7 +651,7 @@ 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' 'name': 'support.type.exception.python' 'builtin_functions': - 'match': '(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|file|long|raw_input|reduce|reload|unichr|unicode|xrange|apply|buffer|coerce|intern)\\b' + 'match': '(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|file|long|raw_input|reduce|reload|unichr|unicode|xrange|apply|buffer|coerce|intern|execfile)\\b' 'name': 'support.function.builtin.python' 'builtin_types': 'match': '(?x)\\b(\n\t\t\t\tbasestring|bool|buffer|bytearray|bytes|complex|dict|float|frozenset|int|\n\t\t\t\tlist|long|memoryview|object|range|set|slice|str|tuple|unicode|xrange\n\t\t\t)\\b' From 64b9c5f9d7544b69bc71904566e6df608f891a88 Mon Sep 17 00:00:00 2001 From: Kyle Pittman Date: Thu, 15 Oct 2015 08:43:41 -0500 Subject: [PATCH 099/291] Add await keyword Added `await` keyword to `illegal_name` and `keyword.control.statement.python` https://github.com/atom/language-python/issues/93 --- grammars/python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index da3b6de..04e07b3 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -107,7 +107,7 @@ { 'comment': 'keywords that alter flow from within a block' 'name': 'keyword.control.statement.python' - 'match': '\\b(with|break|continue|pass|return|yield)\\b' + 'match': '\\b(with|break|continue|pass|return|yield|await)\\b' } { 'comment': 'keyword operators that evaluate to True or False' @@ -834,7 +834,7 @@ 'generic_names': 'match': '[A-Za-z_][A-Za-z0-9_]*' 'illegal_names': - 'match': 
'\\b(and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield)\\b' + 'match': '\\b(and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield|await)\\b' 'name': 'invalid.illegal.name.python' 'keyword_arguments': 'begin': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(=)(?!=)' From 6d7b52b8823954c33a698e5d10721cee326c5fa1 Mon Sep 17 00:00:00 2001 From: Wliu Date: Mon, 19 Oct 2015 18:28:48 -0400 Subject: [PATCH 100/291] Prepare 0.41.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 31d7655..0fbf32b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.40.0", + "version": "0.41.0", "engines": { "atom": "*", "node": "*" From 98609acfb997747d29d74a99808a61c46ac6abd0 Mon Sep 17 00:00:00 2001 From: foresmac Date: Mon, 16 Nov 2015 20:00:01 -0600 Subject: [PATCH 101/291] Add check for single-line quoted strings which are SQL statements --- grammars/python.cson | 2 +- spec/python-spec.coffee | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index cfbc334..5b24d82 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1536,7 +1536,7 @@ ] } { - 'begin': '(\')(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER))' + 'begin': '(\')(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' 'beginCaptures': '1': 'name': 'punctuation.definition.string.begin.python' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index b58ab93..a6549eb 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -327,3 +327,11 @@ describe "Python grammar", -> expect(tokens[7][0]).toEqual value: 'SELECT COUNT(*)', scopes: ['source.python', scope] expect(tokens[8][0]).toEqual value: 'FROM example_cte', scopes: ['source.python', scope] expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] + +it "tokenizes SQL inline highlighting on single line with a CTE", -> + + tokens = grammar.tokenizeLines('WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte') + + expect(tokens[0][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', scope] + expect(tokens[2][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] From d1419680ba6172ae4560aea8fc1ad7a155cc04ab Mon Sep 17 00:00:00 2001 From: foresmac Date: Tue, 17 Nov 2015 07:30:28 -0600 Subject: [PATCH 102/291] linting --- spec/python-spec.coffee | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index a6549eb..6a76cf1 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -297,7 +297,6 @@ describe "Python grammar", -> expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] - it "tokenizes SQL inline highlighting on blocks with a CTE", -> delimsByScope = 
"string.quoted.double.block.sql.python": '"""' @@ -328,10 +327,10 @@ describe "Python grammar", -> expect(tokens[8][0]).toEqual value: 'FROM example_cte', scopes: ['source.python', scope] expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] -it "tokenizes SQL inline highlighting on single line with a CTE", -> + it "tokenizes SQL inline highlighting on single line with a CTE", -> - tokens = grammar.tokenizeLines('WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte') + tokens = grammar.tokenizeLines('WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte') - expect(tokens[0][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', scope] - expect(tokens[2][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] + expect(tokens[0][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', scope] + expect(tokens[2][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] From 1a0e14eaaa5ed91f498fcd1685077372b8906a95 Mon Sep 17 00:00:00 2001 From: foresmac Date: Tue, 17 Nov 2015 07:42:34 -0600 Subject: [PATCH 103/291] Fix test --- spec/python-spec.coffee | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 6a76cf1..321d9be 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -329,8 +329,9 @@ describe "Python grammar", -> it "tokenizes SQL inline highlighting on single line with a CTE", -> - tokens = grammar.tokenizeLines('WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte') + tokens = grammar.tokenizeLines('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') + console.log(tokens) - expect(tokens[0][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', scope] - expect(tokens[2][0]).toEqual value: '\'', scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] + expect(tokens[0][0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python'] + expect(tokens[0][2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] From a9edeaef3790f787bdc47a38cbf70c620d5657e8 Mon Sep 17 00:00:00 2001 From: foresmac Date: Tue, 17 Nov 2015 09:01:43 -0600 Subject: [PATCH 104/291] Use tokenizeLine() --- spec/python-spec.coffee | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 321d9be..299e35b 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -329,9 +329,8 @@ 
describe "Python grammar", -> it "tokenizes SQL inline highlighting on single line with a CTE", -> - tokens = grammar.tokenizeLines('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') - console.log(tokens) + {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') - expect(tokens[0][0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] - expect(tokens[0][1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python'] - expect(tokens[0][2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] + expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python'] + expect(tokens[2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] From cf4d32f5882589f545d3da406b2eab4e22bff940 Mon Sep 17 00:00:00 2001 From: Wliu Date: Tue, 17 Nov 2015 20:12:47 -0500 Subject: [PATCH 105/291] Prepare 0.42.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 0fbf32b..244e851 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.41.0", + "version": "0.42.0", "engines": { "atom": "*", "node": "*" From cdb699e7a86fd9f9f84ae561abddb696aad777aa Mon Sep 17 00:00:00 2001 From: Wliu Date: Tue, 17 Nov 2015 20:14:45 -0500 Subject: [PATCH 106/291] Prepare 0.42.1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 244e851..f88e55d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.42.0", + "version": "0.42.1", "engines": { "atom": "*", "node": "*" From 76ee0f225a06264eac16fe0961e25ee2bf90b19f Mon Sep 17 00:00:00 2001 From: Juan Rial Date: Wed, 6 Jan 2016 14:12:52 +0100 Subject: [PATCH 107/291] No newline after pdb/ipdb for consistency with other mid-file snippets --- snippets/language-python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 50daa9d..e8e0df2 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -88,10 +88,10 @@ 'body': '{${1:key}: ${2:value} for ${3:key}, ${4:value} in ${5:variable}}' 'PDB set trace': 'prefix': 'pdb' - 'body': 'import pdb; pdb.set_trace()\n' + 'body': 'import pdb; pdb.set_trace()' 'iPDB set trace': 'prefix': 'ipdb' - 'body': 'import ipdb; ipdb.set_trace()\n' + 'body': 'import ipdb; ipdb.set_trace()' '__magic__': 'prefix': '__' 'body': '__${1:init}__' From bbf428cd3e13bac3c031097ebf21f1fefd968f3c Mon Sep 17 00:00:00 2001 From: Juan Rial Date: Wed, 6 Jan 2016 14:44:31 +0100 Subject: [PATCH 108/291] Added rpdb/pudb snippets --- snippets/language-python.cson | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index e8e0df2..765f55a 100644 --- 
a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -92,6 +92,12 @@ 'iPDB set trace': 'prefix': 'ipdb' 'body': 'import ipdb; ipdb.set_trace()' + 'rPDB set trace - ': + 'prefix': 'rpdb' + 'body': 'import rpdb2; rpdb2.start_embedded_debugger(\'${1:debug_password}\')$0' + 'PuDB set trace - ': + 'prefix': 'pudb' + 'body': 'import pudb; pudb.set_trace()' '__magic__': 'prefix': '__' 'body': '__${1:init}__' From 1d1719801199b94afc6c3f4405b0d9999c853b14 Mon Sep 17 00:00:00 2001 From: Juan Rial Date: Wed, 6 Jan 2016 20:09:22 +0100 Subject: [PATCH 109/291] Removed trailing dashes --- snippets/language-python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 765f55a..642a0d9 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -92,10 +92,10 @@ 'iPDB set trace': 'prefix': 'ipdb' 'body': 'import ipdb; ipdb.set_trace()' - 'rPDB set trace - ': + 'rPDB set trace': 'prefix': 'rpdb' 'body': 'import rpdb2; rpdb2.start_embedded_debugger(\'${1:debug_password}\')$0' - 'PuDB set trace - ': + 'PuDB set trace': 'prefix': 'pudb' 'body': 'import pudb; pudb.set_trace()' '__magic__': From 75f0d2b06122a51db6e8e0b129b57585cd68f99c Mon Sep 17 00:00:00 2001 From: Wliu Date: Thu, 7 Jan 2016 22:48:25 -0500 Subject: [PATCH 110/291] Prepare 0.43.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f88e55d..2b9334e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.42.1", + "version": "0.43.0", "engines": { "atom": "*", "node": "*" From 8a33fc4617e602d1f0c15c895b52832cb6094e7c Mon Sep 17 00:00:00 2001 From: James Gill Date: Thu, 24 Mar 2016 10:09:58 -0700 Subject: [PATCH 111/291] Give self and cls the class 'variable.language.self.python'. Fixes issue #33. --- grammars/python.cson | 2 +- spec/python-spec.coffee | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 5b24d82..a06c322 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -854,7 +854,7 @@ ] 'language_variables': 'match': '\\b(self|cls)\\b' - 'name': 'variable.language.python' + 'name': 'variable.language.self.python' 'line_continuation': 'captures': '1': diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 299e35b..022a4ba 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -226,11 +226,21 @@ describe "Python grammar", -> expect(tokens[0][12].value).toBe ']' expect(tokens[0][12].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python'] - it "tokenizes properties of self as variables", -> + it "tokenizes properties of self as self-type variables", -> tokens = grammar.tokenizeLines('self.foo') expect(tokens[0][0].value).toBe 'self' - expect(tokens[0][0].scopes).toEqual ['source.python', 'variable.language.python'] + expect(tokens[0][0].scopes).toEqual ['source.python', 'variable.language.self.python'] + expect(tokens[0][1].value).toBe '.' 
+ expect(tokens[0][1].scopes).toEqual ['source.python'] + expect(tokens[0][2].value).toBe 'foo' + expect(tokens[0][2].scopes).toEqual ['source.python'] + + it "tokenizes cls as a self-type variable", -> + tokens = grammar.tokenizeLines('cls.foo') + + expect(tokens[0][0].value).toBe 'cls' + expect(tokens[0][0].scopes).toEqual ['source.python', 'variable.language.self.python'] expect(tokens[0][1].value).toBe '.' expect(tokens[0][1].scopes).toEqual ['source.python'] expect(tokens[0][2].value).toBe 'foo' From 96b2b601ea4bdd15b62d66b71837175dd70b520f Mon Sep 17 00:00:00 2001 From: Lee Dohm Date: Sat, 26 Mar 2016 16:57:14 -0700 Subject: [PATCH 112/291] Update CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e70782f..83ed661 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1 +1 @@ -See the [Atom contributing guide](https://atom.io/docs/latest/contributing) +See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md). From e44c37a2edebc2c199fcd0cf5eb0b7e25494fa14 Mon Sep 17 00:00:00 2001 From: Wliu Date: Mon, 28 Mar 2016 18:43:18 -0400 Subject: [PATCH 113/291] Prepare 0.43.1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2b9334e..edf95b5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.43.0", + "version": "0.43.1", "engines": { "atom": "*", "node": "*" From e68d3b837725f17c25116b57bca4ff0a566dfeae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian-Robert=20St=C3=B6ter?= Date: Fri, 6 May 2016 11:31:09 +0200 Subject: [PATCH 114/291] PEP 257: "The docstring is a phrase ending in a period." --- snippets/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 642a0d9..384b76b 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -46,7 +46,7 @@ 'body': 'self.fail(\'${1:message}\')$0' 'New Class': 'prefix': 'class' - 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' + 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1.}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' 'New Method': 'prefix': 'defs' 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' From 00b6b2489a500847e40cba62ecb5e97527adef4d Mon Sep 17 00:00:00 2001 From: Wliu Date: Sat, 7 May 2016 17:21:10 -0400 Subject: [PATCH 115/291] Prepare 0.43.2 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index edf95b5..a73eff3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.43.1", + "version": "0.43.2", "engines": { "atom": "*", "node": "*" From 72418a055b064f58be3550898e27ede373fd9c96 Mon Sep 17 00:00:00 2001 From: Joe Glancy Date: Tue, 10 May 2016 17:03:32 +0100 Subject: [PATCH 116/291] Added RecursionError Added RecursionError, an exception derived from RuntimeError (see https://docs.python.org/3/library/exceptions.html#RecursionError). 
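A minimal illustration (Python 3.5+; the function name is only an example) where the `support.type.exception.python` scope now applies:

```
import sys

def recurse_forever():
    return recurse_forever()

try:
    recurse_forever()
except RecursionError:  # now highlighted as support.type.exception.python
    print('recursion limit of', sys.getrecursionlimit(), 'exceeded')
```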
--- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index a06c322..b510257 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -648,7 +648,7 @@ ] 'repository': 'builtin_exceptions': - 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' + 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Recursion|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' 'name': 'support.type.exception.python' 'builtin_functions': 'match': '(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|file|long|raw_input|reduce|reload|unichr|unicode|xrange|apply|buffer|coerce|intern|execfile)\\b' From 88027e0e0918654bbb8137b72f43895be45e6f59 Mon Sep 17 00:00:00 2001 From: Joe Glancy Date: Fri, 13 May 2016 19:34:27 +0100 Subject: [PATCH 117/291] Add StopSyncIteration exception (missed from the previous PR, apologies) Add StopSyncIteration exception (see [https://docs.python.org/3/library/exceptions.html#StopAsyncIteration](https://docs.python.org/3/library/exceptions.html#StopAsyncIteration)). 
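For reference, `StopAsyncIteration` is raised from an async iterator's `__anext__` to end an `async for` loop, as in this minimal sketch (Python 3.7+; the class and names are illustrative only):

```
import asyncio

class Countdown:
    # Illustrative async iterator; not part of the grammar change.
    def __init__(self, start):
        self.remaining = start

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self.remaining == 0:
            raise StopAsyncIteration  # now scoped as support.type.exception.python
        self.remaining -= 1
        return self.remaining

async def main():
    async for n in Countdown(3):
        print(n)

asyncio.run(main())
```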
--- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index b510257..1d109a2 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -648,7 +648,7 @@ ] 'repository': 'builtin_exceptions': - 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Recursion|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' + 'match': '(?x)\\b(\n\t\t\t\t(\n\t\t\t\t\tArithmetic|Assertion|Attribute|BlockingIO|BrokenPipe|Buffer|ChildProcess|\n\t\t\t\t\tConnection(Aborted|Refused|Reset)?|EOF|Environment|FileExists|\n\t\t\t\t\tFileNotFound|FloatingPoint|Interrupted|IO|IsADirectoryError|\n\t\t\t\t\tImport|Indentation|Index|Key|Lookup|Memory|Name|NotADirectory|\n\t\t\t\t\tNotImplemented|OS|Overflow|Permission|ProcessLookup|Recursion|Reference|\n\t\t\t\t\tRuntime|Standard|Syntax|System|Tab|Timeout|Type|UnboundLocal|\n\t\t\t\t\tUnicode(Encode|Decode|Translate)?|Value|VMS|Windows|ZeroDivision\n\t\t\t\t)Error|\n\t\t\t\t((Pending)?Deprecation|Runtime|Syntax|User|Future|Import|Unicode|Bytes)?Warning|\n\t\t\t\t(Base)?Exception|\n\t\t\t\tSystemExit|StopAsyncIteration|StopIteration|NotImplemented|KeyboardInterrupt|GeneratorExit\n\t\t\t)\\b' 'name': 'support.type.exception.python' 'builtin_functions': 'match': '(?x)\\b(__import__|abs|all|any|ascii|bin|bool|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|file|long|raw_input|reduce|reload|unichr|unicode|xrange|apply|buffer|coerce|intern|execfile)\\b' From 86f5c5c3a6b6a54b220f50ccaa6c2a0a0c16ffba Mon Sep 17 00:00:00 2001 From: Wliu Date: Tue, 17 May 2016 18:38:47 -0400 Subject: [PATCH 118/291] Prepare 0.44.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index a73eff3..2b8816e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.43.2", + "version": "0.44.0", "engines": { "atom": "*", "node": "*" From d9b4ac3667557c1a0147fd8fb6eb2563160724a4 Mon Sep 17 00:00:00 2001 From: esdoppio Date: Fri, 27 May 2016 11:08:53 +0800 Subject: [PATCH 119/291] Add import snippet --- snippets/language-python.cson | 3 +++ 1 file changed, 3 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 384b76b..c48ed76 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -5,6 +5,9 @@ '# coding=utf-8': 'prefix': 'enc' 'body': '# coding=utf-8\n' + 'Import': + 'prefix': 'im' + 'body': 'import ${1:package/module}' 'Assert Equal': 
'prefix': 'ase' 'body': 'self.assertEqual(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' From d25242063f6ef4537f262a72550129afe0cdd393 Mon Sep 17 00:00:00 2001 From: esdoppio Date: Fri, 27 May 2016 11:35:13 +0800 Subject: [PATCH 120/291] Add from/import snippet --- snippets/language-python.cson | 3 +++ 1 file changed, 3 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index c48ed76..8906e44 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -8,6 +8,9 @@ 'Import': 'prefix': 'im' 'body': 'import ${1:package/module}' + 'From/Import': + 'prefix': 'fim' + 'body': 'from ${1:package/module} import ${2:names}' 'Assert Equal': 'prefix': 'ase' 'body': 'self.assertEqual(${1:expected}, ${2:actual}${3:, \'${4:message}\'})$0' From d08d22d37b9c0e068fa8c3666c468a363dd3c879 Mon Sep 17 00:00:00 2001 From: Wliu Date: Fri, 27 May 2016 16:47:32 -0400 Subject: [PATCH 121/291] Prepare 0.45.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2b8816e..66527d3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.44.0", + "version": "0.45.0", "engines": { "atom": "*", "node": "*" From d5ae69749bde41cc9da8020786106958f376ef5b Mon Sep 17 00:00:00 2001 From: Damien Guard Date: Tue, 14 Jun 2016 09:03:25 -0700 Subject: [PATCH 122/291] Enable Windows builds on AppVeyor --- README.md | 11 ++++++----- appveyor.yml | 17 +++++++++++++++++ 2 files changed, 23 insertions(+), 5 deletions(-) create mode 100644 appveyor.yml diff --git a/README.md b/README.md index 6e7daf9..04fbf64 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,10 @@ -# Python language support in Atom [![Build Status](https://travis-ci.org/atom/language-python.svg?branch=master)](https://travis-ci.org/atom/language-python) +# Python language support in Atom +[![OS X Build Status](https://travis-ci.org/atom/language-python.svg?branch=master)](https://travis-ci.org/atom/language-python) +[![Windows Build Status](https://ci.appveyor.com/api/projects/status/hmxrb9jttjh41es9/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-python/branch/master) +[![Dependency Status](https://david-dm.org/atom/language-python.svg)](https://david-dm.org/atom/language-python) Adds syntax highlighting and snippets to Python files in Atom. -Originally [converted](http://atom.io/docs/latest/converting-a-text-mate-bundle) -from the [Python TextMate bundle](https://github.com/textmate/python.tmbundle). +Originally [converted](http://atom.io/docs/latest/converting-a-text-mate-bundle) from the [Python TextMate bundle](https://github.com/textmate/python.tmbundle). -Contributions are greatly appreciated. Please fork this repository and open a -pull request to add snippets, make grammar tweaks, etc. +Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc. 
diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000..efe989f --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,17 @@ +version: "{build}" + +os: Windows Server 2012 R2 + +install: + - choco install atom -y + - cd %APPVEYOR_BUILD_FOLDER% + - "%LOCALAPPDATA%/atom/bin/apm clean" + - "%LOCALAPPDATA%/atom/bin/apm install" + +build_script: + - cd %APPVEYOR_BUILD_FOLDER% + - "%LOCALAPPDATA%/atom/bin/apm test --path %LOCALAPPDATA%/atom/bin/atom.cmd" + +test: off + +deploy: off From bc204508498b1695a4448bd2cf9a3d31c1cdaf5e Mon Sep 17 00:00:00 2001 From: Damien Guard Date: Fri, 24 Jun 2016 17:09:22 -0700 Subject: [PATCH 123/291] AppVeyor should test against stable & beta --- appveyor.yml | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index efe989f..2b0fde4 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,17 +1,27 @@ version: "{build}" -os: Windows Server 2012 R2 +platform: x64 + +branches: + only: + - master + +clone_depth: 10 + +skip_tags: true + +environment: + APM_TEST_PACKAGES: + + matrix: + - ATOM_CHANNEL: stable + - ATOM_CHANNEL: beta install: - - choco install atom -y - - cd %APPVEYOR_BUILD_FOLDER% - - "%LOCALAPPDATA%/atom/bin/apm clean" - - "%LOCALAPPDATA%/atom/bin/apm install" + - ps: Install-Product node 4 build_script: - - cd %APPVEYOR_BUILD_FOLDER% - - "%LOCALAPPDATA%/atom/bin/apm test --path %LOCALAPPDATA%/atom/bin/atom.cmd" + - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/atom/ci/master/build-package.ps1')) test: off - deploy: off From c684a350333501e62d20db6e6bbd00029a6be398 Mon Sep 17 00:00:00 2001 From: Lukas Geiger Date: Wed, 7 Sep 2016 12:27:26 +0200 Subject: [PATCH 124/291] Fix folding --- settings/language-python.cson | 1 - 1 file changed, 1 deletion(-) diff --git a/settings/language-python.cson b/settings/language-python.cson index be04d31..60e2aa7 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -3,7 +3,6 @@ 'autoIndentOnPaste': false 'softTabs': true 'tabLength': 4 - 'foldEndPattern': '^\\s*"""\\s*$' 'commentStart': '# ' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while)\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:' From d653cb9a470b10be4284b1f92a7ee93134f30802 Mon Sep 17 00:00:00 2001 From: Wliu Date: Wed, 28 Sep 2016 21:03:05 -0400 Subject: [PATCH 125/291] Prepare 0.45.1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 66527d3..f47c3d5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.45.0", + "version": "0.45.1", "engines": { "atom": "*", "node": "*" From 23d145f10a15addfdf69bcf9682679099c9267f6 Mon Sep 17 00:00:00 2001 From: Long Nhat Nguyen Date: Thu, 29 Sep 2016 19:42:03 +0000 Subject: [PATCH 126/291] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 04fbf64..e646780 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # Python language support in Atom -[![OS X Build Status](https://travis-ci.org/atom/language-python.svg?branch=master)](https://travis-ci.org/atom/language-python) +[![macOS Build Status](https://travis-ci.org/atom/language-python.svg?branch=master)](https://travis-ci.org/atom/language-python) [![Windows Build 
Status](https://ci.appveyor.com/api/projects/status/hmxrb9jttjh41es9/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-python/branch/master) [![Dependency Status](https://david-dm.org/atom/language-python.svg)](https://david-dm.org/atom/language-python) Adds syntax highlighting and snippets to Python files in Atom. -Originally [converted](http://atom.io/docs/latest/converting-a-text-mate-bundle) from the [Python TextMate bundle](https://github.com/textmate/python.tmbundle). +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) from the [Python TextMate bundle](https://github.com/textmate/python.tmbundle). Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc. From 93acd8c1f4dddbe3ec7c50eb26056e5be57ac70d Mon Sep 17 00:00:00 2001 From: Lee Dohm Date: Thu, 22 Dec 2016 10:42:14 -0800 Subject: [PATCH 127/291] Update issue and PR templates --- ISSUE_TEMPLATE.md | 40 ++++++++++++++++++++++++++++++++++++++++ PULL_REQUEST_TEMPLATE.md | 28 ++++++++++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 ISSUE_TEMPLATE.md create mode 100644 PULL_REQUEST_TEMPLATE.md diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md new file mode 100644 index 0000000..b60bb86 --- /dev/null +++ b/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ + + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..2750afc --- /dev/null +++ b/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. 
+* All new code requires tests to ensure against regressions + +### Description of the Change + + + +## Alternate Designs + + + +### Benefits + + + +### Possible Drawbacks + + + +### Applicable Issues + + From e8c487f08ebaa55725c5690ea2d405428cb8fc6a Mon Sep 17 00:00:00 2001 From: Lee Dohm Date: Mon, 26 Dec 2016 10:05:42 -0800 Subject: [PATCH 128/291] :memo: Update issue and PR templates --- PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md index 2750afc..cdaa94a 100644 --- a/PULL_REQUEST_TEMPLATE.md +++ b/PULL_REQUEST_TEMPLATE.md @@ -11,7 +11,7 @@ We must be able to understand the design of your change from this description. I --> -## Alternate Designs +### Alternate Designs From 9e680296618c2d5d5c576b70a9a00d85a281ca88 Mon Sep 17 00:00:00 2001 From: Zach Ovington Date: Tue, 31 Jan 2017 14:37:24 -0500 Subject: [PATCH 129/291] add regex to support spaces in firstline shebang, add corresponding specs --- grammars/python.cson | 2 +- spec/python-spec.coffee | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 1d109a2..c7ee5fb 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -16,7 +16,7 @@ 'tac' 'wsgi' ] -'firstLineMatch': '^#!/.*\\bpython[\\d\\.]*\\b' +'firstLineMatch': '^#![ \\t]*/.*\\bpython[\\d\\.]*\\b' 'patterns': [ { 'include': '#line_comments' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 022a4ba..21c10ba 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -8,6 +8,10 @@ describe "Python grammar", -> runs -> grammar = atom.grammars.grammarForScopeName("source.python") + it "recognises shebang on firstline", -> + expect(grammar.firstLineRegex.scanner.findNextMatchSync("#!/usr/bin/env python")).not.toBeNull() + expect(grammar.firstLineRegex.scanner.findNextMatchSync("#! /usr/bin/env python")).not.toBeNull() + it "parses the grammar", -> expect(grammar).toBeDefined() expect(grammar.scopeName).toBe "source.python" From 0bb1c4410883bccd38b179d46307bec547d762f4 Mon Sep 17 00:00:00 2001 From: Wliu Date: Mon, 6 Feb 2017 12:53:23 -0500 Subject: [PATCH 130/291] Prepare 0.45.2 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f47c3d5..c71e1ed 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.45.1", + "version": "0.45.2", "engines": { "atom": "*", "node": "*" From 8ca317685a64f273ae71424a93f00bc43ee8715c Mon Sep 17 00:00:00 2001 From: Alessandro Pisa Date: Tue, 16 May 2017 17:04:28 +0200 Subject: [PATCH 131/291] Be compliant with the Python documentation Even if -*- is cosmetic and encoding: utf8 is equal to encoding= utf8 in the Python documentation the shown example is ``` #!/usr/bin/env python # -*- coding: latin-1 -*- ``` See: - https://docs.python.org/3/howto/unicode.html#unicode-literals-in-python-source-code I would use the form proposed in the doc. It seems to me to be more popular. 
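For example, a file started with the `env` and `enc` snippets would then begin like this; the encoding line is what lets Python 2 accept non-ASCII literals in the source (the module body is illustrative):

```
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Only the two header comments come from the snippets; the rest is an example.
MESSAGE = u'naïve café'
print(MESSAGE)
```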
--- snippets/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 8906e44..c89c442 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -4,7 +4,7 @@ 'body': '#!/usr/bin/env python\n' '# coding=utf-8': 'prefix': 'enc' - 'body': '# coding=utf-8\n' + 'body': '# -*- coding: utf-8 -*-\n' 'Import': 'prefix': 'im' 'body': 'import ${1:package/module}' From 89b9623ad200380a442b8d91147de0b06a0eafa8 Mon Sep 17 00:00:00 2001 From: Christoph Buchner Date: Wed, 17 May 2017 14:12:14 +0200 Subject: [PATCH 132/291] Add with statement to snippets After reviewing a recent reddit discussion about [the most repetitive code pieces people type](https://www.reddit.com/r/Python/comments/6bjgkt/what_are_the_most_repetitive_pieces_of_code_that/), I checked the most often mentioned items against the available snippet list. The only one that stood out to me as missing was the with statement, which I add here after confirming that it works in my own snippets. It's basically copy/pasted from the for statement snippet, and a simple change, so should be ok. --- snippets/language-python.cson | 3 +++ 1 file changed, 3 insertions(+) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index c89c442..53cea7e 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -71,6 +71,9 @@ 'while': 'prefix': 'while' 'body': 'while ${1:condition}:\n\t${2:pass}' + 'with statement': + 'prefix': 'with' + 'body': 'with ${1:expression} as ${2:target}:\n\t${3:pass}' 'Try/Except/Else/Finally': 'prefix': 'tryef' 'body': 'try:\n\t${1:pass}\nexcept${2: ${3:Exception} as ${4:e}}:\n\t${5:raise}\nelse:\n\t${6:pass}\nfinally:\n\t${7:pass}' From ed3f4587df517ab8c5f77a3e26c4e9f093e1ba23 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Wed, 24 May 2017 19:05:09 -0400 Subject: [PATCH 133/291] Prepare 0.45.3 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c71e1ed..bebf715 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.45.2", + "version": "0.45.3", "engines": { "atom": "*", "node": "*" From d56de7b06de067644a3f58f3477c7b34d8c5e276 Mon Sep 17 00:00:00 2001 From: tennyson-mccalla Date: Wed, 5 Jul 2017 16:00:30 -0400 Subject: [PATCH 134/291] Made some additions to the snippets --- snippets/language-python.cson | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 53cea7e..1292992 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -2,6 +2,9 @@ '#!/usr/bin/env python': 'prefix': 'env' 'body': '#!/usr/bin/env python\n' + '#!/usr/bin/env python3': + 'prefix': 'env3' + 'body': '#!/usr/bin/env python3\n' '# coding=utf-8': 'prefix': 'enc' 'body': '# -*- coding: utf-8 -*-\n' @@ -52,7 +55,7 @@ 'body': 'self.fail(\'${1:message}\')$0' 'New Class': 'prefix': 'class' - 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1.}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' + 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' 'New Method': 'prefix': 'defs' 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' @@ -95,6 +98,9 @@ 
'Dictionary Comprehension': 'prefix': 'dc' 'body': '{${1:key}: ${2:value} for ${3:key}, ${4:value} in ${5:variable}}' + 'Set Comprehension': + 'prefix': 'sc' + 'body': '{${1:value} for ${2:value} in ${3:variable}}' 'PDB set trace': 'prefix': 'pdb' 'body': 'import pdb; pdb.set_trace()' From 87c01e2d1c22d7530b53c5ab386f0a7c7c38e3cf Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Sat, 26 Mar 2016 20:05:04 -0500 Subject: [PATCH 135/291] dedent next line for some keywords --- settings/language-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/settings/language-python.cson b/settings/language-python.cson index 60e2aa7..7d26491 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -6,3 +6,4 @@ 'commentStart': '# ' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while)\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:' + 'decreaseNextIndentPattern': '^\\s*(return|yield|continue|break|raise)\\b.*$' From f0b7a478a87647d4c1371e93b397ff729f0804c7 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Fri, 7 Jul 2017 00:06:02 -0500 Subject: [PATCH 136/291] Technically right regex, but triggers to soon to be useful This is the technically correct regex to prevent dedenting the current line on things like in-line else expressions, but unfortunately the regex is already matched before anything can be done about it :( Leaving it in as is in the hopes that it will be useful someday. --- settings/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings/language-python.cson b/settings/language-python.cson index 7d26491..003ced1 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -5,5 +5,5 @@ 'tabLength': 4 'commentStart': '# ' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while)\\b.*:\\s*$' - 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:' + 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' 'decreaseNextIndentPattern': '^\\s*(return|yield|continue|break|raise)\\b.*$' From a039867ca58d68148e0e44ec73cd317c5ef791e9 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Fri, 7 Jul 2017 00:12:33 -0500 Subject: [PATCH 137/291] Adding indent spec --- spec/language-python-spec.coffee | 89 ++++++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100644 spec/language-python-spec.coffee diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee new file mode 100644 index 0000000..3bf83fc --- /dev/null +++ b/spec/language-python-spec.coffee @@ -0,0 +1,89 @@ +describe 'Python settings', -> + [editor, languageMode] = [] + + afterEach -> + editor.destroy() + + beforeEach -> + waitsForPromise -> + atom.workspace.open('sample.py').then (o) -> + editor = o + languageMode = editor.languageMode + + waitsForPromise -> + atom.packages.activatePackage('language-python') + + it 'matches lines correctly using the increaseIndentPattern', -> + increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']) + + expect(increaseIndentRegex.testSync('for i in range(n):')).toBeTruthy() + expect(increaseIndentRegex.testSync(' for i in range(n):')).toBeTruthy() + expect(increaseIndentRegex.testSync('class TheClass(Object):')).toBeTruthy() + expect(increaseIndentRegex.testSync(' class TheClass(Object):')).toBeTruthy() + expect(increaseIndentRegex.testSync('def f(x):')).toBeTruthy() + expect(increaseIndentRegex.testSync(' def f(x):')).toBeTruthy() + 
expect(increaseIndentRegex.testSync('if this_var == that_var:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' if this_var == that_var:')).toBeTruthy() + expect(increaseIndentRegex.testSync('elif this_var == that_var:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' elif this_var == that_var:')).toBeTruthy() + expect(increaseIndentRegex.testSync('else:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' else:')).toBeTruthy() + expect(increaseIndentRegex.testSync('except Exception:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' except Exception:')).toBeTruthy() + expect(increaseIndentRegex.testSync('except Exception as e:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' except Exception as e:')).toBeTruthy() + expect(increaseIndentRegex.testSync('finally:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' finally:')).toBeTruthy() + expect(increaseIndentRegex.testSync('with open("filename") as f:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' with open("filename") as f:')).toBeTruthy() + expect(increaseIndentRegex.testSync('while True:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' while True:')).toBeTruthy() + expect(increaseIndentRegex.testSync('\t\t while True:')).toBeTruthy() + + it 'does not match lines incorrectly using the increaseIndentPattern', -> + increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']) + + expect(increaseIndentRegex.testSync('for i in range(n)')).toBeFalsy() + expect(increaseIndentRegex.testSync('class TheClass(Object)')).toBeFalsy() + expect(increaseIndentRegex.testSync('def f(x)')).toBeFalsy() + expect(increaseIndentRegex.testSync('if this_var == that_var')).toBeFalsy() + expect(increaseIndentRegex.testSync('"for i in range(n):"')).toBeFalsy() + + it 'matches lines correctly using the decreaseIndentPattern', -> + decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']) + + expect(decreaseIndentRegex.testSync('elif this_var == that_var:')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' elif this_var == that_var:')).toBeTruthy() + expect(decreaseIndentRegex.testSync('else:')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' else:')).toBeTruthy() + expect(decreaseIndentRegex.testSync('except Exception:')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' except Exception:')).toBeTruthy() + expect(decreaseIndentRegex.testSync('except Exception as e:')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' except Exception as e:')).toBeTruthy() + expect(decreaseIndentRegex.testSync('finally:')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' finally:')).toBeTruthy() + expect(decreaseIndentRegex.testSync('\t\t finally:')).toBeTruthy() + + it 'does not match lines incorrectly using the decreaseIndentPattern', -> + decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']) + + # NOTE! This first one is different from most other rote tests here. 
+ expect(decreaseIndentRegex.testSync('else: expression()')).toBeFalsy() + expect(decreaseIndentRegex.testSync('elif this_var == that_var')).toBeFalsy() + expect(decreaseIndentRegex.testSync(' elif this_var == that_var')).toBeFalsy() + expect(decreaseIndentRegex.testSync('else')).toBeFalsy() + expect(decreaseIndentRegex.testSync(' "finally:"')).toBeFalsy() + + + it 'matches lines correctly using the decreaseNextIndentPattern', -> + decreaseNextIndentRegex = languageMode.decreaseNextIndentRegexForScopeDescriptor(['source.python']) + + expect(decreaseNextIndentRegex.testSync(' return')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' return')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' return x')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' yield x')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' yield expression()')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' continue')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' break')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' raise')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' raise Exception()')).toBeTruthy() From 08ba2c75608a7eda32a084e09a22aebfed127296 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Fri, 7 Jul 2017 00:16:27 -0500 Subject: [PATCH 138/291] adding 'pass' as dedent keyword --- settings/language-python.cson | 2 +- spec/language-python-spec.coffee | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/settings/language-python.cson b/settings/language-python.cson index 003ced1..e6d1c09 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -6,4 +6,4 @@ 'commentStart': '# ' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while)\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' - 'decreaseNextIndentPattern': '^\\s*(return|yield|continue|break|raise)\\b.*$' + 'decreaseNextIndentPattern': '^\\s*(pass|return|yield|continue|break|raise)\\b.*$' diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index 3bf83fc..ba605bd 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -85,5 +85,6 @@ describe 'Python settings', -> expect(decreaseNextIndentRegex.testSync(' yield expression()')).toBeTruthy() expect(decreaseNextIndentRegex.testSync(' continue')).toBeTruthy() expect(decreaseNextIndentRegex.testSync(' break')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' pass')).toBeTruthy() expect(decreaseNextIndentRegex.testSync(' raise')).toBeTruthy() expect(decreaseNextIndentRegex.testSync(' raise Exception()')).toBeTruthy() From 16c7485c9224ffcc2df5bf8ed2b35d5526c508ec Mon Sep 17 00:00:00 2001 From: tennyson-mccalla Date: Sun, 9 Jul 2017 16:01:22 -0400 Subject: [PATCH 139/291] Reverted an addition to the snippets --- snippets/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 1292992..6962bb7 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -55,7 +55,7 @@ 'body': 'self.fail(\'${1:message}\')$0' 'New Class': 'prefix': 'class' - 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' + 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1.}"""\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, 
self).__init__()}\n\t\tself.arg = arg\n\t\t$0' 'New Method': 'prefix': 'defs' 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' From 512e0a06abaa9e1b8c8951783365dfffdae2ce6a Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Wed, 12 Jul 2017 21:55:19 -0500 Subject: [PATCH 140/291] remove extra newline --- spec/language-python-spec.coffee | 1 - 1 file changed, 1 deletion(-) diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index ba605bd..f3f0509 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -74,7 +74,6 @@ describe 'Python settings', -> expect(decreaseIndentRegex.testSync('else')).toBeFalsy() expect(decreaseIndentRegex.testSync(' "finally:"')).toBeFalsy() - it 'matches lines correctly using the decreaseNextIndentPattern', -> decreaseNextIndentRegex = languageMode.decreaseNextIndentRegexForScopeDescriptor(['source.python']) From cb2e90840c01646418fe1f9054a1d280ac310cb3 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Wed, 12 Jul 2017 23:44:28 -0400 Subject: [PATCH 141/291] Prepare 0.45.4 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index bebf715..e5d3949 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.45.3", + "version": "0.45.4", "engines": { "atom": "*", "node": "*" From 54fd1fc3cf3fa39f14678952d41c881a30e9ba94 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Tue, 17 Oct 2017 21:44:16 +0200 Subject: [PATCH 142/291] Remove patterns for decreasing next indent level --- settings/language-python.cson | 1 - spec/language-python-spec.coffee | 14 -------------- 2 files changed, 15 deletions(-) diff --git a/settings/language-python.cson b/settings/language-python.cson index e6d1c09..e9105bb 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -6,4 +6,3 @@ 'commentStart': '# ' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while)\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' - 'decreaseNextIndentPattern': '^\\s*(pass|return|yield|continue|break|raise)\\b.*$' diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index f3f0509..47fd519 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -73,17 +73,3 @@ describe 'Python settings', -> expect(decreaseIndentRegex.testSync(' elif this_var == that_var')).toBeFalsy() expect(decreaseIndentRegex.testSync('else')).toBeFalsy() expect(decreaseIndentRegex.testSync(' "finally:"')).toBeFalsy() - - it 'matches lines correctly using the decreaseNextIndentPattern', -> - decreaseNextIndentRegex = languageMode.decreaseNextIndentRegexForScopeDescriptor(['source.python']) - - expect(decreaseNextIndentRegex.testSync(' return')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' return')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' return x')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' yield x')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' yield expression()')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' continue')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' break')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' pass')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' raise')).toBeTruthy() - expect(decreaseNextIndentRegex.testSync(' raise Exception()')).toBeTruthy() 
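
For reference, here is a minimal Python sketch of the lines that the `increaseIndentPattern` and `decreaseIndentPattern` settings are intended to match as they stand after PATCH 142 (a block keyword line ending in `:` indents the next line; `elif`/`else`/`except`/`finally` lines dedent themselves). The snippet and its per-line comments are an assumed reading of those regexes, not behavior captured from Atom:

```
def classify(n):                   # increaseIndentPattern: block keyword plus trailing ':'
    try:                           # increaseIndentPattern
        return 1 // n              # matched the decreaseNextIndentPattern removed in PATCH 142
    except ZeroDivisionError as e: # decreaseIndentPattern: dedent this line, then indent its body
        raise
    else:                          # decreaseIndentPattern (and increaseIndentPattern for its body)
        pass
    finally:                       # decreaseIndentPattern
        pass

total = classify(2)                # no match: no block keyword at the start, no trailing ':'
```
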
From 4d826a0fe05872e15ccd55cc0295d6a5a4e8f93f Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Wed, 18 Oct 2017 02:23:50 -0500 Subject: [PATCH 143/291] Add async indent patterns. --- settings/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings/language-python.cson b/settings/language-python.cson index e9105bb..0d2c66c 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -4,5 +4,5 @@ 'softTabs': true 'tabLength': 4 'commentStart': '# ' - 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while)\\b.*:\\s*$' + 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while|async def|async for|async with)\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' From 32ed25dcc3507b345fdb4b000041e17ec9371087 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Wed, 18 Oct 2017 02:23:59 -0500 Subject: [PATCH 144/291] Add tests for async indent patterns. --- spec/language-python-spec.coffee | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index 47fd519..1a69570 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -18,10 +18,14 @@ describe 'Python settings', -> expect(increaseIndentRegex.testSync('for i in range(n):')).toBeTruthy() expect(increaseIndentRegex.testSync(' for i in range(n):')).toBeTruthy() + expect(increaseIndentRegex.testSync('async for i in range(n):')).toBeTruthy() + expect(increaseIndentRegex.testSync(' async for i in range(n):')).toBeTruthy() expect(increaseIndentRegex.testSync('class TheClass(Object):')).toBeTruthy() expect(increaseIndentRegex.testSync(' class TheClass(Object):')).toBeTruthy() expect(increaseIndentRegex.testSync('def f(x):')).toBeTruthy() expect(increaseIndentRegex.testSync(' def f(x):')).toBeTruthy() + expect(increaseIndentRegex.testSync('async def f(x):')).toBeTruthy() + expect(increaseIndentRegex.testSync(' async def f(x):')).toBeTruthy() expect(increaseIndentRegex.testSync('if this_var == that_var:')).toBeTruthy() expect(increaseIndentRegex.testSync(' if this_var == that_var:')).toBeTruthy() expect(increaseIndentRegex.testSync('elif this_var == that_var:')).toBeTruthy() @@ -36,6 +40,8 @@ describe 'Python settings', -> expect(increaseIndentRegex.testSync(' finally:')).toBeTruthy() expect(increaseIndentRegex.testSync('with open("filename") as f:')).toBeTruthy() expect(increaseIndentRegex.testSync(' with open("filename") as f:')).toBeTruthy() + expect(increaseIndentRegex.testSync('async with open("filename") as f:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' async with open("filename") as f:')).toBeTruthy() expect(increaseIndentRegex.testSync('while True:')).toBeTruthy() expect(increaseIndentRegex.testSync(' while True:')).toBeTruthy() expect(increaseIndentRegex.testSync('\t\t while True:')).toBeTruthy() From 7f8ea3bb40d013667fa5eaec73daad98af4689a1 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Wed, 18 Oct 2017 02:25:48 -0500 Subject: [PATCH 145/291] Update path to test file so it exists in a directory from tests point of view. 
--- spec/language-python-spec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index 1a69570..72fcecd 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -6,7 +6,7 @@ describe 'Python settings', -> beforeEach -> waitsForPromise -> - atom.workspace.open('sample.py').then (o) -> + atom.workspace.open('../../sample.py').then (o) -> editor = o languageMode = editor.languageMode From d8cc1f6612e0b1470982171b806a4aa0807de5f2 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Fri, 20 Oct 2017 08:45:50 -0500 Subject: [PATCH 146/291] Simplify async pattern w/ suggestion from 50Wliu --- settings/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings/language-python.cson b/settings/language-python.cson index 0d2c66c..001e981 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -4,5 +4,5 @@ 'softTabs': true 'tabLength': 4 'commentStart': '# ' - 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while|async def|async for|async with)\\b.*:\\s*$' + 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while|async\\s+(def|for|with))\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' From 6983f1dc9c620b7e185d38e6b22e971a0f4e0316 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Fri, 20 Oct 2017 08:47:34 -0500 Subject: [PATCH 147/291] Remove unneeded filename from open call in spec --- spec/language-python-spec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index 72fcecd..e5b431e 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -6,7 +6,7 @@ describe 'Python settings', -> beforeEach -> waitsForPromise -> - atom.workspace.open('../../sample.py').then (o) -> + atom.workspace.open().then (o) -> editor = o languageMode = editor.languageMode From 19dadc166cbce8839111e456f0d44d458ebee23c Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Wed, 18 Oct 2017 02:45:28 -0500 Subject: [PATCH 148/291] Add lowercase letters to hex char inside strings. --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index c7ee5fb..1cc5f06 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -800,7 +800,7 @@ 'name': 'constant.character.escape.tab.python' '13': 'name': 'constant.character.escape.vertical-tab.python' - 'match': '(\\\\x[0-9A-F]{2})|(\\\\[0-7]{3})|(\\\\\\n)|(\\\\\\\\)|(\\\\\\")|(\\\\\')|(\\\\a)|(\\\\b)|(\\\\f)|(\\\\n)|(\\\\r)|(\\\\t)|(\\\\v)' + 'match': '(\\\\x[0-9A-Fa-f]{2})|(\\\\[0-7]{3})|(\\\\\\n)|(\\\\\\\\)|(\\\\\\")|(\\\\\')|(\\\\a)|(\\\\b)|(\\\\f)|(\\\\n)|(\\\\r)|(\\\\t)|(\\\\v)' 'escaped_unicode_char': 'captures': '1': From 40db4c1f97a3311c0c935952fe8ea4a62035498e Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Wed, 18 Oct 2017 02:45:49 -0500 Subject: [PATCH 149/291] Add tests for hex chars inside python strings. 
--- spec/python-spec.coffee | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 21c10ba..edc1bb4 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -230,6 +230,21 @@ describe "Python grammar", -> expect(tokens[0][12].value).toBe ']' expect(tokens[0][12].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python'] + it "tokenizes a hex escape inside a string", -> + tokens = grammar.tokenizeLines('"\\x5A"') + + expect(tokens[0][0].value).toBe '"' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][1].value).toBe '\\x5A' + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python'] + + tokens = grammar.tokenizeLines('"\\x9f"') + + expect(tokens[0][0].value).toBe '"' + expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[0][1].value).toBe '\\x9f' + expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python'] + it "tokenizes properties of self as self-type variables", -> tokens = grammar.tokenizeLines('self.foo') From 2c7f3e8322d28a566b8999b285b2090389187950 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Fri, 20 Oct 2017 08:55:14 -0500 Subject: [PATCH 150/291] Remove unneeded filename from open call in specs --- spec/language-python-spec.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index 47fd519..bb18a8d 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -6,7 +6,7 @@ describe 'Python settings', -> beforeEach -> waitsForPromise -> - atom.workspace.open('sample.py').then (o) -> + atom.workspace.open().then (o) -> editor = o languageMode = editor.languageMode From 97bdd762ba56c2acda5d7f4570bef2f1cd7b47ad Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Wed, 25 Oct 2017 14:56:11 +0200 Subject: [PATCH 151/291] Prepare 0.45.5 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index e5d3949..fa67250 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.45.4", + "version": "0.45.5", "engines": { "atom": "*", "node": "*" From 8aaa2e0914e86ffef97a7c28282c87db4b544c4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6ster?= Date: Tue, 7 Nov 2017 11:11:55 +0100 Subject: [PATCH 152/291] Update python.cson --- grammars/python.cson | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 1cc5f06..382b7dc 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -12,7 +12,8 @@ 'SConstruct' 'Sconstruct' 'sconstruct' - 'Snakefile' + 'Snakefile' # Snakemake support + 'smk' # Snakemake support 'tac' 'wsgi' ] From fb01993a826ef56af50f5f91f94cb6c01dbdd61f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 5 Dec 2017 14:13:00 -0800 Subject: [PATCH 153/291] Add tree-sitter grammar --- grammars/tree-sitter-python.cson | 92 ++++++++++++++++++++++++++++++++ package.json | 3 ++ 2 files changed, 95 insertions(+) create mode 100644 grammars/tree-sitter-python.cson diff 
--git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson new file mode 100644 index 0000000..aa1ff74 --- /dev/null +++ b/grammars/tree-sitter-python.cson @@ -0,0 +1,92 @@ +id: 'python' +name: 'Python' +type: 'tree-sitter' +parser: 'tree-sitter-python' + +fileTypes: [ + 'py' +] + +folds: [ + { + type: [ + 'if_statement' + 'for_statement' + 'try_statement' + 'with_statement' + 'while_statement' + 'class_definition' + 'function_definition' + 'async_function_definition' + ] + start: {type: ':'} + } + { + start: {type: '(', index: 0} + end: {type: ')', index: -1} + }, + { + start: {type: '[', index: 0} + end: {type: ']', index: -1} + }, + { + start: {type: '{', index: 0} + end: {type: '}', index: -1} + } +] + +comments: + start: '# ' + +scopes: + 'module': 'source.python' + + 'comment': 'comment.line' + 'string': 'string.quoted' + + 'class_definition > identifier': 'entity.name.type.class' + 'function_definition > identifier': 'entity.name.function' + 'call > identifier:nth-child(0)': 'entity.name.function' + 'call > attribute > identifier:nth-child(2)': 'entity.name.function' + + 'attribute > identifier:nth-child(2)': 'variable.other.object.property' + + 'decorator': 'entity.name.function.decorator' + + 'none': 'constant.language' + 'true': 'constant.language' + 'false': 'constant.language' + + 'type > identifier': 'support.storage.type' + + '"class"': 'storage.type.class' + '"def"': 'storage.type.function' + '"lambda"': 'storage.type.function' + + '"if"': 'keyword.control' + '"else"': 'keyword.control' + '"elif"': 'keyword.control' + '"while"': 'keyword.control' + '"for"': 'keyword.control' + '"return"': 'keyword.control' + '"break"': 'keyword.control' + '"continue"': 'keyword.control' + '"raise"': 'keyword.control' + '"try"': 'keyword.control' + '"except"': 'keyword.control' + '"with"': 'keyword.control' + '"as"': 'keyword.control' + '"finally"': 'keyword.control' + '"import"': 'keyword.control' + '"from"': 'keyword.control' + + '"+"': 'keyword.operator' + '"-"': 'keyword.operator' + '"*"': 'keyword.operator' + '"/"': 'keyword.operator' + '"%"': 'keyword.operator' + '"in"': 'keyword.operator.in' + '"and"': 'keyword.operator.logical' + '"or"': 'keyword.operator.logical' + '"not"': 'keyword.operator.logical' + '"is"': 'keyword.operator.logical' diff --git a/package.json b/package.json index fa67250..d54ada9 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,9 @@ "bugs": { "url": "https://github.com/atom/language-python/issues" }, + "dependencies": { + "tree-sitter-python": "^0.2.0" + }, "devDependencies": { "coffeelint": "^1.10.1" } From 0513c49f6819b59c880e6abf59f883bd2334bcc1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 5 Dec 2017 14:13:17 -0800 Subject: [PATCH 154/291] Prepare 0.46.0-0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d54ada9..92c09cb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.45.5", + "version": "0.46.0-0", "engines": { "atom": "*", "node": "*" From 8cc423da81e1cae7b77f135d7a2baf77d25d19e4 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Thu, 7 Dec 2017 11:57:07 +0100 Subject: [PATCH 155/291] Prepare 0.45.6 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index fa67250..3a38ce4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.45.5", + "version": 
"0.45.6", "engines": { "atom": "*", "node": "*" From e89cf07522391aea51962d915fd3c51d043aabc0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 15 Dec 2017 16:31:24 -0800 Subject: [PATCH 156/291] Add legacy scope name property --- grammars/tree-sitter-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index aa1ff74..8a0b960 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -2,6 +2,7 @@ id: 'python' name: 'Python' type: 'tree-sitter' parser: 'tree-sitter-python' +legacyScopeName: 'source.python' fileTypes: [ 'py' From 42767cfdab595e6a4deaa3280868895cc91a78d0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 15 Dec 2017 16:31:33 -0800 Subject: [PATCH 157/291] Prepare 0.46.0-1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 92c09cb..d0be9f5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.46.0-0", + "version": "0.46.0-1", "engines": { "atom": "*", "node": "*" From 4e9e8d900a945abfa8b7b8a3e0418625e42e486d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Dec 2017 13:45:16 -0800 Subject: [PATCH 158/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d0be9f5..48e2fcf 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "url": "https://github.com/atom/language-python/issues" }, "dependencies": { - "tree-sitter-python": "^0.2.0" + "tree-sitter-python": "^0.3.0" }, "devDependencies": { "coffeelint": "^1.10.1" From ddc3ede7875224014c90410cc72c279d2c5b838c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Dec 2017 13:45:25 -0800 Subject: [PATCH 159/291] Prepare 0.46.0-2 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 48e2fcf..71febf9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.46.0-1", + "version": "0.46.0-2", "engines": { "atom": "*", "node": "*" From d122d50d2d1fd63daf2430b51a277a224c63bcfa Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 8 Jan 2018 09:48:08 -0800 Subject: [PATCH 160/291] 0.47.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 71febf9..9710447 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.46.0-2", + "version": "0.47.0", "engines": { "atom": "*", "node": "*" From 5c7eb63119be20dae141a8762dee07962f387cfe Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sat, 20 Jan 2018 21:38:45 -0500 Subject: [PATCH 161/291] Clean up function calls --- grammars/python.cson | 49 +++++++++-------------------------------- spec/python-spec.coffee | 16 ++++++++++++++ 2 files changed, 26 insertions(+), 39 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 382b7dc..7c850ca 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -406,16 +406,22 @@ ] } { - 'begin': '(?<=\\)|\\])\\s*(\\()' + 'begin': '(?:([A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*)|(?<=\\)|\\]))\\s*(\\()' 'beginCaptures': '1': + 'patterns': [ + { + 'include': '#dotted_name' + } + ] + '2': 'name': 'punctuation.definition.arguments.begin.python' - 'contentName': 'meta.function-call.arguments.python' - 'end': '(\\))' + 'end': 
'\\)' 'endCaptures': - '1': + '0': 'name': 'punctuation.definition.arguments.end.python' 'name': 'meta.function-call.python' + 'contentName': 'meta.function-call.arguments.python' 'patterns': [ { 'include': '#keyword_arguments' @@ -425,41 +431,6 @@ } ] } - { - 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\()' - 'end': '(\\))' - 'endCaptures': - '1': - 'name': 'punctuation.definition.arguments.end.python' - 'name': 'meta.function-call.python' - 'patterns': [ - { - 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*\\s*\\()' - 'end': '(?=\\s*\\()' - 'patterns': [ - { - 'include': '#dotted_name' - } - ] - } - { - 'begin': '(\\()' - 'beginCaptures': - '1': - 'name': 'punctuation.definition.arguments.begin.python' - 'contentName': 'meta.function-call.arguments.python' - 'end': '(?=\\))' - 'patterns': [ - { - 'include': '#keyword_arguments' - } - { - 'include': '$self' - } - ] - } - ] - } { 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\[)' 'end': '(\\])' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index edc1bb4..13cacdf 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -307,6 +307,22 @@ describe "Python grammar", -> expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python'] + it "tokenizes complex function calls", -> + {tokens} = grammar.tokenizeLine "torch.nn.BCELoss()(Variable(bayes_optimal_prob, 1, requires_grad=False), Yvar).data[0]" + + expect(tokens[4]).toEqual value: 'BCELoss', scopes: ['source.python', 'meta.function-call.python'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[6]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[8]).toEqual value: 'Variable', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python'] + expect(tokens[9]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[10]).toEqual value: 'bayes_optimal_prob', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python'] + expect(tokens[14]).toEqual value: 'requires_grad', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'variable.parameter.function.python'] + expect(tokens[16]).toEqual value: 'False', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'constant.language.python'] + expect(tokens[17]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[18]).toEqual value: ', ', 
scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python'] + expect(tokens[20]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[21]).toEqual value: '.', scopes: ['source.python'] it "tokenizes SQL inline highlighting on blocks", -> delimsByScope = From 1c281b367a0142d6ead38cd5742b6ced324fa708 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sat, 20 Jan 2018 21:49:02 -0500 Subject: [PATCH 162/291] Use Trusty on Travis --- .travis.yml | 36 +++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 20cfe51..47ee9a1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,15 +1,41 @@ -language: objective-c +### Project specific config ### +language: generic + +env: + global: + - APM_TEST_PACKAGES="" + - ATOM_LINT_WITH_BUNDLED_NODE="true" + + matrix: + - ATOM_CHANNEL=stable + - ATOM_CHANNEL=beta + +### Generic setup follows ### +script: + - curl -s -O https://raw.githubusercontent.com/atom/ci/master/build-package.sh + - chmod u+x build-package.sh + - ./build-package.sh notifications: email: on_success: never on_failure: change -script: 'curl -s https://raw.githubusercontent.com/atom/ci/master/build-package.sh | sh' +branches: + only: + - master git: depth: 10 -branches: - only: - - master +sudo: false + +dist: trusty + +addons: + apt: + packages: + - build-essential + - fakeroot + - git + - libsecret-1-dev From 1c2d2bd558d94566645087cf8363b7c189de4190 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sat, 20 Jan 2018 22:41:31 -0500 Subject: [PATCH 163/291] Level up string formatting --- grammars/python.cson | 100 +++++++++++++++++++++++++++++-------------- 1 file changed, 69 insertions(+), 31 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 7c850ca..f1be1aa 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -628,9 +628,6 @@ 'builtin_types': 'match': '(?x)\\b(\n\t\t\t\tbasestring|bool|buffer|bytearray|bytes|complex|dict|float|frozenset|int|\n\t\t\t\tlist|long|memoryview|object|range|set|slice|str|tuple|unicode|xrange\n\t\t\t)\\b' 'name': 'support.type.python' - 'constant_placeholder': - 'match': '(?i:(%(\\([a-z_]+\\))?#?0?\\-?[ ]?\\+?([0-9]*|\\*)(\\.([0-9]*|\\*))?([hL][a-z]|[a-z%]))|(\\{([!\\[\\].:\\w ]+)?\\}))' - 'name': 'constant.other.placeholder.python' 'docstrings': 'patterns': [ { @@ -850,6 +847,47 @@ 'include': 'source.regexp.python' } ] + 'string_formatting': + # TODO: Add $self highlighting? + 'match': '''(?x) + # https://docs.python.org/2/library/stdtypes.html#string-formatting (deprecated) + % + (\\([a-zA-Z_]+\\))? # mapping key + [#0+\\- ]? # conversion flags + (\\d+|\\*)? # minimum field width + (\\.(\\d+)|\\*)? # precision + [hlL]? # length modifier + [diouxXeEfFgGcrs%] # conversion type + | + # https://docs.python.org/3/library/string.html#format-string-syntax + { + ( + ( + \\d # integer + | + [a-zA-Z_]\\w* # identifier + ) + ( + \\.[a-zA-Z_]\\w* # attribute name + | + \\[[^\\]]+\\] # element index + )* + )? + (![rsa])? # conversion + ( + : + (.?[<>=^])? # fill followed by align + [+\\- ]? # sign (space at the end is intentional) + \\#? # alternate form + 0? + \\d* # width + [_,]? # grouping option + (\\.\\d+)? # precision + [bcdeEfFgGnosxX%]? # type + )? 
+ } + ''' + 'name': 'constant.other.placeholder.python' 'string_quoted_double': 'patterns': [ { @@ -869,7 +907,7 @@ 'name': 'string.quoted.double.block.unicode-raw-regex.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -899,7 +937,7 @@ 'name': 'string.quoted.double.block.unicode-raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -926,7 +964,7 @@ 'name': 'string.quoted.double.block.raw-regex.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -953,7 +991,7 @@ 'name': 'string.quoted.double.block.raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -977,7 +1015,7 @@ 'name': 'string.quoted.double.block.unicode.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -995,7 +1033,7 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - {'include': '#constant_placeholder'} + {'include': '#string_formatting'} {'include': '#escaped_unicode_char'} {'include': '#escaped_char'} {'include': '#regular_expressions'} @@ -1025,7 +1063,7 @@ 'name': 'string.quoted.double.single-line.unicode-raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -1043,7 +1081,7 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - {'include': '#constant_placeholder'} + {'include': '#string_formatting'} {'include': '#escaped_char'} {'include': '#regular_expressions'} ] @@ -1072,7 +1110,7 @@ 'name': 'string.quoted.double.single-line.raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1098,7 +1136,7 @@ 'name': 'string.quoted.double.single-line.unicode.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -1132,7 +1170,7 @@ ] } { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1156,7 +1194,7 @@ 'name': 'string.quoted.double.single-line.sql.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1181,7 +1219,7 @@ 'name': 'string.quoted.double.block.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1205,7 +1243,7 @@ 'name': 'string.quoted.double.single-line.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1243,7 +1281,7 @@ 'name': 'string.quoted.single.block.unicode-raw-regex.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -1273,7 +1311,7 @@ 'name': 'string.quoted.single.block.unicode-raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -1300,7 +1338,7 @@ 'name': 'string.quoted.single.block.raw-regex.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1327,7 +1365,7 @@ 'name': 
'string.quoted.single.block.raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1351,7 +1389,7 @@ 'name': 'string.quoted.single.block.unicode.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -1369,7 +1407,7 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - {'include': '#constant_placeholder'} + {'include': '#string_formatting'} {'include': '#escaped_unicode_char'} {'include': '#escaped_char'} {'include': '#regular_expressions'} @@ -1397,7 +1435,7 @@ 'name': 'string.quoted.single.single-line.unicode-raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -1415,7 +1453,7 @@ 'name': 'punctuation.definition.string.begin.python' '3': 'patterns': [ - {'include': '#constant_placeholder'} + {'include': '#string_formatting'} {'include': '#escaped_char'} {'include': '#regular_expressions'} ] @@ -1442,7 +1480,7 @@ 'name': 'string.quoted.single.single-line.raw.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1466,7 +1504,7 @@ 'name': 'string.quoted.single.single-line.unicode.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_unicode_char' @@ -1500,7 +1538,7 @@ ] } { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1522,7 +1560,7 @@ 'name': 'string.quoted.single.single-line.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1547,7 +1585,7 @@ 'name': 'string.quoted.single.block.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' @@ -1569,7 +1607,7 @@ 'name': 'string.quoted.single.single-line.python' 'patterns': [ { - 'include': '#constant_placeholder' + 'include': '#string_formatting' } { 'include': '#escaped_char' From 6ea6dcff01436210c662097ce6c2963859860d33 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 00:43:21 -0500 Subject: [PATCH 164/291] :bug: --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index f1be1aa..0fe5ca2 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -855,7 +855,7 @@ (\\([a-zA-Z_]+\\))? # mapping key [#0+\\- ]? # conversion flags (\\d+|\\*)? # minimum field width - (\\.(\\d+)|\\*)? # precision + (\\.(\\d+|\\*))? # precision [hlL]? 
# length modifier [diouxXeEfFgGcrs%] # conversion type | From 1a650797e2f487fe27bab5a6deb7600ec23af4b7 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 11:48:43 -0500 Subject: [PATCH 165/291] Specs --- spec/python-spec.coffee | 202 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 202 insertions(+) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 13cacdf..811dc2d 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -245,6 +245,208 @@ describe "Python grammar", -> expect(tokens[0][1].value).toBe '\\x9f' expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python'] + describe "string formatting", -> + describe "%-style formatting", -> + it "tokenizes the conversion type", -> + {tokens} = grammar.tokenizeLine '"%d"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%d', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes an optional mapping key", -> + {tokens} = grammar.tokenizeLine '"%(key)x"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%(key)x', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes an optional conversion flag", -> + {tokens} = grammar.tokenizeLine '"% F"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '% F', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes an optional field width", -> + {tokens} = grammar.tokenizeLine '"%11s"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%11s', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes * as the optional field width", -> + {tokens} = grammar.tokenizeLine '"%*g"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%*g', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + 
it "tokenizes an optional precision", -> + {tokens} = grammar.tokenizeLine '"%.4r"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%.4r', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes * as the optional precision", -> + {tokens} = grammar.tokenizeLine '"%.*%"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%.*%', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes an optional length modifier", -> + {tokens} = grammar.tokenizeLine '"%Lo"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%Lo', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes complex formats", -> + {tokens} = grammar.tokenizeLine '"%(key)#5.*hc"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '%(key)#5.*hc', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + describe "{}-style formatting", -> + it "tokenizes the empty replacement field", -> + {tokens} = grammar.tokenizeLine '"{}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes a number as the field name", -> + {tokens} = grammar.tokenizeLine '"{1}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{1}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes a variable name as the field name", -> + {tokens} = grammar.tokenizeLine '"{key}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 
'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{key}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes field name attributes", -> + {tokens} = grammar.tokenizeLine '"{key.length}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{key.length}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + {tokens} = grammar.tokenizeLine '"{4.width}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{4.width}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + {tokens} = grammar.tokenizeLine '"{python2[\'3\']}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{python2[\'3\']}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + {tokens} = grammar.tokenizeLine '"{2[4]}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{2[4]}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes multiple field name attributes", -> + {tokens} = grammar.tokenizeLine '"{nested.a[2][\'val\'].value}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{nested.a[2][\'val\'].value}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes conversions", -> + {tokens} = grammar.tokenizeLine '"{!r}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{!r}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 
'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + describe "format specifiers", -> + it "tokenizes alignment", -> + {tokens} = grammar.tokenizeLine '"{:<}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:<}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + {tokens} = grammar.tokenizeLine '"{:a^}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:a^}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes signs", -> + {tokens} = grammar.tokenizeLine '"{:+}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:+}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + {tokens} = grammar.tokenizeLine '"{: }"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{: }', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes the alternate form indicator", -> + {tokens} = grammar.tokenizeLine '"{:#}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:#}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes 0", -> + {tokens} = grammar.tokenizeLine '"{:0}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:0}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes the width", -> + {tokens} = grammar.tokenizeLine '"{:34}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: 
'{:34}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes the grouping option", -> + {tokens} = grammar.tokenizeLine '"{:,}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:,}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes the precision", -> + {tokens} = grammar.tokenizeLine '"{:.5}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:.5}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes the type", -> + {tokens} = grammar.tokenizeLine '"{:b}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:b}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes complex formats", -> + {tokens} = grammar.tokenizeLine '"{0.players[2]!a:2>-#01_.3d}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{0.players[2]!a:2>-#01_.3d}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + it "tokenizes properties of self as self-type variables", -> tokens = grammar.tokenizeLines('self.foo') From 4bfe85edf17e43af5e9c24d9877763bbea409d24 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 11:51:21 -0500 Subject: [PATCH 166/291] Punt on $self highlighting for now If it turns out people want it it'll be trivial to add --- grammars/python.cson | 1 - 1 file changed, 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 0fe5ca2..9380b04 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -848,7 +848,6 @@ } ] 'string_formatting': - # TODO: Add $self highlighting? 
'match': '''(?x) # https://docs.python.org/2/library/stdtypes.html#string-formatting (deprecated) % From 95a3416883041a7dbad9fa0a0b707a0e8f21c574 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 11:52:09 -0500 Subject: [PATCH 167/291] :memo: --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 9380b04..3e8dcfd 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -852,7 +852,7 @@ # https://docs.python.org/2/library/stdtypes.html#string-formatting (deprecated) % (\\([a-zA-Z_]+\\))? # mapping key - [#0+\\- ]? # conversion flags + [#0+\\- ]? # conversion flags (space at the end is intentional) (\\d+|\\*)? # minimum field width (\\.(\\d+|\\*))? # precision [hlL]? # length modifier From 668a624f70d3a2ab158430ca65946f69f5e79469 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 12:03:13 -0500 Subject: [PATCH 168/291] Tokenize {{ and }} as escape characters --- grammars/python.cson | 18 +++++++++++++++++- spec/python-spec.coffee | 9 +++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 3e8dcfd..2a49353 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -742,6 +742,21 @@ } ] 'escaped_char': + 'match': '''(?x) + (\\\\x[0-9A-Fa-f]{2})| + (\\\\[0-7]{3})|(\\\\\\n)| + (\\\\\\\\)| + (\\\\\\")| + (\\\\\')| + (\\\\a)| + (\\\\b)| + (\\\\f)| + (\\\\n)| + (\\\\r)| + (\\\\t)| + (\\\\v)| + ({{|}}) + ''' 'captures': '1': 'name': 'constant.character.escape.hex.python' @@ -769,7 +784,8 @@ 'name': 'constant.character.escape.tab.python' '13': 'name': 'constant.character.escape.vertical-tab.python' - 'match': '(\\\\x[0-9A-Fa-f]{2})|(\\\\[0-7]{3})|(\\\\\\n)|(\\\\\\\\)|(\\\\\\")|(\\\\\')|(\\\\a)|(\\\\b)|(\\\\f)|(\\\\n)|(\\\\r)|(\\\\t)|(\\\\v)' + '14': + 'name': 'constant.character.escape.curly-bracket.python' 'escaped_unicode_char': 'captures': '1': diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 811dc2d..21e9123 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -447,6 +447,15 @@ describe "Python grammar", -> expect(tokens[1]).toEqual value: '{0.players[2]!a:2>-#01_.3d}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + it "tokenizes {{ and }} as escape characters and not formatters", -> + {tokens} = grammar.tokenizeLine '"{{hello}}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{{', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python'] + expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', 'string.quoted.double.single-line.python'] + expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python'] + expect(tokens[4]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + it "tokenizes properties of self as self-type variables", -> tokens = grammar.tokenizeLines('self.foo') From 0f5b04a0ba07e14f4287b9c4f5ce998acb9d9664 
Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 12:31:44 -0500 Subject: [PATCH 169/291] Do not recursively match character classes --- grammars/regular expressions (python).cson | 59 +++++++++------------- spec/python-regex-spec.coffee | 18 +++++++ spec/python-spec.coffee | 8 +-- 3 files changed, 46 insertions(+), 39 deletions(-) create mode 100644 spec/python-regex-spec.coffee diff --git a/grammars/regular expressions (python).cson b/grammars/regular expressions (python).cson index b3295a0..6594138 100644 --- a/grammars/regular expressions (python).cson +++ b/grammars/regular expressions (python).cson @@ -1,10 +1,10 @@ -'comment': 'Matches Python\'s regular expression syntax.' +'name': 'Regular Expressions (Python)' +'scopeName': 'source.regexp.python' +'foldingStartMarker': '(/\\*|\\{|\\()' +'foldingStopMarker': '(\\*/|\\}|\\))' 'fileTypes': [ 're' ] -'foldingStartMarker': '(/\\*|\\{|\\()' -'foldingStopMarker': '(\\*/|\\}|\\))' -'name': 'Regular Expressions (Python)' 'patterns': [ { 'match': '\\\\[bBAZzG]|\\^|\\$' @@ -109,14 +109,20 @@ ] } { - 'include': '#character-class' - } -] -'repository': - 'character-class': + 'begin': '(\\[)(\\^)?' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.character-class.begin.regexp' + '2': + 'name': 'keyword.operator.negation.regexp' + 'end': '\\]' + 'endCaptures': + '0': + 'name': 'punctuation.definition.character-class.end.regexp' + 'name': 'constant.other.character-class.set.regexp' 'patterns': [ { - 'match': '\\\\[wWsSdDhH]|\\.' + 'match': '\\\\[wWsSdDhH]' 'name': 'constant.character.character-class.regexp' } { @@ -124,31 +130,14 @@ 'name': 'constant.character.escape.backslash.regexp' } { - 'begin': '(\\[)(\\^)?' - 'beginCaptures': - '1': - 'name': 'punctuation.definition.character-class.regexp' + 'captures': '2': - 'name': 'keyword.operator.negation.regexp' - 'end': '(\\])' - 'endCaptures': - '1': - 'name': 'punctuation.definition.character-class.regexp' - 'name': 'constant.other.character-class.set.regexp' - 'patterns': [ - { - 'include': '#character-class' - } - { - 'captures': - '2': - 'name': 'constant.character.escape.backslash.regexp' - '4': - 'name': 'constant.character.escape.backslash.regexp' - 'match': '((\\\\.)|.)\\-((\\\\.)|[^\\]])' - 'name': 'constant.other.character-class.range.regexp' - } - ] + 'name': 'constant.character.escape.backslash.regexp' + '4': + 'name': 'constant.character.escape.backslash.regexp' + 'match': '((\\\\.)|.)\\-((\\\\.)|[^\\]])' + 'name': 'constant.other.character-class.range.regexp' } ] -'scopeName': 'source.regexp.python' + } +] diff --git a/spec/python-regex-spec.coffee b/spec/python-regex-spec.coffee new file mode 100644 index 0000000..45920ac --- /dev/null +++ b/spec/python-regex-spec.coffee @@ -0,0 +1,18 @@ +describe 'Python regular expression grammar', -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage('language-python') + + runs -> + grammar = atom.grammars.grammarForScopeName('source.regexp.python') + + describe 'character classes', -> + it 'does not recursively match character classes', -> + {tokens} = grammar.tokenizeLine '[.:[\\]@]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] + expect(tokens[1]).toEqual value: '.:[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] + expect(tokens[2]).toEqual value: '\\]', scopes: ['source.regexp.python', 
'constant.other.character-class.set.regexp', 'constant.character.escape.backslash.regexp'] + expect(tokens[3]).toEqual value: '@', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] + expect(tokens[4]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 21e9123..e0635e1 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -70,7 +70,7 @@ describe "Python grammar", -> expect(tokens[0][2].value).toBe '%d' expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python'] expect(tokens[0][3].value).toBe '[' - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] expect(tokens[0][4].value).toBe "'" expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] expect(tokens[0][5].value).toBe ' ' @@ -110,7 +110,7 @@ describe "Python grammar", -> expect(tokens[0][2].value).toBe '%d' expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python'] expect(tokens[0][3].value).toBe '[' - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] expect(tokens[0][4].value).toBe '"' expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python'] expect(tokens[0][5].value).toBe ' ' @@ -150,7 +150,7 @@ describe "Python grammar", -> expect(tokens[0][2].value).toBe '%d' expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] expect(tokens[0][3].value).toBe '[' - expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] expect(tokens[0][4].value).toBe "'" expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] expect(tokens[0][5].value).toBe ' ' @@ -190,7 +190,7 @@ describe "Python grammar", -> expect(tokens[0][2].value).toBe '%d' expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python'] expect(tokens[0][3].value).toBe '[' - 
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] expect(tokens[0][4].value).toBe '"' expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python'] expect(tokens[0][5].value).toBe ' ' From 5ddcb282daa1da44ae9d186ea19d3890aaddbc12 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 12:44:25 -0500 Subject: [PATCH 170/291] Also handle the []] edge case while we're at it --- grammars/regular expressions (python).cson | 2 +- spec/python-regex-spec.coffee | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/grammars/regular expressions (python).cson b/grammars/regular expressions (python).cson index 6594138..fabcd80 100644 --- a/grammars/regular expressions (python).cson +++ b/grammars/regular expressions (python).cson @@ -115,7 +115,7 @@ 'name': 'punctuation.definition.character-class.begin.regexp' '2': 'name': 'keyword.operator.negation.regexp' - 'end': '\\]' + 'end': '(?!\\G)\\]' # Character classes cannot be empty (if the first character is a ] it is treated literally) 'endCaptures': '0': 'name': 'punctuation.definition.character-class.end.regexp' diff --git a/spec/python-regex-spec.coffee b/spec/python-regex-spec.coffee index 45920ac..ae6ee92 100644 --- a/spec/python-regex-spec.coffee +++ b/spec/python-regex-spec.coffee @@ -16,3 +16,9 @@ describe 'Python regular expression grammar', -> expect(tokens[2]).toEqual value: '\\]', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'constant.character.escape.backslash.regexp'] expect(tokens[3]).toEqual value: '@', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] expect(tokens[4]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] + + it 'does not end the character class early if the first character is a ]', -> + {tokens} = grammar.tokenizeLine '[][]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] + expect(tokens[1]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] + expect(tokens[2]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] From 7df338f11de2c003a3d1ec159a5821d0675b6788 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 12:46:46 -0500 Subject: [PATCH 171/291] Add test for negation as well --- spec/python-regex-spec.coffee | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/spec/python-regex-spec.coffee b/spec/python-regex-spec.coffee index ae6ee92..cb11eee 100644 --- a/spec/python-regex-spec.coffee +++ b/spec/python-regex-spec.coffee @@ -22,3 +22,9 @@ describe 'Python regular expression grammar', -> expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 
'punctuation.definition.character-class.begin.regexp'] expect(tokens[1]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] expect(tokens[2]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] + + {tokens} = grammar.tokenizeLine '[^][]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'] + expect(tokens[1]).toEqual value: '^', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'keyword.operator.negation.regexp'] + expect(tokens[2]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] + expect(tokens[3]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] From 78f84a25922f60b487596d9b94c4f85690386be9 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 17:59:33 -0500 Subject: [PATCH 172/291] Add support for f-strings and nested replacement fields --- grammars/python.cson | 376 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 357 insertions(+), 19 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 2a49353..6ef768a 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -855,26 +855,8 @@ 'comment': 'magic variables which a class/module may have.' 'match': '\\b__(all|annotations|bases|class|closure|code|debug|dict|doc|file|func|globals|kwdefaults|members|metaclass|methods|module|name|qualname|self|slots|weakref)__\\b' 'name': 'support.variable.magic.python' - 'regular_expressions': - 'comment': 'Changed disabled to 1 to turn off syntax highlighting in “r” strings.' - 'disabled': 0 - 'patterns': [ - { - 'include': 'source.regexp.python' - } - ] - 'string_formatting': + 'nested_replacement_field': 'match': '''(?x) - # https://docs.python.org/2/library/stdtypes.html#string-formatting (deprecated) - % - (\\([a-zA-Z_]+\\))? # mapping key - [#0+\\- ]? # conversion flags (space at the end is intentional) - (\\d+|\\*)? # minimum field width - (\\.(\\d+|\\*))? # precision - [hlL]? # length modifier - [diouxXeEfFgGcrs%] # conversion type - | - # https://docs.python.org/3/library/string.html#format-string-syntax { ( ( @@ -903,6 +885,126 @@ } ''' 'name': 'constant.other.placeholder.python' + 'regular_expressions': + 'comment': 'Changed disabled to 1 to turn off syntax highlighting in “r” strings.' + 'disabled': 0 + 'patterns': [ + { + 'include': 'source.regexp.python' + } + ] + 'string_formatting': + 'patterns': [ + { + # https://docs.python.org/2/library/stdtypes.html#string-formatting (deprecated) + 'match': '''(?x) + % + (\\([a-zA-Z_]+\\))? # mapping key + [#0+\\- ]? # conversion flags (space at the end is intentional) + (\\d+|\\*)? # minimum field width + (\\.(\\d+|\\*))? # precision + [hlL]? # length modifier + [diouxXeEfFgGcrs%] # conversion type + ''' + 'name': 'constant.other.placeholder.python' + } + { + # https://docs.python.org/3/library/string.html#format-string-syntax + 'match': '''(?x) + { + (?: + (?: + \\d # integer + | + [a-zA-Z_]\\w* # identifier + ) + (?: + \\.[a-zA-Z_]\\w* # attribute name + | + \\[[^\\]]+\\] # element index + )* + )? + (?:![rsa])? # conversion + (?: + # Yup, this is disgusting. But top-level format specifiers can have nested replacement fields. 
+ : + (?:(?:.|({[^}]*}))?(?:[<>=^]|({[^}]*})))? # fill followed by align + (?:[+\\- ]|({[^}]*}))? # sign (space at the end is intentional) + (?:\\#|({[^}]*}))? # alternate form + (?:0|({[^}]*}))? + (?:\\d+|({[^}]*}))? # width + (?:[_,]|({[^}]*}))? # grouping option + (?:\\.(?:\\d+|({[^}]*}))|({[^}]*}))? # precision + (?:[bcdeEfFgGnosxX%]|({[^}]*}))? # type + )? + } + ''' + 'name': 'constant.other.placeholder.python' + 'captures': + '1': 'patterns': [{'include': '#nested_replacement_field'}] + '2': 'patterns': [{'include': '#nested_replacement_field'}] + '3': 'patterns': [{'include': '#nested_replacement_field'}] + '4': 'patterns': [{'include': '#nested_replacement_field'}] + '5': 'patterns': [{'include': '#nested_replacement_field'}] + '6': 'patterns': [{'include': '#nested_replacement_field'}] + '7': 'patterns': [{'include': '#nested_replacement_field'}] + '8': 'patterns': [{'include': '#nested_replacement_field'}] + '9': 'patterns': [{'include': '#nested_replacement_field'}] + '10': 'patterns': [{'include': '#nested_replacement_field'}] + } + ] + 'string_interpolation': + # https://docs.python.org/3/reference/lexical_analysis.html#f-strings + # and https://www.python.org/dev/peps/pep-0498/ + # Unlike string_formatting, string_interpolation can contain expressions + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.interpolation.begin.bracket.curly.python' + 'end': '''(?x)(?!\\G) + ( + (?:![rsa])? # conversion + (?: + # Yup, this is disgusting. But top-level format specifiers can have nested replacement fields. + : + (?:(?:.|({[^}]*}))?(?:[<>=^]|({[^}]*})))? # fill followed by align + (?:[+\\- ]|({[^}]*}))? # sign (space at the end is intentional) + (?:\\#|({[^}]*}))? # alternate form + (?:0|({[^}]*}))? + (?:\\d+|({[^}]*}))? # width + (?:[_,]|({[^}]*}))? # grouping option + (?:\\.(?:\\d+|({[^}]*}))|({[^}]*}))? # precision + (?:[bcdeEfFgGnosxX%]|({[^}]*}))? # type + )? 
+ ) + (}) + ''' + 'endCaptures': + '1': + 'name': 'constant.other.placeholder.python' + '2': 'patterns': [{'include': '#nested_replacement_field'}] + '3': 'patterns': [{'include': '#nested_replacement_field'}] + '4': 'patterns': [{'include': '#nested_replacement_field'}] + '5': 'patterns': [{'include': '#nested_replacement_field'}] + '6': 'patterns': [{'include': '#nested_replacement_field'}] + '7': 'patterns': [{'include': '#nested_replacement_field'}] + '8': 'patterns': [{'include': '#nested_replacement_field'}] + '9': 'patterns': [{'include': '#nested_replacement_field'}] + '10': 'patterns': [{'include': '#nested_replacement_field'}] + '11': 'patterns': [{'include': '#nested_replacement_field'}] + '12': + 'name': 'punctuation.definition.interpolation.end.bracket.curly.python' + 'name': 'meta.interpolation.python' + 'contentName': 'meta.embedded.python' + 'patterns': [ + { + 'match': '\\\\' + 'name': 'invalid.illegal.backslash.python' + } + { + 'include': '$self' + } + ] 'string_quoted_double': 'patterns': [ { @@ -1040,6 +1142,65 @@ } ] } + { + 'begin': '([fF])(""")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'comment': 'double quoted unicode string' + 'end': '((?<=""")(")""|""")' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + 'name': 'string.quoted.double.block.format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } + { + 'include': '#escaped_unicode_char' + } + { + 'include': '#escaped_char' + } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } + ] + } + { + 'begin': '([rR][fF])(""")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'comment': 'double quoted unicode string' + 'end': '((?<=""")(")""|""")' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + 'name': 'string.quoted.double.block.raw-format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } + { + 'include': '#escaped_char' + } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } + ] + } { 'captures': '1': @@ -1161,6 +1322,70 @@ } ] } + { + 'begin': '([fF])(")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=")(")|")|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + '3': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.double.single-line.format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } + { + 'include': '#escaped_unicode_char' + } + { + 'include': '#escaped_char' + } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } + ] + } + { + 'begin': '([rR][fF])(")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=")(")|")|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + '3': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.double.single-line.raw-format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } + { + 'include': '#escaped_unicode_char' 
+ } + { + 'include': '#escaped_char' + } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } + ] + } { 'begin': '(""")' 'beginCaptures': @@ -1406,12 +1631,68 @@ { 'include': '#string_formatting' } + { + 'include': '#escaped_char' + } + ] + } + { + 'begin': '([fF])(\'\'\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'comment': 'single quoted unicode string' + 'end': '((?<=\'\'\')(\')\'\'|\'\'\')' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.single.python' + 'name': 'string.quoted.single.block.format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } { 'include': '#escaped_unicode_char' } { 'include': '#escaped_char' } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } + ] + } + { + 'begin': '([rR][fF])(\'\'\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'comment': 'single quoted unicode string' + 'end': '((?<=\'\'\')(\')\'\'|\'\'\')' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.single.python' + 'name': 'string.quoted.single.block.raw-format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } + { + 'include': '#escaped_char' + } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } ] } { @@ -1529,6 +1810,63 @@ } ] } + { + 'begin': '([fF])(\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '(\')|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.single.single-line.format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } + { + 'include': '#escaped_unicode_char' + } + { + 'include': '#escaped_char' + } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } + ] + } + { + 'begin': '([rR][fF])(\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '(\')|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.single.single-line.raw-format.python' + 'patterns': [ + { + 'include': '#string_interpolation' + } + { + 'include': '#escaped_char' + } + { + 'match': '}' + 'name': 'invalid.illegal.closing-curly-bracket.python' + } + ] + } { 'begin': '(\'\'\')' 'beginCaptures': From 3cfa8ac9895156850ec5011bd74da7254db56d94 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 20:57:00 -0500 Subject: [PATCH 173/291] Specs --- grammars/python.cson | 42 ++++++------- spec/python-spec.coffee | 134 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 155 insertions(+), 21 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 6ef768a..8fa6e9c 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1158,15 +1158,15 @@ 'name': 'meta.empty-string.double.python' 'name': 'string.quoted.double.block.format.python' 'patterns': [ - { - 'include': '#string_interpolation' - } { 'include': '#escaped_unicode_char' } { 'include': '#escaped_char' } + { + 'include': '#string_interpolation' + } { 
'match': '}' 'name': 'invalid.illegal.closing-curly-bracket.python' @@ -1190,10 +1190,10 @@ 'name': 'string.quoted.double.block.raw-format.python' 'patterns': [ { - 'include': '#string_interpolation' + 'include': '#escaped_char' } { - 'include': '#escaped_char' + 'include': '#string_interpolation' } { 'match': '}' @@ -1339,15 +1339,15 @@ 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.double.single-line.format.python' 'patterns': [ - { - 'include': '#string_interpolation' - } { 'include': '#escaped_unicode_char' } { 'include': '#escaped_char' } + { + 'include': '#string_interpolation' + } { 'match': '}' 'name': 'invalid.illegal.closing-curly-bracket.python' @@ -1371,15 +1371,15 @@ 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.double.single-line.raw-format.python' 'patterns': [ - { - 'include': '#string_interpolation' - } { 'include': '#escaped_unicode_char' } { 'include': '#escaped_char' } + { + 'include': '#string_interpolation' + } { 'match': '}' 'name': 'invalid.illegal.closing-curly-bracket.python' @@ -1652,15 +1652,15 @@ 'name': 'meta.empty-string.single.python' 'name': 'string.quoted.single.block.format.python' 'patterns': [ - { - 'include': '#string_interpolation' - } { 'include': '#escaped_unicode_char' } { 'include': '#escaped_char' } + { + 'include': '#string_interpolation' + } { 'match': '}' 'name': 'invalid.illegal.closing-curly-bracket.python' @@ -1684,10 +1684,10 @@ 'name': 'string.quoted.single.block.raw-format.python' 'patterns': [ { - 'include': '#string_interpolation' + 'include': '#escaped_char' } { - 'include': '#escaped_char' + 'include': '#string_interpolation' } { 'match': '}' @@ -1825,15 +1825,15 @@ 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.single.single-line.format.python' 'patterns': [ - { - 'include': '#string_interpolation' - } { 'include': '#escaped_unicode_char' } { 'include': '#escaped_char' } + { + 'include': '#string_interpolation' + } { 'match': '}' 'name': 'invalid.illegal.closing-curly-bracket.python' @@ -1856,10 +1856,10 @@ 'name': 'string.quoted.single.single-line.raw-format.python' 'patterns': [ { - 'include': '#string_interpolation' + 'include': '#escaped_char' } { - 'include': '#escaped_char' + 'include': '#string_interpolation' } { 'match': '}' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index e0635e1..86bc0ba 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -245,6 +245,129 @@ describe "Python grammar", -> expect(tokens[0][1].value).toBe '\\x9f' expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python'] + describe "f-strings", -> + types = + 'f': 'format' + 'F': 'format' + 'rf': 'raw-format' + 'rF': 'raw-format' + 'Rf': 'raw-format' + 'RF': 'raw-format' + + quotes = + '"': 'double.single-line' + "'": 'single.single-line' + '"""': 'double.block' + "'''": 'single.block' + + for type, typeScope of types + for quote, quoteScope of quotes + it "tokenizes them", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}hello#{quote}" + + expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + 
expect(tokens[3]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] + + it "tokenizes {{ and }} as escape characters", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}he}}l{{lo#{quote}" + + expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'constant.character.escape.curly-bracket.python'] + expect(tokens[4]).toEqual value: 'l', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[5]).toEqual value: '{{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'constant.character.escape.curly-bracket.python'] + expect(tokens[6]).toEqual value: 'lo', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[7]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] + + it "tokenizes unmatched closing curly brackets as invalid", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}he}llo#{quote}" + + expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[3]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'invalid.illegal.closing-curly-bracket.python'] + expect(tokens[4]).toEqual value: 'llo', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[5]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] + + describe "in expressions", -> + it "tokenizes variables", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc}#{quote}" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes arithmetic", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}{5 - 3}#{quote}" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: '5', 
scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[5]).toEqual value: '-', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python'] + expect(tokens[7]).toEqual value: '3', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[8]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes function and method calls", -> + argumentQuote = '"' + argumentQuoteScope = 'double' + + if quote is '"' + argumentQuote = "'" + argumentQuoteScope = 'single' + + {tokens} = grammar.tokenizeLine "#{type}#{quote}{name.decode(#{argumentQuote}utf-8#{argumentQuote}).lower()}#{quote}" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[7]).toEqual value: argumentQuote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.#{argumentQuoteScope}.single-line.python", 'punctuation.definition.string.begin.python'] + expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.#{argumentQuoteScope}.single-line.python"] + expect(tokens[9]).toEqual value: argumentQuote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.#{argumentQuoteScope}.single-line.python", 'punctuation.definition.string.end.python'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 
'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[12]).toEqual value: 'lower', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[15]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes conversion flags", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc!r}#{quote}" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: '!r', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes format specifiers", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc:^d}#{quote}" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: ':^d', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes nested replacement fields in top-level format specifiers", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc:{align}d}#{quote}" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: ':', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[5]).toEqual value: '{align}', scopes: ['source.python', 
"string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] + expect(tokens[6]).toEqual value: 'd', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[7]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes backslashes as invalid", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}{ab\\n}#{quote}" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'ab', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: '\\', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + describe "string formatting", -> describe "%-style formatting", -> it "tokenizes the conversion type", -> @@ -440,6 +563,17 @@ describe "Python grammar", -> expect(tokens[1]).toEqual value: '{:b}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + it "tokenizes nested replacement fields", -> + {tokens} = grammar.tokenizeLine '"{:{align}-.{precision}%}"' + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: '{:', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[2]).toEqual value: '{align}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] + expect(tokens[3]).toEqual value: '-.', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[4]).toEqual value: '{precision}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] + expect(tokens[5]).toEqual value: '%}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python'] + expect(tokens[6]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] + it "tokenizes complex formats", -> {tokens} = grammar.tokenizeLine '"{0.players[2]!a:2>-#01_.3d}"' From 292a7fc3b0ac1fbd79526caa91f7de4c83a8e00b Mon Sep 17 00:00:00 2001 From: Cory Jones Date: Tue, 4 Aug 2015 12:41:44 -0400 Subject: [PATCH 174/291] Add support for Python3 parameter annotations --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/grammars/python.cson b/grammars/python.cson index 8fa6e9c..65659b1 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -279,7 +279,7 @@ 'name': 'variable.parameter.function.python' '2': 'name': 'punctuation.separator.parameters.python' - 'match': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(?:(,)|(?=[\\n\\)]))' + 'match': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(?:(,)|\\:\\s*\\w*(,*))' } ] } From 3389fafa148580e9b4944f7e51d94c497665720d Mon Sep 17 00:00:00 2001 From: Dario Bertini Date: Sat, 19 Sep 2015 23:21:18 +0100 Subject: [PATCH 175/291] Add function annotations --- grammars/python.cson | 14 +++++++++++--- spec/python-spec.coffee | 14 ++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 65659b1..25c7d3b 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -239,13 +239,17 @@ 'beginCaptures': '1': 'name': 'storage.type.function.python' - 'end': '(\\))\\s*(?:(\\:)|(.*$\\n?))' + 'end': '(\\))\\s*(?:(->)\\s*([A-Za-z_][A-Za-z0-9_]*)\\s*)?(?:(\\:)|(.*$\\n?))' 'endCaptures': '1': 'name': 'punctuation.definition.parameters.end.python' '2': - 'name': 'punctuation.section.function.begin.python' + 'name': 'punctuation.definition.annotation.return.python' '3': + 'name': 'variable.annotation.function.python' + '4': + 'name': 'punctuation.section.function.begin.python' + '5': 'name': 'invalid.illegal.missing-section-begin.python' 'name': 'meta.function.python' 'patterns': [ @@ -278,8 +282,12 @@ '1': 'name': 'variable.parameter.function.python' '2': + 'name': 'punctuation.definition.annotation.parameter.python' + '3': + 'name': 'variable.annotation.function.python' + '4': 'name': 'punctuation.separator.parameters.python' - 'match': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(?:(,)|\\:\\s*\\w*(,*))' + 'match': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(?:(:)\\s*([a-zA-Z_][a-zA-Z_0-9]*))?(?:(,)|(?=[\\n\\)]))' } ] } diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 86bc0ba..37d0f3c 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -724,3 +724,17 @@ describe "Python grammar", -> expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] expect(tokens[1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python'] expect(tokens[2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes a function definition with annotations", -> + tokens = grammar.tokenizeLines('def f(a: int) -> int:') + + expect(tokens[0][0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] + expect(tokens[0][2]).toEqual value: 'f', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python', 'support.function.magic.python'] + expect(tokens[0][3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] + expect(tokens[1][1]).toEqual value: 'a', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[1][2]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.definition.annotation.parameter.python'] + expect(tokens[2][1]).toEqual value: 'int', scopes: ['source.python', 
'meta.function.python', 'meta.function.parameters.python', 'variable.annotation.function.python'] + expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] + expect(tokens[4][0]).toEqual value: '->', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.annotation.return.python'] + expect(tokens[4][0]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'variable.annotation.function.python'] + expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python'] From 40d397b6264c0a48acdb257d02e0cb09d592198b Mon Sep 17 00:00:00 2001 From: Dario Bertini Date: Sat, 17 Oct 2015 13:02:14 +0100 Subject: [PATCH 176/291] Apply some fixes --- grammars/python.cson | 2 +- spec/python-spec.coffee | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 25c7d3b..996e41f 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -246,7 +246,7 @@ '2': 'name': 'punctuation.definition.annotation.return.python' '3': - 'name': 'variable.annotation.function.python' + 'name': 'variable.other.annotation' '4': 'name': 'punctuation.section.function.begin.python' '5': diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 37d0f3c..d7f1de5 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -729,7 +729,7 @@ describe "Python grammar", -> tokens = grammar.tokenizeLines('def f(a: int) -> int:') expect(tokens[0][0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[0][2]).toEqual value: 'f', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python', 'support.function.magic.python'] + expect(tokens[0][2]).toEqual value: 'f', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] expect(tokens[0][3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] expect(tokens[1][1]).toEqual value: 'a', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] expect(tokens[1][2]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.definition.annotation.parameter.python'] From d5482d3d4f1964be56299ee80808ef75c6023240 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 22:11:35 -0500 Subject: [PATCH 177/291] Tokenize function and parameter annotations --- grammars/python.cson | 66 ++++++++++++++++++++++++++++------------- spec/python-spec.coffee | 37 +++++++++++++---------- 2 files changed, 66 insertions(+), 37 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 996e41f..ac650ac 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -235,22 +235,14 @@ ] } { - 'begin': '^\\s*(def)\\s+(?=[A-Za-z_][A-Za-z0-9_]*\\s*\\()' + 'begin': '^\\s*(def)\\s+(?=[A-Za-z_][\\w_]*\\s*\\()' 'beginCaptures': '1': 'name': 'storage.type.function.python' - 'end': '(\\))\\s*(?:(->)\\s*([A-Za-z_][A-Za-z0-9_]*)\\s*)?(?:(\\:)|(.*$\\n?))' + 'end': ':' 'endCaptures': - '1': - 'name': 'punctuation.definition.parameters.end.python' - '2': - 'name': 'punctuation.definition.annotation.return.python' - '3': - 'name': 'variable.other.annotation' - '4': - 'name': 
'punctuation.section.function.begin.python' - '5': - 'name': 'invalid.illegal.missing-section-begin.python' + '0': + 'name': 'punctuation.definition.function.begin.python' 'name': 'meta.function.python' 'patterns': [ { @@ -264,33 +256,65 @@ ] } { - 'begin': '(\\()' + 'begin': '\\(' 'beginCaptures': - '1': + '0': 'name': 'punctuation.definition.parameters.begin.python' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.parameters.end.python' 'contentName': 'meta.function.parameters.python' - 'end': '(?=\\)\\s*\\:)' 'patterns': [ { 'include': '#line_comments' } { - 'include': '#keyword_arguments' + # param = 3 + # param: int = 3 + 'begin': '\\b([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?\\s*(=)\\s*' + 'beginCaptures': + '1': + 'name': 'variable.parameter.function.python' + '2': + 'name': 'punctuation.separator.python' + '3': + 'name': 'storage.type.python' + '4': + 'name': 'keyword.operator.assignment.python' + 'end': '(?!\\G)' + 'patterns': [ + { + 'include': '$self' + } + ] } { + # param + # param: int + 'match': '\\b([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?' 'captures': '1': 'name': 'variable.parameter.function.python' '2': - 'name': 'punctuation.definition.annotation.parameter.python' + 'name': 'punctuation.separator.python' '3': - 'name': 'variable.annotation.function.python' - '4': - 'name': 'punctuation.separator.parameters.python' - 'match': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(?:(:)\\s*([a-zA-Z_][a-zA-Z_0-9]*))?(?:(,)|(?=[\\n\\)]))' + 'name': 'storage.type.python' + } + { + 'match': ',' + 'name': 'punctuation.separator.parameters.python' } ] } + { + 'match': '(->)\\s*([A-Za-z_][\\w_]*)(?=\\s*:)' + 'captures': + '1': + 'name': 'keyword.operator.function-annotation.python' + '2': + 'name': 'storage.type.python' + } ] } { diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index d7f1de5..8e1f8a0 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -650,7 +650,27 @@ describe "Python grammar", -> expect(tokens[2][5]).toEqual value: ' comment', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python'] expect(tokens[3][1]).toEqual value: 'config', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] - expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python'] + expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.function.begin.python'] + + it "tokenizes a function definition with annotations", -> + {tokens} = grammar.tokenizeLine 'def f(a: None, b: int = 3) -> int:' + + expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] + expect(tokens[2]).toEqual value: 'f', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] + expect(tokens[4]).toEqual value: 'a', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 
'meta.function.parameters.python', 'punctuation.separator.python'] + expect(tokens[7]).toEqual value: 'None', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'storage.type.python'] + expect(tokens[8]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] + expect(tokens[10]).toEqual value: 'b', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] + expect(tokens[11]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.python'] + expect(tokens[13]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'storage.type.python'] + expect(tokens[15]).toEqual value: '=', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'keyword.operator.assignment.python'] + expect(tokens[17]).toEqual value: '3', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[18]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] + expect(tokens[20]).toEqual value: '->', scopes: ['source.python', 'meta.function.python', 'keyword.operator.function-annotation.python'] + expect(tokens[22]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'storage.type.python'] + expect(tokens[23]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.function.begin.python'] it "tokenizes complex function calls", -> {tokens} = grammar.tokenizeLine "torch.nn.BCELoss()(Variable(bayes_optimal_prob, 1, requires_grad=False), Yvar).data[0]" @@ -718,23 +738,8 @@ describe "Python grammar", -> expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] it "tokenizes SQL inline highlighting on single line with a CTE", -> - {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] expect(tokens[1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python'] expect(tokens[2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] - - it "tokenizes a function definition with annotations", -> - tokens = grammar.tokenizeLines('def f(a: int) -> int:') - - expect(tokens[0][0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[0][2]).toEqual value: 'f', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] - expect(tokens[0][3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] - expect(tokens[1][1]).toEqual value: 'a', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[1][2]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 
'punctuation.definition.annotation.parameter.python'] - expect(tokens[2][1]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.annotation.function.python'] - expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] - expect(tokens[4][0]).toEqual value: '->', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.annotation.return.python'] - expect(tokens[4][0]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'variable.annotation.function.python'] - expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.section.function.begin.python'] From 475f40dfe844d25d9453e0bf7b457391f9c14269 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 22:42:05 -0500 Subject: [PATCH 178/291] Tokenize `yield from` PEP-380 --- grammars/python.cson | 2 +- spec/python-spec.coffee | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 8fa6e9c..dfa1439 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -108,7 +108,7 @@ { 'comment': 'keywords that alter flow from within a block' 'name': 'keyword.control.statement.python' - 'match': '\\b(with|break|continue|pass|return|yield|await)\\b' + 'match': '\\b(with|break|continue|pass|return|yield(\\s+from)?|await)\\b' } { 'comment': 'keyword operators that evaluate to True or False' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 86bc0ba..a4f3f08 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -16,6 +16,16 @@ describe "Python grammar", -> expect(grammar).toBeDefined() expect(grammar.scopeName).toBe "source.python" + it "tokenizes `yield`", -> + {tokens} = grammar.tokenizeLine 'yield v' + + expect(tokens[0]).toEqual value: 'yield', scopes: ['source.python', 'keyword.control.statement.python'] + + it "tokenizes `yield from`", -> + {tokens} = grammar.tokenizeLine 'yield from v' + + expect(tokens[0]).toEqual value: 'yield from', scopes: ['source.python', 'keyword.control.statement.python'] + it "tokenizes multi-line strings", -> tokens = grammar.tokenizeLines('"1\\\n2"') From cb368398aa2a2d49e18f62a873710c703304439a Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 21 Jan 2018 22:54:02 -0500 Subject: [PATCH 179/291] Add wscript as a Python filetype Fixes #56 --- grammars/python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/python.cson b/grammars/python.cson index dfa1439..6d5acfc 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -15,6 +15,7 @@ 'Snakefile' # Snakemake support 'smk' # Snakemake support 'tac' + 'wscript' 'wsgi' ] 'firstLineMatch': '^#![ \\t]*/.*\\bpython[\\d\\.]*\\b' From aa451a8a8792dbe82e63540a3f951f79029a2429 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 22 Jan 2018 10:06:26 -0500 Subject: [PATCH 180/291] Improve lambda tokenization --- grammars/python.cson | 45 +++++++++++++++++++++-------------------- spec/python-spec.coffee | 13 ++++++++++++ 2 files changed, 36 insertions(+), 22 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 6d5acfc..b62e884 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -312,35 +312,42 @@ ] } { - 'begin': '(lambda)(?=\\s+)' + 'begin': '(lambda)\\s+' 'beginCaptures': '1': 'name': 
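Illustrative only, and not part of any patch in this series: a minimal Python sketch of the source that the parameter/return annotation rules (PATCH 177) and the `yield from` keyword rule (PATCH 178) are meant to match. All names below are invented.

    def fetch(url: str, retries: int = 3) -> bytes:
        pass

    def forward(gen):
        yield from gen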
'storage.type.function.inline.python' - 'end': '(\\:)' + 'end': ':' 'endCaptures': - '1': - 'name': 'punctuation.definition.parameters.end.python' - '2': - 'name': 'punctuation.section.function.begin.python' - '3': - 'name': 'invalid.illegal.missing-section-begin.python' + '0': + 'name': 'punctuation.definition.function.begin.python' 'name': 'meta.function.inline.python' 'patterns': [ { - 'begin': '\\s+' - 'contentName': 'meta.function.inline.parameters.python' + 'begin': '\\G' 'end': '(?=\\:)' + 'contentName': 'meta.function.inline.parameters.python' 'patterns': [ { - 'include': '#keyword_arguments' - } - { - 'captures': + 'begin': '\\b([a-zA-Z_][\\w_]*)\\s*(=)\\s*' + 'beginCaptures': '1': 'name': 'variable.parameter.function.python' '2': - 'name': 'punctuation.separator.parameters.python' - 'match': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(?:(,)|(?=[\\n\\)\\:]))' + 'name': 'keyword.operator.assignment.python' + 'end': '(?!\\G)' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'match': '\\b([a-zA-Z_][\\w_]*)\\b' + 'name': 'variable.parameter.function.python' + } + { + 'match': ',' + 'name': 'punctuation.separator.parameters.python' } ] } @@ -481,12 +488,6 @@ } ] } - { - 'captures': - '1': - 'name': 'storage.type.function.python' - 'match': '\\b(def|lambda)\\b' - } { 'captures': '1': diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index a4f3f08..d482e27 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -679,6 +679,19 @@ describe "Python grammar", -> expect(tokens[20]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] expect(tokens[21]).toEqual value: '.', scopes: ['source.python'] + it "tokenizes lambdas", -> + {tokens} = grammar.tokenizeLine "lambda x, z = 4: x * z" + + expect(tokens[0]).toEqual value: 'lambda', scopes: ['source.python', 'meta.function.inline.python', 'storage.type.function.inline.python'] + expect(tokens[1]).toEqual value: ' ', scopes: ['source.python', 'meta.function.inline.python'] + expect(tokens[2]).toEqual value: 'x', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'variable.parameter.function.python'] + expect(tokens[3]).toEqual value: ',', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'punctuation.separator.parameters.python'] + expect(tokens[5]).toEqual value: 'z', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'variable.parameter.function.python'] + expect(tokens[7]).toEqual value: '=', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'keyword.operator.assignment.python'] + expect(tokens[9]).toEqual value: '4', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[10]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] + expect(tokens[11]).toEqual value: ' ', scopes: ['source.python'] + it "tokenizes SQL inline highlighting on blocks", -> delimsByScope = "string.quoted.double.block.sql.python": '"""' From 5f88595c82f0197ae0720377739acba598a5d96a Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 22 Jan 2018 10:13:17 -0500 Subject: [PATCH 181/291] :memo: --- grammars/python.cson | 2 ++ 1 file changed, 2 insertions(+) diff --git a/grammars/python.cson 
b/grammars/python.cson index b62e884..50067ca 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -328,6 +328,7 @@ 'contentName': 'meta.function.inline.parameters.python' 'patterns': [ { + # param = 3 'begin': '\\b([a-zA-Z_][\\w_]*)\\s*(=)\\s*' 'beginCaptures': '1': @@ -342,6 +343,7 @@ ] } { + # param 'match': '\\b([a-zA-Z_][\\w_]*)\\b' 'name': 'variable.parameter.function.python' } From a41f10325f5ea3f0c7b4c282a67a1c836cef9a6d Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 22 Jan 2018 10:45:52 -0500 Subject: [PATCH 182/291] Tokenize async function definitions * Consolidate missing parameter tokenization PEP-492 --- grammars/python.cson | 31 +++++++------------------------ spec/python-spec.coffee | 21 +++++++++++++++++++++ 2 files changed, 28 insertions(+), 24 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 50067ca..f6d1de9 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -236,9 +236,11 @@ ] } { - 'begin': '^\\s*(def)\\s+(?=[A-Za-z_][A-Za-z0-9_]*\\s*\\()' + 'begin': '^\\s*(?:(async)\\s+)?(def)\\s+(?=[A-Za-z_][A-Za-z0-9_]*)' 'beginCaptures': '1': + 'name': 'storage.modifier.async.python' + '2': 'name': 'storage.type.function.python' 'end': '(\\))\\s*(?:(\\:)|(.*$\\n?))' 'endCaptures': @@ -284,30 +286,11 @@ } ] } - ] - } - { - 'begin': '^\\s*(def)\\s+(?=[A-Za-z_][A-Za-z0-9_]*)' - 'beginCaptures': - '1': - 'name': 'storage.type.function.python' - 'end': '(\\()|\\s*($\\n?|#.*$\\n?)' - 'endCaptures': - '1': - 'name': 'punctuation.definition.parameters.begin.python' - '2': - 'name': 'invalid.illegal.missing-parameters.python' - 'name': 'meta.function.python' - 'patterns': [ { - 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*)' - 'contentName': 'entity.name.function.python' - 'end': '(?![A-Za-z0-9_])' - 'patterns': [ - { - 'include': '#entity_name_function' - } - ] + # No match, not at the end of the line, and no opening parentheses + 'begin': '(?!\\G)(?!\\s*$)(?!.*\\()' + 'end': '$' + 'name': 'invalid.illegal.missing-parameters.python' } ] } diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index d482e27..876aa6a 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -630,6 +630,27 @@ describe "Python grammar", -> expect(tokens[0][2].value).toBe 'foo' expect(tokens[0][2].scopes).toEqual ['source.python'] + it "tokenizes async function definitions", -> + {tokens} = grammar.tokenizeLine 'async def test(param):' + + expect(tokens[0]).toEqual value: 'async', scopes: ['source.python', 'meta.function.python', 'storage.modifier.async.python'] + expect(tokens[1]).toEqual value: ' ', scopes: ['source.python', 'meta.function.python'] + expect(tokens[2]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] + expect(tokens[4]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] + + it "tokenizes functions that are missing parameters", -> + {tokens} = grammar.tokenizeLine 'def test # whoops' + + expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] + expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] + expect(tokens[3]).toEqual value: ' # whoops', scopes: ['source.python', 'meta.function.python', 'invalid.illegal.missing-parameters.python'] + + {tokens} = grammar.tokenizeLine 'def test:' + + expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 
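A hedged example of the constructs exercised by the lambda rework (PATCH 180) and the async-def support (PATCH 182); `async def test(param)` mirrors the new spec, while the lambda identifiers are placeholders.

    scale = lambda x, z=4: x * z

    async def test(param):
        pass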
'meta.function.python', 'storage.type.function.python'] + expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'invalid.illegal.missing-parameters.python'] + it "tokenizes comments inside function parameters", -> {tokens} = grammar.tokenizeLine('def test(arg, # comment') From dd0d44b7a478882f9cc6e133bf5cedbcb10d4703 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 22 Jan 2018 18:04:56 -0500 Subject: [PATCH 183/291] Support fr as well as rf and reduce spec iterations --- grammars/python.cson | 8 ++++---- spec/python-spec.coffee | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index f6d1de9..5c7ee6e 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1161,7 +1161,7 @@ ] } { - 'begin': '([rR][fF])(""")' + 'begin': '([rR][fF]|[fF][rR])(""")' 'beginCaptures': '1': 'name': 'storage.type.string.python' @@ -1342,7 +1342,7 @@ ] } { - 'begin': '([rR][fF])(")' + 'begin': '([rR][fF]|[fF][rR])(")' 'beginCaptures': '1': 'name': 'storage.type.string.python' @@ -1655,7 +1655,7 @@ ] } { - 'begin': '([rR][fF])(\'\'\')' + 'begin': '([rR][fF]|[fF][rR])(\'\'\')' 'beginCaptures': '1': 'name': 'storage.type.string.python' @@ -1828,7 +1828,7 @@ ] } { - 'begin': '([rR][fF])(\')' + 'begin': '([rR][fF]|[fF][rR])(\')' 'beginCaptures': '1': 'name': 'storage.type.string.python' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 876aa6a..341da65 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -260,9 +260,9 @@ describe "Python grammar", -> 'f': 'format' 'F': 'format' 'rf': 'raw-format' - 'rF': 'raw-format' - 'Rf': 'raw-format' 'RF': 'raw-format' + 'fr': 'raw-format' + 'FR': 'raw-format' quotes = '"': 'double.single-line' From 10bd11ddfb45cb8f20c224fa23ae769bec186937 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 22 Jan 2018 18:49:54 -0500 Subject: [PATCH 184/291] Tokenize binary strings PEP-3112 --- grammars/python.cson | 239 ++++++++++++++++++++++++++++++++++++---- spec/python-spec.coffee | 35 ++++++ 2 files changed, 255 insertions(+), 19 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 5c7ee6e..bffbcaf 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1136,7 +1136,6 @@ 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'double quoted unicode string' 'end': '((?<=""")(")""|""")' 'endCaptures': '1': @@ -1145,9 +1144,6 @@ 'name': 'meta.empty-string.double.python' 'name': 'string.quoted.double.block.format.python' 'patterns': [ - { - 'include': '#escaped_unicode_char' - } { 'include': '#escaped_char' } @@ -1167,7 +1163,6 @@ 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'double quoted unicode string' 'end': '((?<=""")(")""|""")' 'endCaptures': '1': @@ -1188,6 +1183,60 @@ } ] } + { + 'begin': '([bB])(""")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=""")(")""|""")' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + 'name': 'string.quoted.double.block.binary.python' + 'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + 
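For reference, a small invented snippet of the prefix spellings covered here: PATCH 183 accepts the raw-format prefix in either order (rf/fr, in any case), alongside the existing f prefix.

    value = 42
    a = f"plain format {value}"
    b = rf"raw format {value}"
    c = fr"same thing, prefix order reversed"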
} + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } + { + 'begin': '([rR][bB]|[bB][rR])(""")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=""")(")""|""")' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + 'name': 'string.quoted.double.block.raw-binary.python' + 'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + } + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } { 'captures': '1': @@ -1326,9 +1375,6 @@ 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.double.single-line.format.python' 'patterns': [ - { - 'include': '#escaped_unicode_char' - } { 'include': '#escaped_char' } @@ -1358,9 +1404,6 @@ 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.double.single-line.raw-format.python' 'patterns': [ - { - 'include': '#escaped_unicode_char' - } { 'include': '#escaped_char' } @@ -1373,6 +1416,64 @@ } ] } + { + 'begin': '([bB])(")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=")(")|")|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + '3': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.double.single-line.binary.python' + 'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + } + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } + { + 'begin': '([rR][bB]|[bB][rR])(")' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=")(")|")|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.double.python' + '3': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.double.single-line.raw-binary.python' + 'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + } + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } { 'begin': '(""")' 'beginCaptures': @@ -1630,7 +1731,6 @@ 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'single quoted unicode string' 'end': '((?<=\'\'\')(\')\'\'|\'\'\')' 'endCaptures': '1': @@ -1639,9 +1739,6 @@ 'name': 'meta.empty-string.single.python' 'name': 'string.quoted.single.block.format.python' 'patterns': [ - { - 'include': '#escaped_unicode_char' - } { 'include': '#escaped_char' } @@ -1661,7 +1758,6 @@ 'name': 'storage.type.string.python' '2': 'name': 'punctuation.definition.string.begin.python' - 'comment': 'single quoted unicode string' 'end': '((?<=\'\'\')(\')\'\'|\'\'\')' 'endCaptures': '1': @@ -1682,6 +1778,60 @@ } ] } + { + 'begin': '([bB])(\'\'\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=\'\'\')(\')\'\'|\'\'\')' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.single.python' + 'name': 'string.quoted.single.block.binary.python' + 
'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + } + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } + { + 'begin': '([rR][bB]|[bB][rR])(\'\'\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '((?<=\'\'\')(\')\'\'|\'\'\')' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'meta.empty-string.single.python' + 'name': 'string.quoted.single.block.raw-binary.python' + 'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + } + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } { 'captures': '1': @@ -1812,9 +1962,6 @@ 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.single.single-line.format.python' 'patterns': [ - { - 'include': '#escaped_unicode_char' - } { 'include': '#escaped_char' } @@ -1854,6 +2001,60 @@ } ] } + { + 'begin': '([bB])(\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '(\')|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.single.single-line.binary.python' + 'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + } + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } + { + 'begin': '([rR][bB]|[bB][rR])(\')' + 'beginCaptures': + '1': + 'name': 'storage.type.string.python' + '2': + 'name': 'punctuation.definition.string.begin.python' + 'end': '(\')|(\\n)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.python' + '2': + 'name': 'invalid.illegal.unclosed-string.python' + 'name': 'string.quoted.single.single-line.raw-binary.python' + 'patterns': [ + { + 'include': '#escaped_char' + } + { + 'include': '#string_formatting' + } + { + 'match': '[^\\x{01}-\\x{7f}]' + 'name': 'invalid.illegal.character-out-of-range.python' + } + ] + } { 'begin': '(\'\'\')' 'beginCaptures': diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 341da65..2d0659d 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -378,6 +378,41 @@ describe "Python grammar", -> expect(tokens[4]).toEqual value: '\\', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python'] expect(tokens[6]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + describe "binary strings", -> + types = + 'b': 'binary' + 'B': 'binary' + 'rb': 'raw-binary' + 'RB': 'raw-binary' + 'br': 'raw-binary' + 'BR': 'raw-binary' + + quotes = + '"': 'double.single-line' + "'": 'single.single-line' + '"""': 'double.block' + "'''": 'single.block' + + for type, typeScope of types + for quote, quoteScope of quotes + it "tokenizes them", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}test#{quote}" + + expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: quote, scopes: ['source.python', 
"string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[3]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] + + it "tokenizes invalid characters", -> + {tokens} = grammar.tokenizeLine "#{type}#{quote}tést#{quote}" + + expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 't', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[3]).toEqual value: 'é', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'invalid.illegal.character-out-of-range.python'] + expect(tokens[4]).toEqual value: 'st', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] + expect(tokens[5]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] + describe "string formatting", -> describe "%-style formatting", -> it "tokenizes the conversion type", -> From 00c1c0697e63390acf20f3903a03a91cf1408944 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 22 Jan 2018 19:43:57 -0500 Subject: [PATCH 185/291] Remove broken for-loop testing --- spec/python-spec.coffee | 269 +++++++++++++++++----------------------- 1 file changed, 115 insertions(+), 154 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 2d0659d..58f0851 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -256,162 +256,123 @@ describe "Python grammar", -> expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python'] describe "f-strings", -> - types = - 'f': 'format' - 'F': 'format' - 'rf': 'raw-format' - 'RF': 'raw-format' - 'fr': 'raw-format' - 'FR': 'raw-format' - - quotes = - '"': 'double.single-line' - "'": 'single.single-line' - '"""': 'double.block' - "'''": 'single.block' - - for type, typeScope of types - for quote, quoteScope of quotes - it "tokenizes them", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}hello#{quote}" - - expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] - expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] - expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[3]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] - - it "tokenizes {{ and }} as escape characters", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}he}}l{{lo#{quote}" - - expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] - expect(tokens[1]).toEqual value: quote, scopes: ['source.python', 
"string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] - expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'constant.character.escape.curly-bracket.python'] - expect(tokens[4]).toEqual value: 'l', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[5]).toEqual value: '{{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'constant.character.escape.curly-bracket.python'] - expect(tokens[6]).toEqual value: 'lo', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[7]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] - - it "tokenizes unmatched closing curly brackets as invalid", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}he}llo#{quote}" - - expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] - expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] - expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[3]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'invalid.illegal.closing-curly-bracket.python'] - expect(tokens[4]).toEqual value: 'llo', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[5]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] - - describe "in expressions", -> - it "tokenizes variables", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc}#{quote}" - - expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] - expect(tokens[4]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] - - it "tokenizes arithmetic", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}{5 - 3}#{quote}" - - expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: '5', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] - expect(tokens[5]).toEqual value: '-', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python'] - expect(tokens[7]).toEqual value: '3', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 
'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] - expect(tokens[8]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] - - it "tokenizes function and method calls", -> - argumentQuote = '"' - argumentQuoteScope = 'double' - - if quote is '"' - argumentQuote = "'" - argumentQuoteScope = 'single' - - {tokens} = grammar.tokenizeLine "#{type}#{quote}{name.decode(#{argumentQuote}utf-8#{argumentQuote}).lower()}#{quote}" - - expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] - expect(tokens[7]).toEqual value: argumentQuote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.#{argumentQuoteScope}.single-line.python", 'punctuation.definition.string.begin.python'] - expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.#{argumentQuoteScope}.single-line.python"] - expect(tokens[9]).toEqual value: argumentQuote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.#{argumentQuoteScope}.single-line.python", 'punctuation.definition.string.end.python'] - expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] - expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] - expect(tokens[12]).toEqual value: 'lower', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] - 
expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] - expect(tokens[15]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] - - it "tokenizes conversion flags", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc!r}#{quote}" - - expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] - expect(tokens[4]).toEqual value: '!r', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] - expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] - - it "tokenizes format specifiers", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc:^d}#{quote}" - - expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] - expect(tokens[4]).toEqual value: ':^d', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] - expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] - - it "tokenizes nested replacement fields in top-level format specifiers", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}{abc:{align}d}#{quote}" - - expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] - expect(tokens[4]).toEqual value: ':', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] - expect(tokens[5]).toEqual value: '{align}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] - expect(tokens[6]).toEqual value: 'd', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] - expect(tokens[7]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 
'punctuation.definition.interpolation.end.bracket.curly.python'] - - it "tokenizes backslashes as invalid", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}{ab\\n}#{quote}" - - expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: 'ab', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python'] - expect(tokens[4]).toEqual value: '\\', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python'] - expect(tokens[6]).toEqual value: '}', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + it "tokenizes them", -> + {tokens} = grammar.tokenizeLine "f'hello'" + + expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', "string.quoted.single.single-line.format.python"] + expect(tokens[3]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python'] + + it "tokenizes {{ and }} as escape characters", -> + {tokens} = grammar.tokenizeLine "f'he}}l{{lo'" + + expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"] + expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python'] + expect(tokens[4]).toEqual value: 'l', scopes: ['source.python', "string.quoted.single.single-line.format.python"] + expect(tokens[5]).toEqual value: '{{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python'] + expect(tokens[6]).toEqual value: 'lo', scopes: ['source.python', "string.quoted.single.single-line.format.python"] + expect(tokens[7]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python'] + + it "tokenizes unmatched closing curly brackets as invalid", -> + {tokens} = grammar.tokenizeLine "f'he}llo'" + + expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"] + expect(tokens[3]).toEqual value: '}', scopes: ['source.python', 
"string.quoted.single.single-line.format.python", 'invalid.illegal.closing-curly-bracket.python'] + expect(tokens[4]).toEqual value: 'llo', scopes: ['source.python', "string.quoted.single.single-line.format.python"] + expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python'] + + describe "in expressions", -> + it "tokenizes variables", -> + {tokens} = grammar.tokenizeLine "f'{abc}'" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes arithmetic", -> + {tokens} = grammar.tokenizeLine "f'{5 - 3}'" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: '5', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[5]).toEqual value: '-', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python'] + expect(tokens[7]).toEqual value: '3', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python'] + expect(tokens[8]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes function and method calls", -> + {tokens} = grammar.tokenizeLine "f'{name.decode(\"utf-8\").lower()}'" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 
'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.begin.python'] + expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.double.single-line.python"] + expect(tokens[9]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.end.python'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[12]).toEqual value: 'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] + expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[15]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes conversion flags", -> + {tokens} = grammar.tokenizeLine "f'{abc!r}'" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: '!r', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes format specifiers", -> + {tokens} = grammar.tokenizeLine "f'{abc:^d}'" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: ':^d', scopes: ['source.python', 
"string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes nested replacement fields in top-level format specifiers", -> + {tokens} = grammar.tokenizeLine "f'{abc:{align}d}'" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: ':', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[5]).toEqual value: '{align}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python'] + expect(tokens[6]).toEqual value: 'd', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python'] + expect(tokens[7]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] + + it "tokenizes backslashes as invalid", -> + {tokens} = grammar.tokenizeLine "f'{ab\\n}'" + + expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] + expect(tokens[3]).toEqual value: 'ab', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] + expect(tokens[4]).toEqual value: '\\', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] describe "binary strings", -> - types = - 'b': 'binary' - 'B': 'binary' - 'rb': 'raw-binary' - 'RB': 'raw-binary' - 'br': 'raw-binary' - 'BR': 'raw-binary' - - quotes = - '"': 'double.single-line' - "'": 'single.single-line' - '"""': 'double.block' - "'''": 'single.block' - - for type, typeScope of types - for quote, quoteScope of quotes - it "tokenizes them", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}test#{quote}" - - expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] - expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] - expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[3]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 
'punctuation.definition.string.end.python'] - - it "tokenizes invalid characters", -> - {tokens} = grammar.tokenizeLine "#{type}#{quote}tést#{quote}" - - expect(tokens[0]).toEqual value: type, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'storage.type.string.python'] - expect(tokens[1]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.begin.python'] - expect(tokens[2]).toEqual value: 't', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[3]).toEqual value: 'é', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'invalid.illegal.character-out-of-range.python'] - expect(tokens[4]).toEqual value: 'st', scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python"] - expect(tokens[5]).toEqual value: quote, scopes: ['source.python', "string.quoted.#{quoteScope}.#{typeScope}.python", 'punctuation.definition.string.end.python'] + it "tokenizes them", -> + {tokens} = grammar.tokenizeLine "b'test'" + + expect(tokens[0]).toEqual value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', "string.quoted.single.single-line.binary.python"] + expect(tokens[3]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python'] + + it "tokenizes invalid characters", -> + {tokens} = grammar.tokenizeLine "b'tést'" + + expect(tokens[0]).toEqual value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python'] + expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.begin.python'] + expect(tokens[2]).toEqual value: 't', scopes: ['source.python', "string.quoted.single.single-line.binary.python"] + expect(tokens[3]).toEqual value: 'é', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'invalid.illegal.character-out-of-range.python'] + expect(tokens[4]).toEqual value: 'st', scopes: ['source.python', "string.quoted.single.single-line.binary.python"] + expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python'] describe "string formatting", -> describe "%-style formatting", -> From ecaf5645c7a58840694037deb858dd0dc129d578 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 22 Jan 2018 20:25:33 -0500 Subject: [PATCH 186/291] Remove highlighting of missing parameters It's a bit too complicated to do now with annotations in the mix. 
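To make the reasoning above concrete, a sketch of the two cases the removed heuristic had to tell apart; the first line is the spec example being deleted here, the annotated function is invented.

    # previously flagged as invalid.illegal.missing-parameters.python:
    #   def test # whoops
    # must still tokenize cleanly under the annotation-aware function rule:
    def ok(x) -> int:
        return x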
--- grammars/python.cson | 6 ------ spec/python-spec.coffee | 13 ------------- 2 files changed, 19 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index ed7ee5b..ca7e36b 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -318,12 +318,6 @@ '2': 'name': 'storage.type.python' } - { - # No match, not at the end of the line, and no opening parentheses - 'begin': '(?!\\G)(?!\\s*$)(?!.*\\()' - 'end': '$' - 'name': 'invalid.illegal.missing-parameters.python' - } ] } { diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 4fbf2dd..978fab7 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -634,19 +634,6 @@ describe "Python grammar", -> expect(tokens[2]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] expect(tokens[4]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] - it "tokenizes functions that are missing parameters", -> - {tokens} = grammar.tokenizeLine 'def test # whoops' - - expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] - expect(tokens[3]).toEqual value: ' # whoops', scopes: ['source.python', 'meta.function.python', 'invalid.illegal.missing-parameters.python'] - - {tokens} = grammar.tokenizeLine 'def test:' - - expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] - expect(tokens[3]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'invalid.illegal.missing-parameters.python'] - it "tokenizes comments inside function parameters", -> {tokens} = grammar.tokenizeLine('def test(arg, # comment') From 8c5320dcead2fc723d66e7b078d3b68d423aa5ca Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Tue, 23 Jan 2018 15:16:46 -0500 Subject: [PATCH 187/291] Tokenize function names in function/method calls --- grammars/python.cson | 55 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 7 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index bffbcaf..1a906a2 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -399,20 +399,20 @@ ] } { - 'begin': '(?:([A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*)|(?<=\\)|\\]))\\s*(\\()' + 'begin': '(?:([A-Za-z_][A-Za-z0-9_]*)|(?<=\\)|\\]))\\s*(\\()' 'beginCaptures': '1': 'patterns': [ { - 'include': '#dotted_name' + 'include': '#function_names' } ] '2': - 'name': 'punctuation.definition.arguments.begin.python' + 'name': 'punctuation.definition.arguments.begin.bracket.round.python' 'end': '\\)' 'endCaptures': '0': - 'name': 'punctuation.definition.arguments.end.python' + 'name': 'punctuation.definition.arguments.end.bracket.round.python' 'name': 'meta.function-call.python' 'contentName': 'meta.function-call.arguments.python' 'patterns': [ @@ -424,6 +424,34 @@ } ] } + { + 'begin': '(\\.)([a-zA-Z_][a-zA-Z0-9_]*)\\s*(\\()' + 'beginCaptures': + '1': + 'name': 'punctuation.separator.method.period.python' + '2': + 'patterns': [ + { + 'include': '#function_names' + } + ] + '3': + 'name': 'punctuation.definition.arguments.begin.bracket.round.python' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 
'punctuation.definition.arguments.end.bracket.round.python' + 'name': 'meta.method-call.python' + 'contentName': 'meta.method-call.arguments.python' + 'patterns': [ + { + 'include': '#keyword_arguments' + } + { + 'include': '$self' + } + ] + } { 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\[)' 'end': '(\\])' @@ -492,9 +520,6 @@ { 'include': '#string_quoted_double' } - { - 'include': '#dotted_name' - } { 'include': '#language_variables' } @@ -638,6 +663,22 @@ ] } ] + 'function_names': + 'patterns': [ + { + 'include': '#magic_function_names' + } + { + 'include': '#magic_variable_names' + } + { + 'include': '#illegal_names' + } + { + 'match': '[a-zA-Z_][a-zA-Z0-9_]*' + 'name': 'entity.name.function.python' + } + ] 'line_comments': 'begin': '(^[ \\t]+)?(?=#)' 'beginCaptures': From da5eeadc462a416cecc3fe18c91082079061e056 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Tue, 23 Jan 2018 15:17:48 -0500 Subject: [PATCH 188/291] Tokenize objects and properties --- grammars/python.cson | 67 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/grammars/python.cson b/grammars/python.cson index 1a906a2..7ad4013 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -452,6 +452,12 @@ } ] } + { + 'include': '#objects' + } + { + 'include': '#properties' + } { 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\[)' 'end': '(\\])' @@ -883,6 +889,67 @@ 'comment': 'magic variables which a class/module may have.' 'match': '\\b__(all|annotations|bases|class|closure|code|debug|dict|doc|file|func|globals|kwdefaults|members|metaclass|methods|module|name|qualname|self|slots|weakref)__\\b' 'name': 'support.variable.magic.python' + 'objects': + 'patterns': [ + { + # OBJ in OBJ.prop, OBJ.methodCall() + 'match': '[A-Z][A-Z0-9_]*(?=\\s*\\.\\s*[a-zA-Z_][a-zA-Z0-9_]*)' + 'name': 'constant.other.object.python' + } + { + # obj in obj.prop, obj.methodCall() + 'match': '[a-zA-Z_][a-zA-Z0-9_]*(?=\\s*\\.\\s*[a-zA-Z_][a-zA-Z0-9_]*)' + 'name': 'variable.other.object.python' + } + ] + 'properties': + 'patterns': [ + { + # PROP1 in obj.PROP1.prop2, func().PROP1.prop2 + 'match': '(\\.)\\s*([A-Z][A-Z0-9_]*\\b\\$*)(?=\\s*\\.\\s*[a-zA-Z_][a-zA-Z0-9_]*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.python' + '2': + 'name': 'constant.other.object.property.python' + } + { + # prop1 in obj.prop1.prop2, func().prop1.prop2 + 'match': '(\\.)\\s*(\\$*[a-zA-Z_][a-zA-Z0-9_]*)(?=\\s*\\.\\s*[a-zA-Z_][a-zA-Z0-9_]*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.python' + '2': + 'name': 'variable.other.object.property.python' + } + { + # PROP in obj.PROP, func().PROP + 'match': '(\\.)\\s*([A-Z][A-Z0-9_$]*\\b\\$*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.python' + '2': + 'name': 'constant.other.property.python' + } + { + # prop in obj.prop, func().prop + 'match': '(\\.)\\s*(\\$*[a-zA-Z_][a-zA-Z0-9_]*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.python' + '2': + 'name': 'variable.other.property.python' + } + { + # 123illegal in obj.123illegal, func().123illegal + 'match': '(\\.)\\s*([0-9][a-zA-Z0-9_]*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.python' + '2': + 'name': 'invalid.illegal.identifier.python' + } + ] 'nested_replacement_field': 'match': '''(?x) { From 421f783a58fe4cf2ff787882d539ae06fc0b1643 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Tue, 23 Jan 2018 
15:19:40 -0500 Subject: [PATCH 189/291] :art: Reorganization --- grammars/python.cson | 112 ++++++++++++++++++++++++------------------- 1 file changed, 62 insertions(+), 50 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 7ad4013..bfec77c 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -399,58 +399,10 @@ ] } { - 'begin': '(?:([A-Za-z_][A-Za-z0-9_]*)|(?<=\\)|\\]))\\s*(\\()' - 'beginCaptures': - '1': - 'patterns': [ - { - 'include': '#function_names' - } - ] - '2': - 'name': 'punctuation.definition.arguments.begin.bracket.round.python' - 'end': '\\)' - 'endCaptures': - '0': - 'name': 'punctuation.definition.arguments.end.bracket.round.python' - 'name': 'meta.function-call.python' - 'contentName': 'meta.function-call.arguments.python' - 'patterns': [ - { - 'include': '#keyword_arguments' - } - { - 'include': '$self' - } - ] + 'include': '#function_calls' } { - 'begin': '(\\.)([a-zA-Z_][a-zA-Z0-9_]*)\\s*(\\()' - 'beginCaptures': - '1': - 'name': 'punctuation.separator.method.period.python' - '2': - 'patterns': [ - { - 'include': '#function_names' - } - ] - '3': - 'name': 'punctuation.definition.arguments.begin.bracket.round.python' - 'end': '\\)' - 'endCaptures': - '0': - 'name': 'punctuation.definition.arguments.end.bracket.round.python' - 'name': 'meta.method-call.python' - 'contentName': 'meta.method-call.arguments.python' - 'patterns': [ - { - 'include': '#keyword_arguments' - } - { - 'include': '$self' - } - ] + 'include': '#method_calls' } { 'include': '#objects' @@ -669,6 +621,35 @@ ] } ] + 'function_calls': + 'patterns': [ + { + 'begin': '(?:([A-Za-z_][A-Za-z0-9_]*)|(?<=\\)|\\]))\\s*(\\()' + 'beginCaptures': + '1': + 'patterns': [ + { + 'include': '#function_names' + } + ] + '2': + 'name': 'punctuation.definition.arguments.begin.bracket.round.python' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.arguments.end.bracket.round.python' + 'name': 'meta.function-call.python' + 'contentName': 'meta.function-call.arguments.python' + 'patterns': [ + { + 'include': '#keyword_arguments' + } + { + 'include': '$self' + } + ] + } + ] 'function_names': 'patterns': [ { @@ -889,6 +870,37 @@ 'comment': 'magic variables which a class/module may have.' 
'match': '\\b__(all|annotations|bases|class|closure|code|debug|dict|doc|file|func|globals|kwdefaults|members|metaclass|methods|module|name|qualname|self|slots|weakref)__\\b' 'name': 'support.variable.magic.python' + 'method_calls': + 'patterns': [ + { + 'begin': '(\\.)([a-zA-Z_][a-zA-Z0-9_]*)\\s*(\\()' + 'beginCaptures': + '1': + 'name': 'punctuation.separator.method.period.python' + '2': + 'patterns': [ + { + 'include': '#function_names' + } + ] + '3': + 'name': 'punctuation.definition.arguments.begin.bracket.round.python' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.arguments.end.bracket.round.python' + 'name': 'meta.method-call.python' + 'contentName': 'meta.method-call.arguments.python' + 'patterns': [ + { + 'include': '#keyword_arguments' + } + { + 'include': '$self' + } + ] + } + ] 'objects': 'patterns': [ { From 84c4815c28a8f8bcea2d67130c6e5f04fa8f3f29 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Tue, 23 Jan 2018 21:23:00 -0500 Subject: [PATCH 190/291] Get builtin functions tokenizing correctly --- grammars/python.cson | 67 +++++++++++++++----------------------------- 1 file changed, 23 insertions(+), 44 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index bfec77c..39faa62 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -628,6 +628,9 @@ 'beginCaptures': '1': 'patterns': [ + { + 'include': '#builtin_functions' + } { 'include': '#function_names' } @@ -650,38 +653,6 @@ ] } ] - 'function_names': - 'patterns': [ - { - 'include': '#magic_function_names' - } - { - 'include': '#magic_variable_names' - } - { - 'include': '#illegal_names' - } - { - 'match': '[a-zA-Z_][a-zA-Z0-9_]*' - 'name': 'entity.name.function.python' - } - ] - 'line_comments': - 'begin': '(^[ \\t]+)?(?=#)' - 'beginCaptures': - '1': - 'name': 'punctuation.whitespace.comment.leading.python' - 'end': '(?!\\G)' - 'patterns': [ - { - 'begin': '#' - 'beginCaptures': - '0': - 'name': 'punctuation.definition.comment.python' - 'end': '\\n' - 'name': 'comment.line.number-sign.python' - } - ] 'dotted_name': 'begin': '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*)' 'end': '(?![A-Za-z0-9_\\.])' @@ -810,25 +781,17 @@ '3': 'name': 'constant.character.escape.unicode.name.python' 'match': '(\\\\U[0-9A-Fa-f]{8})|(\\\\u[0-9A-Fa-f]{4})|(\\\\N\\{[a-zA-Z ]+\\})' - 'function_name': + 'function_names': 'patterns': [ { 'include': '#magic_function_names' } { - 'include': '#magic_variable_names' - } - { - 'include': '#builtin_exceptions' - } - { - 'include': '#builtin_functions' - } - { - 'include': '#builtin_types' + 'include': '#illegal_names' } { - 'include': '#generic_names' + 'match': '[a-zA-Z_][a-zA-Z0-9_]*' + 'name': 'entity.name.function.python' } ] 'generic_names': @@ -855,6 +818,22 @@ 'language_variables': 'match': '\\b(self|cls)\\b' 'name': 'variable.language.self.python' + 'line_comments': + 'begin': '(^[ \\t]+)?(?=#)' + 'beginCaptures': + '1': + 'name': 'punctuation.whitespace.comment.leading.python' + 'end': '(?!\\G)' + 'patterns': [ + { + 'begin': '#' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.python' + 'end': '\\n' + 'name': 'comment.line.number-sign.python' + } + ] 'line_continuation': 'captures': '1': From 3e7969f3dcc73eacbc3a060a917f8d52a804fc1a Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Fri, 26 Jan 2018 14:26:05 -0500 Subject: [PATCH 191/291] Prepare 0.48.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json 
b/package.json index 9710447..5c86613 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.47.0", + "version": "0.48.0", "engines": { "atom": "*", "node": "*" From 8c968a52f5efbb058aad6783f96180ba3ebb2367 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sat, 27 Jan 2018 12:13:46 -0500 Subject: [PATCH 192/291] Fix specs --- grammars/python.cson | 14 ++++++-- spec/python-spec.coffee | 77 ++++++++++++++++++----------------------- 2 files changed, 45 insertions(+), 46 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index d7f618d..36afd60 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -23,6 +23,9 @@ { 'include': '#line_comments' } + { + 'include': '#language_variables' + } { 'match': '\\b(?i:(0x\\h*)L)' 'name': 'constant.numeric.integer.long.hexadecimal.python' @@ -504,9 +507,6 @@ { 'include': '#string_quoted_double' } - { - 'include': '#language_variables' - } { 'begin': '(\\()' 'end': '(\\))' @@ -673,6 +673,10 @@ { 'include': '#keyword_arguments' } + { + 'match': ',' + 'name': 'punctuation.separator.arguments.python' + } { 'include': '$self' } @@ -900,6 +904,10 @@ { 'include': '#keyword_arguments' } + { + 'match': ',' + 'name': 'punctuation.separator.arguments.python' + } { 'include': '$self' } diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 978fab7..5644506 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -307,18 +307,18 @@ describe "Python grammar", -> {tokens} = grammar.tokenizeLine "f'{name.decode(\"utf-8\").lower()}'" expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python'] - expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] - expect(tokens[7]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.begin.python'] - expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.double.single-line.python"] - expect(tokens[9]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'meta.function-call.arguments.python', "string.quoted.double.single-line.python", 
'punctuation.definition.string.end.python'] - expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] - expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python'] - expect(tokens[12]).toEqual value: 'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python'] - expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] - expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] + expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'variable.other.object.python'] + expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python'] + expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.begin.python'] + expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python"] + expect(tokens[9]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.end.python'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] + expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python'] + expect(tokens[12]).toEqual value: 
'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python'] + expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] + expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] expect(tokens[15]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python'] it "tokenizes conversion flags", -> @@ -599,32 +599,23 @@ describe "Python grammar", -> it "tokenizes properties of self as self-type variables", -> tokens = grammar.tokenizeLines('self.foo') - expect(tokens[0][0].value).toBe 'self' - expect(tokens[0][0].scopes).toEqual ['source.python', 'variable.language.self.python'] - expect(tokens[0][1].value).toBe '.' - expect(tokens[0][1].scopes).toEqual ['source.python'] - expect(tokens[0][2].value).toBe 'foo' - expect(tokens[0][2].scopes).toEqual ['source.python'] + expect(tokens[0][0]).toEqual value: 'self', scopes: ['source.python', 'variable.language.self.python'] + expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] + expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python'] it "tokenizes cls as a self-type variable", -> tokens = grammar.tokenizeLines('cls.foo') - expect(tokens[0][0].value).toBe 'cls' - expect(tokens[0][0].scopes).toEqual ['source.python', 'variable.language.self.python'] - expect(tokens[0][1].value).toBe '.' - expect(tokens[0][1].scopes).toEqual ['source.python'] - expect(tokens[0][2].value).toBe 'foo' - expect(tokens[0][2].scopes).toEqual ['source.python'] + expect(tokens[0][0]).toEqual value: 'cls', scopes: ['source.python', 'variable.language.self.python'] + expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] + expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python'] it "tokenizes properties of a variable as variables", -> tokens = grammar.tokenizeLines('bar.foo') - expect(tokens[0][0].value).toBe 'bar' - expect(tokens[0][0].scopes).toEqual ['source.python'] - expect(tokens[0][1].value).toBe '.' 
- expect(tokens[0][1].scopes).toEqual ['source.python'] - expect(tokens[0][2].value).toBe 'foo' - expect(tokens[0][2].scopes).toEqual ['source.python'] + expect(tokens[0][0]).toEqual value: 'bar', scopes: ['source.python', 'variable.other.object.python'] + expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] + expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python'] it "tokenizes async function definitions", -> {tokens} = grammar.tokenizeLine 'async def test(param):' @@ -689,19 +680,19 @@ describe "Python grammar", -> it "tokenizes complex function calls", -> {tokens} = grammar.tokenizeLine "torch.nn.BCELoss()(Variable(bayes_optimal_prob, 1, requires_grad=False), Yvar).data[0]" - expect(tokens[4]).toEqual value: 'BCELoss', scopes: ['source.python', 'meta.function-call.python'] - expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] - expect(tokens[6]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] - expect(tokens[7]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] - expect(tokens[8]).toEqual value: 'Variable', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python'] - expect(tokens[9]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.python'] + expect(tokens[4]).toEqual value: 'BCELoss', scopes: ['source.python', 'meta.method-call.python', 'entity.name.function.python'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] + expect(tokens[6]).toEqual value: ')', scopes: ['source.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] + expect(tokens[8]).toEqual value: 'Variable', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'entity.name.function.python'] + expect(tokens[9]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] expect(tokens[10]).toEqual value: 'bayes_optimal_prob', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python'] - expect(tokens[14]).toEqual value: 'requires_grad', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'variable.parameter.function.python'] - expect(tokens[16]).toEqual value: 'False', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'constant.language.python'] - expect(tokens[17]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 
'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] - expect(tokens[18]).toEqual value: ', ', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python'] - expect(tokens[20]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.python'] - expect(tokens[21]).toEqual value: '.', scopes: ['source.python'] + expect(tokens[16]).toEqual value: 'requires_grad', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'variable.parameter.function.python'] + expect(tokens[18]).toEqual value: 'False', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'constant.language.python'] + expect(tokens[19]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] + expect(tokens[20]).toEqual value: ',', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'punctuation.separator.arguments.python'] + expect(tokens[22]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] + expect(tokens[23]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] it "tokenizes lambdas", -> {tokens} = grammar.tokenizeLine "lambda x, z = 4: x * z" @@ -714,7 +705,7 @@ describe "Python grammar", -> expect(tokens[7]).toEqual value: '=', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'keyword.operator.assignment.python'] expect(tokens[9]).toEqual value: '4', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'constant.numeric.integer.decimal.python'] expect(tokens[10]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] - expect(tokens[11]).toEqual value: ' ', scopes: ['source.python'] + expect(tokens[11]).toEqual value: ' x ', scopes: ['source.python'] it "tokenizes SQL inline highlighting on blocks", -> delimsByScope = From 1522ac01752f90339c061769e1dad7b47b0a212f Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 4 Feb 2018 23:21:04 -0500 Subject: [PATCH 193/291] Prepare 0.49.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 5c86613..3fa7ef7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.48.0", + "version": "0.49.0", "engines": { "atom": "*", "node": "*" From 8bdd7a4d54a2a07cc45ed5070af46a403ac0035f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 12 Feb 2018 21:48:51 -0800 Subject: [PATCH 194/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3fa7ef7..ad13071 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "url": "https://github.com/atom/language-python/issues" }, "dependencies": { - "tree-sitter-python": "^0.3.0" + "tree-sitter-python": "^0.4.0" }, "devDependencies": { "coffeelint": "^1.10.1" From 
e977d904b127876814b62d6ff04298279c981a8f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 12 Feb 2018 21:50:20 -0800 Subject: [PATCH 195/291] Prepare 0.49.1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ad13071..52d1c84 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.49.0", + "version": "0.49.1", "engines": { "atom": "*", "node": "*" From b1fb6eadd4ab661348ad635ee469a0f32eba03a7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 16 Feb 2018 13:54:48 -0800 Subject: [PATCH 196/291] Add missing highlighting in tree-sitter grammar --- grammars/tree-sitter-python.cson | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 8a0b960..b513a35 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -57,6 +57,8 @@ scopes: 'none': 'constant.language' 'true': 'constant.language' 'false': 'constant.language' + 'integer': 'constant.language' + 'float': 'constant.language' 'type > identifier': 'support.storage.type' @@ -73,6 +75,7 @@ scopes: '"break"': 'keyword.control' '"continue"': 'keyword.control' '"raise"': 'keyword.control' + '"yield"': 'keyword.control' '"try"': 'keyword.control' '"except"': 'keyword.control' '"with"': 'keyword.control' @@ -80,6 +83,8 @@ scopes: '"finally"': 'keyword.control' '"import"': 'keyword.control' '"from"': 'keyword.control' + '"print"': 'keyword.control' + '"assert"': 'keyword.control' '"+"': 'keyword.operator' '"-"': 'keyword.operator' @@ -87,7 +92,7 @@ scopes: '"/"': 'keyword.operator' '"%"': 'keyword.operator' '"in"': 'keyword.operator.in' - '"and"': 'keyword.operator.logical' - '"or"': 'keyword.operator.logical' - '"not"': 'keyword.operator.logical' - '"is"': 'keyword.operator.logical' + '"and"': 'keyword.operator.logical.python' + '"or"': 'keyword.operator.logical.python' + '"not"': 'keyword.operator.logical.python' + '"is"': 'keyword.operator.logical.python' From e835e3a176fec97c3420f18d60777901d2ff44e7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 16 Feb 2018 14:48:13 -0800 Subject: [PATCH 197/291] Prepare 0.49.2 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 52d1c84..9d9df13 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.49.1", + "version": "0.49.2", "engines": { "atom": "*", "node": "*" From 0a0fe39fc7187446b8e3c6b1e37951a074ae878b Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Fri, 23 Mar 2018 14:09:33 -0400 Subject: [PATCH 198/291] Inject Python string patterns into embedded SQL strings --- grammars/python.cson | 38 ++++++-------- spec/python-spec.coffee | 106 ++++++++++++++++++++-------------------- 2 files changed, 68 insertions(+), 76 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 36afd60..f17d339 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -19,6 +19,16 @@ 'wsgi' ] 'firstLineMatch': '^#![ \\t]*/.*\\bpython[\\d\\.]*\\b' +'injections': + 'L:source.python meta.embedded.sql': + 'patterns': [ + { + 'include': '#string_formatting' + } + { + 'include': '#escaped_char' + } + ] 'patterns': [ { 'include': '#line_comments' @@ -1620,6 +1630,7 @@ '2': 'name': 'meta.empty-string.double.python' 'name': 'string.quoted.double.block.sql.python' + 'contentName': 'meta.embedded.sql' 
'patterns': [ { 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' @@ -1630,12 +1641,6 @@ } ] } - { - 'include': '#string_formatting' - } - { - 'include': '#escaped_char' - } ] } { @@ -1653,13 +1658,8 @@ '3': 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.double.single-line.sql.python' + 'contentName': 'meta.embedded.sql' 'patterns': [ - { - 'include': '#string_formatting' - } - { - 'include': '#escaped_char' - } { 'include': 'source.sql' } @@ -2201,6 +2201,7 @@ '2': 'name': 'meta.empty-string.single.python' 'name': 'string.quoted.single.block.sql.python' + 'contentName': 'meta.embedded.sql' 'patterns': [ { 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' @@ -2211,12 +2212,6 @@ } ] } - { - 'include': '#string_formatting' - } - { - 'include': '#escaped_char' - } ] } { @@ -2232,13 +2227,8 @@ '2': 'name': 'invalid.illegal.unclosed-string.python' 'name': 'string.quoted.single.single-line.python' + 'contentName': 'meta.embedded.sql' 'patterns': [ - { - 'include': '#string_formatting' - } - { - 'include': '#escaped_char' - } { 'include': 'source.sql' } diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 5644506..c2dba66 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -707,57 +707,59 @@ describe "Python grammar", -> expect(tokens[10]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] expect(tokens[11]).toEqual value: ' x ', scopes: ['source.python'] - it "tokenizes SQL inline highlighting on blocks", -> - delimsByScope = - "string.quoted.double.block.sql.python": '"""' - "string.quoted.single.block.sql.python": "'''" - - for scope, delim in delimsByScope - tokens = grammar.tokenizeLines( - delim + - 'SELECT bar - FROM foo' - + delim - ) - - expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] - expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] - expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] - - it "tokenizes SQL inline highlighting on blocks with a CTE", -> - delimsByScope = - "string.quoted.double.block.sql.python": '"""' - "string.quoted.single.block.sql.python": "'''" - - for scope, delim of delimsByScope - tokens = grammar.tokenizeLines(""" - #{delim} - WITH example_cte AS ( - SELECT bar - FROM foo - GROUP BY bar + # FIXME: These tests are quite useless as they don't actually use the language-sql package + describe "SQL highlighting", -> + it "tokenizes SQL inline highlighting on blocks", -> + delimsByScope = + "string.quoted.double.block.sql.python": '"""' + "string.quoted.single.block.sql.python": "'''" + + for scope, delim in delimsByScope + tokens = grammar.tokenizeLines( + delim + + 'SELECT bar + FROM foo' + + delim ) - SELECT COUNT(*) - FROM example_cte - #{delim} - """) - - expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (', scopes: ['source.python', scope] - expect(tokens[2][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope] - expect(tokens[3][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope] - expect(tokens[4][0]).toEqual value: 'GROUP BY bar', scopes: ['source.python', scope] - 
expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope] - expect(tokens[6][0]).toEqual value: '', scopes: ['source.python', scope] - expect(tokens[7][0]).toEqual value: 'SELECT COUNT(*)', scopes: ['source.python', scope] - expect(tokens[8][0]).toEqual value: 'FROM example_cte', scopes: ['source.python', scope] - expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] - - it "tokenizes SQL inline highlighting on single line with a CTE", -> - {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') - - expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] - expect(tokens[1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python'] - expect(tokens[2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] + expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] + + it "tokenizes SQL inline highlighting on blocks with a CTE", -> + delimsByScope = + "string.quoted.double.block.sql.python": '"""' + "string.quoted.single.block.sql.python": "'''" + + for scope, delim of delimsByScope + tokens = grammar.tokenizeLines(""" + #{delim} + WITH example_cte AS ( + SELECT bar + FROM foo + GROUP BY bar + ) + + SELECT COUNT(*) + FROM example_cte + #{delim} + """) + + expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] + expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[2][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[3][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[4][0]).toEqual value: 'GROUP BY bar', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[6][0]).toEqual value: '', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[7][0]).toEqual value: 'SELECT COUNT(*)', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[8][0]).toEqual value: 'FROM example_cte', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] + + it "tokenizes SQL inline highlighting on single line with a CTE", -> + {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') + + expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: 
['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] From 3ec9d3d0d1eace59af99e1c9444e9776a645c63c Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 25 Mar 2018 13:58:52 -0400 Subject: [PATCH 199/291] Specs --- spec/python-spec.coffee | 67 +++++++++++++++++++++++++++++++++-------- 1 file changed, 54 insertions(+), 13 deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index c2dba66..01c0258 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -707,8 +707,11 @@ describe "Python grammar", -> expect(tokens[10]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] expect(tokens[11]).toEqual value: ' x ', scopes: ['source.python'] - # FIXME: These tests are quite useless as they don't actually use the language-sql package describe "SQL highlighting", -> + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage('language-sql') + it "tokenizes SQL inline highlighting on blocks", -> delimsByScope = "string.quoted.double.block.sql.python": '"""' @@ -723,8 +726,10 @@ describe "Python grammar", -> ) expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[1][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[1][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[2][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[2][1]).toEqual value ' foo', scopes: ['source.python', scope, 'meta.embedded.sql'] expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] it "tokenizes SQL inline highlighting on blocks with a CTE", -> @@ -747,19 +752,55 @@ describe "Python grammar", -> """) expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python'] - expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[2][0]).toEqual value: 'SELECT bar', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[3][0]).toEqual value: 'FROM foo', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[4][0]).toEqual value: 'GROUP BY bar', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[6][0]).toEqual value: '', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[7][0]).toEqual value: 'SELECT COUNT(*)', scopes: ['source.python', scope, 'meta.embedded.sql'] - expect(tokens[8][0]).toEqual value: 'FROM example_cte', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[1][0]).toEqual value: 'WITH', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[1][1]).toEqual value: ' example_cte ', scopes: ['source.python', scope, 
'meta.embedded.sql'] + expect(tokens[1][2]).toEqual value: 'AS', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.alias.sql'] + expect(tokens[1][3]).toEqual value: ' ', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[1][4]).toEqual value: '(', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] + expect(tokens[2][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[2][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[3][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[3][1]).toEqual value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[4][0]).toEqual value: 'GROUP BY', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[4][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql'] + expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] + expect(tokens[7][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[8][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql'] expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] it "tokenizes SQL inline highlighting on single line with a CTE", -> {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] - expect(tokens[1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[2]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] + expect(tokens[1]).toEqual value: 'WITH', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[2]).toEqual value: ' example_cte ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[3]).toEqual value: 'AS', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.alias.sql'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] + expect(tokens[6]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[7]).toEqual value: ' bar ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[8]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[9]).toEqual 
value: ' foo', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] + expect(tokens[11]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[12]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[13]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[14]).toEqual value: 'COUNT', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'support.function.aggregate.sql'] + expect(tokens[15]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] + expect(tokens[16]).toEqual value: '*', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.operator.star.sql'] + expect(tokens[17]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] + expect(tokens[18]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[19]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[20]).toEqual value: ' example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] + expect(tokens[21]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] + + it "tokenizes Python escape characters and formatting specifiers in SQL strings", -> + {tokens} = grammar.tokenizeLine('"INSERT INTO url (image_uri) VALUES (\\\'%s\\\');" % values') + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.begin.python'] + expect(tokens[10]).toEqual value: '\\\'', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.character.escape.single-quote.python'] + expect(tokens[11]).toEqual value: '%s', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.other.placeholder.python'] + expect(tokens[12]).toEqual value: '\\\'', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.character.escape.single-quote.python'] + expect(tokens[13]).toEqual value: ')', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] + expect(tokens[15]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.end.python'] + expect(tokens[17]).toEqual value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python'] From 1f06de1cb96d6be7476969abcd417689e0d66e72 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Sun, 25 Mar 2018 15:23:20 -0400 Subject: [PATCH 200/291] Add missing sql 
scope to single-quoted single-line SQL strings --- grammars/python.cson | 2 +- spec/python-spec.coffee | 44 ++++++++++++++++++++--------------------- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index f17d339..68c1ee6 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -2226,7 +2226,7 @@ 'name': 'punctuation.definition.string.end.python' '2': 'name': 'invalid.illegal.unclosed-string.python' - 'name': 'string.quoted.single.single-line.python' + 'name': 'string.quoted.single.single-line.sql.python' 'contentName': 'meta.embedded.sql' 'patterns': [ { diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 01c0258..55e47f6 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -771,28 +771,28 @@ describe "Python grammar", -> it "tokenizes SQL inline highlighting on single line with a CTE", -> {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') - expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] - expect(tokens[1]).toEqual value: 'WITH', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] - expect(tokens[2]).toEqual value: ' example_cte ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[3]).toEqual value: 'AS', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.alias.sql'] - expect(tokens[4]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] - expect(tokens[6]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] - expect(tokens[7]).toEqual value: ' bar ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[8]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] - expect(tokens[9]).toEqual value: ' foo', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[10]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] - expect(tokens[11]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[12]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] - expect(tokens[13]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[14]).toEqual value: 'COUNT', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'support.function.aggregate.sql'] - expect(tokens[15]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] - expect(tokens[16]).toEqual value: '*', scopes: 
['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.operator.star.sql'] - expect(tokens[17]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] - expect(tokens[18]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[19]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] - expect(tokens[20]).toEqual value: ' example_cte', scopes: ['source.python', 'string.quoted.single.single-line.python', 'meta.embedded.sql'] - expect(tokens[21]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] + expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: 'WITH', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[2]).toEqual value: ' example_cte ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[3]).toEqual value: 'AS', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.alias.sql'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] + expect(tokens[6]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[7]).toEqual value: ' bar ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[8]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[9]).toEqual value: ' foo', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] + expect(tokens[11]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[12]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[13]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[14]).toEqual value: 'COUNT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'support.function.aggregate.sql'] + expect(tokens[15]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql'] + expect(tokens[16]).toEqual value: '*', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 
'meta.embedded.sql', 'keyword.operator.star.sql'] + expect(tokens[17]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] + expect(tokens[18]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[19]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql'] + expect(tokens[20]).toEqual value: ' example_cte', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql'] + expect(tokens[21]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.end.python'] it "tokenizes Python escape characters and formatting specifiers in SQL strings", -> {tokens} = grammar.tokenizeLine('"INSERT INTO url (image_uri) VALUES (\\\'%s\\\');" % values') From 2fe4680deea64213df50bb64457b53ca99aae22c Mon Sep 17 00:00:00 2001 From: Ash Wilson Date: Mon, 26 Mar 2018 15:07:08 -0400 Subject: [PATCH 201/291] Prepare 0.49.3 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9d9df13..94b9a4a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.49.2", + "version": "0.49.3", "engines": { "atom": "*", "node": "*" From 09f71df695a988d55f07ba30f27faf4be203bc78 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Wed, 28 Mar 2018 17:32:20 -0400 Subject: [PATCH 202/291] Not all docstrings are SQL --- grammars/python.cson | 8 ++++---- spec/python-spec.coffee | 29 +++++++++++++++++++++++++++-- 2 files changed, 31 insertions(+), 6 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 68c1ee6..f4fd955 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1629,11 +1629,11 @@ 'name': 'punctuation.definition.string.end.python' '2': 'name': 'meta.empty-string.double.python' - 'name': 'string.quoted.double.block.sql.python' - 'contentName': 'meta.embedded.sql' + 'name': 'string.quoted.double.block.python' 'patterns': [ { 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' + 'name': 'meta.embedded.sql' 'end': '(?=\\s*""")' 'patterns': [ { @@ -2200,12 +2200,12 @@ 'name': 'punctuation.definition.string.end.python' '2': 'name': 'meta.empty-string.single.python' - 'name': 'string.quoted.single.block.sql.python' - 'contentName': 'meta.embedded.sql' + 'name': 'string.quoted.single.block.python' 'patterns': [ { 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' 'end': '(?=\\s*\'\'\')' + 'name': 'meta.embedded.sql' 'patterns': [ { 'include': 'source.sql' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 55e47f6..719b3c7 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -374,6 +374,29 @@ describe "Python grammar", -> expect(tokens[4]).toEqual value: 'st', scopes: ['source.python', "string.quoted.single.single-line.binary.python"] expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python'] + describe "docstrings", -> + it "tokenizes them", -> + lines = grammar.tokenizeLines ''' + """ + Bla bla bla "wow" what's this? 
+ """ + ''' + + expect(lines[0][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.begin.python'] + expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.double.block.python'] + expect(lines[2][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.end.python'] + + lines = grammar.tokenizeLines """ + ''' + Bla bla bla "wow" what's this? + ''' + """ + + expect(lines[0][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.begin.python'] + expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.single.block.python'] + expect(lines[2][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.end.python'] + + describe "string formatting", -> describe "%-style formatting", -> it "tokenizes the conversion type", -> @@ -733,9 +756,11 @@ describe "Python grammar", -> expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python'] it "tokenizes SQL inline highlighting on blocks with a CTE", -> + # Note that these scopes do not contain .sql because we can't definitively tell + # if the string contains SQL or not delimsByScope = - "string.quoted.double.block.sql.python": '"""' - "string.quoted.single.block.sql.python": "'''" + "string.quoted.double.block.python": '"""' + "string.quoted.single.block.python": "'''" for scope, delim of delimsByScope tokens = grammar.tokenizeLines(""" From 3c56985130e5758f19ae5470cc3102ef7233dd98 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 28 Mar 2018 14:49:16 -0700 Subject: [PATCH 203/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 94b9a4a..f52b328 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "url": "https://github.com/atom/language-python/issues" }, "dependencies": { - "tree-sitter-python": "^0.4.0" + "tree-sitter-python": "^0.11.0" }, "devDependencies": { "coffeelint": "^1.10.1" From 962d07e33a98cc8cf2ab467d6dd2fbb1804b482e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 28 Mar 2018 14:49:23 -0700 Subject: [PATCH 204/291] Prepare 0.49.4 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f52b328..f736bae 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.49.3", + "version": "0.49.4", "engines": { "atom": "*", "node": "*" From a6691d3f2ecf5ba2d7cb306dc9e2894a665b629b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Langa?= Date: Thu, 12 Apr 2018 16:26:26 -0700 Subject: [PATCH 205/291] Add more file types to tree-sitter-python --- grammars/tree-sitter-python.cson | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index b513a35..39f9ec2 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -6,6 +6,15 @@ legacyScopeName: 'source.python' fileTypes: [ 'py' + 'pyi' + 'pyw' + 'gyp' + 'gypi' + 'SConstruct' + 'Sconstruct' + 'sconstruct' + 'SConscript' + 'wsgi' ] folds: [ From aa7d8a79445313b8d2a928607700995ffdc372b0 Mon Sep 17 00:00:00 2001 From: Glen Mailer Date: Fri, 13 Apr 
2018 00:35:30 +0100 Subject: [PATCH 206/291] Also include .pyi files for the python grammar (#248) These are type stub files for PEP 484. --- grammars/python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/python.cson b/grammars/python.cson index f4fd955..6685db1 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -7,6 +7,7 @@ 'kv' 'py' 'pyw' + 'pyi' 'rpy' 'SConscript' 'SConstruct' From 36a8c282ba801f128e7532085796ae838fd305a6 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 12 Apr 2018 15:11:40 -0700 Subject: [PATCH 207/291] Add highlighting for async & await in tree-sitter mode --- grammars/tree-sitter-python.cson | 2 ++ package.json | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 39f9ec2..09471fc 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -85,6 +85,8 @@ scopes: '"continue"': 'keyword.control' '"raise"': 'keyword.control' '"yield"': 'keyword.control' + '"await"': 'keyword.control' + '"async"': 'keyword.control' '"try"': 'keyword.control' '"except"': 'keyword.control' '"with"': 'keyword.control' diff --git a/package.json b/package.json index f736bae..8404674 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "url": "https://github.com/atom/language-python/issues" }, "dependencies": { - "tree-sitter-python": "^0.11.0" + "tree-sitter-python": "^0.11.2" }, "devDependencies": { "coffeelint": "^1.10.1" From 37849918137989e20be44d1b0a4f8a2ffd77d836 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 12 Apr 2018 17:42:18 -0700 Subject: [PATCH 208/291] Give "in" the same highlighting as "and"/"or" --- grammars/tree-sitter-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 09471fc..ee5db99 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -102,7 +102,7 @@ scopes: '"*"': 'keyword.operator' '"/"': 'keyword.operator' '"%"': 'keyword.operator' - '"in"': 'keyword.operator.in' + '"in"': 'keyword.operator.logical.python' '"and"': 'keyword.operator.logical.python' '"or"': 'keyword.operator.logical.python' '"not"': 'keyword.operator.logical.python' From 2efe23931e44cf6e49c282aa8edf12c2ac599edb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 12 Apr 2018 17:43:12 -0700 Subject: [PATCH 209/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8404674..280a25b 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "url": "https://github.com/atom/language-python/issues" }, "dependencies": { - "tree-sitter-python": "^0.11.2" + "tree-sitter-python": "^0.11.3" }, "devDependencies": { "coffeelint": "^1.10.1" From 7286bf1e663c683dff3481b2126476cea242e1b5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 12 Apr 2018 17:43:21 -0700 Subject: [PATCH 210/291] Prepare 0.49.5 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 280a25b..1ce31c0 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.49.4", + "version": "0.49.5", "engines": { "atom": "*", "node": "*" From 74d5c53ce6a5ae5a0dedf17ceef44da1c636cc5f Mon Sep 17 00:00:00 2001 From: Falcon Dai Date: Wed, 18 Apr 2018 20:49:25 -0500 Subject: [PATCH 211/291] fix lambda parsing issues #246 --- 
grammars/python.cson | 2 +- spec/python-spec.coffee | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 6685db1..47a7eef 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -335,7 +335,7 @@ ] } { - 'begin': '(lambda)\\s+' + 'begin': '\\b(lambda)\\s?+' 'beginCaptures': '1': 'name': 'storage.type.function.inline.python' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 719b3c7..9923626 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -730,6 +730,15 @@ describe "Python grammar", -> expect(tokens[10]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] expect(tokens[11]).toEqual value: ' x ', scopes: ['source.python'] + it "tokenizes lambdas without arguments", -> + {tokens} = grammar.tokenizeLine "lambda: None" + expect(tokens[0]).toEqual value: 'lambda', scopes: ['source.python', 'meta.function.inline.python', 'storage.type.function.inline.python'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] + + it "does not tokenizes a variable name containing lambda as a lambda", -> + {tokens} = grammar.tokenizeLine "not_a_lambda.foo" + expect(tokens[0]).toEqual value: 'not_a_lambda', scopes: ['source.python', 'variable.other.object.python'] + describe "SQL highlighting", -> beforeEach -> waitsForPromise -> From 15d62797ac3e71ceb709ebcd922ea23b784bec2d Mon Sep 17 00:00:00 2001 From: Falcon Dai Date: Tue, 24 Apr 2018 15:18:31 -0500 Subject: [PATCH 212/291] add more test cases for lambda tokenization --- grammars/python.cson | 2 +- spec/python-spec.coffee | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index 47a7eef..81a1c34 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -335,7 +335,7 @@ ] } { - 'begin': '\\b(lambda)\\s?+' + 'begin': '\\b(lambda)(?=[\\s\\:])' 'beginCaptures': '1': 'name': 'storage.type.function.inline.python' diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 9923626..9a8939a 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -721,7 +721,6 @@ describe "Python grammar", -> {tokens} = grammar.tokenizeLine "lambda x, z = 4: x * z" expect(tokens[0]).toEqual value: 'lambda', scopes: ['source.python', 'meta.function.inline.python', 'storage.type.function.inline.python'] - expect(tokens[1]).toEqual value: ' ', scopes: ['source.python', 'meta.function.inline.python'] expect(tokens[2]).toEqual value: 'x', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'variable.parameter.function.python'] expect(tokens[3]).toEqual value: ',', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'punctuation.separator.parameters.python'] expect(tokens[5]).toEqual value: 'z', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'variable.parameter.function.python'] @@ -735,10 +734,14 @@ describe "Python grammar", -> expect(tokens[0]).toEqual value: 'lambda', scopes: ['source.python', 'meta.function.inline.python', 'storage.type.function.inline.python'] expect(tokens[1]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] - it "does not tokenizes a variable name containing lambda as a 
lambda", -> + it "does not tokenizes a variable name ending with lambda as a lambda", -> {tokens} = grammar.tokenizeLine "not_a_lambda.foo" expect(tokens[0]).toEqual value: 'not_a_lambda', scopes: ['source.python', 'variable.other.object.python'] + it "does not tokenizes a variable name starting with lambda as a lambda", -> + {tokens} = grammar.tokenizeLine "lambda_not.foo" + expect(tokens[0]).toEqual value: 'lambda_not', scopes: ['source.python', 'variable.other.object.python'] + describe "SQL highlighting", -> beforeEach -> waitsForPromise -> From abe1168c9d6860b86b0c5e2ac1c503980863c3e2 Mon Sep 17 00:00:00 2001 From: Falcon Dai Date: Tue, 24 Apr 2018 15:44:03 -0500 Subject: [PATCH 213/291] simplify lambda regex --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index 81a1c34..c2feb23 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -335,7 +335,7 @@ ] } { - 'begin': '\\b(lambda)(?=[\\s\\:])' + 'begin': '\\b(lambda)\\b' 'beginCaptures': '1': 'name': 'storage.type.function.inline.python' From bd780100a879cdd347ef852626e4ca463025b7b5 Mon Sep 17 00:00:00 2001 From: Kevin Stone Date: Sun, 22 Apr 2018 13:40:50 -0700 Subject: [PATCH 214/291] Added Syntax Tests for Grammar Validation --- package.json | 1 + spec/fixtures/grammar/syntax_test_python.py | 34 +++++++++++++++++++ .../grammar/syntax_test_python_typing.py | 23 +++++++++++++ spec/python-spec.coffee | 18 ++++------ 4 files changed, 64 insertions(+), 12 deletions(-) create mode 100644 spec/fixtures/grammar/syntax_test_python.py create mode 100644 spec/fixtures/grammar/syntax_test_python_typing.py diff --git a/package.json b/package.json index 1ce31c0..d8d1107 100644 --- a/package.json +++ b/package.json @@ -16,6 +16,7 @@ "url": "https://github.com/atom/language-python/issues" }, "dependencies": { + "atom-grammar-test": "^0.6.4", "tree-sitter-python": "^0.11.3" }, "devDependencies": { diff --git a/spec/fixtures/grammar/syntax_test_python.py b/spec/fixtures/grammar/syntax_test_python.py new file mode 100644 index 0000000..528176a --- /dev/null +++ b/spec/fixtures/grammar/syntax_test_python.py @@ -0,0 +1,34 @@ +# SYNTAX TEST "source.python" + + +def my_func(first, second=False, *third, **forth): +# <- storage.type.function +# ^^^^^^^ entity.name.function +# ^ punctuation.definition.parameters.begin +# ^^^^^ variable.parameter.function +# ^ punctuation.separator.parameters +# ^^^^^^ variable.parameter.function +# ^ keyword.operator.assignment +# ^^^^^ constant +# ^ punctuation.separator.parameters +# ^^^^^ variable.parameter.function +# ^ punctuation.separator.parameters +# ^^^^^ variable.parameter.function +# ^ punctuation.definition.function.begin + pass + + +my_func2 = lambda x, y=2, *z, **kw: x + y + 1 +# ^ keyword.operator.assignment +# ^^^^^ meta.function.inline storage.type.function.inline +# ^^^^^^^^^^^^^^^^ meta.function.inline.parameters +# ^ variable.parameter.function +# ^ punctuation.separator.parameters +# ^ variable.parameter.function +# ^ keyword.operator.assignment +# ^ constant +# ^ punctuation.separator.parameters +# ^ variable.parameter.function +# ^ punctuation.separator.parameters +# ^^ variable.parameter.function +# ^ punctuation.definition.function.begin diff --git a/spec/fixtures/grammar/syntax_test_python_typing.py b/spec/fixtures/grammar/syntax_test_python_typing.py new file mode 100644 index 0000000..7721c28 --- /dev/null +++ b/spec/fixtures/grammar/syntax_test_python_typing.py @@ -0,0 +1,23 @@ +# SYNTAX TEST 
"source.python" + + +def right_hand_split( +# <- storage.type.function +# ^^^^^^^^^^^^^^^^ entity.name.function +# ^ punctuation.definition.parameters.begin + line: Line, py36: bool = False, omit: Collection[LeafID] = () +# ^^^^ variable.parameter.function +# ^ punctuation.separator +# ^^^^ storage.type +# ^ punctuation.separator.parameters +# ^^^^ variable.parameter.function +# ^ punctuation.separator +# ^^^^ storage.type +# ^ keyword.operator.assignment +# ^^^^^ constant +# ^ punctuation.separator.parameters +# ^^^^ variable.parameter.function +# ^ punctuation.separator +) -> Iterator[Line]: +# ^ punctuation.definition.function.begin + pass diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 9a8939a..eef08ed 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -1,3 +1,6 @@ +path = require 'path' +grammarTest = require 'atom-grammar-test' + describe "Python grammar", -> grammar = null @@ -729,18 +732,9 @@ describe "Python grammar", -> expect(tokens[10]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] expect(tokens[11]).toEqual value: ' x ', scopes: ['source.python'] - it "tokenizes lambdas without arguments", -> - {tokens} = grammar.tokenizeLine "lambda: None" - expect(tokens[0]).toEqual value: 'lambda', scopes: ['source.python', 'meta.function.inline.python', 'storage.type.function.inline.python'] - expect(tokens[1]).toEqual value: ':', scopes: ['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] - - it "does not tokenizes a variable name ending with lambda as a lambda", -> - {tokens} = grammar.tokenizeLine "not_a_lambda.foo" - expect(tokens[0]).toEqual value: 'not_a_lambda', scopes: ['source.python', 'variable.other.object.python'] - - it "does not tokenizes a variable name starting with lambda as a lambda", -> - {tokens} = grammar.tokenizeLine "lambda_not.foo" - expect(tokens[0]).toEqual value: 'lambda_not', scopes: ['source.python', 'variable.other.object.python'] + # Add the grammar test fixtures + grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python.py') + grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_typing.py') describe "SQL highlighting", -> beforeEach -> From 3eb38d7b6c314042313afec3ff790b73c004f714 Mon Sep 17 00:00:00 2001 From: Kevin Stone Date: Sun, 22 Apr 2018 14:00:21 -0700 Subject: [PATCH 215/291] Fix missing vararg and keyword arg syntax support --- grammars/python.cson | 32 ++++++++++++++------- spec/fixtures/grammar/syntax_test_python.py | 4 +++ 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index c2feb23..dc84094 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -289,15 +289,17 @@ { # param = 3 # param: int = 3 - 'begin': '\\b([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?\\s*(=)\\s*' + 'begin': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?\\s*(=)\\s*' 'beginCaptures': '1': - 'name': 'variable.parameter.function.python' + 'name': 'keyword.operator.unpacking.arguments.python' '2': - 'name': 'punctuation.separator.python' + 'name': 'variable.parameter.function.python' '3': - 'name': 'storage.type.python' + 'name': 'punctuation.separator.python' '4': + 'name': 'storage.type.python' + '5': 'name': 'keyword.operator.assignment.python' 'end': '(?!\\G)' 'patterns': [ @@ -309,13 +311,15 @@ { # param # param: int - 'match': '\\b([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?' 
+ 'match': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\s*(?:(:)\\s*([a-zA-Z_][\\w_]*))?' 'captures': '1': - 'name': 'variable.parameter.function.python' + 'name': 'keyword.operator.unpacking.arguments.python' '2': - 'name': 'punctuation.separator.python' + 'name': 'variable.parameter.function.python' '3': + 'name': 'punctuation.separator.python' + '4': 'name': 'storage.type.python' } { @@ -352,11 +356,13 @@ 'patterns': [ { # param = 3 - 'begin': '\\b([a-zA-Z_][\\w_]*)\\s*(=)\\s*' + 'begin': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\s*(=)\\s*' 'beginCaptures': '1': - 'name': 'variable.parameter.function.python' + 'name': 'keyword.operator.unpacking.arguments.python' '2': + 'name': 'variable.parameter.function.python' + '3': 'name': 'keyword.operator.assignment.python' 'end': '(?!\\G)' 'patterns': [ @@ -367,8 +373,12 @@ } { # param - 'match': '\\b([a-zA-Z_][\\w_]*)\\b' - 'name': 'variable.parameter.function.python' + 'match': '(?:(\\*{0,2})|\\b)([a-zA-Z_][\\w_]*)\\b' + 'captures': + '1': + 'name': 'keyword.operator.unpacking.arguments.python' + '2': + 'name': 'variable.parameter.function.python' } { 'match': ',' diff --git a/spec/fixtures/grammar/syntax_test_python.py b/spec/fixtures/grammar/syntax_test_python.py index 528176a..55935da 100644 --- a/spec/fixtures/grammar/syntax_test_python.py +++ b/spec/fixtures/grammar/syntax_test_python.py @@ -11,8 +11,10 @@ def my_func(first, second=False, *third, **forth): # ^ keyword.operator.assignment # ^^^^^ constant # ^ punctuation.separator.parameters +# ^ keyword.operator.unpacking.arguments # ^^^^^ variable.parameter.function # ^ punctuation.separator.parameters +# ^^ keyword.operator.unpacking.arguments # ^^^^^ variable.parameter.function # ^ punctuation.definition.function.begin pass @@ -28,7 +30,9 @@ def my_func(first, second=False, *third, **forth): # ^ keyword.operator.assignment # ^ constant # ^ punctuation.separator.parameters +# ^ keyword.operator.unpacking.arguments # ^ variable.parameter.function # ^ punctuation.separator.parameters +# ^^ keyword.operator.unpacking.arguments # ^^ variable.parameter.function # ^ punctuation.definition.function.begin From 66219b2f491f229c78ca24265a2df1c178a71b27 Mon Sep 17 00:00:00 2001 From: Kevin Stone Date: Sun, 22 Apr 2018 14:26:16 -0700 Subject: [PATCH 216/291] Migrated lambda specs to atom-grammar-tests --- spec/fixtures/grammar/syntax_test_python.py | 18 ----------- .../grammar/syntax_test_python_lambdas.py | 32 +++++++++++++++++++ spec/python-spec.coffee | 13 +------- 3 files changed, 33 insertions(+), 30 deletions(-) create mode 100644 spec/fixtures/grammar/syntax_test_python_lambdas.py diff --git a/spec/fixtures/grammar/syntax_test_python.py b/spec/fixtures/grammar/syntax_test_python.py index 55935da..d1674c4 100644 --- a/spec/fixtures/grammar/syntax_test_python.py +++ b/spec/fixtures/grammar/syntax_test_python.py @@ -18,21 +18,3 @@ def my_func(first, second=False, *third, **forth): # ^^^^^ variable.parameter.function # ^ punctuation.definition.function.begin pass - - -my_func2 = lambda x, y=2, *z, **kw: x + y + 1 -# ^ keyword.operator.assignment -# ^^^^^ meta.function.inline storage.type.function.inline -# ^^^^^^^^^^^^^^^^ meta.function.inline.parameters -# ^ variable.parameter.function -# ^ punctuation.separator.parameters -# ^ variable.parameter.function -# ^ keyword.operator.assignment -# ^ constant -# ^ punctuation.separator.parameters -# ^ keyword.operator.unpacking.arguments -# ^ variable.parameter.function -# ^ punctuation.separator.parameters -# ^^ keyword.operator.unpacking.arguments -# ^^ 
variable.parameter.function -# ^ punctuation.definition.function.begin diff --git a/spec/fixtures/grammar/syntax_test_python_lambdas.py b/spec/fixtures/grammar/syntax_test_python_lambdas.py new file mode 100644 index 0000000..b1f0644 --- /dev/null +++ b/spec/fixtures/grammar/syntax_test_python_lambdas.py @@ -0,0 +1,32 @@ +# SYNTAX TEST "source.python" + + +my_func2 = lambda x, y=2, *z, **kw: x + y + 1 +# ^ keyword.operator.assignment +# ^^^^^ meta.function.inline storage.type.function.inline +# ^^^^^^^^^^^^^^^^ meta.function.inline.parameters +# ^ variable.parameter.function +# ^ punctuation.separator.parameters +# ^ variable.parameter.function +# ^ keyword.operator.assignment +# ^ constant +# ^ punctuation.separator.parameters +# ^ keyword.operator.unpacking.arguments +# ^ variable.parameter.function +# ^ punctuation.separator.parameters +# ^^ keyword.operator.unpacking.arguments +# ^^ variable.parameter.function +# ^ punctuation.definition.function.begin + + +lambda x, z = 4: x * z +# <- source.python meta.function.inline.python storage.type.function.inline.python +# ^ source.python meta.function.inline.python +# ^ source.python meta.function.inline.python meta.function.inline.parameters.python variable.parameter.function.python +# ^ source.python meta.function.inline.python meta.function.inline.parameters.python punctuation.separator.parameters.python +# ^ source.python meta.function.inline.python +# ^ source.python meta.function.inline.python meta.function.inline.parameters.python variable.parameter.function.python +# ^ source.python meta.function.inline.python meta.function.inline.parameters.python keyword.operator.assignment.python +# ^ source.python meta.function.inline.python meta.function.inline.parameters.python constant.numeric.integer.decimal.python +# ^ source.python meta.function.inline.python punctuation.definition.function.begin.python +# ^^^^^^ source.python diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index eef08ed..ec13264 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -720,20 +720,9 @@ describe "Python grammar", -> expect(tokens[22]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] expect(tokens[23]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] - it "tokenizes lambdas", -> - {tokens} = grammar.tokenizeLine "lambda x, z = 4: x * z" - - expect(tokens[0]).toEqual value: 'lambda', scopes: ['source.python', 'meta.function.inline.python', 'storage.type.function.inline.python'] - expect(tokens[2]).toEqual value: 'x', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'variable.parameter.function.python'] - expect(tokens[3]).toEqual value: ',', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'punctuation.separator.parameters.python'] - expect(tokens[5]).toEqual value: 'z', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'variable.parameter.function.python'] - expect(tokens[7]).toEqual value: '=', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'keyword.operator.assignment.python'] - expect(tokens[9]).toEqual value: '4', scopes: ['source.python', 'meta.function.inline.python', 'meta.function.inline.parameters.python', 'constant.numeric.integer.decimal.python'] - expect(tokens[10]).toEqual value: ':', scopes: 
['source.python', 'meta.function.inline.python', 'punctuation.definition.function.begin.python'] - expect(tokens[11]).toEqual value: ' x ', scopes: ['source.python'] - # Add the grammar test fixtures grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python.py') + grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_lambdas.py') grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_typing.py') describe "SQL highlighting", -> From f129be1c615e3e5853e9421573e7161e02494fd2 Mon Sep 17 00:00:00 2001 From: Kevin Stone Date: Sun, 22 Apr 2018 14:41:12 -0700 Subject: [PATCH 217/291] Migrated function syntax checks to atom-grammar-tests --- spec/fixtures/grammar/syntax_test_python.py | 12 +-- .../grammar/syntax_test_python_functions.py | 88 +++++++++++++++++++ .../grammar/syntax_test_python_lambdas.py | 31 +++---- spec/python-spec.coffee | 78 +--------------- 4 files changed, 105 insertions(+), 104 deletions(-) create mode 100644 spec/fixtures/grammar/syntax_test_python_functions.py diff --git a/spec/fixtures/grammar/syntax_test_python.py b/spec/fixtures/grammar/syntax_test_python.py index d1674c4..b3532f0 100644 --- a/spec/fixtures/grammar/syntax_test_python.py +++ b/spec/fixtures/grammar/syntax_test_python.py @@ -5,16 +5,10 @@ def my_func(first, second=False, *third, **forth): # <- storage.type.function # ^^^^^^^ entity.name.function # ^ punctuation.definition.parameters.begin -# ^^^^^ variable.parameter.function -# ^ punctuation.separator.parameters -# ^^^^^^ variable.parameter.function +# ^^^^^ ^^^^^^ ^^^^^ ^^^^^ variable.parameter.function +# ^ ^ ^ punctuation.separator.parameters # ^ keyword.operator.assignment # ^^^^^ constant -# ^ punctuation.separator.parameters -# ^ keyword.operator.unpacking.arguments -# ^^^^^ variable.parameter.function -# ^ punctuation.separator.parameters -# ^^ keyword.operator.unpacking.arguments -# ^^^^^ variable.parameter.function +# ^ ^^ keyword.operator.unpacking.arguments # ^ punctuation.definition.function.begin pass diff --git a/spec/fixtures/grammar/syntax_test_python_functions.py b/spec/fixtures/grammar/syntax_test_python_functions.py new file mode 100644 index 0000000..a22c066 --- /dev/null +++ b/spec/fixtures/grammar/syntax_test_python_functions.py @@ -0,0 +1,88 @@ +# SYNTAX TEST "source.python" + + +# it "tokenizes async function definitions" +async def test(param): +# <- meta.function.python storage.modifier.async.python +# ^^^ storage.type.function.python +# ^^^^ entity.name.function.python + pass + + +# it "tokenizes comments inside function parameters" +def test(arg, # comment') +# <- meta.function.python storage.type.function.python +# ^^^^ entity.name.function.python +# ^ punctuation.definition.parameters.begin.python +# ^^^^^^^^^^^^^^^^ meta.function.parameters.python +# ^^^ variable.parameter.function.python +# ^ punctuation.separator.parameters.python +# ^ comment.line.number-sign.python punctuation.definition.comment.python +# ^^^^^^^ comment.line.number-sign.python + ): + pass + + +def __init__( +# <- meta.function.python storage.type.function.python +# ^^^^^^^^ entity.name.function.python support.function.magic.python +# ^ punctuation.definition.parameters.begin.python + self, +# ^^^^^ meta.function.parameters.python +# ^^^^ variable.parameter.function.python +# ^ punctuation.separator.parameters.python + codec, # comment +# ^^^^^^^^^^^^^^^^ meta.function.parameters.python +# ^^^^^ variable.parameter.function.python +# ^ punctuation.separator.parameters.python +# ^ comment.line.number-sign.python 
punctuation.definition.comment.python +# ^^^^^^^ comment.line.number-sign.python + config +# ^^^^^^ meta.function.parameters.python variable.parameter.function.python +# >> meta.function.python +): +# <- punctuation.definition.parameters.end.python +#^ punctuation.definition.function.begin.python + pass + + +# it "tokenizes a function definition with annotations" +def f(a: None, b: int = 3) -> int: +# <- meta.function.python storage.type.function.python +# ^ entity.name.function.python +# ^ punctuation.definition.parameters.begin.python +# ^^^^^^^^^^^^^^^^^^^ meta.function.parameters.python +# ^ variable.parameter.function.python +# ^ punctuation.separator.python +# ^^^^ storage.type.python +# ^ punctuation.separator.parameters.python +# ^ variable.parameter.function.python +# ^ punctuation.separator.python +# ^^^ storage.type.python +# ^ keyword.operator.assignment.python +# ^ constant.numeric.integer.decimal.python +# ^ punctuation.definition.parameters.end.python +# ^^ keyword.operator.function-annotation.python +# ^^^ storage.type.python +# ^ punctuation.definition.function.begin.python + pass + + +# it "tokenizes complex function calls" +torch.nn.BCELoss()(Variable(bayes_optimal_prob, 1, requires_grad=False), Yvar).data[0] +# ^^^^^^^^^ meta.method-call.python +# ^^^^^^^ entity.name.function.python +# ^ punctuation.definition.arguments.begin.bracket.round.python +# ^ punctuation.definition.arguments.end.bracket.round.python +# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ meta.function-call.python +# ^ punctuation.definition.arguments.begin.bracket.round.python +# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ meta.function-call.arguments.python +# ^^^^^^^^ entity.name.function.python +# ^ punctuation.definition.arguments.begin.bracket.round.python +# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ meta.function-call.arguments.python +# ^^^^^^^^^^^^^ variable.parameter.function.python +# ^^^^^ constant.language.python +# ^ punctuation.definition.arguments.end.bracket.round.python +# ^ punctuation.separator.arguments.python +# ^ punctuation.definition.arguments.end.bracket.round.python +# ^ punctuation.separator.property.period.python diff --git a/spec/fixtures/grammar/syntax_test_python_lambdas.py b/spec/fixtures/grammar/syntax_test_python_lambdas.py index b1f0644..e5950d7 100644 --- a/spec/fixtures/grammar/syntax_test_python_lambdas.py +++ b/spec/fixtures/grammar/syntax_test_python_lambdas.py @@ -3,30 +3,25 @@ my_func2 = lambda x, y=2, *z, **kw: x + y + 1 # ^ keyword.operator.assignment -# ^^^^^ meta.function.inline storage.type.function.inline +# ^^^^^^^^^^^^^^^^^^^^^^^ meta.function.inline +# ^^^^^ storage.type.function.inline # ^^^^^^^^^^^^^^^^ meta.function.inline.parameters -# ^ variable.parameter.function -# ^ punctuation.separator.parameters +# ^ ^ ^ ^^ variable.parameter.function +# ^ ^ ^ punctuation.separator.parameters # ^ variable.parameter.function # ^ keyword.operator.assignment # ^ constant -# ^ punctuation.separator.parameters -# ^ keyword.operator.unpacking.arguments +# ^ ^^ keyword.operator.unpacking.arguments # ^ variable.parameter.function -# ^ punctuation.separator.parameters -# ^^ keyword.operator.unpacking.arguments -# ^^ variable.parameter.function # ^ punctuation.definition.function.begin lambda x, z = 4: x * z -# <- source.python meta.function.inline.python storage.type.function.inline.python -# ^ source.python meta.function.inline.python -# ^ source.python meta.function.inline.python meta.function.inline.parameters.python 
variable.parameter.function.python -# ^ source.python meta.function.inline.python meta.function.inline.parameters.python punctuation.separator.parameters.python -# ^ source.python meta.function.inline.python -# ^ source.python meta.function.inline.python meta.function.inline.parameters.python variable.parameter.function.python -# ^ source.python meta.function.inline.python meta.function.inline.parameters.python keyword.operator.assignment.python -# ^ source.python meta.function.inline.python meta.function.inline.parameters.python constant.numeric.integer.decimal.python -# ^ source.python meta.function.inline.python punctuation.definition.function.begin.python -# ^^^^^^ source.python +# ^^^^^^^^^^^^^ meta.function.inline.python +# <- storage.type.function.inline.python +# ^^^^^^^^ meta.function.inline.parameters.python +# ^ ^ variable.parameter.function.python +# ^ punctuation.separator.parameters.python +# ^ keyword.operator.assignment.python +# ^ constant.numeric.integer.decimal.python +# ^ punctuation.definition.function.begin.python diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index ec13264..3f5eeba 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -643,85 +643,9 @@ describe "Python grammar", -> expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python'] - it "tokenizes async function definitions", -> - {tokens} = grammar.tokenizeLine 'async def test(param):' - - expect(tokens[0]).toEqual value: 'async', scopes: ['source.python', 'meta.function.python', 'storage.modifier.async.python'] - expect(tokens[1]).toEqual value: ' ', scopes: ['source.python', 'meta.function.python'] - expect(tokens[2]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[4]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] - - it "tokenizes comments inside function parameters", -> - {tokens} = grammar.tokenizeLine('def test(arg, # comment') - - expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] - expect(tokens[3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] - expect(tokens[4]).toEqual value: 'arg', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[5]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] - expect(tokens[7]).toEqual value: '#', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[8]).toEqual value: ' comment', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python'] - - tokens = grammar.tokenizeLines(""" - def __init__( - self, - codec, # comment - config - ): - """) - - expect(tokens[0][0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[0][2]).toEqual value: '__init__', scopes: 
['source.python', 'meta.function.python', 'entity.name.function.python', 'support.function.magic.python'] - expect(tokens[0][3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] - expect(tokens[1][1]).toEqual value: 'self', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[1][2]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] - expect(tokens[2][1]).toEqual value: 'codec', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[2][2]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] - expect(tokens[2][4]).toEqual value: '#', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python'] - expect(tokens[2][5]).toEqual value: ' comment', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'comment.line.number-sign.python'] - expect(tokens[3][1]).toEqual value: 'config', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] - expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.function.begin.python'] - - it "tokenizes a function definition with annotations", -> - {tokens} = grammar.tokenizeLine 'def f(a: None, b: int = 3) -> int:' - - expect(tokens[0]).toEqual value: 'def', scopes: ['source.python', 'meta.function.python', 'storage.type.function.python'] - expect(tokens[2]).toEqual value: 'f', scopes: ['source.python', 'meta.function.python', 'entity.name.function.python'] - expect(tokens[3]).toEqual value: '(', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.begin.python'] - expect(tokens[4]).toEqual value: 'a', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[5]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.python'] - expect(tokens[7]).toEqual value: 'None', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'storage.type.python'] - expect(tokens[8]).toEqual value: ',', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.parameters.python'] - expect(tokens[10]).toEqual value: 'b', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'variable.parameter.function.python'] - expect(tokens[11]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'punctuation.separator.python'] - expect(tokens[13]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'storage.type.python'] - expect(tokens[15]).toEqual value: '=', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'keyword.operator.assignment.python'] - 
expect(tokens[17]).toEqual value: '3', scopes: ['source.python', 'meta.function.python', 'meta.function.parameters.python', 'constant.numeric.integer.decimal.python'] - expect(tokens[18]).toEqual value: ')', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.parameters.end.python'] - expect(tokens[20]).toEqual value: '->', scopes: ['source.python', 'meta.function.python', 'keyword.operator.function-annotation.python'] - expect(tokens[22]).toEqual value: 'int', scopes: ['source.python', 'meta.function.python', 'storage.type.python'] - expect(tokens[23]).toEqual value: ':', scopes: ['source.python', 'meta.function.python', 'punctuation.definition.function.begin.python'] - - it "tokenizes complex function calls", -> - {tokens} = grammar.tokenizeLine "torch.nn.BCELoss()(Variable(bayes_optimal_prob, 1, requires_grad=False), Yvar).data[0]" - - expect(tokens[4]).toEqual value: 'BCELoss', scopes: ['source.python', 'meta.method-call.python', 'entity.name.function.python'] - expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] - expect(tokens[6]).toEqual value: ')', scopes: ['source.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] - expect(tokens[7]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] - expect(tokens[8]).toEqual value: 'Variable', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'entity.name.function.python'] - expect(tokens[9]).toEqual value: '(', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.begin.bracket.round.python'] - expect(tokens[10]).toEqual value: 'bayes_optimal_prob', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python'] - expect(tokens[16]).toEqual value: 'requires_grad', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'variable.parameter.function.python'] - expect(tokens[18]).toEqual value: 'False', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'constant.language.python'] - expect(tokens[19]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] - expect(tokens[20]).toEqual value: ',', scopes: ['source.python', 'meta.function-call.python', 'meta.function-call.arguments.python', 'punctuation.separator.arguments.python'] - expect(tokens[22]).toEqual value: ')', scopes: ['source.python', 'meta.function-call.python', 'punctuation.definition.arguments.end.bracket.round.python'] - expect(tokens[23]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python'] - # Add the grammar test fixtures grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python.py') + grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_functions.py') grammarTest path.join(__dirname, 
'fixtures/grammar/syntax_test_python_lambdas.py') grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_typing.py') From 007c4b516024226a311f494d2a844cfdeed34455 Mon Sep 17 00:00:00 2001 From: Kevin Stone Date: Sat, 28 Apr 2018 18:19:15 -0700 Subject: [PATCH 218/291] Upgraded the lambda tests to match master/HEAD --- .../fixtures/grammar/syntax_test_python_lambdas.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/spec/fixtures/grammar/syntax_test_python_lambdas.py b/spec/fixtures/grammar/syntax_test_python_lambdas.py index e5950d7..ffdcfcd 100644 --- a/spec/fixtures/grammar/syntax_test_python_lambdas.py +++ b/spec/fixtures/grammar/syntax_test_python_lambdas.py @@ -25,3 +25,17 @@ # ^ keyword.operator.assignment.python # ^ constant.numeric.integer.decimal.python # ^ punctuation.definition.function.begin.python + + +lambda: None +# ^^^^ meta.function.inline.python +# <- storage.type.function.inline.python +# ^ punctuation.definition.function.begin.python + + +not_a_lambda.foo +# <- ! meta.function.inline.python + + +lambda_not.foo +# <- ! meta.function.inline.python From c5d3d9ca9e9494fca6b539da78ac4f028ab23708 Mon Sep 17 00:00:00 2001 From: "Creech (Intern)" Date: Thu, 31 May 2018 10:42:51 -0400 Subject: [PATCH 219/291] 'lcie' prefix was 'else condition' when it should have been 'else value' --- snippets/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 6962bb7..30e2110 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -94,7 +94,7 @@ 'body': '[${1:value} for ${2:value} in ${3:variable}]' 'List Comprehension If Else': 'prefix': 'lcie' - 'body': '[${1:value} if ${2:condition} else ${3:condition} for ${4:value} in ${5:variable}]' + 'body': '[${1:value} if ${2:condition} else ${3:value} for ${4:value} in ${5:variable}]' 'Dictionary Comprehension': 'prefix': 'dc' 'body': '{${1:key}: ${2:value} for ${3:key}, ${4:value} in ${5:variable}}' From 3db6a65a687d6ff35395669e6ed619cf4c9a0ab6 Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Thu, 31 May 2018 13:04:19 -0400 Subject: [PATCH 220/291] Use Visual Studio 2015 on Appveyor --- appveyor.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/appveyor.yml b/appveyor.yml index 2b0fde4..7d07d05 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,5 +1,7 @@ version: "{build}" +image: Visual Studio 2015 + platform: x64 branches: From 43ba8b655ab436e643b6a61004cda0933831024f Mon Sep 17 00:00:00 2001 From: Wliu <50Wliu@users.noreply.github.com> Date: Mon, 11 Jun 2018 09:49:54 -0400 Subject: [PATCH 221/291] Prepare 0.50.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d8d1107..19e5ce3 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.49.5", + "version": "0.50.0", "engines": { "atom": "*", "node": "*" From d6942972a506a5d979a84a81ca5cbfeb1b56c111 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 18 Jun 2018 11:51:58 -0700 Subject: [PATCH 222/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 19e5ce3..117b0de 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.11.3" + "tree-sitter-python": "^0.12.0" }, "devDependencies": { "coffeelint": 
"^1.10.1" From f6beecff7c2943ffc928ae56070f39dd7ea195a1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 18 Jun 2018 11:52:03 -0700 Subject: [PATCH 223/291] Prepare 0.50.1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 117b0de..ace3bee 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.50.0", + "version": "0.50.1", "engines": { "atom": "*", "node": "*" From aaaafdb77c446a00c6e7a48bb66ae422c87115c6 Mon Sep 17 00:00:00 2001 From: Pieter Goetschalckx <3.14.e.ter@gmail.com> Date: Wed, 27 Jun 2018 17:36:10 +0200 Subject: [PATCH 224/291] Add missing keywords to tree-sitter-python --- grammars/tree-sitter-python.cson | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index ee5db99..9f238c5 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -75,6 +75,9 @@ scopes: '"def"': 'storage.type.function' '"lambda"': 'storage.type.function' + '"global"': 'storage.modifier.global' + '"nonlocal"': 'storage.modifier.nonlocal' + '"if"': 'keyword.control' '"else"': 'keyword.control' '"elif"': 'keyword.control' @@ -83,6 +86,7 @@ scopes: '"return"': 'keyword.control' '"break"': 'keyword.control' '"continue"': 'keyword.control' + '"pass"': 'keyword.control' '"raise"': 'keyword.control' '"yield"': 'keyword.control' '"await"': 'keyword.control' @@ -94,8 +98,11 @@ scopes: '"finally"': 'keyword.control' '"import"': 'keyword.control' '"from"': 'keyword.control' - '"print"': 'keyword.control' - '"assert"': 'keyword.control' + + '"print"': 'keyword.other' + '"assert"': 'keyword.other' + '"exec"': 'keyword.other' + '"del"': 'keyword.other' '"+"': 'keyword.operator' '"-"': 'keyword.operator' From f0314f098517e1c1c07601a3cb5affcb61436bf1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 18 Jul 2018 16:19:14 -0700 Subject: [PATCH 225/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ace3bee..78ab534 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.12.0" + "tree-sitter-python": "^0.13.0" }, "devDependencies": { "coffeelint": "^1.10.1" From dc380a47243e9585af2171935bd0d62334f853cd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 18 Jul 2018 16:19:22 -0700 Subject: [PATCH 226/291] Prepare 0.51.0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 78ab534..0fdd80c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.50.1", + "version": "0.51.0", "engines": { "atom": "*", "node": "*" From 3d8eeeb2328e8e5755b20d8bea623883be3e3f3c Mon Sep 17 00:00:00 2001 From: AmyShackles Date: Sat, 21 Jul 2018 22:47:30 -0700 Subject: [PATCH 227/291] Removed 'self' snippet --- snippets/language-python.cson | 4 ---- 1 file changed, 4 deletions(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 30e2110..bedd86f 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -119,7 +119,3 @@ 'if __name__ == \'__main__\'': 'prefix': 'ifmain' 'body': 'if __name__ == \'__main__\':\n\t${1:main()}$0' -'.source.python:not(.string)': - 'self': - 'prefix': '.' - 'body': 'self.' 
From 4241f2664a86dd61302e8f1a05a63c70da05f8eb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Jul 2018 11:59:07 -0700 Subject: [PATCH 228/291] :arrow_up: tree-sitter-python, highlight escape sequences --- grammars/tree-sitter-python.cson | 4 ++++ package.json | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 9f238c5..be5cd11 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -53,6 +53,10 @@ scopes: 'comment': 'comment.line' 'string': 'string.quoted' + 'escape_sequence': 'constant.character.escape' + 'interpolation': 'meta.embedded' + 'interpolation > "{"': 'punctuation.section.embedded' + 'interpolation > "}"': 'punctuation.section.embedded' 'class_definition > identifier': 'entity.name.type.class' 'function_definition > identifier': 'entity.name.function' diff --git a/package.json b/package.json index 0fdd80c..fb274bf 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.13.0" + "tree-sitter-python": "^0.13.1" }, "devDependencies": { "coffeelint": "^1.10.1" From 4cc2994c46fa5849ad7f7e71ca6ef9d0802a589b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Jul 2018 11:59:25 -0700 Subject: [PATCH 229/291] Prepare 0.51.1 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index fb274bf..fd06b7d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.0", + "version": "0.51.1", "engines": { "atom": "*", "node": "*" From 07bf91515bdf75f3c399bccb7692a436f5cf2aa9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Jul 2018 13:35:22 -0700 Subject: [PATCH 230/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index fd06b7d..fc672fe 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.13.1" + "tree-sitter-python": "^0.13.3" }, "devDependencies": { "coffeelint": "^1.10.1" From 348ec0537676200ffb85204f47f96ca78f34bd23 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 31 Jul 2018 12:41:50 -0700 Subject: [PATCH 231/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index fc672fe..be28f3b 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.13.3" + "tree-sitter-python": "^0.13.4" }, "devDependencies": { "coffeelint": "^1.10.1" From 3e26b67f6c840823536f5b1ac726d9d92f662e29 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 31 Jul 2018 12:43:11 -0700 Subject: [PATCH 232/291] Prepare 0.51.2 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index be28f3b..010860b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.1", + "version": "0.51.2", "engines": { "atom": "*", "node": "*" From 9febb29afd78b50f180d4b6271ef776d93f0f661 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Aug 2018 14:40:24 -0700 Subject: [PATCH 233/291] Use new tree-sitter grammar regex --- grammars/tree-sitter-python.cson | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git 
a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index be5cd11..8ea2df4 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -4,6 +4,14 @@ type: 'tree-sitter' parser: 'tree-sitter-python' legacyScopeName: 'source.python' +firstLineRegex: [ + # shebang line + '^#!.*\\b(python)\\r?\\n' + + # vim modeline + 'vim\\b.*\\bset\\b.*\\b(filetype|ft|syntax)=python' +] + fileTypes: [ 'py' 'pyi' From 09159c6a21be6675a0878d839c2dab6ad621612b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Aug 2018 14:40:28 -0700 Subject: [PATCH 234/291] Prepare 0.51.3 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 010860b..a2d0aab 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.2", + "version": "0.51.3", "engines": { "atom": "*", "node": "*" From ae80d779fae54685d79bfcee3dbc53b3235e072b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Aug 2018 11:14:31 -0700 Subject: [PATCH 235/291] Replace id and legacyScopeName with scopeName --- grammars/tree-sitter-python.cson | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 8ea2df4..974174c 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -1,8 +1,7 @@ -id: 'python' name: 'Python' +scopeName: 'source.python' type: 'tree-sitter' parser: 'tree-sitter-python' -legacyScopeName: 'source.python' firstLineRegex: [ # shebang line From dd148532dcca3185f306daf484d71147178ec0ba Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Aug 2018 11:19:12 -0700 Subject: [PATCH 236/291] Prepare 0.51.4-0 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index a2d0aab..8a0747e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.3", + "version": "0.51.4-0", "engines": { "atom": "*", "node": "*" From b460357555af775bd6ce517015a750759dcdbdf4 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Aug 2018 13:23:38 -0700 Subject: [PATCH 237/291] Prepare 0.51.4 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8a0747e..62b8462 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.4-0", + "version": "0.51.4", "engines": { "atom": "*", "node": "*" From 7f89dc3ebe2a425dcf136e96652eecdb5ce88096 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 24 Aug 2018 09:16:06 -0700 Subject: [PATCH 238/291] Explicitly disable tree-sitter for textmate grammar specs --- spec/language-python-spec.coffee | 2 ++ spec/python-regex-spec.coffee | 2 ++ spec/python-spec.coffee | 2 ++ 3 files changed, 6 insertions(+) diff --git a/spec/language-python-spec.coffee b/spec/language-python-spec.coffee index e5b431e..e21fb82 100644 --- a/spec/language-python-spec.coffee +++ b/spec/language-python-spec.coffee @@ -5,6 +5,8 @@ describe 'Python settings', -> editor.destroy() beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + waitsForPromise -> atom.workspace.open().then (o) -> editor = o diff --git a/spec/python-regex-spec.coffee b/spec/python-regex-spec.coffee index cb11eee..f2f2ae5 100644 --- a/spec/python-regex-spec.coffee +++ b/spec/python-regex-spec.coffee @@ -2,6 +2,8 @@ describe 'Python regular expression 
grammar', -> grammar = null beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + waitsForPromise -> atom.packages.activatePackage('language-python') diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 3f5eeba..c1851d0 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -5,6 +5,8 @@ describe "Python grammar", -> grammar = null beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + waitsForPromise -> atom.packages.activatePackage("language-python") From 50aa96b504a8c8ce24609c2b7ad1c4bc20e6e9d2 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 24 Aug 2018 09:16:25 -0700 Subject: [PATCH 239/291] Prepare 0.51.5 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 62b8462..d273160 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.4", + "version": "0.51.5", "engines": { "atom": "*", "node": "*" From c78d2c159fe02deb52713963456319e613096d7f Mon Sep 17 00:00:00 2001 From: Kyle Barron Date: Tue, 11 Sep 2018 18:52:46 -0400 Subject: [PATCH 240/291] Add foldEndPattern --- settings/language-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/settings/language-python.cson b/settings/language-python.cson index 001e981..1d89625 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -4,5 +4,6 @@ 'softTabs': true 'tabLength': 4 'commentStart': '# ' + 'foldEndPattern': '^\\s*\\}|^\\s*\\]|^\\s*\\)' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while|async\\s+(def|for|with))\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' From bc05c8473d66b6bb88b722f9c914e062dd3b4174 Mon Sep 17 00:00:00 2001 From: Benjamin Gray Date: Thu, 20 Sep 2018 21:49:31 +1000 Subject: [PATCH 241/291] Escape regex properly --- grammars/regular expressions (python).cson | 4 ++++ spec/python-regex-spec.coffee | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/grammars/regular expressions (python).cson b/grammars/regular expressions (python).cson index fabcd80..18d1438 100644 --- a/grammars/regular expressions (python).cson +++ b/grammars/regular expressions (python).cson @@ -14,6 +14,10 @@ 'match': '\\\\[1-9][0-9]?' 'name': 'keyword.other.back-reference.regexp' } + { + 'match': '\\\\.' + 'name': 'constant.character.escape.backslash.regexp' + } { 'match': '[?+*][?+]?|\\{(\\d+,\\d+|\\d+,|,\\d+|\\d+)\\}\\??' 
'name': 'keyword.operator.quantifier.regexp' diff --git a/spec/python-regex-spec.coffee b/spec/python-regex-spec.coffee index f2f2ae5..d1cb81a 100644 --- a/spec/python-regex-spec.coffee +++ b/spec/python-regex-spec.coffee @@ -30,3 +30,23 @@ describe 'Python regular expression grammar', -> expect(tokens[1]).toEqual value: '^', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'keyword.operator.negation.regexp'] expect(tokens[2]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp'] expect(tokens[3]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp'] + + it 'escapes the character following any backslash', -> + {tokens} = grammar.tokenizeLine '''\\q\\(\\[\\'\\"\\?\\^\\-\\*\\.\\#''' + expect(tokens[0]).toEqual value: '\\q', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[2]).toEqual value: '\\[', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[3]).toEqual value: '\\\'', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[4]).toEqual value: '\\"', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[5]).toEqual value: '\\?', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[6]).toEqual value: '\\^', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[7]).toEqual value: '\\-', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[8]).toEqual value: '\\*', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[9]).toEqual value: '\\.', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + expect(tokens[10]).toEqual value: '\\#', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] + + {tokens} = grammar.tokenizeLine '''(\\()\\)''' + expect(tokens[0]).toEqual value: '(', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 'meta.group.regexp', 'constant.character.escape.backslash.regexp'] + expect(tokens[2]).toEqual value: ')', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[3]).toEqual value: '\\)', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp'] From e230b399b28e72289e5bc4b480453c02eaf4d717 Mon Sep 17 00:00:00 2001 From: Jason Rudolph Date: Tue, 25 Sep 2018 14:34:27 -0400 Subject: [PATCH 242/291] :memo: Update .github --- .github/no-response.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 .github/no-response.yml diff --git a/.github/no-response.yml b/.github/no-response.yml new file mode 100644 index 0000000..3c6b33d --- /dev/null +++ b/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 180 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for 
lack of response. Set to `false` to disable. +closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. From 5e7410c43cb68d759b0b02f7bbdee898305a8adc Mon Sep 17 00:00:00 2001 From: Jason Rudolph Date: Tue, 2 Oct 2018 11:44:14 -0400 Subject: [PATCH 243/291] :memo: Update .github --- .github/no-response.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/no-response.yml b/.github/no-response.yml index 3c6b33d..1c8799d 100644 --- a/.github/no-response.yml +++ b/.github/no-response.yml @@ -1,7 +1,7 @@ # Configuration for probot-no-response - https://github.com/probot/no-response # Number of days of inactivity before an issue is closed for lack of response -daysUntilClose: 180 +daysUntilClose: 28 # Label requiring a response responseRequiredLabel: more-information-needed From d9eaf164bdb0a0febccbbf71036511848ea56a10 Mon Sep 17 00:00:00 2001 From: David Wilson Date: Wed, 17 Oct 2018 07:18:45 -0700 Subject: [PATCH 244/291] Prepare 0.51.6 release --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d273160..eb64879 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.5", + "version": "0.51.6", "engines": { "atom": "*", "node": "*" From a9bbc868ad321c297d6db5da37f1478e28f32673 Mon Sep 17 00:00:00 2001 From: Linus Eriksson Date: Mon, 29 Oct 2018 12:21:14 +0100 Subject: [PATCH 245/291] Scope operators as keyword.operator --- grammars/tree-sitter-python.cson | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 974174c..7470566 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -120,6 +120,29 @@ scopes: '"*"': 'keyword.operator' '"/"': 'keyword.operator' '"%"': 'keyword.operator' + '"**"': 'keyword.operator' + '"//"': 'keyword.operator' + '"=="': 'keyword.operator' + '"!="': 'keyword.operator' + '"<>"': 'keyword.operator' + '">"': 'keyword.operator' + '"<"': 'keyword.operator' + '">="': 'keyword.operator' + '"<="': 'keyword.operator' + '"="': 'keyword.operator' + '"+="': 'keyword.operator' + '"-="': 'keyword.operator' + '"*="': 'keyword.operator' + '"/="': 'keyword.operator' + '"%="': 'keyword.operator' + '"**="': 'keyword.operator' + '"//="': 'keyword.operator' + '"&"': 'keyword.operator' + '"|"': 'keyword.operator' + '"^"': 'keyword.operator' + '"~"': 'keyword.operator' + '"<<"': 'keyword.operator' + '">>"': 'keyword.operator' '"in"': 'keyword.operator.logical.python' '"and"': 'keyword.operator.logical.python' '"or"': 'keyword.operator.logical.python' From df3934c7b13248af1a4c213c1f7e5070b0b5310c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 29 Oct 2018 14:09:37 -0700 Subject: [PATCH 246/291] Highlight keyword argument names Refs #281 --- grammars/tree-sitter-python.cson | 2 ++ 1 file changed, 2 insertions(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 7470566..bfc6d06 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -89,6 +89,8 @@ scopes: '"global"': 'storage.modifier.global' '"nonlocal"': 'storage.modifier.nonlocal' + 
'keyword_argument > identifier:nth-child(0)': 'variable.parameter.function' + '"if"': 'keyword.control' '"else"': 'keyword.control' '"elif"': 'keyword.control' From 526f638e4f985081049ba4c8bd13c31425f70cd4 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 29 Oct 2018 14:12:40 -0700 Subject: [PATCH 247/291] 0.51.8 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index eb64879..2f327e8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.6", + "version": "0.51.8", "engines": { "atom": "*", "node": "*" From 05d9238afec45e45881963888f797800c9fbc836 Mon Sep 17 00:00:00 2001 From: Winston Liu <50Wliu@users.noreply.github.com> Date: Mon, 12 Nov 2018 23:32:26 -0500 Subject: [PATCH 248/291] Update settings/language-python.cson Co-Authored-By: kylebarron --- settings/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings/language-python.cson b/settings/language-python.cson index 1d89625..8538f10 100644 --- a/settings/language-python.cson +++ b/settings/language-python.cson @@ -4,6 +4,6 @@ 'softTabs': true 'tabLength': 4 'commentStart': '# ' - 'foldEndPattern': '^\\s*\\}|^\\s*\\]|^\\s*\\)' + 'foldEndPattern': '^\\s*[}\\])]' 'increaseIndentPattern': '^\\s*(class|def|elif|else|except|finally|for|if|try|with|while|async\\s+(def|for|with))\\b.*:\\s*$' 'decreaseIndentPattern': '^\\s*(elif|else|except|finally)\\b.*:\\s*$' From 56389716e91ee009936fe6b4e9643a256b7d54dd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 12 Nov 2018 21:47:27 -0800 Subject: [PATCH 249/291] :arrow_up: tree-sitter-python --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2f327e8..8714f04 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.13.4" + "tree-sitter-python": "^0.13.6" }, "devDependencies": { "coffeelint": "^1.10.1" From 38dd8484267376001da0d117a73e301d4e52705e Mon Sep 17 00:00:00 2001 From: Linus Eriksson Date: Tue, 13 Nov 2018 19:32:24 +0100 Subject: [PATCH 250/291] Scope built in functions as support.function --- grammars/tree-sitter-python.cson | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index bfc6d06..46a4e37 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -67,7 +67,11 @@ scopes: 'class_definition > identifier': 'entity.name.type.class' 'function_definition > identifier': 'entity.name.function' - 'call > identifier:nth-child(0)': 'entity.name.function' + 'call > identifier:nth-child(0)': [ + {match: '^(abs|all|any|ascii|bin|bool|breakpoint|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|__import__)$', + scopes: 'support.function'}, + 'entity.name.function' + ] 'call > attribute > identifier:nth-child(2)': 'entity.name.function' 'attribute > identifier:nth-child(2)': 'variable.other.object.property' From 3b2bcdd3ac92cd4adadee5ef79c65e1f27481557 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 15 Nov 2018 12:45:57 -0800 
Subject: [PATCH 251/291] Add tree-sitter keyword to package.json --- package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/package.json b/package.json index 8714f04..e437e9a 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,9 @@ "node": "*" }, "description": "Python language support in Atom", + "keywords": [ + "tree-sitter" + ], "homepage": "https://atom.github.io/language-python", "repository": { "type": "git", From 9eae0b418567c3a15a81ad71694c658d254a8d1c Mon Sep 17 00:00:00 2001 From: Linus Eriksson Date: Fri, 23 Nov 2018 20:51:32 +0100 Subject: [PATCH 252/291] Add more scopes to the tree sitter grammar Exceptions -> support.type.exception integer&float -> numeric add @ and @= operator --- grammars/tree-sitter-python.cson | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 46a4e37..1afdc38 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -74,6 +74,11 @@ scopes: ] 'call > attribute > identifier:nth-child(2)': 'entity.name.function' + 'identifier': + {match: + '^(BaseException|Exception|TypeError|StopAsyncIteration|StopIteration|ImportError|ModuleNotFoundError|OSError|ConnectionError|BrokenPipeError|ConnectionAbortedError|ConnectionRefusedError|ConnectionResetError|BlockingIOError|ChildProcessError|FileExistsError|FileNotFoundError|IsADirectoryError|NotADirectoryError|InterruptedError|PermissionError|ProcessLookupError|TimeoutError|EOFError|RuntimeError|RecursionError|NotImplementedError|NameError|UnboundLocalError|AttributeError|SyntaxError|IndentationError|TabError|LookupError|IndexError|KeyError|ValueError|UnicodeError|UnicodeEncodeError|UnicodeDecodeError|UnicodeTranslateError|AssertionError|ArithmeticError|FloatingPointError|OverflowError|ZeroDivisionError|SystemError|ReferenceError|BufferError|MemoryError|Warning|UserWarning|DeprecationWarning|PendingDeprecationWarning|SyntaxWarning|RuntimeWarning|FutureWarning|ImportWarning|UnicodeWarning|BytesWarning|ResourceWarning|GeneratorExit|SystemExit|KeyboardInterrupt)$' + scopes: 'support.type.exception'} + 'attribute > identifier:nth-child(2)': 'variable.other.object.property' 'decorator': 'entity.name.function.decorator' @@ -81,8 +86,8 @@ scopes: 'none': 'constant.language' 'true': 'constant.language' 'false': 'constant.language' - 'integer': 'constant.language' - 'float': 'constant.language' + 'integer': 'constant.numeric' + 'float': 'constant.numeric' 'type > identifier': 'support.storage.type' @@ -149,6 +154,8 @@ scopes: '"~"': 'keyword.operator' '"<<"': 'keyword.operator' '">>"': 'keyword.operator' + 'binary_operator > "@"': 'keyword.operator' + 'binary_operator > "@="': 'keyword.operator' '"in"': 'keyword.operator.logical.python' '"and"': 'keyword.operator.logical.python' '"or"': 'keyword.operator.logical.python' From 7711cdcfc8acc36f6b70cdab34374c7defd53c5c Mon Sep 17 00:00:00 2001 From: Etienne Napoleone Date: Wed, 28 Nov 2018 19:55:24 +0700 Subject: [PATCH 253/291] Fix PEP8 E301 in class snippet A blank line is required before a class methode --- snippets/language-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/language-python.cson b/snippets/language-python.cson index 30e2110..ef609e3 100644 --- a/snippets/language-python.cson +++ b/snippets/language-python.cson @@ -55,7 +55,7 @@ 'body': 'self.fail(\'${1:message}\')$0' 'New Class': 'prefix': 'class' - 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1.}"""\n\tdef 
__init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' + 'body': 'class ${1:ClassName}(${2:object}):\n\t"""${3:docstring for $1.}"""\n\n\tdef __init__(self, ${4:arg}):\n\t\t${5:super($1, self).__init__()}\n\t\tself.arg = arg\n\t\t$0' 'New Method': 'prefix': 'defs' 'body': 'def ${1:mname}(self, ${2:arg}):\n\t${3:pass}' From 1cf33988212f54a419ba5d8bfd7d72ea62edee5c Mon Sep 17 00:00:00 2001 From: David Wilson Date: Tue, 5 Feb 2019 16:38:18 -0800 Subject: [PATCH 254/291] Fix code folding for 'elif' and 'else' statements --- grammars/tree-sitter-python.cson | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 46a4e37..ff31929 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -27,7 +27,8 @@ fileTypes: [ folds: [ { type: [ - 'if_statement' + 'elif_clause' + 'else_clause' 'for_statement' 'try_statement' 'with_statement' @@ -37,6 +38,11 @@ folds: [ 'async_function_definition' ] start: {type: ':'} + }, + { + type: ['if_statement'] + start: {type: ':'} + end: {type: ['elif_clause', 'else_clause']} } { start: {type: '(', index: 0} From ad49f4a498987056b0f4fe5bbdf02dc685dde5cd Mon Sep 17 00:00:00 2001 From: David Wilson Date: Tue, 5 Feb 2019 17:15:36 -0800 Subject: [PATCH 255/291] Prepare 0.51.9 release --- package-lock.json | 188 ++++++++++++++++++++++++++++++++++++++++++++++ package.json | 2 +- 2 files changed, 189 insertions(+), 1 deletion(-) create mode 100644 package-lock.json diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..743c8e3 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,188 @@ +{ + "name": "language-python", + "version": "0.51.9", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "atom-grammar-test": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/atom-grammar-test/-/atom-grammar-test-0.6.4.tgz", + "integrity": "sha1-2KU1A9H+k5mX9Ji3SirDEARKfU4=", + "requires": { + "chevrotain": "^0.18.0", + "escape-string-regexp": "^1.0.5" + } + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "chevrotain": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-0.18.0.tgz", + "integrity": "sha1-sodxTjFZC64sXR4vYRZz7+xHnYA=" + }, + "coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "dev": true + }, + "coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "requires": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + "optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "nan": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz", + "integrity": "sha512-JY7V6lRkStKcKTvHO5NVSQRv+RV+FIL5pvDoLiAtSL9pKlC5x9PKQcZDsq7m4FO4d57mkhC6Z+QhAh3Jdk5JFw==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true + }, + "tree-sitter-python": { + "version": "0.13.6", + "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.13.6.tgz", + "integrity": 
"sha512-QGc7dNObFv5+kCIvknO+Jv9eHusgamlcxZpLkDioAK6/dZ/f+3vbn3KQ2y4PpS1qiAHaaxh2V4XgMyv6k/rS9g==", + "requires": { + "nan": "^2.4.0" + } + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } +} diff --git a/package.json b/package.json index e437e9a..b1bab16 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.8", + "version": "0.51.9", "engines": { "atom": "*", "node": "*" From 50f0fd087c78874ca60edc3c738d3ae2297fc33d Mon Sep 17 00:00:00 2001 From: Caleb Evans Date: Sat, 6 Apr 2019 12:32:54 -0700 Subject: [PATCH 256/291] Add support for python2/3 shebangs in tree-sitter grammar --- grammars/tree-sitter-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index ff31929..e95ceb7 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -5,7 +5,7 @@ parser: 'tree-sitter-python' firstLineRegex: [ # shebang line - '^#!.*\\b(python)\\r?\\n' + '^#![ \\t]*/.*\\bpython[\\d\\.]*\\b' # vim modeline 'vim\\b.*\\bset\\b.*\\b(filetype|ft|syntax)=python' From 7159e8b5b1092d73844bd69ee14434be82629f77 Mon Sep 17 00:00:00 2001 From: Caleb Evans Date: Sat, 6 Apr 2019 13:39:11 -0700 Subject: [PATCH 257/291] Tokenize formal function parameters in tree-sitter grammar --- grammars/tree-sitter-python.cson | 2 ++ 1 file changed, 2 insertions(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index ff31929..0560c96 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -99,6 +99,8 @@ scopes: '"global"': 'storage.modifier.global' '"nonlocal"': 'storage.modifier.nonlocal' + 'parameters > identifier': 'variable.parameter.function' + 'default_parameter > identifier:nth-child(0)': 'variable.parameter.function' 'keyword_argument > identifier:nth-child(0)': 'variable.parameter.function' '"if"': 'keyword.control' From c8283a90ffb4896000df053bd27ef0fee6611a2a Mon Sep 17 00:00:00 2001 From: Caleb Evans Date: Sat, 6 Apr 2019 14:04:33 -0700 Subject: [PATCH 258/291] Tokenize subclass list names in tree-sitter grammar --- grammars/tree-sitter-python.cson | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index ff31929..a99a177 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -92,6 +92,11 @@ scopes: 'type > identifier': 'support.storage.type' + 'class_definition > argument_list > attribute': 'entity.other.inherited-class' + 'class_definition > argument_list > identifier': 'entity.other.inherited-class' + 'class_definition > argument_list > keyword_argument > attribute': 'entity.other.inherited-class' + 'class_definition > argument_list > keyword_argument > identifier:nth-child(2)': 'entity.other.inherited-class' + '"class"': 'storage.type.class' '"def"': 'storage.type.function' '"lambda"': 'storage.type.function' From 177fda9ea6ef02692cf430ac202bef24dbebec50 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sun, 7 Apr 2019 14:30:09 -0700 Subject: [PATCH 259/291] :arrow_up: tree-sitter-python --- package-lock.json | 12 ++++++------ package.json | 2 +- 2 files changed, 7 
insertions(+), 7 deletions(-) diff --git a/package-lock.json b/package-lock.json index 743c8e3..c678365 100644 --- a/package-lock.json +++ b/package-lock.json @@ -123,9 +123,9 @@ "dev": true }, "nan": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz", - "integrity": "sha512-JY7V6lRkStKcKTvHO5NVSQRv+RV+FIL5pvDoLiAtSL9pKlC5x9PKQcZDsq7m4FO4d57mkhC6Z+QhAh3Jdk5JFw==" + "version": "2.13.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.13.2.tgz", + "integrity": "sha512-TghvYc72wlMGMVMluVo9WRJc0mB8KxxF/gZ4YYFy7V2ZQX9l7rgbPg7vjS9mt6U5HXODVFVI2bOduCzwOMv/lw==" }, "once": { "version": "1.4.0", @@ -165,9 +165,9 @@ "dev": true }, "tree-sitter-python": { - "version": "0.13.6", - "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.13.6.tgz", - "integrity": "sha512-QGc7dNObFv5+kCIvknO+Jv9eHusgamlcxZpLkDioAK6/dZ/f+3vbn3KQ2y4PpS1qiAHaaxh2V4XgMyv6k/rS9g==", + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.14.0.tgz", + "integrity": "sha512-Kcj5AUdeI4/c/JLsQV8OFI0zLrwcQ1nKoqCRr+W73Tp5SIK+Dd1ILNC5TFHPw1IqOGstcg8AH0XTeU0uq3boZg==", "requires": { "nan": "^2.4.0" } diff --git a/package.json b/package.json index b1bab16..1da205e 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.13.6" + "tree-sitter-python": "^0.14.0" }, "devDependencies": { "coffeelint": "^1.10.1" From a6a1abef1c558d162b5b9b26bafbc36a3ba81396 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sun, 7 Apr 2019 14:30:31 -0700 Subject: [PATCH 260/291] Prepare 0.51.10 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index c678365..62134ae 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.9", + "version": "0.51.10", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 1da205e..60e923e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.9", + "version": "0.51.10", "engines": { "atom": "*", "node": "*" From 591fc791290a9aa42b3432ddc1142c89233e59da Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 10 Apr 2019 15:34:05 -0600 Subject: [PATCH 261/291] Prepare 0.52.0 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index 62134ae..6e6c485 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.10", + "version": "0.52.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 60e923e..0281087 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.51.10", + "version": "0.52.0", "engines": { "atom": "*", "node": "*" From b5011ef56dc4ac01a578f3e620b357ef8ae4c9a0 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 10 Apr 2019 15:49:38 -0600 Subject: [PATCH 262/291] Prepare 0.53.0 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6e6c485..5237194 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.52.0", + "version": 
"0.53.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 0281087..51b59a7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.52.0", + "version": "0.53.0", "engines": { "atom": "*", "node": "*" From 4d55d0ecbe93ba03a74a28896c921b82cb6bd50b Mon Sep 17 00:00:00 2001 From: Linus Eriksson Date: Thu, 11 Apr 2019 19:36:33 +0200 Subject: [PATCH 263/291] Allow folding if statement without elif or else --- grammars/tree-sitter-python.cson | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index ac1404f..ec0138a 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -25,8 +25,14 @@ fileTypes: [ ] folds: [ + { + type: ['if_statement'] + start: {type: ':'} + end: {type: ['elif_clause', 'else_clause']} + }, { type: [ + 'if_statement' 'elif_clause' 'else_clause' 'for_statement' @@ -39,11 +45,6 @@ folds: [ ] start: {type: ':'} }, - { - type: ['if_statement'] - start: {type: ':'} - end: {type: ['elif_clause', 'else_clause']} - } { start: {type: '(', index: 0} end: {type: ')', index: -1} From be7347256f501028d01621fc1125f0535c6132e3 Mon Sep 17 00:00:00 2001 From: Linus Eriksson Date: Thu, 18 Apr 2019 23:52:31 +0200 Subject: [PATCH 264/291] Prepare 0.53.1 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index 5237194..6d5e76c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.0", + "version": "0.53.1", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 51b59a7..9ce904c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.0", + "version": "0.53.1", "engines": { "atom": "*", "node": "*" From 878f3509636237fe7bbfe2c5f06b59229e93fa93 Mon Sep 17 00:00:00 2001 From: Caleb Evans Date: Sun, 12 May 2019 16:09:16 -0700 Subject: [PATCH 265/291] Tokenize *args and **kwargs the same as other parameters These changes are specifically for the Python tree-sitter grammar. 
--- grammars/tree-sitter-python.cson | 2 ++ 1 file changed, 2 insertions(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index ec0138a..4951b02 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -111,6 +111,8 @@ scopes: '"nonlocal"': 'storage.modifier.nonlocal' 'parameters > identifier': 'variable.parameter.function' + 'parameters > list_splat > identifier': 'variable.parameter.function' + 'parameters > dictionary_splat > identifier': 'variable.parameter.function' 'default_parameter > identifier:nth-child(0)': 'variable.parameter.function' 'keyword_argument > identifier:nth-child(0)': 'variable.parameter.function' From a7b054915d438eaefb897817895f8a0d6c365ef5 Mon Sep 17 00:00:00 2001 From: Jason Rudolph Date: Fri, 24 May 2019 09:49:20 -0400 Subject: [PATCH 266/291] Prepare 0.53.2 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6d5e76c..c33183d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.1", + "version": "0.53.2", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 9ce904c..5c46fe7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.1", + "version": "0.53.2", "engines": { "atom": "*", "node": "*" From 110b32ce00a0b2965283bc49e710f0ada4efa0f5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 18 Jun 2019 16:57:49 -0700 Subject: [PATCH 267/291] :arrow_up: tree-sitter-python --- package-lock.json | 12 ++++++------ package.json | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/package-lock.json b/package-lock.json index c33183d..ac2549a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -123,9 +123,9 @@ "dev": true }, "nan": { - "version": "2.13.2", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.13.2.tgz", - "integrity": "sha512-TghvYc72wlMGMVMluVo9WRJc0mB8KxxF/gZ4YYFy7V2ZQX9l7rgbPg7vjS9mt6U5HXODVFVI2bOduCzwOMv/lw==" + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" }, "once": { "version": "1.4.0", @@ -165,9 +165,9 @@ "dev": true }, "tree-sitter-python": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.14.0.tgz", - "integrity": "sha512-Kcj5AUdeI4/c/JLsQV8OFI0zLrwcQ1nKoqCRr+W73Tp5SIK+Dd1ILNC5TFHPw1IqOGstcg8AH0XTeU0uq3boZg==", + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.15.0.tgz", + "integrity": "sha512-lOV84DUTsyab8xRfU0o8pBQOKAZPjIJsGL7q0buuORHQvvwnvy3iwF/83OGSyiNYRJzPz6gW+E1N/VgNNavMHA==", "requires": { "nan": "^2.4.0" } diff --git a/package.json b/package.json index 5c46fe7..35d00e7 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.14.0" + "tree-sitter-python": "^0.15.0" }, "devDependencies": { "coffeelint": "^1.10.1" From 90e245936e6aa1961ccc611c86706c13600e744f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 18 Jun 2019 16:58:05 -0700 Subject: [PATCH 268/291] Prepare 0.53.3 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json 
b/package-lock.json index ac2549a..4a6760d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.2", + "version": "0.53.3", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 35d00e7..9c08d52 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.2", + "version": "0.53.3", "engines": { "atom": "*", "node": "*" From b11c80cca7dab85f652b021dd175ecea49f648ef Mon Sep 17 00:00:00 2001 From: Darangi Date: Thu, 5 Dec 2019 16:42:24 +0100 Subject: [PATCH 269/291] :arrow_up:tree-sitter-python@0.15.1 --- package-lock.json | 6 +++--- package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 4a6760d..bcf60ea 100644 --- a/package-lock.json +++ b/package-lock.json @@ -165,9 +165,9 @@ "dev": true }, "tree-sitter-python": { - "version": "0.15.0", - "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.15.0.tgz", - "integrity": "sha512-lOV84DUTsyab8xRfU0o8pBQOKAZPjIJsGL7q0buuORHQvvwnvy3iwF/83OGSyiNYRJzPz6gW+E1N/VgNNavMHA==", + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.15.1.tgz", + "integrity": "sha512-v8HUvx6JnaRNiLM2ur+T5dVEoUKanXYv8vqHWGNzjiyt+vluHKySGR7fWeQVcaotDSulDJfil4Zbye2qIPVKSA==", "requires": { "nan": "^2.4.0" } diff --git a/package.json b/package.json index 9c08d52..567cab2 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.15.0" + "tree-sitter-python": "^0.15.1" }, "devDependencies": { "coffeelint": "^1.10.1" From d4a8e73a4ce35aa6aa877044c508899621adf040 Mon Sep 17 00:00:00 2001 From: Darangi Date: Thu, 5 Dec 2019 16:43:11 +0100 Subject: [PATCH 270/291] Prepare v0.53.4 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index bcf60ea..800fb4a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.3", + "version": "0.53.4", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 567cab2..693e98d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.3", + "version": "0.53.4", "engines": { "atom": "*", "node": "*" From 3f8fa33ce8f54564576dc1db2017a7bd5327f433 Mon Sep 17 00:00:00 2001 From: illright Date: Sun, 29 Dec 2019 19:34:50 +0500 Subject: [PATCH 271/291] Add a lookahead for DELETE to avoid mistaking HTTP strings for SQL --- grammars/python.cson | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/grammars/python.cson b/grammars/python.cson index dc84094..39318ef 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -1643,7 +1643,7 @@ 'name': 'string.quoted.double.block.python' 'patterns': [ { - 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' + 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE(?! \/)|CREATE|REPLACE|ALTER|WITH))' 'name': 'meta.embedded.sql' 'end': '(?=\\s*""")' 'patterns': [ @@ -1655,7 +1655,7 @@ ] } { - 'begin': '(")(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' + 'begin': '(")(?=\\s*(SELECT|INSERT|UPDATE|DELETE(?! 
\/)|CREATE|REPLACE|ALTER|WITH))' 'beginCaptures': '1': 'name': 'punctuation.definition.string.begin.python' @@ -2214,7 +2214,7 @@ 'name': 'string.quoted.single.block.python' 'patterns': [ { - 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' + 'begin': '(?=\\s*(SELECT|INSERT|UPDATE|DELETE(?! \/)|CREATE|REPLACE|ALTER|WITH))' 'end': '(?=\\s*\'\'\')' 'name': 'meta.embedded.sql' 'patterns': [ @@ -2226,7 +2226,7 @@ ] } { - 'begin': '(\')(?=\\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|REPLACE|ALTER|WITH))' + 'begin': '(\')(?=\\s*(SELECT|INSERT|UPDATE|DELETE(?! \/)|CREATE|REPLACE|ALTER|WITH))' 'beginCaptures': '1': 'name': 'punctuation.definition.string.begin.python' From 681aba31aa2679e563b2a287769a367957d4479f Mon Sep 17 00:00:00 2001 From: ThatXliner <66848002+ThatXliner@users.noreply.github.com> Date: Mon, 10 Aug 2020 17:47:41 -0700 Subject: [PATCH 272/291] Added async (line 851) Added keyword async: ``` 'illegal_names': 'match': '\\b(and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield|await|async)\\b' 'name': 'invalid.illegal.name.python' ``` --- grammars/python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/python.cson b/grammars/python.cson index dc84094..6102851 100644 --- a/grammars/python.cson +++ b/grammars/python.cson @@ -848,7 +848,7 @@ 'generic_names': 'match': '[A-Za-z_][A-Za-z0-9_]*' 'illegal_names': - 'match': '\\b(and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield|await)\\b' + 'match': '\\b(and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield|await|async)\\b' 'name': 'invalid.illegal.name.python' 'keyword_arguments': 'begin': '\\b([a-zA-Z_][a-zA-Z_0-9]*)\\s*(=)(?!=)' From df7643256348c80d9cbc7438f99d5d8af82a7b23 Mon Sep 17 00:00:00 2001 From: sadick254 Date: Wed, 19 Aug 2020 20:39:56 +0300 Subject: [PATCH 273/291] Prepare v0.53.5 release --- package-lock.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package-lock.json b/package-lock.json index 800fb4a..6b7f3c4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.4", + "version": "0.53.5", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 693e98d..930febc 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "language-python", - "version": "0.53.4", + "version": "0.53.5", "engines": { "atom": "*", "node": "*" From faf1e57ccbddbf7f7378bd085e5bff77090403cb Mon Sep 17 00:00:00 2001 From: aminya Date: Wed, 28 Oct 2020 18:17:41 -0500 Subject: [PATCH 274/291] :arrow_up: Update tree-sitter-python --- package-lock.json | 12 ++++++------ package.json | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6b7f3c4..06766ea 100644 --- a/package-lock.json +++ b/package-lock.json @@ -123,9 +123,9 @@ "dev": true }, "nan": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", - "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + "version": "2.14.2", + "resolved": 
"https://registry.npmjs.org/nan/-/nan-2.14.2.tgz", + "integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==" }, "once": { "version": "1.4.0", @@ -165,9 +165,9 @@ "dev": true }, "tree-sitter-python": { - "version": "0.15.1", - "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.15.1.tgz", - "integrity": "sha512-v8HUvx6JnaRNiLM2ur+T5dVEoUKanXYv8vqHWGNzjiyt+vluHKySGR7fWeQVcaotDSulDJfil4Zbye2qIPVKSA==", + "version": "0.16.1", + "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.16.1.tgz", + "integrity": "sha512-XUxJgecoSZwNYUD+Pfb16pjPmK16T+bqhNdGkX/pgXvaEniaeVLpZP0VSiRpBq7Dx5vaXQcTn1/2MhUxoVBCdg==", "requires": { "nan": "^2.4.0" } diff --git a/package.json b/package.json index 930febc..e7f0cb8 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.15.1" + "tree-sitter-python": "^0.16.1" }, "devDependencies": { "coffeelint": "^1.10.1" From ad4b9807280c9f38cb537539c43890b143188012 Mon Sep 17 00:00:00 2001 From: aminya Date: Wed, 28 Oct 2020 21:25:48 -0500 Subject: [PATCH 275/291] Update Travis linux distro --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 47ee9a1..eb88ec4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -30,7 +30,7 @@ git: sudo: false -dist: trusty +dist: bionic addons: apt: From 97d1eb6d7c1e39eb47717ee511412f26f077f644 Mon Sep 17 00:00:00 2001 From: aminya Date: Thu, 12 Nov 2020 08:18:44 -0600 Subject: [PATCH 276/291] GitHub Actions --- .github/workflows/ci.yml | 51 ++++++++++++++++++++++++++++++++++++++++ .travis.yml | 41 -------------------------------- README.md | 3 +-- appveyor.yml | 31 ++++-------------------- 4 files changed, 56 insertions(+), 70 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 .travis.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..16f1825 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,51 @@ +name: ci +on: + - pull_request + - push + +jobs: + Test: + if: "!contains(github.event.head_commit.message, '[skip ci]')" + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + - windows-latest + atom_channel: + - stable + - nightly + steps: + - uses: actions/checkout@v2 + - name: Cache + uses: actions/cache@v2 + with: + path: | + 'node_modules' + 'C:/Program Files (x86)/MSBuild/Microsoft.Cpp/v4.0/v140' + key: ${{ runner.os }}-${{ matrix.atom_channel }}-${{ hashFiles('package.json') }} + + - uses: UziTech/action-setup-atom@v1 + with: + channel: ${{ matrix.atom_channel }} + + - name: Install Visual Studio 2015 on Windows + if: ${{ contains(matrix.os, 'windows') }} + run: | + choco install visualcpp-build-tools --version=14.0.25420.1 --ignore-dependencies -y --params "'/IncludeRequired'" + echo ::set-env name=VCTargetsPath::'C:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\v140' + + - name: Install dependencies + run: apm install + + - name: Run tests + run: apm test + + Skip: + if: contains(github.event.head_commit.message, '[skip ci]') + runs-on: ubuntu-latest + steps: + - name: Skip CI 🚫 + run: echo skip ci diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 47ee9a1..0000000 --- a/.travis.yml +++ /dev/null @@ -1,41 +0,0 @@ -### Project specific config ### -language: generic - -env: - global: - - APM_TEST_PACKAGES="" - - 
ATOM_LINT_WITH_BUNDLED_NODE="true" - - matrix: - - ATOM_CHANNEL=stable - - ATOM_CHANNEL=beta - -### Generic setup follows ### -script: - - curl -s -O https://raw.githubusercontent.com/atom/ci/master/build-package.sh - - chmod u+x build-package.sh - - ./build-package.sh - -notifications: - email: - on_success: never - on_failure: change - -branches: - only: - - master - -git: - depth: 10 - -sudo: false - -dist: trusty - -addons: - apt: - packages: - - build-essential - - fakeroot - - git - - libsecret-1-dev diff --git a/README.md b/README.md index e646780..9ef6e6d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ # Python language support in Atom -[![macOS Build Status](https://travis-ci.org/atom/language-python.svg?branch=master)](https://travis-ci.org/atom/language-python) -[![Windows Build Status](https://ci.appveyor.com/api/projects/status/hmxrb9jttjh41es9/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-python/branch/master) +![ci](https://github.com/atom/language-python/workflows/ci/badge.svg) [![Dependency Status](https://david-dm.org/atom/language-python.svg)](https://david-dm.org/atom/language-python) Adds syntax highlighting and snippets to Python files in Atom. diff --git a/appveyor.yml b/appveyor.yml index 7d07d05..795da41 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,29 +1,6 @@ -version: "{build}" - -image: Visual Studio 2015 - -platform: x64 +# empty appveyor +build: off branches: - only: - - master - -clone_depth: 10 - -skip_tags: true - -environment: - APM_TEST_PACKAGES: - - matrix: - - ATOM_CHANNEL: stable - - ATOM_CHANNEL: beta - -install: - - ps: Install-Product node 4 - -build_script: - - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/atom/ci/master/build-package.ps1')) - -test: off -deploy: off + only: + - non-existing From 870467c070fb4eb072e2320b726fc69142882b64 Mon Sep 17 00:00:00 2001 From: Lev Chelyadinov Date: Fri, 4 Dec 2020 10:28:26 +0300 Subject: [PATCH 277/291] Add a test --- spec/python-spec.coffee | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index c1851d0..6ef1fba 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -750,3 +750,10 @@ describe "Python grammar", -> expect(tokens[13]).toEqual value: ')', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql'] expect(tokens[15]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.end.python'] expect(tokens[17]).toEqual value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python'] + + it "recognizes DELETE as an HTTP method", -> + {tokens} = grammar.tokenizeLine('"DELETE /api/v1/endpoint"') + + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: 'DELETE /api/v1/endpoint', scopes: ['source.python', 'string.quoted.single.single-line.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] From 64010bfc83b2d6b0824e39c87d3b0cf72a3c7650 Mon Sep 17 00:00:00 2001 From: Lev Chelyadinov Date: Fri, 4 Dec 2020 10:31:49 +0300 Subject: [PATCH 278/291] Fix the test that failed the build --- spec/python-spec.coffee | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/spec/python-spec.coffee b/spec/python-spec.coffee index 6ef1fba..423f8c1 100644 --- a/spec/python-spec.coffee +++ b/spec/python-spec.coffee @@ -754,6 +754,6 @@ describe "Python grammar", -> it "recognizes DELETE as an HTTP method", -> {tokens} = grammar.tokenizeLine('"DELETE /api/v1/endpoint"') - expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.begin.python'] - expect(tokens[1]).toEqual value: 'DELETE /api/v1/endpoint', scopes: ['source.python', 'string.quoted.single.single-line.python'] - expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.single.single-line.python', 'punctuation.definition.string.end.python'] + expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python'] + expect(tokens[1]).toEqual value: 'DELETE /api/v1/endpoint', scopes: ['source.python', 'string.quoted.double.single-line.python'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python'] From ebe88306700ef0face8c9a99b5fc2a1d4e059866 Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 10:51:27 -0600 Subject: [PATCH 279/291] :arrow_up: bump tree-sitter-python to 0.17.0 --- package-lock.json | 6 +++--- package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 06766ea..d6acf3c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -165,9 +165,9 @@ "dev": true }, "tree-sitter-python": { - "version": "0.16.1", - "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.16.1.tgz", - "integrity": "sha512-XUxJgecoSZwNYUD+Pfb16pjPmK16T+bqhNdGkX/pgXvaEniaeVLpZP0VSiRpBq7Dx5vaXQcTn1/2MhUxoVBCdg==", + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/tree-sitter-python/-/tree-sitter-python-0.17.0.tgz", + "integrity": "sha512-6HaqF/1GHB0/qrkcIxYqEELsQq6bXdQxx2KnGLZhoGn5ipbAibncSuQT9f8HYbmqLZ4dIGleQzsXreY1mx2lig==", "requires": { "nan": "^2.4.0" } diff --git a/package.json b/package.json index e7f0cb8..986bde6 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ }, "dependencies": { "atom-grammar-test": "^0.6.4", - "tree-sitter-python": "^0.16.1" + "tree-sitter-python": "^0.17.0" }, "devDependencies": { "coffeelint": "^1.10.1" From d989f9aee9638ba48e2018fa81dc9a9b1b200dda Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 11:59:34 -0600 Subject: [PATCH 280/291] Add keyword.control.return Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 4951b02..cb1b6d1 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -177,3 +177,4 @@ scopes: '"or"': 'keyword.operator.logical.python' '"not"': 'keyword.operator.logical.python' '"is"': 'keyword.operator.logical.python' + '"->"': 'keyword.control.return' From c2b79f1aebf91c507a0b469e4d201a92c76ad95a Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 12:00:27 -0600 Subject: [PATCH 281/291] Add punctuations Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git 
a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index cb1b6d1..07d5562 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -178,3 +178,11 @@ scopes: '"not"': 'keyword.operator.logical.python' '"is"': 'keyword.operator.logical.python' '"->"': 'keyword.control.return' + + '"["': 'punctuation.definition.begin.bracket.square' + '"]"': 'punctuation.definition.end.bracket.square' + '","': 'punctuation.separator.delimiter' + '"{"': 'punctuation.section.block.begin.bracket.curly' + '"}"': 'punctuation.section.block.end.bracket.curly' + '"("': 'punctuation.section.parens.begin.bracket.round' + '")"': 'punctuation.section.parens.end.bracket.round' From 6325ed1445d53c5519e37b0597d0ae608bdb0a84 Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 12:01:25 -0600 Subject: [PATCH 282/291] Use function.def for function_definition Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 07d5562..3abe778 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -73,7 +73,7 @@ scopes: 'interpolation > "}"': 'punctuation.section.embedded' 'class_definition > identifier': 'entity.name.type.class' - 'function_definition > identifier': 'entity.name.function' + 'function_definition > identifier': 'entity.name.function.def' 'call > identifier:nth-child(0)': [ {match: '^(abs|all|any|ascii|bin|bool|breakpoint|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|__import__)$', scopes: 'support.function'}, From 89351c1a3acd79e77b00474da135d7ea9f4c2e1f Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 12:02:40 -0600 Subject: [PATCH 283/291] Use function.call for call Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 3abe778..bfed062 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -76,8 +76,8 @@ scopes: 'function_definition > identifier': 'entity.name.function.def' 'call > identifier:nth-child(0)': [ {match: '^(abs|all|any|ascii|bin|bool|breakpoint|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|__import__)$', - scopes: 'support.function'}, - 'entity.name.function' + scopes: 'support.function.call'}, + 'entity.name.function.call' ] 'call > attribute > identifier:nth-child(2)': 'entity.name.function' From b0a77d570ce7d39484b4b8d02963d618b0537585 Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 12:03:16 -0600 Subject: [PATCH 284/291] Support constructor 
Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index bfed062..bbbcd80 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -77,6 +77,7 @@ scopes: 'call > identifier:nth-child(0)': [ {match: '^(abs|all|any|ascii|bin|bool|breakpoint|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|__import__)$', scopes: 'support.function.call'}, + {match: '^[A-Z]', scopes: 'support.type.contructor'} 'entity.name.function.call' ] 'call > attribute > identifier:nth-child(2)': 'entity.name.function' From 37c8e1d9e8bda4d875e2892d777e4b29f8218236 Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 12:15:56 -0600 Subject: [PATCH 285/291] Support lambda parameters Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index bbbcd80..7067f19 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -116,6 +116,7 @@ scopes: 'parameters > dictionary_splat > identifier': 'variable.parameter.function' 'default_parameter > identifier:nth-child(0)': 'variable.parameter.function' 'keyword_argument > identifier:nth-child(0)': 'variable.parameter.function' + 'lambda_parameters > identifier': 'variable.parameter.function' '"if"': 'keyword.control' '"else"': 'keyword.control' From 98cab1961c654b66ac60c5d57b57346983ce00fe Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 12:09:04 -0600 Subject: [PATCH 286/291] Support typed parameters Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 1 + 1 file changed, 1 insertion(+) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 7067f19..166d657 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -117,6 +117,7 @@ scopes: 'default_parameter > identifier:nth-child(0)': 'variable.parameter.function' 'keyword_argument > identifier:nth-child(0)': 'variable.parameter.function' 'lambda_parameters > identifier': 'variable.parameter.function' + 'typed_parameter > identifier': 'variable.parameter.function' '"if"': 'keyword.control' '"else"': 'keyword.control' From 6e5de571a361d82c7c9c2fa86f6295227820273f Mon Sep 17 00:00:00 2001 From: Amin Yahyaabadi Date: Fri, 8 Jan 2021 12:13:29 -0600 Subject: [PATCH 287/291] Support self matching Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com> --- grammars/tree-sitter-python.cson | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson index 166d657..fa18df4 100644 --- a/grammars/tree-sitter-python.cson +++ b/grammars/tree-sitter-python.cson @@ -82,10 +82,12 @@ scopes: ] 'call > attribute > identifier:nth-child(2)': 'entity.name.function' - 'identifier': + 'identifier': [ {match: 
From 6e5de571a361d82c7c9c2fa86f6295227820273f Mon Sep 17 00:00:00 2001
From: Amin Yahyaabadi
Date: Fri, 8 Jan 2021 12:13:29 -0600
Subject: [PATCH 287/291] Support self matching

Co-Authored-By: Eric Shimizu Karbstein <17973728+GrayJack@users.noreply.github.com>
---
 grammars/tree-sitter-python.cson | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson
index 166d657..fa18df4 100644
--- a/grammars/tree-sitter-python.cson
+++ b/grammars/tree-sitter-python.cson
@@ -82,10 +82,12 @@ scopes:
   'call > attribute > identifier:nth-child(2)': 'entity.name.function'
-  'identifier':
+  'identifier': [
     {match:
     '^(BaseException|Exception|TypeError|StopAsyncIteration|StopIteration|ImportError|ModuleNotFoundError|OSError|ConnectionError|BrokenPipeError|ConnectionAbortedError|ConnectionRefusedError|ConnectionResetError|BlockingIOError|ChildProcessError|FileExistsError|FileNotFoundError|IsADirectoryError|NotADirectoryError|InterruptedError|PermissionError|ProcessLookupError|TimeoutError|EOFError|RuntimeError|RecursionError|NotImplementedError|NameError|UnboundLocalError|AttributeError|SyntaxError|IndentationError|TabError|LookupError|IndexError|KeyError|ValueError|UnicodeError|UnicodeEncodeError|UnicodeDecodeError|UnicodeTranslateError|AssertionError|ArithmeticError|FloatingPointError|OverflowError|ZeroDivisionError|SystemError|ReferenceError|BufferError|MemoryError|Warning|UserWarning|DeprecationWarning|PendingDeprecationWarning|SyntaxWarning|RuntimeWarning|FutureWarning|ImportWarning|UnicodeWarning|BytesWarning|ResourceWarning|GeneratorExit|SystemExit|KeyboardInterrupt)$'
+    scopes: 'support.type.exception'},
+    {match: '^(self)', scopes: 'entity.name.variable.self'}
+  ]
   'attribute > identifier:nth-child(2)': 'variable.other.object.property'

From 4d966074bfb6267e09416b9004f2e5770c5b9dfb Mon Sep 17 00:00:00 2001
From: Amin Yahyaabadi
Date: Fri, 8 Jan 2021 12:53:27 -0600
Subject: [PATCH 288/291] Support argument list

https://github.com/tree-sitter/tree-sitter-python/issues/96
---
 grammars/tree-sitter-python.cson | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson
index fa18df4..f97ca81 100644
--- a/grammars/tree-sitter-python.cson
+++ b/grammars/tree-sitter-python.cson
@@ -121,6 +121,8 @@ scopes:
   'lambda_parameters > identifier': 'variable.parameter.function'
   'typed_parameter > identifier': 'variable.parameter.function'
+  'argument_list': 'meta.method-call.python'
+
   '"if"': 'keyword.control'
   '"else"': 'keyword.control'
   '"elif"': 'keyword.control'

From 5d548707cec8c150f4844d9a1cd744dd9641c6ef Mon Sep 17 00:00:00 2001
From: Amin Yahyaabadi
Date: Fri, 8 Jan 2021 12:57:51 -0600
Subject: [PATCH 289/291] use entity.name.function.definition

---
 grammars/tree-sitter-python.cson | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/grammars/tree-sitter-python.cson b/grammars/tree-sitter-python.cson
index f97ca81..4490d74 100644
--- a/grammars/tree-sitter-python.cson
+++ b/grammars/tree-sitter-python.cson
@@ -73,7 +73,7 @@ scopes:
   'interpolation > "}"': 'punctuation.section.embedded'
   'class_definition > identifier': 'entity.name.type.class'
-  'function_definition > identifier': 'entity.name.function.def'
+  'function_definition > identifier': 'entity.name.function.definition'
   'call > identifier:nth-child(0)': [
     {match: '^(abs|all|any|ascii|bin|bool|breakpoint|bytearray|bytes|callable|chr|classmethod|compile|complex|delattr|dict|dir|divmod|enumerate|eval|exec|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|isinstance|issubclass|iter|len|list|locals|map|max|memoryview|min|next|object|oct|open|ord|pow|print|property|range|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|vars|zip|__import__)$',
     scopes: 'support.function.call'},
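
One more hedged sketch, covering patches 287-289: the self and exception identifier matches, the argument_list scope, and the renamed definition scope. The node names (identifier, argument_list, function_definition) are again assumptions about how tree-sitter-python parses this snippet:

    # Hedged example: scope names come from the hunks above; node mapping is assumed.
    class Greeter:
        greetings = {"en": "Hello"}

        def greet(self, lang, name):            # "greet" -> entity.name.function.definition; "self" -> entity.name.variable.self
            try:
                return self.render(lang, name)  # the "(lang, name)" argument_list -> meta.method-call.python
            except KeyError:                    # bare "KeyError" identifier -> support.type.exception
                return name

        def render(self, lang, name):
            return f"{self.greetings[lang]}, {name}!"
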
From fd7182548b88405380ce05c1a52e712c20fabc47 Mon Sep 17 00:00:00 2001
From: darangi
Date: Tue, 2 Feb 2021 12:15:40 +0100
Subject: [PATCH 290/291] Prepare v0.53.6 release

---
 package-lock.json | 2 +-
 package.json | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index d6acf3c..d77eb13 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
 {
   "name": "language-python",
-  "version": "0.53.5",
+  "version": "0.53.6",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
diff --git a/package.json b/package.json
index 986bde6..6a079e3 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "language-python",
-  "version": "0.53.5",
+  "version": "0.53.6",
   "engines": {
     "atom": "*",
     "node": "*"

From 2d97e4b7a278df8d334c4bed701fdd4562c937bb Mon Sep 17 00:00:00 2001
From: Musa Ibrahim
Date: Wed, 28 Sep 2022 11:52:01 +0100
Subject: [PATCH 291/291] add sunset message

---
 README.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 9ef6e6d..4def7c0 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,5 @@
-# Python language support in Atom
+##### Atom and all repositories under Atom will be archived on December 15, 2022. Learn more in our [official announcement](https://github.blog/2022-06-08-sunsetting-atom/)
+ # Python language support in Atom
 ![ci](https://github.com/atom/language-python/workflows/ci/badge.svg)
 [![Dependency Status](https://david-dm.org/atom/language-python.svg)](https://david-dm.org/atom/language-python)