@@ -103,25 +103,25 @@ describe "CoffeeScript grammar", ->
       Until here
       ###
     """
-    expect(lines[1][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
-    expect(lines[1][1]).toEqual value: 'I am a block comment', scopes: ['source.coffee', 'comment.block.coffee']
-    expect(lines[3][0]).toEqual value: 'Until here', scopes: ['source.coffee', 'comment.block.coffee']
-    expect(lines[4][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
+    expect(lines[0][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
+    expect(lines[0][1]).toEqual value: 'I am a block comment', scopes: ['source.coffee', 'comment.block.coffee']
+    expect(lines[2][0]).toEqual value: 'Until here', scopes: ['source.coffee', 'comment.block.coffee']
+    expect(lines[3][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']

     {tokens} = grammar.tokenizeLine "identity = ###::<T>### (value ###: T ###) ###: T ### ->"
     expect(tokens[0]).toEqual value: 'identity', scopes: ['source.coffee', 'variable.assignment.coffee']
     expect(tokens[4]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
     expect(tokens[5]).toEqual value: '::<T>', scopes: ['source.coffee', 'comment.block.coffee']
     expect(tokens[6]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
-    expect(tokens[7]).toEqual value: ' (value ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed
-    expect(tokens[8]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
-    expect(tokens[9]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee']
+    expect(tokens[9]).toEqual value: 'value ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed
     expect(tokens[10]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
-    expect(tokens[11]).toEqual value: ') ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed
+    expect(tokens[11]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee']
     expect(tokens[12]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
-    expect(tokens[13]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee']
-    expect(tokens[14]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
-    expect(tokens[16]).toEqual value: '->', scopes: ['source.coffee', 'meta.function.inline.coffee', 'storage.type.function.coffee']
+    expect(tokens[14]).toEqual value: ' ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed
+    expect(tokens[15]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
+    expect(tokens[16]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee']
+    expect(tokens[17]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
+    expect(tokens[19]).toEqual value: '->', scopes: ['source.coffee', 'meta.function.inline.coffee', 'storage.type.function.coffee']

   it "tokenizes annotations in block comments", ->
     lines = grammar.tokenizeLines """
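The renumbering on the right-hand side suggests the updated grammar now emits the parentheses and the whitespace around the inline `###: T ###` comments as their own tokens, which pushes the later indices up. A minimal sketch for checking the indices yourself, assuming the `grammar` instance this spec file already sets up before each test:

    # Dump every token's index, raw value, and scope chain for the line under test
    {tokens} = grammar.tokenizeLine "identity = ###::<T>### (value ###: T ###) ###: T ### ->"
    console.log "#{i}: #{JSON.stringify(token.value)} -> #{token.scopes.join(', ')}" for token, i in tokens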