GitHub Repository: microsoft/vscode
Path: blob/main/src/vs/editor/test/common/model/tokenStore.test.ts
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import assert from 'assert';
import { ensureNoDisposablesAreLeakedInTestSuite } from '../../../../base/test/common/utils.js';
import { TextModel } from '../../../common/model/textModel.js';
import { TokenQuality, TokenStore } from '../../../common/model/tokens/treeSitter/tokenStore.js';

suite('TokenStore', () => {
	let textModel: TextModel;
	ensureNoDisposablesAreLeakedInTestSuite();

	setup(() => {
		textModel = {
			getValueLength: () => 11
		} as TextModel;
	});

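	// Shape shared by every test below (inferred from the call sites, not from
	// the TokenStore source): tokens are { startOffsetInclusive, length, token }
	// records that tile the document contiguously. buildStore() seeds the tree,
	// update() and delete() edit it in place, and getTokensInRange(start, end)
	// reads tokens back, clipping any token that straddles the end of the range
	// (see 'Realistic scenario two' below).
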
	test('constructs with empty model', () => {
		const store = new TokenStore(textModel);
		assert.ok(store.root);
		assert.strictEqual(store.root.length, textModel.getValueLength());
	});

	test('builds store with single token', () => {
		const store = new TokenStore(textModel);
		store.buildStore([{
			startOffsetInclusive: 0,
			length: 5,
			token: 1
		}], TokenQuality.Accurate);
		assert.strictEqual(store.root.length, 5);
	});

	test('builds store with multiple tokens', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 4, token: 3 }
		], TokenQuality.Accurate);
		assert.ok(store.root);
		assert.strictEqual(store.root.length, 10);
	});

	test('creates balanced tree structure', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 2, token: 1 },
			{ startOffsetInclusive: 2, length: 2, token: 2 },
			{ startOffsetInclusive: 4, length: 2, token: 3 },
			{ startOffsetInclusive: 6, length: 2, token: 4 }
		], TokenQuality.Accurate);

		const root = store.root as any;
		assert.ok(root.children);
		assert.strictEqual(root.children.length, 2);
		assert.strictEqual(root.children[0].length, 4);
		assert.strictEqual(root.children[1].length, 4);
	});

	test('creates deep tree structure', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 1, token: 1 },
			{ startOffsetInclusive: 1, length: 1, token: 2 },
			{ startOffsetInclusive: 2, length: 1, token: 3 },
			{ startOffsetInclusive: 3, length: 1, token: 4 },
			{ startOffsetInclusive: 4, length: 1, token: 5 },
			{ startOffsetInclusive: 5, length: 1, token: 6 },
			{ startOffsetInclusive: 6, length: 1, token: 7 },
			{ startOffsetInclusive: 7, length: 1, token: 8 }
		], TokenQuality.Accurate);

		const root = store.root as any;
		assert.ok(root.children);
		assert.strictEqual(root.children.length, 2);
		assert.ok(root.children[0].children);
		assert.strictEqual(root.children[0].children.length, 2);
		assert.ok(root.children[0].children[0].children);
		assert.strictEqual(root.children[0].children[0].children.length, 2);
	});
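
	// The assertions above are consistent with a fully balanced binary tree
	// over the eight unit-length tokens (a sketch; only the left spine is
	// actually asserted):
	//
	//                  root (length 8)
	//                /                 \
	//           node (4)             node (4)
	//           /      \             /      \
	//       node (2) node (2)    node (2) node (2)
	//        /   \    /   \       /   \    /   \
	//       t1   t2  t3   t4     t5   t6  t7   t8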

	test('updates single token in middle', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		store.update(3, [
			{ startOffsetInclusive: 3, length: 3, token: 4 }
		], TokenQuality.Accurate);

		const tokens = store.root as any;
		assert.strictEqual(tokens.children[0].token, 1);
		assert.strictEqual(tokens.children[1].token, 4);
		assert.strictEqual(tokens.children[2].token, 3);
	});
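
	// Reading of update() used throughout this suite: the first argument is the
	// length of the existing range being replaced, anchored at the first new
	// token's startOffsetInclusive. This is inferred from the call sites here,
	// not from the TokenStore source.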

	test('updates multiple consecutive tokens', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		store.update(6, [
			{ startOffsetInclusive: 3, length: 3, token: 4 },
			{ startOffsetInclusive: 6, length: 3, token: 5 }
		], TokenQuality.Accurate);

		const tokens = store.root as any;
		assert.strictEqual(tokens.children[0].token, 1);
		assert.strictEqual(tokens.children[1].token, 4);
		assert.strictEqual(tokens.children[2].token, 5);
	});

	test('updates tokens at start of document', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		store.update(3, [
			{ startOffsetInclusive: 0, length: 3, token: 4 }
		], TokenQuality.Accurate);

		const tokens = store.root as any;
		assert.strictEqual(tokens.children[0].token, 4);
		assert.strictEqual(tokens.children[1].token, 2);
		assert.strictEqual(tokens.children[2].token, 3);
	});

	test('updates tokens at end of document', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		store.update(3, [
			{ startOffsetInclusive: 6, length: 3, token: 4 }
		], TokenQuality.Accurate);

		const tokens = store.root as any;
		assert.strictEqual(tokens.children[0].token, 1);
		assert.strictEqual(tokens.children[1].token, 2);
		assert.strictEqual(tokens.children[2].token, 4);
	});

	test('updates length of tokens', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		store.update(6, [
			{ startOffsetInclusive: 3, length: 5, token: 4 }
		], TokenQuality.Accurate);

		const tokens = store.root as any;
		assert.strictEqual(tokens.children[0].token, 1);
		assert.strictEqual(tokens.children[0].length, 3);
		assert.strictEqual(tokens.children[1].token, 4);
		assert.strictEqual(tokens.children[1].length, 5);
	});

	test('update deeply nested tree with new token length in the middle', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 1, token: 1 },
			{ startOffsetInclusive: 1, length: 1, token: 2 },
			{ startOffsetInclusive: 2, length: 1, token: 3 },
			{ startOffsetInclusive: 3, length: 1, token: 4 },
			{ startOffsetInclusive: 4, length: 1, token: 5 },
			{ startOffsetInclusive: 5, length: 1, token: 6 },
			{ startOffsetInclusive: 6, length: 1, token: 7 },
			{ startOffsetInclusive: 7, length: 1, token: 8 }
		], TokenQuality.Accurate);

		// Update token in the middle (position 3-4) to span 3-6
		store.update(3, [
			{ startOffsetInclusive: 3, length: 3, token: 9 }
		], TokenQuality.Accurate);

		const root = store.root as any;
		// Verify the structure remains balanced
		assert.strictEqual(root.children.length, 3);
		assert.strictEqual(root.children[0].children.length, 2);

		// Verify the lengths are updated correctly
		assert.strictEqual(root.children[0].length, 2); // First 2 tokens
		assert.strictEqual(root.children[1].length, 4); // Token 3 + our new longer token
		assert.strictEqual(root.children[2].length, 2); // Last 2 tokens
	});

	test('update deeply nested tree with a range of tokens that causes tokens to split', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 4, token: 3 },
			{ startOffsetInclusive: 10, length: 5, token: 4 },
			{ startOffsetInclusive: 15, length: 4, token: 5 },
			{ startOffsetInclusive: 19, length: 3, token: 6 },
			{ startOffsetInclusive: 22, length: 5, token: 7 },
			{ startOffsetInclusive: 27, length: 3, token: 8 }
		], TokenQuality.Accurate);

		// Update token in the middle which causes tokens to split
		store.update(8, [
			{ startOffsetInclusive: 12, length: 4, token: 9 },
			{ startOffsetInclusive: 16, length: 4, token: 10 }
		], TokenQuality.Accurate);
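
		// The 8 replaced characters (offsets 12-20) cut into token 4 (10-15)
		// and token 6 (19-22), so both ends of the range must split; the total
		// length is unchanged: 3+3+4+5+4+3+5+3 = 30 = 12 + 18 asserted below.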

		const root = store.root as any;
		// Verify the structure remains balanced
		assert.strictEqual(root.children.length, 2);
		assert.strictEqual(root.children[0].children.length, 2);

		// Verify the lengths are updated correctly
		assert.strictEqual(root.children[0].length, 12);
		assert.strictEqual(root.children[1].length, 18);
	});

	test('getTokensInRange returns tokens in middle of document', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		const tokens = store.getTokensInRange(3, 6);
		assert.deepStrictEqual(tokens, [{ startOffsetInclusive: 3, length: 3, token: 2 }]);
	});

	test('getTokensInRange returns tokens at start of document', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		const tokens = store.getTokensInRange(0, 3);
		assert.deepStrictEqual(tokens, [{ startOffsetInclusive: 0, length: 3, token: 1 }]);
	});

	test('getTokensInRange returns tokens at end of document', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);

		const tokens = store.getTokensInRange(6, 9);
		assert.deepStrictEqual(tokens, [{ startOffsetInclusive: 6, length: 3, token: 3 }]);
	});

	test('getTokensInRange returns multiple tokens across nodes', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 1, token: 1 },
			{ startOffsetInclusive: 1, length: 1, token: 2 },
			{ startOffsetInclusive: 2, length: 1, token: 3 },
			{ startOffsetInclusive: 3, length: 1, token: 4 },
			{ startOffsetInclusive: 4, length: 1, token: 5 },
			{ startOffsetInclusive: 5, length: 1, token: 6 }
		], TokenQuality.Accurate);

		const tokens = store.getTokensInRange(2, 5);
		assert.deepStrictEqual(tokens, [
			{ startOffsetInclusive: 2, length: 1, token: 3 },
			{ startOffsetInclusive: 3, length: 1, token: 4 },
			{ startOffsetInclusive: 4, length: 1, token: 5 }
		]);
	});

	test('Realistic scenario one', () => {
		// inspired by this snippet, with the update adding a space in the constructor's curly braces:
		// /*
		// */
		// class XY {
		//     constructor() {}
		// }

		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 164164 },
			{ startOffsetInclusive: 3, length: 1, token: 32836 },
			{ startOffsetInclusive: 4, length: 3, token: 164164 },
			{ startOffsetInclusive: 7, length: 2, token: 32836 },
			{ startOffsetInclusive: 9, length: 5, token: 196676 },
			{ startOffsetInclusive: 14, length: 1, token: 32836 },
			{ startOffsetInclusive: 15, length: 2, token: 557124 },
			{ startOffsetInclusive: 17, length: 4, token: 32836 },
			{ startOffsetInclusive: 21, length: 1, token: 32836 },
			{ startOffsetInclusive: 22, length: 11, token: 196676 },
			{ startOffsetInclusive: 33, length: 7, token: 32836 },
			{ startOffsetInclusive: 40, length: 3, token: 32836 }
		], TokenQuality.Accurate);

		store.update(33, [
			{ startOffsetInclusive: 9, length: 5, token: 196676 },
			{ startOffsetInclusive: 14, length: 1, token: 32836 },
			{ startOffsetInclusive: 15, length: 2, token: 557124 },
			{ startOffsetInclusive: 17, length: 4, token: 32836 },
			{ startOffsetInclusive: 21, length: 1, token: 32836 },
			{ startOffsetInclusive: 22, length: 11, token: 196676 },
			{ startOffsetInclusive: 33, length: 8, token: 32836 },
			{ startOffsetInclusive: 41, length: 3, token: 32836 }
		], TokenQuality.Accurate);
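
		// No assertions here: the scenario passes as long as the update applies
		// without throwing (the suite-level disposable-leak check still runs).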
	});
	test('Realistic scenario two', () => {
		// inspired by this snippet, with the update deleting the space in the body of class x
		// class x {
		//
		// }
		// class y {

		// }

		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 5, token: 196676 },
			{ startOffsetInclusive: 5, length: 1, token: 32836 },
			{ startOffsetInclusive: 6, length: 1, token: 557124 },
			{ startOffsetInclusive: 7, length: 4, token: 32836 },
			{ startOffsetInclusive: 11, length: 3, token: 32836 },
			{ startOffsetInclusive: 14, length: 3, token: 32836 },
			{ startOffsetInclusive: 17, length: 5, token: 196676 },
			{ startOffsetInclusive: 22, length: 1, token: 32836 },
			{ startOffsetInclusive: 23, length: 1, token: 557124 },
			{ startOffsetInclusive: 24, length: 4, token: 32836 },
			{ startOffsetInclusive: 28, length: 2, token: 32836 },
			{ startOffsetInclusive: 30, length: 1, token: 32836 }
		], TokenQuality.Accurate);
		const tokens0 = store.getTokensInRange(0, 16);
		assert.deepStrictEqual(tokens0, [
			{ token: 196676, startOffsetInclusive: 0, length: 5 },
			{ token: 32836, startOffsetInclusive: 5, length: 1 },
			{ token: 557124, startOffsetInclusive: 6, length: 1 },
			{ token: 32836, startOffsetInclusive: 7, length: 4 },
			{ token: 32836, startOffsetInclusive: 11, length: 3 },
			{ token: 32836, startOffsetInclusive: 14, length: 2 }
		]);
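
		// Note the clipping: the stored token at offset 14 has length 3, but
		// the query ends at 16, so only 2 of its characters are returned.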

		store.update(14, [
			{ startOffsetInclusive: 0, length: 5, token: 196676 },
			{ startOffsetInclusive: 5, length: 1, token: 32836 },
			{ startOffsetInclusive: 6, length: 1, token: 557124 },
			{ startOffsetInclusive: 7, length: 4, token: 32836 },
			{ startOffsetInclusive: 11, length: 2, token: 32836 },
			{ startOffsetInclusive: 13, length: 3, token: 32836 }
		], TokenQuality.Accurate);

		const tokens = store.getTokensInRange(0, 16);
		assert.deepStrictEqual(tokens, [
			{ token: 196676, startOffsetInclusive: 0, length: 5 },
			{ token: 32836, startOffsetInclusive: 5, length: 1 },
			{ token: 557124, startOffsetInclusive: 6, length: 1 },
			{ token: 32836, startOffsetInclusive: 7, length: 4 },
			{ token: 32836, startOffsetInclusive: 11, length: 2 },
			{ token: 32836, startOffsetInclusive: 13, length: 3 }
		]);
	});
	test('Realistic scenario three', () => {
		// inspired by this snippet, with the update adding a space after the { in the constructor
		// /*--
		// --*/
		// class TreeViewPane {
		//     constructor(
		//         options: IViewletViewOptions,
		//     ) {
		//     }
		// }


		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 5, token: 164164 },
			{ startOffsetInclusive: 5, length: 1, token: 32836 },
			{ startOffsetInclusive: 6, length: 5, token: 164164 },
			{ startOffsetInclusive: 11, length: 2, token: 32836 },
			{ startOffsetInclusive: 13, length: 5, token: 196676 },
			{ startOffsetInclusive: 18, length: 1, token: 32836 },
			{ startOffsetInclusive: 19, length: 12, token: 557124 },
			{ startOffsetInclusive: 31, length: 4, token: 32836 },
			{ startOffsetInclusive: 35, length: 1, token: 32836 },
			{ startOffsetInclusive: 36, length: 11, token: 196676 },
			{ startOffsetInclusive: 47, length: 3, token: 32836 },
			{ startOffsetInclusive: 50, length: 2, token: 32836 },
			{ startOffsetInclusive: 52, length: 7, token: 327748 },
			{ startOffsetInclusive: 59, length: 1, token: 98372 },
			{ startOffsetInclusive: 60, length: 1, token: 32836 },
			{ startOffsetInclusive: 61, length: 19, token: 557124 },
			{ startOffsetInclusive: 80, length: 1, token: 32836 },
			{ startOffsetInclusive: 81, length: 2, token: 32836 },
			{ startOffsetInclusive: 83, length: 6, token: 32836 },
			{ startOffsetInclusive: 89, length: 4, token: 32836 },
			{ startOffsetInclusive: 93, length: 3, token: 32836 }
		], TokenQuality.Accurate);
		const tokens0 = store.getTokensInRange(36, 59);
		assert.deepStrictEqual(tokens0, [
			{ token: 196676, startOffsetInclusive: 36, length: 11 },
			{ token: 32836, startOffsetInclusive: 47, length: 3 },
			{ token: 32836, startOffsetInclusive: 50, length: 2 },
			{ token: 327748, startOffsetInclusive: 52, length: 7 }
		]);

		store.update(82, [
			{ startOffsetInclusive: 13, length: 5, token: 196676 },
			{ startOffsetInclusive: 18, length: 1, token: 32836 },
			{ startOffsetInclusive: 19, length: 12, token: 557124 },
			{ startOffsetInclusive: 31, length: 4, token: 32836 },
			{ startOffsetInclusive: 35, length: 1, token: 32836 },
			{ startOffsetInclusive: 36, length: 11, token: 196676 },
			{ startOffsetInclusive: 47, length: 3, token: 32836 },
			{ startOffsetInclusive: 50, length: 2, token: 32836 },
			{ startOffsetInclusive: 52, length: 7, token: 327748 },
			{ startOffsetInclusive: 59, length: 1, token: 98372 },
			{ startOffsetInclusive: 60, length: 1, token: 32836 },
			{ startOffsetInclusive: 61, length: 19, token: 557124 },
			{ startOffsetInclusive: 80, length: 1, token: 32836 },
			{ startOffsetInclusive: 81, length: 2, token: 32836 },
			{ startOffsetInclusive: 83, length: 7, token: 32836 },
			{ startOffsetInclusive: 90, length: 4, token: 32836 },
			{ startOffsetInclusive: 94, length: 3, token: 32836 }
		], TokenQuality.Accurate);

		const tokens = store.getTokensInRange(36, 59);
		assert.deepStrictEqual(tokens, [
			{ token: 196676, startOffsetInclusive: 36, length: 11 },
			{ token: 32836, startOffsetInclusive: 47, length: 3 },
			{ token: 32836, startOffsetInclusive: 50, length: 2 },
			{ token: 327748, startOffsetInclusive: 52, length: 7 }
		]);
	});
	test('Realistic scenario four', () => {
		// inspired by this snippet, with the update adding a new line after the return true;
		// function x() {
		//     return true;
		// }

		// class Y {
		//     private z = false;
		// }

		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 8, token: 196676 },
			{ startOffsetInclusive: 8, length: 1, token: 32836 },
			{ startOffsetInclusive: 9, length: 1, token: 524356 },
			{ startOffsetInclusive: 10, length: 6, token: 32836 },
			{ startOffsetInclusive: 16, length: 1, token: 32836 },
			{ startOffsetInclusive: 17, length: 6, token: 589892 },
			{ startOffsetInclusive: 23, length: 1, token: 32836 },
			{ startOffsetInclusive: 24, length: 4, token: 196676 },
			{ startOffsetInclusive: 28, length: 1, token: 32836 },
			{ startOffsetInclusive: 29, length: 2, token: 32836 },
			{ startOffsetInclusive: 31, length: 3, token: 32836 }, // This is the closing curly brace + newline chars
			{ startOffsetInclusive: 34, length: 2, token: 32836 },
			{ startOffsetInclusive: 36, length: 5, token: 196676 },
			{ startOffsetInclusive: 41, length: 1, token: 32836 },
			{ startOffsetInclusive: 42, length: 1, token: 557124 },
			{ startOffsetInclusive: 43, length: 4, token: 32836 },
			{ startOffsetInclusive: 47, length: 1, token: 32836 },
			{ startOffsetInclusive: 48, length: 7, token: 196676 },
			{ startOffsetInclusive: 55, length: 1, token: 32836 },
			{ startOffsetInclusive: 56, length: 1, token: 327748 },
			{ startOffsetInclusive: 57, length: 1, token: 32836 },
			{ startOffsetInclusive: 58, length: 1, token: 98372 },
			{ startOffsetInclusive: 59, length: 1, token: 32836 },
			{ startOffsetInclusive: 60, length: 5, token: 196676 },
			{ startOffsetInclusive: 65, length: 1, token: 32836 },
			{ startOffsetInclusive: 66, length: 2, token: 32836 },
			{ startOffsetInclusive: 68, length: 1, token: 32836 }
		], TokenQuality.Accurate);
		const tokens0 = store.getTokensInRange(36, 59);
		assert.deepStrictEqual(tokens0, [
			{ startOffsetInclusive: 36, length: 5, token: 196676 },
			{ startOffsetInclusive: 41, length: 1, token: 32836 },
			{ startOffsetInclusive: 42, length: 1, token: 557124 },
			{ startOffsetInclusive: 43, length: 4, token: 32836 },
			{ startOffsetInclusive: 47, length: 1, token: 32836 },
			{ startOffsetInclusive: 48, length: 7, token: 196676 },
			{ startOffsetInclusive: 55, length: 1, token: 32836 },
			{ startOffsetInclusive: 56, length: 1, token: 327748 },
			{ startOffsetInclusive: 57, length: 1, token: 32836 },
			{ startOffsetInclusive: 58, length: 1, token: 98372 }
		]);

		// insert a tab + new line after `return true;` (like hitting enter after the ;)
		store.update(32, [
			{ startOffsetInclusive: 0, length: 8, token: 196676 },
			{ startOffsetInclusive: 8, length: 1, token: 32836 },
			{ startOffsetInclusive: 9, length: 1, token: 524356 },
			{ startOffsetInclusive: 10, length: 6, token: 32836 },
			{ startOffsetInclusive: 16, length: 1, token: 32836 },
			{ startOffsetInclusive: 17, length: 6, token: 589892 },
			{ startOffsetInclusive: 23, length: 1, token: 32836 },
			{ startOffsetInclusive: 24, length: 4, token: 196676 },
			{ startOffsetInclusive: 28, length: 1, token: 32836 },
			{ startOffsetInclusive: 29, length: 2, token: 32836 },
			{ startOffsetInclusive: 31, length: 3, token: 32836 }, // This is the new line, which consists of 3 characters: \t\r\n
			{ startOffsetInclusive: 34, length: 2, token: 32836 }
		], TokenQuality.Accurate);

		const tokens1 = store.getTokensInRange(36, 59);
		assert.deepStrictEqual(tokens1, [
			{ startOffsetInclusive: 36, length: 2, token: 32836 },
			{ startOffsetInclusive: 38, length: 2, token: 32836 },
			{ startOffsetInclusive: 40, length: 5, token: 196676 },
			{ startOffsetInclusive: 45, length: 1, token: 32836 },
			{ startOffsetInclusive: 46, length: 1, token: 557124 },
			{ startOffsetInclusive: 47, length: 4, token: 32836 },
			{ startOffsetInclusive: 51, length: 1, token: 32836 },
			{ startOffsetInclusive: 52, length: 7, token: 196676 }
		]);
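
		// The update window covered offsets 0-32 but the new tokens span 0-36,
		// so the document grew by 4: the { startOffsetInclusive: 36, length: 2 }
		// entry is the surviving tail of the old 3-char token at offset 31, and
		// every later token shifted right by 4.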

		// Delete the tab character
		store.update(37, [
			{ startOffsetInclusive: 0, length: 8, token: 196676 },
			{ startOffsetInclusive: 8, length: 1, token: 32836 },
			{ startOffsetInclusive: 9, length: 1, token: 524356 },
			{ startOffsetInclusive: 10, length: 6, token: 32836 },
			{ startOffsetInclusive: 16, length: 1, token: 32836 },
			{ startOffsetInclusive: 17, length: 6, token: 589892 },
			{ startOffsetInclusive: 23, length: 1, token: 32836 },
			{ startOffsetInclusive: 24, length: 4, token: 196676 },
			{ startOffsetInclusive: 28, length: 1, token: 32836 },
			{ startOffsetInclusive: 29, length: 2, token: 32836 },
			{ startOffsetInclusive: 31, length: 2, token: 32836 }, // This is the changed line: \t\r\n to \r\n
			{ startOffsetInclusive: 33, length: 3, token: 32836 }
		], TokenQuality.Accurate);

		const tokens2 = store.getTokensInRange(36, 59);
		assert.deepStrictEqual(tokens2, [
			{ startOffsetInclusive: 36, length: 1, token: 32836 },
			{ startOffsetInclusive: 37, length: 2, token: 32836 },
			{ startOffsetInclusive: 39, length: 5, token: 196676 },
			{ startOffsetInclusive: 44, length: 1, token: 32836 },
			{ startOffsetInclusive: 45, length: 1, token: 557124 },
			{ startOffsetInclusive: 46, length: 4, token: 32836 },
			{ startOffsetInclusive: 50, length: 1, token: 32836 },
			{ startOffsetInclusive: 51, length: 7, token: 196676 },
			{ startOffsetInclusive: 58, length: 1, token: 32836 }
		]);

	});

	test('Insert new line and remove tabs (split tokens)', () => {
		// class A {
		//     a() {
		//     }
		// }
		//
		// interface I {
		//
		// }

		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 5, token: 196676 },
			{ startOffsetInclusive: 5, length: 1, token: 32836 },
			{ startOffsetInclusive: 6, length: 1, token: 557124 },
			{ startOffsetInclusive: 7, length: 3, token: 32836 },
			{ startOffsetInclusive: 10, length: 1, token: 32836 },
			{ startOffsetInclusive: 11, length: 1, token: 524356 },
			{ startOffsetInclusive: 12, length: 5, token: 32836 },
			{ startOffsetInclusive: 17, length: 3, token: 32836 }, // This is the closing curly brace line of a()
			{ startOffsetInclusive: 20, length: 2, token: 32836 },
			{ startOffsetInclusive: 22, length: 1, token: 32836 },
			{ startOffsetInclusive: 23, length: 9, token: 196676 },
			{ startOffsetInclusive: 32, length: 1, token: 32836 },
			{ startOffsetInclusive: 33, length: 1, token: 557124 },
			{ startOffsetInclusive: 34, length: 3, token: 32836 },
			{ startOffsetInclusive: 37, length: 1, token: 32836 },
			{ startOffsetInclusive: 38, length: 1, token: 32836 }
		], TokenQuality.Accurate);

		const tokens0 = store.getTokensInRange(23, 39);
		assert.deepStrictEqual(tokens0, [
			{ startOffsetInclusive: 23, length: 9, token: 196676 },
			{ startOffsetInclusive: 32, length: 1, token: 32836 },
			{ startOffsetInclusive: 33, length: 1, token: 557124 },
			{ startOffsetInclusive: 34, length: 3, token: 32836 },
			{ startOffsetInclusive: 37, length: 1, token: 32836 },
			{ startOffsetInclusive: 38, length: 1, token: 32836 }
		]);

		// Insert a new line after a() { }, which will add 2 tabs
		store.update(21, [
			{ startOffsetInclusive: 0, length: 5, token: 196676 },
			{ startOffsetInclusive: 5, length: 1, token: 32836 },
			{ startOffsetInclusive: 6, length: 1, token: 557124 },
			{ startOffsetInclusive: 7, length: 3, token: 32836 },
			{ startOffsetInclusive: 10, length: 1, token: 32836 },
			{ startOffsetInclusive: 11, length: 1, token: 524356 },
			{ startOffsetInclusive: 12, length: 5, token: 32836 },
			{ startOffsetInclusive: 17, length: 3, token: 32836 },
			{ startOffsetInclusive: 20, length: 3, token: 32836 },
			{ startOffsetInclusive: 23, length: 1, token: 32836 }
		], TokenQuality.Accurate);

		const tokens1 = store.getTokensInRange(26, 42);
		assert.deepStrictEqual(tokens1, [
			{ startOffsetInclusive: 26, length: 9, token: 196676 },
			{ startOffsetInclusive: 35, length: 1, token: 32836 },
			{ startOffsetInclusive: 36, length: 1, token: 557124 },
			{ startOffsetInclusive: 37, length: 3, token: 32836 },
			{ startOffsetInclusive: 40, length: 1, token: 32836 },
			{ startOffsetInclusive: 41, length: 1, token: 32836 }
		]);
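
		// Net effect of the edit: 3 characters were inserted before the
		// interface block, so its tokens moved from offsets 23.. to 26.. with
		// their lengths unchanged.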

		// Insert another new line at the cursor, which will also cause the 2 tabs to be deleted
		store.update(24, [
			{ startOffsetInclusive: 0, length: 5, token: 196676 },
			{ startOffsetInclusive: 5, length: 1, token: 32836 },
			{ startOffsetInclusive: 6, length: 1, token: 557124 },
			{ startOffsetInclusive: 7, length: 3, token: 32836 },
			{ startOffsetInclusive: 10, length: 1, token: 32836 },
			{ startOffsetInclusive: 11, length: 1, token: 524356 },
			{ startOffsetInclusive: 12, length: 5, token: 32836 },
			{ startOffsetInclusive: 17, length: 3, token: 32836 },
			{ startOffsetInclusive: 20, length: 1, token: 32836 },
			{ startOffsetInclusive: 21, length: 2, token: 32836 },
			{ startOffsetInclusive: 23, length: 1, token: 32836 }
		], TokenQuality.Accurate);

		const tokens2 = store.getTokensInRange(26, 42);
		assert.deepStrictEqual(tokens2, [
			{ startOffsetInclusive: 26, length: 9, token: 196676 },
			{ startOffsetInclusive: 35, length: 1, token: 32836 },
			{ startOffsetInclusive: 36, length: 1, token: 557124 },
			{ startOffsetInclusive: 37, length: 3, token: 32836 },
			{ startOffsetInclusive: 40, length: 1, token: 32836 },
			{ startOffsetInclusive: 41, length: 1, token: 32836 }
		]);
	});
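
	// In the delete tests below the call shape is delete(deleteLength, offset);
	// that argument order is inferred from the expected token positions, not
	// from the TokenStore source.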

	test('delete removes tokens in the middle', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 3, token: 3 }
		], TokenQuality.Accurate);
		store.delete(3, 3); // delete 3 chars starting at offset 3
		const tokens = store.getTokensInRange(0, 9);
		assert.deepStrictEqual(tokens, [
			{ startOffsetInclusive: 0, length: 3, token: 1 },
			{ startOffsetInclusive: 3, length: 3, token: 3 }
		]);
	});

	test('delete merges partially affected token', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 5, token: 1 },
			{ startOffsetInclusive: 5, length: 5, token: 2 }
		], TokenQuality.Accurate);
		store.delete(3, 4); // delete 3 chars at offset 4: one from token 1, two from token 2
		const tokens = store.getTokensInRange(0, 10);
		assert.deepStrictEqual(tokens, [
			{ startOffsetInclusive: 0, length: 4, token: 1 },
			// token 2 now starts one char earlier and is two chars shorter
			{ startOffsetInclusive: 4, length: 3, token: 2 }
		]);
	});

	test('replace a token with a slightly larger token', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 5, token: 1 },
			{ startOffsetInclusive: 5, length: 1, token: 2 },
			{ startOffsetInclusive: 6, length: 1, token: 2 },
			{ startOffsetInclusive: 7, length: 17, token: 2 },
			{ startOffsetInclusive: 24, length: 1, token: 2 },
			{ startOffsetInclusive: 25, length: 5, token: 2 },
			{ startOffsetInclusive: 30, length: 1, token: 2 },
			{ startOffsetInclusive: 31, length: 1, token: 2 },
			{ startOffsetInclusive: 32, length: 5, token: 2 }
		], TokenQuality.Accurate);
		// replace the 17-char token at offset 7 with a single 19-char token; later tokens shift right by 2
		store.update(17, [{ startOffsetInclusive: 7, length: 19, token: 0 }], TokenQuality.Accurate);
		const tokens = store.getTokensInRange(0, 39);
		assert.deepStrictEqual(tokens, [
			{ startOffsetInclusive: 0, length: 5, token: 1 },
			{ startOffsetInclusive: 5, length: 1, token: 2 },
			{ startOffsetInclusive: 6, length: 1, token: 2 },
			{ startOffsetInclusive: 7, length: 19, token: 0 },
			{ startOffsetInclusive: 26, length: 1, token: 2 },
			{ startOffsetInclusive: 27, length: 5, token: 2 },
			{ startOffsetInclusive: 32, length: 1, token: 2 },
			{ startOffsetInclusive: 33, length: 1, token: 2 },
			{ startOffsetInclusive: 34, length: 5, token: 2 }
		]);
	});

	test('replace a character from a large token', () => {
		const store = new TokenStore(textModel);
		store.buildStore([
			{ startOffsetInclusive: 0, length: 2, token: 1 },
			{ startOffsetInclusive: 2, length: 5, token: 2 },
			{ startOffsetInclusive: 7, length: 1, token: 3 }
		], TokenQuality.Accurate);
		store.delete(1, 3);
		const tokens = store.getTokensInRange(0, 7);
		assert.deepStrictEqual(tokens, [
			{ startOffsetInclusive: 0, length: 2, token: 1 },
			{ startOffsetInclusive: 2, length: 1, token: 2 },
			{ startOffsetInclusive: 3, length: 3, token: 2 },
			{ startOffsetInclusive: 6, length: 1, token: 3 }
		]);
	});
});