Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/src/vs/editor/test/common/model/model.line.test.ts
3296 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import assert from 'assert';
7
import { ensureNoDisposablesAreLeakedInTestSuite } from '../../../../base/test/common/utils.js';
8
import { Range } from '../../../common/core/range.js';
9
import { MetadataConsts } from '../../../common/encodedTokenAttributes.js';
10
import { EncodedTokenizationResult, IBackgroundTokenizationStore, IBackgroundTokenizer, IState, ITokenizationSupport, TokenizationRegistry, TokenizationResult } from '../../../common/languages.js';
11
import { ITextModel } from '../../../common/model.js';
12
import { computeIndentLevel } from '../../../common/model/utils.js';
13
import { ContiguousMultilineTokensBuilder } from '../../../common/tokens/contiguousMultilineTokensBuilder.js';
14
import { LineTokens } from '../../../common/tokens/lineTokens.js';
15
import { TestLineToken, TestLineTokenFactory } from '../core/testLineToken.js';
16
import { createTextModel } from '../testTextModel.js';
17
18
/**
 * Describes one edit confined to a single line: the text in
 * [startColumn, endColumn) is replaced with `text`.
 */
interface ILineEdit {
	/** 1-based column where the replaced range starts (inclusive). */
	startColumn: number;
	/** 1-based column where the replaced range ends (exclusive). */
	endColumn: number;
	/** Replacement text; empty string means pure deletion. */
	text: string;
}
23
24
function assertLineTokens(__actual: LineTokens, _expected: TestToken[]): void {
25
const tmp = TestToken.toTokens(_expected);
26
LineTokens.convertToEndOffset(tmp, __actual.getLineContent().length);
27
const expected = TestLineTokenFactory.inflateArr(tmp);
28
const _actual = __actual.inflate();
29
interface ITestToken {
30
endIndex: number;
31
type: string;
32
}
33
const actual: ITestToken[] = [];
34
for (let i = 0, len = _actual.getCount(); i < len; i++) {
35
actual[i] = {
36
endIndex: _actual.getEndOffset(i),
37
type: _actual.getClassName(i)
38
};
39
}
40
const decode = (token: TestLineToken) => {
41
return {
42
endIndex: token.endIndex,
43
type: token.getType()
44
};
45
};
46
assert.deepStrictEqual(actual, expected.map(decode));
47
}
48
49
suite('ModelLine - getIndentLevel', () => {

	ensureNoDisposablesAreLeakedInTestSuite();

	test('getIndentLevel', () => {
		// Each entry is [line text, expected indent level]; all cases use the
		// default tab size of 4. An expected level of -1 marks a line that is
		// empty or whitespace-only. A tab advances the indent to the next
		// multiple of the tab size.
		const cases: [string, number][] = [
			['', -1],
			[' ', -1],
			[' \t', -1],
			['Hello', 0],
			[' Hello', 1],
			['   Hello', 3],
			['\tHello', 4],
			[' \tHello', 4],
			['  \tHello', 4],
			['   \tHello', 4],
			['    \tHello', 8],
			['     \tHello', 8],
			['\t Hello', 5],
			['\t \tHello', 8]
		];
		for (const [text, expected] of cases) {
			assert.strictEqual(computeIndentLevel(text, 4), expected, text);
		}
	});
});
75
76
class TestToken {
77
public readonly startOffset: number;
78
public readonly color: number;
79
80
constructor(startOffset: number, color: number) {
81
this.startOffset = startOffset;
82
this.color = color;
83
}
84
85
public static toTokens(tokens: TestToken[]): Uint32Array;
86
public static toTokens(tokens: TestToken[] | null): Uint32Array | null {
87
if (tokens === null) {
88
return null;
89
}
90
const tokensLen = tokens.length;
91
const result = new Uint32Array((tokensLen << 1));
92
for (let i = 0; i < tokensLen; i++) {
93
const token = tokens[i];
94
result[(i << 1)] = token.startOffset;
95
result[(i << 1) + 1] = (
96
token.color << MetadataConsts.FOREGROUND_OFFSET
97
) >>> 0;
98
}
99
return result;
100
}
101
}
102
103
class ManualTokenizationSupport implements ITokenizationSupport {
104
private readonly tokens = new Map<number, Uint32Array>();
105
private readonly stores = new Set<IBackgroundTokenizationStore>();
106
107
public setLineTokens(lineNumber: number, tokens: Uint32Array): void {
108
const b = new ContiguousMultilineTokensBuilder();
109
b.add(lineNumber, tokens);
110
for (const s of this.stores) {
111
s.setTokens(b.finalize());
112
}
113
}
114
115
getInitialState(): IState {
116
return new LineState(1);
117
}
118
119
tokenize(line: string, hasEOL: boolean, state: IState): TokenizationResult {
120
throw new Error();
121
}
122
123
tokenizeEncoded(line: string, hasEOL: boolean, state: IState): EncodedTokenizationResult {
124
const s = state as LineState;
125
return new EncodedTokenizationResult(this.tokens.get(s.lineNumber)!, new LineState(s.lineNumber + 1));
126
}
127
128
/**
129
* Can be/return undefined if default background tokenization should be used.
130
*/
131
createBackgroundTokenizer?(textModel: ITextModel, store: IBackgroundTokenizationStore): IBackgroundTokenizer | undefined {
132
this.stores.add(store);
133
return {
134
dispose: () => {
135
this.stores.delete(store);
136
},
137
requestTokens(startLineNumber, endLineNumberExclusive) {
138
},
139
};
140
}
141
}
142
143
/**
 * Tokenizer state that just records which line is tokenized next.
 */
class LineState implements IState {
	constructor(public readonly lineNumber: number) { }

	// The state is immutable, so cloning may return the same instance.
	clone(): IState {
		return this;
	}

	// Two states are equal iff they point at the same line.
	equals(other: IState): boolean {
		const that = other as LineState;
		return this.lineNumber === that.lineNumber;
	}
}
152
153
suite('ModelLinesTokens', () => {
154
155
ensureNoDisposablesAreLeakedInTestSuite();
156
157
interface IBufferLineState {
158
text: string;
159
tokens: TestToken[];
160
}
161
162
interface IEdit {
163
range: Range;
164
text: string;
165
}
166
167
function testApplyEdits(initial: IBufferLineState[], edits: IEdit[], expected: IBufferLineState[]): void {
168
const initialText = initial.map(el => el.text).join('\n');
169
170
const s = new ManualTokenizationSupport();
171
const d = TokenizationRegistry.register('test', s);
172
173
const model = createTextModel(initialText, 'test');
174
model.onBeforeAttached();
175
for (let lineIndex = 0; lineIndex < initial.length; lineIndex++) {
176
const lineTokens = initial[lineIndex].tokens;
177
const lineTextLength = model.getLineMaxColumn(lineIndex + 1) - 1;
178
const tokens = TestToken.toTokens(lineTokens);
179
LineTokens.convertToEndOffset(tokens, lineTextLength);
180
s.setLineTokens(lineIndex + 1, tokens);
181
}
182
183
model.applyEdits(edits.map((ed) => ({
184
identifier: null,
185
range: ed.range,
186
text: ed.text,
187
forceMoveMarkers: false
188
})));
189
190
for (let lineIndex = 0; lineIndex < expected.length; lineIndex++) {
191
const actualLine = model.getLineContent(lineIndex + 1);
192
const actualTokens = model.tokenization.getLineTokens(lineIndex + 1);
193
assert.strictEqual(actualLine, expected[lineIndex].text);
194
assertLineTokens(actualTokens, expected[lineIndex].tokens);
195
}
196
197
model.dispose();
198
d.dispose();
199
}
200
201
test('single delete 1', () => {
202
testApplyEdits(
203
[{
204
text: 'hello world',
205
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
206
}],
207
[{ range: new Range(1, 1, 1, 2), text: '' }],
208
[{
209
text: 'ello world',
210
tokens: [new TestToken(0, 1), new TestToken(4, 2), new TestToken(5, 3)]
211
}]
212
);
213
});
214
215
test('single delete 2', () => {
216
testApplyEdits(
217
[{
218
text: 'helloworld',
219
tokens: [new TestToken(0, 1), new TestToken(5, 2)]
220
}],
221
[{ range: new Range(1, 3, 1, 8), text: '' }],
222
[{
223
text: 'herld',
224
tokens: [new TestToken(0, 1), new TestToken(2, 2)]
225
}]
226
);
227
});
228
229
test('single delete 3', () => {
230
testApplyEdits(
231
[{
232
text: 'hello world',
233
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
234
}],
235
[{ range: new Range(1, 1, 1, 6), text: '' }],
236
[{
237
text: ' world',
238
tokens: [new TestToken(0, 2), new TestToken(1, 3)]
239
}]
240
);
241
});
242
243
test('single delete 4', () => {
244
testApplyEdits(
245
[{
246
text: 'hello world',
247
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
248
}],
249
[{ range: new Range(1, 2, 1, 7), text: '' }],
250
[{
251
text: 'hworld',
252
tokens: [new TestToken(0, 1), new TestToken(1, 3)]
253
}]
254
);
255
});
256
257
test('single delete 5', () => {
258
testApplyEdits(
259
[{
260
text: 'hello world',
261
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
262
}],
263
[{ range: new Range(1, 1, 1, 12), text: '' }],
264
[{
265
text: '',
266
tokens: [new TestToken(0, 1)]
267
}]
268
);
269
});
270
271
test('multi delete 6', () => {
272
testApplyEdits(
273
[{
274
text: 'hello world',
275
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
276
}, {
277
text: 'hello world',
278
tokens: [new TestToken(0, 4), new TestToken(5, 5), new TestToken(6, 6)]
279
}, {
280
text: 'hello world',
281
tokens: [new TestToken(0, 7), new TestToken(5, 8), new TestToken(6, 9)]
282
}],
283
[{ range: new Range(1, 6, 3, 6), text: '' }],
284
[{
285
text: 'hello world',
286
tokens: [new TestToken(0, 1), new TestToken(5, 8), new TestToken(6, 9)]
287
}]
288
);
289
});
290
291
test('multi delete 7', () => {
292
testApplyEdits(
293
[{
294
text: 'hello world',
295
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
296
}, {
297
text: 'hello world',
298
tokens: [new TestToken(0, 4), new TestToken(5, 5), new TestToken(6, 6)]
299
}, {
300
text: 'hello world',
301
tokens: [new TestToken(0, 7), new TestToken(5, 8), new TestToken(6, 9)]
302
}],
303
[{ range: new Range(1, 12, 3, 12), text: '' }],
304
[{
305
text: 'hello world',
306
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
307
}]
308
);
309
});
310
311
test('multi delete 8', () => {
312
testApplyEdits(
313
[{
314
text: 'hello world',
315
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
316
}, {
317
text: 'hello world',
318
tokens: [new TestToken(0, 4), new TestToken(5, 5), new TestToken(6, 6)]
319
}, {
320
text: 'hello world',
321
tokens: [new TestToken(0, 7), new TestToken(5, 8), new TestToken(6, 9)]
322
}],
323
[{ range: new Range(1, 1, 3, 1), text: '' }],
324
[{
325
text: 'hello world',
326
tokens: [new TestToken(0, 7), new TestToken(5, 8), new TestToken(6, 9)]
327
}]
328
);
329
});
330
331
test('multi delete 9', () => {
332
testApplyEdits(
333
[{
334
text: 'hello world',
335
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
336
}, {
337
text: 'hello world',
338
tokens: [new TestToken(0, 4), new TestToken(5, 5), new TestToken(6, 6)]
339
}, {
340
text: 'hello world',
341
tokens: [new TestToken(0, 7), new TestToken(5, 8), new TestToken(6, 9)]
342
}],
343
[{ range: new Range(1, 12, 3, 1), text: '' }],
344
[{
345
text: 'hello worldhello world',
346
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3), new TestToken(11, 7), new TestToken(16, 8), new TestToken(17, 9)]
347
}]
348
);
349
});
350
351
test('single insert 1', () => {
352
testApplyEdits(
353
[{
354
text: 'hello world',
355
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
356
}],
357
[{ range: new Range(1, 1, 1, 1), text: 'xx' }],
358
[{
359
text: 'xxhello world',
360
tokens: [new TestToken(0, 1), new TestToken(7, 2), new TestToken(8, 3)]
361
}]
362
);
363
});
364
365
test('single insert 2', () => {
366
testApplyEdits(
367
[{
368
text: 'hello world',
369
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
370
}],
371
[{ range: new Range(1, 2, 1, 2), text: 'xx' }],
372
[{
373
text: 'hxxello world',
374
tokens: [new TestToken(0, 1), new TestToken(7, 2), new TestToken(8, 3)]
375
}]
376
);
377
});
378
379
test('single insert 3', () => {
380
testApplyEdits(
381
[{
382
text: 'hello world',
383
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
384
}],
385
[{ range: new Range(1, 6, 1, 6), text: 'xx' }],
386
[{
387
text: 'helloxx world',
388
tokens: [new TestToken(0, 1), new TestToken(7, 2), new TestToken(8, 3)]
389
}]
390
);
391
});
392
393
test('single insert 4', () => {
394
testApplyEdits(
395
[{
396
text: 'hello world',
397
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
398
}],
399
[{ range: new Range(1, 7, 1, 7), text: 'xx' }],
400
[{
401
text: 'hello xxworld',
402
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(8, 3)]
403
}]
404
);
405
});
406
407
test('single insert 5', () => {
408
testApplyEdits(
409
[{
410
text: 'hello world',
411
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
412
}],
413
[{ range: new Range(1, 12, 1, 12), text: 'xx' }],
414
[{
415
text: 'hello worldxx',
416
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
417
}]
418
);
419
});
420
421
test('multi insert 6', () => {
422
testApplyEdits(
423
[{
424
text: 'hello world',
425
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
426
}],
427
[{ range: new Range(1, 1, 1, 1), text: '\n' }],
428
[{
429
text: '',
430
tokens: [new TestToken(0, 1)]
431
}, {
432
text: 'hello world',
433
tokens: [new TestToken(0, 1)]
434
}]
435
);
436
});
437
438
test('multi insert 7', () => {
439
testApplyEdits(
440
[{
441
text: 'hello world',
442
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
443
}],
444
[{ range: new Range(1, 12, 1, 12), text: '\n' }],
445
[{
446
text: 'hello world',
447
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
448
}, {
449
text: '',
450
tokens: [new TestToken(0, 1)]
451
}]
452
);
453
});
454
455
test('multi insert 8', () => {
456
testApplyEdits(
457
[{
458
text: 'hello world',
459
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
460
}],
461
[{ range: new Range(1, 7, 1, 7), text: '\n' }],
462
[{
463
text: 'hello ',
464
tokens: [new TestToken(0, 1), new TestToken(5, 2)]
465
}, {
466
text: 'world',
467
tokens: [new TestToken(0, 1)]
468
}]
469
);
470
});
471
472
test('multi insert 9', () => {
473
testApplyEdits(
474
[{
475
text: 'hello world',
476
tokens: [new TestToken(0, 1), new TestToken(5, 2), new TestToken(6, 3)]
477
}, {
478
text: 'hello world',
479
tokens: [new TestToken(0, 4), new TestToken(5, 5), new TestToken(6, 6)]
480
}],
481
[{ range: new Range(1, 7, 1, 7), text: 'xx\nyy' }],
482
[{
483
text: 'hello xx',
484
tokens: [new TestToken(0, 1), new TestToken(5, 2)]
485
}, {
486
text: 'yyworld',
487
tokens: [new TestToken(0, 1)]
488
}, {
489
text: 'hello world',
490
tokens: [new TestToken(0, 4), new TestToken(5, 5), new TestToken(6, 6)]
491
}]
492
);
493
});
494
495
function testLineEditTokens(initialText: string, initialTokens: TestToken[], edits: ILineEdit[], expectedText: string, expectedTokens: TestToken[]): void {
496
testApplyEdits(
497
[{
498
text: initialText,
499
tokens: initialTokens
500
}],
501
edits.map((ed) => ({
502
range: new Range(1, ed.startColumn, 1, ed.endColumn),
503
text: ed.text
504
})),
505
[{
506
text: expectedText,
507
tokens: expectedTokens
508
}]
509
);
510
}
511
512
test('insertion on empty line', () => {
513
const s = new ManualTokenizationSupport();
514
const d = TokenizationRegistry.register('test', s);
515
516
const model = createTextModel('some text', 'test');
517
const tokens = TestToken.toTokens([new TestToken(0, 1)]);
518
LineTokens.convertToEndOffset(tokens, model.getLineMaxColumn(1) - 1);
519
s.setLineTokens(1, tokens);
520
521
model.applyEdits([{
522
range: new Range(1, 1, 1, 10),
523
text: ''
524
}]);
525
526
s.setLineTokens(1, new Uint32Array(0));
527
528
model.applyEdits([{
529
range: new Range(1, 1, 1, 1),
530
text: 'a'
531
}]);
532
533
const actualTokens = model.tokenization.getLineTokens(1);
534
assertLineTokens(actualTokens, [new TestToken(0, 1)]);
535
536
model.dispose();
537
d.dispose();
538
});
539
540
test('updates tokens on insertion 1', () => {
541
testLineEditTokens(
542
'abcd efgh',
543
[
544
new TestToken(0, 1),
545
new TestToken(4, 2),
546
new TestToken(5, 3)
547
],
548
[{
549
startColumn: 1,
550
endColumn: 1,
551
text: 'a',
552
}],
553
'aabcd efgh',
554
[
555
new TestToken(0, 1),
556
new TestToken(5, 2),
557
new TestToken(6, 3)
558
]
559
);
560
});
561
562
test('updates tokens on insertion 2', () => {
563
testLineEditTokens(
564
'aabcd efgh',
565
[
566
new TestToken(0, 1),
567
new TestToken(5, 2),
568
new TestToken(6, 3)
569
],
570
[{
571
startColumn: 2,
572
endColumn: 2,
573
text: 'x',
574
}],
575
'axabcd efgh',
576
[
577
new TestToken(0, 1),
578
new TestToken(6, 2),
579
new TestToken(7, 3)
580
]
581
);
582
});
583
584
test('updates tokens on insertion 3', () => {
585
testLineEditTokens(
586
'axabcd efgh',
587
[
588
new TestToken(0, 1),
589
new TestToken(6, 2),
590
new TestToken(7, 3)
591
],
592
[{
593
startColumn: 3,
594
endColumn: 3,
595
text: 'stu',
596
}],
597
'axstuabcd efgh',
598
[
599
new TestToken(0, 1),
600
new TestToken(9, 2),
601
new TestToken(10, 3)
602
]
603
);
604
});
605
606
test('updates tokens on insertion 4', () => {
607
testLineEditTokens(
608
'axstuabcd efgh',
609
[
610
new TestToken(0, 1),
611
new TestToken(9, 2),
612
new TestToken(10, 3)
613
],
614
[{
615
startColumn: 10,
616
endColumn: 10,
617
text: '\t',
618
}],
619
'axstuabcd\t efgh',
620
[
621
new TestToken(0, 1),
622
new TestToken(10, 2),
623
new TestToken(11, 3)
624
]
625
);
626
});
627
628
test('updates tokens on insertion 5', () => {
629
testLineEditTokens(
630
'axstuabcd\t efgh',
631
[
632
new TestToken(0, 1),
633
new TestToken(10, 2),
634
new TestToken(11, 3)
635
],
636
[{
637
startColumn: 12,
638
endColumn: 12,
639
text: 'dd',
640
}],
641
'axstuabcd\t ddefgh',
642
[
643
new TestToken(0, 1),
644
new TestToken(10, 2),
645
new TestToken(13, 3)
646
]
647
);
648
});
649
650
test('updates tokens on insertion 6', () => {
651
testLineEditTokens(
652
'axstuabcd\t ddefgh',
653
[
654
new TestToken(0, 1),
655
new TestToken(10, 2),
656
new TestToken(13, 3)
657
],
658
[{
659
startColumn: 18,
660
endColumn: 18,
661
text: 'xyz',
662
}],
663
'axstuabcd\t ddefghxyz',
664
[
665
new TestToken(0, 1),
666
new TestToken(10, 2),
667
new TestToken(13, 3)
668
]
669
);
670
});
671
672
test('updates tokens on insertion 7', () => {
673
testLineEditTokens(
674
'axstuabcd\t ddefghxyz',
675
[
676
new TestToken(0, 1),
677
new TestToken(10, 2),
678
new TestToken(13, 3)
679
],
680
[{
681
startColumn: 1,
682
endColumn: 1,
683
text: 'x',
684
}],
685
'xaxstuabcd\t ddefghxyz',
686
[
687
new TestToken(0, 1),
688
new TestToken(11, 2),
689
new TestToken(14, 3)
690
]
691
);
692
});
693
694
test('updates tokens on insertion 8', () => {
695
testLineEditTokens(
696
'xaxstuabcd\t ddefghxyz',
697
[
698
new TestToken(0, 1),
699
new TestToken(11, 2),
700
new TestToken(14, 3)
701
],
702
[{
703
startColumn: 22,
704
endColumn: 22,
705
text: 'x',
706
}],
707
'xaxstuabcd\t ddefghxyzx',
708
[
709
new TestToken(0, 1),
710
new TestToken(11, 2),
711
new TestToken(14, 3)
712
]
713
);
714
});
715
716
test('updates tokens on insertion 9', () => {
717
testLineEditTokens(
718
'xaxstuabcd\t ddefghxyzx',
719
[
720
new TestToken(0, 1),
721
new TestToken(11, 2),
722
new TestToken(14, 3)
723
],
724
[{
725
startColumn: 2,
726
endColumn: 2,
727
text: '',
728
}],
729
'xaxstuabcd\t ddefghxyzx',
730
[
731
new TestToken(0, 1),
732
new TestToken(11, 2),
733
new TestToken(14, 3)
734
]
735
);
736
});
737
738
test('updates tokens on insertion 10', () => {
739
testLineEditTokens(
740
'',
741
[],
742
[{
743
startColumn: 1,
744
endColumn: 1,
745
text: 'a',
746
}],
747
'a',
748
[
749
new TestToken(0, 1)
750
]
751
);
752
});
753
754
test('delete second token 2', () => {
755
testLineEditTokens(
756
'abcdefghij',
757
[
758
new TestToken(0, 1),
759
new TestToken(3, 2),
760
new TestToken(6, 3)
761
],
762
[{
763
startColumn: 4,
764
endColumn: 7,
765
text: '',
766
}],
767
'abcghij',
768
[
769
new TestToken(0, 1),
770
new TestToken(3, 3)
771
]
772
);
773
});
774
775
test('insert right before second token', () => {
776
testLineEditTokens(
777
'abcdefghij',
778
[
779
new TestToken(0, 1),
780
new TestToken(3, 2),
781
new TestToken(6, 3)
782
],
783
[{
784
startColumn: 4,
785
endColumn: 4,
786
text: 'hello',
787
}],
788
'abchellodefghij',
789
[
790
new TestToken(0, 1),
791
new TestToken(8, 2),
792
new TestToken(11, 3)
793
]
794
);
795
});
796
797
test('delete first char', () => {
798
testLineEditTokens(
799
'abcd efgh',
800
[
801
new TestToken(0, 1),
802
new TestToken(4, 2),
803
new TestToken(5, 3)
804
],
805
[{
806
startColumn: 1,
807
endColumn: 2,
808
text: '',
809
}],
810
'bcd efgh',
811
[
812
new TestToken(0, 1),
813
new TestToken(3, 2),
814
new TestToken(4, 3)
815
]
816
);
817
});
818
819
test('delete 2nd and 3rd chars', () => {
820
testLineEditTokens(
821
'abcd efgh',
822
[
823
new TestToken(0, 1),
824
new TestToken(4, 2),
825
new TestToken(5, 3)
826
],
827
[{
828
startColumn: 2,
829
endColumn: 4,
830
text: '',
831
}],
832
'ad efgh',
833
[
834
new TestToken(0, 1),
835
new TestToken(2, 2),
836
new TestToken(3, 3)
837
]
838
);
839
});
840
841
test('delete first token', () => {
842
testLineEditTokens(
843
'abcd efgh',
844
[
845
new TestToken(0, 1),
846
new TestToken(4, 2),
847
new TestToken(5, 3)
848
],
849
[{
850
startColumn: 1,
851
endColumn: 5,
852
text: '',
853
}],
854
' efgh',
855
[
856
new TestToken(0, 2),
857
new TestToken(1, 3)
858
]
859
);
860
});
861
862
test('delete second token', () => {
863
testLineEditTokens(
864
'abcd efgh',
865
[
866
new TestToken(0, 1),
867
new TestToken(4, 2),
868
new TestToken(5, 3)
869
],
870
[{
871
startColumn: 5,
872
endColumn: 6,
873
text: '',
874
}],
875
'abcdefgh',
876
[
877
new TestToken(0, 1),
878
new TestToken(4, 3)
879
]
880
);
881
});
882
883
test('delete second token + a bit of the third one', () => {
884
testLineEditTokens(
885
'abcd efgh',
886
[
887
new TestToken(0, 1),
888
new TestToken(4, 2),
889
new TestToken(5, 3)
890
],
891
[{
892
startColumn: 5,
893
endColumn: 7,
894
text: '',
895
}],
896
'abcdfgh',
897
[
898
new TestToken(0, 1),
899
new TestToken(4, 3)
900
]
901
);
902
});
903
904
test('delete second and third token', () => {
905
testLineEditTokens(
906
'abcd efgh',
907
[
908
new TestToken(0, 1),
909
new TestToken(4, 2),
910
new TestToken(5, 3)
911
],
912
[{
913
startColumn: 5,
914
endColumn: 10,
915
text: '',
916
}],
917
'abcd',
918
[
919
new TestToken(0, 1)
920
]
921
);
922
});
923
924
test('delete everything', () => {
925
testLineEditTokens(
926
'abcd efgh',
927
[
928
new TestToken(0, 1),
929
new TestToken(4, 2),
930
new TestToken(5, 3)
931
],
932
[{
933
startColumn: 1,
934
endColumn: 10,
935
text: '',
936
}],
937
'',
938
[
939
new TestToken(0, 1)
940
]
941
);
942
});
943
944
test('noop', () => {
945
testLineEditTokens(
946
'abcd efgh',
947
[
948
new TestToken(0, 1),
949
new TestToken(4, 2),
950
new TestToken(5, 3)
951
],
952
[{
953
startColumn: 1,
954
endColumn: 1,
955
text: '',
956
}],
957
'abcd efgh',
958
[
959
new TestToken(0, 1),
960
new TestToken(4, 2),
961
new TestToken(5, 3)
962
]
963
);
964
});
965
966
test('equivalent to deleting first two chars', () => {
967
testLineEditTokens(
968
'abcd efgh',
969
[
970
new TestToken(0, 1),
971
new TestToken(4, 2),
972
new TestToken(5, 3)
973
],
974
[{
975
startColumn: 1,
976
endColumn: 3,
977
text: '',
978
}],
979
'cd efgh',
980
[
981
new TestToken(0, 1),
982
new TestToken(2, 2),
983
new TestToken(3, 3)
984
]
985
);
986
});
987
988
test('equivalent to deleting from 5 to the end', () => {
989
testLineEditTokens(
990
'abcd efgh',
991
[
992
new TestToken(0, 1),
993
new TestToken(4, 2),
994
new TestToken(5, 3)
995
],
996
[{
997
startColumn: 5,
998
endColumn: 10,
999
text: '',
1000
}],
1001
'abcd',
1002
[
1003
new TestToken(0, 1)
1004
]
1005
);
1006
});
1007
1008
test('updates tokens on replace 1', () => {
1009
testLineEditTokens(
1010
'Hello world, ciao',
1011
[
1012
new TestToken(0, 1),
1013
new TestToken(5, 0),
1014
new TestToken(6, 2),
1015
new TestToken(11, 0),
1016
new TestToken(13, 0)
1017
],
1018
[{
1019
startColumn: 1,
1020
endColumn: 6,
1021
text: 'Hi',
1022
}],
1023
'Hi world, ciao',
1024
[
1025
new TestToken(0, 0),
1026
new TestToken(3, 2),
1027
new TestToken(8, 0),
1028
new TestToken(10, 0),
1029
]
1030
);
1031
});
1032
1033
test('updates tokens on replace 2', () => {
1034
testLineEditTokens(
1035
'Hello world, ciao',
1036
[
1037
new TestToken(0, 1),
1038
new TestToken(5, 0),
1039
new TestToken(6, 2),
1040
new TestToken(11, 0),
1041
new TestToken(13, 0),
1042
],
1043
[{
1044
startColumn: 1,
1045
endColumn: 6,
1046
text: 'Hi',
1047
}, {
1048
startColumn: 8,
1049
endColumn: 12,
1050
text: 'my friends',
1051
}],
1052
'Hi wmy friends, ciao',
1053
[
1054
new TestToken(0, 0),
1055
new TestToken(3, 2),
1056
new TestToken(14, 0),
1057
new TestToken(16, 0),
1058
]
1059
);
1060
});
1061
1062
function testLineSplitTokens(initialText: string, initialTokens: TestToken[], splitColumn: number, expectedText1: string, expectedText2: string, expectedTokens: TestToken[]): void {
1063
testApplyEdits(
1064
[{
1065
text: initialText,
1066
tokens: initialTokens
1067
}],
1068
[{
1069
range: new Range(1, splitColumn, 1, splitColumn),
1070
text: '\n'
1071
}],
1072
[{
1073
text: expectedText1,
1074
tokens: expectedTokens
1075
}, {
1076
text: expectedText2,
1077
tokens: [new TestToken(0, 1)]
1078
}]
1079
);
1080
}
1081
1082
test('split at the beginning', () => {
1083
testLineSplitTokens(
1084
'abcd efgh',
1085
[
1086
new TestToken(0, 1),
1087
new TestToken(4, 2),
1088
new TestToken(5, 3)
1089
],
1090
1,
1091
'',
1092
'abcd efgh',
1093
[
1094
new TestToken(0, 1),
1095
]
1096
);
1097
});
1098
1099
test('split at the end', () => {
1100
testLineSplitTokens(
1101
'abcd efgh',
1102
[
1103
new TestToken(0, 1),
1104
new TestToken(4, 2),
1105
new TestToken(5, 3)
1106
],
1107
10,
1108
'abcd efgh',
1109
'',
1110
[
1111
new TestToken(0, 1),
1112
new TestToken(4, 2),
1113
new TestToken(5, 3)
1114
]
1115
);
1116
});
1117
1118
test('split inthe middle 1', () => {
1119
testLineSplitTokens(
1120
'abcd efgh',
1121
[
1122
new TestToken(0, 1),
1123
new TestToken(4, 2),
1124
new TestToken(5, 3)
1125
],
1126
5,
1127
'abcd',
1128
' efgh',
1129
[
1130
new TestToken(0, 1)
1131
]
1132
);
1133
});
1134
1135
test('split inthe middle 2', () => {
1136
testLineSplitTokens(
1137
'abcd efgh',
1138
[
1139
new TestToken(0, 1),
1140
new TestToken(4, 2),
1141
new TestToken(5, 3)
1142
],
1143
6,
1144
'abcd ',
1145
'efgh',
1146
[
1147
new TestToken(0, 1),
1148
new TestToken(4, 2)
1149
]
1150
);
1151
});
1152
1153
function testLineAppendTokens(aText: string, aTokens: TestToken[], bText: string, bTokens: TestToken[], expectedText: string, expectedTokens: TestToken[]): void {
1154
testApplyEdits(
1155
[{
1156
text: aText,
1157
tokens: aTokens
1158
}, {
1159
text: bText,
1160
tokens: bTokens
1161
}],
1162
[{
1163
range: new Range(1, aText.length + 1, 2, 1),
1164
text: ''
1165
}],
1166
[{
1167
text: expectedText,
1168
tokens: expectedTokens
1169
}]
1170
);
1171
}
1172
1173
test('append empty 1', () => {
1174
testLineAppendTokens(
1175
'abcd efgh',
1176
[
1177
new TestToken(0, 1),
1178
new TestToken(4, 2),
1179
new TestToken(5, 3)
1180
],
1181
'',
1182
[],
1183
'abcd efgh',
1184
[
1185
new TestToken(0, 1),
1186
new TestToken(4, 2),
1187
new TestToken(5, 3)
1188
]
1189
);
1190
});
1191
1192
test('append empty 2', () => {
1193
testLineAppendTokens(
1194
'',
1195
[],
1196
'abcd efgh',
1197
[
1198
new TestToken(0, 1),
1199
new TestToken(4, 2),
1200
new TestToken(5, 3)
1201
],
1202
'abcd efgh',
1203
[
1204
new TestToken(0, 1),
1205
new TestToken(4, 2),
1206
new TestToken(5, 3)
1207
]
1208
);
1209
});
1210
1211
test('append 1', () => {
1212
testLineAppendTokens(
1213
'abcd efgh',
1214
[
1215
new TestToken(0, 1),
1216
new TestToken(4, 2),
1217
new TestToken(5, 3)
1218
],
1219
'abcd efgh',
1220
[
1221
new TestToken(0, 4),
1222
new TestToken(4, 5),
1223
new TestToken(5, 6)
1224
],
1225
'abcd efghabcd efgh',
1226
[
1227
new TestToken(0, 1),
1228
new TestToken(4, 2),
1229
new TestToken(5, 3),
1230
new TestToken(9, 4),
1231
new TestToken(13, 5),
1232
new TestToken(14, 6)
1233
]
1234
);
1235
});
1236
1237
test('append 2', () => {
1238
testLineAppendTokens(
1239
'abcd ',
1240
[
1241
new TestToken(0, 1),
1242
new TestToken(4, 2)
1243
],
1244
'efgh',
1245
[
1246
new TestToken(0, 3)
1247
],
1248
'abcd efgh',
1249
[
1250
new TestToken(0, 1),
1251
new TestToken(4, 2),
1252
new TestToken(5, 3)
1253
]
1254
);
1255
});
1256
1257
test('append 3', () => {
1258
testLineAppendTokens(
1259
'abcd',
1260
[
1261
new TestToken(0, 1),
1262
],
1263
' efgh',
1264
[
1265
new TestToken(0, 2),
1266
new TestToken(1, 3)
1267
],
1268
'abcd efgh',
1269
[
1270
new TestToken(0, 1),
1271
new TestToken(4, 2),
1272
new TestToken(5, 3)
1273
]
1274
);
1275
});
1276
});
1277
1278