Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 59 additions & 1 deletion src/encodedTokenAttributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,65 @@
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/

import { FontStyle } from "./theme";
import { FontStyle, StyleAttributes } from "./theme";

/**
* Performance notes (benchmark on large TypeScript file ~3100ms total tokenization):
*
* We tested multiple FontAttribute caching strategies:
* - String key HashMap (current): ~3175ms - uses `${fontFamily}|${fontSize}|${lineHeight}` as key
* - Linear array scan: ~3141ms - iterates through ~5 cached items
* - Numeric key + transition cache: ~3184ms - complex caching with per-instance transitions
* - NOOP (always return this): ~3097ms - theoretical upper bound
*
* Conclusion: FontAttribute overhead is only ~45-80ms (1-2.5% of total time).
* The simple string key HashMap is sufficient - additional optimization complexity
* provides negligible benefit since there are typically only ~5 unique FontAttribute
* combinations in practice.
*/
export class FontAttribute {

	// Interning cache: maps "family|size|lineHeight" string keys to their
	// canonical instance (see the performance notes above — only ~5 unique
	// combinations occur in practice, so a string-keyed Map is sufficient).
	private static readonly _map: Map<string, FontAttribute> = new Map<string, FontAttribute>();

	private static _getKey(fontFamily: string | null, fontSize: number | null, lineHeight: number | null): string {
		return `${fontFamily}|${fontSize}|${lineHeight}`;
	}

	/**
	 * Returns the canonical (cached) instance for the given triple,
	 * creating and caching it on first use.
	 */
	private static _get(fontFamily: string | null, fontSize: number | null, lineHeight: number | null): FontAttribute {
		const key = this._getKey(fontFamily, fontSize, lineHeight);
		let result = this._map.get(key);
		if (!result) {
			result = new FontAttribute(
				fontFamily,
				fontSize,
				lineHeight
			);
			this._map.set(key, result);
		}
		return result;
	}

	/**
	 * Public factory for a FontAttribute.
	 *
	 * Routes through the interning cache so that `from` with equal arguments
	 * always yields the same instance. (Previously this constructed a fresh
	 * object on every call, which bypassed the cache documented above and made
	 * instances from `from` never `===`-equal to those produced by `with`.)
	 */
	public static from(fontFamily: string | null, fontSize: number | null, lineHeight: number | null): FontAttribute {
		return FontAttribute._get(fontFamily, fontSize, lineHeight);
	}

	private constructor(
		public readonly fontFamily: string | null,
		public readonly fontSize: number | null,
		public readonly lineHeight: number | null
	) { }

	/**
	 * Returns a FontAttribute where any font property set (truthy) on
	 * `styleAttributes` overrides this instance's value; unset properties fall
	 * back to this instance. Returns `this` unchanged when `styleAttributes`
	 * is null. The result is always the canonical cached instance.
	 */
	public with(styleAttributes: StyleAttributes | null): FontAttribute {
		if (!styleAttributes) {
			return this;
		}
		return FontAttribute._get(
			styleAttributes.fontFamily || this.fontFamily,
			styleAttributes.fontSize || this.fontSize,
			styleAttributes.lineHeight || this.lineHeight
		);
	}
}

export type EncodedTokenAttributes = number;

Expand Down
31 changes: 18 additions & 13 deletions src/grammar/grammar.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
*--------------------------------------------------------*/

import { DebugFlags } from '../debug';
import { EncodedTokenAttributes, OptionalStandardTokenType, StandardTokenType, toOptionalTokenType } from '../encodedTokenAttributes';
import { EncodedTokenAttributes, FontAttribute, OptionalStandardTokenType, StandardTokenType, toOptionalTokenType } from '../encodedTokenAttributes';
import { IEmbeddedLanguagesMap, IGrammar, IToken, ITokenizeLineResult, ITokenizeLineResult2, ITokenTypeMap, StateStack, IFontInfo } from '../main';
import { createMatchers, Matcher } from '../matcher';
import { disposeOnigString, IOnigLib, OnigScanner, OnigString } from '../onigLib';
Expand Down Expand Up @@ -339,6 +339,7 @@ export class Grammar implements IGrammar, IRuleFactoryHelper, IOnigLib {
defaultStyle.foregroundId,
defaultStyle.backgroundId
);
const fontAttribute = FontAttribute.from(defaultStyle.fontFamily, defaultStyle.fontSize, defaultStyle.lineHeight);

const rootScopeName = this.getRule(this._rootId).getName(
null,
Expand All @@ -350,12 +351,14 @@ export class Grammar implements IGrammar, IRuleFactoryHelper, IOnigLib {
scopeList = AttributedScopeStack.createRootAndLookUpScopeName(
rootScopeName,
defaultMetadata,
fontAttribute,
this
);
} else {
scopeList = AttributedScopeStack.createRoot(
"unknown",
defaultMetadata
defaultMetadata,
fontAttribute
);
}

Expand Down Expand Up @@ -427,16 +430,16 @@ export class AttributedScopeStack {
let scopeNames = namesScopeList?.scopePath ?? null;
for (const frame of contentNameScopesList) {
scopeNames = ScopeStack.push(scopeNames, frame.scopeNames);
current = new AttributedScopeStack(current, scopeNames!, frame.encodedTokenAttributes, null);
current = new AttributedScopeStack(current, scopeNames!, frame.encodedTokenAttributes, null, null);
}
return current;
}

public static createRoot(scopeName: ScopeName, tokenAttributes: EncodedTokenAttributes): AttributedScopeStack {
return new AttributedScopeStack(null, new ScopeStack(null, scopeName), tokenAttributes, null);
public static createRoot(scopeName: ScopeName, tokenAttributes: EncodedTokenAttributes, fontAttribute: FontAttribute): AttributedScopeStack {
return new AttributedScopeStack(null, new ScopeStack(null, scopeName), tokenAttributes, fontAttribute, null);
}

public static createRootAndLookUpScopeName(scopeName: ScopeName, tokenAttributes: EncodedTokenAttributes, grammar: Grammar): AttributedScopeStack {
public static createRootAndLookUpScopeName(scopeName: ScopeName, tokenAttributes: EncodedTokenAttributes, fontAttribute: FontAttribute, grammar: Grammar): AttributedScopeStack {
const rawRootMetadata = grammar.getMetadataForScope(scopeName);
const scopePath = new ScopeStack(null, scopeName);
const rootStyle = grammar.themeProvider.themeMatch(scopePath);
Expand All @@ -446,8 +449,9 @@ export class AttributedScopeStack {
rawRootMetadata,
rootStyle
);
const resolvedFontAttributes = fontAttribute.with(rootStyle);

return new AttributedScopeStack(null, scopePath, resolvedTokenAttributes, rootStyle);
return new AttributedScopeStack(null, scopePath, resolvedTokenAttributes, resolvedFontAttributes, rootStyle);
}

public get scopeName(): ScopeName { return this.scopePath.scopeName; }
Expand All @@ -464,6 +468,7 @@ export class AttributedScopeStack {
public readonly parent: AttributedScopeStack | null,
public readonly scopePath: ScopeStack,
public readonly tokenAttributes: EncodedTokenAttributes,
public readonly fontAttributes: FontAttribute | null,
public readonly styleAttributes: StyleAttributes | null
) {
}
Expand Down Expand Up @@ -566,7 +571,8 @@ export class AttributedScopeStack {
rawMetadata,
scopeThemeMatchResult
);
return new AttributedScopeStack(target, newPath, metadata, scopeThemeMatchResult);
const fontAttributes = target.fontAttributes?.with(scopeThemeMatchResult) ?? null;
return new AttributedScopeStack(target, newPath, metadata, fontAttributes, scopeThemeMatchResult);
}

public getScopeNames(): string[] {
Expand Down Expand Up @@ -1151,14 +1157,13 @@ export class LineFonts {
scopesList: AttributedScopeStack | null,
endIndex: number
): void {
const styleAttributes = scopesList?.styleAttributes;
if (!styleAttributes) {
if (!scopesList?.fontAttributes) {
this._lastIndex = endIndex;
return;
}
const fontFamily = styleAttributes.fontFamily;
const fontSizeMultiplier = styleAttributes.fontSize;
const lineHeightMultiplier = styleAttributes.lineHeight;
const fontFamily = scopesList.fontAttributes.fontFamily;
const fontSizeMultiplier = scopesList.fontAttributes.fontSize;
const lineHeightMultiplier = scopesList.fontAttributes.lineHeight;
if (!fontFamily && !fontSizeMultiplier && !lineHeightMultiplier) {
this._lastIndex = endIndex;
return;
Expand Down
64 changes: 63 additions & 1 deletion src/tests/themes.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ class ThemeInfo {
try {
await tst.evaluate();
assert.deepStrictEqual(tst.actual, tst.expected);
} catch(err) {
} catch (err) {
tst.writeExpected();
throw err;
}
Expand All @@ -122,6 +122,68 @@ class ThemeInfo {

})();

test('Tokenize test1.ts with TypeScript grammar and dark_vs theme', async () => {
	await testTokenizationTime('test1.ts');
});

test('Tokenize test2.ts with TypeScript grammar and dark_vs theme', async function () {
	// test2.ts is large; disable mocha's per-test timeout for it.
	this.timeout(0);
	await testTokenizationTime('test2.ts');
});

/**
 * Tokenizes every line of the given fixture file with the TypeScript grammar
 * under the dark_vs theme, asserts that all lines were tokenized, and logs
 * the wall-clock tokenization time in milliseconds.
 */
async function testTokenizationTime(file: string) {
	// Load dark_vs theme
	const themeFile = path.join(THEMES_TEST_PATH, 'dark_vs.json');
	const themeContent = fs.readFileSync(themeFile).toString();
	const theme: IRawTheme = JSON.parse(themeContent);

	// Load grammar and language registrations
	const grammarsData: IGrammarRegistration[] = JSON.parse(fs.readFileSync(path.join(THEMES_TEST_PATH, 'grammars.json')).toString('utf8'));
	const languagesData: ILanguageRegistration[] = JSON.parse(fs.readFileSync(path.join(THEMES_TEST_PATH, 'languages.json')).toString('utf8'));

	// Resolve grammar paths relative to the themes test directory
	for (const grammar of grammarsData) {
		grammar.path = path.join(THEMES_TEST_PATH, grammar.path);
	}

	const resolver = new Resolver(grammarsData, languagesData, getOniguruma());
	const registry = new Registry(resolver);
	registry.setTheme(theme);

	// Load TypeScript grammar
	const tsGrammar = await registry.loadGrammar('source.ts');
	assert.ok(tsGrammar, 'TypeScript grammar should be loaded');

	// Read the fixture file and split it into lines
	const testFilePath = path.join(THEMES_TEST_PATH, 'fixtures', file);
	const testFileContent = fs.readFileSync(testFilePath).toString('utf8');
	const lines = testFileContent.split(/\r\n|\r|\n/);

	// Tokenize all lines, threading the rule stack from line to line
	const tokenizeLines = () => {
		let ruleStack = null;
		const tokenizedLines: any[] = [];
		for (let i = 0; i < lines.length; i++) {
			const line = lines[i];
			const result = tsGrammar.tokenizeLine2(line, ruleStack);
			ruleStack = result.ruleStack;
			tokenizedLines.push({
				line: i + 1,
				tokens: result.tokens,
				fonts: result.fonts
			});
		}
		return tokenizedLines;
	};

	// Time the tokenization and verify every line was actually tokenized
	// (the previous version discarded the result without checking it).
	const start = Date.now();
	const tokenized = tokenizeLines();
	const end = Date.now();
	assert.strictEqual(tokenized.length, lines.length, 'every line should be tokenized');

	console.log('Tokenization time:', (end - start));
}

test('Theme matching gives higher priority to deeper matches', () => {
const theme = Theme.createFromRawTheme({
settings: [
Expand Down
Loading