diff --git a/.changeset/chubby-hounds-roll.md b/.changeset/chubby-hounds-roll.md
new file mode 100644
index 0000000000..916f3f123a
--- /dev/null
+++ b/.changeset/chubby-hounds-roll.md
@@ -0,0 +1,5 @@
+---
+'@swisspost/design-system-tokens': patch
+---
+
+Updated tokens package to use TypeScript.
diff --git a/packages/tokens/_build/utils/object-deepmerge.js b/packages/tokens/_build/utils/object-deepmerge.js
deleted file mode 100644
index c08f0fd077..0000000000
--- a/packages/tokens/_build/utils/object-deepmerge.js
+++ /dev/null
@@ -1,12 +0,0 @@
-export default function deepmerge(obj1 = {}, obj2 = {}) {
-  const result = { ...obj1 };
-
-  for (let key in obj2) {
-    result[key] =
-      obj2[key] instanceof Object && obj1[key] instanceof Object
-        ? deepmerge(obj1[key], obj2[key])
-        : obj2[key];
-  }
-
-  return result;
-}
diff --git a/packages/tokens/package.json b/packages/tokens/package.json
index 4d26be20d0..e802e7c8c1 100644
--- a/packages/tokens/package.json
+++ b/packages/tokens/package.json
@@ -6,6 +6,7 @@
   "license": "Apache-2.0",
   "private": false,
   "type": "module",
+  "types": "dist/index.d.ts",
   "repository": {
     "type": "git",
     "url": "https://github.com/swisspost/design-system.git"
@@ -20,21 +21,24 @@
     "linkDirectory": true
   },
   "scripts": {
-    "build": "pnpm clean && node ./build.js && pnpm copy-files",
-    "build:verbose": "node ./build.js --verbosity=verbose",
+    "build": "pnpm clean && tsc -p . && node ./dist/build.js && pnpm copy-files",
+    "build:verbose": "pnpm clean && tsc -p . && node ./dist/build.js --verbosity=verbose && pnpm copy-files",
     "clean": "rimraf dist",
     "lint": "eslint",
     "lint:fix": "eslint --fix",
-    "copy-files": "copyfiles -f package.json README.md CONTRIBUTING.md CHANGELOG.md LICENSE dist"
+    "copy-files": "copyfiles -f package.json README.md CONTRIBUTING.md CHANGELOG.md LICENSE *.scss dist"
   },
   "devDependencies": {
     "@eslint/js": "9.18.0",
     "@tokens-studio/sd-transforms": "1.2.9",
+    "@types/node": "22.10.5",
     "copyfiles": "2.4.1",
     "eslint": "9.18.0",
     "globals": "16.0.0",
     "rimraf": "6.0.1",
-    "style-dictionary": "4.3.0"
+    "style-dictionary": "4.3.0",
+    "typescript": "5.8.3",
+    "typescript-eslint": "8.20.0"
   },
   "keywords": [
     "tokens",
diff --git a/packages/tokens/_build/configs/all.js b/packages/tokens/src/_build/configs/all.ts
similarity index 85%
rename from packages/tokens/_build/configs/all.js
rename to packages/tokens/src/_build/configs/all.ts
index 5b6db049e5..9d171ff76c 100644
--- a/packages/tokens/_build/configs/all.js
+++ b/packages/tokens/src/_build/configs/all.ts
@@ -1,8 +1,8 @@
 import { fileHeader } from 'style-dictionary/utils';
 import { expandTypesMap } from '@tokens-studio/sd-transforms';
-import StyleDictionary from '../style-dictionary.js';
+import StyleDictionary from 'style-dictionary';
 import { getSetName, getSet, getTokenValue, registerConfigMethod } from '../methods.js';
-
+import { ConfigWithMeta } from '_build/types.js';
 /**
  * Registers a config getter method to generate output files for all code relevant tokens in the tokens.json.
 */
@@ -19,7 +19,11 @@ registerConfigMethod((tokenSets, { sourcePath, buildPath }) => {
     include: [`${sourcePath}_temp/source/**/*.json`],
     platforms: {
       scss: {
-        transforms: ['name/kebab', 'swisspost/scss-no-unitless-zero-values', 'swisspost/px-to-rem'],
+        transforms: [
+          'name/kebab',
+          'swisspost/scss-no-unitless-zero-values',
+          'swisspost/px-to-rem',
+        ],
         buildPath,
         expand: {
           include: ['typography'],
@@ -55,8 +59,9 @@ registerConfigMethod((tokenSets, { sourcePath, buildPath }) => {
  */
 StyleDictionary.registerFilter({
   name: 'swisspost/scss-filter',
-  filter: (token, { meta }) => {
-    return token.filePath.includes(`/output/${meta.filePath}`);
+  filter: (token, options) => {
+    const configOptions = options as ConfigWithMeta;
+    return token.filePath.includes(`/output/${configOptions.meta?.filePath}`);
   },
 });
@@ -75,12 +80,13 @@ StyleDictionary.registerFilter({
 StyleDictionary.registerFormat({
   name: 'swisspost/scss-format',
   format: async ({ dictionary, options, file }) => {
-    const { meta } = options;
+    const { meta } = options as ConfigWithMeta;
+
     const header = await fileHeader({ file, commentStyle: 'short' });
 
     return (
       header +
-      meta.setNames
+      meta?.setNames
         .map(setName => {
           const tokenSetName = getSetName(options, setName);
           const tokenSet = getSet(options, dictionary, setName)
@@ -92,7 +98,6 @@ StyleDictionary.registerFormat({
                 : `  ${token.name}: ${tokenValue},`;
             })
             .join('\n');
-
          return meta.layer === 'core'
            ? `:root {\n${tokenSet}\n}\n`
            : `$${tokenSetName}: (\n${tokenSet}\n);\n`;
diff --git a/packages/tokens/_build/configs/index.js b/packages/tokens/src/_build/configs/index.ts
similarity index 100%
rename from packages/tokens/_build/configs/index.js
rename to packages/tokens/src/_build/configs/index.ts
diff --git a/packages/tokens/_build/configs/tailwind.js b/packages/tokens/src/_build/configs/tailwind.ts
similarity index 82%
rename from packages/tokens/_build/configs/tailwind.js
rename to packages/tokens/src/_build/configs/tailwind.ts
index 76e92a138e..82a03d5ba9 100644
--- a/packages/tokens/_build/configs/tailwind.js
+++ b/packages/tokens/src/_build/configs/tailwind.ts
@@ -2,6 +2,7 @@ import { fileHeader } from 'style-dictionary/utils';
 import { TOKENSET_LAYERS, TOKENSET_NAMES, TOKENSET_PREFIX } from '../constants.js';
 import StyleDictionary from '../style-dictionary.js';
 import { registerConfigMethod, getTokenValue } from '../methods.js';
+import { TokenProperty } from '_build/types.js';
 import { objectDeepmerge, objectTextoutput } from '../utils/index.js';
 
 const TAILWIND_TOKENSET_NAMES = [TOKENSET_NAMES.Utilities, TOKENSET_NAMES.Helpers];
@@ -61,14 +62,18 @@ StyleDictionary.registerFormat({
   name: 'swisspost/tailwind-format',
   format: async ({ dictionary, options, file }) => {
     const header = await fileHeader({ file, commentStyle: 'short' });
-    const tailwindTokensObject = dictionary.allTokens.reduce((allTokens, token) => {
-      const tokenObj = token.path
-        .slice(token.path.indexOf(TOKENSET_PREFIX) + 1)
-        .reverse()
-        .reduce((res, p) => ({ [p]: res }), getTokenValue(options, token));
-
-      return objectDeepmerge(allTokens, tokenObj);
-    }, {});
+    const tailwindTokensObject = dictionary.allTokens.reduce<Record<string, TokenProperty>>(
+      (allTokens, token) => {
+        const tokenObj = token.path
+          .slice(token.path.indexOf(TOKENSET_PREFIX) + 1)
+          .reverse()
+          .reduce((res, p) => ({ [p]: res }), getTokenValue(options, token)) as {
+          [key: string]: TokenProperty;
+        };
+        return objectDeepmerge(allTokens, tokenObj);
+      },
+      {},
+    );
 
     return header + `export default {${objectTextoutput(tailwindTokensObject)}\n};\n`;
   },
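For orientation, the `swisspost/tailwind-format` above folds each token's path (minus the `post` prefix) into a nested object and deep-merges it into an accumulator. A minimal standalone sketch of that fold, under assumptions: `fold` is a hypothetical helper name, and `TokenProperty` is a simplified copy of the type from `_build/types.ts`:

```ts
// Sketch of the path-folding used by 'swisspost/tailwind-format' (assumed
// simplified types; not part of the PR itself).
type TokenProperty = string | number | boolean | { [key: string]: TokenProperty };

// Folds a token path into a nested object around a leaf value, e.g.
// ['utility', 'margin', 'top'] + '1rem' -> { utility: { margin: { top: '1rem' } } }
function fold(path: string[], value: TokenProperty): TokenProperty {
  return [...path] // copy first: Array.prototype.reverse() mutates in place
    .reverse()
    .reduce<TokenProperty>((res, p) => ({ [p]: res }), value);
}

console.log(JSON.stringify(fold(['utility', 'margin', 'top'], '1rem')));
// {"utility":{"margin":{"top":"1rem"}}}
```

In the real format, `token.path.slice(...)` already produces a fresh array, so the in-place `reverse()` is safe there.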
diff --git a/packages/tokens/_build/configs/utilities.js b/packages/tokens/src/_build/configs/utilities.ts
similarity index 57%
rename from packages/tokens/_build/configs/utilities.js
rename to packages/tokens/src/_build/configs/utilities.ts
index 779e5f8640..706471fffc 100644
--- a/packages/tokens/_build/configs/utilities.js
+++ b/packages/tokens/src/_build/configs/utilities.ts
@@ -2,39 +2,41 @@ import { fileHeader } from 'style-dictionary/utils';
 import { TOKENSET_NAMES } from '../constants.js';
 import StyleDictionary from '../style-dictionary.js';
 import { registerConfigMethod, getTokenValue } from '../methods.js';
+import { UtilityAttributes } from '_build/types.js';
 
 /**
  * Registers a config method to generate output files for utility tokens.
 */
 registerConfigMethod((tokenSets, { sourcePath, buildPath }) => {
-  const { type, layer, filePath, sets } = tokenSets.output[TOKENSET_NAMES.Utilities];
-
-  return {
-    meta: {
-      type,
-      layer,
-      filePath,
-      setNames: Object.keys(sets),
-    },
-    source: [`${sourcePath}_temp/output/${filePath}`],
-    include: [`${sourcePath}_temp/source/**/*.json`],
-    platforms: {
-      utilities: {
-        transforms: ['name/kebab'],
-        buildPath,
-        files: [
-          {
-            destination: `_utilities-formatted.scss`,
-            filter: 'swisspost/source-tokens-filter',
-            format: 'swisspost/utility-format',
-            options: {
-              outputReferences: true,
+  const { type, layer, filePath, setNames } = tokenSets.output[TOKENSET_NAMES.Utilities];
+  return [
+    {
+      meta: {
+        type,
+        layer,
+        filePath,
+        setNames: Object.keys(setNames),
+      },
+      source: [`${sourcePath}_temp/output/${filePath}`],
+      include: [`${sourcePath}_temp/source/**/*.json`],
+      platforms: {
+        utilities: {
+          transforms: ['name/kebab'],
+          buildPath,
+          files: [
+            {
+              destination: `_utilities-formatted.scss`,
+              filter: 'swisspost/source-tokens-filter',
+              format: 'swisspost/utility-format',
+              options: {
+                outputReferences: true,
+              },
             },
-          },
-        ],
+          ],
+        },
       },
     },
-  };
+  ];
 });
 
 /**
@@ -48,7 +50,7 @@ StyleDictionary.registerFormat({
     const utilityTokens = new Map();
 
     dictionary.allTokens.forEach(token => {
-      const { subitem, state } = token.attributes;
+      const { subitem, state } = token.attributes as UtilityAttributes;
       const previousStates = utilityTokens.get(subitem) ?? [];
       const newState = `\n  ${state}: ${getTokenValue(options, token)},`;
diff --git a/packages/tokens/_build/constants.js b/packages/tokens/src/_build/constants.ts
similarity index 92%
rename from packages/tokens/_build/constants.js
rename to packages/tokens/src/_build/constants.ts
index c819d36848..173d90b278 100644
--- a/packages/tokens/_build/constants.js
+++ b/packages/tokens/src/_build/constants.ts
@@ -36,11 +36,13 @@ export const EXPLICIT_COMPONENT_LAYER_GROUPNAMES = [
 ];
 export const EXPLICIT_FIGMAONLY_GROUPNAMES = ['figmaonly'];
 export const EXPLICIT_FIGMAONLY_SETNAMES = ['figmaonly'];
-export const TOKENSET_LAYERS = {
-  core: 'core',
-  semantic: 'semantic',
-  component: 'component',
-};
+
+export enum TOKENSET_LAYERS {
+  core = 'core',
+  component = 'component',
+  semantic = 'semantic',
+}
+
 export const TOKENSET_PREFIX = 'post';
 export const CUSTOM_FORMAT_INDENT = '  ';
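Converting `TOKENSET_LAYERS` to a string enum keeps the plain string comparisons elsewhere (e.g. `meta.layer === 'core'` in `swisspost/scss-format`) working, since string-enum members are their literal values at runtime. A small sketch of that branching, assuming a hypothetical `wrap` helper that mirrors the format's core-vs-map decision:

```ts
// String enum copied from constants.ts; 'wrap' is illustrative only.
enum TOKENSET_LAYERS {
  core = 'core',
  component = 'component',
  semantic = 'semantic',
}

// Core tokens render into :root; other layers become SCSS maps.
function wrap(layer: TOKENSET_LAYERS, setName: string, body: string): string {
  return layer === TOKENSET_LAYERS.core
    ? `:root {\n${body}\n}\n`
    : `$${setName}: (\n${body}\n);\n`;
}

console.log(wrap(TOKENSET_LAYERS.semantic, 'post-palette', '  color: red,'));
// $post-palette: (
//   color: red,
// );
```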
diff --git a/packages/tokens/_build/methods.js b/packages/tokens/src/_build/methods.ts
similarity index 78%
rename from packages/tokens/_build/methods.js
rename to packages/tokens/src/_build/methods.ts
index a57143cc94..3378162be1 100644
--- a/packages/tokens/_build/methods.js
+++ b/packages/tokens/src/_build/methods.ts
@@ -1,5 +1,10 @@
 import { promises } from 'fs';
-import StyleDictionary from './style-dictionary.js';
+import StyleDictionary, {
+  type Dictionary,
+  type Config,
+  type TransformedToken,
+} from 'style-dictionary';
+
 import { usesReferences } from 'style-dictionary/utils';
 import {
   SOURCE_PATH,
@@ -11,13 +16,28 @@
   EXPLICIT_FIGMAONLY_SETNAMES,
   TOKENSET_PREFIX,
 } from './constants.js';
+
+import {
+  CliOptions,
+  RawTokenJson,
+  TokenDefinition,
+  TokenSets,
+  TokenProperty,
+  ConfigWithMeta,
+} from './types.js';
+
 import { objectDeepmerge } from './utils/index.js';
+import { LocalOptions } from 'style-dictionary/types';
 
-let CLI_OPTIONS;
-let tokenSets;
-let registeredConfigMethods = [];
+let CLI_OPTIONS: CliOptions;
 
-export async function setup() {
+let tokenSets: TokenSets;
+
+let registeredConfigMethods: Array<
+  (tokenSets: TokenSets, options: { sourcePath: string; buildPath: string }) => Config[]
+> = [];
+
+export async function setup(): Promise<void> {
   CLI_OPTIONS = createCliOptions();
 
   const tokensFile = JSON.parse(await promises.readFile(`${SOURCE_PATH}/tokens.json`, 'utf-8'));
@@ -35,8 +55,8 @@ export async function setup() {
  *   verbosity: 'silent' | 'default' | 'verbose'
  * }
 */
-function createCliOptions() {
-  const options = {
+function createCliOptions(): CliOptions {
+  const options: CliOptions = {
     verbosity: 'default',
   };
@@ -60,12 +80,12 @@
 *
 * @returns group-nested tokensets object
 */
-function createTokenSets(tokensFile) {
+function createTokenSets(tokensFile: RawTokenJson): TokenSets {
   // remove $themes and $metadata objects
   // lowercase set names
   const normalized = Object.entries(tokensFile)
     .filter(([name]) => !/^\$/.test(name))
-    .reduce((sets, [name, set]) => ({ ...sets, [name.toLowerCase()]: set }), {});
+    .reduce((sets, [name, set]) => ({ ...sets, [name.toLowerCase()]: set }), {} as RawTokenJson);
 
   // only add non component layer sets to source files
   // component layer sets can not be resolved in the browser, and therefore are not usable as sources
@@ -77,42 +97,47 @@ function createTokenSets(tokensFile) {
     } else {
       return sets;
     }
-  }, {});
+  }, {} as TokenSets['source']);
 
   // combine tokensets by group so they can be outputted in a single file
-  const output = Object.entries(normalized).reduce((definition, [name, set]) => {
-    const { groupSlug, groupName, setName, baseDefinition } = getDefinition(name);
-    const existingGroup = definition[groupSlug];
-
-    if (
-      EXPLICIT_FIGMAONLY_GROUPNAMES.includes(groupName) ||
-      EXPLICIT_FIGMAONLY_SETNAMES.includes(setName)
-    ) {
-      return definition;
-    } else {
-      return {
-        ...definition,
-        [groupSlug]: {
-          ...baseDefinition,
-          sets: { ...existingGroup?.sets, [setName]: set },
-        },
-      };
-    }
-  }, {});
+  const output = Object.entries(normalized).reduce(
+    (definition: TokenSets['output'], [name, set]) => {
+      const { groupSlug, groupName, setName, baseDefinition } = getDefinition(name);
+      const existingGroup = definition[groupSlug];
+
+      if (
+        (typeof groupName === 'string' && EXPLICIT_FIGMAONLY_GROUPNAMES.includes(groupName)) ||
+        EXPLICIT_FIGMAONLY_SETNAMES.includes(setName)
+      ) {
+        return definition;
+      } else {
+        return {
+          ...definition,
+          [groupSlug]: {
+            ...baseDefinition,
+            sets: { ...existingGroup?.sets, [setName]: set },
+          },
+        };
+      }
+    },
+    {} as TokenSets['output'],
+  );
 
   return {
     source,
     output,
   };
 
-  function getDefinition(name) {
+  function getDefinition(name: string): TokenDefinition {
     const [groupSlug, setSlug] = name.split('/');
     const groupName = setSlug ? groupSlug : null;
     const setName = setSlug ?? groupSlug;
     const type = !groupName ? 'singleton' : 'collection';
     const isCore = type === 'singleton' && setName === 'core';
     const isComponent =
-      !isCore && (type === 'singleton' || EXPLICIT_COMPONENT_LAYER_GROUPNAMES.includes(groupName));
+      !isCore &&
+      (type === 'singleton' ||
+        (typeof groupName === 'string' && EXPLICIT_COMPONENT_LAYER_GROUPNAMES.includes(groupName)));
 
     return {
       groupSlug,
@@ -125,6 +150,8 @@
         (isComponent && TOKENSET_LAYERS.component) ||
         TOKENSET_LAYERS.semantic,
       filePath: `${groupName ?? setName}.json`,
+      setNames: [],
+      sets: {},
     },
   };
 }
@@ -136,7 +163,7 @@
 * These files are used to be included in the StyleDictionary Config as sources,
 * so StyleDictionary is able to resolve the currently processed tokens.
 */
-export async function createTokenSetFiles() {
+export async function createTokenSetFiles(): Promise<void> {
  console.log(`\x1b[90mProcessing data...`);

  const sourceTokenFolders = Object.keys(tokenSets.source)
    .filter(name => name.includes('/'))
@@ -168,7 +195,9 @@
 * @param {options} object { sourcePath: string, buildPath: string }
 * @returns {Config[]} StyleDictionary Config objects[]
 */
-export function registerConfigMethod(method) {
+export function registerConfigMethod(
+  method: (tokenSets: TokenSets, options: { sourcePath: string; buildPath: string }) => Config[],
+) {
   if (method instanceof Function) {
     registeredConfigMethods.push(method);
   } else {
@@ -182,7 +211,7 @@
 *
 * @param tokenSets group-nested tokensets object
 */
-export async function createOutputFiles() {
+export async function createOutputFiles(): Promise<void> {
   console.log(`\x1b[90mWriting files...`);
   await Promise.all(getConfigs().map(build));
   await createIndexFile();
@@ -195,7 +224,7 @@
   *
   * @returns Config[]
   */
-  function getConfigs() {
+  function getConfigs(): Config[] {
    return registeredConfigMethods
      .map(method =>
        method(tokenSets, { sourcePath: `${SOURCE_PATH}/`, buildPath: `${OUTPUT_PATH}/` }),
      )
@@ -211,10 +240,12 @@
       preprocessors: [
         'swisspost/box-shadow-keep-refs-workaround',
         'tokens-studio',
-        ...(config.proprocessors ?? []),
+        ...(config.preprocessors ?? []),
       ],
     });
 
+    if (!config.platforms) return {};
+
     config.platforms = Object.entries(config.platforms).reduce(
       (platforms, [name, platform]) => ({
         ...platforms,
@@ -241,7 +272,7 @@
   * @param config
   *   StyleDictionary Config object
   */
-  async function build(config) {
+  async function build(config: Config): Promise<void> {
     const sd = new StyleDictionary(config);
     await sd.buildAllPlatforms();
   }
@@ -250,7 +281,7 @@
   * @function createIndexFile()
   * Creates the index.scss file (which uses/forwards the other output files) in the "OUTPUT_PATH" directory.
   */
-  async function createIndexFile() {
+  async function createIndexFile(): Promise<void> {
     const header = FILE_HEADER.map(h => `// ${h}`).join('\n');
     const imports = Object.entries(tokenSets.output)
       .map(([name, { layer }]) => `@${layer === 'core' ? 'use' : 'forward'} './${name}';`)
@@ -264,7 +295,7 @@
   * Copies the tokens.json file from the "SOURCE_PATH" to the "OUTPUT_PATH" directory,
   * to make it available in the package distribution.
   */
-  async function copySrcFiles() {
+  async function copySrcFiles(): Promise<void> {
     await promises.copyFile(`${SOURCE_PATH}/tokens.json`, `${OUTPUT_PATH}/tokens.json`);
   }
 }
@@ -273,7 +304,7 @@
 * @function removeTokenSetFiles()
 * Removes the temporary token set files from the "SOURCE_PATH/_temp" directory.
 */
-export async function removeTokenSetFiles() {
+export async function removeTokenSetFiles(): Promise<void> {
   console.log(`\x1b[90mCleanup...`);
   await promises.rm(`${SOURCE_PATH}/_temp/`, { recursive: true });
   console.log(`\x1b[33m✓ Complete!`);
@@ -287,7 +318,7 @@
 *
 * @returns the normalized set name
 */
-export function getSetName(_options, setName) {
+export function getSetName(_options: Config, setName: string): string {
   return `${TOKENSET_PREFIX ? TOKENSET_PREFIX + '-' : ''}${setName.trim().replace(/\s/g, '-')}`;
 }
 
@@ -309,11 +340,17 @@
 * | a    |        | b      | desktop/tablet = a, mobile = b
 * | a    | b      | c      | desktop = a, tablet = b, mobile = c
 */
-export function getSet(options, dictionary, currentSetName) {
+
+export function getSet(
+  options: ConfigWithMeta & LocalOptions,
+  dictionary: Dictionary,
+  currentSetName: string,
+) {
   const { meta } = options;
-  let tokenSet = [];
 
-  if (meta.layer === 'semantic') {
+  let tokenSet: TransformedToken[] = [];
+
+  if (meta?.layer === 'semantic') {
     const baseSetName = meta.setNames[0];
     const overrideSetNameIndex = meta.setNames.findIndex(setName => setName === currentSetName);
     const overrideSetNames = meta.setNames.slice(1, overrideSetNameIndex + 1);
@@ -339,7 +376,7 @@
 
   return tokenSet;
 
-  function normalizeToken(token) {
+  function normalizeToken(token: TransformedToken) {
     const usesDtcg = token.$type && token.$value;
     const name = token.path.slice(1).join('-');
     const path = name.split('-');
@@ -347,7 +384,7 @@
     // Can be removed, as soon as box-shadow tokens can be outputted with references
     const boxShadowKeepRefsWorkaroundValue = token?.original?.$extensions?.[
       'studio.tokens'
-    ]?.boxShadowKeepRefsWorkaroundValue?.replace(/(\[\[|\]\])/g, match =>
+    ]?.boxShadowKeepRefsWorkaroundValue?.replace(/(\[\[|\]\])/g, (match: string) =>
       match === '[[' ? '{' : '}',
     );
 
@@ -375,7 +412,10 @@
 *
 * @returns the tokens value, with referenced css custom-properties (if original value uses references)
 */
-export function getTokenValue(options, token) {
+export function getTokenValue(
+  options: Config & LocalOptions,
+  token: TransformedToken,
+): TokenProperty {
   const { outputReferences } = options;
   const usesDtcg = token.$type && token.$value;
 
@@ -386,7 +426,7 @@
     tokenValue = replaceAllReferences(originalTokenValue);
   }
 
-  function replaceAllReferences(value) {
+  function replaceAllReferences(value: string | { [key: string]: TokenProperty }) {
     if (typeof value === 'string') {
       return replaceReferences(value);
     }
 
@@ -395,7 +435,11 @@
     for (const key in value) {
       if (Object.hasOwn(value, key)) {
         if (typeof value[key] === 'string') value[key] = replaceReferences(value[key]);
-        if (typeof value[key] === 'object') value[key] = replaceAllReferences(value[key]);
+        if (typeof value[key] === 'object') {
+          value[key] = replaceAllReferences(
+            value[key] as { [key: string]: TokenProperty },
+          ) as TokenProperty;
+        }
       }
     }
 
@@ -403,7 +447,7 @@
   }
 
-  function replaceReferences(value) {
+  function replaceReferences(value: string) {
     return value.replace(
       /{[0-9a-zA-Z-._]+}/g,
       match => `var(--${match.replace(/[{}]/g, '').replace(/\./g, '-')})`,
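The regex rewrite in `replaceReferences` is the heart of the `outputReferences` behavior: a `{token.path}` reference becomes a CSS custom-property lookup instead of a resolved value. Extracted as a standalone sketch (same regexes as in the diff above, hypothetical input value):

```ts
// Standalone sketch of the reference rewrite used by getTokenValue.
function replaceReferences(value: string): string {
  return value.replace(
    /{[0-9a-zA-Z-._]+}/g,
    // Strip the braces, then turn dot-separated path segments into dashes.
    match => `var(--${match.replace(/[{}]/g, '').replace(/\./g, '-')})`,
  );
}

console.log(replaceReferences('1px solid {post.color.border}'));
// -> '1px solid var(--post-color-border)'
```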
diff --git a/packages/tokens/_build/style-dictionary.js b/packages/tokens/src/_build/style-dictionary.ts
similarity index 75%
rename from packages/tokens/_build/style-dictionary.js
rename to packages/tokens/src/_build/style-dictionary.ts
index e39d7b2ae2..9490739fa8 100644
--- a/packages/tokens/_build/style-dictionary.js
+++ b/packages/tokens/src/_build/style-dictionary.ts
@@ -1,8 +1,13 @@
 import StyleDictionary from 'style-dictionary';
 import { register } from '@tokens-studio/sd-transforms';
-import { BASE_FONT_SIZE, FILE_HEADER, NO_UNITLESS_ZERO_VALUE_TOKEN_TYPES, PX_TO_REM_TOKEN_TYPE } from './constants.js';
-
+import {
+  BASE_FONT_SIZE,
+  FILE_HEADER,
+  NO_UNITLESS_ZERO_VALUE_TOKEN_TYPES,
+  PX_TO_REM_TOKEN_TYPE,
+} from './constants.js';
 register(StyleDictionary);
+import { DesignToken } from 'style-dictionary/types';
 
 /**
  * @function StyleDictionary.registerFileHeader()
@@ -34,10 +39,9 @@ StyleDictionary.registerTransform({
   type: 'value',
   filter: token => {
     const usesDtcg = token.$type && token.$value;
-    const transformType = NO_UNITLESS_ZERO_VALUE_TOKEN_TYPES.includes(
-      usesDtcg ? token.$type : token.type,
-    );
-
+    let transformType = false;
+    const typeToCheck = usesDtcg ? (token.$type as string) : (token.type as string);
+    transformType = NO_UNITLESS_ZERO_VALUE_TOKEN_TYPES.includes(typeToCheck);
     if (transformType) {
       return token[usesDtcg ? '$value' : 'value'] === '0';
     } else {
@@ -92,24 +96,25 @@ StyleDictionary.registerPreprocessor({
   preprocessor: dictionary => {
     traverse(dictionary);
 
-    function traverse(context) {
+    function traverse(context: DesignToken) {
       Object.entries(context).forEach(([key, value]) => {
         const usesDtcg = context[key].$type && context[key].$value;
         const isToken = context[key][usesDtcg ? '$type' : 'type'] !== undefined;
+        const tokenType = context[key][usesDtcg ? '$type' : 'type'];
+        const tokenValue = context[key][usesDtcg ? '$value' : 'value'];
 
-        if (isToken) {
-          const tokenType = context[key][usesDtcg ? '$type' : 'type'];
-          const tokenValue = context[key][usesDtcg ? '$value' : 'value'];
-
-          if (tokenType === 'shadow' && typeof tokenValue === 'string') {
-            context[key].$extensions[
-              'studio.tokens'
-            ].boxShadowKeepRefsWorkaroundValue = `${tokenValue.replace(/[{}]/g, match =>
-              match === '{' ? '[[' : ']]',
-            )}`;
+        if (typeof context[key] === 'object' && context[key] !== null) {
+          if (isToken) {
+            if (tokenType === 'shadow' && typeof tokenValue === 'string') {
+              context[key].$extensions[
+                'studio.tokens'
+              ].boxShadowKeepRefsWorkaroundValue = `${tokenValue.replace(/[{}]/g, match =>
+                match === '{' ? '[[' : ']]',
+              )}`;
+            }
+          } else if (typeof value === 'object' && value !== null) {
+            traverse(value);
           }
-        } else if (typeof context[key] === 'object') {
-          traverse(value);
         }
       });
     }
diff --git a/packages/tokens/src/_build/types.ts b/packages/tokens/src/_build/types.ts
new file mode 100644
index 0000000000..9e211c59b3
--- /dev/null
+++ b/packages/tokens/src/_build/types.ts
@@ -0,0 +1,50 @@
+import { Config } from 'style-dictionary/types';
+
+export type CliOptions = {
+  verbosity: 'silent' | 'default' | 'verbose';
+  [key: string]: string;
+};
+
+export interface RawTokenJson {
+  [setName: string]: any;
+}
+
+export type TokenProperty = string | number | boolean | { [key: string]: TokenProperty };
+
+export interface TokenMeta {
+  type: 'singleton' | 'collection';
+  layer: 'core' | 'component' | 'semantic';
+  filePath: string;
+  setNames: string[];
+  sets: {
+    [setName: string]: { [key: string]: TokenProperty };
+  };
+}
+
+export interface TokenDefinition {
+  groupSlug: string;
+  groupName: string | null;
+  setName: string;
+  baseDefinition: TokenMeta;
+}
+
+export interface ConfigWithMeta extends Config {
+  meta?: TokenMeta;
+}
+
+export interface TokenSets {
+  source: {
+    meta: TokenMeta;
+  };
+  output: { [groupName: string]: TokenMeta };
+}
+
+export type TokenGroup = {
+  [key: string]: TokenProperty | TokenGroup;
+};
+
+export type UtilityAttributes = {
+  category: string;
+  type: string;
+  item: string;
+  subitem: string;
+  state: string;
+};
diff --git a/packages/tokens/_build/utils/index.js b/packages/tokens/src/_build/utils/index.ts
similarity index 100%
rename from packages/tokens/_build/utils/index.js
rename to packages/tokens/src/_build/utils/index.ts
diff --git a/packages/tokens/src/_build/utils/object-deepmerge.ts b/packages/tokens/src/_build/utils/object-deepmerge.ts
new file mode 100644
index 0000000000..19ab4b1d85
--- /dev/null
+++ b/packages/tokens/src/_build/utils/object-deepmerge.ts
@@ -0,0 +1,44 @@
+export type DeepMerged<T, U> = {
+  [K in keyof T | keyof U]: K extends keyof U
+    ? K extends keyof T
+      ? T[K] extends object
+        ? U[K] extends object
+          ? DeepMerged<T[K], U[K]>
+          : U[K]
+        : U[K]
+      : U[K]
+    : K extends keyof T
+      ? T[K]
+      : never;
+};
+
+// This deepmerge function recursively combines two objects, prioritizing values from the second object and deeply merging nested objects
+export default function deepmerge<T extends object, U extends object>(
+  obj1: T,
+  obj2: U,
+): DeepMerged<T, U> {
+  const result = { ...obj1 } as Record<string, unknown>;
+
+  for (const key in obj2) {
+    if (Object.hasOwn(obj2, key)) {
+      const val1 = (obj1 as Record<string, unknown>)[key];
+      const val2 = obj2[key as keyof U];
+
+      const shouldMerge =
+        typeof val1 === 'object' &&
+        typeof val2 === 'object' &&
+        val1 !== null &&
+        val2 !== null &&
+        !Array.isArray(val1) &&
+        !Array.isArray(val2);
+
+      if (shouldMerge) {
+        result[key] = deepmerge(val1 as object, val2 as object);
+      } else {
+        result[key] = val2;
+      }
+    }
+  }
+
+  return result as DeepMerged<T, U>;
+}
diff --git a/packages/tokens/_build/utils/object-textoutput.js b/packages/tokens/src/_build/utils/object-textoutput.ts
similarity index 71%
rename from packages/tokens/_build/utils/object-textoutput.js
rename to packages/tokens/src/_build/utils/object-textoutput.ts
index 0b1beaf736..a6345b508c 100644
--- a/packages/tokens/_build/utils/object-textoutput.js
+++ b/packages/tokens/src/_build/utils/object-textoutput.ts
@@ -1,10 +1,14 @@
 import { CUSTOM_FORMAT_INDENT } from '../constants.js';
 
+type NestedObject = {
+  [key: string]: string | number | boolean | NestedObject;
+};
+
 export default function textoutput(
-  obj = {},
-  baseIndent = CUSTOM_FORMAT_INDENT,
-  currentIndent = '',
-) {
+  obj: NestedObject = {},
+  baseIndent: string = CUSTOM_FORMAT_INDENT,
+  currentIndent: string = '',
+): string {
   const indent = `${baseIndent}${currentIndent}`;
   const stringifyKeys = Object.keys(obj).some(key => !key.match(/^[a-zA-Z0-9]+$/));
 
@@ -17,7 +21,7 @@ export default function textoutput(
       val = `{${textoutput(value, baseIndent, indent)}\n${indent}}`;
     } else {
       try {
-        val = JSON.parse(value);
+        val = JSON.parse(value as string);
       } catch {
         val = `'${value}'`;
       }
diff --git a/packages/tokens/build.js b/packages/tokens/src/build.ts
similarity index 99%
rename from packages/tokens/build.js
rename to packages/tokens/src/build.ts
index 243c5bb71e..ffe4acde9f 100644
--- a/packages/tokens/build.js
+++ b/packages/tokens/src/build.ts
@@ -4,7 +4,6 @@ import {
   createOutputFiles,
   removeTokenSetFiles,
 } from './_build/methods.js';
-
 import './_build/configs/index.js';
 
 /**
diff --git a/packages/tokens/tsconfig.json b/packages/tokens/tsconfig.json
new file mode 100644
index 0000000000..a8489dec84
--- /dev/null
+++ b/packages/tokens/tsconfig.json
@@ -0,0 +1,23 @@
+{
+  "compilerOptions": {
+    "target": "es2021",
+    "module": "esnext",
+    "moduleResolution": "bundler",
+    "baseUrl": "./src",
+    "outDir": "./dist",
+    "rootDir": "./src",
+    "strict": true,
+    "allowSyntheticDefaultImports": true,
+    "esModuleInterop": true,
+    "skipLibCheck": true,
+    "declaration": true,
+    "sourceMap": false,
+    "forceConsistentCasingInFileNames": true,
+    "noUnusedLocals": true,
+    "noUnusedParameters": true,
+    "resolveJsonModule": true,
+    "lib": ["es2022"]
+  },
+  "include": ["src/**/*.ts"],
+  "exclude": ["node_modules", "dist"]
+}
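A usage sketch for the typed `deepmerge` above (hypothetical token values; the import path assumes the compiled `dist` layout): nested objects merge recursively, scalars and arrays from the second argument win, and the `DeepMerged` mapped type keeps per-key result types instead of widening everything to `unknown`:

```ts
// Usage sketch only; values and import path are illustrative.
import deepmerge from './dist/_build/utils/object-deepmerge.js';

const base = { spacing: { sm: '4px', md: '8px' }, mode: 'light' };
const override = { spacing: { md: '12px' }, mode: 'dark' };

// Nested objects are merged key by key; non-object values are overwritten.
const merged = deepmerge(base, override);

console.log(merged.spacing); // { sm: '4px', md: '12px' }
console.log(merged.mode); // 'dark'
```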
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 409c2b6552..f51b3acad9 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1042,6 +1042,9 @@ importers:
       '@tokens-studio/sd-transforms':
         specifier: 1.2.9
         version: 1.2.9(style-dictionary@4.3.0)
+      '@types/node':
+        specifier: 22.10.5
+        version: 22.10.5
       copyfiles:
         specifier: 2.4.1
         version: 2.4.1
@@ -1057,6 +1060,12 @@ importers:
       style-dictionary:
         specifier: 4.3.0
         version: 4.3.0
+      typescript:
+        specifier: 5.8.3
+        version: 5.8.3
+      typescript-eslint:
+        specifier: 8.20.0
+        version: 8.20.0(eslint@9.18.0(jiti@2.4.2))(typescript@5.8.3)
     publishDirectory: ./dist
 
 packages:
@@ -15633,7 +15642,7 @@ snapshots:
       '@typescript-eslint/types': 8.20.0
       '@typescript-eslint/typescript-estree': 8.20.0(typescript@5.8.3)
       '@typescript-eslint/visitor-keys': 8.20.0
-      debug: 4.4.0(supports-color@8.1.1)
+      debug: 4.4.1(supports-color@5.5.0)
       eslint: 9.18.0(jiti@2.4.2)
       typescript: 5.8.3
     transitivePeerDependencies: