Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 2 additions & 8 deletions .github/workflows/tokens.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,6 @@ jobs:
- name: Install dependencies
run: pnpm install --frozen-lockfile

# Tokens Studio 포맷(value/type) → W3C DTCG($value/$type) 변환
# 현재 tokens/ 파일이 이미 DTCG 포맷이면 변환 없이 통과
- name: Transform (Tokens Studio → W3C DTCG)
run: pnpm --filter @sipe-team/tokens transform:tokens

# PR 리뷰용: base 브랜치 빌드 결과를 먼저 저장해 diff 비교
- name: Build base tokens
id: base-build
Expand Down Expand Up @@ -145,9 +140,8 @@ jobs:

### 파이프라인
1. Tokens Studio → `tokens/figma-sync` Push
2. token-transformer: Tokens Studio 포맷 → W3C DTCG 변환
3. Style Dictionary: CSS 빌드
4. 이 PR → main 머지 후 VE codegen(추후 추가)
2. Style Dictionary + @tokens-studio/sd-transforms: 변환 및 CSS/TS 빌드
3. 이 PR → main 머지

### Review checklist
- [ ] `validate` 잡 Step Summary에서 CSS diff 확인
Expand Down
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -184,4 +184,7 @@ build/
.claude/settings.local.json

# oh-my-claudecode local state
.omc/
.omc/

# Style Dictionary intermediate artifacts
**/tokens-transformed.json
196 changes: 167 additions & 29 deletions packages/tokens/config.js
Original file line number Diff line number Diff line change
@@ -1,19 +1,37 @@
import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'node:fs';
import { mkdirSync, readFileSync, writeFileSync } from 'node:fs';
import { chdir } from 'node:process';

import { register } from '@tokens-studio/sd-transforms';
import StyleDictionary from 'style-dictionary';

chdir(import.meta.dirname);

// Register Tokens Studio transforms and preprocessor into Style Dictionary
register(StyleDictionary, { excludeParentKeys: true });

const DIST = 'dist/css';
const DIST_TS = 'dist/ts';
const SEMANTIC_LIGHT_DIR = '../../tokens/semantic/light';

/** @param {string} dir */
function hasJsonFiles(dir) {
if (!existsSync(dir)) return false;
const entries = readdirSync(dir, { recursive: true });
return entries.some((f) => f.toString().endsWith('.json'));
const allTokens = JSON.parse(readFileSync('../../tokens/tokens.json', 'utf-8'));
const setOrder = allTokens.$metadata?.tokenSetOrder ?? [];

/**
 * Assemble a token dictionary limited to the given sets, keeping the
 * multi-set wrapper shape (set name → set content, plus `$metadata`) so the
 * tokens-studio preprocessor can strip the parent keys and merge the sets.
 * Set names missing from `allTokens` are silently skipped.
 * @param {string[]} setNames
 */
function buildSetDict(setNames) {
  // Keep only the sets that actually exist in tokens.json, preserving caller order.
  const presentSets = setNames.filter((name) => allTokens[name]);
  const dict = Object.fromEntries(presentSets.map((name) => [name, allTokens[name]]));
  // $metadata.tokenSetOrder drives the merge order inside the preprocessor.
  dict.$metadata = { tokenSetOrder: presentSets };
  return dict;
}

/**
 * Whether at least one non-empty token set exists whose key starts with `prefix`.
 * Empty sets (`{}`) do not count — they produce no output.
 * @param {string} prefix
 */
function hasSetsByPrefix(prefix) {
  for (const key of setOrder) {
    if (!key.startsWith(prefix)) continue;
    const set = allTokens[key];
    if (set && Object.keys(set).length > 0) return true;
  }
  return false;
}

/** @param {string} str */
Expand All @@ -24,9 +42,72 @@ function toPascalCase(str) {
.join('');
}

/**
 * Recursively collect every leaf token path ("a.b.c") found in the given sets.
 * A node counts as a leaf as soon as it carries a `$value` key; `$`-prefixed
 * keys ($type, $description, …) are metadata and are never descended into.
 * Used to compute which tokens belong exclusively to the semantic layer.
 * @param {string[]} setNames
 * @returns {Set<string>}
 */
function getLeafPaths(setNames) {
  const paths = new Set();
  const visit = (node, trail) => {
    if (node && '$value' in node) {
      paths.add(trail.join('.'));
      return;
    }
    for (const [key, child] of Object.entries(node ?? {})) {
      if (key.startsWith('$') || !child || typeof child !== 'object') continue;
      visit(child, [...trail, key]);
    }
  };
  for (const setName of setNames) {
    const set = allTokens[setName];
    if (set) visit(set, []);
  }
  return paths;
}

/**
 * Recursively strip tokens that would create a circular reference when merged
 * with the primitive layer: a leaf whose path also exists in the primitives and
 * whose `$value` is an alias to its own path (e.g. `color.bg` = `{color.bg}`)
 * is dropped; everything else — including non-self aliases that legitimately
 * override a primitive — is kept as-is. `$`-prefixed metadata keys are copied through.
 * @param {object} setContent
 * @param {Set<string>} primLeafPaths
 * @returns {object}
 */
function _stripCircularAliases(setContent, primLeafPaths) {
  const prune = (node, trail) => {
    // Primitives (strings, numbers, null) pass through untouched.
    if (!node || typeof node !== 'object') return node;
    if ('$value' in node) {
      const path = trail.join('.');
      if (typeof node.$value === 'string' && primLeafPaths.has(path)) {
        const match = /^\{([^}]+)\}$/.exec(node.$value);
        // Exact self-reference: signal removal to the caller.
        if (match && match[1] === path) return null;
      }
      return node;
    }
    const out = {};
    for (const [key, child] of Object.entries(node)) {
      if (key.startsWith('$')) {
        out[key] = child;
        continue;
      }
      const pruned = prune(child, [...trail, key]);
      if (pruned !== null) out[key] = pruned;
    }
    return out;
  };
  return prune(setContent, []);
}

// Token-set keys for the primitive (foundation) layer, in tokenSetOrder order.
const primitiveSets = setOrder.filter((k) => k.startsWith('primitive/'));
// @INFO: All semantic sets contribute to the dark theme (dark is the default)
const darkSets = setOrder.filter((k) => k.startsWith('semantic/'));

// Leaf token paths defined by each layer. Paths present in the semantic sets
// but absent from the primitives are unique to the semantic layer; they are
// used later to filter the semantic CSS/TS output so primitives are not re-emitted.
const primitiveLeafPaths = getLeafPaths(primitiveSets);
const darkLeafPaths = getLeafPaths(darkSets);
const semanticOnlyPaths = new Set([...darkLeafPaths].filter((p) => !primitiveLeafPaths.has(p)));

// Build primitive tokens (CSS + TypeScript types)
const primitive = new StyleDictionary({
source: ['../../tokens/primitive/**/*.json'],
tokens: buildSetDict(primitiveSets),
preprocessors: ['tokens-studio'],
hooks: {
formats: {
'typescript/token-names-dts': ({ dictionary }) => {
Expand All @@ -48,14 +129,9 @@ const primitive = new StyleDictionary({
lines.push(`${names.map((n) => ` | '${n}'`).join('\n')};\n`);
}

lines.push('export type DesignToken =');
lines.push('export type PrimitiveToken =');
lines.push(`${typeNames.map((t) => ` | ${t}`).join('\n')};\n`);

lines.push(
'/** Wraps a design token name in `var()` for use in inline styles. */',
'export declare function cssVar<T extends DesignToken>(token: T): `var(--${T})`;\n',
);

return lines.join('\n');
},
'typescript/token-names-js': () =>
Expand All @@ -78,7 +154,8 @@ const primitive = new StyleDictionary({
},
platforms: {
css: {
transformGroup: 'css',
transformGroup: 'tokens-studio',
transforms: ['name/kebab'],
buildPath: `${DIST}/`,
files: [
{
Expand Down Expand Up @@ -114,32 +191,87 @@ const primitive = new StyleDictionary({
});
await primitive.buildAllPlatforms();

// Build semantic/light tokens — graceful no-op if directory is empty
if (hasJsonFiles(SEMANTIC_LIGHT_DIR)) {
const semanticLight = new StyleDictionary({
source: [`${SEMANTIC_LIGHT_DIR}/**/*.json`],
// Build semantic/dark tokens — dark is the default theme, output to :root
// @NOTE: semantic/light is not yet developed; add a symmetric block when light sets are defined
if (hasSetsByPrefix('semantic/dark')) {
const semanticDarkDict = buildSetDict(primitiveSets);
for (const name of darkSets) {
if (allTokens[name]) semanticDarkDict[name] = _stripCircularAliases(allTokens[name], primitiveLeafPaths);
}
semanticDarkDict.$metadata = { tokenSetOrder: [...primitiveSets, ...darkSets].filter((n) => allTokens[n]) };

const semanticDark = new StyleDictionary({
// @ts-expect-error - Style Dictionary's type definitions don't allow for the multi-set wrapper structure used by tokens-studio
tokens: semanticDarkDict,
preprocessors: ['tokens-studio'],
hooks: {
formats: {
'typescript/semantic-token-names-dts': ({ dictionary }) => {
/** @type {Map<string, string[]>} */
const groups = new Map();
for (const token of dictionary.allTokens) {
if (!semanticOnlyPaths.has(token.path.join('.'))) continue;
const category = token.path[0];
if (!groups.has(category)) groups.set(category, []);
groups.get(category).push(token.name);
}

const lines = ['/** Auto-generated — do not edit directly. */\n'];
const typeNames = [];

for (const [category, names] of groups) {
const typeName = `Semantic${toPascalCase(category)}Token`;
typeNames.push(typeName);
lines.push(`export type ${typeName} =`);
lines.push(`${names.map((n) => ` | '${n}'`).join('\n')};\n`);
}

lines.push('export type SemanticToken =');
lines.push(`${typeNames.map((t) => ` | ${t}`).join('\n')};\n`);

return lines.join('\n');
},
},
},
platforms: {
css: {
transformGroup: 'css',
transformGroup: 'tokens-studio',
transforms: ['name/kebab'],
buildPath: `${DIST}/`,
files: [
{
destination: 'semantic-light.css',
destination: 'semantic-dark.css',
format: 'css/variables',
// Only emit tokens that are unique to the semantic layer.
filter: (token) => semanticOnlyPaths.has(token.path.join('.')),
options: { selector: ':root', outputReferences: true },
},
],
},
ts: {
transforms: ['name/kebab'],
buildPath: `${DIST_TS}/`,
files: [
{
destination: 'semantic.d.ts',
format: 'typescript/semantic-token-names-dts',
},
{
destination: 'semantic.d.cts',
format: 'typescript/semantic-token-names-dts',
},
],
},
},
});
await semanticLight.buildAllPlatforms();
await semanticDark.buildAllPlatforms();
} else {
mkdirSync(DIST, { recursive: true });
writeFileSync(`${DIST}/semantic-light.css`, '/* semantic/light tokens not yet defined */\n');
writeFileSync(`${DIST}/semantic-dark.css`, '/* semantic/dark tokens not yet defined */\n');
}

// Concatenate all CSS into index.css
const parts = [readFileSync(`${DIST}/primitive.css`, 'utf-8'), readFileSync(`${DIST}/semantic-light.css`, 'utf-8')];
const parts = [readFileSync(`${DIST}/primitive.css`, 'utf-8'), readFileSync(`${DIST}/semantic-dark.css`, 'utf-8')];
writeFileSync(`${DIST}/index.css`, parts.join('\n'));
console.log(`✓ ${DIST}/index.css generated`);

Expand All @@ -149,13 +281,19 @@ writeFileSync(
`${DIST_TS}/index.js`,
"/** Auto-generated — do not edit directly. */\nexport * from './primitive.js';\n",
);
writeFileSync(`${DIST_TS}/index.d.ts`, "/** Auto-generated — do not edit directly. */\nexport * from './primitive';\n");
writeFileSync(
`${DIST_TS}/index.cjs`,
"/** Auto-generated — do not edit directly. */\n'use strict';\nmodule.exports = require('./primitive.cjs');\n",
);
writeFileSync(
`${DIST_TS}/index.d.cts`,
"/** Auto-generated — do not edit directly. */\nexport * from './primitive';\n",
);

const barrelDts = [
'/** Auto-generated — do not edit directly. */',
"export * from './primitive';",
"export * from './semantic';",
'export type DesignToken = PrimitiveToken | SemanticToken;',
'/** Wraps a design token name in `var()` for use in inline styles. */',
'export declare function cssVar<T extends DesignToken>(token: T): `var(--${T})`;\n',
].join('\n');
writeFileSync(`${DIST_TS}/index.d.ts`, barrelDts);
writeFileSync(`${DIST_TS}/index.d.cts`, barrelDts);
console.log(`✓ ${DIST_TS}/index.js + index.cjs generated`);
3 changes: 1 addition & 2 deletions packages/tokens/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
"scripts": {
"build": "tsup && pnpm build:tokens",
"build:tokens": "node config.js",
Comment thread
froggy1014 marked this conversation as resolved.
"transform:tokens": "token-transformer ../../tokens /tmp/tokens-transformed.json",
"lint": "pnpm exec biome lint",
"typecheck": "tsc",
"prepack": "pnpm run build",
Expand All @@ -32,8 +31,8 @@
"@types/react": "catalog:react",
"react": "catalog:react",
"react-dom": "catalog:react",
"@tokens-studio/sd-transforms": "^2.0.0",
"style-dictionary": "^5.4.0",
"token-transformer": "^0.0.33",
"tsup": "catalog:",
"typescript": "catalog:"
},
Expand Down
Loading
Loading