@@ -0,0 +1,13 @@
{
  "package": {
    "searchPatterns": "categories/**",
    "classifications": {
      "platform": {
        "web": "@tesseract-design/web-*"
      },
      "framework": {
        "react": "@tesseract-design/*-react"
      }
    }
  }
}
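
The classification globs above are matched against each package's name with minimatch inside the getPackages helper of utils/data.ts (added below). A minimal sketch of that lookup, using @tesseract-design/web-action-react from this workspace as the example input:

import { minimatch } from 'minimatch';

const classifications = {
  platform: { web: '@tesseract-design/web-*' },
  framework: { react: '@tesseract-design/*-react' },
};

// classify('@tesseract-design/web-action-react')
// -> { platform: 'web', framework: 'react' }
const classify = (packageName: string) => Object.fromEntries(
  Object.entries(classifications).map(([group, entries]) => {
    const [label] = Object.entries(entries)
      .find(([, pattern]) => minimatch(packageName, pattern)) ?? [];
    return [group, label] as const;
  }),
);
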
@@ -0,0 +1,8 @@
import { ActionButton } from '@tesseract-design/web-action-react';

The `<ActionButton>` component is a wrapper around the `<Button>` component that adds a few extra props to make it
easier to create action buttons.

<ActionButton>
  Button
</ActionButton>
@@ -109,3 +109,5 @@ dist
types/
.amanuensis/
components/
pages/**/*.md
pages/**/*.mdx
@@ -0,0 +1,191 @@
import {
  cp, readFile, rm, stat, writeFile,
} from 'fs/promises';
import { dirname, resolve } from 'path';
import { Argv } from 'yargs';
import { Stats } from 'fs';
import { mkdirp } from 'mkdirp';
import { getPackages, TypedocData } from '../utils/data';
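
// Confirms that typedoc.json exists at the given path and is a regular file;
// logs the outcome and throws when it is missing or points at a directory.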
const ensureTypedocJson = async (typedocPath: string) => {
  const trueTypedocPath = resolve(typedocPath);
  process.stdout.write(`Using typedoc.json path: ${trueTypedocPath}\n`);
  process.stdout.write('Does the file exist? ');
  let statResult: Stats;
  try {
    statResult = await stat(trueTypedocPath);
  } catch (errRaw) {
    const err = errRaw as NodeJS.ErrnoException;
    if (err.code === 'ENOENT') {
      process.stdout.write('no\n');
      process.stderr.write('Could not find typedoc.json\n');
      throw new Error('Could not find typedoc.json');
    }
    process.stdout.write('maybe?\n');
    process.stderr.write('Could not ensure typedoc.json\n');
    throw err;
  }
  if (statResult.isDirectory()) {
    process.stdout.write('no\n');
    process.stderr.write('typedoc.json is a directory\n');
    throw new Error('typedoc.json is a directory');
  }
  process.stdout.write('yes\n');
};
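
// Runs the project-local typedoc binary and writes its JSON output to
// .amanuensis/data.json (resolved relative to this package's root).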
const generateTypedocData = async () => {
  process.stdout.write('Generating typedoc data...\n');
  const outPath = resolve(__dirname, '..', '..', '..', '.amanuensis', 'data.json');
  const typedocBinPath = resolve(__dirname, '..', '..', '..', 'node_modules', '.bin', 'typedoc');
  const { execa } = await import('execa');
  await execa(typedocBinPath, ['--json', outPath], {
    stdout: 'inherit',
    stderr: 'inherit',
  });
  process.stdout.write('done\n');
};
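
// Combines the raw typedoc output with the package metadata discovered through
// the .amanuensis config, then rewrites data.json in place with the merged structure.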
const produceGroupings = async () => {
  process.stdout.write('Grouping typedoc data...\n');
  const typedocDataJsonPath = resolve(__dirname, '..', '..', '..', '.amanuensis', 'data.json');
  const typedocDataJson = await readFile(typedocDataJsonPath, 'utf-8');
  const typedocData = JSON.parse(typedocDataJson) as TypedocData;
  const packages = await getPackages(process.cwd());
  const groupings = {
    packages,
    typedocData,
  };
  await writeFile(typedocDataJsonPath, JSON.stringify(groupings, null, 2));
  process.stdout.write(`File written to ${typedocDataJsonPath}\n`);
};
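
// Copies wrapper components into the app (overrides from the project's .amanuensis
// directory win over the bundled defaults) and copies each package's markdown files
// into the Next.js pages tree.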
const linkComponents = async () => {
  process.stdout.write('Linking components...\n');
  const projectCwd = resolve(process.cwd(), '.amanuensis');
  const defaultCwd = resolve(__dirname, '..', '..', '..', 'default');
  const destCwd = resolve(__dirname, '..', '..', '..');
  const componentsList = [
    'components/Wrapper.tsx',
  ];
  try {
    await rm(resolve(destCwd, 'components'), { recursive: true });
  } catch {
    // noop
  }
  await Promise.all(componentsList.map(async (componentPath) => {
    const destPath = resolve(destCwd, componentPath);
    let baseCwd = projectCwd;
    try {
      await stat(resolve(baseCwd, componentPath));
    } catch (errRaw) {
      const err = errRaw as NodeJS.ErrnoException;
      if (err.code === 'ENOENT') {
        baseCwd = defaultCwd;
      }
    }
    await mkdirp(dirname(destPath));
    await cp(
      resolve(baseCwd, componentPath),
      destPath,
    );
    process.stdout.write(`Linked ${componentPath}\n`);
  }));

  const typedocDataJsonPath = resolve(__dirname, '..', '..', '..', '.amanuensis', 'data.json');
  const typedocDataJson = await readFile(typedocDataJsonPath, 'utf-8');
  const typedocData = JSON.parse(typedocDataJson) as TypedocData;
  await Promise.all(
    typedocData.packages.map(async (pkg: any) => {
      await mkdirp(resolve(destCwd, 'pages', pkg.basePath));
      await Promise.all(
        pkg.markdown.map(async (m: any) => {
          const srcPath = resolve(process.cwd(), pkg.basePath, m.filePath);
          const destPath = resolve(destCwd, 'pages', pkg.basePath, m.name);
          process.stdout.write(`Copying ${srcPath}\n`);
          await cp(srcPath, destPath);
        }),
      );
    }),
  );
  // try {
  //   await rm(resolve(destCwd, 'pages'), { recursive: true });
  // } catch {
  //   // noop
  // }
  //
  // await cp(
  //   resolve(defaultCwd, 'pages'),
  //   resolve(destCwd, 'pages'),
  //   { recursive: true },
  // );
  process.stdout.write('done\n');
};

export const description = 'Generate documentation from typedoc.json' as const;

export enum GenerateReturnCode {
  SUCCESS = 0,
  NO_TYPEDOC_JSON = -1,
  COULD_NOT_GENERATE_TYPEDOC_DATA = -2,
  COULD_NOT_PRODUCE_GROUPINGS = -3,
  COULD_NOT_GENERATE_PAGES = -4,
}

export interface GenerateArgs {
  typedocJsonPath?: string;
  subcommands?: string[];
}

export const builder = (yargs: Argv) => yargs
  .option('typedocJsonPath', {
    type: 'string',
    alias: 't',
  });

const generate = async (args: GenerateArgs) => {
  const {
    typedocJsonPath = resolve(process.cwd(), 'typedoc.json'),
  } = args;
  try {
    await ensureTypedocJson(typedocJsonPath);
  } catch {
    return GenerateReturnCode.NO_TYPEDOC_JSON;
  }
  try {
    await generateTypedocData();
  } catch {
    return GenerateReturnCode.COULD_NOT_GENERATE_TYPEDOC_DATA;
  }
  try {
    await produceGroupings();
  } catch {
    return GenerateReturnCode.COULD_NOT_PRODUCE_GROUPINGS;
  }
  try {
    await linkComponents();
  } catch {
    return GenerateReturnCode.COULD_NOT_GENERATE_PAGES;
  }
  return GenerateReturnCode.SUCCESS;
};

export default generate;
@@ -1,50 +1,8 @@
import { Argv } from 'yargs';
import { resolve, dirname } from 'path';
import { cp, stat, unlink } from 'fs/promises';
import { mkdirp } from 'mkdirp';
import { resolve } from 'path';

const DEFAULT_PORT = 3000 as const;

const linkComponents = async () => {
  process.stdout.write('Linking components...\n');
  const projectCwd = resolve(process.cwd(), '.amanuensis');
  const defaultCwd = resolve(__dirname, '..', '..', '..', 'default');
  const destCwd = resolve(__dirname, '..', '..', '..');
  const componentsList = [
    'components/Wrapper.tsx',
  ];
  await Promise.all(componentsList.map(async (componentPath) => {
    const destPath = resolve(destCwd, componentPath);
    try {
      await unlink(destPath);
    } catch {
      // noop
    }
    let baseCwd = projectCwd;
    try {
      await stat(resolve(baseCwd, componentPath));
    } catch (errRaw) {
      const err = errRaw as NodeJS.ErrnoException;
      if (err.code === 'ENOENT') {
        baseCwd = defaultCwd;
      }
    }
    await mkdirp(dirname(destPath));
    await cp(
      resolve(baseCwd, componentPath),
      destPath,
    );
    process.stdout.write(`Linked ${componentPath}\n`);
  }));
  process.stdout.write('done\n');
};

const buildApp = async () => {
  process.stdout.write('Building app...\n');
@@ -99,7 +57,6 @@ const serve = async (args: ServeArgs) => {
    port = DEFAULT_PORT,
  } = args;
  await linkComponents();
  await buildApp();
  await serveApp(port);
  return ServeReturnCode.SUCCESS;
@@ -0,0 +1,23 @@
const withMDX = require('@next/mdx')({
  extension: /\.mdx?$/,
  options: {
    // If you use remark-gfm, you'll need to use next.config.mjs
    // as the package is ESM only
    // https://github.com/remarkjs/remark-gfm#install
    remarkPlugins: [],
    rehypePlugins: [],
    // If you use `MDXProvider`, uncomment the following line.
    // providerImportSource: "@mdx-js/react",
  },
})

/** @type {import('next').NextConfig} */
const nextConfig = {
  // Configure pageExtensions to include md and mdx
  pageExtensions: ['ts', 'tsx', 'js', 'jsx', 'md', 'mdx'],
  // Optionally, add any other Next.js config below
  reactStrictMode: true,
}

// Merge MDX config with Next.js config
module.exports = withMDX(nextConfig)
@@ -54,16 +54,33 @@
    "access": "public"
  },
  "dependencies": {
    "@mdx-js/loader": "^2.3.0",
    "@mdx-js/react": "^2.3.0",
    "@next/mdx": "^13.4.12",
    "execa": "^7.2.0",
    "glob": "^10.3.3",
    "minimatch": "^9.0.3",
    "mkdirp": "^3.0.1",
    "next": "13.4.7",
    "react": "^18.2.0",
    "react-dom": "^18.2.0",
    "react-markdown": "^8.0.7",
    "typedoc": "^0.24.8",
    "yargs": "^17.7.2"
    "yargs": "^17.7.2",
    "@tesseract-design/web-action-react": "workspace:*",
    "@tesseract-design/web-base": "workspace:*",
    "@tesseract-design/web-blob-react": "workspace:*",
    "@tesseract-design/web-choice-react": "workspace:*",
    "@tesseract-design/web-color-react": "workspace:*",
    "@tesseract-design/web-formatted-react": "workspace:*",
    "@tesseract-design/web-freeform-react": "workspace:*",
    "@tesseract-design/web-information-react": "workspace:*",
    "@tesseract-design/web-multichoice-react": "workspace:*",
    "@tesseract-design/web-navigation-react": "workspace:*",
    "@tesseract-design/web-number-react": "workspace:*",
    "@tesseract-design/web-temporal-react": "workspace:*"
  },
  "types": "./dist/types/index.d.ts",
  "types": "./dist/types/src/index.d.ts",
  "main": "./dist/cjs/production/index.js",
  "module": "./dist/esm/production/index.js",
  "exports": {
@@ -74,7 +91,7 @@
      },
      "require": "./dist/cjs/production/index.js",
      "import": "./dist/esm/production/index.js",
      "types": "./dist/types/index.d.ts"
      "types": "./dist/types/src/index.d.ts"
    }
  },
  "typesVersions": {
@@ -1,7 +1,7 @@
import {GetStaticProps, NextPage} from 'next';
import {getReadmeText} from '../src/data';
import ReactMarkdown from 'react-markdown';
import {Wrapper} from '../components/Wrapper';
import {getReadmeText} from '../utils/data';

interface IndexPageProps {
  readmeType: 'markdown';
@@ -1,86 +0,0 @@
import { stat } from 'fs/promises';
import { resolve } from 'path';
import { Argv } from 'yargs';
import { Stats } from 'fs';

const ensureTypedocJson = async (typedocPath: string) => {
  const trueTypedocPath = resolve(typedocPath);
  process.stdout.write(`Using typedoc.json path: ${trueTypedocPath}\n`);
  process.stdout.write('Does the file exist? ');
  let statResult: Stats;
  try {
    statResult = await stat(trueTypedocPath);
  } catch (errRaw) {
    const err = errRaw as NodeJS.ErrnoException;
    if (err.code === 'ENOENT') {
      process.stdout.write('no\n');
      process.stderr.write('Could not find typedoc.json\n');
      throw new Error('Could not find typedoc.json');
    }
    process.stdout.write('maybe?\n');
    process.stderr.write('Could not ensure typedoc.json\n');
    throw err;
  }
  if (statResult.isDirectory()) {
    process.stdout.write('no\n');
    process.stderr.write('typedoc.json is a directory\n');
    throw new Error('typedoc.json is a directory');
  }
  process.stdout.write('yes\n');
};

const generateTypedocData = async () => {
  process.stdout.write('Generating typedoc data...\n');
  const outPath = resolve(__dirname, '..', '..', '..', '.amanuensis', 'data.json');
  const typedocBinPath = resolve(__dirname, '..', '..', '..', 'node_modules', '.bin', 'typedoc');
  const { execa } = await import('execa');
  await execa(typedocBinPath, ['--json', outPath], {
    stdout: 'inherit',
    stderr: 'inherit',
  });
  process.stdout.write('done\n');
};

export const description = 'Generate documentation from typedoc.json' as const;

export enum GenerateReturnCode {
  SUCCESS = 0,
  NO_TYPEDOC_JSON = -1,
  COULD_NOT_GENERATE_TYPEDOC_DATA = -2,
}

export interface GenerateArgs {
  typedocJsonPath?: string;
  subcommands?: string[];
}

export const builder = (yargs: Argv) => yargs
  .option('typedocJsonPath', {
    type: 'string',
    alias: 't',
  });

const generate = async (args: GenerateArgs) => {
  const {
    typedocJsonPath = resolve(process.cwd(), 'typedoc.json'),
  } = args;
  try {
    await ensureTypedocJson(typedocJsonPath);
  } catch {
    return GenerateReturnCode.NO_TYPEDOC_JSON;
  }
  try {
    await generateTypedocData();
  } catch {
    return GenerateReturnCode.COULD_NOT_GENERATE_TYPEDOC_DATA;
  }
  return GenerateReturnCode.SUCCESS;
};

export default generate;
@@ -1,35 +0,0 @@
import { readFile } from 'fs/promises';
import { resolve } from 'path';

interface TypedocDataTextNode {
  kind: 'text';
  text: string;
}

interface TypedocDataInlineTagNode {
  kind: 'inline-tag';
  tag: string;
  text: string;
  target: number;
  tsLinkText: string;
}

type TypedocDataNode = TypedocDataTextNode | TypedocDataInlineTagNode;

export interface TypedocData {
  readme: TypedocDataNode[];
}

export const getReadmeText = async () => {
  const typedocDataJson = await readFile(resolve('.amanuensis', 'data.json'), 'utf-8');
  const typedocData = JSON.parse(typedocDataJson) as TypedocData;
  return typedocData.readme.reduce(
    (theText, node) => {
      if (node.kind === 'text') {
        return `${theText}${node.text}`;
      }
      return theText;
    },
    '',
  );
};
@@ -5,8 +5,8 @@ import yargs from 'yargs';

const main = async (args: string[]) => {
  const COMMANDS = {
    serve: await import('./commands/serve'),
    generate: await import('./commands/generate'),
    serve: await import('../commands/serve'),
    generate: await import('../commands/generate'),
  };

  const yargsBuilder = Object.entries(COMMANDS).reduce(
@@ -11,7 +11,7 @@
    "importHelpers": true,
    "declaration": true,
    "sourceMap": true,
    "rootDir": "./",
    "rootDir": ".",
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
@@ -16,7 +16,7 @@
    "importHelpers": true,
    "declaration": true,
    "sourceMap": true,
    "rootDir": "./src",
    "rootDir": ".",
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
@@ -0,0 +1,141 @@
import { readFile } from 'fs/promises';
import { dirname, basename, resolve } from 'path';
import { glob } from 'glob';
import { minimatch } from 'minimatch';

export interface AmanuensisConfig {
  package: {
    searchPatterns: string | string[];
    classifications: Record<string, Record<string, string | string[]>>;
  }
}

interface TypedocDataTextNode {
  kind: 'text';
  text: string;
}

interface TypedocDataInlineTagNode {
  kind: 'inline-tag';
  tag: string;
  text: string;
  target: number;
  tsLinkText: string;
}

interface SymbolIdMapEntry {
  sourceFileName: string;
  qualifiedName: string;
}

type TypedocDataNode = TypedocDataTextNode | TypedocDataInlineTagNode;

export interface TypedocData {
  packages: any[];
  typedocData: {
    readme: TypedocDataNode[];
    symbolIdMap: Record<string, SymbolIdMapEntry>;
  };
}
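
// Concatenates the plain-text nodes of the README that typedoc captured into
// .amanuensis/data.json (resolved relative to the given cwd).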
export const getReadmeText = async (cwd = process.cwd()) => {
  const typedocDataJsonPath = resolve(cwd, '.amanuensis', 'data.json');
  const typedocDataJson = await readFile(typedocDataJsonPath, 'utf-8');
  const typedocData = JSON.parse(typedocDataJson) as {
    typedocData: {
      readme: { kind: string, text: string }[]
    }
  };
  return typedocData.typedocData.readme.reduce(
    (theText, node) => {
      if (node.kind === 'text') {
        return `${theText}${node.text}`;
      }
      return theText;
    },
    '',
  );
};
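
// Discovers workspace packages under the configured search patterns, classifies
// each one by matching its name against the configured globs, and gathers the
// markdown files that live alongside it.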
export const getPackages = async (cwd = process.cwd()) => {
  const configPath = resolve(cwd, '.amanuensis', 'config.json');
  const configString = await readFile(configPath, 'utf-8');
  const config = JSON.parse(configString) as AmanuensisConfig;
  const searchPatternsRaw = config.package.searchPatterns;
  const searchPatterns = Array.isArray(searchPatternsRaw) ? searchPatternsRaw : [searchPatternsRaw];
  const patternPackagePaths = await Promise.all(
    searchPatterns.map(async (searchPattern) => glob(
      searchPattern === 'package.json' || searchPattern.endsWith('/package.json')
        ? searchPattern
        : `${searchPattern}/package.json`,
      {
        ignore: ['**/node_modules/**'],
      },
    )),
  );
  const packagePaths = patternPackagePaths.flat();
  const markdownFilePaths = await glob(
    '**/*.{md,mdx}',
    {
      ignore: ['**/node_modules/**'],
    },
  );
  return Promise.all(
    packagePaths.map(async (packagePath) => {
      const packageString = await readFile(packagePath, 'utf-8');
      const basePath = dirname(packagePath);
      const packageJson = JSON.parse(packageString) as { name: string };
      const classifications = Object.fromEntries(
        Object.entries(config.package.classifications)
          .map(([classification, c]) => {
            const [thisClassifications] = Object.entries(c)
              .find(([, globRaw]) => {
                const globs = Array.isArray(globRaw) ? globRaw : [globRaw];
                return globs.some((g) => minimatch(packageJson.name, g));
              }) ?? [];
            return [classification, thisClassifications] as const;
          }),
      );
      const markdownFiles = markdownFilePaths.filter((markdownFilePath) => (
        markdownFilePath.startsWith(basePath)
      ));
      const markdown = await Promise.all(
        markdownFiles.map(async (markdownFilePath) => {
          const content = await readFile(markdownFilePath, 'utf-8');
          const filePath = markdownFilePath.slice(basePath.length + 1);
          const file = filePath.split('/').at(-1) ?? '';
          const name = filePath === 'README.md' ? 'index.md' : basename(file);
          return {
            name,
            filePath,
            content,
          };
        }),
      );
      return {
        name: packageJson.name,
        packageJson,
        basePath,
        markdown,
        classifications,
      };
    }),
  );
};
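
// Lists the unique first-party source files referenced by typedoc's symbolIdMap,
// skipping anything that resolves into node_modules.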
export const getFileSources = async (cwd = process.cwd()) => {
  const typedocDataJsonPath = resolve(cwd, '.amanuensis', 'data.json');
  const typedocDataJson = await readFile(typedocDataJsonPath, 'utf-8');
  const typedocData = JSON.parse(typedocDataJson) as TypedocData;
  const symbolIdMapEntries = Object.values(typedocData.typedocData.symbolIdMap);
  const firstPartySources = symbolIdMapEntries.filter(
    ({ sourceFileName }) => !sourceFileName.startsWith('node_modules'),
  );
  const firstPartySourceFiles = firstPartySources.map(({ sourceFileName }) => sourceFileName);
  const uniqueFirstPartySourceFiles = [...new Set(firstPartySourceFiles)];
  return uniqueFirstPartySourceFiles;
};
@@ -1,8 +1,6 @@
{
  "entryPoints": ["categories/**"],
  "exclude": ["**/*.test.(ts|tsx)", "**/node_modules/**"],
  "entryPointStrategy": "packages",
  "name": "@tesseract-design/tesseract-web-react",
  "json": ".data/typedoc-data.json",
  "pretty": false,
  "tsconfig": "tsconfig.json"
}