Implement basic compilation and formatting for PDF and EPUB formats.
@@ -103,5 +103,5 @@ dist | |||||
# TernJS port file | # TernJS port file | ||||
.tern-port | .tern-port | ||||
types/ | |||||
.npmrc | .npmrc |
@@ -12,6 +12,12 @@ export const addCommands = (cli: Cli) => { | |||||
describe: 'Binds a collection of static Web assets into a book.', | describe: 'Binds a collection of static Web assets into a book.', | ||||
handler: bindController.bindBook, | handler: bindController.bindBook, | ||||
options: { | options: { | ||||
'sourceType': { | |||||
alias: 's', | |||||
describe: 'The kind of source that will be supplied when binding the book.', | |||||
type: 'string', | |||||
default: 'dir', | |||||
}, | |||||
'format': { | 'format': { | ||||
alias: 'f', | alias: 'f', | ||||
describe: 'The format of the output.', | describe: 'The format of the output.', | ||||
@@ -18,6 +18,11 @@ export class BindControllerImpl implements BindController { | |||||
readonly bindBook: CommandHandler = async (params) => { | readonly bindBook: CommandHandler = async (params) => { | ||||
if (!params.interactive) { | if (!params.interactive) { | ||||
const checkArgs = params.args as Record<string, unknown>; | const checkArgs = params.args as Record<string, unknown>; | ||||
const checkSourceType = checkArgs.sourceType ?? checkArgs.source ?? checkArgs.s; | |||||
if (typeof checkSourceType === 'undefined') { | |||||
params.logger.error('Missing required argument: sourceType'); | |||||
return -1; | |||||
} | |||||
const checkFormat = checkArgs.format ?? checkArgs.f; | const checkFormat = checkArgs.format ?? checkArgs.f; | ||||
if (typeof checkFormat === 'undefined') { | if (typeof checkFormat === 'undefined') { | ||||
params.logger.error('Missing required argument: format'); | params.logger.error('Missing required argument: format'); | ||||
@@ -30,12 +35,13 @@ export class BindControllerImpl implements BindController { | |||||
} | } | ||||
} | } | ||||
const { inputPath: inputPathRaw, f, format = f, o, outputPath = o } = params.args; | |||||
const { inputPath: inputPathRaw, f, format = f, o, outputPath = o, s, source = s, sourceType = source, } = params.args; | |||||
const inputPath = inputPathRaw ?? process.cwd(); | const inputPath = inputPathRaw ?? process.cwd(); | ||||
try { | try { | ||||
const response = await this.bindService.bindBook({ | const response = await this.bindService.bindBook({ | ||||
input: { | input: { | ||||
sourceType, | |||||
path: inputPath, | path: inputPath, | ||||
}, | }, | ||||
output: { | output: { | ||||
@@ -103,5 +103,5 @@ dist | |||||
# TernJS port file | # TernJS port file | ||||
.tern-port | .tern-port | ||||
types/ | |||||
.npmrc | .npmrc |
@@ -16,6 +16,7 @@ | |||||
"pridepack" | "pridepack" | ||||
], | ], | ||||
"devDependencies": { | "devDependencies": { | ||||
"@types/archiver": "^6.0.2", | |||||
"@types/node": "^20.12.7", | "@types/node": "^20.12.7", | ||||
"pridepack": "2.6.0", | "pridepack": "2.6.0", | ||||
"tslib": "^2.6.2", | "tslib": "^2.6.2", | ||||
@@ -65,6 +66,12 @@ | |||||
"*": {} | "*": {} | ||||
}, | }, | ||||
"dependencies": { | "dependencies": { | ||||
"valibot": "^0.30.0" | |||||
"archiver": "^7.0.1", | |||||
"glob": "^11.0.0", | |||||
"mime": "^4.0.4", | |||||
"pdf-lib": "^1.17.1", | |||||
"puppeteer-html-pdf": "^4.0.8", | |||||
"valibot": "^0.30.0", | |||||
"xml-js": "^1.6.11" | |||||
} | } | ||||
} | } |
@@ -1,8 +1,8 @@ | |||||
import * as v from 'valibot'; | import * as v from 'valibot'; | ||||
export const inputSchema = v.object({}, v.unknown()); | |||||
const commonInputSchema = v.object({}); | |||||
export type Input = v.Output<typeof inputSchema>; | |||||
export type CommonInput = v.Output<typeof commonInputSchema>; | |||||
export const BOOK_FILENAME = 'patchouli.book.json' as const; | export const BOOK_FILENAME = 'patchouli.book.json' as const; | ||||
@@ -12,7 +12,12 @@ export const bindingFileBaseSchema = v.object({ | |||||
generatorType: v.string(), | generatorType: v.string(), | ||||
generatorConfigFilePath: v.optional(v.string()), | generatorConfigFilePath: v.optional(v.string()), | ||||
generatorDistDirectory: v.string(), | generatorDistDirectory: v.string(), | ||||
pageOrdering: v.array(v.string()), // allow blobs on page ordering | |||||
pageFileExtensions: v.array(v.string()), | |||||
pageOrdering: v.optional( | |||||
v.array( | |||||
v.string() | |||||
) | |||||
), // allow blobs on page ordering // ahh so that's why I added this comment, so we can add the generated files in binding | |||||
}); | }); | ||||
export const bookFileSchema = v.object({ | export const bookFileSchema = v.object({ | ||||
@@ -26,3 +31,23 @@ export const bookFileSchema = v.object({ | |||||
subjects: v.optional(v.array(v.string())), | subjects: v.optional(v.array(v.string())), | ||||
rights: v.optional(v.string()), | rights: v.optional(v.string()), | ||||
}); | }); | ||||
export type Book = v.Output<typeof bookFileSchema>; | |||||
export type Binding = v.Output<typeof bindingFileBaseSchema>; | |||||
export interface BookManifestItem { | |||||
id: string; | |||||
href: string; | |||||
path?: string; | |||||
buffer?: Buffer; | |||||
type?: string; | |||||
properties?: string; | |||||
} | |||||
export interface BookData { | |||||
metadata: Book; | |||||
manifest: BookManifestItem[]; | |||||
spine: BookManifestItem['id'][]; | |||||
binding: Binding; | |||||
} |
@@ -1,14 +1,15 @@ | |||||
import * as v from 'valibot'; | import * as v from 'valibot'; | ||||
import {Input} from '../../common'; | |||||
import {CommonInput} from '../../common'; | |||||
export const name = 'archive' as const; | export const name = 'archive' as const; | ||||
const inputSchema = v.object({ | |||||
export const inputSchema = v.object({ | |||||
sourceType: v.literal(name), | |||||
blob: v.blob(), | blob: v.blob(), | ||||
type: v.string(), | type: v.string(), | ||||
}); | }); | ||||
interface ArchiveInput extends Input, v.Output<typeof inputSchema> {} | |||||
interface ArchiveInput extends CommonInput, v.Output<typeof inputSchema> {} | |||||
export class InvalidArchiveTypeError extends Error {} | export class InvalidArchiveTypeError extends Error {} | ||||
@@ -24,7 +25,8 @@ const extractTar = () => { | |||||
}; | }; | ||||
export const compileFromInput = async <T extends ArchiveInput = ArchiveInput>(input: T) => { | |||||
export const compileFromInput = async <T extends CommonInput = ArchiveInput>(inputRaw: T) => { | |||||
const input = inputRaw as unknown as ArchiveInput; | |||||
switch (input.type) { | switch (input.type) { | ||||
// TODO get files from archive type | // TODO get files from archive type | ||||
case 'zip': { | case 'zip': { | ||||
@@ -0,0 +1,174 @@ | |||||
import {readdir, stat, readFile} from 'fs/promises';
import {randomUUID} from 'crypto';
import {resolve} from 'path';
import {pathToFileURL} from 'url';
import {glob} from 'glob';
import * as v from 'valibot';
import {
  Binding,
  BINDING_FILENAME,
  Book,
  BOOK_FILENAME,
  BookData,
  BookManifestItem,
  bookFileSchema,
  CommonInput,
} from '../../common';
export const name = 'dir' as const; | |||||
export const inputSchema = v.object({ | |||||
sourceType: v.literal(name), | |||||
path: v.string(), | |||||
}); | |||||
interface DirInput extends CommonInput, v.Output<typeof inputSchema> {} | |||||
export class InvalidInputPathError extends Error {} | |||||
const getBookFile = async (bookFilePath: string, packageJsonFilePath: string): Promise<Book> => { | |||||
const bookFileString = await readFile(bookFilePath, 'utf-8'); | |||||
const bookFileRaw = JSON.parse(bookFileString); | |||||
const packageJsonFileString = await readFile(packageJsonFilePath, 'utf-8'); | |||||
const packageJson = JSON.parse(packageJsonFileString); | |||||
const bookFile = { | |||||
...bookFileRaw, | |||||
id: bookFileRaw.id ?? randomUUID(), | |||||
publisher: bookFileRaw.publisher ?? '@modal-sh/patchouli', | |||||
description: bookFileRaw.description ?? packageJson.description, | |||||
rights: bookFileRaw.rights ?? packageJson.copyright, | |||||
}; | |||||
return await v.parseAsync(bookFileSchema, bookFile); | |||||
}; | |||||
const getBindingFile = async (bindingFilePath: string, defaultBinding: Binding): Promise<Binding> => { | |||||
const bindingFileString = await readFile(bindingFilePath, 'utf-8'); | |||||
const bindingFileRaw = JSON.parse(bindingFileString); | |||||
return { | |||||
...defaultBinding, | |||||
...bindingFileRaw, | |||||
}; | |||||
}; | |||||
const getItems = async (dir: string, prefix = ''): Promise<BookManifestItem[]> => { | |||||
const thisDirFiles = await readdir(dir); | |||||
const items = await Promise.all( | |||||
thisDirFiles.map(async (p) => { | |||||
const thisPath = resolve(dir, p); | |||||
const s = await stat(thisPath); | |||||
if (s.isDirectory()) { | |||||
return getItems(thisPath, p); | |||||
} | |||||
const href = [prefix, p].filter((s) => s.trim()).join('/'); | |||||
const [idRaw] = href.split('.'); | |||||
const id = idRaw.replace(/\//g, '-'); | |||||
return [ | |||||
{ | |||||
id, | |||||
href, | |||||
path: thisPath, | |||||
}, | |||||
]; | |||||
}) | |||||
); | |||||
return items.flat(); | |||||
}; | |||||
const getSpine = async (binding: Binding) => { | |||||
const { | |||||
pageOrdering, | |||||
generatorDistDirectory, | |||||
pageFileExtensions, | |||||
} = binding; | |||||
if (!Array.isArray(pageOrdering)) { | |||||
// get glob of HTML pages | |||||
return []; | |||||
} | |||||
const directoryNormalized = generatorDistDirectory.replace(/\\/g, '/'); | |||||
const extensionsGlobFragment = pageFileExtensions.join(','); | |||||
const pagesPaths = await Promise.all( | |||||
pageOrdering.map( | |||||
async (globString) => { | |||||
switch (globString) { | |||||
case 'index': | |||||
case 'title': | |||||
return glob(`${directoryNormalized}/${globString}{${extensionsGlobFragment}`); | |||||
case 'toc': | |||||
return []; // TODO check if TOC is pre-generated or there is a directive to generate a TOC | |||||
default: | |||||
break; | |||||
} | |||||
if (globString.startsWith('pages:')) { | |||||
const [prefix, ...etcGlobStrArr] = globString.split(':'); | |||||
const globStrPath = etcGlobStrArr.join(':'); | |||||
switch (prefix) { | |||||
case 'pages': { | |||||
const thePagesPath = await glob(`${directoryNormalized}/${globStrPath}{${extensionsGlobFragment}}`); | |||||
return thePagesPath.sort((a, b) => a.localeCompare(b)); | |||||
} | |||||
} | |||||
} | |||||
return []; | |||||
} | |||||
) | |||||
); | |||||
return pagesPaths.flat(); | |||||
}; | |||||
export const compileFromInput = async <T extends CommonInput = DirInput>(inputRaw: T): Promise<BookData> => { | |||||
const input = inputRaw as unknown as DirInput; | |||||
const files = await readdir(input.path); | |||||
if (!files.includes(BOOK_FILENAME)) { | |||||
throw new InvalidInputPathError(`Path does not contain a "${BOOK_FILENAME}" file.`); | |||||
} | |||||
const bookFilePath = resolve(input.path, BOOK_FILENAME); | |||||
const packageJsonFilePath = resolve(input.path, 'package.json'); | |||||
const bookFile = await getBookFile(bookFilePath, packageJsonFilePath); | |||||
const defaultBinding: Binding = { | |||||
generatorType: 'static', | |||||
generatorDistDirectory: resolve(input.path, 'dist'), | |||||
pageFileExtensions: ['.html', '.htm', '.xhtml'], | |||||
}; | |||||
const bindingFilePath = resolve(input.path, BINDING_FILENAME); | |||||
const bindingFile: Binding = files.includes(BINDING_FILENAME) | |||||
? await getBindingFile(bindingFilePath, defaultBinding) | |||||
: defaultBinding; | |||||
const isAstro = files.includes('astro.config.mjs'); | |||||
if (isAstro) { | |||||
bindingFile.generatorType = 'astro'; | |||||
bindingFile.generatorConfigFilePath = resolve(input.path, 'astro.config.mjs'); | |||||
const { default: config } = await import('file:///' + bindingFile.generatorConfigFilePath); | |||||
bindingFile.generatorDistDirectory = resolve(input.path, config.outDir ?? 'dist'); | |||||
if (typeof bindingFile.pageOrdering !== 'object') { | |||||
bindingFile.pageOrdering = []; // if we want to have a custom ordering, e.g. last minute changes | |||||
} | |||||
} | |||||
const spine = await getSpine(bindingFile); | |||||
const manifest = await getItems( | |||||
bindingFile.generatorDistDirectory | |||||
); | |||||
return { | |||||
metadata: bookFile, | |||||
manifest, | |||||
spine, | |||||
binding: bindingFile, | |||||
}; | |||||
}; |
@@ -1,66 +0,0 @@ | |||||
import {readdir, stat, readFile} from 'fs/promises'; | |||||
import {resolve} from 'path'; | |||||
import * as v from 'valibot'; | |||||
import {BINDING_FILENAME, bindingFileBaseSchema, BOOK_FILENAME, bookFileSchema, Input} from '../../common'; | |||||
export const name = 'path' as const; | |||||
const inputSchema = v.object({ | |||||
path: v.string(), | |||||
}); | |||||
interface PathInput extends Input, v.Output<typeof inputSchema> {} | |||||
const readPath = async (path: string, rootPath = path, readFiles = []) => { | |||||
const files = await readdir(path); | |||||
// TODO get the buffers on the tree | |||||
//console.log(files); | |||||
}; | |||||
export class InvalidInputPathError extends Error {} | |||||
type Book = v.Output<typeof bookFileSchema>; | |||||
type Binding = v.Output<typeof bindingFileBaseSchema>; | |||||
const getBookFile = async (bookFilePath: string): Promise<Book | undefined> => { | |||||
const bookFileString = await readFile(bookFilePath, 'utf-8'); | |||||
const bookFileRaw = JSON.parse(bookFileString); | |||||
return await v.parseAsync(bookFileSchema, bookFileRaw); | |||||
}; | |||||
const getBindingFile = async (bindingFilePath: string, defaultBinding: Binding) => { | |||||
const bindingFileString = await readFile(bindingFilePath, 'utf-8'); | |||||
const bindingFileRaw = JSON.parse(bindingFileString); | |||||
return { | |||||
...defaultBinding, | |||||
...bindingFileRaw, | |||||
}; | |||||
}; | |||||
export const compileFromInput = async <T extends PathInput = PathInput>(input: T) => { | |||||
const files = await readdir(input.path); | |||||
if (!files.includes(BOOK_FILENAME)) { | |||||
throw new InvalidInputPathError(`Path does not contain a "${BOOK_FILENAME}" file.`); | |||||
} | |||||
const bookFilePath = resolve(input.path, BOOK_FILENAME); | |||||
const bookFile = await getBookFile(bookFilePath); | |||||
const defaultBinding = { | |||||
generatorType: 'static', | |||||
// TODO should make the dist directory related to book file when getting contents | |||||
generatorDistDirectory: resolve(input.path, 'dist'), | |||||
}; | |||||
const bindingFilePath = resolve(input.path, BINDING_FILENAME); | |||||
const bindingFile = files.includes(BINDING_FILENAME) | |||||
? await getBindingFile(bindingFilePath, defaultBinding) | |||||
: defaultBinding; | |||||
return [ | |||||
]; | |||||
}; |
@@ -1,7 +1,10 @@ | |||||
import {Input} from '../../common'; | |||||
import {BookData} from '../../common'; | |||||
import {createEpubArchive, prepareEpubContents} from './packaging'; | |||||
export const name = 'epub' as const; | export const name = 'epub' as const; | ||||
export const bindBook = async <T extends Input = Input>(input: T) => { | |||||
return Buffer.from(input.path + ' ' + name); | |||||
export const bindBook = async <U extends BookData = BookData>(bookPackage: U) => { | |||||
const data = await prepareEpubContents(bookPackage); | |||||
return await createEpubArchive(data); | |||||
}; | }; |
@@ -0,0 +1,78 @@ | |||||
import {Readable} from 'stream'; | |||||
import {BookData} from '../../common'; | |||||
import {prepareEpubContainerContents, prepareEpubRootfileContents} from './xml-definitions'; | |||||
import {createReadStream} from 'fs'; | |||||
import assert from 'assert'; | |||||
import archiver from 'archiver'; | |||||
interface EpubArchiveDatum { | |||||
path: string; | |||||
data: Buffer | Readable; | |||||
} | |||||
export const prepareEpubContents = async <U extends BookData = BookData>(bookPackage: U): Promise<EpubArchiveDatum[]> => { | |||||
const baseDir = 'EPUB'; | |||||
const rootFilePath = `${baseDir}/content.opf`; | |||||
const rootFileContents = await prepareEpubRootfileContents(bookPackage); | |||||
return [ | |||||
{ | |||||
path: 'mimetype', | |||||
data: Buffer.from('application/epub+zip'), | |||||
}, | |||||
{ | |||||
path: 'META-INF/container.xml', | |||||
data: Buffer.from(prepareEpubContainerContents(rootFilePath)) | |||||
}, | |||||
{ | |||||
path: rootFilePath, | |||||
data: Buffer.from(rootFileContents), | |||||
}, | |||||
...bookPackage.manifest.map((entry) => { | |||||
if (typeof entry.path === 'string') { | |||||
const finalPath = entry.path.slice(bookPackage.binding.generatorDistDirectory.length ?? 0); | |||||
return { | |||||
path: `${baseDir}/${finalPath}`, | |||||
data: createReadStream(entry.path), | |||||
}; | |||||
} | |||||
assert(entry.buffer instanceof Buffer); | |||||
return { | |||||
path: `${baseDir}/${entry.href}`, | |||||
data: entry.buffer, | |||||
}; | |||||
}), | |||||
]; | |||||
}; | |||||
export const createEpubArchive = async (data: EpubArchiveDatum[]): Promise<Buffer> => { | |||||
const archive = archiver('zip', { | |||||
zlib: { | |||||
level: 9, | |||||
}, | |||||
}); | |||||
return new Promise<Buffer>(async (resolve, reject) => { | |||||
let b = Buffer.from(''); | |||||
archive.on('data', (c) => { | |||||
b = Buffer.concat([b, c]); | |||||
}); | |||||
archive.on('end', () => { | |||||
resolve(b); | |||||
}); | |||||
archive.on('error', (err) => { | |||||
reject(err); | |||||
}); | |||||
data.forEach((entry) => { | |||||
archive.append(entry.data, { | |||||
name: entry.path, | |||||
}); | |||||
}); | |||||
await archive.finalize(); | |||||
}); | |||||
}; |
@@ -0,0 +1,190 @@ | |||||
import {js2xml} from 'xml-js'; | |||||
import {BookData} from '../../common'; | |||||
export const prepareEpubContainerContents = (rootFilePath: string) => { | |||||
return js2xml({ | |||||
_declaration: { | |||||
_attributes: { | |||||
version: '1.0', | |||||
encoding: 'utf-8', | |||||
}, | |||||
}, | |||||
container: { | |||||
_attributes: { | |||||
xmlns: 'urn:oasis:names:tc:opendocument:xmlns:container', | |||||
version: '1.0', | |||||
}, | |||||
rootfiles: { | |||||
rootfile: { | |||||
_attributes: { | |||||
'full-path': rootFilePath, | |||||
'media-type': 'application/oebps-package+xml', | |||||
} | |||||
} | |||||
} | |||||
} | |||||
}, { | |||||
compact: true, | |||||
}); | |||||
}; | |||||
export const prepareEpubRootfileContents = async <U extends BookData = BookData>(bookPackage: U) => { | |||||
const { default: mime } = await import('mime'); // ESM import, convert to CJS for compatibility | |||||
const uniqueIdentifierName = 'BookID'; | |||||
return js2xml({ | |||||
declaration: { | |||||
attributes: { | |||||
version: '1.0', | |||||
encoding: 'utf-8', | |||||
}, | |||||
}, | |||||
elements: [ | |||||
{ | |||||
type: 'element', | |||||
name: 'package', | |||||
attributes: { | |||||
xmlns: 'http://www.idpf.org/2007/opf', | |||||
'xmlns:dc': 'http://purl.org/dc/elements/1.1/', | |||||
version: '3.0', | |||||
'unique-identifier': uniqueIdentifierName, | |||||
}, | |||||
elements: [ | |||||
{ | |||||
type: 'element', | |||||
name: 'metadata', | |||||
elements: [ | |||||
{ | |||||
type: 'element', | |||||
name: 'dc:identifier', | |||||
attributes: { | |||||
id: uniqueIdentifierName, | |||||
}, | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: bookPackage.metadata.id, | |||||
}, | |||||
], | |||||
}, | |||||
typeof bookPackage.metadata.isbn !== 'undefined' | |||||
? { | |||||
type: 'element', | |||||
name: 'dc:identifier', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: bookPackage.metadata.isbn, | |||||
}, | |||||
], | |||||
} | |||||
: undefined, | |||||
{ | |||||
type: 'element', | |||||
name: 'dc:title', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: bookPackage.metadata.title, | |||||
}, | |||||
], | |||||
}, | |||||
typeof bookPackage.metadata.publisher !== 'undefined' | |||||
? { | |||||
type: 'element', | |||||
name: 'dc:publisher', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: bookPackage.metadata.publisher, | |||||
}, | |||||
], | |||||
} | |||||
: undefined, | |||||
{ | |||||
type: 'element', | |||||
name: 'dc:creator', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: bookPackage.metadata.creator, | |||||
}, | |||||
], | |||||
}, | |||||
...(bookPackage.metadata.contributors ?? []).map((c) => ({ | |||||
type: 'element', | |||||
name: 'dc:contributor', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: c, | |||||
}, | |||||
], | |||||
})), | |||||
typeof bookPackage.metadata.description !== 'undefined' | |||||
? { | |||||
type: 'element', | |||||
name: 'dc:description', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: bookPackage.metadata.description, | |||||
}, | |||||
], | |||||
} | |||||
: undefined, | |||||
...(bookPackage.metadata.subjects ?? []).map((c) => ({ | |||||
type: 'element', | |||||
name: 'dc:subject', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: c, | |||||
}, | |||||
], | |||||
})), | |||||
typeof bookPackage.metadata.rights !== 'undefined' | |||||
? { | |||||
type: 'element', | |||||
name: 'dc:rights', | |||||
elements: [ | |||||
{ | |||||
type: 'text', | |||||
text: bookPackage.metadata.rights, | |||||
}, | |||||
], | |||||
} | |||||
: undefined, | |||||
] | |||||
.filter((s) => typeof s !== 'undefined'), | |||||
}, | |||||
{ | |||||
type: 'element', | |||||
name: 'manifest', | |||||
elements: bookPackage.manifest.map((e) => ({ | |||||
type: 'element', | |||||
name: 'item', | |||||
attributes: { | |||||
id: e.id, | |||||
href: e.href, | |||||
'media-type': mime.getType(e.href), | |||||
}, | |||||
})), | |||||
}, | |||||
{ | |||||
type: 'element', | |||||
name: 'spine', | |||||
elements: bookPackage.spine.map((e) => ({ | |||||
type: 'element', | |||||
name: 'itemref', | |||||
attributes: { | |||||
idref: bookPackage.manifest.find((m) => m.href === e || m.path === e)?.id, | |||||
}, | |||||
})), | |||||
}, | |||||
], | |||||
}, | |||||
], | |||||
}, { | |||||
compact: false, | |||||
}); | |||||
}; |
@@ -0,0 +1,53 @@ | |||||
import {PDFDocument} from 'pdf-lib'; | |||||
import {Book} from '../../common'; | |||||
export const compilePdfBuffers = async (buffers: Buffer[]) => { | |||||
const documents = await buffers.reduce( | |||||
async (previousPromise, e) => { | |||||
const p = await previousPromise; | |||||
const d = await PDFDocument.load(e, { | |||||
updateMetadata: false, | |||||
}); | |||||
return [ | |||||
...p, | |||||
d, | |||||
]; | |||||
}, | |||||
Promise.resolve([] as PDFDocument[]) | |||||
); | |||||
return await documents.reduce( | |||||
async (thePdfDocPromise, dd) => { | |||||
const thePdfDoc = await thePdfDocPromise; | |||||
const c = await thePdfDoc.copyPages(dd, dd.getPageIndices()); | |||||
c.forEach((page) => { | |||||
thePdfDoc.addPage(page); | |||||
}); | |||||
return thePdfDoc; | |||||
}, | |||||
PDFDocument.create(), | |||||
); | |||||
}; | |||||
export const addPdfViewerPreferences = (pdfDoc: PDFDocument) => { | |||||
const viewerPrefs = pdfDoc.catalog.getOrCreateViewerPreferences(); | |||||
viewerPrefs.setHideToolbar(false); | |||||
viewerPrefs.setHideMenubar(false); | |||||
viewerPrefs.setDisplayDocTitle(true); | |||||
return pdfDoc; | |||||
}; | |||||
export const addPdfMetadata = (pdfDoc: PDFDocument, metadata: Book) => { | |||||
pdfDoc.setTitle(metadata.title, { showInWindowTitleBar: true }); | |||||
pdfDoc.setAuthor([metadata.creator, ...(metadata.contributors ?? [])].join(', ')); | |||||
if (Array.isArray(metadata.subjects)) { | |||||
pdfDoc.setSubject(metadata.subjects.join(', ')); | |||||
} | |||||
pdfDoc.setCreator('@modal-sh/patchouli'); | |||||
const defaultProducer = pdfDoc.getProducer(); | |||||
pdfDoc.setProducer(['puppeteer-html-pdf (https://github.com/ultimateakash/puppeteer-html-pdf)', defaultProducer].join(', ')); | |||||
const now = new Date(); | |||||
pdfDoc.setCreationDate(now); | |||||
pdfDoc.setModificationDate(now); | |||||
// TODO add language data | |||||
return pdfDoc; | |||||
}; |
@@ -1,7 +1,17 @@ | |||||
import {Input} from '../../common'; | |||||
import {BookData} from '../../common'; | |||||
import {renderFilesAsPdf} from './rendering'; | |||||
import {addPdfMetadata, addPdfViewerPreferences, compilePdfBuffers} from './compiling'; | |||||
export const name = 'pdf' as const; | export const name = 'pdf' as const; | ||||
export const bindBook = async <T extends Input = Input>(input: T) => { | |||||
return Buffer.from(input.path + ' ' + name); | |||||
export const bindBook = async <T extends BookData = BookData>(input: T) => { | |||||
const buffers = await renderFilesAsPdf(input.spine); | |||||
// TODO how to generate TOC? | |||||
// https://github.com/Hopding/pdf-lib/issues/123 | |||||
// https://github.com/Hopding/pdf-lib/issues/1257 | |||||
const compiledDocument = await compilePdfBuffers(buffers); | |||||
const documentWithMetadata = addPdfMetadata(compiledDocument, input.metadata); | |||||
const documentWithViewerPreferences = addPdfViewerPreferences(documentWithMetadata);; | |||||
const buffer = await documentWithViewerPreferences.save(); | |||||
return Buffer.from(buffer); | |||||
}; | }; |
@@ -0,0 +1,26 @@ | |||||
import PuppeteerHTMLPDF from 'puppeteer-html-pdf'; | |||||
import {BookData} from '../../common'; | |||||
export const renderFilesAsPdf = async (spine: BookData['spine']) => { | |||||
const pdf = new PuppeteerHTMLPDF(); | |||||
await pdf.setOptions({ | |||||
preferCSSPageSize: true, | |||||
headless: true, | |||||
}); | |||||
return await spine.reduce( | |||||
async (previousPromise, e, i, ee) => { | |||||
const previousBuffers = await previousPromise; | |||||
process.stdout.write(`Rendering file ${i + 1} of ${ee.length}...`); | |||||
await pdf.initializeBrowser(); | |||||
const thisPdf = await pdf.create(`file:///${e}`); | |||||
await pdf.closeBrowser(); | |||||
process.stdout.write('Done!\n'); | |||||
return [ | |||||
...previousBuffers, | |||||
thisPdf | |||||
]; | |||||
}, | |||||
Promise.resolve([] as Buffer[]) | |||||
); | |||||
}; |
@@ -2,11 +2,12 @@ import * as v from 'valibot'; | |||||
import assert from 'assert'; | import assert from 'assert'; | ||||
import * as PdfFormat from './formats/pdf'; | import * as PdfFormat from './formats/pdf'; | ||||
import * as EpubFormat from './formats/epub'; | import * as EpubFormat from './formats/epub'; | ||||
import * as PathCompiler from './compilers/path'; | |||||
import {inputSchema} from './common'; | |||||
import * as DirCompiler from './compilers/dir'; | |||||
import * as ArchiveCompiler from './compilers/archive'; | |||||
const AVAILABLE_COMPILERS = [ | const AVAILABLE_COMPILERS = [ | ||||
PathCompiler, | |||||
ArchiveCompiler, | |||||
DirCompiler, | |||||
]; | ]; | ||||
const AVAILABLE_FORMATS = [ | const AVAILABLE_FORMATS = [ | ||||
@@ -16,18 +17,11 @@ const AVAILABLE_FORMATS = [ | |||||
const optionsSchema = v.object( | const optionsSchema = v.object( | ||||
{ | { | ||||
input: v.merge( | |||||
[ | |||||
inputSchema, | |||||
v.object({ | |||||
sourceType: v.picklist(AVAILABLE_COMPILERS.map((f) => f.name)), | |||||
}) | |||||
], | |||||
v.unknown(), | |||||
), | |||||
input: v.union(AVAILABLE_COMPILERS.map((c) => c.inputSchema)), | |||||
output: v.object( | output: v.object( | ||||
{ | { | ||||
format: v.picklist(AVAILABLE_FORMATS.map((f) => f.name)), | format: v.picklist(AVAILABLE_FORMATS.map((f) => f.name)), | ||||
path: v.string(), | |||||
}, | }, | ||||
v.unknown(), | v.unknown(), | ||||
), | ), | ||||
@@ -41,12 +35,13 @@ export interface BindFunction { | |||||
<T extends BindFunctionOptions = BindFunctionOptions>(options: T): Promise<Buffer>; | <T extends BindFunctionOptions = BindFunctionOptions>(options: T): Promise<Buffer>; | ||||
} | } | ||||
// TODO add options to include blank pages to ensure chapters' first pages are in the right side of the book | |||||
export const bindBook: BindFunction = async (options: BindFunctionOptions): Promise<Buffer> => { | export const bindBook: BindFunction = async (options: BindFunctionOptions): Promise<Buffer> => { | ||||
const { input, output, } = await v.parseAsync(optionsSchema, options); | const { input, output, } = await v.parseAsync(optionsSchema, options); | ||||
const selectedCompiler = AVAILABLE_COMPILERS.find((c) => c.name === input.sourceType); | const selectedCompiler = AVAILABLE_COMPILERS.find((c) => c.name === input.sourceType); | ||||
assert(typeof selectedCompiler !== 'undefined'); | |||||
const bookPackage = await selectedCompiler.compileFromInput(input); | |||||
const selectedFormat = AVAILABLE_FORMATS.find((f) => f.name === output.format); | const selectedFormat = AVAILABLE_FORMATS.find((f) => f.name === output.format); | ||||
assert(typeof selectedFormat !== 'undefined'); | assert(typeof selectedFormat !== 'undefined'); | ||||
return selectedFormat.bindBook(input); | |||||
return selectedFormat.bindBook(bookPackage); | |||||
}; | }; |
@@ -7,7 +7,7 @@ import { | |||||
Mock, afterEach, | Mock, afterEach, | ||||
} from 'vitest'; | } from 'vitest'; | ||||
import { readFile, readdir } from 'fs/promises'; | import { readFile, readdir } from 'fs/promises'; | ||||
import { compileFromInput } from '../../src/compilers/path'; | |||||
import { compileFromInput } from '../../src/compilers/dir'; | |||||
vi.mock('fs/promises'); | vi.mock('fs/promises'); | ||||
@@ -26,7 +26,7 @@ const completeBookFile = { | |||||
rights: '© copyright notice or get from package.json LICENSE' | rights: '© copyright notice or get from package.json LICENSE' | ||||
}; | }; | ||||
describe('path compiler', () => { | |||||
describe('dir compiler', () => { | |||||
let mockReaddir: Mock; | let mockReaddir: Mock; | ||||
beforeEach(() => { | beforeEach(() => { | ||||
mockReaddir = readdir as Mock; | mockReaddir = readdir as Mock; | ||||
@@ -1,4 +1,9 @@ | |||||
import { defineConfig } from 'astro/config'; | import { defineConfig } from 'astro/config'; | ||||
// https://astro.build/config | // https://astro.build/config | ||||
export default defineConfig({}); | |||||
export default defineConfig({ | |||||
output: 'static', | |||||
build: { | |||||
format: 'file' | |||||
}, | |||||
}); |
@@ -1,6 +1,7 @@ | |||||
{ | { | ||||
"name": "@modal-sh/patchouli-sandbox-astro", | "name": "@modal-sh/patchouli-sandbox-astro", | ||||
"type": "module", | "type": "module", | ||||
"description": "Package JSON description", | |||||
"version": "0.0.1", | "version": "0.0.1", | ||||
"scripts": { | "scripts": { | ||||
"dev": "astro dev", | "dev": "astro dev", | ||||
@@ -0,0 +1,10 @@ | |||||
{ | |||||
"pageOrdering": [ | |||||
"index", | |||||
"title", | |||||
"toc", | |||||
"pages:foreword", | |||||
"pages:chapters/**", | |||||
"pages:appendices/**" | |||||
] | |||||
} |
@@ -1,11 +1,9 @@ | |||||
{ | { | ||||
"title": "Astro Sandbox", | "title": "Astro Sandbox", | ||||
"publisher": "", | |||||
"creator": "John Doe", | "creator": "John Doe", | ||||
"contributors": [ | "contributors": [ | ||||
"Jane Doe" | "Jane Doe" | ||||
], | ], | ||||
"description": "Retrieve from package.json", | |||||
"subjects": [ | "subjects": [ | ||||
"A subject of the publication", | "A subject of the publication", | ||||
"Another subject of the publication" | "Another subject of the publication" | ||||
@@ -0,0 +1,5 @@ | |||||
--- | |||||
title: Appendix | |||||
--- | |||||
Appendix. |
@@ -0,0 +1,5 @@ | |||||
--- | |||||
title: Introduction | |||||
--- | |||||
Hello! This is chapter 1. |
@@ -0,0 +1,5 @@ | |||||
--- | |||||
title: Next | |||||
--- | |||||
This is the next chapter. |
@@ -0,0 +1,5 @@ | |||||
--- | |||||
title: Final | |||||
--- | |||||
This is the final chapter. |
@@ -0,0 +1,5 @@ | |||||
--- | |||||
title: Foreword | |||||
--- | |||||
This is a foreword. |
@@ -1 +1,2 @@ | |||||
/// <reference path="../.astro/types.d.ts" /> | |||||
/// <reference types="astro/client" /> | /// <reference types="astro/client" /> |
@@ -0,0 +1,26 @@ | |||||
--- | |||||
import { getCollection } from 'astro:content'; | |||||
export const getStaticPaths = async () => { | |||||
const entries = await getCollection('appendices'); | |||||
return entries.map(entry => ({ | |||||
params: { slug: entry.slug }, props: { entry }, | |||||
})); | |||||
} | |||||
const { entry } = Astro.props; | |||||
const { Content } = await entry.render(); | |||||
--- | |||||
<html lang="en"> | |||||
<head> | |||||
<meta charset="utf-8" /> | |||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" /> | |||||
<meta name="viewport" content="width=device-width" /> | |||||
<meta name="generator" content={Astro.generator} /> | |||||
<title>{entry.data.title}</title> | |||||
</head> | |||||
<body> | |||||
<Content /> | |||||
</body> | |||||
</html> |
@@ -0,0 +1,26 @@ | |||||
--- | |||||
import { getCollection } from 'astro:content'; | |||||
export const getStaticPaths = async () => { | |||||
const entries = await getCollection('chapters'); | |||||
return entries.map(entry => ({ | |||||
params: { slug: entry.slug }, props: { entry }, | |||||
})); | |||||
} | |||||
const { entry } = Astro.props; | |||||
const { Content } = await entry.render(); | |||||
--- | |||||
<html lang="en"> | |||||
<head> | |||||
<meta charset="utf-8" /> | |||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" /> | |||||
<meta name="viewport" content="width=device-width" /> | |||||
<meta name="generator" content={Astro.generator} /> | |||||
<title>{entry.data.title}</title> | |||||
</head> | |||||
<body> | |||||
<Content /> | |||||
</body> | |||||
</html> |
@@ -0,0 +1,16 @@ | |||||
--- | |||||
import { title } from '../../patchouli.book.json'; | |||||
--- | |||||
<html lang="en"> | |||||
<head> | |||||
<meta charset="utf-8" /> | |||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" /> | |||||
<meta name="viewport" content="width=device-width" /> | |||||
<meta name="generator" content={Astro.generator} /> | |||||
<title>Foreword - {title}</title> | |||||
</head> | |||||
<body> | |||||
<h1>Foreword</h1> | |||||
</body> | |||||
</html> |
@@ -1,5 +1,5 @@ | |||||
--- | --- | ||||
import { title } from '../../patchouli.book.json'; | |||||
--- | --- | ||||
<html lang="en"> | <html lang="en"> | ||||
@@ -8,9 +8,9 @@ | |||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" /> | <link rel="icon" type="image/svg+xml" href="/favicon.svg" /> | ||||
<meta name="viewport" content="width=device-width" /> | <meta name="viewport" content="width=device-width" /> | ||||
<meta name="generator" content={Astro.generator} /> | <meta name="generator" content={Astro.generator} /> | ||||
<title>Astro</title> | |||||
<title>{title}</title> | |||||
</head> | </head> | ||||
<body> | <body> | ||||
<h1>Astro</h1> | |||||
<h1>{title}</h1> | |||||
</body> | </body> | ||||
</html> | </html> |
@@ -0,0 +1,17 @@ | |||||
--- | |||||
import { title } from '../../patchouli.book.json'; | |||||
--- | |||||
<html lang="en"> | |||||
<head> | |||||
<meta charset="utf-8" /> | |||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" /> | |||||
<meta name="viewport" content="width=device-width" /> | |||||
<meta name="generator" content={Astro.generator} /> | |||||
<title>{title}</title> | |||||
</head> | |||||
<body> | |||||
<h1>{title}</h1> | |||||
Title Page | |||||
</body> | |||||
</html> |