// Create an asar archive at `dest` from the directory `src` using the
// default (empty) options. `callback` is invoked when packaging finishes.
createPackage = function (src, dest, callback) {
  return module.exports.createPackageWithOptions(src, dest, {}, callback)
}
...
```js
var asar = require('asar');
var src = 'some/path/';
var dest = 'name.asar';
asar.createPackage(src, dest, function() {
console.log('done.');
})
```
Please note that there is currently **no** error handling provided!
### Transform
...
// Build an asar archive at `dest` from the pre-crawled `filenames` (with
// their `metadata` stat/type entries, keyed by filename) rooted at `src`.
//
// Options honored here:
//   ordering  - path to a file listing preferred archive order (one path
//               per line; a `<prefix>:` before the path and a leading `/`
//               are stripped)
//   unpack    - minimatch pattern of files to leave outside the archive
//   unpackDir - directory/pattern to leave unpacked (via isUnpackDir)
//   snapshot  - when truthy, also create a snapshot after writing
//
// `callback(error)` fires once the archive (and optional snapshot) is done.
createPackageFromFiles = function (src, dest, filenames, metadata, options, callback) {
  if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
  const filesystem = new Filesystem(src)
  const files = []
  const unpackDirs = []

  let filenamesSorted = []
  if (options.ordering) {
    // Normalize each ordering line: drop any `key:` prefix and a leading slash.
    const orderingFiles = fs.readFileSync(options.ordering).toString().split('\n').map(function (line) {
      if (line.includes(':')) { line = line.split(':').pop() }
      line = line.trim()
      if (line.startsWith('/')) { line = line.slice(1) }
      return line
    })

    // Expand every ordering entry into the chain of its ancestor paths so
    // parent directories are inserted before their children.
    const ordering = []
    for (const file of orderingFiles) {
      const pathComponents = file.split(path.sep)
      let str = src
      for (const pathComponent of pathComponents) {
        str = path.join(str, pathComponent)
        ordering.push(str)
      }
    }

    // Use Sets for membership so sorting stays O(n) instead of the
    // accidental O(n^2) of Array.prototype.includes inside these loops.
    let missing = 0
    const total = filenames.length
    const filenameSet = new Set(filenames)
    const alreadySorted = new Set()

    // First take everything the ordering file covers, in its order...
    for (const file of ordering) {
      if (!alreadySorted.has(file) && filenameSet.has(file)) {
        alreadySorted.add(file)
        filenamesSorted.push(file)
      }
    }
    // ...then append whatever the ordering file missed, in crawl order.
    for (const file of filenames) {
      if (!alreadySorted.has(file)) {
        alreadySorted.add(file)
        filenamesSorted.push(file)
        missing += 1
      }
    }
    console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
  } else {
    filenamesSorted = filenames
  }

  // Insert one entry into the in-memory filesystem; `done` continues the chain.
  const handleFile = function (filename, done) {
    let file = metadata[filename]
    let type
    if (!file) {
      // Crawl metadata missing for this entry - stat it ourselves.
      const stat = fs.lstatSync(filename)
      if (stat.isDirectory()) { type = 'directory' }
      if (stat.isFile()) { type = 'file' }
      if (stat.isSymbolicLink()) { type = 'link' }
      file = {stat, type}
    }
    let shouldUnpack
    switch (file.type) {
      case 'directory':
        shouldUnpack = options.unpackDir ? isUnpackDir(path.relative(src, filename), options.unpackDir, unpackDirs) : false
        filesystem.insertDirectory(filename, shouldUnpack)
        break
      case 'file':
        shouldUnpack = false
        if (options.unpack) {
          shouldUnpack = minimatch(filename, options.unpack, {matchBase: true})
        }
        if (!shouldUnpack && options.unpackDir) {
          // A file inside an unpacked directory is unpacked too.
          const dirName = path.relative(src, path.dirname(filename))
          shouldUnpack = isUnpackDir(dirName, options.unpackDir, unpackDirs)
        }
        files.push({filename: filename, unpack: shouldUnpack})
        // insertFile is async; it invokes `done` itself, so return here
        // instead of falling through to the nextTick below.
        filesystem.insertFile(filename, shouldUnpack, file, options, done)
        return
      case 'link':
        filesystem.insertLink(filename, file.stat)
        break
    }
    return process.nextTick(done)
  }

  // All entries inserted: write the archive to disk, then optionally snapshot.
  const insertsDone = function () {
    return mkdirp(path.dirname(dest), function (error) {
      if (error) { return callback(error) }
      return disk.writeFilesystem(dest, filesystem, files, metadata, function (error) {
        if (error) { return callback(error) }
        if (options.snapshot) {
          return createSnapshot(src, dest, filenames, metadata, options, callback)
        } else {
          return callback(null)
        }
      })
    })
  }

  // Process filenames sequentially via callback recursion.
  const names = filenamesSorted.slice()
  const next = function (name) {
    if (!name) { return insertsDone() }
    return handleFile(name, function () {
      return next(names.shift())
    })
  }
  return next(names.shift())
}
n/a
// Crawl `src` and package the result into `dest`, honoring `options`.
// `options.dot` (default true) controls whether dotfiles are crawled.
// Errors from the crawl are forwarded to `callback`.
createPackageWithOptions = function (src, dest, options, callback) {
  const crawlOptions = { dot: options.dot === undefined ? true : options.dot }
  return crawlFilesystem(src, crawlOptions, function (error, filenames, metadata) {
    if (error) { return callback(error) }
    module.exports.createPackageFromFiles(src, dest, filenames, metadata, options, callback)
  })
}
...
var src = 'some/path/';
var dest = 'name.asar';
function transform(filename) {
return new CustomTransformStream()
}
asar.createPackageWithOptions(src, dest, { transform: transform }, function() {
console.log('done.');
})
```
## Using with grunt
There is also an unofficial grunt plugin to generate asar archives at [bwin/grunt-asar][grunt-asar].
...
// Extract every entry of `archive` into the directory `dest`, recreating
// directories, symlinks (as relative links), and file contents.
extractAll = function (archive, dest) {
  const filesystem = disk.readFilesystemSync(archive)
  const filenames = filesystem.listFiles()

  // under windows just extract links as regular files
  const followLinks = process.platform === 'win32'

  // create destination directory
  mkdirp.sync(dest)

  return filenames.map((filename) => {
    filename = filename.slice(1) // get rid of leading slash (substr is deprecated)
    const destFilename = path.join(dest, filename)
    const file = filesystem.getFile(filename, followLinks)
    if (file.files) {
      // it's a directory, create it and continue with the next entry
      mkdirp.sync(destFilename)
    } else if (file.link) {
      // it's a symlink, create a symlink pointing at the target via a
      // path relative to the link's own directory
      const linkSrcPath = path.dirname(path.join(dest, file.link))
      const linkDestPath = path.dirname(destFilename)
      const relativePath = path.relative(linkDestPath, linkSrcPath)
      // try to delete output file, because we can't overwrite a link;
      // best-effort - the file may simply not exist yet
      try {
        fs.unlinkSync(destFilename)
      } catch (error) {}
      const linkTo = path.join(relativePath, path.basename(file.link))
      fs.symlinkSync(linkTo, destFilename)
    } else {
      // it's a file, extract it
      const content = disk.readFileSync(filesystem, filename, file)
      fs.writeFileSync(destFilename, content)
    }
  })
}
...
asar.extractFile(archive, filename))
})
// CLI: `asar extract <archive> <dest>` - unpack an entire archive into
// the destination directory.
program.command('extract <archive> <dest>')
.alias('e')
.description('extract archive')
.action(function (archive, dest) {
asar.extractAll(archive, dest)
})
// Catch-all for unrecognized subcommands: print a usage hint.
program.command('*')
.action(function (cmd) {
console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', cmd)
})
...
// Read a single file's contents out of `archive` and return them as a Buffer.
extractFile = function (archive, filename) {
  const filesystem = disk.readFilesystemSync(archive)
  const info = filesystem.getFile(filename)
  return disk.readFileSync(filesystem, filename, info)
}
...
})
// CLI: `asar extract-file <archive> <filename>` - extract one file,
// writing it under its basename into the current working directory.
program.command('extract-file <archive> <filename>')
.alias('ef')
.description('extract one file from archive')
.action(function (archive, filename) {
require('fs').writeFileSync(require('path').basename(filename),
asar.extractFile(archive, filename))
})
program.command('extract <archive> <dest>')
.alias('e')
.description('extract archive')
.action(function (archive, dest) {
asar.extractAll(archive, dest)
...
// Return the list of all paths stored in `archive`.
listPackage = function (archive) {
  const filesystem = disk.readFilesystemSync(archive)
  return filesystem.listFiles()
}
...
})
})
// CLI: `asar list <archive>` - print one archive entry per line.
program.command('list <archive>')
.alias('l')
.description('list files of asar archive')
.action(function (archive) {
  var files = asar.listPackage(archive)
  // for...of iterates array values directly; for...in enumerates keys
  // (including any inherited enumerable properties) and is an
  // anti-pattern on arrays.
  for (const file of files) {
    console.log(file)
  }
})
program.command('extract-file <archive> <filename>')
.alias('ef')
...
// Return the archive header entry for `filename` (size, offset, etc.).
// When `followLinks` is truthy, symlink entries are resolved to their targets.
statFile = function (archive, filename, followLinks) {
  const filesystem = disk.readFilesystemSync(archive)
  const entry = filesystem.getFile(filename, followLinks)
  return entry
}
n/a
// Read and parse the JSON header of the asar archive at `archive`.
// Layout: an 8-byte pickle holding the header size, followed by a pickle
// containing the JSON header string. Returns {header, headerSize}.
// Throws if either read comes up short; the fd is always closed.
readArchiveHeaderSync = function (archive) {
  const fd = fs.openSync(archive, 'r')
  let size
  let headerBuf
  try {
    // Buffer.alloc replaces the deprecated, unsafe `new Buffer(size)`
    // (which returned uninitialized memory).
    const sizeBuf = Buffer.alloc(8)
    if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
      throw new Error('Unable to read header size')
    }
    const sizePickle = pickle.createFromBuffer(sizeBuf)
    size = sizePickle.createIterator().readUInt32()
    headerBuf = Buffer.alloc(size)
    if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
      throw new Error('Unable to read header')
    }
  } finally {
    fs.closeSync(fd)
  }
  const headerPickle = pickle.createFromBuffer(headerBuf)
  const header = headerPickle.createIterator().readString()
  return {header: JSON.parse(header), headerSize: size}
}
...
const headerPickle = pickle.createFromBuffer(headerBuf)
const header = headerPickle.createIterator().readString()
return {header: JSON.parse(header), headerSize: size}
}
// Parse the header of `archive` into a Filesystem object, memoizing the
// result per archive path so repeated reads reuse the same instance.
module.exports.readFilesystemSync = function (archive) {
  let cached = filesystemCache[archive]
  if (!cached) {
    const { header, headerSize } = this.readArchiveHeaderSync(archive)
    cached = new Filesystem(archive)
    cached.header = header
    cached.headerSize = headerSize
    filesystemCache[archive] = cached
  }
  return cached
}
...
// Read the contents of `filename` (described by header entry `info`) from
// the archive wrapped by `filesystem`. Unpacked files are read from the
// sibling `<archive>.unpacked` directory; packed files are read from the
// archive at `8 + headerSize + info.offset`. Returns a Buffer.
readFileSync = function (filesystem, filename, info) {
  // Node throws an exception when reading 0 bytes into a 0-size buffer,
  // so we short-circuit the read in this case.
  if (info.size <= 0) { return Buffer.alloc(0) }
  if (info.unpacked) {
    // it's an unpacked file, copy it.
    return fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
  }
  // Buffer.alloc replaces the deprecated `new Buffer(size)`; the buffer is
  // fully overwritten by the read below.
  const buffer = Buffer.alloc(info.size)
  const fd = fs.openSync(filesystem.src, 'r')
  try {
    // info.offset is stored as a string in the header; parse with an
    // explicit radix.
    const offset = 8 + filesystem.headerSize + parseInt(info.offset, 10)
    fs.readSync(fd, buffer, 0, info.size, offset)
  } finally {
    fs.closeSync(fd)
  }
  return buffer
}
...
const Filesystem = require('./filesystem')
const filesystemCache = {}
// Copy `filename` from directory `src` into directory `dest`, creating any
// missing parent directories and preserving the source file's mode.
const copyFileToSync = function (dest, src, filename) {
  const from = path.join(src, filename)
  const to = path.join(dest, filename)
  const stats = fs.statSync(from)
  const content = fs.readFileSync(from)
  mkdirp.sync(path.dirname(to))
  return fs.writeFileSync(to, content, {mode: stats.mode})
}
const writeFileListToStream = function (dest, filesystem, out, list, metadata, callback) {
if (list.length === 0) {
...
// Return the parsed Filesystem for `archive`, caching one instance per
// archive path so subsequent calls skip re-reading the header.
readFilesystemSync = function (archive) {
  const cached = filesystemCache[archive]
  if (cached) { return cached }
  const meta = this.readArchiveHeaderSync(archive)
  const filesystem = new Filesystem(archive)
  filesystem.header = meta.header
  filesystem.headerSize = meta.headerSize
  filesystemCache[archive] = filesystem
  return filesystem
}
n/a
// Write the archive to `dest`: first the pickled header size, then the
// pickled JSON header, then (via writeFileListToStream) every file's
// contents. Serialization errors and stream errors go to `callback`.
writeFilesystem = function (dest, filesystem, files, metadata, callback) {
  let sizeBuf
  let headerBuf
  try {
    const headerPickle = pickle.createEmpty()
    headerPickle.writeString(JSON.stringify(filesystem.header))
    headerBuf = headerPickle.toBuffer()

    const sizePickle = pickle.createEmpty()
    sizePickle.writeUInt32(headerBuf.length)
    sizeBuf = sizePickle.toBuffer()
  } catch (error) {
    return callback(error)
  }

  const out = fs.createWriteStream(dest)
  out.on('error', callback)
  out.write(sizeBuf)
  // Start streaming file contents only once the header has been flushed.
  return out.write(headerBuf, function () {
    return writeFileListToStream(dest, filesystem, out, files, metadata, callback)
  })
}
n/a