'use strict'

const BB = require('bluebird')

const contentPath = require('./content/path')
const crypto = require('crypto')
const figgyPudding = require('figgy-pudding')
const fixOwner = require('./util/fix-owner')
const fs = require('graceful-fs')
const hashToSegments = require('./util/hash-to-segments')
const ms = require('mississippi')
const path = require('path')
const ssri = require('ssri')
const Y = require('./util/y.js')

// Index format version from package.json's `cache-version.index` field.
// It is baked into bucketDir(), so bumping it invalidates older on-disk
// index trees.
const indexV = require('../package.json')['cache-version'].index

// Promisified fs helpers. These return bluebird promises, which is why the
// predicate form `.catch({ code: 'ENOENT' }, fn)` works further down.
const appendFileAsync = BB.promisify(fs.appendFile)
const readFileAsync = BB.promisify(fs.readFile)
const readdirAsync = BB.promisify(fs.readdir)

// mississippi stream helpers: `concat` collects a stream, `from` builds one.
const concat = ms.concat
const from = ms.from
 
// Error used to signal that `key` has no usable entry in `cache`.
// Exposes `code: 'ENOENT'` plus the cache path and key so callers can
// distinguish cache misses from other failures.
class NotFoundError extends Error {
  constructor (cache, key) {
    super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}
module.exports.NotFoundError = NotFoundError
 
// Options accepted by insert()/insertSync() (and hence delete()).
// `metadata` and `size` are copied verbatim onto the stored index entry.
const IndexOpts = figgyPudding({
  metadata: {},
  size: {}
})
 
module.exports.insert = insert
// Append a new index entry for `key` to its bucket file and resolve with the
// formatted entry (formatEntry returns null when `integrity` is null, which
// is how deletions are represented).
//
// `opts.metadata` and `opts.size` are stored on the entry verbatim.
function insert (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  return fixOwner.mkdirfix(cache, path.dirname(bucket))
    .then(() => {
      const serialized = JSON.stringify(entry)
      // Each appended line is "<checksum>\t<json>". The checksum (see
      // hashEntry) lets readers detect and skip lines corrupted by
      // interleaved or partial appends, so concurrent writers don't need
      // locking. (Thanks to @isaacs for the whiteboarding session that
      // produced this scheme.)
      return appendFileAsync(bucket, `\n${hashEntry(serialized)}\t${serialized}`)
    })
    .then(() => fixOwner.chownr(cache, bucket))
    .catch({ code: 'ENOENT' }, () => {
      // A racing deletion between mkdirfix and chownr can make the bucket
      // vanish. It's fine to pretend the entry was written anyway -- this is
      // just a cache.
    })
    .then(() => formatEntry(cache, entry))
}
 
module.exports.insert.sync = insertSync
// Synchronous twin of insert(): append an entry for `key` to its bucket and
// return the formatted entry (null when `integrity` is null).
function insertSync (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  const serialized = JSON.stringify(entry)
  // Same checksummed "<hash>\t<json>" line format used by the async path.
  fs.appendFileSync(bucket, `\n${hashEntry(serialized)}\t${serialized}`)
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    // A bucket deleted out from under us is not an error -- it's a cache.
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}
 
module.exports.find = find
// Look up the newest index entry for `key`. Resolves with the formatted
// entry, or null when the bucket file is missing, holds no matching entry,
// or the newest matching entry is a deletion marker.
function find (cache, key) {
  const bucket = bucketPath(cache, key)
  return bucketEntries(bucket)
    .then((entries) => {
      // Later lines shadow earlier ones, so keep the last match.
      let newest = null
      for (const candidate of entries) {
        if (candidate && candidate.key === key) {
          newest = formatEntry(cache, candidate)
        }
      }
      return newest
    })
    .catch((err) => {
      if (err.code !== 'ENOENT') {
        throw err
      }
      return null
    })
}
 
module.exports.find.sync = findSync
// Synchronous twin of find().
function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    // Later lines shadow earlier ones, so keep the last match.
    let newest = null
    for (const candidate of bucketEntriesSync(bucket)) {
      if (candidate && candidate.key === key) {
        newest = formatEntry(cache, candidate)
      }
    }
    return newest
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err
    }
    return null
  }
}
 
// "Deleting" a key is just inserting an entry with null integrity; on read,
// formatEntry() turns such entries into null, shadowing older entries.
function del (cache, key, opts) {
  return insert(cache, key, null, opts)
}
module.exports.delete = del
 
// Synchronous twin of delete(): writes a deletion marker for `key`.
function delSync (cache, key, opts) {
  return insertSync(cache, key, null, opts)
}
module.exports.delete.sync = delSync
 
module.exports.lsStream = lsStream
// Stream every live index entry in the cache as formatted objects.
// Walks the two-level bucket directory tree, parses each bucket file,
// keeps only the newest entry per key, and pushes the formatted result.
//
// NOTE(review): this leans on bluebird-specific promise methods (.map,
// .reduce on a promised array, and predicate .catch) of the promises
// returned by readdirOrEmpty and bucketEntries.
function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = from.obj()
  // "/cachename/*"
  readdirOrEmpty(indexDir).map(bucket => {
    const bucketPath = path.join(indexDir, bucket)
    // "/cachename/<bucket 0xFF>/*"
    return readdirOrEmpty(bucketPath).map(subbucket => {
      const subbucketPath = path.join(bucketPath, subbucket)
      // "/cachename/<bucket 0xFF>/<bucket 0xFF>/*"
      return readdirOrEmpty(subbucketPath).map(entry => {
        // Fold the bucket's lines into a Map so later (newer) lines for the
        // same key shadow earlier ones.
        const getKeyToEntry = bucketEntries(
          path.join(subbucketPath, entry)
        ).reduce((acc, entry) => {
          acc.set(entry.key, entry)
          return acc
        }, new Map())
        return getKeyToEntry.then(reduced => {
          for (let entry of reduced.values()) {
            // formatEntry returns null for deletion markers; skip those.
            const formatted = formatEntry(cache, entry)
            formatted && stream.push(formatted)
          }
          // A bucket file deleted mid-walk is not an error.
        }).catch({ code: 'ENOENT' }, nop)
      })
    })
  }).then(() => {
    // Every directory walk resolved -- end the stream.
    stream.push(null)
  }, err => {
    stream.emit('error', err)
  })
  return stream
}
 
module.exports.ls = ls
// Resolve with an object mapping each cache key to its newest formatted
// index entry, built by draining lsStream().
function ls (cache) {
  return BB.fromNode((cb) => {
    const collector = concat((entries) => {
      const index = {}
      for (const entry of entries) {
        index[entry.key] = entry
      }
      cb(null, index)
    })
    lsStream(cache).on('error', cb).pipe(collector)
  })
}
 
// Read a bucket file and parse it into an array of entry objects.
// Rejects with ENOENT when the bucket file does not exist.
function bucketEntries (bucket, filter) {
  return readFileAsync(bucket, 'utf8')
    .then((contents) => _bucketEntries(contents, filter))
}
 
// Synchronous twin of bucketEntries().
function bucketEntriesSync (bucket, filter) {
  return _bucketEntries(fs.readFileSync(bucket, 'utf8'), filter)
}
 
// Parse the text of a bucket file: one "<checksum>\t<json>" entry per line.
// Lines that are empty, fail their checksum, or hold invalid JSON are
// silently dropped -- corruption in a cache index is not worth surfacing.
// `filter` is accepted for signature symmetry but is not applied here.
function _bucketEntries (data, filter) {
  const entries = []
  for (const line of data.split('\n')) {
    if (!line) {
      continue
    }
    const [checksum, json] = line.split('\t')
    if (!json || hashEntry(json) !== checksum) {
      // Checksum mismatch: corruption or malice -- either way, skip it.
      continue
    }
    let parsed
    try {
      parsed = JSON.parse(json)
    } catch (e) {
      // Entry is corrupted despite the checksum matching; skip it.
      continue
    }
    if (parsed) {
      entries.push(parsed)
    }
  }
  return entries
}
 
module.exports._bucketDir = bucketDir
// Root directory of the index tree, versioned so format changes land in a
// fresh directory instead of misreading old data.
function bucketDir (cache) {
  const versionedDir = `index-v${indexV}`
  return path.join(cache, versionedDir)
}
 
module.exports._bucketPath = bucketPath
// Bucket file path for `key`: the sha256 of the key, split into directory
// segments by hashToSegments, under bucketDir(cache).
function bucketPath (cache, key) {
  const hashed = hashKey(key)
  // Spread the segments directly instead of the old
  // `path.join.apply(path, [dir].concat(segments))` construction.
  return path.join(bucketDir(cache), ...hashToSegments(hashed))
}
 
// Hash a cache key into the hex digest that names its bucket (sha256, vs
// the sha1 used by hashEntry for per-line checksums).
function hashKey (key) {
  return hash(key, 'sha256')
}
module.exports._hashKey = hashKey
 
// Checksum stored alongside each serialized index line to detect corruption.
function hashEntry (str) {
  return hash(str, 'sha1')
}
module.exports._hashEntry = hashEntry
 
// Hex digest of `str` under the named `digest` algorithm (e.g. 'sha1',
// 'sha256').
function hash (str, digest) {
  const hasher = crypto.createHash(digest)
  hasher.update(str)
  return hasher.digest('hex')
}
 
// Convert a raw parsed index entry into the public shape handed to callers,
// adding the content path derived from the entry's integrity.
// Entries without an integrity value are deletion markers: return null so
// they shadow any earlier entries for the same key.
function formatEntry (cache, entry) {
  if (!entry.integrity) {
    return null
  }
  const { key, integrity, size, time, metadata } = entry
  return {
    key,
    integrity,
    path: contentPath(cache, integrity),
    size,
    time,
    metadata
  }
}
 
// readdir() that treats a missing path, or a non-directory where a directory
// was expected, as simply having no children.
function readdirOrEmpty (dir) {
  return readdirAsync(dir).catch((err) => {
    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
      return []
    }
    throw err
  })
}
 
// Shared no-op, used by lsStream to swallow expected per-bucket ENOENTs.
function nop () {
}
 