"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.FSCache = void 0;
function _stream() {
  const data = _interopRequireDefault(require("stream"));
  _stream = function () {
    return data;
  };
  return data;
}
function _path() {
  const data = _interopRequireDefault(require("path"));
  _path = function () {
    return data;
  };
  return data;
}
function _util() {
  const data = require("util");
  _util = function () {
    return data;
  };
  return data;
}
function _logger() {
  const data = _interopRequireDefault(require("@parcel/logger"));
  _logger = function () {
    return data;
  };
  return data;
}
function _core() {
  const data = require("@parcel/core");
  _core = function () {
    return data;
  };
  return data;
}
// flowlint-next-line untyped-import:off
var _package = _interopRequireDefault(require("../package.json"));
var _constants = require("./constants");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classPrivateMethodInitSpec(obj, privateSet) { _checkPrivateRedeclaration(obj, privateSet); privateSet.add(obj); }
function _checkPrivateRedeclaration(obj, privateCollection) { if (privateCollection.has(obj)) { throw new TypeError("Cannot initialize the same private elements twice on an object"); } }
function _classPrivateMethodGet(receiver, privateSet, fn) { if (!privateSet.has(receiver)) { throw new TypeError("attempted to get private field on non-instance"); } return fn; }
const pipeline = (0, _util().promisify)(_stream().default.pipeline);
var _getFilePath = /*#__PURE__*/new WeakSet();
var _unlinkChunks = /*#__PURE__*/new WeakSet();
class FSCache {
  constructor(fs, cacheDir) {
    _classPrivateMethodInitSpec(this, _unlinkChunks);
    _classPrivateMethodInitSpec(this, _getFilePath);
    this.fs = fs;
    this.dir = cacheDir;
  }
  async ensure() {
    // First, create the main cache directory if necessary.
    await this.fs.mkdirp(this.dir);

    // In parallel, create sub-directories for every possible two-character
    // hex prefix. This speeds up large caches on many file systems since
    // there are fewer files in a single directory.
    let dirPromises = [];
    for (let i = 0; i < 256; i++) {
      dirPromises.push(this.fs.mkdirp(_path().default.join(this.dir, ('00' + i.toString(16)).slice(-2))));
    }
    await Promise.all(dirPromises);
  }
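  // Map a cache id to its on-disk path, sharding by the first two hex
  // characters into the sub-directories created by ensure().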
  _getCachePath(cacheId) {
    return _path().default.join(this.dir, cacheId.slice(0, 2), cacheId.slice(2));
  }
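  // Streamed entries are stored under a `-large` suffixed key so they don't
  // collide with blob entries stored under the same key.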
  getStream(key) {
    return this.fs.createReadStream(this._getCachePath(`${key}-large`));
  }
  setStream(key, stream) {
    return pipeline(stream, this.fs.createWriteStream(this._getCachePath(`${key}-large`)));
  }
  has(key) {
    return this.fs.exists(this._getCachePath(key));
  }
  getBlob(key) {
    return this.fs.readFile(this._getCachePath(key));
  }
  async setBlob(key, contents) {
    await this.fs.writeFile(this._getCachePath(key), contents);
  }
  async getBuffer(key) {
    try {
      return await this.fs.readFile(this._getCachePath(key));
    } catch (err) {
      if (err.code === 'ENOENT') {
        return null;
      } else {
        throw err;
      }
    }
  }
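  // Large blobs are split across numbered chunk files (see _getFilePath2);
  // a large blob exists if its first chunk (index 0) exists.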
  hasLargeBlob(key) {
    return this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0));
  }
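  // Probe chunk files in order, collecting read promises until a chunk is
  // missing, then concatenate the resolved buffers.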
  async getLargeBlob(key) {
    const buffers = [];
    for (let i = 0; await this.fs.exists(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i)); i += 1) {
      const file = this.fs.readFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i));
      buffers.push(file);
    }
    return Buffer.concat(await Promise.all(buffers));
  }
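  // Split contents into WRITE_LIMIT_CHUNK-sized chunk files to stay under
  // platform write-size limits, writing all chunks in parallel.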
  async setLargeBlob(key, contents, options) {
    const chunks = Math.ceil(contents.length / _constants.WRITE_LIMIT_CHUNK);
    const writePromises = [];
    if (chunks === 1) {
      // If there's one chunk, don't slice the content
      writePromises.push(this.fs.writeFile(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, 0), contents, {
        signal: options === null || options === void 0 ? void 0 : options.signal
      }));
    } else {
      for (let i = 0; i < chunks; i += 1) {
        const start = i * _constants.WRITE_LIMIT_CHUNK;
        const end = (i + 1) * _constants.WRITE_LIMIT_CHUNK;
        writePromises.push(this.fs.writeFile(
          _classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, i),
          typeof contents === 'string' ? contents.slice(start, end) : contents.subarray(start, end),
          {
            signal: options === null || options === void 0 ? void 0 : options.signal
          }
        ));
      }
    }

    // If there are already chunk files following the new last chunk, they are
    // stale and should be removed.
    writePromises.push(_classPrivateMethodGet(this, _unlinkChunks, _unlinkChunks2).call(this, key, chunks));
    await Promise.all(writePromises);
  }
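  // get/set store structured values serialized with @parcel/core.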
  async get(key) {
    try {
      let data = await this.fs.readFile(this._getCachePath(key));
      return (0, _core().deserialize)(data);
    } catch (err) {
      if (err.code === 'ENOENT') {
        return null;
      } else {
        throw err;
      }
    }
  }
  async set(key, value) {
    try {
      let blobPath = this._getCachePath(key);
      let data = (0, _core().serialize)(value);
      await this.fs.writeFile(blobPath, data);
    } catch (err) {
      _logger().default.error(err, '@parcel/cache');
    }
  }
  refresh() {
    // NOOP: the filesystem cache has no in-memory state to refresh.
  }
}
exports.FSCache = FSCache;
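// Private method backing the _getFilePath WeakSet: chunk files are named
// `${key}-${index}` and live directly in the cache root (unsharded).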
function _getFilePath2(key, index) {
  return _path().default.join(this.dir, `${key}-${index}`);
}
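// Recursively unlink chunk files for `key` starting at `index` until a
// missing file stops the chain.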
async function _unlinkChunks2(key, index) {
  try {
    await this.fs.unlink(_classPrivateMethodGet(this, _getFilePath, _getFilePath2).call(this, key, index));
    await _classPrivateMethodGet(this, _unlinkChunks, _unlinkChunks2).call(this, key, index + 1);
  } catch (err) {
    // An error (e.g. ENOENT) means no more chunks are left to delete.
  }
}
(0, _core().registerSerializableClass)(`${_package.default.version}:FSCache`, FSCache);
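
// Usage sketch (illustrative, not part of this module): construct the cache
// with a @parcel/fs FileSystem implementation such as NodeFS and a cache
// directory of your choosing, then call ensure() before reading or writing.
// The path below is hypothetical.
//
//   const {NodeFS} = require('@parcel/fs');
//   const cache = new FSCache(new NodeFS(), '/tmp/parcel-cache');
//   await cache.ensure();
//   await cache.setBlob('some-key', Buffer.from('hello'));
//   const blob = await cache.getBlob('some-key'); // Buffer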