[
  {
    "path": ".github/workflows/claude-code-review.yml",
    "content": "name: Claude Code Review\n\non:\n  pull_request:\n    types: [opened, synchronize, ready_for_review, reopened]\n    # Optional: Only run on specific file changes\n    # paths:\n    #   - \"src/**/*.ts\"\n    #   - \"src/**/*.tsx\"\n    #   - \"src/**/*.js\"\n    #   - \"src/**/*.jsx\"\n\njobs:\n  claude-review:\n    # Optional: Filter by PR author\n    # if: |\n    #   github.event.pull_request.user.login == 'external-contributor' ||\n    #   github.event.pull_request.user.login == 'new-developer' ||\n    #   github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'\n\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n      pull-requests: read\n      issues: read\n      id-token: write\n\n    steps:\n      - name: Checkout repository\n        uses: actions/checkout@v4\n        with:\n          fetch-depth: 1\n\n      - name: Run Claude Code Review\n        id: claude-review\n        uses: anthropics/claude-code-action@v1\n        with:\n          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}\n          plugin_marketplaces: 'https://github.com/anthropics/claude-code.git'\n          plugins: 'code-review@claude-code-plugins'\n          prompt: '/code-review:code-review ${{ github.repository }}/pull/${{ github.event.pull_request.number }}'\n          # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md\n          # or https://code.claude.com/docs/en/cli-reference for available options\n\n"
  },
  {
    "path": ".github/workflows/claude.yml",
    "content": "name: Claude Code\n\non:\n  issue_comment:\n    types: [created]\n  pull_request_review_comment:\n    types: [created]\n  issues:\n    types: [opened, assigned]\n  pull_request_review:\n    types: [submitted]\n\njobs:\n  claude:\n    if: |\n      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||\n      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||\n      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||\n      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n      pull-requests: read\n      issues: read\n      id-token: write\n      actions: read # Required for Claude to read CI results on PRs\n    steps:\n      - name: Checkout repository\n        uses: actions/checkout@v4\n        with:\n          fetch-depth: 1\n\n      - name: Run Claude Code\n        id: claude\n        uses: anthropics/claude-code-action@v1\n        with:\n          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}\n\n          # This is an optional setting that allows Claude to read CI results on PRs\n          additional_permissions: |\n            actions: read\n\n          # Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.\n          # prompt: 'Update the pull request description to include a summary of changes.'\n\n          # Optional: Add claude_args to customize behavior and configuration\n          # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md\n          # or https://code.claude.com/docs/en/cli-reference for available options\n          # claude_args: '--allowed-tools Bash(gh pr:*)'\n\n"
  },
  {
    "path": ".github/workflows/npm-publish.yml",
    "content": "name: Publish to npm\n\non:\n  workflow_dispatch:\n  push:\n    tags:\n      - 'v*.*.*'\n\njobs:\n  publish:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n      id-token: write\n    steps:\n      - name: Checkout repository\n        uses: actions/checkout@v4\n\n      - name: Setup Node.js\n        uses: actions/setup-node@v4\n        with:\n          node-version: '18'\n          registry-url: 'https://registry.npmjs.org'\n\n      - name: Install dependencies\n        run: npm install\n\n      - name: Run tests\n        run: npm test\n\n      - name: Publish to npm\n        run: npm publish --provenance --access public\n        env:\n          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "name: Test\n\non:\n  pull_request:\n    branches: [master]\n    types: [opened, synchronize, reopened]\n  workflow_dispatch:\n\njobs:\n  test:\n    runs-on: ubuntu-latest\n\n    strategy:\n      matrix:\n        node-version: [18, 20, 22]\n\n    steps:\n      - uses: actions/checkout@v4\n\n      - name: Use Node.js ${{ matrix.node-version }}\n        uses: actions/setup-node@v4\n        with:\n          node-version: ${{ matrix.node-version }}\n\n      - name: Install dependencies\n        run: npm i\n\n      - name: Run tests\n        run: npm test"
  },
  {
    "path": ".gitignore",
    "content": ".nyc_output/*\ncoverage/*\nnode_modules/*\ntest/*.zip\ntest/*.tar\ntest/*.tar.gz\n.idea/\n"
  },
  {
    "path": ".npmrc",
    "content": "package-lock=false\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2019 Ronald Luitwieler.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "# s3-zip\n\n[![npm version][npm-badge]][npm-url]\n[![Build Status][gh-actions-badge]][gh-actions-url]\n[![JavaScript Style Guide](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/)\n\nDownload selected files from an Amazon S3 bucket as a zip file.\n\n\n\n## Install\n\n```\nnpm install s3-zip\n```\n\n\n## AWS Configuration\n\nRefer to the [AWS SDK][aws-sdk-url] for authenticating to AWS prior to using this plugin.\n\n\n\n## Usage\n\n### Zip specific files\n\n```javascript\n\nconst fs = require('fs')\nconst join = require('path').join\nconst s3Zip = require('s3-zip')\n\nconst region = 'bucket-region'\nconst bucket = 'name-of-s3-bucket'\nconst folder = 'name-of-bucket-folder/'\nconst file1 = 'Image A.png'\nconst file2 = 'Image B.png'\nconst file3 = 'Image C.png'\nconst file4 = 'Image D.png'\n\nconst output = fs.createWriteStream(join(__dirname, 'use-s3-zip.zip'))\n\ns3Zip\n  .archive({ region: region, bucket: bucket}, folder, [file1, file2, file3, file4])\n  .pipe(output)\n\n```\n\nYou can also pass a custom S3 client. For example if you want to zip files from a S3 compatible storage:\n\n```javascript\nconst { S3Client } = require('@aws-sdk/client-s3')\n\nconst s3Client = new S3Client({\n  region: 'us-east-1',\n  endpoint: 'http://localhost:9000',\n  forcePathStyle: true\n})\n\ns3Zip\n  .archive({ s3: s3Client, bucket: bucket }, folder, [file1, file2])\n  .pipe(output)\n```\n\n**Note:** When passing a custom S3 client, it must be an AWS SDK v3 client (from `@aws-sdk/client-s3`). AWS SDK v2 clients are not supported and will result in an error.\n\n### Zip files with AWS Lambda\n\nExample of s3-zip in combination with [AWS Lambda](aws_lambda.md).\n\n\n### Zip a whole bucket folder\n\n```javascript\nconst fs = require('fs')\nconst join = require('path').join\nconst {\n  S3Client\n} = require(\"@aws-sdk/client-s3\")\nconst s3Zip = require('s3-zip')\nconst XmlStream = require('xml-stream')\n\nconst region = 'bucket-region'\nconst bucket = 'name-of-s3-bucket'\nconst folder = 'name-of-bucket-folder/'\nconst s3 = new S3Client({ region: region })\nconst params = {\n  Bucket: bucket,\n  Prefix: folder\n}\n\nconst filesArray = []\nconst files = s3.listObjects(params).createReadStream()\nconst xml = new XmlStream(files)\nxml.collect('Key')\nxml.on('endElement: Key', function(item) {\n  filesArray.push(item['$text'].substring(folder.length))\n})\n\nxml\n  .on('end', function () {\n    zip(filesArray)\n  })\n\nfunction zip(files) {\n  console.log(files)\n  const output = fs.createWriteStream(join(__dirname, 'use-s3-zip.zip'))\n  s3Zip\n   .archive({ region: region, bucket: bucket, preserveFolderStructure: true }, folder, files)\n   .pipe(output)\n}\n```\n\n### Tar format support\n\n```javascript\ns3Zip\n  .setFormat('tar')\n  .archive({ region: region, bucket: bucket }, folder, [file1, file2])\n  .pipe(output)\n```\n\n### Zip a file with protected password\n\n```javascript\ns3Zip\n  .setRegisterFormatOptions('zip-encrypted', require(\"archiver-zip-encrypted\"))\n  .setFormat('zip-encryptable')\n  .setArchiverOptions({zlib: {level: 8}, encryptionMethod: 'aes256', password: '123'})\n  .archive({ region: region, bucket: bucket }, folder, [file1, file2])\n  .pipe(output)\n```\n\n### Archiver options\n\nWe use [archiver][archiver-url] to create archives. To pass your options to it, use `setArchiverOptions` method:\n\n```javascript\ns3Zip\n  .setFormat('tar')\n  .setArchiverOptions({ gzip: true })\n  .archive({ region: region, bucket: bucket }, folder, [file1, file2])\n```\n\n### Organize your archive with custom paths and permissions\n\nYou can pass an array of objects with type [EntryData][entrydata-url] to organize your archive.\n\n```javascript\nconst files = ['flower.jpg', 'road.jpg']\nconst archiveFiles = [\n  { name: 'newFolder/flower.jpg' },\n\n  /* _rw_______ */\n  { name: 'road.jpg', mode: parseInt('0600', 8)  }\n];\ns3Zip.archive({ region: region, bucket: bucket }, folder, files, archiveFiles)\n```\n\n### Using with ExpressJS\n\n`s3-zip` works with any framework which leverages on NodeJS Streams including ExpressJS.\n\n```javascript\nconst s3Zip = require('s3-zip')\n\napp.get('/download', (req, res) => {\n  s3Zip\n    .archive({ region: region, bucket: bucket }, '', 'abc.jpg')\n    .pipe(res)\n})\n```\nAbove should stream out the file in the response of the request.\n\n### Debug mode\n\nEnable debug mode to see the logs:\n\n```javascript\ns3Zip.archive({ region: region, bucket: bucket, debug: true }, folder, files)\n```\n\n## Testing\n\nTests are written in Node Tap, run them like this:\n\n```\nnpm t\n```\n\nIf you would like a more fancy coverage report:\n\n```\nnpm run coverage\n```\n\n## Publishing\n\nThis package is automatically published to NPM when a new release is created on GitHub. The publishing workflow:\n\n1. Triggers on GitHub releases\n2. Runs tests to ensure quality\n3. Publishes to NPM using the `NPM_TOKEN` secret\n\n### Setup for Maintainers\n\nTo enable automatic publishing, the repository requires an `NPM_TOKEN` secret to be configured in GitHub:\n\n1. Generate an NPM access token with publish permissions\n2. Add it as a repository secret named `NPM_TOKEN` in GitHub Settings > Secrets and variables > Actions\n\nThe workflow can also be triggered manually from the Actions tab for testing purposes.\n\n\n\n\n[aws-sdk-url]: https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/configuring-the-jssdk.html\n[npm-badge]: https://badge.fury.io/js/s3-zip.svg\n[npm-url]: https://badge.fury.io/js/s3-zip\n[gh-actions-badge]: https://github.com/orangewise/s3-zip/actions/workflows/test.yml/badge.svg?branch=master\n[gh-actions-url]: https://github.com/orangewise/s3-zip/actions/workflows/test.yml\n[archiver-url]: https://www.npmjs.com/package/archiver\n[entrydata-url]: https://archiverjs.com/docs/global.html#EntryData\n"
  },
  {
    "path": "aws_lambda.md",
    "content": "# Using s3-zip in combination with AWS Lambda\n\n## Create a lambda function\n\n\n```javascript\nconst { Upload } = require(\"@aws-sdk/lib-storage\");\nconst { S3 } = require(\"@aws-sdk/client-s3\");\nconst s3Zip = require('s3-zip')\nconst {Readable} = require('stream')\n\nexports.handler = function (event, context) {\n  console.log('event', event)\n  \n\n  const region = event.region\n  const bucket = event.bucket\n  const folder = event.folder\n  const files = event.files\n  const zipFileName = event.zipFileName\n\n  // Create body stream\n  try {\n\n    const writable = s3Zip.archive({ region: region, bucket: bucket}, folder, files)\n    const body = Readable.from(writable)\n    const zipParams = { params: { Bucket: bucket, Key: folder + zipFileName } }\n    const zipFile = new S3(zipParams)\n    new Upload({\n      client: zipFile,\n      params: { Body: body }\n    })\n      .on('httpUploadProgress', function (evt) { console.log(evt) })\n      .done().then(\n        (r) => {\n          console.log(r)\n          context.succeed(r)\n        },\n        (e) => {\n          console.log('zipFile.upload error', e)\n          context.fail(e)\n        }\n      )\n\n  } catch (e) {\n    const err = 'catched error: ' + e\n    console.log(err)    \n    context.fail(err)\n  }\n}\n\n```\n\n## Invoke the function\n\n```javascript\nconst { LambdaClient } = require(\"@aws-sdk/client-lambda\");\n\nconst region = 'bucket-region'\nconst bucket = 'name-of-s3-bucket'\nconst folder = 'name-of-bucket-folder/'\nconst file1 = 'Image A.png'\nconst file2 = 'Image B.png'\nconst file3 = 'Image C.png'\nconst file4 = 'Image D.png'\n\n\nconst lambda = new LambdaClient({\n  region\n})\n\nconst files = [file1, file2, file3, file4]\nconst payload = JSON.stringify({ \n  'region'     : region,\n  'bucket'     : bucket,\n  'folder'     : folder,\n  'files'      :  files,\n  'zipFileName': 'bla.zip'\n})\n\nconst params = {\n  FunctionName : 'NAME_OF_YOUR_LAMBDA_FUNCTION', \n  Payload      : payload\n}\n\n\nlambda.invoke(params, function (err, data) {\n  if (err) console.log(err, err.stack) // an error occurred\n  else     console.log(data)           // successful response\n})\n\n```\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"s3-zip\",\n  \"version\": \"3.3.3\",\n  \"description\": \"Download selected files from an Amazon S3 bucket as a zip file.\",\n  \"main\": \"s3-zip.js\",\n  \"engines\": {\n    \"node\": \">=18\"\n  },\n  \"scripts\": {\n    \"test\": \"standard && tap --cov test/test*.js\",\n    \"coverage\": \"npm test -- --cov --coverage-report=lcov\"\n  },\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"git+https://github.com/orangewise/s3-zip.git\"\n  },\n  \"keywords\": [\n    \"amazon\",\n    \"aws\",\n    \"S3\",\n    \"zip\",\n    \"files\"\n  ],\n  \"author\": \"Ronald Luitwieler\",\n  \"license\": \"MIT\",\n  \"bugs\": {\n    \"url\": \"https://github.com/orangewise/s3-zip/issues\"\n  },\n  \"homepage\": \"https://github.com/orangewise/s3-zip#readme\",\n  \"dependencies\": {\n    \"archiver\": \"^6.0.1\",\n    \"normalize-path\": \"^3.0.0\",\n    \"s3-files\": \"^3.0.0\"\n  },\n  \"devDependencies\": {\n    \"archiver-zip-encryptable\": \"^1.0.5\",\n    \"concat-stream\": \"^2.0.0\",\n    \"proxyquire\": \"^2.1.3\",\n    \"sinon\": \"^16.0.0\",\n    \"standard\": \"^17.1.0\",\n    \"stream-array\": \"^1.1.2\",\n    \"tap\": \"^16.3.8\",\n    \"tar\": \"^6.2.0\",\n    \"yauzl\": \"^2.10.0\"\n  }\n}\n"
  },
  {
    "path": "s3-zip.js",
    "content": "const s3Files = require('s3-files')\nconst archiver = require('archiver')\n\nconst s3Zip = {}\nmodule.exports = s3Zip\n\ns3Zip.archive = function (opts, folder, filesS3, filesZip) {\n  const self = this\n  let connectionConfig\n\n  this.folder = folder\n\n  self.debug = opts.debug || false\n\n  if ('s3' in opts) {\n    // Validate that the provided S3 client is compatible with AWS SDK v3\n    if (!opts.s3 || typeof opts.s3.send !== 'function') {\n      throw new Error('The provided S3 client must be an AWS SDK v3 client with a .send() method. ' +\n        'Please use @aws-sdk/client-s3 (v3) instead of aws-sdk (v2).')\n    }\n    connectionConfig = {\n      s3: opts.s3\n    }\n  } else {\n    connectionConfig = {\n      region: opts.region\n    }\n  }\n\n  connectionConfig.bucket = opts.bucket\n\n  self.client = s3Files.connect(connectionConfig)\n\n  const keyStream = self.client.createKeyStream(folder, filesS3)\n\n  const preserveFolderStructure = opts.preserveFolderStructure === true || filesZip\n  const fileStream = s3Files.createFileStream(keyStream, preserveFolderStructure)\n  const archive = self.archiveStream(fileStream, filesS3, filesZip)\n\n  return archive\n}\n\ns3Zip.archiveStream = function (stream, filesS3, filesZip) {\n  const self = this\n  const folder = this.folder || ''\n  if (this.registerFormat) {\n    // Only register the format if it hasn't been registered before\n    if (!archiver.isRegisteredFormat(this.registerFormat)) {\n      archiver.registerFormat(this.registerFormat, this.formatModule)\n    }\n  }\n  const archive = archiver(this.format || 'zip', this.archiverOpts || {})\n  archive.on('error', function (err) {\n    self.debug && console.log('archive error', err)\n  })\n  stream\n    .on('data', function (file) {\n      if (file.path[file.path.length - 1] === '/') {\n        self.debug && console.log('don\\'t append to zip', file.path)\n        return\n      }\n      let fname\n      if (filesZip) {\n        // Place files_s3[i] into the archive as files_zip[i]\n        const i = filesS3.indexOf(file.path.startsWith(folder) ? file.path.substring(folder.length) : file.path)\n        fname = (i >= 0 && i < filesZip.length) ? filesZip[i] : file.path\n        filesS3[i] = ''\n      } else {\n        // Just use the S3 file name\n        fname = file.path\n      }\n      const entryData = typeof fname === 'object' ? fname : { name: fname }\n      self.debug && console.log('append to zip', fname)\n      if (file.data.length === 0) {\n        archive.append('', entryData)\n      } else {\n        archive.append(file.data, entryData)\n      }\n    })\n    .on('end', function () {\n      self.debug && console.log('end -> finalize')\n      archive.finalize()\n    })\n    .on('error', function (err) {\n      archive.emit('error', err)\n    })\n\n  return archive\n}\n\ns3Zip.setFormat = function (format) {\n  this.format = format\n  return this\n}\n\ns3Zip.setArchiverOptions = function (archiverOpts) {\n  this.archiverOpts = archiverOpts\n  return this\n}\n\ns3Zip.setRegisterFormatOptions = function (registerFormat, formatModule) {\n  this.registerFormat = registerFormat\n  this.formatModule = formatModule\n  return this\n}\n"
  },
  {
    "path": "test/fixtures/empty.txt",
    "content": ""
  },
  {
    "path": "test/fixtures/file.txt",
    "content": "Howdy.\nHowdy.\nHowdy."
  },
  {
    "path": "test/fixtures/folder/a/file.txt",
    "content": "a\n"
  },
  {
    "path": "test/fixtures/folder/b/file.txt",
    "content": "b\n"
  },
  {
    "path": "test/test-coverage-missing-lines.js",
    "content": "// Test to cover missing lines for full coverage\n\nconst t = require('tap')\nconst Stream = require('stream')\nconst sinon = require('sinon')\nconst proxyquire = require('proxyquire')\n\n// Mock s3Files to avoid real S3 calls\nconst mockS3Files = {\n  connect: sinon.stub().returns({\n    createKeyStream: sinon.stub().returns(new Stream())\n  }),\n  createFileStream: sinon.stub().returns(new Stream())\n}\n\nconst s3Zip = proxyquire('../s3-zip.js', {\n  's3-files': mockS3Files\n})\n\nt.test('test archive with valid AWS SDK v3 client', function (child) {\n  // Mock valid AWS SDK v3 client (has .send method)\n  const awsV3Client = {\n    send: function (command) {\n      return Promise.resolve({ Body: Buffer.from('test data') })\n    }\n  }\n\n  try {\n    // This should work and cover line 21: connectionConfig = { s3: opts.s3 }\n    const archive = s3Zip.archive(\n      { s3: awsV3Client, bucket: 'test-bucket' },\n      'folder/',\n      ['test-file.txt']\n    )\n\n    child.type(archive, 'object', 'Should return archive object')\n    child.end()\n  } catch (error) {\n    child.fail(`Should not throw error with valid SDK v3 client: ${error.message}`)\n    child.end()\n  }\n})\n\nt.test('test archiveStream with debug mode and directory paths', function (child) {\n  const rs = new Stream()\n  rs.readable = true\n\n  // Enable debug mode to cover lines 59-60\n  s3Zip.debug = true\n\n  const archive = s3Zip.archiveStream(rs, [])\n\n  // Emit a directory path to trigger the debug log\n  rs.emit('data', { data: Buffer.alloc(0), path: 'test-folder/' })\n  rs.emit('end')\n\n  child.type(archive, 'object', 'Should return archive object')\n  child.end()\n})\n\nt.test('test archiveStream with file not in filesS3 array', function (child) {\n  const rs = new Stream()\n  rs.readable = true\n\n  // Test the ternary on line 66 - when file is not found in filesS3 array\n  const archive = s3Zip.archiveStream(rs, ['different-file.txt'], ['renamed.txt'])\n\n  // Emit a file that's not in the filesS3 array\n  rs.emit('data', {\n    data: Buffer.from('test content'),\n    path: 'not-in-list.txt'\n  })\n  rs.emit('end')\n\n  child.type(archive, 'object', 'Should return archive object')\n  child.end()\n})\n\nt.test('test archiveStream with debug and archive error', function (child) {\n  const rs = new Stream()\n  rs.readable = true\n\n  // Enable debug mode to cover line 54\n  s3Zip.debug = true\n\n  const archive = s3Zip.archiveStream(rs, [])\n\n  // Force an archive error to trigger the debug log on line 54\n  setImmediate(() => {\n    archive.emit('error', new Error('test error'))\n  })\n\n  rs.emit('end')\n\n  child.type(archive, 'object', 'Should return archive object')\n  child.end()\n})\n"
  },
  {
    "path": "test/test-password-protected-duplicate.js",
    "content": "const s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst archiverZipEncryptable = require('archiver-zip-encryptable')\n\nt.test('test duplicate format registration does not error', function (child) {\n  // First registration should work\n  s3Zip.setRegisterFormatOptions('zip-encryptable', archiverZipEncryptable)\n\n  // Try to register the format via archiveStream simulation\n  const mockStream = {\n    on: function (event, callback) {\n      if (event === 'end') {\n        setTimeout(callback, 10) // Simulate async behavior\n      } else if (event === 'data') {\n        // Don't emit any data\n      } else if (event === 'error') {\n        // Don't emit any errors\n      }\n      return this\n    }\n  }\n\n  try {\n    // First call to archiveStream (should register format)\n    const archive1 = s3Zip.archiveStream(mockStream, [], [])\n    child.ok(archive1, 'First archiveStream call succeeded')\n\n    // Second call to archiveStream (should NOT fail due to duplicate registration)\n    const archive2 = s3Zip.archiveStream(mockStream, [], [])\n    child.ok(archive2, 'Second archiveStream call succeeded')\n\n    child.end()\n  } catch (err) {\n    child.fail(`archiveStream calls failed: ${err.message}`)\n    child.end()\n  }\n})\n"
  },
  {
    "path": "test/test-s3-error-on-stream.js",
    "content": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nconst concat = require('concat-stream')\nconst join = require('path').join\nconst proxyquire = require('proxyquire')\nconst sinon = require('sinon')\n\nconst fileStream = function (file) {\n  const rs = new Stream()\n  rs.readable = true\n  const fileStream = fs.createReadStream(join(__dirname, file))\n  fileStream\n    .pipe(concat(\n      function buffersEmit (buffer) {\n        rs.emit('error', new Error())\n      })\n    )\n  fileStream\n    .on('end', function () {\n      console.log('end fileStream')\n      rs.emit('end')\n    })\n  return rs\n}\n\nt.test('test if error on filestream with archiveStream', function (child) {\n  const stream = fileStream('./fixtures/file.txt')\n  const files = ['foo.png']\n  s3Zip.archiveStream(stream, files)\n  child.end()\n})\n\nt.test('test if error on filestream with archive', function (child) {\n  const stream = fileStream('./fixtures/file.txt')\n  s3Zip = proxyquire('../s3-zip.js', {\n    's3-files': { createFileStream: sinon.stub().returns(stream) }\n  })\n  const files = ['foo.png']\n  s3Zip.archive({ region: 'region', bucket: 'bucket' }, 'folder', files)\n  child.end()\n})\n"
  },
  {
    "path": "test/test-s3-password-protected.js",
    "content": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nconst concat = require('concat-stream')\nconst join = require('path').join\nconst streamify = require('stream-array')\nconst archiverZipEncryptable = require('archiver-zip-encryptable')\nconst { exec } = require('child_process')\n\nconst fileStreamForFiles = function (files, preserveFolderPath) {\n  const rs = new Stream()\n  rs.readable = true\n\n  let fileCounter = 0\n  streamify(files).on('data', function (file) {\n    fileCounter += 1\n\n    const fileStream = fs.createReadStream(join(__dirname, file))\n    fileStream.pipe(\n      concat(function buffersEmit (buffer) {\n        // console.log('buffers concatenated, emit data for ', file);\n        const path = preserveFolderPath ? file : file.replace(/^.*[\\\\/]/, '')\n        rs.emit('data', { data: buffer, path })\n      })\n    )\n    fileStream.on('end', function () {\n      fileCounter -= 1\n      if (fileCounter < 1) {\n        // console.log('all files processed, emit end');\n        rs.emit('end')\n      }\n    })\n  })\n  return rs\n}\n\nconst file1 = 'a/file.txt'\nconst file2 = 'b/file.txt'\nconst sinon = require('sinon')\nconst proxyquire = require('proxyquire')\nconst s3Stub = fileStreamForFiles(\n  ['/fixtures/folder/a/file.txt', '/fixtures/folder/b/file.txt'],\n  true\n)\ns3Zip = proxyquire('../s3-zip.js', {\n  's3-files': { createFileStream: sinon.stub().returns(s3Stub) }\n})\n\nt.test('test archive password protected', async child => {\n  const outputPath = join(__dirname, '/test-password-protected.zip')\n  const output = fs.createWriteStream(outputPath)\n\n  await s3Zip\n    .setRegisterFormatOptions('zip-encryptable', archiverZipEncryptable)\n    .setFormat('zip-encryptable')\n    .setArchiverOptions({\n      zlib: { level: 8 },\n      forceLocalTime: true,\n      password: 'test'\n    })\n    .archive({ region: 'region', bucket: 'bucket' }, '/fixtures/folder/', [\n      file1,\n      file2\n    ])\n    .pipe(output)\n    .on('finish', async () => {\n      exec(\n        `unzip -P test ${outputPath} -d ${outputPath}/../testUnzipped/`,\n        () => {\n          if (\n            fs.existsSync(\n              `${outputPath}/../testUnzipped/fixtures/folder/a/file.txt`\n            )\n          ) {\n            child.ok(true, 'file exist after unzip')\n          }\n        }\n      )\n    })\n})\n"
  },
  {
    "path": "test/test-s3-same-file-alt-names.js",
    "content": "// Test s3-zip BUT using alternate file names for the same file which is listed multiple times\n\nconst s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nconst concat = require('concat-stream')\nconst join = require('path').join\nconst streamify = require('stream-array')\nconst tar = require('tar')\nconst sinon = require('sinon')\nconst proxyquire = require('proxyquire')\n\nconst fileStreamForFiles = function (files, preserveFolderPath) {\n  const rs = new Stream()\n  rs.readable = true\n\n  let fileCounter = 0\n  streamify(files).on('data', function (file) {\n    fileCounter += 1\n\n    const fileStream = fs.createReadStream(join(__dirname, file))\n    fileStream.pipe(\n      concat(function buffersEmit (buffer) {\n        // console.log('buffers concatenated, emit data for ', file);\n        const path = preserveFolderPath ? file : file.replace(/^.*[\\\\/]/, '')\n        rs.emit('data', { data: buffer, path })\n      })\n    )\n    fileStream.on('end', function () {\n      fileCounter -= 1\n      if (fileCounter < 1) {\n        // console.log('all files processed, emit end');\n        rs.emit('end')\n      }\n    })\n  })\n  return rs\n}\n\nconst outputFiles = [\n  'FILE_1_ALT_1.TXT',\n  'FILE_1_ALT_2.TXT'\n]\nconst filesRead = []\n\nt.test('test a tar archive with alternate names for one file listed many times', function (child) {\n  const inputFiles = [\n    '/fixtures/folder/a/file.txt',\n    '/fixtures/folder/a/file.txt'\n  ]\n  const outputPath = join(__dirname, '/test-same_file_alt_name.tar')\n  const output = fs.createWriteStream(outputPath)\n  const archive = s3Zip\n    .setFormat('tar')\n    .archiveStream(fileStreamForFiles(inputFiles, true), inputFiles, outputFiles)\n    .pipe(output)\n\n  archive.on('close', function () {\n    fs.createReadStream(outputPath)\n      .pipe(tar.list())\n      .on('entry', function (entry) {\n        filesRead.push(entry.path)\n      })\n      .on('end', function () {\n        child.same(filesRead, outputFiles)\n        child.end()\n      })\n  })\n})\n\nt.test('test archive with alternate names for one file listed many times', function (child) {\n  const inputFiles = [\n    '/fixtures/folder/a/file.txt',\n    '/fixtures/folder/a/file.txt'\n  ]\n  const s3Zip = proxyquire('../s3-zip.js', {\n    's3-files': { createFileStream: sinon.stub().returns(fileStreamForFiles(inputFiles, true)) }\n  })\n  const archive = s3Zip\n    .archive({ region: 'region', bucket: 'bucket' },\n      '',\n      inputFiles,\n      outputFiles.map(file => {\n        return { name: file }\n      })\n    )\n\n  child.type(archive, 'object')\n  child.end()\n})\n"
  },
  {
    "path": "test/test-s3-v2-client-error.js",
    "content": "// Test s3-zip with AWS SDK v2 client which should produce clear error\n\nconst s3Zip = require('../s3-zip.js')\nconst t = require('tap')\n\nt.test('test s3-zip with AWS SDK v2 client should fail with clear error', function (child) {\n  // Mock AWS SDK v2 client (no .send method)\n  const awsV2Client = {\n    getObject: function (params) {\n      return {\n        promise: function () {\n          return Promise.resolve({ Body: Buffer.from('test data') })\n        }\n      }\n    }\n  }\n\n  try {\n    // This should fail with a clear error message about AWS SDK compatibility\n    s3Zip.archive(\n      { s3: awsV2Client, bucket: 'test-bucket' },\n      'folder/',\n      ['test-file.txt']\n    )\n\n    // If we get here without an error, the test failed\n    child.fail('Expected an error about AWS SDK compatibility but none was thrown')\n    child.end()\n  } catch (error) {\n    // We should get a clear error about AWS SDK version compatibility\n    child.ok(error.message.includes('AWS SDK v3'), 'Should get clear error about AWS SDK v3 requirement')\n    child.ok(error.message.includes('@aws-sdk/client-s3'), 'Should mention the correct package')\n    child.end()\n  }\n})\n\nt.test('test s3-zip with null s3 client should fail with clear error', function (child) {\n  try {\n    // This should fail with a clear error message about AWS SDK compatibility\n    s3Zip.archive(\n      { s3: null, bucket: 'test-bucket' },\n      'folder/',\n      ['test-file.txt']\n    )\n\n    // If we get here without an error, the test failed\n    child.fail('Expected an error about AWS SDK compatibility but none was thrown')\n    child.end()\n  } catch (error) {\n    // We should get a clear error about AWS SDK version compatibility\n    child.ok(error.message.includes('AWS SDK v3'), 'Should get clear error about AWS SDK v3 requirement')\n    child.ok(error.message.includes('.send() method'), 'Should mention the .send() method requirement')\n    child.end()\n  }\n})\n"
  },
  {
    "path": "test/test-s3-zip-alt-names.js",
    "content": "// Test s3-zip BUT using alternate file names in the resulting zip archive\n\nlet s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nconst concat = require('concat-stream')\nconst yauzl = require('yauzl')\nconst join = require('path').join\nconst tar = require('tar')\n\nconst fileStream = function (file, forceError) {\n  const rs = new Stream()\n  rs.readable = true\n  const fileStream = fs.createReadStream(join(__dirname, file))\n  fileStream\n    .pipe(concat(\n      function buffersEmit (buffer) {\n        if (forceError) {\n          console.log('send end to finalize archive')\n          rs.emit('end')\n        } else {\n          rs.emit('data', { data: buffer, path: file })\n        }\n      })\n    )\n  fileStream\n    .on('end', function () {\n      console.log('end fileStream')\n      rs.emit('end')\n    })\n  return rs\n}\n\nconst file1 = '/fixtures/file.txt'\nconst file1Alt = 'FILE_ALT.TXT'\nconst file1DataEntry = { name: file1Alt, mode: parseInt('0600', 8) }\n// Stub: var fileStream = s3Files.createFileStream(keyStream);\nconst sinon = require('sinon')\nconst proxyquire = require('proxyquire')\nconst s3Stub = fileStream(file1)\ns3Zip = proxyquire('../s3-zip.js', {\n  's3-files': { createFileStream: sinon.stub().returns(s3Stub) }\n})\n\nt.test('test archiveStream and zip file with alternate file name in zip archive', function (child) {\n  const output = fs.createWriteStream(join(__dirname, '/test-alt.zip'))\n  const s = fileStream(file1)\n  const archive = s3Zip\n    .archiveStream(s, [file1], [file1Alt])\n    .pipe(output)\n  archive.on('close', function () {\n    console.log('+++++++++++')\n    yauzl.open(join(__dirname, '/test-alt.zip'), function (err, zip) {\n      if (err) console.log('err', err)\n      zip.on('entry', function (entry) {\n        // console.log(entry);\n        child.same(entry.fileName, file1Alt)\n        child.same(entry.compressedSize, 11)\n        child.same(entry.uncompressedSize, 20)\n      })\n\n      zip.on('close', function () {\n        child.end()\n      })\n    })\n  })\n  child.type(archive, 'object')\n})\n\nt.test('test archive with alternate zip archive names', function (child) {\n  const archive = s3Zip\n    .archive({ region: 'region', bucket: 'bucket' },\n      'folder',\n      [file1],\n      [file1Alt]\n    )\n  child.type(archive, 'object')\n  child.end()\n})\n\nt.test('test a tar archive with EntryData object', function (child) {\n  const outputPath = join(__dirname, '/test-entrydata.tar')\n  const output = fs.createWriteStream(outputPath)\n  const archive = s3Zip\n    .setFormat('tar')\n    .archiveStream(fileStream(file1), [file1], [file1DataEntry])\n    .pipe(output)\n\n  archive.on('close', function () {\n    fs.createReadStream(outputPath)\n      .pipe(tar.list())\n      .on('entry', function (entry) {\n        child.same(entry.path, file1Alt)\n        child.same(entry.mode, parseInt('0600', 8))\n      })\n      .on('end', function () {\n        child.end()\n      })\n  })\n})\n"
  },
  {
    "path": "test/test-s3-zip-unique-prefix.js",
    "content": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nconst concat = require('concat-stream')\nconst yauzl = require('yauzl')\nconst join = require('path').join\nconst streamify = require('stream-array')\n\nconst fileStreamForFiles = function (files, preserveFolderPath) {\n  const rs = new Stream()\n  rs.readable = true\n\n  let fileCounter = 0\n  streamify(files).on('data', function (file) {\n    fileCounter += 1\n\n    const fileStream = fs.createReadStream(join(__dirname, file))\n    fileStream.pipe(\n      concat(function buffersEmit (buffer) {\n        // console.log('buffers concatenated, emit data for ', file);\n        const path = preserveFolderPath ? file : file.replace(/^.*[\\\\/]/, '')\n        rs.emit('data', { data: buffer, path })\n      })\n    )\n    fileStream.on('end', function () {\n      fileCounter -= 1\n      if (fileCounter < 1) {\n        // console.log('all files processed, emit end');\n        rs.emit('end')\n      }\n    })\n  })\n  return rs\n}\n\nconst file1 = 'a/file.txt'\nconst file1Alt = 'file.txt'\nconst file2 = 'b/file.txt'\nconst file2Alt = 'file-1.txt'\nconst sinon = require('sinon')\nconst proxyquire = require('proxyquire')\nconst s3Stub = fileStreamForFiles(\n  ['/fixtures/folder/a/file.txt', '/fixtures/folder/b/file.txt'],\n  true\n)\ns3Zip = proxyquire('../s3-zip.js', {\n  's3-files': { createFileStream: sinon.stub().returns(s3Stub) }\n})\n\nt.test(\n  'test archive with matching alternate zip archive names but unique keys',\n  function (child) {\n    const outputPath = join(__dirname, '/test-unique.zip')\n    const output = fs.createWriteStream(outputPath)\n\n    const archive = s3Zip\n      .archive(\n        { region: 'region', bucket: 'bucket' },\n        '/fixtures/folder/',\n        [file1, file2],\n        [{ name: file1Alt }, { name: file2Alt }]\n      )\n      .pipe(output)\n\n    const altFiles = [file1Alt, file2Alt]\n\n    archive.on('close', function () {\n      yauzl.open(outputPath, function (err, zip) {\n        if (err) console.log('err', err)\n        zip.on('entry', function (entry) {\n          const i = altFiles.indexOf(entry.fileName)\n          if (i > -1) {\n            child.same(entry.fileName, altFiles[i])\n            altFiles.splice(i, 1)\n          } else {\n            child.ok(false, 'File not found in alternate file names list.')\n          }\n        })\n\n        zip.on('close', function () {\n          child.end()\n        })\n      })\n    })\n\n    child.type(archive, 'object')\n  }\n)\n"
  },
  {
    "path": "test/test-s3-zip.js",
    "content": "let s3Zip = require('../s3-zip.js')\nconst t = require('tap')\nconst fs = require('fs')\nconst Stream = require('stream')\nconst concat = require('concat-stream')\nconst yauzl = require('yauzl')\nconst { join } = require('path')\n\nconst fileStream = function (file, forceError) {\n  const rs = new Stream()\n  rs.readable = true\n  const fileStream = fs.createReadStream(join(__dirname, file))\n  fileStream.pipe(\n    concat(function buffersEmit (buffer) {\n      if (forceError) {\n        console.log('send end to finalize archive')\n        rs.emit('end')\n      } else {\n        rs.emit('data', { data: buffer, path: file })\n      }\n    })\n  )\n  fileStream.on('end', function () {\n    console.log('end fileStream')\n    rs.emit('end')\n  })\n  return rs\n}\n\nconst file1 = '/fixtures/file.txt'\nconst emptyFile = '/fixtures/empty.txt'\n// Stub: var fileStream = s3Files.createFileStream(keyStream);\nconst sinon = require('sinon')\nconst proxyquire = require('proxyquire')\nconst s3Stub = fileStream(file1)\ns3Zip = proxyquire('../s3-zip.js', {\n  's3-files': { createFileStream: sinon.stub().returns(s3Stub) }\n})\n\nt.test('test archiveStream and zip file', function (child) {\n  const output = fs.createWriteStream(join(__dirname, '/test.zip'))\n  const s = fileStream(file1)\n  const archive = s3Zip.archiveStream(s).pipe(output)\n  archive.on('close', function () {\n    console.log('+++++++++++')\n    yauzl.open(join(__dirname, '/test.zip'), function (err, zip) {\n      if (err) console.log('err', err)\n      zip.on('entry', function (entry) {\n        // console.log(entry);\n        child.same(entry.fileName, 'fixtures/file.txt')\n        child.same(entry.compressedSize, 11)\n        child.same(entry.uncompressedSize, 20)\n      })\n\n      zip.on('close', function () {\n        child.end()\n      })\n    })\n  })\n  child.type(archive, 'object')\n})\n\nt.test('test archive', function (child) {\n  const archive = s3Zip.archive(\n    { region: 'region', bucket: 'bucket' },\n    'folder',\n    [file1]\n  )\n  child.type(archive, 'object')\n  child.end()\n})\n\nt.test('test archive on empty file', function (child) {\n  const output = fs.createWriteStream(join(__dirname, '/test.zip'))\n  const s = fileStream(emptyFile)\n  const archive = s3Zip.archiveStream(s).pipe(output)\n  archive.on('close', function () {\n    console.log('+++++++++++')\n    yauzl.open(join(__dirname, '/test.zip'), function (err, zip) {\n      if (err) console.log('err', err)\n      zip.on('entry', function (entry) {\n        // console.log(entry);\n        child.same(entry.fileName, 'fixtures/empty.txt')\n        child.same(entry.compressedSize, 0)\n        child.same(entry.uncompressedSize, 0)\n      })\n\n      zip.on('close', function () {\n        child.end()\n      })\n    })\n  })\n  child.type(archive, 'object')\n})\n"
  }
]