[
  {
    "path": ".eslintrc.json",
    "content": "{\n  \"env\": {\n    \"browser\": true,\n    \"commonjs\": true,\n    \"es6\": true,\n    \"node\": true,\n    \"jest\": true\n  },\n  \"parser\": \"@typescript-eslint/parser\",\n  \"extends\": [\"plugin:@typescript-eslint/recommended\", \"prettier\", \"plugin:prettier/recommended\"],\n  \"parserOptions\": {\n    \"ecmaFeatures\": {\n      \"jsx\": true\n    },\n    \"ecmaVersion\": 2018,\n    \"sourceType\": \"module\",\n    \"project\": \"./tsconfig.json\"\n  },\n\n  \"plugins\": [\"jest\", \"unused-imports\", \"@typescript-eslint\"],\n  \"rules\": {\n    \"indent\": \"off\",\n    \"@typescript-eslint/explicit-function-return-type\": [\"error\"],\n    \"@typescript-eslint/indent\": [\"error\", 2],\n    \"array-bracket-newline\": [\"error\", \"consistent\"],\n    \"strict\": [\"error\", \"safe\"],\n    \"block-scoped-var\": \"error\",\n    \"complexity\": \"warn\",\n    \"default-case\": \"error\",\n    \"dot-notation\": \"warn\",\n    \"eqeqeq\": \"error\",\n    \"guard-for-in\": \"warn\",\n    \"linebreak-style\": [\"warn\", \"unix\"],\n    \"no-alert\": \"error\",\n    \"no-case-declarations\": \"error\",\n    \"no-console\": \"error\",\n    \"no-constant-condition\": \"error\",\n    \"no-continue\": \"warn\",\n    \"no-div-regex\": \"error\",\n    \"no-empty\": \"warn\",\n    \"no-empty-pattern\": \"error\",\n    \"no-implicit-coercion\": \"error\",\n    \"prefer-arrow-callback\": \"warn\",\n    \"no-labels\": \"error\",\n    \"no-loop-func\": \"error\",\n    \"no-nested-ternary\": \"warn\",\n    \"no-script-url\": \"error\",\n    \"no-warning-comments\": \"warn\",\n    \"quote-props\": [\"error\", \"as-needed\"],\n    \"require-yield\": \"error\",\n    \"max-nested-callbacks\": [\"error\", 4],\n    \"max-depth\": [\"error\", 4],\n    \"space-before-function-paren\": [\n      \"error\",\n      {\n        \"anonymous\": \"never\",\n        \"named\": \"never\",\n        \"asyncArrow\": \"always\"\n      }\n    ],\n    
\"padding-line-between-statements\": [\n      \"error\",\n      { \"blankLine\": \"always\", \"prev\": \"*\", \"next\": \"if\" },\n      { \"blankLine\": \"always\", \"prev\": \"*\", \"next\": \"function\" },\n      { \"blankLine\": \"always\", \"prev\": \"*\", \"next\": \"return\" }\n    ],\n    \"no-useless-constructor\": \"off\",\n    \"no-dupe-class-members\": \"off\",\n    \"no-unused-expressions\": \"off\",\n    \"curly\": [\"error\", \"multi-line\"],\n    \"object-curly-spacing\": [\"error\", \"always\"],\n    \"comma-dangle\": [\"error\", \"always-multiline\"],\n    \"@typescript-eslint/no-useless-constructor\": \"error\",\n    \"@typescript-eslint/no-unused-expressions\": \"error\",\n    \"@typescript-eslint/member-delimiter-style\": [\n      \"error\",\n      {\n        \"multiline\": {\n          \"delimiter\": \"none\",\n          \"requireLast\": true\n        },\n        \"singleline\": {\n          \"delimiter\": \"semi\",\n          \"requireLast\": false\n        }\n      }\n    ],\n    \"@typescript-eslint/ban-ts-comment\": [\n      \"error\",\n      {\n        \"ts-expect-error\": \"allow-with-description\",\n        \"ts-ignore\": \"allow-with-description\",\n        \"ts-nocheck\": \"allow-with-description\",\n        \"ts-check\": \"allow-with-description\",\n        \"minimumDescriptionLength\": 6\n      }\n    ],\n    \"require-await\": \"off\",\n    \"@typescript-eslint/promise-function-async\": \"error\",\n    \"@typescript-eslint/require-await\": \"off\",\n    \"@typescript-eslint/no-non-null-assertion\": \"off\",\n    \"@typescript-eslint/no-unused-vars\": \"off\",\n    \"unused-imports/no-unused-imports\": \"error\",\n    \"unused-imports/no-unused-vars\": [\n      \"warn\",\n      { \"vars\": \"all\", \"varsIgnorePattern\": \"^_\", \"args\": \"after-used\", \"argsIgnorePattern\": \"^_\" }\n    ]\n  }\n}\n"
  },
  {
    "path": ".github/workflows/tests.yaml",
    "content": "name: Test\n\non:\n  push:\n    branches:\n      - master\n  pull_request:\n    branches:\n      - '**'\n\njobs:\n  test:\n    name: Run tests\n    runs-on: ubuntu-latest\n    steps:\n      - name: Check out code\n        uses: actions/checkout@v2\n\n      - name: Set up Docker Buildx\n        id: buildx\n        uses: docker/setup-buildx-action@v1\n\n      - name: Set up QEMU\n        uses: docker/setup-qemu-action@v1\n\n      - name: Build Docker image\n        uses: docker/build-push-action@v2\n        with:\n          context: .\n          platforms: linux/arm64\n          push: false\n          load: true\n          tags: your-docker-image-name:latest\n\n      - name: Run tests\n        run: docker run your-docker-image-name:latest\n"
  },
  {
    "path": ".gitignore",
    "content": "/node_modules\r\n/.pnp\r\n.pnp.js\r\n\r\n# testing\r\n/coverage\r\n\r\n# production\r\n/build\r\n\r\n# misc\r\n.env\r\n.DS_Store\r\n.env.local\r\n.env.development.local\r\n.env.test.local\r\n.env.production.local\r\n\r\nnpm-debug.log*\r\nyarn-debug.log*\r\nyarn-error.log*\r\ndist\r\navatars\r\n"
  },
  {
    "path": ".prettierrc",
    "content": "{\n  \"printWidth\": 120,\n  \"tabWidth\": 2,\n  \"useTabs\": false,\n  \"bracketSpacing\": true,\n  \"semi\": false,\n  \"singleQuote\": true,\n  \"quoteProps\": \"as-needed\",\n  \"trailingComma\": \"all\",\n  \"endOfLine\": \"lf\",\n  \"arrowParens\": \"avoid\",\n  \"proseWrap\": \"always\"\n}\n"
  },
  {
    "path": "Dockerfile",
    "content": "# Start from the latest LTS Node version built for arm64 on Alpine\nFROM node:alpine\n\n# Add the TON Storage daemon and CLI to the path\nENV PATH=\"/app/ton:${PATH}\"\n\nWORKDIR /app\n\n# Install necessary packages\n# netcat equivalent in Alpine is netcat-openbsd\n# curl, mysql, and mysql-client are added since they might not be present in Alpine by default\nRUN apk add --no-cache curl netcat-openbsd mysql mysql-client\n\n# Initialize MySQL Database\nRUN mysql_install_db --user=mysql --ldata=/var/lib/mysql\n\n# Create the directory for the MySQL Unix socket and change its ownership\nRUN mkdir -p /run/mysqld/ && chown -R mysql:mysql /run/mysqld/\n\n# Download TON Storage daemon and CLI binaries\nRUN curl -LJO https://github.com/ton-blockchain/ton/releases/download/v2023.06/storage-daemon-linux-arm64\nRUN curl -LJO https://github.com/ton-blockchain/ton/releases/download/v2023.06/storage-daemon-cli-linux-arm64\nRUN curl -LJO https://ton-blockchain.github.io/global.config.json\n\n# Make them executable\nRUN chmod +x storage-daemon-linux-arm64 storage-daemon-cli-linux-arm64\n\n# Move them to the right place\nRUN mkdir ton && mv storage-daemon-linux-arm64 storage-daemon-cli-linux-arm64 global.config.json ton/\n\n# Add the current directory content to the Docker image\nADD . /app\n\n# Install project dependencies\nRUN npm ci\n\n# Run scripts\nRUN npm run check:types\nRUN npm run lint:check\n\n# Copy the startup script and make it executable\nCOPY ./startup.sh /app/startup.sh\nRUN chmod +x /app/startup.sh\n\nCMD [\"/app/startup.sh\"]\n"
  },
  {
    "path": "README.md",
    "content": "# Mutable File System Gateway\n\nThis repository contains the server-side implementation of our decentralized file system gateway. It's designed to provide a public, uncensored file system that can be accessed via the web for individuals wanting to share their data. The backend interfaces with multiple decentralized storage platforms and provides key services to manage data effectively and securely.\n\nIn its final stage, this project is envisioned to become a dynamic directory of decentralized, publicly accessible user files available to the entire world. These files will be available to all projects that use the node and are immune to censorship. This is not just a technical project, but a step towards a more transparent and accessible digital world. Harnessing the power of decentralized technologies, we aim to put the control of data back into the hands of users.\n\n\n## Features\n\n1. **Mempool:** Holds user operations on their file systems before they're included in the smart contract and uploaded to storage.\n\n2. **Gateway:** Manages data uploads to storage through public gateways, eliminating the need for users to install nodes/extensions. This component can be replaced in projects using other file gateways.\n\n3. **Rollup:** Aggregates all user changes over a specific period into a single hash, stored in a smart contract at regular intervals. This method significantly reduces the traditionally high costs associated with smart contract modifications, potentially saving users a substantial amount of money.\n\n4. **Appchains:** The combination of the backend and file system allows services to build Appchains for data storage. 
As the project evolves, these data Appchains will be interconnected in a decentralized manner.\n\nThe server-side implementation is designed to work seamlessly with our [Decentralized File System](https://github.com/FairJournal/file-system), providing an end-to-end solution for creating a public, decentralized file system.\n\n## Roadmap\n\n- [x] ✅ POC of mempool\n- [x] ✅ POC of gateway\n- [x] ✅ POC of rollup\n- [x] ✅ POC with the ability to create a file system specific for an app\n- [ ] Add multi-storage capability, backup user's data to different storages\n- [ ] Wrap the project in the form of a node with the same features, should work on mobile\n- [ ] Write a smart contract for storing file system changes for all users across projects\n- [ ] Find a blockchain home for the smart contract\n- [ ] Implement decentralized database distribution for user's updates\n- [ ] Implement incentives for nodes which store and validate the data\n- [ ] Enable the ability to incentivize not only directly by the user, but by Appchains and third parties\n- [ ] 🎉 🌎 Become a worldwide directory of public user files\n\n\n## API\n\n### GET /v1/fs/user/info\n\nThis endpoint checks if a user exists in the file system.\n\n**URL parameters:**\n\n- `address`: The address of the user.\n\n**Response:**\n\n```json\n{\n  \"status\": \"ok\",\n  \"address\": \"<address>\",\n  \"isUserExists\": \"<boolean>\"\n}\n```\n\n---\n\n## GET /v1/fs/user/get-update-id\n\nThis endpoint gets the current update ID for a user.\n\n**URL parameters:**\n\n- `address`: The address of the user.\n\n**Response:**\n\n```json\n{\n  \"status\": \"ok\",\n  \"address\": \"<address>\",\n  \"updateId\": \"<number>\"\n}\n```\n\n---\n\n### POST /v1/fs/blob/upload\n\nThis endpoint handles the uploading of a file, uploads it to the storage, inserts its metadata into a cache database to speed up the gateway, and returns the file info.\n\n**Form data:**\n\n- `blob`: A file to upload.\n\n**Response:**\n\n```json\n{\n  \"status\": 
\"ok\",\n  \"data\": {\n    \"reference\": \"<reference>\",\n    \"mime_type\": \"<mime_type>\",\n    \"sha256\": \"<sha256>\",\n    \"size\": \"<size>\"\n  }\n}\n\n```\n\n---\n\n### GET /v1/fs/blob/get-article\n\nThis endpoint retrieves a full article based on the user's address and the article's slug.\n\n**URL parameters:**\n\n- `userAddress`: The address of the user.\n- `slug`: The slug of the article.\n\n**Response:**\n\n```json\n{\n  \"status\": \"ok\",\n  \"userAddress\": \"<userAddress>\",\n  \"article\": {\n    \"slug\": \"<slug>\",\n    \"data\": \"<data>\",\n    \"preview\": \"<data>\"\n  }\n}\n\n```\n\n---\n\n### GET /v1/fs/blob/get-articles\n\nThis endpoint retrieves all the articles of a user.\n\n**URL parameters:**\n\n- `userAddress`: The address of the user.\n\n**Response:**\n\n```json\n{\n  \"status\": \"ok\",\n  \"userAddress\": \"<userAddress>\",\n  \"articles\": [\n    {\n      \"slug\": \"<slug>\",\n      \"data\": \"<data>\",\n      \"preview\": \"<data>\"\n    },\n    // ... 
more articles\n  ]\n}\n\n```\n\n---\n\n### GET /v1/fs/blob/get-path-info\n\nThis endpoint retrieves the info of a specific path for a user.\n\n**URL parameters:**\n\n- `userAddress`: The address of the user.\n- `path`: The path to retrieve info for.\n\n**Response:**\n\n```json\n{\n  \"status\": \"ok\",\n  \"userAddress\": \"<userAddress>\",\n  \"path\": \"<path>\",\n  \"data\": \"<data>\"\n}\n```\n\n---\n\n### POST /v1/fs/update/apply\n\nThis endpoint applies an update action to the file system.\n\n**Form data:**\n\n- An `update` object that includes the update data.\n\n**Response:**\n\n```json\n{\n  \"status\": \"ok\"\n}\n```\n\n## Installation\n\n1 - Install dependencies (Node.js 16):\n\n`npm ci`\n\nCopy and change options\n\n`cp example.env .env`\n\n2 - Install MySQL.\n\n3 - Create `fair_journal` db:\n\n`mysql -u root -p < ./migrations/db.sql`\n\n4 - Start interactive mode for MySQL user creation:\n\n`mysql -u root`\n\nand run commands:\n\n`CREATE USER 'fjuser'@'localhost' IDENTIFIED BY 'STRONG_PASSWORD_HERE';`\n\n`GRANT ALL PRIVILEGES ON fair_journal.* TO 'fjuser'@'localhost';`\n\n`FLUSH PRIVILEGES;`\n\n5 - Put these credentials into the `.env` file.\n\n6 - Run migrations:\n\n`npx knex migrate:latest --env production`\n\n7 - Start server using pm2:\n\n`npm run start`\n\n## Development\n\nStart in dev mode\n\n`npm run start:dev`\n\nTest app using local Docker\n\n`docker build -t your-docker-image-name . && docker run -p 8000:8000 your-docker-image-name`"
  },
  {
    "path": "blob/README.md",
    "content": "# Uploaded blobs here"
  },
  {
    "path": "example.env",
    "content": "# Path to the root of the files\nFILES_ROOT_PATH=/Users/test/web/fj-backend\n\n# Port of the application\nPORT=5000\n\n# Database socket connection path if needed\nDB_SOCKET_PATH=\n\n# Database host\nDB_HOST=localhost\n\n# Database port\nDB_PORT=3306\n\n# Database username\nDB_USER=root\n\n# Database password\nDB_PASSWORD=root\n\n# Database name\nDB_NAME=fair_journal\n\n# External web url for old files\nURL=http://localhost:5000/\n\n# Is show server logs\nSHOW_LOGS=true\n\n# Ton Storage CLI binary path\nTON_STORAGE_BIN_PATH=/root/storage-daemon-cli\n\n# Ton Storage host\nTON_STORAGE_HOST=127.0.0.1:5555\n\n# Ton Storage database path\nTON_STORAGE_DATABASE_PATH=/var/ton-storage\n\n# Ton Storage timeout\nTON_STORAGE_TIMEOUT=5000\n\n# Ton Storage wait attempts\nTON_STORAGE_WAIT_ATTEMPTS=100\n\n# Ton Storage check wait timeout\nTON_STORAGE_CHECK_WAIT_TIMEOUT=3000\n\n# Password for publishing the file system\nPUBLISH_FS_PASSWORD="
  },
  {
    "path": "jest.config.js",
    "content": "module.exports = {\n  preset: 'ts-jest',\n  testEnvironment: 'node',\n  testMatch: ['**/*.test.ts'],\n  testTimeout: 100000,\n}\n"
  },
  {
    "path": "knexfile.ts",
    "content": "import { config } from 'dotenv'\nimport { Knex } from 'knex'\n\nconfig()\n\nconst knexConfig: Knex.Config = {\n  client: 'mysql2',\n  connection: {\n    host: process.env.DB_HOST,\n    user: process.env.DB_USER,\n    password: process.env.DB_PASSWORD,\n    database: process.env.DB_NAME,\n  },\n  migrations: {\n    directory: './migrations',\n  },\n}\n\nconst configurations: { [key: string]: Knex.Config } = {\n  development: knexConfig,\n  production: knexConfig,\n  docker: {\n    ...knexConfig,\n    connection: {\n      // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n      // @ts-ignore\n      ...knexConfig.connection,\n      socketPath: '/run/mysqld/mysqld2.sock',\n    },\n  },\n}\n\nexport default configurations\n"
  },
  {
    "path": "migrations/20230706133935_init.ts",
    "content": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n  // create table\n  await knex.schema.createTable(\"users\", (table) => {\n    table.increments(\"id\").primary();\n    table.string(\"wallet\", 255).notNullable();\n    table.string(\"avatar\", 255).notNullable();\n    table.string(\"name\", 255).notNullable();\n    table.string(\"description\", 255).notNullable();\n  });\n\n  // insert data\n  await knex(\"users\").insert([\n    { id: 1, wallet: '200', avatar: 'https://example.com/avatar2.png', name: 'John Smith', description: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit' },\n    { id: 3, wallet: '123', avatar: '', name: '', description: '' },\n    { id: 4, wallet: '1234', avatar: '', name: '', description: '' },\n    { id: 5, wallet: '1', avatar: '', name: '', description: '' },\n    { id: 6, wallet: '0:fed265a59332abef0e2392fb653f94e8ff5cff55f6b35f6bfd3f3b7b5f862a2b', avatar: '222', name: 'Ihar Chernishev111', description: 'vTools failed to load source map: Could not load content for chrome111' },\n    { id: 7, wallet: '0:fed265a59332abef0e2392fb653f94e8ff5cff55f6b35f6bfd3f3b7b5f862a2b', avatar: '', name: '', description: '' },\n  ]);\n\n  // Create 'articles' table\n  await knex.schema.createTable(\"articles\", (table) => {\n    table.increments(\"id\").primary();\n    table.string(\"hash\", 255).notNullable();\n    table.string(\"content\", 255).notNullable();\n    table.integer(\"author_id\").unsigned().notNullable();\n\n    table.foreign(\"author_id\").references(\"id\").inTable(\"users\");\n  });\n\n  // Insert data into 'articles' table\n  await knex(\"articles\").insert([\n    { id: 2, hash: 'random-hash', content: 'Lorem ipsum dolor sit amet', author_id: 1 },\n    { id: 3, hash: 'random-hash', content: 'Lorem ipsum dolor sit amet', author_id: 1 },\n    { id: 4, hash: '00000000000', content: 
'{\"time\":1683731258538,\"blocks\":[{\"id\":\"sheNwCUP5A\",\"type\":\"header\",\"data\":{\"text\":\"Title\",\"level\":1}},{\"id\":\"u3i1-RBll_\",\"type\":\"paragraph\",\"data\":{\"text\":\"ceecec\"}},{\"id\":\"Z-X8jY2mAi\",\"type\":\"paragraph\",\"data\":{\"text\":\"ececec\"}}],\"version\":\"2.26.5\"}', author_id: 6 },\n    { id: 6, hash: '00000000000', content: '{\"time\":1683796077710,\"blocks\":[{\"id\":\"sheNwCUP5A\",\"type\":\"header\",\"data\":{\"text\":\"Title11\",\"level\":1}},{\"id\":\"4RA6seA4xt\",\"type\":\"paragraph\",\"data\":{\"text\":\"efefwefwef\"}}],\"version\":\"2.26.5\"}', author_id: 6 },\n    { id: 7, hash: '00000000000', content: '{\"time\":1683796230168,\"blocks\":[{\"id\":\"sheNwCUP5A\",\"type\":\"header\",\"data\":{\"text\":\"Title1122\",\"level\":1}},{\"id\":\"5Rk0mmE5T7\",\"type\":\"paragraph\",\"data\":{\"text\":\"yukddd11\"}},{\"id\":\"5jV6cesj88\",\"type\":\"paragraph\",\"data\":{\"text\":\"yku\"}}],\"version\":\"2.26.5\"}', author_id: 6 },\n    { id: 8, hash: '00000000000', content: '{\"time\":1683795761833,\"blocks\":[{\"id\":\"sheNwCUP5A\",\"type\":\"header\",\"data\":{\"text\":\"Title\",\"level\":1}},{\"id\":\"buOPouRBIE\",\"type\":\"paragraph\",\"data\":{\"text\":\"cdchh\"}}],\"version\":\"2.26.5\"}', author_id: 6 },\n    { id: 11, hash: '00000000000', content: '{\"time\":1683795580253,\"blocks\":[{\"id\":\"fzJUR75ZC8\",\"type\":\"paragraph\",\"data\":{\"text\":\"111111222\"}},{\"id\":\"2xATC4OkUH\",\"type\":\"paragraph\",\"data\":{\"text\":\"111111\"}}],\"version\":\"2.26.5\"}', author_id: 6 },\n  ]);\n\n  await knex.schema.createTable('images', table => {\n    table.increments('id');\n    table.integer(\"author_id\").unsigned().notNullable();\n    table.string('signature', 255).notNullable();\n    table.string('path', 255).notNullable();\n\n    table.foreign(\"author_id\").references(\"id\").inTable(\"users\");\n\n  });\n}\n\nexport async function down(knex: Knex): Promise<void> {\n  // Drop 'images' table\n  await 
knex.schema.dropTable('images');\n\n  // Drop 'articles' table\n  await knex.schema.dropTable(\"articles\");\n\n  // Drop 'users' table\n  await knex.schema.dropTable(\"users\");\n}"
  },
  {
    "path": "migrations/20230713094839_fs_updates.ts",
    "content": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n  return knex.schema.createTable('fs_update', table => {\n    table.increments('id').unsigned().primary();\n    table.string('public_key', 64).notNullable();\n    table.integer('update_id').unsigned().notNullable();\n    table.text('update', 'longtext').notNullable();\n    table.dateTime('created_at').notNullable().defaultTo(knex.fn.now());\n\n    // Setting the combination of public_key + update_id to be unique\n    table.unique(['public_key', 'update_id']);\n    table.index('public_key'); // Add an index to the public_key column\n  });\n}\n\nexport async function down(knex: Knex): Promise<void> {\n  return knex.schema.dropTable('fs_update');\n}\n"
  },
  {
    "path": "migrations/20230716103734_file.ts",
    "content": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n  return knex.schema.createTable('file', (table) => {\n    table.string('reference', 64).primary().unique().index();\n    table.integer('status').unsigned();\n    table.string('mime_type', 255);\n    table.bigInteger('size').unsigned(); // added size field\n    table.string('sha256', 64).index(); // added sha256 field with index\n    table.dateTime('created_at').defaultTo(knex.fn.now());\n    table.dateTime('updated_at').defaultTo(knex.fn.now());\n  });\n}\n\nexport async function down(knex: Knex): Promise<void> {\n  return knex.schema.dropTable('file');\n}\n"
  },
  {
    "path": "migrations/20230725081357_settings.ts",
    "content": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n    return knex.schema.createTable('settings', table => {\n        table.string('key', 255).primary().index();\n        table.text('value');\n        table.timestamp('created_at').defaultTo(knex.fn.now());\n        table.timestamp('updated_at').defaultTo(knex.fn.now());\n    });\n}\n\nexport async function down(knex: Knex): Promise<void> {\n    return knex.schema.dropTable('settings');\n}\n"
  },
  {
    "path": "migrations/db.sql",
    "content": "CREATE DATABASE fair_journal\n    CHARACTER SET utf8mb4\n    COLLATE utf8mb4_general_ci;\n"
  },
  {
    "path": "nodemon.json",
    "content": "{\r\n    \"watch\": [\r\n        \"src\"\r\n    ],\r\n    \"ext\": \".ts,.js\",\r\n    \"ignore\": [],\r\n    \"exec\": \"ts-node ./src/index.ts\"\r\n}"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"fair-journal-backend\",\n  \"version\": \"1.0.0\",\n  \"description\": \"\",\n  \"main\": \"index.js\",\n  \"scripts\": {\n    \"prepublishOnly\": \"rimraf dist && npm run compile:types && npm run compile:node --env mode=production\",\n    \"start:dev\": \"nodemon\",\n    \"build\": \"rimraf ./build && tsc\",\n    \"start\": \"ts-node src/index.ts\",\n    \"test\": \"jest --runInBand\",\n    \"lint:check\": \"eslint \\\"src/**/*.ts\\\" \\\"test/**/*.ts\\\" && prettier --check \\\"src/**/*.ts\\\" \\\"test/**/*.ts\\\"\",\n    \"check:types\": \"tsc --project tsconfig.test.json\"\n  },\n  \"author\": \"\",\n  \"license\": \"ISC\",\n  \"devDependencies\": {\n    \"@types/cors\": \"^2.8.13\",\n    \"@types/express\": \"^4.17.17\",\n    \"@types/jest\": \"^29.5.2\",\n    \"@types/multer\": \"^1.4.7\",\n    \"@types/node\": \"^20.1.0\",\n    \"@types/supertest\": \"^2.0.12\",\n    \"@types/tmp\": \"^0.2.3\",\n    \"@typescript-eslint/eslint-plugin\": \"^5.59.2\",\n    \"babel-jest\": \"^29.6.1\",\n    \"eslint\": \"^8.44.0\",\n    \"eslint-config-prettier\": \"^8.8.0\",\n    \"eslint-config-standard-with-typescript\": \"^34.0.1\",\n    \"eslint-plugin-import\": \"^2.27.5\",\n    \"eslint-plugin-jest\": \"^27.2.2\",\n    \"eslint-plugin-n\": \"^15.7.0\",\n    \"eslint-plugin-prettier\": \"^4.2.1\",\n    \"eslint-plugin-promise\": \"^6.1.1\",\n    \"eslint-plugin-unused-imports\": \"^2.0.0\",\n    \"jest\": \"^29.6.1\",\n    \"nodemon\": \"^2.0.22\",\n    \"prettier\": \"^2.8.8\",\n    \"rimraf\": \"^5.0.0\",\n    \"supertest\": \"^6.3.3\",\n    \"ton-crypto\": \"^3.2.0\",\n    \"ts-jest\": \"^29.1.1\",\n    \"ts-node\": \"^10.9.1\",\n    \"typescript\": \"^5.0.4\"\n  },\n  \"dependencies\": {\n    \"@fairjournal/file-system\": \"^1.10.1\",\n    \"body-parser\": \"^1.20.2\",\n    \"cors\": \"^2.8.5\",\n    \"dotenv\": \"^16.0.3\",\n    \"express\": \"^4.18.2\",\n    \"knex\": \"^2.4.2\",\n    \"multer\": \"^1.4.5-lts.1\",\n    \"mysql2\": 
\"^3.5.1\",\n    \"sharp\": \"^0.32.1\",\n    \"tmp\": \"^0.2.1\",\n    \"tonstorage-cli\": \"^1.1.5\",\n    \"uuid\": \"^9.0.0\"\n  }\n}\n"
  },
  {
    "path": "src/app.ts",
    "content": "import express, { Application } from 'express'\nimport cors from 'cors'\nimport router from './routes'\nimport fileSystemRouter from './controllers/file-system'\nimport { FileSystem } from '@fairjournal/file-system'\nimport { initFs, syncFs } from './fs'\nimport { TonstorageCLI } from 'tonstorage-cli'\nimport { delay } from './utils'\n\nconst app: Application = express()\nexport let fileSystem: FileSystem\n\nexport let tonstorage: TonstorageCLI\n\nexport const errorHandler = (\n  err: Error,\n  req: express.Request,\n  res: express.Response,\n  next: express.NextFunction,\n): void => {\n  const error = {\n    status: 'error',\n    message: err.message,\n  }\n\n  res.status(500).json(error)\n}\n\n// Middleware\napp.use(express.json())\napp.use(cors())\n\n// Routes\napp.use('/api', router)\napp.use('/avatars', express.static('avatars'))\napp.use('/v1/fs', fileSystemRouter)\napp.use(errorHandler)\n\nfunction log(message: string): void {\n  if (process.env.SHOW_LOGS === 'true') {\n    // eslint-disable-next-line no-console\n    console.log(message)\n  }\n}\n\n/**\n * Waits for TonStorage to be ready\n *\n * @param tonStorage TonStorage instance\n */\nexport async function waitTonStorage(tonStorage: TonstorageCLI): Promise<void> {\n  /**\n   * TonStorage provider info response\n   */\n  interface Response {\n    ok: boolean\n    error?: string\n    code: number\n  }\n\n  const maxAttempts = parseInt(process.env.TON_STORAGE_WAIT_ATTEMPTS || '10')\n  const waitTime = parseInt(process.env.TON_STORAGE_CHECK_WAIT_TIMEOUT || '3000')\n\n  let isReady = false\n  let attempts = 0\n\n  while (!isReady && attempts < maxAttempts) {\n    attempts += 1\n\n    try {\n      const providerInfo = (await tonStorage.getProviderInfo()) as Response\n\n      if (providerInfo && providerInfo.error && providerInfo.error.includes('timeout')) {\n        // eslint-disable-next-line no-console\n        console.log(`Ton Storage: connection timeout occurred. 
Waiting and retrying (${attempts}/${maxAttempts})...`)\n        await delay(waitTime)\n      } else {\n        isReady = true\n      }\n    } catch (e) {\n      // eslint-disable-next-line no-console\n      console.log('An error occurred. Waiting for TonStorage...')\n      await delay(waitTime)\n    }\n  }\n\n  if (!isReady) {\n    throw new Error(`Failed to get provider info after ${maxAttempts} attempts.`)\n  }\n}\n\n/**\n * Creates TonStorage instance\n */\nexport function createTonStorageInstance(): TonstorageCLI {\n  const bin = process.env.TON_STORAGE_BIN_PATH\n  const host = process.env.TON_STORAGE_HOST\n  const database = process.env.TON_STORAGE_DATABASE_PATH\n  const timeout = Number(process.env.TON_STORAGE_TIMEOUT)\n\n  if (!bin || !host || !database || !timeout) {\n    throw new Error('TonStorage is not configured via .env')\n  }\n\n  return new TonstorageCLI({\n    bin,\n    host,\n    database,\n    timeout: Number(process.env.TON_STORAGE_TIMEOUT),\n  })\n}\n\n/**\n * Start initialization asynchronously\n */\nexport async function syncFileSystem(): Promise<void> {\n  log('Connecting to TonStorage...')\n  tonstorage = createTonStorageInstance()\n\n  await waitTonStorage(tonstorage)\n  log('Connected to TonStorage!')\n  log('Sync file system...')\n  await syncFs(fileSystem)\n  log('File system synced!')\n}\n\n/**\n * Clear file system\n */\nexport function clearFileSystem(): void {\n  fileSystem = initFs()\n}\n\nexport default app\n"
  },
  {
    "path": "src/controllers/ArticleController.ts",
    "content": "import { Request, Response } from 'express'\nimport { OkPacket, RowDataPacket } from 'mysql2'\nimport pool from '../db'\n\nconst getAllArticles = async (req: Request, res: Response): Promise<Response> => {\n  try {\n    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM articles')\n\n    return res.json(rows)\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nconst getArticleById = async (req: Request, res: Response): Promise<Response> => {\n  const id = Number(req.params.id)\n\n  if (!id) {\n    return res.status(400).send('Article id is required')\n  }\n  try {\n    const [rows] = await pool.query<RowDataPacket[]>(\n      `SELECT articles.*, users.name, users.avatar, users.wallet\n       FROM articles\n       JOIN users ON articles.author_id = users.id\n       WHERE articles.id = ?`,\n      [id],\n    )\n    const article = rows[0]\n\n    if (!article) {\n      return res.status(404).send(`Article with id ${id} not found`)\n    }\n\n    return res.json(article)\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nconst createArticle = async (req: Request, res: Response): Promise<Response> => {\n  const { authorId, hash, content } = req.body\n\n  if (!authorId) {\n    return res.status(400).send('Author id is required')\n  }\n\n  if (!hash) {\n    return res.status(400).send('Hash is required')\n  }\n\n  if (!content) {\n    return res.status(400).send('Content is required')\n  }\n  try {\n    const [result] = await pool.query<OkPacket>('INSERT INTO articles(author_id, hash, content) VALUES(?, ?, ?)', [\n      authorId,\n      hash,\n      JSON.stringify(content),\n    ])\n    const id = result.insertId\n\n    return res.status(201).json({ id })\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nconst updateArticle = async (req: Request, res: Response): 
Promise<Response | void> => {\n  const id = Number(req.params.id)\n  const { authorId, hash, content } = req.body\n\n  if (!authorId) {\n    return res.status(400).send('Author id is required')\n  }\n\n  if (!hash) {\n    return res.status(400).send('Hash is required')\n  }\n\n  if (!content) {\n    return res.status(400).send('Content is required')\n  }\n  try {\n    const [result] = await pool.query<OkPacket>(\n      'UPDATE articles SET author_id = ?, hash = ?, content = ? WHERE id = ?',\n      [authorId, hash, JSON.stringify(content), id],\n    )\n\n    if (result.affectedRows === 0) {\n      return res.status(404).send(`Article with id ${id} not found`)\n    }\n\n    return res.json({ id })\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nconst deleteArticle = async (req: Request, res: Response): Promise<Response | void> => {\n  const id = Number(req.params.id)\n\n  if (!id) {\n    return res.status(400).send('Id is required')\n  }\n  try {\n    const [result] = await pool.query<OkPacket>('DELETE FROM articles WHERE id = ?', [id])\n\n    if (result.affectedRows === 0) {\n      return res.status(404).send(`Article with id ${id} not found`)\n    }\n\n    return res.json({ id })\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nexport { getAllArticles, getArticleById, createArticle, updateArticle, deleteArticle }\n"
  },
  {
    "path": "src/controllers/ImageController.ts",
    "content": "import { Request, Response } from 'express'\nimport { OkPacket } from 'mysql2'\nimport pool from '../db'\n\nconst upload = async (req: Request, res: Response): Promise<Response> => {\n  const { authorId } = req.body\n\n  if (!authorId) {\n    return res.status(400).send('Author id is required')\n  }\n\n  if (!(req.file && req.file.path)) {\n    return res.status(400).send('No image uploaded.')\n  }\n\n  // Check image size\n  const fileSizeInBytes = req.file.size\n  const maxSizeInBytes = 10 * 1024 * 1024 // 10 megabytes\n\n  if (fileSizeInBytes > maxSizeInBytes) {\n    return res.status(400).send('Image size exceeds the maximum limit of 10 megabytes.')\n  }\n\n  try {\n    const path = req.file.path\n    const [result] = await pool.query<OkPacket>('INSERT INTO images(author_id, signature, path) VALUES(?, ?, ?)', [\n      authorId,\n      '---',\n      path,\n    ])\n    const id = result.insertId\n\n    return res.status(201).json({\n      id,\n      success: 1,\n      file: {\n        url: `${process.env.URL}${path}`,\n        relativePath: path,\n      },\n    })\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nexport default { upload }\n"
  },
  {
    "path": "src/controllers/UserController.ts",
    "content": "import { Request, Response } from 'express'\nimport { OkPacket, RowDataPacket } from 'mysql2'\nimport pool from '../db'\nimport User from '../models/User'\nimport * as fs from 'fs'\n\nconst getUserById = async (req: Request, res: Response): Promise<Response> => {\n  const id = Number(req.params.id)\n\n  if (!id) {\n    return res.status(400).send('Id is required')\n  }\n  try {\n    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [id])\n    const user = rows[0] as User\n\n    if (!user) {\n      return res.status(404).send(`User with id ${id} not found`)\n    }\n\n    return res.json(user)\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nconst getArticlesByUserId = async (req: Request, res: Response): Promise<Response> => {\n  const id = Number(req.params.id)\n\n  if (!id) {\n    return res.status(400).send('User id is required')\n  }\n  try {\n    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM articles WHERE author_id = ?', [id])\n    const articles = rows || []\n\n    return res.json(articles)\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\n/**\n * Update user info\n */\nconst updateUser = async (req: Request, res: Response): Promise<Response> => {\n  try {\n    const id = Number(req.params.id)\n\n    if (!id) {\n      throw new Error('Id is required')\n    }\n\n    const { wallet, name, description } = req.body\n\n    if (!wallet) {\n      throw new Error('Wallet is required')\n    }\n\n    if (!name) {\n      throw new Error('Name is required')\n    }\n\n    let avatarPath = null\n\n    if (req.file) {\n      // Check avatar image size\n      const fileSizeInBytes = req.file.size\n      const maxSizeInBytes = 10 * 1024 * 1024 // 10 megabytes\n\n      if (fileSizeInBytes > maxSizeInBytes) {\n        return res.status(400).send('Avatar image size exceeds the maximum limit 
of 10 megabytes.')\n      }\n      avatarPath = req.file.path\n    }\n\n    // get old user info\n    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [id])\n    const user = rows[0] as User\n\n    // update user info\n    const [result] = await pool.query<OkPacket>(\n      'UPDATE users SET wallet = ?, avatar = IFNULL(?, avatar), name = ?, description = ? WHERE id = ?',\n      [wallet, avatarPath, name, description, id],\n    )\n\n    // remove old avatar if exists sync\n    if (user.avatar && fs.existsSync(user.avatar)) {\n      fs.unlinkSync(user.avatar)\n    }\n\n    if (result.affectedRows === 0) {\n      return res.status(404).send(`User with id \"${id}\" not found`)\n    }\n\n    // get updated user info\n    const [updatedRows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [id])\n    const updatedUser = updatedRows[0]\n\n    if (!updatedUser) {\n      throw new Error('User not found')\n    }\n\n    return res.json(updatedUser)\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nconst deleteUser = async (req: Request, res: Response): Promise<Response> => {\n  const id = Number(req.params.id)\n\n  if (!id) {\n    return res.status(400).send('User id is required')\n  }\n  try {\n    const [result] = await pool.query<OkPacket>('DELETE FROM users WHERE id = ?', [id])\n\n    if (result.affectedRows === 0) {\n      return res.status(404).send(`User with id ${id} not found`)\n    }\n\n    return res.sendStatus(204)\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nconst authorizeByWallet = async (req: Request, res: Response): Promise<Response> => {\n  const { wallet }: { wallet: string } = req.body\n\n  try {\n    // Check if the user already exists in the database\n    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE wallet = ?', [wallet])\n    let user = 
rows[0]\n\n    if (!user) {\n      // If the user doesn't exist, create a new one\n      const [result] = await pool.query<OkPacket>(\n        'INSERT INTO users (wallet, name, description, avatar) VALUES (?, ?, ?, ?)',\n        [wallet, '', '', ''],\n      )\n      const newUserId = result.insertId\n\n      // Retrieve the newly created user from the database\n      const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [newUserId])\n      user = rows[0]\n    }\n\n    return res.json(user)\n  } catch (e) {\n    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)\n  }\n}\n\nexport { getUserById, updateUser, deleteUser, getArticlesByUserId, authorizeByWallet }\n"
  },
  {
    "path": "src/controllers/file-system/app/get-settings-action.ts",
    "content": "import { Request, Response, NextFunction } from 'express'\nimport pool from '../../../db'\nimport { getSetting } from '../utils'\nimport { assertString } from '../../../utils'\n\n/**\n * Gets settings from db\n *\n * @param req Request\n * @param res Response\n * @param next Next function\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  try {\n    const { key } = req.query\n\n    if (!key) {\n      throw new Error('\"key\" is not set')\n    }\n\n    assertString(key)\n    const settingValue = await getSetting(pool, key)\n\n    res.json({\n      status: 'ok',\n      value: settingValue,\n    })\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/app/index.ts",
    "content": "import express from 'express'\nimport publishAction from './publish-action'\nimport getSettingsAction from './get-settings-action'\n\nconst router = express.Router()\nrouter.post('/publish', publishAction)\nrouter.get('/get-settings', getSettingsAction)\n\nexport default router\n"
  },
  {
    "path": "src/controllers/file-system/app/publish-action.ts",
    "content": "import { NextFunction, Request, Response } from 'express'\nimport { fileSystem } from '../../../app'\nimport { SettingsKey, uploadData, upsertSettings } from '../utils'\nimport pool from '../../../db'\n\n/**\n * Publish action body\n */\nexport interface PublishBody {\n  /**\n   * Password for the update\n   */\n  password: string\n}\n\n/**\n * Publish action for the file system\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  try {\n    const { password } = req.body as PublishBody\n\n    if (!process.env.PUBLISH_FS_PASSWORD) {\n      throw new Error('Publish password is not set in .env')\n    }\n\n    if (password !== process.env.PUBLISH_FS_PASSWORD) {\n      throw new Error('Invalid password')\n    }\n\n    const uploadResult = await fileSystem.upload({\n      uploadData: uploadData,\n    })\n\n    const reference = uploadResult.reference\n    await upsertSettings(pool, SettingsKey.FS_STATE_REFERENCE, reference)\n    // todo send tx to smart contract with the actual reference\n    // todo send tx if only changed\n\n    res.json({\n      status: 'ok',\n      reference,\n    })\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/blob/get-article-action.ts",
    "content": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress, assertArticleName } from '../../../utils'\nimport { DEFAULT_DIRECTORY } from '../const'\nimport { assertDirectory, assertFile, assertFiles, Directory, File } from '@fairjournal/file-system'\nimport { fileSystem } from '../../../app'\nimport { Article, ARTICLE_INDEX_FILE_NAME, ArticleResponse, directoryToArticle } from './utils'\n\n/**\n * Checks if the user exists in the file system. If not, an error is thrown.\n *\n * @param address The address of the user\n * @throws Will throw an error if the user does not exist in the file system\n */\nfunction checkUserExists(address: string): void {\n  if (!fileSystem.isUserExists(address)) {\n    throw new Error(`User not found: \"${address}\"`)\n  }\n}\n\n/**\n * Retrieves article data based on the user address and the slug.\n *\n * @param address The address of the user\n * @param slug The slug of the article\n * @returns The data of the article\n * @throws Will throw an error if the article is not found\n */\nasync function getArticleData(address: string, slug: string): Promise<File | Directory> {\n  try {\n    const path = `/${address}/${DEFAULT_DIRECTORY}/${slug}`\n\n    return fileSystem.getPathInfo(path)\n  } catch (e) {\n    throw new Error(`Article not found: \"${slug}\". ${(e as Error).message}`)\n  }\n}\n\n/**\n * Converts the retrieved data into an article.\n *\n * @param data The raw data of the article\n * @param slug The slug of the article\n * @returns The converted article\n * @throws Will throw an error if the data cannot be converted into an article\n */\nasync function convertDataToArticle(data: Directory, slug: string): Promise<Article> {\n  try {\n    return await directoryToArticle(data)\n  } catch (e) {\n    throw new Error(`Article not found: \"${slug}\". 
Error: ${(e as Error).message}`)\n  }\n}\n\n/**\n * Handles the GET request to retrieve a full article.\n *\n * @param req The request object\n * @param res The response object\n * @param next The next middleware function in the stack\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  try {\n    const { userAddress, slug } = req.query\n    assertAddress(userAddress)\n    assertArticleName(slug)\n    const address = userAddress.toLowerCase()\n    checkUserExists(address)\n    const articleData = await getArticleData(address, slug)\n    assertDirectory(articleData)\n    assertFiles(articleData.files)\n    const indexArticle = articleData.files.find(file => file.name === ARTICLE_INDEX_FILE_NAME)\n    assertFile(indexArticle)\n    const article = await convertDataToArticle(articleData, slug)\n\n    const response: ArticleResponse = {\n      status: 'ok',\n      userAddress,\n      article,\n      reference: indexArticle.hash,\n    }\n\n    res.json(response)\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/blob/get-articles-action.ts",
    "content": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { DEFAULT_DIRECTORY } from '../const'\nimport { assertDirectories, assertDirectory, File, Directory } from '@fairjournal/file-system'\nimport { fileSystem } from '../../../app'\nimport { ArticlesResponse, directoriesToShortArticles } from './utils'\n\n/**\n * Check if user exists\n *\n * @param userAddress - User address in the blockchain.\n * @throws Will throw an error if the user is not found.\n */\nfunction checkUserExistence(userAddress: string): void {\n  if (!fileSystem.isUserExists(userAddress.toLowerCase())) {\n    throw new Error(`User not found: \"${userAddress}\"`)\n  }\n}\n\n/**\n * Get path info and handle possible errors\n *\n * @param path - Path to the user's articles directory.\n * @throws Will throw an error if the articles are not found.\n */\nfunction getPathInfoWithErrorHandling(path: string): File | Directory {\n  try {\n    return fileSystem.getPathInfo(path)\n  } catch (e) {\n    throw new Error(`Articles not found. ${(e as Error).message}`)\n  }\n}\n\n/**\n * Get articles of the user\n *\n * @param req Request\n * @param res Response\n * @param next Next function\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  try {\n    const { userAddress } = req.query\n\n    assertAddress(userAddress)\n    checkUserExistence(userAddress)\n    const path = `/${userAddress.toLowerCase()}/${DEFAULT_DIRECTORY}`\n    const data = getPathInfoWithErrorHandling(path)\n    assertDirectory(data)\n    assertDirectories(data.directories)\n    const articles = await directoriesToShortArticles(data.directories)\n    // todo cache this object for N minutes. And invalidate cache when new article is added\n    const response: ArticlesResponse = {\n      status: 'ok',\n      userAddress,\n      articles,\n    }\n\n    res.json(response)\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/blob/get-path-info-action.ts",
    "content": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { PathInfoResponse } from './utils'\nimport { assertPath, assertUserExists, getPathInfo } from '../utils'\n\n/**\n * Handles the GET request to retrieve a path info\n *\n * @param req The request object\n * @param res The response object\n * @param next The next middleware function in the stack\n */\nexport default (req: Request, res: Response, next: NextFunction): void => {\n  try {\n    const { userAddress, path } = req.query\n    assertAddress(userAddress)\n    assertPath(path)\n\n    const address = userAddress.toLowerCase()\n    assertUserExists(address)\n    const data = getPathInfo(address, path)\n\n    const response: PathInfoResponse = {\n      status: 'ok',\n      userAddress,\n      path,\n      data,\n    }\n\n    res.json(response)\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/blob/index.ts",
    "content": "import express from 'express'\nimport uploadAction from './upload-action'\nimport getArticleAction from './get-article-action'\nimport getArticlesAction from './get-articles-action'\nimport getPathInfoAction from './get-path-info-action'\nimport multer from 'multer'\nimport { MAX_BLOB_SIZE } from '../const'\n\nconst storage = multer.diskStorage({\n  destination: (req, file, cb) => {\n    cb(null, 'blob/')\n  },\n})\n\nconst upload = multer({ storage, limits: { fileSize: MAX_BLOB_SIZE } })\n\nconst router = express.Router()\nrouter.post('/upload', upload.single('blob'), uploadAction)\nrouter.get('/get-article', getArticleAction)\nrouter.get('/get-articles', getArticlesAction)\nrouter.get('/get-path-info', getPathInfoAction)\n\nexport default router\n"
  },
  {
    "path": "src/controllers/file-system/blob/upload-action.ts",
    "content": "import { NextFunction, Request, Response } from 'express'\nimport pool from '../../../db'\nimport { assertReference, calculateSHA256, toAbsolutePath } from '../../../utils'\nimport { RowDataPacket } from 'mysql2'\nimport { tonstorage } from '../../../app'\nimport * as fs from 'fs'\nimport { FileStatus } from '../types'\nimport { getReferencePath } from '../../../fs'\nimport path from 'path'\nimport { uploadToStorage } from '../utils'\n\n/**\n * DB model of the file\n */\nexport interface DBFileInfo {\n  /**\n   * Reference in storage\n   */\n  reference: string\n\n  /**\n   * Status of the file\n   */\n  status: number\n\n  /**\n   * Mime type of the file\n   */\n  mime_type: string\n\n  /**\n   * Size of the file\n   */\n  size: number\n\n  /**\n   * Sha256 of the file in lowercase\n   */\n  sha256: string\n\n  /**\n   * Date of creation\n   */\n  created_at?: Date\n\n  /**\n   * Date of last update\n   */\n  updated_at?: Date\n}\n\n/**\n * Inserts file info into database\n *\n * @param info File info\n */\nasync function insertFileInfo(info: DBFileInfo): Promise<void> {\n  const connection = await pool.getConnection()\n\n  try {\n    await connection.query(\n      `INSERT INTO file (reference, status, mime_type, size, sha256, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)`,\n      [info.reference, info.status, info.mime_type, info.size, info.sha256, info.created_at, info.updated_at],\n    )\n  } catch (error) {\n    throw error\n  } finally {\n    connection.release()\n  }\n}\n\n/**\n * Gets file info from database\n *\n * @param sha256 SHA256 of the file\n */\nasync function getFileInfo(sha256: string): Promise<DBFileInfo> {\n  const connection = await pool.getConnection()\n\n  try {\n    const [rows] = await connection.query<RowDataPacket[]>(`SELECT * FROM file WHERE sha256 = ?`, [sha256])\n\n    // Check if a row was returned and then return it\n    if (Array.isArray(rows) && rows.length > 0) {\n      return rows[0] as DBFileInfo\n    } 
else {\n      throw new Error('No file with this sha256 exists in the database')\n    }\n  } catch (error) {\n    throw error\n  } finally {\n    connection.release()\n  }\n}\n\n/**\n * Checks that is file with given sha256 is uploaded\n *\n * @param sha256 SHA256 of the file\n */\nasync function isSha256Uploaded(sha256: string): Promise<boolean> {\n  sha256 = sha256.toLowerCase()\n  const connection = await pool.getConnection()\n\n  try {\n    const [rows] = await connection.query(`SELECT * FROM file WHERE sha256 = ?`, [sha256])\n\n    // Check if rows is an array and then check its length\n    if (Array.isArray(rows)) {\n      return rows.length > 0\n    } else {\n      throw new Error('Unexpected query result format')\n    }\n  } catch (error) {\n    return false\n  } finally {\n    // Don't forget to release the connection when you're done!\n    connection.release()\n  }\n}\n\n/**\n * Removes file and directory\n *\n * @param filePath Path to file\n * @param directoryPath Path to directory\n */\nfunction removeFileAndDirectory(filePath: string, directoryPath: string): void {\n  fs.rmSync(filePath, {\n    force: true,\n  })\n\n  fs.rmSync(directoryPath, {\n    recursive: true,\n    force: true,\n  })\n}\n\n/**\n * Validate the uploaded file\n *\n * @param file File to be validated\n * @throws Will throw an error if the file or its properties are not valid\n */\nfunction assertValidFile(file: Express.Multer.File | undefined): asserts file is Express.Multer.File {\n  if (!file) {\n    throw new Error('File is not uploaded')\n  }\n\n  if (!file.path) {\n    throw new Error('File path is not defined')\n  }\n\n  if (!file.mimetype) {\n    throw new Error('File mime type is not defined')\n  }\n\n  if (!file.size) {\n    throw new Error('File size is not defined')\n  }\n}\n\n/**\n * Handle file upload and storage\n *\n * @param filePath Path to the file\n * @param targetFilePath Target path of the file\n * @param targetDirectoryPath Target directory of the file\n * 
@param sha256 SHA256 of the file\n * @param file File to be uploaded\n * @returns fileInfo Information about the file in the database\n * @throws Will throw an error if the storage adding fails\n */\nasync function handleFileUpload(\n  filePath: string,\n  targetFilePath: string,\n  targetDirectoryPath: string,\n  sha256: string,\n  file: Express.Multer.File,\n): Promise<DBFileInfo> {\n  let fileInfo: DBFileInfo\n  const isUploaded = await isSha256Uploaded(sha256)\n\n  if (isUploaded) {\n    fileInfo = await getFileInfo(sha256)\n  } else {\n    if (!tonstorage) {\n      throw new Error('Ton Storage is not initialized')\n    }\n\n    removeFileAndDirectory(targetFilePath, targetDirectoryPath)\n    fs.mkdirSync(targetDirectoryPath, { recursive: true })\n    fs.renameSync(filePath, targetFilePath)\n    const reference = await uploadToStorage(targetFilePath, sha256, false)\n    assertReference(reference)\n    fileInfo = {\n      reference,\n      status: FileStatus.New,\n      mime_type: file.mimetype,\n      size: file.size,\n      sha256,\n      created_at: new Date(),\n      updated_at: new Date(),\n    }\n    await insertFileInfo(fileInfo)\n  }\n\n  return fileInfo\n}\n\n/**\n * Checks that path is exists\n *\n * @param path Path to check\n * @param message Message to be thrown if path does not exist\n */\nfunction checkPathExists(path: string, message: string): void {\n  if (!fs.existsSync(path)) {\n    throw new Error(`Path \"${path}\" does not exist. 
Message: ${message}`)\n  }\n}\n\n/**\n * Removes the uploaded file at the provided filePath.\n *\n * @async\n * @param filePath Path to the file that should be removed.\n * @throws Will throw an error if the removal operation fails.\n */\nasync function removeUploadedFile(filePath: string): Promise<void> {\n  try {\n    if (filePath && fs.existsSync(filePath)) {\n      fs.unlinkSync(filePath)\n    }\n  } catch (e) {\n    /* empty */\n  }\n}\n\n/**\n * Sets the permissions of a directory and a file to 0755.\n *\n * @param reference Reference of the file\n */\nfunction setPermissions(reference: string): void {\n  const filePath = getReferencePath(reference)\n  try {\n    fs.chmodSync(path.dirname(filePath), 0o755)\n    fs.chmodSync(filePath, 0o755)\n  } catch (error) {\n    /* empty */\n  }\n}\n\n/**\n * Uploads file, upload it to the storage, insert info into database and return the file info\n *\n * @param req Request\n * @param res Response\n * @param next Next function\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  let filePath = ''\n\n  try {\n    const rootPath = process.env.FILES_ROOT_PATH || __dirname\n    checkPathExists(rootPath, 'root path')\n\n    const file = req.file\n    assertValidFile(file)\n\n    filePath = toAbsolutePath(rootPath, file.path)\n    checkPathExists(filePath, 'file path')\n\n    const sha256 = await calculateSHA256(filePath)\n    const targetDirectoryPath = toAbsolutePath(rootPath, 'blob', sha256)\n    const targetFilePath = toAbsolutePath(targetDirectoryPath, 'blob')\n\n    const fileInfo = await handleFileUpload(filePath, targetFilePath, targetDirectoryPath, sha256, file)\n    setPermissions(fileInfo.reference)\n    removeFileAndDirectory(targetFilePath, targetDirectoryPath)\n\n    res.json({\n      status: 'ok',\n      data: {\n        reference: fileInfo.reference,\n        mime_type: fileInfo.mime_type,\n        sha256: fileInfo.sha256,\n        size: fileInfo.size,\n      },\n  
  })\n  } catch (e) {\n    next(e)\n  } finally {\n    await removeUploadedFile(filePath)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/blob/utils.ts",
    "content": "import { assertFiles, Directory, File } from '@fairjournal/file-system'\nimport { assertJson, bytesToString } from '../../../utils'\nimport { extractArticleText, getContentByReference } from '../../../fs'\n\n/**\n * Max length of the short article\n */\nexport const SHORT_ARTICLE_LENGTH = 1000\n\n/**\n * Article index file name\n */\nexport const ARTICLE_INDEX_FILE_NAME = 'index-json'\n\n/**\n * Short version of article\n */\nexport interface ShortArticle {\n  /**\n   * Human-readable name of the article\n   */\n  slug: string\n\n  /**\n   * Short text of the article\n   */\n  shortText: string\n\n  /**\n   * Custom data for preview\n   */\n  previewData: unknown\n}\n\n/**\n * Full article\n */\nexport interface Article {\n  /**\n   * Human-readable name of the article\n   */\n  slug: string\n\n  /**\n   * Full json object of the article\n   */\n  data: unknown\n\n  /**\n   * Custom data for preview\n   */\n  preview: unknown\n}\n\n/**\n * Response for `get-articles` action\n */\nexport interface ArticlesResponse {\n  /**\n   * Status of the response\n   */\n  status: string\n\n  /**\n   * User address\n   */\n  userAddress: string\n\n  /**\n   * Articles\n   */\n  articles: ShortArticle[]\n}\n\n/**\n * Response for `get-article` action\n */\nexport interface ArticleResponse {\n  /**\n   * Status of the response\n   */\n  status: string\n\n  /**\n   * User address\n   */\n  userAddress: string\n\n  /**\n   * Article\n   */\n  article: Article\n\n  /**\n   * Reference to the article\n   */\n  reference: string\n\n  /**\n   * Error message\n   */\n  message?: string\n}\n\n/**\n * Response for `get-path-info` action\n */\nexport interface PathInfoResponse {\n  /**\n   * Status of the response\n   */\n  status: string\n\n  /**\n   * User address\n   */\n  userAddress: string\n\n  /**\n   * Path\n   */\n  path: string\n\n  /**\n   * Directory or file\n   */\n  data: Directory | File\n}\n\n/**\n * Convert directory to short article\n *\n * @param 
directory Directory\n */\nexport async function directoryToShortArticle(directory: Directory): Promise<ShortArticle> {\n  assertFiles(directory.files)\n  const file = directory.files.find(file => file.name === ARTICLE_INDEX_FILE_NAME)\n\n  if (!file) {\n    throw new Error(`Article index file not found. In \"${directory.name}\"`)\n  }\n\n  const indexContent = bytesToString(await getContentByReference(file.hash))\n  assertJson(indexContent)\n  const indexObject = JSON.parse(indexContent) as Article\n  const shortText = extractArticleText(indexObject, SHORT_ARTICLE_LENGTH)\n\n  return {\n    slug: directory.name.toLowerCase(),\n    shortText,\n    previewData: indexObject.preview,\n  }\n}\n\n/**\n * Check if directory is article directory\n *\n * @param directory Directory\n */\nexport function isArticleDirectory(directory: Directory): boolean {\n  assertFiles(directory.files)\n\n  return Boolean(directory.files.find(file => file.name === ARTICLE_INDEX_FILE_NAME))\n}\n\n/**\n * Convert directories to short articles\n *\n * @param directories Directories\n */\nexport async function directoriesToShortArticles(directories: Directory[]): Promise<ShortArticle[]> {\n  const articles: ShortArticle[] = []\n  const filteredDirectories = directories.filter(isArticleDirectory)\n  for (const directory of filteredDirectories) {\n    try {\n      articles.push(await directoryToShortArticle(directory))\n    } catch (e) {\n      /* empty */\n    }\n  }\n\n  return articles\n}\n\n/**\n * Convert directory to article\n *\n * @param directory Directory\n */\nexport async function directoryToArticle(directory: Directory): Promise<Article> {\n  if (!isArticleDirectory(directory)) {\n    throw new Error(`Directory \"${directory.name}\" is not article directory`)\n  }\n\n  assertFiles(directory.files)\n  const file = directory.files.find(file => file.name === ARTICLE_INDEX_FILE_NAME)\n\n  if (!file) {\n    throw new Error(`Article index file not found. 
In \"${directory.name}\"`)\n  }\n\n  const indexContent = bytesToString(await getContentByReference(file.hash))\n  assertJson(indexContent)\n  const article = JSON.parse(indexContent) as Article\n\n  return {\n    slug: directory.name.toLowerCase(),\n    data: article.data,\n    preview: article.preview,\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/const.ts",
    "content": "/**\n * Default directory where all files should be stored\n */\nexport const DEFAULT_DIRECTORY = 'articles'\n\n/**\n * Project name\n */\nexport const PROJECT_NAME = 'fairjournal'\n\n/**\n * Maximum size of the blob in bytes\n */\nexport const MAX_BLOB_SIZE = 1024 * 1024 * 10\n"
  },
  {
    "path": "src/controllers/file-system/index.ts",
    "content": "import express from 'express'\nimport userRouter from './user'\nimport blobRouter from './blob'\nimport updateRouter from './update'\nimport appRouter from './app'\n\nconst router = express.Router()\nrouter.use('/user', userRouter)\nrouter.use('/blob', blobRouter)\nrouter.use('/update', updateRouter)\nrouter.use('/app', appRouter)\n\nexport default router\n"
  },
  {
    "path": "src/controllers/file-system/types.ts",
    "content": "/**\n * File status in database\n */\nexport enum FileStatus {\n  /**\n   * File is new, just uploaded\n   */\n  New = 0,\n\n  /**\n   * File is used in some article\n   */\n  Used = 1,\n}\n"
  },
  {
    "path": "src/controllers/file-system/update/apply-action.ts",
    "content": "import { Request, Response, NextFunction } from 'express'\nimport { ActionType, AddFileActionData, UpdateDataSigned } from '@fairjournal/file-system'\nimport { fileSystem, tonstorage } from '../../../app'\nimport { assertObject, assertReference, getPathParts } from '../../../utils'\nimport { DEFAULT_DIRECTORY } from '../const'\nimport { assertUpdateDataSigned } from '@fairjournal/file-system'\nimport pool from '../../../db'\nimport { OkPacket } from 'mysql2'\nimport { isReferenceExists } from '../../../fs'\nimport { FileStatus } from '../types'\n\n/**\n * Request body\n */\nexport interface ApplyBody {\n  /**\n   * Update data\n   */\n  update: UpdateDataSigned\n}\n\n/**\n * Insert update to db for backup\n *\n * @param update Update data\n *\n * @returns ID of the inserted row\n */\nasync function insertUpdate(update: UpdateDataSigned): Promise<number> {\n  const query = `\n      INSERT INTO fs_update(public_key, update_id, \\`update\\`)\n      VALUES (?, ?, ?)\n  `\n\n  // Execute the query\n  const results = (\n    await pool.execute(query, [update.userAddress.toLowerCase(), update.id, JSON.stringify(update)])\n  )[0] as OkPacket\n\n  return results.insertId\n}\n\n/**\n * Validate update for the gateway\n *\n * @param update Update data\n */\nasync function validateUpdate(update: UpdateDataSigned): Promise<string[]> {\n  const references: string[] = []\n  for (const action of update.actions) {\n    if (action.actionType === ActionType.addDirectory) {\n      // commented because user should add profile file. define it here or allow full control\n      // const data = action.actionData as AddDirectoryActionData\n      // if (!(data.path === `/${DEFAULT_DIRECTORY}` || data.path.startsWith(`/${DEFAULT_DIRECTORY}/`))) {\n      //   throw new Error(`Invalid path: \"${data.path}\". 
All files should be inside \"/articles\" folder`)\n      // }\n    } else if (action.actionType === ActionType.addFile) {\n      const data = action.actionData as AddFileActionData\n      references.push(await validateAndGetAddFileReference(data))\n    } else if (action.actionType === ActionType.addUser) {\n      // skip it\n    } else if (action.actionType === ActionType.removeDirectory) {\n      // skip it\n    } else if (action.actionType === ActionType.removeFile) {\n      // skip it\n    } else {\n      throw new Error(`Unknown action type: \"${action.actionType}\"`)\n    }\n  }\n\n  return references\n}\n\n/**\n * Updates file status in database\n *\n * @param reference Reference of the file\n * @param status New status of the file\n */\nasync function updateFileStatus(reference: string, status: FileStatus): Promise<void> {\n  const connection = await pool.getConnection()\n\n  try {\n    await connection.query(`UPDATE file SET status = ?, updated_at = ? WHERE reference = ?`, [\n      status,\n      new Date(),\n      reference,\n    ])\n  } catch (error) {\n    throw error\n  } finally {\n    connection.release()\n  }\n}\n\n/**\n * Checks that update is correct and returns the references of the file\n *\n * @param data Update data\n */\nasync function validateAndGetAddFileReference(data: AddFileActionData): Promise<string> {\n  const reference = data.hash.toLowerCase()\n  assertReference(reference)\n\n  if (!(await isReferenceExists(reference))) {\n    throw new Error(`Reference \"${reference}\" not found`)\n  }\n\n  const parts = getPathParts(data.path)\n\n  if (!(data.path.startsWith(`/${DEFAULT_DIRECTORY}/`) || parts.length < 3 || parts[0] !== DEFAULT_DIRECTORY)) {\n    throw new Error(`Invalid path: \"${data.path}\". 
All files should be inside \"/articles/NAMEOFARTICLE/\" folder`)\n  }\n\n  return reference\n}\n\n/**\n * Publish all files from the update\n *\n * @param update Update data\n */\nasync function publishAllFiles(update: UpdateDataSigned): Promise<string[]> {\n  const references: string[] = []\n  for (const action of update.actions) {\n    if (action.actionType === ActionType.addFile) {\n      const data = action.actionData as AddFileActionData\n      references.push(await validateAndGetAddFileReference(data))\n    }\n  }\n\n  for (const reference of references) {\n    await updateFileStatus(reference, FileStatus.Used)\n    await tonstorage.uploadResume(reference)\n  }\n\n  return references\n}\n\n/**\n * Apply update action to the file system\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  try {\n    const { update } = req.body as ApplyBody\n    assertObject(update)\n    assertUpdateDataSigned(update)\n    await validateUpdate(update)\n    fileSystem.addUpdate(update)\n    await insertUpdate(update)\n    await publishAllFiles(update)\n\n    res.json({\n      status: 'ok',\n    })\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/update/index.ts",
    "content": "import express from 'express'\nimport applyAction from './apply-action'\n\nconst router = express.Router()\nrouter.post('/apply', applyAction)\n\nexport default router\n"
  },
  {
    "path": "src/controllers/file-system/user/get-update-id-action.ts",
    "content": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { fileSystem } from '../../../app'\n\n/**\n * Response of the get update id action\n */\nexport interface GetUpdateIdResponse {\n  /**\n   * Status of the request\n   */\n  status: string\n\n  /**\n   * Address of the user\n   */\n  address: string\n\n  /**\n   * Update id\n   */\n  updateId: number\n}\n\n/**\n * Gets current user's update id\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  try {\n    const { address } = req.query\n    assertAddress(address)\n\n    const addressLowerCased = address.toLowerCase()\n    const updateId = fileSystem.getUpdateId(addressLowerCased)\n    const data: GetUpdateIdResponse = {\n      status: 'ok',\n      address: addressLowerCased,\n      updateId,\n    }\n\n    res.json(data)\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/user/index.ts",
    "content": "import express from 'express'\nimport infoAction from './info-action'\nimport getUpdateIdAction from './get-update-id-action'\n\nconst router = express.Router()\nrouter.get('/info', infoAction)\nrouter.get('/get-update-id', getUpdateIdAction)\n\nexport default router\n"
  },
  {
    "path": "src/controllers/file-system/user/info-action.ts",
    "content": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { fileSystem } from '../../../app'\n\n/**\n * Check if user exists in the file system\n */\nexport default async (req: Request, res: Response, next: NextFunction): Promise<void> => {\n  try {\n    const { address } = req.query\n    assertAddress(address)\n\n    const isUserExists = fileSystem.isUserExists(address.toLowerCase())\n\n    res.json({\n      status: 'ok',\n      address: address.toLowerCase(),\n      isUserExists,\n    })\n  } catch (e) {\n    next(e)\n  }\n}\n"
  },
  {
    "path": "src/controllers/file-system/utils.ts",
    "content": "import { fileSystem, tonstorage } from '../../app'\nimport { assertString, base64ToHex, extractHash } from '../../utils'\nimport tmp from 'tmp'\nimport fs from 'fs'\nimport { ReferencedItem } from '@fairjournal/file-system/dist/src/file-system/interfaces/referenced-item'\nimport { File, Directory } from '@fairjournal/file-system'\nimport path from 'path'\nimport { getReferencePath } from '../../fs'\nimport { Pool } from 'mysql2/promise'\nimport { RowDataPacket } from 'mysql2'\n\n/**\n * Settings key that available in the DB\n */\nexport enum SettingsKey {\n  /**\n   * File system state reference\n   */\n  FS_STATE_REFERENCE = 'fs_state_reference',\n}\n\n/**\n * Asserts that user exists in the file system\n *\n * @param data The data to assert\n */\nexport function assertUserExists(data: unknown): asserts data is string {\n  const address = data as string\n\n  if (!fileSystem.isUserExists(address)) {\n    throw new Error(`User not found: \"${address}\"`)\n  }\n}\n\n/**\n * Asserts that the data is a string path\n *\n * @param data The data to assert\n */\nexport function assertPath(data: unknown): asserts data is string {\n  assertString(data)\n\n  if (!data) {\n    throw new Error('Path is required')\n  }\n}\n\n/**\n * Get path info\n *\n * @param address User address\n * @param path Path\n */\nexport function getPathInfo(address: string, path: string): File | Directory {\n  try {\n    return fileSystem.getPathInfo(`/${address}${path}`)\n  } catch (e) {\n    throw new Error(`Can't get info about the path: ${(e as Error).message}`)\n  }\n}\n\n/**\n * Upload data to storage\n *\n * @param path Path to the file\n * @param message Message to show in case of error\n * @param isUpload Should the file be uploaded to the storage\n */\nexport async function uploadToStorage(path: string, message: string, isUpload: boolean): Promise<string> {\n  const response = await tonstorage.create(path, {\n    // copy file to storage. 
Files should be removed later if they are not used\n    copy: true,\n    // description of the file\n    desc: '',\n    // do not upload file while article is not published\n    upload: isUpload,\n  })\n  let reference = ''\n\n  if (response?.ok) {\n    reference = base64ToHex(response.result.torrent.hash).toLowerCase()\n  } else {\n    if (response?.error?.includes('duplicate hash')) {\n      reference = extractHash(response?.error).toLowerCase()\n    } else {\n      throw new Error(`Error on Ton Storage adding (${message}): ${response?.error || 'unknown error'}`)\n    }\n  }\n\n  return reference\n}\n\n/**\n * Method for uploading data to a storage\n *\n * @param data Data to be uploaded\n */\nexport async function uploadData(data: string): Promise<ReferencedItem> {\n  const tempDir = tmp.dirSync()\n  const tempFilePath = path.join(tempDir.name, 'blob')\n  fs.writeFileSync(tempFilePath, data)\n  const reference = await uploadToStorage(tempFilePath, tempFilePath, true)\n  fs.rmSync(tempFilePath)\n  tempDir.removeCallback()\n\n  return {\n    reference,\n  }\n}\n\n/**\n * Downloads data from storage directory by reference\n *\n * @param reference Reference to the file\n */\nexport async function downloadData(reference: string): Promise<string> {\n  const path = getReferencePath(reference)\n\n  return fs.readFileSync(path, 'utf-8')\n}\n\n/**\n * Upserts settings\n *\n * @param pool Database pool\n * @param key Key\n * @param value Value\n */\nexport async function upsertSettings(pool: Pool, key: string, value: string): Promise<void> {\n  const query = `\n    INSERT INTO settings (\\`key\\`, value, created_at, updated_at)\n    VALUES (?, ?, NOW(), NOW())\n      ON DUPLICATE KEY UPDATE\n                         value = VALUES(value),\n                         updated_at = NOW();\n  `\n  await pool.execute(query, [key, value])\n}\n\n/**\n * Gets setting by key\n *\n * @param pool Database pool\n * @param key Key\n */\nexport async function getSetting(pool: Pool, key: 
string): Promise<string> {\n  const query = 'SELECT value FROM settings WHERE `key` = ?'\n  const [rows] = await pool.execute(query, [key])\n\n  const rowData = rows as RowDataPacket[]\n\n  if (rowData.length === 0) {\n    throw new Error(`No setting found for key: ${key}`)\n  }\n\n  return rowData[0].value as string\n}\n"
  },
  {
    "path": "src/db.ts",
    "content": "import mysql from 'mysql2/promise'\nimport dotenv from 'dotenv'\n\ndotenv.config()\n\nconst simpleConfig = {\n  host: process.env.DB_HOST,\n  port: Number(process.env.DB_PORT),\n  user: process.env.DB_USER,\n  password: process.env.DB_PASSWORD,\n  database: process.env.DB_NAME,\n  connectionLimit: 10,\n}\nconst socketConfig = {\n  user: process.env.DB_USER,\n  password: process.env.DB_PASSWORD,\n  database: process.env.DB_NAME,\n  connectionLimit: 10,\n  socketPath: process.env.DB_SOCKET_PATH,\n}\n\nconst config = process.env.DB_SOCKET_PATH ? socketConfig : simpleConfig\n\nexport const pool = mysql.createPool(config)\n\nexport default pool\n"
  },
  {
    "path": "src/fs.ts",
    "content": "import { assertUpdateDataSignedArray, FileSystem } from '@fairjournal/file-system'\nimport { PROJECT_NAME } from './controllers/file-system/const'\nimport { Pool, RowDataPacket } from 'mysql2/promise'\nimport pool from './db'\nimport { assertString, isString } from './utils'\nimport * as fs from 'fs'\nimport path from 'path'\n\n/**\n * Function for processing batches of data\n */\ntype AsyncFunction = (records: any[]) => Promise<void>\n\n/**\n * Gets batches of data from DB and processes them\n *\n * @param pool DB pool\n * @param asyncFn Function to process the data\n */\nasync function processInBatches(pool: Pool, asyncFn: AsyncFunction): Promise<void> {\n  const limit = 1000\n\n  // Get the total count of records\n  const [countResult] = await pool.execute('SELECT COUNT(*) AS count FROM fs_update')\n  const totalRecords = Number(((countResult as RowDataPacket[])[0] as any).count)\n\n  // Calculate the total number of iterations needed (each iteration fetches 'limit' records)\n  const iterations = Math.ceil(totalRecords / limit)\n\n  for (let i = 0; i < iterations; i++) {\n    const offset = i * limit\n\n    const [rows] = (await pool.execute(\n      `\n  SELECT * FROM fs_update\n  LIMIT ?\n  OFFSET ?\n`,\n      [limit.toString(), offset.toString()],\n    )) as [RowDataPacket[], any]\n\n    await asyncFn(rows)\n  }\n}\n\n/**\n * Initialize file system using DB\n */\nexport function initFs(): FileSystem {\n  return new FileSystem({\n    version: '0.0.1',\n    projectName: PROJECT_NAME,\n    projectDescription: 'A creative platform owned by people.',\n    checkSignature: 'ton',\n  })\n}\n\n/**\n * Sync file system with DB\n *\n * @param fs File system\n */\nexport async function syncFs(fs: FileSystem): Promise<void> {\n  if (!fs) {\n    throw new Error('File system is not initialized')\n  }\n\n  await processInBatches(pool, async data => {\n    const updates = data.map(item => JSON.parse(item.update))\n    assertUpdateDataSignedArray(updates)\n    
updates.forEach(update => fs.addUpdate(update))\n  })\n}\n\n/**\n * Gets reference path on the local file system\n *\n * @param reference Reference\n */\nexport function getReferencePath(reference: string): string {\n  const storagePath = process.env.TON_STORAGE_DATABASE_PATH\n  assertString(storagePath)\n\n  if (!storagePath) {\n    throw new Error('Storage path is not defined')\n  }\n\n  return path.resolve(storagePath, 'torrent/torrent-files', reference.toUpperCase(), 'blob')\n}\n\n/**\n * Checks if reference exists\n *\n * @param reference\n */\nexport async function isReferenceExists(reference: string): Promise<boolean> {\n  return fs.existsSync(getReferencePath(reference))\n}\n\n/**\n * Gets content by reference\n *\n * @param reference Reference\n */\nexport async function getContentByReference(reference: string): Promise<Uint8Array> {\n  const filePath = getReferencePath(reference)\n\n  if (!fs.existsSync(filePath)) {\n    throw new Error(`File does not exist`)\n  }\n\n  return fs.readFileSync(filePath)\n}\n\n/**\n * Extracts article text from JSON object\n *\n * @param jsonObject JSON object\n * @param symbols Number of symbols to extract\n */\nexport function extractArticleText(jsonObject: unknown, symbols: number): string {\n  // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n  // @ts-ignore\n  const blocks = jsonObject.data.blocks as { type: string; data: { text: string } }[]\n  const paragraphs = blocks\n    .filter(block => block?.type === 'paragraph')\n    .filter(block => isString(block?.data?.text))\n    .map(block => {\n      const text = block.data.text\n\n      return text.replace(/<\\/?[^>]+(>|$)/g, '')\n    })\n    .join(' ')\n\n  return paragraphs.slice(0, symbols)\n}\n"
  },
  {
    "path": "src/index.ts",
    "content": "import app, { clearFileSystem, syncFileSystem } from './app'\n\n// Start server\nconst PORT = process.env.PORT || 5000\n\nasync function start(): Promise<void> {\n  clearFileSystem()\n  await syncFileSystem()\n  // eslint-disable-next-line no-console\n  app.listen(PORT, () => console.log(`Server started on port ${PORT}`))\n}\n\nstart().then()\n"
  },
  {
    "path": "src/models/Article.ts",
    "content": "interface Article {\n  id: number\n  hash: string\n  content: string\n  authorId: number\n}\n\nexport default Article\n"
  },
  {
    "path": "src/models/User.ts",
    "content": "interface User {\n  id: number\n  wallet: number\n  avatar: string\n  name: string\n  description: string\n  articles: number[]\n}\n\nexport default User\n"
  },
  {
    "path": "src/routes.ts",
    "content": "import { Router } from 'express'\nimport {\n  deleteUser,\n  getUserById,\n  updateUser,\n  getArticlesByUserId,\n  authorizeByWallet,\n} from './controllers/UserController'\nimport {\n  createArticle,\n  deleteArticle,\n  getAllArticles,\n  getArticleById,\n  updateArticle,\n} from './controllers/ArticleController'\nimport Image from './controllers/ImageController'\nimport multer from 'multer'\nimport path from 'path'\n// Configure storage\nconst storage = multer.diskStorage({\n  destination: (req, file, cb) => {\n    cb(null, 'avatars/')\n  },\n  filename: (req, file, cb) => {\n    // Use the originalname property to get the original extension\n    cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname))\n  },\n})\n\nconst upload = multer({ storage })\n\nconst router = Router()\n\n// User Routes\nrouter.get('/users/:id', getUserById)\nrouter.get('/users/:id/articles', getArticlesByUserId)\nrouter.post('/users/:id', upload.single('avatar'), updateUser)\nrouter.delete('/users/:id', deleteUser)\n\n// Article Routes\nrouter.get('/articles', getAllArticles)\nrouter.get('/articles/:id', getArticleById)\nrouter.post('/articles', createArticle)\nrouter.put('/articles/:id', updateArticle)\nrouter.delete('/articles/:id', deleteArticle)\n\n// Images Routes\nrouter.post('/image/upload', upload.single('image'), Image.upload)\n\n// Auth route\nrouter.post('/auth', authorizeByWallet)\n\nexport default router\n"
  },
  {
    "path": "src/ton-utils.ts",
    "content": "/**\n * Daemon response\n */\nexport interface DaemonResponse {\n  ok: boolean\n  result: TorrentFull\n  code: number\n}\n\n/**\n * Torrent full info\n */\nexport interface TorrentFull {\n  '@type': string\n  torrent: Torrent\n  files: FileInfo[]\n}\n\n/**\n * Torrent info\n */\nexport interface Torrent {\n  '@type': string\n  hash: string\n  flags: number\n  total_size: string\n  description: string\n  files_count: string\n  included_size: string\n  dir_name: string\n  downloaded_size: string\n  added_at: number\n  root_dir: string\n  active_download: boolean\n  active_upload: boolean\n  completed: boolean\n  download_speed: number\n  upload_speed: number\n  fatal_error: string\n}\n\n/**\n * File info\n */\nexport interface FileInfo {\n  '@type': string\n  name: string\n  size: string\n  priority: number\n  downloaded_size: string\n}\n\n/**\n * Prefix for error messages\n */\nexport const errorPrefix = 'Daemon response does not contain'\n\n/**\n * Asserts that the value is defined\n *\n * @param property Property to check\n * @param name Name of the property\n */\nexport function assertIsDefined<T>(property: T | undefined | null, name: string): asserts property is NonNullable<T> {\n  if (property === undefined || property === null) {\n    throw new Error(`${errorPrefix} ${name}`)\n  }\n}\n\n/**\n * Asserts that the value is a number\n *\n * @param value Value to check\n * @param name Name of the value\n */\nexport function assertIsNumber(value: unknown, name: string): asserts value is number {\n  if (typeof value !== 'number') {\n    throw new Error(`${errorPrefix} ${name} of type number`)\n  }\n}\n\n/**\n * Asserts that the value is a boolean\n *\n * @param value Value to check\n * @param name Name of the value\n */\nexport function assertIsBoolean(value: unknown, name: string): asserts value is boolean {\n  if (typeof value !== 'boolean') {\n    throw new Error(`${errorPrefix} ${name} of type boolean`)\n  }\n}\n\n/**\n * Asserts that the data is 
a valid FileInfo\n *\n * @param fileInfo Data to check\n */\nexport function assertFileInfo(fileInfo: FileInfo): asserts fileInfo is FileInfo {\n  assertIsDefined(fileInfo['@type'], 'file @type')\n  assertIsDefined(fileInfo.name, 'file name')\n  assertIsDefined(fileInfo.size, 'file size')\n  assertIsDefined(fileInfo.downloaded_size, 'file downloaded_size')\n  assertIsDefined(fileInfo.priority, 'file priority')\n}\n\n/**\n * Asserts that the data is a valid Torrent\n *\n * @param torrent Data to check\n */\nexport function assertTorrent(torrent: Torrent): asserts torrent is Torrent {\n  assertIsDefined(torrent['@type'], 'torrent @type')\n  assertIsDefined(torrent.hash, 'torrent hash')\n  assertIsNumber(torrent.flags, 'torrent flags')\n  assertIsDefined(torrent.total_size, 'torrent total_size')\n  assertIsDefined(torrent.files_count, 'torrent files_count')\n  assertIsDefined(torrent.included_size, 'torrent included_size')\n  assertIsDefined(torrent.downloaded_size, 'torrent downloaded_size')\n  assertIsDefined(torrent.added_at, 'torrent added_at')\n  assertIsDefined(torrent.root_dir, 'torrent root_dir')\n  assertIsBoolean(torrent.active_download, 'torrent active_download')\n  assertIsBoolean(torrent.active_upload, 'torrent active_upload')\n  assertIsBoolean(torrent.completed, 'torrent completed')\n  assertIsNumber(torrent.download_speed, 'torrent download_speed')\n  assertIsNumber(torrent.upload_speed, 'torrent upload_speed')\n  assertIsDefined(torrent.fatal_error, 'torrent fatal_error')\n}\n\n/**\n * Asserts that the data is a valid DaemonResponse\n *\n * @param data Data to check\n */\nexport function assertDaemonResponse(data: DaemonResponse): asserts data is DaemonResponse {\n  assertIsDefined(data.ok, 'ok')\n  assertIsDefined(data.result, 'result')\n  assertIsNumber(data.code, 'code')\n\n  assertIsDefined(data.result.torrent, 'result.torrent')\n  assertIsDefined(data.result.files, 'result.files')\n\n  data.result.files.forEach(file => {\n    
assertFileInfo(file)\n  })\n\n  assertTorrent(data.result.torrent)\n}\n"
  },
  {
    "path": "src/utils.ts",
    "content": "import * as crypto from 'crypto'\nimport * as fs from 'fs'\nimport { promisify } from 'util'\nimport path from 'path'\n\nconst readFile = promisify(fs.read)\n\n/**\n * Length of a public key\n */\nexport const PUBLIC_KEY_LENGTH = 64\n\n/**\n * Reference of a file\n */\nexport const REFERENCE_LENGTH = 64\n\n/**\n * Max length of an article name\n */\nexport const MAX_ARTICLE_NAME_LENGTH = 64\n\n/**\n * Checks if the value is a string\n *\n * @param value Value to check\n */\nexport function isString(value: unknown): boolean {\n  return typeof value === 'string'\n}\n\n/**\n * Asserts that the data is a string\n *\n * @param data Data to check\n */\nexport function assertString(data: unknown): asserts data is string {\n  if (!isString(data)) {\n    throw new Error('Data is not a string')\n  }\n}\n\n/**\n * Asserts that the data length is equal to the specified length\n *\n * @param data Data to check\n * @param length Length to check\n */\nexport function assertStringLength(data: unknown, length: number): asserts data is string {\n  assertString(data)\n\n  if (data.length !== length) {\n    throw new Error(`Data length is not equal to ${length}`)\n  }\n}\n\n/**\n * Asserts that the data is a public key\n *\n * @param data Data to check\n */\nexport function assertAddress(data: unknown): asserts data is string {\n  assertStringLength(data, PUBLIC_KEY_LENGTH)\n  assertHex(data)\n}\n\n/**\n * Checks if the value is a hex string\n *\n * @param value Value to check\n */\nexport function isHexString(value: string): boolean {\n  const hexRegEx = /^[0-9A-Fa-f]*$/\n\n  return hexRegEx.test(value)\n}\n\n/**\n * Asserts that the data is a hex string\n *\n * @param data Data to check\n */\nexport function assertHex(data: unknown): asserts data is string {\n  assertString(data)\n\n  if (!isHexString(data)) {\n    throw new Error('Data is not a hex string')\n  }\n}\n\n/**\n * Asserts that the data is a correct reference\n *\n * @param data Data to check\n */\nexport 
function assertReference(data: unknown): asserts data is string {\n  assertStringLength(data, REFERENCE_LENGTH)\n  assertHex(data)\n}\n\n/**\n * Gets path parts\n *\n * @param path Path to get parts from\n */\nexport function getPathParts(path: string): string[] {\n  return path.split('/').filter(Boolean)\n}\n\n/**\n * Asserts that the data is a correct article name\n *\n * @param data Data to check\n */\nexport function assertArticleName(data: unknown): asserts data is string {\n  assertString(data)\n\n  const regex = /^[a-z0-9-]+$/i\n\n  if (data.length === 0 || data.length > MAX_ARTICLE_NAME_LENGTH || !regex.test(data)) {\n    throw new Error('Article name is not valid')\n  }\n}\n\n/**\n * Checks if the data is an object\n *\n * @param data Data to check\n */\nexport function isObject(data: unknown): data is Record<string, unknown> {\n  return typeof data === 'object' && !Array.isArray(data) && data !== null\n}\n\n/**\n * Asserts that the data is an object\n *\n * @param data Data to check\n * @param customError Custom error message\n */\nexport function assertObject(data: unknown, customError?: string): asserts data is Record<string, unknown> {\n  if (!isObject(data)) {\n    throw new Error(customError ? 
customError : 'Data is not an object')\n  }\n}\n\n/**\n * Bytes to string\n *\n * @param data Bytes to convert\n */\nexport function bytesToString(data: Uint8Array): string {\n  const decoder = new TextDecoder()\n\n  return decoder.decode(data)\n}\n\n/**\n * String to bytes\n *\n * @param data String to convert\n */\nexport function stringToBytes(data: string): Uint8Array {\n  const encoder = new TextEncoder()\n\n  return encoder.encode(data)\n}\n\n/**\n * Asserts that the data is a JSON string\n *\n * @param data Data to check\n */\nexport function assertJson(data: unknown): asserts data is string {\n  if (typeof data !== 'string') {\n    throw new Error('JSON assert: data is not a string')\n  }\n\n  try {\n    JSON.parse(data)\n  } catch (e) {\n    throw new Error(`JSON assert: data is not a valid JSON: ${(e as Error).message}`)\n  }\n}\n\n/**\n * Calculates SHA256 of a file\n *\n * @param filePath Path to the file\n */\nexport async function calculateSHA256(filePath: string): Promise<string> {\n  const hash = crypto.createHash('sha256')\n  const fd = fs.openSync(filePath, 'r')\n  const bufferSize = 8192 // 8KB at a time\n  const buffer = Buffer.alloc(bufferSize)\n\n  let bytesRead: number\n\n  do {\n    ;({ bytesRead } = await readFile(fd, buffer, 0, bufferSize, null))\n    hash.update(buffer.slice(0, bytesRead))\n  } while (bytesRead === bufferSize)\n\n  fs.closeSync(fd)\n\n  return hash.digest('hex').toLowerCase()\n}\n\n/**\n * Converts relative path to absolute\n *\n * @param paths Paths to convert\n */\nexport function toAbsolutePath(...paths: string[]): string {\n  return path.resolve(...paths)\n}\n\n/**\n * Delays the execution\n *\n * @param ms Delay in milliseconds\n */\nexport async function delay(ms: number): Promise<void> {\n  return new Promise(resolve => setTimeout(resolve, ms))\n}\n\n/**\n * Extracts a hash from a message\n *\n * @param message Message to extract hash from\n */\nexport function extractHash(message: string): string {\n  const 
hashRegex = /[A-Fa-f0-9]{64}/\n  const match = message.match(hashRegex)\n\n  if (match) {\n    return match[0]\n  } else {\n    throw new Error('No hash found in the message.')\n  }\n}\n\n/**\n * Converts base64 string to uppercase hex string\n */\nexport function base64ToHex(base64: string): string {\n  return Buffer.from(base64, 'base64').toString('hex').toUpperCase()\n}\n\n/**\n * Converts hex string to base64 string\n */\nexport function hexToBase64(hex: string): string {\n  return Buffer.from(hex, 'hex').toString('base64')\n}\n"
  },
  {
    "path": "startup.sh",
    "content": "#!/bin/sh\n\n# Create .env file\ncat > .env << EOF\n# Path to the root of the files\nFILES_ROOT_PATH=/app\n\n# Port of the application\nPORT=5000\n\nDB_SOCKET_PATH=/run/mysqld/mysqld2.sock\n\n# Database host\nDB_HOST=localhost\n\n# Database port\nDB_PORT=3306\n\n# Database username\nDB_USER=fjuser\n\n# Database password\nDB_PASSWORD=fjpassword\n\n# Database name\nDB_NAME=fair_journal\n\n# External web url for old files\nURL=http://localhost:5000/\n\n# Is show server logs\nSHOW_LOGS=true\n\n# Ton Storage CLI binary path\n# This path should reflect the path inside the Docker container\nTON_STORAGE_BIN_PATH=/app/ton/storage-daemon-cli-linux-arm64\n\n# Ton Storage host\nTON_STORAGE_HOST=localhost:5555\n\n# Ton Storage database path\n# This path should reflect the path inside the Docker container\nTON_STORAGE_DATABASE_PATH=/app/ton/storage-db\n\n# Ton Storage timeout\nTON_STORAGE_TIMEOUT=5000\n\n# Ton Storage wait attempts\nTON_STORAGE_WAIT_ATTEMPTS=3\n\n# Ton Storage check wait timeout\nTON_STORAGE_CHECK_WAIT_TIMEOUT=1000\n\nPUBLISH_FS_PASSWORD=345134t134g145gh145h54\nEOF\n\n/app/ton/storage-daemon-linux-arm64 -v 5 -C /app/ton/global.config.json -I localhost:3333 -p 5555 -D /app/ton/storage-db >/dev/null 2>&1 &\n/usr/bin/mysqld --user=mysql --socket=/run/mysqld/mysqld2.sock &\nsleep 5 &&\nmysql --socket=/run/mysqld/mysqld2.sock -uroot -e \"source ./migrations/db.sql\" &&\nmysql --socket=/run/mysqld/mysqld2.sock -uroot -e \"CREATE USER 'fjuser'@'localhost' IDENTIFIED BY 'fjpassword';\" &&\nmysql --socket=/run/mysqld/mysqld2.sock -uroot -e \"GRANT ALL ON fair_journal.* TO 'fjuser'@'localhost';\" &&\nnpx knex migrate:latest --env docker &&\nnpm run test\n"
  },
  {
    "path": "test/controllers/file-system/app.test.ts",
    "content": "import knex from 'knex'\nimport knexConfig from '../../../knexfile'\nimport { TonstorageCLI } from 'tonstorage-cli'\nimport app, { clearFileSystem, createTonStorageInstance, syncFileSystem } from '../../../src/app'\nimport { assertTree, createWallet, removeAllTonStorageFiles, tonStorageFilesList, uploadBytes } from '../../utils'\nimport pool from '../../../src/db'\nimport {\n  assertDirectories,\n  assertFiles,\n  createAddDirectoryAction,\n  createAddFileAction,\n  createAddUserAction,\n  personalSign,\n  Update,\n} from '@fairjournal/file-system'\nimport { PROJECT_NAME } from '../../../src/controllers/file-system/const'\nimport supertest from 'supertest'\nimport path from 'path'\nimport { stringToBytes } from '../../../src/utils'\nimport { downloadData, getSetting, SettingsKey, upsertSettings } from '../../../src/controllers/file-system/utils'\nimport { initFs } from '../../../src/fs'\nimport fs from 'fs'\n\nprocess.env.SHOW_LOGS = 'false'\n\nconst db = knex(process.env.DB_SOCKET_PATH ? 
knexConfig.docker : knexConfig.development)\ndescribe('App', () => {\n  let tonStorage: TonstorageCLI\n  beforeEach(async () => {\n    // Rollback the migration (if any)\n    await db.migrate.rollback()\n\n    // Run the migration\n    await db.migrate.latest()\n    clearFileSystem()\n    tonStorage = createTonStorageInstance()\n    await removeAllTonStorageFiles(tonStorage)\n    expect(await tonStorageFilesList(tonStorage)).toHaveLength(0)\n  })\n\n  afterEach(async () => {\n    // After each test, we can rollback the migration\n    await db.migrate.rollback()\n  })\n\n  afterAll(async () => {\n    // Close the database connection after all tests are done\n    await db.destroy()\n    await pool.end()\n    await removeAllTonStorageFiles(tonStorage)\n  })\n\n  it('should publish fs', async () => {\n    await syncFileSystem()\n    const supertestApp = supertest(app)\n\n    const files = [\n      {\n        originalName: 'file1.txt',\n        destinationName: 'file1-txt',\n      },\n      {\n        originalName: 'file2.txt',\n        destinationName: 'file2-txt',\n      },\n      {\n        originalName: 'img1.jpg',\n        destinationName: 'img1-jpg',\n      },\n    ]\n\n    const seeds = [\n      '4f3ab03c9b34be0a399e8b165350c705f1c74e1f980be66c7aba92fbe4d07fb8',\n      '235b19b79390d5a821b49fef63e63691c377d645f1d20862b42f6e13f37a1b5e',\n      '9036f25e16e153c6af6031a98e5087c627d86b4da9acbe63b5cfad096a218739',\n    ]\n    const users = await Promise.all(\n      Array.from({ length: 3 }, async (_, index) => {\n        const wallet = await createWallet(seeds[index])\n\n        return {\n          address: wallet.publicKey.toString('hex'),\n          personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n        }\n      }),\n    )\n\n    for (const user of users) {\n      const update = new Update(PROJECT_NAME, user.address, 1)\n      update.addAction(createAddUserAction(user.address))\n\n      for (let i = 0; i < 3; i++) {\n        const dir 
= `dir${i}`\n        update.addAction(createAddDirectoryAction(`/${dir}`))\n\n        for (const file of files) {\n          const filePath = path.join(__dirname, `../../data/${file.originalName}`)\n          const content = fs.readFileSync(filePath)\n          const hash = await uploadBytes(tonStorage, stringToBytes(content.toString()))\n\n          update.addAction(\n            createAddFileAction({\n              path: `/${dir}/${file.destinationName}`,\n              mimeType: 'text/plain',\n              size: content.length,\n              hash,\n            }),\n          )\n        }\n      }\n\n      update.setSignature(user.personalSign(update.getSignData()))\n      const response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n      expect(response.status).toBe(200)\n      expect(response.body).toStrictEqual({ status: 'ok' })\n    }\n\n    const response0 = await supertestApp.post('/v1/fs/app/publish').send({ password: 'any-password' })\n    expect(response0.status).toBe(500)\n    expect(response0.body).toStrictEqual({ status: 'error', message: 'Invalid password' })\n\n    const resultReference = '0371cb0e4f839c0e06fccbc5001b593fa9b25c3c23fac2cd7c4979d2efc64f7a'\n    await expect(getSetting(pool, SettingsKey.FS_STATE_REFERENCE)).rejects.toThrow(\n      `No setting found for key: ${SettingsKey.FS_STATE_REFERENCE}`,\n    )\n    const response1 = await supertestApp.post('/v1/fs/app/publish').send({ password: process.env.PUBLISH_FS_PASSWORD })\n    expect(response1.status).toBe(200)\n    expect(response1.body).toStrictEqual({ status: 'ok', reference: resultReference })\n    expect(await getSetting(pool, SettingsKey.FS_STATE_REFERENCE)).toBe(resultReference)\n\n    const mfs = initFs()\n    await mfs.download(resultReference, {\n      downloadData: async item => downloadData(item.reference),\n      withUpdates: true,\n    })\n\n    const exported = mfs.exportMeta()\n    expect(exported.users).toHaveLength(3)\n    
assertTree(exported.tree)\n\n    const rootDirectories = exported.tree.directory.directories\n    assertDirectories(rootDirectories)\n    expect(rootDirectories).toHaveLength(3)\n\n    for (const rootDirectory of rootDirectories) {\n      const subDirectories = rootDirectory.directories\n      expect(subDirectories).toHaveLength(3)\n      assertDirectories(subDirectories)\n\n      for (const subDirectory of subDirectories) {\n        const filesInSubDirectory = subDirectory.files\n        expect(filesInSubDirectory).toHaveLength(3)\n        assertFiles(filesInSubDirectory)\n      }\n    }\n  })\n\n  it('set and get settings', async () => {\n    const supertestApp = supertest(app)\n    let data0 = await supertestApp.get(`/v1/fs/app/get-settings`)\n    expect(data0.status).toBe(500)\n    expect(data0.body).toStrictEqual({\n      status: 'error',\n      message: `\"key\" is not set`,\n    })\n\n    data0 = await supertestApp.get(`/v1/fs/app/get-settings?key=${SettingsKey.FS_STATE_REFERENCE}`)\n    expect(data0.status).toBe(500)\n    expect(data0.body).toStrictEqual({\n      status: 'error',\n      message: `No setting found for key: ${SettingsKey.FS_STATE_REFERENCE}`,\n    })\n\n    const checkValue = 'Hello-world'\n    await upsertSettings(pool, SettingsKey.FS_STATE_REFERENCE, checkValue)\n    data0 = await supertestApp.get(`/v1/fs/app/get-settings?key=${SettingsKey.FS_STATE_REFERENCE}`)\n    expect(data0.status).toBe(200)\n    expect(data0.body).toStrictEqual({\n      status: 'ok',\n      value: checkValue,\n    })\n  })\n})\n"
  },
  {
    "path": "test/controllers/file-system/article.test.ts",
    "content": "// todo change managing of the file system to configure and call when needed\nprocess.env.SHOW_LOGS = 'false'\nimport { Article, ArticleResponse, ArticlesResponse } from '../../../src/controllers/file-system/blob/utils'\nimport knex from 'knex'\nimport knexConfig from '../../../knexfile'\nimport pool from '../../../src/db'\nimport supertest from 'supertest'\nimport app, { clearFileSystem, createTonStorageInstance, syncFileSystem } from '../../../src/app'\nimport {\n  createAddDirectoryAction,\n  createAddFileAction,\n  createAddUserAction,\n  createRemoveDirectoryAction,\n  personalSign,\n  Update,\n} from '@fairjournal/file-system'\nimport {\n  createWallet,\n  generateArticle,\n  getUpdatesCount,\n  removeAllTonStorageFiles,\n  tonStorageFilesList,\n  uploadBytes,\n} from '../../utils'\nimport { PROJECT_NAME } from '../../../src/controllers/file-system/const'\nimport { stringToBytes } from '../../../src/utils'\nimport { GetUpdateIdResponse } from '../../../src/controllers/file-system/user/get-update-id-action'\nimport { TonstorageCLI } from 'tonstorage-cli'\n\nconst db = knex(process.env.DB_SOCKET_PATH ? 
knexConfig.docker : knexConfig.development)\n\ndescribe('Article', () => {\n  let tonStorage: TonstorageCLI\n  beforeEach(async () => {\n    // Rollback the migration (if any)\n    await db.migrate.rollback()\n\n    // Run the migration\n    await db.migrate.latest()\n    clearFileSystem()\n    tonStorage = createTonStorageInstance()\n    await removeAllTonStorageFiles(tonStorage)\n    expect(await tonStorageFilesList(tonStorage)).toHaveLength(0)\n  })\n\n  afterEach(async () => {\n    // After each test, we can rollback the migration\n    await db.migrate.rollback()\n  })\n\n  afterAll(async () => {\n    // Close the database connection after all tests are done\n    await db.destroy()\n    await pool.end()\n    await removeAllTonStorageFiles(tonStorage)\n  })\n\n  it('create and get articles', async () => {\n    await syncFileSystem()\n    const supertestApp = supertest(app)\n\n    const authors = await Promise.all(\n      Array.from({ length: 3 }, async () => {\n        const wallet = await createWallet()\n\n        return {\n          address: wallet.publicKey.toString('hex'),\n          personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n          articles: [generateArticle(), generateArticle(), generateArticle()] as Article[],\n        }\n      }),\n    )\n\n    for (const author of authors) {\n      const update = new Update(PROJECT_NAME, author.address, 1)\n      update.addAction(createAddUserAction(author.address))\n      update.addAction(createAddDirectoryAction('/articles'))\n      update.setSignature(author.personalSign(update.getSignData()))\n      const response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n      expect(response.status).toBe(200)\n      expect(response.body).toStrictEqual({ status: 'ok' })\n\n      for (let articleIndex = 0; articleIndex < author.articles.length; articleIndex++) {\n        const article = author.articles[articleIndex]\n        const articleData = JSON.stringify(article)\n 
       const hash = await uploadBytes(tonStorage, stringToBytes(articleData))\n\n        const updatesInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))\n          .body as GetUpdateIdResponse\n        const update = new Update(PROJECT_NAME, author.address, updatesInfo.updateId + 1)\n        update.addAction(createAddDirectoryAction(`/articles/${article.slug}`))\n        update.addAction(\n          createAddFileAction({\n            path: `/articles/${article.slug}/index-json`,\n            mimeType: 'application/json',\n            size: articleData.length,\n            hash,\n          }),\n        )\n        update.setSignature(author.personalSign(update.getSignData()))\n        const response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n        expect(response.status).toBe(200)\n        expect(response.body).toStrictEqual({ status: 'ok' })\n      }\n    }\n\n    // 3*1 - registrations = 3, 3*3 - articles = 9, total 12\n    expect(await getUpdatesCount(db)).toEqual(12)\n\n    for (const author of authors) {\n      const articlesList = (await supertestApp.get(`/v1/fs/blob/get-articles?userAddress=${author.address}`))\n        .body as ArticlesResponse\n      expect(articlesList.status).toBe('ok')\n      expect(articlesList.userAddress).toBe(author.address)\n      expect(articlesList.articles.length).toBe(author.articles.length)\n      for (let articleIndex = 0; articleIndex < author.articles.length; articleIndex++) {\n        const article = author.articles[articleIndex]\n\n        // check short version of the article\n        const articleInfo = articlesList.articles[articleIndex]\n        expect(articleInfo.slug).toBe(article.slug)\n        expect(articleInfo.shortText).toBeDefined()\n        expect(articleInfo.previewData).toBeDefined()\n\n        // check full version of the article\n        const fsArticle = (\n          await 
supertestApp.get(`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${article.slug}`)\n        ).body as ArticleResponse\n        expect(fsArticle.status).toBe('ok')\n        expect(fsArticle.userAddress).toBe(author.address)\n        expect(fsArticle.article.slug).toStrictEqual(article.slug)\n        expect(fsArticle.article.data).toBeDefined()\n        expect(fsArticle.article.preview).toBeDefined()\n      }\n    }\n  })\n\n  it('get non-existing article for an existing user', async () => {\n    const supertestApp = supertest(app)\n    const wallet = await createWallet()\n    const author = {\n      address: wallet.publicKey.toString('hex'),\n      personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n    }\n\n    const update = new Update(PROJECT_NAME, author.address, 1)\n    update.addAction(createAddUserAction(author.address))\n    update.setSignature(author.personalSign(update.getSignData()))\n    await supertestApp.post('/v1/fs/update/apply').send({ update })\n    const nonExistentSlug = 'non-existent-article'\n    const response = await supertestApp.get(\n      `/v1/fs/blob/get-article?userAddress=${author.address}&slug=${nonExistentSlug}`,\n    )\n    expect(response.status).toBe(500)\n    expect(response.body).toStrictEqual({\n      message: `Article not found: \"${nonExistentSlug}\". 
Get item: item not found: \"articles\"`,\n      status: 'error',\n    })\n  })\n\n  it('get article from non-existing user', async () => {\n    const supertestApp = supertest(app)\n    const nonExistentUserAddress = '0'.repeat(64)\n    const response = await supertestApp.get(\n      `/v1/fs/blob/get-article?userAddress=${nonExistentUserAddress}&slug=some-article`,\n    )\n\n    expect(response.status).toBe(500)\n    expect(response.body).toStrictEqual({\n      message: `User not found: \"${nonExistentUserAddress}\"`,\n      status: 'error',\n    })\n  })\n\n  it('get articles from non-existing user', async () => {\n    const supertestApp = supertest(app)\n    const nonExistentUserAddress = '0'.repeat(64)\n    const response = await supertestApp.get(`/v1/fs/blob/get-articles?userAddress=${nonExistentUserAddress}`)\n\n    expect(response.status).toBe(500)\n    expect(response.body).toStrictEqual({\n      message: `User not found: \"${nonExistentUserAddress}\"`,\n      status: 'error',\n    })\n  })\n\n  it('get non-existing articles for an existing user', async () => {\n    const supertestApp = supertest(app)\n    const wallet = await createWallet()\n    const author = {\n      address: wallet.publicKey.toString('hex'),\n      personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n    }\n\n    const update = new Update(PROJECT_NAME, author.address, 1)\n    update.addAction(createAddUserAction(author.address))\n    update.setSignature(author.personalSign(update.getSignData()))\n    await supertestApp.post('/v1/fs/update/apply').send({ update })\n\n    const response = await supertestApp.get(`/v1/fs/blob/get-articles?userAddress=${author.address}`)\n    expect(response.status).toBe(500)\n    expect(response.body).toStrictEqual({\n      message: `Articles not found. 
Get item: item not found: \"articles\"`,\n      status: 'error',\n    })\n  })\n\n  it('add incorrect article with correct index-json for an existing user', async () => {\n    await syncFileSystem()\n    const supertestApp = supertest(app)\n    const wallet = await createWallet()\n    const author = {\n      address: wallet.publicKey.toString('hex'),\n      personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n    }\n\n    // Add user first\n    let update = new Update(PROJECT_NAME, author.address, 1)\n    update.addAction(createAddUserAction(author.address))\n    update.addAction(createAddDirectoryAction('/articles'))\n    update.setSignature(author.personalSign(update.getSignData()))\n    await supertestApp.post('/v1/fs/update/apply').send({ update })\n\n    const articleData = 'This is some random short text instead of an actual article.'\n    const hash = await uploadBytes(tonStorage, stringToBytes(articleData))\n\n    const articleSlug = 'random-article'\n    const updatesInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))\n      .body as GetUpdateIdResponse\n    update = new Update(PROJECT_NAME, author.address, updatesInfo.updateId + 1)\n    update.addAction(createAddDirectoryAction(`/articles/${articleSlug}`))\n    update.addAction(\n      createAddFileAction({\n        path: `/articles/${articleSlug}/index-json`,\n        mimeType: 'application/json',\n        size: articleData.length,\n        hash,\n      }),\n    )\n    update.setSignature(author.personalSign(update.getSignData()))\n    const response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n\n    expect(response.status).toBe(200)\n    expect(response.body).toStrictEqual({ status: 'ok' })\n\n    const fsArticle = (\n      await supertestApp.get(`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${articleSlug}`)\n    ).body as ArticleResponse\n    expect(fsArticle.status).toBe('error')\n    // not strict 
comparison because of different error messages on different platforms (macos/linux arm64)\n    expect(fsArticle.message).toContain(\n      `Article not found: \"${articleSlug}\". Error: JSON assert: data is not a valid JSON`,\n    )\n  })\n\n  it('should add and remove an article, checking its availability by slug', async () => {\n    await syncFileSystem()\n    const supertestApp = supertest(app)\n\n    // create a new user and a new article\n    const wallet = await createWallet()\n    const author = {\n      address: wallet.publicKey.toString('hex'),\n      personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n      article: generateArticle() as Article,\n    }\n\n    // register the user and create the article directory\n    let update = new Update(PROJECT_NAME, author.address, 1)\n    update.addAction(createAddUserAction(author.address))\n    update.addAction(createAddDirectoryAction('/articles'))\n    update.setSignature(author.personalSign(update.getSignData()))\n    let response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(response.status).toBe(200)\n    expect(response.body).toStrictEqual({ status: 'ok' })\n\n    // add the new article\n    const articleData = JSON.stringify(author.article)\n    const hash = await uploadBytes(tonStorage, stringToBytes(articleData))\n    const updatesInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))\n      .body as GetUpdateIdResponse\n    update = new Update(PROJECT_NAME, author.address, updatesInfo.updateId + 1)\n    update.addAction(createAddDirectoryAction(`/articles/${author.article.slug}`))\n    update.addAction(\n      createAddFileAction({\n        path: `/articles/${author.article.slug}/index-json`,\n        mimeType: 'application/json',\n        size: articleData.length,\n        hash,\n      }),\n    )\n    update.setSignature(author.personalSign(update.getSignData()))\n    response = await 
supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(response.status).toBe(200)\n    expect(response.body).toStrictEqual({ status: 'ok' })\n\n    // check the article is available by slug\n    const fsArticle = (\n      await supertestApp.get(`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${author.article.slug}`)\n    ).body as ArticleResponse\n    expect(fsArticle.status).toBe('ok')\n    expect(fsArticle.userAddress).toBe(author.address)\n    expect(fsArticle.article.slug).toBe(author.article.slug)\n    expect(fsArticle.article.data).toBeDefined()\n\n    // remove the article by deleting its slug folder\n    const deleteInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))\n      .body as GetUpdateIdResponse\n    update = new Update(PROJECT_NAME, author.address, deleteInfo.updateId + 1)\n    update.addAction(createRemoveDirectoryAction(`/articles/${author.article.slug}`))\n    update.setSignature(author.personalSign(update.getSignData()))\n    response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(response.status).toBe(200)\n    expect(response.body).toStrictEqual({ status: 'ok' })\n\n    // check the article is no longer available by slug\n    const removedArticleResponse = await supertestApp.get(\n      `/v1/fs/blob/get-article?userAddress=${author.address}&slug=${author.article.slug}`,\n    )\n    expect(removedArticleResponse.status).toBe(500)\n    expect(removedArticleResponse.body.status).toBe('error')\n  })\n})\n"
  },
  {
    "path": "test/controllers/file-system/blob.test.ts",
    "content": "// todo change managing of the file system to configure and call when needed\nprocess.env.SHOW_LOGS = 'false'\nimport tmp from 'tmp'\nimport path from 'path'\nimport knex from 'knex'\nimport knexConfig from '../../../knexfile'\nimport pool from '../../../src/db'\nimport supertest from 'supertest'\nimport app, { clearFileSystem, createTonStorageInstance, syncFileSystem } from '../../../src/app'\nimport {\n  createAddFileAction,\n  createAddUserAction,\n  createRemoveFileAction,\n  personalSign,\n  Update,\n} from '@fairjournal/file-system'\nimport { createWallet, removeAllTonStorageFiles, tonStorageFilesList } from '../../utils'\nimport { MAX_BLOB_SIZE, PROJECT_NAME } from '../../../src/controllers/file-system/const'\nimport { TonstorageCLI } from 'tonstorage-cli'\nimport fs from 'fs'\n\nconst db = knex(process.env.DB_SOCKET_PATH ? knexConfig.docker : knexConfig.development)\n\ndescribe('blob', () => {\n  let tonStorage: TonstorageCLI\n  beforeEach(async () => {\n    // Rollback the migration (if any)\n    await db.migrate.rollback()\n\n    // Run the migration\n    await db.migrate.latest()\n    clearFileSystem()\n    tonStorage = createTonStorageInstance()\n    await removeAllTonStorageFiles(tonStorage)\n    expect(await tonStorageFilesList(tonStorage)).toHaveLength(0)\n  })\n\n  afterEach(async () => {\n    // After each test, we can rollback the migration\n    await db.migrate.rollback()\n  })\n\n  afterAll(async () => {\n    // Close the database connection after all tests are done\n    await db.destroy()\n    await pool.end()\n    await removeAllTonStorageFiles(tonStorage)\n  })\n\n  it('upload and download blob', async () => {\n    const supertestApp = supertest(app)\n\n    await syncFileSystem()\n    const files = [\n      {\n        name: 'file1.txt',\n        mime_type: 'text/plain',\n        size: 12,\n        sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',\n        reference: 
'65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',\n      },\n      {\n        name: 'img1.jpg',\n        mime_type: 'image/jpeg',\n        size: 2022171,\n        sha256: '6b0f972d83497327eb8adc8a9a58177d99140322570b86773969f6e5febec698',\n        reference: 'f67a56fe1f9198e1e5024eed4cc82f24137aaffb373351139c1e066a4e5d58fc',\n      },\n    ]\n\n    for (const [index, file] of files.entries()) {\n      const filePath = path.join(__dirname, `../../data/${file.name}`)\n      for (let i = 0; i < 10; i++) {\n        const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)\n        expect(response.status).toBe(200)\n        const data = response.body.data\n        expect(data.reference).toBe(file.reference)\n        expect(data.mime_type).toBe(file.mime_type)\n        expect(data.sha256).toBe(file.sha256)\n        expect(data.size).toBe(file.size)\n      }\n\n      expect(await tonStorageFilesList(tonStorage)).toHaveLength(index + 1)\n    }\n  })\n\n  it('add update with reference that do not exists', async () => {\n    const supertestApp = supertest(app)\n\n    const wallet = await createWallet()\n\n    const author = {\n      address: wallet.publicKey.toString('hex'),\n      personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n    }\n\n    const nonExistentReference = '0'.repeat(64)\n\n    const update = new Update(PROJECT_NAME, author.address, 1)\n    update.addAction(createAddUserAction(author.address))\n    update.addAction(\n      createAddFileAction({\n        path: '/index-json',\n        mimeType: 'application/json',\n        size: 100,\n        hash: nonExistentReference,\n      }),\n    )\n    update.setSignature(author.personalSign(update.getSignData()))\n\n    const response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n\n    expect(response.status).toBe(500)\n    expect(response.body).toStrictEqual({\n      message: `Reference \"${nonExistentReference}\" not 
found`,\n      status: 'error',\n    })\n  })\n\n  it('duplicate file upload', async () => {\n    const supertestApp = supertest(app)\n\n    // Sync file system before uploading\n    await syncFileSystem()\n\n    const file = {\n      name: 'file1.txt',\n      mime_type: 'text/plain',\n      size: 12,\n      sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',\n      reference: '65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',\n    }\n\n    const filePath = path.join(__dirname, `../../data/${file.name}`)\n\n    // First upload\n    let response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)\n    expect(response.status).toBe(200)\n    let data = response.body.data\n    expect(data.reference).toBe(file.reference)\n    expect(data.mime_type).toBe(file.mime_type)\n    expect(data.sha256).toBe(file.sha256)\n    expect(data.size).toBe(file.size)\n\n    // Second upload of the same file\n    response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)\n    expect(response.status).toBe(200) // Or some error status if your application doesn't allow duplicate uploads\n    data = response.body.data\n\n    // Check if it is the same file or a different one based on your application logic\n    expect(data.reference).toBe(file.reference)\n    expect(data.mime_type).toBe(file.mime_type)\n    expect(data.sha256).toBe(file.sha256)\n    expect(data.size).toBe(file.size)\n\n    // Check that the count of files in tonStorage is still 1\n    expect(await tonStorageFilesList(tonStorage)).toHaveLength(1)\n  })\n\n  it('upload a file larger than the max size limit', async () => {\n    const supertestApp = supertest(app)\n    const tempFile = tmp.fileSync()\n\n    try {\n      fs.writeSync(tempFile.fd, Buffer.alloc(MAX_BLOB_SIZE + 1))\n      const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', tempFile.name)\n      expect(response.status).toBe(500)\n      
expect(response.body).toStrictEqual({\n        message: 'File too large',\n        status: 'error',\n      })\n    } finally {\n      // Clean up the temp file regardless of the test result\n      tempFile.removeCallback()\n    }\n  })\n\n  it('update fs file', async () => {\n    const supertestApp = supertest(app)\n    const wallet = await createWallet()\n    const author = {\n      address: wallet.publicKey.toString('hex'),\n      personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n    }\n\n    await syncFileSystem()\n\n    const file1 = {\n      name: 'file1.txt',\n      mime_type: 'text/plain',\n      size: 12,\n      sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',\n      reference: '65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',\n    }\n\n    const file2 = {\n      name: 'file2.txt',\n      mime_type: 'text/plain',\n      size: 258,\n      sha256: '5438a317bde30599b535f86cd3ed0a69d88ab4d17ee935199bb3a07a4189fbd4',\n      reference: '366f6ec29a530266595d9dc11415bd7fb3312d816308774db445f872153b2d97',\n    }\n    const remoteFileName = 'profile-data'\n    const remoteFilePath = `/${remoteFileName}`\n    const filePath1 = path.join(__dirname, `../../data/${file1.name}`)\n    const filePath2 = path.join(__dirname, `../../data/${file2.name}`)\n\n    // First upload\n    const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath1)\n    expect(response.status).toBe(200)\n    const data = response.body.data\n    expect(data.reference).toBe(file1.reference)\n    expect(data.mime_type).toBe(file1.mime_type)\n    expect(data.sha256).toBe(file1.sha256)\n    expect(data.size).toBe(file1.size)\n\n    const update = new Update(PROJECT_NAME, author.address, 1)\n    update.addAction(createAddUserAction(author.address))\n    update.addAction(\n      createAddFileAction({\n        path: remoteFilePath,\n        mimeType: file1.mime_type,\n        size: file1.size,\n        hash: 
file1.reference,\n      }),\n    )\n    update.setSignature(author.personalSign(update.getSignData()))\n\n    const apply1 = await supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(apply1.status).toBe(200)\n    expect(apply1.body).toStrictEqual({\n      status: 'ok',\n    })\n\n    // Check get-path-info method\n    const pathInfoResponse1 = await supertestApp.get(\n      `/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${remoteFilePath}`,\n    )\n    expect(pathInfoResponse1.status).toBe(200)\n    expect(pathInfoResponse1.body).toStrictEqual({\n      status: 'ok',\n      userAddress: author.address,\n      path: remoteFilePath,\n      data: {\n        name: remoteFileName,\n        mimeType: file1.mime_type,\n        size: file1.size,\n        hash: file1.reference,\n        updateId: 1,\n      },\n    })\n\n    // Second upload\n    const response2 = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath2)\n    expect(response2.status).toBe(200)\n    const data2 = response2.body.data\n    expect(data2.reference).toBe(file2.reference)\n    expect(data2.mime_type).toBe(file2.mime_type)\n    expect(data2.sha256).toBe(file2.sha256)\n    expect(data2.size).toBe(file2.size)\n\n    update.setId(2)\n    update.setActions([])\n    update.addAction(createRemoveFileAction('/profile-data'))\n    update.addAction(\n      createAddFileAction({\n        path: remoteFilePath,\n        mimeType: file2.mime_type,\n        size: file2.size,\n        hash: file2.reference,\n      }),\n    )\n    update.setSignature(author.personalSign(update.getSignData()))\n\n    const apply2 = await supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(apply2.status).toBe(200)\n    expect(apply2.body).toStrictEqual({\n      status: 'ok',\n    })\n\n    // Check get-path-info method\n    const pathInfoResponse2 = await supertestApp.get(\n      `/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${remoteFilePath}`,\n    )\n    
expect(pathInfoResponse2.status).toBe(200)\n    expect(pathInfoResponse2.body).toStrictEqual({\n      status: 'ok',\n      userAddress: author.address,\n      path: remoteFilePath,\n      data: {\n        name: remoteFileName,\n        mimeType: file2.mime_type,\n        size: file2.size,\n        hash: file2.reference,\n        updateId: 2,\n      },\n    })\n\n    expect(await tonStorageFilesList(tonStorage)).toHaveLength(2)\n  })\n\n  it('get path info for incorrect path', async () => {\n    const supertestApp = supertest(app)\n    const wallet = await createWallet()\n\n    await syncFileSystem()\n\n    const author = {\n      address: wallet.publicKey.toString('hex'),\n      personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n    }\n\n    const file = {\n      name: 'file1.txt',\n      mime_type: 'text/plain',\n      size: 12,\n      sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',\n      reference: '65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',\n    }\n\n    const filePath = path.join(__dirname, `../../data/${file.name}`)\n    const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)\n    expect(response.status).toBe(200)\n\n    const remoteFileName = 'file-test'\n    const remoteFilePath = `/${remoteFileName}`\n\n    const update = new Update(PROJECT_NAME, author.address, 1)\n    update.addAction(createAddUserAction(author.address))\n    update.addAction(\n      createAddFileAction({\n        path: remoteFilePath,\n        mimeType: file.mime_type,\n        size: file.size,\n        hash: file.reference,\n      }),\n    )\n    update.setSignature(author.personalSign(update.getSignData()))\n\n    const applyUpdateResponse = await supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(applyUpdateResponse.status).toBe(200)\n\n    // Try to get the file without / symbol\n    const pathInfoResponse1 = await supertestApp.get(\n      
`/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${remoteFileName}`,\n    )\n    expect(pathInfoResponse1.status).toBe(500)\n    expect(pathInfoResponse1.body).toStrictEqual({\n      status: 'error',\n      message: `Can't get info about the path: Get item: item not found: \"${author.address}${remoteFileName}\"`,\n    })\n\n    // Try to get another file with a full path but one symbol more\n    const fakePath = `${remoteFilePath}1`\n    const fakeName = `${remoteFileName}1`\n    const pathInfoResponse2 = await supertestApp.get(\n      `/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${fakePath}`,\n    )\n    expect(pathInfoResponse2.status).toBe(500)\n    expect(pathInfoResponse2.body).toStrictEqual({\n      status: 'error',\n      message: `Can't get info about the path: Get item: item not found: \"${fakeName}\"`,\n    })\n  })\n})\n"
  },
  {
    "path": "test/controllers/file-system/file-system.test.ts",
    "content": "// todo change managing of the file system to configure and call when needed\nprocess.env.SHOW_LOGS = 'false'\nimport knex from 'knex'\nimport knexConfig from '../../../knexfile'\nimport pool from '../../../src/db'\nimport supertest from 'supertest'\nimport app, { clearFileSystem, fileSystem, syncFileSystem } from '../../../src/app'\nimport { createAddUserAction, Update, personalSign } from '@fairjournal/file-system'\nimport { PROJECT_NAME } from '../../../src/controllers/file-system/const'\nimport { createWallet, getUpdatesCount } from '../../utils'\n\nconst db = knex(process.env.DB_SOCKET_PATH ? knexConfig.docker : knexConfig.development)\n\ndescribe('file-system', () => {\n  beforeEach(async () => {\n    // Rollback the migration (if any)\n    await db.migrate.rollback()\n\n    // Run the migration\n    await db.migrate.latest()\n    clearFileSystem()\n  })\n\n  afterEach(async () => {\n    // After each test, we can rollback the migration\n    await db.migrate.rollback()\n  })\n\n  afterAll(async () => {\n    // Close the database connection after all tests are done\n    await db.destroy()\n    pool.end()\n  })\n\n  it('update/apply - empty data', async () => {\n    const supertestApp = supertest(app)\n    const response = await supertestApp.post('/v1/fs/update/apply').send()\n    expect(response.status).toBe(500)\n    expect(response.body).toStrictEqual({\n      status: 'error',\n      message: 'Data is not an object',\n    })\n  })\n\n  it('update/apply - empty object', async () => {\n    const supertestApp = supertest(app)\n    const response = await supertestApp.post('/v1/fs/update/apply').send({ update: {} })\n    expect(response.status).toBe(500)\n    expect(response.body).toStrictEqual({\n      status: 'error',\n      message: 'UpdateDataSigned: signature is not defined',\n    })\n  })\n\n  it('update/apply - register, clear fs and recover from db', async () => {\n    const supertestApp = supertest(app)\n\n    const authors = await 
Promise.all(\n      Array.from({ length: 3 }, async () => {\n        const wallet = await createWallet()\n\n        return {\n          address: wallet.publicKey.toString('hex'),\n          personalSign: (data: string): string => personalSign(data, wallet.secretKey),\n        }\n      }),\n    )\n\n    const responseUserCheck0 = await supertestApp.get(`/v1/fs/user/info?address=${authors[0].address}`)\n    expect(responseUserCheck0.status).toBe(200)\n    expect(responseUserCheck0.body).toStrictEqual({\n      address: authors[0].address,\n      isUserExists: false,\n      status: 'ok',\n    })\n\n    const update = new Update(PROJECT_NAME, authors[0].address, 1)\n    update.addAction(createAddUserAction(authors[0].address))\n    update.setSignature(authors[0].personalSign(update.getSignData()))\n    expect(await getUpdatesCount(db)).toEqual(0)\n    expect(fileSystem.getUpdateId(authors[0].address)).toEqual(0)\n    const response = await supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(response.status).toBe(200)\n    expect(response.body).toStrictEqual({ status: 'ok' })\n    expect(await getUpdatesCount(db)).toEqual(1)\n    expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)\n\n    const responseUserCheck1 = await supertestApp.get(`/v1/fs/user/info?address=${authors[0].address}`)\n    expect(responseUserCheck1.status).toBe(200)\n    expect(responseUserCheck1.body).toStrictEqual({\n      address: authors[0].address,\n      isUserExists: true,\n      status: 'ok',\n    })\n\n    const response1 = await supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(response1.status).toBe(500)\n    expect(response1.body).toStrictEqual({ status: 'error', message: 'Update with id \"1\" already exists' })\n\n    expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)\n    update.setId(2)\n    update.setSignature(authors[0].personalSign(update.getSignData()))\n    const response2 = await 
supertestApp.post('/v1/fs/update/apply').send({ update })\n    expect(response2.status).toBe(500)\n    expect(response2.body).toStrictEqual({\n      status: 'error',\n      message: `User with address \"${authors[0].address}\" already exists`,\n    })\n\n    expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)\n    clearFileSystem()\n    expect(fileSystem.getUpdateId(authors[0].address)).toEqual(0)\n\n    // recover filesystem from the db\n    await syncFileSystem()\n    expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)\n  })\n\n  it('user/info - user do not exists', async () => {\n    const supertestApp = supertest(app)\n    const address = 'd66401889725ada1f6ba8e78f67d24aec386341d8e3310f00ef64df463def1ef'\n    const response = await supertestApp.get(`/v1/fs/user/info?address=${address}`)\n    expect(response.status).toBe(200)\n    expect(response.body).toStrictEqual({\n      address: address,\n      isUserExists: false,\n      status: 'ok',\n    })\n  })\n\n  // todo cover case of recovering filesystem using real ton. using DB and without db\n})\n"
  },
  {
    "path": "test/data/file1.txt",
    "content": "Hello world!"
  },
  {
    "path": "test/data/file2.txt",
    "content": "Privacy is a fundamental human right.\nYour devices are important to so many parts of your life.\nWhat you share from those experiences, and who you share it with, should be up to you.\nIt’s not always easy.\nBut that’s the kind of innovation we believe in.\n"
  },
  {
    "path": "test/utils.ts",
    "content": "import { getSecureRandomBytes, KeyPair, keyPairFromSeed } from 'ton-crypto'\nimport { Knex } from 'knex'\nimport { Article } from '../src/controllers/file-system/blob/utils'\nimport { TonstorageCLI } from 'tonstorage-cli'\nimport { Torrent } from '../src/ton-utils'\nimport { base64ToHex, extractHash } from '../src/utils'\nimport * as fs from 'fs'\nimport * as os from 'os'\nimport * as path from 'path'\nimport * as crypto from 'crypto'\nimport { Tree } from '@fairjournal/file-system'\n\n/**\n * Fake storage\n */\nexport interface FakeStorage {\n  /**\n   * Uploads data to the storage and returns its reference\n   *\n   * @param data Data to upload\n   */\n  upload: (data: Uint8Array) => Promise<string>\n\n  /**\n   * Downloads data from the storage by its reference\n   *\n   * @param reference Reference to download\n   */\n  download: (reference: string) => Promise<Uint8Array>\n}\n\nexport const UPDATES_TABLE_NAME = 'fs_update'\n\n/**\n * According: https://github.com/ton-foundation/specs/blob/main/specs/wtf-0002.md\n */\nexport const TON_SAFE_SIGN_MAGIC = 'ton-safe-sign-magic'\n\n/**\n * Creates TON wallet with public and secret keys\n */\nexport async function createWallet(userSeed?: string): Promise<KeyPair> {\n  const seed: Buffer = userSeed ? 
Buffer.from(userSeed, 'hex') : await getSecureRandomBytes(32) // seed is always 32 bytes\n\n  return keyPairFromSeed(seed)\n}\n\n/**\n * Gets the number of records in the table\n *\n * @param db Database\n * @param tableName Table name\n */\nexport async function getRecordCount(db: Knex, tableName: string): Promise<number> {\n  const result = await db(tableName).count('* as count')\n\n  return Number(result[0].count)\n}\n\n/**\n * Gets the number of records in the updates table\n *\n * @param db Database\n */\nexport async function getUpdatesCount(db: Knex): Promise<number> {\n  return getRecordCount(db, UPDATES_TABLE_NAME)\n}\n\n/**\n * Generates a random number\n *\n * @param max Max value\n */\nexport function randomNumber(max = 1000): number {\n  return Math.floor(Math.random() * max)\n}\n\n/**\n * Generates a random article\n */\nexport function generateArticle(): Article {\n  const articleId = randomNumber()\n\n  return {\n    slug: `article-${articleId}`,\n    data: {\n      blocks: [\n        {\n          type: 'title',\n          text: `Article ${articleId}`,\n        },\n        {\n          type: 'paragraph',\n          text: 'Hello world! 
Paragraph 1.',\n        },\n        {\n          type: 'paragraph',\n          text: 'Hello world 2222 Paragraph 2',\n        },\n        {\n          type: 'paragraph',\n          text: 'Hello world 33333 Paragraph 3',\n        },\n      ],\n    },\n    preview: {\n      img: 'https://test.domain/test.jpg',\n    },\n  }\n}\n\n/**\n * Pads the string with zeros to the desired length\n *\n * @param input Input string\n * @param resultSize Desired length\n */\nexport function padStringWithZeros(input: string, resultSize = 64): string {\n  // 'padStart' adds zeros to the start of the string until it reaches the desired length\n  return input.padStart(resultSize, '0')\n}\n\n/**\n * Gets fake storage instance\n */\nexport function getFakeStorage(): FakeStorage {\n  let index = 0\n  const storage: Record<string, Uint8Array> = {}\n\n  return {\n    upload: async (data: Uint8Array): Promise<string> => {\n      index++\n\n      const reference = padStringWithZeros(index.toString())\n      storage[reference] = data\n\n      return reference\n    },\n    download: async (reference: string): Promise<Uint8Array> => {\n      if (!storage[reference]) {\n        throw new Error(`Reference \"${reference}\" not found`)\n      }\n\n      return storage[reference]\n    },\n  }\n}\n\n/**\n * Gets list of torrents from ton-storage\n *\n * @param tonStorage Ton-storage instance\n */\nexport async function tonStorageFilesList(tonStorage: TonstorageCLI): Promise<Torrent[]> {\n  const list = await tonStorage.list()\n\n  if (!list?.ok) {\n    throw new Error(`Failed to get list of torrents from ton-storage: ${JSON.stringify(list)}`)\n  }\n\n  return (list?.result?.torrents || []) as Torrent[]\n}\n\n/**\n * Removes all files from ton-storage\n *\n * @param tonStorage Ton-storage instance\n */\nexport async function removeAllTonStorageFiles(tonStorage: TonstorageCLI): Promise<void> {\n  const torrents = await tonStorageFilesList(tonStorage)\n  const itemsList = torrents || []\n  for (const 
item of itemsList) {\n    await tonStorage.remove(base64ToHex(item.hash))\n  }\n}\n\n/**\n * Writes data to a temporary file and returns its path\n *\n * @param data Data to write\n * @param name File name\n */\nexport async function writeTempFile(data: Uint8Array, name = 'blob'): Promise<string> {\n  const dirName = crypto.randomBytes(16).toString('hex')\n  const tempDir = os.tmpdir()\n  const dirPath = path.join(tempDir, dirName)\n  fs.mkdirSync(dirPath)\n  const filePath = path.join(dirPath, name)\n  fs.writeFileSync(filePath, data)\n\n  return filePath\n}\n\n/**\n * Uploads bytes to ton-storage\n *\n * @param tonStorage Ton-storage instance\n * @param bytes Bytes to upload\n */\nexport async function uploadBytes(tonStorage: TonstorageCLI, bytes: Uint8Array): Promise<string> {\n  const filePath = await writeTempFile(bytes)\n  let response\n  try {\n    response = await tonStorage.create(filePath, {\n      copy: true,\n      desc: '',\n      upload: false,\n    })\n  } finally {\n    if (fs.existsSync(filePath)) {\n      fs.unlinkSync(filePath)\n    }\n  }\n\n  let reference\n\n  if (response?.ok) {\n    reference = base64ToHex(response.result.torrent.hash)\n  } else if (response?.error?.includes('duplicate hash')) {\n    reference = extractHash(response?.error)\n  } else {\n    throw new Error(`Failed to upload bytes to ton-storage: ${JSON.stringify(response)}`)\n  }\n\n  return reference.toLowerCase()\n}\n\n/**\n * Asserts that tree is correct\n *\n * @param data Tree to check\n */\nexport function assertTree(data: unknown): asserts data is Tree {\n  // todo remove this method when it will be exported from fs\n  const tree = data as Tree\n\n  if (!tree.directory) {\n    throw new Error('Tree: should contain root directory')\n  }\n}\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n  \"include\": [\"src\", \"test\"],\n  \"compilerOptions\": {\n    \"lib\": [\n      \"dom\"\n    ],\n    \"alwaysStrict\": true,\n    \"target\": \"ES6\",\n    \"esModuleInterop\": true,\n    \"skipLibCheck\": true,\n    \"allowSyntheticDefaultImports\": true,\n    \"moduleResolution\": \"node\",\n    \"module\": \"commonjs\",\n    \"strict\": true,\n    \"resolveJsonModule\": true,\n    \"experimentalDecorators\": true,\n    \"emitDecoratorMetadata\": true,\n    \"typeRoots\": [\n      \"node_modules/@types\"\n    ],\n    \"rootDirs\": [\"src\"],\n    \"outDir\": \"dist\"\n  }\n}\n"
  },
  {
    "path": "tsconfig.test.json",
    "content": "{\n  \"extends\": \"./tsconfig.json\",\n  \"include\": [\n    \"src\",\n    \"test\",\n    \"jest.config.ts\"\n  ],\n  \"compilerOptions\": {\n    \"noEmit\": true,\n    \"lib\": [\n      \"dom\"\n    ]\n  }\n}\n"
  }
]