Repository: FairJournal/backend
Branch: master
Commit: b1a74f8def12
Files: 57
Total size: 122.7 KB
Directory structure:
gitextract_d7t6a830/
├── .eslintrc.json
├── .github/
│ └── workflows/
│ └── tests.yaml
├── .gitignore
├── .prettierrc
├── Dockerfile
├── README.md
├── blob/
│ └── README.md
├── example.env
├── jest.config.js
├── knexfile.ts
├── migrations/
│ ├── 20230706133935_init.ts
│ ├── 20230713094839_fs_updates.ts
│ ├── 20230716103734_file.ts
│ ├── 20230725081357_settings.ts
│ └── db.sql
├── nodemon.json
├── package.json
├── src/
│ ├── app.ts
│ ├── controllers/
│ │ ├── ArticleController.ts
│ │ ├── ImageController.ts
│ │ ├── UserController.ts
│ │ └── file-system/
│ │ ├── app/
│ │ │ ├── get-settings-action.ts
│ │ │ ├── index.ts
│ │ │ └── publish-action.ts
│ │ ├── blob/
│ │ │ ├── get-article-action.ts
│ │ │ ├── get-articles-action.ts
│ │ │ ├── get-path-info-action.ts
│ │ │ ├── index.ts
│ │ │ ├── upload-action.ts
│ │ │ └── utils.ts
│ │ ├── const.ts
│ │ ├── index.ts
│ │ ├── types.ts
│ │ ├── update/
│ │ │ ├── apply-action.ts
│ │ │ └── index.ts
│ │ ├── user/
│ │ │ ├── get-update-id-action.ts
│ │ │ ├── index.ts
│ │ │ └── info-action.ts
│ │ └── utils.ts
│ ├── db.ts
│ ├── fs.ts
│ ├── index.ts
│ ├── models/
│ │ ├── Article.ts
│ │ └── User.ts
│ ├── routes.ts
│ ├── ton-utils.ts
│ └── utils.ts
├── startup.sh
├── test/
│ ├── controllers/
│ │ └── file-system/
│ │ ├── app.test.ts
│ │ ├── article.test.ts
│ │ ├── blob.test.ts
│ │ └── file-system.test.ts
│ ├── data/
│ │ ├── file1.txt
│ │ └── file2.txt
│ └── utils.ts
├── tsconfig.json
└── tsconfig.test.json
================================================
FILE CONTENTS
================================================
================================================
FILE: .eslintrc.json
================================================
{
"env": {
"browser": true,
"commonjs": true,
"es6": true,
"node": true,
"jest": true
},
"parser": "@typescript-eslint/parser",
"extends": ["plugin:@typescript-eslint/recommended", "prettier", "plugin:prettier/recommended"],
"parserOptions": {
"ecmaFeatures": {
"jsx": true
},
"ecmaVersion": 2018,
"sourceType": "module",
"project": "./tsconfig.json"
},
"plugins": ["jest", "unused-imports", "@typescript-eslint"],
"rules": {
"indent": "off",
"@typescript-eslint/explicit-function-return-type": ["error"],
"@typescript-eslint/indent": ["error", 2],
"array-bracket-newline": ["error", "consistent"],
"strict": ["error", "safe"],
"block-scoped-var": "error",
"complexity": "warn",
"default-case": "error",
"dot-notation": "warn",
"eqeqeq": "error",
"guard-for-in": "warn",
"linebreak-style": ["warn", "unix"],
"no-alert": "error",
"no-case-declarations": "error",
"no-console": "error",
"no-constant-condition": "error",
"no-continue": "warn",
"no-div-regex": "error",
"no-empty": "warn",
"no-empty-pattern": "error",
"no-implicit-coercion": "error",
"prefer-arrow-callback": "warn",
"no-labels": "error",
"no-loop-func": "error",
"no-nested-ternary": "warn",
"no-script-url": "error",
"no-warning-comments": "warn",
"quote-props": ["error", "as-needed"],
"require-yield": "error",
"max-nested-callbacks": ["error", 4],
"max-depth": ["error", 4],
"space-before-function-paren": [
"error",
{
"anonymous": "never",
"named": "never",
"asyncArrow": "always"
}
],
"padding-line-between-statements": [
"error",
{ "blankLine": "always", "prev": "*", "next": "if" },
{ "blankLine": "always", "prev": "*", "next": "function" },
{ "blankLine": "always", "prev": "*", "next": "return" }
],
"no-useless-constructor": "off",
"no-dupe-class-members": "off",
"no-unused-expressions": "off",
"curly": ["error", "multi-line"],
"object-curly-spacing": ["error", "always"],
"comma-dangle": ["error", "always-multiline"],
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-unused-expressions": "error",
"@typescript-eslint/member-delimiter-style": [
"error",
{
"multiline": {
"delimiter": "none",
"requireLast": true
},
"singleline": {
"delimiter": "semi",
"requireLast": false
}
}
],
"@typescript-eslint/ban-ts-comment": [
"error",
{
"ts-expect-error": "allow-with-description",
"ts-ignore": "allow-with-description",
"ts-nocheck": "allow-with-description",
"ts-check": "allow-with-description",
"minimumDescriptionLength": 6
}
],
"require-await": "off",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-await": "off",
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-unused-vars": "off",
"unused-imports/no-unused-imports": "error",
"unused-imports/no-unused-vars": [
"warn",
{ "vars": "all", "varsIgnorePattern": "^_", "args": "after-used", "argsIgnorePattern": "^_" }
]
}
}
================================================
FILE: .github/workflows/tests.yaml
================================================
name: Test
on:
push:
branches:
- master
pull_request:
branches:
- '**'
jobs:
test:
name: Run tests
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Build Docker image
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/arm64
push: false
load: true
tags: your-docker-image-name:latest
- name: Run tests
run: docker run your-docker-image-name:latest
================================================
FILE: .gitignore
================================================
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# production
/build
# misc
.env
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
dist
avatars
================================================
FILE: .prettierrc
================================================
{
"printWidth": 120,
"tabWidth": 2,
"useTabs": false,
"bracketSpacing": true,
"semi": false,
"singleQuote": true,
"quoteProps": "as-needed",
"trailingComma": "all",
"endOfLine": "lf",
"arrowParens": "avoid",
"proseWrap": "always"
}
================================================
FILE: Dockerfile
================================================
# Start from the latest Node version on Alpine (note: node:alpine tracks the current release, not LTS; pin a version for reproducible builds)
FROM node:alpine
# Add the TON Storage daemon and CLI to the path
ENV PATH="/app/ton:${PATH}"
WORKDIR /app
# Install necessary packages
# netcat equivalent in Alpine is netcat-openbsd
# curl, mysql, and mysql-client are added since they might not be present in Alpine by default
RUN apk add --no-cache curl netcat-openbsd mysql mysql-client
# Initialize MySQL Database
RUN mysql_install_db --user=mysql --ldata=/var/lib/mysql
# Create the directory for the MySQL Unix socket and change its ownership
RUN mkdir -p /run/mysqld/ && chown -R mysql:mysql /run/mysqld/
# Download TON Storage daemon and CLI binaries
RUN curl -LJO https://github.com/ton-blockchain/ton/releases/download/v2023.06/storage-daemon-linux-arm64
RUN curl -LJO https://github.com/ton-blockchain/ton/releases/download/v2023.06/storage-daemon-cli-linux-arm64
RUN curl -LJO https://ton-blockchain.github.io/global.config.json
# Make them executable
RUN chmod +x storage-daemon-linux-arm64 storage-daemon-cli-linux-arm64
# Move them to the right place
RUN mkdir ton && mv storage-daemon-linux-arm64 storage-daemon-cli-linux-arm64 global.config.json ton/
# Add the current directory content to the Docker image
ADD . /app
# Install project dependencies
RUN npm ci
# Run scripts
RUN npm run check:types
RUN npm run lint:check
# Copy the startup script and make it executable
COPY ./startup.sh /app/startup.sh
RUN chmod +x /app/startup.sh
CMD ["/app/startup.sh"]
================================================
FILE: README.md
================================================
# Mutable File System Gateway
This repository contains the server-side implementation of our decentralized file system gateway. It's designed to provide a public, uncensored file system that can be accessed via the web for individuals wanting to share their data. The backend interfaces with multiple decentralized storage platforms and provides key services to manage data effectively and securely.
In its final stage, this project is envisioned to become a dynamic directory of decentralized, publicly accessible user files available to the entire world. These files will be available to all projects that use the node and are immune to censorship. This is not just a technical project, but a step towards a more transparent and accessible digital world. Harnessing the power of decentralized technologies, we aim to put the control of data back into the hands of users.
## Features
1. **Mempool:** Holds user operations on their file systems before they're included in the smart contract and uploaded to storage.
2. **Gateway:** Manages data uploads to storage through public gateways, eliminating the need for users to install nodes/extensions. This component can be replaced in projects using other file gateways.
3. **Rollup:** Aggregates all user changes over a specific period into a single hash, stored in a smart contract at regular intervals. This method significantly reduces the traditionally high costs associated with smart contract modifications, potentially saving users a substantial amount of money.
4. **Appchains:** The combination of the backend and file system allows services to build Appchains for data storage. As the project evolves, these data Appchains will be interconnected in a decentralized manner.
The server-side implementation is designed to work seamlessly with our [Decentralized File System](https://github.com/FairJournal/file-system), providing an end-to-end solution for creating a public, decentralized file system.
## Roadmap
- [x] ✅ POC of mempool
- [x] ✅ POC of gateway
- [x] ✅ POC of rollup
- [x] ✅ POC with the ability to create a file system specific for an app
- [ ] Add multi-storage capability, backup user's data to different storages
- [ ] Wrap the project in the form of a node with the same features, should work on mobile
- [ ] Write a smart contract for storing file system changes for all users across projects
- [ ] Find a blockchain home for the smart contract
- [ ] Implement decentralized database distribution for user's updates
- [ ] Implement incentives for nodes which store and validate the data
- [ ] Enable the ability to incentivize not only directly by the user, but by Appchains and third parties
- [ ] 🎉 🌎 Become a worldwide directory of public user files
## API
### GET /v1/fs/user/info
This endpoint checks if a user exists in the file system.
**URL parameters:**
- `address`: The address of the user.
**Response:**
```json
{
"status": "ok",
"address": "<address>",
"isUserExists": "<boolean>"
}
```
---
### GET /v1/fs/user/get-update-id
This endpoint gets the current update ID for a user.
**URL parameters:**
- `address`: The address of the user.
**Response:**
```json
{
"status": "ok",
"address": "<address>",
"updateId": "<number>"
}
```
---
### POST /v1/fs/blob/upload
This endpoint handles the uploading of a file, uploads it to the storage, inserts its metadata into a cache database to speed up the gateway, and returns the file info.
**Form data:**
- `blob`: A file to upload.
**Response:**
```json
{
"status": "ok",
"data": {
"reference": "<reference>",
"mime_type": "<mime_type>",
"sha256": "<sha256>",
"size": "<size>"
}
}
```
---
### GET /v1/fs/blob/get-article
This endpoint retrieves a full article based on the user's address and the article's slug.
**URL parameters:**
- `userAddress`: The address of the user.
- `slug`: The slug of the article.
**Response:**
```json
{
"status": "ok",
"userAddress": "<userAddress>",
"article": {
"slug": "<slug>",
"data": "<data>",
"preview": "<data>"
}
}
```
---
### GET /v1/fs/blob/get-articles
This endpoint retrieves all the articles of a user.
**URL parameters:**
- `userAddress`: The address of the user.
**Response:**
```json
{
"status": "ok",
"userAddress": "<userAddress>",
"articles": [
{
"slug": "<slug>",
"data": "<data>",
"preview": "<data>"
},
// ... more articles
]
}
```
---
### GET /v1/fs/blob/get-path-info
This endpoint retrieves the info of a specific path for a user.
**URL parameters:**
- `userAddress`: The address of the user.
- `path`: The path to retrieve info for.
**Response:**
```json
{
"status": "ok",
"userAddress": "<userAddress>",
"path": "<path>",
"data": "<data>"
}
```
---
### POST /v1/fs/update/apply
This endpoint applies an update action to the file system.
**Form data:**
- An `update` object that includes the update data.
**Response:**
```json
{
"status": "ok"
}
```
## Installation
1 - Install dependencies (Node.js 16):
`npm ci`
Copy and change options
`cp example.env .env`
2 - Install MySQL.
3 - Create `fair_journal` db:
`mysql -u root -p < ./migrations/db.sql`
4 - Start interactive mode for MySQL user creation:
`mysql -u root`
and run commands:
`CREATE USER 'fjuser'@'localhost' IDENTIFIED BY 'STRONG_PASSWORD_HERE';`
`GRANT ALL PRIVILEGES ON fair_journal.* TO 'fjuser'@'localhost';`
`FLUSH PRIVILEGES;`
5 - Put these credentials to `.env` file.
6 - Run migrations:
`npx knex migrate:latest --env production`
7 - Start server using pm2:
`npm run start`
## Development
Start in dev mode
`npm run start:dev`
Test app using local Docker
`docker build -t your-docker-image-name . && docker run -p 8000:8000 your-docker-image-name`
================================================
FILE: blob/README.md
================================================
# Uploaded blobs here
================================================
FILE: example.env
================================================
# Path to the root of the files
FILES_ROOT_PATH=/Users/test/web/fj-backend
# Port of the application
PORT=5000
# Database socket connection path if needed
DB_SOCKET_PATH=
# Database host
DB_HOST=localhost
# Database port
DB_PORT=3306
# Database username
DB_USER=root
# Database password
DB_PASSWORD=root
# Database name
DB_NAME=fair_journal
# External web url for old files
URL=http://localhost:5000/
# Whether to show server logs (true/false)
SHOW_LOGS=true
# Ton Storage CLI binary path
TON_STORAGE_BIN_PATH=/root/storage-daemon-cli
# Ton Storage host
TON_STORAGE_HOST=127.0.0.1:5555
# Ton Storage database path
TON_STORAGE_DATABASE_PATH=/var/ton-storage
# Ton Storage timeout
TON_STORAGE_TIMEOUT=5000
# Ton Storage wait attempts
TON_STORAGE_WAIT_ATTEMPTS=100
# Ton Storage check wait timeout
TON_STORAGE_CHECK_WAIT_TIMEOUT=3000
# Password for publishing the file system
PUBLISH_FS_PASSWORD=
================================================
FILE: jest.config.js
================================================
// Jest configuration: TypeScript tests via ts-jest, executed in a Node environment.
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  // Pick up any *.test.ts file anywhere in the repository
  testMatch: ['**/*.test.ts'],
  // Generous per-test timeout (100 s) for slow integration tests
  testTimeout: 100000,
}
================================================
FILE: knexfile.ts
================================================
import { config } from 'dotenv'
import { Knex } from 'knex'

// Load .env before reading any DB_* variables
config()

// Shared MySQL connection settings, read from the environment.
// Extracted into its own object so the `docker` configuration below can
// spread it without needing a @ts-ignore on a union-typed `connection`.
const connection = {
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
}

const knexConfig: Knex.Config = {
  client: 'mysql2',
  connection,
  migrations: {
    directory: './migrations',
  },
}

// Named environments for `knex --env <name>`; docker connects over a Unix socket.
const configurations: { [key: string]: Knex.Config } = {
  development: knexConfig,
  production: knexConfig,
  docker: {
    ...knexConfig,
    connection: {
      ...connection,
      socketPath: '/run/mysqld/mysqld2.sock',
    },
  },
}

export default configurations
================================================
FILE: migrations/20230706133935_init.ts
================================================
import { Knex } from "knex";
// Initial schema migration: creates the `users`, `articles` and `images`
// tables and seeds them with fixture rows.
export async function up(knex: Knex): Promise<void> {
  // create table
  await knex.schema.createTable("users", (table) => {
    table.increments("id").primary();
    table.string("wallet", 255).notNullable(); // wallet address (see fixture rows below)
    table.string("avatar", 255).notNullable();
    table.string("name", 255).notNullable();
    table.string("description", 255).notNullable();
  });
  // insert data
  // NOTE(review): these rows look like development fixtures (duplicate wallets,
  // placeholder text) — confirm they are intended to ship in a production migration
  await knex("users").insert([
    { id: 1, wallet: '200', avatar: 'https://example.com/avatar2.png', name: 'John Smith', description: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit' },
    { id: 3, wallet: '123', avatar: '', name: '', description: '' },
    { id: 4, wallet: '1234', avatar: '', name: '', description: '' },
    { id: 5, wallet: '1', avatar: '', name: '', description: '' },
    { id: 6, wallet: '0:fed265a59332abef0e2392fb653f94e8ff5cff55f6b35f6bfd3f3b7b5f862a2b', avatar: '222', name: 'Ihar Chernishev111', description: 'vTools failed to load source map: Could not load content for chrome111' },
    { id: 7, wallet: '0:fed265a59332abef0e2392fb653f94e8ff5cff55f6b35f6bfd3f3b7b5f862a2b', avatar: '', name: '', description: '' },
  ]);
  // Create 'articles' table
  await knex.schema.createTable("articles", (table) => {
    table.increments("id").primary();
    table.string("hash", 255).notNullable();
    // NOTE(review): several fixture `content` JSON strings below are longer than
    // this 255-char limit — verify the inserts are not truncated or rejected
    table.string("content", 255).notNullable();
    table.integer("author_id").unsigned().notNullable();
    table.foreign("author_id").references("id").inTable("users");
  });
  // Insert data into 'articles' table
  await knex("articles").insert([
    { id: 2, hash: 'random-hash', content: 'Lorem ipsum dolor sit amet', author_id: 1 },
    { id: 3, hash: 'random-hash', content: 'Lorem ipsum dolor sit amet', author_id: 1 },
    { id: 4, hash: '00000000000', content: '{"time":1683731258538,"blocks":[{"id":"sheNwCUP5A","type":"header","data":{"text":"Title","level":1}},{"id":"u3i1-RBll_","type":"paragraph","data":{"text":"ceecec"}},{"id":"Z-X8jY2mAi","type":"paragraph","data":{"text":"ececec"}}],"version":"2.26.5"}', author_id: 6 },
    { id: 6, hash: '00000000000', content: '{"time":1683796077710,"blocks":[{"id":"sheNwCUP5A","type":"header","data":{"text":"Title11","level":1}},{"id":"4RA6seA4xt","type":"paragraph","data":{"text":"efefwefwef"}}],"version":"2.26.5"}', author_id: 6 },
    { id: 7, hash: '00000000000', content: '{"time":1683796230168,"blocks":[{"id":"sheNwCUP5A","type":"header","data":{"text":"Title1122","level":1}},{"id":"5Rk0mmE5T7","type":"paragraph","data":{"text":"yukddd11"}},{"id":"5jV6cesj88","type":"paragraph","data":{"text":"yku"}}],"version":"2.26.5"}', author_id: 6 },
    { id: 8, hash: '00000000000', content: '{"time":1683795761833,"blocks":[{"id":"sheNwCUP5A","type":"header","data":{"text":"Title","level":1}},{"id":"buOPouRBIE","type":"paragraph","data":{"text":"cdchh"}}],"version":"2.26.5"}', author_id: 6 },
    { id: 11, hash: '00000000000', content: '{"time":1683795580253,"blocks":[{"id":"fzJUR75ZC8","type":"paragraph","data":{"text":"111111222"}},{"id":"2xATC4OkUH","type":"paragraph","data":{"text":"111111"}}],"version":"2.26.5"}', author_id: 6 },
  ]);
  await knex.schema.createTable('images', table => {
    table.increments('id');
    table.integer("author_id").unsigned().notNullable();
    table.string('signature', 255).notNullable();
    table.string('path', 255).notNullable();
    table.foreign("author_id").references("id").inTable("users");
  });
}

// Reverts the initial schema. Tables are dropped children-first so the
// foreign keys on `images`/`articles` never reference a missing `users` table.
export async function down(knex: Knex): Promise<void> {
  // Drop 'images' table
  await knex.schema.dropTable('images');
  // Drop 'articles' table
  await knex.schema.dropTable("articles");
  // Drop 'users' table
  await knex.schema.dropTable("users");
}
================================================
FILE: migrations/20230713094839_fs_updates.ts
================================================
import { Knex } from "knex";
/**
 * Creates the `fs_update` table: one row per stored file-system update,
 * uniquely identified by the (public_key, update_id) pair.
 */
export async function up(knex: Knex): Promise<void> {
  return knex.schema.createTable('fs_update', (table) => {
    table.increments('id').unsigned().primary()
    table.string('public_key', 64).notNullable()
    table.integer('update_id').unsigned().notNullable()
    table.text('update', 'longtext').notNullable()
    table.dateTime('created_at').notNullable().defaultTo(knex.fn.now())
    // A user (public_key) may submit each update_id only once
    table.unique(['public_key', 'update_id'])
    // Rows are looked up by public_key
    table.index('public_key')
  })
}

/**
 * Drops the `fs_update` table.
 */
export async function down(knex: Knex): Promise<void> {
  return knex.schema.dropTable('fs_update')
}
================================================
FILE: migrations/20230716103734_file.ts
================================================
import { Knex } from "knex";
/**
 * Creates the `file` cache table, keyed by a 64-char storage reference.
 */
export async function up(knex: Knex): Promise<void> {
  return knex.schema.createTable('file', table => {
    table.string('reference', 64).primary().unique().index()
    table.integer('status').unsigned()
    table.string('mime_type', 255)
    // File size in bytes
    table.bigInteger('size').unsigned()
    // Content hash, indexed for lookup by digest
    table.string('sha256', 64).index()
    table.dateTime('created_at').defaultTo(knex.fn.now())
    table.dateTime('updated_at').defaultTo(knex.fn.now())
  })
}

/**
 * Drops the `file` table.
 */
export async function down(knex: Knex): Promise<void> {
  return knex.schema.dropTable('file')
}
================================================
FILE: migrations/20230725081357_settings.ts
================================================
import { Knex } from "knex";
/**
 * Creates the `settings` key/value table with created/updated timestamps.
 */
export async function up(knex: Knex): Promise<void> {
  return knex.schema.createTable('settings', (table) => {
    table.string('key', 255).primary().index()
    table.text('value')
    table.timestamp('created_at').defaultTo(knex.fn.now())
    table.timestamp('updated_at').defaultTo(knex.fn.now())
  })
}

/**
 * Drops the `settings` table.
 */
export async function down(knex: Knex): Promise<void> {
  return knex.schema.dropTable('settings')
}
================================================
FILE: migrations/db.sql
================================================
-- Creates the application database with full Unicode (utf8mb4) support.
CREATE DATABASE fair_journal
CHARACTER SET utf8mb4
COLLATE utf8mb4_general_ci;
================================================
FILE: nodemon.json
================================================
{
"watch": [
"src"
],
"ext": ".ts,.js",
"ignore": [],
"exec": "ts-node ./src/index.ts"
}
================================================
FILE: package.json
================================================
{
"name": "fair-journal-backend",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"prepublishOnly": "rimraf dist && npm run compile:types && npm run compile:node --env mode=production",
"start:dev": "nodemon",
"build": "rimraf ./build && tsc",
"start": "ts-node src/index.ts",
"test": "jest --runInBand",
"lint:check": "eslint \"src/**/*.ts\" \"test/**/*.ts\" && prettier --check \"src/**/*.ts\" \"test/**/*.ts\"",
"check:types": "tsc --project tsconfig.test.json"
},
"author": "",
"license": "ISC",
"devDependencies": {
"@types/cors": "^2.8.13",
"@types/express": "^4.17.17",
"@types/jest": "^29.5.2",
"@types/multer": "^1.4.7",
"@types/node": "^20.1.0",
"@types/supertest": "^2.0.12",
"@types/tmp": "^0.2.3",
"@typescript-eslint/eslint-plugin": "^5.59.2",
"babel-jest": "^29.6.1",
"eslint": "^8.44.0",
"eslint-config-prettier": "^8.8.0",
"eslint-config-standard-with-typescript": "^34.0.1",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-jest": "^27.2.2",
"eslint-plugin-n": "^15.7.0",
"eslint-plugin-prettier": "^4.2.1",
"eslint-plugin-promise": "^6.1.1",
"eslint-plugin-unused-imports": "^2.0.0",
"jest": "^29.6.1",
"nodemon": "^2.0.22",
"prettier": "^2.8.8",
"rimraf": "^5.0.0",
"supertest": "^6.3.3",
"ton-crypto": "^3.2.0",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1",
"typescript": "^5.0.4"
},
"dependencies": {
"@fairjournal/file-system": "^1.10.1",
"body-parser": "^1.20.2",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"express": "^4.18.2",
"knex": "^2.4.2",
"multer": "^1.4.5-lts.1",
"mysql2": "^3.5.1",
"sharp": "^0.32.1",
"tmp": "^0.2.1",
"tonstorage-cli": "^1.1.5",
"uuid": "^9.0.0"
}
}
================================================
FILE: src/app.ts
================================================
import express, { Application } from 'express'
import cors from 'cors'
import router from './routes'
import fileSystemRouter from './controllers/file-system'
import { FileSystem } from '@fairjournal/file-system'
import { initFs, syncFs } from './fs'
import { TonstorageCLI } from 'tonstorage-cli'
import { delay } from './utils'
const app: Application = express()
// Module-level singletons: `fileSystem` is (re)assigned by clearFileSystem/initFs,
// `tonstorage` is assigned by syncFileSystem once the storage daemon is reachable.
export let fileSystem: FileSystem
export let tonstorage: TonstorageCLI
/**
 * Express error-handling middleware: converts any error passed down the
 * chain into a JSON `{ status: 'error', message }` body with HTTP 500.
 *
 * The four-argument signature is what makes Express treat this function
 * as an error handler, even though `next` is unused here.
 */
export const errorHandler = (
  err: Error,
  req: express.Request,
  res: express.Response,
  next: express.NextFunction,
): void => {
  res.status(500).json({
    status: 'error',
    message: err.message,
  })
}
// Middleware: JSON body parsing and permissive CORS
app.use(express.json())
app.use(cors())
// Routes
app.use('/api', router) // legacy REST API (users/articles/images)
app.use('/avatars', express.static('avatars')) // serve uploaded avatar files from disk
app.use('/v1/fs', fileSystemRouter) // file-system gateway API
app.use(errorHandler) // must be registered last so it catches route errors
/**
 * Writes a message to stdout, but only when SHOW_LOGS=true in the environment.
 */
function log(message: string): void {
  if (process.env.SHOW_LOGS !== 'true') {
    return
  }
  // eslint-disable-next-line no-console
  console.log(message)
}
/**
 * Waits for TonStorage to be ready
 *
 * Polls `getProviderInfo` up to TON_STORAGE_WAIT_ATTEMPTS times, sleeping
 * TON_STORAGE_CHECK_WAIT_TIMEOUT ms between attempts, and throws when the
 * daemon never becomes reachable.
 *
 * @param tonStorage TonStorage instance
 */
export async function waitTonStorage(tonStorage: TonstorageCLI): Promise<void> {
  /**
   * TonStorage provider info response
   */
  interface Response {
    ok: boolean
    error?: string
    code: number
  }
  const maxAttempts = parseInt(process.env.TON_STORAGE_WAIT_ATTEMPTS || '10')
  const waitTime = parseInt(process.env.TON_STORAGE_CHECK_WAIT_TIMEOUT || '3000')
  let ready = false

  for (let attempt = 1; attempt <= maxAttempts && !ready; attempt++) {
    try {
      const info = (await tonStorage.getProviderInfo()) as Response

      if (info?.error?.includes('timeout')) {
        // Daemon is up but not answering yet — wait and retry
        // eslint-disable-next-line no-console
        console.log(`Ton Storage: connection timeout occurred. Waiting and retrying (${attempt}/${maxAttempts})...`)
        await delay(waitTime)
      } else {
        ready = true
      }
    } catch (e) {
      // eslint-disable-next-line no-console
      console.log('An error occurred. Waiting for TonStorage...')
      await delay(waitTime)
    }
  }

  if (!ready) {
    throw new Error(`Failed to get provider info after ${maxAttempts} attempts.`)
  }
}
/**
 * Creates a TonStorage CLI instance from .env configuration.
 *
 * Required env vars: TON_STORAGE_BIN_PATH, TON_STORAGE_HOST,
 * TON_STORAGE_DATABASE_PATH and TON_STORAGE_TIMEOUT.
 *
 * @throws Error when any required variable is missing or the timeout
 *         does not parse to a non-zero number
 */
export function createTonStorageInstance(): TonstorageCLI {
  const bin = process.env.TON_STORAGE_BIN_PATH
  const host = process.env.TON_STORAGE_HOST
  const database = process.env.TON_STORAGE_DATABASE_PATH
  const timeout = Number(process.env.TON_STORAGE_TIMEOUT)

  if (!bin || !host || !database || !timeout) {
    throw new Error('TonStorage is not configured via .env')
  }

  // Reuse the already-parsed (and validated) timeout instead of
  // re-reading and re-converting the env var a second time
  return new TonstorageCLI({
    bin,
    host,
    database,
    timeout,
  })
}
/**
 * Connects to TonStorage and synchronizes the file system.
 *
 * Assigns the module-level `tonstorage` instance, blocks until the storage
 * daemon responds, then runs `syncFs` over the module-level `fileSystem`.
 * Progress is logged only when SHOW_LOGS=true.
 */
export async function syncFileSystem(): Promise<void> {
  log('Connecting to TonStorage...')
  tonstorage = createTonStorageInstance()
  await waitTonStorage(tonstorage)
  log('Connected to TonStorage!')
  log('Sync file system...')
  await syncFs(fileSystem)
  log('File system synced!')
}

/**
 * Resets the module-level `fileSystem` to a freshly initialized instance.
 */
export function clearFileSystem(): void {
  fileSystem = initFs()
}
export default app
================================================
FILE: src/controllers/ArticleController.ts
================================================
import { Request, Response } from 'express'
import { OkPacket, RowDataPacket } from 'mysql2'
import pool from '../db'
/**
 * GET handler: returns every row of the `articles` table as JSON.
 */
const getAllArticles = async (req: Request, res: Response): Promise<Response> => {
  try {
    const [articles] = await pool.query<RowDataPacket[]>('SELECT * FROM articles')

    return res.json(articles)
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * GET handler: returns one article by numeric id, joined with its author's
 * public profile fields (name, avatar, wallet). 404 when no row matches.
 */
const getArticleById = async (req: Request, res: Response): Promise<Response> => {
  const id = Number(req.params.id)

  if (!id) {
    return res.status(400).send('Article id is required')
  }

  try {
    const [rows] = await pool.query<RowDataPacket[]>(
      `SELECT articles.*, users.name, users.avatar, users.wallet
       FROM articles
       JOIN users ON articles.author_id = users.id
       WHERE articles.id = ?`,
      [id],
    )
    const [article] = rows

    if (!article) {
      return res.status(404).send(`Article with id ${id} not found`)
    }

    return res.json(article)
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * POST handler: validates the payload and inserts a new article row.
 * Responds 201 with the generated id.
 */
const createArticle = async (req: Request, res: Response): Promise<Response> => {
  const { authorId, hash, content } = req.body

  if (!authorId) {
    return res.status(400).send('Author id is required')
  }

  if (!hash) {
    return res.status(400).send('Hash is required')
  }

  if (!content) {
    return res.status(400).send('Content is required')
  }

  try {
    // Content is stored as serialized JSON
    const values = [authorId, hash, JSON.stringify(content)]
    const [result] = await pool.query<OkPacket>(
      'INSERT INTO articles(author_id, hash, content) VALUES(?, ?, ?)',
      values,
    )

    return res.status(201).json({ id: result.insertId })
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * PUT handler: validates the payload and updates an existing article.
 * Responds with the id on success, 400 for a missing/invalid field and
 * 404 when no row matches the id.
 */
const updateArticle = async (req: Request, res: Response): Promise<Response | void> => {
  const id = Number(req.params.id)

  // Validate the id up front (consistent with getArticleById/deleteArticle);
  // previously a missing/non-numeric id reached the SQL query as NaN
  if (!id) {
    return res.status(400).send('Article id is required')
  }
  const { authorId, hash, content } = req.body

  if (!authorId) {
    return res.status(400).send('Author id is required')
  }

  if (!hash) {
    return res.status(400).send('Hash is required')
  }

  if (!content) {
    return res.status(400).send('Content is required')
  }

  try {
    const [result] = await pool.query<OkPacket>(
      'UPDATE articles SET author_id = ?, hash = ?, content = ? WHERE id = ?',
      [authorId, hash, JSON.stringify(content), id],
    )

    if (result.affectedRows === 0) {
      return res.status(404).send(`Article with id ${id} not found`)
    }

    return res.json({ id })
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * DELETE handler: removes an article by id. Responds with the deleted id,
 * or 404 when no row was affected.
 */
const deleteArticle = async (req: Request, res: Response): Promise<Response | void> => {
  const id = Number(req.params.id)

  if (!id) {
    return res.status(400).send('Id is required')
  }

  try {
    const [result] = await pool.query<OkPacket>('DELETE FROM articles WHERE id = ?', [id])

    return result.affectedRows === 0
      ? res.status(404).send(`Article with id ${id} not found`)
      : res.json({ id })
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
export { getAllArticles, getArticleById, createArticle, updateArticle, deleteArticle }
================================================
FILE: src/controllers/ImageController.ts
================================================
import { Request, Response } from 'express'
import { OkPacket } from 'mysql2'
import pool from '../db'
/**
 * POST handler: validates an uploaded image, stores its metadata
 * (author, placeholder signature, disk path) and responds 201 with the
 * payload shape the editor expects (`success`, `file.url`).
 */
const upload = async (req: Request, res: Response): Promise<Response> => {
  const { authorId } = req.body

  if (!authorId) {
    return res.status(400).send('Author id is required')
  }
  const file = req.file

  if (!file || !file.path) {
    return res.status(400).send('No image uploaded.')
  }

  // Reject images above the 10 MB limit
  const maxSizeInBytes = 10 * 1024 * 1024

  if (file.size > maxSizeInBytes) {
    return res.status(400).send('Image size exceeds the maximum limit of 10 megabytes.')
  }

  try {
    const { path } = file
    const [result] = await pool.query<OkPacket>('INSERT INTO images(author_id, signature, path) VALUES(?, ?, ?)', [
      authorId,
      '---',
      path,
    ])

    return res.status(201).json({
      id: result.insertId,
      success: 1,
      file: {
        url: `${process.env.URL}${path}`,
        relativePath: path,
      },
    })
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
export default { upload }
================================================
FILE: src/controllers/UserController.ts
================================================
import { Request, Response } from 'express'
import { OkPacket, RowDataPacket } from 'mysql2'
import pool from '../db'
import User from '../models/User'
import * as fs from 'fs'
/**
 * GET handler: fetches a single user row by numeric id (404 when absent).
 */
const getUserById = async (req: Request, res: Response): Promise<Response> => {
  const id = Number(req.params.id)

  if (!id) {
    return res.status(400).send('Id is required')
  }

  try {
    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [id])
    const user = rows[0] as User

    return user ? res.json(user) : res.status(404).send(`User with id ${id} not found`)
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * GET handler: lists every article authored by the given user id.
 */
const getArticlesByUserId = async (req: Request, res: Response): Promise<Response> => {
  const id = Number(req.params.id)

  if (!id) {
    return res.status(400).send('User id is required')
  }

  try {
    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM articles WHERE author_id = ?', [id])

    // Defensive default: always respond with an array
    return res.json(rows ?? [])
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * Update user info (wallet, name, description and optional avatar image).
 *
 * Fix: the original read `user.avatar` without checking that the user row
 * exists, so an unknown id crashed with a TypeError (reported as a 500)
 * instead of returning 404. It also deleted the old avatar file before the
 * not-found check, and an UPDATE with identical values (affectedRows === 0
 * for unchanged rows in MySQL) produced a spurious 404 after the avatar was
 * already removed. The existence check now happens up-front.
 */
const updateUser = async (req: Request, res: Response): Promise<Response> => {
  try {
    const id = Number(req.params.id)

    if (!id) {
      throw new Error('Id is required')
    }

    const { wallet, name, description } = req.body

    if (!wallet) {
      throw new Error('Wallet is required')
    }

    if (!name) {
      throw new Error('Name is required')
    }

    let avatarPath = null

    if (req.file) {
      // Check avatar image size
      const maxSizeInBytes = 10 * 1024 * 1024 // 10 megabytes

      if (req.file.size > maxSizeInBytes) {
        return res.status(400).send('Avatar image size exceeds the maximum limit of 10 megabytes.')
      }

      avatarPath = req.file.path
    }

    // get old user info — also guards against updating a non-existent user
    const [rows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [id])
    const user = rows[0] as User

    if (!user) {
      return res.status(404).send(`User with id "${id}" not found`)
    }

    // update user info; avatar keeps its old value when no new file was uploaded
    await pool.query<OkPacket>(
      'UPDATE users SET wallet = ?, avatar = IFNULL(?, avatar), name = ?, description = ? WHERE id = ?',
      [wallet, avatarPath, name, description, id],
    )

    // remove old avatar file only after the DB update succeeded
    if (user.avatar && fs.existsSync(user.avatar)) {
      fs.unlinkSync(user.avatar)
    }

    // get updated user info
    const [updatedRows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [id])
    const updatedUser = updatedRows[0]

    if (!updatedUser) {
      throw new Error('User not found')
    }

    return res.json(updatedUser)
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * Deletes a user by id. Responds 204 on success, 404 when the id is unknown.
 */
const deleteUser = async (req: Request, res: Response): Promise<Response> => {
  const id = Number(req.params.id)

  if (!id) {
    return res.status(400).send('User id is required')
  }

  try {
    const [result] = await pool.query<OkPacket>('DELETE FROM users WHERE id = ?', [id])

    return result.affectedRows === 0 ? res.status(404).send(`User with id ${id} not found`) : res.sendStatus(204)
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
/**
 * Finds a user by wallet address, lazily creating one on first login.
 *
 * Fix: a missing "wallet" in the request body is now rejected with 400
 * instead of attempting an INSERT with an undefined wallet (which surfaced
 * as a 500 or created a broken row). Also un-shadowed the inner `rows`.
 */
const authorizeByWallet = async (req: Request, res: Response): Promise<Response> => {
  const { wallet }: { wallet: string } = req.body

  if (!wallet) {
    return res.status(400).send('Wallet is required')
  }

  try {
    // Check if the user already exists in the database
    const [existingRows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE wallet = ?', [wallet])
    let user = existingRows[0]

    if (!user) {
      // If the user doesn't exist, create a new one with empty profile fields
      const [result] = await pool.query<OkPacket>(
        'INSERT INTO users (wallet, name, description, avatar) VALUES (?, ?, ?, ?)',
        [wallet, '', '', ''],
      )

      // Retrieve the newly created user from the database
      const [createdRows] = await pool.query<RowDataPacket[]>('SELECT * FROM users WHERE id = ?', [result.insertId])
      user = createdRows[0]
    }

    return res.json(user)
  } catch (e) {
    return res.status(500).send(`Internal Server Error: ${(e as Error).message}`)
  }
}
export { getUserById, updateUser, deleteUser, getArticlesByUserId, authorizeByWallet }
================================================
FILE: src/controllers/file-system/app/get-settings-action.ts
================================================
import { Request, Response, NextFunction } from 'express'
import pool from '../../../db'
import { getSetting } from '../utils'
import { assertString } from '../../../utils'
/**
 * Gets a settings value from the DB by its "key" query parameter.
 *
 * @param req Request
 * @param res Response
 * @param next Next function
 */
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
  try {
    const { key } = req.query

    if (!key) {
      throw new Error('"key" is not set')
    }

    assertString(key)
    res.json({
      status: 'ok',
      value: await getSetting(pool, key),
    })
  } catch (e) {
    next(e)
  }
}
================================================
FILE: src/controllers/file-system/app/index.ts
================================================
import express from 'express'
import publishAction from './publish-action'
import getSettingsAction from './get-settings-action'

// Router for app-level file-system endpoints
const router = express.Router()

// POST /publish — upload the FS state to storage (password protected, see publish-action)
router.post('/publish', publishAction)
// GET /get-settings — read a settings value by key
router.get('/get-settings', getSettingsAction)

export default router
================================================
FILE: src/controllers/file-system/app/publish-action.ts
================================================
import { NextFunction, Request, Response } from 'express'
import { fileSystem } from '../../../app'
import { SettingsKey, uploadData, upsertSettings } from '../utils'
import pool from '../../../db'
/**
 * Publish action request body
 */
export interface PublishBody {
  /**
   * Password for the update (compared against PUBLISH_FS_PASSWORD from .env)
   */
  password: string
}
/**
* Publish action for the file system
*/
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
try {
const { password } = req.body as PublishBody
if (!process.env.PUBLISH_FS_PASSWORD) {
throw new Error('Publish password is not set in .env')
}
if (password !== process.env.PUBLISH_FS_PASSWORD) {
throw new Error('Invalid password')
}
const uploadResult = await fileSystem.upload({
uploadData: uploadData,
})
const reference = uploadResult.reference
await upsertSettings(pool, SettingsKey.FS_STATE_REFERENCE, reference)
// todo send tx to smart contract with the actual reference
// todo send tx if only changed
res.json({
status: 'ok',
reference,
})
} catch (e) {
next(e)
}
}
================================================
FILE: src/controllers/file-system/blob/get-article-action.ts
================================================
import { NextFunction, Request, Response } from 'express'
import { assertAddress, assertArticleName } from '../../../utils'
import { DEFAULT_DIRECTORY } from '../const'
import { assertDirectory, assertFile, assertFiles, Directory, File } from '@fairjournal/file-system'
import { fileSystem } from '../../../app'
import { Article, ARTICLE_INDEX_FILE_NAME, ArticleResponse, directoryToArticle } from './utils'
/**
 * Checks if the user exists in the file system. If not, an error is thrown.
 *
 * @param address The address of the user
 * @throws Will throw an error if the user does not exist in the file system
 */
function checkUserExists(address: string): void {
  if (fileSystem.isUserExists(address)) {
    return
  }

  throw new Error(`User not found: "${address}"`)
}
/**
 * Retrieves article data based on the user address and the slug.
 *
 * @param address The address of the user
 * @param slug The slug of the article
 * @returns The data of the article
 * @throws Will throw an error if the article is not found
 */
async function getArticleData(address: string, slug: string): Promise<File | Directory> {
  const articlePath = `/${address}/${DEFAULT_DIRECTORY}/${slug}`

  try {
    return fileSystem.getPathInfo(articlePath)
  } catch (e) {
    throw new Error(`Article not found: "${slug}". ${(e as Error).message}`)
  }
}
/**
 * Converts the retrieved directory data into an article.
 *
 * @param data The raw data of the article
 * @param slug The slug of the article
 * @returns The converted article
 * @throws Will throw an error if the data cannot be converted into an article
 */
async function convertDataToArticle(data: Directory, slug: string): Promise<Article> {
  try {
    return await directoryToArticle(data)
  } catch (e) {
    const reason = (e as Error).message
    throw new Error(`Article not found: "${slug}". Error: ${reason}`)
  }
}
/**
 * Handles the GET request to retrieve a full article.
 *
 * @param req The request object
 * @param res The response object
 * @param next The next middleware function in the stack
 */
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
  try {
    const { userAddress, slug } = req.query
    assertAddress(userAddress)
    assertArticleName(slug)

    const address = userAddress.toLowerCase()
    checkUserExists(address)

    const articleData = await getArticleData(address, slug)
    assertDirectory(articleData)
    assertFiles(articleData.files)

    // the index file holds the article JSON; its hash is returned as the reference
    const indexArticle = articleData.files.find(item => item.name === ARTICLE_INDEX_FILE_NAME)
    assertFile(indexArticle)

    const payload: ArticleResponse = {
      status: 'ok',
      userAddress,
      article: await convertDataToArticle(articleData, slug),
      reference: indexArticle.hash,
    }
    res.json(payload)
  } catch (e) {
    next(e)
  }
}
================================================
FILE: src/controllers/file-system/blob/get-articles-action.ts
================================================
import { NextFunction, Request, Response } from 'express'
import { assertAddress } from '../../../utils'
import { DEFAULT_DIRECTORY } from '../const'
import { assertDirectories, assertDirectory, File, Directory } from '@fairjournal/file-system'
import { fileSystem } from '../../../app'
import { ArticlesResponse, directoriesToShortArticles } from './utils'
/**
 * Check if user exists
 *
 * @param userAddress - User address in the blockchain.
 * @throws Will throw an error if the user is not found.
 */
function checkUserExistence(userAddress: string): void {
  const exists = fileSystem.isUserExists(userAddress.toLowerCase())

  if (!exists) {
    throw new Error(`User not found: "${userAddress}"`)
  }
}
/**
 * Get path info and wrap any failure in an "Articles not found" error.
 *
 * @param path - Path to the user's articles directory.
 * @throws Will throw an error if the articles are not found.
 */
function getPathInfoWithErrorHandling(path: string): File | Directory {
  try {
    return fileSystem.getPathInfo(path)
  } catch (e) {
    const reason = (e as Error).message
    throw new Error(`Articles not found. ${reason}`)
  }
}
/**
 * Get short versions of all articles of the user
 *
 * @param req Request
 * @param res Response
 * @param next Next function
 */
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
  try {
    const { userAddress } = req.query
    assertAddress(userAddress)
    checkUserExistence(userAddress)

    const articlesPath = `/${userAddress.toLowerCase()}/${DEFAULT_DIRECTORY}`
    const data = getPathInfoWithErrorHandling(articlesPath)
    assertDirectory(data)
    assertDirectories(data.directories)

    // todo cache this object for N minutes. And invalidate cache when new article is added
    const response: ArticlesResponse = {
      status: 'ok',
      userAddress,
      articles: await directoriesToShortArticles(data.directories),
    }
    res.json(response)
  } catch (e) {
    next(e)
  }
}
================================================
FILE: src/controllers/file-system/blob/get-path-info-action.ts
================================================
import { NextFunction, Request, Response } from 'express'
import { assertAddress } from '../../../utils'
import { PathInfoResponse } from './utils'
import { assertPath, assertUserExists, getPathInfo } from '../utils'
/**
 * Handles the GET request to retrieve info about a path in a user's file system.
 *
 * @param req The request object
 * @param res The response object
 * @param next The next middleware function in the stack
 */
export default (req: Request, res: Response, next: NextFunction): void => {
  try {
    const { userAddress, path } = req.query
    assertAddress(userAddress)
    assertPath(path)

    const address = userAddress.toLowerCase()
    assertUserExists(address)

    const response: PathInfoResponse = {
      status: 'ok',
      userAddress,
      path,
      data: getPathInfo(address, path),
    }
    res.json(response)
  } catch (e) {
    next(e)
  }
}
================================================
FILE: src/controllers/file-system/blob/index.ts
================================================
import express from 'express'
import uploadAction from './upload-action'
import getArticleAction from './get-article-action'
import getArticlesAction from './get-articles-action'
import getPathInfoAction from './get-path-info-action'
import multer from 'multer'
import { MAX_BLOB_SIZE } from '../const'
// Store uploaded blobs on disk under "blob/" (multer generates the filename)
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, 'blob/')
  },
})

// Reject uploads larger than MAX_BLOB_SIZE at the multer level
const upload = multer({ storage, limits: { fileSize: MAX_BLOB_SIZE } })

// Router for blob endpoints: upload + article/path retrieval
const router = express.Router()

// POST /upload — accepts a single file in the "blob" form field
router.post('/upload', upload.single('blob'), uploadAction)
router.get('/get-article', getArticleAction)
router.get('/get-articles', getArticlesAction)
router.get('/get-path-info', getPathInfoAction)

export default router
================================================
FILE: src/controllers/file-system/blob/upload-action.ts
================================================
import { NextFunction, Request, Response } from 'express'
import pool from '../../../db'
import { assertReference, calculateSHA256, toAbsolutePath } from '../../../utils'
import { RowDataPacket } from 'mysql2'
import { tonstorage } from '../../../app'
import * as fs from 'fs'
import { FileStatus } from '../types'
import { getReferencePath } from '../../../fs'
import path from 'path'
import { uploadToStorage } from '../utils'
/**
 * DB model of the file (row of the `file` table)
 */
export interface DBFileInfo {
  /**
   * Reference (hash) of the file in storage, lowercase hex
   */
  reference: string

  /**
   * Status of the file (see FileStatus enum: New / Used)
   */
  status: number

  /**
   * Mime type of the file
   */
  mime_type: string

  /**
   * Size of the file in bytes
   */
  size: number

  /**
   * Sha256 of the file in lowercase (deduplication key)
   */
  sha256: string

  /**
   * Date of creation
   */
  created_at?: Date

  /**
   * Date of last update
   */
  updated_at?: Date
}
/**
 * Inserts file info into database.
 *
 * Fix: removed the no-op `catch (error) { throw error }` — try/finally is
 * sufficient to guarantee the connection is released.
 *
 * @param info File info
 */
async function insertFileInfo(info: DBFileInfo): Promise<void> {
  const connection = await pool.getConnection()

  try {
    await connection.query(
      `INSERT INTO file (reference, status, mime_type, size, sha256, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)`,
      [info.reference, info.status, info.mime_type, info.size, info.sha256, info.created_at, info.updated_at],
    )
  } finally {
    // always return the connection to the pool
    connection.release()
  }
}
/**
 * Gets file info from database.
 *
 * Fix: removed the no-op `catch (error) { throw error }` — try/finally is
 * sufficient to guarantee the connection is released.
 *
 * @param sha256 SHA256 of the file
 * @returns the stored file record
 * @throws when no record with this sha256 exists
 */
async function getFileInfo(sha256: string): Promise<DBFileInfo> {
  const connection = await pool.getConnection()

  try {
    const [rows] = await connection.query<RowDataPacket[]>(`SELECT * FROM file WHERE sha256 = ?`, [sha256])

    // Check if a row was returned and then return it
    if (Array.isArray(rows) && rows.length > 0) {
      return rows[0] as DBFileInfo
    }

    throw new Error('No file with this sha256 exists in the database')
  } finally {
    connection.release()
  }
}
/**
 * Checks whether a file with the given sha256 is already uploaded.
 *
 * Note: any DB error is deliberately treated as "not uploaded" (returns false),
 * so callers will simply (re)upload the file.
 *
 * @param sha256 SHA256 of the file
 */
async function isSha256Uploaded(sha256: string): Promise<boolean> {
  const normalized = sha256.toLowerCase()
  const connection = await pool.getConnection()

  try {
    const [rows] = await connection.query(`SELECT * FROM file WHERE sha256 = ?`, [normalized])

    if (!Array.isArray(rows)) {
      throw new Error('Unexpected query result format')
    }

    return rows.length > 0
  } catch (error) {
    return false
  } finally {
    // always return the connection to the pool
    connection.release()
  }
}
/**
 * Removes a file and a directory (best-effort: `force` makes missing paths a no-op).
 *
 * @param filePath Path to file
 * @param directoryPath Path to directory
 */
function removeFileAndDirectory(filePath: string, directoryPath: string): void {
  fs.rmSync(filePath, { force: true })
  fs.rmSync(directoryPath, { force: true, recursive: true })
}
/**
 * Validate the uploaded file.
 *
 * @param file File to be validated
 * @throws Will throw an error if the file or its properties are not valid
 */
function assertValidFile(file: Express.Multer.File | undefined): asserts file is Express.Multer.File {
  if (!file) {
    throw new Error('File is not uploaded')
  }

  // each required property is checked in order, failing with a specific message
  const requirements: Array<{ value: unknown; message: string }> = [
    { value: file.path, message: 'File path is not defined' },
    { value: file.mimetype, message: 'File mime type is not defined' },
    { value: file.size, message: 'File size is not defined' },
  ]

  for (const { value, message } of requirements) {
    if (!value) {
      throw new Error(message)
    }
  }
}
/**
 * Handle file upload and storage.
 *
 * Deduplicates by content hash: when a file with the same sha256 was uploaded
 * before, the existing DB record is returned and nothing is re-uploaded.
 *
 * @param filePath Path to the file
 * @param targetFilePath Target path of the file
 * @param targetDirectoryPath Target directory of the file
 * @param sha256 SHA256 of the file
 * @param file File to be uploaded
 * @returns fileInfo Information about the file in the database
 * @throws Will throw an error if the storage adding fails
 */
async function handleFileUpload(
  filePath: string,
  targetFilePath: string,
  targetDirectoryPath: string,
  sha256: string,
  file: Express.Multer.File,
): Promise<DBFileInfo> {
  let fileInfo: DBFileInfo
  const isUploaded = await isSha256Uploaded(sha256)

  if (isUploaded) {
    // already known — reuse the stored record (throws if the row is missing)
    fileInfo = await getFileInfo(sha256)
  } else {
    if (!tonstorage) {
      throw new Error('Ton Storage is not initialized')
    }

    // clear stale leftovers, then move the upload into its content-addressed location
    removeFileAndDirectory(targetFilePath, targetDirectoryPath)
    fs.mkdirSync(targetDirectoryPath, { recursive: true })
    fs.renameSync(filePath, targetFilePath)
    // `false` — do not start seeding the file until the article is published
    const reference = await uploadToStorage(targetFilePath, sha256, false)
    assertReference(reference)
    fileInfo = {
      reference,
      status: FileStatus.New,
      mime_type: file.mimetype,
      size: file.size,
      sha256,
      created_at: new Date(),
      updated_at: new Date(),
    }
    await insertFileInfo(fileInfo)
  }

  return fileInfo
}
/**
 * Checks that the path exists on disk.
 *
 * @param path Path to check
 * @param message Message to be thrown if path does not exist
 */
function checkPathExists(path: string, message: string): void {
  if (fs.existsSync(path)) {
    return
  }

  throw new Error(`Path "${path}" does not exist. Message: ${message}`)
}
/**
 * Removes the uploaded file at the provided filePath (best-effort).
 *
 * @async
 * @param filePath Path to the file that should be removed. Empty string is a no-op.
 */
async function removeUploadedFile(filePath: string): Promise<void> {
  if (!filePath) {
    return
  }

  try {
    if (fs.existsSync(filePath)) {
      fs.unlinkSync(filePath)
    }
  } catch (e) {
    // best-effort cleanup: ignore removal failures
  }
}
/**
 * Sets the permissions of the reference's directory and file to 0755 (best-effort).
 *
 * @param reference Reference of the file
 */
function setPermissions(reference: string): void {
  const filePath = getReferencePath(reference)

  try {
    fs.chmodSync(path.dirname(filePath), 0o755)
    fs.chmodSync(filePath, 0o755)
  } catch (error) {
    // best-effort: chmod failures are intentionally ignored
  }
}
/**
 * Uploads file, upload it to the storage, insert info into database and return the file info
 *
 * @param req Request
 * @param res Response
 * @param next Next function
 */
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
  // remembered so the temp upload can be removed in `finally`, even on failure
  let filePath = ''

  try {
    const rootPath = process.env.FILES_ROOT_PATH || __dirname
    checkPathExists(rootPath, 'root path')
    const file = req.file
    assertValidFile(file)
    filePath = toAbsolutePath(rootPath, file.path)
    checkPathExists(filePath, 'file path')
    // content hash determines the target directory — acts as the deduplication key
    const sha256 = await calculateSHA256(filePath)
    const targetDirectoryPath = toAbsolutePath(rootPath, 'blob', sha256)
    const targetFilePath = toAbsolutePath(targetDirectoryPath, 'blob')
    const fileInfo = await handleFileUpload(filePath, targetFilePath, targetDirectoryPath, sha256, file)
    setPermissions(fileInfo.reference)
    // storage keeps its own copy (uploadToStorage uses copy: true), so the staging dir can go
    removeFileAndDirectory(targetFilePath, targetDirectoryPath)
    res.json({
      status: 'ok',
      data: {
        reference: fileInfo.reference,
        mime_type: fileInfo.mime_type,
        sha256: fileInfo.sha256,
        size: fileInfo.size,
      },
    })
  } catch (e) {
    next(e)
  } finally {
    await removeUploadedFile(filePath)
  }
}
================================================
FILE: src/controllers/file-system/blob/utils.ts
================================================
import { assertFiles, Directory, File } from '@fairjournal/file-system'
import { assertJson, bytesToString } from '../../../utils'
import { extractArticleText, getContentByReference } from '../../../fs'
/**
 * Max length of the short article preview text (characters)
 */
export const SHORT_ARTICLE_LENGTH = 1000

/**
 * Article index file name — the file inside an article directory that holds its JSON
 */
export const ARTICLE_INDEX_FILE_NAME = 'index-json'

/**
 * Short version of article, used for listings
 */
export interface ShortArticle {
  /**
   * Human-readable name of the article (directory name, lowercased)
   */
  slug: string

  /**
   * Short text of the article (built with extractArticleText, limited by SHORT_ARTICLE_LENGTH)
   */
  shortText: string

  /**
   * Custom data for preview
   */
  previewData: unknown
}

/**
 * Full article
 */
export interface Article {
  /**
   * Human-readable name of the article (directory name, lowercased)
   */
  slug: string

  /**
   * Full json object of the article
   */
  data: unknown

  /**
   * Custom data for preview
   */
  preview: unknown
}

/**
 * Response for `get-articles` action
 */
export interface ArticlesResponse {
  /**
   * Status of the response ('ok' on success)
   */
  status: string

  /**
   * User address
   */
  userAddress: string

  /**
   * Short versions of the user's articles
   */
  articles: ShortArticle[]
}

/**
 * Response for `get-article` action
 */
export interface ArticleResponse {
  /**
   * Status of the response ('ok' on success)
   */
  status: string

  /**
   * User address
   */
  userAddress: string

  /**
   * Article
   */
  article: Article

  /**
   * Reference (hash) of the article's index file
   */
  reference: string

  /**
   * Error message
   */
  message?: string
}

/**
 * Response for `get-path-info` action
 */
export interface PathInfoResponse {
  /**
   * Status of the response ('ok' on success)
   */
  status: string

  /**
   * User address
   */
  userAddress: string

  /**
   * Requested path inside the user's file system
   */
  path: string

  /**
   * Directory or file info for the path
   */
  data: Directory | File
}
/**
 * Converts an article directory into its short (listing) representation.
 *
 * @param directory Directory
 * @throws when the directory has no index file or its content is not valid JSON
 */
export async function directoryToShortArticle(directory: Directory): Promise<ShortArticle> {
  assertFiles(directory.files)
  const indexFile = directory.files.find(item => item.name === ARTICLE_INDEX_FILE_NAME)

  if (!indexFile) {
    throw new Error(`Article index file not found. In "${directory.name}"`)
  }

  const rawJson = bytesToString(await getContentByReference(indexFile.hash))
  assertJson(rawJson)
  const parsed = JSON.parse(rawJson) as Article

  return {
    slug: directory.name.toLowerCase(),
    shortText: extractArticleText(parsed, SHORT_ARTICLE_LENGTH),
    previewData: parsed.preview,
  }
}
/**
 * Checks whether the directory contains an article index file.
 *
 * @param directory Directory
 */
export function isArticleDirectory(directory: Directory): boolean {
  assertFiles(directory.files)

  return directory.files.some(item => item.name === ARTICLE_INDEX_FILE_NAME)
}
/**
 * Converts directories to short articles, skipping non-article directories
 * and directories whose index file cannot be read or parsed.
 *
 * @param directories Directories
 */
export async function directoriesToShortArticles(directories: Directory[]): Promise<ShortArticle[]> {
  const result: ShortArticle[] = []

  for (const directory of directories.filter(isArticleDirectory)) {
    try {
      result.push(await directoryToShortArticle(directory))
    } catch (e) {
      // best-effort: a broken article must not break the whole listing
    }
  }

  return result
}
/**
 * Converts an article directory to a full article.
 *
 * @param directory Directory
 * @throws when the directory is not an article directory or its index is invalid
 */
export async function directoryToArticle(directory: Directory): Promise<Article> {
  if (!isArticleDirectory(directory)) {
    throw new Error(`Directory "${directory.name}" is not article directory`)
  }

  assertFiles(directory.files)
  const indexFile = directory.files.find(item => item.name === ARTICLE_INDEX_FILE_NAME)

  if (!indexFile) {
    throw new Error(`Article index file not found. In "${directory.name}"`)
  }

  const rawJson = bytesToString(await getContentByReference(indexFile.hash))
  assertJson(rawJson)
  const parsed = JSON.parse(rawJson) as Article

  return {
    slug: directory.name.toLowerCase(),
    data: parsed.data,
    preview: parsed.preview,
  }
}
================================================
FILE: src/controllers/file-system/const.ts
================================================
/**
 * Default directory where all files should be stored
 */
export const DEFAULT_DIRECTORY = 'articles'

/**
 * Project name
 */
export const PROJECT_NAME = 'fairjournal'

/**
 * Maximum size of the blob in bytes (10 MiB, enforced by multer on upload)
 */
export const MAX_BLOB_SIZE = 1024 * 1024 * 10
================================================
FILE: src/controllers/file-system/index.ts
================================================
import express from 'express'
import userRouter from './user'
import blobRouter from './blob'
import updateRouter from './update'
import appRouter from './app'

// Root router of the file-system API: mounts the area-specific sub-routers
const router = express.Router()

router.use('/user', userRouter)
router.use('/blob', blobRouter)
router.use('/update', updateRouter)
router.use('/app', appRouter)

export default router
================================================
FILE: src/controllers/file-system/types.ts
================================================
/**
 * File status in database (`file.status` column)
 */
export enum FileStatus {
  /**
   * File is new, just uploaded (not yet referenced by an article)
   */
  New = 0,

  /**
   * File is used in some article
   */
  Used = 1,
}
================================================
FILE: src/controllers/file-system/update/apply-action.ts
================================================
import { Request, Response, NextFunction } from 'express'
import { ActionType, AddFileActionData, UpdateDataSigned } from '@fairjournal/file-system'
import { fileSystem, tonstorage } from '../../../app'
import { assertObject, assertReference, getPathParts } from '../../../utils'
import { DEFAULT_DIRECTORY } from '../const'
import { assertUpdateDataSigned } from '@fairjournal/file-system'
import pool from '../../../db'
import { OkPacket } from 'mysql2'
import { isReferenceExists } from '../../../fs'
import { FileStatus } from '../types'
/**
 * Request body of the `apply` action
 */
export interface ApplyBody {
  /**
   * Signed update data to apply to the file system
   */
  update: UpdateDataSigned
}
/**
 * Insert update to db for backup
 *
 * @param update Update data
 *
 * @returns ID of the inserted row
 */
async function insertUpdate(update: UpdateDataSigned): Promise<number> {
  const query = `
INSERT INTO fs_update(public_key, update_id, \`update\`)
VALUES (?, ?, ?)
`

  const [result] = await pool.execute(query, [
    update.userAddress.toLowerCase(),
    update.id,
    JSON.stringify(update),
  ])

  return (result as OkPacket).insertId
}
/**
 * Validate update for the gateway. Collects and returns the storage
 * references of all added files.
 *
 * @param update Update data
 */
async function validateUpdate(update: UpdateDataSigned): Promise<string[]> {
  const references: string[] = []

  for (const action of update.actions) {
    switch (action.actionType) {
      case ActionType.addFile: {
        const data = action.actionData as AddFileActionData
        references.push(await validateAndGetAddFileReference(data))
        break
      }
      case ActionType.addDirectory:
        // commented because user should add profile file. define it here or allow full control
        // const data = action.actionData as AddDirectoryActionData
        // if (!(data.path === `/${DEFAULT_DIRECTORY}` || data.path.startsWith(`/${DEFAULT_DIRECTORY}/`))) {
        //   throw new Error(`Invalid path: "${data.path}". All files should be inside "/articles" folder`)
        // }
        break
      case ActionType.addUser:
      case ActionType.removeDirectory:
      case ActionType.removeFile:
        // nothing to validate for these action types
        break
      default:
        throw new Error(`Unknown action type: "${action.actionType}"`)
    }
  }

  return references
}
/**
 * Updates file status in database.
 *
 * Fix: removed the no-op `catch (error) { throw error }` — try/finally is
 * sufficient to guarantee the connection is released.
 *
 * @param reference Reference of the file
 * @param status New status of the file
 */
async function updateFileStatus(reference: string, status: FileStatus): Promise<void> {
  const connection = await pool.getConnection()

  try {
    await connection.query(`UPDATE file SET status = ?, updated_at = ? WHERE reference = ?`, [
      status,
      new Date(),
      reference,
    ])
  } finally {
    connection.release()
  }
}
/**
 * Checks that update is correct and returns the reference of the file.
 *
 * Fix: the original condition `!(A || B || C)` wrapped the whole disjunction
 * in one negation, so it only rejected non-slash-prefixed "articles/..." paths
 * and the "/articles/NAMEOFARTICLE/" restriction was effectively never
 * enforced. The intended check (per the error message) is: reject when the
 * path is NOT under "/articles/", OR is too shallow, OR its first segment is
 * not the articles directory.
 *
 * NOTE(review): if non-article files (e.g. a user profile file) must be
 * allowed outside "/articles", add an explicit exemption here — confirm
 * against the client/update flow.
 *
 * @param data Update data
 */
async function validateAndGetAddFileReference(data: AddFileActionData): Promise<string> {
  const reference = data.hash.toLowerCase()
  assertReference(reference)

  if (!(await isReferenceExists(reference))) {
    throw new Error(`Reference "${reference}" not found`)
  }

  const parts = getPathParts(data.path)

  if (!data.path.startsWith(`/${DEFAULT_DIRECTORY}/`) || parts.length < 3 || parts[0] !== DEFAULT_DIRECTORY) {
    throw new Error(`Invalid path: "${data.path}". All files should be inside "/articles/NAMEOFARTICLE/" folder`)
  }

  return reference
}
/**
 * Publish all files from the update: mark them as used and resume seeding.
 *
 * Fix: added the "Ton Storage is not initialized" guard before calling
 * `tonstorage.uploadResume`, consistent with `handleFileUpload` in the blob
 * upload action, instead of failing with an opaque runtime error.
 *
 * @param update Update data
 */
async function publishAllFiles(update: UpdateDataSigned): Promise<string[]> {
  const references: string[] = []

  for (const action of update.actions) {
    if (action.actionType === ActionType.addFile) {
      const data = action.actionData as AddFileActionData
      references.push(await validateAndGetAddFileReference(data))
    }
  }

  for (const reference of references) {
    if (!tonstorage) {
      throw new Error('Ton Storage is not initialized')
    }

    await updateFileStatus(reference, FileStatus.Used)
    await tonstorage.uploadResume(reference)
  }

  return references
}
/**
 * Apply update action to the file system.
 */
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
  try {
    const { update } = req.body as ApplyBody
    assertObject(update)
    assertUpdateDataSigned(update)

    // validate first, then apply in-memory, then persist the backup and publish files
    await validateUpdate(update)
    fileSystem.addUpdate(update)
    await insertUpdate(update)
    await publishAllFiles(update)

    res.json({ status: 'ok' })
  } catch (e) {
    next(e)
  }
}
================================================
FILE: src/controllers/file-system/update/index.ts
================================================
import express from 'express'
import applyAction from './apply-action'

// Router for file-system update endpoints
const router = express.Router()

// POST /apply — apply a signed update to the file system
router.post('/apply', applyAction)

export default router
================================================
FILE: src/controllers/file-system/user/get-update-id-action.ts
================================================
import { NextFunction, Request, Response } from 'express'
import { assertAddress } from '../../../utils'
import { fileSystem } from '../../../app'
/**
 * Response of the get update id action
 */
export interface GetUpdateIdResponse {
  /**
   * Status of the request ('ok' on success)
   */
  status: string

  /**
   * Address of the user (lowercased)
   */
  address: string

  /**
   * Current update id of the user
   */
  updateId: number
}
/**
 * Gets current user's update id.
 */
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
  try {
    const { address } = req.query
    assertAddress(address)

    const lowerCased = address.toLowerCase()
    const response: GetUpdateIdResponse = {
      status: 'ok',
      address: lowerCased,
      updateId: fileSystem.getUpdateId(lowerCased),
    }
    res.json(response)
  } catch (e) {
    next(e)
  }
}
================================================
FILE: src/controllers/file-system/user/index.ts
================================================
import express from 'express'
import infoAction from './info-action'
import getUpdateIdAction from './get-update-id-action'

// Router for file-system user endpoints
const router = express.Router()

// GET /info — check whether a user exists in the file system
router.get('/info', infoAction)
// GET /get-update-id — current update id of a user
router.get('/get-update-id', getUpdateIdAction)

export default router
================================================
FILE: src/controllers/file-system/user/info-action.ts
================================================
import { NextFunction, Request, Response } from 'express'
import { assertAddress } from '../../../utils'
import { fileSystem } from '../../../app'
/**
 * Check if user exists in the file system.
 */
export default async (req: Request, res: Response, next: NextFunction): Promise<void> => {
  try {
    const { address } = req.query
    assertAddress(address)

    const lowerCased = address.toLowerCase()
    res.json({
      status: 'ok',
      address: lowerCased,
      isUserExists: fileSystem.isUserExists(lowerCased),
    })
  } catch (e) {
    next(e)
  }
}
================================================
FILE: src/controllers/file-system/utils.ts
================================================
import { fileSystem, tonstorage } from '../../app'
import { assertString, base64ToHex, extractHash } from '../../utils'
import tmp from 'tmp'
import fs from 'fs'
import { ReferencedItem } from '@fairjournal/file-system/dist/src/file-system/interfaces/referenced-item'
import { File, Directory } from '@fairjournal/file-system'
import path from 'path'
import { getReferencePath } from '../../fs'
import { Pool } from 'mysql2/promise'
import { RowDataPacket } from 'mysql2'
/**
 * Settings keys that are available in the DB
 */
export enum SettingsKey {
  /**
   * Reference of the latest published file system state
   */
  FS_STATE_REFERENCE = 'fs_state_reference',
}
/**
 * Asserts that user exists in the file system
 *
 * @param data The data to assert
 */
export function assertUserExists(data: unknown): asserts data is string {
  const address = data as string

  if (fileSystem.isUserExists(address)) {
    return
  }

  throw new Error(`User not found: "${address}"`)
}
/**
* Asserts that the data is a string path
*
* @param data The data to assert
*/
export function assertPath(data: unknown): asserts data is string {
assertString(data)
if (!data) {
throw new Error('Path is required')
}
}
/**
 * Get path info for a path inside the user's file system
 *
 * @param address User address
 * @param path Path
 */
export function getPathInfo(address: string, path: string): File | Directory {
  try {
    return fileSystem.getPathInfo(`/${address}${path}`)
  } catch (e) {
    const reason = (e as Error).message
    throw new Error(`Can't get info about the path: ${reason}`)
  }
}
/**
 * Uploads a file to Ton Storage and returns its reference.
 *
 * @param path Path to the file on the local disk
 * @param message Context string included in the error message on failure
 * @param isUpload Whether the file should be seeded immediately
 * @returns Lowercase hex reference of the stored file
 * @throws Error when the daemon rejects the file for any reason other than a duplicate hash
 */
export async function uploadToStorage(path: string, message: string, isUpload: boolean): Promise<string> {
  const response = await tonstorage.create(path, {
    // copy file to storage. Files should be removed later if they are not used
    copy: true,
    // description of the file
    desc: '',
    // do not upload file while article is not published
    upload: isUpload,
  })

  if (response?.ok) {
    return base64ToHex(response.result.torrent.hash).toLowerCase()
  }

  // a duplicate is not a failure — reuse the hash the daemon reports in the error text
  const errorText = response?.error
  if (errorText?.includes('duplicate hash')) {
    return extractHash(errorText).toLowerCase()
  }

  throw new Error(`Error on Ton Storage adding (${message}): ${errorText || 'unknown error'}`)
}
/**
 * Uploads a string blob to Ton Storage via a temporary file.
 *
 * The temporary file and directory are always cleaned up, even when the
 * upload fails (the previous implementation leaked them on error).
 *
 * @param data Data to be uploaded
 * @returns Item referencing the uploaded data
 */
export async function uploadData(data: string): Promise<ReferencedItem> {
  const tempDir = tmp.dirSync()
  const tempFilePath = path.join(tempDir.name, 'blob')

  try {
    fs.writeFileSync(tempFilePath, data)
    const reference = await uploadToStorage(tempFilePath, tempFilePath, true)

    return {
      reference,
    }
  } finally {
    // remove the temp file and directory even if the upload throws;
    // `force` avoids a secondary error when the file was never written
    fs.rmSync(tempFilePath, { force: true })
    tempDir.removeCallback()
  }
}
/**
 * Reads previously downloaded data from the storage directory by reference.
 *
 * @param reference Reference to the file
 * @returns File content decoded as UTF-8
 */
export async function downloadData(reference: string): Promise<string> {
  // renamed from `path` to avoid shadowing the imported `path` module
  const filePath = getReferencePath(reference)

  // async read: do not block the event loop inside an async function
  return fs.promises.readFile(filePath, 'utf-8')
}
/**
 * Inserts a setting or updates its value when the key already exists.
 *
 * @param pool Database pool
 * @param key Setting key
 * @param value Setting value
 */
export async function upsertSettings(pool: Pool, key: string, value: string): Promise<void> {
  // `key` is a reserved word in MySQL, hence the backticks
  const query = `
    INSERT INTO settings (\`key\`, value, created_at, updated_at)
    VALUES (?, ?, NOW(), NOW())
    ON DUPLICATE KEY UPDATE
    value = VALUES(value),
    updated_at = NOW();
  `

  await pool.execute(query, [key, value])
}
/**
 * Gets a setting value by its key.
 *
 * @param pool Database pool
 * @param key Setting key
 * @returns Stored value
 * @throws Error when no row exists for the key
 */
export async function getSetting(pool: Pool, key: string): Promise<string> {
  const [rows] = await pool.execute('SELECT value FROM settings WHERE `key` = ?', [key])
  const found = (rows as RowDataPacket[])[0]

  if (!found) {
    throw new Error(`No setting found for key: ${key}`)
  }

  return found.value as string
}
================================================
FILE: src/db.ts
================================================
import mysql from 'mysql2/promise'
import dotenv from 'dotenv'

dotenv.config()

// Credentials and limits shared by both connection variants
const sharedOptions = {
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  connectionLimit: 10,
}

// TCP (host/port) connection config
const simpleConfig = {
  host: process.env.DB_HOST,
  port: Number(process.env.DB_PORT),
  ...sharedOptions,
}

// Unix-socket connection config
const socketConfig = {
  ...sharedOptions,
  socketPath: process.env.DB_SOCKET_PATH,
}

// Prefer the socket when DB_SOCKET_PATH is configured (e.g. the Docker setup in startup.sh)
const config = process.env.DB_SOCKET_PATH ? socketConfig : simpleConfig

export const pool = mysql.createPool(config)

export default pool
================================================
FILE: src/fs.ts
================================================
import { assertUpdateDataSignedArray, FileSystem } from '@fairjournal/file-system'
import { PROJECT_NAME } from './controllers/file-system/const'
import { Pool, RowDataPacket } from 'mysql2/promise'
import pool from './db'
import { assertString, isString } from './utils'
import * as fs from 'fs'
import path from 'path'
/**
 * Callback invoked with one batch of raw DB rows by `processInBatches`.
 *
 * NOTE(review): rows are untyped (`any[]`) because they come straight from
 * mysql2 without a schema; callers validate each record themselves (see
 * `syncFs`, which asserts the parsed updates).
 */
type AsyncFunction = (records: any[]) => Promise<void>
/**
 * Streams the whole `fs_update` table through `asyncFn` in fixed-size batches.
 *
 * @param pool DB pool
 * @param asyncFn Callback that processes a single batch of rows
 */
async function processInBatches(pool: Pool, asyncFn: AsyncFunction): Promise<void> {
  const batchSize = 1000

  // Total number of rows to page through
  const [countResult] = await pool.execute('SELECT COUNT(*) AS count FROM fs_update')
  const totalRecords = Number(((countResult as RowDataPacket[])[0] as any).count)

  for (let offset = 0; offset < totalRecords; offset += batchSize) {
    const [rows] = (await pool.execute(
      `
      SELECT * FROM fs_update
      LIMIT ?
      OFFSET ?
    `,
      // LIMIT/OFFSET placeholders are passed as strings — presumably a mysql2
      // prepared-statement requirement; keep as-is
      [batchSize.toString(), offset.toString()],
    )) as [RowDataPacket[], any]

    await asyncFn(rows)
  }
}
/**
 * Initialize file system using DB
 *
 * Creates an empty in-memory file system instance; persisted updates are
 * replayed into it separately via `syncFs`.
 */
export function initFs(): FileSystem {
  return new FileSystem({
    version: '0.0.1',
    projectName: PROJECT_NAME,
    projectDescription: 'A creative platform owned by people.',
    // update signatures are verified in TON mode
    checkSignature: 'ton',
  })
}
/**
 * Replays all persisted updates from the DB into the given file system.
 *
 * @param fs File system to populate
 * @throws Error when the file system instance is missing
 */
export async function syncFs(fs: FileSystem): Promise<void> {
  if (!fs) {
    throw new Error('File system is not initialized')
  }

  await processInBatches(pool, async rows => {
    // each row stores a signed update serialized as JSON
    const parsedUpdates = rows.map(row => JSON.parse(row.update))
    assertUpdateDataSignedArray(parsedUpdates)

    for (const update of parsedUpdates) {
      fs.addUpdate(update)
    }
  })
}
/**
 * Gets reference path on the local file system
 *
 * Resolves to `<TON_STORAGE_DATABASE_PATH>/torrent/torrent-files/<REFERENCE>/blob`,
 * where the reference directory name is uppercased.
 *
 * @param reference Reference
 * @throws Error when `TON_STORAGE_DATABASE_PATH` is missing or empty
 */
export function getReferencePath(reference: string): string {
  const storagePath = process.env.TON_STORAGE_DATABASE_PATH
  // assertString rejects a missing variable; the check below rejects an empty one
  assertString(storagePath)

  if (!storagePath) {
    throw new Error('Storage path is not defined')
  }

  return path.resolve(storagePath, 'torrent/torrent-files', reference.toUpperCase(), 'blob')
}
/**
 * Checks whether a blob with the given reference exists on the local disk.
 *
 * @param reference Reference of the file
 */
export async function isReferenceExists(reference: string): Promise<boolean> {
  const blobPath = getReferencePath(reference)

  return fs.existsSync(blobPath)
}
/**
 * Gets file content by its reference.
 *
 * @param reference Reference of the file
 * @returns Raw file content
 * @throws Error when the referenced file does not exist
 */
export async function getContentByReference(reference: string): Promise<Uint8Array> {
  const filePath = getReferencePath(reference)

  if (!fs.existsSync(filePath)) {
    throw new Error(`File does not exist`)
  }

  // async read: avoid blocking the event loop inside an async function
  return fs.promises.readFile(filePath)
}
/**
 * Extracts plain article text from an editor JSON object.
 *
 * Collects the text of all `paragraph` blocks, strips HTML tags and returns
 * the first `symbols` characters of the space-joined result.
 *
 * @param jsonObject Editor document of the shape `{ data: { blocks: [...] } }`
 * @param symbols Maximum number of characters to return
 * @throws Error when the object does not contain a `data.blocks` array
 */
export function extractArticleText(jsonObject: unknown, symbols: number): string {
  // validate the structure explicitly instead of the previous `@ts-ignore`
  // access, which crashed with an opaque TypeError on malformed input
  const data = (jsonObject as { data?: { blocks?: unknown } })?.data
  const blocks = data?.blocks

  if (!Array.isArray(blocks)) {
    throw new Error('Article JSON does not contain "data.blocks" array')
  }

  const paragraphs = (blocks as { type?: string; data?: { text?: unknown } }[])
    .filter(block => block?.type === 'paragraph')
    .filter(block => typeof block?.data?.text === 'string')
    .map(block => {
      const text = block.data?.text as string

      // strip HTML tags, keeping only the visible text
      return text.replace(/<\/?[^>]+(>|$)/g, '')
    })
    .join(' ')

  return paragraphs.slice(0, symbols)
}
================================================
FILE: src/index.ts
================================================
import app, { clearFileSystem, syncFileSystem } from './app'

// Start server
const PORT = process.env.PORT || 5000

/**
 * Resets the in-memory file system, replays persisted updates from the DB
 * and starts the HTTP server.
 */
async function start(): Promise<void> {
  clearFileSystem()
  await syncFileSystem()
  // eslint-disable-next-line no-console
  app.listen(PORT, () => console.log(`Server started on port ${PORT}`))
}

// fail loudly on startup errors instead of the previous `.then()`,
// which silently swallowed a rejected `start()`
start().catch(e => {
  // eslint-disable-next-line no-console
  console.error('Failed to start server:', e)
  process.exit(1)
})
================================================
FILE: src/models/Article.ts
================================================
/**
 * Article model used by `ArticleController`.
 *
 * NOTE(review): the file-system based article flow (`controllers/file-system`)
 * does not appear to use this model — presumably kept for the older REST
 * endpoints; confirm before removing.
 */
interface Article {
  // DB primary key
  id: number
  // content hash of the article
  hash: string
  content: string
  // id of the authoring user
  authorId: number
}

export default Article
================================================
FILE: src/models/User.ts
================================================
/**
 * User model used by the older REST endpoints (`UserController`).
 */
interface User {
  // DB primary key
  id: number
  // NOTE(review): typed as number, while wallet addresses elsewhere in the
  // codebase are 64-char hex strings — confirm which is intended
  wallet: number
  // path/URL of the uploaded avatar image
  avatar: string
  name: string
  description: string
  // ids of the user's articles
  articles: number[]
}

export default User
================================================
FILE: src/routes.ts
================================================
import { Router } from 'express'
import {
deleteUser,
getUserById,
updateUser,
getArticlesByUserId,
authorizeByWallet,
} from './controllers/UserController'
import {
createArticle,
deleteArticle,
getAllArticles,
getArticleById,
updateArticle,
} from './controllers/ArticleController'
import Image from './controllers/ImageController'
import multer from 'multer'
import path from 'path'
// Multer disk storage: files go to `avatars/` with a timestamped name that
// keeps the original extension
const diskStorage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, 'avatars/')
  },
  filename: (req, file, cb) => {
    // Use the originalname property to get the original extension
    cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname))
  },
})

const upload = multer({ storage: diskStorage })

const router = Router()

// User Routes
router.get('/users/:id', getUserById)
router.get('/users/:id/articles', getArticlesByUserId)
router.post('/users/:id', upload.single('avatar'), updateUser)
router.delete('/users/:id', deleteUser)

// Article Routes
router.get('/articles', getAllArticles)
router.get('/articles/:id', getArticleById)
router.post('/articles', createArticle)
router.put('/articles/:id', updateArticle)
router.delete('/articles/:id', deleteArticle)

// Images Routes
router.post('/image/upload', upload.single('image'), Image.upload)

// Auth route
router.post('/auth', authorizeByWallet)

export default router
================================================
FILE: src/ton-utils.ts
================================================
/**
 * Top-level response of the Ton Storage daemon
 */
export interface DaemonResponse {
  // true when the request succeeded
  ok: boolean
  result: TorrentFull
  code: number
}

/**
 * Torrent full info: the torrent itself plus its file list
 */
export interface TorrentFull {
  '@type': string
  torrent: Torrent
  files: FileInfo[]
}

/**
 * Torrent info
 *
 * NOTE(review): sizes and counters are reported by the daemon as strings.
 */
export interface Torrent {
  '@type': string
  // torrent hash; base64-encoded in daemon responses (converted with `base64ToHex` elsewhere)
  hash: string
  flags: number
  total_size: string
  description: string
  files_count: string
  included_size: string
  dir_name: string
  downloaded_size: string
  added_at: number
  root_dir: string
  active_download: boolean
  active_upload: boolean
  completed: boolean
  download_speed: number
  upload_speed: number
  fatal_error: string
}

/**
 * Info about a single file inside a torrent
 */
export interface FileInfo {
  '@type': string
  name: string
  size: string
  priority: number
  downloaded_size: string
}

/**
 * Prefix for error messages produced by the assertion helpers below
 */
export const errorPrefix = 'Daemon response does not contain'
/**
 * Asserts that the value is neither `undefined` nor `null`.
 *
 * @param property Property to check
 * @param name Name of the property, used in the error message
 */
export function assertIsDefined<T>(property: T | undefined | null, name: string): asserts property is NonNullable<T> {
  // `== null` matches both null and undefined
  if (property == null) {
    throw new Error(`${errorPrefix} ${name}`)
  }
}
/**
 * Asserts that the value is a number.
 *
 * @param value Value to check
 * @param name Name of the value, used in the error message
 */
export function assertIsNumber(value: unknown, name: string): asserts value is number {
  if (typeof value === 'number') {
    return
  }

  throw new Error(`${errorPrefix} ${name} of type number`)
}
/**
 * Asserts that the value is a boolean.
 *
 * @param value Value to check
 * @param name Name of the value, used in the error message
 */
export function assertIsBoolean(value: unknown, name: string): asserts value is boolean {
  if (typeof value === 'boolean') {
    return
  }

  throw new Error(`${errorPrefix} ${name} of type boolean`)
}
/**
 * Asserts that the data is a valid FileInfo
 *
 * NOTE(review): `priority` is typed as number but only checked for presence
 * (unlike the numeric fields in `assertTorrent`) — confirm whether this is
 * intentional.
 *
 * @param fileInfo Data to check
 */
export function assertFileInfo(fileInfo: FileInfo): asserts fileInfo is FileInfo {
  assertIsDefined(fileInfo['@type'], 'file @type')
  assertIsDefined(fileInfo.name, 'file name')
  assertIsDefined(fileInfo.size, 'file size')
  assertIsDefined(fileInfo.downloaded_size, 'file downloaded_size')
  assertIsDefined(fileInfo.priority, 'file priority')
}
/**
 * Asserts that the data is a valid Torrent
 *
 * NOTE(review): `description` and `dir_name` are not validated here even
 * though they are declared on the interface — presumably they may be absent
 * in some daemon responses; confirm before relying on them.
 *
 * @param torrent Data to check
 */
export function assertTorrent(torrent: Torrent): asserts torrent is Torrent {
  assertIsDefined(torrent['@type'], 'torrent @type')
  assertIsDefined(torrent.hash, 'torrent hash')
  assertIsNumber(torrent.flags, 'torrent flags')
  assertIsDefined(torrent.total_size, 'torrent total_size')
  assertIsDefined(torrent.files_count, 'torrent files_count')
  assertIsDefined(torrent.included_size, 'torrent included_size')
  assertIsDefined(torrent.downloaded_size, 'torrent downloaded_size')
  assertIsDefined(torrent.added_at, 'torrent added_at')
  assertIsDefined(torrent.root_dir, 'torrent root_dir')
  assertIsBoolean(torrent.active_download, 'torrent active_download')
  assertIsBoolean(torrent.active_upload, 'torrent active_upload')
  assertIsBoolean(torrent.completed, 'torrent completed')
  assertIsNumber(torrent.download_speed, 'torrent download_speed')
  assertIsNumber(torrent.upload_speed, 'torrent upload_speed')
  assertIsDefined(torrent.fatal_error, 'torrent fatal_error')
}
/**
 * Asserts that the data is a valid DaemonResponse, including its torrent
 * and every file entry.
 *
 * @param data Data to check
 */
export function assertDaemonResponse(data: DaemonResponse): asserts data is DaemonResponse {
  assertIsDefined(data.ok, 'ok')
  assertIsDefined(data.result, 'result')
  assertIsNumber(data.code, 'code')
  assertIsDefined(data.result.torrent, 'result.torrent')
  assertIsDefined(data.result.files, 'result.files')

  for (const file of data.result.files) {
    assertFileInfo(file)
  }

  assertTorrent(data.result.torrent)
}
================================================
FILE: src/utils.ts
================================================
import * as crypto from 'crypto'
import * as fs from 'fs'
import { promisify } from 'util'
import path from 'path'
// Promisified `fs.read`; resolves with `{ bytesRead, buffer }` thanks to
// fs.read's custom promisify implementation (see `calculateSHA256`)
const readFile = promisify(fs.read)

/**
 * Length of a public key in hex characters (32 bytes)
 */
export const PUBLIC_KEY_LENGTH = 64

/**
 * Length of a file reference in hex characters (32 bytes)
 */
export const REFERENCE_LENGTH = 64

/**
 * Max length of an article name
 */
export const MAX_ARTICLE_NAME_LENGTH = 64
/**
 * Checks if the value is a string.
 *
 * Declared as a type predicate (was a bare `boolean`) so that callers get
 * proper narrowing of the checked value.
 *
 * @param value Value to check
 */
export function isString(value: unknown): value is string {
  return typeof value === 'string'
}
/**
 * Asserts that the data is a string.
 *
 * @param data Data to check
 * @throws Error when the data is of any other type
 */
export function assertString(data: unknown): asserts data is string {
  if (isString(data)) {
    return
  }

  throw new Error('Data is not a string')
}
/**
 * Asserts that the data is a string of exactly `length` characters.
 *
 * @param data Data to check
 * @param length Expected length
 * @throws Error when the data is not a string or has a different length
 */
export function assertStringLength(data: unknown, length: number): asserts data is string {
  assertString(data)

  if (data.length === length) {
    return
  }

  throw new Error(`Data length is not equal to ${length}`)
}
/**
 * Asserts that the data is a public key
 *
 * A valid address is a hex string of exactly `PUBLIC_KEY_LENGTH` (64)
 * characters, i.e. 32 bytes.
 *
 * @param data Data to check
 */
export function assertAddress(data: unknown): asserts data is string {
  assertStringLength(data, PUBLIC_KEY_LENGTH)
  assertHex(data)
}
/**
 * Checks if the value consists only of hex characters.
 *
 * Note: the empty string also passes (the pattern uses `*`).
 *
 * @param value Value to check
 */
export function isHexString(value: string): boolean {
  return /^[0-9A-Fa-f]*$/.test(value)
}
/**
 * Asserts that the data is a hex string.
 *
 * @param data Data to check
 * @throws Error when the data is not a string or contains non-hex characters
 */
export function assertHex(data: unknown): asserts data is string {
  assertString(data)

  if (isHexString(data)) {
    return
  }

  throw new Error('Data is not a hex string')
}
/**
 * Asserts that the data is a correct reference
 *
 * A valid reference is a hex string of exactly `REFERENCE_LENGTH` (64)
 * characters, i.e. a 32-byte hash.
 *
 * @param data Data to check
 */
export function assertReference(data: unknown): asserts data is string {
  assertStringLength(data, REFERENCE_LENGTH)
  assertHex(data)
}
/**
 * Splits a path into its non-empty segments.
 *
 * @param path Path to split
 */
export function getPathParts(path: string): string[] {
  return path.split('/').filter(part => part.length > 0)
}
/**
 * Asserts that the data is a correct article name: 1 to
 * `MAX_ARTICLE_NAME_LENGTH` characters of latin letters, digits or hyphens.
 *
 * @param data Data to check
 * @throws Error when the name is empty, too long or contains other characters
 */
export function assertArticleName(data: unknown): asserts data is string {
  assertString(data)

  const hasValidLength = data.length > 0 && data.length <= MAX_ARTICLE_NAME_LENGTH
  if (!hasValidLength || !/^[a-z0-9-]+$/i.test(data)) {
    throw new Error('Article name is not valid')
  }
}
/**
 * Checks if the data is a plain object (not an array, not null).
 *
 * @param data Data to check
 */
export function isObject(data: unknown): data is Record<string, unknown> {
  if (data === null || Array.isArray(data)) {
    return false
  }

  return typeof data === 'object'
}
/**
 * Asserts that the data is an object.
 *
 * @param data Data to check
 * @param customError Custom error message
 * @throws Error when the data is not a plain object
 */
export function assertObject(data: unknown, customError?: string): asserts data is Record<string, unknown> {
  if (isObject(data)) {
    return
  }

  // ternary (not `??`) so an empty custom message falls back to the default
  throw new Error(customError ? customError : 'Data is not an object')
}
/**
 * Decodes a byte array into a UTF-8 string.
 *
 * @param data Bytes to convert
 */
export function bytesToString(data: Uint8Array): string {
  return new TextDecoder().decode(data)
}
/**
 * Encodes a string into its UTF-8 byte representation.
 *
 * @param data String to convert
 */
export function stringToBytes(data: string): Uint8Array {
  return new TextEncoder().encode(data)
}
/**
 * Asserts that the data is a string containing valid JSON.
 *
 * @param data Data to check
 * @throws Error when the data is not a string or cannot be parsed as JSON
 */
export function assertJson(data: unknown): asserts data is string {
  if (typeof data !== 'string') {
    throw new Error('JSON assert: data is not a string')
  }

  let parseError: Error | undefined
  try {
    JSON.parse(data)
  } catch (e) {
    parseError = e as Error
  }

  if (parseError) {
    throw new Error(`JSON assert: data is not a valid JSON: ${parseError.message}`)
  }
}
/**
 * Calculates the SHA256 hash of a file, reading it in 8KB chunks.
 *
 * @param filePath Path to the file
 * @returns Lowercase hex digest
 * @throws Error when the file cannot be opened or read
 */
export async function calculateSHA256(filePath: string): Promise<string> {
  const hash = crypto.createHash('sha256')

  // A read stream closes its descriptor automatically, including on error —
  // the previous implementation leaked the fd when a read threw before
  // `fs.closeSync` was reached.
  const stream = fs.createReadStream(filePath, { highWaterMark: 8192 })
  for await (const chunk of stream) {
    hash.update(chunk as Buffer)
  }

  return hash.digest('hex').toLowerCase()
}
/**
 * Converts relative path to absolute
 *
 * Resolves the given segments against the current working directory
 * (delegates to `path.resolve`).
 *
 * @param paths Paths to convert
 * @returns Absolute path
 */
export function toAbsolutePath(...paths: string[]): string {
  return path.resolve(...paths)
}
/**
 * Delays execution for the given number of milliseconds.
 *
 * @param ms Delay in milliseconds
 */
export async function delay(ms: number): Promise<void> {
  await new Promise<void>(resolve => {
    setTimeout(resolve, ms)
  })
}
/**
 * Extracts the first 64-character hex hash found in a message.
 *
 * @param message Message to search
 * @returns The matched hash
 * @throws Error when the message contains no hash
 */
export function extractHash(message: string): string {
  const match = /[A-Fa-f0-9]{64}/.exec(message)

  if (!match) {
    throw new Error('No hash found in the message.')
  }

  return match[0]
}
/**
 * Converts base64 string to uppercase hex string
 *
 * @param base64 Base64-encoded data
 */
export function base64ToHex(base64: string): string {
  const bytes = Buffer.from(base64, 'base64')

  return bytes.toString('hex').toUpperCase()
}
/**
 * Converts hex string to base64 string
 *
 * @param hex Hex-encoded data
 */
export function hexToBase64(hex: string): string {
  const bytes = Buffer.from(hex, 'hex')

  return bytes.toString('base64')
}
================================================
FILE: startup.sh
================================================
#!/bin/sh

# Create .env file consumed by the Node app.
# NOTE(review): DB credentials and PUBLISH_FS_PASSWORD are hard-coded below;
# this looks acceptable for a self-contained test container, but confirm the
# image is never used for a production deployment.
cat > .env << EOF
# Path to the root of the files
FILES_ROOT_PATH=/app
# Port of the application
PORT=5000
DB_SOCKET_PATH=/run/mysqld/mysqld2.sock
# Database host
DB_HOST=localhost
# Database port
DB_PORT=3306
# Database username
DB_USER=fjuser
# Database password
DB_PASSWORD=fjpassword
# Database name
DB_NAME=fair_journal
# External web url for old files
URL=http://localhost:5000/
# Is show server logs
SHOW_LOGS=true
# Ton Storage CLI binary path
# This path should reflect the path inside the Docker container
TON_STORAGE_BIN_PATH=/app/ton/storage-daemon-cli-linux-arm64
# Ton Storage host
TON_STORAGE_HOST=localhost:5555
# Ton Storage database path
# This path should reflect the path inside the Docker container
TON_STORAGE_DATABASE_PATH=/app/ton/storage-db
# Ton Storage timeout
TON_STORAGE_TIMEOUT=5000
# Ton Storage wait attempts
TON_STORAGE_WAIT_ATTEMPTS=3
# Ton Storage check wait timeout
TON_STORAGE_CHECK_WAIT_TIMEOUT=1000
PUBLISH_FS_PASSWORD=345134t134g145gh145h54
EOF

# Start the Ton Storage daemon in the background, discarding its output
/app/ton/storage-daemon-linux-arm64 -v 5 -C /app/ton/global.config.json -I localhost:3333 -p 5555 -D /app/ton/storage-db >/dev/null 2>&1 &

# Start MySQL on the dedicated socket in the background
/usr/bin/mysqld --user=mysql --socket=/run/mysqld/mysqld2.sock &

# Give MySQL time to come up, create the schema/user/grants,
# run the knex migrations, then execute the test suite
sleep 5 &&
mysql --socket=/run/mysqld/mysqld2.sock -uroot -e "source ./migrations/db.sql" &&
mysql --socket=/run/mysqld/mysqld2.sock -uroot -e "CREATE USER 'fjuser'@'localhost' IDENTIFIED BY 'fjpassword';" &&
mysql --socket=/run/mysqld/mysqld2.sock -uroot -e "GRANT ALL ON fair_journal.* TO 'fjuser'@'localhost';" &&
npx knex migrate:latest --env docker &&
npm run test
================================================
FILE: test/controllers/file-system/app.test.ts
================================================
import knex from 'knex'
import knexConfig from '../../../knexfile'
import { TonstorageCLI } from 'tonstorage-cli'
import app, { clearFileSystem, createTonStorageInstance, syncFileSystem } from '../../../src/app'
import { assertTree, createWallet, removeAllTonStorageFiles, tonStorageFilesList, uploadBytes } from '../../utils'
import pool from '../../../src/db'
import {
assertDirectories,
assertFiles,
createAddDirectoryAction,
createAddFileAction,
createAddUserAction,
personalSign,
Update,
} from '@fairjournal/file-system'
import { PROJECT_NAME } from '../../../src/controllers/file-system/const'
import supertest from 'supertest'
import path from 'path'
import { stringToBytes } from '../../../src/utils'
import { downloadData, getSetting, SettingsKey, upsertSettings } from '../../../src/controllers/file-system/utils'
import { initFs } from '../../../src/fs'
import fs from 'fs'
// Silence server logging while the suite runs
process.env.SHOW_LOGS = 'false'

// Use the socket-based config inside Docker, the TCP config locally
const db = knex(process.env.DB_SOCKET_PATH ? knexConfig.docker : knexConfig.development)

describe('App', () => {
  let tonStorage: TonstorageCLI

  beforeEach(async () => {
    // Rollback the migration (if any)
    await db.migrate.rollback()

    // Run the migration
    await db.migrate.latest()
    clearFileSystem()
    tonStorage = createTonStorageInstance()
    await removeAllTonStorageFiles(tonStorage)
    expect(await tonStorageFilesList(tonStorage)).toHaveLength(0)
  })

  afterEach(async () => {
    // After each test, we can rollback the migration
    await db.migrate.rollback()
  })

  afterAll(async () => {
    // Close the database connection after all tests are done
    await db.destroy()
    await pool.end()
    await removeAllTonStorageFiles(tonStorage)
  })

  it('should publish fs', async () => {
    await syncFileSystem()
    const supertestApp = supertest(app)

    // three files are added to each of three directories per user
    const files = [
      {
        originalName: 'file1.txt',
        destinationName: 'file1-txt',
      },
      {
        originalName: 'file2.txt',
        destinationName: 'file2-txt',
      },
      {
        originalName: 'img1.jpg',
        destinationName: 'img1-jpg',
      },
    ]

    // fixed seeds keep the wallet addresses — and therefore the published
    // state reference asserted below — deterministic
    const seeds = [
      '4f3ab03c9b34be0a399e8b165350c705f1c74e1f980be66c7aba92fbe4d07fb8',
      '235b19b79390d5a821b49fef63e63691c377d645f1d20862b42f6e13f37a1b5e',
      '9036f25e16e153c6af6031a98e5087c627d86b4da9acbe63b5cfad096a218739',
    ]
    const users = await Promise.all(
      Array.from({ length: 3 }, async (_, index) => {
        const wallet = await createWallet(seeds[index])

        return {
          address: wallet.publicKey.toString('hex'),
          personalSign: (data: string): string => personalSign(data, wallet.secretKey),
        }
      }),
    )

    // register each user and fill their tree with directories and files
    for (const user of users) {
      const update = new Update(PROJECT_NAME, user.address, 1)
      update.addAction(createAddUserAction(user.address))

      for (let i = 0; i < 3; i++) {
        const dir = `dir${i}`
        update.addAction(createAddDirectoryAction(`/${dir}`))

        for (const file of files) {
          const filePath = path.join(__dirname, `../../data/${file.originalName}`)
          const content = fs.readFileSync(filePath)
          const hash = await uploadBytes(tonStorage, stringToBytes(content.toString()))
          update.addAction(
            createAddFileAction({
              path: `/${dir}/${file.destinationName}`,
              mimeType: 'text/plain',
              size: content.length,
              hash,
            }),
          )
        }
      }

      update.setSignature(user.personalSign(update.getSignData()))
      const response = await supertestApp.post('/v1/fs/update/apply').send({ update })
      expect(response.status).toBe(200)
      expect(response.body).toStrictEqual({ status: 'ok' })
    }

    // publishing with a wrong password must be rejected
    const response0 = await supertestApp.post('/v1/fs/app/publish').send({ password: 'any-password' })
    expect(response0.status).toBe(500)
    expect(response0.body).toStrictEqual({ status: 'error', message: 'Invalid password' })

    const resultReference = '0371cb0e4f839c0e06fccbc5001b593fa9b25c3c23fac2cd7c4979d2efc64f7a'
    // nothing is persisted before the first successful publish
    await expect(getSetting(pool, SettingsKey.FS_STATE_REFERENCE)).rejects.toThrow(
      `No setting found for key: ${SettingsKey.FS_STATE_REFERENCE}`,
    )

    const response1 = await supertestApp.post('/v1/fs/app/publish').send({ password: process.env.PUBLISH_FS_PASSWORD })
    expect(response1.status).toBe(200)
    expect(response1.body).toStrictEqual({ status: 'ok', reference: resultReference })
    expect(await getSetting(pool, SettingsKey.FS_STATE_REFERENCE)).toBe(resultReference)

    // re-import the published state and verify the whole tree round-trips
    const mfs = initFs()
    await mfs.download(resultReference, {
      downloadData: async item => downloadData(item.reference),
      withUpdates: true,
    })
    const exported = mfs.exportMeta()
    expect(exported.users).toHaveLength(3)
    assertTree(exported.tree)
    const rootDirectories = exported.tree.directory.directories
    assertDirectories(rootDirectories)
    expect(rootDirectories).toHaveLength(3)

    for (const rootDirectory of rootDirectories) {
      const subDirectories = rootDirectory.directories
      expect(subDirectories).toHaveLength(3)
      assertDirectories(subDirectories)

      for (const subDirectory of subDirectories) {
        const filesInSubDirectory = subDirectory.files
        expect(filesInSubDirectory).toHaveLength(3)
        assertFiles(filesInSubDirectory)
      }
    }
  })

  it('set and get settings', async () => {
    const supertestApp = supertest(app)

    // missing key parameter
    let data0 = await supertestApp.get(`/v1/fs/app/get-settings`)
    expect(data0.status).toBe(500)
    expect(data0.body).toStrictEqual({
      status: 'error',
      message: `"key" is not set`,
    })

    // known key but no stored value yet
    data0 = await supertestApp.get(`/v1/fs/app/get-settings?key=${SettingsKey.FS_STATE_REFERENCE}`)
    expect(data0.status).toBe(500)
    expect(data0.body).toStrictEqual({
      status: 'error',
      message: `No setting found for key: ${SettingsKey.FS_STATE_REFERENCE}`,
    })

    // upsert, then read the value back through the API
    const checkValue = 'Hello-world'
    await upsertSettings(pool, SettingsKey.FS_STATE_REFERENCE, checkValue)
    data0 = await supertestApp.get(`/v1/fs/app/get-settings?key=${SettingsKey.FS_STATE_REFERENCE}`)
    expect(data0.status).toBe(200)
    expect(data0.body).toStrictEqual({
      status: 'ok',
      value: checkValue,
    })
  })
})
================================================
FILE: test/controllers/file-system/article.test.ts
================================================
// todo change managing of the file system to configure and call when needed
process.env.SHOW_LOGS = 'false'
import { Article, ArticleResponse, ArticlesResponse } from '../../../src/controllers/file-system/blob/utils'
import knex from 'knex'
import knexConfig from '../../../knexfile'
import pool from '../../../src/db'
import supertest from 'supertest'
import app, { clearFileSystem, createTonStorageInstance, syncFileSystem } from '../../../src/app'
import {
createAddDirectoryAction,
createAddFileAction,
createAddUserAction,
createRemoveDirectoryAction,
personalSign,
Update,
} from '@fairjournal/file-system'
import {
createWallet,
generateArticle,
getUpdatesCount,
removeAllTonStorageFiles,
tonStorageFilesList,
uploadBytes,
} from '../../utils'
import { PROJECT_NAME } from '../../../src/controllers/file-system/const'
import { stringToBytes } from '../../../src/utils'
import { GetUpdateIdResponse } from '../../../src/controllers/file-system/user/get-update-id-action'
import { TonstorageCLI } from 'tonstorage-cli'
const db = knex(process.env.DB_SOCKET_PATH ? knexConfig.docker : knexConfig.development)
describe('Article', () => {
let tonStorage: TonstorageCLI
beforeEach(async () => {
// Rollback the migration (if any)
await db.migrate.rollback()
// Run the migration
await db.migrate.latest()
clearFileSystem()
tonStorage = createTonStorageInstance()
await removeAllTonStorageFiles(tonStorage)
expect(await tonStorageFilesList(tonStorage)).toHaveLength(0)
})
afterEach(async () => {
// After each test, we can rollback the migration
await db.migrate.rollback()
})
afterAll(async () => {
// Close the database connection after all tests are done
await db.destroy()
await pool.end()
await removeAllTonStorageFiles(tonStorage)
})
it('create and get articles', async () => {
await syncFileSystem()
const supertestApp = supertest(app)
const authors = await Promise.all(
Array.from({ length: 3 }, async () => {
const wallet = await createWallet()
return {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
articles: [generateArticle(), generateArticle(), generateArticle()] as Article[],
}
}),
)
for (const author of authors) {
const update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.addAction(createAddDirectoryAction('/articles'))
update.setSignature(author.personalSign(update.getSignData()))
const response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({ status: 'ok' })
for (let articleIndex = 0; articleIndex < author.articles.length; articleIndex++) {
const article = author.articles[articleIndex]
const articleData = JSON.stringify(article)
const hash = await uploadBytes(tonStorage, stringToBytes(articleData))
const updatesInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))
.body as GetUpdateIdResponse
const update = new Update(PROJECT_NAME, author.address, updatesInfo.updateId + 1)
update.addAction(createAddDirectoryAction(`/articles/${article.slug}`))
update.addAction(
createAddFileAction({
path: `/articles/${article.slug}/index-json`,
mimeType: 'application/json',
size: articleData.length,
hash,
}),
)
update.setSignature(author.personalSign(update.getSignData()))
const response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({ status: 'ok' })
}
}
// 3*1 - registrations = 3, 3*3 - articles = 9, total 12
expect(await getUpdatesCount(db)).toEqual(12)
for (const author of authors) {
const articlesList = (await supertestApp.get(`/v1/fs/blob/get-articles?userAddress=${author.address}`))
.body as ArticlesResponse
expect(articlesList.status).toBe('ok')
expect(articlesList.userAddress).toBe(author.address)
expect(articlesList.articles.length).toBe(author.articles.length)
for (let articleIndex = 0; articleIndex < author.articles.length; articleIndex++) {
const article = author.articles[articleIndex]
// check short version of the article
const articleInfo = articlesList.articles[articleIndex]
expect(articleInfo.slug).toBe(article.slug)
expect(articleInfo.shortText).toBeDefined()
expect(articleInfo.previewData).toBeDefined()
// check full version of the article
const fsArticle = (
await supertestApp.get(`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${article.slug}`)
).body as ArticleResponse
expect(fsArticle.status).toBe('ok')
expect(fsArticle.userAddress).toBe(author.address)
expect(fsArticle.article.slug).toStrictEqual(article.slug)
expect(fsArticle.article.data).toBeDefined()
expect(fsArticle.article.preview).toBeDefined()
}
}
})
it('get non-existing article for an existing user', async () => {
const supertestApp = supertest(app)
const wallet = await createWallet()
const author = {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
}
const update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.setSignature(author.personalSign(update.getSignData()))
await supertestApp.post('/v1/fs/update/apply').send({ update })
const nonExistentSlug = 'non-existent-article'
const response = await supertestApp.get(
`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${nonExistentSlug}`,
)
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
message: `Article not found: "${nonExistentSlug}". Get item: item not found: "articles"`,
status: 'error',
})
})
it('get article from non-existing user', async () => {
const supertestApp = supertest(app)
const nonExistentUserAddress = '0'.repeat(64)
const response = await supertestApp.get(
`/v1/fs/blob/get-article?userAddress=${nonExistentUserAddress}&slug=some-article`,
)
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
message: `User not found: "${nonExistentUserAddress}"`,
status: 'error',
})
})
it('get articles from non-existing user', async () => {
const supertestApp = supertest(app)
const nonExistentUserAddress = '0'.repeat(64)
const response = await supertestApp.get(`/v1/fs/blob/get-articles?userAddress=${nonExistentUserAddress}`)
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
message: `User not found: "${nonExistentUserAddress}"`,
status: 'error',
})
})
it('get non-existing articles for an existing user', async () => {
const supertestApp = supertest(app)
const wallet = await createWallet()
const author = {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
}
const update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.setSignature(author.personalSign(update.getSignData()))
await supertestApp.post('/v1/fs/update/apply').send({ update })
const response = await supertestApp.get(`/v1/fs/blob/get-articles?userAddress=${author.address}`)
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
message: `Articles not found. Get item: item not found: "articles"`,
status: 'error',
})
})
// Verifies that an index-json entry whose blob content is not valid JSON
// is accepted by update/apply but rejected when the article is read back.
it('add incorrect article with correct index-json for an existing user', async () => {
await syncFileSystem()
const supertestApp = supertest(app)
const wallet = await createWallet()
const author = {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
}
// Add user first
let update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.addAction(createAddDirectoryAction('/articles'))
update.setSignature(author.personalSign(update.getSignData()))
await supertestApp.post('/v1/fs/update/apply').send({ update })
// Upload plain text (NOT JSON) and register it as the article index file
const articleData = 'This is some random short text instead of an actual article.'
const hash = await uploadBytes(tonStorage, stringToBytes(articleData))
const articleSlug = 'random-article'
// Fetch the current update id so the next update is sequential
const updatesInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))
.body as GetUpdateIdResponse
update = new Update(PROJECT_NAME, author.address, updatesInfo.updateId + 1)
update.addAction(createAddDirectoryAction(`/articles/${articleSlug}`))
update.addAction(
createAddFileAction({
path: `/articles/${articleSlug}/index-json`,
mimeType: 'application/json',
// NOTE(review): string length, not byte length — equal here because the text is ASCII
size: articleData.length,
hash,
}),
)
update.setSignature(author.personalSign(update.getSignData()))
const response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({ status: 'ok' })
// The update applied fine, but reading the article must fail JSON validation
const fsArticle = (
await supertestApp.get(`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${articleSlug}`)
).body as ArticleResponse
expect(fsArticle.status).toBe('error')
// not strict comparison because of different error messages on different platforms (macos/linux arm64)
expect(fsArticle.message).toContain(
`Article not found: "${articleSlug}". Error: JSON assert: data is not a valid JSON`,
)
})
// Full lifecycle check: create an article, fetch it by slug, delete its
// directory, then verify it is gone.
it('should add and remove an article, checking its availability by slug', async () => {
await syncFileSystem()
const supertestApp = supertest(app)
// create a new user and a new article
const wallet = await createWallet()
const author = {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
article: generateArticle() as Article,
}
// register the user and create the article directory
let update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.addAction(createAddDirectoryAction('/articles'))
update.setSignature(author.personalSign(update.getSignData()))
let response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({ status: 'ok' })
// add the new article
const articleData = JSON.stringify(author.article)
const hash = await uploadBytes(tonStorage, stringToBytes(articleData))
// next update id must be sequential per user
const updatesInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))
.body as GetUpdateIdResponse
update = new Update(PROJECT_NAME, author.address, updatesInfo.updateId + 1)
update.addAction(createAddDirectoryAction(`/articles/${author.article.slug}`))
update.addAction(
createAddFileAction({
path: `/articles/${author.article.slug}/index-json`,
mimeType: 'application/json',
// NOTE(review): string length, not byte length — fine while fixtures are ASCII
size: articleData.length,
hash,
}),
)
update.setSignature(author.personalSign(update.getSignData()))
response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({ status: 'ok' })
// check the article is available by slug
const fsArticle = (
await supertestApp.get(`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${author.article.slug}`)
).body as ArticleResponse
expect(fsArticle.status).toBe('ok')
expect(fsArticle.userAddress).toBe(author.address)
expect(fsArticle.article.slug).toBe(author.article.slug)
expect(fsArticle.article.data).toBeDefined()
// remove the article by deleting its slug folder
const deleteInfo = (await supertestApp.get(`/v1/fs/user/get-update-id?address=${author.address}`))
.body as GetUpdateIdResponse
update = new Update(PROJECT_NAME, author.address, deleteInfo.updateId + 1)
update.addAction(createRemoveDirectoryAction(`/articles/${author.article.slug}`))
update.setSignature(author.personalSign(update.getSignData()))
response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({ status: 'ok' })
// check the article is no longer available by slug
const removedArticleResponse = await supertestApp.get(
`/v1/fs/blob/get-article?userAddress=${author.address}&slug=${author.article.slug}`,
)
expect(removedArticleResponse.status).toBe(500)
expect(removedArticleResponse.body.status).toBe('error')
})
})
================================================
FILE: test/controllers/file-system/blob.test.ts
================================================
// todo change managing of the file system to configure and call when needed
process.env.SHOW_LOGS = 'false'
import tmp from 'tmp'
import path from 'path'
import knex from 'knex'
import knexConfig from '../../../knexfile'
import pool from '../../../src/db'
import supertest from 'supertest'
import app, { clearFileSystem, createTonStorageInstance, syncFileSystem } from '../../../src/app'
import {
createAddFileAction,
createAddUserAction,
createRemoveFileAction,
personalSign,
Update,
} from '@fairjournal/file-system'
import { createWallet, removeAllTonStorageFiles, tonStorageFilesList } from '../../utils'
import { MAX_BLOB_SIZE, PROJECT_NAME } from '../../../src/controllers/file-system/const'
import { TonstorageCLI } from 'tonstorage-cli'
import fs from 'fs'
const db = knex(process.env.DB_SOCKET_PATH ? knexConfig.docker : knexConfig.development)
describe('blob', () => {
// Shared ton-storage handle, recreated before every test
let tonStorage: TonstorageCLI
beforeEach(async () => {
// Rollback the migration (if any)
await db.migrate.rollback()
// Run the migration
await db.migrate.latest()
// Reset the in-memory file system and start each test from an empty ton-storage
clearFileSystem()
tonStorage = createTonStorageInstance()
await removeAllTonStorageFiles(tonStorage)
expect(await tonStorageFilesList(tonStorage)).toHaveLength(0)
})
afterEach(async () => {
// After each test, we can rollback the migration
await db.migrate.rollback()
})
afterAll(async () => {
// Close the database connection after all tests are done
await db.destroy()
await pool.end()
await removeAllTonStorageFiles(tonStorage)
})
// Uploads each fixture repeatedly and checks that the response metadata is
// stable and that ton-storage deduplicates the blobs.
it('upload and download blob', async () => {
const supertestApp = supertest(app)
await syncFileSystem()
// Expected metadata precomputed for the fixtures in test/data
// NOTE(review): img1.jpg does not appear under test/data in this extract — confirm the fixture exists
const files = [
{
name: 'file1.txt',
mime_type: 'text/plain',
size: 12,
sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',
reference: '65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',
},
{
name: 'img1.jpg',
mime_type: 'image/jpeg',
size: 2022171,
sha256: '6b0f972d83497327eb8adc8a9a58177d99140322570b86773969f6e5febec698',
reference: 'f67a56fe1f9198e1e5024eed4cc82f24137aaffb373351139c1e066a4e5d58fc',
},
]
for (const [index, file] of files.entries()) {
const filePath = path.join(__dirname, `../../data/${file.name}`)
// Upload the same file 10 times: every response must be identical (idempotent upload)
for (let i = 0; i < 10; i++) {
const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)
expect(response.status).toBe(200)
const data = response.body.data
expect(data.reference).toBe(file.reference)
expect(data.mime_type).toBe(file.mime_type)
expect(data.sha256).toBe(file.sha256)
expect(data.size).toBe(file.size)
}
// Deduplication: each distinct file adds exactly one torrent
expect(await tonStorageFilesList(tonStorage)).toHaveLength(index + 1)
}
})
it('add update with reference that do not exists', async () => {
const supertestApp = supertest(app)
const wallet = await createWallet()
const author = {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
}
const nonExistentReference = '0'.repeat(64)
const update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.addAction(
createAddFileAction({
path: '/index-json',
mimeType: 'application/json',
size: 100,
hash: nonExistentReference,
}),
)
update.setSignature(author.personalSign(update.getSignData()))
const response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
message: `Reference "${nonExistentReference}" not found`,
status: 'error',
})
})
it('duplicate file upload', async () => {
const supertestApp = supertest(app)
// Sync file system before uploading
await syncFileSystem()
const file = {
name: 'file1.txt',
mime_type: 'text/plain',
size: 12,
sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',
reference: '65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',
}
const filePath = path.join(__dirname, `../../data/${file.name}`)
// First upload
let response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)
expect(response.status).toBe(200)
let data = response.body.data
expect(data.reference).toBe(file.reference)
expect(data.mime_type).toBe(file.mime_type)
expect(data.sha256).toBe(file.sha256)
expect(data.size).toBe(file.size)
// Second upload of the same file
response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)
expect(response.status).toBe(200) // Or some error status if your application doesn't allow duplicate uploads
data = response.body.data
// Check if it is the same file or a different one based on your application logic
expect(data.reference).toBe(file.reference)
expect(data.mime_type).toBe(file.mime_type)
expect(data.sha256).toBe(file.sha256)
expect(data.size).toBe(file.size)
// Check that the count of files in tonStorage is still 1
expect(await tonStorageFilesList(tonStorage)).toHaveLength(1)
})
it('upload a file larger than the max size limit', async () => {
const supertestApp = supertest(app)
const tempFile = tmp.fileSync()
try {
fs.writeSync(tempFile.fd, Buffer.alloc(MAX_BLOB_SIZE + 1))
const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', tempFile.name)
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
message: 'File too large',
status: 'error',
})
} finally {
// Clean up the temp file regardless of the test result
tempFile.removeCallback()
}
})
// Two-revision flow: add a file at a fixed path, verify get-path-info,
// then replace it (remove + add) and verify the path now points at the
// second blob with updateId bumped to 2.
it('update fs file', async () => {
const supertestApp = supertest(app)
const wallet = await createWallet()
const author = {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
}
await syncFileSystem()
// Precomputed metadata for the two fixtures in test/data
const file1 = {
name: 'file1.txt',
mime_type: 'text/plain',
size: 12,
sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',
reference: '65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',
}
const file2 = {
name: 'file2.txt',
mime_type: 'text/plain',
size: 258,
sha256: '5438a317bde30599b535f86cd3ed0a69d88ab4d17ee935199bb3a07a4189fbd4',
reference: '366f6ec29a530266595d9dc11415bd7fb3312d816308774db445f872153b2d97',
}
const remoteFileName = 'profile-data'
const remoteFilePath = `/${remoteFileName}`
const filePath1 = path.join(__dirname, `../../data/${file1.name}`)
const filePath2 = path.join(__dirname, `../../data/${file2.name}`)
// First upload
const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath1)
expect(response.status).toBe(200)
const data = response.body.data
expect(data.reference).toBe(file1.reference)
expect(data.mime_type).toBe(file1.mime_type)
expect(data.sha256).toBe(file1.sha256)
expect(data.size).toBe(file1.size)
// Register the user and attach the first blob at /profile-data
const update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.addAction(
createAddFileAction({
path: remoteFilePath,
mimeType: file1.mime_type,
size: file1.size,
hash: file1.reference,
}),
)
update.setSignature(author.personalSign(update.getSignData()))
const apply1 = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(apply1.status).toBe(200)
expect(apply1.body).toStrictEqual({
status: 'ok',
})
// Check get-path-info method
const pathInfoResponse1 = await supertestApp.get(
`/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${remoteFilePath}`,
)
expect(pathInfoResponse1.status).toBe(200)
expect(pathInfoResponse1.body).toStrictEqual({
status: 'ok',
userAddress: author.address,
path: remoteFilePath,
data: {
name: remoteFileName,
mimeType: file1.mime_type,
size: file1.size,
hash: file1.reference,
updateId: 1,
},
})
// Second upload
const response2 = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath2)
expect(response2.status).toBe(200)
const data2 = response2.body.data
expect(data2.reference).toBe(file2.reference)
expect(data2.mime_type).toBe(file2.mime_type)
expect(data2.sha256).toBe(file2.sha256)
expect(data2.size).toBe(file2.size)
// Reuse the same Update instance for revision 2: clear actions, remove the
// old file and add the new one at the same path
update.setId(2)
update.setActions([])
update.addAction(createRemoveFileAction('/profile-data'))
update.addAction(
createAddFileAction({
path: remoteFilePath,
mimeType: file2.mime_type,
size: file2.size,
hash: file2.reference,
}),
)
update.setSignature(author.personalSign(update.getSignData()))
const apply2 = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(apply2.status).toBe(200)
expect(apply2.body).toStrictEqual({
status: 'ok',
})
// Check get-path-info method
const pathInfoResponse2 = await supertestApp.get(
`/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${remoteFilePath}`,
)
expect(pathInfoResponse2.status).toBe(200)
expect(pathInfoResponse2.body).toStrictEqual({
status: 'ok',
userAddress: author.address,
path: remoteFilePath,
data: {
name: remoteFileName,
mimeType: file2.mime_type,
size: file2.size,
hash: file2.reference,
updateId: 2,
},
})
// Both blobs remain in ton-storage even though the path now points at the second one
expect(await tonStorageFilesList(tonStorage)).toHaveLength(2)
})
// Checks the error messages of get-path-info for malformed and near-miss paths.
it('get path info for incorrect path', async () => {
const supertestApp = supertest(app)
const wallet = await createWallet()
await syncFileSystem()
const author = {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
}
const file = {
name: 'file1.txt',
mime_type: 'text/plain',
size: 12,
sha256: 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a',
reference: '65d9deffdec24c795d88611d32b80831c076000af7402a8b5973bf188b0b6b2d',
}
const filePath = path.join(__dirname, `../../data/${file.name}`)
const response = await supertestApp.post('/v1/fs/blob/upload').attach('blob', filePath)
expect(response.status).toBe(200)
const remoteFileName = 'file-test'
const remoteFilePath = `/${remoteFileName}`
const update = new Update(PROJECT_NAME, author.address, 1)
update.addAction(createAddUserAction(author.address))
update.addAction(
createAddFileAction({
path: remoteFilePath,
mimeType: file.mime_type,
size: file.size,
hash: file.reference,
}),
)
update.setSignature(author.personalSign(update.getSignData()))
const applyUpdateResponse = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(applyUpdateResponse.status).toBe(200)
// Try to get the file without / symbol
const pathInfoResponse1 = await supertestApp.get(
`/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${remoteFileName}`,
)
expect(pathInfoResponse1.status).toBe(500)
// NOTE(review): the message concatenates address and name with no separator —
// this pins the server's current (odd-looking) error format
expect(pathInfoResponse1.body).toStrictEqual({
status: 'error',
message: `Can't get info about the path: Get item: item not found: "${author.address}${remoteFileName}"`,
})
// Try to get another file with a full path but one symbol more
const fakePath = `${remoteFilePath}1`
const fakeName = `${remoteFileName}1`
const pathInfoResponse2 = await supertestApp.get(
`/v1/fs/blob/get-path-info?userAddress=${author.address}&path=${fakePath}`,
)
expect(pathInfoResponse2.status).toBe(500)
expect(pathInfoResponse2.body).toStrictEqual({
status: 'error',
message: `Can't get info about the path: Get item: item not found: "${fakeName}"`,
})
})
})
================================================
FILE: test/controllers/file-system/file-system.test.ts
================================================
// todo change managing of the file system to configure and call when needed
process.env.SHOW_LOGS = 'false'
import knex from 'knex'
import knexConfig from '../../../knexfile'
import pool from '../../../src/db'
import supertest from 'supertest'
import app, { clearFileSystem, fileSystem, syncFileSystem } from '../../../src/app'
import { createAddUserAction, Update, personalSign } from '@fairjournal/file-system'
import { PROJECT_NAME } from '../../../src/controllers/file-system/const'
import { createWallet, getUpdatesCount } from '../../utils'
const db = knex(process.env.DB_SOCKET_PATH ? knexConfig.docker : knexConfig.development)
describe('file-system', () => {
  beforeEach(async () => {
    // Rollback the migration (if any)
    await db.migrate.rollback()
    // Run the migration
    await db.migrate.latest()
    // Reset the in-memory file system state between tests
    clearFileSystem()
  })
  afterEach(async () => {
    // After each test, we can rollback the migration
    await db.migrate.rollback()
  })
  afterAll(async () => {
    // Close the database connection after all tests are done.
    // Fix: pool.end() returns a promise and was previously fire-and-forget,
    // which let Jest tear down while the pool was still closing; the same
    // hook in blob.test.ts already awaits it.
    await db.destroy()
    await pool.end()
  })
it('update/apply - empty data', async () => {
const supertestApp = supertest(app)
const response = await supertestApp.post('/v1/fs/update/apply').send()
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
status: 'error',
message: 'Data is not an object',
})
})
it('update/apply - empty object', async () => {
const supertestApp = supertest(app)
const response = await supertestApp.post('/v1/fs/update/apply').send({ update: {} })
expect(response.status).toBe(500)
expect(response.body).toStrictEqual({
status: 'error',
message: 'UpdateDataSigned: signature is not defined',
})
})
// End-to-end: register a user, reject duplicate/invalid updates, wipe the
// in-memory file system and verify it can be rebuilt from the DB.
it('update/apply - register, clear fs and recover from db', async () => {
const supertestApp = supertest(app)
// Three wallets are created; only authors[0] is used below
const authors = await Promise.all(
Array.from({ length: 3 }, async () => {
const wallet = await createWallet()
return {
address: wallet.publicKey.toString('hex'),
personalSign: (data: string): string => personalSign(data, wallet.secretKey),
}
}),
)
// Before registration the user must not exist
const responseUserCheck0 = await supertestApp.get(`/v1/fs/user/info?address=${authors[0].address}`)
expect(responseUserCheck0.status).toBe(200)
expect(responseUserCheck0.body).toStrictEqual({
address: authors[0].address,
isUserExists: false,
status: 'ok',
})
const update = new Update(PROJECT_NAME, authors[0].address, 1)
update.addAction(createAddUserAction(authors[0].address))
update.setSignature(authors[0].personalSign(update.getSignData()))
expect(await getUpdatesCount(db)).toEqual(0)
expect(fileSystem.getUpdateId(authors[0].address)).toEqual(0)
const response = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({ status: 'ok' })
// The update is persisted in the DB and reflected in the in-memory fs
expect(await getUpdatesCount(db)).toEqual(1)
expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)
const responseUserCheck1 = await supertestApp.get(`/v1/fs/user/info?address=${authors[0].address}`)
expect(responseUserCheck1.status).toBe(200)
expect(responseUserCheck1.body).toStrictEqual({
address: authors[0].address,
isUserExists: true,
status: 'ok',
})
// Re-sending the same update id must be rejected
const response1 = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response1.status).toBe(500)
expect(response1.body).toStrictEqual({ status: 'error', message: 'Update with id "1" already exists' })
expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)
// A fresh id but a duplicate add-user action must also be rejected
update.setId(2)
update.setSignature(authors[0].personalSign(update.getSignData()))
const response2 = await supertestApp.post('/v1/fs/update/apply').send({ update })
expect(response2.status).toBe(500)
expect(response2.body).toStrictEqual({
status: 'error',
message: `User with address "${authors[0].address}" already exists`,
})
expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)
// Wipe the in-memory state, then rebuild it from persisted updates
clearFileSystem()
expect(fileSystem.getUpdateId(authors[0].address)).toEqual(0)
// recover filesystem from the db
await syncFileSystem()
expect(fileSystem.getUpdateId(authors[0].address)).toEqual(1)
})
it('user/info - user do not exists', async () => {
const supertestApp = supertest(app)
const address = 'd66401889725ada1f6ba8e78f67d24aec386341d8e3310f00ef64df463def1ef'
const response = await supertestApp.get(`/v1/fs/user/info?address=${address}`)
expect(response.status).toBe(200)
expect(response.body).toStrictEqual({
address: address,
isUserExists: false,
status: 'ok',
})
})
// todo cover case of recovering filesystem using real ton. using DB and without db
})
================================================
FILE: test/data/file1.txt
================================================
Hello world!
================================================
FILE: test/data/file2.txt
================================================
Privacy is a fundamental human right.
Your devices are important to so many parts of your life.
What you share from those experiences, and who you share it with, should be up to you.
It’s not always easy.
But that’s the kind of innovation we believe in.
================================================
FILE: test/utils.ts
================================================
import { getSecureRandomBytes, KeyPair, keyPairFromSeed } from 'ton-crypto'
import { Knex } from 'knex'
import { Article } from '../src/controllers/file-system/blob/utils'
import { TonstorageCLI } from 'tonstorage-cli'
import { Torrent } from '../src/ton-utils'
import { base64ToHex, extractHash } from '../src/utils'
import * as fs from 'fs'
import * as os from 'os'
import * as path from 'path'
import * as crypto from 'crypto'
import { Tree } from '@fairjournal/file-system'
/**
 * Fake storage
 *
 * In-memory stand-in for a real storage backend, used in tests
 */
export interface FakeStorage {
/**
 * Uploads data to the storage and returns its reference
 *
 * @param data Data to upload
 * @returns Reference identifying the stored data
 */
upload: (data: Uint8Array) => Promise<string>
/**
 * Downloads data from the storage by its reference
 *
 * @param reference Reference to download
 * @returns The previously uploaded bytes
 */
download: (reference: string) => Promise<Uint8Array>
}
/**
 * Name of the DB table that stores file-system updates
 */
export const UPDATES_TABLE_NAME = 'fs_update'
/**
 * According: https://github.com/ton-foundation/specs/blob/main/specs/wtf-0002.md
 */
export const TON_SAFE_SIGN_MAGIC = 'ton-safe-sign-magic'
/**
 * Creates a TON wallet key pair, either deterministic from a hex seed or random
 *
 * @param userSeed Optional 32-byte seed encoded as hex; secure random bytes when omitted
 */
export async function createWallet(userSeed?: string): Promise<KeyPair> {
  // Seed is always exactly 32 bytes
  let seed: Buffer
  if (userSeed) {
    seed = Buffer.from(userSeed, 'hex')
  } else {
    seed = await getSecureRandomBytes(32)
  }

  return keyPairFromSeed(seed)
}
/**
 * Counts the rows in the given table
 *
 * @param db Database connection
 * @param tableName Name of the table to count
 */
export async function getRecordCount(db: Knex, tableName: string): Promise<number> {
  const rows = await db(tableName).count('* as count')
  const [firstRow] = rows

  return Number(firstRow.count)
}
/**
 * Counts the rows in the file-system updates table
 *
 * @param db Database connection
 */
export async function getUpdatesCount(db: Knex): Promise<number> {
  return await getRecordCount(db, UPDATES_TABLE_NAME)
}
/**
 * Returns a random integer in the range [0, max)
 *
 * @param max Exclusive upper bound (defaults to 1000)
 */
export function randomNumber(max = 1000): number {
  // Math.trunc is equivalent to Math.floor here because the product is never negative
  return Math.trunc(Math.random() * max)
}
/**
 * Generates a random article fixture: a title block followed by three paragraphs
 */
export function generateArticle(): Article {
  const articleId = randomNumber()
  const paragraphs = [
    'Hello world! Paragraph 1.',
    'Hello world 2222 Paragraph 2',
    'Hello world 33333 Paragraph 3',
  ]

  return {
    slug: `article-${articleId}`,
    data: {
      blocks: [
        {
          type: 'title' as const,
          text: `Article ${articleId}`,
        },
        ...paragraphs.map(text => ({ type: 'paragraph' as const, text })),
      ],
    },
    preview: {
      img: 'https://test.domain/test.jpg',
    },
  }
}
/**
 * Left-pads a string with '0' characters until it reaches the requested length
 *
 * Strings already at or beyond the target length are returned unchanged.
 *
 * @param input Input string
 * @param resultSize Target length (defaults to 64)
 */
export function padStringWithZeros(input: string, resultSize = 64): string {
  return input.padStart(resultSize, '0')
}
/**
 * Creates an in-memory FakeStorage backed by a map of reference -> bytes
 *
 * References are zero-padded sequential counters, so uploads are deterministic.
 */
export function getFakeStorage(): FakeStorage {
  const blobs = new Map<string, Uint8Array>()
  let counter = 0

  const upload = async (data: Uint8Array): Promise<string> => {
    counter += 1
    const reference = padStringWithZeros(counter.toString())
    blobs.set(reference, data)

    return reference
  }

  const download = async (reference: string): Promise<Uint8Array> => {
    const data = blobs.get(reference)

    if (!data) {
      throw new Error(`Reference "${reference}" not found`)
    }

    return data
  }

  return { upload, download }
}
/**
 * Fetches the list of torrents currently registered in ton-storage
 *
 * @param tonStorage Ton-storage instance
 * @throws When the CLI reports a non-ok result
 */
export async function tonStorageFilesList(tonStorage: TonstorageCLI): Promise<Torrent[]> {
  const list = await tonStorage.list()

  if (!list?.ok) {
    throw new Error(`Failed to get list of torrents from ton-storage: ${JSON.stringify(list)}`)
  }

  const torrents = list?.result?.torrents

  return (torrents || []) as Torrent[]
}
/**
 * Removes every torrent from ton-storage
 *
 * @param tonStorage Ton-storage instance
 */
export async function removeAllTonStorageFiles(tonStorage: TonstorageCLI): Promise<void> {
  // tonStorageFilesList already falls back to an empty array, so the previous
  // `torrents || []` re-check was dead code and has been removed
  const torrents = await tonStorageFilesList(tonStorage)
  for (const torrent of torrents) {
    await tonStorage.remove(base64ToHex(torrent.hash))
  }
}
/**
 * Writes data to a file inside a fresh unique temp directory and returns the file path
 *
 * @param data Data to write
 * @param name File name inside the temp directory (defaults to 'blob')
 * @returns Absolute path of the written file
 */
export async function writeTempFile(data: Uint8Array, name = 'blob'): Promise<string> {
  // mkdtempSync creates the directory atomically with a unique suffix, which
  // avoids the race/collision window of generating a random name first and
  // calling mkdirSync afterwards
  const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'blob-'))
  const filePath = path.join(dirPath, name)
  fs.writeFileSync(filePath, data)

  return filePath
}
/**
 * Uploads bytes to ton-storage and returns the resulting reference as lowercase hex
 *
 * @param tonStorage Ton-storage instance
 * @param bytes Bytes to upload
 * @throws When ton-storage rejects the upload for any reason other than a duplicate hash
 */
export async function uploadBytes(tonStorage: TonstorageCLI, bytes: Uint8Array): Promise<string> {
  const filePath = await writeTempFile(bytes)

  let createResult
  try {
    createResult = await tonStorage.create(filePath, {
      copy: true,
      desc: '',
      upload: false,
    })
  } finally {
    // The temp file is no longer needed once ton-storage has copied it
    if (fs.existsSync(filePath)) {
      fs.unlinkSync(filePath)
    }
  }

  let reference
  if (createResult?.ok) {
    reference = base64ToHex(createResult.result.torrent.hash)
  } else if (createResult?.error?.includes('duplicate hash')) {
    // Same content was uploaded before; recover the hash from the error text
    reference = extractHash(createResult?.error)
  } else {
    throw new Error(`Failed to upload bytes to ton-storage: ${JSON.stringify(createResult)}`)
  }

  return reference.toLowerCase()
}
/**
 * Asserts that the value is a valid Tree (it must contain a root directory)
 *
 * @param data Value to check
 * @throws When the value has no root directory
 */
export function assertTree(data: unknown): asserts data is Tree {
  // todo remove this method when it will be exported from fs
  const candidate = data as Tree

  if (!candidate.directory) {
    throw new Error('Tree: should contain root directory')
  }
}
================================================
FILE: tsconfig.json
================================================
{
"include": ["src", "test"],
"compilerOptions": {
"lib": [
"dom"
],
"alwaysStrict": true,
"target": "ES6",
"esModuleInterop": true,
"skipLibCheck": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "node",
"module": "commonjs",
"strict": true,
"resolveJsonModule": true,
"experimentalDecorators": true,
"emitDecoratorMetadata": true,
"typeRoots": [
"node_modules/@types"
],
"rootDirs": ["src"],
"outDir": "dist"
}
}
================================================
FILE: tsconfig.test.json
================================================
{
"extends": "./tsconfig.json",
"include": [
"src",
"test",
"jest.config.ts"
],
"compilerOptions": {
"noEmit": true,
"lib": [
"dom"
]
}
}
gitextract_d7t6a830/ ├── .eslintrc.json ├── .github/ │ └── workflows/ │ └── tests.yaml ├── .gitignore ├── .prettierrc ├── Dockerfile ├── README.md ├── blob/ │ └── README.md ├── example.env ├── jest.config.js ├── knexfile.ts ├── migrations/ │ ├── 20230706133935_init.ts │ ├── 20230713094839_fs_updates.ts │ ├── 20230716103734_file.ts │ ├── 20230725081357_settings.ts │ └── db.sql ├── nodemon.json ├── package.json ├── src/ │ ├── app.ts │ ├── controllers/ │ │ ├── ArticleController.ts │ │ ├── ImageController.ts │ │ ├── UserController.ts │ │ └── file-system/ │ │ ├── app/ │ │ │ ├── get-settings-action.ts │ │ │ ├── index.ts │ │ │ └── publish-action.ts │ │ ├── blob/ │ │ │ ├── get-article-action.ts │ │ │ ├── get-articles-action.ts │ │ │ ├── get-path-info-action.ts │ │ │ ├── index.ts │ │ │ ├── upload-action.ts │ │ │ └── utils.ts │ │ ├── const.ts │ │ ├── index.ts │ │ ├── types.ts │ │ ├── update/ │ │ │ ├── apply-action.ts │ │ │ └── index.ts │ │ ├── user/ │ │ │ ├── get-update-id-action.ts │ │ │ ├── index.ts │ │ │ └── info-action.ts │ │ └── utils.ts │ ├── db.ts │ ├── fs.ts │ ├── index.ts │ ├── models/ │ │ ├── Article.ts │ │ └── User.ts │ ├── routes.ts │ ├── ton-utils.ts │ └── utils.ts ├── startup.sh ├── test/ │ ├── controllers/ │ │ └── file-system/ │ │ ├── app.test.ts │ │ ├── article.test.ts │ │ ├── blob.test.ts │ │ └── file-system.test.ts │ ├── data/ │ │ ├── file1.txt │ │ └── file2.txt │ └── utils.ts ├── tsconfig.json └── tsconfig.test.json
SYMBOL INDEX (120 symbols across 22 files)
FILE: migrations/20230706133935_init.ts
function up (line 3) | async function up(knex: Knex): Promise<void> {
function down (line 55) | async function down(knex: Knex): Promise<void> {
FILE: migrations/20230713094839_fs_updates.ts
function up (line 3) | async function up(knex: Knex): Promise<void> {
function down (line 17) | async function down(knex: Knex): Promise<void> {
FILE: migrations/20230716103734_file.ts
function up (line 3) | async function up(knex: Knex): Promise<void> {
function down (line 15) | async function down(knex: Knex): Promise<void> {
FILE: migrations/20230725081357_settings.ts
function up (line 3) | async function up(knex: Knex): Promise<void> {
function down (line 12) | async function down(knex: Knex): Promise<void> {
FILE: src/app.ts
function log (line 39) | function log(message: string): void {
function waitTonStorage (line 51) | async function waitTonStorage(tonStorage: TonstorageCLI): Promise<void> {
function createTonStorageInstance (line 95) | function createTonStorageInstance(): TonstorageCLI {
function syncFileSystem (line 116) | async function syncFileSystem(): Promise<void> {
function clearFileSystem (line 130) | function clearFileSystem(): void {
FILE: src/controllers/file-system/app/publish-action.ts
type PublishBody (line 9) | interface PublishBody {
FILE: src/controllers/file-system/blob/get-article-action.ts
function checkUserExists (line 14) | function checkUserExists(address: string): void {
function getArticleData (line 28) | async function getArticleData(address: string, slug: string): Promise<Fi...
function convertDataToArticle (line 46) | async function convertDataToArticle(data: Directory, slug: string): Prom...
FILE: src/controllers/file-system/blob/get-articles-action.ts
function checkUserExistence (line 14) | function checkUserExistence(userAddress: string): void {
function getPathInfoWithErrorHandling (line 26) | function getPathInfoWithErrorHandling(path: string): File | Directory {
FILE: src/controllers/file-system/blob/upload-action.ts
type DBFileInfo (line 15) | interface DBFileInfo {
function insertFileInfo (line 57) | async function insertFileInfo(info: DBFileInfo): Promise<void> {
function getFileInfo (line 77) | async function getFileInfo(sha256: string): Promise<DBFileInfo> {
function isSha256Uploaded (line 101) | async function isSha256Uploaded(sha256: string): Promise<boolean> {
function removeFileAndDirectory (line 128) | function removeFileAndDirectory(filePath: string, directoryPath: string)...
function assertValidFile (line 145) | function assertValidFile(file: Express.Multer.File | undefined): asserts...
function handleFileUpload (line 174) | async function handleFileUpload(
function checkPathExists (line 217) | function checkPathExists(path: string, message: string): void {
function removeUploadedFile (line 230) | async function removeUploadedFile(filePath: string): Promise<void> {
function setPermissions (line 245) | function setPermissions(reference: string): void {
FILE: src/controllers/file-system/blob/utils.ts
constant SHORT_ARTICLE_LENGTH (line 8) | const SHORT_ARTICLE_LENGTH = 1000
constant ARTICLE_INDEX_FILE_NAME (line 13) | const ARTICLE_INDEX_FILE_NAME = 'index-json'
type ShortArticle (line 18) | interface ShortArticle {
type Article (line 38) | interface Article {
type ArticlesResponse (line 58) | interface ArticlesResponse {
type ArticleResponse (line 78) | interface ArticleResponse {
type PathInfoResponse (line 108) | interface PathInfoResponse {
function directoryToShortArticle (line 135) | async function directoryToShortArticle(directory: Directory): Promise<Sh...
function isArticleDirectory (line 160) | function isArticleDirectory(directory: Directory): boolean {
function directoriesToShortArticles (line 171) | async function directoriesToShortArticles(directories: Directory[]): Pro...
function directoryToArticle (line 190) | async function directoryToArticle(directory: Directory): Promise<Article> {
FILE: src/controllers/file-system/const.ts
constant DEFAULT_DIRECTORY (line 4) | const DEFAULT_DIRECTORY = 'articles'
constant PROJECT_NAME (line 9) | const PROJECT_NAME = 'fairjournal'
constant MAX_BLOB_SIZE (line 14) | const MAX_BLOB_SIZE = 1024 * 1024 * 10
FILE: src/controllers/file-system/types.ts
type FileStatus (line 4) | enum FileStatus {
FILE: src/controllers/file-system/update/apply-action.ts
type ApplyBody (line 15) | interface ApplyBody {
function insertUpdate (line 29) | async function insertUpdate(update: UpdateDataSigned): Promise<number> {
function validateUpdate (line 48) | async function validateUpdate(update: UpdateDataSigned): Promise<string[...
function updateFileStatus (line 80) | async function updateFileStatus(reference: string, status: FileStatus): ...
function validateAndGetAddFileReference (line 101) | async function validateAndGetAddFileReference(data: AddFileActionData): ...
function publishAllFiles (line 123) | async function publishAllFiles(update: UpdateDataSigned): Promise<string...
FILE: src/controllers/file-system/user/get-update-id-action.ts
type GetUpdateIdResponse (line 8) | interface GetUpdateIdResponse {
FILE: src/controllers/file-system/utils.ts
type SettingsKey (line 15) | enum SettingsKey {
function assertUserExists (line 27) | function assertUserExists(data: unknown): asserts data is string {
function assertPath (line 40) | function assertPath(data: unknown): asserts data is string {
function getPathInfo (line 54) | function getPathInfo(address: string, path: string): File | Directory {
function uploadToStorage (line 69) | async function uploadToStorage(path: string, message: string, isUpload: ...
function uploadData (line 98) | async function uploadData(data: string): Promise<ReferencedItem> {
function downloadData (line 116) | async function downloadData(reference: string): Promise<string> {
function upsertSettings (line 129) | async function upsertSettings(pool: Pool, key: string, value: string): P...
function getSetting (line 146) | async function getSetting(pool: Pool, key: string): Promise<string> {
FILE: src/fs.ts
type AsyncFunction (line 12) | type AsyncFunction = (records: any[]) => Promise<void>
function processInBatches (line 20) | async function processInBatches(pool: Pool, asyncFn: AsyncFunction): Pro...
function initFs (line 49) | function initFs(): FileSystem {
function syncFs (line 63) | async function syncFs(fs: FileSystem): Promise<void> {
function getReferencePath (line 80) | function getReferencePath(reference: string): string {
function isReferenceExists (line 96) | async function isReferenceExists(reference: string): Promise<boolean> {
function getContentByReference (line 105) | async function getContentByReference(reference: string): Promise<Uint8Ar...
function extractArticleText (line 121) | function extractArticleText(jsonObject: unknown, symbols: number): string {
FILE: src/index.ts
constant PORT (line 4) | const PORT = process.env.PORT || 5000
function start (line 6) | async function start(): Promise<void> {
FILE: src/models/Article.ts
type Article (line 1) | interface Article {
FILE: src/models/User.ts
type User (line 1) | interface User {
FILE: src/ton-utils.ts
type DaemonResponse (line 4) | interface DaemonResponse {
type TorrentFull (line 13) | interface TorrentFull {
type Torrent (line 22) | interface Torrent {
type FileInfo (line 45) | interface FileInfo {
function assertIsDefined (line 64) | function assertIsDefined<T>(property: T | undefined | null, name: string...
function assertIsNumber (line 76) | function assertIsNumber(value: unknown, name: string): asserts value is ...
function assertIsBoolean (line 88) | function assertIsBoolean(value: unknown, name: string): asserts value is...
function assertFileInfo (line 99) | function assertFileInfo(fileInfo: FileInfo): asserts fileInfo is FileInfo {
function assertTorrent (line 112) | function assertTorrent(torrent: Torrent): asserts torrent is Torrent {
function assertDaemonResponse (line 135) | function assertDaemonResponse(data: DaemonResponse): asserts data is Dae...
FILE: src/utils.ts
constant PUBLIC_KEY_LENGTH (line 11) | const PUBLIC_KEY_LENGTH = 64
constant REFERENCE_LENGTH (line 16) | const REFERENCE_LENGTH = 64
constant MAX_ARTICLE_NAME_LENGTH (line 21) | const MAX_ARTICLE_NAME_LENGTH = 64
function isString (line 28) | function isString(value: unknown): boolean {
function assertString (line 37) | function assertString(data: unknown): asserts data is string {
function assertStringLength (line 49) | function assertStringLength(data: unknown, length: number): asserts data...
function assertAddress (line 62) | function assertAddress(data: unknown): asserts data is string {
function isHexString (line 72) | function isHexString(value: string): boolean {
function assertHex (line 83) | function assertHex(data: unknown): asserts data is string {
function assertReference (line 96) | function assertReference(data: unknown): asserts data is string {
function getPathParts (line 106) | function getPathParts(path: string): string[] {
function assertArticleName (line 115) | function assertArticleName(data: unknown): asserts data is string {
function isObject (line 130) | function isObject(data: unknown): data is Record<string, unknown> {
function assertObject (line 140) | function assertObject(data: unknown, customError?: string): asserts data...
function bytesToString (line 151) | function bytesToString(data: Uint8Array): string {
function stringToBytes (line 162) | function stringToBytes(data: string): Uint8Array {
function assertJson (line 173) | function assertJson(data: unknown): asserts data is string {
function calculateSHA256 (line 190) | async function calculateSHA256(filePath: string): Promise<string> {
function toAbsolutePath (line 213) | function toAbsolutePath(...paths: string[]): string {
function delay (line 222) | async function delay(ms: number): Promise<void> {
function extractHash (line 231) | function extractHash(message: string): string {
function base64ToHex (line 245) | function base64ToHex(base64: string): string {
function hexToBase64 (line 252) | function hexToBase64(hex: string): string {
FILE: test/utils.ts
type FakeStorage (line 16) | interface FakeStorage {
constant UPDATES_TABLE_NAME (line 32) | const UPDATES_TABLE_NAME = 'fs_update'
constant TON_SAFE_SIGN_MAGIC (line 37) | const TON_SAFE_SIGN_MAGIC = 'ton-safe-sign-magic'
function createWallet (line 42) | async function createWallet(userSeed?: string): Promise<KeyPair> {
function getRecordCount (line 54) | async function getRecordCount(db: Knex, tableName: string): Promise<numb...
function getUpdatesCount (line 65) | async function getUpdatesCount(db: Knex): Promise<number> {
function randomNumber (line 74) | function randomNumber(max = 1000): number {
function generateArticle (line 81) | function generateArticle(): Article {
function padStringWithZeros (line 118) | function padStringWithZeros(input: string, resultSize = 64): string {
function getFakeStorage (line 126) | function getFakeStorage(): FakeStorage {
function tonStorageFilesList (line 154) | async function tonStorageFilesList(tonStorage: TonstorageCLI): Promise<T...
function removeAllTonStorageFiles (line 169) | async function removeAllTonStorageFiles(tonStorage: TonstorageCLI): Prom...
function writeTempFile (line 183) | async function writeTempFile(data: Uint8Array, name = 'blob'): Promise<s...
function uploadBytes (line 200) | async function uploadBytes(tonStorage: TonstorageCLI, bytes: Uint8Array)...
function assertTree (line 233) | function assertTree(data: unknown): asserts data is Tree {
Condensed preview — 57 files, each showing its path, character count, and a content snippet. Download the .json file or copy the output to get the full structured content (135K chars).
[
{
"path": ".eslintrc.json",
"chars": 3351,
"preview": "{\n \"env\": {\n \"browser\": true,\n \"commonjs\": true,\n \"es6\": true,\n \"node\": true,\n \"jest\": true\n },\n \"pars"
},
{
"path": ".github/workflows/tests.yaml",
"chars": 712,
"preview": "name: Test\n\non:\n push:\n branches:\n - master\n pull_request:\n branches:\n - '**'\n\njobs:\n test:\n name:"
},
{
"path": ".gitignore",
"chars": 249,
"preview": "/node_modules\r\n/.pnp\r\n.pnp.js\r\n\r\n# testing\r\n/coverage\r\n\r\n# production\r\n/build\r\n\r\n# misc\r\n.env\r\n.DS_Store\r\n.env.local\r\n.e"
},
{
"path": ".prettierrc",
"chars": 254,
"preview": "{\n \"printWidth\": 120,\n \"tabWidth\": 2,\n \"useTabs\": false,\n \"bracketSpacing\": true,\n \"semi\": false,\n \"singleQuote\": "
},
{
"path": "Dockerfile",
"chars": 1505,
"preview": "# Start from the latest LTS Node version built for arm64 on Alpine\nFROM node:alpine\n\n# Add the TON Storage daemon and CL"
},
{
"path": "README.md",
"chars": 5785,
"preview": "# Mutable File System Gateway\n\nThis repository contains the server-side implementation of our decentralized file system "
},
{
"path": "blob/README.md",
"chars": 21,
"preview": "# Uploaded blobs here"
},
{
"path": "example.env",
"chars": 888,
"preview": "# Path to the root of the files\nFILES_ROOT_PATH=/Users/test/web/fj-backend\n\n# Port of the application\nPORT=5000\n\n# Datab"
},
{
"path": "jest.config.js",
"chars": 123,
"preview": "module.exports = {\n preset: 'ts-jest',\n testEnvironment: 'node',\n testMatch: ['**/*.test.ts'],\n testTimeout: 100000,"
},
{
"path": "knexfile.ts",
"chars": 707,
"preview": "import { config } from 'dotenv'\nimport { Knex } from 'knex'\n\nconfig()\n\nconst knexConfig: Knex.Config = {\n client: 'mysq"
},
{
"path": "migrations/20230706133935_init.ts",
"chars": 3727,
"preview": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n // create table\n await knex.schem"
},
{
"path": "migrations/20230713094839_fs_updates.ts",
"chars": 724,
"preview": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n return knex.schema.createTable('fs"
},
{
"path": "migrations/20230716103734_file.ts",
"chars": 635,
"preview": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n return knex.schema.createTable('fi"
},
{
"path": "migrations/20230725081357_settings.ts",
"chars": 466,
"preview": "import { Knex } from \"knex\";\n\nexport async function up(knex: Knex): Promise<void> {\n return knex.schema.createTable('"
},
{
"path": "migrations/db.sql",
"chars": 87,
"preview": "CREATE DATABASE fair_journal\n CHARACTER SET utf8mb4\n COLLATE utf8mb4_general_ci;\n"
},
{
"path": "nodemon.json",
"chars": 123,
"preview": "{\r\n \"watch\": [\r\n \"src\"\r\n ],\r\n \"ext\": \".ts,.js\",\r\n \"ignore\": [],\r\n \"exec\": \"ts-node ./src/index.ts\""
},
{
"path": "package.json",
"chars": 1830,
"preview": "{\n \"name\": \"fair-journal-backend\",\n \"version\": \"1.0.0\",\n \"description\": \"\",\n \"main\": \"index.js\",\n \"scripts\": {\n "
},
{
"path": "src/app.ts",
"chars": 3290,
"preview": "import express, { Application } from 'express'\nimport cors from 'cors'\nimport router from './routes'\nimport fileSystemRo"
},
{
"path": "src/controllers/ArticleController.ts",
"chars": 3318,
"preview": "import { Request, Response } from 'express'\nimport { OkPacket, RowDataPacket } from 'mysql2'\nimport pool from '../db'\n\nc"
},
{
"path": "src/controllers/ImageController.ts",
"chars": 1175,
"preview": "import { Request, Response } from 'express'\nimport { OkPacket } from 'mysql2'\nimport pool from '../db'\n\nconst upload = a"
},
{
"path": "src/controllers/UserController.ts",
"chars": 4547,
"preview": "import { Request, Response } from 'express'\nimport { OkPacket, RowDataPacket } from 'mysql2'\nimport pool from '../db'\nim"
},
{
"path": "src/controllers/file-system/app/get-settings-action.ts",
"chars": 658,
"preview": "import { Request, Response, NextFunction } from 'express'\nimport pool from '../../../db'\nimport { getSetting } from '../"
},
{
"path": "src/controllers/file-system/app/index.ts",
"chars": 271,
"preview": "import express from 'express'\nimport publishAction from './publish-action'\nimport getSettingsAction from './get-settings"
},
{
"path": "src/controllers/file-system/app/publish-action.ts",
"chars": 1138,
"preview": "import { NextFunction, Request, Response } from 'express'\nimport { fileSystem } from '../../../app'\nimport { SettingsKey"
},
{
"path": "src/controllers/file-system/blob/get-article-action.ts",
"chars": 2798,
"preview": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress, assertArticleName } from '../../../uti"
},
{
"path": "src/controllers/file-system/blob/get-articles-action.ts",
"chars": 1901,
"preview": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { DEFAUL"
},
{
"path": "src/controllers/file-system/blob/get-path-info-action.ts",
"chars": 874,
"preview": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { PathIn"
},
{
"path": "src/controllers/file-system/blob/index.ts",
"chars": 736,
"preview": "import express from 'express'\nimport uploadAction from './upload-action'\nimport getArticleAction from './get-article-act"
},
{
"path": "src/controllers/file-system/blob/upload-action.ts",
"chars": 7256,
"preview": "import { NextFunction, Request, Response } from 'express'\nimport pool from '../../../db'\nimport { assertReference, calcu"
},
{
"path": "src/controllers/file-system/blob/utils.ts",
"chars": 4036,
"preview": "import { assertFiles, Directory, File } from '@fairjournal/file-system'\nimport { assertJson, bytesToString } from '../.."
},
{
"path": "src/controllers/file-system/const.ts",
"chars": 265,
"preview": "/**\n * Default directory where all files should be stored\n */\nexport const DEFAULT_DIRECTORY = 'articles'\n\n/**\n * Projec"
},
{
"path": "src/controllers/file-system/index.ts",
"chars": 346,
"preview": "import express from 'express'\nimport userRouter from './user'\nimport blobRouter from './blob'\nimport updateRouter from '"
},
{
"path": "src/controllers/file-system/types.ts",
"chars": 176,
"preview": "/**\n * File status in database\n */\nexport enum FileStatus {\n /**\n * File is new, just uploaded\n */\n New = 0,\n\n /*"
},
{
"path": "src/controllers/file-system/update/apply-action.ts",
"chars": 4580,
"preview": "import { Request, Response, NextFunction } from 'express'\nimport { ActionType, AddFileActionData, UpdateDataSigned } fro"
},
{
"path": "src/controllers/file-system/update/index.ts",
"chars": 162,
"preview": "import express from 'express'\nimport applyAction from './apply-action'\n\nconst router = express.Router()\nrouter.post('/ap"
},
{
"path": "src/controllers/file-system/user/get-update-id-action.ts",
"chars": 884,
"preview": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { fileSy"
},
{
"path": "src/controllers/file-system/user/index.ts",
"chars": 260,
"preview": "import express from 'express'\nimport infoAction from './info-action'\nimport getUpdateIdAction from './get-update-id-acti"
},
{
"path": "src/controllers/file-system/user/info-action.ts",
"chars": 567,
"preview": "import { NextFunction, Request, Response } from 'express'\nimport { assertAddress } from '../../../utils'\nimport { fileSy"
},
{
"path": "src/controllers/file-system/utils.ts",
"chars": 4106,
"preview": "import { fileSystem, tonstorage } from '../../app'\nimport { assertString, base64ToHex, extractHash } from '../../utils'\n"
},
{
"path": "src/db.ts",
"chars": 623,
"preview": "import mysql from 'mysql2/promise'\nimport dotenv from 'dotenv'\n\ndotenv.config()\n\nconst simpleConfig = {\n host: process."
},
{
"path": "src/fs.ts",
"chars": 3579,
"preview": "import { assertUpdateDataSignedArray, FileSystem } from '@fairjournal/file-system'\nimport { PROJECT_NAME } from './contr"
},
{
"path": "src/index.ts",
"chars": 333,
"preview": "import app, { clearFileSystem, syncFileSystem } from './app'\n\n// Start server\nconst PORT = process.env.PORT || 5000\n\nasy"
},
{
"path": "src/models/Article.ts",
"chars": 111,
"preview": "interface Article {\n id: number\n hash: string\n content: string\n authorId: number\n}\n\nexport default Article\n"
},
{
"path": "src/models/User.ts",
"chars": 145,
"preview": "interface User {\n id: number\n wallet: number\n avatar: string\n name: string\n description: string\n articles: number["
},
{
"path": "src/routes.ts",
"chars": 1388,
"preview": "import { Router } from 'express'\nimport {\n deleteUser,\n getUserById,\n updateUser,\n getArticlesByUserId,\n authorizeB"
},
{
"path": "src/ton-utils.ts",
"chars": 3885,
"preview": "/**\n * Daemon response\n */\nexport interface DaemonResponse {\n ok: boolean\n result: TorrentFull\n code: number\n}\n\n/**\n "
},
{
"path": "src/utils.ts",
"chars": 5489,
"preview": "import * as crypto from 'crypto'\nimport * as fs from 'fs'\nimport { promisify } from 'util'\nimport path from 'path'\n\ncons"
},
{
"path": "startup.sh",
"chars": 1606,
"preview": "#!/bin/sh\n\n# Create .env file\ncat > .env << EOF\n# Path to the root of the files\nFILES_ROOT_PATH=/app\n\n# Port of the appl"
},
{
"path": "test/controllers/file-system/app.test.ts",
"chars": 6343,
"preview": "import knex from 'knex'\nimport knexConfig from '../../../knexfile'\nimport { TonstorageCLI } from 'tonstorage-cli'\nimport"
},
{
"path": "test/controllers/file-system/article.test.ts",
"chars": 13382,
"preview": "// todo change managing of the file system to configure and call when needed\nprocess.env.SHOW_LOGS = 'false'\nimport { Ar"
},
{
"path": "test/controllers/file-system/blob.test.ts",
"chars": 12406,
"preview": "// todo change managing of the file system to configure and call when needed\nprocess.env.SHOW_LOGS = 'false'\nimport tmp "
},
{
"path": "test/controllers/file-system/file-system.test.ts",
"chars": 4881,
"preview": "// todo change managing of the file system to configure and call when needed\nprocess.env.SHOW_LOGS = 'false'\nimport knex"
},
{
"path": "test/data/file1.txt",
"chars": 12,
"preview": "Hello world!"
},
{
"path": "test/data/file2.txt",
"chars": 254,
"preview": "Privacy is a fundamental human right.\nYour devices are important to so many parts of your life.\nWhat you share from thos"
},
{
"path": "test/utils.ts",
"chars": 5957,
"preview": "import { getSecureRandomBytes, KeyPair, keyPairFromSeed } from 'ton-crypto'\nimport { Knex } from 'knex'\nimport { Article"
},
{
"path": "tsconfig.json",
"chars": 518,
"preview": "{\n \"include\": [\"src\", \"test\"],\n \"compilerOptions\": {\n \"lib\": [\n \"dom\"\n ],\n \"alwaysStrict\": true,\n \"ta"
},
{
"path": "tsconfig.test.json",
"chars": 178,
"preview": "{\n \"extends\": \"./tsconfig.json\",\n \"include\": [\n \"src\",\n \"test\",\n \"jest.config.ts\"\n ],\n \"compilerOptions\": {"
}
]
About this extraction
This page contains the full source code of the FairJournal/backend GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 57 files (122.7 KB, approximately 33.1k tokens) and a symbol index of 120 extracted functions, classes, methods, constants, and types. Use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.