diff --git a/.env.tpl b/.env.tpl index eef76572be..579fca2c1c 100644 --- a/.env.tpl +++ b/.env.tpl @@ -26,16 +26,16 @@ SENTRY_UPLOAD=false SLACK_USER_REQUEST_WEBHOOK_URL= ## API PostgREST -DATABASE_URL=http://localhost:3000 +DATABASE_URL=http://postgrest:3000 DATABASE_TOKEN=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI # Postgres Database -DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/postgres +DATABASE_CONNECTION=postgresql://postgres:postgres@db:5432/postgres -# Cluster +# Cluster CLUSTER_BASIC_AUTH_TOKEN = dGVzdDp0ZXN0 -CLUSTER_SERVICE = -CLUSTER_API_URL = http://localhost:9094 +CLUSTER_SERVICE = +CLUSTER_API_URL = http://ipfs-cluster:9094 # Maintenance Mode MAINTENANCE_MODE = rw @@ -45,4 +45,9 @@ S3_ENDPOINT = http://localhost:9095 S3_REGION = test S3_ACCESS_KEY_ID = test S3_SECRET_ACCESS_KEY = test -S3_BUCKET_NAME = test \ No newline at end of file +S3_BUCKET_NAME = test + +## Git +VERSION=development +COMMITHASH=development-hash +BRANCH=development diff --git a/.github/workflows/api.yml b/.github/workflows/api.yml index 75952ae0a6..c76a81cc5f 100644 --- a/.github/workflows/api.yml +++ b/.github/workflows/api.yml @@ -21,13 +21,8 @@ jobs: - uses: actions/setup-node@v2 with: node-version: '16' - - uses: bahmutov/npm-install@v1 - - run: npx playwright install-deps - - run: yarn test:api - env: - DATABASE_URL: http://localhost:3000 - DATABASE_TOKEN: eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI - DATABASE_CONNECTION: postgresql://postgres:postgres@localhost:5432/postgres + - run: 'cp .env.tpl .env' + - run: yarn test:api:docker deploy-dev: name: Deploy Dev if: github.event_name == 'pull_request' && github.ref != 'refs/heads/main' @@ -47,7 +42,7 @@ jobs: CF_API_TOKEN: 
${{secrets.CF_API_TOKEN}} with: apiToken: ${{secrets.CF_API_TOKEN }} - workingDirectory: 'packages/api' + workingDirectory: packages/api deploy-staging: name: Deploy Staging if: github.event_name == 'push' && github.ref == 'refs/heads/main' diff --git a/.gitignore b/.gitignore index 2d0492bb31..dc18c05ebc 100644 --- a/.gitignore +++ b/.gitignore @@ -42,4 +42,7 @@ site/public packages/website/public/robots.txt packages/website/public/sitemap.xml -packages/api/docker/compose \ No newline at end of file +packages/api/docker/compose +tmp/ +.envrc +db_data diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 7f026c6dee..8ae707fcee 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -2,7 +2,7 @@ This doc should contain everything you need to know to get a working development environment up and running. If it doesn't and you know what's missing, please open a PR or issue to update the guide! -## Pre-requisites +## Prerequisites You'll need at least the following: @@ -25,8 +25,10 @@ We use `yarn` in this project and commit the `yarn.lock` file. ``` 2. Setup your local environment with a `.env` file. See [intructions](#local-environment-configuration). 3. Run locally by starting the following processes. - 1. API server (`yarn dev:api`). - 2. Web server (`yarn dev:website`). + 1. Web server (`yarn dev:website`). + 2. API server (`yarn dev:api`). + - Note: This starts the API server in a Docker container, and does not persist database state between runs. + - If you would like to persist database state between runs, you can use the `dev:api:persist` command instead. 
The site should now be available at http://localhost:4000 diff --git a/package.json b/package.json index 4b69ad8707..235ae2352b 100644 --- a/package.json +++ b/package.json @@ -6,11 +6,13 @@ ] }, "scripts": { - "dev:api": "cd packages/api && yarn dev", + "dev:api": "yarn --cwd packages/api dev", + "dev:api:persist": "yarn --cwd packages/api dev:persist", "dev:website": "cd packages/website && yarn dev", "test": "run-s test:*", "test:client": "yarn --cwd packages/client test", "test:api": "yarn --cwd packages/api test", + "test:api:docker": "yarn --cwd packages/api test:docker", "test:website": "yarn --cwd packages/website test", "build:client:docs": "yarn --cwd packages/client typedoc", "build:website": "yarn --cwd packages/website build", diff --git a/packages/api/.dockerignore b/packages/api/.dockerignore new file mode 100644 index 0000000000..569695b294 --- /dev/null +++ b/packages/api/.dockerignore @@ -0,0 +1,2 @@ +dist/ +./docker/db_data/ diff --git a/packages/api/README.md b/packages/api/README.md index 60a83cb7f4..29e8a522f3 100644 --- a/packages/api/README.md +++ b/packages/api/README.md @@ -26,7 +26,7 @@ eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweDY1MDA3QTczOWFiN0F In the case you need to clean up docker after failed tests or debugging session you can just run the command below. 
```bash -yarn clean +yarn dev:clean ``` ### Dev CLI scripts diff --git a/packages/api/db/cargo.testing.sql b/packages/api/db/cargo.testing.sql index 0efba5b9cb..33e25ea316 100644 --- a/packages/api/db/cargo.testing.sql +++ b/packages/api/db/cargo.testing.sql @@ -106,3 +106,4 @@ INSERT INTO public.metric (name, value, updated_at) INSERT INTO public."user" (magic_link_id, github_id, name, email, public_address) VALUES ('did:ethr:0x65007A739ab7AC5c537161249b81250E49e2853C', 'github|000000', 'mock user', 'test@gmail.com', '0x65007A739ab7AC5c537161249b81250E49e2853C'); INSERT INTO public.auth_key (name, secret, user_id) VALUES ('main', 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweDY1MDA3QTczOWFiN0FDNWM1MzcxNjEyNDliODEyNTBFNDllMjg1M0MiLCJpc3MiOiJuZnQtc3RvcmFnZSIsImlhdCI6MTYzOTc1NDczNjYzOCwibmFtZSI6Im1haW4ifQ.wKwJIRXXHsgwVp8mOQp6r3_F4Lz5lnoAkgVP8wqwA_Y', 1); +INSERT INTO public.user_tag (user_id, tag, value, reason) VALUES (1, 'HasPsaAccess', 'true', 'bc this is for testing'); diff --git a/packages/api/docker/Dockerfile b/packages/api/docker/Dockerfile new file mode 100644 index 0000000000..33706bcd4c --- /dev/null +++ b/packages/api/docker/Dockerfile @@ -0,0 +1,24 @@ +FROM node:16-alpine3.12 + +HEALTHCHECK --interval=5s --timeout=5s --retries=3 \ + CMD curl -f http://localhost:8787/stats || exit 1 + +# Install some utilities to make debugging easier +RUN apk add --update bash curl vim tmux postgresql-client + +RUN npm i -g nodemon +RUN mkdir -p /app + +WORKDIR /app + +# Make your docker builds 100x faster with this one trick :) +COPY ./package.json . +RUN yarn install + +COPY ./tsconfig.json . 
+COPY ./docker/scripts ./docker-scripts +COPY ./scripts ./scripts +COPY ./db ./db +COPY ./src/ ./src + +ENTRYPOINT ["nodemon", "--watch", "src/", "--exec", "'./docker-scripts/run-miniflare.sh'" ] diff --git a/packages/api/docker/docker-compose.dev.yml b/packages/api/docker/docker-compose.dev.yml new file mode 100644 index 0000000000..c7f01a9242 --- /dev/null +++ b/packages/api/docker/docker-compose.dev.yml @@ -0,0 +1,6 @@ +services: + api: + ports: + - '8787:8787' + volumes: + - ../src:/app/src diff --git a/packages/api/docker/docker-compose.local-ports.yml b/packages/api/docker/docker-compose.local-ports.yml new file mode 100644 index 0000000000..8c10582859 --- /dev/null +++ b/packages/api/docker/docker-compose.local-ports.yml @@ -0,0 +1,10 @@ +services: + postgrest: + ports: + - '3000:3000' + db: + ports: + - '5432:5432' + ipfs-cluster: + ports: + - '9094:9094' diff --git a/packages/api/docker/docker-compose.yml b/packages/api/docker/docker-compose.yml index d534557238..b5b1e53e7f 100644 --- a/packages/api/docker/docker-compose.yml +++ b/packages/api/docker/docker-compose.yml @@ -1,57 +1,126 @@ -version: '3.6' services: - rest: + postgrest: image: postgrest/postgrest:v9.0.0 depends_on: - - db - restart: always - ports: - - 3000:3000/tcp + - db-init environment: - PGRST_DB_URI: postgres://postgres:postgres@db:5432/postgres + PGRST_DB_URI: $DATABASE_CONNECTION PGRST_DB_SCHEMAS: public,cargo PGRST_DB_ANON_ROLE: postgres PGRST_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long + db-init: + build: + dockerfile: './docker/Dockerfile' + context: '../' + entrypoint: 'psql $DATABASE_CONNECTION -v "ON_ERROR_STOP=1" -f ./db/config.sql -f ./db/tables.sql -f ./db/cargo.testing.sql -f ./db/functions.sql' + depends_on: + db: + condition: service_healthy + environment: + - DATABASE_CONNECTION + - DATABASE_URL + - DATABASE_TOKEN + api: + restart: on-failure + build: + dockerfile: ./docker/Dockerfile + context: ../ + environment: + - BRANCH + - CLUSTER_API_URL + - 
CLUSTER_BASIC_AUTH_TOKEN + - CLUSTER_SERVICE + - COMMITHASH + - DATABASE_CONNECTION + - DATABASE_TOKEN + - DATABASE_URL + - DEBUG + - ENV + - LOGTAIL_TOKEN + - MAGIC_SECRET_KEY + - MAILCHIMP_API_KEY + - MAINTENANCE_MODE + - METAPLEX_AUTH_TOKEN + - PRIVATE_KEY + - SALT + - SENTRY_DSN + - VERSION + + depends_on: + - postgrest + - ipfs-cluster db: + restart: on-failure + healthcheck: + test: + [ + 'CMD', + 'psql', + '$DATABASE_CONNECTION', + '-v', + 'ON_ERROR_STOP=1', + '-c', + 'SELECT 1', + ] + interval: '1s' + timeout: '1s' + retries: 100 build: context: ./postgres - deploy: - resources: - limits: - cpus: '2' - memory: 2G - reservations: - cpus: '1' - memory: 1G - ports: - - 5432:5432 environment: + DATABASE_CONNECTION: $DATABASE_CONNECTION POSTGRES_DB: postgres POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_PORT: 5432 - ipfs0: - container_name: ipfs0 + ipfs: image: ipfs/go-ipfs:v0.10.0 # update this when go-ipfs M1 macs https://github.com/ipfs/go-ipfs/issues/8645 - volumes: - - ./compose/ipfs0:/data/ipfs - cluster0: - container_name: cluster0 - image: ipfs/ipfs-cluster:v1.0.0-rc4 + ipfs-cluster: + image: ipfs/ipfs-cluster:v1.0.0-rc4 # we need to use this image tag, as nft.storage expects CIDs to come back in a certain format depends_on: - - ipfs0 + - ipfs environment: - CLUSTER_PEERNAME: cluster0 + CLUSTER_PEERNAME: cluster CLUSTER_SECRET: ${CLUSTER_SECRET} # From shell variable if set - CLUSTER_IPFSHTTP_NODEMULTIADDRESS: /dns4/ipfs0/tcp/5001 + CLUSTER_IPFSHTTP_NODEMULTIADDRESS: /dns4/ipfs/tcp/5001 CLUSTER_CRDT_TRUSTEDPEERS: '*' # Trust all peers in Cluster CLUSTER_RESTAPI_HTTPLISTENMULTIADDRESS: /ip4/0.0.0.0/tcp/9094 # Expose API CLUSTER_RESTAPI_BASICAUTHCREDENTIALS: test:test CLUSTER_RESTAPI_CORSALLOWEDMETHODS: GET,POST,OPTIONS CLUSTER_RESTAPI_CORSALLOWEDHEADERS: authorization CLUSTER_MONITORPINGINTERVAL: 2s # Speed up peer discovery - ports: - - '127.0.0.1:9094:9094' - volumes: - - ./compose/cluster0:/data/ipfs-cluster + playwright: + profiles: + - 
test + depends_on: + api: + condition: service_healthy + build: + dockerfile: ./docker/test.Dockerfile + context: ../ + environment: + - BRANCH + - CLUSTER_API_URL + - CLUSTER_BASIC_AUTH_TOKEN + - CLUSTER_SERVICE + - COMMITHASH + - DATABASE_CONNECTION + - DATABASE_TOKEN + - DATABASE_URL + - DEBUG + - ENV + - LOGTAIL_TOKEN + - MAGIC_SECRET_KEY + - MAILCHIMP_API_KEY + - MAINTENANCE_MODE + - METAPLEX_AUTH_TOKEN + - PRIVATE_KEY + - S3_ACCESS_KEY_ID + - S3_BUCKET_NAME + - S3_ENDPOINT + - S3_REGION + - S3_SECRET_ACCESS_KEY + - SALT + - SENTRY_DSN + - VERSION diff --git a/packages/api/docker/postgres/Dockerfile b/packages/api/docker/postgres/Dockerfile index 763c9ae137..9c504f9d62 100644 --- a/packages/api/docker/postgres/Dockerfile +++ b/packages/api/docker/postgres/Dockerfile @@ -2,13 +2,7 @@ FROM supabase/postgres:13.3.0 COPY 00-initial-schema.sql /docker-entrypoint-initdb.d/00-initial-schema.sql -# Run time values -ENV POSTGRES_DB=postgres -ENV POSTGRES_USER=postgres -ENV POSTGRES_PASSWORD=postgres -ENV POSTGRES_PORT=5432 - EXPOSE 5432 # Enables cat /var/lib/postgresql/data/pg_log/postgresql.log within the container to debug queries -CMD ["postgres", "-c", "wal_level=logical", "-c", "log_statement=all", "-c", "pg_stat_statements.track=all"] \ No newline at end of file +CMD ["postgres", "-c", "wal_level=logical", "-c", "log_statement=all", "-c", "pg_stat_statements.track=all"] diff --git a/packages/api/docker/scripts/run-miniflare.sh b/packages/api/docker/scripts/run-miniflare.sh new file mode 100755 index 0000000000..1ddc4fa97b --- /dev/null +++ b/packages/api/docker/scripts/run-miniflare.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +npx miniflare --debug \ + --build-command "npm run build" \ + --binding ENV="$ENV" \ + --binding PRIVATE_KEY="$PRIVATE_KEY" \ + --binding DATABASE_CONNECTION="$DATABASE_CONNECTION" \ + --binding DATABASE_TOKEN="$DATABASE_TOKEN" \ + --binding DATABASE_URL="$DATABASE_URL" \ + --binding LOGTAIL_TOKEN="$LOGTAIL_TOKEN" \ + --binding 
MAGIC_SECRET_KEY="$MAGIC_SECRET_KEY" \ + --binding SALT="$SALT" \ + --binding SENTRY_DSN="$SENTRY_DSN" \ + --binding MAINTENANCE_MODE="$MAINTENANCE_MODE" \ + --binding MAILCHIMP_API_KEY="$MAILCHIMP_API_KEY" \ + --binding CLUSTER_API_URL="$CLUSTER_API_URL" \ + --binding CLUSTER_BASIC_AUTH_TOKEN="$CLUSTER_BASIC_AUTH_TOKEN" \ + --binding DEBUG="$DEBUG" \ + --binding METAPLEX_AUTH_TOKEN="$METAPLEX_AUTH_TOKEN" \ + --binding S3_ACCESS_KEY_ID="$S3_ACCESS_KEY_ID" \ + --binding S3_BUCKET_NAME="$S3_BUCKET_NAME" \ + --binding S3_ENDPOINT="$S3_ENDPOINT" \ + --binding S3_REGION="$S3_REGION" \ + --binding S3_SECRET_ACCESS_KEY="$S3_SECRET_ACCESS_KEY" \ +; + \ No newline at end of file diff --git a/packages/api/docker/scripts/run-playwright.sh b/packages/api/docker/scripts/run-playwright.sh new file mode 100755 index 0000000000..f008669f46 --- /dev/null +++ b/packages/api/docker/scripts/run-playwright.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +tsc \ +&& npx playwright-test 'test/**/*.spec.js' --sw src/index.js \ +; diff --git a/packages/api/docker/test.Dockerfile b/packages/api/docker/test.Dockerfile new file mode 100644 index 0000000000..5d4837990f --- /dev/null +++ b/packages/api/docker/test.Dockerfile @@ -0,0 +1,20 @@ +FROM mcr.microsoft.com/playwright:v1.20.0-focal + +RUN npm i -g nodemon typescript +RUN mkdir -p /app + +WORKDIR /app + +# Make your docker builds 100x faster with this one trick :) +COPY ./package.json . +COPY ./tsconfig.json . +RUN yarn install + +COPY ./pw-test.config.cjs . 
+COPY ./docker/scripts ./docker-scripts +COPY ./scripts ./scripts +COPY ./db ./db +COPY ./src/ ./src +COPY ./test/ ./test + +ENTRYPOINT ["./docker-scripts/run-playwright.sh" ] diff --git a/packages/api/package.json b/packages/api/package.json index e9fe652865..8924a1d8a1 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -7,11 +7,13 @@ "main": "dist/worker.js", "scripts": { "deploy": "wrangler publish --env production", - "predev": "./scripts/cli.js db --start && ./scripts/cli.js db-sql --cargo --testing --reset", - "dev": "miniflare --watch --debug --env ../../.env", - "clean": "./scripts/cli.js db --clean && rm -rf docker/compose", - "build": "scripts/cli.js build", - "test": "tsc && playwright-test \"test/**/*.spec.js\" --sw src/index.js", + "dev": "./scripts/start-dev.sh", + "dev:persist": "./scripts/start-dev-persist.sh", + "dev:clean": "./scripts/clean-dev-persist.sh", + "build": "./scripts/cli.js build", + "test": "./scripts/start-test.sh", + "test:docker": "./scripts/start-test-docker.sh ", + "test:watch": "nodemon --watch ./src --watch ./test --watch ./scripts --exec ./scripts/start-test.sh", "db-types": "./scripts/cli.js db-types" }, "author": "Hugo Dias (hugodias.me)", @@ -24,11 +26,12 @@ "@ipld/dag-pb": "^2.1.16", "@magic-sdk/admin": "1.4.0", "@nftstorage/ipfs-cluster": "^5.0.1", - "@supabase/postgrest-js": "^0.34.1", + "@supabase/postgrest-js": "^0.37.2", "ipfs-car": "^0.6.1", "merge-options": "^3.0.4", "multiformats": "^9.6.4", "nanoid": "^3.1.30", + "nft.storage": "^6.3.0", "regexparam": "^2.0.0", "toucan-js": "^2.4.1", "ucan-storage": "^1.0.0", @@ -43,16 +46,19 @@ "@types/git-rev-sync": "^2.0.0", "@types/mocha": "^9.0.0", "@types/pg": "^8.6.1", + "@types/temp": "^0.9.1", "assert": "^2.0.0", "buffer": "^6.0.3", "carbites": "^1.0.6", "delay": "^5.0.0", "dotenv": "^10.0.0", - "esbuild": "^0.13.13", + "esbuild": "^0.14.38", "execa": "^5.1.1", - "git-rev-sync": "^3.0.1", + "git-rev-sync": "^3.0.2", "ipfs-unixfs-importer": "^9.0.3", 
- "miniflare": "^2.0.0-rc.3", + "miniflare": "^2.4.0", + "mocha": "^9.1.0", + "nodemon": "^2.0.16", "npm-run-all": "^4.1.5", "openapi-typescript": "^4.0.2", "p-retry": "^4.6.1", @@ -62,6 +68,7 @@ "readable-stream": "^3.6.0", "sade": "^1.7.4", "smoke": "^3.1.1", + "temp": "^0.9.4", "tweetnacl": "^1.0.3" } } diff --git a/packages/api/pw-test.config.cjs b/packages/api/pw-test.config.cjs index d20b30cbbb..9220b1ebcc 100644 --- a/packages/api/pw-test.config.cjs +++ b/packages/api/pw-test.config.cjs @@ -1,5 +1,7 @@ const path = require('path') +const fs = require('fs') const dotenv = require('dotenv') +const temp = require('temp') const execa = require('execa') const delay = require('delay') const { once } = require('events') @@ -8,26 +10,64 @@ const { once } = require('events') dotenv.config({ path: path.join(__dirname, '../../.env') }) -const cli = path.join(__dirname, 'scripts/cli.js') +const defineGlobalsJs = ` +globalThis.ENV = '${process.env.ENV || ''}' +globalThis.DEBUG = '${process.env.DEBUG || ''}' +globalThis.SALT = '${process.env.SALT || ''}' +globalThis.DATABASE_URL = '${process.env.DATABASE_URL || ''}' +globalThis.DATABASE_TOKEN = '${process.env.DATABASE_TOKEN || ''}' + +globalThis.MAGIC_SECRET_KEY = '${process.env.MAGIC_SECRET_KEY || ''}' +globalThis.MAILCHIMP_API_KEY = '${process.env.MAILCHIMP_API_KEY || ''}' +globalThis.METAPLEX_AUTH_TOKEN = '${process.env.METAPLEX_AUTH_TOKEN || ''}' +globalThis.LOGTAIL_TOKEN = '${process.env.LOGTAIL_TOKEN || ''}' +globalThis.PRIVATE_KEY = '${process.env.PRIVATE_KEY || ''}' +globalThis.SENTRY_DSN = '${process.env.SENTRY_DSN || ''}' + +globalThis.CLUSTER_API_URL = '${process.env.CLUSTER_API_URL || ''}' +globalThis.CLUSTER_BASIC_AUTH_TOKEN = '${ + process.env.CLUSTER_BASIC_AUTH_TOKEN || '' +}' +globalThis.CLUSTER_SERVICE = '${process.env.CLUSTER_SERVICE || ''}' + +globalThis.MAINTENANCE_MODE = '${process.env.MAINTENANCE_MODE || ''}' + +globalThis.S3_ENDPOINT = '${process.env.S3_ENDPOINT || ''}' +globalThis.S3_REGION = 
'${process.env.S3_REGION || ''}' +globalThis.S3_ACCESS_KEY_ID = '${process.env.S3_ACCESS_KEY_ID || ''}' +globalThis.S3_SECRET_ACCESS_KEY = '${process.env.S3_SECRET_ACCESS_KEY || ''}' +globalThis.S3_BUCKET_NAME = '${process.env.S3_BUCKET_NAME || ''}' +globalThis.SLACK_USER_REQUEST_WEBHOOK_URL = '${ process.env.SLACK_USER_REQUEST_WEBHOOK_URL || '' }' ` +temp.track() +const injectGlobalsTempfile = temp.openSync({ + prefix: 'nftstorage-test-', + suffix: '.js', +}) +fs.writeSync(injectGlobalsTempfile.fd, defineGlobalsJs) +fs.closeSync(injectGlobalsTempfile.fd) + +const cli = path.join(__dirname, 'scripts/cli.js') /** @type {import('esbuild').Plugin} */ const nodeBuiltinsPlugin = { name: 'node builtins', setup(build) { build.onResolve({ filter: /^stream$/ }, () => { return { path: require.resolve('readable-stream') } - }) - - build.onResolve({ filter: /^cross-fetch$/ }, () => { - return { path: path.resolve(__dirname, 'scripts/fetch.js') } - }) + }), + build.onResolve({ filter: /^cross-fetch$/ }, () => { + return { path: path.resolve(__dirname, 'scripts/fetch.js') } + }) }, } const config = { inject: [ path.join(__dirname, './scripts/node-globals.js'), - path.join(__dirname, './test/scripts/globals.js'), + injectGlobalsTempfile.path, ], define: { NFT_STORAGE_VERSION: JSON.stringify('0.1.0'),
b/packages/api/scripts/clean-dev-persist.sh new file mode 100755 index 0000000000..bc049d0b6b --- /dev/null +++ b/packages/api/scripts/clean-dev-persist.sh @@ -0,0 +1,2 @@ +#!/usr/bin/env sh +docker compose --project-name="nft-storage-api-persist" rm --force --volumes diff --git a/packages/api/scripts/cli.js b/packages/api/scripts/cli.js index 753c8e26b9..c0c2f7e778 100755 --- a/packages/api/scripts/cli.js +++ b/packages/api/scripts/cli.js @@ -46,10 +46,20 @@ prog .describe('Build the worker.') .option('--env', 'Environment', 'dev') .action(async (opts) => { + let shortCommit = 'unknown' + let branch = 'unknown-branch' + let commit = 'unknown-commit' + + try { + shortCommit = git.short(__dirname) + commit = git.long(__dirname) + branch = git.branch(__dirname) + } catch (e) { + console.warn('no git version info available') + } + try { - const version = `${pkg.name}@${pkg.version}-${opts.env}+${git.short( - __dirname - )}` + const version = `${pkg.name}@${pkg.version}-${opts.env}+${shortCommit}` await build({ entryPoints: [path.join(__dirname, '../src/index.js')], bundle: true, @@ -59,8 +69,8 @@ prog plugins: [PluginAlias], define: { NFT_STORAGE_VERSION: JSON.stringify(version), - NFT_STORAGE_COMMITHASH: JSON.stringify(git.long(__dirname)), - NFT_STORAGE_BRANCH: JSON.stringify(git.branch(__dirname)), + NFT_STORAGE_COMMITHASH: JSON.stringify(commit), + NFT_STORAGE_BRANCH: JSON.stringify(branch), global: 'globalThis', }, minify: opts.env === 'dev' ? 
false : true, diff --git a/packages/api/scripts/cmds/db-sql.js b/packages/api/scripts/cmds/db-sql.js index 9e93c526e7..8719aa24ee 100644 --- a/packages/api/scripts/cmds/db-sql.js +++ b/packages/api/scripts/cmds/db-sql.js @@ -39,7 +39,6 @@ export async function dbSqlCmd(opts) { ) const client = await getDbClient(env.DATABASE_CONNECTION) - if (opts.reset) { await client.query(reset) } diff --git a/packages/api/scripts/start-dev-persist.sh b/packages/api/scripts/start-dev-persist.sh new file mode 100755 index 0000000000..8c57580c18 --- /dev/null +++ b/packages/api/scripts/start-dev-persist.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env sh +# This script force builds docker images, src the env, kills the old containers, and starts the new containers. + +# We're moving this script around a lot, and it's pretty cwd-dependent. +ENV_FILE=../../.env +COMPOSE_FILES="--file ./docker/docker-compose.yml --file ./docker/docker-compose.dev.yml" +docker compose \ + --project-name="nft-storage-api-persist" \ + $COMPOSE_FILES --env-file="$ENV_FILE" up \ + --build \ + --remove-orphans \ +; diff --git a/packages/api/scripts/start-dev.sh b/packages/api/scripts/start-dev.sh new file mode 100755 index 0000000000..4754fce12a --- /dev/null +++ b/packages/api/scripts/start-dev.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh +# This script force builds docker images, src the env, kills the old containers, and starts the new containers. + +# We're moving this script around a lot, and it's pretty cwd-dependent. 
+ENV_FILE=../../.env +COMPOSE_FILES="--file ./docker/docker-compose.yml --file ./docker/docker-compose.dev.yml" + +docker compose \ + --project-name="nft-storage-api" \ + $COMPOSE_FILES --env-file="$ENV_FILE" up \ + --always-recreate-deps \ + --remove-orphans \ + --force-recreate \ + --renew-anon-volumes \ + --build \ +; diff --git a/packages/api/scripts/start-test-docker.sh b/packages/api/scripts/start-test-docker.sh new file mode 100755 index 0000000000..8e7689be0b --- /dev/null +++ b/packages/api/scripts/start-test-docker.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env sh + +ENV_FILE=../../.env +docker compose --profile="test" --file ./docker/docker-compose.yml --env-file="$ENV_FILE" config + +docker compose \ + --project-name="nft-storage-api-test" \ + --profile="test" --file ./docker/docker-compose.yml --env-file="$ENV_FILE" up \ + --always-recreate-deps \ + --force-recreate \ + --renew-anon-volumes \ + --remove-orphans \ + --build \ + --attach "playwright" \ + --no-log-prefix \ +; + diff --git a/packages/api/scripts/start-test.sh b/packages/api/scripts/start-test.sh new file mode 100755 index 0000000000..369068f6c7 --- /dev/null +++ b/packages/api/scripts/start-test.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +set -ex + +# We're moving this script around a lot, and it's pretty cwd-dependent. 
+ENV_FILE=../../.env +COMPOSE_FILES="--file ./docker/docker-compose.yml --file ./docker/docker-compose.local-ports.yml" + +cleanup() { + docker compose --project-name "nft-storage-api-local-tests" down --remove-orphans +} +trap cleanup EXIT + +docker compose \ + --project-name="nft-storage-api-local-tests" \ + $COMPOSE_FILES --env-file="$ENV_FILE" up \ + --detach \ + --always-recreate-deps \ + --remove-orphans \ + --force-recreate \ + --renew-anon-volumes \ + --build \ +; + +THIS_DIR="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +API_DIR="$THIS_DIR/../" + +pushd $API_DIR + +export DATABASE_URL="http://localhost:3000" +export CLUSTER_API_URL="http://localhost:9094" + +tsc \ +&& npx playwright-test "./test/"'**/*.spec.js' --sw src/index.js \ +; + + +popd diff --git a/packages/api/scripts/token-add.sh b/packages/api/scripts/token-add.sh new file mode 100755 index 0000000000..6b12e8388c --- /dev/null +++ b/packages/api/scripts/token-add.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +# This script can be used to detect if the database persists after a docker-compose restart. +# using token-count.sh along with this script helps to check if data survives a restart. +API_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweDY1MDA3QTczOWFiN0FDNWM1MzcxNjEyNDliODEyNTBFNDllMjg1M0MiLCJpc3MiOiJuZnQtc3RvcmFnZSIsImlhdCI6MTYzOTc1NDczNjYzOCwibmFtZSI6Im1haW4ifQ.wKwJIRXXHsgwVp8mOQp6r3_F4Lz5lnoAkgVP8wqwA_Y +curl -v 'localhost:8787/internal/tokens' -H "Authorization: Bearer $API_KEY" -H 'content-type: application/json' --data-raw '{"name":"abc"}' | jq diff --git a/packages/api/scripts/token-count.sh b/packages/api/scripts/token-count.sh new file mode 100755 index 0000000000..828e87c291 --- /dev/null +++ b/packages/api/scripts/token-count.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env sh + +# This counts tokens returned from the server. Useful when checking to see if the database data persists. 
+API_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweDY1MDA3QTczOWFiN0FDNWM1MzcxNjEyNDliODEyNTBFNDllMjg1M0MiLCJpc3MiOiJuZnQtc3RvcmFnZSIsImlhdCI6MTYzOTc1NDczNjYzOCwibmFtZSI6Im1haW4ifQ.wKwJIRXXHsgwVp8mOQp6r3_F4Lz5lnoAkgVP8wqwA_Y +curl -vvvv 'localhost:8787/internal/tokens' -H "Authorization: Bearer $API_KEY" -H 'content-type: application/json' | jq '.value | length' diff --git a/packages/api/wrangler.toml b/packages/api/wrangler.toml index 4b308c8ef4..48e7e8eef4 100644 --- a/packages/api/wrangler.toml +++ b/packages/api/wrangler.toml @@ -1,18 +1,18 @@ # Development -name = "nft-storage-dev" account_id = "fffa4b4363a7e5250af8357087263b3a" -workers_dev = true +name = "nft-storage-dev" type = "javascript" +workers_dev = true # Compatibility flags https://github.com/cloudflare/wrangler/pull/2009 compatibility_date = "2021-08-23" -compatibility_flags = [ "formdata_parser_supports_files" ] +compatibility_flags = ["formdata_parser_supports_files"] [vars] -ENV = "dev" -DEBUG = "true" DATABASE_URL = "http://localhost:3000" +DEBUG = "true" +ENV = "dev" [build] command = "scripts/cli.js build" @@ -22,20 +22,21 @@ watch_dir = "src" format = "service-worker" [miniflare] -env_path = "./../../.env" +host = "0.0.0.0" +port = "8787" # Staging [env.staging] name = "nft-storage-staging" route = "api-staging.nft.storage/*" -zone_id = "fc6cb51dbc2d0b9a729eae6a302a49c9" usage_model = "unbound" +zone_id = "fc6cb51dbc2d0b9a729eae6a302a49c9" [env.staging.vars] -ENV = "staging" -DEBUG = "true" DATABASE_URL = "https://nft-storage-pgrest-staging.herokuapp.com" +DEBUG = "true" +ENV = "staging" [env.staging.build] command = "scripts/cli.js build --env staging" @@ -49,13 +50,13 @@ format = "service-worker" [env.production] name = "nft-storage" route = "api.nft.storage/*" -zone_id = "fc6cb51dbc2d0b9a729eae6a302a49c9" usage_model = "unbound" +zone_id = "fc6cb51dbc2d0b9a729eae6a302a49c9" [env.production.vars] -ENV = "production" -DEBUG = "false" DATABASE_URL = 
"https://nft-storage-pgrest-prod.herokuapp.com" +DEBUG = "false" +ENV = "production" [env.production.build] command = "scripts/cli.js build --env production"