Skip to content

Commit

Permalink
Merge branch 'main' into integration/bump-mobc
Browse files Browse the repository at this point in the history
  • Loading branch information
janpio authored Nov 13, 2023
2 parents e405629 + 10b1ce5 commit 14aff28
Show file tree
Hide file tree
Showing 42 changed files with 892 additions and 240 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/publish-prisma-schema-wasm.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ jobs:
- name: Build
run: nix build .#prisma-schema-wasm

- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: '20.x'

Expand Down
4 changes: 1 addition & 3 deletions .github/workflows/query-engine-driver-adapters.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,6 @@ jobs:
setup_task: 'dev-neon-ws-postgres13'
- name: 'libsql'
setup_task: 'dev-libsql-sqlite'
- name: 'planetscale'
setup_task: 'dev-planetscale-vitess8'
node_version: ['18']
env:
LOG_LEVEL: 'info' # Set to "debug" to trace the query engine and node process running the driver adapter
Expand All @@ -53,7 +51,7 @@ jobs:
ref: ${{ github.event.pull_request.head.sha }}

- name: 'Setup Node.js'
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node_version }}

Expand Down
5 changes: 3 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -47,5 +47,6 @@ graph.dot

prisma-schema-wasm/nodejs

# This symlink looks orphaned here, but it comes from prisma/prisma where driver adapters reference a file in their parent directory
tsconfig.build.adapter.json
# Ignore pnpm-lock.yaml
query-engine/driver-adapters/pnpm-lock.yaml
package-lock.json
2 changes: 1 addition & 1 deletion CODEOWNERS
Original file line number Diff line number Diff line change
@@ -1 +1 @@
* @prisma/team-orm-rust
* @prisma/ORM-Rust
25 changes: 7 additions & 18 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -130,10 +130,10 @@ test-pg-postgres13: dev-pg-postgres13 test-qe-st

test-driver-adapter-pg: test-pg-postgres13

start-neon-postgres13: build-qe-napi build-connector-kit-js
start-neon-postgres13:
docker compose -f docker-compose.yml up --wait -d --remove-orphans neon-postgres13

dev-neon-ws-postgres13: start-neon-postgres13
dev-neon-ws-postgres13: start-neon-postgres13 build-qe-napi build-connector-kit-js
cp $(CONFIG_PATH)/neon-ws-postgres13 $(CONFIG_FILE)

test-neon-ws-postgres13: dev-neon-ws-postgres13 test-qe-st
Expand Down Expand Up @@ -268,10 +268,10 @@ start-vitess_8_0:
dev-vitess_8_0: start-vitess_8_0
cp $(CONFIG_PATH)/vitess_8_0 $(CONFIG_FILE)

start-planetscale-vitess8: build-qe-napi build-connector-kit-js
start-planetscale-vitess8:
docker compose -f docker-compose.yml up -d --remove-orphans planetscale-vitess8

dev-planetscale-vitess8: start-planetscale-vitess8
dev-planetscale-vitess8: start-planetscale-vitess8 build-qe-napi build-connector-kit-js
cp $(CONFIG_PATH)/planetscale-vitess8 $(CONFIG_FILE)

test-planetscale-vitess8: dev-planetscale-vitess8 test-qe-st
Expand All @@ -285,25 +285,14 @@ test-driver-adapter-planetscale: test-planetscale-vitess8
build-qe-napi:
cargo build --package query-engine-node-api

build-connector-kit-js: build-driver-adapters symlink-driver-adapters
cd query-engine/driver-adapters/connector-test-kit-executor && pnpm i && pnpm build
build-connector-kit-js: build-driver-adapters
cd query-engine/driver-adapters && pnpm i && pnpm build

build-driver-adapters: ensure-prisma-present
@echo "Building driver adapters..."
@cd ../prisma && pnpm --filter "*adapter*" i && pnpm --filter "*adapter*" build
@cd ../prisma && pnpm --filter "*adapter*" i
@echo "Driver adapters build completed.";

symlink-driver-adapters: ensure-prisma-present
@echo "Creating symbolic links for driver adapters..."
@for dir in $(wildcard $(realpath ../prisma)/packages/*adapter*); do \
if [ -d "$$dir" ]; then \
dir_name=$$(basename "$$dir"); \
ln -sfn "$$dir" "$(realpath .)/query-engine/driver-adapters/$$dir_name"; \
echo "Created symbolic link for $$dir_name"; \
fi; \
done;
echo "Symbolic links creation completed.";

ensure-prisma-present:
@if [ -d ../prisma ]; then \
cd "$(realpath ../prisma)" && git fetch origin main; \
Expand Down
48 changes: 37 additions & 11 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -259,6 +259,29 @@ GitHub actions will then pick up the branch name and use it to clone that branch

When it's time to merge the sibling PRs, you'll need to merge the prisma/prisma PR first, so when merging the engines PR you have the code of the adapters ready in prisma/prisma `main` branch.

### Testing engines in `prisma/prisma`

You can trigger releases from this repository to npm that can be used for testing the engines in `prisma/prisma` either automatically or manually:

#### Automated integration releases from this repository to npm

(Since July 2022). Any branch name starting with `integration/` will, first, run the full test suite in Buildkite `[Test] Prisma Engines` and, second, if passing, run the publish pipeline (build and upload engines to S3 & R2)

The journey through the pipeline is the same as a commit on the `main` branch.
- It will trigger [`prisma/engines-wrapper`](https://github.com/prisma/engines-wrapper) and publish a new [`@prisma/engines-version`](https://www.npmjs.com/package/@prisma/engines-version) npm package but on the `integration` tag.
- Which triggers [`prisma/prisma`](https://github.com/prisma/prisma) to create a `chore(Automated Integration PR): [...]` PR with a branch name also starting with `integration/`
- Since in `prisma/prisma` we also trigger the publish pipeline when a branch name starts with `integration/`, this will publish all `prisma/prisma` monorepo packages to npm on the `integration` tag.
- Our [ecosystem-tests](https://github.com/prisma/ecosystem-tests/) tests will automatically pick up this new version and run tests, results will show in [GitHub Actions](https://github.com/prisma/ecosystem-tests/actions?query=branch%3Aintegration)

This end-to-end process will take a minimum of ~1h20 to complete, but is completely automated :robot:

Notes:
- in `prisma/prisma` repository, we do not run tests for `integration/` branches, it is much faster and also means that there is no risk of tests failing (e.g. flaky tests, snapshots) that would stop the publishing process.
- in `prisma/prisma-engines` the Buildkite test pipeline must first pass, then the engines will be built and uploaded to our storage via the Buildkite release pipeline. These 2 pipelines can fail for different reasons, it's recommended to keep an eye on them (check notifications in Slack) and restart jobs as needed. Finally, it will trigger [`prisma/engines-wrapper`](https://github.com/prisma/engines-wrapper).

#### Manual integration releases from this repository to npm

Additionally to the automated integration release for `integration/` branches, you can also trigger a publish **manually** in the Buildkite `[Test] Prisma Engines` job if that succeeds for _any_ branch name. Click "🚀 Publish binaries" at the bottom of the test list to unlock the publishing step. When all the jobs in `[Release] Prisma Engines` succeed, you also have to unlock the next step by clicking "🚀 Publish client". This will then trigger the same journey as described above.

## Parallel rust-analyzer builds

Expand All @@ -269,22 +292,25 @@ rust-analyzer. To avoid this. Open VSCode settings and search for `Check on Save
--target-dir:/tmp/rust-analyzer-check
```

### Automated integration releases from this repository to npm

(Since July 2022). Any branch name starting with `integration/` will, first, run the full test suite and, second, if passing, run the publish pipeline (build and upload engines to S3)
## Community PRs: create a local branch for a branch coming from a fork

The journey through the pipeline is the same as a commit on the `main` branch.
- It will trigger [prisma/engines-wrapper](https://github.com/prisma/engines-wrapper) and publish a new [`@prisma/engines-version`](https://www.npmjs.com/package/@prisma/engines-version) npm package but on the `integration` tag.
- Which triggers [prisma/prisma](https://github.com/prisma/prisma) to create a `chore(Automated Integration PR): [...]` PR with a branch name also starting with `integration/`
- Since in prisma/prisma we also trigger the publish pipeline when a branch name starts with `integration/`, this will publish all prisma/prisma monorepo packages to npm on the `integration` tag.
- Our [ecosystem-tests](https://github.com/prisma/ecosystem-tests/) tests will automatically pick up this new version and run tests, results will show in [GitHub Actions](https://github.com/prisma/ecosystem-tests/actions?query=branch%3Aintegration)
To trigger an [Automated integration release from this repository to npm](#automated-integration-releases-from-this-repository-to-npm) or a [Manual integration release from this repository to npm](#manual-integration-releases-from-this-repository-to-npm), a fork's branch needs to be pulled into this repository so that the Buildkite job is triggered. You can use these GitHub and git CLI commands to achieve that easily:

This end-to-end process will take a minimum of ~1h20 to complete, but is completely automated :robot:
```
gh pr checkout 4375
git checkout -b integration/sql-nested-transactions
git push --set-upstream origin integration/sql-nested-transactions
```

Notes:
- in the prisma/prisma repository, we do not run tests for `integration/` branches; it is much faster and also means that there is no risk of tests failing (e.g. flaky tests, snapshots) that would stop the publishing process.
- in prisma/prisma-engines, tests must pass before publishing starts, so it is best to keep an eye on them and restart them as needed.
If there is a need to re-create this branch because it has been updated, deleting it and re-creating will make sure the content is identical and avoid any conflicts.

```
git branch --delete integration/sql-nested-transactions
gh pr checkout 4375
git checkout -b integration/sql-nested-transactions
git push --set-upstream origin integration/sql-nested-transactions --force
```

## Security

Expand Down
1 change: 0 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,6 @@ services:
restart: unless-stopped
platform: linux/x86_64
environment:
MYSQL_USER: root
MYSQL_ROOT_PASSWORD: prisma
MYSQL_DATABASE: prisma
ports:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ pub(super) fn validate(ctx: &mut Context<'_>) {
indexes::supports_clustering_setting(index, ctx);
indexes::clustering_can_be_defined_only_once(index, ctx);
indexes::opclasses_are_not_allowed_with_other_than_normal_indices(index, ctx);
indexes::composite_types_are_not_allowed_in_index(index, ctx);
indexes::composite_type_in_compound_unique_index(index, ctx);

for field_attribute in index.scalar_field_attributes() {
let span = index.ast_attribute().span;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -386,20 +386,25 @@ pub(crate) fn opclasses_are_not_allowed_with_other_than_normal_indices(index: In
}
}

pub(crate) fn composite_types_are_not_allowed_in_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) {
for field in index.fields() {
if field.scalar_field_type().as_composite_type().is_some() {
let message = format!(
"Indexes can only contain scalar attributes. Please remove {:?} from the argument list of the indexes.",
field.name()
);
ctx.push_error(DatamodelError::new_attribute_validation_error(
&message,
index.attribute_name(),
index.ast_attribute().span,
));
return;
}
/// Validates that a compound (multi-field) unique index does not include a
/// field whose scalar type resolves to a composite type, pushing a datamodel
/// validation error when it does.
///
/// Single-field unique indexes and non-unique indexes are not affected —
/// presumably this restriction is MongoDB-specific (the linked Prisma docs
/// URL suggests so; confirm against the connector that calls this).
pub(crate) fn composite_type_in_compound_unique_index(index: IndexWalker<'_>, ctx: &mut Context<'_>) {
    // Only unique indexes are subject to this restriction.
    if !index.is_unique() {
        return;
    }

    // A single-field unique index may point at a composite type; only
    // compound indexes are rejected. Checking the length first also avoids
    // scanning the fields when the index cannot be in violation.
    if index.fields().len() <= 1 {
        return;
    }

    // `if let` replaces the original `is_some()` + `unwrap()` pair.
    if let Some(field) = index
        .fields()
        .find(|f| f.scalar_field_type().as_composite_type().is_some())
    {
        let message = format!(
            "Prisma does not currently support composite types in compound unique indices, please remove {:?} from the index. See https://pris.ly/d/mongodb-composite-compound-indices for more details",
            field.name()
        );
        ctx.push_error(DatamodelError::new_attribute_validation_error(
            &message,
            index.attribute_name(),
            index.ast_attribute().span,
        ));
    }
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
use query_engine_tests::*;

#[test_suite(schema(generic), only(Postgres))]
#[test_suite(schema(generic))]
mod raw_params {
#[connector_test]
#[connector_test(only(Postgres), exclude(JS))]
async fn value_too_many_bind_variables(runner: Runner) -> TestResult<()> {
let n = 32768;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -213,7 +213,7 @@ mod interactive_tx {
Ok(())
}

#[connector_test(exclude(JS))]
#[connector_test]
async fn batch_queries_failure(mut runner: Runner) -> TestResult<()> {
// Tx expires after five second.
let tx_id = runner.start_tx(5000, 5000, None).await?;
Expand Down Expand Up @@ -256,7 +256,7 @@ mod interactive_tx {
Ok(())
}

#[connector_test(exclude(JS))]
#[connector_test]
async fn tx_expiration_failure_cycle(mut runner: Runner) -> TestResult<()> {
// Tx expires after one seconds.
let tx_id = runner.start_tx(5000, 1000, None).await?;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ mod one2one_req {
}

/// Deleting the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))]
async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#),
Expand Down Expand Up @@ -167,7 +167,7 @@ mod one2one_opt {
}

/// Deleting the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))]
async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#),
Expand Down Expand Up @@ -270,7 +270,7 @@ mod one2many_req {
}

/// Deleting the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))]
async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#),
Expand Down Expand Up @@ -371,7 +371,7 @@ mod one2many_opt {
}

/// Deleting the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))]
async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ mod one2one_req {
}

/// Updating the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))]
async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#),
Expand Down Expand Up @@ -171,7 +171,7 @@ mod one2one_opt {
}

/// Updating the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))]
async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#),
Expand Down Expand Up @@ -276,7 +276,7 @@ mod one2many_req {
}

/// Updating the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(required_with_default), exclude(MongoDb, MySQL))]
async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#),
Expand Down Expand Up @@ -379,7 +379,7 @@ mod one2many_opt {
}

/// Updating the parent reconnects the child to the default and fails (the default doesn't exist).
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, JS))]
#[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL))]
async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> {
insta::assert_snapshot!(
run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,8 +187,8 @@ mod max_integer {
schema.to_owned()
}

#[connector_test(schema(overflow_pg), only(Postgres))]
async fn unfitted_int_should_fail_pg(runner: Runner) -> TestResult<()> {
#[connector_test(schema(overflow_pg), only(Postgres), exclude(JS))]
async fn unfitted_int_should_fail_pg_quaint(runner: Runner) -> TestResult<()> {
// int
assert_error!(
runner,
Expand Down Expand Up @@ -234,6 +234,55 @@ mod max_integer {
Ok(())
}

// The driver adapter for neon provides different error messages on overflow
#[connector_test(schema(overflow_pg), only(JS, Postgres))]
async fn unfitted_int_should_fail_pg_js(runner: Runner) -> TestResult<()> {
// int
assert_error!(
runner,
format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MAX} }}) {{ id }} }}"),
None,
"value \\\"2147483648\\\" is out of range for type integer"
);
assert_error!(
runner,
format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MIN} }}) {{ id }} }}"),
None,
"value \\\"-2147483649\\\" is out of range for type integer"
);

// smallint
assert_error!(
runner,
format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MAX} }}) {{ id }} }}"),
None,
"value \\\"32768\\\" is out of range for type smallint"
);
assert_error!(
runner,
format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MIN} }}) {{ id }} }}"),
None,
"value \\\"-32769\\\" is out of range for type smallint"
);

//oid
assert_error!(
runner,
format!("mutation {{ createOneTest(data: {{ oid: {U32_OVERFLOW_MAX} }}) {{ id }} }}"),
None,
"value \\\"4294967296\\\" is out of range for type oid"
);

// The underlying driver swallows a negative id by interpreting it as unsigned.
// {"data":{"createOneTest":{"id":1,"oid":4294967295}}}
run_query!(
runner,
format!("mutation {{ createOneTest(data: {{ oid: {OVERFLOW_MIN} }}) {{ id, oid }} }}")
);

Ok(())
}

#[connector_test(schema(overflow_pg), only(Postgres))]
async fn fitted_int_should_work_pg(runner: Runner) -> TestResult<()> {
// int
Expand Down
Loading

0 comments on commit 14aff28

Please sign in to comment.