Column                  Dtype            Stats
commit_message          stringlengths    9 to 4.28k
sha                     stringlengths    40 to 40
type                    stringclasses    10 values
commit_url              stringlengths    78 to 90
masked_commit_message   stringlengths    2 to 4.26k
author_email            stringclasses    8 values
git_diff                stringlengths    129 to 19.1M
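Each row of the table describes one commit; sample rows follow below. As a minimal sketch of how a table with this schema could be inspected, the snippet below loads it with pandas and summarizes two of the columns. The file name `commits.parquet` and the Parquet format are illustrative assumptions, not part of the dataset itself.

```python
# Minimal sketch: load the commit table and summarize it.
# Assumes the rows are available locally as "commits.parquet";
# the file name and storage format are assumptions for illustration.
import pandas as pd

df = pd.read_parquet("commits.parquet")

# Distribution of the `type` column (docs, chore, feat, fix, ci, build, test, refactor, ...).
print(df["type"].value_counts())

# Length of each diff, which should roughly match the `git_diff` stringlengths stats above.
print(df["git_diff"].str.len().describe())
```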
docs: add 5.9.4 changelog to core package
48421f5f7b2c18d9caa55336eeb7c2cc7b4cc669
docs
https://github.com/mikro-orm/mikro-orm/commit/48421f5f7b2c18d9caa55336eeb7c2cc7b4cc669
add 5.9.4 changelog to core package
diff --git a/CHANGELOG.md b/CHANGELOG.md index b33d01a..dd43677 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -108,6 +108,20 @@ See [Conventional Commits](https://conventionalcommits.org) for commit guideline Please see the [upgrading guide](https://mikro-orm.io/docs/next/upgrading-v5-to-v6). +## [5.9.4](https://github.com/mikro-orm/mikro-orm/compare/v5.9.3...v5.9.4) (2023-11-17) + + +### Bug Fixes + +* **cli:** support `mikro-orm-esm` on windows ([c491af9](https://github.com/mikro-orm/mikro-orm/commit/c491af9113eed8d274c2ec1f2a736a4cfbaa81f7)) +* **core:** fix auto-refresh detection in `em.find` for inlined embedded properties ([759b7b8](https://github.com/mikro-orm/mikro-orm/commit/759b7b8b5aa95bea8c6b1074ec1f5c2c9ffc5286)), closes [#4904](https://github.com/mikro-orm/mikro-orm/issues/4904) +* **core:** support composite PKs in `em.upsertMany()` ([85c38d4](https://github.com/mikro-orm/mikro-orm/commit/85c38d4465bf37b8448522c835ad77ce6300e317)), closes [#4923](https://github.com/mikro-orm/mikro-orm/issues/4923) +* **mysql:** improve diffing of defaults for JSON columns ([d92a440](https://github.com/mikro-orm/mikro-orm/commit/d92a44059b3b6dc8eeb107e8bd6fd4644f18383a)), closes [#4926](https://github.com/mikro-orm/mikro-orm/issues/4926) +* **schema:** do not inherit schema for FKs if not a wildcard entity ([cc7fed9](https://github.com/mikro-orm/mikro-orm/commit/cc7fed9fcdf62e6ff76f4fa9d2b65192d6ca5f46)), closes [#4918](https://github.com/mikro-orm/mikro-orm/issues/4918) +* **schema:** respect explicit schema in FKs to STI entities ([cc19ebb](https://github.com/mikro-orm/mikro-orm/commit/cc19ebb3addf6e68891e78c36b8857280ddae4a5)), closes [#4933](https://github.com/mikro-orm/mikro-orm/issues/4933) +* **schema:** respect up migration when detecting column renaming in down migration ([d5af5bd](https://github.com/mikro-orm/mikro-orm/commit/d5af5bdd3a709212edb9aa0127d29d8bd9610f25)), closes [#4919](https://github.com/mikro-orm/mikro-orm/issues/4919) + + ## [5.9.3](https://github.com/mikro-orm/mikro-orm/compare/v5.9.2...v5.9.3) (2023-11-06)
chore(release): v5.1.2
eecd4da303757cadb096096ee2133890e3122b6f
chore
https://github.com/mikro-orm/mikro-orm/commit/eecd4da303757cadb096096ee2133890e3122b6f
v5.1.2
diff --git a/CHANGELOG.md b/CHANGELOG.md index 437ff84..30d72ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,22 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [5.1.2](https://github.com/mikro-orm/mikro-orm/compare/v5.1.1...v5.1.2) (2022-04-10) + + +### Bug Fixes + +* **postgres:** do not ignore custom PK constraint names ([#2931](https://github.com/mikro-orm/mikro-orm/issues/2931)) ([24bf10e](https://github.com/mikro-orm/mikro-orm/commit/24bf10e668dd2d3b4b6cc4c52ed215fbffcc9d45)) + + +### Features + +* **schema:** support mysql 8 ([#2961](https://github.com/mikro-orm/mikro-orm/issues/2961)) ([acc960e](https://github.com/mikro-orm/mikro-orm/commit/acc960ebc694c61a959f48e89a9fee5513f6bdfa)) + + + + + ## [5.1.1](https://github.com/mikro-orm/mikro-orm/compare/v5.1.0...v5.1.1) (2022-03-20) **Note:** Version bump only for package @mikro-orm/sqlite diff --git a/lerna.json b/lerna.json index ada44cf..1c117ff 100644 --- a/lerna.json +++ b/lerna.json @@ -2,7 +2,7 @@ "packages": [ "packages/*" ], - "version": "5.1.1", + "version": "5.1.2", "command": { "version": { "conventionalCommits": true, diff --git a/package.json b/package.json index 5e76ba2..e467245 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@mikro-orm/sqlite", - "version": "5.1.1", + "version": "5.1.2", "description": "TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.", "main": "dist/index.js", "module": "dist/index.mjs", @@ -56,13 +56,13 @@ "access": "public" }, "dependencies": { - "@mikro-orm/knex": "^5.1.1", + "@mikro-orm/knex": "^5.1.2", "@vscode/sqlite3": "5.0.8", "fs-extra": "10.0.1", "sqlstring-sqlite": "0.1.1" }, "devDependencies": { - "@mikro-orm/core": "^5.1.1" + "@mikro-orm/core": "^5.1.2" }, "peerDependencies": { "@mikro-orm/core": "^5.0.0",
feat: add `backoff` for an exponential backoff iterator. It's used for computing the backoff on the lock itself, but is certainly useful in other occasions as well.
f9bb71b984045a069f7caf0c650ef24bd3cc0be1
feat
https://github.com/Byron/gitoxide/commit/f9bb71b984045a069f7caf0c650ef24bd3cc0be1
add `backoff` for an exponential backoff iterator. It's used for computing the backoff on the lock itself, but is certainly useful in other occasions as well.
diff --git a/backoff.rs b/backoff.rs index 5cd09c7..3e550c5 100644 --- a/backoff.rs +++ b/backoff.rs @@ -1,6 +1,6 @@ use std::time::Duration; -pub fn randomize(backoff_ms: usize) -> usize { +fn randomize(backoff_ms: usize) -> usize { let new_value = (fastrand::usize(750..=1250) * backoff_ms) / 1000; if new_value == 0 { backoff_ms @@ -9,6 +9,7 @@ pub fn randomize(backoff_ms: usize) -> usize { } } +/// A utility to calculate steps for exponential backoff similar to how it's done in `git`. pub struct Exponential<Fn> { multiplier: usize, max_multiplier: usize, @@ -28,6 +29,7 @@ impl Default for Exponential<fn(usize) -> usize> { } impl Exponential<fn(usize) -> usize> { + /// Create a new exponential backoff iterator that backs off in randomized, ever increasing steps. pub fn default_with_random() -> Self { Exponential { multiplier: 1, @@ -42,6 +44,7 @@ impl<Transform> Exponential<Transform> where Transform: Fn(usize) -> usize, { + /// Return an iterator that yields `Duration` instances to sleep on until `time` is depleted. pub fn until_no_remaining(&mut self, time: Duration) -> impl Iterator<Item = Duration> + '_ { let mut elapsed = Duration::default(); let mut stop_next_iteration = false; diff --git a/lib.rs b/lib.rs index e80f8ce..fdabdfe 100644 --- a/lib.rs +++ b/lib.rs @@ -24,7 +24,8 @@ const DOT_LOCK_SUFFIX: &str = ".lock"; /// pub mod acquire; -mod backoff; +/// +pub mod backoff; /// pub mod commit;
build: fixed bundle package.json files
930a059c87ea8a605f5a0d5792c67c1063addd4f
build
https://github.com/tsparticles/tsparticles/commit/930a059c87ea8a605f5a0d5792c67c1063addd4f
fixed bundle package.json files
diff --git a/package.dist.json b/package.dist.json index c760a9f..027d6b0 100644 --- a/package.dist.json +++ b/package.dist.json @@ -3,9 +3,6 @@ "version": "2.0.2", "description": "Easily create highly customizable particle animations and use them as animated backgrounds for your website. Ready to use components available also for React, Vue.js (2.x and 3.x), Angular, Svelte, jQuery, Preact, Riot.js, Inferno.", "homepage": "https://particles.js.org/", - "scripts": { - "install": "node ./scripts/install.js" - }, "repository": { "type": "git", "url": "git+https://github.com/matteobruni/tsparticles.git", @@ -96,4 +93,4 @@ "unpkg": "tsparticles.slim.min.js", "browser": "index.js", "types": "index.d.ts" -} \\ No newline at end of file +}
ci: update paths to new docs folder in GitHub Actions
d6918146ae522d9c79efd83d505e40c3d47c8077
ci
https://github.com/wzhiqing/cube/commit/d6918146ae522d9c79efd83d505e40c3d47c8077
update paths to new docs folder in GitHub Actions
diff --git a/docs.yml b/docs.yml index 1dd3122..c521897 100644 --- a/docs.yml +++ b/docs.yml @@ -3,7 +3,6 @@ on: push: paths: - '.github/workflows/docs.yml' - - 'docs-build/**' - 'docs-gen/**' - 'docs/**' branches: @@ -14,7 +13,7 @@ jobs: runs-on: ubuntu-latest defaults: run: - working-directory: ./docs-build + working-directory: ./docs steps: - name: Checkout uses: actions/checkout@v2 diff --git a/master.yml b/master.yml index 0184342..f6b8a9d 100644 --- a/master.yml +++ b/master.yml @@ -10,8 +10,8 @@ on: - 'package.json' - 'rollup.config.js' - 'yarn.lock' - - 'docs-build/**' - 'docs-gen/**' + - 'docs/**' branches: - master jobs: @@ -44,4 +44,4 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} repository: cubejs/cube - readme-filepath: ./packages/cubejs-docker/README.md \\ No newline at end of file + readme-filepath: ./packages/cubejs-docker/README.md diff --git a/push.yml b/push.yml index 9fd4d20..3ccb10a 100644 --- a/push.yml +++ b/push.yml @@ -11,8 +11,8 @@ on: - 'package.json' - 'rollup.config.js' - 'yarn.lock' - - 'docs-build/**' - 'docs-gen/**' + - 'docs/**' pull_request: paths: - '.github/workflows/push.yml' @@ -23,8 +23,8 @@ on: - 'package.json' - 'rollup.config.js' - 'yarn.lock' - - 'docs-build/**' - 'docs-gen/**' + - 'docs/**' jobs: unit: diff --git a/gatsby-config.js b/gatsby-config.js index 6b18899..ed8d1c2 100644 --- a/gatsby-config.js +++ b/gatsby-config.js @@ -50,7 +50,7 @@ const config = { resolve: 'gatsby-source-filesystem', options: { name: 'cubejs-docs', - path: `${__dirname}/../docs/`, + path: `${__dirname}/content/`, }, }, {
chore: update mongodb
b1291fb4c19fdc2cf31286c8fd877c9b4ab77cc9
chore
https://github.com/mikro-orm/mikro-orm/commit/b1291fb4c19fdc2cf31286c8fd877c9b4ab77cc9
update mongodb
diff --git a/package.json b/package.json index 466eaa2..11967ab 100644 --- a/package.json +++ b/package.json @@ -58,8 +58,8 @@ "access": "public" }, "dependencies": { - "bson": "^6.10.0", - "mongodb": "6.11.0" + "bson": "^6.10.1", + "mongodb": "6.12.0" }, "devDependencies": { "@mikro-orm/core": "^6.4.1" diff --git a/yarn.lock b/yarn.lock index 2df1f37..e7a224f 100644 --- a/yarn.lock +++ b/yarn.lock Binary files a/yarn.lock and b/yarn.lock differ
fix: change detection and remove "props.force"
fdd285500dc01f7d5b05aea3b4068fd6813de0d9
fix
https://github.com/pmndrs/react-spring/commit/fdd285500dc01f7d5b05aea3b4068fd6813de0d9
change detection and remove "props.force"
diff --git a/SpringValue.ts b/SpringValue.ts index 74cd67b..176b2dd 100644 --- a/SpringValue.ts +++ b/SpringValue.ts @@ -442,11 +442,13 @@ export class SpringValue<T = any, P extends string = string> return false } + const prevTo = anim.to + // Write or read the "to" prop if (!is.und(to) && diff('to')) { this._animateTo(to) } else { - to = anim.to + to = prevTo } // Write or read the "from" prop @@ -474,7 +476,7 @@ export class SpringValue<T = any, P extends string = string> from = from.get() } - const changed = props.force || !isEqual(to, anim.to) + const changed = !(is.und(to) || isEqual(to, prevTo)) const isActive = this.is(ACTIVE) // Only use the default "config" prop on first animation. diff --git a/animated.ts b/animated.ts index 8b09455..63996a5 100644 --- a/animated.ts +++ b/animated.ts @@ -49,10 +49,6 @@ export interface AnimationProps<T = unknown> extends AnimationEvents<T> { * Swap the `to` and `from` props. */ reverse?: boolean - /** - * Prevent an update from being cancelled. - */ - force?: boolean } /**
feat(sql): support `$some`, `$none` and `$every` subquery operators (#4917) In addition to the regular operators that translate to a real SQL operator expression (e.g. `>=`), you can also use the following collection operators: | operator | description | |----------|-----------------------------------------------------------------| | `$some` | Finds collections that have some record matching the condition. | | `$none` | Finds collections that have no records matching the condition. | | `$every` | Finds collections where every record is matching the condition. | This will be resolved as a subquery condition: ```ts // finds all authors that have some book called `Foo` const res1 = await em.find(Author, { books: { $some: { title: 'Foo' } }, }); // finds all authors that have no books called `Foo` const res2 = await em.find(Author, { books: { $none: { title: 'Foo' } }, }); // finds all authors that have every book called `Foo` const res3 = await em.find(Author, { books: { $every: { title: 'Foo' } }, }); ``` The condition object can be also empty: ```ts // finds all authors that have at least one book const res1 = await em.find(Author, { books: { $some: {} }, }); // finds all authors that have no books const res2 = await em.find(Author, { books: { $none: {} }, }); ``` Closes #2916
50d2265507e5add684317e2722666ac817bae804
feat
https://github.com/mikro-orm/mikro-orm/commit/50d2265507e5add684317e2722666ac817bae804
support `$some`, `$none` and `$every` subquery operators (#4917) In addition to the regular operators that translate to a real SQL operator expression (e.g. `>=`), you can also use the following collection operators: | operator | description | |----------|-----------------------------------------------------------------| | `$some` | Finds collections that have some record matching the condition. | | `$none` | Finds collections that have no records matching the condition. | | `$every` | Finds collections where every record is matching the condition. | This will be resolved as a subquery condition: ```ts // finds all authors that have some book called `Foo` const res1 = await em.find(Author, { books: { $some: { title: 'Foo' } }, }); // finds all authors that have no books called `Foo` const res2 = await em.find(Author, { books: { $none: { title: 'Foo' } }, }); // finds all authors that have every book called `Foo` const res3 = await em.find(Author, { books: { $every: { title: 'Foo' } }, }); ``` The condition object can be also empty: ```ts // finds all authors that have at least one book const res1 = await em.find(Author, { books: { $some: {} }, }); // finds all authors that have no books const res2 = await em.find(Author, { books: { $none: {} }, }); ``` Closes #2916
diff --git a/nested-populate.md b/nested-populate.md index 0e7509d..27571d9 100644 --- a/nested-populate.md +++ b/nested-populate.md @@ -1,5 +1,5 @@ --- -title: Smart Nested Populate +title: Nested Populate --- `MikroORM` is capable of loading large nested structures while maintaining good performance, querying each database table only once. Imagine you have this nested structure: diff --git a/query-conditions.md b/query-conditions.md index 71e378b..7b159c4 100644 --- a/query-conditions.md +++ b/query-conditions.md @@ -1,5 +1,5 @@ --- -title: Smart Query Conditions +title: Query Conditions --- import Tabs from '@theme/Tabs'; @@ -77,6 +77,49 @@ const res = await orm.em.find(Author, [1, 2, 7]); | `$not` | Inverts the effect of a query expression and returns documents that do not match the query expression. | | `$or` | Joins query clauses with a logical OR returns all documents that match the conditions of either clause. | +### Collection + +In addition to the regular operators that translate to a real SQL operator expression (e.g. `>=`), you can also use the following collection operators: + +| operator | description | +|----------|-----------------------------------------------------------------| +| `$some` | Finds collections that have some record matching the condition. | +| `$none` | Finds collections that have no records matching the condition. | +| `$every` | Finds collections where every record is matching the condition. | + +This will be resolved as a subquery condition: + +```ts +// finds all authors that have some book called `Foo` +const res1 = await em.find(Author, { + books: { $some: { title: 'Foo' } }, +}); + +// finds all authors that have no books called `Foo` +const res2 = await em.find(Author, { + books: { $none: { title: 'Foo' } }, +}); + +// finds all authors that have every book called `Foo` +const res3 = await em.find(Author, { + books: { $every: { title: 'Foo' } }, +}); +``` + +The condition object can be also empty: + +```ts +// finds all authors that have at least one book +const res1 = await em.find(Author, { + books: { $some: {} }, +}); + +// finds all authors that have no books +const res2 = await em.find(Author, { + books: { $none: {} }, +}); +``` + ## Regular Expressions The `$re` operator takes a string as input value, and by default uses the case-sensitive operator. If you would like to use a `RegExp` object, i.e. 
to be able to set flags, then search directly on the field name without using the operator: diff --git a/sidebars.js b/sidebars.js index 80d1c54..462a3b6 100644 --- a/sidebars.js +++ b/sidebars.js @@ -34,6 +34,7 @@ module.exports = { 'identity-map', 'collections', 'type-safe-relations', + 'query-conditions', 'repositories', 'transactions', 'inheritance-mapping', @@ -56,7 +57,6 @@ module.exports = { 'caching', 'logging', 'nested-populate', - 'query-conditions', 'propagation', 'loading-strategies', 'dataloaders', diff --git a/enums.ts b/enums.ts index 6533bdd..f866bb5 100644 --- a/enums.ts +++ b/enums.ts @@ -38,6 +38,9 @@ export enum QueryOperator { $overlap = '&&', // postgres only $contains = '@>', // postgres only $contained = '<@', // postgres only + $none = 'none', // collection operators, sql only + $some = 'some', // collection operators, sql only + $every = 'every', // collection operators, sql only } export const ARRAY_OPERATORS = [ diff --git a/typings.ts b/typings.ts index 54ff8ea..06ce11f 100644 --- a/typings.ts +++ b/typings.ts @@ -137,6 +137,7 @@ export interface IQueryBuilder<T> { truncate(): this; count(field?: string | string[], distinct?: boolean): this; join(field: string, alias: string, cond?: QBFilterQuery, type?: JoinType, path?: string): this; + innerJoin(field: string, alias: string, cond?: QBFilterQuery): this; leftJoin(field: string, alias: string, cond?: QBFilterQuery): this; joinAndSelect(field: string, alias: string, cond?: QBFilterQuery): this; leftJoinAndSelect(field: string, alias: string, cond?: QBFilterQuery, fields?: string[]): this; @@ -153,6 +154,7 @@ export interface IQueryBuilder<T> { having(cond?: QBFilterQuery | string, params?: any[]): this; getAliasForJoinPath(path: string): string | undefined; getNextAlias(entityName?: string): string; + clone(reset?: boolean): IQueryBuilder<T>; } export interface ICriteriaNode<T extends object> { diff --git a/ArrayCriteriaNode.ts b/ArrayCriteriaNode.ts index e904fd5..e40d67f 100644 --- a/ArrayCriteriaNode.ts +++ b/ArrayCriteriaNode.ts @@ -12,6 +12,12 @@ export class ArrayCriteriaNode<T extends object> extends CriteriaNode<T> { }); } + override unwrap(): any { + return this.payload.map((node: CriteriaNode<T>) => { + return node.unwrap(); + }); + } + override willAutoJoin(qb: IQueryBuilder<T>, alias?: string) { return this.payload.some((node: CriteriaNode<T>) => { return node.willAutoJoin(qb, alias); diff --git a/CriteriaNode.ts b/CriteriaNode.ts index fae29b6..983436c 100644 --- a/CriteriaNode.ts +++ b/CriteriaNode.ts @@ -43,6 +43,10 @@ export class CriteriaNode<T extends object> implements ICriteriaNode<T> { return this.payload; } + unwrap(): any { + return this.payload; + } + shouldInline(payload: any): boolean { return false; } @@ -56,7 +60,9 @@ export class CriteriaNode<T extends object> implements ICriteriaNode<T> { const composite = this.prop?.joinColumns ? this.prop.joinColumns.length > 1 : false; const customExpression = CriteriaNode.isCustomExpression(this.key!); const scalar = payload === null || Utils.isPrimaryKey(payload) || payload as unknown instanceof RegExp || payload as unknown instanceof Date || customExpression; - const operator = Utils.isPlainObject(payload) && Object.keys(payload).every(k => Utils.isOperator(k, false)); + const plainObject = Utils.isPlainObject(payload); + const keys = plainObject ? 
Object.keys(payload) : []; + const operator = plainObject && keys.every(k => Utils.isOperator(k, false)); if (composite) { return true; diff --git a/ObjectCriteriaNode.ts b/ObjectCriteriaNode.ts index 303ca22..c5c9343 100644 --- a/ObjectCriteriaNode.ts +++ b/ObjectCriteriaNode.ts @@ -19,16 +19,42 @@ export class ObjectCriteriaNode<T extends object> extends CriteriaNode<T> { override process(qb: IQueryBuilder<T>, alias?: string): any { const nestedAlias = qb.getAliasForJoinPath(this.getPath()); const ownerAlias = alias || qb.alias; + const keys = Object.keys(this.payload); if (nestedAlias) { alias = nestedAlias; } if (this.shouldAutoJoin(nestedAlias)) { + if (keys.some(k => ['$some', '$none', '$every'].includes(k))) { + const $and: Dictionary[] = []; + const primaryKeys = this.metadata.find(this.entityName)!.primaryKeys.map(pk => `${alias}.${pk}`); + + for (const key of keys) { + const payload = (this.payload[key] as CriteriaNode<T>).unwrap(); + const sub = qb.clone(true).innerJoin(this.key!, qb.getNextAlias(this.prop!.type)); + sub.select(this.prop!.targetMeta!.primaryKeys); + + if (key === '$every') { + sub.where({ $not: { [this.key!]: payload } }); + } else { + sub.where({ [this.key!]: payload }); + } + + const op = key === '$some' ? '$in' : '$nin'; + + $and.push({ + [Utils.getPrimaryKeyHash(primaryKeys)]: { [op]: (sub as Dictionary).getKnexQuery() }, + }); + } + + return { $and }; + } + alias = this.autoJoin(qb, ownerAlias); } - return Object.keys(this.payload).reduce((o, field) => { + return keys.reduce((o, field) => { const childNode = this.payload[field] as CriteriaNode<T>; const payload = childNode.process(qb, this.prop ? alias : ownerAlias); const operator = Utils.isOperator(field); @@ -52,6 +78,14 @@ export class ObjectCriteriaNode<T extends object> extends CriteriaNode<T> { o[`${alias}.${field}`] = payload; } + + return o; + }, {} as Dictionary); + } + + override unwrap(): any { + return Object.keys(this.payload).reduce((o, field) => { + o[field] = this.payload[field].unwrap(); return o; }, {} as Dictionary); } @@ -59,16 +93,17 @@ export class ObjectCriteriaNode<T extends object> extends CriteriaNode<T> { override willAutoJoin(qb: IQueryBuilder<T>, alias?: string) { const nestedAlias = qb.getAliasForJoinPath(this.getPath()); const ownerAlias = alias || qb.alias; + const keys = Object.keys(this.payload); if (nestedAlias) { alias = nestedAlias; } if (this.shouldAutoJoin(nestedAlias)) { - return true; + return !keys.some(k => ['$some', '$none', '$every'].includes(k)); } - return Object.keys(this.payload).some(field => { + return keys.some(field => { const childNode = this.payload[field] as CriteriaNode<T>; return childNode.willAutoJoin(qb, this.prop ? alias : ownerAlias); }); @@ -92,7 +127,8 @@ export class ObjectCriteriaNode<T extends object> extends CriteriaNode<T> { o[`${alias}.${field}`] = { [k]: tmp, ...(o[`${alias}.${field}`] || {}) }; } else if (this.isPrefixed(k) || Utils.isOperator(k) || !childAlias) { const idx = prop.referencedPKs.indexOf(k as EntityKey); - const key = idx !== -1 && !childAlias ? prop.joinColumns[idx] : k; + // FIXME maybe other kinds should be supported too? + const key = idx !== -1 && !childAlias && ![ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(prop.kind) ? prop.joinColumns[idx] : k; if (key in o) { const $and = o.$and ?? 
[]; @@ -101,9 +137,7 @@ export class ObjectCriteriaNode<T extends object> extends CriteriaNode<T> { o.$and = $and; } else if (Utils.isOperator(k) && Array.isArray(payload[k])) { o[key] = payload[k].map((child: Dictionary) => Object.keys(child).reduce((o, childKey) => { - const key = (this.isPrefixed(childKey) || Utils.isOperator(childKey)) - ? childKey - : `${childAlias}.${childKey}`; + const key = (this.isPrefixed(childKey) || Utils.isOperator(childKey)) ? childKey : `${childAlias}.${childKey}`; o[key] = child[childKey]; return o; }, {} as Dictionary)); @@ -128,12 +162,15 @@ export class ObjectCriteriaNode<T extends object> extends CriteriaNode<T> { const operatorKeys = knownKey && Object.keys(this.payload).every(key => Utils.isOperator(key, false)); const primaryKeys = knownKey && Object.keys(this.payload).every(key => { const meta = this.metadata.find(this.entityName)!; + if (!meta.primaryKeys.includes(key)) { return false; } + if (!Utils.isPlainObject(this.payload[key].payload) || ![ReferenceKind.ONE_TO_ONE, ReferenceKind.MANY_TO_ONE].includes(meta.properties[key].kind)) { return true; } + return Object.keys(this.payload[key].payload).every(k => meta.properties[key].targetMeta!.primaryKeys.includes(k)); }); diff --git a/QueryBuilder.ts b/QueryBuilder.ts index e446395..4ef2aac 100644 --- a/QueryBuilder.ts +++ b/QueryBuilder.ts @@ -844,8 +844,13 @@ export class QueryBuilder<T extends object = AnyEntity> { return ret; } - clone(): QueryBuilder<T> { + clone(reset?: boolean): QueryBuilder<T> { const qb = new QueryBuilder<T>(this.mainAlias.entityName, this.metadata, this.driver, this.context, this.mainAlias.aliasName, this.connectionType, this.em); + + if (reset) { + return qb; + } + Object.assign(qb, this); // clone array/object properties diff --git a/GH2916.test.ts b/GH2916.test.ts index 54cb9ae..c1dc54e 100644 --- a/GH2916.test.ts +++ b/GH2916.test.ts @@ -0,0 +1,233 @@ +import { Collection, Entity, ManyToOne, MikroORM, OneToMany, ManyToMany, PrimaryKey, Property, SimpleLogger } from '@mikro-orm/better-sqlite'; +import { mockLogger } from '../helpers'; + +@Entity() +class Author { + + @PrimaryKey() + id!: number; + + @Property() + name: string; + + @OneToMany(() => Book, b => b.author) + books = new Collection<Book>(this); + + constructor(name: string) { + this.name = name; + } + +} + +@Entity() +class Book { + + @PrimaryKey() + id!: number; + + @Property() + title!: string; + + @ManyToOne(() => Author) + author!: Author; + + @ManyToMany(() => BookTag) + tags = new Collection<BookTag>(this); + + constructor(author: Author, title: string, tags: BookTag[] = []) { + this.author = author; + this.title = title; + this.tags.set(tags); + } + +} + +@Entity() +class BookTag { + + @PrimaryKey() + id!: number; + + @Property() + name!: string; + + @ManyToMany(() => Book, b => b.tags) + books = new Collection<Book>(this); + + constructor(name: string) { + this.name = name; + } + +} + +let orm: MikroORM; + +beforeAll(async () => { + orm = await MikroORM.init({ + entities: [Author], + dbName: ':memory:', + loggerFactory: options => new SimpleLogger(options), + }); + + await orm.schema.createSchema(); + await createEntities(); +}); + +beforeEach(() => orm.em.clear()); +afterAll(() => orm.close(true)); + +async function createEntities() { + const author1 = new Author('Author 1'); // no books + const author2 = new Author('Author 2'); // only 'Foo' + const author3 = new Author('Author 3'); // 'Foo' and 'Bar' + const author4 = new Author('Author 4'); // only 'Foo Bar' + const author5 = new Author('Author 
5'); // only 'Foo' + + const t1 = new BookTag('t1'); + const t2 = new BookTag('t2'); + const t3 = new BookTag('t3'); + const t4 = new BookTag('t4'); + const t5 = new BookTag('t5'); + + author2.books.add( + new Book(author2, 'Foo', [t1, t5]), + new Book(author2, 'Foo', [t1, t3]), + ); + author3.books.add( + new Book(author3, 'Foo', [t2]), + new Book(author3, 'Foo', [t2]), + new Book(author3, 'Bar', [t2]), + new Book(author3, 'Bar', [t2]), + ); + author4.books.add( + new Book(author4, 'Foo Bar', [t1, t2]), + new Book(author4, 'Foo Bar', [t1, t2, t4]), + ); + author5.books.add(new Book(author5, 'Foo', [t4, t5])); + + await orm.em.fork().persist([author1, author2, author3, author4, author5]).flush(); +} + +test('1:m sub-query operators $some, $none and $every', async () => { + const mock = mockLogger(orm); + + let results = await orm.em.fork().find(Author, { + books: { $some: { title: 'Foo' } }, + }); + expect(results.map(res => res.name)).toEqual(['Author 2', 'Author 3', 'Author 5']); + expect(mock.mock.calls[0][0]).toBe("[query] select `a0`.* from `author` as `a0` where `a0`.`id` in (select `a0`.`id` from `author` as `a0` inner join `book` as `b1` on `a0`.`id` = `b1`.`author_id` where `b1`.`title` = 'Foo')"); + + results = await orm.em.fork().find(Author, { + books: { $none: { title: 'Foo' } }, + }); + expect(results.map(res => res.name)).toEqual(['Author 1', 'Author 4']); + expect(mock.mock.calls[1][0]).toBe("[query] select `a0`.* from `author` as `a0` where `a0`.`id` not in (select `a0`.`id` from `author` as `a0` inner join `book` as `b1` on `a0`.`id` = `b1`.`author_id` where `b1`.`title` = 'Foo')"); + + results = await orm.em.fork().find(Author, { + books: { $every: { title: 'Foo' } }, + }); + expect(results.map(res => res.name)).toEqual(['Author 1', 'Author 2', 'Author 5']); + expect(mock.mock.calls[2][0]).toBe("[query] select `a0`.* from `author` as `a0` where `a0`.`id` not in (select `a0`.`id` from `author` as `a0` inner join `book` as `b1` on `a0`.`id` = `b1`.`author_id` where not (`b1`.`title` = 'Foo'))"); + + results = await orm.em.fork().find(Author, { + books: { $some: {} }, + }); + expect(results.map(res => res.name)).toEqual(['Author 2', 'Author 3', 'Author 4', 'Author 5']); + expect(mock.mock.calls[3][0]).toBe('[query] select `a0`.* from `author` as `a0` where `a0`.`id` in (select `a0`.`id` from `author` as `a0` inner join `book` as `b1` on `a0`.`id` = `b1`.`author_id`)'); + + results = await orm.em.fork().find(Author, { + books: { $none: {} }, + }); + expect(results.map(res => res.name)).toEqual(['Author 1']); + expect(mock.mock.calls[4][0]).toBe('[query] select `a0`.* from `author` as `a0` where `a0`.`id` not in (select `a0`.`id` from `author` as `a0` inner join `book` as `b1` on `a0`.`id` = `b1`.`author_id`)'); +}); + +test('m:n sub-query operators $some, $none and $every', async () => { + const mock = mockLogger(orm); + + let results = await orm.em.fork().find(Book, { + tags: { $some: { name: ['t1', 't2'] } }, + }, { populate: ['tags'] }); + expect(results.map(res => res.tags.getIdentifiers('name'))).toEqual([ + ['t1', 't5'], + ['t1', 't3'], + ['t1', 't2'], + ['t1', 't2', 't4'], + ['t2'], + ['t2'], + ['t2'], + ['t2'], + ]); + expect(mock.mock.calls[0][0]).toBe("[query] select `b0`.* from `book` as `b0` where `b0`.`id` in (select `b0`.`id` from `book` as `b0` inner join `book_tags` as `b1` on `b0`.`id` = `b1`.`book_id` inner join `book_tag` as `b1` on `b1`.`book_tag_id` = `b1`.`id` where `b1`.`name` in ('t1', 't2'))"); + expect(mock.mock.calls[1][0]).toBe('[query] select 
`b1`.*, `b0`.`book_tag_id` as `fk__book_tag_id`, `b0`.`book_id` as `fk__book_id` from `book_tags` as `b0` inner join `book_tag` as `b1` on `b0`.`book_tag_id` = `b1`.`id` where `b0`.`book_id` in (1, 2, 3, 4, 5, 6, 7, 8)'); + + results = await orm.em.fork().find(Book, { + tags: { $none: { name: ['t1', 't2'] } }, + }, { populate: ['tags'], orderBy: { tags: { name: 1 } } }); + expect(results.map(res => res.tags.getIdentifiers('name'))).toEqual([ + ['t4', 't5'], + ]); + expect(mock.mock.calls[2][0]).toBe("[query] select `b0`.* from `book` as `b0` left join `book_tags` as `b3` on `b0`.`id` = `b3`.`book_id` left join `book_tag` as `b2` on `b3`.`book_tag_id` = `b2`.`id` where `b0`.`id` not in (select `b0`.`id` from `book` as `b0` inner join `book_tags` as `b1` on `b0`.`id` = `b1`.`book_id` inner join `book_tag` as `b1` on `b1`.`book_tag_id` = `b1`.`id` where `b1`.`name` in ('t1', 't2')) order by `b2`.`name` asc"); + expect(mock.mock.calls[3][0]).toBe('[query] select `b1`.*, `b0`.`book_tag_id` as `fk__book_tag_id`, `b0`.`book_id` as `fk__book_id` from `book_tags` as `b0` inner join `book_tag` as `b1` on `b0`.`book_tag_id` = `b1`.`id` where `b0`.`book_id` in (9) order by `b1`.`name` asc'); + + results = await orm.em.fork().find(Book, { + tags: { $every: { name: ['t1', 't2'] } }, + }, { populate: ['tags'], orderBy: { tags: { name: 1 } } }); + expect(results.map(res => res.tags.getIdentifiers('name'))).toEqual([ + ['t1', 't2'], + ['t2'], + ['t2'], + ['t2'], + ['t2'], + ]); + expect(mock.mock.calls[4][0]).toBe("[query] select `b0`.* from `book` as `b0` left join `book_tags` as `b3` on `b0`.`id` = `b3`.`book_id` left join `book_tag` as `b2` on `b3`.`book_tag_id` = `b2`.`id` where `b0`.`id` not in (select `b0`.`id` from `book` as `b0` inner join `book_tags` as `b1` on `b0`.`id` = `b1`.`book_id` inner join `book_tag` as `b1` on `b1`.`book_tag_id` = `b1`.`id` where not (`b1`.`name` in ('t1', 't2'))) order by `b2`.`name` asc"); + expect(mock.mock.calls[5][0]).toBe('[query] select `b1`.*, `b0`.`book_tag_id` as `fk__book_tag_id`, `b0`.`book_id` as `fk__book_id` from `book_tags` as `b0` inner join `book_tag` as `b1` on `b0`.`book_tag_id` = `b1`.`id` where `b0`.`book_id` in (3, 5, 6, 7, 8) order by `b1`.`name` asc'); + + results = await orm.em.fork().find(Book, { + tags: { $some: {} }, + }, { populate: ['tags'], orderBy: { tags: { name: 1 } } }); + expect(results.map(res => res.tags.getIdentifiers('name'))).toEqual([ + ['t1', 't5'], + ['t1', 't3'], + ['t1', 't2'], + ['t1', 't2', 't4'], + ['t2'], + ['t2'], + ['t2'], + ['t2'], + ['t4', 't5'], + ]); + expect(mock.mock.calls[6][0]).toBe('[query] select `b0`.* from `book` as `b0` left join `book_tags` as `b3` on `b0`.`id` = `b3`.`book_id` left join `book_tag` as `b2` on `b3`.`book_tag_id` = `b2`.`id` where `b0`.`id` in (select `b0`.`id` from `book` as `b0` inner join `book_tags` as `b1` on `b0`.`id` = `b1`.`book_id` inner join `book_tag` as `b1` on `b1`.`book_tag_id` = `b1`.`id`) order by `b2`.`name` asc'); + expect(mock.mock.calls[7][0]).toBe('[query] select `b1`.*, `b0`.`book_tag_id` as `fk__book_tag_id`, `b0`.`book_id` as `fk__book_id` from `book_tags` as `b0` inner join `book_tag` as `b1` on `b0`.`book_tag_id` = `b1`.`id` where `b0`.`book_id` in (1, 2, 3, 4, 5, 6, 7, 8, 9) order by `b1`.`name` asc'); + + results = await orm.em.fork().find(Book, { + tags: { $none: {} }, + }, { populate: ['tags'], orderBy: { tags: { name: 1 } } }); + expect(results.map(res => res.tags.getIdentifiers('name'))).toEqual([]); + expect(mock.mock.calls[8][0]).toBe('[query] select `b0`.* 
from `book` as `b0` left join `book_tags` as `b3` on `b0`.`id` = `b3`.`book_id` left join `book_tag` as `b2` on `b3`.`book_tag_id` = `b2`.`id` where `b0`.`id` not in (select `b0`.`id` from `book` as `b0` inner join `book_tags` as `b1` on `b0`.`id` = `b1`.`book_id` inner join `book_tag` as `b1` on `b1`.`book_tag_id` = `b1`.`id`) order by `b2`.`name` asc'); +}); + +test('allows only one of $some, $none and $every on the given level', async () => { + const mock = mockLogger(orm); + let results = await orm.em.fork().find(Author, { + books: { + $some: { title: 'Foo' }, + $none: { title: 'Foo' }, + }, + }); + expect(mock.mock.calls[0][0]).toBe("[query] select `a0`.* from `author` as `a0` where `a0`.`id` in (select `a0`.`id` from `author` as `a0` inner join `book` as `b1` on `a0`.`id` = `b1`.`author_id` where `b1`.`title` = 'Foo') and `a0`.`id` not in (select `a0`.`id` from `author` as `a0` inner join `book` as `b2` on `a0`.`id` = `b2`.`author_id` where `b2`.`title` = 'Foo')"); + expect(results).toHaveLength(0); + + results = await orm.em.fork().find(Author, { + books: { + $some: { title: 'Foo' }, + $none: { title: 'Foo 123' }, + }, + }); + expect(mock.mock.calls[1][0]).toBe("[query] select `a0`.* from `author` as `a0` where `a0`.`id` in (select `a0`.`id` from `author` as `a0` inner join `book` as `b1` on `a0`.`id` = `b1`.`author_id` where `b1`.`title` = 'Foo') and `a0`.`id` not in (select `a0`.`id` from `author` as `a0` inner join `book` as `b2` on `a0`.`id` = `b2`.`author_id` where `b2`.`title` = 'Foo 123')"); + expect(results.map(res => res.name)).toEqual([ + 'Author 2', + 'Author 3', + 'Author 5', + ]); +});
fix(dot-sql): ensure that CTEs can be used in `.sql`
b63e0fd3ed08e558e8b3012d6a41d03ecc58e231
fix
https://github.com/ibis-project/ibis/commit/b63e0fd3ed08e558e8b3012d6a41d03ecc58e231
ensure that CTEs can be used in `.sql`
diff --git a/__init__.py b/__init__.py index 66ab139..df3aa5b 100644 --- a/__init__.py +++ b/__init__.py @@ -191,18 +191,10 @@ class SQLBackend(BaseBackend, _DatabaseSchemaHandler): The schema inferred from `query` """ - def _get_sql_string_view_schema(self, name, table, query) -> sch.Schema: - compiler = self.compiler - dialect = compiler.dialect - - cte = compiler.to_sqlglot(table) - parsed = sg.parse_one(query, read=dialect) - parsed.args["with"] = cte.args.pop("with", []) - parsed = parsed.with_( - sg.to_identifier(name, quoted=compiler.quoted), as_=cte, dialect=dialect - ) - - sql = parsed.sql(dialect) + def _get_sql_string_view_schema( + self, *, name: str, table: ir.Table, query: str + ) -> sch.Schema: + sql = self.compiler.add_query_to_expr(name=name, table=table, query=query) return self._get_schema_using_query(sql) def _register_udfs(self, expr: ir.Expr) -> None: diff --git a/base.py b/base.py index ed8d11a..67fafa4 100644 --- a/base.py +++ b/base.py @@ -1613,6 +1613,31 @@ class SQLGlotCompiler(abc.ABC): ) return sg.select(*columns_to_keep).from_(parent) + def add_query_to_expr(self, *, name: str, table: ir.Table, query: str) -> str: + dialect = self.dialect + + compiled_ibis_expr = self.to_sqlglot(table) + + # pull existing CTEs from the compiled Ibis expression and combine them + # with the new query + parsed = reduce( + lambda parsed, cte: parsed.with_(cte.args["alias"], as_=cte.args["this"]), + compiled_ibis_expr.ctes, + sg.parse_one(query, read=dialect), + ) + + # remove all ctes from the compiled expression, since they're now in + # our larger expression + compiled_ibis_expr.args.pop("with", None) + + # add the new str query as a CTE + parsed = parsed.with_( + sg.to_identifier(name, quoted=self.quoted), as_=compiled_ibis_expr + ) + + # generate the SQL string + return parsed.sql(dialect) + # `__init_subclass__` is uncalled for subclasses - we manually call it here to # autogenerate the base class implementations as well. 
diff --git a/test_dot_sql.py b/test_dot_sql.py index a569be4..0130c06 100644 --- a/test_dot_sql.py +++ b/test_dot_sql.py @@ -5,6 +5,7 @@ import getpass import pytest import sqlglot as sg +import sqlglot.expressions as sge from pytest import param import ibis @@ -24,11 +25,21 @@ dot_sql_never = pytest.mark.never( _NAMES = { "bigquery": f"ibis_gbq_testing_{getpass.getuser()}_{PYTHON_SHORT_VERSION}.functional_alltypes", - "exasol": '"functional_alltypes"', } [email protected](["oracle"], reason="table quoting behavior") [email protected](scope="module") +def ftname_raw(con): + return _NAMES.get(con.name, "functional_alltypes") + + [email protected](scope="module") +def ftname(con, ftname_raw): + table = sg.parse_one(ftname_raw, into=sge.Table) + table = sg.table(table.name, db=table.db, catalog=table.catalog, quoted=True) + return table.sql(con.dialect) + + @dot_sql_never @pytest.mark.parametrize( "schema", @@ -37,10 +48,9 @@ _NAMES = { param({"s": "string", "new_col": "double"}, id="explicit_schema"), ], ) -def test_con_dot_sql(backend, con, schema): +def test_con_dot_sql(backend, con, schema, ftname): alltypes = backend.functional_alltypes # pull out the quoted name - name = _NAMES.get(con.name, "functional_alltypes") quoted = True cols = [ sg.column("string_col", quoted=quoted).as_("s", quoted=quoted).sql(con.dialect), @@ -50,7 +60,7 @@ def test_con_dot_sql(backend, con, schema): ] t = ( con.sql( - f"SELECT {', '.join(cols)} FROM {name}", + f"SELECT {', '.join(cols)} FROM {ftname}", schema=schema, ) .group_by("s") # group by a column from SQL @@ -325,9 +335,16 @@ def test_cte(alltypes, df): @dot_sql_never -def test_bare_minimum(con, alltypes, df): +def test_bare_minimum(alltypes, df, ftname_raw): """Test that a backend that supports dot sql can do the most basic thing.""" - name = _NAMES.get(con.name, "functional_alltypes").replace('"', "") - expr = alltypes.sql(f'SELECT COUNT(*) AS "n" FROM "{name}"', dialect="duckdb") + expr = alltypes.sql(f'SELECT COUNT(*) AS "n" FROM "{ftname_raw}"', dialect="duckdb") assert expr.to_pandas().iat[0, 0] == len(df) + + +@dot_sql_never +def test_embedded_cte(alltypes, ftname_raw): + sql = f'WITH "x" AS (SELECT * FROM "{ftname_raw}") SELECT * FROM "x"' + expr = alltypes.sql(sql, dialect="duckdb") + result = expr.head(1).execute() + assert len(result) == 1 diff --git a/relations.py b/relations.py index 684f669..4d4beed 100644 --- a/relations.py +++ b/relations.py @@ -3553,7 +3553,7 @@ class Table(Expr, _FixedTextJupyterMixin): name = util.gen_name("sql_query") expr = self - schema = backend._get_sql_string_view_schema(name, expr, query) + schema = backend._get_sql_string_view_schema(name=name, table=expr, query=query) node = ops.SQLStringView(child=self.op(), query=query, schema=schema) return node.to_expr()
chore(deps): relock
05f2a259149bb222f06d3ea49157e0e2246d9d51
chore
https://github.com/ibis-project/ibis/commit/05f2a259149bb222f06d3ea49157e0e2246d9d51
relock
diff --git a/poetry.lock b/poetry.lock index bfa6c3b..a4a6aa0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "appnope" @@ -773,7 +773,7 @@ click = ">=8.0" cloudpickle = ">=1.5.0" fsspec = ">=2021.09.0" importlib-metadata = ">=4.13.0" -numpy = {version = ">=1.21", optional = true, markers = "extra == \\"array\\" or extra == \\"dataframe\\""} +numpy = {version = ">=1.21", optional = true, markers = "extra == \\"array\\""} packaging = ">=20.0" pandas = {version = ">=1.3", optional = true, markers = "extra == \\"dataframe\\""} partd = ">=1.2.0" @@ -1245,11 +1245,11 @@ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \\"grpc\\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \\"3.11\\" and extra == \\"grpc\\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \\"3.11\\""}, ] grpcio-status = [ {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \\"grpc\\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \\"3.11\\" and extra == \\"grpc\\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \\"3.11\\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -4715,7 +4715,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \\"3\\" and platform_machine == \\"aarch64\\" or python_version >= \\"3\\" and platform_machine == \\"ppc64le\\" or python_version >= \\"3\\" and platform_machine == \\"x86_64\\" or python_version >= \\"3\\" and platform_machine == \\"amd64\\" or python_version >= \\"3\\" and platform_machine == \\"AMD64\\" or python_version >= \\"3\\" and platform_machine == \\"win32\\" or python_version >= \\"3\\" and platform_machine == \\"WIN32\\""} +greenlet = {version = "!=0.4.17", markers = "python_version >= \\"3\\" and (platform_machine == \\"aarch64\\" or platform_machine == \\"ppc64le\\" or platform_machine == \\"x86_64\\" or platform_machine == \\"amd64\\" or platform_machine == \\"AMD64\\" or platform_machine == \\"win32\\" or platform_machine == \\"WIN32\\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -5361,7 +5361,7 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \\"PyPy\\ cffi = ["cffi (>=1.11)"] [extras] -all = ["GeoAlchemy2", "black", "clickhouse-connect", "dask", "datafusion", "db-dtypes", "duckdb", "duckdb-engine", "fsspec", "geopandas", "google-cloud-bigquery", "google-cloud-bigquery-storage", "graphviz", "impyla", "oracledb", "packaging", "polars", "psycopg2", "pydata-google-auth", "pydruid", "pymssql", "pymysql", "pyspark", "regex", "requests", "shapely", "snowflake-connector-python", "snowflake-sqlalchemy", "sqlalchemy", "sqlalchemy-views", "trino"] +all = ["black", "clickhouse-connect", "dask", "datafusion", "db-dtypes", "duckdb", "duckdb-engine", "fsspec", "GeoAlchemy2", "geopandas", "google-cloud-bigquery", "google-cloud-bigquery-storage", "graphviz", "impyla", "oracledb", "packaging", "polars", "psycopg2", "pydata-google-auth", "pydruid", "pymssql", 
"pymysql", "pyspark", "regex", "requests", "shapely", "snowflake-connector-python", "snowflake-sqlalchemy", "sqlalchemy", "sqlalchemy-views", "trino"] bigquery = ["db-dtypes", "google-cloud-bigquery", "google-cloud-bigquery-storage", "pydata-google-auth"] clickhouse = ["clickhouse-connect", "sqlalchemy"] dask = ["dask", "regex"] @@ -5371,16 +5371,16 @@ druid = ["pydruid", "sqlalchemy"] duckdb = ["duckdb", "duckdb-engine", "packaging", "sqlalchemy", "sqlalchemy-views"] geospatial = ["GeoAlchemy2", "geopandas", "shapely"] impala = ["fsspec", "impyla", "requests", "sqlalchemy"] -mssql = ["pymssql", "sqlalchemy", "sqlalchemy-views"] -mysql = ["pymysql", "sqlalchemy", "sqlalchemy-views"] -oracle = ["oracledb", "packaging", "sqlalchemy", "sqlalchemy-views"] +mssql = ["sqlalchemy", "pymssql", "sqlalchemy-views"] +mysql = ["sqlalchemy", "pymysql", "sqlalchemy-views"] +oracle = ["sqlalchemy", "oracledb", "packaging", "sqlalchemy-views"] pandas = ["regex"] polars = ["polars"] postgres = ["psycopg2", "sqlalchemy", "sqlalchemy-views"] pyspark = ["pyspark", "sqlalchemy"] snowflake = ["snowflake-connector-python", "snowflake-sqlalchemy", "sqlalchemy-views"] sqlite = ["regex", "sqlalchemy", "sqlalchemy-views"] -trino = ["sqlalchemy", "sqlalchemy-views", "trino"] +trino = ["trino", "sqlalchemy", "sqlalchemy-views"] visualization = ["graphviz"] [metadata] diff --git a/requirements.txt b/requirements.txt index cdf08db..87ee3ad 100644 --- a/requirements.txt +++ b/requirements.txt @@ -102,7 +102,7 @@ mkdocs-jupyter==0.24.1 ; python_version >= "3.8" and python_version < "4.0" mkdocs-literate-nav==0.6.0 ; python_version >= "3.8" and python_version < "4.0" mkdocs-macros-plugin==0.7.0 ; python_version >= "3.8" and python_version < "4.0" mkdocs-material-extensions==1.1.1 ; python_version >= "3.8" and python_version < "4.0" -mkdocs-material==9.1.12 ; python_version >= "3.8" and python_version < "4.0" +mkdocs-material==9.1.13 ; python_version >= "3.8" and python_version < "4.0" mkdocs==1.4.3 ; python_version >= "3.8" and python_version < "4.0" mkdocstrings-python==1.0.0 ; python_version >= "3.8" and python_version < "4.0" mkdocstrings==0.21.2 ; python_version >= "3.8" and python_version < "4.0"
test: update ibis-testing commit
69a5b93aa90bc6a7d68dc4e58ff5b585e310cc9d
test
https://github.com/rohankumardubey/ibis/commit/69a5b93aa90bc6a7d68dc4e58ff5b585e310cc9d
update ibis-testing commit
diff --git a/overlay.nix b/overlay.nix index 21d0644..8ab73aa 100644 --- a/overlay.nix +++ b/overlay.nix @@ -19,9 +19,9 @@ in { ibisTestingData = pkgs.fetchFromGitHub { name = "ibis-testing-data"; - owner = "cpcloud"; + owner = "ibis-project"; repo = "testing-data"; - rev = "cleanup"; + rev = "master"; sha256 = "sha256-q1b5IcOl5oIFXP7/P5RufncjHEVrWp4NjoU2uo/BE9U="; };
refactor(aliasing): remove the need for renaming after execution (#9996)
a0d7237a9d5ffd4dedb5c935fa2fbb851f513421
refactor
https://github.com/ibis-project/ibis/commit/a0d7237a9d5ffd4dedb5c935fa2fbb851f513421
remove the need for renaming after execution (#9996)
diff --git a/__init__.py b/__init__.py index baaf1cd..219e4d7 100644 --- a/__init__.py +++ b/__init__.py @@ -519,15 +519,12 @@ class Backend(BaseBackend, NoUrl): streaming: bool = False, **kwargs: Any, ): + from ibis.formats.pyarrow import PyArrowData + df = self._to_dataframe( expr, params=params, limit=limit, streaming=streaming, **kwargs ) - table = df.to_arrow() - if isinstance(expr, (ir.Table, ir.Value)): - schema = expr.as_table().schema().to_pyarrow() - return table.rename_columns(schema.names).cast(schema) - else: - raise com.IbisError(f"Cannot execute expression of type: {type(expr)}") + return PyArrowData.convert_table(df.to_arrow(), expr.as_table().schema()) def to_pyarrow( self, diff --git a/compiler.py b/compiler.py index 622b523..e8164e1 100644 --- a/compiler.py +++ b/compiler.py @@ -59,7 +59,7 @@ def table(op, **_): @translate.register(ops.DummyTable) def dummy_table(op, **kw): - selections = [translate(arg, **kw) for name, arg in op.values.items()] + selections = [translate(arg, **kw).alias(name) for name, arg in op.values.items()] return pl.DataFrame().lazy().select(selections) @@ -68,12 +68,6 @@ def in_memory_table(op, **_): return op.data.to_polars(op.schema).lazy() [email protected](ops.Alias) -def alias(op, **kw): - arg = translate(op.arg, **kw) - return arg.alias(op.name) - - def _make_duration(value, dtype): kwargs = {f"{dtype.resolution}s": value} return pl.duration(**kwargs) diff --git a/test_generic.py b/test_generic.py index 4181b3e..b8bb7a4 100644 --- a/test_generic.py +++ b/test_generic.py @@ -2502,3 +2502,11 @@ def test_simple_pivot_wider(con, backend, monkeypatch): result = expr.to_pandas() expected = pd.DataFrame({"no": [4], "yes": [3]}) backend.assert_frame_equal(result, expected) + + +def test_named_literal(con, backend): + lit = ibis.literal(1, type="int64").name("one") + expr = lit.as_table() + result = con.to_pandas(expr) + expected = pd.DataFrame({"one": [1]}) + backend.assert_frame_equal(result, expected) diff --git a/relations.py b/relations.py index 9d1c36c..a78d3f6 100644 --- a/relations.py +++ b/relations.py @@ -118,6 +118,14 @@ def bind(table: Table, value) -> Iterator[ir.Value]: yield literal(value) +def unwrap_alias(node: ops.Value) -> ops.Value: + """Unwrap an alias node.""" + if isinstance(node, ops.Alias): + return node.arg + else: + return node + + def unwrap_aliases(values: Iterator[ir.Value]) -> Mapping[str, ir.Value]: """Unwrap aliases into a mapping of {name: expression}.""" result = {} @@ -127,10 +135,7 @@ def unwrap_aliases(values: Iterator[ir.Value]) -> Mapping[str, ir.Value]: raise com.IbisInputError( f"Duplicate column name {node.name!r} in result set" ) - if isinstance(node, ops.Alias): - result[node.name] = node.arg - else: - result[node.name] = node + result[node.name] = unwrap_alias(node) return result diff --git a/generic.py b/generic.py index 4e83869..40b5c07 100644 --- a/generic.py +++ b/generic.py @@ -1342,10 +1342,13 @@ class Scalar(Value): >>> isinstance(lit, ir.Table) True """ - parents = self.op().relations + from ibis.expr.types.relations import unwrap_alias + + op = self.op() + parents = op.relations - if len(parents) == 0: - return ops.DummyTable({self.get_name(): self}).to_expr() + if not parents: + return ops.DummyTable({op.name: unwrap_alias(op)}).to_expr() elif len(parents) == 1: (parent,) = parents return parent.to_expr().aggregate(self) @@ -1521,11 +1524,13 @@ class Column(Value, _FixedTextJupyterMixin): >>> expr.equals(expected) True """ - parents = self.op().relations - values = {self.get_name(): self} + from 
ibis.expr.types.relations import unwrap_alias + + op = self.op() + parents = op.relations - if len(parents) == 0: - return ops.DummyTable(values).to_expr() + if not parents: + return ops.DummyTable({op.name: unwrap_alias(op)}).to_expr() elif len(parents) == 1: (parent,) = parents return parent.to_expr().select(self) diff --git a/pandas.py b/pandas.py index 935a5dc..3df53df 100644 --- a/pandas.py +++ b/pandas.py @@ -114,14 +114,11 @@ class PandasData(DataMapper): "schema column count does not match input data column count" ) - columns = [] - for (_, series), dtype in zip(df.items(), schema.types): - columns.append(cls.convert_column(series, dtype)) - df = cls.concat(columns, axis=1) - - # return data with the schema's columns which may be different than the - # input columns - df.columns = schema.names + columns = { + name: cls.convert_column(series, dtype) + for (name, dtype), (_, series) in zip(schema.items(), df.items()) + } + df = pd.DataFrame(columns) if geospatial_supported: from geopandas import GeoDataFrame @@ -154,7 +151,7 @@ class PandasData(DataMapper): @classmethod def convert_scalar(cls, obj, dtype): - df = PandasData.convert_table(obj, sch.Schema({obj.columns[0]: dtype})) + df = PandasData.convert_table(obj, sch.Schema({str(obj.columns[0]): dtype})) return df.iat[0, 0] @classmethod diff --git a/polars.py b/polars.py index 43f66c8..ad1f781 100644 --- a/polars.py +++ b/polars.py @@ -166,9 +166,6 @@ class PolarsData(DataMapper): def convert_table(cls, df: pl.DataFrame, schema: Schema) -> pl.DataFrame: pl_schema = PolarsSchema.from_ibis(schema) - if tuple(df.columns) != tuple(schema.names): - df = df.rename(dict(zip(df.columns, schema.names))) - if df.schema == pl_schema: return df return df.cast(pl_schema) diff --git a/test_polars.py b/test_polars.py index 6a450fc..2580f0a 100644 --- a/test_polars.py +++ b/test_polars.py @@ -162,7 +162,7 @@ def test_convert_column(): def test_convert_table(): - df = pl.DataFrame({"x": ["1", "2"], "y": ["a", "b"]}) + df = pl.DataFrame({"x": ["1", "2"], "z": ["a", "b"]}) schema = ibis.schema({"x": "int64", "z": "string"}) df = PolarsData.convert_table(df, schema) sol = pl.DataFrame( diff --git a/test_table.py b/test_table.py index 385d585..dc90310 100644 --- a/test_table.py +++ b/test_table.py @@ -2192,3 +2192,10 @@ def test_table_fillna_depr_warn(): t = ibis.table(schema={"a": "int", "b": "str"}) with pytest.warns(FutureWarning, match="v9.1"): t.fillna({"b": "missing"}) + + +def test_dummy_table_disallows_aliases(): + values = {"one": ops.Alias(ops.Literal(1, dtype=dt.int64), name="two")} + + with pytest.raises(ValidationError): + ops.DummyTable(values)
feat: added infinity shape
935368b88ad77495b3b17477ab7093e130623844
feat
https://github.com/tsparticles/tsparticles/commit/935368b88ad77495b3b17477ab7093e130623844
added infinity shape
diff --git a/package.dist.json b/package.dist.json index 565524a..5e82e54 100644 --- a/package.dist.json +++ b/package.dist.json @@ -0,0 +1,108 @@ +{ + "name": "@tsparticles/shape-infinity", + "version": "3.3.0", + "description": "tsParticles infinity shape", + "homepage": "https://particles.js.org", + "repository": { + "type": "git", + "url": "git+https://github.com/tsparticles/tsparticles.git", + "directory": "shapes/infinity" + }, + "keywords": [ + "front-end", + "frontend", + "tsparticles", + "particles.js", + "particlesjs", + "particles", + "particle", + "canvas", + "jsparticles", + "xparticles", + "particles-js", + "particles-bg", + "particles-bg-vue", + "particles-ts", + "particles.ts", + "react-particles-js", + "react-particles.js", + "react-particles", + "react", + "reactjs", + "vue-particles", + "ngx-particles", + "angular-particles", + "particleground", + "vue", + "vuejs", + "preact", + "preactjs", + "jquery", + "angularjs", + "angular", + "typescript", + "javascript", + "animation", + "web", + "html5", + "web-design", + "webdesign", + "css", + "html", + "css3", + "animated", + "background", + "confetti", + "canvas", + "fireworks", + "fireworks-js", + "confetti-js", + "confettijs", + "fireworksjs", + "canvas-confetti", + "tsparticles-shape" + ], + "author": "Matteo Bruni <[email protected]>", + "license": "MIT", + "bugs": { + "url": "https://github.com/tsparticles/tsparticles/issues" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/matteobruni" + }, + { + "type": "github", + "url": "https://github.com/sponsors/tsparticles" + }, + { + "type": "buymeacoffee", + "url": "https://www.buymeacoffee.com/matteobruni" + } + ], + "sideEffects": false, + "jsdelivr": "tsparticles.shape.infinity.min.js", + "unpkg": "tsparticles.shape.infinity.min.js", + "browser": "browser/index.js", + "main": "cjs/index.js", + "module": "esm/index.js", + "types": "types/index.d.ts", + "exports": { + ".": { + "types": "./types/index.d.ts", + "browser": "./browser/index.js", + "import": "./esm/index.js", + "require": "./cjs/index.js", + "umd": "./umd/index.js", + "default": "./cjs/index.js" + }, + "./package.json": "./package.json" + }, + "dependencies": { + "@tsparticles/engine": "^3.3.0" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/package.json b/package.json index 9aae97b..8f45333 100644 --- a/package.json +++ b/package.json @@ -0,0 +1,118 @@ +{ + "name": "@tsparticles/shape-infinity", + "version": "3.3.0", + "description": "tsParticles infinity shape", + "homepage": "https://particles.js.org", + "scripts": { + "build": "tsparticles-cli build", + "build:ci": "tsparticles-cli build --ci", + "version": "tsparticles-cli build -d && git add package.dist.json", + "prepack": "pnpm run build" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/tsparticles/tsparticles.git", + "directory": "shapes/infinity" + }, + "keywords": [ + "front-end", + "frontend", + "tsparticles", + "particles.js", + "particlesjs", + "particles", + "particle", + "canvas", + "jsparticles", + "xparticles", + "particles-js", + "particles-bg", + "particles-bg-vue", + "particles-ts", + "particles.ts", + "react-particles-js", + "react-particles.js", + "react-particles", + "react", + "reactjs", + "vue-particles", + "ngx-particles", + "angular-particles", + "particleground", + "vue", + "vuejs", + "preact", + "preactjs", + "jquery", + "angularjs", + "angular", + "typescript", + "javascript", + "animation", + "web", + "html5", + "web-design", + "webdesign", + "css", + "html", 
+ "css3", + "animated", + "background", + "confetti", + "canvas", + "fireworks", + "fireworks-js", + "confetti-js", + "confettijs", + "fireworksjs", + "canvas-confetti", + "tsparticles-shape" + ], + "author": "Matteo Bruni <[email protected]>", + "license": "MIT", + "bugs": { + "url": "https://github.com/tsparticles/tsparticles/issues" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/matteobruni" + }, + { + "type": "github", + "url": "https://github.com/sponsors/tsparticles" + }, + { + "type": "buymeacoffee", + "url": "https://www.buymeacoffee.com/matteobruni" + } + ], + "prettier": "@tsparticles/prettier-config", + "files": [ + "dist" + ], + "sideEffects": false, + "browser": "dist/browser/index.js", + "main": "dist/cjs/index.js", + "module": "dist/esm/index.js", + "types": "dist/types/index.d.ts", + "exports": { + ".": { + "types": "./dist/types/index.d.ts", + "browser": "./dist/browser/index.js", + "import": "./dist/esm/index.js", + "require": "./dist/cjs/index.js", + "umd": "./dist/umd/index.js", + "default": "./dist/cjs/index.js" + }, + "./package.json": "./dist/package.json" + }, + "dependencies": { + "@tsparticles/engine": "^3.3.0" + }, + "publishConfig": { + "access": "public", + "directory": "dist", + "linkDirectory": true + } +} diff --git a/index.ts b/index.ts index 53cec7b..d884273 100644 --- a/index.ts +++ b/index.ts @@ -4,6 +4,7 @@ import shapeArrow from "./shapeArrow.js"; import shapeCog from "./shapeCog.js"; import shapeEmoji from "./shapeEmoji.js"; import shapeHeart from "./shapeHeart.js"; +import shapeInfinity from "./shapeInfinity.js"; import shapeMultilineText from "./shapeMultilineText.js"; import shapeOptions from "./shapeOptions.js"; import shapePath from "./shapePath.js"; @@ -31,6 +32,7 @@ export default { shapeCog, shapeEmoji, shapeHeart, + shapeInfinity, shapeMultilineText, shapeOptions, shapePath, diff --git a/app.ts b/app.ts index 5fce46e..db29309 100644 --- a/app.ts +++ b/app.ts @@ -149,6 +149,7 @@ app.use("/shape-arrow", express.static("./node_modules/@tsparticles/shape-arrow" app.use("/shape-cards", express.static("./node_modules/@tsparticles/shape-cards")); app.use("/shape-cog", express.static("./node_modules/@tsparticles/shape-cog")); app.use("/shape-heart", express.static("./node_modules/@tsparticles/shape-heart")); +app.use("/shape-infinity", express.static("./node_modules/@tsparticles/shape-infinity")); app.use("/shape-path", express.static("./node_modules/@tsparticles/shape-path")); app.use("/shape-rounded-polygon", express.static("./node_modules/@tsparticles/shape-rounded-polygon")); app.use("/shape-rounded-rect", express.static("./node_modules/@tsparticles/shape-rounded-rect")); diff --git a/index.pug b/index.pug index 0f2acf7..ccfe327 100644 --- a/index.pug +++ b/index.pug @@ -189,6 +189,7 @@ html(lang="en") script(src="/shape-cog/tsparticles.shape.cog.min.js") script(src="/shape-emoji/tsparticles.shape.emoji.min.js") script(src="/shape-heart/tsparticles.shape.heart.min.js") + script(src="/shape-infinity/tsparticles.shape.infinity.min.js") script(src="/shape-path/tsparticles.shape.path.min.js") script(src="/shape-rounded-polygon/tsparticles.shape.rounded-polygon.min.js") script(src="/shape-rounded-rect/tsparticles.shape.rounded-rect.min.js") diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2239468..93580ae 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -305,6 +305,9 @@ importers: '@tsparticles/shape-heart': specifier: ^3.3.0 version: link:../../shapes/heart/dist + '@tsparticles/shape-infinity': + specifier: 
^3.3.0 + version: link:../../shapes/infinity/dist '@tsparticles/shape-path': specifier: ^3.3.0 version: link:../../shapes/path/dist @@ -796,6 +799,9 @@ importers: '@tsparticles/shape-image': specifier: workspace:^ version: link:../../shapes/image/dist + '@tsparticles/shape-infinity': + specifier: workspace:^ + version: link:../../shapes/infinity/dist '@tsparticles/shape-line': specifier: workspace:^ version: link:../../shapes/line/dist @@ -1477,6 +1483,13 @@ importers: version: link:../../engine/dist publishDirectory: dist + shapes/infinity: + dependencies: + '@tsparticles/engine': + specifier: ^3.3.0 + version: link:../../engine/dist + publishDirectory: dist + shapes/line: dependencies: '@tsparticles/engine': diff --git a/.browserslistrc b/.browserslistrc index eb2c296..4f5c4d3 100644 --- a/.browserslistrc +++ b/.browserslistrc @@ -0,0 +1,2 @@ +since 2019 +not dead diff --git a/.eslintignore b/.eslintignore index a964b86..8b085a7 100644 --- a/.eslintignore +++ b/.eslintignore @@ -0,0 +1,2 @@ +dist +node_modules \\ No newline at end of file diff --git a/.eslintrc.js b/.eslintrc.js index c0b7d8a..e804fa1 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -0,0 +1,5 @@ +module.exports = { + extends: [ + "@tsparticles/eslint-config", + ] +}; diff --git a/CHANGELOG.md b/CHANGELOG.md index f6c5766..a0e7fa5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -0,0 +1,106 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [3.3.0](https://github.com/tsparticles/tsparticles/compare/v3.2.2...v3.3.0) (2024-02-27) + +### Bug Fixes + +- fixed issues in Chrome with async rAF function, reduced async methods for vite builds ([2600f6f](https://github.com/tsparticles/tsparticles/commit/2600f6f69917895ab80f9a55b1f5168d587adac6)) + +## [3.2.2](https://github.com/tsparticles/tsparticles/compare/v3.2.1...v3.2.2) (2024-02-20) + +### Bug Fixes + +- fixed circular deps detection and other issues with dynamic imports ([b6ed5d3](https://github.com/tsparticles/tsparticles/commit/b6ed5d3eaa41e0ad50c55807e1ec6439eeacd0c1)) + +## [3.2.1](https://github.com/tsparticles/tsparticles/compare/v3.2.0...v3.2.1) (2024-01-31) + +**Note:** Version bump only for package @tsparticles/shape-infinity + +# [3.2.0](https://github.com/tsparticles/tsparticles/compare/v3.1.0...v3.2.0) (2024-01-31) + +### Features + +- added new particle external interaction ([f51ce7f](https://github.com/tsparticles/tsparticles/commit/f51ce7f104fa930fc68a257b64bbe8cf65fb9794)) +- improving dynamic imports ([f05c2ee](https://github.com/tsparticles/tsparticles/commit/f05c2ee643978b6ed4abe8c4a54d0c3cc29727a8)) +- improving dynamic imports ([f9f450d](https://github.com/tsparticles/tsparticles/commit/f9f450d438d0cc3e5710ec5c1977516fb94c6f21)) +- improving dynamic imports ([c592b29](https://github.com/tsparticles/tsparticles/commit/c592b2995a3cdf6864dcc331402023373c79107d)) + +# [3.1.0](https://github.com/tsparticles/tsparticles/compare/v3.0.3...v3.1.0) (2024-01-13) + +**Note:** Version bump only for package @tsparticles/shape-infinity + +## [3.0.3](https://github.com/tsparticles/tsparticles/compare/v3.0.2...v3.0.3) (2023-12-26) + +### Bug Fixes + +- used element id when present and fixed emoji memory management ([1990bbc](https://github.com/tsparticles/tsparticles/commit/1990bbcd9079366db7ec3dedf4477ba43d2c47cf)) + +## [3.0.2](https://github.com/tsparticles/tsparticles/compare/v3.0.1...v3.0.2) (2023-12-06) + +**Note:** Version bump only for 
package @tsparticles/shape-infinity + +## [3.0.1](https://github.com/tsparticles/tsparticles/compare/v3.0.0...v3.0.1) (2023-12-06) + +**Note:** Version bump only for package @tsparticles/shape-infinity + +# [3.0.0](https://github.com/tsparticles/tsparticles/compare/v3.0.0-beta.5...v3.0.0) (2023-12-04) + +### Features + +- added fade to trail effect ([17750ea](https://github.com/tsparticles/tsparticles/commit/17750eacdf86de208b2e723decc2ffb65521474b)) + +# [3.0.0-beta.5](https://github.com/tsparticles/tsparticles/compare/v3.0.0-beta.4...v3.0.0-beta.5) (2023-12-03) + +**Note:** Version bump only for package @tsparticles/shape-infinity + +# [3.0.0-beta.4](https://github.com/tsparticles/tsparticles/compare/v3.0.0-beta.3...v3.0.0-beta.4) (2023-11-16) + +### Features + +- added flat options to tsparticles-confetti options ([dff6c75](https://github.com/tsparticles/tsparticles/commit/dff6c7590c5a844e34547513637c8ad0f13a3d66)) + +# [3.0.0-beta.3](https://github.com/tsparticles/tsparticles/compare/v3.0.0-beta.2...v3.0.0-beta.3) (2023-09-20) + +**Note:** Version bump only for package @tsparticles/shape-infinity + +# [3.0.0-beta.2](https://github.com/tsparticles/tsparticles/compare/v3.0.0-beta.1...v3.0.0-beta.2) (2023-09-11) + +**Note:** Version bump only for package @tsparticles/shape-infinity + +# [3.0.0-beta.1](https://github.com/tsparticles/tsparticles/compare/v3.0.0-beta.0...v3.0.0-beta.1) (2023-08-25) + +### Features + +- supporting the npm exports option correctly ([bdfaca8](https://github.com/tsparticles/tsparticles/commit/bdfaca8077b8a3a4b1f482cc2ae5766914dcfaf7)) + +# [3.0.0-beta.0](https://github.com/tsparticles/tsparticles/compare/v2.12.0...v3.0.0-beta.0) (2023-08-24) + +**Note:** Version bump only for package @tsparticles/shape-infinity + +# [2.12.0](https://github.com/tsparticles/tsparticles/compare/v2.11.1...v2.12.0) (2023-08-03) + +**Note:** Version bump only for package tsparticles-shape-infinity + +## [2.11.1](https://github.com/tsparticles/tsparticles/compare/v2.11.0...v2.11.1) (2023-07-24) + +**Note:** Version bump only for package tsparticles-shape-infinity + +# [2.11.0](https://github.com/tsparticles/tsparticles/compare/v2.10.1...v2.11.0) (2023-07-12) + +### Features + +- added refresh flag for loading plugins, this will prevent multiple refresh of the instance ([9d999d6](https://github.com/tsparticles/tsparticles/commit/9d999d6fa2f0c0a45a551aab45b467a8f3b682c5)) +- added tree shaking ([86806a6](https://github.com/tsparticles/tsparticles/commit/86806a6054d89b050567599daab20da3b643b788)) + +## [2.10.1](https://github.com/tsparticles/tsparticles/compare/v2.10.0...v2.10.1) (2023-06-04) + +**Note:** Version bump only for package tsparticles-shape-infinity + +# [2.10.0](https://github.com/tsparticles/tsparticles/compare/v2.0.0-alpha.0...v2.10.0) (2023-06-03) + +### Features + +- added infinity shape ([09d962b](https://github.com/tsparticles/tsparticles/commit/09d962be91b721d4b93811e75d8b44912b1a6c45)) diff --git a/LICENSE b/LICENSE index 8788c42..82aa757 100644 --- a/LICENSE +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Matteo Bruni + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: 
+ +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index 5896be8..0c5c092 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,75 @@ +[![banner](https://particles.js.org/images/banner3.png)](https://particles.js.org) + +# tsParticles Infinity Shape + +[![jsDelivr](https://data.jsdelivr.com/v1/package/npm/@tsparticles/shape-infinity/badge)](https://www.jsdelivr.com/package/npm/@tsparticles/shape-infinity) +[![npmjs](https://badge.fury.io/js/@tsparticles/shape-infinity.svg)](https://www.npmjs.com/package/@tsparticles/shape-infinity) +[![npmjs](https://img.shields.io/npm/dt/@tsparticles/shape-infinity)](https://www.npmjs.com/package/@tsparticles/shape-infinity) [![GitHub Sponsors](https://img.shields.io/github/sponsors/matteobruni)](https://github.com/sponsors/matteobruni) + +[tsParticles](https://github.com/tsparticles/tsparticles) additional infinity shape. + +## How to use it + +### CDN / Vanilla JS / jQuery + +The CDN/Vanilla version JS has one required file in vanilla configuration: + +Including the `tsparticles.shape.infinity.min.js` file will export the function to load the shape: + +```text +loadInfinityShape +``` + +### Usage + +Once the scripts are loaded you can set up `tsParticles` and the shape like this: + +```javascript +(async () => { + await loadInfinityShape(tsParticles); + + await tsParticles.load({ + id: "tsparticles", + options: { + /* options */ + /* here you can use particles.shape.type: "infinity" */ + }, + }); +})(); +``` + +### ESM / CommonJS + +This package is compatible also with ES or CommonJS modules, firstly this needs to be installed, like this: + +```shell +$ npm install @tsparticles/shape-infinity +``` + +or + +```shell +$ yarn add @tsparticles/shape-infinity +``` + +Then you need to import it in the app, like this: + +```javascript +const { tsParticles } = require("@tsparticles/engine"); +const { loadInfinityShape } = require("@tsparticles/shape-infinity"); + +(async () => { + await loadInfinityShape(tsParticles); +})(); +``` + +or + +```javascript +import { tsParticles } from "@tsparticles/engine"; +import { loadInfinityShape } from "@tsparticles/shape-infinity"; + +(async () => { + await loadInfinityShape(tsParticles); +})(); +``` diff --git a/InfinityDrawer.ts b/InfinityDrawer.ts index 38438a0..7010c77 100644 --- a/InfinityDrawer.ts +++ b/InfinityDrawer.ts @@ -0,0 +1,10 @@ +import { type IShapeDrawData, type IShapeDrawer } from "@tsparticles/engine"; +import { drawInfinity } from "./Utils.js"; + +export class InfinityDrawer implements IShapeDrawer { + readonly validTypes = ["infinity"] as const; + + draw(data: IShapeDrawData): void { + drawInfinity(data); + } +} diff --git a/Utils.ts b/Utils.ts index 092bd04..7b886b7 100644 --- a/Utils.ts +++ b/Utils.ts @@ -0,0 +1,20 @@ +import type { ICoordinates, IShapeDrawData } from "@tsparticles/engine"; + +const origin: ICoordinates = { + x: 0, + y: 0, + }, + loopSizeFactor = 0.55; + +/** + * @param data - + */ +export function 
drawInfinity(data: IShapeDrawData): void { + const { context, radius } = data, + loopControl = radius * loopSizeFactor; + + context.moveTo(origin.x, origin.y); + context.bezierCurveTo(loopControl, -radius, loopControl, radius, origin.x, origin.y); + context.moveTo(origin.x, origin.y); + context.bezierCurveTo(-loopControl, -radius, origin.x - loopControl, radius, origin.x, origin.y); +} diff --git a/tsconfig.base.json b/tsconfig.base.json index b8351b1..3cccbf7 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -0,0 +1,9 @@ +{ + "extends": "@tsparticles/tsconfig/tsconfig.base.json", + "compilerOptions": { + "rootDir": "./src" + }, + "include": [ + "./src" + ] +} diff --git a/tsconfig.browser.json b/tsconfig.browser.json index c3d5233..c5d56cd 100644 --- a/tsconfig.browser.json +++ b/tsconfig.browser.json @@ -0,0 +1,6 @@ +{ + "extends": ["./tsconfig.base.json", "@tsparticles/tsconfig/tsconfig.browser.json"], + "compilerOptions": { + "outDir": "./dist/browser" + } +} diff --git a/tsconfig.json b/tsconfig.json index a844190..e9bc25f 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": ["./tsconfig.base.json", "@tsparticles/tsconfig/tsconfig.json"], + "compilerOptions": { + "outDir": "./dist/cjs" + } +} diff --git a/tsconfig.module.json b/tsconfig.module.json index c2cd5a3..2c1f248 100644 --- a/tsconfig.module.json +++ b/tsconfig.module.json @@ -0,0 +1,6 @@ +{ + "extends": ["./tsconfig.base.json", "@tsparticles/tsconfig/tsconfig.module.json"], + "compilerOptions": { + "outDir": "./dist/esm" + } +} diff --git a/tsconfig.types.json b/tsconfig.types.json index 1d5d57c..3ada416 100644 --- a/tsconfig.types.json +++ b/tsconfig.types.json @@ -0,0 +1,6 @@ +{ + "extends": ["./tsconfig.base.json", "@tsparticles/tsconfig/tsconfig.types.json"], + "compilerOptions": { + "outDir": "./dist/types" + } +} diff --git a/tsconfig.umd.json b/tsconfig.umd.json index 715f98d..32c9773 100644 --- a/tsconfig.umd.json +++ b/tsconfig.umd.json @@ -0,0 +1,6 @@ +{ + "extends": ["./tsconfig.base.json", "@tsparticles/tsconfig/tsconfig.umd.json"], + "compilerOptions": { + "outDir": "./dist/umd" + } +} diff --git a/typedoc.json b/typedoc.json index cfcf141..8bea8fb 100644 --- a/typedoc.json +++ b/typedoc.json @@ -0,0 +1,15 @@ +{ + "includes": "./markdown", + "entryPoints": [ + "./src/" + ], + "entryPointStrategy": "expand", + "name": "tsParticles Infinity Shape", + "includeVersion": true, + "hideGenerator": true, + "out": "./docs", + "validation": { + "invalidLink": true, + "notDocumented": true + } +} diff --git a/webpack.config.js b/webpack.config.js index 621ea13..78fd808 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -0,0 +1,4 @@ +const { loadParticlesShape } = require("@tsparticles/webpack-plugin"); +const version = require("./package.json").version; + +module.exports = loadParticlesShape({ moduleName: "infinity", shapeName: "Infinity", version, dir: __dirname }); diff --git a/shapeInfinity.ts b/shapeInfinity.ts index 22a34c2..8b7ccb7 100644 --- a/shapeInfinity.ts +++ b/shapeInfinity.ts @@ -0,0 +1,91 @@ +import type { ISourceOptions } from "@tsparticles/engine"; + +const options: ISourceOptions = { + key: "shapeInfinity", + name: "Shape Infinity", + particles: { + number: { + value: 80, + density: { + enable: true, + }, + }, + color: { + value: "transparent", + }, + stroke: { + color: { + value: "#ff0000", + animation: { + enable: true, + speed: 20, + sync: true, + }, + }, + width: 1, + }, + shape: { + type: "infinity", + }, + opacity: { + value: 0.5, + }, + size: { + value: { + 
min: 20, + max: 30, + }, + }, + move: { + enable: true, + speed: 6, + }, + rotate: { + animation: { + enable: true, + speed: 20, + sync: false, + }, + }, + }, + interactivity: { + events: { + onHover: { + enable: true, + mode: "repulse", + }, + onClick: { + enable: true, + mode: "push", + }, + }, + modes: { + grab: { + distance: 400, + links: { + opacity: 1, + }, + }, + bubble: { + distance: 400, + size: 40, + duration: 2, + opacity: 0.8, + }, + repulse: { + distance: 200, + }, + push: { + quantity: 4, + }, + remove: { + quantity: 2, + }, + }, + }, + background: { + color: "#000000", + }, +}; + +export default options;
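The drawing math in `Utils.ts` above builds the infinity symbol from two mirrored cubic Bézier loops that start and end at the particle centre, with control points offset by `radius * 0.55` horizontally and by `radius` vertically. Here is a minimal standalone sketch of the same curve math, assuming a browser context and a `<canvas id="demo">` element (both made up here, outside the tsParticles engine):

```ts
// Standalone sketch, not part of the commit: replicates the two
// bezierCurveTo calls from drawInfinity on a plain canvas so the geometry
// can be inspected in isolation. The canvas id and coordinates are assumptions.
const canvas = document.querySelector<HTMLCanvasElement>("#demo")!;
const context = canvas.getContext("2d")!;

function drawInfinityAt(ctx: CanvasRenderingContext2D, cx: number, cy: number, radius: number): void {
  const loopControl = radius * 0.55; // same factor as loopSizeFactor above

  ctx.save();
  ctx.translate(cx, cy); // drawInfinity draws around the origin, so shift it
  ctx.beginPath();
  // right loop: control points above and below the centre, offset to the right
  ctx.moveTo(0, 0);
  ctx.bezierCurveTo(loopControl, -radius, loopControl, radius, 0, 0);
  // left loop: mirrored control points, offset to the left
  ctx.moveTo(0, 0);
  ctx.bezierCurveTo(-loopControl, -radius, -loopControl, radius, 0, 0);
  ctx.stroke();
  ctx.restore();
}

drawInfinityAt(context, 100, 100, 40);
```

Inside tsParticles itself the same drawing is wired up through `InfinityDrawer` (whose `validTypes` is `["infinity"]`), so after calling `loadInfinityShape(tsParticles)` the shape becomes available as `particles.shape.type: "infinity"`, as the README above describes.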
ci: remove cache-virtualenv id
9534a9612af0fd9bb8e5c459a25f8202d4623ee4
ci
https://github.com/rohankumardubey/ibis/commit/9534a9612af0fd9bb8e5c459a25f8202d4623ee4
remove cache-virtualenv id
diff --git a/check-setup-py.yml b/check-setup-py.yml index 2197f35..9b5ad93 100644 --- a/check-setup-py.yml +++ b/check-setup-py.yml @@ -72,7 +72,6 @@ jobs: run: poetry export --dev --extras all --without-hashes > requirements.txt - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: requirements.txt custom_cache_key_element: check-setuptools-install-${{ steps.install_python.outputs.python-version }} diff --git a/ibis-backends.yml b/ibis-backends.yml index 8c5172b..7d61add 100644 --- a/ibis-backends.yml +++ b/ibis-backends.yml @@ -77,7 +77,6 @@ jobs: python-version: ${{ matrix.python-version }} - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: poetry.lock custom_cache_key_element: ${{ matrix.backend.name }}-${{ matrix.backend.deps }}-${{ steps.install_python.outputs.python-version }} @@ -156,7 +155,6 @@ jobs: run: sudo apt-get install -qq -y build-essential libgeos-dev python-dev - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: poetry.lock custom_cache_key_element: postgres-geospatial-${{ join(matrix.deps, '-') }}-${{ steps.install_python.outputs.python-version }} @@ -239,7 +237,6 @@ jobs: java-version: ${{ matrix.pyspark.jdk }} - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: poetry.lock custom_cache_key_element: pyspark-${{ matrix.pyspark-version }}-${{ matrix.pyspark.jdk }}-${{ steps.install_python.outputs.python-version }} @@ -349,7 +346,6 @@ jobs: run: sudo apt-get install -qq -y build-essential cmake krb5-config python-dev libkrb5-dev libboost-all-dev - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: poetry.lock custom_cache_key_element: impala-${{ steps.install_python.outputs.python-version }} @@ -424,7 +420,6 @@ jobs: python-version: ${{ matrix.python-version }} - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: poetry.lock custom_cache_key_element: ${{ matrix.backend.name }}-${{ steps.install_python.outputs.python-version }} diff --git a/ibis-main.yml b/ibis-main.yml index 9899428..975107a 100644 --- a/ibis-main.yml +++ b/ibis-main.yml @@ -109,7 +109,6 @@ jobs: python-version: ${{ matrix.python-version }} - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: poetry.lock custom_cache_key_element: no-backends-${{ steps.install_python.outputs.python-version }} @@ -157,7 +156,6 @@ jobs: run: sudo apt-get install -qq -y build-essential cmake krb5-config python-dev libkrb5-dev libboost-all-dev - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: poetry.lock custom_cache_key_element: benchmarks-${{ steps.install_python.outputs.python-version }} @@ -203,7 +201,6 @@ jobs: python-version: ${{ matrix.python-version }} - uses: syphar/restore-virtualenv@v1 - id: cache-virtualenv with: requirement_files: ibis/poetry.lock custom_cache_key_element: docs-${{ steps.install_python.outputs.python-version }}
fix: add "react-spring/addons" module. This just forwards the "@react-spring/addons" package.
e3efb341a92bc1501e1b851281a4900c76d10647
fix
https://github.com/pmndrs/react-spring/commit/e3efb341a92bc1501e1b851281a4900c76d10647
add "react-spring/addons" module. This just forwards the "@react-spring/addons" package.
diff --git a/.size-snapshot.json b/.size-snapshot.json index cd006dd..63bd895 100644 --- a/.size-snapshot.json +++ b/.size-snapshot.json @@ -93,5 +93,24 @@ "bundled": 276, "minified": 223, "gzipped": 177 + }, + "dist/addons.js": { + "bundled": 37, + "minified": 34, + "gzipped": 54, + "treeshaked": { + "rollup": { + "code": 29, + "import_statements": 29 + }, + "webpack": { + "code": 1048 + } + } + }, + "dist/addons.cjs.js": { + "bundled": 288, + "minified": 235, + "gzipped": 179 } } diff --git a/package.json b/package.json index 5a076b8..3a43f1d 100644 --- a/package.json +++ b/package.json @@ -4,6 +4,7 @@ "private": true, "main": "src/web.ts", "dependencies": { + "@react-spring/addons": "link:../addons", "@react-spring/core": "link:../core", "@react-spring/konva": "link:../konva", "@react-spring/native": "link:../native", diff --git a/addons.ts b/addons.ts index 9cd27d0..3d67821 100644 --- a/addons.ts +++ b/addons.ts @@ -0,0 +1 @@ +export * from '@react-spring/addons'
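The whole change is the one-line `addons.ts` barrel above plus the workspace link in `package.json`, so consumers can reach the addons through the umbrella package instead of installing the scoped one directly. A rough sketch of the consuming side, assuming the package exposes the new `react-spring/addons` entry (the concrete export names come from `@react-spring/addons` and are not shown in this diff):

```ts
// Hypothetical consumer code: with the forwarding module in place, anything
// re-exported by `export * from '@react-spring/addons'` is reachable from the
// umbrella package's "addons" entry point.
import * as addons from "react-spring/addons";

// Inspect what the addons entry re-exports (names depend on the scoped
// package and are not listed in the diff above).
console.log(Object.keys(addons));
```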
chore(conftest.py): remove breakpoint
772454164c92d8e48c7324b3a1e68048898afede
chore
https://github.com/rohankumardubey/ibis/commit/772454164c92d8e48c7324b3a1e68048898afede
remove breakpoint
diff --git a/conftest.py b/conftest.py index 23329a1..a25906b 100644 --- a/conftest.py +++ b/conftest.py @@ -120,7 +120,6 @@ def pytest_runtest_call(item): if key.endswith("backend") ] if len(backend) > 1: - breakpoint() raise ValueError( f"test {item.originalname} was supplied with multiple backend " f"objects simultaneously. This is likely due to a leaky fixture."
refactor: implement joined strategy for m:1, 1:1 and m:n relations. Related: #440
acebbdbb7c32a44037163e3df7ac90d64500a1e9
refactor
https://github.com/mikro-orm/mikro-orm/commit/acebbdbb7c32a44037163e3df7ac90d64500a1e9
implement joined strategy for m:1, 1:1 and m:n relations. Related: #440
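The gist of the change, following the test updates further down in the diff: besides 1:m, the joined strategy can now populate m:1, 1:1 and m:n relations through a single left-joined query. A sketch of how that is requested, reusing the `Car2`/`Book2` fixtures and an already-initialised `orm` from those tests, so this is illustrative rather than a self-contained program:

```ts
// Sketch of the per-relation joined strategy exercised in the tests below.
// The entities (Car2, Book2) and the initialised `orm` come from the test
// fixtures in this repository; only the populate options are the point here.
import { LoadStrategy, MikroORM } from '@mikro-orm/core';

async function joinedExamples(orm: MikroORM): Promise<void> {
  // m:n — load a car together with its users via one left-joined select
  const car = await orm.em.findOneOrFail('Car2', { name: 'n', year: 1 }, {
    populate: { users: LoadStrategy.JOINED },
  });

  // m:1 — load books together with their author in the same query
  const books = await orm.em.find('Book2', {}, {
    populate: { author: LoadStrategy.JOINED },
  });

  console.log(car, books.length);
}
```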
diff --git a/EntityManager.ts b/EntityManager.ts index eedf714..c42ec1d 100644 --- a/EntityManager.ts +++ b/EntityManager.ts @@ -307,7 +307,7 @@ export class EntityManager<D extends IDatabaseDriver = IDatabaseDriver> { // add to IM immediately - needed for self-references that can be part of `data` (and do not trigger cascade merge) this.getUnitOfWork().merge(entity, [entity]); - EntityAssigner.assign(entity, data as EntityData<T>, true); + EntityAssigner.assign(entity, data as EntityData<T>, { onlyProperties: true, merge: true }); this.getUnitOfWork().merge(entity); // add to IM again so we have correct payload saved to change set computation return entity; diff --git a/DatabaseDriver.ts b/DatabaseDriver.ts index b2992ad..e16506e 100644 --- a/DatabaseDriver.ts +++ b/DatabaseDriver.ts @@ -59,7 +59,7 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD const ret = Object.assign({}, result) as any; Object.values(meta.properties).forEach(prop => { - if (prop.fieldNames && prop.fieldNames.length > 1 && prop.fieldNames.every(joinColumn => joinColumn in ret)) { + if (prop.fieldNames && prop.fieldNames.length > 1 && prop.fieldNames.every(joinColumn => Utils.isDefined(ret[joinColumn], true))) { const temp: any[] = []; prop.fieldNames.forEach(joinColumn => { temp.push(ret[joinColumn]); diff --git a/IDatabaseDriver.ts b/IDatabaseDriver.ts index b3b8c01..8e10bf9 100644 --- a/IDatabaseDriver.ts +++ b/IDatabaseDriver.ts @@ -100,7 +100,7 @@ export interface FindOneOptions<T> { flags?: QueryFlag[]; } -export type PopulateChildren<T> = { [K in keyof T]?: PopulateMap<ReferencedEntity<T> | CollectionItem<T[K]>> }; +export type PopulateChildren<T> = { [K in keyof T]?: PopulateMap<ReferencedEntity<T[K]> | CollectionItem<T[K]>> }; export type PopulateMap<T> = boolean | LoadStrategy | PopulateChildren<T> | [LoadStrategy, PopulateChildren<T>]; export type Populate<T> = (string | PopulateOptions<T>)[] | boolean | PopulateMap<T>; diff --git a/EntityAssigner.ts b/EntityAssigner.ts index 6101394..440827d 100644 --- a/EntityAssigner.ts +++ b/EntityAssigner.ts @@ -38,11 +38,11 @@ export class EntityAssigner { } if ([ReferenceType.MANY_TO_ONE, ReferenceType.ONE_TO_ONE].includes(props[prop]?.reference) && Utils.isDefined(value, true) && EntityAssigner.validateEM(em)) { - return EntityAssigner.assignReference<T>(entity, value, props[prop], em!); + return EntityAssigner.assignReference<T>(entity, value, props[prop], em!, options); } if (props[prop] && Utils.isCollection(entity[prop as keyof T], props[prop]) && Array.isArray(value) && EntityAssigner.validateEM(em)) { - return EntityAssigner.assignCollection<T>(entity, entity[prop as keyof T] as unknown as Collection<AnyEntity>, value, props[prop], em!); + return EntityAssigner.assignCollection<T>(entity, entity[prop as keyof T] as unknown as Collection<AnyEntity>, value, props[prop], em!, options); } if (props[prop]?.reference === ReferenceType.SCALAR && SCALAR_TYPES.includes(props[prop].type) && (props[prop].setter || !props[prop].getter)) { @@ -96,7 +96,7 @@ export class EntityAssigner { return true; } - private static assignReference<T extends AnyEntity<T>>(entity: T, value: any, prop: EntityProperty, em: EntityManager): void { + private static assignReference<T extends AnyEntity<T>>(entity: T, value: any, prop: EntityProperty, em: EntityManager, options: AssignOptions): void { let valid = false; if (Utils.isEntity(value, true)) { @@ -105,6 +105,9 @@ export class EntityAssigner { } else if (Utils.isPrimaryKey(value, true)) { 
entity[prop.name] = Utils.wrapReference(em.getReference<T>(prop.type, value), prop); valid = true; + } else if (Utils.isObject<T[keyof T]>(value) && options.merge) { + entity[prop.name] = Utils.wrapReference(em.merge(prop.type, value), prop); + valid = true; } else if (Utils.isObject<T[keyof T]>(value)) { entity[prop.name] = Utils.wrapReference(em.create(prop.type, value), prop); valid = true; @@ -118,20 +121,20 @@ export class EntityAssigner { EntityAssigner.autoWireOneToOne(prop, entity); } - private static assignCollection<T extends AnyEntity<T>, U extends AnyEntity<U> = AnyEntity>(entity: T, collection: Collection<U>, value: any[], prop: EntityProperty, em: EntityManager): void { + private static assignCollection<T extends AnyEntity<T>, U extends AnyEntity<U> = AnyEntity>(entity: T, collection: Collection<U>, value: any[], prop: EntityProperty, em: EntityManager, options: AssignOptions): void { const invalid: any[] = []; - const items = value.map((item: any) => this.createCollectionItem<U>(item, em, prop, invalid)); + const items = value.map((item: any) => this.createCollectionItem<U>(item, em, prop, invalid, options)); if (invalid.length > 0) { const name = entity.constructor.name; throw new Error(`Invalid collection values provided for '${name}.${prop.name}' in ${name}.assign(): ${inspect(invalid)}`); } - collection.hydrate(items, true, false); + collection.hydrate(items, true, !!options.merge); collection.setDirty(); } - private static createCollectionItem<T extends AnyEntity<T>>(item: any, em: EntityManager, prop: EntityProperty, invalid: any[]): T { + private static createCollectionItem<T extends AnyEntity<T>>(item: any, em: EntityManager, prop: EntityProperty, invalid: any[], options: AssignOptions): T { if (Utils.isEntity<T>(item)) { return item; } @@ -140,6 +143,10 @@ export class EntityAssigner { return em.getReference(prop.type, item); } + if (Utils.isObject<T>(item) && options.merge) { + return em.merge<T>(prop.type, item); + } + if (Utils.isObject<T>(item)) { return em.create<T>(prop.type, item); } @@ -154,5 +161,6 @@ export class EntityAssigner { export interface AssignOptions { onlyProperties?: boolean; mergeObjects?: boolean; + merge?: boolean; em?: EntityManager; } diff --git a/typings.ts b/typings.ts index d790dc9..7c77105 100644 --- a/typings.ts +++ b/typings.ts @@ -55,8 +55,8 @@ export type OperatorMap<T> = { }; export type StringProp<T> = T extends string ? string | RegExp : never; export type EntityOrPrimary<T> = true extends IsScalar<T> ? never : DeepPartialEntity<ReferencedEntity<T>> | PartialEntity<ReferencedEntity<T>> | Primary<ReferencedEntity<T>> | ReferencedEntity<T>; -export type CollectionItem<T> = T extends Collection<infer K> ? EntityOrPrimary<K> : never; -export type ReferencedEntity<T> = T extends Reference<infer K> ? K : T; +export type CollectionItem<T extends AnyEntity<T>> = T extends Collection<infer K> ? EntityOrPrimary<K> : never; +export type ReferencedEntity<T extends AnyEntity<T>> = T extends Reference<infer K> ? K : T; export type FilterValue<T> = T | OperatorMap<T> | StringProp<T> | OneOrArray<CollectionItem<T> | EntityOrPrimary<T>> | null; export type Query<T> = true extends IsEntity<T> ? 
{ [K in keyof T]?: Query<ReferencedEntity<T[K]>> | FilterValue<ReferencedEntity<T[K]>> | null } | FilterValue<ReferencedEntity<T>> diff --git a/AbstractSqlDriver.ts b/AbstractSqlDriver.ts index 7955c3e..e2deeb2 100644 --- a/AbstractSqlDriver.ts +++ b/AbstractSqlDriver.ts @@ -33,9 +33,9 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra options = { populate: [], orderBy: {}, ...(options || {}) }; const meta = this.metadata.get(entityName); const populate = this.autoJoinOneToOneOwner(meta, options.populate as PopulateOptions<T>[]); - const joinedLoads = this.joinedLoads(meta, populate); + const joinedProps = this.joinedProps(meta, populate); const qb = this.createQueryBuilder(entityName, ctx, !!ctx); - const fields = this.buildFields(meta, populate, joinedLoads, qb, options.fields); + const fields = this.buildFields(meta, populate, joinedProps, qb, options.fields); qb.select(fields) .populate(populate) @@ -52,8 +52,8 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra Utils.asArray(options.flags).forEach(flag => qb.setFlag(flag)); const result = await this.rethrow(qb.execute('all')); - if (joinedLoads.length > 0) { - return this.mergeJoinedResult(result, meta, joinedLoads); + if (joinedProps.length > 0) { + return this.mergeJoinedResult(result, meta, joinedProps); } return result; @@ -69,15 +69,15 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra where = { [pk]: where } as FilterQuery<T>; } - const joinedLoads = this.joinedLoads(meta, populate); + const joinedProps = this.joinedProps(meta, populate); const qb = this.createQueryBuilder(entityName, ctx, !!ctx); - const fields = this.buildFields(meta, populate, joinedLoads, qb, options.fields); + const fields = this.buildFields(meta, populate, joinedProps, qb, options.fields); - if (joinedLoads.length === 0) { + if (joinedProps.length === 0) { qb.limit(1); } - const method = joinedLoads.length > 0 ? 'all' : 'get'; + const method = joinedProps.length > 0 ? 
'all' : 'get'; qb.select(fields) .populate(populate) @@ -92,7 +92,7 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra const result = await this.rethrow(qb.execute(method)); if (Array.isArray(result)) { - return this.mergeSingleJoinedResult(result, joinedLoads) as unknown as T; + return this.mergeSingleJoinedResult(result, joinedProps) as unknown as T; } return result; @@ -105,38 +105,43 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra return null; } - const joinedLoads = this.joinedLoads(meta, populate); + const joinedProps = this.joinedProps(meta, populate); - joinedLoads.forEach(relationName => { - const relation = meta.properties[relationName]; - const properties = this.metadata.get(relation.type).properties; - const found = Object.entries(aliasMap).find(([,r]) => r === relation.type)!; + joinedProps.forEach(relation => { + const meta2 = this.metadata.get(relation.type); + // FIXME we should lookup the alias from join definition, based on path (author.favouriteBook), rather than just the type (Book) + const found = Object.entries(aliasMap).find(([, r]) => r === relation.type)!; const relationAlias = found[0]; - - ret[relationName] = ret[relationName] || []; + ret[relation.name] = ret[relation.name] || []; const relationPojo = {}; - let relationExists = true; - Object.values(properties) - .filter(({ reference }) => reference === ReferenceType.SCALAR) - .forEach(prop => { - const alias = `${relationAlias}_${prop.fieldNames[0]}`; - const value = ret[alias]; + // If the primary key value for the relation is null, we know we haven't joined to anything + // and therefore we don't return any record (since all values would be null) + const hasPK = meta2.primaryKeys.every(pk => meta2.properties[pk].fieldNames.every(name => { + return Utils.isDefined(ret[`${relationAlias}_${name}`], true); + })); - // If the primary key value for the relation is null, we know we haven't joined to anything - // and therefore we don't return any record (since all values would be null) - if (prop.primary && value === null) { - relationExists = false; - } + if (!hasPK) { + return; + } - if (alias in ret) { + Object.values(meta2.properties) + .filter(prop => this.shouldHaveColumn(prop, populate)) + .forEach(prop => { + if (prop.fieldNames.length > 1) { // composite keys + relationPojo[prop.name] = prop.fieldNames.map(name => ret[`${relationAlias}_${name}`]); + prop.fieldNames.map(name => delete ret[`${relationAlias}_${name}`]); + } else { + const alias = `${relationAlias}_${prop.fieldNames[0]}`; relationPojo[prop.name] = ret[alias]; delete ret[alias]; } }); - if (relationExists) { - ret[relationName].push(relationPojo); + if ([ReferenceType.MANY_TO_MANY, ReferenceType.ONE_TO_MANY].includes(relation.reference)) { + ret[relation.name].push(relationPojo); + } else { + ret[relation.name] = relationPojo; } }); @@ -284,30 +289,34 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra return [...populate, ...toPopulate]; } - protected joinedLoads<T>(meta: EntityMetadata, populate: PopulateOptions<T>[]): string[] { + protected joinedProps<T>(meta: EntityMetadata, populate: PopulateOptions<T>[]): EntityProperty[] { return populate .filter(({ field, strategy }) => (strategy || meta.properties[field]?.strategy) === LoadStrategy.JOINED) - .map(({ field }) => field); + .map(({ field }) => meta.properties[field]); } - protected mergeSingleJoinedResult<T extends AnyEntity<T>>(rawResults: Dictionary[], joinedLoads: string[]): T | null { 
+ protected mergeSingleJoinedResult<T extends AnyEntity<T>>(rawResults: Dictionary[], joinedProps: EntityProperty<T>[]): T | null { if (rawResults.length === 0) { return null; } + // TODO we might want to optimize this bit, as we are creating a lot of new arrays via destructing (so might be memory heavy) return rawResults.reduce((result, value) => { - joinedLoads.forEach(relationName => { - const relation = value[relationName]; - const existing = result[relationName] || []; - - result[relationName] = [...existing, ...relation]; + joinedProps.forEach(prop => { + if ([ReferenceType.MANY_TO_MANY, ReferenceType.ONE_TO_MANY].includes(prop.reference)) { + const relation = value[prop.name]; + const existing = result[prop.name] || []; + result[prop.name] = [...existing, ...relation]; + } else { + result[prop.name] = value[prop.name]; + } }); return { ...value, ...result }; }, {}) as unknown as T; } - protected mergeJoinedResult<T extends AnyEntity<T>>(rawResults: Dictionary[], meta: EntityMetadata<T>, joinedLoads: string[]): T[] { + protected mergeJoinedResult<T extends AnyEntity<T>>(rawResults: Dictionary[], meta: EntityMetadata<T>, joinedProps: EntityProperty<T>[]): T[] { // group by the root entity primary key first const res = rawResults.reduce((result, item) => { const pk = Utils.getCompositeKeyHash<T>(item as T, meta); @@ -317,14 +326,14 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra return result; }, {}) as Dictionary<any[]>; - return Object.values(res).map((rows: Dictionary[]) => this.mergeSingleJoinedResult(rows, joinedLoads)) as T[]; + return Object.values(res).map((rows: Dictionary[]) => this.mergeSingleJoinedResult(rows, joinedProps)) as T[]; } getRefForField(field: string, schema: string, alias: string) { return this.connection.getKnex().ref(field).withSchema(schema).as(alias); } - protected getSelectForJoinedLoad<T>(qb: QueryBuilder, meta: EntityMetadata, joinedLoads: string[], populate: PopulateOptions<T>[]): Field[] { + protected getSelectForJoinedLoad<T>(qb: QueryBuilder, meta: EntityMetadata, joinedProps: EntityProperty<T>[], populate: PopulateOptions<T>[]): Field[] { const selects: Field[] = []; // alias all fields in the primary table @@ -332,20 +341,18 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra .filter(prop => this.shouldHaveColumn(prop, populate)) .forEach(prop => selects.push(...prop.fieldNames)); - let previousRelationName: string; - joinedLoads.forEach(relationName => { - previousRelationName = relationName; - const prop = meta.properties[relationName]; - const meta2 = this.metadata.get(prop.type); + joinedProps.forEach(relation => { + const meta2 = this.metadata.get(relation.type); + const tableAlias = qb.getNextAlias(relation.name); + qb.join(relation.name, tableAlias, {}, 'leftJoin', `${meta.name}.${relation.name}`); // FIXME nesting in path param (recursive lookup) + const properties = Object.values(meta2.properties).filter(prop => { /* istanbul ignore next */ - return this.shouldHaveColumn(prop, populate.find(p => p.field === relationName)?.children || []); + return this.shouldHaveColumn(prop, populate.find(p => p.field === relation.name)?.children || []); }); - for (const prop of properties) { - const tableAlias = qb.getNextAlias(relationName, previousRelationName !== relationName); - selects.push(...prop.fieldNames.map(fieldName => this.getRefForField(fieldName, tableAlias, `${tableAlias}_${fieldName}`))); - qb.join(relationName, tableAlias, {}, 'leftJoin'); + for (const prop2 of 
properties) { + selects.push(...prop2.fieldNames.map(fieldName => this.getRefForField(fieldName, tableAlias, `${tableAlias}_${fieldName}`))); } }); @@ -431,15 +438,15 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra await this.rethrow(qb.execute()); } - protected buildFields<T>(meta: EntityMetadata<T>, populate: PopulateOptions<T>[], joinedLoads: string[], qb: QueryBuilder, fields?: Field[]): Field[] { + protected buildFields<T>(meta: EntityMetadata<T>, populate: PopulateOptions<T>[], joinedProps: EntityProperty<T>[], qb: QueryBuilder, fields?: Field[]): Field[] { const props = Object.values<EntityProperty<T>>(meta.properties).filter(prop => this.shouldHaveColumn(prop, populate)); const lazyProps = Object.values<EntityProperty<T>>(meta.properties).filter(prop => prop.lazy && !populate.some(p => p.field === prop.name || p.all)); const hasExplicitFields = !!fields; if (fields) { fields.unshift(...meta.primaryKeys.filter(pk => !fields!.includes(pk))); - } else if (joinedLoads.length > 0) { - fields = this.getSelectForJoinedLoad(qb, meta, joinedLoads, populate); + } else if (joinedProps.length > 0) { + fields = this.getSelectForJoinedLoad(qb, meta, joinedProps, populate); } else if (lazyProps.length > 0) { fields = Utils.flatten(props.filter(p => !lazyProps.includes(p)).map(p => p.fieldNames)); } diff --git a/QueryBuilder.ts b/QueryBuilder.ts index 44dcf02..fd4a3aa 100644 --- a/QueryBuilder.ts +++ b/QueryBuilder.ts @@ -261,9 +261,9 @@ export class QueryBuilder<T extends AnyEntity<T> = AnyEntity> { return found ? found[0] : undefined; } - getNextAlias(prefix = 'e', increment = true): string { + getNextAlias(prefix = 'e'): string { // Take only the first letter of the prefix to keep character counts down since some engines have character limits - return `${prefix.charAt(0)}${increment ? 
this.aliasCounter++ : this.aliasCounter}`; + return `${prefix.charAt(0).toLowerCase()}${this.aliasCounter++}`; } async execute<U = any>(method: 'all' | 'get' | 'run' = 'all', mapResults = true): Promise<U> { diff --git a/EntityGenerator.test.ts.snap b/EntityGenerator.test.ts.snap index 0b86b9c..333c664 100644 --- a/EntityGenerator.test.ts.snap +++ b/EntityGenerator.test.ts.snap @@ -430,7 +430,8 @@ export class Test2 { } ", - "import { Entity, Index, PrimaryKey, Property } from 'mikro-orm'; + "import { Entity, Index, OneToOne, PrimaryKey, Property } from 'mikro-orm'; +import { Car2 } from './Car2'; @Entity() export class User2 { @@ -446,6 +447,9 @@ export class User2 { @Property({ nullable: true }) foo?: number; + @OneToOne({ entity: () => Car2, nullable: true, index: 'user2_favourite_car_name_favourite_car_year_index', unique: 'user2_favourite_car_year_unique' }) + favouriteCar?: Car2; + } ", "import { Cascade, Entity, ManyToOne } from 'mikro-orm'; diff --git a/SchemaGenerator.test.ts.snap b/SchemaGenerator.test.ts.snap index 163a2ff..e290655 100644 --- a/SchemaGenerator.test.ts.snap +++ b/SchemaGenerator.test.ts.snap @@ -60,10 +60,13 @@ alter table \\`car2\\` add primary key \\`car2_pkey\\`(\\`name\\`, \\`year\\`); create table \\`car_owner2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`name\\` varchar(255) not null, \\`car_name\\` varchar(100) not null, \\`car_year\\` int(11) unsigned not null) default character set utf8mb4 engine = InnoDB; alter table \\`car_owner2\\` add index \\`car_owner2_car_name_car_year_index\\`(\\`car_name\\`, \\`car_year\\`); -create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null) default character set utf8mb4 engine = InnoDB; +create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null, \\`favourite_car_name\\` varchar(100) null, \\`favourite_car_year\\` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table \\`user2\\` add index \\`user2_first_name_index\\`(\\`first_name\\`); alter table \\`user2\\` add index \\`user2_last_name_index\\`(\\`last_name\\`); +alter table \\`user2\\` add unique \\`user2_favourite_car_name_unique\\`(\\`favourite_car_name\\`); +alter table \\`user2\\` add unique \\`user2_favourite_car_year_unique\\`(\\`favourite_car_year\\`); alter table \\`user2\\` add primary key \\`user2_pkey\\`(\\`first_name\\`, \\`last_name\\`); +alter table \\`user2\\` add index \\`user2_favourite_car_name_favourite_car_year_index\\`(\\`favourite_car_name\\`, \\`favourite_car_year\\`); create table \\`base_user2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`type\\` enum('employee', 'manager', 'owner') not null, \\`employee_prop\\` int(11) null, \\`manager_prop\\` varchar(255) null, \\`owner_prop\\` varchar(255) null, \\`favourite_employee_id\\` int(11) unsigned null, \\`favourite_manager_id\\` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table \\`base_user2\\` add index \\`base_user2_type_index\\`(\\`type\\`); @@ -127,6 +130,9 @@ alter table \\`configuration2\\` add constraint \\`configuration2_test_id_foreign\\` alter table \\`car_owner2\\` add constraint \\`car_owner2_car_name_foreign\\` foreign key (\\`car_name\\`) references \\`car2\\` (\\`name\\`) on update cascade; alter table \\`car_owner2\\` add constraint \\`car_owner2_car_year_foreign\\` 
foreign key (\\`car_year\\`) references \\`car2\\` (\\`year\\`) on update cascade; +alter table \\`user2\\` add constraint \\`user2_favourite_car_name_foreign\\` foreign key (\\`favourite_car_name\\`) references \\`car2\\` (\\`name\\`) on update cascade on delete set null; +alter table \\`user2\\` add constraint \\`user2_favourite_car_year_foreign\\` foreign key (\\`favourite_car_year\\`) references \\`car2\\` (\\`year\\`) on update cascade on delete set null; + alter table \\`base_user2\\` add constraint \\`base_user2_favourite_employee_id_foreign\\` foreign key (\\`favourite_employee_id\\`) references \\`base_user2\\` (\\`id\\`) on update cascade on delete set null; alter table \\`base_user2\\` add constraint \\`base_user2_favourite_manager_id_foreign\\` foreign key (\\`favourite_manager_id\\`) references \\`base_user2\\` (\\`id\\`) on update cascade on delete set null; @@ -272,10 +278,13 @@ alter table \\`car2\\` add primary key \\`car2_pkey\\`(\\`name\\`, \\`year\\`); create table \\`car_owner2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`name\\` varchar(255) not null, \\`car_name\\` varchar(100) not null, \\`car_year\\` int(11) unsigned not null) default character set utf8mb4 engine = InnoDB; alter table \\`car_owner2\\` add index \\`car_owner2_car_name_car_year_index\\`(\\`car_name\\`, \\`car_year\\`); -create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null) default character set utf8mb4 engine = InnoDB; +create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null, \\`favourite_car_name\\` varchar(100) null, \\`favourite_car_year\\` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table \\`user2\\` add index \\`user2_first_name_index\\`(\\`first_name\\`); alter table \\`user2\\` add index \\`user2_last_name_index\\`(\\`last_name\\`); +alter table \\`user2\\` add unique \\`user2_favourite_car_name_unique\\`(\\`favourite_car_name\\`); +alter table \\`user2\\` add unique \\`user2_favourite_car_year_unique\\`(\\`favourite_car_year\\`); alter table \\`user2\\` add primary key \\`user2_pkey\\`(\\`first_name\\`, \\`last_name\\`); +alter table \\`user2\\` add index \\`user2_favourite_car_name_favourite_car_year_index\\`(\\`favourite_car_name\\`, \\`favourite_car_year\\`); create table \\`base_user2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`type\\` enum('employee', 'manager', 'owner') not null, \\`employee_prop\\` int(11) null, \\`manager_prop\\` varchar(255) null, \\`owner_prop\\` varchar(255) null, \\`favourite_employee_id\\` int(11) unsigned null, \\`favourite_manager_id\\` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table \\`base_user2\\` add index \\`base_user2_type_index\\`(\\`type\\`); @@ -339,6 +348,9 @@ alter table \\`configuration2\\` add constraint \\`configuration2_test_id_foreign\\` alter table \\`car_owner2\\` add constraint \\`car_owner2_car_name_foreign\\` foreign key (\\`car_name\\`) references \\`car2\\` (\\`name\\`) on update cascade; alter table \\`car_owner2\\` add constraint \\`car_owner2_car_year_foreign\\` foreign key (\\`car_year\\`) references \\`car2\\` (\\`year\\`) on update cascade; +alter table \\`user2\\` add constraint \\`user2_favourite_car_name_foreign\\` foreign key (\\`favourite_car_name\\`) references \\`car2\\` (\\`name\\`) on update 
cascade on delete set null; +alter table \\`user2\\` add constraint \\`user2_favourite_car_year_foreign\\` foreign key (\\`favourite_car_year\\`) references \\`car2\\` (\\`year\\`) on update cascade on delete set null; + alter table \\`base_user2\\` add constraint \\`base_user2_favourite_employee_id_foreign\\` foreign key (\\`favourite_employee_id\\`) references \\`base_user2\\` (\\`id\\`) on update cascade on delete set null; alter table \\`base_user2\\` add constraint \\`base_user2_favourite_manager_id_foreign\\` foreign key (\\`favourite_manager_id\\`) references \\`base_user2\\` (\\`id\\`) on update cascade on delete set null; @@ -955,10 +967,13 @@ alter table \\`car2\\` add primary key \\`car2_pkey\\`(\\`name\\`, \\`year\\`); create table \\`car_owner2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`name\\` varchar(255) not null, \\`car_name\\` varchar(100) not null, \\`car_year\\` int(11) unsigned not null) default character set utf8mb4 engine = InnoDB; alter table \\`car_owner2\\` add index \\`car_owner2_car_name_car_year_index\\`(\\`car_name\\`, \\`car_year\\`); -create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null) default character set utf8mb4 engine = InnoDB; +create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null, \\`favourite_car_name\\` varchar(100) null, \\`favourite_car_year\\` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table \\`user2\\` add index \\`user2_first_name_index\\`(\\`first_name\\`); alter table \\`user2\\` add index \\`user2_last_name_index\\`(\\`last_name\\`); +alter table \\`user2\\` add unique \\`user2_favourite_car_name_unique\\`(\\`favourite_car_name\\`); +alter table \\`user2\\` add unique \\`user2_favourite_car_year_unique\\`(\\`favourite_car_year\\`); alter table \\`user2\\` add primary key \\`user2_pkey\\`(\\`first_name\\`, \\`last_name\\`); +alter table \\`user2\\` add index \\`user2_favourite_car_name_favourite_car_year_index\\`(\\`favourite_car_name\\`, \\`favourite_car_year\\`); create table \\`base_user2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`type\\` enum('employee', 'manager', 'owner') not null, \\`employee_prop\\` int(11) null, \\`manager_prop\\` varchar(255) null, \\`owner_prop\\` varchar(255) null, \\`favourite_employee_id\\` int(11) unsigned null, \\`favourite_manager_id\\` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table \\`base_user2\\` add index \\`base_user2_type_index\\`(\\`type\\`); @@ -1022,6 +1037,9 @@ alter table \\`configuration2\\` add constraint \\`configuration2_test_id_foreign\\` alter table \\`car_owner2\\` add constraint \\`car_owner2_car_name_foreign\\` foreign key (\\`car_name\\`) references \\`car2\\` (\\`name\\`) on update cascade; alter table \\`car_owner2\\` add constraint \\`car_owner2_car_year_foreign\\` foreign key (\\`car_year\\`) references \\`car2\\` (\\`year\\`) on update cascade; +alter table \\`user2\\` add constraint \\`user2_favourite_car_name_foreign\\` foreign key (\\`favourite_car_name\\`) references \\`car2\\` (\\`name\\`) on update cascade on delete set null; +alter table \\`user2\\` add constraint \\`user2_favourite_car_year_foreign\\` foreign key (\\`favourite_car_year\\`) references \\`car2\\` (\\`year\\`) on update cascade on delete set null; + alter table 
\\`base_user2\\` add constraint \\`base_user2_favourite_employee_id_foreign\\` foreign key (\\`favourite_employee_id\\`) references \\`base_user2\\` (\\`id\\`) on update cascade on delete set null; alter table \\`base_user2\\` add constraint \\`base_user2_favourite_manager_id_foreign\\` foreign key (\\`favourite_manager_id\\`) references \\`base_user2\\` (\\`id\\`) on update cascade on delete set null; diff --git a/bootstrap.ts b/bootstrap.ts index 504adeb..e0ea7b2 100644 --- a/bootstrap.ts +++ b/bootstrap.ts @@ -174,10 +174,12 @@ export async function wipeDatabaseMySql(em: SqlEntityManager) { await em.createQueryBuilder(FooParam2).truncate().execute(); await em.createQueryBuilder(Configuration2).truncate().execute(); await em.createQueryBuilder(Car2).truncate().execute(); + await em.createQueryBuilder(User2).truncate().execute(); await em.createQueryBuilder(CarOwner2).truncate().execute(); await em.createQueryBuilder(BaseUser2).truncate().execute(); await em.createQueryBuilder('author2_following').truncate().execute(); await em.createQueryBuilder('book2_tags').truncate().execute(); + await em.createQueryBuilder('user2_cars').truncate().execute(); await em.createQueryBuilder('book_to_tag_unordered').truncate().execute(); await em.createQueryBuilder('publisher2_tests').truncate().execute(); await em.getConnection().execute('set foreign_key_checks = 1'); diff --git a/composite-keys.mysql.test.ts b/composite-keys.mysql.test.ts index 8624ec9..b29c393 100644 --- a/composite-keys.mysql.test.ts +++ b/composite-keys.mysql.test.ts @@ -1,5 +1,5 @@ -import { EntityFactory, Logger, MikroORM, wrap } from '@mikro-orm/core'; -import { MySqlDriver } from '@mikro-orm/mysql'; +import { LoadStrategy, Logger, MikroORM, wrap } from '@mikro-orm/core'; +import { AbstractSqlConnection, MySqlDriver } from '@mikro-orm/mysql'; import { Author2, Configuration2, FooBar2, FooBaz2, FooParam2, Test2, Address2, Car2, CarOwner2, User2, Sandwich } from './entities-sql'; import { initORMMySql, wipeDatabaseMySql } from './bootstrap'; @@ -133,6 +133,18 @@ describe('composite keys in mysql', () => { await orm.em.removeEntity(c2, true); const c3 = await orm.em.findOne(Car2, car); expect(c3).toBeNull(); + const user1 = new User2('f', 'l'); + const car11 = new Car2('n', 1, 1); + user1.cars.add(car11); + user1.favouriteCar = car11; + user1.foo = 42; + await orm.em.persistAndFlush(user1); + orm.em.clear(); + + const connMock = jest.spyOn(AbstractSqlConnection.prototype, 'execute'); + const cc = await orm.em.findOneOrFail(Car2, car11, { populate: { users: LoadStrategy.JOINED } }); + expect(cc.users[0].foo).toBe(42); + expect(connMock).toBeCalledTimes(1); }); test('composite entity in m:n relationship, both entities are composite', async () => { @@ -148,7 +160,7 @@ describe('composite keys in mysql', () => { await orm.em.persistAndFlush([user1, user2, user3]); orm.em.clear(); - const u1 = await orm.em.findOneOrFail(User2, user1, ['cars']); + const u1 = await orm.em.findOneOrFail(User2, user1, { populate: { cars: LoadStrategy.JOINED } }); expect(u1.cars.getItems()).toMatchObject([ { name: 'Audi A8', price: 100_000, year: 2011 }, { name: 'Audi A8', price: 200_000, year: 2013 }, @@ -156,6 +168,7 @@ describe('composite keys in mysql', () => { expect(wrap(u1).toJSON()).toEqual({ firstName: 'John', lastName: 'Doe 1', + favouriteCar: null, foo: null, cars: [ { name: 'Audi A8', price: 100_000, year: 2011 }, @@ -244,7 +257,7 @@ describe('composite keys in mysql', () => { test('composite key in em.create()', async () => { await 
orm.em.nativeInsert(Car2, { name: 'n4', year: 2000, price: 456 }); - const factory = new EntityFactory(orm.em.getUnitOfWork(), orm.em); + const factory = orm.em.getEntityFactory(); const c1 = new Car2('n1', 2000, 1); const c2 = { name: 'n3', year: 2000, price: 123 }; const c3 = ['n4', 2000]; // composite PK @@ -262,9 +275,9 @@ describe('composite keys in mysql', () => { Object.assign(orm.config, { logger }); await orm.em.persistAndFlush(u1); expect(mock.mock.calls[0][0]).toMatch('begin'); - expect(mock.mock.calls[1][0]).toMatch('insert into `user2` (`first_name`, `last_name`) values (?, ?)'); // u1 - expect(mock.mock.calls[2][0]).toMatch('insert into `car2` (`name`, `price`, `year`) values (?, ?, ?)'); // c1 - expect(mock.mock.calls[3][0]).toMatch('insert into `car2` (`name`, `price`, `year`) values (?, ?, ?)'); // c2 + expect(mock.mock.calls[1][0]).toMatch('insert into `car2` (`name`, `price`, `year`) values (?, ?, ?)'); // c1 + expect(mock.mock.calls[2][0]).toMatch('insert into `car2` (`name`, `price`, `year`) values (?, ?, ?)'); // c2 + expect(mock.mock.calls[3][0]).toMatch('insert into `user2` (`first_name`, `last_name`) values (?, ?)'); // u1 expect(mock.mock.calls[4][0]).toMatch('insert into `user2_cars` (`car2_name`, `car2_year`, `user2_first_name`, `user2_last_name`) values (?, ?, ?, ?), (?, ?, ?, ?), (?, ?, ?, ?)'); expect(mock.mock.calls[5][0]).toMatch('commit'); }); diff --git a/Car2.ts b/Car2.ts index d3a272c..db64d62 100644 --- a/Car2.ts +++ b/Car2.ts @@ -1,5 +1,6 @@ -import { Collection, Entity, Index, ManyToMany, PrimaryKey, PrimaryKeyType, Property } from '@mikro-orm/core'; +import { Collection, Entity, Index, ManyToMany, OneToOne, PrimaryKey, PrimaryKeyType, Property } from '@mikro-orm/core'; import { User2 } from './User2'; +import { Test2 } from './Test2'; @Entity() export class Car2 { diff --git a/FooParam2.ts b/FooParam2.ts index c78a7e5..6629489 100644 --- a/FooParam2.ts +++ b/FooParam2.ts @@ -9,7 +9,7 @@ export class FooParam2 { bar!: FooBar2; @ManyToOne(() => FooBaz2, { primary: true }) - baz!: FooBar2; + baz!: FooBaz2; @Property() value: string; diff --git a/User2.ts b/User2.ts index e44810c..3e93ae2 100644 --- a/User2.ts +++ b/User2.ts @@ -1,4 +1,4 @@ -import { Collection, Entity, ManyToMany, PrimaryKey, Property } from '@mikro-orm/core'; +import { Collection, Entity, ManyToMany, OneToOne, PrimaryKey, Property } from '@mikro-orm/core'; import { Car2 } from './Car2'; import { Sandwich } from './sandwich'; @@ -20,6 +20,9 @@ export class User2 { @ManyToMany(() => Sandwich) sandwiches = new Collection<Sandwich>(this); + @OneToOne({ entity: () => Car2, nullable: true }) + favouriteCar?: Car2; + constructor(firstName: string, lastName: string) { this.firstName = firstName; this.lastName = lastName; diff --git a/joined-strategy.postgre.test.ts b/joined-strategy.postgre.test.ts index c5bd458..f4ce65f 100644 --- a/joined-strategy.postgre.test.ts +++ b/joined-strategy.postgre.test.ts @@ -1,9 +1,9 @@ import { LoadStrategy, Logger, MikroORM } from '@mikro-orm/core'; import { PostgreSqlDriver } from '@mikro-orm/postgresql'; import { initORMPostgreSql, wipeDatabasePostgreSql } from './bootstrap'; -import { Author2, Book2 } from './entities-sql'; +import { Author2, Book2, BookTag2 } from './entities-sql'; -describe('Joined loading', () => { +describe('Joined loading strategy', () => { let orm: MikroORM<PostgreSqlDriver>; @@ -13,14 +13,14 @@ describe('Joined loading', () => { afterAll(async () => orm.close(true)); test('populate OneToMany with joined strategy [findOne()]', async 
() => { - const author2 = new Author2('Albert Camus', '[email protected]'); - const stranger = new Book2('The Stranger', author2); - const fall = new Book2('The Fall', author2); - author2.books2.add(stranger, fall); - await orm.em.persistAndFlush(author2); + const author = new Author2('Albert Camus', '[email protected]'); + const stranger = new Book2('The Stranger', author); + const fall = new Book2('The Fall', author); + author.books2.add(stranger, fall); + await orm.em.persistAndFlush(author); orm.em.clear(); - const a2 = await orm.em.findOneOrFail(Author2, { id: author2.id }, { populate: ['books2', 'following'] }); + const a2 = await orm.em.findOneOrFail(Author2, author, { populate: ['books2', 'following'] }); expect(a2.books2).toHaveLength(2); expect(a2.books2[0].title).toEqual('The Stranger'); expect(a2.books2[1].title).toEqual('The Fall'); @@ -49,6 +49,39 @@ describe('Joined loading', () => { expect(ret[2].books2[1].title).toEqual('The Fall 3'); }); + test('populate ManyToOne with joined strategy [findOne()]', async () => { + const author = new Author2('Albert Camus', '[email protected]'); + const stranger = new Book2('The Stranger', author); + const fall = new Book2('The Fall', author); + author.books2.add(stranger, fall); + await orm.em.persistAndFlush(author); + orm.em.clear(); + + const b1 = await orm.em.findOneOrFail(Book2, stranger, { populate: { author: LoadStrategy.JOINED } }); + expect(b1.title).toEqual('The Stranger'); + expect(b1.author.name).toEqual('Albert Camus'); + }); + + test('populate ManyToOne with joined strategy [find()]', async () => { + const a1 = new Author2('Albert Camus 1', '[email protected]'); + a1.books2.add(new Book2('The Stranger 1', a1), new Book2('The Fall 1', a1)); + const a2 = new Author2('Albert Camus 2', '[email protected]'); + a2.books2.add(new Book2('The Stranger 2', a2), new Book2('The Fall 2', a2)); + const a3 = new Author2('Albert Camus 3', '[email protected]'); + a3.books2.add(new Book2('The Stranger 3', a3), new Book2('The Fall 3', a3)); + await orm.em.persistAndFlush([a1, a2, a3]); + orm.em.clear(); + + const books = await orm.em.find(Book2, {}, { populate: { author: LoadStrategy.JOINED } }); + expect(books).toHaveLength(6); + expect(books[0].title).toBe('The Stranger 1'); + expect(books[0].author.name).toBe('Albert Camus 1'); + expect(books[2].title).toBe('The Stranger 2'); + expect(books[2].author.name).toBe('Albert Camus 2'); + expect(books[4].title).toBe('The Stranger 3'); + expect(books[4].author.name).toBe('Albert Camus 3'); + }); + test('should only fire one query [findOne()]', async () => { const author2 = new Author2('Albert Camus', '[email protected]'); const stranger = new Book2('The Stranger', author2); @@ -151,6 +184,49 @@ describe('Joined loading', () => { 'where "e0"."id" = $1'); }); + test('populate ManyToMany with joined strategy', async () => { + const author = new Author2('name', 'email'); + const b1 = new Book2('b1', author); + const b2 = new Book2('b2', author); + const b3 = new Book2('b3', author); + const b4 = new Book2('b4', author); + const b5 = new Book2('b5', author); + const tag1 = new BookTag2('silly'); + const tag2 = new BookTag2('funny'); + const tag3 = new BookTag2('sick'); + const tag4 = new BookTag2('strange'); + const tag5 = new BookTag2('sexy'); + b1.tags.add(tag1, tag3); + b2.tags.add(tag1, tag2, tag5); + b3.tags.add(tag5); + b4.tags.add(tag2, tag4, tag5); + b5.tags.add(tag5); + + author.books.add(b1, b2, b3, b4, b5); + await orm.em.persistAndFlush(author); + orm.em.clear(); + + const mock = jest.fn(); 
+ const logger = new Logger(mock, true); + Object.assign(orm.em.config, { logger }); + mock.mock.calls.length = 0; + const books = await orm.em.find(Book2, {}, { populate: { tags: LoadStrategy.JOINED }, orderBy: { tags: { name: 'desc' } } }); + expect(mock.mock.calls.length).toBe(1); + expect(mock.mock.calls[0][0]).toMatch('select "e0"."uuid_pk", "e0"."created_at", "e0"."title", "e0"."price", "e0"."double", "e0"."meta", "e0"."author_id", "e0"."publisher_id", ' + + '"t1"."id" as "t1_id", "t1"."name" as "t1_name", "e0".price * 1.19 as "price_taxed" ' + + 'from "book2" as "e0" ' + + 'left join "book2_tags" as "e2" on "e0"."uuid_pk" = "e2"."book2_uuid_pk" ' + + 'left join "book_tag2" as "t1" on "e2"."book_tag2_id" = "t1"."id" ' + + 'order by "t1"."name" desc'); + + expect(books.map(b => b.title)).toEqual(['b4', 'b2', 'b1', 'b5', 'b3']); + expect(books[0].tags.getItems().map(t => t.name)).toEqual(['strange', 'sexy', 'funny']); + expect(books[1].tags.getItems().map(t => t.name)).toEqual(['silly', 'sexy', 'funny']); + expect(books[2].tags.getItems().map(t => t.name)).toEqual(['silly', 'sick']); + expect(books[3].tags.getItems().map(t => t.name)).toEqual(['sexy']); + expect(books[4].tags.getItems().map(t => t.name)).toEqual(['sexy']); + }); + test('can populate all related entities', async () => { const author2 = new Author2('Albert Camus', '[email protected]'); const stranger = new Book2('The Stranger', author2); @@ -193,9 +269,7 @@ describe('Joined loading', () => { }); test.todo('populate OneToOne with joined strategy'); - test.todo('populate ManyToMany with joined strategy'); test.todo('handles nested joinedLoads that map to the same entity, eg book.author.favouriteAuthor'); - test.todo('works with em.find()'); - test.todo('works with formulas'); + test.todo('works with formulas (also in the nested entities, not just root)'); }); diff --git a/mysql-schema.sql b/mysql-schema.sql index 04c05a9..7e89fb3 100644 --- a/mysql-schema.sql +++ b/mysql-schema.sql @@ -88,10 +88,13 @@ alter table `car2` add primary key `car2_pkey`(`name`, `year`); create table `car_owner2` (`id` int unsigned not null auto_increment primary key, `name` varchar(255) not null, `car_name` varchar(100) not null, `car_year` int(11) unsigned not null) default character set utf8mb4 engine = InnoDB; alter table `car_owner2` add index `car_owner2_car_name_car_year_index`(`car_name`, `car_year`); -create table `user2` (`first_name` varchar(100) not null, `last_name` varchar(100) not null, `foo` int(11) null) default character set utf8mb4 engine = InnoDB; +create table `user2` (`first_name` varchar(100) not null, `last_name` varchar(100) not null, `foo` int(11) null, `favourite_car_name` varchar(100) null, `favourite_car_year` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table `user2` add index `user2_first_name_index`(`first_name`); alter table `user2` add index `user2_last_name_index`(`last_name`); +alter table `user2` add unique `user2_favourite_car_name_unique`(`favourite_car_name`); +alter table `user2` add unique `user2_favourite_car_year_unique`(`favourite_car_year`); alter table `user2` add primary key `user2_pkey`(`first_name`, `last_name`); +alter table `user2` add index `user2_favourite_car_name_favourite_car_year_index`(`favourite_car_name`, `favourite_car_year`); create table `base_user2` (`id` int unsigned not null auto_increment primary key, `first_name` varchar(100) not null, `last_name` varchar(100) not null, `type` enum('employee', 'manager', 'owner') not null, `employee_prop` int(11) 
null, `manager_prop` varchar(255) null, `owner_prop` varchar(255) null, `favourite_employee_id` int(11) unsigned null, `favourite_manager_id` int(11) unsigned null) default character set utf8mb4 engine = InnoDB; alter table `base_user2` add index `base_user2_type_index`(`type`); @@ -149,6 +152,9 @@ alter table `configuration2` add constraint `configuration2_test_id_foreign` for alter table `car_owner2` add constraint `car_owner2_car_name_foreign` foreign key (`car_name`) references `car2` (`name`) on update cascade; alter table `car_owner2` add constraint `car_owner2_car_year_foreign` foreign key (`car_year`) references `car2` (`year`) on update cascade; +alter table `user2` add constraint `user2_favourite_car_name_foreign` foreign key (`favourite_car_name`) references `car2` (`name`) on update cascade on delete set null; +alter table `user2` add constraint `user2_favourite_car_year_foreign` foreign key (`favourite_car_year`) references `car2` (`year`) on update cascade on delete set null; + alter table `base_user2` add constraint `base_user2_favourite_employee_id_foreign` foreign key (`favourite_employee_id`) references `base_user2` (`id`) on update cascade on delete set null; alter table `base_user2` add constraint `base_user2_favourite_manager_id_foreign` foreign key (`favourite_manager_id`) references `base_user2` (`id`) on update cascade on delete set null;
feat(core): add support for flush events (#642) There is a special kind of event executed during the commit phase (flush operation). These events are executed before, during and after the flush, and they are not bound to any particular entity. - `beforeFlush` is executed before change sets are computed; this is the only event where it is safe to persist new entities. - `onFlush` is executed after the change sets are computed. - `afterFlush` is executed as the last step, just before the `flush` call resolves. It will be executed even if there are no changes to be flushed. Flush event args will not contain any entity instance, as they are entity agnostic. They do contain an additional reference to the `UnitOfWork` instance. Closes #637
1f12affaf532e86cb75abc66e7e04f1f2a5ac0c7
feat
https://github.com/mikro-orm/mikro-orm/commit/1f12affaf532e86cb75abc66e7e04f1f2a5ac0c7
add support for flush events (#642) There is a special kind of event executed during the commit phase (flush operation). These events are executed before, during and after the flush, and they are not bound to any particular entity. - `beforeFlush` is executed before change sets are computed; this is the only event where it is safe to persist new entities. - `onFlush` is executed after the change sets are computed. - `afterFlush` is executed as the last step, just before the `flush` call resolves. It will be executed even if there are no changes to be flushed. Flush event args will not contain any entity instance, as they are entity agnostic. They do contain an additional reference to the `UnitOfWork` instance. Closes #637
diff --git a/lifecycle-hooks.md b/lifecycle-hooks.md index 8ae4eb5..953f749 100644 --- a/lifecycle-hooks.md +++ b/lifecycle-hooks.md @@ -96,12 +96,15 @@ import { EventArgs, EventSubscriber, Subscriber } from '@mikro-orm/core'; @Subscriber() export class EverythingSubscriber implements EventSubscriber { + async beforeCreate<T>(args: EventArgs<T>): Promise<void> { ... } async afterCreate<T>(args: EventArgs<T>): Promise<void> { ... } - async afterDelete<T>(args: EventArgs<T>): Promise<void> { ... } + async beforeUpdate<T>(args: EventArgs<T>): Promise<void> { ... } async afterUpdate<T>(args: EventArgs<T>): Promise<void> { ... } - async beforeCreate<T>(args: EventArgs<T>): Promise<void> { ... } async beforeDelete<T>(args: EventArgs<T>): Promise<void> { ... } - async beforeUpdate<T>(args: EventArgs<T>): Promise<void> { ... } + async afterDelete<T>(args: EventArgs<T>): Promise<void> { ... } + async beforeFlush<T>(args: EventArgs<T>): Promise<void> { ... } + async onFlush<T>(args: EventArgs<T>): Promise<void> { ... } + async afterFlush<T>(args: EventArgs<T>): Promise<void> { ... } onInit<T>(args: EventArgs<T>): void { ... } } @@ -120,3 +123,67 @@ interface EventArgs<T> { changeSet?: ChangeSet<T>; } ``` + +## Flush events + +There is a special kind of events executed during the commit phase (flush operation). +They are executed before, during and after the flush, and they are not bound to any +entity in particular. + +- `beforeFlush` is executed before change sets are computed, this is the only + event where it is safe to persist new entities. +- `onFlush` is executed after the change sets are computed. +- `afterFlush` is executed as the last step just before the `flush` call resolves. + it will be executed even if there are no changes to be flushed. + +Flush event args will not contain any entity instance, as they are entity agnostic. +They do contain additional reference to the `UnitOfWork` instance. + +```typescript +interface FlushEventArgs extends Omit<EventArgs<unknown>, 'entity'> { + uow?: UnitOfWork; +} +``` + +> Flush events are entity agnostic, specifying `getSubscribedEntities()` method +> will not have any effect for those. They are fired only once per the `flush` +> operation. + +### Using onFlush event + +In following example we have 2 entities: `FooBar` and `FooBaz`, connected via +M:1 relation. Our subscriber will automatically create new `FooBaz` entity and +connect it to the `FooBar` when we detect it in the change sets. + +We first use `uow.getChangeSets()` method to look up the change set of entity +we are interested in. After we create the `FooBaz` instance and link it with +`FooBar`, we need to do two things: + +1. Call `uow.computeChangeSet(baz)` to compute the change set of newly created + `FooBaz` entity +2. Call `uow.recomputeSingleChangeSet(cs.entity)` to recalculate the existing + change set of the `FooBar` entity. 
+ +```typescript +@Subscriber() +export class FooBarSubscriber implements EventSubscriber { + + async onFlush(args: FlushEventArgs): Promise<void> { + const changeSets = args.uow.getChangeSets(); + const cs = changeSets.find(cs => cs.type === ChangeSetType.CREATE && cs.entity instanceof FooBar); + + if (cs) { + const baz = new FooBaz(); + baz.name = 'dynamic'; + cs.entity.baz = baz; + args.uow.computeChangeSet(baz); + args.uow.recomputeSingleChangeSet(cs.entity); + } + } + +} + +const bar = new FooBar(); +bar.name = 'bar'; +await em.persistAndFlush(bar); +``` diff --git a/EntityFactory.ts b/EntityFactory.ts index 6da6452..1b274cf 100644 --- a/EntityFactory.ts +++ b/EntityFactory.ts @@ -150,7 +150,7 @@ export class EntityFactory { hooks.forEach(hook => (entity[hook] as unknown as () => void)()); } - this.em.getEventManager().dispatchEvent(EventType.onInit, entity, { em: this.em }); + this.em.getEventManager().dispatchEvent(EventType.onInit, { entity, em: this.em }); } } diff --git a/EventManager.ts b/EventManager.ts index e549076..6e53274 100644 --- a/EventManager.ts +++ b/EventManager.ts @@ -1,5 +1,5 @@ import { AnyEntity } from '../typings'; -import { EventArgs, EventSubscriber } from './EventSubscriber'; +import { EventArgs, EventSubscriber, FlushEventArgs } from './EventSubscriber'; import { Utils } from '../utils'; import { EventType } from './EventType'; import { wrap } from '../entity/wrap'; @@ -23,28 +23,29 @@ export class EventManager { }); } - dispatchEvent<T extends AnyEntity<T>>(event: EventType.onInit, entity: T, args: Partial<EventArgs<T>>): unknown; - dispatchEvent<T extends AnyEntity<T>>(event: EventType, entity: T, args: Partial<EventArgs<T>>): Promise<unknown>; - dispatchEvent<T extends AnyEntity<T>>(event: EventType, entity: T, args: Partial<EventArgs<T>>): Promise<unknown> | unknown { + dispatchEvent<T extends AnyEntity<T>>(event: EventType.onInit, args: Partial<EventArgs<T>>): unknown; + dispatchEvent<T extends AnyEntity<T>>(event: EventType, args: Partial<EventArgs<T> | FlushEventArgs>): Promise<unknown>; + dispatchEvent<T extends AnyEntity<T>>(event: EventType, args: Partial<EventArgs<T> | FlushEventArgs>): Promise<unknown> | unknown { const listeners: [EventType, EventSubscriber<T>][] = []; + const entity: T = (args as EventArgs<T>).entity; // execute lifecycle hooks first - const hooks = wrap(entity, true).__meta.hooks[event] || []; + const hooks = (entity && wrap(entity, true).__meta.hooks[event]) || []; listeners.push(...hooks.map(hook => [hook, entity] as [EventType, EventSubscriber<T>])); for (const listener of this.listeners[event] || []) { const entities = this.entities.get(listener)!; - if (entities.length === 0 || entities.includes(entity.constructor.name)) { + if (entities.length === 0 || !entity || entities.includes(entity.constructor.name)) { listeners.push([event, listener]); } } if (event === EventType.onInit) { - return listeners.forEach(listener => listener[1][listener[0]]!({ ...args, entity } as EventArgs<T>)); + return listeners.forEach(listener => listener[1][listener[0]]!(args as (EventArgs<T> & FlushEventArgs))); } - return Utils.runSerial(listeners, listener => listener[1][listener[0]]!({ ...args, entity } as EventArgs<T>) as Promise<void>); + return Utils.runSerial(listeners, listener => listener[1][listener[0]]!(args as (EventArgs<T> & FlushEventArgs)) as Promise<void>); } private getSubscribedEntities(listener: EventSubscriber): string[] { diff --git a/EventSubscriber.ts b/EventSubscriber.ts index b7293bb..b84d656 100644 --- 
a/EventSubscriber.ts +++ b/EventSubscriber.ts @@ -1,6 +1,6 @@ import { AnyEntity, EntityName } from '../typings'; import { EntityManager } from '../EntityManager'; -import { ChangeSet } from '../unit-of-work'; +import { ChangeSet, UnitOfWork } from '../unit-of-work'; export interface EventArgs<T> { entity: T; @@ -8,6 +8,10 @@ export interface EventArgs<T> { changeSet?: ChangeSet<T>; } +export interface FlushEventArgs extends Omit<EventArgs<unknown>, 'entity'> { + uow: UnitOfWork; +} + export interface EventSubscriber<T = AnyEntity> { getSubscribedEntities?(): EntityName<T>[]; onInit?(args: EventArgs<T>): void; @@ -17,4 +21,7 @@ export interface EventSubscriber<T = AnyEntity> { afterUpdate?(args: EventArgs<T>): Promise<void>; beforeDelete?(args: EventArgs<T>): Promise<void>; afterDelete?(args: EventArgs<T>): Promise<void>; + beforeFlush?(args: FlushEventArgs): Promise<void>; + onFlush?(args: FlushEventArgs): Promise<void>; + afterFlush?(args: FlushEventArgs): Promise<void>; } diff --git a/EventType.ts b/EventType.ts index 8d983aa..006c8e3 100644 --- a/EventType.ts +++ b/EventType.ts @@ -6,4 +6,7 @@ export enum EventType { afterUpdate = 'afterUpdate', beforeDelete = 'beforeDelete', afterDelete = 'afterDelete', + beforeFlush = 'beforeFlush', + onFlush = 'onFlush', + afterFlush = 'afterFlush', } diff --git a/UnitOfWork.ts b/UnitOfWork.ts index c764014..c81c856 100644 --- a/UnitOfWork.ts +++ b/UnitOfWork.ts @@ -75,6 +75,52 @@ export class UnitOfWork { return this.identityMap; } + getOriginalEntityData(): Dictionary<EntityData<AnyEntity>> { + return this.originalEntityData; + } + + getPersistStack(): AnyEntity[] { + return this.persistStack; + } + + getRemoveStack(): AnyEntity[] { + return this.removeStack; + } + + getChangeSets(): ChangeSet<AnyEntity>[] { + return this.changeSets; + } + + computeChangeSet<T>(entity: T): void { + const cs = this.changeSetComputer.computeChangeSet(entity); + + if (!cs) { + return; + } + + const wrapped = wrap(entity, true); + this.initIdentifier(entity); + this.changeSets.push(cs); + this.cleanUpStack(this.persistStack, entity); + this.originalEntityData[wrapped.__uuid] = Utils.prepareEntity(entity, this.metadata, this.platform); + } + + recomputeSingleChangeSet<T>(entity: T): void { + const idx = this.changeSets.findIndex(cs => cs.entity === entity); + + if (idx === -1) { + return; + } + + const cs = this.changeSetComputer.computeChangeSet(entity); + + if (cs) { + Object.assign(this.changeSets[idx].payload, cs.payload); + const uuid = wrap(entity, true).__uuid; + this.originalEntityData[uuid] = Utils.prepareEntity(entity, this.metadata, this.platform); + } + } + persist<T extends AnyEntity<T>>(entity: T, visited: AnyEntity[] = [], checkRemoveStack = false): void { if (this.persistStack.includes(entity)) { return; @@ -112,11 +158,17 @@ export class UnitOfWork { throw ValidationError.cannotCommit(); } + await this.em.getEventManager().dispatchEvent(EventType.beforeFlush, { em: this.em, uow: this }); this.working = true; this.computeChangeSets(); + await this.em.getEventManager().dispatchEvent(EventType.onFlush, { em: this.em, uow: this }); + // nothing to do, do not start transaction if (this.changeSets.length === 0 && this.collectionUpdates.length === 0 && this.extraUpdates.length === 0) { - return this.postCommitCleanup(); // nothing to do, do not start transaction + this.postCommitCleanup(); + await this.em.getEventManager().dispatchEvent(EventType.afterFlush, { em: this.em, uow: this }); + + return; } this.reorderChangeSets(); @@ -130,6 +182,7 @@ export class 
UnitOfWork { } this.postCommitCleanup(); + await this.em.getEventManager().dispatchEvent(EventType.afterFlush, { em: this.em, uow: this }); } async lock<T extends AnyEntity<T>>(entity: T, mode: LockMode, version?: number | Date): Promise<void> { @@ -201,9 +254,7 @@ export class UnitOfWork { return; } - if (!Utils.isDefined(wrapped.__primaryKey, true) && !this.identifierMap[wrapped.__uuid]) { - this.identifierMap[wrapped.__uuid] = new EntityIdentifier(); - } + this.initIdentifier(entity); for (const prop of Object.values<EntityProperty>(wrapped.__meta.properties)) { const reference = this.unwrapReference(entity, prop); @@ -219,6 +270,16 @@ export class UnitOfWork { } } + private initIdentifier<T>(entity: T): void { + const wrapped = wrap(entity, true); + + if (Utils.isDefined(wrapped.__primaryKey, true) || this.identifierMap[wrapped.__uuid]) { + return; + } + + this.identifierMap[wrapped.__uuid] = new EntityIdentifier(); + } + private processReference<T extends AnyEntity<T>>(parent: T, prop: EntityProperty<T>, reference: any, visited: AnyEntity[]): void { const isToOne = prop.reference === ReferenceType.MANY_TO_ONE || prop.reference === ReferenceType.ONE_TO_ONE; @@ -274,7 +335,7 @@ export class UnitOfWork { await this.changeSetPersister.persistToDatabase(changeSet, ctx); switch (changeSet.type) { - case ChangeSetType.CREATE: this.em.merge(changeSet.entity as T); break; + case ChangeSetType.CREATE: this.em.merge(changeSet.entity as T, true); break; case ChangeSetType.UPDATE: this.merge(changeSet.entity as T); break; case ChangeSetType.DELETE: this.unsetIdentity(changeSet.entity as T); break; } @@ -283,7 +344,7 @@ export class UnitOfWork { } private async runHooks<T extends AnyEntity<T>>(type: EventType, changeSet: ChangeSet<T>) { - await this.em.getEventManager().dispatchEvent(type, changeSet.entity, { em: this.em, changeSet }); + await this.em.getEventManager().dispatchEvent(type, { entity: changeSet.entity, em: this.em, changeSet }); } /** diff --git a/EntityManager.mysql.test.ts b/EntityManager.mysql.test.ts index 7ef9ad7..2142e94 100644 --- a/EntityManager.mysql.test.ts +++ b/EntityManager.mysql.test.ts @@ -12,6 +12,7 @@ import { Author2, Book2, BookTag2, FooBar2, FooBaz2, Publisher2, PublisherType, import { initORMMySql, wipeDatabaseMySql } from './bootstrap'; import { Author2Subscriber } from './subscribers/Author2Subscriber'; import { EverythingSubscriber } from './subscribers/EverythingSubscriber'; +import { FlushSubscriber } from './subscribers/FlushSubscriber'; describe('EntityManagerMySql', () => { @@ -1202,6 +1203,7 @@ describe('EntityManagerMySql', () => { test('subscribers', async () => { expect(Author2Subscriber.log).toEqual([]); expect(EverythingSubscriber.log).toEqual([]); + expect(FlushSubscriber.log).toEqual([]); const pub = new Publisher2('Publisher2'); await orm.em.persistAndFlush(pub); @@ -1251,10 +1253,6 @@ describe('EntityManagerMySql', () => { ['afterUpdate', 'Author2'], ['beforeUpdate', 'Book2'], ['afterUpdate', 'Book2'], - ['beforeUpdate', 'Book2'], - ['afterUpdate', 'Book2'], - ['beforeUpdate', 'Book2'], - ['afterUpdate', 'Book2'], ['beforeDelete', 'Book2'], ['afterDelete', 'Book2'], ['beforeDelete', 'Book2'], @@ -1264,6 +1262,21 @@ describe('EntityManagerMySql', () => { ['beforeDelete', 'Publisher2'], ['afterDelete', 'Publisher2'], ]); + + expect(FlushSubscriber.log.map(l => [l[0], Object.keys(l[1])])).toEqual([ + ['beforeFlush', ['em', 'uow']], + ['onFlush', ['em', 'uow']], + ['afterFlush', ['em', 'uow']], + ['beforeFlush', ['em', 'uow']], + ['onFlush', ['em', 
'uow']], + ['afterFlush', ['em', 'uow']], + ['beforeFlush', ['em', 'uow']], + ['onFlush', ['em', 'uow']], + ['afterFlush', ['em', 'uow']], + ['beforeFlush', ['em', 'uow']], + ['onFlush', ['em', 'uow']], + ['afterFlush', ['em', 'uow']], + ]); }); test('trying to populate non-existing or non-reference property will throw', async () => { @@ -1633,11 +1646,16 @@ describe('EntityManagerMySql', () => { const author = new Author2('name', 'email'); await orm.em.persistAndFlush(author); // we need to flush here so the entity gets inside IM - // fork EM without clearing the IM (once for each process), so author entity will be there + const saveBook = async (title: string, author: number) => { + const em = orm.em.fork(); + const book = new Book2(title, em.getReference(Author2, author)); + await em.persistAndFlush(book); + }; + await Promise.all([ - orm.em.fork(false).persistAndFlush(new Book2('b1', author)), - orm.em.fork(false).persistAndFlush(new Book2('b2', author)), - orm.em.fork(false).persistAndFlush(new Book2('b3', author)), + saveBook('b1', author.id), + saveBook('b2', author.id), + saveBook('b3', author.id), ]); orm.em.clear(); diff --git a/UnitOfWork.test.ts b/UnitOfWork.test.ts index 83a4581..36d9d57 100644 --- a/UnitOfWork.test.ts +++ b/UnitOfWork.test.ts @@ -1,6 +1,8 @@ import { Author } from './entities'; -import { EntityValidator, MikroORM, UnitOfWork, ChangeSetComputer } from '@mikro-orm/core'; +import { ChangeSetComputer, ChangeSetType, EntityValidator, EventSubscriber, FlushEventArgs, Logger, MikroORM, UnitOfWork, wrap } from '@mikro-orm/core'; import { initORMMongo, wipeDatabase } from './bootstrap'; +import FooBar from './entities/FooBar'; +import { FooBaz } from './entities/FooBaz'; describe('UnitOfWork', () => { @@ -110,6 +112,59 @@ describe('UnitOfWork', () => { expect(uow.removeStack.length).toBe(1); }); + test('getters', async () => { + const uow = new UnitOfWork(orm.em); + const author = new Author('test', 'test'); + author.id = '00000001885f0a3cc37dc9f0'; + uow.persist(author); + expect(uow.getPersistStack()).toEqual([author]); + expect(uow.getRemoveStack()).toEqual([]); + expect(uow.getOriginalEntityData()).toEqual({}); + uow.merge(author); + expect(uow.getOriginalEntityData()).toMatchObject({ + [wrap(author, true).__uuid]: { name: 'test', email: 'test' }, + }); + uow.remove(author); + expect(uow.getRemoveStack()).toEqual([author]); + expect(() => uow.recomputeSingleChangeSet(author)).not.toThrow(); + expect(() => uow.computeChangeSet(author)).not.toThrow(); + expect(() => uow.recomputeSingleChangeSet(author)).not.toThrow(); + expect(() => uow.computeChangeSet(author)).not.toThrow(); + }); + + test('manually changing the UoW state during flush', async () => { + class Subscriber implements EventSubscriber { + + async onFlush(args: FlushEventArgs): Promise<void> { + const changeSets = args.uow.getChangeSets(); + const cs = changeSets.find(cs => cs.type === ChangeSetType.CREATE && cs.entity instanceof FooBar); + + if (cs) { + const baz = new FooBaz(); + baz.name = 'dynamic'; + cs.entity.baz = baz; + args.uow.computeChangeSet(baz); + args.uow.recomputeSingleChangeSet(cs.entity); + } + } + + } + + const em = orm.em.fork(); + em.getEventManager().registerSubscriber(new Subscriber()); + const bar = new FooBar(); + bar.name = 'bar'; + + const mock = jest.fn(); + const logger = new Logger(mock, true); + Object.assign(orm.config, { logger }); + await em.persistAndFlush(bar); + expect(mock.mock.calls[0][0]).toMatch('db.begin()'); + 
expect(mock.mock.calls[1][0]).toMatch(`db.getCollection('foo-baz').insertOne({ name: 'dynamic' }, { session: '[ClientSession]' })`); + expect(mock.mock.calls[2][0]).toMatch(/db\\.getCollection\\('foo-bar'\\)\\.insertOne\\({ name: 'bar', baz: ObjectId\\('\\w+'\\), onCreateTest: true, onUpdateTest: true }, { session: '\\[ClientSession]' }\\)/); + expect(mock.mock.calls[3][0]).toMatch('db.commit()'); + }); + afterAll(async () => orm.close(true)); }); diff --git a/bootstrap.ts b/bootstrap.ts index 94da6d5..30c1be8 100644 --- a/bootstrap.ts +++ b/bootstrap.ts @@ -26,6 +26,7 @@ import { schema as FooBaz4 } from './entities-schema/FooBaz4'; import { schema as BaseEntity5 } from './entities-schema/BaseEntity5'; import { Author2Subscriber } from './subscribers/Author2Subscriber'; import { EverythingSubscriber } from './subscribers/EverythingSubscriber'; +import { FlushSubscriber } from './subscribers/FlushSubscriber'; const { BaseEntity4, Author3, Book3, BookTag3, Publisher3, Test3 } = require('./entities-js/index'); @@ -86,6 +87,7 @@ export async function initORMMySql<D extends MySqlDriver | MariaDbDriver = MySql orm = await MikroORM.init(orm.config); Author2Subscriber.log.length = 0; EverythingSubscriber.log.length = 0; + FlushSubscriber.log.length = 0; return orm as MikroORM<D>; } @@ -110,6 +112,7 @@ export async function initORMPostgreSql() { await connection.loadFile(__dirname + '/postgre-schema.sql'); Author2Subscriber.log.length = 0; EverythingSubscriber.log.length = 0; + FlushSubscriber.log.length = 0; return orm; } @@ -188,6 +191,7 @@ export async function wipeDatabaseMySql(em: SqlEntityManager) { em.clear(); Author2Subscriber.log.length = 0; EverythingSubscriber.log.length = 0; + FlushSubscriber.log.length = 0; } export async function wipeDatabasePostgreSql(em: SqlEntityManager) { @@ -207,6 +211,7 @@ export async function wipeDatabasePostgreSql(em: SqlEntityManager) { em.clear(); Author2Subscriber.log.length = 0; EverythingSubscriber.log.length = 0; + FlushSubscriber.log.length = 0; } export async function wipeDatabaseSqlite(em: SqlEntityManager) { diff --git a/Author2.ts b/Author2.ts index a4f5cb1..b61e36e 100644 --- a/Author2.ts +++ b/Author2.ts @@ -113,7 +113,6 @@ export class Author2 extends BaseEntity2 { @BeforeUpdate() beforeUpdate(args: EventArgs<this>) { this.version += 1; - console.log(this); this.hookParams.push(args); } diff --git a/FlushSubscriber.ts b/FlushSubscriber.ts index 1c6bcd3..1288a79 100644 --- a/FlushSubscriber.ts +++ b/FlushSubscriber.ts @@ -0,0 +1,20 @@ +import { EventSubscriber, FlushEventArgs, Subscriber } from '@mikro-orm/core'; + +@Subscriber() +export class FlushSubscriber implements EventSubscriber { + + static readonly log: [string, FlushEventArgs][] = []; + + async beforeFlush(args: FlushEventArgs): Promise<void> { + FlushSubscriber.log.push(['beforeFlush', args]); + } + + async onFlush(args: FlushEventArgs): Promise<void> { + FlushSubscriber.log.push(['onFlush', args]); + } + + async afterFlush(args: FlushEventArgs): Promise<void> { + FlushSubscriber.log.push(['afterFlush', args]); + } + +}
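As a complement to the `onFlush` example in the docs diff above, here is a minimal `beforeFlush` sketch against the `FlushEventArgs` API added in this commit; the `AuditLog` entity and its import path are hypothetical placeholders used only for illustration, the point being that `beforeFlush` is the one flush event where persisting new entities is safe:

```ts
import { EventSubscriber, FlushEventArgs, Subscriber } from '@mikro-orm/core';
import { AuditLog } from './entities/AuditLog'; // hypothetical entity, for illustration only

@Subscriber()
export class AuditSubscriber implements EventSubscriber {

  // runs before change sets are computed, so the entity persisted here
  // is still picked up by the very same flush
  async beforeFlush(args: FlushEventArgs): Promise<void> {
    const log = new AuditLog();
    log.message = 'flush started';
    args.em.persist(log);
  }

}
```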
fix: don't over-estimate the number of objects to be received in the indexing phase. Previously, it would work with a number that was pessimistically duplicated, as each object could in theory be a ref-delta, which would then cause another base to be added to the stream, duplicating the number of objects we need to account for in the acceleration data structure, which unfortunately cannot actually grow dynamically without violating invariants. Now we simply don't try to estimate the number of ref-deltas (which don't exist here anymore) at this stage.
ce182d6dae7e277d104893f0eec3285709946233
fix
https://github.com/Byron/gitoxide/commit/ce182d6dae7e277d104893f0eec3285709946233
don't over-estimate the number of objects to be received in the indexing phase. Previously, it would work with a number that was pessimistically duplicated, as each object could in theory be a ref-delta, which would then cause another base to be added to the stream, duplicating the number of objects we need to account for in the acceleration data structure, which unfortunately cannot actually grow dynamically without violating invariants. Now we simply don't try to estimate the number of ref-deltas (which don't exist here anymore) at this stage.
diff --git a/mod.rs b/mod.rs index 4cfc3f6..107b304 100644 --- a/mod.rs +++ b/mod.rs @@ -103,13 +103,14 @@ impl crate::index::File { } let mut num_objects: usize = 0; let mut last_seen_trailer = None; - let anticipated_num_objects = entries.size_hint().1.unwrap_or_else(|| entries.size_hint().0); - let mut tree = Tree::with_capacity(anticipated_num_objects)?; + let (anticipated_num_objects, upper_bound) = entries.size_hint(); + let worst_case_num_objects_after_thin_pack_resolution = upper_bound.unwrap_or(anticipated_num_objects); + let mut tree = Tree::with_capacity(worst_case_num_objects_after_thin_pack_resolution)?; let indexing_start = std::time::Instant::now(); root_progress.init(Some(4), progress::steps()); let mut objects_progress = root_progress.add_child_with_id("indexing", ProgressId::IndexObjects.into()); - objects_progress.init(entries.size_hint().1, progress::count("objects")); + objects_progress.init(Some(anticipated_num_objects), progress::count("objects")); let mut decompressed_progress = root_progress.add_child_with_id("decompressing", ProgressId::DecompressedBytes.into()); decompressed_progress.init(None, progress::bytes()); @@ -168,11 +169,6 @@ impl crate::index::File { num_objects += 1; objects_progress.inc(); } - if num_objects != anticipated_num_objects { - objects_progress.info(format!( - "{anticipated_num_objects} objects were resolved into {num_objects} objects during thin-pack resolution" - )); - } let num_objects: u32 = num_objects .try_into() .map_err(|_| Error::IteratorInvariantTooManyObjects(num_objects))?;
feat: whitespace in mutable sections can be finely controlled, and is derived from existing sections (#331)
9157717c2fb143b5decbdf60d18cc2bd99dde775
feat
https://github.com/Byron/gitoxide/commit/9157717c2fb143b5decbdf60d18cc2bd99dde775
whitespace in mutable sections can be finely controlled, and is derived from existing sections (#331)
diff --git a/section.rs b/section.rs index 662c25d..bb336bf 100644 --- a/section.rs +++ b/section.rs @@ -18,7 +18,7 @@ use crate::{ pub struct MutableSection<'a, 'event> { section: &'a mut SectionBody<'event>, implicit_newline: bool, - whitespace: usize, + whitespace: Option<Cow<'event, BStr>>, } /// Mutating methods. @@ -26,12 +26,8 @@ impl<'a, 'event> MutableSection<'a, 'event> { /// Adds an entry to the end of this section. // TODO: multi-line handling - maybe just escape it for now. pub fn push(&mut self, key: Key<'event>, value: Cow<'event, BStr>) { - if self.whitespace > 0 { - self.section.0.push(Event::Whitespace({ - let mut s = BString::default(); - s.extend(std::iter::repeat(b' ').take(self.whitespace)); - s.into() - })); + if let Some(ws) = &self.whitespace { + self.section.0.push(Event::Whitespace(ws.clone())); } self.section.0.push(Event::SectionKey(key)); @@ -114,25 +110,25 @@ impl<'a, 'event> MutableSection<'a, 'event> { self.implicit_newline = on; } - /// Sets the number of spaces before the start of a key value. The _default - /// is 2_. Set to 0 to disable adding whitespace before a key - /// value. - pub fn set_leading_space(&mut self, num: usize) { - self.whitespace = num; + /// Sets the exact whitespace to use before each key-value pair. + /// The default is 2 tabs. + /// Set to `None` to disable adding whitespace before a key value. + pub fn set_leading_space(&mut self, whitespace: Option<Cow<'event, BStr>>) { + self.whitespace = whitespace; } - /// Returns the number of space characters this section will insert before the - /// beginning of a key. + /// Returns the whitespace this section will insert before the + /// beginning of a key, if any. #[must_use] - pub const fn leading_space(&self) -> usize { - self.whitespace + pub fn leading_space(&self) -> Option<&BStr> { + self.whitespace.as_deref() } } // Internal methods that may require exact indices for faster operations. 
impl<'a, 'event> MutableSection<'a, 'event> { pub(crate) fn new(section: &'a mut SectionBody<'event>) -> Self { - let whitespace = compute_whitespace(section); + let whitespace = Some(compute_whitespace(section)); Self { section, implicit_newline: true, @@ -193,7 +189,7 @@ impl<'a, 'event> MutableSection<'a, 'event> { } } -fn compute_whitespace(s: &mut SectionBody<'_>) -> usize { +fn compute_whitespace<'event>(s: &mut SectionBody<'event>) -> Cow<'event, BStr> { let mut saw_events = false; let computed = s.0.iter() @@ -211,7 +207,10 @@ fn compute_whitespace(s: &mut SectionBody<'_>) -> usize { _ => unreachable!(), }) .sum(); - saw_events.then(|| computed).unwrap_or(8) + + let mut buf = BString::default(); + buf.extend(std::iter::repeat(b' ').take(saw_events.then(|| computed).unwrap_or(8))); + buf.into() } impl<'event> Deref for MutableSection<'_, 'event> { diff --git a/mod.rs b/mod.rs index e479892..e214647 100644 --- a/mod.rs +++ b/mod.rs @@ -117,7 +117,7 @@ fn section<'a>( } } - if let Ok((new_i, new_newlines)) = section_body(i, node, receive_event) { + if let Ok((new_i, new_newlines)) = key_value_pair(i, node, receive_event) { if old_i != new_i { i = new_i; newlines += new_newlines; @@ -235,7 +235,7 @@ fn sub_section_delegate<'a>(i: &'a [u8], push_byte: &mut dyn FnMut(u8)) -> IResu Ok((&i[cursor - 1..], (found_escape, cursor - 1))) } -fn section_body<'a>( +fn key_value_pair<'a>( i: &'a [u8], node: &mut ParseNode, receive_event: &mut impl FnMut(Event<'a>), diff --git a/tests.rs b/tests.rs index 4bf7904..7062156 100644 --- a/tests.rs +++ b/tests.rs @@ -664,7 +664,7 @@ mod value_no_continuation { } } -mod section_body { +mod key_value_pair { use crate::parse::{ error::ParseNode, section, @@ -672,19 +672,19 @@ mod section_body { Event, }; - fn section_body<'a>( + fn key_value<'a>( i: &'a [u8], node: &mut ParseNode, events: &mut section::Events<'a>, ) -> nom::IResult<&'a [u8], ()> { - super::section_body(i, node, &mut |e| events.push(e)).map(|t| (t.0, ())) + super::key_value_pair(i, node, &mut |e| events.push(e)).map(|t| (t.0, ())) } #[test] fn whitespace_is_not_ambigious() { let mut node = ParseNode::SectionHeader; let mut vec = Default::default(); - assert!(section_body(b"a =b", &mut node, &mut vec).is_ok()); + assert!(key_value(b"a =b", &mut node, &mut vec).is_ok()); assert_eq!( vec, into_events(vec![ @@ -696,7 +696,7 @@ mod section_body { ); let mut vec = Default::default(); - assert!(section_body(b"a= b", &mut node, &mut vec).is_ok()); + assert!(key_value(b"a= b", &mut node, &mut vec).is_ok()); assert_eq!( vec, into_events(vec![
chore: move .babelrc to babel.config.js
b08dbcab943d4ba416661e4985b575c02078e648
chore
https://github.com/pmndrs/react-spring/commit/b08dbcab943d4ba416661e4985b575c02078e648
move .babelrc to babel.config.js
diff --git a/.babelrc b/.babelrc index d365ca9..cb415fc 100644 --- a/.babelrc +++ b/.babelrc @@ -1,23 +0,0 @@ -{ - "presets": [ - [ - "@babel/preset-env", - { - "loose": true, - "modules": false, - "targets": ">1%, not dead, not ie 11, not op_mini all" - } - ], - "@babel/preset-react", - "@babel/preset-typescript" - ], - "plugins": [ - ["@babel/plugin-proposal-class-properties", { "loose": true }], - ["@babel/plugin-proposal-object-rest-spread", { "loose": true }] - ], - "env": { - "test": { - "plugins": ["@babel/plugin-transform-modules-commonjs"] - } - } -} diff --git a/babel.config.js b/babel.config.js index 32e566f..f873446 100644 --- a/babel.config.js +++ b/babel.config.js @@ -0,0 +1,23 @@ +module.exports = { + presets: [ + [ + '@babel/preset-env', + { + loose: true, + modules: false, + targets: '>1%, not dead, not ie 11, not op_mini all', + }, + ], + '@babel/preset-react', + '@babel/preset-typescript', + ], + plugins: [ + ['@babel/plugin-proposal-class-properties', { loose: true }], + ['@babel/plugin-proposal-object-rest-spread', { loose: true }], + ], + env: { + test: { + plugins: ['@babel/plugin-transform-modules-commonjs'], + }, + }, +}
chore: add `log-level-error` feature
ea6e3df982d48e4ec4d2391caa7c0d9880d5f12d
chore
https://github.com/erg-lang/erg/commit/ea6e3df982d48e4ec4d2391caa7c0d9880d5f12d
add `log-level-error` feature
diff --git a/Cargo.toml b/Cargo.toml index 23db3fa..b0a74e0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,6 +20,7 @@ large_thread = ["erg_common/large_thread"] experimental = ["erg_common/experimental"] pylib = ["dep:pyo3", "erg_common/pylib"] pylib_parser = ["pylib"] +log-level-error = ["erg_common/log-level-error"] [dependencies] erg_common = { workspace = true } diff --git a/consts.rs b/consts.rs index 97257a6..83bfa81 100644 --- a/consts.rs +++ b/consts.rs @@ -7,6 +7,7 @@ pub const PYTHON_MODE: bool = cfg!(feature = "py_compat"); pub const ERG_MODE: bool = !cfg!(feature = "py_compat"); pub const ELS: bool = cfg!(feature = "els"); pub const DEBUG_MODE: bool = cfg!(feature = "debug"); +pub const LOG_LEVEL_ERROR: bool = cfg!(feature = "log-level-error"); pub const EXPERIMENTAL_MODE: bool = cfg!(feature = "experimental"); pub const BACKTRACE_MODE: bool = cfg!(feature = "backtrace"); pub const GAL: bool = cfg!(feature = "gal"); diff --git a/dict.rs b/dict.rs index be487c4..dd813ab 100644 --- a/dict.rs +++ b/dict.rs @@ -191,6 +191,15 @@ impl<K, V> IntoIterator for Dict<K, V> { } } +impl<'a, K, V> IntoIterator for &'a Dict<K, V> { + type Item = (&'a K, &'a V); + type IntoIter = Iter<'a, K, V>; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.dict.iter() + } +} + impl<K: Hash + Eq, V> Dict<K, V> { #[inline] pub fn get<Q>(&self, k: &Q) -> Option<&V> diff --git a/macros.rs b/macros.rs index f066bab..146616c 100644 --- a/macros.rs +++ b/macros.rs @@ -466,7 +466,9 @@ macro_rules! debug_info { #[macro_export] macro_rules! log { (info $($arg: tt)*) => {{ - $crate::log!(c DEBUG_MAIN, $($arg)*); + if !$crate::consts::LOG_LEVEL_ERROR { + $crate::log!(c DEBUG_MAIN, $($arg)*); + } }}; (err $($arg: tt)*) => {{ @@ -474,7 +476,9 @@ macro_rules! log { }}; (info_f $output:ident, $($arg: tt)*) => {{ - $crate::log!(f+c $output, DEBUG_MAIN, $($arg)*); + if !$crate::consts::LOG_LEVEL_ERROR { + $crate::log!(f+c $output, DEBUG_MAIN, $($arg)*); + } }}; (err_f $output:ident, $($arg: tt)*) => {{ @@ -533,7 +537,7 @@ macro_rules! log { }}; ($($arg: tt)*) => {{ - if cfg!(feature = "debug") { + if cfg!(feature = "debug") && !$crate::consts::LOG_LEVEL_ERROR { use $crate::style::*; $crate::debug_info!(); println!($($arg)*); diff --git a/set.rs b/set.rs index 2c6567c..a9fe8a8 100644 --- a/set.rs +++ b/set.rs @@ -154,6 +154,16 @@ impl<T: Hash> IntoIterator for Set<T> { } } +impl<'a, T> IntoIterator for &'a Set<T> { + type Item = &'a T; + type IntoIter = Iter<'a, T>; + + #[inline] + fn into_iter(self) -> Iter<'a, T> { + self.elems.iter() + } +} + impl<T: Hash + Eq> Set<T> { #[inline] pub fn get<Q>(&self, value: &Q) -> Option<&T> diff --git a/shared.rs b/shared.rs index 557143f..36cf6af 100644 --- a/shared.rs +++ b/shared.rs @@ -282,6 +282,7 @@ impl<T: Clone> Shared<T> { /// Thread-local objects that can be shared among threads. /// The initial value can be shared globally, but the changes are not reflected in other threads. +/// If you want to reflect the changes in other threads, you need to call `update_init`. /// Otherwise, this behaves as a `RefCell`. #[derive(Clone)] pub struct Forkable<T: Send + Clone> { diff --git a/build_features.md b/build_features.md index 4075e50..dab2593 100644 --- a/build_features.md +++ b/build_features.md @@ -42,3 +42,11 @@ Erg 内部オプション、ヘルプ (ヘルプ、著作権、ライセンス ## py_compatible Python互換モードを有効にする。APIや文法の一部がPythonと互換になる。[pylyzer](https://github.com/mtshiba/pylyzer)のために使用される。 + +## experimental + +実験的な機能を有効にする。 + +## log-level-error + +エラーログのみ表示する。
feat(snowflake): support SSO and other forms of passwordless authentication
23ac53d3f122a04de29f25e53c2434de6d34e544
feat
https://github.com/rohankumardubey/ibis/commit/23ac53d3f122a04de29f25e53c2434de6d34e544
support SSO and other forms of passwordless authentication
diff --git a/Snowflake.md b/Snowflake.md index caca386..b46bb72 100644 --- a/Snowflake.md +++ b/Snowflake.md @@ -63,6 +63,34 @@ passing a properly formatted Snowflake connection URL to `ibis.connect` con = ibis.connect(f"snowflake://{user}:{password}@{account}/{database}") ``` +### Authenticating with SSO + +Ibis supports connecting to SSO-enabled Snowflake warehouses using the `authenticator` parameter. + +You can use it in the explicit-parameters-style or in the URL-style connection +APIs. All values of `authenticator` are supported. + +#### Explicit + +```python +con = ibis.snowflake.connect( + user="user", + account="safpqpq-sq55555", + database="my_database/my_schema", + warehouse="my_warehouse", + authenticator="externalbrowser", +) +``` + +#### URL + +```python +con = ibis.connect( + f"snowflake://{user}@{account}/{database}?warehouse={warehouse}", + authenticator="externalbrowser", +) +``` + ### Looking up your Snowflake organization ID and user ID A [Snowflake account diff --git a/__init__.py b/__init__.py index 55c50c0..bdd28f6 100644 --- a/__init__.py +++ b/__init__.py @@ -137,9 +137,10 @@ $$ {defn["source"]} $$""" def do_connect( self, user: str, - password: str, account: str, database: str, + password: str | None = None, + authenticator: str | None = None, connect_args: Mapping[str, Any] | None = None, **kwargs: Any, ): @@ -149,8 +150,6 @@ $$ {defn["source"]} $$""" ---------- user Username - password - Password account A Snowflake organization ID and a Snowflake user ID, separated by a hyphen. Note that a Snowflake user ID is a separate identifier from a username. @@ -158,6 +157,17 @@ $$ {defn["source"]} $$""" database A Snowflake database and a Snowflake schema, separated by a `/`. See https://ibis-project.org/backends/Snowflake/ for details + password + Password. If empty or `None` then `authenticator` must be passed. + authenticator + String indicating authentication method. See + https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-example#connecting-with-oauth + for details. + + Note that the authentication flow **will not take place** until a + database connection is made. This means that + `ibis.snowflake.connect(...)` can succeed, while subsequent API + calls fail if the authentication fails for any reason. connect_args Additional arguments passed to the SQLAlchemy engine creation call. kwargs: @@ -171,10 +181,16 @@ $$ {defn["source"]} $$""" "Schema must be non-None. Pass the schema as part of the " f"database e.g., {dbparams['database']}/my_schema" ) - url = URL(account=account, user=user, password=password, **dbparams, **kwargs) + + # snowflake-connector-python does not handle `None` for password, but + # accepts the empty string + url = URL( + account=account, user=user, password=password or "", **dbparams, **kwargs + ) self.database_name = dbparams["database"] if connect_args is None: connect_args = {} + connect_args.setdefault( "session_parameters", { @@ -183,6 +199,9 @@ $$ {defn["source"]} $$""" "STRICT_JSON_OUTPUT": "TRUE", }, ) + if authenticator is not None: + connect_args.setdefault("authenticator", authenticator) + engine = sa.create_engine( url, connect_args=connect_args, poolclass=sa.pool.StaticPool )
fix(trino): allow passing the `auth` keyword (#9410)
560ddf6ca24e0d29fdd565aa59a22f3e7a32e959
fix
https://github.com/ibis-project/ibis/commit/560ddf6ca24e0d29fdd565aa59a22f3e7a32e959
allow passing the `auth` keyword (#9410)
diff --git a/__init__.py b/__init__.py index 36e8a8e..f7e5bd2 100644 --- a/__init__.py +++ b/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations import contextlib +import warnings from functools import cached_property from operator import itemgetter from typing import TYPE_CHECKING, Any @@ -43,11 +44,12 @@ class Backend(SQLBackend, CanListCatalog, CanCreateDatabase, CanCreateSchema): catalog, db = url.path.strip("/").split("/") self.do_connect( user=url.username or None, - password=url.password or None, + auth=url.password or None, host=url.hostname or None, port=url.port or None, database=catalog, schema=db, + **kwargs, ) return self @@ -253,6 +255,7 @@ class Backend(SQLBackend, CanListCatalog, CanCreateDatabase, CanCreateSchema): schema: str | None = None, source: str | None = None, timezone: str = "UTC", + auth: str | None = None, **kwargs, ) -> None: """Connect to Trino. @@ -262,7 +265,7 @@ class Backend(SQLBackend, CanListCatalog, CanCreateDatabase, CanCreateSchema): user Username to connect with password - Password to connect with + Password to connect with. Mutually exclusive with `auth`. host Hostname of the Trino server port @@ -275,6 +278,9 @@ class Backend(SQLBackend, CanListCatalog, CanCreateDatabase, CanCreateSchema): Application name passed to Trino timezone Timezone to use for the connection + auth + Authentication method to use for the connection. Mutually exclusive + with `password`. kwargs Additional keyword arguments passed directly to the `trino.dbapi.connect` API. @@ -298,15 +304,24 @@ class Backend(SQLBackend, CanListCatalog, CanCreateDatabase, CanCreateSchema): >>> con = ibis.trino.connect(database=catalog, schema=schema, source="my-app") """ + if password is not None: + if auth is not None: + raise ValueError( + "Cannot specify both `auth` and `password` when connecting to Trino" + ) + warnings.warn( + "The `password` parameter is deprecated and will be removed in 10.0; use `auth` instead", + FutureWarning, + ) self.con = trino.dbapi.connect( user=user, - auth=password, host=host, port=port, catalog=database, schema=schema, source=source or "ibis", timezone=timezone, + auth=auth or password, **kwargs, ) diff --git a/conftest.py b/conftest.py index 08019f7..24ce6dc 100644 --- a/conftest.py +++ b/conftest.py @@ -142,7 +142,7 @@ class TestConf(ServiceBackendTest): host=TRINO_HOST, port=TRINO_PORT, user=TRINO_USER, - password=TRINO_PASS, + auth=TRINO_PASS, database="memory", schema="default", **kw, diff --git a/test_client.py b/test_client.py index 7a27500..77f9d54 100644 --- a/test_client.py +++ b/test_client.py @@ -74,7 +74,7 @@ def test_con_source(source, expected): user=TRINO_USER, host=TRINO_HOST, port=TRINO_PORT, - password=TRINO_PASS, + auth=TRINO_PASS, database="hive", schema="default", source=source, @@ -82,6 +82,18 @@ def test_con_source(source, expected): assert con.con.source == expected +def test_deprecated_password_parameter(): + with pytest.warns(FutureWarning, match="The `password` parameter is deprecated"): + ibis.trino.connect( + user=TRINO_USER, + host=TRINO_HOST, + port=TRINO_PORT, + password=TRINO_PASS, + database="hive", + schema="default", + ) + + @pytest.mark.parametrize( ("catalog", "database", "table"), [
refactor(core): use runtime value in `Type.toJSON()` BREAKING CHANGE: Custom types used to be serialized to the database value. In v4, the runtime value is used by default. Implement custom `toJSON()` method if you need to customize this.
9d8d0906bc7bcf28447a493a4497297312170175
refactor
https://github.com/mikro-orm/mikro-orm/commit/9d8d0906bc7bcf28447a493a4497297312170175
use runtime value in `Type.toJSON()` BREAKING CHANGE: Custom types used to be serialized to the database value. In v4, the runtime value is used by default. Implement custom `toJSON()` method if you need to customize this.
diff --git a/custom-types.md b/custom-types.md index b8be4b6..cffe845 100644 --- a/custom-types.md +++ b/custom-types.md @@ -17,7 +17,7 @@ You can define custom types by extending `Type` abstract class. It has 4 optiona - `toJSON(value: any, platform: Platform): any` Converts a value from its JS representation to its serialized JSON form of this type. - By default converts to the database value. + By default uses the runtime value. - `getColumnType(prop: EntityProperty, platform: Platform): string` diff --git a/upgrading-v3-to-v4.md b/upgrading-v3-to-v4.md index baa1af6..7efbb98 100644 --- a/upgrading-v3-to-v4.md +++ b/upgrading-v3-to-v4.md @@ -59,6 +59,12 @@ Input type defaults to `string`, output type defaults to the input type. You might need to explicitly provide the types if your methods are strictly typed. +## Custom type serialization + +Custom types used to be serialized to the database value. In v4, the runtime +value is used by default. Implement custom `toJSON()` method if you need to +customize this. + ## Property `default` and `defaultRaw` Previously the `default` option of properties was used as is, so we had to wrap diff --git a/DateType.ts b/DateType.ts index ab9b715..aa97337 100644 --- a/DateType.ts +++ b/DateType.ts @@ -35,4 +35,8 @@ export class DateType extends Type<Date, string> { return platform.getDateTypeDeclarationSQL(prop.length); } + toJSON(value: Date, platform: Platform): Date | string { + return this.convertToDatabaseValue(value, platform); + } + } diff --git a/Type.ts b/Type.ts index fbc16e5..f2f5306 100644 --- a/Type.ts +++ b/Type.ts @@ -21,10 +21,10 @@ export abstract class Type<JSType = string, DBType = JSType> { /** * Converts a value from its JS representation to its serialized JSON form of this type. - * By default converts to the database value. + * By default uses the runtime value. */ toJSON(value: JSType, platform: Platform): JSType | DBType { - return this.convertToDatabaseValue(value, platform); + return value; } /**
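A sketch of how a custom type can opt back into the old serialization after this change; the `Point`/`PointType` names are illustrative assumptions, not part of the diff — the relevant part is overriding `toJSON()` to return the database value, mirroring what `DateType` does above:

```ts
import { Platform, Type } from '@mikro-orm/core';

export class Point {
  constructor(public x: number, public y: number) {}
}

// illustrative custom type that keeps the v3 behaviour by serializing
// to the database value in `toJSON()`
export class PointType extends Type<Point, string> {

  convertToDatabaseValue(value: Point, platform: Platform): string {
    return `point(${value.x} ${value.y})`;
  }

  convertToJSValue(value: string, platform: Platform): Point {
    const [x, y] = value.slice(6, -1).split(' ').map(Number);
    return new Point(x, y);
  }

  toJSON(value: Point, platform: Platform): string {
    return this.convertToDatabaseValue(value, platform);
  }

}
```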
docs(config): improve Databricks driver instructions for Docker Fixes #5833
b1fb39e8012a324518c7f1155aeb08c1ec62ce3c
docs
https://github.com/wzhiqing/cube/commit/b1fb39e8012a324518c7f1155aeb08c1ec62ce3c
improve Databricks driver instructions for Docker Fixes #5833
diff --git a/Databricks-JDBC.mdx b/Databricks-JDBC.mdx index ce1d62f..4dc2eca 100644 --- a/Databricks-JDBC.mdx +++ b/Databricks-JDBC.mdx @@ -11,7 +11,7 @@ permalink: /config/databases/databricks/jdbc ## Setup -### Manual +### <--{"id" : "Setup"}--> Environment Variables Add the following to a `.env` file in your Cube project: @@ -23,6 +23,20 @@ CUBEJS_DB_NAME=default CUBEJS_DB_DATABRICKS_URL=jdbc:databricks://dbc-XXXXXXX-XXXX.cloud.databricks.com:443/default;transportMode=http;ssl=1;httpPath=sql/protocolv1/o/XXXXX/XXXXX;AuthMech=3;UID=token # You can specify the personal access token separately from `CUBEJS_DB_DATABRICKS_URL` by doing this: CUBEJS_DB_DATABRICKS_TOKEN=XXXXX +# This accepts the Databricks usage policy and must be set to `true` to use the Databricks JDBC driver +CUBEJS_DB_DATABRICKS_ACCEPT_POLICY=true +``` + +### <--{"id" : "Setup"}--> Docker + +Create a `.env` file [as above](#setup-environment-variables), then extend the +`cubejs/cube:jdk` Docker image tag to build a Cube image with the JDBC driver: + +```dockerfile +FROM cubejs/cube:jdk + +COPY . . +RUN npm install ``` ## Environment Variables
fix: prevent hosts or paths that look like arguments from being passed to invoked commands. See https://secure.phabricator.com/T12961 for more details.
b06a0dd781accad317fdec5f86f069df4c21875c
fix
https://github.com/Byron/gitoxide/commit/b06a0dd781accad317fdec5f86f069df4c21875c
prevent hosts or paths that look like arguments from being passed to invoked commands. See https://secure.phabricator.com/T12961 for more details.
diff --git a/file.rs b/file.rs index a268b19..b9e2687 100644 --- a/file.rs +++ b/file.rs @@ -211,6 +211,11 @@ impl client::Transport for SpawnProcessOnDemand { }; cmd.stdin = Stdio::piped(); cmd.stdout = Stdio::piped(); + if self.path.first() == Some(&b'-') { + return Err(client::Error::AmbiguousPath { + path: self.path.clone(), + }); + } let repo_path = if self.ssh_cmd.is_some() { cmd.args.push(service.as_str().into()); gix_quote::single(self.path.as_ref()).to_os_str_lossy().into_owned() @@ -225,6 +230,7 @@ impl client::Transport for SpawnProcessOnDemand { } cmd.envs(std::mem::take(&mut self.envs)); + gix_features::trace::debug!(command = ?cmd, "gix_transport::SpawnProcessOnDemand"); let mut child = cmd.spawn().map_err(|err| client::Error::InvokeProgram { source: err, command: cmd_name.into_owned(), diff --git a/mod.rs b/mod.rs index cba6681..4c9936f 100644 --- a/mod.rs +++ b/mod.rs @@ -165,6 +165,21 @@ mod message { "git-upload-pack hello\\\\world\\0host=host:404\\0" ) } + + #[test] + fn with_strange_host_and_port() { + assert_eq!( + git::message::connect( + Service::UploadPack, + Protocol::V1, + b"--upload-pack=attack", + Some(&("--proxy=other-attack".into(), Some(404))), + &[] + ), + "git-upload-pack --upload-pack=attack\\0host=--proxy=other-attack:404\\0", + "we explicitly allow possible `-arg` arguments to be passed to the git daemon - the remote must protect against exploitation, we don't want to prevent legitimate cases" + ) + } } } diff --git a/program_kind.rs b/program_kind.rs index ef6d23b..519b7d4 100644 --- a/program_kind.rs +++ b/program_kind.rs @@ -31,7 +31,6 @@ impl ProgramKind { if disallow_shell { prepare.use_shell = false; } - let host = url.host().expect("present in ssh urls"); match self { ProgramKind::Ssh => { if desired_version != Protocol::V1 { @@ -54,7 +53,7 @@ impl ProgramKind { } ProgramKind::Simple => { if url.port.is_some() { - return Err(ssh::invocation::Error { + return Err(ssh::invocation::Error::Unsupported { command: ssh_cmd.into(), function: "setting the port", }); @@ -62,8 +61,18 @@ impl ProgramKind { } }; let host_as_ssh_arg = match url.user() { - Some(user) => format!("{user}@{host}"), - None => host.into(), + Some(user) => { + let host = url.host().expect("present in ssh urls"); + format!("{user}@{host}") + } + None => { + let host = url + .host_argument_safe() + .ok_or_else(|| ssh::invocation::Error::AmbiguousHostName { + host: url.host().expect("ssh host always set").into(), + })?; + host.into() + } }; // Try to force ssh to yield english messages (for parsing later) diff --git a/tests.rs b/tests.rs index 2ef640c..9531fc9 100644 --- a/tests.rs +++ b/tests.rs @@ -144,13 +144,28 @@ mod program_kind { assert!(call_args(kind, "ssh://user@host:43/p", Protocol::V2).ends_with("-P 43 user@host")); } } + #[test] + fn ambiguous_host_is_allowed_with_user() { + assert_eq!( + call_args(ProgramKind::Ssh, "ssh://user@-arg/p", Protocol::V2), + joined(&["ssh", "-o", "SendEnv=GIT_PROTOCOL", "user@-arg"]) + ); + } + + #[test] + fn ambiguous_host_is_disallowed() { + assert!(matches!( + try_call(ProgramKind::Ssh, "ssh://-arg/p", Protocol::V2), + Err(ssh::invocation::Error::AmbiguousHostName { host }) if host == "-arg" + )); + } #[test] fn simple_cannot_handle_any_arguments() { - match try_call(ProgramKind::Simple, "ssh://user@host:42/p", Protocol::V2) { - Err(ssh::invocation::Error { .. 
}) => {} - _ => panic!("BUG: unexpected outcome"), - } + assert!(matches!( + try_call(ProgramKind::Simple, "ssh://user@host:42/p", Protocol::V2), + Err(ssh::invocation::Error::Unsupported { .. }) + )); assert_eq!( call_args(ProgramKind::Simple, "ssh://user@host/p", Protocol::V2), joined(&["simple", "user@host"]), diff --git a/non_io_types.rs b/non_io_types.rs index 56f112f..bf24acd 100644 --- a/non_io_types.rs +++ b/non_io_types.rs @@ -138,6 +138,8 @@ mod error { Http(#[from] HttpError), #[error(transparent)] SshInvocation(SshInvocationError), + #[error("The repository path '{path}' could be mistaken for a command-line argument")] + AmbiguousPath { path: BString }, } impl crate::IsSpuriousError for Error { diff --git a/lib.rs b/lib.rs index 86902d4..2e1a59a 100644 --- a/lib.rs +++ b/lib.rs @@ -52,7 +52,7 @@ pub struct Url { /// # Security-Warning /// /// URLs allow paths to start with `-` which makes it possible to mask command-line arguments as path which then leads to - /// the invocation of programs from an attacker controlled URL. See https://secure.phabricator.com/T12961 for details. + /// the invocation of programs from an attacker controlled URL. See <https://secure.phabricator.com/T12961> for details. /// /// If this value is going to be used in a command-line application, call [Self::path_argument_safe()] instead. pub path: bstr::BString, @@ -134,7 +134,7 @@ impl Url { /// # Security-Warning /// /// URLs allow hosts to start with `-` which makes it possible to mask command-line arguments as host which then leads to - /// the invocation of programs from an attacker controlled URL. See https://secure.phabricator.com/T12961 for details. + /// the invocation of programs from an attacker controlled URL. See <https://secure.phabricator.com/T12961> for details. /// /// If this value is going to be used in a command-line application, call [Self::host_argument_safe()] instead. pub fn host(&self) -> Option<&str> { @@ -179,7 +179,7 @@ impl Url { } fn looks_like_argument(b: &[u8]) -> bool { - b.get(0) == Some(&b'-') + b.first() == Some(&b'-') } /// Transformation
feat(core): add support for polymorphic embeddables (#2426) Polymorphic embeddables allow us to define multiple classes for a single embedded property and the right one will be used based on the discriminator column, similar to how single table inheritance works. ```ts enum AnimalType { CAT, DOG, } @Embeddable({ abstract: true, discriminatorColumn: 'type' }) abstract class Animal { @Enum(() => AnimalType) type!: AnimalType; @Property() name!: string; } @Embeddable({ discriminatorValue: AnimalType.CAT }) class Cat extends Animal { @Property({ nullable: true }) canMeow?: boolean = true; constructor(name: string) { super(); this.type = AnimalType.CAT; this.name = name; } } @Embeddable({ discriminatorValue: AnimalType.DOG }) class Dog extends Animal { @Property({ nullable: true }) canBark?: boolean = true; constructor(name: string) { super(); this.type = AnimalType.DOG; this.name = name; } } @Entity() class Owner { @PrimaryKey() id!: number; @Property() name!: string; @Embedded(() => [Cat, Dog]) pet!: Cat | Dog; } ``` Closes #1165 BREAKING CHANGE: Embeddable instances are now created via `EntityFactory` and they respect the `forceEntityConstructor` configuration. Due to this we need to have an EM instance when assigning to embedded properties. Using `em.assign()` should be preferred to get around this. Deep assigning of child entities now works by default based on the presence of PKs in the payload. This behaviour can be disabled via updateByPrimaryKey: false in the assign options. `mergeObjects` option is now enabled by default.
7b7c3a22fe517e13a1a610f142c59e758acd3c3f
feat
https://github.com/mikro-orm/mikro-orm/commit/7b7c3a22fe517e13a1a610f142c59e758acd3c3f
add support for polymorphic embeddables (#2426) Polymorphic embeddables allow us to define multiple classes for a single embedded property and the right one will be used based on the discriminator column, similar to how single table inheritance works. ```ts enum AnimalType { CAT, DOG, } @Embeddable({ abstract: true, discriminatorColumn: 'type' }) abstract class Animal { @Enum(() => AnimalType) type!: AnimalType; @Property() name!: string; } @Embeddable({ discriminatorValue: AnimalType.CAT }) class Cat extends Animal { @Property({ nullable: true }) canMeow?: boolean = true; constructor(name: string) { super(); this.type = AnimalType.CAT; this.name = name; } } @Embeddable({ discriminatorValue: AnimalType.DOG }) class Dog extends Animal { @Property({ nullable: true }) canBark?: boolean = true; constructor(name: string) { super(); this.type = AnimalType.DOG; this.name = name; } } @Entity() class Owner { @PrimaryKey() id!: number; @Property() name!: string; @Embedded(() => [Cat, Dog]) pet!: Cat | Dog; } ``` Closes #1165 BREAKING CHANGE: Embeddable instances are now created via `EntityFactory` and they respect the `forceEntityConstructor` configuration. Due to this we need to have an EM instance when assigning to embedded properties. Using `em.assign()` should be preferred to get around this. Deep assigning of child entities now works by default based on the presence of PKs in the payload. This behaviour can be disabled via updateByPrimaryKey: false in the assign options. `mergeObjects` option is now enabled by default.
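For reference, here is a minimal usage sketch of the polymorphic embeddables described above. It reuses the `Owner`/`Cat`/`Dog`/`AnimalType` definitions from the commit message, assumes an already-initialized `MikroORM` instance (config and entity registration omitted), and is illustrative only, not part of the original commit:

```ts
import type { MikroORM } from '@mikro-orm/core';

// Assumes the Owner/Cat/Dog/AnimalType classes from the commit message above are
// registered with the ORM; driver configuration is omitted for brevity.
async function example(orm: MikroORM) {
  const em = orm.em.fork();

  const owner = new Owner();
  owner.name = 'Jane';
  owner.pet = new Cat('Whiskers'); // the Cat constructor sets the `type` discriminator

  await em.persistAndFlush(owner);
  em.clear();

  // On load, the discriminator column decides which embeddable class is instantiated.
  const reloaded = await em.findOneOrFail(Owner, owner.id);
  console.log(reloaded.pet instanceof Cat); // true

  // Embeddable instances are now created via the EntityFactory, so prefer `em.assign()`
  // when reassigning embedded properties (see the BREAKING CHANGE note above).
  em.assign(reloaded, { pet: { type: AnimalType.DOG, name: 'Rex' } });
  await em.flush();
}
```

The `updateByPrimaryKey` and `mergeObjects` options mentioned in the breaking change can be passed as the third argument to `em.assign()` when the new defaults need to be overridden.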
diff --git a/embeddables.md b/embeddables.md index 16a5761..81d4cf7 100644 --- a/embeddables.md +++ b/embeddables.md @@ -180,3 +180,72 @@ class Identity { } ``` + +## Polymorphic embeddables + +Since v5, it is also possible to use polymorphic embeddables. This means we +can define multiple classes for a single embedded property and the right one +will be used based on the discriminator column, similar to how single table +inheritance work. + +```ts +import { Embeddable, Embedded, Entity, Enum, PrimaryKey, Property } from '@mikro-orm/core'; + +enum AnimalType { + CAT, + DOG, +} + +@Embeddable({ abstract: true, discriminatorColumn: 'type' }) +abstract class Animal { + + @Enum(() => AnimalType) + type!: AnimalType; + + @Property() + name!: string; + +} + +@Embeddable({ discriminatorValue: AnimalType.CAT }) +class Cat extends Animal { + + @Property({ nullable: true }) + canMeow?: boolean = true; + + constructor(name: string) { + super(); + this.type = AnimalType.CAT; + this.name = name; + } + +} + +@Embeddable({ discriminatorValue: AnimalType.DOG }) +class Dog extends Animal { + + @Property({ nullable: true }) + canBark?: boolean = true; + + constructor(name: string) { + super(); + this.type = AnimalType.DOG; + this.name = name; + } + +} + +@Entity() +class Owner { + + @PrimaryKey() + id!: number; + + @Property() + name!: string; + + @Embedded(() => [Cat, Dog]) + pet!: Cat | Dog; + +} +``` diff --git a/upgrading-v4-to-v5.md b/upgrading-v4-to-v5.md index 5c902b6..f954ba2 100644 --- a/upgrading-v4-to-v5.md +++ b/upgrading-v4-to-v5.md @@ -128,3 +128,16 @@ Running migrations in production via node and ts-node is now handled the same. This should actually not be breaking, as old format with extension is still supported (e.g. they still can be rolled back), but newly logged migrations will not contain the extension. + +## Changes in `assign()` helper + +Embeddable instances are now created via `EntityFactory` and they respect the +`forceEntityConstructor` configuration. Due to this we need to have EM instance +when assigning to embedded properties. + +Using `em.assign()` should be preferred to get around this. + +Deep assigning of child entities now works by default based on the presence of PKs in the payload. +This behaviour can be disable via updateByPrimaryKey: false in the assign options. + +`mergeObjects` option is now enabled by default. 
diff --git a/Embeddable.ts b/Embeddable.ts index 3865acb..33a4273 100644 --- a/Embeddable.ts +++ b/Embeddable.ts @@ -1,13 +1,21 @@ import type { Constructor, Dictionary } from '../typings'; import { MetadataStorage } from '../metadata'; -export function Embeddable() { +export function Embeddable(options: EmbeddableOptions = {}) { return function <T>(target: T & Dictionary) { const meta = MetadataStorage.getMetadataFromDecorator(target); meta.class = target as unknown as Constructor<T>; meta.name = target.name; meta.embeddable = true; + Object.assign(meta, options); return target; }; } + +export type EmbeddableOptions = { + discriminatorColumn?: string; + discriminatorMap?: Dictionary<string>; + discriminatorValue?: number | string; + abstract?: boolean; +}; diff --git a/Embedded.ts b/Embedded.ts index 93df01e..ccd685a 100644 --- a/Embedded.ts +++ b/Embedded.ts @@ -17,7 +17,7 @@ export function Embedded(type: EmbeddedOptions | (() => AnyEntity) = {}, options } export type EmbeddedOptions = { - entity?: string | (() => AnyEntity); + entity?: string | (() => AnyEntity | AnyEntity[]); type?: string; prefix?: string | boolean; nullable?: boolean; diff --git a/Entity.ts b/Entity.ts index f9ce5ea..82fe62c 100644 --- a/Entity.ts +++ b/Entity.ts @@ -22,7 +22,7 @@ export type EntityOptions<T> = { collection?: string; discriminatorColumn?: string; discriminatorMap?: Dictionary<string>; - discriminatorValue?: string; + discriminatorValue?: number | string; comment?: string; abstract?: boolean; readonly?: boolean; diff --git a/EntityAssigner.ts b/EntityAssigner.ts index 0537123..67c5e1f 100644 --- a/EntityAssigner.ts +++ b/EntityAssigner.ts @@ -17,6 +17,7 @@ export class EntityAssigner { options = { updateNestedEntities: true, updateByPrimaryKey: true, + mergeObjects: true, schema: wrapped.__schema, ...options, // allow overriding the defaults }; @@ -75,7 +76,7 @@ export class EntityAssigner { return entity[prop as keyof T] = validator.validateProperty(props[prop], value, entity); } - if (props[prop]?.reference === ReferenceType.EMBEDDED) { + if (props[prop]?.reference === ReferenceType.EMBEDDED && EntityAssigner.validateEM(em)) { return EntityAssigner.assignEmbeddable(entity, value, props[prop], em, options); } @@ -171,10 +172,8 @@ export class EntityAssigner { collection.set(items); } - private static assignEmbeddable<T extends AnyEntity<T>>(entity: T, value: any, prop: EntityProperty, em: EntityManager, options: AssignOptions): void { - const Embeddable = prop.embeddable; + private static assignEmbeddable<T extends AnyEntity<T>>(entity: T, value: any, prop: EntityProperty, em: EntityManager | undefined, options: AssignOptions): void { const propName = prop.embedded ? prop.embedded[1] : prop.name; - entity[propName] = prop.array || options.mergeObjects ? (entity[propName] || Object.create(Embeddable.prototype)) : Object.create(Embeddable.prototype); if (!value) { entity[propName] = value; @@ -194,6 +193,12 @@ export class EntityAssigner { }); } + const create = () => EntityAssigner.validateEM(em) && em!.getEntityFactory().createEmbeddable<T>(prop.type, value, { + convertCustomTypes: options.convertCustomTypes, + newEntity: options.mergeObjects ? !entity[propName] : true, + }); + entity[propName] = options.mergeObjects ? 
(entity[propName] || create()) : create(); + Object.keys(value).forEach(key => { const childProp = prop.embeddedProps[key]; diff --git a/EntityFactory.ts b/EntityFactory.ts index 6cf9a20..7bfddb5 100644 --- a/EntityFactory.ts +++ b/EntityFactory.ts @@ -138,6 +138,15 @@ export class EntityFactory { return this.create<T>(entityName, id as EntityData<T>, { ...options, initialized: false }) as T; } + createEmbeddable<T>(entityName: EntityName<T>, data: EntityData<T>, options: Pick<FactoryOptions, 'newEntity' | 'convertCustomTypes'> = {}): T { + entityName = Utils.className(entityName); + data = { ...data }; + const meta = this.metadata.get(entityName); + const meta2 = this.processDiscriminatorColumn<T>(meta, data); + + return this.createEntity(data, meta2, options); + } + private createEntity<T extends AnyEntity<T>>(data: EntityData<T>, meta: EntityMetadata<T>, options: FactoryOptions): T { if (options.newEntity || meta.forceConstructor) { const params = this.extractConstructorParams<T>(meta, data, options); @@ -185,7 +194,7 @@ export class EntityFactory { } private findEntity<T>(data: EntityData<T>, meta: EntityMetadata<T>, options: FactoryOptions): T | undefined { - if (!meta.compositePK && !meta.properties[meta.primaryKeys[0]].customType) { + if (!meta.compositePK && !meta.properties[meta.primaryKeys[0]]?.customType) { return this.unitOfWork.getById<T>(meta.name!, data[meta.primaryKeys[0]] as Primary<T>, options.schema); } @@ -253,6 +262,15 @@ export class EntityFactory { return this.createReference(meta.properties[k].type, data[k], options); } + if (meta.properties[k]?.reference === ReferenceType.EMBEDDED && data[k]) { + /* istanbul ignore next */ + if (Utils.isEntity<T>(data[k])) { + return data[k]; + } + + return this.createEmbeddable(meta.properties[k].type, data[k], options); + } + if (!meta.properties[k]) { return data; } diff --git a/errors.ts b/errors.ts index f3e489e..a532b3b 100644 --- a/errors.ts +++ b/errors.ts @@ -212,6 +212,10 @@ export class MetadataError<T extends AnyEntity = AnyEntity> extends ValidationEr return new MetadataError(`Property ${className}:${name} is being overwritten by its child property ${embeddedName}:${name}. 
Consider using a prefix to overcome this issue.`); } + static invalidPrimaryKey(meta: EntityMetadata, prop: EntityProperty, requiredName: string) { + return this.fromMessage(meta, prop, `has wrong field name, '${requiredName}' is required in current driver`); + } + private static fromMessage(meta: EntityMetadata, prop: EntityProperty, message: string): MetadataError { return new MetadataError(`${meta.className}.${prop.name} ${message}`); } diff --git a/ObjectHydrator.ts b/ObjectHydrator.ts index f837e51..18ee40a 100644 --- a/ObjectHydrator.ts +++ b/ObjectHydrator.ts @@ -188,7 +188,14 @@ export class ObjectHydrator extends Hydrator { const convertorKey = path.filter(k => !k.match(/\\[idx_\\d+]/)).map(k => this.safeKey(k)).join('_'); const ret: string[] = []; const conds: string[] = []; - context.set(`prototype_${convertorKey}`, prop.embeddable.prototype); + + if (prop.targetMeta?.polymorphs) { + prop.targetMeta.polymorphs.forEach(meta => { + context.set(`prototype_${convertorKey}_${meta.className}`, meta.prototype); + }); + } else { + context.set(`prototype_${convertorKey}`, prop.embeddable.prototype); + } if (!this.platform.convertsJsonAutomatically() && (prop.object || prop.array)) { ret.push( @@ -207,7 +214,21 @@ export class ObjectHydrator extends Hydrator { } ret.push(` if (${conds.join(' || ')}) {`); - ret.push(` entity${entityKey} = Object.create(prototype_${convertorKey});`); + + if (prop.targetMeta?.polymorphs) { + const targetMeta = prop.targetMeta; + targetMeta.polymorphs!.forEach(meta => { + const childProp = prop.embeddedProps[targetMeta.discriminatorColumn!]; + const childDataKey = prop.object ? dataKey + this.wrap(childProp.embedded![1]) : this.wrap(childProp.name); + // weak comparison as we can have numbers that might have been converted to strings due to being object keys + ret.push(` if (data${childDataKey} == '${meta.discriminatorValue}') {`); + ret.push(` entity${entityKey} = factory.createEmbeddable('${meta.className}', data${prop.object ? dataKey : ''}, { newEntity, convertCustomTypes });`); + ret.push(` }`); + }); + } else { + ret.push(` entity${entityKey} = factory.createEmbeddable('${prop.targetMeta!.className}', data${prop.object ? dataKey : ''}, { newEntity, convertCustomTypes });`); + } + meta.props .filter(p => p.embedded?.[0] === prop.name) .forEach(childProp => { diff --git a/EntitySchema.ts b/EntitySchema.ts index 1d8fec7..a9a69e8 100644 --- a/EntitySchema.ts +++ b/EntitySchema.ts @@ -324,7 +324,8 @@ export class EntitySchema<T extends AnyEntity<T> = AnyEntity, U extends AnyEntit if (Utils.isString(options.entity)) { type = options.type = options.entity; } else if (options.entity) { - type = options.type = Utils.className(options.entity()); + const tmp = options.entity(); + type = options.type = Array.isArray(tmp) ? 
tmp.map(t => Utils.className(t)).sort().join(' | ') : Utils.className(tmp); } } diff --git a/MetadataDiscovery.ts b/MetadataDiscovery.ts index 859e28c..7d48542 100644 --- a/MetadataDiscovery.ts +++ b/MetadataDiscovery.ts @@ -41,7 +41,10 @@ export class MetadataDiscovery { this.discovered .filter(meta => meta.name) - .forEach(meta => discovered.set(meta.name!, meta)); + .forEach(meta => { + this.platform.validateMetadata(meta); + discovered.set(meta.name!, meta); + }); return discovered; } @@ -50,6 +53,7 @@ export class MetadataDiscovery { for (const meta of discovered) { let i = 1; Object.values(meta.properties).forEach(prop => meta.propertyOrder.set(prop.name, i++)); + Object.values(meta.properties).forEach(prop => this.initPolyEmbeddables(prop, discovered)); } // ignore base entities (not annotated with @Entity) @@ -604,6 +608,53 @@ export class MetadataDiscovery { return order; } + private initPolyEmbeddables(embeddedProp: EntityProperty, discovered: EntityMetadata[], visited = new WeakSet<EntityProperty>()): void { + if (embeddedProp.reference !== ReferenceType.EMBEDDED || visited.has(embeddedProp)) { + return; + } + + visited.add(embeddedProp); + const types = embeddedProp.type.split(/ ?\\| ?/); + let embeddable = this.discovered.find(m => m.name === embeddedProp.type); + const polymorphs = this.discovered.filter(m => types.includes(m.name!)); + + // create virtual polymorphic entity + if (!embeddable && polymorphs.length > 0) { + const properties: Dictionary<EntityProperty> = {}; + let discriminatorColumn: string | undefined; + + const processExtensions = (meta: EntityMetadata) => { + const parent = this.discovered.find(m => meta.extends === m.className); + + if (!parent) { + return; + } + + discriminatorColumn ??= parent.discriminatorColumn; + Object.values(parent.properties).forEach(prop => properties[prop.name] = prop); + processExtensions(parent); + }; + + polymorphs.forEach(meta => { + Object.values(meta.properties).forEach(prop => properties[prop.name] = prop); + processExtensions(meta); + }); + const name = polymorphs.map(t => t.className).sort().join(' | '); + embeddable = new EntityMetadata({ + name, + className: name, + embeddable: true, + abstract: true, + properties, + polymorphs, + discriminatorColumn, + }); + embeddable.sync(); + discovered.push(embeddable); + polymorphs.forEach(meta => meta.root = embeddable!); + } + } + private initEmbeddables(meta: EntityMetadata, embeddedProp: EntityProperty, visited = new WeakSet<EntityProperty>()): void { if (embeddedProp.reference !== ReferenceType.EMBEDDED || visited.has(embeddedProp)) { return; @@ -686,7 +737,7 @@ export class MetadataDiscovery { meta.root.discriminatorMap = {} as Dictionary<string>; const children = metadata.filter(m => m.root.className === meta.root.className && !m.abstract); children.forEach(m => { - const name = m.discriminatorValue || this.namingStrategy.classToTableName(m.className); + const name = m.discriminatorValue ?? this.namingStrategy.classToTableName(m.className); meta.root.discriminatorMap![name] = m.className; }); } diff --git a/MetadataProvider.ts b/MetadataProvider.ts index 69d6bf5..5412c47 100644 --- a/MetadataProvider.ts +++ b/MetadataProvider.ts @@ -26,7 +26,8 @@ export abstract class MetadataProvider { if (Utils.isString(prop.entity)) { prop.type = prop.entity; } else if (prop.entity) { - prop.type = Utils.className(prop.entity()); + const tmp = prop.entity(); + prop.type = Array.isArray(tmp) ? 
tmp.map(t => Utils.className(t)).sort().join(' | ') : Utils.className(tmp); } else if (!prop.type) { await fallback(prop); } diff --git a/MetadataValidator.ts b/MetadataValidator.ts index 6e5da0f..26a26af 100644 --- a/MetadataValidator.ts +++ b/MetadataValidator.ts @@ -57,7 +57,7 @@ export class MetadataValidator { // check for not discovered entities discovered.forEach(meta => Object.values(meta.properties).forEach(prop => { - if (prop.reference !== ReferenceType.SCALAR && !discovered.find(m => m.className === prop.type)) { + if (prop.reference !== ReferenceType.SCALAR && !prop.type.split(/ ?\\| ?/).every(type => discovered.find(m => m.className === type))) { throw MetadataError.fromUnknownEntity(prop.type, `${meta.className}.${prop.name}`); } })); diff --git a/Platform.ts b/Platform.ts index a62dc12..05d58b8 100644 --- a/Platform.ts +++ b/Platform.ts @@ -2,7 +2,7 @@ import clone from 'clone'; import { EntityRepository } from '../entity'; import type { NamingStrategy } from '../naming-strategy'; import { UnderscoreNamingStrategy } from '../naming-strategy'; -import type { AnyEntity, Constructor, EntityProperty, IEntityGenerator, IMigrator, IPrimaryKey, ISchemaGenerator, PopulateOptions, Primary } from '../typings'; +import type { AnyEntity, Constructor, EntityProperty, IEntityGenerator, IMigrator, IPrimaryKey, ISchemaGenerator, PopulateOptions, Primary, EntityMetadata } from '../typings'; import { ExceptionConverter } from './ExceptionConverter'; import type { EntityManager } from '../EntityManager'; import type { Configuration } from '../utils/Configuration'; @@ -377,4 +377,8 @@ export abstract class Platform { return true; } + validateMetadata(meta: EntityMetadata): void { + return; + } + } diff --git a/typings.ts b/typings.ts index b45a77b..0886ee7 100644 --- a/typings.ts +++ b/typings.ts @@ -208,6 +208,7 @@ export interface EntityProperty<T extends AnyEntity<T> = any> { embedded?: [string, string]; embeddable: Constructor<T>; embeddedProps: Dictionary<EntityProperty>; + discriminatorColumn?: string; // only for poly embeddables currently object?: boolean; index?: boolean | string; unique?: boolean | string; @@ -344,7 +345,7 @@ export interface EntityMetadata<T extends AnyEntity<T> = any> { schema?: string; pivotTable: boolean; discriminatorColumn?: string; - discriminatorValue?: string; + discriminatorValue?: number | string; discriminatorMap?: Dictionary<string>; embeddable: boolean; constructorParams: string[]; @@ -374,6 +375,7 @@ export interface EntityMetadata<T extends AnyEntity<T> = any> { comment?: string; selfReferencing?: boolean; readonly?: boolean; + polymorphs?: EntityMetadata[]; root: EntityMetadata<T>; } diff --git a/QueryBuilderHelper.ts b/QueryBuilderHelper.ts index 33fe85c..d76d4cf 100644 --- a/QueryBuilderHelper.ts +++ b/QueryBuilderHelper.ts @@ -559,12 +559,13 @@ export class QueryBuilderHelper { if (!this.isPrefixed(field)) { const alias = always ? (quote ? this.alias : this.platform.quoteIdentifier(this.alias)) + '.' : ''; - const fieldName = this.fieldName(field, this.alias); - if (fieldName.startsWith('(')) { - ret = '(' + alias + fieldName.slice(1); - } else { - ret = alias + fieldName; + const fieldName = this.fieldName(field, this.alias, always); + + if (QueryBuilderHelper.isCustomExpression(fieldName)) { + return fieldName; } + + ret = alias + fieldName; } else { const [a, f] = field.split('.'); ret = a + '.' 
+ this.fieldName(f, a); @@ -601,7 +602,7 @@ export class QueryBuilderHelper { return !!field.match(/[\\w`"[\\]]+\\./); } - private fieldName(field: string, alias?: string): string { + private fieldName(field: string, alias?: string, always?: boolean): string { const prop = this.getProperty(field, alias); if (!prop) { @@ -609,6 +610,17 @@ export class QueryBuilderHelper { } if (prop.fieldNameRaw) { + if (!always) { + return prop.fieldNameRaw + .replace(/\\[::alias::]\\.?/g, '') + .replace(this.platform.quoteIdentifier('') + '.', ''); + } + + if (alias) { + return prop.fieldNameRaw.replace(/\\[::alias::]/g, alias); + } + + /* istanbul ignore next */ return prop.fieldNameRaw; } diff --git a/MongoPlatform.ts b/MongoPlatform.ts index 2cf2e44..e854e22 100644 --- a/MongoPlatform.ts +++ b/MongoPlatform.ts @@ -1,6 +1,6 @@ import { ObjectId } from 'mongodb'; -import type { IPrimaryKey, Primary, NamingStrategy, Constructor, EntityRepository, EntityProperty, PopulateOptions } from '@mikro-orm/core'; -import { Platform, MongoNamingStrategy, Utils, ReferenceType } from '@mikro-orm/core'; +import type { IPrimaryKey, Primary, NamingStrategy, Constructor, EntityRepository, EntityProperty, PopulateOptions, EntityMetadata } from '@mikro-orm/core'; +import { Platform, MongoNamingStrategy, Utils, ReferenceType, MetadataError } from '@mikro-orm/core'; import { MongoExceptionConverter } from './MongoExceptionConverter'; import { MongoEntityRepository } from './MongoEntityRepository'; @@ -63,4 +63,12 @@ export class MongoPlatform extends Platform { return prop.reference === ReferenceType.MANY_TO_MANY && prop.owner; } + validateMetadata(meta: EntityMetadata): void { + const pk = meta.getPrimaryProps()[0]; + + if (pk && pk.fieldNames?.[0] !== '_id') { + throw MetadataError.invalidPrimaryKey(meta, pk, '_id'); + } + } + } diff --git a/MySqlPlatform.ts b/MySqlPlatform.ts index 2e5b6c5..760a567 100644 --- a/MySqlPlatform.ts +++ b/MySqlPlatform.ts @@ -2,7 +2,7 @@ import { AbstractSqlPlatform } from '@mikro-orm/knex'; import { MySqlSchemaHelper } from './MySqlSchemaHelper'; import { MySqlExceptionConverter } from './MySqlExceptionConverter'; import type { Type } from '@mikro-orm/core'; -import { Utils } from '@mikro-orm/core'; +import { expr, Utils } from '@mikro-orm/core'; export class MySqlPlatform extends AbstractSqlPlatform { @@ -15,7 +15,7 @@ export class MySqlPlatform extends AbstractSqlPlatform { getSearchJsonPropertyKey(path: string[], type: string): string { const [a, ...b] = path; - return `${this.quoteIdentifier(a)}->'$.${b.join('.')}'`; + return expr(alias => `${this.quoteIdentifier(`${alias}.${a}`)}->'$.${b.join('.')}'`); } getBooleanTypeDeclarationSQL(): string { diff --git a/PostgreSqlPlatform.ts b/PostgreSqlPlatform.ts index 9dad34a..2405a28 100644 --- a/PostgreSqlPlatform.ts +++ b/PostgreSqlPlatform.ts @@ -1,6 +1,6 @@ import { Client } from 'pg'; import type { EntityProperty, Type } from '@mikro-orm/core'; -import { JsonProperty, Utils } from '@mikro-orm/core'; +import { expr, JsonProperty, Utils } from '@mikro-orm/core'; import { AbstractSqlPlatform } from '@mikro-orm/knex'; import { PostgreSqlSchemaHelper } from './PostgreSqlSchemaHelper'; import { PostgreSqlExceptionConverter } from './PostgreSqlExceptionConverter'; @@ -107,7 +107,7 @@ export class PostgreSqlPlatform extends AbstractSqlPlatform { getSearchJsonPropertyKey(path: string[], type: string): string { const first = path.shift(); const last = path.pop(); - const root = this.quoteIdentifier(first!); + const root = expr(alias => 
this.quoteIdentifier(`${alias}.${first}`)); const types = { number: 'float8', boolean: 'bool', diff --git a/SqlitePlatform.ts b/SqlitePlatform.ts index 7c66aac..cefe596 100644 --- a/SqlitePlatform.ts +++ b/SqlitePlatform.ts @@ -1,7 +1,7 @@ // @ts-ignore import { escape } from 'sqlstring-sqlite'; import type { EntityProperty } from '@mikro-orm/core'; -import { JsonProperty, Utils } from '@mikro-orm/core'; +import { expr, JsonProperty, Utils } from '@mikro-orm/core'; import { AbstractSqlPlatform } from '@mikro-orm/knex'; import { SqliteSchemaHelper } from './SqliteSchemaHelper'; import { SqliteExceptionConverter } from './SqliteExceptionConverter'; @@ -104,7 +104,7 @@ export class SqlitePlatform extends AbstractSqlPlatform { getSearchJsonPropertyKey(path: string[], type: string): string { const [a, ...b] = path; - return `json_extract(${this.quoteIdentifier(a)}, '$.${b.join('.')}')`; + return expr(alias => `json_extract(${this.quoteIdentifier(`${alias}.${a}`)}, '$.${b.join('.')}')`); } getDefaultIntegrityRule(): string { diff --git a/EntityFactory.test.ts b/EntityFactory.test.ts index eb4eb04..567397f 100644 --- a/EntityFactory.test.ts +++ b/EntityFactory.test.ts @@ -58,6 +58,28 @@ describe('EntityFactory', () => { expect(entity.books[0].title).toBe('asd'); }); + test('should return embeddable', async () => { + // we are testing this on Author entity as it does not really matter whether it is an embeddable type or not + const data = { id: '5b0d19b28b21c648c2c8a600', name: 'test', email: '[email protected]', books: { title: 'asd' } }; + const managedEntity = factory.createEmbeddable(Author, data); + expect(managedEntity).toBeInstanceOf(Author); + expect(managedEntity._id).toBeUndefined(); + expect(managedEntity.name).toBeUndefined(); + expect(managedEntity.email).toBeUndefined(); + expect(managedEntity.books).toBeUndefined(); + expect(managedEntity.books).toBeUndefined(); + + const newEntity = factory.createEmbeddable(Author, data, { newEntity: true }); + expect(newEntity).toBeInstanceOf(Author); + // not available, as we are not hydrating here, just filling constructor parameters + expect(newEntity._id).toBeUndefined(); + expect(newEntity.name).toBe('test'); + expect(newEntity.email).toBe('[email protected]'); + expect(newEntity.books.isInitialized()).toBe(true); + // books are not assigned either, again we just care about creating the instance, while filling the constructor parameters + expect(newEntity.books).toHaveLength(0); + }); + test('entity ctor can have different params than props', async () => { const entity = factory.create(Test, { name: 'test' }); expect(entity).toBeInstanceOf(Test); diff --git a/EntityManager.mongo.test.ts b/EntityManager.mongo.test.ts index 9cfeea9..09549c0 100644 --- a/EntityManager.mongo.test.ts +++ b/EntityManager.mongo.test.ts @@ -2,7 +2,7 @@ import { ObjectId } from 'mongodb'; import c from 'ansi-colors'; import chalk from 'chalk'; import type { EntityProperty } from '@mikro-orm/core'; -import { Collection, Configuration, MikroORM, QueryOrder, Reference, wrap, Logger, UniqueConstraintViolationException, IdentityMap } from '@mikro-orm/core'; +import { Collection, Configuration, MikroORM, QueryOrder, Reference, wrap, Logger, UniqueConstraintViolationException, IdentityMap, EntitySchema } from '@mikro-orm/core'; import { EntityManager, MongoConnection, MongoDriver } from '@mikro-orm/mongodb'; import { MongoHighlighter } from '@mikro-orm/mongo-highlighter'; @@ -2189,6 +2189,15 @@ describe('EntityManagerMongo', () => { })).rejects.toThrowError('Mongo driver 
does not support `host` options, use `clientUrl` instead!'); }); + test('validation for `_id` PK field name', async () => { + const schema = new EntitySchema({ name: 'WrongPrimaryKeyEntity', properties: { id: { type: 'number', primary: true } } }); + await expect(MikroORM.init({ + entities: [schema], + dbName: 'bar', + type: 'mongo', + })).rejects.toThrowError(`WrongPrimaryKeyEntity.id has wrong field name, '_id' is required in current driver`); + }); + test('extracting child condition when populating (GH #1891)', async () => { const author = new Author('Jon Snow', '[email protected]'); const book1 = new Book('My Life on The Wall, part 1', author); diff --git a/EntityManager.postgre.test.ts b/EntityManager.postgre.test.ts index fb67028..a052184 100644 --- a/EntityManager.postgre.test.ts +++ b/EntityManager.postgre.test.ts @@ -520,9 +520,9 @@ describe('EntityManagerPostgre', () => { expect(mock.mock.calls).toHaveLength(4); expect(mock.mock.calls[0][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where "f0"."id" = 1 limit 1`); - expect(mock.mock.calls[1][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where ("object_property"->'myPropName'->>'nestedProperty')::float8 = 123 limit 1`); - expect(mock.mock.calls[2][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where "object_property"->'myPropName'->>'somethingElse' is null limit 1`); - expect(mock.mock.calls[3][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where ("object_property"->'myPropName'->>'nestedProperty')::float8 = 123 and "object_property"->'myPropName'->>'somethingElse' is null limit 1`); + expect(mock.mock.calls[1][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where ("f0"."object_property"->'myPropName'->>'nestedProperty')::float8 = 123 limit 1`); + expect(mock.mock.calls[2][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where "f0"."object_property"->'myPropName'->>'somethingElse' is null limit 1`); + expect(mock.mock.calls[3][0]).toMatch(`select "f0".*, (select 123) as "random" from "foo_bar2" as "f0" where ("f0"."object_property"->'myPropName'->>'nestedProperty')::float8 = 123 and "f0"."object_property"->'myPropName'->>'somethingElse' is null limit 1`); }); test('findOne should initialize entity that is already in IM', async () => { diff --git a/QueryBuilder.test.ts b/QueryBuilder.test.ts index 1ad36f6..32aa53b 100644 --- a/QueryBuilder.test.ts +++ b/QueryBuilder.test.ts @@ -441,7 +441,7 @@ describe('QueryBuilder', () => { const filter = Object.create(null); filter.meta = { foo: 'bar' }; qb1.select('*').where(filter); - expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where `meta`->\\'$.foo\\' = ?'); + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where `e0`.`meta`->\\'$.foo\\' = ?'); expect(qb1.getParams()).toEqual(['bar']); }); @@ -1942,19 +1942,19 @@ describe('QueryBuilder', () => { expect(qb10.getParams()).toEqual([timestamp, '[email protected]', 'John Doe', timestamp, 'John Doe', timestamp, timestamp]); const qb11 = pg.em.createQueryBuilder(Book2).where({ meta: { foo: 123 } }); - expect(qb11.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where ("meta"->>'foo')::float8 = 123`); + expect(qb11.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" 
where ("b0"."meta"->>'foo')::float8 = 123`); const qb12 = pg.em.createQueryBuilder(Book2).where({ meta: { foo: { $eq: 123 } } }); - expect(qb12.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where ("meta"->>'foo')::float8 = 123`); + expect(qb12.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where ("b0"."meta"->>'foo')::float8 = 123`); const qb13 = pg.em.createQueryBuilder(Book2).where({ meta: { foo: { $lte: 123 } } }); - expect(qb13.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where ("meta"->>'foo')::float8 <= 123`); + expect(qb13.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" where ("b0"."meta"->>'foo')::float8 <= 123`); // order by json property const qb14 = pg.em.createQueryBuilder(Book2).orderBy({ meta: { foo: 'asc' } }); - expect(qb14.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" order by "meta"->>'foo' asc`); + expect(qb14.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" order by "b0"."meta"->>'foo' asc`); const qb15 = pg.em.createQueryBuilder(Book2).orderBy({ meta: { bar: { str: 'asc' } } }); - expect(qb15.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" order by "meta"->'bar'->>'str' asc`); + expect(qb15.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" order by "b0"."meta"->'bar'->>'str' asc`); const qb16 = pg.em.createQueryBuilder(Book2).orderBy({ meta: { bar: { num: QueryOrder.DESC } } }); - expect(qb16.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" order by "meta"->'bar'->>'num' desc`); + expect(qb16.getFormattedQuery()).toBe(`select "b0".*, "b0".price * 1.19 as "price_taxed" from "book2" as "b0" order by "b0"."meta"->'bar'->>'num' desc`); // pessimistic locking await pg.em.transactional(async em => { diff --git a/entities-in-embeddables.mongo.test.ts.snap b/entities-in-embeddables.mongo.test.ts.snap index 39f0e09..8e22d01 100644 --- a/entities-in-embeddables.mongo.test.ts.snap +++ b/entities-in-embeddables.mongo.test.ts.snap @@ -165,13 +165,13 @@ exports[`embedded entities in mongo diffing 2`] = ` if (typeof data._id !== 'undefined') entity._id = data._id; if (typeof data.name !== 'undefined') entity.name = data.name; if (data.profile1_username != null || data.profile1_identity != null || data.profile1_source != null) { - entity.profile1 = Object.create(prototype_profile1); + entity.profile1 = factory.createEmbeddable('Profile', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_username !== 'undefined') entity.profile1.username = data.profile1_username; if (data.profile1_identity_email != null || data.profile1_identity_meta != null || data.profile1_identity_links != null || data.profile1_identity_source != null) { - entity.profile1.identity = Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_email !== 'undefined') entity.profile1.identity.email = data.profile1_identity_email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = 
Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -185,7 +185,7 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile1_identity_meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_meta && typeof data.profile1_identity_meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta.foo; if (data.profile1_identity_meta && typeof data.profile1_identity_meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta.bar; if (data.profile1_identity_meta.source === null) { @@ -202,12 +202,12 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links = []; data.profile1_identity_links.forEach((_, idx_0) => { if (data.profile1_identity_links[idx_0] != null) { - entity.profile1.identity.links[idx_0] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_0] = factory.createEmbeddable('IdentityLink', data.profile1_identity_links[idx_0], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && typeof data.profile1_identity_links[idx_0].url !== 'undefined') entity.profile1.identity.links[idx_0].url = data.profile1_identity_links[idx_0].url; if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].createdAt) entity.profile1.identity.links[idx_0].createdAt = new Date(data.profile1_identity_links[idx_0].createdAt); else if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].createdAt === null) entity.profile1.identity.links[idx_0].createdAt = null; if (data.profile1_identity_links[idx_0].meta != null) { - entity.profile1.identity.links[idx_0].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_0].meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_0].meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].meta && typeof data.profile1_identity_links[idx_0].meta.foo !== 'undefined') entity.profile1.identity.links[idx_0].meta.foo = data.profile1_identity_links[idx_0].meta.foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].meta && typeof data.profile1_identity_links[idx_0].meta.bar !== 'undefined') entity.profile1.identity.links[idx_0].meta.bar = data.profile1_identity_links[idx_0].meta.bar; if (data.profile1_identity_links[idx_0].meta.source === null) { @@ -224,7 +224,7 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links[idx_0].metas = []; data.profile1_identity_links[idx_0].metas.forEach((_, idx_1) => { if (data.profile1_identity_links[idx_0].metas[idx_1] != null) { - entity.profile1.identity.links[idx_0].metas[idx_1] 
= Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_0].metas[idx_1] = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_0].metas[idx_1], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].metas && data.profile1_identity_links[idx_0].metas[idx_1] && typeof data.profile1_identity_links[idx_0].metas[idx_1].foo !== 'undefined') entity.profile1.identity.links[idx_0].metas[idx_1].foo = data.profile1_identity_links[idx_0].metas[idx_1].foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].metas && data.profile1_identity_links[idx_0].metas[idx_1] && typeof data.profile1_identity_links[idx_0].metas[idx_1].bar !== 'undefined') entity.profile1.identity.links[idx_0].metas[idx_1].bar = data.profile1_identity_links[idx_0].metas[idx_1].bar; if (data.profile1_identity_links[idx_0].metas[idx_1].source === null) { @@ -262,10 +262,10 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile1_identity != null) { - entity.profile1.identity = Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data.profile1_identity, { newEntity, convertCustomTypes }); if (data.profile1_identity && typeof data.profile1_identity.email !== 'undefined') entity.profile1.identity.email = data.profile1_identity.email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -279,7 +279,7 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile1_identity.meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity.meta, { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.meta && typeof data.profile1_identity.meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity.meta.foo; if (data.profile1_identity && data.profile1_identity.meta && typeof data.profile1_identity.meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity.meta.bar; if (data.profile1_identity.meta.source === null) { @@ -296,12 +296,12 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links = []; data.profile1_identity.links.forEach((_, idx_2) => { if (data.profile1_identity.links[idx_2] != null) { - entity.profile1.identity.links[idx_2] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_2] = factory.createEmbeddable('IdentityLink', data.profile1_identity.links[idx_2], { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && typeof data.profile1_identity.links[idx_2].url !== 'undefined') 
entity.profile1.identity.links[idx_2].url = data.profile1_identity.links[idx_2].url; if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].createdAt) entity.profile1.identity.links[idx_2].createdAt = new Date(data.profile1_identity.links[idx_2].createdAt); else if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].createdAt === null) entity.profile1.identity.links[idx_2].createdAt = null; if (data.profile1_identity.links[idx_2].meta != null) { - entity.profile1.identity.links[idx_2].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_2].meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity.links[idx_2].meta, { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].meta && typeof data.profile1_identity.links[idx_2].meta.foo !== 'undefined') entity.profile1.identity.links[idx_2].meta.foo = data.profile1_identity.links[idx_2].meta.foo; if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].meta && typeof data.profile1_identity.links[idx_2].meta.bar !== 'undefined') entity.profile1.identity.links[idx_2].meta.bar = data.profile1_identity.links[idx_2].meta.bar; if (data.profile1_identity.links[idx_2].meta.source === null) { @@ -318,7 +318,7 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links[idx_2].metas = []; data.profile1_identity.links[idx_2].metas.forEach((_, idx_3) => { if (data.profile1_identity.links[idx_2].metas[idx_3] != null) { - entity.profile1.identity.links[idx_2].metas[idx_3] = Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_2].metas[idx_3] = factory.createEmbeddable('IdentityMeta', data.profile1_identity.links[idx_2].metas[idx_3], { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].metas && data.profile1_identity.links[idx_2].metas[idx_3] && typeof data.profile1_identity.links[idx_2].metas[idx_3].foo !== 'undefined') entity.profile1.identity.links[idx_2].metas[idx_3].foo = data.profile1_identity.links[idx_2].metas[idx_3].foo; if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].metas && data.profile1_identity.links[idx_2].metas[idx_3] && typeof data.profile1_identity.links[idx_2].metas[idx_3].bar !== 'undefined') entity.profile1.identity.links[idx_2].metas[idx_3].bar = data.profile1_identity.links[idx_2].metas[idx_3].bar; if (data.profile1_identity.links[idx_2].metas[idx_3].source === null) { @@ -366,13 +366,13 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile1 != null) { - entity.profile1 = Object.create(prototype_profile1); + entity.profile1 = factory.createEmbeddable('Profile', data.profile1, { newEntity, convertCustomTypes }); if (data.profile1 && typeof data.profile1.username !== 'undefined') entity.profile1.username = data.profile1.username; if (data.profile1_identity_email != null || data.profile1_identity_meta != null || data.profile1_identity_links != null || data.profile1_identity_source != null) { - entity.profile1.identity = 
Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_email !== 'undefined') entity.profile1.identity.email = data.profile1_identity_email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -386,7 +386,7 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile1_identity_meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_meta && typeof data.profile1_identity_meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta.foo; if (data.profile1_identity_meta && typeof data.profile1_identity_meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta.bar; if (data.profile1_identity_meta.source === null) { @@ -403,12 +403,12 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links = []; data.profile1_identity_links.forEach((_, idx_4) => { if (data.profile1_identity_links[idx_4] != null) { - entity.profile1.identity.links[idx_4] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_4] = factory.createEmbeddable('IdentityLink', data.profile1_identity_links[idx_4], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && typeof data.profile1_identity_links[idx_4].url !== 'undefined') entity.profile1.identity.links[idx_4].url = data.profile1_identity_links[idx_4].url; if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].createdAt) entity.profile1.identity.links[idx_4].createdAt = new Date(data.profile1_identity_links[idx_4].createdAt); else if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].createdAt === null) entity.profile1.identity.links[idx_4].createdAt = null; if (data.profile1_identity_links[idx_4].meta != null) { - entity.profile1.identity.links[idx_4].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_4].meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_4].meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].meta && typeof data.profile1_identity_links[idx_4].meta.foo !== 'undefined') entity.profile1.identity.links[idx_4].meta.foo = data.profile1_identity_links[idx_4].meta.foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].meta && typeof data.profile1_identity_links[idx_4].meta.bar !== 'undefined') 
entity.profile1.identity.links[idx_4].meta.bar = data.profile1_identity_links[idx_4].meta.bar; if (data.profile1_identity_links[idx_4].meta.source === null) { @@ -425,7 +425,7 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links[idx_4].metas = []; data.profile1_identity_links[idx_4].metas.forEach((_, idx_5) => { if (data.profile1_identity_links[idx_4].metas[idx_5] != null) { - entity.profile1.identity.links[idx_4].metas[idx_5] = Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_4].metas[idx_5] = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_4].metas[idx_5], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].metas && data.profile1_identity_links[idx_4].metas[idx_5] && typeof data.profile1_identity_links[idx_4].metas[idx_5].foo !== 'undefined') entity.profile1.identity.links[idx_4].metas[idx_5].foo = data.profile1_identity_links[idx_4].metas[idx_5].foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].metas && data.profile1_identity_links[idx_4].metas[idx_5] && typeof data.profile1_identity_links[idx_4].metas[idx_5].bar !== 'undefined') entity.profile1.identity.links[idx_4].metas[idx_5].bar = data.profile1_identity_links[idx_4].metas[idx_5].bar; if (data.profile1_identity_links[idx_4].metas[idx_5].source === null) { @@ -463,10 +463,10 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile1.identity != null) { - entity.profile1.identity = Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data.profile1.identity, { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && typeof data.profile1.identity.email !== 'undefined') entity.profile1.identity.email = data.profile1.identity.email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -480,7 +480,7 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile1.identity.meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1.identity.meta, { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && data.profile1.identity.meta && typeof data.profile1.identity.meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1.identity.meta.foo; if (data.profile1 && data.profile1.identity && data.profile1.identity.meta && typeof data.profile1.identity.meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1.identity.meta.bar; if (data.profile1.identity.meta.source === null) { @@ -497,12 +497,12 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links = []; 
data.profile1.identity.links.forEach((_, idx_6) => { if (data.profile1.identity.links[idx_6] != null) { - entity.profile1.identity.links[idx_6] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_6] = factory.createEmbeddable('IdentityLink', data.profile1.identity.links[idx_6], { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && typeof data.profile1.identity.links[idx_6].url !== 'undefined') entity.profile1.identity.links[idx_6].url = data.profile1.identity.links[idx_6].url; if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].createdAt) entity.profile1.identity.links[idx_6].createdAt = new Date(data.profile1.identity.links[idx_6].createdAt); else if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].createdAt === null) entity.profile1.identity.links[idx_6].createdAt = null; if (data.profile1.identity.links[idx_6].meta != null) { - entity.profile1.identity.links[idx_6].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_6].meta = factory.createEmbeddable('IdentityMeta', data.profile1.identity.links[idx_6].meta, { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].meta && typeof data.profile1.identity.links[idx_6].meta.foo !== 'undefined') entity.profile1.identity.links[idx_6].meta.foo = data.profile1.identity.links[idx_6].meta.foo; if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].meta && typeof data.profile1.identity.links[idx_6].meta.bar !== 'undefined') entity.profile1.identity.links[idx_6].meta.bar = data.profile1.identity.links[idx_6].meta.bar; if (data.profile1.identity.links[idx_6].meta.source === null) { @@ -519,7 +519,7 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile1.identity.links[idx_6].metas = []; data.profile1.identity.links[idx_6].metas.forEach((_, idx_7) => { if (data.profile1.identity.links[idx_6].metas[idx_7] != null) { - entity.profile1.identity.links[idx_6].metas[idx_7] = Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_6].metas[idx_7] = factory.createEmbeddable('IdentityMeta', data.profile1.identity.links[idx_6].metas[idx_7], { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].metas && data.profile1.identity.links[idx_6].metas[idx_7] && typeof data.profile1.identity.links[idx_6].metas[idx_7].foo !== 'undefined') entity.profile1.identity.links[idx_6].metas[idx_7].foo = data.profile1.identity.links[idx_6].metas[idx_7].foo; if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].metas && data.profile1.identity.links[idx_6].metas[idx_7] && typeof data.profile1.identity.links[idx_6].metas[idx_7].bar !== 'undefined') entity.profile1.identity.links[idx_6].metas[idx_7].bar = data.profile1.identity.links[idx_6].metas[idx_7].bar; if 
(data.profile1.identity.links[idx_6].metas[idx_7].source === null) { @@ -567,13 +567,13 @@ exports[`embedded entities in mongo diffing 2`] = ` } } if (data.profile2 != null) { - entity.profile2 = Object.create(prototype_profile2); + entity.profile2 = factory.createEmbeddable('Profile', data.profile2, { newEntity, convertCustomTypes }); if (data.profile2 && typeof data.profile2.username !== 'undefined') entity.profile2.username = data.profile2.username; if (data.profile2.identity != null) { - entity.profile2.identity = Object.create(prototype_profile2_identity); + entity.profile2.identity = factory.createEmbeddable('Identity', data.profile2.identity, { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && typeof data.profile2.identity.email !== 'undefined') entity.profile2.identity.email = data.profile2.identity.email; if (data.profile2.identity.meta != null) { - entity.profile2.identity.meta = Object.create(prototype_profile2_identity_meta); + entity.profile2.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile2.identity.meta, { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.meta && typeof data.profile2.identity.meta.foo !== 'undefined') entity.profile2.identity.meta.foo = data.profile2.identity.meta.foo; if (data.profile2 && data.profile2.identity && data.profile2.identity.meta && typeof data.profile2.identity.meta.bar !== 'undefined') entity.profile2.identity.meta.bar = data.profile2.identity.meta.bar; if (data.profile2.identity.meta.source === null) { @@ -590,12 +590,12 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile2.identity.links = []; data.profile2.identity.links.forEach((_, idx_8) => { if (data.profile2.identity.links[idx_8] != null) { - entity.profile2.identity.links[idx_8] = Object.create(prototype_profile2_identity_links); + entity.profile2.identity.links[idx_8] = factory.createEmbeddable('IdentityLink', data.profile2.identity.links[idx_8], { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && typeof data.profile2.identity.links[idx_8].url !== 'undefined') entity.profile2.identity.links[idx_8].url = data.profile2.identity.links[idx_8].url; if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].createdAt) entity.profile2.identity.links[idx_8].createdAt = new Date(data.profile2.identity.links[idx_8].createdAt); else if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].createdAt === null) entity.profile2.identity.links[idx_8].createdAt = null; if (data.profile2.identity.links[idx_8].meta != null) { - entity.profile2.identity.links[idx_8].meta = Object.create(prototype_profile2_identity_links_meta); + entity.profile2.identity.links[idx_8].meta = factory.createEmbeddable('IdentityMeta', data.profile2.identity.links[idx_8].meta, { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].meta && typeof data.profile2.identity.links[idx_8].meta.foo !== 'undefined') entity.profile2.identity.links[idx_8].meta.foo = data.profile2.identity.links[idx_8].meta.foo; if (data.profile2 && data.profile2.identity && data.profile2.identity.links && 
data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].meta && typeof data.profile2.identity.links[idx_8].meta.bar !== 'undefined') entity.profile2.identity.links[idx_8].meta.bar = data.profile2.identity.links[idx_8].meta.bar; if (data.profile2.identity.links[idx_8].meta.source === null) { @@ -612,7 +612,7 @@ exports[`embedded entities in mongo diffing 2`] = ` entity.profile2.identity.links[idx_8].metas = []; data.profile2.identity.links[idx_8].metas.forEach((_, idx_9) => { if (data.profile2.identity.links[idx_8].metas[idx_9] != null) { - entity.profile2.identity.links[idx_8].metas[idx_9] = Object.create(prototype_profile2_identity_links_metas); + entity.profile2.identity.links[idx_8].metas[idx_9] = factory.createEmbeddable('IdentityMeta', data.profile2.identity.links[idx_8].metas[idx_9], { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].metas && data.profile2.identity.links[idx_8].metas[idx_9] && typeof data.profile2.identity.links[idx_8].metas[idx_9].foo !== 'undefined') entity.profile2.identity.links[idx_8].metas[idx_9].foo = data.profile2.identity.links[idx_8].metas[idx_9].foo; if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].metas && data.profile2.identity.links[idx_8].metas[idx_9] && typeof data.profile2.identity.links[idx_8].metas[idx_9].bar !== 'undefined') entity.profile2.identity.links[idx_8].metas[idx_9].bar = data.profile2.identity.links[idx_8].metas[idx_9].bar; if (data.profile2.identity.links[idx_8].metas[idx_9].source === null) { diff --git a/entities-in-embeddables.postgres.test.ts.snap b/entities-in-embeddables.postgres.test.ts.snap index b499742..b7d476b 100644 --- a/entities-in-embeddables.postgres.test.ts.snap +++ b/entities-in-embeddables.postgres.test.ts.snap @@ -165,13 +165,13 @@ exports[`embedded entities in postgres diffing 2`] = ` if (typeof data.id !== 'undefined') entity.id = data.id; if (typeof data.name !== 'undefined') entity.name = data.name; if (data.profile1_username != null || data.profile1_identity != null || data.profile1_source != null) { - entity.profile1 = Object.create(prototype_profile1); + entity.profile1 = factory.createEmbeddable('Profile', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_username !== 'undefined') entity.profile1.username = data.profile1_username; if (data.profile1_identity_email != null || data.profile1_identity_meta != null || data.profile1_identity_links != null || data.profile1_identity_source != null) { - entity.profile1.identity = Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_email !== 'undefined') entity.profile1.identity.email = data.profile1_identity_email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar 
= data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -185,7 +185,7 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile1_identity_meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_meta && typeof data.profile1_identity_meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta.foo; if (data.profile1_identity_meta && typeof data.profile1_identity_meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta.bar; if (data.profile1_identity_meta.source === null) { @@ -202,12 +202,12 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links = []; data.profile1_identity_links.forEach((_, idx_0) => { if (data.profile1_identity_links[idx_0] != null) { - entity.profile1.identity.links[idx_0] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_0] = factory.createEmbeddable('IdentityLink', data.profile1_identity_links[idx_0], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && typeof data.profile1_identity_links[idx_0].url !== 'undefined') entity.profile1.identity.links[idx_0].url = data.profile1_identity_links[idx_0].url; if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].createdAt) entity.profile1.identity.links[idx_0].createdAt = new Date(data.profile1_identity_links[idx_0].createdAt); else if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].createdAt === null) entity.profile1.identity.links[idx_0].createdAt = null; if (data.profile1_identity_links[idx_0].meta != null) { - entity.profile1.identity.links[idx_0].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_0].meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_0].meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].meta && typeof data.profile1_identity_links[idx_0].meta.foo !== 'undefined') entity.profile1.identity.links[idx_0].meta.foo = data.profile1_identity_links[idx_0].meta.foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].meta && typeof data.profile1_identity_links[idx_0].meta.bar !== 'undefined') entity.profile1.identity.links[idx_0].meta.bar = data.profile1_identity_links[idx_0].meta.bar; if (data.profile1_identity_links[idx_0].meta.source === null) { @@ -224,7 +224,7 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links[idx_0].metas = []; data.profile1_identity_links[idx_0].metas.forEach((_, idx_1) => { if (data.profile1_identity_links[idx_0].metas[idx_1] != null) { - entity.profile1.identity.links[idx_0].metas[idx_1] = Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_0].metas[idx_1] = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_0].metas[idx_1], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].metas && 
data.profile1_identity_links[idx_0].metas[idx_1] && typeof data.profile1_identity_links[idx_0].metas[idx_1].foo !== 'undefined') entity.profile1.identity.links[idx_0].metas[idx_1].foo = data.profile1_identity_links[idx_0].metas[idx_1].foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_0] && data.profile1_identity_links[idx_0].metas && data.profile1_identity_links[idx_0].metas[idx_1] && typeof data.profile1_identity_links[idx_0].metas[idx_1].bar !== 'undefined') entity.profile1.identity.links[idx_0].metas[idx_1].bar = data.profile1_identity_links[idx_0].metas[idx_1].bar; if (data.profile1_identity_links[idx_0].metas[idx_1].source === null) { @@ -262,10 +262,10 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile1_identity != null) { - entity.profile1.identity = Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data.profile1_identity, { newEntity, convertCustomTypes }); if (data.profile1_identity && typeof data.profile1_identity.email !== 'undefined') entity.profile1.identity.email = data.profile1_identity.email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -279,7 +279,7 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile1_identity.meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity.meta, { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.meta && typeof data.profile1_identity.meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity.meta.foo; if (data.profile1_identity && data.profile1_identity.meta && typeof data.profile1_identity.meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity.meta.bar; if (data.profile1_identity.meta.source === null) { @@ -296,12 +296,12 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links = []; data.profile1_identity.links.forEach((_, idx_2) => { if (data.profile1_identity.links[idx_2] != null) { - entity.profile1.identity.links[idx_2] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_2] = factory.createEmbeddable('IdentityLink', data.profile1_identity.links[idx_2], { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && typeof data.profile1_identity.links[idx_2].url !== 'undefined') entity.profile1.identity.links[idx_2].url = data.profile1_identity.links[idx_2].url; if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].createdAt) entity.profile1.identity.links[idx_2].createdAt = new Date(data.profile1_identity.links[idx_2].createdAt); else if (data.profile1_identity && 
data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].createdAt === null) entity.profile1.identity.links[idx_2].createdAt = null; if (data.profile1_identity.links[idx_2].meta != null) { - entity.profile1.identity.links[idx_2].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_2].meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity.links[idx_2].meta, { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].meta && typeof data.profile1_identity.links[idx_2].meta.foo !== 'undefined') entity.profile1.identity.links[idx_2].meta.foo = data.profile1_identity.links[idx_2].meta.foo; if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].meta && typeof data.profile1_identity.links[idx_2].meta.bar !== 'undefined') entity.profile1.identity.links[idx_2].meta.bar = data.profile1_identity.links[idx_2].meta.bar; if (data.profile1_identity.links[idx_2].meta.source === null) { @@ -318,7 +318,7 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links[idx_2].metas = []; data.profile1_identity.links[idx_2].metas.forEach((_, idx_3) => { if (data.profile1_identity.links[idx_2].metas[idx_3] != null) { - entity.profile1.identity.links[idx_2].metas[idx_3] = Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_2].metas[idx_3] = factory.createEmbeddable('IdentityMeta', data.profile1_identity.links[idx_2].metas[idx_3], { newEntity, convertCustomTypes }); if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].metas && data.profile1_identity.links[idx_2].metas[idx_3] && typeof data.profile1_identity.links[idx_2].metas[idx_3].foo !== 'undefined') entity.profile1.identity.links[idx_2].metas[idx_3].foo = data.profile1_identity.links[idx_2].metas[idx_3].foo; if (data.profile1_identity && data.profile1_identity.links && data.profile1_identity.links[idx_2] && data.profile1_identity.links[idx_2].metas && data.profile1_identity.links[idx_2].metas[idx_3] && typeof data.profile1_identity.links[idx_2].metas[idx_3].bar !== 'undefined') entity.profile1.identity.links[idx_2].metas[idx_3].bar = data.profile1_identity.links[idx_2].metas[idx_3].bar; if (data.profile1_identity.links[idx_2].metas[idx_3].source === null) { @@ -366,13 +366,13 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile1 != null) { - entity.profile1 = Object.create(prototype_profile1); + entity.profile1 = factory.createEmbeddable('Profile', data.profile1, { newEntity, convertCustomTypes }); if (data.profile1 && typeof data.profile1.username !== 'undefined') entity.profile1.username = data.profile1.username; if (data.profile1_identity_email != null || data.profile1_identity_meta != null || data.profile1_identity_links != null || data.profile1_identity_source != null) { - entity.profile1.identity = Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_email !== 'undefined') entity.profile1.identity.email = data.profile1_identity_email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || 
data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -386,7 +386,7 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile1_identity_meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_meta && typeof data.profile1_identity_meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta.foo; if (data.profile1_identity_meta && typeof data.profile1_identity_meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta.bar; if (data.profile1_identity_meta.source === null) { @@ -403,12 +403,12 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links = []; data.profile1_identity_links.forEach((_, idx_4) => { if (data.profile1_identity_links[idx_4] != null) { - entity.profile1.identity.links[idx_4] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_4] = factory.createEmbeddable('IdentityLink', data.profile1_identity_links[idx_4], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && typeof data.profile1_identity_links[idx_4].url !== 'undefined') entity.profile1.identity.links[idx_4].url = data.profile1_identity_links[idx_4].url; if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].createdAt) entity.profile1.identity.links[idx_4].createdAt = new Date(data.profile1_identity_links[idx_4].createdAt); else if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].createdAt === null) entity.profile1.identity.links[idx_4].createdAt = null; if (data.profile1_identity_links[idx_4].meta != null) { - entity.profile1.identity.links[idx_4].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_4].meta = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_4].meta, { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].meta && typeof data.profile1_identity_links[idx_4].meta.foo !== 'undefined') entity.profile1.identity.links[idx_4].meta.foo = data.profile1_identity_links[idx_4].meta.foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].meta && typeof data.profile1_identity_links[idx_4].meta.bar !== 'undefined') entity.profile1.identity.links[idx_4].meta.bar = data.profile1_identity_links[idx_4].meta.bar; if (data.profile1_identity_links[idx_4].meta.source === null) { @@ -425,7 +425,7 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links[idx_4].metas = []; data.profile1_identity_links[idx_4].metas.forEach((_, idx_5) => { if 
(data.profile1_identity_links[idx_4].metas[idx_5] != null) { - entity.profile1.identity.links[idx_4].metas[idx_5] = Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_4].metas[idx_5] = factory.createEmbeddable('IdentityMeta', data.profile1_identity_links[idx_4].metas[idx_5], { newEntity, convertCustomTypes }); if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].metas && data.profile1_identity_links[idx_4].metas[idx_5] && typeof data.profile1_identity_links[idx_4].metas[idx_5].foo !== 'undefined') entity.profile1.identity.links[idx_4].metas[idx_5].foo = data.profile1_identity_links[idx_4].metas[idx_5].foo; if (data.profile1_identity_links && data.profile1_identity_links[idx_4] && data.profile1_identity_links[idx_4].metas && data.profile1_identity_links[idx_4].metas[idx_5] && typeof data.profile1_identity_links[idx_4].metas[idx_5].bar !== 'undefined') entity.profile1.identity.links[idx_4].metas[idx_5].bar = data.profile1_identity_links[idx_4].metas[idx_5].bar; if (data.profile1_identity_links[idx_4].metas[idx_5].source === null) { @@ -463,10 +463,10 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile1.identity != null) { - entity.profile1.identity = Object.create(prototype_profile1_identity); + entity.profile1.identity = factory.createEmbeddable('Identity', data.profile1.identity, { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && typeof data.profile1.identity.email !== 'undefined') entity.profile1.identity.email = data.profile1.identity.email; if (data.profile1_identity_meta_foo != null || data.profile1_identity_meta_bar != null || data.profile1_identity_meta_source != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data, { newEntity, convertCustomTypes }); if (typeof data.profile1_identity_meta_foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1_identity_meta_foo; if (typeof data.profile1_identity_meta_bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1_identity_meta_bar; if (data.profile1_identity_meta_source === null) { @@ -480,7 +480,7 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile1.identity.meta != null) { - entity.profile1.identity.meta = Object.create(prototype_profile1_identity_meta); + entity.profile1.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile1.identity.meta, { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && data.profile1.identity.meta && typeof data.profile1.identity.meta.foo !== 'undefined') entity.profile1.identity.meta.foo = data.profile1.identity.meta.foo; if (data.profile1 && data.profile1.identity && data.profile1.identity.meta && typeof data.profile1.identity.meta.bar !== 'undefined') entity.profile1.identity.meta.bar = data.profile1.identity.meta.bar; if (data.profile1.identity.meta.source === null) { @@ -497,12 +497,12 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links = []; data.profile1.identity.links.forEach((_, idx_6) => { if (data.profile1.identity.links[idx_6] != null) { - entity.profile1.identity.links[idx_6] = Object.create(prototype_profile1_identity_links); + entity.profile1.identity.links[idx_6] = factory.createEmbeddable('IdentityLink', data.profile1.identity.links[idx_6], { newEntity, convertCustomTypes }); if 
(data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && typeof data.profile1.identity.links[idx_6].url !== 'undefined') entity.profile1.identity.links[idx_6].url = data.profile1.identity.links[idx_6].url; if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].createdAt) entity.profile1.identity.links[idx_6].createdAt = new Date(data.profile1.identity.links[idx_6].createdAt); else if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].createdAt === null) entity.profile1.identity.links[idx_6].createdAt = null; if (data.profile1.identity.links[idx_6].meta != null) { - entity.profile1.identity.links[idx_6].meta = Object.create(prototype_profile1_identity_links_meta); + entity.profile1.identity.links[idx_6].meta = factory.createEmbeddable('IdentityMeta', data.profile1.identity.links[idx_6].meta, { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].meta && typeof data.profile1.identity.links[idx_6].meta.foo !== 'undefined') entity.profile1.identity.links[idx_6].meta.foo = data.profile1.identity.links[idx_6].meta.foo; if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].meta && typeof data.profile1.identity.links[idx_6].meta.bar !== 'undefined') entity.profile1.identity.links[idx_6].meta.bar = data.profile1.identity.links[idx_6].meta.bar; if (data.profile1.identity.links[idx_6].meta.source === null) { @@ -519,7 +519,7 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile1.identity.links[idx_6].metas = []; data.profile1.identity.links[idx_6].metas.forEach((_, idx_7) => { if (data.profile1.identity.links[idx_6].metas[idx_7] != null) { - entity.profile1.identity.links[idx_6].metas[idx_7] = Object.create(prototype_profile1_identity_links_metas); + entity.profile1.identity.links[idx_6].metas[idx_7] = factory.createEmbeddable('IdentityMeta', data.profile1.identity.links[idx_6].metas[idx_7], { newEntity, convertCustomTypes }); if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].metas && data.profile1.identity.links[idx_6].metas[idx_7] && typeof data.profile1.identity.links[idx_6].metas[idx_7].foo !== 'undefined') entity.profile1.identity.links[idx_6].metas[idx_7].foo = data.profile1.identity.links[idx_6].metas[idx_7].foo; if (data.profile1 && data.profile1.identity && data.profile1.identity.links && data.profile1.identity.links[idx_6] && data.profile1.identity.links[idx_6].metas && data.profile1.identity.links[idx_6].metas[idx_7] && typeof data.profile1.identity.links[idx_6].metas[idx_7].bar !== 'undefined') entity.profile1.identity.links[idx_6].metas[idx_7].bar = data.profile1.identity.links[idx_6].metas[idx_7].bar; if (data.profile1.identity.links[idx_6].metas[idx_7].source === null) { @@ -567,13 +567,13 @@ exports[`embedded entities in postgres diffing 2`] = ` } } if (data.profile2 != null) { - entity.profile2 = Object.create(prototype_profile2); + entity.profile2 = factory.createEmbeddable('Profile', data.profile2, { newEntity, convertCustomTypes }); if (data.profile2 && typeof 
data.profile2.username !== 'undefined') entity.profile2.username = data.profile2.username; if (data.profile2.identity != null) { - entity.profile2.identity = Object.create(prototype_profile2_identity); + entity.profile2.identity = factory.createEmbeddable('Identity', data.profile2.identity, { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && typeof data.profile2.identity.email !== 'undefined') entity.profile2.identity.email = data.profile2.identity.email; if (data.profile2.identity.meta != null) { - entity.profile2.identity.meta = Object.create(prototype_profile2_identity_meta); + entity.profile2.identity.meta = factory.createEmbeddable('IdentityMeta', data.profile2.identity.meta, { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.meta && typeof data.profile2.identity.meta.foo !== 'undefined') entity.profile2.identity.meta.foo = data.profile2.identity.meta.foo; if (data.profile2 && data.profile2.identity && data.profile2.identity.meta && typeof data.profile2.identity.meta.bar !== 'undefined') entity.profile2.identity.meta.bar = data.profile2.identity.meta.bar; if (data.profile2.identity.meta.source === null) { @@ -590,12 +590,12 @@ exports[`embedded entities in postgres diffing 2`] = ` entity.profile2.identity.links = []; data.profile2.identity.links.forEach((_, idx_8) => { if (data.profile2.identity.links[idx_8] != null) { - entity.profile2.identity.links[idx_8] = Object.create(prototype_profile2_identity_links); + entity.profile2.identity.links[idx_8] = factory.createEmbeddable('IdentityLink', data.profile2.identity.links[idx_8], { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && typeof data.profile2.identity.links[idx_8].url !== 'undefined') entity.profile2.identity.links[idx_8].url = data.profile2.identity.links[idx_8].url; if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].createdAt) entity.profile2.identity.links[idx_8].createdAt = new Date(data.profile2.identity.links[idx_8].createdAt); else if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].createdAt === null) entity.profile2.identity.links[idx_8].createdAt = null; if (data.profile2.identity.links[idx_8].meta != null) { - entity.profile2.identity.links[idx_8].meta = Object.create(prototype_profile2_identity_links_meta); + entity.profile2.identity.links[idx_8].meta = factory.createEmbeddable('IdentityMeta', data.profile2.identity.links[idx_8].meta, { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].meta && typeof data.profile2.identity.links[idx_8].meta.foo !== 'undefined') entity.profile2.identity.links[idx_8].meta.foo = data.profile2.identity.links[idx_8].meta.foo; if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].meta && typeof data.profile2.identity.links[idx_8].meta.bar !== 'undefined') entity.profile2.identity.links[idx_8].meta.bar = data.profile2.identity.links[idx_8].meta.bar; if (data.profile2.identity.links[idx_8].meta.source === null) { @@ -612,7 +612,7 @@ exports[`embedded entities in postgres 
diffing 2`] = ` entity.profile2.identity.links[idx_8].metas = []; data.profile2.identity.links[idx_8].metas.forEach((_, idx_9) => { if (data.profile2.identity.links[idx_8].metas[idx_9] != null) { - entity.profile2.identity.links[idx_8].metas[idx_9] = Object.create(prototype_profile2_identity_links_metas); + entity.profile2.identity.links[idx_8].metas[idx_9] = factory.createEmbeddable('IdentityMeta', data.profile2.identity.links[idx_8].metas[idx_9], { newEntity, convertCustomTypes }); if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].metas && data.profile2.identity.links[idx_8].metas[idx_9] && typeof data.profile2.identity.links[idx_8].metas[idx_9].foo !== 'undefined') entity.profile2.identity.links[idx_8].metas[idx_9].foo = data.profile2.identity.links[idx_8].metas[idx_9].foo; if (data.profile2 && data.profile2.identity && data.profile2.identity.links && data.profile2.identity.links[idx_8] && data.profile2.identity.links[idx_8].metas && data.profile2.identity.links[idx_8].metas[idx_9] && typeof data.profile2.identity.links[idx_8].metas[idx_9].bar !== 'undefined') entity.profile2.identity.links[idx_8].metas[idx_9].bar = data.profile2.identity.links[idx_8].metas[idx_9].bar; if (data.profile2.identity.links[idx_8].metas[idx_9].source === null) { diff --git a/polymorphic-embedded-entities.sqlite.test.ts.snap b/polymorphic-embedded-entities.sqlite.test.ts.snap index 1011941..788944e 100644 --- a/polymorphic-embedded-entities.sqlite.test.ts.snap +++ b/polymorphic-embedded-entities.sqlite.test.ts.snap @@ -0,0 +1,96 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`polymorphic embeddables in sqlite diffing 1`] = ` +"function(entity, data, factory, newEntity, convertCustomTypes) { + if (typeof data.id !== 'undefined') entity.id = data.id; + if (typeof data.name !== 'undefined') entity.name = data.name; + if (data.pet_canBark != null || data.pet_type != null || data.pet_name != null || data.pet_canMeow != null) { + if (data.pet_type == '1') { + entity.pet = factory.createEmbeddable('Dog', data, { newEntity, convertCustomTypes }); + } + if (data.pet_type == '0') { + entity.pet = factory.createEmbeddable('Cat', data, { newEntity, convertCustomTypes }); + } + if (typeof data.pet_canBark !== 'undefined') entity.pet.canBark = data.pet_canBark === null ? null : !!data.pet_canBark; + if (typeof data.pet_type !== 'undefined') entity.pet.type = data.pet_type; + if (typeof data.pet_name !== 'undefined') entity.pet.name = data.pet_name; + if (typeof data.pet_canMeow !== 'undefined') entity.pet.canMeow = data.pet_canMeow === null ? null : !!data.pet_canMeow; + } + if (typeof data.pet === 'string') { + data.pet = JSON.parse(data.pet); + } + if (data.pet != null) { + if (data.pet.type == '1') { + entity.pet = factory.createEmbeddable('Dog', data.pet, { newEntity, convertCustomTypes }); + } + if (data.pet.type == '0') { + entity.pet = factory.createEmbeddable('Cat', data.pet, { newEntity, convertCustomTypes }); + } + if (data.pet && typeof data.pet.canBark !== 'undefined') entity.pet.canBark = data.pet.canBark === null ? null : !!data.pet.canBark; + if (data.pet && typeof data.pet.type !== 'undefined') entity.pet.type = data.pet.type; + if (data.pet && typeof data.pet.name !== 'undefined') entity.pet.name = data.pet.name; + if (data.pet && typeof data.pet.canMeow !== 'undefined') entity.pet.canMeow = data.pet.canMeow === null ? 
null : !!data.pet.canMeow; + } + if (typeof data.pet2 === 'string') { + data.pet2 = JSON.parse(data.pet2); + } + if (data.pet2 != null) { + if (data.pet2.type == '1') { + entity.pet2 = factory.createEmbeddable('Dog', data.pet2, { newEntity, convertCustomTypes }); + } + if (data.pet2.type == '0') { + entity.pet2 = factory.createEmbeddable('Cat', data.pet2, { newEntity, convertCustomTypes }); + } + if (data.pet2 && typeof data.pet2.canBark !== 'undefined') entity.pet2.canBark = data.pet2.canBark === null ? null : !!data.pet2.canBark; + if (data.pet2 && typeof data.pet2.type !== 'undefined') entity.pet2.type = data.pet2.type; + if (data.pet2 && typeof data.pet2.name !== 'undefined') entity.pet2.name = data.pet2.name; + if (data.pet2 && typeof data.pet2.canMeow !== 'undefined') entity.pet2.canMeow = data.pet2.canMeow === null ? null : !!data.pet2.canMeow; + } +}" +`; + +exports[`polymorphic embeddables in sqlite diffing 2`] = ` +"function(entity) { + const ret = {}; + if ('id' in entity && entity.id != null) { + ret.id = entity.id; + } + + if ('name' in entity) { + ret.name = entity.name; + } + + if (entity.pet != null) { + ret.pet_canBark = clone(entity.pet.canBark); + ret.pet_type = clone(entity.pet.type); + ret.pet_name = clone(entity.pet.name); + ret.pet_canMeow = clone(entity.pet.canMeow); + } + + if (entity.pet2 != null) { + ret.pet2 = {}; + ret.pet2.canBark = clone(entity.pet2.canBark); + ret.pet2.type = clone(entity.pet2.type); + ret.pet2.name = clone(entity.pet2.name); + ret.pet2.canMeow = clone(entity.pet2.canMeow); + ret.pet2 = cloneEmbeddable(ret.pet2); + } + + return ret; +}" +`; + +exports[`polymorphic embeddables in sqlite schema 1`] = ` +"create table \\`owner\\` (\\`id\\` integer not null primary key autoincrement, \\`name\\` text not null, \\`pet_can_bark\\` integer null, \\`pet_type\\` integer not null, \\`pet_name\\` text not null, \\`pet_can_meow\\` integer null, \\`pet2\\` json not null); +create index \\`owner_pet_type_index\\` on \\`owner\\` (\\`pet_type\\`); + +" +`; + +exports[`polymorphic embeddables in sqlite schema 2`] = `""`; + +exports[`polymorphic embeddables in sqlite schema 3`] = ` +"drop table if exists \\`owner\\`; + +" +`; diff --git a/embedded-entities.mongo.test.ts b/embedded-entities.mongo.test.ts index e14bb61..0ba5323 100644 --- a/embedded-entities.mongo.test.ts +++ b/embedded-entities.mongo.test.ts @@ -1,5 +1,5 @@ import type { Platform } from '@mikro-orm/core'; -import { assign, Embeddable, Embedded, Entity, EntitySchema, Logger, MikroORM, PrimaryKey, Property, ReferenceType, SerializedPrimaryKey, Type } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, EntitySchema, Logger, MikroORM, PrimaryKey, Property, ReferenceType, SerializedPrimaryKey, Type } from '@mikro-orm/core'; import type { MongoDriver } from '@mikro-orm/mongodb'; import { ObjectId, MongoConnection, MongoPlatform } from '@mikro-orm/mongodb'; @@ -324,9 +324,9 @@ describe('embedded entities in mongo', () => { test('#assign() works with embeddables', async () => { const jon = new User(); - assign(jon, { address1: { city: '1', country: '2', postalCode: '3', street: '4' } }); + orm.em.assign(jon, { address1: { city: '1', country: '2', postalCode: '3', street: '4' } }); expect(jon.address1).toMatchObject({ city: '1', country: '2', postalCode: '3', street: '4' }); - assign(jon, { address4: { city: '41', country: '42', postalCode: '43', street: '44' } }); + orm.em.assign(jon, { address4: { city: '41', country: '42', postalCode: '43', street: '44' } }); 
expect(jon.address4).toMatchObject({ city: '41', country: '42', postalCode: '43', street: '44' }); }); diff --git a/embedded-entities.mysql.test.ts b/embedded-entities.mysql.test.ts index dba4dc9..e47f558 100644 --- a/embedded-entities.mysql.test.ts +++ b/embedded-entities.mysql.test.ts @@ -1,4 +1,4 @@ -import { assign, Embeddable, Embedded, Entity, Logger, MikroORM, PrimaryKey, Property, ReferenceType, wrap } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, Logger, MikroORM, PrimaryKey, Property, ReferenceType, wrap } from '@mikro-orm/core'; import type { MySqlDriver } from '@mikro-orm/mysql'; @Embeddable() @@ -237,10 +237,10 @@ describe('embedded entities in mysql', () => { address2: { street: 'Downing street 11', city: 'London 2', country: 'UK 2' }, address3: { street: 'Downing street 12', postalCode: '789', city: 'London 3', country: 'UK 3' }, }, { em: orm.em }); - assign(user, { address4: { city: '41', country: '42', postalCode: '43', street: '44' } }); + orm.em.assign(user, { address4: { city: '41', country: '42', postalCode: '43', street: '44' } }); expect(user.address4).toMatchObject({ city: '41', country: '42', postalCode: '43', street: '44' }); - assign(user, { address5: { city: '51', country: '52', postalCode: '53', street: '54' } }); + orm.em.assign(user, { address5: { city: '51', country: '52', postalCode: '53', street: '54' } }); expect(user.address5).toMatchObject({ city: '51', country: '52', postalCode: '53', street: '54' }); expect(user.address1).toBeInstanceOf(Address1); @@ -264,25 +264,25 @@ describe('embedded entities in mysql', () => { country: 'UK 3', }); - assign(user, { address2: undefined }); + orm.em.assign(user, { address2: undefined }); expect(user.address2).toBe(undefined); - assign(user, { address2: null }); + orm.em.assign(user, { address2: null }); expect(user.address2).toBe(null); expect(() => { - assign(user, { address4: undefined }); + orm.em.assign(user, { address4: undefined }); }).toThrow('You must pass a non-undefined value to the property address4 of entity User.'); expect(() => { - assign(user, { address4: null }); + orm.em.assign(user, { address4: null }); }).toThrow('You must pass a non-null value to the property address4 of entity User.'); - assign(user, { address5: undefined }); + orm.em.assign(user, { address5: undefined }); expect(user.address5).toBe(undefined); - assign(user, { address5: null }); + orm.em.assign(user, { address5: null }); expect(user.address5).toBe(null); }); diff --git a/embedded-entities.postgres.test.ts b/embedded-entities.postgres.test.ts index f2eafb7..fc7906f 100644 --- a/embedded-entities.postgres.test.ts +++ b/embedded-entities.postgres.test.ts @@ -1,4 +1,4 @@ -import { assign, Embeddable, Embedded, Entity, expr, Logger, MikroORM, PrimaryKey, Property, ReferenceType, t, wrap } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, expr, Logger, MikroORM, PrimaryKey, Property, ReferenceType, t } from '@mikro-orm/core'; import type { PostgreSqlDriver } from '@mikro-orm/postgresql'; @Embeddable() @@ -152,19 +152,19 @@ describe('embedded entities in postgresql', () => { const address1 = new Address1('Downing street 13A', 10, '10A', 'London 4A', 'UK 4A'); const address2 = { street: 'Downing street 23A', number: 20, postalCode: '20A', city: 'London 24A', country: 'UK 24A' }; - wrap(user).assign({ addresses: [address1] }, { mergeObjects: true }); + orm.em.assign(user, { addresses: [address1] }); expect(user.addresses).toEqual([address1]); expect(user.addresses[0]).toBeInstanceOf(Address1); - 
wrap(user).assign({ addresses: [address1] }, { mergeObjects: true, updateNestedEntities: true }); + orm.em.assign(user, { addresses: [address1] }); expect(user.addresses).toEqual([address1]); expect(user.addresses[0]).toBeInstanceOf(Address1); - wrap(user).assign({ addresses: [address2] }); + orm.em.assign(user, { addresses: [address2] }); expect(user.addresses).toEqual([address2]); expect(user.addresses[0]).toBeInstanceOf(Address1); - wrap(user).assign({ addresses: address1 }); // push to existing array + orm.em.assign(user, { addresses: address1 }); // push to existing array expect(user.addresses).toEqual([address2, address1]); expect(user.addresses[0]).toBeInstanceOf(Address1); expect(user.addresses[1]).toBeInstanceOf(Address1); @@ -263,12 +263,12 @@ describe('embedded entities in postgresql', () => { test('assign', async () => { const user = new User(); - wrap(user).assign({ + orm.em.assign(user, { address1: { street: 'Downing street 10', postalCode: '123', city: 'London 1', country: 'UK 1' }, address2: { street: 'Downing street 11', city: 'London 2', country: 'UK 2' }, address3: { street: 'Downing street 12', postalCode: '789', city: 'London 3', country: 'UK 3' }, }, { em: orm.em }); - assign(user, { address4: { city: '41', country: '42', postalCode: '43', street: '44' } }); + orm.em.assign(user, { address4: { city: '41', country: '42', postalCode: '43', street: '44' } }); expect(user.address4).toMatchObject({ city: '41', country: '42', postalCode: '43', street: '44' }); expect(user.address1).toBeInstanceOf(Address1); @@ -293,10 +293,9 @@ describe('embedded entities in postgresql', () => { }); }); - test('native update entity', async () => { const user = new User(); - wrap(user).assign({ + orm.em.assign(user, { address1: { street: 'Downing street 10', number: 3, postalCode: '123', city: 'London 1', country: 'UK 1' }, address2: { street: 'Downing street 11', city: 'London 2', country: 'UK 2' }, address3: { street: 'Downing street 12', number: 3, postalCode: '789', city: 'London 3', country: 'UK 3' }, diff --git a/entities-in-embeddables.mongo.test.ts b/entities-in-embeddables.mongo.test.ts index a5d8df5..2411bab 100644 --- a/entities-in-embeddables.mongo.test.ts +++ b/entities-in-embeddables.mongo.test.ts @@ -1,5 +1,5 @@ import type { ObjectHydrator } from '@mikro-orm/core'; -import { assign, Embeddable, Embedded, Entity, Logger, ManyToOne, MikroORM, PrimaryKey, Property, wrap } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, Logger, ManyToOne, MikroORM, PrimaryKey, Property, wrap } from '@mikro-orm/core'; import type { MongoDriver } from '@mikro-orm/mongodb'; import { ObjectId } from '@mikro-orm/mongodb'; @@ -443,7 +443,7 @@ describe('embedded entities in mongo', () => { test('#assign() works with nested embeddables', async () => { const jon = new User(); - assign(jon, { + orm.em.assign(jon, { profile1: { username: 'u1', identity: { email: 'e1', meta: { bar: 'b1', foo: 'f1' } } }, profile2: { username: 'u2', identity: { email: 'e2', meta: { bar: 'b2', foo: 'f2' } } }, }); @@ -456,17 +456,17 @@ describe('embedded entities in mongo', () => { expect(jon.profile2.identity).toBeInstanceOf(Identity); expect(jon.profile2.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e3' } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { email: 'e3' } } }); expect(jon.profile1.username).toBe('u1'); expect(jon.profile1.identity.email).toBe('e3'); expect(jon.profile1.identity.meta).not.toBeUndefined(); delete 
jon.profile1.identity.meta; - assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }); expect(jon.profile1.identity.meta!.foo).toBe('f'); expect(jon.profile1.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e4' } } }); + orm.em.assign(jon, { profile1: { identity: { email: 'e4' } } }, { mergeObjects: false }); expect(jon.profile1.username).toBeUndefined(); expect(jon.profile1.identity.email).toBe('e4'); expect(jon.profile1.identity.meta).toBeUndefined(); diff --git a/entities-in-embeddables.postgres.test.ts b/entities-in-embeddables.postgres.test.ts index 25fe4ef..172dbe8 100644 --- a/entities-in-embeddables.postgres.test.ts +++ b/entities-in-embeddables.postgres.test.ts @@ -1,5 +1,5 @@ import type { ObjectHydrator } from '@mikro-orm/core'; -import { assign, Embeddable, Embedded, Entity, Logger, ManyToOne, MikroORM, PrimaryKey, Property, wrap } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, Logger, ManyToOne, MikroORM, PrimaryKey, Property, wrap } from '@mikro-orm/core'; import type { PostgreSqlDriver } from '@mikro-orm/postgresql'; @Entity() @@ -412,7 +412,7 @@ describe('embedded entities in postgres', () => { test('#assign() works with nested embeddables', async () => { const jon = new User(); - assign(jon, { + orm.em.assign(jon, { profile1: { username: 'u1', identity: { email: 'e1', meta: { bar: 'b1', foo: 'f1' } } }, profile2: { username: 'u2', identity: { email: 'e2', meta: { bar: 'b2', foo: 'f2' } } }, }); @@ -425,17 +425,17 @@ describe('embedded entities in postgres', () => { expect(jon.profile2.identity).toBeInstanceOf(Identity); expect(jon.profile2.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e3' } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { email: 'e3' } } }); expect(jon.profile1.username).toBe('u1'); expect(jon.profile1.identity.email).toBe('e3'); expect(jon.profile1.identity.meta).not.toBeUndefined(); delete jon.profile1.identity.meta; - assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }); expect(jon.profile1.identity.meta!.foo).toBe('f'); expect(jon.profile1.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e4' } } }); + orm.em.assign(jon, { profile1: { identity: { email: 'e4' } } }, { mergeObjects: false }); expect(jon.profile1.username).toBeUndefined(); expect(jon.profile1.identity.email).toBe('e4'); expect(jon.profile1.identity.meta).toBeUndefined(); diff --git a/nested-embeddables.mongo.test.ts b/nested-embeddables.mongo.test.ts index a4a5141..5fdffed 100644 --- a/nested-embeddables.mongo.test.ts +++ b/nested-embeddables.mongo.test.ts @@ -1,4 +1,4 @@ -import { assign, Embeddable, Embedded, Entity, Logger, MikroORM, PrimaryKey, Property } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, Logger, MikroORM, PrimaryKey, Property } from '@mikro-orm/core'; import type { MongoDriver } from '@mikro-orm/mongodb'; import { ObjectId, MongoConnection } from '@mikro-orm/mongodb'; @@ -201,7 +201,7 @@ describe('embedded entities in mongo', () => { test('#assign() works with nested embeddables', async () => { const jon = new User(); - assign(jon, { + orm.em.assign(jon, { profile1: { username: 'u1', identity: { email: 'e1', meta: { bar: 'b1', foo: 'f1' } } }, profile2: { username: 
'u2', identity: { email: 'e2', meta: { bar: 'b2', foo: 'f2' } } }, }); @@ -214,17 +214,17 @@ describe('embedded entities in mongo', () => { expect(jon.profile2.identity).toBeInstanceOf(Identity); expect(jon.profile2.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e3' } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { email: 'e3' } } }); expect(jon.profile1.username).toBe('u1'); expect(jon.profile1.identity.email).toBe('e3'); expect(jon.profile1.identity.meta).not.toBeUndefined(); delete jon.profile1.identity.meta; - assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }); expect(jon.profile1.identity.meta!.foo).toBe('f'); expect(jon.profile1.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e4' } } }); + orm.em.assign(jon, { profile1: { identity: { email: 'e4' } } }, { mergeObjects: false }); expect(jon.profile1.username).toBeUndefined(); expect(jon.profile1.identity.email).toBe('e4'); expect(jon.profile1.identity.meta).toBeUndefined(); diff --git a/nested-embeddables.postgres.test.ts b/nested-embeddables.postgres.test.ts index 1b8228c..fc40ab5 100644 --- a/nested-embeddables.postgres.test.ts +++ b/nested-embeddables.postgres.test.ts @@ -1,4 +1,4 @@ -import { assign, Embeddable, Embedded, Entity, Logger, MikroORM, PrimaryKey, Property } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, Logger, MikroORM, PrimaryKey, Property } from '@mikro-orm/core'; import type { PostgreSqlDriver } from '@mikro-orm/postgresql'; @Embeddable() @@ -270,7 +270,7 @@ describe('embedded entities in postgres', () => { test('#assign() works with nested embeddables', async () => { const jon = new User(); - assign(jon, { + orm.em.assign(jon, { profile1: { username: 'u1', identity: { email: 'e1', meta: { bar: 'b1', foo: 'f1' } } }, profile2: { username: 'u2', identity: { email: 'e2', meta: { bar: 'b2', foo: 'f2' } } }, }); @@ -283,17 +283,17 @@ describe('embedded entities in postgres', () => { expect(jon.profile2.identity).toBeInstanceOf(Identity); expect(jon.profile2.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e3' } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { email: 'e3' } } }); expect(jon.profile1.username).toBe('u1'); expect(jon.profile1.identity.email).toBe('e3'); expect(jon.profile1.identity.meta).not.toBeUndefined(); delete jon.profile1.identity.meta; - assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }, { mergeObjects: true }); + orm.em.assign(jon, { profile1: { identity: { meta: { foo: 'f' } } } }); expect(jon.profile1.identity.meta!.foo).toBe('f'); expect(jon.profile1.identity.meta).toBeInstanceOf(IdentityMeta); - assign(jon, { profile1: { identity: { email: 'e4' } } }); + orm.em.assign(jon, { profile1: { identity: { email: 'e4' } } }, { mergeObjects: false }); expect(jon.profile1.username).toBeUndefined(); expect(jon.profile1.identity.email).toBe('e4'); expect(jon.profile1.identity.meta).toBeUndefined(); diff --git a/polymorphic-embedded-entities.mongo.test.ts b/polymorphic-embedded-entities.mongo.test.ts index 48a12df..3faba91 100644 --- a/polymorphic-embedded-entities.mongo.test.ts +++ b/polymorphic-embedded-entities.mongo.test.ts @@ -0,0 +1,250 @@ +import { Embeddable, Embedded, Entity, Enum, MikroORM, PrimaryKey, Property, SerializedPrimaryKey, wrap } from '@mikro-orm/core'; 
+import type { MongoDriver } from '@mikro-orm/mongodb'; +import { mockLogger } from '../../bootstrap'; +import { ObjectId } from 'mongodb'; + +enum AnimalType { + CAT, + DOG, +} + +@Embeddable({ abstract: true, discriminatorColumn: 'type' }) +abstract class Animal { + + @Enum(() => AnimalType) + type!: AnimalType; + + @Property() + name!: string; + +} + +@Embeddable({ discriminatorValue: AnimalType.CAT }) +class Cat extends Animal { + + @Property({ nullable: true }) + canMeow?: boolean = true; + + constructor(name: string) { + super(); + this.type = AnimalType.CAT; + this.name = name; + } + +} + +@Embeddable({ discriminatorValue: AnimalType.DOG }) +class Dog extends Animal { + + @Property({ nullable: true }) + canBark?: boolean = true; + + constructor(name: string) { + super(); + this.type = AnimalType.DOG; + this.name = name; + } + +} + +@Entity() +class Owner { + + @PrimaryKey() + _id!: ObjectId; + + @SerializedPrimaryKey() + id!: string; + + @Property() + name!: string; + + @Embedded(() => [Cat, Dog]) + pet!: Cat | Dog; + + @Embedded(() => [Cat, Dog], { object: true }) + pet2!: Cat | Dog; + +} + +describe('polymorphic embeddables in mongo', () => { + + let orm: MikroORM<MongoDriver>; + + beforeAll(async () => { + orm = await MikroORM.init({ + entities: [Dog, Cat, Owner], + clientUrl: 'mongodb://localhost:27017,localhost:27018,localhost:27019/mikro-orm-test-poly-embeddables?replicaSet=rs', + type: 'mongo', + validate: true, + }); + }); + + afterAll(async () => { + await orm.em.getDriver().dropCollections(); + await orm.close(); + }); + + beforeEach(async () => { + await orm.em.nativeDelete(Owner, {}); + orm.em.clear(); + }); + + test(`working with polymorphic embeddables`, async () => { + const ent1 = new Owner(); + ent1._id = new ObjectId('600000000000000000000001'); + ent1.name = 'o1'; + ent1.pet = new Dog('d1'); + ent1.pet2 = new Cat('c3'); + expect(ent1.pet).toBeInstanceOf(Dog); + expect((ent1.pet as Dog).canBark).toBe(true); + expect(ent1.pet2).toBeInstanceOf(Cat); + expect((ent1.pet2 as Cat).canMeow).toBe(true); + const ent2 = orm.em.create(Owner, { + id: '600000000000000000000002', + name: 'o2', + pet: { type: AnimalType.CAT, name: 'c1' }, + pet2: { type: AnimalType.DOG, name: 'd4' }, + }); + expect(ent2.pet).toBeInstanceOf(Cat); + expect((ent2.pet as Cat).canMeow).toBe(true); + expect(ent2.pet2).toBeInstanceOf(Dog); + expect((ent2.pet2 as Dog).canBark).toBe(true); + const ent3 = orm.em.create(Owner, { + id: '600000000000000000000003', + name: 'o3', + pet: { type: AnimalType.DOG, name: 'd2' }, + pet2: { type: AnimalType.CAT, name: 'c4' }, + }); + expect(ent3.pet).toBeInstanceOf(Dog); + expect((ent3.pet as Dog).canBark).toBe(true); + expect(ent3.pet2).toBeInstanceOf(Cat); + expect((ent3.pet2 as Cat).canMeow).toBe(true); + + const mock = mockLogger(orm, ['query']); + await orm.em.persistAndFlush([ent1, ent2, ent3]); + expect(mock.mock.calls[0][0]).toMatch(`db.getCollection('owner').insertMany([ { _id: ObjectId('600000000000000000000001'), name: 'o1', pet_canBark: true, pet_type: 1, pet_name: 'd1', pet_canMeow: undefined, pet2: { type: 0, name: 'c3', canMeow: true } }, { _id: ObjectId('600000000000000000000002'), name: 'o2', pet_canBark: undefined, pet_type: 0, pet_name: 'c1', pet_canMeow: true, pet2: { canBark: true, type: 1, name: 'd4' } }, { _id: ObjectId('600000000000000000000003'), name: 'o3', pet_canBark: true, pet_type: 1, pet_name: 'd2', pet_canMeow: undefined, pet2: { type: 0, name: 'c4', canMeow: true } } ], { session: undefined });`); + orm.em.clear(); + + const owners = 
await orm.em.find(Owner, {}, { orderBy: { name: 1 } }); + expect(mock.mock.calls[1][0]).toMatch(`db.getCollection('owner').find({}, { session: undefined }).sort([ [ 'name', 1 ] ]).toArray();`); + expect(owners[0].name).toBe('o1'); + expect(owners[0].pet).toBeInstanceOf(Dog); + expect(owners[0].pet.name).toBe('d1'); + expect(owners[0].pet.type).toBe(AnimalType.DOG); + expect((owners[0].pet as Cat).canMeow).toBeNull(); + expect((owners[0].pet as Dog).canBark).toBe(true); + expect(owners[1].pet).toBeInstanceOf(Cat); + expect(owners[1].pet.name).toBe('c1'); + expect(owners[1].pet.type).toBe(AnimalType.CAT); + expect((owners[1].pet as Cat).canMeow).toBe(true); + expect((owners[1].pet as Dog).canBark).toBeNull(); + expect(owners[2].pet).toBeInstanceOf(Dog); + expect(owners[2].pet.name).toBe('d2'); + expect(owners[2].pet.type).toBe(AnimalType.DOG); + + expect(mock.mock.calls).toHaveLength(2); + await orm.em.flush(); + expect(mock.mock.calls).toHaveLength(2); + + owners[0].pet = new Cat('c2'); + owners[1].pet = new Dog('d3'); + owners[2].pet.name = 'old dog'; + mock.mock.calls.length = 0; + await orm.em.flush(); + expect(mock.mock.calls).toHaveLength(1); + expect(mock.mock.calls[0][0]).toMatch(`bulk = db.getCollection('owner').initializeUnorderedBulkOp({ session: undefined });bulk.find({ _id: ObjectId('600000000000000000000001') }).update({ '$set': { pet_canBark: undefined, pet_type: 0, pet_name: 'c2', pet_canMeow: true } });bulk.find({ _id: ObjectId('600000000000000000000002') }).update({ '$set': { pet_canBark: true, pet_type: 1, pet_name: 'd3', pet_canMeow: undefined } });bulk.find({ _id: ObjectId('600000000000000000000003') }).update({ '$set': { pet_name: 'old dog' } });bulk.execute()`); + orm.em.clear(); + + const owners2 = await orm.em.find(Owner, {}, { orderBy: { name: 1 } }); + expect(mock.mock.calls[1][0]).toMatch(`db.getCollection('owner').find({}, { session: undefined }).sort([ [ 'name', 1 ] ]).toArray();`); + + expect(owners2[0].name).toBe('o1'); + expect(owners2[0].pet).toBeInstanceOf(Cat); + expect(owners2[0].pet.name).toBe('c2'); + expect(owners2[0].pet.type).toBe(AnimalType.CAT); + expect((owners2[0].pet as Dog).canBark).toBeNull(); + expect((owners2[0].pet as Cat).canMeow).toBe(true); + + expect(owners2[1].pet).toBeInstanceOf(Dog); + expect(owners2[1].pet.name).toBe('d3'); + expect(owners2[1].pet.type).toBe(AnimalType.DOG); + expect((owners2[1].pet as Dog).canBark).toBe(true); + expect((owners2[1].pet as Cat).canMeow).toBeNull(); + + expect(owners2[2].pet).toBeInstanceOf(Dog); + expect(owners2[2].pet.name).toBe('old dog'); + expect(owners2[2].pet.type).toBe(AnimalType.DOG); + expect((owners2[2].pet as Dog).canBark).toBe(true); + expect((owners2[2].pet as Cat).canMeow).toBeNull(); + orm.em.clear(); + + mock.mock.calls.length = 0; + const dogOwners = await orm.em.find(Owner, { pet: { name: ['d3', 'old dog'] } }, { orderBy: { name: 1 } }); + const dogOwners2 = await orm.em.find(Owner, { pet2: { name: ['d4', 'c4'] } }, { orderBy: { name: 1 } }); + const dogOwners3 = await orm.em.find(Owner, { $or: [ + { pet: { name: ['d3', 'old dog'] } }, + { pet2: { name: ['d4', 'c4'] } }, + ] }, { orderBy: { name: 1 } }); + + const check = (items: Owner[]) => { + expect(items).toHaveLength(2); + expect(items[0].pet).toBeInstanceOf(Dog); + expect(items[0].pet.name).toBe('d3'); + expect(items[0].pet2).toBeInstanceOf(Dog); + expect(items[0].pet2.name).toBe('d4'); + expect(items[1].pet).toBeInstanceOf(Dog); + expect(items[1].pet.name).toBe('old dog'); + expect(items[1].pet2).toBeInstanceOf(Cat); + 
expect(items[1].pet2.name).toBe('c4'); + }; + check(dogOwners); + check(dogOwners2); + check(dogOwners3); + + expect(mock.mock.calls[0][0]).toMatch(`db.getCollection('owner').find({ pet_name: { '$in': [ 'd3', 'old dog' ] } }, { session: undefined }).sort([ [ 'name', 1 ] ]).toArray();`); + expect(mock.mock.calls[1][0]).toMatch(`db.getCollection('owner').find({ 'pet2.name': { '$in': [ 'd4', 'c4' ] } }, { session: undefined }).sort([ [ 'name', 1 ] ]).toArray();`); + expect(mock.mock.calls[2][0]).toMatch(`db.getCollection('owner').find({ '$or': [ { pet_name: { '$in': [ 'd3', 'old dog' ] } }, { 'pet2.name': { '$in': [ 'd4', 'c4' ] } } ] }, { session: undefined }).sort([ [ 'name', 1 ] ]).toArray();`); + }); + + test('assign and serialization', async () => { + const owner = new Owner(); + owner._id = new ObjectId('600000000000000000000004'); + owner.name = 'o1'; + orm.em.assign(owner, { + pet: { name: 'cat', type: AnimalType.CAT }, + pet2: { name: 'dog', type: AnimalType.DOG }, + }); + expect(owner.pet).toMatchObject({ name: 'cat', type: AnimalType.CAT }); + expect(owner.pet).toBeInstanceOf(Cat); + expect(owner.pet2).toMatchObject({ name: 'dog', type: AnimalType.DOG }); + expect(owner.pet2).toBeInstanceOf(Dog); + + const mock = mockLogger(orm, ['query']); + await orm.em.persistAndFlush(owner); + expect(mock.mock.calls[0][0]).toMatch(`db.getCollection('owner').insertOne({ _id: ObjectId('600000000000000000000004'), name: 'o1', pet_canBark: undefined, pet_type: 0, pet_name: 'cat', pet_canMeow: true, pet2: { canBark: true, type: 1, name: 'dog' } }, { session: undefined });`); + + orm.em.assign(owner, { + pet: { name: 'cat name' }, + pet2: { name: 'dog name' }, + }); + await orm.em.persistAndFlush(owner); + expect(mock.mock.calls[1][0]).toMatch(`db.getCollection('owner').updateMany({ _id: ObjectId('600000000000000000000004') }, { '$set': { pet_name: 'cat name', pet2: { canBark: true, type: 1, name: 'dog name' } } }, { session: undefined });`); + + expect(wrap(owner).toObject()).toEqual({ + id: owner.id, + name: 'o1', + pet: { + canMeow: true, + name: 'cat name', + type: 0, + }, + pet2: { + canBark: true, + name: 'dog name', + type: 1, + }, + }); + }); + +}); diff --git a/polymorphic-embedded-entities.sqlite.test.ts b/polymorphic-embedded-entities.sqlite.test.ts index 28efe91..536cb39 100644 --- a/polymorphic-embedded-entities.sqlite.test.ts +++ b/polymorphic-embedded-entities.sqlite.test.ts @@ -0,0 +1,263 @@ +import type { ObjectHydrator } from '@mikro-orm/core'; +import { Embeddable, Embedded, Entity, Enum, MikroORM, PrimaryKey, Property, wrap } from '@mikro-orm/core'; +import { mockLogger } from '../../bootstrap'; + +enum AnimalType { + CAT, + DOG, +} + +@Embeddable({ abstract: true, discriminatorColumn: 'type' }) +abstract class Animal { + + @Enum(() => AnimalType) + type!: AnimalType; + + @Property() + name!: string; + +} + +@Embeddable({ discriminatorValue: AnimalType.CAT }) +class Cat extends Animal { + + @Property({ nullable: true }) + canMeow?: boolean = true; + + constructor(name: string) { + super(); + this.type = AnimalType.CAT; + this.name = name; + } + +} + +@Embeddable({ discriminatorValue: AnimalType.DOG }) +class Dog extends Animal { + + @Property({ nullable: true }) + canBark?: boolean = true; + + constructor(name: string) { + super(); + this.type = AnimalType.DOG; + this.name = name; + } + +} + +@Entity() +class Owner { + + @PrimaryKey() + id!: number; + + @Property() + name: string; + + @Embedded(() => [Cat, Dog]) + pet!: Cat | Dog; + + @Embedded(() => [Cat, Dog], { object: true }) 
+ pet2!: Cat | Dog; + + constructor(name: string) { + this.name = name; + } + +} + +describe('polymorphic embeddables in sqlite', () => { + + let orm: MikroORM; + + beforeAll(async () => { + orm = await MikroORM.init({ + entities: [Dog, Cat, Owner], + dbName: ':memory:', + type: 'sqlite', + }); + await orm.getSchemaGenerator().createSchema(); + }); + + afterAll(async () => { + await orm.close(); + }); + + beforeEach(async () => { + await orm.em.nativeDelete(Owner, {}); + orm.em.clear(); + }); + + test(`schema`, async () => { + await expect(orm.getSchemaGenerator().getCreateSchemaSQL({ wrap: false })).resolves.toMatchSnapshot(); + await expect(orm.getSchemaGenerator().getUpdateSchemaSQL({ wrap: false })).resolves.toMatchSnapshot(); + await expect(orm.getSchemaGenerator().getDropSchemaSQL({ wrap: false })).resolves.toMatchSnapshot(); + }); + + test(`diffing`, async () => { + const hydrator = orm.config.getHydrator(orm.getMetadata()) as ObjectHydrator; + expect(hydrator.getEntityHydrator(orm.getMetadata().get('Owner'), 'full').toString()).toMatchSnapshot(); + expect(orm.em.getComparator().getSnapshotGenerator('Owner').toString()).toMatchSnapshot(); + }); + + test(`working with polymorphic embeddables`, async () => { + const ent1 = new Owner('o1'); + ent1.pet = new Dog('d1'); + ent1.pet2 = new Cat('c3'); + expect(ent1.pet).toBeInstanceOf(Dog); + expect((ent1.pet as Dog).canBark).toBe(true); + expect(ent1.pet2).toBeInstanceOf(Cat); + expect((ent1.pet2 as Cat).canMeow).toBe(true); + const ent2 = orm.em.create(Owner, { + name: 'o2', + pet: { type: AnimalType.CAT, name: 'c1' }, + pet2: { type: AnimalType.DOG, name: 'd4' }, + }); + expect(ent2.pet).toBeInstanceOf(Cat); + expect((ent2.pet as Cat).canMeow).toBe(true); + expect(ent2.pet2).toBeInstanceOf(Dog); + expect((ent2.pet2 as Dog).canBark).toBe(true); + const ent3 = orm.em.create(Owner, { + name: 'o3', + pet: { type: AnimalType.DOG, name: 'd2' }, + pet2: { type: AnimalType.CAT, name: 'c4' }, + }); + expect(ent3.pet).toBeInstanceOf(Dog); + expect((ent3.pet as Dog).canBark).toBe(true); + expect(ent3.pet2).toBeInstanceOf(Cat); + expect((ent3.pet2 as Cat).canMeow).toBe(true); + + const mock = mockLogger(orm, ['query']); + await orm.em.persistAndFlush([ent1, ent2, ent3]); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('insert into `owner` (`name`, `pet_can_bark`, `pet_type`, `pet_name`, `pet_can_meow`, `pet2`) values (?, ?, ?, ?, ?, ?), (?, ?, ?, ?, ?, ?), (?, ?, ?, ?, ?, ?)'); + expect(mock.mock.calls[2][0]).toMatch('commit'); + orm.em.clear(); + + const owners = await orm.em.find(Owner, {}, { orderBy: { name: 1 } }); + expect(mock.mock.calls[3][0]).toMatch('select `o0`.* from `owner` as `o0` order by `o0`.`name` asc'); + expect(owners[0].name).toBe('o1'); + expect(owners[0].pet).toBeInstanceOf(Dog); + expect(owners[0].pet.name).toBe('d1'); + expect(owners[0].pet.type).toBe(AnimalType.DOG); + expect((owners[0].pet as Cat).canMeow).toBeNull(); + expect((owners[0].pet as Dog).canBark).toBe(true); + expect(owners[1].pet).toBeInstanceOf(Cat); + expect(owners[1].pet.name).toBe('c1'); + expect(owners[1].pet.type).toBe(AnimalType.CAT); + expect((owners[1].pet as Cat).canMeow).toBe(true); + expect((owners[1].pet as Dog).canBark).toBeNull(); + expect(owners[2].pet).toBeInstanceOf(Dog); + expect(owners[2].pet.name).toBe('d2'); + expect(owners[2].pet.type).toBe(AnimalType.DOG); + + expect(mock.mock.calls).toHaveLength(4); + await orm.em.flush(); + expect(mock.mock.calls).toHaveLength(4); + + owners[0].pet = new 
Cat('c2'); + owners[1].pet = new Dog('d3'); + owners[2].pet.name = 'old dog'; + mock.mock.calls.length = 0; + await orm.em.flush(); + expect(mock.mock.calls).toHaveLength(3); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('update `owner` set `pet_can_bark` = case when (`id` = ?) then ? when (`id` = ?) then ? else `pet_can_bark` end, `pet_type` = case when (`id` = ?) then ? when (`id` = ?) then ? else `pet_type` end, `pet_name` = case when (`id` = ?) then ? when (`id` = ?) then ? when (`id` = ?) then ? else `pet_name` end, `pet_can_meow` = case when (`id` = ?) then ? when (`id` = ?) then ? else `pet_can_meow` end where `id` in (?, ?, ?)'); + expect(mock.mock.calls[2][0]).toMatch('commit'); + orm.em.clear(); + + const owners2 = await orm.em.find(Owner, {}, { orderBy: { name: 1 } }); + expect(mock.mock.calls[3][0]).toMatch('select `o0`.* from `owner` as `o0` order by `o0`.`name` asc'); + + expect(owners2[0].name).toBe('o1'); + expect(owners2[0].pet).toBeInstanceOf(Cat); + expect(owners2[0].pet.name).toBe('c2'); + expect(owners2[0].pet.type).toBe(AnimalType.CAT); + expect((owners2[0].pet as Dog).canBark).toBeNull(); + expect((owners2[0].pet as Cat).canMeow).toBe(true); + + expect(owners2[1].pet).toBeInstanceOf(Dog); + expect(owners2[1].pet.name).toBe('d3'); + expect(owners2[1].pet.type).toBe(AnimalType.DOG); + expect((owners2[1].pet as Dog).canBark).toBe(true); + expect((owners2[1].pet as Cat).canMeow).toBeNull(); + + expect(owners2[2].pet).toBeInstanceOf(Dog); + expect(owners2[2].pet.name).toBe('old dog'); + expect(owners2[2].pet.type).toBe(AnimalType.DOG); + expect((owners2[2].pet as Dog).canBark).toBe(true); + expect((owners2[2].pet as Cat).canMeow).toBeNull(); + orm.em.clear(); + + mock.mock.calls.length = 0; + const dogOwners = await orm.em.find(Owner, { pet: { name: ['d3', 'old dog'] } }, { orderBy: { name: 1 } }); + const dogOwners2 = await orm.em.find(Owner, { pet2: { name: ['d4', 'c4'] } }, { orderBy: { name: 1 } }); + const dogOwners3 = await orm.em.find(Owner, { $or: [ + { pet: { name: ['d3', 'old dog'] } }, + { pet2: { name: ['d4', 'c4'] } }, + ] }, { orderBy: { name: 1 } }); + + const check = (items: Owner[]) => { + expect(items).toHaveLength(2); + expect(items[0].pet).toBeInstanceOf(Dog); + expect(items[0].pet.name).toBe('d3'); + expect(items[0].pet2).toBeInstanceOf(Dog); + expect(items[0].pet2.name).toBe('d4'); + expect(items[1].pet).toBeInstanceOf(Dog); + expect(items[1].pet.name).toBe('old dog'); + expect(items[1].pet2).toBeInstanceOf(Cat); + expect(items[1].pet2.name).toBe('c4'); + }; + check(dogOwners); + check(dogOwners2); + check(dogOwners3); + + expect(mock.mock.calls[0][0]).toMatch('select `o0`.* from `owner` as `o0` where `o0`.`pet_name` in (?, ?) order by `o0`.`name` asc'); + expect(mock.mock.calls[1][0]).toMatch('select `o0`.* from `owner` as `o0` where json_extract(`o0`.`pet2`, \\'$.name\\') in (?, ?) order by `o0`.`name` asc'); + expect(mock.mock.calls[2][0]).toMatch('select `o0`.* from `owner` as `o0` where (`o0`.`pet_name` in (?, ?) 
or json_extract(`o0`.`pet2`, \\'$.name\\') in (?, ?)) order by `o0`.`name` asc'); + }); + + test('assign and serialization', async () => { + const owner = new Owner('o1'); + orm.em.assign(owner, { + pet: { name: 'cat', type: AnimalType.CAT }, + pet2: { name: 'dog', type: AnimalType.DOG }, + }); + expect(owner.pet).toMatchObject({ name: 'cat', type: AnimalType.CAT }); + expect(owner.pet).toBeInstanceOf(Cat); + expect(owner.pet2).toMatchObject({ name: 'dog', type: AnimalType.DOG }); + expect(owner.pet2).toBeInstanceOf(Dog); + + const mock = mockLogger(orm, ['query']); + await orm.em.persistAndFlush(owner); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('insert into `owner` (`name`, `pet2`, `pet_can_bark`, `pet_can_meow`, `pet_name`, `pet_type`) values (?, ?, ?, ?, ?, ?)'); + expect(mock.mock.calls[2][0]).toMatch('commit'); + + orm.em.assign(owner, { + pet: { name: 'cat name' }, + pet2: { name: 'dog name' }, + }); + await orm.em.persistAndFlush(owner); + expect(mock.mock.calls[3][0]).toMatch('begin'); + expect(mock.mock.calls[4][0]).toMatch('update `owner` set `pet_name` = ?, `pet2` = ? where `id` = ?'); + expect(mock.mock.calls[5][0]).toMatch('commit'); + + expect(wrap(owner).toObject()).toEqual({ + id: owner.id, + name: 'o1', + pet: { + canMeow: true, + name: 'cat name', + type: 0, + }, + pet2: { + canBark: true, + name: 'dog name', + type: 1, + }, + }); + }); + +}); diff --git a/EntityAssigner.mysql.test.ts b/EntityAssigner.mysql.test.ts index e67fd68..dcc777d 100644 --- a/EntityAssigner.mysql.test.ts +++ b/EntityAssigner.mysql.test.ts @@ -189,12 +189,14 @@ describe('EntityAssignerMySql', () => { book.meta = { items: 5, category: 'test' }; wrap(book).assign({ meta: { items: 3, category: 'foo' } }); expect(book.meta).toEqual({ items: 3, category: 'foo' }); - wrap(book).assign({ meta: { category: 'bar' } }, { mergeObjects: true }); - expect(book.meta).toEqual({ items: 3, category: 'bar' }); wrap(book).assign({ meta: { category: 'bar' } }); - expect(book.meta).toEqual({ category: 'bar' }); + expect(book.meta).toEqual({ items: 3, category: 'bar' }); + wrap(book).assign({ meta: { category: 'bar 1' } }); + expect(book.meta).toEqual({ items: 3, category: 'bar 1' }); + wrap(book).assign({ meta: { category: 'bar 2' } }, { mergeObjects: false }); + expect(book.meta).toEqual({ category: 'bar 2' }); jon.identities = ['1', '2']; - wrap(jon).assign({ identities: ['3', '4'] }, { mergeObjects: true }); + wrap(jon).assign({ identities: ['3', '4'] }); expect(jon.identities).toEqual(['3', '4']); }); @@ -208,7 +210,7 @@ describe('EntityAssignerMySql', () => { em.assign(existing, { name: 'updated name', blob: Buffer.from('123456'), - }, { mergeObjects: true }); + }); await em.flush(); const bar1 = await orm.em.fork().findOneOrFail(FooBar2, 1); @@ -227,7 +229,7 @@ describe('EntityAssignerMySql', () => { entity.books.populated(); const updated = wrap(entity).toObject(); updated.books[0].title = 'updated name'; - orm.em.assign(entity, updated, { updateNestedEntities: true }); + orm.em.assign(entity, updated); // `updateNestedEntities` defaults to true since v5 expect(entity.books[0].title).toBe('updated name'); });
fix(pandas/dask): handle non numpy scalar results in `wrap_case_result`
a3b82f77379f75d550ede0d15a8a4a4fa5dc28d7
fix
https://github.com/rohankumardubey/ibis/commit/a3b82f77379f75d550ede0d15a8a4a4fa5dc28d7
handle non numpy scalar results in `wrap_case_result`
diff --git a/generic.py b/generic.py index 11d14ea..a9a7928 100644 --- a/generic.py +++ b/generic.py @@ -1365,7 +1365,11 @@ def wrap_case_result(raw, expr): raw_1d, dtype=constants.IBIS_TYPE_TO_PANDAS_TYPE[expr.type()] ) if result.size == 1 and isinstance(expr, ir.Scalar): - return result.iloc[0].item() + value = result.iloc[0] + try: + return value.item() + except AttributeError: + return value return result diff --git a/test_string.py b/test_string.py index 6a3b6c3..bf3669a 100644 --- a/test_string.py +++ b/test_string.py @@ -829,7 +829,9 @@ def test_array_string_join(con): assert con.execute(expr) == expected [email protected](["dask", "datafusion", "mssql", "mysql", "pandas", "pyspark"]) [email protected]( + ["datafusion", "mssql", "mysql", "pyspark"], raises=com.OperationNotDefinedError +) def test_subs_with_re_replace(con): expr = ibis.literal("hi").re_replace("i", "a").substitute({"d": "b"}, else_="k") result = con.execute(expr)
build: improving links warp feature, not ready yet
eca22112be4202876768191ba174a41cf9f6f7d5
build
https://github.com/tsparticles/tsparticles/commit/eca22112be4202876768191ba174a41cf9f6f7d5
improving links warp feature, not ready yet
diff --git a/CircleWarp.ts b/CircleWarp.ts index 9521bb6..4747370 100644 --- a/CircleWarp.ts +++ b/CircleWarp.ts @@ -0,0 +1,20 @@ +import { canvasSize, distance, offsets, tests } from "./Fixture/defaultValues"; +import { describe, it } from "mocha"; +import { expect } from "chai"; +import { CircleWarp } from "../src/CircleWarp"; + +describe(`CircleWarp Tests (Canvas: ${canvasSize.width}x${canvasSize.height}, Distance: ${distance})`, () => { + for (const test of tests) { + describe(`Center (${test.begin.x}, ${test.begin.y}) (${test.warp ? "warp" : "no-warp"})`, () => { + const circle = new CircleWarp(test.begin.x, test.begin.y, distance, canvasSize, offsets); + + for (const end of test.tests) { + it(`should contain Point (${end.coordinates.x}, ${end.coordinates.y})`, () => { + const res = circle.contains(end.coordinates); + + expect(res).to.be.equal(!end.fail); + }); + } + }); + } +}); diff --git a/LinkContainer.ts b/LinkContainer.ts index ede096d..71fdf10 100644 --- a/LinkContainer.ts +++ b/LinkContainer.ts @@ -1,6 +1,7 @@ -import type { Container, IRgb } from "tsparticles-engine"; +import type { Container, ICoordinates, IRgb } from "tsparticles-engine"; export type LinkContainer = Container & { + offsets: ICoordinates[]; particles: { linksColor?: IRgb | string; linksColors: Map<string, IRgb | string | undefined>; diff --git a/LinkInstance.ts b/LinkInstance.ts index 16edb28..dec6094 100644 --- a/LinkInstance.ts +++ b/LinkInstance.ts @@ -1,5 +1,5 @@ import type { IContainerPlugin, IRangeColor, IRgb, RangeValue } from "tsparticles-engine"; -import { drawLinkLine, drawLinkTriangle } from "./Utils"; +import { drawLinkLine, drawLinkTriangle, getOffsets } from "./Utils"; import { getDistance, getLinkColor, getRandom, getRangeValue, rangeColorToRgb } from "tsparticles-engine"; import type { ILink } from "./ILink"; import type { LinkContainer } from "./LinkContainer"; @@ -44,6 +44,8 @@ export class LinkInstance implements IContainerPlugin { private readonly _freqs: IParticlesFrequencies; constructor(private readonly container: LinkContainer) { + container.offsets = getOffsets(container.canvas.size); + this._freqs = { links: new Map<string, number>(), triangles: new Map<string, number>(), @@ -92,6 +94,10 @@ export class LinkInstance implements IContainerPlugin { particle.links = []; } + resize(): void { + this.container.offsets = getOffsets(this.container.canvas.size); + } + private drawLinkLine(p1: LinkParticle, link: ILink): void { const container = this.container, options = container.actualOptions, @@ -154,6 +160,7 @@ export class LinkInstance implements IContainerPlugin { maxDistance, container.canvas.size, p1.options.links.warp, + this.container.offsets, options.backgroundMask.enable, options.backgroundMask.composite, colorLine, diff --git a/Linker.ts b/Linker.ts index 84650ca..6d3ed30 100644 --- a/Linker.ts +++ b/Linker.ts @@ -1,249 +1,8 @@ -import { ICoordinates, IDimension } from "tsparticles-engine"; import { describe, it } from "mocha"; import { expect } from "chai"; -import { getLinkPoints } from "../src/Utils"; - -function checkIntermediatePointsTests( - begin: ICoordinates, - end: ICoordinates, - midPoints: ICoordinates[], - distance: number, - warp: boolean, - canvasSize: IDimension -): void { - const linkPoints = getLinkPoints(begin, end, distance, warp, canvasSize); - - console.log(linkPoints); - - expect(linkPoints).to.be.not.empty; - - for (const point of linkPoints) { - expect(point).to.be.not.empty; - - console.log(point); - - if (midPoints.length) { - console.log(midPoints); - 
- expect( - midPoints.find( - t => (t.x === point.begin.x && t.y === point.begin.y) || (t.x === point.end.x && t.y === point.end.y) - ) - ).to.be.not.undefined; - } - - expect(point.begin.x).to.be.within(0, canvasSize.width); - expect(point.begin.y).to.be.within(0, canvasSize.height); - expect(point.end.x).to.be.within(0, canvasSize.width); - expect(point.end.y).to.be.within(0, canvasSize.height); - } -} - -function checkIntermediatePointsFailTests( - begin: ICoordinates, - end: ICoordinates, - distance: number, - warp: boolean, - canvasSize: IDimension -): void { - const linkPoints = getLinkPoints(begin, end, distance, warp, canvasSize); - - expect(linkPoints).to.be.empty; -} - -const canvasSize = { width: 100, height: 100 }, - distance = 10; - -type SingleLinkTest = { - coordinates: ICoordinates; - fail: boolean; - midPoints: ICoordinates[]; -}; - -type LinkTest = { - begin: ICoordinates; - tests: SingleLinkTest[]; - warp: boolean; -}; - -const tests: LinkTest[] = [ - { - begin: { x: 2, y: 2 }, - tests: [ - { - coordinates: { x: 4, y: 4 }, - fail: false, - midPoints: [], - }, - { - coordinates: { x: 15, y: 15 }, - fail: true, - midPoints: [], - }, - ], - warp: false, - }, - { - begin: { x: 2, y: 2 }, - tests: [ - { - coordinates: { x: 4, y: 4 }, - fail: false, - midPoints: [], - }, - { - coordinates: { x: 15, y: 15 }, - fail: true, - midPoints: [], - }, - { - coordinates: { x: 98, y: 2 }, - fail: false, - midPoints: [ - { x: 100, y: 2 }, - { x: 0, y: 2 }, - ], - }, - { - coordinates: { x: 2, y: 98 }, - fail: false, - midPoints: [ - { x: 2, y: 100 }, - { x: 2, y: 0 }, - ], - }, - { - coordinates: { x: 98, y: 98 }, - fail: false, - midPoints: [ - { x: 100, y: 100 }, - { x: 0, y: 0 }, - ], - }, - ], - warp: true, - }, - { - begin: { x: 98, y: 2 }, - tests: [ - { - coordinates: { x: 98, y: 4 }, - fail: false, - midPoints: [], - }, - { - coordinates: { x: 15, y: 15 }, - fail: true, - midPoints: [], - }, - { - coordinates: { x: 2, y: 2 }, - fail: false, - midPoints: [ - { x: 0, y: 2 }, - { x: 100, y: 2 }, - ], - }, - { - coordinates: { x: 2, y: 98 }, - fail: false, - midPoints: [ - { x: 2, y: 100 }, - { x: 2, y: 0 }, - ], - }, - { - coordinates: { x: 98, y: 98 }, - fail: false, - midPoints: [ - { x: 100, y: 100 }, - { x: 0, y: 0 }, - ], - }, - ], - warp: true, - }, - { - begin: { x: 2, y: 98 }, - tests: [ - { - coordinates: { x: 4, y: 98 }, - fail: false, - midPoints: [], - }, - { - coordinates: { x: 15, y: 15 }, - fail: true, - midPoints: [], - }, - { - coordinates: { x: 2, y: 2 }, - fail: false, - midPoints: [ - { x: 2, y: 0 }, - { x: 2, y: 100 }, - ], - }, - { - coordinates: { x: 98, y: 2 }, - fail: false, - midPoints: [ - { x: 100, y: 2 }, - { x: 0, y: 2 }, - ], - }, - { - coordinates: { x: 98, y: 98 }, - fail: false, - midPoints: [ - { x: 100, y: 100 }, - { x: 0, y: 0 }, - ], - }, - ], - warp: true, - }, - { - begin: { x: 98, y: 98 }, - tests: [ - { - coordinates: { x: 94, y: 94 }, - fail: false, - midPoints: [], - }, - { - coordinates: { x: 15, y: 15 }, - fail: true, - midPoints: [], - }, - { - coordinates: { x: 2, y: 2 }, - fail: false, - midPoints: [ - { x: 0, y: 0 }, - { x: 100, y: 100 }, - ], - }, - { - coordinates: { x: 2, y: 98 }, - fail: false, - midPoints: [ - { x: 2, y: 100 }, - { x: 2, y: 0 }, - ], - }, - { - coordinates: { x: 98, y: 2 }, - fail: false, - midPoints: [ - { x: 100, y: 2 }, - { x: 0, y: 2 }, - ], - }, - ], - warp: true, - }, -]; +import { getLinkDistance } from "../src/Linker"; +import { canvasSize, distance, offsets, tests } from "./Fixture/defaultValues"; 
+import { checkIntermediatePointsFailTests, checkIntermediatePointsTests } from "./Fixture/utils"; describe(`Linker (Canvas: ${canvasSize.width}x${canvasSize.height}, Distance: ${distance})`, () => { for (const test of tests) { @@ -251,15 +10,36 @@ describe(`Linker (Canvas: ${canvasSize.width}x${canvasSize.height}, Distance: ${ for (const end of test.tests) { it(`should link Point (${end.coordinates.x}, ${end.coordinates.y})`, () => { if (end.fail) { - checkIntermediatePointsFailTests(test.begin, end.coordinates, distance, test.warp, canvasSize); + checkIntermediatePointsFailTests( + test.begin, + end.coordinates, + distance, + test.warp, + canvasSize, + offsets + ); } else { + const linkDistance = getLinkDistance( + test.begin, + end.coordinates, + distance, + canvasSize, + test.warp, + offsets + ); + + console.log("linkDistance", linkDistance); + + expect(linkDistance).to.be.greaterThanOrEqual(0).and.lessThan(distance); + checkIntermediatePointsTests( test.begin, end.coordinates, end.midPoints, distance, test.warp, - canvasSize + canvasSize, + offsets ); } }); diff --git a/Utils.ts b/Utils.ts index 5906850..e844b0f 100644 --- a/Utils.ts +++ b/Utils.ts @@ -9,12 +9,26 @@ import { } from "tsparticles-engine"; import type { ILinksShadow } from "./Options/Interfaces/ILinksShadow"; +export function getOffsets(canvasSize: IDimension): ICoordinates[] { + return [ + { x: canvasSize.width, y: 0 }, + { x: 0, y: canvasSize.height }, + { x: canvasSize.width, y: canvasSize.height }, + { x: -canvasSize.width, y: 0 }, + { x: 0, y: -canvasSize.height }, + { x: -canvasSize.width, y: -canvasSize.height }, + { x: canvasSize.width, y: -canvasSize.height }, + { x: -canvasSize.width, y: canvasSize.height }, + ]; +} + export function getLinkPoints( begin: ICoordinates, end: ICoordinates, maxDistance: number, warp: boolean, - canvasSize: IDimension + canvasSize: IDimension, + offsets: ICoordinates[] ): { begin: ICoordinates; end: ICoordinates }[] { const lines: { begin: ICoordinates; end: ICoordinates }[] = []; @@ -23,7 +37,7 @@ export function getLinkPoints( } if (warp) { - for (const line of getIntermediatePoints(begin, end, canvasSize, maxDistance)) { + for (const line of getIntermediatePoints(begin, end, canvasSize, maxDistance, offsets)) { lines.push(line); } } @@ -35,19 +49,9 @@ export function getIntermediatePoints( begin: ICoordinates, end: ICoordinates, canvasSize: IDimension, - maxDistance: number + maxDistance: number, + offsets: ICoordinates[] ): { begin: ICoordinates; end: ICoordinates }[] { - const offsets = [ - { x: canvasSize.width, y: 0 }, - { x: 0, y: canvasSize.height }, - { x: canvasSize.width, y: canvasSize.height }, - { x: -canvasSize.width, y: 0 }, - { x: 0, y: -canvasSize.height }, - { x: -canvasSize.width, y: -canvasSize.height }, - { x: canvasSize.width, y: -canvasSize.height }, - { x: -canvasSize.width, y: canvasSize.height }, - ]; - let pi1: ICoordinates | undefined, pi2: ICoordinates | undefined; for (const offset of offsets) { @@ -62,72 +66,95 @@ export function getIntermediatePoints( }, d2 = getDistances(begin, pos2); - console.log({ pos1, end, d1, begin, pos2, d2, offset }); - if (d1.distance > maxDistance && d2.distance > maxDistance) { continue; } - let m: number, q: number, beginPos: ICoordinates, endPos: ICoordinates; - - if (d1.distance <= maxDistance) { - m = d1.dy / d1.dx; - q = Number.isFinite(m) ? end.y - m * end.x : begin.x; - beginPos = pos1; - endPos = end; - } else if (d2.distance <= maxDistance) { - m = d2.dy / d2.dx; - q = Number.isFinite(m) ? 
begin.y - m * begin.x : begin.x; - beginPos = begin; - endPos = pos2; - } else { - return []; - } - - console.log({ beginPos, endPos, m, q }); - - for (const innerOffset of offsets) { - const px = { x: innerOffset.x, y: m * innerOffset.x + q }, - py = { - x: Number.isFinite(m) ? innerOffset.y - q / m : q, - y: Number.isFinite(m) ? innerOffset.y : offset.y, - }; + if (d1.dx === 0 || d2.dx === 0) { + if (Math.abs(d1.dy) > maxDistance && Math.abs(d2.dy) > maxDistance) { + continue; + } - console.log({ beginPos, endPos, px, py, innerOffset }); + if (begin.y >= end.y) { + pi1 = { x: begin.x, y: canvasSize.height }; + pi2 = { x: end.x, y: 0 }; + } else { + pi1 = { x: begin.x, y: 0 }; + pi2 = { x: end.x, y: canvasSize.height }; + } - if (isPointBetweenPoints(px, beginPos, endPos)) { - const db = getDistance(beginPos, px), - de = getDistance(endPos, px); + if (pi1 && pi2) { + break; + } + } else if (d1.dy === 0 || d2.dy === 0) { + if (Math.abs(d1.dx) > maxDistance && Math.abs(d2.dx) > maxDistance) { + continue; + } - console.log("px between points", db, de); + if (begin.x >= end.x) { + pi1 = { x: canvasSize.width, y: begin.y }; + pi2 = { x: 0, y: end.y }; + } else { + pi1 = { x: 0, y: begin.y }; + pi2 = { x: canvasSize.width, y: end.y }; + } - const xi = offset.x - px.x, - yi = m * xi + q; + if (pi1 && pi2) { + break; + } + } else { + let m: number, q: number, beginPos: ICoordinates, endPos: ICoordinates; + + if (d1.distance <= maxDistance) { + m = d1.dy / d1.dx; + q = Number.isFinite(m) ? end.y - m * end.x : begin.x; + beginPos = pos1; + endPos = end; + } else if (d2.distance <= maxDistance) { + m = d2.dy / d2.dx; + q = Number.isFinite(m) ? begin.y - m * begin.x : begin.x; + beginPos = begin; + endPos = pos2; + } else { + return []; + } - if (db < de) { - pi1 = { x: xi, y: yi }; - pi2 = px; - } else { - pi1 = px; - pi2 = { x: xi, y: yi }; - } - } else if (isPointBetweenPoints(py, beginPos, endPos)) { - const db = getDistance(beginPos, py), - de = getDistance(endPos, py); + for (const innerOffset of offsets) { + const px = { x: innerOffset.x, y: m * innerOffset.x + q }, + py = { + x: Number.isFinite(m) ? innerOffset.y - q / m : q, + y: Number.isFinite(m) ? 
innerOffset.y : offset.y, + }; - console.log("py between points", db, de); + if (isPointBetweenPoints(px, beginPos, endPos)) { + const db = getDistance(beginPos, px), + de = getDistance(endPos, px); - const yi = offset.y - py.y, - xi = (yi - q) / m; + const xi = offset.x - px.x, + yi = m * xi + q; - if (yi >= 0 && xi >= 0) { - if (db < de) { + if (db <= de) { pi1 = { x: xi, y: yi }; - pi2 = py; + pi2 = px; } else { - pi1 = py; + pi1 = px; pi2 = { x: xi, y: yi }; } + } else if (isPointBetweenPoints(py, beginPos, endPos)) { + const db = getDistance(beginPos, py), + de = getDistance(endPos, py), + yi = offset.y - py.y, + xi = (yi - q) / m; + + if (yi >= 0 && xi >= 0) { + if (db <= de) { + pi1 = { x: xi, y: yi }; + pi2 = py; + } else { + pi1 = py; + pi2 = { x: xi, y: yi }; + } + } } } @@ -162,13 +189,14 @@ export function drawLinkLine( maxDistance: number, canvasSize: IDimension, warp: boolean, + offsets: ICoordinates[], backgroundMask: boolean, composite: GlobalCompositeOperation, colorLine: IRgb, opacity: number, shadow: ILinksShadow ): void { - const lines = getLinkPoints(begin, end, maxDistance, warp, canvasSize); + const lines = getLinkPoints(begin, end, maxDistance, warp, canvasSize, offsets); if (!lines.length) { return; diff --git a/defaultValues.ts b/defaultValues.ts index da80f66..098da82 100644 --- a/defaultValues.ts +++ b/defaultValues.ts @@ -0,0 +1,204 @@ +import { LinkTest } from "./types"; +import { getOffsets } from "../../src/Utils"; + +export const canvasSize = { width: 100, height: 100 }, + distance = 10, + offsets = getOffsets(canvasSize); + +export const tests: LinkTest[] = [ + { + begin: { x: 2, y: 2 }, + tests: [ + { + coordinates: { x: 4, y: 4 }, + fail: false, + midPoints: [], + }, + { + coordinates: { x: 15, y: 15 }, + fail: true, + midPoints: [], + }, + ], + warp: false, + }, + { + begin: { x: 2, y: 2 }, + tests: [ + { + coordinates: { x: 4, y: 4 }, + fail: false, + midPoints: [], + }, + { + coordinates: { x: 15, y: 15 }, + fail: true, + midPoints: [], + }, + { + coordinates: { x: 98, y: 2 }, + fail: false, + midPoints: [ + { x: 100, y: 2 }, + { x: 0, y: 2 }, + ], + }, + { + coordinates: { x: 2, y: 98 }, + fail: false, + midPoints: [ + { x: 2, y: 100 }, + { x: 2, y: 0 }, + ], + }, + { + coordinates: { x: 98, y: 98 }, + fail: false, + midPoints: [ + { x: 100, y: 100 }, + { x: 0, y: 0 }, + ], + }, + ], + warp: true, + }, + { + begin: { x: 98, y: 2 }, + tests: [ + { + coordinates: { x: 98, y: 4 }, + fail: false, + midPoints: [], + }, + { + coordinates: { x: 15, y: 15 }, + fail: true, + midPoints: [], + }, + { + coordinates: { x: 2, y: 2 }, + fail: false, + midPoints: [ + { x: 0, y: 2 }, + { x: 100, y: 2 }, + ], + }, + { + coordinates: { x: 2, y: 98 }, + fail: false, + midPoints: [ + { x: 100, y: 0 }, + { x: 0, y: 100 }, + ], + }, + { + coordinates: { x: 98, y: 98 }, + fail: false, + midPoints: [ + { x: 98, y: 100 }, + { x: 98, y: 0 }, + ], + }, + ], + warp: true, + }, + { + begin: { x: 2, y: 98 }, + tests: [ + { + coordinates: { x: 4, y: 98 }, + fail: false, + midPoints: [], + }, + { + coordinates: { x: 15, y: 15 }, + fail: true, + midPoints: [], + }, + { + coordinates: { x: 2, y: 2 }, + fail: false, + midPoints: [ + { x: 2, y: 0 }, + { x: 2, y: 100 }, + ], + }, + { + coordinates: { x: 98, y: 2 }, + fail: false, + midPoints: [ + { x: 0, y: 100 }, + { x: 100, y: 0 }, + ], + }, + { + coordinates: { x: 98, y: 98 }, + fail: false, + midPoints: [ + { x: 100, y: 98 }, + { x: 0, y: 98 }, + ], + }, + ], + warp: true, + }, + { + begin: { x: 98, y: 98 }, + tests: [ + { + 
coordinates: { x: 94, y: 94 }, + fail: false, + midPoints: [], + }, + { + coordinates: { x: 15, y: 15 }, + fail: true, + midPoints: [], + }, + { + coordinates: { x: 2, y: 2 }, + fail: false, + midPoints: [ + { x: 0, y: 0 }, + { x: 100, y: 100 }, + ], + }, + { + coordinates: { x: 2, y: 98 }, + fail: false, + midPoints: [ + { x: 100, y: 98 }, + { x: 0, y: 98 }, + ], + }, + { + coordinates: { x: 98, y: 2 }, + fail: false, + midPoints: [ + { x: 98, y: 100 }, + { x: 98, y: 0 }, + ], + }, + ], + warp: true, + }/*, + { + begin: { x: 3, y: 1 }, + tests: [ + { + coordinates: { x: 3, y: 3 }, + fail: false, + midPoints: [], + }, + { + coordinates: { x: 99, y: 2 }, + fail: false, + midPoints: [ + { x: 100, y: 1.75 }, + { x: 0, y: 26.75 } + ], + } + ], + warp: true, + }*/ +]; diff --git a/types.ts b/types.ts index 7f35a6e..4a9628f 100644 --- a/types.ts +++ b/types.ts @@ -0,0 +1,13 @@ +import type { ICoordinates } from "tsparticles-engine"; + +export type SingleLinkTest = { + coordinates: ICoordinates; + fail: boolean; + midPoints: ICoordinates[]; +}; + +export type LinkTest = { + begin: ICoordinates; + tests: SingleLinkTest[]; + warp: boolean; +}; diff --git a/utils.ts b/utils.ts index 443dabb..a62ac76 100644 --- a/utils.ts +++ b/utils.ts @@ -0,0 +1,53 @@ +import { ICoordinates, IDimension } from "tsparticles-engine"; +import { getLinkPoints } from "../../src/Utils"; +import { expect } from "chai"; + +export function checkIntermediatePointsTests( + begin: ICoordinates, + end: ICoordinates, + midPoints: ICoordinates[], + distance: number, + warp: boolean, + canvasSize: IDimension, + offsets: ICoordinates[] +): void { + const linkPoints = getLinkPoints(begin, end, distance, warp, canvasSize, offsets); + + console.log("begin", begin); + console.log("end", end); + console.log("midPoints", midPoints); + console.log("linkPoints", linkPoints); + + expect(linkPoints).to.be.not.empty; + + for (const point of linkPoints) { + expect(point).to.be.not.empty; + + if (midPoints.length) { + expect( + midPoints.find( + t => + (t.x === point.begin.x && t.y === point.begin.y) || (t.x === point.end.x && t.y === point.end.y) + ) + ).to.be.not.undefined; + } + + expect(point.begin.x).to.be.within(0, canvasSize.width); + expect(point.begin.y).to.be.within(0, canvasSize.height); + expect(point.end.x).to.be.within(0, canvasSize.width); + expect(point.end.y).to.be.within(0, canvasSize.height); + } +} + +export function checkIntermediatePointsFailTests( + begin: ICoordinates, + end: ICoordinates, + distance: number, + warp: boolean, + canvasSize: IDimension, + offsets: ICoordinates[] +): void { + const linkPoints = getLinkPoints(begin, end, distance, warp, canvasSize, offsets); + + expect(linkPoints).to.be.empty; +} diff --git a/warp.json b/warp.json index b7ee7bf..95a091b 100644 --- a/warp.json +++ b/warp.json @@ -1,67 +1,47 @@ { - "fullScreen": false, - "fpsLimit": 120, - "manualParticles": [ - { - "position": { - "x": 2, - "y": 2 - } - }, - { - "position": { - "x": 2, - "y": 98 - } - }, - { - "position": { - "x": 98, - "y": 2 - } - }, - { - "position": { - "x": 98, - "y": 98 - } - } - ], "particles": { "number": { - "value": 0 + "value": 80, + "density": { + "enable": true, + "area": 800 + } }, "color": { - "value": "#ffffff" + "value": "#ff0000", + "animation": { + "enable": true, + "speed": 20, + "sync": true + } }, "shape": { "type": "circle" }, "opacity": { - "value": 1 + "value": 0.5 }, "size": { - "value": 3 + "value": { + "min": 1, + "max": 3 + } }, "links": { "enable": true, "distance": 150, "color": "#ffffff", - 
"warp": true, - "opacity": 1, - "width": 1 + "opacity": 0.4, + "width": 1, + "warp": true }, "move": { - "enable": false, - "speed": 2, - "direction": "none", - "random": false, - "straight": false, - "outModes": "out", + "enable": true, + "speed": 6, "warp": true } }, "background": { - "color": "#0d47a1" + "color": "#000000" } }
fix(reflection): improve detection of `Ref` types with FK as PK Closes #5144
c8858d225f514957fc13591bb8806dbba2227e45
fix
https://github.com/mikro-orm/mikro-orm/commit/c8858d225f514957fc13591bb8806dbba2227e45
improve detection of `Ref` types with FK as PK Closes #5144
diff --git a/TsMorphMetadataProvider.ts b/TsMorphMetadataProvider.ts index 0f7472c..9453d6d 100644 --- a/TsMorphMetadataProvider.ts +++ b/TsMorphMetadataProvider.ts @@ -173,7 +173,9 @@ export class TsMorphMetadataProvider extends MetadataProvider { // `{ object?: import("...").Entity | undefined; } & import("...").Reference<Entity>` // `{ node?: ({ id?: number | undefined; } & import("...").Reference<import("...").Entity>) | undefined; } & import("...").Reference<Entity>` // the regexp is looking for the `wrapper`, possible prefixed with `.` or wrapped in parens. - const type = prop.type.replace(/import\\(.*\\)\\./g, ''); + const type = prop.type + .replace(/import\\(.*\\)\\./g, '') + .replace(/\\{ .* } & ([\\w &]+)/g, '$1'); const m = type.match(new RegExp(`(?:^|[.( ])${wrapper}<(\\\\w+),?.*>(?:$|[) ])`)); if (!m) { diff --git a/GH5144.test.ts b/GH5144.test.ts index c776008..ddc3d17 100644 --- a/GH5144.test.ts +++ b/GH5144.test.ts @@ -0,0 +1,113 @@ +import { + Collection, + Entity, + Enum, + ManyToOne, + OneToMany, + OneToOne, + PrimaryKey, + PrimaryKeyProp, + Property, + ref, + Ref, +} from '@mikro-orm/core'; +import { MikroORM } from '@mikro-orm/sqlite'; +import { TsMorphMetadataProvider } from '@mikro-orm/reflection'; + +enum AccountType { + User = 1, + Organiztion = 2, +} + +@Entity() +class Account { + + @PrimaryKey() + id!: number; + + @Property() + name: string; + + @Property() + balance: number = 0; + + @OneToOne(() => User, 'account') + user?: Ref<User>; + + @OneToOne(() => Organization, 'account') + organization?: Ref<Organization>; + + @Enum(() => AccountType) + type!: AccountType; + + constructor(name: string, startBalance: number = 0, type = AccountType.User) { + this.name = name; + this.balance = startBalance; + this.type = type; + } + +} + +@Entity() +class User { + + @OneToOne({ primary: true }) + account!: Ref<Account>; + + @Property() + username!: string; + + @Property({ hidden: true }) + password!: string; + + @OneToMany(() => Organization, org => org.owner) + organizations = new Collection<Organization>(this); + + [PrimaryKeyProp]?: 'account'; + + constructor(account: Account, username: string, password: string) { + this.account = ref(account); + this.username = username; + this.password = password; + } + +} + +@Entity() +class Organization { + + @OneToOne({ primary: true }) + account!: Ref<Account>; + + @ManyToOne() + owner!: Ref<User>; + + [PrimaryKeyProp]?: 'account'; + + constructor(account: Account, owner: User) { + this.account = ref(account); + this.owner = ref(owner); + } + +} + +let orm: MikroORM; + +beforeAll(async () => { + orm = await MikroORM.init({ + entities: [Organization, Account, User], + dbName: ':memory:', + metadataProvider: TsMorphMetadataProvider, + metadataCache: { enabled: false }, + }); + await orm.schema.createSchema(); +}); + +afterAll(async () => { + await orm.close(true); +}); + +test('#5144', async () => { + const schema = await orm.schema.getCreateSchemaSQL(); + expect(schema).toMatchSnapshot(); +}); diff --git a/GH5144.test.ts.snap b/GH5144.test.ts.snap index 9c4937e..0f37138 100644 --- a/GH5144.test.ts.snap +++ b/GH5144.test.ts.snap @@ -0,0 +1,15 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`#5144 1`] = ` +"pragma foreign_keys = off; + +create table \\`account\\` (\\`id\\` integer not null primary key autoincrement, \\`name\\` text not null, \\`balance\\` integer not null default 0, \\`type\\` integer not null default 1); + +create table \\`user\\` (\\`account_id\\` integer not null, \\`username\\` text not null, 
\\`password\\` text not null, constraint \\`user_account_id_foreign\\` foreign key(\\`account_id\\`) references \\`account\\`(\\`id\\`) on delete cascade on update cascade, primary key (\\`account_id\\`)); + +create table \\`organization\\` (\\`account_id\\` integer not null, \\`owner_account_id\\` integer not null, constraint \\`organization_account_id_foreign\\` foreign key(\\`account_id\\`) references \\`account\\`(\\`id\\`) on delete cascade on update cascade, constraint \\`organization_owner_account_id_foreign\\` foreign key(\\`owner_account_id\\`) references \\`user\\`(\\`account_id\\`) on update cascade, primary key (\\`account_id\\`)); +create index \\`organization_owner_account_id_index\\` on \\`organization\\` (\\`owner_account_id\\`); + +pragma foreign_keys = on; +" +`;
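The key change in this commit is the extra `replace` that collapses intersection object types before the wrapper regexp runs. As a hedged illustration (the input string is constructed from the shapes described in the diff's comment, not verbatim ts-morph output), the normalisation behaves like this:

```ts
// Hypothetical reported type for a `Ref<Account>` used as a FK primary key,
// already stripped of its import("...") prefixes.
const wrapper = 'Reference';
const stripped = '{ id?: number | undefined; } & Reference<Account>';

// The new replace collapses the `{ ... } & Wrapper` intersection down to the wrapper.
const type = stripped.replace(/\{ .* } & ([\w &]+)/g, '$1');
console.log(type); // "Reference<Account>"

// The existing wrapper lookup can now extract the target entity name.
const m = type.match(new RegExp(`(?:^|[.( ])${wrapper}<(\\w+),?.*>(?:$|[) ])`));
console.log(m?.[1]); // "Account"
```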
feat: add SpringContext component This component lets you override a few props for all SpringValue objects created by the hook API or renderprops API in child components.
1f023329df1f0cc2ea1d423d6d36c5392626873f
feat
https://github.com/pmndrs/react-spring/commit/1f023329df1f0cc2ea1d423d6d36c5392626873f
add SpringContext component This component lets you override a few props for all SpringValue objects created by the hook API or renderprops API in child components.
diff --git a/SpringContext.tsx b/SpringContext.tsx index 3c4411d..a7f3245 100644 --- a/SpringContext.tsx +++ b/SpringContext.tsx @@ -0,0 +1,29 @@ +import React, { useContext, PropsWithChildren } from 'react' + +/** + * This context affects all new `SpringValue` objects created with + * the hook API or the renderprops API. + */ +export interface SpringContext { + /** Pause all new and existing animations. */ + pause?: boolean + /** Cancel all new and existing animations. */ + cancel?: boolean + /** Force all new and existing animations to be immediate. */ + immediate?: boolean +} + +const ctx = React.createContext<SpringContext>({}) + +export const SpringContext = ({ + children, + ...props +}: PropsWithChildren<SpringContext>) => { + const inherited = useContext(ctx) + props = { ...inherited, ...props } + + const { Provider } = ctx + return <Provider value={props}>{children}</Provider> +} + +export const useSpringContext = () => useContext(ctx) diff --git a/SpringValue.ts b/SpringValue.ts index 4117dab..3fa8668 100644 --- a/SpringValue.ts +++ b/SpringValue.ts @@ -41,6 +41,7 @@ import { getDefaultProp, } from './helpers' import { FrameValue, isFrameValue } from './FrameValue' +import { SpringContext } from './SpringContext' import { SpringPhase, CREATED, @@ -1052,3 +1053,8 @@ export function createUpdate(props: any) { function findDefined(values: any, keys: Set<string>) { each(values, (value, key) => value != null && keys.add(key as any)) } + +/** Update a `SpringValue` with the values from a `SpringContext` object. */ +export function applyContext(spring: SpringValue, context: SpringContext) { + spring.start({ default: true, ...context }) +} diff --git a/useSprings.ts b/useSprings.ts index 4d2c829..81535e5 100644 --- a/useSprings.ts +++ b/useSprings.ts @@ -20,7 +20,7 @@ import { ControllerUpdate, } from '../types' import { UseSpringProps } from './useSpring' -import { createUpdate } from '../SpringValue' +import { createUpdate, applyContext } from '../SpringValue' import { Controller, getSprings, @@ -28,6 +28,7 @@ import { setSprings, } from '../Controller' import { useMemo as useMemoOne, mergeDefaultProps } from '../helpers' +import { useSpringContext } from '../SpringContext' import { SpringHandle } from '../SpringHandle' export type UseSpringsProps<State extends Lookup = Lookup> = unknown & @@ -177,6 +178,7 @@ export function useSprings( // commit phase (see the `useLayoutEffect` callback below). const springs = ctrls.map((ctrl, i) => getSprings(ctrl, updates[i])) + const context = useSpringContext() useLayoutEffect(() => { layoutId.current++ @@ -200,7 +202,13 @@ export function useSprings( // Update existing controllers. each(ctrls, (ctrl, i) => { - setSprings(ctrl, springs[i]) + const values = springs[i] + setSprings(ctrl, values) + + // Update the default props. + for (const key in values) { + applyContext(values[key], context) + } // Apply updates created during render. 
const update = updates[i] diff --git a/useTransition.tsx b/useTransition.tsx index ce135e5..fbc3328 100644 --- a/useTransition.tsx +++ b/useTransition.tsx @@ -26,6 +26,8 @@ import { import { Valid } from '../types/common' import { callProp, inferTo, mergeDefaultProps } from '../helpers' import { Controller, getSprings, setSprings } from '../Controller' +import { useSpringContext } from '../SpringContext' +import { applyContext } from '../SpringValue' import { SpringHandle } from '../SpringHandle' import { ENTER, @@ -234,6 +236,17 @@ export function useTransition( useImperativeHandle(ref, () => api) + const context = useSpringContext() + useLayoutEffect(() => { + // Update the default props of each spring. + transitions.forEach(t => { + const { springs }: any = t.ctrl + for (const key in springs) { + applyContext(springs[key], context) + } + }) + }, [context]) + useLayoutEffect( () => { each(changes, ({ phase, springs, payload }, t) => { diff --git a/index.ts b/index.ts index 7a44c11..fc037db 100644 --- a/index.ts +++ b/index.ts @@ -7,6 +7,7 @@ export * from './globals' export { Controller } from './Controller' export { SpringValue } from './SpringValue' export { Interpolation } from './Interpolation' +export { SpringContext } from './SpringContext' export { SpringHandle } from './SpringHandle' export { FrameValue } from './FrameValue'
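Based on the commit description, `SpringContext` overrides props such as `pause`, `cancel`, and `immediate` for all springs created in its subtree. A hedged usage sketch (the import path and the inner spring are assumptions for illustration; only the `pause` prop comes from the component added above):

```tsx
import React from 'react'
import { SpringContext, useSpring, animated } from 'react-spring'

function FadingBox() {
  // A plain hook-created spring; it picks up any props provided by SpringContext.
  const styles = useSpring({ from: { opacity: 0 }, to: { opacity: 1 } })
  return <animated.div style={styles}>hello</animated.div>
}

export function Demo({ paused }: { paused: boolean }) {
  // Pauses every new and existing animation in the subtree while `paused` is true.
  return (
    <SpringContext pause={paused}>
      <FadingBox />
    </SpringContext>
  )
}
```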
ci: use an action for issue assignment
fb3a231b29bc8bff9270b99dd4aff9dad599f21f
ci
https://github.com/ibis-project/ibis/commit/fb3a231b29bc8bff9270b99dd4aff9dad599f21f
use an action for issue assignment
diff --git a/assign.yml b/assign.yml index e4ac5e3..268c7f5 100644 --- a/assign.yml +++ b/assign.yml @@ -8,8 +8,6 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.comment.body == '/take' }} steps: - - uses: actions/checkout@v2 - - name: Assign issue ${{ github.event.issue.number }} to ${{ github.event.comment.user.login }} - run: gh issue edit ${{ github.event.issue.number }} --add-assignee "${{ github.event.comment.user.login }}" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - uses: pozil/[email protected] + with: + assignees: ${{ github.event.comment.user.login }}
fix(schema): respect explicit `columnType` when comparing columns Related: #3317
f0a20fafa1425ca20f4f6fb2977eae7773d6ac6a
fix
https://github.com/mikro-orm/mikro-orm/commit/f0a20fafa1425ca20f4f6fb2977eae7773d6ac6a
respect explicit `columnType` when comparing columns Related: #3317
diff --git a/UnknownType.ts b/UnknownType.ts index a514b26..e1bdf1e 100644 --- a/UnknownType.ts +++ b/UnknownType.ts @@ -1,7 +1,13 @@ import { StringType } from './StringType'; +import type { EntityProperty } from '../typings'; +import type { Platform } from '../platforms/Platform'; export class UnknownType extends StringType { + getColumnType(prop: EntityProperty, platform: Platform) { + return prop.columnTypes?.[0] ?? platform.getVarcharTypeDeclarationSQL(prop); + } + compareAsType(): string { return 'unknown'; } diff --git a/SchemaComparator.ts b/SchemaComparator.ts index 65e76b1..a34c7f5 100644 --- a/SchemaComparator.ts +++ b/SchemaComparator.ts @@ -397,8 +397,8 @@ export class SchemaComparator { const changedProperties = new Set<string>(); const prop1 = this.mapColumnToProperty({ ...column1, autoincrement: false }); const prop2 = this.mapColumnToProperty({ ...column2, autoincrement: false }); - const columnType1 = column1.mappedType.getColumnType(prop1, this.platform); - const columnType2 = column2.mappedType.getColumnType(prop2, this.platform); + const columnType1 = column1.mappedType.getColumnType(prop1, this.platform).toLowerCase(); + const columnType2 = column2.mappedType.getColumnType(prop2, this.platform).toLowerCase(); const log = (msg: string, params: Dictionary) => { if (tableName) { this.log(msg, params); diff --git a/SchemaGenerator.postgres.test.ts b/SchemaGenerator.postgres.test.ts index 98a1af9..879da95 100644 --- a/SchemaGenerator.postgres.test.ts +++ b/SchemaGenerator.postgres.test.ts @@ -323,6 +323,13 @@ describe('SchemaGenerator [postgres]', () => { expect(diff).toMatchSnapshot('postgres-update-schema-drop-unique'); await generator.execute(diff, { wrap: true }); + // test index on column with type tsvector + delete meta.get('Book2').properties.title.defaultRaw; + meta.get('Book2').properties.title.columnTypes[0] = 'tsvector'; + diff = await generator.getUpdateSchemaSQL({ wrap: false }); + expect(diff).toMatchSnapshot('postgres-update-schema-add-fulltext-index-tsvector'); + await generator.execute(diff, { wrap: true }); + await orm.close(true); }); diff --git a/SchemaGenerator.postgres.test.ts.snap b/SchemaGenerator.postgres.test.ts.snap index ca1ccf4..e979838 100644 --- a/SchemaGenerator.postgres.test.ts.snap +++ b/SchemaGenerator.postgres.test.ts.snap @@ -345,6 +345,13 @@ set session_replication_role = 'origin'; " `; +exports[`SchemaGenerator [postgres] update indexes [postgres]: postgres-update-schema-add-fulltext-index-tsvector 1`] = ` +"alter table \\\\"book2\\\\" alter column \\\\"title\\\\" drop default; +alter table \\\\"book2\\\\" alter column \\\\"title\\\\" type tsvector using (\\\\"title\\\\"::tsvector); + +" +`; + exports[`SchemaGenerator [postgres] update indexes [postgres]: postgres-update-schema-add-index 1`] = ` "create index \\\\"book2_author_id_publisher_id_index\\\\" on \\\\"book2\\\\" (\\\\"author_id\\\\", \\\\"publisher_id\\\\"); alter table \\\\"book2\\\\" add constraint \\\\"book2_author_id_publisher_id_unique\\\\" unique (\\\\"author_id\\\\", \\\\"publisher_id\\\\");
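A hedged sketch of an affected mapping (the entity is assumed; only the explicit `columnType: 'tsvector'` mirrors the scenario added to the Postgres schema test above):

```ts
import { Entity, PrimaryKey, Property } from '@mikro-orm/core';

@Entity()
export class Book {

  @PrimaryKey()
  id!: number;

  // With this fix the comparator uses the explicit column type (compared
  // case-insensitively) instead of diffing it back to the default varchar.
  @Property({ columnType: 'tsvector' })
  title!: string;

}
```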
test(bigquery): fix failing udf test
e5cb3a5d1e2fb695ae20414799d4d4110b7012dc
test
https://github.com/ibis-project/ibis/commit/e5cb3a5d1e2fb695ae20414799d4d4110b7012dc
fix failing udf test
diff --git a/out.sql b/out.sql index 639e2d7..6988046 100644 --- a/out.sql +++ b/out.sql @@ -1,11 +0,0 @@ -CREATE TEMPORARY FUNCTION my_str_len_0(s STRING) -RETURNS FLOAT64 -LANGUAGE js AS """ -'use strict'; -function my_str_len(s) { - return s.length; -} -return my_str_len(s); -"""; - -SELECT my_str_len_0('abcd') + my_str_len_0('abcd') AS `tmp` \\ No newline at end of file diff --git a/test_udf_execute.py b/test_udf_execute.py index 965fed7..ef9f5c7 100644 --- a/test_udf_execute.py +++ b/test_udf_execute.py @@ -93,17 +93,19 @@ def test_udf_scalar(client): assert result == 3 -def test_multiple_calls_has_one_definition(client, snapshot): +def test_multiple_calls_has_one_definition(client): @udf([dt.string], dt.double) def my_str_len(s): return s.length s = ibis.literal("abcd") expr = my_str_len(s) + my_str_len(s) - sql = client.compile(expr) - snapshot.assert_match(sql, "out.sql") - result = client.execute(expr) - assert result == 8.0 + + add = expr.op() + + # generated javascript is identical + assert add.left.op().js == add.right.op().js + assert client.execute(expr) == 8.0 def test_udf_libraries(client):
docs(jupyterlite): disable insecure extensions (#10052)
3d8280b494dd9df6f2e40fe2f4966786a6fa5766
docs
https://github.com/ibis-project/ibis/commit/3d8280b494dd9df6f2e40fe2f4966786a6fa5766
disable insecure extensions (#10052)
diff --git a/jupyter-lite.json b/jupyter-lite.json index a874eb2..e115887 100644 --- a/jupyter-lite.json +++ b/jupyter-lite.json @@ -0,0 +1,11 @@ +{ + "jupyter-lite-schema-version": 0, + "jupyter-config-data": { + "appName": "Ibis JupyterLite Console", + "disabledExtensions": [ + "@jupyterlab/markdownviewer-extension:plugin", + "@jupyterlab/mathjax-extension:plugin", + "@jupyterlab/mathjax2-extension:plugin" + ] + } +}
feat: allow construction of decimal literals
3d9e865ab3badd092d8155302641a3e91c72c028
feat
https://github.com/ibis-project/ibis/commit/3d9e865ab3badd092d8155302641a3e91c72c028
allow construction of decimal literals
diff --git a/datatypes.py b/datatypes.py index 86038c9..e2e411e 100644 --- a/datatypes.py +++ b/datatypes.py @@ -4,6 +4,7 @@ import ast import builtins import collections import datetime +import decimal import enum import functools import itertools @@ -1571,3 +1572,8 @@ def _str_to_uuid(typ: UUID, value: str) -> _uuid.UUID: @_normalize.register(String, _uuid.UUID) def _uuid_to_str(typ: String, value: _uuid.UUID) -> str: return str(value) + + +@_normalize.register(Decimal, int) +def _int_to_decimal(typ: Decimal, value: int) -> decimal.Decimal: + return decimal.Decimal(value).scaleb(-typ.scale) diff --git a/generic.py b/generic.py index 0bd58b2..8aa3143 100644 --- a/generic.py +++ b/generic.py @@ -1,4 +1,5 @@ import datetime +import decimal import enum import functools import itertools @@ -242,6 +243,7 @@ class Literal(ValueOp): tuple, type(None), uuid.UUID, + decimal.Decimal, ) ), rlz.is_computable_input, diff --git a/test_value_exprs.py b/test_value_exprs.py index 897ecb8..f2239b5 100644 --- a/test_value_exprs.py +++ b/test_value_exprs.py @@ -4,6 +4,7 @@ import os import uuid from collections import OrderedDict from datetime import date, datetime, time +from decimal import Decimal from operator import methodcaller import numpy as np @@ -112,6 +113,8 @@ multipolygon1 = [polygon1, polygon2] (tuple(multipolygon1), 'multipolygon'), pytest.param(uuid.uuid4(), 'uuid', id='uuid'), pytest.param(str(uuid.uuid4()), 'uuid', id='uuid_str'), + pytest.param(Decimal("234.234"), "decimal(6, 3)", id="decimal_native"), + pytest.param(234234, "decimal(6, 3)", id="decimal_int"), ], ) def test_literal_with_explicit_type(value, expected_type): @@ -119,6 +122,25 @@ def test_literal_with_explicit_type(value, expected_type): assert expr.type().equals(dt.validate_type(expected_type)) [email protected]( + ("value", "expected", "dtype"), + [ + # precision > scale + (Decimal("234.234"), Decimal("234.234"), "decimal(6, 3)"), + (234234, Decimal("234.234"), "decimal(6, 3)"), + # scale == 0 + (Decimal("234"), Decimal("234"), "decimal(6, 0)"), + (234, Decimal("234"), "decimal(6, 0)"), + # precision == scale + (Decimal(".234"), Decimal(".234"), "decimal(3, 3)"), + (234, Decimal(".234"), "decimal(3, 3)"), + ], +) +def test_normalize_decimal_literal(value, expected, dtype): + expr = ibis.literal(value, type=dtype) + assert expr.op().value == expected + + @pytest.mark.parametrize( ['value', 'expected_type', 'expected_class'], [
docs(clickhouse): freeze clickhouse backend docs to avoid rate limit from upstream playground
e3a7eac5b081359fd1ea583a25b4bab01fed3635
docs
https://github.com/rohankumardubey/ibis/commit/e3a7eac5b081359fd1ea583a25b4bab01fed3635
freeze clickhouse backend docs to avoid rate limit from upstream playground
diff --git a/html.json b/html.json index 91661b9..db27441 100644 --- a/html.json +++ b/html.json @@ -0,0 +1,15 @@ +{ + "hash": "d7d652378e73534de8400a7133016093", + "result": { + "markdown": "---\\nexecute:\\n freeze: auto\\ntitle: ClickHouse\\n---\\n\\n\\n\\n\\n\\n[https://clickhouse.com](https://clickhouse.com)\\n\\n![](https://img.shields.io/badge/memtables-native-green?style=flat-square) ![](https://img.shields.io/badge/inputs-ClickHouse tables-blue?style=flat-square) ![](https://img.shields.io/badge/outputs-ClickHouse tables | CSV | pandas | Parquet | PyArrow-orange?style=flat-square)\\n\\n## Install\\n\\nInstall Ibis and dependencies for the ClickHouse backend:\\n\\n::: {.panel-tabset}\\n\\n## `pip`\\n\\nInstall with the `clickhouse` extra:\\n\\n```{.bash}\\npip install 'ibis-framework[clickhouse]'\\n```\\n\\nAnd connect:\\n\\n```{.python}\\nimport ibis\\n\\ncon = ibis.clickhouse.connect() # <1>\\n```\\n\\n1. Adjust connection parameters as needed.\\n\\n## `conda`\\n\\nInstall for ClickHouse:\\n\\n```{.bash}\\nconda install -c conda-forge ibis-clickhouse\\n```\\n\\nAnd connect:\\n\\n```{.python}\\nimport ibis\\n\\ncon = ibis.clickhouse.connect() # <1>\\n```\\n\\n1. Adjust connection parameters as needed.\\n\\n## `mamba`\\n\\nInstall for ClickHouse:\\n\\n```{.bash}\\nmamba install -c conda-forge ibis-clickhouse\\n```\\n\\nAnd connect:\\n\\n```{.python}\\nimport ibis\\n\\ncon = ibis.clickhouse.connect() # <1>\\n```\\n\\n1. Adjust connection parameters as needed.\\n\\n:::\\n\\n## Connect\\n\\n### `ibis.clickhouse.connect`\\n\\n```python\\ncon = ibis.clickhouse.connect(\\n user=\\"username\\",\\n password=\\"password\\",\\n host=\\"hostname\\",\\n)\\n```\\n\\n::: {.callout-note}\\n`ibis.clickhouse.connect` is a thin wrapper around [`ibis.backends.clickhouse.Backend.do_connect`](#ibis.backends.clickhouse.Backend.do_connect).\\n:::\\n\\n### Connection Parameters\\n\\n\\n\\n\\n#### do_connect { #ibis.backends.clickhouse.Backend.do_connect }\\n\\n\\n`do_connect(self, host='localhost', port=None, database='default', user='default', password='', client_name='ibis', secure=None, compression=True, **kwargs)`\\n\\n\\nCreate a ClickHouse client for use with Ibis.\\n\\n##### Parameters\\n\\n| Name | Type | Description | Default |\\n|---------------|--------------|---------------------------------------------------------------------------------------------------------------------------------------|---------------|\\n| `host` | str | Host name of the clickhouse server | `'localhost'` |\\n| `port` | int \\\\| None | ClickHouse HTTP server's port. If not passed, the value depends on whether `secure` is `True` or `False`. | `None` |\\n| `database` | str | Default database when executing queries | `'default'` |\\n| `user` | str | User to authenticate with | `'default'` |\\n| `password` | str | Password to authenticate with | `''` |\\n| `client_name` | str | Name of client that will appear in clickhouse server logs | `'ibis'` |\\n| `secure` | bool \\\\| None | Whether or not to use an authenticated endpoint | `None` |\\n| `compression` | str \\\\| bool | The kind of compression to use for requests. See https://clickhouse.com/docs/en/integrations/python#compression for more information. 
| `True` |\\n| `kwargs` | Any | Client specific keyword arguments | `{}` |\\n\\n##### Examples\\n\\n```python\\n>>> import ibis\\n>>> client = ibis.clickhouse.connect()\\n>>> client\\n<ibis.clickhouse.client.ClickhouseClient object at 0x...>\\n```\\n\\n\\n### `ibis.connect` URL format\\n\\nIn addition to `ibis.clickhouse.connect`, you can also connect to ClickHouse by\\npassing a properly formatted ClickHouse connection URL to `ibis.connect`\\n\\n```python\\ncon = ibis.connect(f\\"clickhouse://{user}:{password}@{host}:{port}?secure={secure}\\")\\n```\\n\\n## ClickHouse playground\\n\\nClickHouse provides a free playground with several datasets that you can connect to using Ibis:\\n\\n::: {#7d05ccde .cell execution_count=2}\\n``` {.python .cell-code}\\nfrom ibis.interactive import *\\n\\ncon = ibis.connect(\\"clickhouse://play:[email protected]:443?secure=True\\")\\nactors = con.table(\\"actors\\")\\nactors\\n```\\n\\n::: {.cell-output .cell-output-display execution_count=22}\\n```{=html}\\n<pre style=\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\">┏━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓\\n┃<span style=\\"font-weight: bold\\"> login </span>┃<span style=\\"font-weight: bold\\"> type </span>┃<span style=\\"font-weight: bold\\"> site_admin </span>┃<span style=\\"font-weight: bold\\"> name </span>┃<span style=\\"font-weight: bold\\"> company </span>┃<span style=\\"font-weight: bold\\"> blog </span>┃<span style=\\"font-weight: bold\\"> location </span>┃<span style=\\"font-weight: bold\\"> email </span>┃<span style=\\"font-weight: bold\\"> hireable </span>┃<span style=\\"font-weight: bold\\"> bio </span>┃<span style=\\"font-weight: bold\\"> twitter_username </span>┃<span style=\\"font-weight: bold\\"> public_repos </span>┃<span style=\\"font-weight: bold\\"> public_gists </span>┃<span style=\\"font-weight: bold\\"> followers </span>┃<span style=\\"font-weight: bold\\"> following </span>┃<span style=\\"font-weight: bold\\"> created_at </span>┃<span style=\\"font-weight: bold\\"> updated_at </span>┃\\n┡━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!boolean</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!boolean</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span 
style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!timestamp(0)</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">!timestamp(0)</span> │\\n├──────────────────┼─────────┼────────────┼──────────────┼─────────────────────┼─────────────────────┼───────────────────┼───────────────────────────┼──────────┼───────────────────────────────────────────┼──────────────────┼──────────────┼──────────────┼───────────┼───────────┼─────────────────────┼─────────────────────┤\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">0000Blaze </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">RohanChhetry</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Pulchowk Campus,IOE</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">rohanchhetry.com.np</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Sanepa , Lalitpur</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ True │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">rohanchhetry9 </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">56</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">57</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">83</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2019-02-24 02:31:21</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-07-30 11:30:14</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">007developforfun</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2015-08-07 11:28:01</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2022-08-12 08:45:30</span> 
│\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">00arthur00 </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Arthur </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Beijing, China </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">[email protected] </span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">72</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">5</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">61</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2017-04-01 13:37:01</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-06-15 14:50:12</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">010001 </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">[email protected] </span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">15</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2015-02-05 03:11:59</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-03-17 06:07:01</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">01001101ilad </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Milad </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Programmer, Writer and Full-Time Learner.</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">10</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; 
font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2016-10-31 19:12:55</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-07-24 11:43:03</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">010227leo </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">zucker </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">trip.com </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Shanghai, China </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">7</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">11</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2012-01-11 06:23:15</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-07-24 03:35:26</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">010ric </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Mario Turic </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Munich </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ True │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Enthusiast and Maker </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">19</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">23</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">68</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2017-10-27 14:00:07</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-08-04 18:44:35</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">01egen </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; 
text-decoration-color: #7f7f7f\\">~</span> │ False │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2019-02-27 08:59:00</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-06-20 04:02:51</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">0400H </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">0400H </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Shanghai </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">[email protected] </span> │ True │ <span style=\\"color: #008000; text-decoration-color: #008000\\">HPC &amp; MLSys &amp; PPML </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">17</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">3</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">42</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2015-12-20 17:38:00</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-07-21 11:28:22</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">0442A403 </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">User </span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Damir Petrov</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">Moscow </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">[email protected]</span> │ False │ <span style=\\"color: #008000; text-decoration-color: #008000\\">HSE student </span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">~</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">19</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">15</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">30</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2016-11-05 18:59:38</span> │ <span style=\\"color: #800080; text-decoration-color: #800080\\">2023-06-22 06:08:50</span> │\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; 
text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │\\n└──────────────────┴─────────┴────────────┴──────────────┴─────────────────────┴─────────────────────┴───────────────────┴───────────────────────────┴──────────┴───────────────────────────────────────────┴──────────────────┴──────────────┴──────────────┴───────────┴───────────┴─────────────────────┴─────────────────────┘\\n</pre>\\n```\\n:::\\n:::\\n\\n\\n\\n\\n## `clickhouse.Backend` { #ibis.backends.clickhouse.Backend }\\n\\n\\n### close { #ibis.backends.clickhouse.Backend.close }\\n\\n\\n`close(self)`\\n\\n\\nClose ClickHouse connection.\\n\\n\\n### compile { #ibis.backends.clickhouse.Backend.compile }\\n\\n\\n`compile(self, expr, limit=None, params=None, **kwargs)`\\n\\n\\nCompile an Ibis expression to a ClickHouse SQL string.\\n\\n\\n### create_database { #ibis.backends.clickhouse.Backend.create_database }\\n\\n\\n`create_database(self, name, *, force=False, engine='Atomic')`\\n\\n\\nCreate a new database.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|---------|--------|--------------------------------------------------------------------|------------|\\n| `name` | str | Name of the new database. | _required_ |\\n| `force` | bool | If `False`, an exception is raised if the database already exists. 
| `False` |\\n\\n\\n### create_table { #ibis.backends.clickhouse.Backend.create_table }\\n\\n\\n`create_table(self, name, obj=None, *, schema=None, database=None, temp=False, overwrite=False, engine='MergeTree', order_by=None, partition_by=None, sample_by=None, settings=None)`\\n\\n\\nCreate a table in a ClickHouse database.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|----------------|----------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------|\\n| `name` | str | Name of the table to create | _required_ |\\n| `obj` | pd.DataFrame \\\\| pa.Table \\\\| ir.Table \\\\| None | Optional data to create the table with | `None` |\\n| `schema` | ibis.Schema \\\\| None | Optional names and types of the table | `None` |\\n| `database` | str \\\\| None | Database to create the table in | `None` |\\n| `temp` | bool | Create a temporary table. This is not yet supported, and exists for API compatibility. | `False` |\\n| `overwrite` | bool | Whether to overwrite the table | `False` |\\n| `engine` | str | The table engine to use. See [ClickHouse's `CREATE TABLE` documentation](https://clickhouse.com/docs/en/sql-reference/statements/create/table) for specifics. Defaults to [`MergeTree`](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/mergetree) with `ORDER BY tuple()` because `MergeTree` is the most feature-complete engine. | `'MergeTree'` |\\n| `order_by` | Iterable\\\\[str\\\\] \\\\| None | String column names to order by. Required for some table engines like `MergeTree`. | `None` |\\n| `partition_by` | Iterable\\\\[str\\\\] \\\\| None | String column names to partition by | `None` |\\n| `sample_by` | str \\\\| None | String column names to sample by | `None` |\\n| `settings` | Mapping\\\\[str, Any\\\\] \\\\| None | Key-value pairs of settings for table creation | `None` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|--------|---------------|\\n| Table | The new table |\\n\\n\\n### create_view { #ibis.backends.clickhouse.Backend.create_view }\\n\\n\\n`create_view(self, name, obj, *, database=None, overwrite=False)`\\n\\n\\nCreate a new view from an expression.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|-------------|-------------|------------------------------------------------------------------------------------------------------|------------|\\n| `name` | str | Name of the new view. | _required_ |\\n| `obj` | ir.Table | An Ibis table expression that will be used to create the view. | _required_ |\\n| `database` | str \\\\| None | Name of the database where the view will be created, if not provided the database's default is used. | `None` |\\n| `overwrite` | bool | Whether to clobber an existing view with the same name | `False` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|--------|----------------------------|\\n| Table | The view that was created. 
|\\n\\n\\n### drop_database { #ibis.backends.clickhouse.Backend.drop_database }\\n\\n\\n`drop_database(self, name, *, force=False)`\\n\\n\\nDrop a database with name `name`.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|---------|--------|--------------------------------------------------------------------|------------|\\n| `name` | str | Database to drop. | _required_ |\\n| `force` | bool | If `False`, an exception is raised if the database does not exist. | `False` |\\n\\n\\n### drop_table { #ibis.backends.clickhouse.Backend.drop_table }\\n\\n\\n`drop_table(self, name, database=None, force=False)`\\n\\n\\nDrop a table.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|------------|-------------|------------------------------------------------------------------|------------|\\n| `name` | str | Name of the table to drop. | _required_ |\\n| `database` | str \\\\| None | Name of the database where the table exists, if not the default. | `None` |\\n| `force` | bool | If `False`, an exception is raised if the table does not exist. | `False` |\\n\\n\\n### drop_view { #ibis.backends.clickhouse.Backend.drop_view }\\n\\n\\n`drop_view(self, name, *, database=None, force=False)`\\n\\n\\nDrop a view.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|------------|-------------|-----------------------------------------------------------------|------------|\\n| `name` | str | Name of the view to drop. | _required_ |\\n| `database` | str \\\\| None | Name of the database where the view exists, if not the default. | `None` |\\n| `force` | bool | If `False`, an exception is raised if the view does not exist. | `False` |\\n\\n\\n### execute { #ibis.backends.clickhouse.Backend.execute }\\n\\n\\n`execute(self, expr, limit='default', external_tables=None, **kwargs)`\\n\\n\\nExecute an expression.\\n\\n\\n### get_schema { #ibis.backends.clickhouse.Backend.get_schema }\\n\\n\\n`get_schema(self, table_name, database=None)`\\n\\n\\nReturn a Schema object for the indicated table and database.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|--------------|-------------|---------------------------------------------------------------------------------------|------------|\\n| `table_name` | str | May **not** be fully qualified. Use `database` if you want to qualify the identifier. | _required_ |\\n| `database` | str \\\\| None | Database name | `None` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|------------|---------------|\\n| sch.Schema | Ibis schema |\\n\\n\\n### has_operation { #ibis.backends.clickhouse.Backend.has_operation }\\n\\n\\n`has_operation(cls, operation)`\\n\\n\\nReturn whether the backend implements support for `operation`.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|-------------|-------------------|----------------------------------------|------------|\\n| `operation` | type\\\\[ops.Value\\\\] | A class corresponding to an operation. | _required_ |\\n\\n#### Returns\\n\\n| Type | Description |\\n|--------|-----------------------------------------------|\\n| bool | Whether the backend implements the operation. 
|\\n\\n#### Examples\\n\\n```python\\n>>> import ibis\\n>>> import ibis.expr.operations as ops\\n>>> ibis.sqlite.has_operation(ops.ArrayIndex)\\nFalse\\n>>> ibis.postgres.has_operation(ops.ArrayIndex)\\nTrue\\n```\\n\\n\\n### insert { #ibis.backends.clickhouse.Backend.insert }\\n\\n\\n`insert(self, name, obj, settings=None, **kwargs)`\\n\\n\\n\\n\\n\\n### list_databases { #ibis.backends.clickhouse.Backend.list_databases }\\n\\n\\n`list_databases(self, like=None)`\\n\\n\\nList existing databases in the current connection.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|--------|-------------|-----------------------------------------------------------------------|-----------|\\n| `like` | str \\\\| None | A pattern in Python's regex format to filter returned database names. | `None` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|-------------|-----------------------------------------------------------------------------------------------------|\\n| list\\\\[str\\\\] | The database names that exist in the current connection, that match the `like` pattern if provided. |\\n\\n\\n### list_tables { #ibis.backends.clickhouse.Backend.list_tables }\\n\\n\\n`list_tables(self, like=None, database=None)`\\n\\n\\nReturn the list of table names in the current database.\\n\\nFor some backends, the tables may be files in a directory,\\nor other equivalent entities in a SQL database.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|------------|-------------|----------------------------------------------------------------------------------------|-----------|\\n| `like` | str \\\\| None | A pattern in Python's regex format. | `None` |\\n| `database` | str \\\\| None | The database from which to list tables. If not provided, the current database is used. | `None` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|-------------|------------------------------------------------------------|\\n| list\\\\[str\\\\] | The list of the table names that match the pattern `like`. |\\n\\n\\n### raw_sql { #ibis.backends.clickhouse.Backend.raw_sql }\\n\\n\\n`raw_sql(self, query, external_tables=None, **kwargs)`\\n\\n\\nExecute a SQL string `query` against the database.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|-------------------|--------------------------------------|-----------------------------------------------------------------------------------------|------------|\\n| `query` | str \\\\| sg.exp.Expression | Raw SQL string | _required_ |\\n| `external_tables` | Mapping\\\\[str, pd.DataFrame\\\\] \\\\| None | Mapping of table name to pandas DataFrames providing external datasources for the query | `None` |\\n| `kwargs` | | Backend specific query arguments | `{}` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|--------|-------------------|\\n| Cursor | Clickhouse cursor |\\n\\n\\n### read_csv { #ibis.backends.clickhouse.Backend.read_csv }\\n\\n\\n`read_csv(self, path, table_name=None, engine='MergeTree', **kwargs)`\\n\\n\\nRegister a CSV file as a table in the current backend.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|--------------|-------------|------------------------------------------------------------------------------------------------|------------|\\n| `path` | str \\\\| Path | The data source. A string or Path to the CSV file. | _required_ |\\n| `table_name` | str \\\\| None | An optional name to use for the created table. This defaults to a sequentially generated name. 
| `None` |\\n| `**kwargs` | Any | Additional keyword arguments passed to the backend loading function. | `{}` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|----------|---------------------------|\\n| ir.Table | The just-registered table |\\n\\n\\n### read_parquet { #ibis.backends.clickhouse.Backend.read_parquet }\\n\\n\\n`read_parquet(self, path, table_name=None, engine='MergeTree', **kwargs)`\\n\\n\\nRegister a parquet file as a table in the current backend.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|--------------|-------------|------------------------------------------------------------------------------------------------|------------|\\n| `path` | str \\\\| Path | The data source. | _required_ |\\n| `table_name` | str \\\\| None | An optional name to use for the created table. This defaults to a sequentially generated name. | `None` |\\n| `**kwargs` | Any | Additional keyword arguments passed to the backend loading function. | `{}` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|----------|---------------------------|\\n| ir.Table | The just-registered table |\\n\\n\\n### sql { #ibis.backends.clickhouse.Backend.sql }\\n\\n\\n`sql(self, query, schema=None, dialect=None)`\\n\\n\\n\\n\\n\\n### table { #ibis.backends.clickhouse.Backend.table }\\n\\n\\n`table(self, name, database=None)`\\n\\n\\nConstruct a table expression.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|------------|-------------|---------------|------------|\\n| `name` | str | Table name | _required_ |\\n| `database` | str \\\\| None | Database name | `None` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|--------|------------------|\\n| Table | Table expression |\\n\\n\\n### to_pyarrow { #ibis.backends.clickhouse.Backend.to_pyarrow }\\n\\n\\n`to_pyarrow(self, expr, *, params=None, limit=None, external_tables=None, **kwargs)`\\n\\n\\nExecute expression and return results in as a pyarrow table.\\n\\nThis method is eager and will execute the associated expression\\nimmediately.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|----------|-----------------------------------|--------------------------------------------------------------------------------------------------------------------|------------|\\n| `expr` | ir.Expr | Ibis expression to export to pyarrow | _required_ |\\n| `params` | Mapping\\\\[ir.Scalar, Any\\\\] \\\\| None | Mapping of scalar parameter expressions to value. | `None` |\\n| `limit` | int \\\\| str \\\\| None | An integer to effect a specific row limit. A value of `None` means \\"no limit\\". The default is in `ibis/config.py`. | `None` |\\n| `kwargs` | Any | Keyword arguments | `{}` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|--------|-----------------------------------------------------------------|\\n| Table | A pyarrow table holding the results of the executed expression. 
|\\n\\n\\n### to_pyarrow_batches { #ibis.backends.clickhouse.Backend.to_pyarrow_batches }\\n\\n\\n`to_pyarrow_batches(self, expr, *, limit=None, params=None, external_tables=None, chunk_size=1000000, **_)`\\n\\n\\nExecute expression and return an iterator of pyarrow record batches.\\n\\nThis method is eager and will execute the associated expression\\nimmediately.\\n\\n#### Parameters\\n\\n| Name | Type | Description | Default |\\n|-------------------|-----------------------------------|--------------------------------------------------------------------------------------------------------------------|------------|\\n| `expr` | ir.Expr | Ibis expression to export to pyarrow | _required_ |\\n| `limit` | int \\\\| str \\\\| None | An integer to effect a specific row limit. A value of `None` means \\"no limit\\". The default is in `ibis/config.py`. | `None` |\\n| `params` | Mapping\\\\[ir.Scalar, Any\\\\] \\\\| None | Mapping of scalar parameter expressions to value. | `None` |\\n| `external_tables` | Mapping\\\\[str, Any\\\\] \\\\| None | External data | `None` |\\n| `chunk_size` | int | Maximum number of row to return in a single chunk | `1000000` |\\n\\n#### Returns\\n\\n| Type | Description |\\n|---------|-------------------|\\n| results | RecordBatchReader |\\n\\n#### Notes\\n\\nThere are a variety of ways to implement clickhouse -> record batches.\\n\\n1. FORMAT ArrowStream -> record batches via raw_query\\n This has the same type conversion problem(s) as `to_pyarrow`.\\n It's harder to address due to lack of `cast` on `RecordBatch`.\\n However, this is a ClickHouse problem: we should be able to get\\n string data out without a bunch of settings/permissions rigmarole.\\n2. Native -> Python objects -> pyarrow batches\\n This is what is implemented, using `query_column_block_stream`.\\n3. Native -> Python objects -> DataFrame chunks -> pyarrow batches\\n This is not implemented because it adds an unnecessary pandas step in\\n between Python object -> arrow. 
We can go directly to record batches\\n without pandas in the middle.\\n\\n\\n### truncate_table { #ibis.backends.clickhouse.Backend.truncate_table }\\n\\n\\n`truncate_table(self, name, database=None)`\\n\\n\\n\\n\\n\\n", + "supporting": [ + "clickhouse_files" + ], + "filters": [], + "includes": { + "include-in-header": [ + "<script src=\\"https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js\\" integrity=\\"sha512-c3Nl8+7g4LMSTdrm621y7kf9v3SDPnhxLNhcjFJbKECVnmZHTdo+IRO05sNLTH/D3vA6u1X32ehoLC7WFVdheg==\\" crossorigin=\\"anonymous\\"></script>\\n<script src=\\"https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.min.js\\" integrity=\\"sha512-bLT0Qm9VnAYZDflyKcBaQ2gg0hSYNQrJ8RilYldYQ1FxQYoCLtUjuuRuZo+fjqhx/qtq/1itJ0C2ejDxltZVFg==\\" crossorigin=\\"anonymous\\"></script>\\n<script type=\\"application/javascript\\">define('jquery', [],function() {return window.jQuery;})</script>\\n" + ] + } + } +} \\ No newline at end of file diff --git a/clickhouse.qmd b/clickhouse.qmd index 62a048b..98cdd2c 100644 --- a/clickhouse.qmd +++ b/clickhouse.qmd @@ -1,3 +1,7 @@ +--- +execute: + freeze: auto +--- # ClickHouse [https://clickhouse.com](https://clickhouse.com) @@ -103,25 +107,14 @@ con = ibis.connect(f"clickhouse://{user}:{password}@{host}:{port}?secure={secure ## ClickHouse playground -ClickHouse provides a free playground with several datasets that you can connect to using `ibis`: +ClickHouse provides a free playground with several datasets that you can connect to using Ibis: ```{python} from ibis.interactive import * -con = ibis.clickhouse.connect( - host="play.clickhouse.com", - secure=True, - user="play", - password="clickhouse", -) -con.table("actors") -``` - -or - -```{python} con = ibis.connect("clickhouse://play:[email protected]:443?secure=True") -con.table("opensky") +actors = con.table("actors") +actors ``` ```{python}
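The clickhouse.qmd hunk above collapses two playground examples into a single `ibis.connect` call; note that the password portion of the connection URL appears email-redacted in this dump. For reference, a minimal sketch of both connection styles, using the host, user, and password values taken from the removed keyword-argument block; playground availability and the `actors` dataset are assumptions borrowed from the surrounding docs, not part of the commit itself.

```python
import ibis

# Keyword-argument form, as in the lines the diff removes.
con = ibis.clickhouse.connect(
    host="play.clickhouse.com",  # public ClickHouse playground (values from the removed block)
    user="play",
    password="clickhouse",
    secure=True,
)

# Equivalent URL form, matching the documented `ibis.connect` URL format.
user, password, host, port, secure = "play", "clickhouse", "play.clickhouse.com", 443, True
con = ibis.connect(f"clickhouse://{user}:{password}@{host}:{port}?secure={secure}")

actors = con.table("actors")       # one of the playground datasets used in the doc
print(actors.head(3).to_pandas())  # execute eagerly instead of relying on interactive mode
```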
fix: superclass declaration bug
83cd92bb4837e8a38dd325941903c1c68f1c0366
fix
https://github.com/erg-lang/erg/commit/83cd92bb4837e8a38dd325941903c1c68f1c0366
superclass declaration bug
diff --git a/register.rs b/register.rs index cebc908..985e4f5 100644 --- a/register.rs +++ b/register.rs @@ -1488,6 +1488,47 @@ impl Context { // .retain(|t| !ctx.same_type_of(t, trait_)); } + pub(crate) fn register_base_class(&mut self, ctx: &Self, class: Type) -> CompileResult<()> { + let class_ctx = ctx.get_nominal_type_ctx(&class).ok_or_else(|| { + CompileError::type_not_found( + self.cfg.input.clone(), + line!() as usize, + ().loc(), + self.caused_by(), + &class, + ) + })?; + if class_ctx.typ.has_qvar() { + let _substituter = Substituter::substitute_typarams(ctx, &class_ctx.typ, &class)?; + self.super_classes.push(class); + let mut tv_cache = TyVarCache::new(ctx.level, ctx); + let classes = class_ctx.super_classes.iter().cloned().map(|ty| { + if ty.has_undoable_linked_var() { + ctx.detach(ty, &mut tv_cache) + } else { + ty + } + }); + self.super_classes.extend(classes); + let traits = class_ctx.super_traits.iter().cloned().map(|ty| { + if ty.has_undoable_linked_var() { + ctx.detach(ty, &mut tv_cache) + } else { + ty + } + }); + self.super_traits.extend(traits); + } else { + self.super_classes.push(class); + let classes = class_ctx.super_classes.clone(); + self.super_classes.extend(classes); + let traits = class_ctx.super_traits.clone(); + self.super_traits.extend(traits); + } + unique_in_place(&mut self.super_classes); + Ok(()) + } + pub(crate) fn register_gen_const( &mut self, ident: &Identifier, diff --git a/declare.rs b/declare.rs index ffdf341..2c5ba67 100644 --- a/declare.rs +++ b/declare.rs @@ -937,7 +937,7 @@ impl<A: ASTBuildable> GenericASTLowerer<A> { Ok(()) } - fn declare_subtype(&mut self, ident: &ast::Identifier, trait_: &Type) -> LowerResult<()> { + fn declare_subtype(&mut self, ident: &ast::Identifier, sup: &Type) -> LowerResult<()> { if ident.is_raw() { return Ok(()); } @@ -952,12 +952,16 @@ impl<A: ASTBuildable> GenericASTLowerer<A> { }; if let Some(ctx) = self.module.context.rec_get_mut_type(&name) { let mut tmp = mem::take(ctx); - tmp.register_marker_trait(&self.module.context, trait_.clone()) - .map_err(|err| { - let ctx = self.module.context.rec_get_mut_type(&name).unwrap(); - mem::swap(ctx, &mut tmp); - err - })?; + let res = if self.module.context.is_class(sup) { + tmp.register_base_class(&self.module.context, sup.clone()) + } else { + tmp.register_marker_trait(&self.module.context, sup.clone()) + }; + res.map_err(|err| { + let ctx = self.module.context.rec_get_mut_type(&name).unwrap(); + mem::swap(ctx, &mut tmp); + err + })?; let ctx = self.module.context.rec_get_mut_type(&name).unwrap(); mem::swap(ctx, &mut tmp); Ok(()) diff --git a/__init__.d.er b/__init__.d.er index 665b779..e783f3d 100644 --- a/__init__.d.er +++ b/__init__.d.er @@ -0,0 +1 @@ +{.ToTensor;} = pyimport "./transforms" diff --git a/transforms.d.er b/transforms.d.er index f78c0aa..4352af2 100644 --- a/transforms.d.er +++ b/transforms.d.er @@ -1,3 +1,4 @@ .ToTensor: ClassType +.ToTensor <: GenericCallable .ToTensor. __call__: () -> .ToTensor
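The stub changes above re-export `.ToTensor` from `transforms.d.er` and declare `.ToTensor <: GenericCallable`, i.e. a superclass declaration the fixed `register_base_class` path can now register correctly. These declarations appear to describe torchvision's `transforms.ToTensor`, whose instances are used as callables from Python. The sketch below is illustrative only, assumes `torchvision` and `Pillow` are installed, and is not part of the commit.

```python
from PIL import Image
from torchvision import transforms

# ToTensor instances are callable objects, which is what the
# `.ToTensor <: GenericCallable` declaration in the diff above captures.
to_tensor = transforms.ToTensor()

img = Image.new("RGB", (4, 4))  # tiny placeholder image
tensor = to_tensor(img)         # calling the instance converts the image to a tensor
print(tensor.shape)             # torch.Size([3, 4, 4]): channels, height, width
```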
chore: rerender some posts
4a1d58fc6af2a31c72178a821bb4ac617b5f7062
chore
https://github.com/ibis-project/ibis/commit/4a1d58fc6af2a31c72178a821bb4ac617b5f7062
rerender some posts
diff --git a/html.json b/html.json index e37053f..889cd4f 100644 --- a/html.json +++ b/html.json @@ -1,8 +1,10 @@ { - "hash": "a9a9b6b7357a396dae8dfa0cadd00ec5", + "hash": "0588743209a9486459cbaeabb909e87e", "result": { - "markdown": "---\\nexecute:\\n freeze: auto\\n---\\n\\n# Tutorial: Ibis for dplyr users\\n\\n[R](https://www.r-project.org/) users familiar with [dplyr](https://dplyr.tidyverse.org/), [tidyr](https://tidyr.tidyverse.org/), and other packages in the [Tidyverse](https://www.tidyverse.org/) are likely to find Ibis familiar.\\nIn fact, some Ibis features were even inspired by similar features in the [Tidyverse](https://www.tidyverse.org/).\\n\\nHowever, due to differences between Python and R and the design and goals of Ibis itself, you may notice some big differences right away:\\n\\n- **No pipe:** The handy [magrittr pipe](https://magrittr.tidyverse.org/) (`%>%`) or R's newer native pipe (`|>`) don't exist in Python so you instead have to chain sequences of operations together with a period (`.`). The `.` in Python is analogous to R's `$` which lets you access attributes and methods on objects.\\n- **No unquoted column names:** Non-standard evaluation is common in R but not present in Python. To reference a column in Ibis, you can pass a string, property on a table (e.g., `tbl.some_column`), or you can make use of [selectors](https://ibis-project.org/api/selectors/).\\n- **Ibis is lazy by default:** Similar to [dbplyr](https://dbplyr.tidyverse.org/) and its `collect()` method, Ibis does not evaluate our queries until we call `.to_pandas()`. For the purposes of this document, we set `ibis.options.interactive = True` which limits results to 10 rows, executes automatically, and prints a nicely-formatted table.\\n\\nUsing the same example data and similar operations as in [Introduction to dplyr](https://dplyr.tidyverse.org/articles/dplyr.html), below you will find some examples of the more common dplyr and tidyr operations and their Ibis equivalents.\\n\\n## Loading Ibis\\n\\nTo start using dplyr in R we would run:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nlibrary(dplyr)\\n```\\n:::\\n\\n\\nTo load Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nimport ibis\\n```\\n:::\\n\\n\\nAnd then also load and alias some helpers to make our code more concise:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nimport ibis.selectors as s\\nfrom ibis import _\\n```\\n:::\\n\\n\\nLast, as mentioned above, to get Ibis to automatically execute our queries and show the results in a nicely-formatted table, we run:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nibis.options.interactive = True\\n```\\n:::\\n\\n\\n## Loading example data\\n\\nIn R, datasets are typically lazily loaded with packages. For instance, the `starwars` dataset is packaged with dplyr, but is not loaded in memory before you start using it. Ibis provides many datasets in the `examples` module. 
So to be able to use the `starwars` dataset, you can use:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars = ibis.examples.starwars.fetch()\\n```\\n:::\\n\\n\\nSimilar to dplyr, if we evaluate the name of a table, we get a nicely-formatted table:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────────┼────────┼─────────┼───────────────┼─────────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n│ Beru Whitesun lars │ 165 │ 75.0 │ brown │ light │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Obi-Wan Kenobi │ 182 │ 77.0 │ auburn, white │ fair │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└────────────────────┴────────┴─────────┴───────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn addition to printing a nicely-formatted table and automatically executing, setting `ibis.options.interactive` to `True` also causes our query to be limited to 10 rows. To get Ibis to give us all rows, we can directly call `to_pandas` and save the result as a pandas DataFrame:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars_df = starwars.to_pandas()\\n```\\n:::\\n\\n\\nWhich then gives us all of the data as a pandas DataFrame:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars_df\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n name height mass hair_color ... species films vehicles starships\\n0 Luke Skywalker 172.0 77.0 blond ... Human None None None\\n1 C-3PO 167.0 75.0 None ... Droid None None None\\n2 R2-D2 96.0 32.0 None ... Droid None None None\\n3 Darth Vader 202.0 136.0 none ... Human None None None\\n4 Leia Organa 150.0 49.0 brown ... Human None None None\\n.. ... ... ... ... ... ... ... ... ...\\n82 Rey NaN NaN brown ... Human None None None\\n83 Poe Dameron NaN NaN brown ... Human None None None\\n84 BB8 NaN NaN none ... Droid None None None\\n85 Captain Phasma NaN NaN unknown ... None None None None\\n86 Padmé Amidala 165.0 45.0 brown ... Human None None None\\n\\n[87 rows x 14 columns]\\n```\\n\\n\\n:::\\n:::\\n\\n\\nDirectly calling `to_pandas` and saving the result to a variable is useful for passing the results of Ibis table expressions to other packages (e.g., matplotlib).\\n\\n## Inspecting the dataset with `head()`\\n\\nJust like in R, you can use `head()` to inspect the beginning of a dataset. 
You can also specify the number of rows you want to get back by using the parameter `n` (default `n = 5`).\\n\\nIn R:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nhead(starwars) # or starwars |> head()\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 6 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n1 Luke Sky… 172 77 blond fair blue 19 male mascu…\\n2 C-3PO 167 75 <NA> gold yellow 112 none mascu…\\n3 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n4 Darth Va… 202 136 none white yellow 41.9 male mascu…\\n5 Leia Org… 150 49 brown light brown 19 fema… femin…\\n6 Owen Lars 178 120 brown, gr… light blue 52 male mascu…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nWith Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.head(6)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────┼────────┼─────────┼─────────────┼─────────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n└────────────────┴────────┴─────────┴─────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nThere is no `tail()` in Ibis because most databases do not support this operation.\\n\\nAnother method you can use to limit the number of rows returned by a query is `limit()` which also takes the `n` parameter.\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.limit(3)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ eye_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ string │ … │\\n├────────────────┼────────┼─────────┼────────────┼─────────────┼───────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ blue │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ yellow │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ red │ … │\\n└────────────────┴────────┴─────────┴────────────┴─────────────┴───────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Filtering rows with filter()\\n\\nIbis, like dplyr, has `filter` to select rows based on conditions.\\n\\nWith dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n filter(skin_color == \\"light\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 11 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 Leia Or… 150 49 brown light brown 19 fema… femin…\\n 2 Owen La… 178 120 brown, gr… light blue 52 male mascu…\\n 3 Beru Wh… 165 75 brown light blue 47 fema… femin…\\n 4 Biggs D… 183 84 black light brown 24 male mascu…\\n 5 Lobot 175 79 none light blue 37 male mascu…\\n 6 Cordé 157 NA brown light brown NA fema… femin…\\n 7 Dormé 165 NA brown light brown NA fema… 
femin…\\n 8 Raymus … 188 79 brown light brown NA male mascu…\\n 9 Rey NA NA brown light hazel NA fema… femin…\\n10 Poe Dam… NA NA brown light brown NA male mascu…\\n11 Padmé A… 165 45 brown light brown 46 fema… femin…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter(_.skin_color == \\"light\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────────┼────────┼─────────┼─────────────┼────────────┼───┤\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n│ Beru Whitesun lars │ 165 │ 75.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Lobot │ 175 │ 79.0 │ none │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ Raymus Antilles │ 188 │ 79.0 │ brown │ light │ … │\\n│ Rey │ NULL │ nan │ brown │ light │ … │\\n│ Poe Dameron │ NULL │ nan │ brown │ light │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└────────────────────┴────────┴─────────┴─────────────┴────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn dplyr, you can specify multiple conditions separated with `,` that are then combined with the `&` operator:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n filter(skin_color == \\"light\\", eye_color == \\"brown\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 7 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n1 Leia Org… 150 49 brown light brown 19 fema… femin…\\n2 Biggs Da… 183 84 black light brown 24 male mascu…\\n3 Cordé 157 NA brown light brown NA fema… femin…\\n4 Dormé 165 NA brown light brown NA fema… femin…\\n5 Raymus A… 188 79 brown light brown NA male mascu…\\n6 Poe Dame… NA NA brown light brown NA male mascu…\\n7 Padmé Am… 165 45 brown light brown 46 fema… femin…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, you can do the same by putting multiple conditions in a list:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter([_.skin_color == \\"light\\", _.eye_color == \\"brown\\"])\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────┼────────┼─────────┼────────────┼────────────┼───┤\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ Raymus Antilles │ 188 │ 79.0 │ brown │ light │ … │\\n│ Poe Dameron │ NULL │ nan │ brown │ light │ … │\\n│ Padmé Amidala │ 165 │ 45.0 │ brown │ light │ … │\\n└───────────────────┴────────┴─────────┴────────────┴────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn previous code, we used the `_` helper we 
imported earlier. The `_` is shorthand for the table returned by the previous step in the chained sequence of operations (in this case, `starwars`). We could have also written the more verbose form,\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter([starwars.skin_color == \\"light\\", starwars.eye_color == \\"brown\\"])\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────┼────────┼─────────┼────────────┼────────────┼───┤\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ Raymus Antilles │ 188 │ 79.0 │ brown │ light │ … │\\n│ Poe Dameron │ NULL │ nan │ brown │ light │ … │\\n│ Padmé Amidala │ 165 │ 45.0 │ brown │ light │ … │\\n└───────────────────┴────────┴─────────┴────────────┴────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIf you want to combine multiple conditions, in dplyr, you could do:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n filter(\\n (skin_color == \\"light\\" & eye_color == \\"brown\\") |\\n species == \\"Droid\\"\\n )\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 13 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 C-3PO 167 75 <NA> gold yellow 112 none mascu…\\n 2 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n 3 Leia Or… 150 49 brown light brown 19 fema… femin…\\n 4 R5-D4 97 32 <NA> white, red red NA none mascu…\\n 5 Biggs D… 183 84 black light brown 24 male mascu…\\n 6 IG-88 200 140 none metal red 15 none mascu…\\n 7 Cordé 157 NA brown light brown NA fema… femin…\\n 8 Dormé 165 NA brown light brown NA fema… femin…\\n 9 R4-P17 96 NA none silver, r… red, blue NA none femin…\\n10 Raymus … 188 79 brown light brown NA male mascu…\\n11 Poe Dam… NA NA brown light brown NA male mascu…\\n12 BB8 NA NA none none black NA none mascu…\\n13 Padmé A… 165 45 brown light brown 46 fema… femin…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, this would be:\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter(\\n ((_.skin_color == \\"light\\") & (_.eye_color == \\"brown\\")) |\\n (_.species == \\"Droid\\")\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────┼────────┼─────────┼────────────┼─────────────┼───┤\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ IG-88 │ 200 │ 140.0 │ none │ metal │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ R4-P17 │ 96 │ nan │ none │ silver, red │ … │\\n│ BB8 │ NULL │ nan │ none │ none │ … │\\n│ … │ … │ … │ … │ … │ … 
│\\n└───────────────────┴────────┴─────────┴────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Sorting your data with order_by()\\n\\nTo sort a column, dplyr has the verb `arrange`. For instance, to sort the column `height` using dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n arrange(height)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 Yoda 66 17 white green brown 896 male mascu…\\n 2 Ratts T… 79 15 none grey, blue unknown NA male mascu…\\n 3 Wicket … 88 20 brown brown brown 8 male mascu…\\n 4 Dud Bolt 94 45 none blue, grey yellow NA male mascu…\\n 5 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n 6 R4-P17 96 NA none silver, r… red, blue NA none femin…\\n 7 R5-D4 97 32 <NA> white, red red NA none mascu…\\n 8 Sebulba 112 40 none grey, red orange NA male mascu…\\n 9 Gasgano 122 NA none white, bl… black NA male mascu…\\n10 Watto 137 NA black blue, grey yellow NA male mascu…\\n# ℹ 77 more rows\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIbis has the `order_by` method, so to perform the same operation:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.order_by(_.height)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────────┼────────┼─────────┼────────────┼─────────────┼───┤\\n│ Yoda │ 66 │ 17.0 │ white │ green │ … │\\n│ Ratts Tyerell │ 79 │ 15.0 │ none │ grey, blue │ … │\\n│ Wicket Systri Warrick │ 88 │ 20.0 │ brown │ brown │ … │\\n│ Dud Bolt │ 94 │ 45.0 │ none │ blue, grey │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ R4-P17 │ 96 │ nan │ none │ silver, red │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Sebulba │ 112 │ 40.0 │ none │ grey, red │ … │\\n│ Gasgano │ 122 │ nan │ none │ white, blue │ … │\\n│ Watto │ 137 │ nan │ black │ blue, grey │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└───────────────────────┴────────┴─────────┴────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nYou might notice that while dplyr puts missing values at the end, Ibis places them at the top. 
This behavior can actually vary from backend to backend and is something to be aware of when using Ibis.\\n\\nIf you want to order using multiple variables, you can pass them as a list:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.order_by([_.height, _.mass]) # or starwars.order_by([\\"height\\", \\"mass\\"])\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────────┼────────┼─────────┼────────────┼─────────────┼───┤\\n│ Yoda │ 66 │ 17.0 │ white │ green │ … │\\n│ Ratts Tyerell │ 79 │ 15.0 │ none │ grey, blue │ … │\\n│ Wicket Systri Warrick │ 88 │ 20.0 │ brown │ brown │ … │\\n│ Dud Bolt │ 94 │ 45.0 │ none │ blue, grey │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ R4-P17 │ 96 │ nan │ none │ silver, red │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Sebulba │ 112 │ 40.0 │ none │ grey, red │ … │\\n│ Gasgano │ 122 │ nan │ none │ white, blue │ … │\\n│ Watto │ 137 │ nan │ black │ blue, grey │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└───────────────────────┴────────┴─────────┴────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nTo order a column in descending order, there are two ways to do it. Note that missing values remain at the top.\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.order_by(_.height.desc()) # or: starwars.order_by(ibis.desc(\\"height\\"))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├──────────────┼────────┼─────────┼────────────┼──────────────┼───┤\\n│ Yarael Poof │ 264 │ nan │ none │ white │ … │\\n│ Tarfful │ 234 │ 136.0 │ brown │ brown │ … │\\n│ Lama Su │ 229 │ 88.0 │ none │ grey │ … │\\n│ Chewbacca │ 228 │ 112.0 │ brown │ unknown │ … │\\n│ Roos Tarpals │ 224 │ 82.0 │ none │ grey │ … │\\n│ Grievous │ 216 │ 159.0 │ none │ brown, white │ … │\\n│ Taun We │ 213 │ nan │ none │ grey │ … │\\n│ Rugor Nass │ 206 │ nan │ none │ green │ … │\\n│ Tion Medon │ 206 │ 80.0 │ none │ grey │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└──────────────┴────────┴─────────┴────────────┴──────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Selecting columns with select()\\n\\nIbis, like dplyr, has a `select` method to include or exclude columns:\\n\\nWith dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n select(hair_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 1\\n hair_color \\n <chr> \\n 1 blond \\n 2 <NA> \\n 3 <NA> \\n 4 none \\n 5 brown \\n 6 brown, grey \\n 7 brown \\n 8 <NA> \\n 9 black \\n10 auburn, white\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(_.hair_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┓\\n┃ hair_color ┃\\n┡━━━━━━━━━━━━━━━┩\\n│ string │\\n├───────────────┤\\n│ blond │\\n│ NULL │\\n│ NULL │\\n│ none │\\n│ brown │\\n│ brown, grey │\\n│ brown │\\n│ NULL │\\n│ black │\\n│ auburn, white │\\n│ … 
│\\n└───────────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nNote: A common pitfall to be aware of when referencing column names in Ibis is when column names collide with built-in methods on the Ibis Table object, such as `count`. In this situation, you will have to reference `count` like `table[\\"count\\"]` or `_[\\"count\\"]`.\\n\\ndplyr also allows selecting more than one column at a time:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n select(hair_color, skin_color, eye_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 3\\n hair_color skin_color eye_color\\n <chr> <chr> <chr> \\n 1 blond fair blue \\n 2 <NA> gold yellow \\n 3 <NA> white, blue red \\n 4 none white yellow \\n 5 brown light brown \\n 6 brown, grey light blue \\n 7 brown light blue \\n 8 <NA> white, red red \\n 9 black light brown \\n10 auburn, white fair blue-gray\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, we can either quote the names:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(\\"hair_color\\", \\"skin_color\\", \\"eye_color\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├───────────────┼─────────────┼───────────┤\\n│ blond │ fair │ blue │\\n│ NULL │ gold │ yellow │\\n│ NULL │ white, blue │ red │\\n│ none │ white │ yellow │\\n│ brown │ light │ brown │\\n│ brown, grey │ light │ blue │\\n│ brown │ light │ blue │\\n│ NULL │ white, red │ red │\\n│ black │ light │ brown │\\n│ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │\\n└───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nOr use the `_` helper:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(_.hair_color, _.skin_color, _.eye_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├───────────────┼─────────────┼───────────┤\\n│ blond │ fair │ blue │\\n│ NULL │ gold │ yellow │\\n│ NULL │ white, blue │ red │\\n│ none │ white │ yellow │\\n│ brown │ light │ brown │\\n│ brown, grey │ light │ blue │\\n│ brown │ light │ blue │\\n│ NULL │ white, red │ red │\\n│ black │ light │ brown │\\n│ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │\\n└───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nTo select columns by name based on a condition, dplyr has helpers such as:\\n\\n- starts_with(): Starts with a prefix.\\n- ends_with(): Ends with a suffix.\\n- contains(): Contains a literal string.\\n\\nThese and many more [selectors](https://ibis-project.org/api/selectors/) are available in Ibis as well, with slightly different names:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(s.startswith(\\"h\\"))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ height ┃ hair_color ┃ homeworld ┃\\n┡━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ int64 │ string │ string │\\n├────────┼───────────────┼───────────┤\\n│ 172 │ blond │ Tatooine │\\n│ 167 │ NULL │ Tatooine │\\n│ 96 │ NULL │ Naboo │\\n│ 202 │ none │ Tatooine │\\n│ 150 │ brown │ Alderaan │\\n│ 178 │ brown, grey │ Tatooine │\\n│ 165 │ brown │ Tatooine │\\n│ 97 │ NULL │ Tatooine │\\n│ 183 │ black │ Tatooine │\\n│ 182 │ auburn, white │ Stewjon │\\n│ … │ … │ … 
│\\n└────────┴───────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(s.endswith(\\"color\\"))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├───────────────┼─────────────┼───────────┤\\n│ blond │ fair │ blue │\\n│ NULL │ gold │ yellow │\\n│ NULL │ white, blue │ red │\\n│ none │ white │ yellow │\\n│ brown │ light │ brown │\\n│ brown, grey │ light │ blue │\\n│ brown │ light │ blue │\\n│ NULL │ white, red │ red │\\n│ black │ light │ brown │\\n│ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │\\n└───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(s.contains(\\"world\\"))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━┓\\n┃ homeworld ┃\\n┡━━━━━━━━━━━┩\\n│ string │\\n├───────────┤\\n│ Tatooine │\\n│ Tatooine │\\n│ Naboo │\\n│ Tatooine │\\n│ Alderaan │\\n│ Tatooine │\\n│ Tatooine │\\n│ Tatooine │\\n│ Tatooine │\\n│ Stewjon │\\n│ … │\\n└───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nSee the Ibis [Column Selectors](https://ibis-project.org/api/selectors/) documentation for the full list of selectors in Ibis.\\n\\n## Renaming columns with relabel()\\n\\nIbis allows you to rename columns using `relabel()` which provides similar functionality to `rename()` in dplyr.\\n\\nIn dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n rename(\\"home_world\\" = \\"homeworld\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 Luke Sk… 172 77 blond fair blue 19 male mascu…\\n 2 C-3PO 167 75 <NA> gold yellow 112 none mascu…\\n 3 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n 4 Darth V… 202 136 none white yellow 41.9 male mascu…\\n 5 Leia Or… 150 49 brown light brown 19 fema… femin…\\n 6 Owen La… 178 120 brown, gr… light blue 52 male mascu…\\n 7 Beru Wh… 165 75 brown light blue 47 fema… femin…\\n 8 R5-D4 97 32 <NA> white, red red NA none mascu…\\n 9 Biggs D… 183 84 black light brown 24 male mascu…\\n10 Obi-Wan… 182 77 auburn, w… fair blue-gray 57 male mascu…\\n# ℹ 77 more rows\\n# ℹ 5 more variables: home_world <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, use `relabel` and pass a `dict` of name mappings:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.rename(home_world=\\"homeworld\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────────┼────────┼─────────┼───────────────┼─────────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n│ Beru Whitesun lars │ 165 │ 75.0 │ brown │ light │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Biggs Darklighter │ 
183 │ 84.0 │ black │ light │ … │\\n│ Obi-Wan Kenobi │ 182 │ 77.0 │ auburn, white │ fair │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└────────────────────┴────────┴─────────┴───────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Add new columns with mutate()\\n\\nIbis, like dplyr, uses the `mutate` verb to add columns.\\n\\nIn dplyr,\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n mutate(height_m = height / 100) |>\\n select(name, height_m)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 2\\n name height_m\\n <chr> <dbl>\\n 1 Luke Skywalker 1.72\\n 2 C-3PO 1.67\\n 3 R2-D2 0.96\\n 4 Darth Vader 2.02\\n 5 Leia Organa 1.5 \\n 6 Owen Lars 1.78\\n 7 Beru Whitesun lars 1.65\\n 8 R5-D4 0.97\\n 9 Biggs Darklighter 1.83\\n10 Obi-Wan Kenobi 1.82\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(\\n starwars\\n .mutate(height_m = _.height / 100)\\n .select(\\"name\\", \\"height_m\\")\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━┓\\n┃ name ┃ height_m ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━┩\\n│ string │ float64 │\\n├────────────────────┼──────────┤\\n│ Luke Skywalker │ 1.72 │\\n│ C-3PO │ 1.67 │\\n│ R2-D2 │ 0.96 │\\n│ Darth Vader │ 2.02 │\\n│ Leia Organa │ 1.50 │\\n│ Owen Lars │ 1.78 │\\n│ Beru Whitesun lars │ 1.65 │\\n│ R5-D4 │ 0.97 │\\n│ Biggs Darklighter │ 1.83 │\\n│ Obi-Wan Kenobi │ 1.82 │\\n│ … │ … │\\n└────────────────────┴──────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nA big difference between dplyr's `mutate` and Ibis' `mutate` is that, in Ibis, you have to chain separate `mutate` calls together when you reference newly-created columns in the same `mutate` whereas in dplyr, you can put them all in the same call. 
This makes Ibis' `mutate` more similar to `transform` in base R.\\n\\nIn dplyr, we only need one `mutate` call:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars %>%\\n mutate(\\n height_m = height / 100,\\n BMI = mass / (height_m^2)\\n ) %>%\\n select(BMI, everything())\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 16\\n BMI name height mass hair_color skin_color eye_color birth_year sex \\n <dbl> <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr>\\n 1 26.0 Luke Sky… 172 77 blond fair blue 19 male \\n 2 26.9 C-3PO 167 75 <NA> gold yellow 112 none \\n 3 34.7 R2-D2 96 32 <NA> white, bl… red 33 none \\n 4 33.3 Darth Va… 202 136 none white yellow 41.9 male \\n 5 21.8 Leia Org… 150 49 brown light brown 19 fema…\\n 6 37.9 Owen Lars 178 120 brown, gr… light blue 52 male \\n 7 27.5 Beru Whi… 165 75 brown light blue 47 fema…\\n 8 34.0 R5-D4 97 32 <NA> white, red red NA none \\n 9 25.1 Biggs Da… 183 84 black light brown 24 male \\n10 23.2 Obi-Wan … 182 77 auburn, w… fair blue-gray 57 male \\n# ℹ 77 more rows\\n# ℹ 7 more variables: gender <chr>, homeworld <chr>, species <chr>,\\n# films <list>, vehicles <list>, starships <list>, height_m <dbl>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, for `BMI` to reference `height_m`, it needs to be in a separate `mutate` call:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(starwars\\n .mutate(\\n height_m = _.height / 100\\n )\\n .mutate(\\n BMI = _.mass / (_.height_m**2)\\n )\\n .select(\\"BMI\\", ~s.matches(\\"BMI\\"))\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━┓\\n┃ BMI ┃ name ┃ height ┃ mass ┃ hair_color ┃ … ┃\\n┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━┩\\n│ float64 │ string │ int64 │ float64 │ string │ … │\\n├───────────┼────────────────────┼────────┼─────────┼───────────────┼───┤\\n│ 26.027582 │ Luke Skywalker │ 172 │ 77.0 │ blond │ … │\\n│ 26.892323 │ C-3PO │ 167 │ 75.0 │ NULL │ … │\\n│ 34.722222 │ R2-D2 │ 96 │ 32.0 │ NULL │ … │\\n│ 33.330066 │ Darth Vader │ 202 │ 136.0 │ none │ … │\\n│ 21.777778 │ Leia Organa │ 150 │ 49.0 │ brown │ … │\\n│ 37.874006 │ Owen Lars │ 178 │ 120.0 │ brown, grey │ … │\\n│ 27.548209 │ Beru Whitesun lars │ 165 │ 75.0 │ brown │ … │\\n│ 34.009990 │ R5-D4 │ 97 │ 32.0 │ NULL │ … │\\n│ 25.082863 │ Biggs Darklighter │ 183 │ 84.0 │ black │ … │\\n│ 23.245985 │ Obi-Wan Kenobi │ 182 │ 77.0 │ auburn, white │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└───────────┴────────────────────┴────────┴─────────┴───────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Summarize values with aggregate()\\n\\nTo summarize tables, dplyr has the verbs `summarise`/`summarize`:\\n\\nIn dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars %>%\\n summarise(height = mean(height, na.rm = TRUE))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 1 × 1\\n height\\n <dbl>\\n1 174.\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, the corresponding verb is `aggregate`:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.aggregate(height = _.height.mean())\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━┓\\n┃ height ┃\\n┡━━━━━━━━━━━━┩\\n│ float64 │\\n├────────────┤\\n│ 174.358025 │\\n└────────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n**Note**: Throughout this guide, where dplyr uses R generics, Ibis uses Python methods. In the previous code cell, `aggregate` is a method on a _table_ and `mean` is a method on a _column_. 
If you want to perform aggregations on multiple columns, you can call the method that you want on the column you want to apply it to.\\n\\n## Join tables with left_join()\\n\\nTo demonstrate how to do joins with Ibis, we'll load two more example datasets that also come from the example datasets included in dplyr:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nband_members = ibis.examples.band_members.fetch()\\nband_instruments = ibis.examples.band_instruments.fetch()\\n```\\n:::\\n\\n\\nIn dplyr, we can perform a left join of these two tables like:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nband_members |>\\n left_join(band_instruments)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 3 × 3\\n name band plays \\n <chr> <chr> <chr> \\n1 Mick Stones <NA> \\n2 John Beatles guitar\\n3 Paul Beatles bass \\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nband_members.left_join(band_instruments, \\"name\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━┓\\n┃ name ┃ band ┃ name_right ┃ plays ┃\\n┡━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━┩\\n│ string │ string │ string │ string │\\n├────────┼─────────┼────────────┼────────┤\\n│ John │ Beatles │ John │ guitar │\\n│ Paul │ Beatles │ Paul │ bass │\\n│ Mick │ Stones │ NULL │ NULL │\\n└────────┴─────────┴────────────┴────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nThere are two main differences between Ibis and dplyr here:\\n\\n- Ibis requires us to explicitly specify our join key (\\"name\\", in this example) whereas in dplyr, if the join key is missing, we get the natural join of the two tables which joins across all shared column names\\n- Ibis keeps columns for join keys from each table whereas dplyr does not by default\\n\\nTo replicate the result we'd get by default in dplyr but using Ibis, we need to incorporate two other verbs we've already seen in this tutorial:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(\\n band_members\\n .left_join(band_instruments, \\"name\\")\\n .select(~s.contains(\\"_right\\"))\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓\\n┃ name ┃ band ┃ plays ┃\\n┡━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩\\n│ string │ string │ string │\\n├────────┼─────────┼────────┤\\n│ John │ Beatles │ guitar │\\n│ Paul │ Beatles │ bass │\\n│ Mick │ Stones │ NULL │\\n└────────┴─────────┴────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Pivot data with pivot_wider() and pivot_longer()\\n\\ndplyr users are likely to be familiar with the `pivot_wider` and `pivot_longer` functions from the [tidyr](https://tidyr.tidyverse.org) package which convert tables between wide and long formats, respectively.\\n\\nIn dplyr+tidyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nlibrary(tidyr)\\n\\nstarwars_colors <-\\n starwars |>\\n select(name, matches(\\"color\\")) |>\\n pivot_longer(matches(\\"color\\"), names_to = \\"attribute\\", values_to = \\"color\\")\\n```\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars_colors = (\\n starwars\\n .select(\\"name\\", s.matches(\\"color\\"))\\n .pivot_longer(s.matches(\\"color\\"), names_to=\\"attribute\\", values_to=\\"color\\")\\n)\\n\\nstarwars_colors\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┓\\n┃ name ┃ attribute ┃ color ┃\\n┡━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├────────────────┼────────────┼─────────────┤\\n│ Luke 
Skywalker │ hair_color │ blond │\\n│ Luke Skywalker │ skin_color │ fair │\\n│ Luke Skywalker │ eye_color │ blue │\\n│ C-3PO │ hair_color │ NULL │\\n│ C-3PO │ skin_color │ gold │\\n│ C-3PO │ eye_color │ yellow │\\n│ R2-D2 │ hair_color │ NULL │\\n│ R2-D2 │ skin_color │ white, blue │\\n│ R2-D2 │ eye_color │ red │\\n│ Darth Vader │ hair_color │ none │\\n│ … │ … │ … │\\n└────────────────┴────────────┴─────────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nAnd the reverse, in dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars_colors |>\\n pivot_wider(names_from = \\"attribute\\", values_from = \\"color\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 4\\n name hair_color skin_color eye_color\\n <chr> <chr> <chr> <chr> \\n 1 Luke Skywalker blond fair blue \\n 2 C-3PO <NA> gold yellow \\n 3 R2-D2 <NA> white, blue red \\n 4 Darth Vader none white yellow \\n 5 Leia Organa brown light brown \\n 6 Owen Lars brown, grey light blue \\n 7 Beru Whitesun lars brown light blue \\n 8 R5-D4 <NA> white, red red \\n 9 Biggs Darklighter black light brown \\n10 Obi-Wan Kenobi auburn, white fair blue-gray\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(\\n starwars_colors.\\n pivot_wider(names_from=\\"attribute\\", values_from=\\"color\\")\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ name ┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │ string │\\n├────────────────────┼───────────────┼─────────────┼───────────┤\\n│ Luke Skywalker │ blond │ fair │ blue │\\n│ C-3PO │ NULL │ gold │ yellow │\\n│ R2-D2 │ NULL │ white, blue │ red │\\n│ Darth Vader │ none │ white │ yellow │\\n│ Leia Organa │ brown │ light │ brown │\\n│ Owen Lars │ brown, grey │ light │ blue │\\n│ Beru Whitesun lars │ brown │ light │ blue │\\n│ R5-D4 │ NULL │ white, red │ red │\\n│ Biggs Darklighter │ black │ light │ brown │\\n│ Obi-Wan Kenobi │ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │ … │\\n└────────────────────┴───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Next Steps\\n\\nNow that you've gotten an introduction to the common differences between dplyr and Ibis, head over to [Getting Started with ibis](./getting_started.qmd) for a full introduction. If you're familiar with SQL, check out [Ibis for SQL Programmers](https://ibis-project.org/tutorial/ibis-for-sql-users/). If you're familiar with pandas, take a look at [Ibis for pandas Users](https://ibis-project.org/tutorial/ibis-for-pandas-users/.)\\n", - "supporting": [], + "markdown": "---\\nexecute:\\n freeze: auto\\n---\\n\\n# Tutorial: Ibis for dplyr users\\n\\n[R](https://www.r-project.org/) users familiar with\\n[dplyr](https://dplyr.tidyverse.org/), [tidyr](https://tidyr.tidyverse.org/),\\nand other packages in the [Tidyverse](https://www.tidyverse.org/) are likely to\\nfind Ibis familiar.\\n\\nIn fact, some Ibis features were even inspired by similar features in the\\n[Tidyverse](https://www.tidyverse.org/).\\n\\nHowever, due to differences between Python and R and the design and goals of\\nIbis itself, you may notice some big differences right away:\\n\\n- **No pipe:** The handy [magrittr pipe](https://magrittr.tidyverse.org/)\\n (`%>%`) or R's newer native pipe (`|>`) don't exist in Python so you instead\\n have to chain sequences of operations together with a period (`.`). 
The `.`\\n in Python is analogous to R's `$` which lets you access attributes and\\n methods on objects.\\n- **No unquoted column names:** Non-standard evaluation is common in R but not\\n present in Python. To reference a column in Ibis, you can pass a string,\\n property on a table (e.g., `tbl.some_column`), or you can make use of\\n [selectors](../reference/selectors.qmd).\\n- **Ibis is lazy by default:** Similar to\\n [dbplyr](https://dbplyr.tidyverse.org/) and its `collect()` method, Ibis does\\n not evaluate our queries until we call `.to_pandas()`. For the purposes of\\n this document, we set `ibis.options.interactive = True` which limits results\\n to 10 rows, executes automatically, and prints a nicely-formatted table.\\n\\nUsing the same example data and similar operations as in [Introduction to\\ndplyr](https://dplyr.tidyverse.org/articles/dplyr.html), below you will find\\nsome examples of the more common dplyr and tidyr operations and their Ibis\\nequivalents.\\n\\n## Loading Ibis\\n\\nTo start using dplyr in R we would run:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nlibrary(dplyr)\\n```\\n:::\\n\\n\\nTo load Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nimport ibis\\n```\\n:::\\n\\n\\nAnd then also load and alias some helpers to make our code more concise:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nimport ibis.selectors as s\\nfrom ibis import _\\n```\\n:::\\n\\n\\nLast, as mentioned above, to get Ibis to automatically execute our queries and\\nshow the results in a nicely-formatted table, we run:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nibis.options.interactive = True\\n```\\n:::\\n\\n\\n## Loading example data\\n\\nIn R, datasets are typically lazily loaded with packages. For instance, the\\n`starwars` dataset is packaged with dplyr, but is not loaded in memory before\\nyou start using it. Ibis provides many datasets in the `examples` module. So to\\nbe able to use the `starwars` dataset, you can use:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars = ibis.examples.starwars.fetch()\\n```\\n:::\\n\\n\\nSimilar to dplyr, if we evaluate the name of a table, we get a nicely-formatted\\ntable:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────────┼────────┼─────────┼───────────────┼─────────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n│ Beru Whitesun lars │ 165 │ 75.0 │ brown │ light │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Obi-Wan Kenobi │ 182 │ 77.0 │ auburn, white │ fair │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└────────────────────┴────────┴─────────┴───────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn addition to printing a nicely-formatted table and automatically executing,\\nsetting `ibis.options.interactive` to `True` also causes our query to be\\nlimited to 10 rows. 
To get Ibis to give us all rows, we can directly call\\n`to_pandas` and save the result as a pandas DataFrame:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars_df = starwars.to_pandas()\\n```\\n:::\\n\\n\\nWhich then gives us all of the data as a pandas DataFrame:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars_df\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n name height mass hair_color ... species films vehicles starships\\n0 Luke Skywalker 172.0 77.0 blond ... Human None None None\\n1 C-3PO 167.0 75.0 None ... Droid None None None\\n2 R2-D2 96.0 32.0 None ... Droid None None None\\n3 Darth Vader 202.0 136.0 none ... Human None None None\\n4 Leia Organa 150.0 49.0 brown ... Human None None None\\n.. ... ... ... ... ... ... ... ... ...\\n82 Rey NaN NaN brown ... Human None None None\\n83 Poe Dameron NaN NaN brown ... Human None None None\\n84 BB8 NaN NaN none ... Droid None None None\\n85 Captain Phasma NaN NaN unknown ... None None None None\\n86 Padmé Amidala 165.0 45.0 brown ... Human None None None\\n\\n[87 rows x 14 columns]\\n```\\n\\n\\n:::\\n:::\\n\\n\\nDirectly calling `to_pandas` and saving the result to a variable is useful for\\npassing the results of Ibis table expressions to other packages (e.g.,\\nmatplotlib).\\n\\n## Inspecting the dataset with `head()`\\n\\nJust like in R, you can use `head()` to inspect the beginning of a dataset. You\\ncan also specify the number of rows you want to get back by using the parameter\\n`n` (default `n = 5`).\\n\\nIn R:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nhead(starwars) # or starwars |> head()\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 6 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n1 Luke Sky… 172 77 blond fair blue 19 male mascu…\\n2 C-3PO 167 75 <NA> gold yellow 112 none mascu…\\n3 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n4 Darth Va… 202 136 none white yellow 41.9 male mascu…\\n5 Leia Org… 150 49 brown light brown 19 fema… femin…\\n6 Owen Lars 178 120 brown, gr… light blue 52 male mascu…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nWith Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.head(6)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────┼────────┼─────────┼─────────────┼─────────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n└────────────────┴────────┴─────────┴─────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nThere is no `tail()` in Ibis because most databases do not support this\\noperation.\\n\\nAnother method you can use to limit the number of rows returned by a query is\\n`limit()` which also takes the `n` parameter.\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.limit(3)\\n```\\n\\n::: {.cell-output 
.cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ eye_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ string │ … │\\n├────────────────┼────────┼─────────┼────────────┼─────────────┼───────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ blue │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ yellow │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ red │ … │\\n└────────────────┴────────┴─────────┴────────────┴─────────────┴───────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Filtering rows with filter()\\n\\nIbis, like dplyr, has `filter` to select rows based on conditions.\\n\\nWith dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n filter(skin_color == \\"light\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 11 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 Leia Or… 150 49 brown light brown 19 fema… femin…\\n 2 Owen La… 178 120 brown, gr… light blue 52 male mascu…\\n 3 Beru Wh… 165 75 brown light blue 47 fema… femin…\\n 4 Biggs D… 183 84 black light brown 24 male mascu…\\n 5 Lobot 175 79 none light blue 37 male mascu…\\n 6 Cordé 157 NA brown light brown NA fema… femin…\\n 7 Dormé 165 NA brown light brown NA fema… femin…\\n 8 Raymus … 188 79 brown light brown NA male mascu…\\n 9 Rey NA NA brown light hazel NA fema… femin…\\n10 Poe Dam… NA NA brown light brown NA male mascu…\\n11 Padmé A… 165 45 brown light brown 46 fema… femin…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter(_.skin_color == \\"light\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────────┼────────┼─────────┼─────────────┼────────────┼───┤\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n│ Beru Whitesun lars │ 165 │ 75.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Lobot │ 175 │ 79.0 │ none │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ Raymus Antilles │ 188 │ 79.0 │ brown │ light │ … │\\n│ Rey │ NULL │ nan │ brown │ light │ … │\\n│ Poe Dameron │ NULL │ nan │ brown │ light │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└────────────────────┴────────┴─────────┴─────────────┴────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn dplyr, you can specify multiple conditions separated with `,` that are then combined with the `&` operator:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n filter(skin_color == \\"light\\", eye_color == \\"brown\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 7 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n1 Leia Org… 150 49 brown light brown 19 fema… femin…\\n2 Biggs Da… 183 84 black light brown 24 male mascu…\\n3 Cordé 157 
NA brown light brown NA fema… femin…\\n4 Dormé 165 NA brown light brown NA fema… femin…\\n5 Raymus A… 188 79 brown light brown NA male mascu…\\n6 Poe Dame… NA NA brown light brown NA male mascu…\\n7 Padmé Am… 165 45 brown light brown 46 fema… femin…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, you can do the same by putting multiple conditions in a list:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter([_.skin_color == \\"light\\", _.eye_color == \\"brown\\"])\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────┼────────┼─────────┼────────────┼────────────┼───┤\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ Raymus Antilles │ 188 │ 79.0 │ brown │ light │ … │\\n│ Poe Dameron │ NULL │ nan │ brown │ light │ … │\\n│ Padmé Amidala │ 165 │ 45.0 │ brown │ light │ … │\\n└───────────────────┴────────┴─────────┴────────────┴────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn previous code, we used the `_` helper we imported earlier. The `_` is\\nshorthand for the table returned by the previous step in the chained sequence\\nof operations (in this case, `starwars`). We could have also written the more\\nverbose form,\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter([starwars.skin_color == \\"light\\", starwars.eye_color == \\"brown\\"])\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────┼────────┼─────────┼────────────┼────────────┼───┤\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ Raymus Antilles │ 188 │ 79.0 │ brown │ light │ … │\\n│ Poe Dameron │ NULL │ nan │ brown │ light │ … │\\n│ Padmé Amidala │ 165 │ 45.0 │ brown │ light │ … │\\n└───────────────────┴────────┴─────────┴────────────┴────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIf you want to combine multiple conditions, in dplyr, you could do:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n filter(\\n (skin_color == \\"light\\" & eye_color == \\"brown\\") |\\n species == \\"Droid\\"\\n )\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 13 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 C-3PO 167 75 <NA> gold yellow 112 none mascu…\\n 2 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n 3 Leia Or… 150 49 brown light brown 19 fema… femin…\\n 4 R5-D4 97 32 <NA> white, red red NA none mascu…\\n 5 Biggs D… 183 84 black light brown 24 male mascu…\\n 6 IG-88 200 140 none metal red 15 none mascu…\\n 7 Cordé 157 NA brown light brown NA fema… femin…\\n 8 Dormé 165 NA brown light brown NA fema… femin…\\n 9 R4-P17 96 NA 
none silver, r… red, blue NA none femin…\\n10 Raymus … 188 79 brown light brown NA male mascu…\\n11 Poe Dam… NA NA brown light brown NA male mascu…\\n12 BB8 NA NA none none black NA none mascu…\\n13 Padmé A… 165 45 brown light brown 46 fema… femin…\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, this would be:\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.filter(\\n ((_.skin_color == \\"light\\") & (_.eye_color == \\"brown\\")) |\\n (_.species == \\"Droid\\")\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────┼────────┼─────────┼────────────┼─────────────┼───┤\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ IG-88 │ 200 │ 140.0 │ none │ metal │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Cordé │ 157 │ nan │ brown │ light │ … │\\n│ Dormé │ 165 │ nan │ brown │ light │ … │\\n│ R4-P17 │ 96 │ nan │ none │ silver, red │ … │\\n│ BB8 │ NULL │ nan │ none │ none │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└───────────────────┴────────┴─────────┴────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Sorting your data with order_by()\\n\\nTo sort a column, dplyr has the verb `arrange`. For instance, to sort the column `height` using dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n arrange(height)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 Yoda 66 17 white green brown 896 male mascu…\\n 2 Ratts T… 79 15 none grey, blue unknown NA male mascu…\\n 3 Wicket … 88 20 brown brown brown 8 male mascu…\\n 4 Dud Bolt 94 45 none blue, grey yellow NA male mascu…\\n 5 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n 6 R4-P17 96 NA none silver, r… red, blue NA none femin…\\n 7 R5-D4 97 32 <NA> white, red red NA none mascu…\\n 8 Sebulba 112 40 none grey, red orange NA male mascu…\\n 9 Gasgano 122 NA none white, bl… black NA male mascu…\\n10 Watto 137 NA black blue, grey yellow NA male mascu…\\n# ℹ 77 more rows\\n# ℹ 5 more variables: homeworld <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIbis has the `order_by` method, so to perform the same operation:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.order_by(_.height)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────────┼────────┼─────────┼────────────┼─────────────┼───┤\\n│ Yoda │ 66 │ 17.0 │ white │ green │ … │\\n│ Ratts Tyerell │ 79 │ 15.0 │ none │ grey, blue │ … │\\n│ Wicket Systri Warrick │ 88 │ 20.0 │ brown │ brown │ … │\\n│ Dud Bolt │ 94 │ 45.0 │ none │ blue, grey │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ R4-P17 │ 96 │ 
nan │ none │ silver, red │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Sebulba │ 112 │ 40.0 │ none │ grey, red │ … │\\n│ Gasgano │ 122 │ nan │ none │ white, blue │ … │\\n│ Watto │ 137 │ nan │ black │ blue, grey │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└───────────────────────┴────────┴─────────┴────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nYou might notice that while dplyr puts missing values at the end, Ibis places\\nthem at the top. This behavior can actually vary from backend to backend and is\\nsomething to be aware of when using Ibis.\\n\\nIf you want to order using multiple variables, you can pass them as a list:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.order_by([_.height, _.mass]) # or starwars.order_by([\\"height\\", \\"mass\\"])\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├───────────────────────┼────────┼─────────┼────────────┼─────────────┼───┤\\n│ Yoda │ 66 │ 17.0 │ white │ green │ … │\\n│ Ratts Tyerell │ 79 │ 15.0 │ none │ grey, blue │ … │\\n│ Wicket Systri Warrick │ 88 │ 20.0 │ brown │ brown │ … │\\n│ Dud Bolt │ 94 │ 45.0 │ none │ blue, grey │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ R4-P17 │ 96 │ nan │ none │ silver, red │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Sebulba │ 112 │ 40.0 │ none │ grey, red │ … │\\n│ Gasgano │ 122 │ nan │ none │ white, blue │ … │\\n│ Watto │ 137 │ nan │ black │ blue, grey │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└───────────────────────┴────────┴─────────┴────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nTo order a column in descending order, there are two ways to do it. 
Note that missing values remain at the top.\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.order_by(_.height.desc()) # or: starwars.order_by(ibis.desc(\\"height\\"))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├──────────────┼────────┼─────────┼────────────┼──────────────┼───┤\\n│ Yarael Poof │ 264 │ nan │ none │ white │ … │\\n│ Tarfful │ 234 │ 136.0 │ brown │ brown │ … │\\n│ Lama Su │ 229 │ 88.0 │ none │ grey │ … │\\n│ Chewbacca │ 228 │ 112.0 │ brown │ unknown │ … │\\n│ Roos Tarpals │ 224 │ 82.0 │ none │ grey │ … │\\n│ Grievous │ 216 │ 159.0 │ none │ brown, white │ … │\\n│ Taun We │ 213 │ nan │ none │ grey │ … │\\n│ Rugor Nass │ 206 │ nan │ none │ green │ … │\\n│ Tion Medon │ 206 │ 80.0 │ none │ grey │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└──────────────┴────────┴─────────┴────────────┴──────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Selecting columns with select()\\n\\nIbis, like dplyr, has a `select` method to include or exclude columns:\\n\\nWith dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n select(hair_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 1\\n hair_color \\n <chr> \\n 1 blond \\n 2 <NA> \\n 3 <NA> \\n 4 none \\n 5 brown \\n 6 brown, grey \\n 7 brown \\n 8 <NA> \\n 9 black \\n10 auburn, white\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(_.hair_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┓\\n┃ hair_color ┃\\n┡━━━━━━━━━━━━━━━┩\\n│ string │\\n├───────────────┤\\n│ blond │\\n│ NULL │\\n│ NULL │\\n│ none │\\n│ brown │\\n│ brown, grey │\\n│ brown │\\n│ NULL │\\n│ black │\\n│ auburn, white │\\n│ … │\\n└───────────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nNote: A common pitfall to be aware of when referencing column names in Ibis is\\nwhen column names collide with built-in methods on the Ibis Table object, such\\nas `count`. 
In this situation, you will have to reference `count` like\\n`table[\\"count\\"]` or `_[\\"count\\"]`.\\n\\ndplyr also allows selecting more than one column at a time:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n select(hair_color, skin_color, eye_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 3\\n hair_color skin_color eye_color\\n <chr> <chr> <chr> \\n 1 blond fair blue \\n 2 <NA> gold yellow \\n 3 <NA> white, blue red \\n 4 none white yellow \\n 5 brown light brown \\n 6 brown, grey light blue \\n 7 brown light blue \\n 8 <NA> white, red red \\n 9 black light brown \\n10 auburn, white fair blue-gray\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, we can either quote the names:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(\\"hair_color\\", \\"skin_color\\", \\"eye_color\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├───────────────┼─────────────┼───────────┤\\n│ blond │ fair │ blue │\\n│ NULL │ gold │ yellow │\\n│ NULL │ white, blue │ red │\\n│ none │ white │ yellow │\\n│ brown │ light │ brown │\\n│ brown, grey │ light │ blue │\\n│ brown │ light │ blue │\\n│ NULL │ white, red │ red │\\n│ black │ light │ brown │\\n│ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │\\n└───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nOr use the `_` helper:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(_.hair_color, _.skin_color, _.eye_color)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├───────────────┼─────────────┼───────────┤\\n│ blond │ fair │ blue │\\n│ NULL │ gold │ yellow │\\n│ NULL │ white, blue │ red │\\n│ none │ white │ yellow │\\n│ brown │ light │ brown │\\n│ brown, grey │ light │ blue │\\n│ brown │ light │ blue │\\n│ NULL │ white, red │ red │\\n│ black │ light │ brown │\\n│ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │\\n└───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nTo select columns by name based on a condition, dplyr has helpers such as:\\n\\n- starts_with(): Starts with a prefix.\\n- ends_with(): Ends with a suffix.\\n- contains(): Contains a literal string.\\n\\nThese and many more [selectors](../reference/selectors.qmd) are available in Ibis as well, with slightly different names:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(s.startswith(\\"h\\"))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ height ┃ hair_color ┃ homeworld ┃\\n┡━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ int64 │ string │ string │\\n├────────┼───────────────┼───────────┤\\n│ 172 │ blond │ Tatooine │\\n│ 167 │ NULL │ Tatooine │\\n│ 96 │ NULL │ Naboo │\\n│ 202 │ none │ Tatooine │\\n│ 150 │ brown │ Alderaan │\\n│ 178 │ brown, grey │ Tatooine │\\n│ 165 │ brown │ Tatooine │\\n│ 97 │ NULL │ Tatooine │\\n│ 183 │ black │ Tatooine │\\n│ 182 │ auburn, white │ Stewjon │\\n│ … │ … │ … │\\n└────────┴───────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(s.endswith(\\"color\\"))\\n```\\n\\n::: {.cell-output 
.cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├───────────────┼─────────────┼───────────┤\\n│ blond │ fair │ blue │\\n│ NULL │ gold │ yellow │\\n│ NULL │ white, blue │ red │\\n│ none │ white │ yellow │\\n│ brown │ light │ brown │\\n│ brown, grey │ light │ blue │\\n│ brown │ light │ blue │\\n│ NULL │ white, red │ red │\\n│ black │ light │ brown │\\n│ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │\\n└───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.select(s.contains(\\"world\\"))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━┓\\n┃ homeworld ┃\\n┡━━━━━━━━━━━┩\\n│ string │\\n├───────────┤\\n│ Tatooine │\\n│ Tatooine │\\n│ Naboo │\\n│ Tatooine │\\n│ Alderaan │\\n│ Tatooine │\\n│ Tatooine │\\n│ Tatooine │\\n│ Tatooine │\\n│ Stewjon │\\n│ … │\\n└───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nSee the Ibis [Column Selectors](../reference/selectors.qmd) documentation for the full list of selectors in Ibis.\\n\\n## Renaming columns with relabel()\\n\\nIbis allows you to rename columns using `relabel()` which provides similar functionality to `rename()` in dplyr.\\n\\nIn dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n rename(\\"home_world\\" = \\"homeworld\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 14\\n name height mass hair_color skin_color eye_color birth_year sex gender\\n <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr> <chr> \\n 1 Luke Sk… 172 77 blond fair blue 19 male mascu…\\n 2 C-3PO 167 75 <NA> gold yellow 112 none mascu…\\n 3 R2-D2 96 32 <NA> white, bl… red 33 none mascu…\\n 4 Darth V… 202 136 none white yellow 41.9 male mascu…\\n 5 Leia Or… 150 49 brown light brown 19 fema… femin…\\n 6 Owen La… 178 120 brown, gr… light blue 52 male mascu…\\n 7 Beru Wh… 165 75 brown light blue 47 fema… femin…\\n 8 R5-D4 97 32 <NA> white, red red NA none mascu…\\n 9 Biggs D… 183 84 black light brown 24 male mascu…\\n10 Obi-Wan… 182 77 auburn, w… fair blue-gray 57 male mascu…\\n# ℹ 77 more rows\\n# ℹ 5 more variables: home_world <chr>, species <chr>, films <list>,\\n# vehicles <list>, starships <list>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, use `relabel` and pass a `dict` of name mappings:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.rename(home_world=\\"homeworld\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━┓\\n┃ name ┃ height ┃ mass ┃ hair_color ┃ skin_color ┃ … ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━┩\\n│ string │ int64 │ float64 │ string │ string │ … │\\n├────────────────────┼────────┼─────────┼───────────────┼─────────────┼───┤\\n│ Luke Skywalker │ 172 │ 77.0 │ blond │ fair │ … │\\n│ C-3PO │ 167 │ 75.0 │ NULL │ gold │ … │\\n│ R2-D2 │ 96 │ 32.0 │ NULL │ white, blue │ … │\\n│ Darth Vader │ 202 │ 136.0 │ none │ white │ … │\\n│ Leia Organa │ 150 │ 49.0 │ brown │ light │ … │\\n│ Owen Lars │ 178 │ 120.0 │ brown, grey │ light │ … │\\n│ Beru Whitesun lars │ 165 │ 75.0 │ brown │ light │ … │\\n│ R5-D4 │ 97 │ 32.0 │ NULL │ white, red │ … │\\n│ Biggs Darklighter │ 183 │ 84.0 │ black │ light │ … │\\n│ Obi-Wan Kenobi │ 182 │ 77.0 │ auburn, white │ fair │ … │\\n│ … │ … │ … │ … │ … │ … 
│\\n└────────────────────┴────────┴─────────┴───────────────┴─────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Add new columns with mutate()\\n\\nIbis, like dplyr, uses the `mutate` verb to add columns.\\n\\nIn dplyr,\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars |>\\n mutate(height_m = height / 100) |>\\n select(name, height_m)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 2\\n name height_m\\n <chr> <dbl>\\n 1 Luke Skywalker 1.72\\n 2 C-3PO 1.67\\n 3 R2-D2 0.96\\n 4 Darth Vader 2.02\\n 5 Leia Organa 1.5 \\n 6 Owen Lars 1.78\\n 7 Beru Whitesun lars 1.65\\n 8 R5-D4 0.97\\n 9 Biggs Darklighter 1.83\\n10 Obi-Wan Kenobi 1.82\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(\\n starwars\\n .mutate(height_m = _.height / 100)\\n .select(\\"name\\", \\"height_m\\")\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━┓\\n┃ name ┃ height_m ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━┩\\n│ string │ float64 │\\n├────────────────────┼──────────┤\\n│ Luke Skywalker │ 1.72 │\\n│ C-3PO │ 1.67 │\\n│ R2-D2 │ 0.96 │\\n│ Darth Vader │ 2.02 │\\n│ Leia Organa │ 1.50 │\\n│ Owen Lars │ 1.78 │\\n│ Beru Whitesun lars │ 1.65 │\\n│ R5-D4 │ 0.97 │\\n│ Biggs Darklighter │ 1.83 │\\n│ Obi-Wan Kenobi │ 1.82 │\\n│ … │ … │\\n└────────────────────┴──────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nA big difference between dplyr's `mutate` and Ibis' `mutate` is that, in Ibis,\\nyou have to chain separate `mutate` calls together when you reference\\nnewly-created columns in the same `mutate` whereas in dplyr, you can put them\\nall in the same call. This makes Ibis' `mutate` more similar to `transform` in\\nbase R.\\n\\nIn dplyr, we only need one `mutate` call:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars %>%\\n mutate(\\n height_m = height / 100,\\n BMI = mass / (height_m^2)\\n ) %>%\\n select(BMI, everything())\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 16\\n BMI name height mass hair_color skin_color eye_color birth_year sex \\n <dbl> <chr> <int> <dbl> <chr> <chr> <chr> <dbl> <chr>\\n 1 26.0 Luke Sky… 172 77 blond fair blue 19 male \\n 2 26.9 C-3PO 167 75 <NA> gold yellow 112 none \\n 3 34.7 R2-D2 96 32 <NA> white, bl… red 33 none \\n 4 33.3 Darth Va… 202 136 none white yellow 41.9 male \\n 5 21.8 Leia Org… 150 49 brown light brown 19 fema…\\n 6 37.9 Owen Lars 178 120 brown, gr… light blue 52 male \\n 7 27.5 Beru Whi… 165 75 brown light blue 47 fema…\\n 8 34.0 R5-D4 97 32 <NA> white, red red NA none \\n 9 25.1 Biggs Da… 183 84 black light brown 24 male \\n10 23.2 Obi-Wan … 182 77 auburn, w… fair blue-gray 57 male \\n# ℹ 77 more rows\\n# ℹ 7 more variables: gender <chr>, homeworld <chr>, species <chr>,\\n# films <list>, vehicles <list>, starships <list>, height_m <dbl>\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, for `BMI` to reference `height_m`, it needs to be in a separate `mutate` call:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(starwars\\n .mutate(\\n height_m = _.height / 100\\n )\\n .mutate(\\n BMI = _.mass / (_.height_m**2)\\n )\\n .select(\\"BMI\\", ~s.matches(\\"BMI\\"))\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━┓\\n┃ BMI ┃ name ┃ height ┃ mass ┃ hair_color ┃ … ┃\\n┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━┩\\n│ float64 │ string │ int64 │ float64 │ string │ … 
│\\n├───────────┼────────────────────┼────────┼─────────┼───────────────┼───┤\\n│ 26.027582 │ Luke Skywalker │ 172 │ 77.0 │ blond │ … │\\n│ 26.892323 │ C-3PO │ 167 │ 75.0 │ NULL │ … │\\n│ 34.722222 │ R2-D2 │ 96 │ 32.0 │ NULL │ … │\\n│ 33.330066 │ Darth Vader │ 202 │ 136.0 │ none │ … │\\n│ 21.777778 │ Leia Organa │ 150 │ 49.0 │ brown │ … │\\n│ 37.874006 │ Owen Lars │ 178 │ 120.0 │ brown, grey │ … │\\n│ 27.548209 │ Beru Whitesun lars │ 165 │ 75.0 │ brown │ … │\\n│ 34.009990 │ R5-D4 │ 97 │ 32.0 │ NULL │ … │\\n│ 25.082863 │ Biggs Darklighter │ 183 │ 84.0 │ black │ … │\\n│ 23.245985 │ Obi-Wan Kenobi │ 182 │ 77.0 │ auburn, white │ … │\\n│ … │ … │ … │ … │ … │ … │\\n└───────────┴────────────────────┴────────┴─────────┴───────────────┴───┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Summarize values with aggregate()\\n\\nTo summarize tables, dplyr has the verbs `summarise`/`summarize`:\\n\\nIn dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars %>%\\n summarise(height = mean(height, na.rm = TRUE))\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 1 × 1\\n height\\n <dbl>\\n1 174.\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis, the corresponding verb is `aggregate`:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars.aggregate(height = _.height.mean())\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━┓\\n┃ height ┃\\n┡━━━━━━━━━━━━┩\\n│ float64 │\\n├────────────┤\\n│ 174.358025 │\\n└────────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n**Note**: Throughout this guide, where dplyr uses R generics, Ibis uses Python methods. In the previous code cell, `aggregate` is a method on a _table_ and `mean` is a method on a _column_. If you want to perform aggregations on multiple columns, you can call the method that you want on the column you want to apply it to.\\n\\n## Join tables with left_join()\\n\\nTo demonstrate how to do joins with Ibis, we'll load two more example datasets that also come from the example datasets included in dplyr:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nband_members = ibis.examples.band_members.fetch()\\nband_instruments = ibis.examples.band_instruments.fetch()\\n```\\n:::\\n\\n\\nIn dplyr, we can perform a left join of these two tables like:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nband_members |>\\n left_join(band_instruments)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 3 × 3\\n name band plays \\n <chr> <chr> <chr> \\n1 Mick Stones <NA> \\n2 John Beatles guitar\\n3 Paul Beatles bass \\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nband_members.left_join(band_instruments, \\"name\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━┓\\n┃ name ┃ band ┃ name_right ┃ plays ┃\\n┡━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━┩\\n│ string │ string │ string │ string │\\n├────────┼─────────┼────────────┼────────┤\\n│ John │ Beatles │ John │ guitar │\\n│ Paul │ Beatles │ Paul │ bass │\\n│ Mick │ Stones │ NULL │ NULL │\\n└────────┴─────────┴────────────┴────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nThere are two main differences between Ibis and dplyr here:\\n\\n- Ibis requires us to explicitly specify our join key (\\"name\\", in this example) whereas in dplyr, if the join key is missing, we get the natural join of the two tables which joins across all shared column names\\n- Ibis keeps columns for join keys from each table whereas dplyr does not by default\\n\\nTo replicate the result we'd get by default in dplyr 
but using Ibis, we need to incorporate two other verbs we've already seen in this tutorial:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(\\n band_members\\n .left_join(band_instruments, \\"name\\")\\n .select(~s.contains(\\"_right\\"))\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓\\n┃ name ┃ band ┃ plays ┃\\n┡━━━━━━━━╇━━━━━━━━━╇━━━━━━━━┩\\n│ string │ string │ string │\\n├────────┼─────────┼────────┤\\n│ John │ Beatles │ guitar │\\n│ Paul │ Beatles │ bass │\\n│ Mick │ Stones │ NULL │\\n└────────┴─────────┴────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Pivot data with pivot_wider() and pivot_longer()\\n\\ndplyr users are likely to be familiar with the `pivot_wider` and `pivot_longer` functions from the [tidyr](https://tidyr.tidyverse.org) package which convert tables between wide and long formats, respectively.\\n\\nIn dplyr+tidyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nlibrary(tidyr)\\n\\nstarwars_colors <-\\n starwars |>\\n select(name, matches(\\"color\\")) |>\\n pivot_longer(matches(\\"color\\"), names_to = \\"attribute\\", values_to = \\"color\\")\\n```\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\nstarwars_colors = (\\n starwars\\n .select(\\"name\\", s.matches(\\"color\\"))\\n .pivot_longer(s.matches(\\"color\\"), names_to=\\"attribute\\", values_to=\\"color\\")\\n)\\n\\nstarwars_colors\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━┓\\n┃ name ┃ attribute ┃ color ┃\\n┡━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━┩\\n│ string │ string │ string │\\n├────────────────┼────────────┼─────────────┤\\n│ Luke Skywalker │ hair_color │ blond │\\n│ Luke Skywalker │ skin_color │ fair │\\n│ Luke Skywalker │ eye_color │ blue │\\n│ C-3PO │ hair_color │ NULL │\\n│ C-3PO │ skin_color │ gold │\\n│ C-3PO │ eye_color │ yellow │\\n│ R2-D2 │ hair_color │ NULL │\\n│ R2-D2 │ skin_color │ white, blue │\\n│ R2-D2 │ eye_color │ red │\\n│ Darth Vader │ hair_color │ none │\\n│ … │ … │ … │\\n└────────────────┴────────────┴─────────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\nAnd the reverse, in dplyr:\\n\\n\\n::: {.cell}\\n\\n```{.r .cell-code}\\nstarwars_colors |>\\n pivot_wider(names_from = \\"attribute\\", values_from = \\"color\\")\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n# A tibble: 87 × 4\\n name hair_color skin_color eye_color\\n <chr> <chr> <chr> <chr> \\n 1 Luke Skywalker blond fair blue \\n 2 C-3PO <NA> gold yellow \\n 3 R2-D2 <NA> white, blue red \\n 4 Darth Vader none white yellow \\n 5 Leia Organa brown light brown \\n 6 Owen Lars brown, grey light blue \\n 7 Beru Whitesun lars brown light blue \\n 8 R5-D4 <NA> white, red red \\n 9 Biggs Darklighter black light brown \\n10 Obi-Wan Kenobi auburn, white fair blue-gray\\n# ℹ 77 more rows\\n```\\n\\n\\n:::\\n:::\\n\\n\\nIn Ibis:\\n\\n\\n\\n::: {.cell}\\n\\n```{.python .cell-code}\\n(\\n starwars_colors.\\n pivot_wider(names_from=\\"attribute\\", values_from=\\"color\\")\\n)\\n```\\n\\n::: {.cell-output .cell-output-stdout}\\n\\n```\\n┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━┓\\n┃ name ┃ hair_color ┃ skin_color ┃ eye_color ┃\\n┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━┩\\n│ string │ string │ string │ string │\\n├────────────────────┼───────────────┼─────────────┼───────────┤\\n│ Luke Skywalker │ blond │ fair │ blue │\\n│ C-3PO │ NULL │ gold │ yellow │\\n│ R2-D2 │ NULL │ white, blue │ red │\\n│ Darth Vader │ none │ white │ yellow │\\n│ Leia Organa │ brown 
│ light │ brown │\\n│ Owen Lars │ brown, grey │ light │ blue │\\n│ Beru Whitesun lars │ brown │ light │ blue │\\n│ R5-D4 │ NULL │ white, red │ red │\\n│ Biggs Darklighter │ black │ light │ brown │\\n│ Obi-Wan Kenobi │ auburn, white │ fair │ blue-gray │\\n│ … │ … │ … │ … │\\n└────────────────────┴───────────────┴─────────────┴───────────┘\\n```\\n\\n\\n:::\\n:::\\n\\n\\n## Next Steps\\n\\nNow that you've gotten an introduction to the common differences between dplyr\\nand Ibis, head over to [Getting Started with ibis](./getting_started.qmd) for a\\nfull introduction. If you're familiar with SQL, check out [Ibis for SQL\\nProgrammers](./ibis-for-sql-users.qmd). If you're\\nfamiliar with pandas, take a look at [Ibis for pandas\\nUsers](./ibis-for-pandas-users.qmd)\\n", + "supporting": [ + "ibis-for-dplyr-users_files" + ], "filters": [ "rmarkdown/pagebreak.lua" ],
chore: allow dependency pinning
20d35e64e2c82d428d2e622886d2d2d0aae4f5fc
chore
https://github.com/mikro-orm/mikro-orm/commit/20d35e64e2c82d428d2e622886d2d2d0aae4f5fc
allow dependency pinning
diff --git a/package.json b/package.json index 6b152bd..130db42 100644 --- a/package.json +++ b/package.json @@ -92,8 +92,7 @@ "renovate": { "extends": [ "config:base" - ], - "pinVersions": false + ] }, "engines": { "node": ">= 10.13.0"
chore: fix TS errors reported by TS 4.9 beta
49f0083834dfd088e7fda7b567f2773f35b1da53
chore
https://github.com/mikro-orm/mikro-orm/commit/49f0083834dfd088e7fda7b567f2773f35b1da53
fix TS errors reported by TS 4.9 beta
diff --git a/BetterSqliteSchemaHelper.ts b/BetterSqliteSchemaHelper.ts index c614e62..fde5078 100644 --- a/BetterSqliteSchemaHelper.ts +++ b/BetterSqliteSchemaHelper.ts @@ -48,7 +48,7 @@ export class BetterSqliteSchemaHelper extends SchemaHelper { const sql = `select sql from sqlite_master where type = ? and name = ?`; const tableDefinition = await connection.execute<{ sql: string }>(sql, ['table', tableName], 'get'); - const checkConstraints = tableDefinition.sql.match(/[`["'][^`\\]"']+[`\\]"'] text check \\(.*?\\)/gi) ?? []; + const checkConstraints = [...tableDefinition.sql.match(/[`["'][^`\\]"']+[`\\]"'] text check \\(.*?\\)/gi) ?? []]; return checkConstraints.reduce((o, item) => { // check constraints are defined as (note that last closing paren is missing): // `type` text check (`type` in ('local', 'global') diff --git a/SqliteSchemaHelper.ts b/SqliteSchemaHelper.ts index 5044074..9469e43 100644 --- a/SqliteSchemaHelper.ts +++ b/SqliteSchemaHelper.ts @@ -48,7 +48,7 @@ export class SqliteSchemaHelper extends SchemaHelper { const sql = `select sql from sqlite_master where type = ? and name = ?`; const tableDefinition = await connection.execute<{ sql: string }>(sql, ['table', tableName], 'get'); - const checkConstraints = tableDefinition.sql.match(/[`["'][^`\\]"']+[`\\]"'] text check \\(.*?\\)/gi) ?? []; + const checkConstraints = [...tableDefinition.sql.match(/[`["'][^`\\]"']+[`\\]"'] text check \\(.*?\\)/gi) ?? []]; return checkConstraints.reduce((o, item) => { // check constraints are defined as (note that last closing paren is missing): // `type` text check (`type` in ('local', 'global') diff --git a/EntityComparator.test.ts b/EntityComparator.test.ts index 1a9167b..91785e8 100644 --- a/EntityComparator.test.ts +++ b/EntityComparator.test.ts @@ -198,7 +198,7 @@ export class EntityComparatorOld { /** * should be used only for `meta.comparableProps` that are defined based on the static `isComparable` helper */ - private shouldIgnoreProperty<T>(entity: T, prop: EntityProperty<T>) { + private shouldIgnoreProperty<T extends object>(entity: T, prop: EntityProperty<T>) { if (!(prop.name in entity)) { return true; }
fix(sql): support uuid-like PKs in M:N references (#272) Closes #268
2abc19f770331d6d5a033f09297523217fb17217
fix
https://github.com/mikro-orm/mikro-orm/commit/2abc19f770331d6d5a033f09297523217fb17217
support uuid-like PKs in M:N references (#272) Closes #268
diff --git a/AbstractSqlDriver.ts b/AbstractSqlDriver.ts index 74abdd5..494693b 100644 --- a/AbstractSqlDriver.ts +++ b/AbstractSqlDriver.ts @@ -77,7 +77,7 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra const qb = this.createQueryBuilder(entityName, ctx, true); const res = await qb.insert(data).execute('run', false); res.row = res.row || {}; - res.insertId = res.insertId || res.row[pk] || data[pk]; + res.insertId = data[pk] || res.insertId || res.row[pk]; await this.processManyToMany(entityName, res.insertId, collections, ctx); return res; diff --git a/GH268.test.ts b/GH268.test.ts index 4eca91e..8bf290d 100644 --- a/GH268.test.ts +++ b/GH268.test.ts @@ -0,0 +1,75 @@ +import { unlinkSync } from 'fs'; +import { v4 } from 'uuid'; + +import { Collection, Entity, ManyToMany, MikroORM, PrimaryKey, Property, ReflectMetadataProvider, UuidEntity } from '../../lib'; +import { BASE_DIR } from '../bootstrap'; +import { SqliteDriver } from '../../lib/drivers/SqliteDriver'; + +@Entity() +export class A implements UuidEntity<A> { + + @PrimaryKey() + uuid: string = v4(); + + @Property() + name!: string; + + @ManyToMany(() => B, b => b.aCollection) + bCollection = new Collection<B>(this); + +} + +@Entity() +export class B implements UuidEntity<B> { + + @PrimaryKey() + uuid: string = v4(); + + @Property() + name!: string; + + @ManyToMany(() => A, undefined, { fixedOrder: true }) + aCollection = new Collection<A>(this); + +} + +describe('GH issue 268', () => { + + let orm: MikroORM<SqliteDriver>; + + beforeAll(async () => { + orm = await MikroORM.init({ + entities: [A, B], + dbName: BASE_DIR + '/../temp/mikro_orm_test_gh268.db', + debug: false, + highlight: false, + type: 'sqlite', + metadataProvider: ReflectMetadataProvider, + cache: { enabled: false }, + }); + await orm.getSchemaGenerator().dropSchema(); + await orm.getSchemaGenerator().createSchema(); + }); + + afterAll(async () => { + await orm.close(true); + unlinkSync(orm.config.get('dbName')); + }); + + test('m:n with uuid PKs', async () => { + const a1 = new A(); + a1.name = 'a1'; + const a2 = new A(); + a2.name = 'a2'; + const a3 = new A(); + a3.name = 'a3'; + const b = new B(); + b.name = 'b'; + b.aCollection.add(a1, a2, a3); + await orm.em.persistAndFlush(b); + + const res = await orm.em.getConnection().execute('select * from b_to_a'); + expect(res[0]).toEqual({ id: 1, a_uuid: a1.uuid, b_uuid: b.uuid }); + }); + +});
chore: add CONTRIBUTING.md
5007d437cd562a81f14fd9d0f466dff05904b81c
chore
https://github.com/pmndrs/react-spring/commit/5007d437cd562a81f14fd9d0f466dff05904b81c
add CONTRIBUTING.md
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 96e67e5..9ac6fba 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# How to Contribute + +1. Clone this repository: + +```sh +git clone https://github.com/react-spring/react-spring +cd react-spring +``` + +2. Install `yarn` (https://yarnpkg.com/en/docs/install) + +3. Bootstrap the packages: + +```sh +npx lerna bootstrap +``` + +4. Link the packages: + +``` +npx lerna exec -- yarn link +``` + +5. Link `react-spring` to your project: + +```sh +cd ~/my-project +yarn link react-spring +``` + +6. Let's get cooking!
fix: allow parsing double-dash date offsets Negative date offsets seem to occur with double-dashes, too.
d3f65d8361244f48e5ad79f034c05b6623cf7312
fix
https://github.com/Byron/gitoxide/commit/d3f65d8361244f48e5ad79f034c05b6623cf7312
allow parsing double-dash date offsets Negative date offsets seem to occur with double-dashes, too.
diff --git a/decode.rs b/decode.rs index 52ba880..4b685fc 100644 --- a/decode.rs +++ b/decode.rs @@ -2,6 +2,7 @@ pub(crate) mod function { use bstr::ByteSlice; use btoi::btoi; use gix_date::{time::Sign, OffsetInSeconds, SecondsSinceUnixEpoch, Time}; + use nom::multi::many1_count; use nom::{ branch::alt, bytes::complete::{tag, take, take_until, take_while_m_n}, @@ -10,6 +11,7 @@ pub(crate) mod function { sequence::{terminated, tuple}, IResult, }; + use std::cell::RefCell; use crate::{IdentityRef, SignatureRef}; @@ -19,7 +21,9 @@ pub(crate) mod function { pub fn decode<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], SignatureRef<'a>, E> { - let (i, (identity, _, time, tzsign, hours, minutes)) = context( + use nom::Parser; + let tzsign = RefCell::new(b'-'); // TODO: there should be no need for this. + let (i, (identity, _, time, _tzsign_count, hours, minutes)) = context( "<name> <<email>> <timestamp> <+|-><HHMM>", tuple(( identity, @@ -31,7 +35,13 @@ pub(crate) mod function { .map_err(|_| nom::Err::Error(E::from_error_kind(i, nom::error::ErrorKind::MapRes))) }) }), - context("+|-", alt((tag(b"-"), tag(b"+")))), + context( + "+|-", + alt(( + many1_count(tag(b"-")).map(|_| *tzsign.borrow_mut() = b'-'), // TODO: this should be a non-allocating consumer of consecutive tags + many1_count(tag(b"+")).map(|_| *tzsign.borrow_mut() = b'+'), + )), + ), context("HH", |i| { take_while_m_n(2usize, 2, is_digit)(i).and_then(|(i, v)| { btoi::<OffsetInSeconds>(v) @@ -40,7 +50,7 @@ pub(crate) mod function { }) }), context("MM", |i| { - take_while_m_n(2usize, 2, is_digit)(i).and_then(|(i, v)| { + take_while_m_n(1usize, 2, is_digit)(i).and_then(|(i, v)| { btoi::<OffsetInSeconds>(v) .map(|v| (i, v)) .map_err(|_| nom::Err::Error(E::from_error_kind(i, nom::error::ErrorKind::MapRes))) @@ -49,8 +59,9 @@ pub(crate) mod function { )), )(i)?; - debug_assert!(tzsign[0] == b'-' || tzsign[0] == b'+', "parser assure it's +|- only"); - let sign = if tzsign[0] == b'-' { Sign::Minus } else { Sign::Plus }; // + let tzsign = tzsign.into_inner(); + debug_assert!(tzsign == b'-' || tzsign == b'+', "parser assure it's +|- only"); + let sign = if tzsign == b'-' { Sign::Minus } else { Sign::Plus }; // let offset = (hours * 3600 + minutes * 60) * if sign == Sign::Minus { -1 } else { 1 }; Ok(( @@ -148,6 +159,16 @@ mod tests { ); } + #[test] + fn negative_offset_double_dash() { + assert_eq!( + decode(b"name <[email protected]> 1288373970 --700") + .expect("parse to work") + .1, + signature("name", "[email protected]", 1288373970, Sign::Minus, -252000) + ); + } + #[test] fn empty_name_and_email() { assert_eq!(
docs(getting-started): fix Docker command
e3bc06a684389b4d0e5339dba3612a722aaf73cd
docs
https://github.com/wzhiqing/cube/commit/e3bc06a684389b4d0e5339dba3612a722aaf73cd
fix Docker command
diff --git a/Getting-Started-Docker.md b/Getting-Started-Docker.md index 0447d72..723a93e 100644 --- a/Getting-Started-Docker.md +++ b/Getting-Started-Docker.md @@ -15,11 +15,17 @@ This guide will help you get Cube.js running using Docker. ## 1. Run Cube.js with Docker CLI +<!-- prettier-ignore-start --> +[[info |]] +| Using Windows? Remember to use [PowerShell][link-powershell] to run the +| command below! +<!-- prettier-ignore-end --> + In a new folder for your project, run the following command: ```bash docker run -p 4000:4000 \\ - -v ${pwd}:/cube/conf \\ + -v ${PWD}:/cube/conf \\ -e CUBEJS_DEV_MODE=true \\ cubejs/cube ``` @@ -78,6 +84,7 @@ Learn how to [query Cube.js with REST API][ref-rest-api] or [use our Javascript client library and integrations with frontend frameworks][ref-frontend-introduction]. +[link-powershell]: https://docs.microsoft.com/en-us/powershell/scripting/overview?view=powershell-7.1 [ref-config]: /config [ref-connecting-to-the-database]: /connecting-to-the-database [ref-cubejs-schema]: /getting-started-cubejs-schema
chore(deps): bump docformatter and run it over the codebase
9da5c0bb5d0965a42468cf64630ae1e9723d01da
chore
https://github.com/ibis-project/ibis/commit/9da5c0bb5d0965a42468cf64630ae1e9723d01da
bump docformatter and run it over the codebase
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a7460f3..6dcec38 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: - id: flake8 exclude: (^ibis/tests/sql/snapshots/|_py310\\.py$) - repo: https://github.com/pycqa/docformatter - rev: v1.5.0 + rev: v1.5.1 hooks: - id: docformatter - repo: https://github.com/MarcoGorelli/absolufy-imports diff --git a/base.py b/base.py index b2ca4fb..465e042 100644 --- a/base.py +++ b/base.py @@ -21,7 +21,6 @@ class DDL(abc.ABC): class QueryAST: - __slots__ = 'context', 'dml', 'setup_queries', 'teardown_queries' def __init__(self, context, dml, setup_queries=None, teardown_queries=None): diff --git a/query_builder.py b/query_builder.py index 2f9cd63..82d7aa9 100644 --- a/query_builder.py +++ b/query_builder.py @@ -17,7 +17,6 @@ from ibis.config import options class TableSetFormatter: - _join_names = { ops.InnerJoin: 'INNER JOIN', ops.LeftJoin: 'LEFT OUTER JOIN', @@ -164,7 +163,6 @@ class TableSetFormatter: class Select(DML, Comparable): - """A SELECT statement which, after execution, might yield back to the user a table, array/list, or scalar value, depending on the expression that generated it.""" diff --git a/select_builder.py b/select_builder.py index ba80990..5f5db60 100644 --- a/select_builder.py +++ b/select_builder.py @@ -106,7 +106,6 @@ def _get_column(name): class SelectBuilder: - """Transforms expression IR to a query pipeline (potentially multiple queries). There will typically be a primary SELECT query, perhaps with some subqueries and other DDL to ingest and tear down intermediate data sources. diff --git a/ddl.py b/ddl.py index a708ee6..f2d8dcd 100644 --- a/ddl.py +++ b/ddl.py @@ -48,7 +48,6 @@ def _format_properties(props): class CreateTable(CreateTable): - """Create a table.""" def __init__( @@ -79,7 +78,6 @@ class CreateTableWithSchema(CreateTableWithSchema): class CTAS(CTAS): - """Create Table As Select.""" def __init__( @@ -104,7 +102,6 @@ class CTAS(CTAS): class CreateView(CTAS): - """Create a view.""" def __init__( @@ -154,7 +151,6 @@ def _format_schema_element(name, t): class DropDatabase(DropObject): - _object_type = 'DATABASE' def __init__(self, name, must_exist=True, cascade=False): @@ -174,7 +170,6 @@ class DropDatabase(DropObject): class DropFunction(DropObject): - _object_type = 'TEMPORARY FUNCTION' def __init__(self, name, must_exist=True): diff --git a/test_timecontext.py b/test_timecontext.py index 08d3897..9165c2e 100644 --- a/test_timecontext.py +++ b/test_timecontext.py @@ -188,13 +188,13 @@ def test_setting_timecontext_in_scope(time_table, time_df3): context = pd.Timestamp('20170105'), pd.Timestamp('20170111') window1 = ibis.trailing_window(3 * ibis.interval(days=1), order_by=time_table.time) - """ - In the following expression, Selection node will be executed first and - get table in context ('20170105', '20170101'). Then in window execution - table will be executed again with a larger context adjusted by window - preceeding days ('20170102', '20170111'). To get the correct result, - the cached table result with a smaller context must be discard and updated - to a larger time range. + """In the following expression, Selection node will be executed first and + get table in context ('20170105', '20170101'). + + Then in window execution table will be executed again with a larger + context adjusted by window preceeding days ('20170102', '20170111'). 
+ To get the correct result, the cached table result with a smaller + context must be discard and updated to a larger time range. """ expr = time_table.mutate(value=time_table['value'].mean().over(window1)) result = expr.execute(timecontext=context) diff --git a/udf.py b/udf.py index 6ae07aa..6e8bb1f 100644 --- a/udf.py +++ b/udf.py @@ -104,7 +104,6 @@ def pre_execute_analytic_and_reduction_udf(op, *clients, scope=None, **kwargs): # This is generally not recommened. @execute_node.register(type(op), *(itertools.repeat(dd.Series, nargs))) def execute_udaf_node_no_groupby(op, *args, aggcontext, **kwargs): - # This function is in essence fully materializing the dd.Series and # passing that (now) pd.Series to aggctx. This materialization # happens at `.compute()` time, making this "lazy" diff --git a/__init__.py b/__init__.py index 4192082..1c56235 100644 --- a/__init__.py +++ b/__init__.py @@ -79,7 +79,6 @@ _arg_type = re.compile(r'(.*)\\.\\.\\.|([^\\.]*)') class _type_parser: - NORMAL, IN_PAREN = 0, 1 def __init__(self, value): @@ -544,7 +543,6 @@ class Backend(BaseSQLBackend): @contextlib.contextmanager def _setup_insert(self, obj): - import pandas as pd if isinstance(obj, pd.DataFrame): diff --git a/metadata.py b/metadata.py index d4605a2..df7b303 100644 --- a/metadata.py +++ b/metadata.py @@ -86,7 +86,6 @@ def _try_int(x): class MetadataParser: - """A simple state-ish machine to parse the results of DESCRIBE FORMATTED.""" @@ -282,7 +281,6 @@ def _get_meta(attr, key): class TableMetadata: - """Container for the parsed and wrangled results of DESCRIBE FORMATTED for easier Ibis use (and testing).""" diff --git a/caching.py b/caching.py index 7ccd276..96e0766 100644 --- a/caching.py +++ b/caching.py @@ -5,7 +5,6 @@ from typing import MutableMapping class WeakCache(MutableMapping): - __slots__ = ('_data',) def __init__(self): diff --git a/graph.py b/graph.py index f11b6cb..ad8ebff 100644 --- a/graph.py +++ b/graph.py @@ -38,7 +38,6 @@ def _flatten_collections(node, filter): class Traversable(Hashable): - __slots__ = () @property diff --git a/grounds.py b/grounds.py index 67e5ed6..d99ef64 100644 --- a/grounds.py +++ b/grounds.py @@ -13,7 +13,6 @@ from ibis.util import frozendict, recursive_get class BaseMeta(ABCMeta): - __slots__ = () def __new__(metacls, clsname, bases, dct, **kwargs): @@ -26,7 +25,6 @@ class BaseMeta(ABCMeta): class Base(metaclass=BaseMeta): - __slots__ = ('__weakref__',) @classmethod @@ -177,7 +175,6 @@ class Immutable(Base): class Singleton(Base): - __instances__ = WeakValueDictionary() @classmethod @@ -192,7 +189,6 @@ class Singleton(Base): class Comparable(Base): - __cache__ = WeakCache() def __eq__(self, other) -> bool: diff --git a/test_grounds.py b/test_grounds.py index 101fe3d..40d2fa9 100644 --- a/test_grounds.py +++ b/test_grounds.py @@ -502,7 +502,6 @@ def test_initialized_attribute_mixed_with_classvar(): class Node(Comparable): - # override the default cache object __cache__ = WeakCache() __slots__ = ('name',) diff --git a/analysis.py b/analysis.py index e403630..fdf2d76 100644 --- a/analysis.py +++ b/analysis.py @@ -504,7 +504,6 @@ def simplify_aggregation(agg): class Projector: - """Analysis and validation of projection operation, taking advantage of "projection fusion" opportunities where they exist, i.e. combining compatible projections together rather than nesting them. 
diff --git a/core.py b/core.py index a2ef56b..4bfdcc6 100644 --- a/core.py +++ b/core.py @@ -37,7 +37,6 @@ class Node(Concrete): @public class Named(ABC): - __slots__ = tuple() @property @@ -53,7 +52,6 @@ class Named(ABC): @public class Value(Node, Named): - # TODO(kszucs): cover it with tests # TODO(kszucs): figure out how to represent not named arguments @property diff --git a/generic.py b/generic.py index a3ebd76..b64f78b 100644 --- a/generic.py +++ b/generic.py @@ -19,7 +19,6 @@ if TYPE_CHECKING: @public class Value(Expr): - """Base class for a data generating expression having a fixed and known type, either a single value (scalar)""" diff --git a/logical.py b/logical.py index a37679e..aa705b7 100644 --- a/logical.py +++ b/logical.py @@ -158,7 +158,6 @@ class NotContains(Contains): @public class Where(Value): - """Ternary case expression, equivalent to. bool_expr.case() .when(True, true_expr) diff --git a/relations.py b/relations.py index 76cb063..e3e0ea5 100644 --- a/relations.py +++ b/relations.py @@ -386,7 +386,6 @@ class Selection(Projection): @public class Aggregation(TableNode): - """ metrics : per-group scalar aggregates by : group expressions diff --git a/schema.py b/schema.py index 29c3016..ee0c9c1 100644 --- a/schema.py +++ b/schema.py @@ -49,7 +49,8 @@ class Schema(Concrete): names = tuple_of(instance_of((str, UnnamedMarker))) """A sequence of [`str`][str] indicating the name of each column.""" types = tuple_of(datatype) - """A sequence of [DataType][ibis.expr.datatypes.DataType] objects representing type of each column.""" # noqa: E501 + """A sequence of [DataType][ibis.expr.datatypes.DataType] objects + representing type of each column.""" # noqa: E501 @attribute.default def _name_locs(self) -> dict[str, int]: diff --git a/strings.py b/strings.py index 1b76eb4..0496402 100644 --- a/strings.py +++ b/strings.py @@ -765,7 +765,6 @@ class StringValue(Value): return ops.BaseConvert(self, from_base, to_base).to_expr() def __mul__(self, n: int | ir.IntegerValue) -> StringValue | NotImplemented: - return _binop(ops.Repeat, self, n) __rmul__ = __mul__ diff --git a/temporal.py b/temporal.py index 216eec9..5ef3ad3 100644 --- a/temporal.py +++ b/temporal.py @@ -560,7 +560,6 @@ class IntervalValue(Value): @staticmethod def __negate_op__(): - return ops.Negate diff --git a/util.py b/util.py index dbc9f95..208bdf7 100644 --- a/util.py +++ b/util.py @@ -46,7 +46,6 @@ HORIZONTAL_ELLIPSIS = "\\u2026" class frozendict(Mapping, Hashable): - __slots__ = ("_dict", "_hash") def __init__(self, *args, **kwargs): diff --git a/poetry.lock b/poetry.lock index 8cb19e6..722f813 100644 --- a/poetry.lock +++ b/poetry.lock @@ -974,17 +974,18 @@ files = [ [[package]] name = "docformatter" -version = "1.5.0" +version = "1.5.1" description = "Formats docstrings to follow PEP 257" category = "dev" optional = false python-versions = ">=3.6,<4.0" files = [ - {file = "docformatter-1.5.0-py3-none-any.whl", hash = "sha256:ae56c64822c3184602ac83ec37650c9785e80dfec17b4eba4f49ad68815d71c0"}, - {file = "docformatter-1.5.0.tar.gz", hash = "sha256:9dc71659d3b853c3018cd7b2ec34d5d054370128e12b79ee655498cb339cc711"}, + {file = "docformatter-1.5.1-py3-none-any.whl", hash = "sha256:05d6e4c528278b3a54000e08695822617a38963a380f5aef19e12dd0e630f19a"}, + {file = "docformatter-1.5.1.tar.gz", hash = "sha256:3fa3cdb90cdbcdee82747c58410e47fc7e2e8c352b82bed80767915eb03f2e43"}, ] [package.dependencies] +charset_normalizer = ">=2.0.0,<3.0.0" tomli = {version = ">=2.0.0,<3.0.0", markers = "python_version >= \\"3.7\\""} untokenize = 
">=0.1.1,<0.2.0" diff --git a/requirements.txt b/requirements.txt index 8d35035..c213339 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ coverage[toml]==6.5.0 ; python_version >= "3.8" and python_version < "4.0" debugpy==1.6.4 ; python_version >= "3.8" and python_version < "4.0" decorator==5.1.1 ; python_version >= "3.8" and python_version < "4.0" defusedxml==0.7.1 ; python_version >= "3.8" and python_version < "4" -docformatter==1.5.0 ; python_version >= "3.8" and python_version < "4.0" +docformatter==1.5.1 ; python_version >= "3.8" and python_version < "4.0" docstring-parser==0.13 ; python_version >= "3.8" and python_version < "4.0" dunamai==1.15.0 ; python_version >= "3.8" and python_version < "4.0" entrypoints==0.4 ; python_version >= "3.8" and python_version < "4.0"
fix: prune CPU snapshots locally (#1313)
03b4521a59b16524f3ecc50ba44bd591481c0320
fix
https://github.com/Hardeepex/crawlee/commit/03b4521a59b16524f3ecc50ba44bd591481c0320
prune CPU snapshots locally (#1313)
diff --git a/snapshotter.js b/snapshotter.js index 1fed0d4..540adcd 100644 --- a/snapshotter.js +++ b/snapshotter.js @@ -367,7 +367,7 @@ class Snapshotter { */ _snapshotCpuOnLocal(intervalCallback) { const now = new Date(); - this._pruneSnapshots(this.eventLoopSnapshots, now); + this._pruneSnapshots(this.cpuSnapshots, now); const ticks = this._getCurrentCpuTicks(); const snapshot = {
chore: make RQ option optional for the context-bound `enqueueLinks`
c5a1022e961b241d2fbf47d20c983703c62319ab
chore
https://github.com/Hardeepex/crawlee/commit/c5a1022e961b241d2fbf47d20c983703c62319ab
make RQ option optional for the context-bound `enqueueLinks`
diff --git a/crawler_commons.ts b/crawler_commons.ts index 4596b81..08f4b2e 100644 --- a/crawler_commons.ts +++ b/crawler_commons.ts @@ -48,7 +48,7 @@ export interface CrawlingContext<Crawler = unknown, UserData extends Dictionary * @param options All `enqueueLinks()` parameters are passed via an options object. * @returns Promise that resolves to {@apilink BatchAddRequestsResult} object. */ - enqueueLinks(options: EnqueueLinksOptions): Promise<BatchAddRequestsResult>; + enqueueLinks(options: Partial<EnqueueLinksOptions>): Promise<BatchAddRequestsResult>; /** * Fires HTTP request via [`got-scraping`](https://crawlee.dev/docs/guides/got-scraping), allowing to override the request
feat: `parse::Event::to_bstr_lossy()` to get a glimpse at event content. (#331)
fc7e311b423c5fffb8240d9d0f917ae7139a6133
feat
https://github.com/Byron/gitoxide/commit/fc7e311b423c5fffb8240d9d0f917ae7139a6133
`parse::Event::to_bstr_lossy()` to get a glimpse at event content. (#331)
diff --git a/mutate.rs b/mutate.rs index cc5b3f8..347ddae 100644 --- a/mutate.rs +++ b/mutate.rs @@ -161,12 +161,11 @@ impl<'event> File<'event> { pub fn append(&mut self, mut other: Self) { let nl = self.detect_newline_style(); - // TODO: don't allocate here fn ends_with_newline<'a>(it: impl DoubleEndedIterator<Item = &'a Event<'a>>) -> bool { - it.last().map_or(true, |e| e.to_bstring().last() == Some(&b'\\n')) + it.last().map_or(true, |e| e.to_bstr_lossy().last() == Some(&b'\\n')) } fn starts_with_newline<'a>(mut it: impl Iterator<Item = &'a Event<'a>>) -> bool { - it.next().map_or(true, |e| e.to_bstring().first() == Some(&b'\\n')) + it.next().map_or(true, |e| e.to_bstr_lossy().first() == Some(&b'\\n')) } let newline_event = || Event::Newline(Cow::Owned(nl.clone())); diff --git a/event.rs b/event.rs index 29fc91c..c2ac61c 100644 --- a/event.rs +++ b/event.rs @@ -1,6 +1,6 @@ use std::{borrow::Cow, fmt::Display}; -use bstr::BString; +use bstr::{BStr, BString}; use crate::parse::Event; @@ -15,6 +15,22 @@ impl Event<'_> { buf.into() } + /// Turn ourselves into the text we represent, lossy. + /// + /// Note that this will be partial in case of `ValueNotDone` which doesn't include the backslash, and `SectionHeader` will only + /// provide their name, lacking the sub-section name. + pub fn to_bstr_lossy(&self) -> &BStr { + match self { + Self::ValueNotDone(e) | Self::Whitespace(e) | Self::Newline(e) | Self::Value(e) | Self::ValueDone(e) => { + e.as_ref() + } + Self::KeyValueSeparator => "=".into(), + Self::SectionKey(k) => k.0.as_ref(), + Self::SectionHeader(h) => h.name.0.as_ref(), + Self::Comment(c) => c.comment.as_ref(), + } + } + /// Stream ourselves to the given `out`, in order to reproduce this event mostly losslessly /// as it was parsed. pub fn write_to(&self, mut out: impl std::io::Write) -> std::io::Result<()> {
docs: fix pandas backend installation docs
a3c298cf9bed01a7e4212d736f1e418ad6420461
docs
https://github.com/rohankumardubey/ibis/commit/a3c298cf9bed01a7e4212d736f1e418ad6420461
fix pandas backend installation docs
diff --git a/template.md b/template.md index 67261ca..2e56c63 100644 --- a/template.md +++ b/template.md @@ -19,12 +19,15 @@ Install dependencies for the {{ backend_name }} backend: pip install 'ibis-framework{% if not is_core %}[{{ backend_module }}]{% endif %}' ``` -=== "conda" +{% for mgr in ["conda", "mamba"] %} +=== "{{ mgr }}" ```sh - conda install -c conda-forge ibis{% if not is_core %}-{{ backend_module }}{% endif %} + {{ mgr }} install -c conda-forge ibis-{% if is_core %}framework{% else %}{{ backend_module }}{% endif %} ``` +{% endfor %} + ## Connect <!-- prettier-ignore-start -->
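The template change above leans on a small Jinja loop to emit one install command per package manager. A standalone sketch of that rendering logic follows; calling jinja2 directly like this is an assumption for illustration, since the real docs site renders the template through its own tooling.

```python
# Hypothetical standalone rendering of the loop/condition from template.md.
from jinja2 import Template

TEMPLATE = (
    '{% for mgr in ["conda", "mamba"] %}'
    "{{ mgr }} install -c conda-forge "
    "ibis-{% if is_core %}framework{% else %}{{ backend_module }}{% endif %}\n"
    "{% endfor %}"
)

# Non-core backend: each manager installs the backend-specific package.
print(Template(TEMPLATE).render(is_core=False, backend_module="pandas"))
# conda install -c conda-forge ibis-pandas
# mamba install -c conda-forge ibis-pandas
```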
feat(core): support cascade merging detached entity Closes #30
880196017864cf177558a22aaed4767aef1cb107
feat
https://github.com/mikro-orm/mikro-orm/commit/880196017864cf177558a22aaed4767aef1cb107
support cascade merging detached entity Closes #30
diff --git a/EntityManager.ts b/EntityManager.ts index 550ff09..94e5c4c 100644 --- a/EntityManager.ts +++ b/EntityManager.ts @@ -188,12 +188,6 @@ export class EntityManager { * Gets a reference to the entity identified by the given type and identifier without actually loading it, if the entity is not yet loaded */ getReference<T extends IEntityType<T>>(entityName: EntityName<T>, id: IPrimaryKey): T { - entityName = Utils.className(entityName); - - if (this.getUnitOfWork().getById(entityName, id)) { - return this.getUnitOfWork().getById<T>(entityName, id); - } - const entity = this.getEntityFactory().createReference<T>(entityName, id); this.getUnitOfWork().merge(entity); diff --git a/EntityAssigner.ts b/EntityAssigner.ts index 7922ef2..2177504 100644 --- a/EntityAssigner.ts +++ b/EntityAssigner.ts @@ -13,7 +13,7 @@ export class EntityAssigner { const props = meta.properties; Object.keys(data).forEach(prop => { - if (onlyProperties && !props[prop]) { + if (onlyProperties && !(prop in props)) { return; } @@ -33,7 +33,7 @@ export class EntityAssigner { entity[prop as keyof T] = entity.__em.getValidator().validateProperty(props[prop], value, entity); } - entity[prop as keyof T] = value as T[keyof T]; + entity[prop as keyof T] = value; }); } @@ -43,12 +43,13 @@ export class EntityAssigner { return; } - const meta = MetadataStorage.getMetadata(entity.constructor.name); - const id = Utils.extractPK(value, meta); + if (Utils.isPrimaryKey(value)) { + entity[prop.name as keyof T] = em.getReference(prop.type, value); + return; + } - if (id) { - const normalized = em.getDriver().getPlatform().normalizePrimaryKey(id); - entity[prop.name as keyof T] = em.getReference(prop.type, normalized); + if (Utils.isObject(value)) { + entity[prop.name as keyof T] = em.create(prop.type, value) as T[keyof T]; return; } @@ -58,27 +59,32 @@ export class EntityAssigner { private static assignCollection<T extends IEntityType<T>>(entity: T, value: any[], prop: EntityProperty, em: EntityManager): void { const invalid: any[] = []; - const items = value.map((item: any) => { - if (Utils.isEntity(item)) { - return item; - } + const items = value.map((item: any) => this.createCollectionItem(item, em, prop, invalid)); - if (Utils.isPrimaryKey(item)) { - const id = em.getDriver().getPlatform().normalizePrimaryKey(item); - return em.getReference(prop.type, id); - } + if (invalid.length > 0) { + const name = entity.constructor.name; + throw new Error(`Invalid collection values provided for '${name}.${prop.name}' in ${name}.assign(): ${JSON.stringify(invalid)}`); + } - invalid.push(item); + (entity[prop.name as keyof T] as Collection<IEntity>).set(items, true); + } + private static createCollectionItem(item: any, em: EntityManager, prop: EntityProperty, invalid: any[]): IEntity { + if (Utils.isEntity(item)) { return item; - }); + } - if (invalid.length > 0) { - const name = entity.constructor.name; - throw new Error(`Invalid collection values provided for '${name}.${prop.name}' in ${name}.assign(): ${JSON.stringify(invalid)}`); + if (Utils.isPrimaryKey(item)) { + return em.getReference(prop.type, item); + } + + if (Utils.isObject(item)) { + return em.create(prop.type, item); } - (entity[prop.name as keyof T] as Collection<IEntity>).set(items); + invalid.push(item); + + return item; } } diff --git a/ObjectHydrator.ts b/ObjectHydrator.ts index 88434d1..ce7129b 100644 --- a/ObjectHydrator.ts +++ b/ObjectHydrator.ts @@ -1,4 +1,4 @@ -import { EntityProperty, IEntity, IEntityType, IPrimaryKey } from '../decorators'; +import { EntityData, 
EntityProperty, IEntity, IEntityType, IPrimaryKey } from '../decorators'; import { Hydrator } from './Hydrator'; import { Collection, ReferenceType } from '../entity'; import { Utils } from '../utils'; @@ -35,7 +35,7 @@ export class ObjectHydrator extends Hydrator { private hydrateManyToManyOwner<T extends IEntityType<T>>(entity: T, prop: EntityProperty, value: any): void { if (Array.isArray(value)) { - const items = value.map((id: IPrimaryKey) => this.factory.createReference(prop.type, id)); + const items = value.map((value: IPrimaryKey | EntityData<T>) => this.createCollectionItem(prop, value)); entity[prop.name as keyof T] = new Collection<IEntity>(entity, items) as T[keyof T]; } else if (!entity[prop.name as keyof T]) { const items = this.driver.getPlatform().usesPivotTable() ? undefined : []; @@ -50,9 +50,29 @@ export class ObjectHydrator extends Hydrator { } private hydrateManyToOne<T extends IEntityType<T>>(value: any, entity: T, prop: EntityProperty): void { - if (value && !Utils.isEntity(value)) { - entity[prop.name as keyof T] = this.factory.createReference(prop.type, value as IPrimaryKey); + if (!value) { + return; } + + if (Utils.isPrimaryKey(value)) { + entity[prop.name as keyof T] = this.factory.createReference(prop.type, value); + return; + } + + if (Utils.isObject<T[keyof T]>(value)) { + entity[prop.name as keyof T] = this.factory.create(prop.type, value); + } + } + + private createCollectionItem<T extends IEntityType<T>>(prop: EntityProperty, value: IPrimaryKey | EntityData<T>): T { + if (Utils.isPrimaryKey(value)) { + return this.factory.createReference(prop.type, value); + } + + const child = this.factory.create(prop.type, value as EntityData<T>); + child.__em.merge(child); + + return child; } } diff --git a/MetadataStorage.ts b/MetadataStorage.ts index 7e9f073..143ccb7 100644 --- a/MetadataStorage.ts +++ b/MetadataStorage.ts @@ -1,12 +1,12 @@ -import { EntityMetadata } from '../decorators'; +import { EntityMetadata, IEntityType } from '../decorators'; export class MetadataStorage { private static readonly metadata: Record<string, EntityMetadata> = {}; static getMetadata(): Record<string, EntityMetadata>; - static getMetadata(entity: string): EntityMetadata; - static getMetadata(entity?: string): Record<string, EntityMetadata> | EntityMetadata { + static getMetadata<T extends IEntityType<T> = any>(entity: string): EntityMetadata<T>; + static getMetadata<T extends IEntityType<T> = any>(entity?: string): Record<string, EntityMetadata> | EntityMetadata<T> { if (entity && !MetadataStorage.metadata[entity]) { MetadataStorage.metadata[entity] = { properties: {} } as EntityMetadata; } diff --git a/UnitOfWork.ts b/UnitOfWork.ts index 2824d3b..b5de702 100644 --- a/UnitOfWork.ts +++ b/UnitOfWork.ts @@ -138,10 +138,6 @@ export class UnitOfWork { } private findNewEntities<T extends IEntityType<T>>(entity: T, visited: IEntity[] = []): void { - if (visited.includes(entity)) { - return; - } - visited.push(entity); const meta = this.metadata[entity.constructor.name] as EntityMetadata<T>; diff --git a/EntityHelper.mongo.test.ts b/EntityHelper.mongo.test.ts index 3c4de25..ec7e705 100644 --- a/EntityHelper.mongo.test.ts +++ b/EntityHelper.mongo.test.ts @@ -98,7 +98,9 @@ describe('EntityAssignerMongo', () => { expect(book.tags.getIdentifiers('id')).toMatchObject([tag1.id, tag3.id]); EntityAssigner.assign(book, { tags: [tag2] }); expect(book.tags.getIdentifiers('_id')).toMatchObject([tag2._id]); - expect(() => EntityAssigner.assign(book, { tags: [{ foo: 'bar' }] })).toThrowError(`Invalid 
collection values provided for 'Book.tags' in Book.assign(): [{"foo":"bar"}]`); + EntityAssigner.assign(book, { tags: [tag2.toObject()] }); + expect(book.tags.getIdentifiers('_id')).toMatchObject([tag2._id]); + expect(() => EntityAssigner.assign(book, { tags: [false] })).toThrowError(`Invalid collection values provided for 'Book.tags' in Book.assign(): [false]`); expect(() => EntityAssigner.assign(book, { publisher: [{ foo: 'bar' }] })).toThrowError(`Invalid reference value provided for 'Book.publisher' in Book.assign(): [{"foo":"bar"}]`); }); diff --git a/EntityManager.mongo.test.ts b/EntityManager.mongo.test.ts index 226fd6e..0334d04 100644 --- a/EntityManager.mongo.test.ts +++ b/EntityManager.mongo.test.ts @@ -450,6 +450,14 @@ describe('EntityManagerMongo', () => { book = (await orm.em.findOne(Book, book._id))!; expect(book.tags.count()).toBe(2); + // set + const items = book.tags.getIdentifiers().map(t => tagRepository.getReference(t)); + book.tags.set(items); + await orm.em.persist(book); + orm.em.clear(); + book = (await orm.em.findOne(Book, book._id))!; + expect(book.tags.count()).toBe(2); + // contains expect(book.tags.contains(tag1)).toBe(true); expect(book.tags.contains(tag2)).toBe(false); @@ -532,6 +540,63 @@ describe('EntityManagerMongo', () => { }); }); + test('merging detached entity', async () => { + const author = new Author('Jon Snow', '[email protected]'); + const book1 = new Book('My Life on The Wall, part 1', author); + const book2 = new Book('My Life on The Wall, part 2', author); + const book3 = new Book('My Life on The Wall, part 3', author); + author.favouriteBook = book1; + const tag1 = new BookTag('silly'); + const tag2 = new BookTag('funny'); + const tag3 = new BookTag('sick'); + const tag4 = new BookTag('strange'); + const tag5 = new BookTag('sexy'); + book1.tags.add(tag1, tag3); + book2.tags.add(tag1, tag2, tag5); + book3.tags.add(tag2, tag4, tag5); + await orm.em.persist([book1, book2, book3]); + orm.em.clear(); + + // cache author with favouriteBook and its tags + const jon = await orm.em.findOne(Author, author.id, ['favouriteBook.tags']); + const cache = jon!.toObject(); + + // merge cached author with his references + orm.em.clear(); + const cachedAuthor = orm.em.merge(Author, cache); + expect(cachedAuthor).toBe(cachedAuthor.favouriteBook.author); + expect(Object.keys(orm.em.getUnitOfWork().getIdentityMap())).toEqual([ + 'BookTag-' + tag1.id, + 'BookTag-' + tag3.id, + 'Author-' + author.id, + 'Book-' + book1.id, + ]); + expect(author).not.toBe(cachedAuthor); + expect(author.id).toBe(cachedAuthor.id); + const book4 = new Book('My Life on The Wall, part 4', cachedAuthor); + await orm.em.persist(book4); + + // merge detached author + orm.em.clear(); + const cachedAuthor2 = orm.em.merge(author); + expect(cachedAuthor2).toBe(cachedAuthor2.favouriteBook.author); + expect(Object.keys(orm.em.getUnitOfWork().getIdentityMap())).toEqual([ + 'Author-' + author.id, + 'Book-' + book1.id, + 'BookTag-' + tag1.id, + 'Book-' + book2.id, + 'BookTag-' + tag2.id, + 'Book-' + book3.id, + 'BookTag-' + tag4.id, + 'BookTag-' + tag5.id, + 'BookTag-' + tag3.id, + ]); + expect(author).toBe(cachedAuthor2); + expect(author.id).toBe(cachedAuthor2.id); + const book5 = new Book('My Life on The Wall, part 5', cachedAuthor2); + await orm.em.persist(book5); + }); + test('cascade persist on owning side', async () => { const author = new Author('Jon Snow', '[email protected]'); const book1 = new Book('My Life on The Wall, part 1', author);
test(bigquery): clean up test data loading (#8661)
110a18c4fe892f1ae75ec1b472bc822cf2920878
test
https://github.com/ibis-project/ibis/commit/110a18c4fe892f1ae75ec1b472bc822cf2920878
clean up test data loading (#8661)
diff --git a/bigquery.sql b/bigquery.sql index bb73eeb..ae64daf 100644 --- a/bigquery.sql +++ b/bigquery.sql @@ -0,0 +1,164 @@ +CREATE SCHEMA IF NOT EXISTS ibis_gbq_testing; + +CREATE OR REPLACE TABLE ibis_gbq_testing.struct ( + abc STRUCT<a FLOAT64, b STRING, c INT64> +); + +INSERT INTO ibis_gbq_testing.struct VALUES + (STRUCT(1.0, 'banana', 2)), + (STRUCT(2.0, 'apple', 3)), + (STRUCT(3.0, 'orange', 4)), + (STRUCT(NULL, 'banana', 2)), + (STRUCT(2.0, NULL, 3)), + (NULL), + (STRUCT(3.0, 'orange', NULL)); + +CREATE OR REPLACE TABLE ibis_gbq_testing.array_types ( + x ARRAY<INT64>, + y ARRAY<STRING>, + z ARRAY<FLOAT64>, + grouper STRING, + scalar_column FLOAT64, +); + +INSERT INTO ibis_gbq_testing.array_types VALUES + ([1, 2, 3], ['a', 'b', 'c'], [1.0, 2.0, 3.0], 'a', 1.0), + ([4, 5], ['d', 'e'], [4.0, 5.0], 'a', 2.0), + ([6], ['f'], [6.0], 'a', 3.0), + ([1], ['a'], [], 'b', 4.0), + ([2, 3], ['b', 'c'], NULL, 'b', 5.0), + ([4, 5], ['d', 'e'], [4.0, 5.0], 'c', 6.0); + +CREATE OR REPLACE TABLE ibis_gbq_testing.win ( + g STRING, + x INT64, + y INT64 +); + +INSERT INTO ibis_gbq_testing.win VALUES + ('a', 0, 3), + ('a', 1, 2), + ('a', 2, 0), + ('a', 3, 1), + ('a', 4, 1); + +CREATE OR REPLACE TABLE ibis_gbq_testing.topk ( + x INT64 +); + +INSERT INTO ibis_gbq_testing.topk VALUES (1), (1), (NULL); + +CREATE OR REPLACE TABLE ibis_gbq_testing.numeric_table ( + string_col STRING, + numeric_col NUMERIC +); + +INSERT INTO ibis_gbq_testing.numeric_table VALUES + ('1st value', 0.999999999), + ('2nd value', 0.000000002); + +CREATE OR REPLACE TABLE ibis_gbq_testing.json_t ( + js JSON +); + +INSERT INTO ibis_gbq_testing.json_t VALUES + (JSON '{"a": [1,2,3,4], "b": 1}'), + (JSON '{"a":null,"b":2}'), + (JSON '{"a":"foo", "c":null}'), + (JSON 'null'), + (JSON '[42,47,55]'), + (JSON '[]'); + + +LOAD DATA OVERWRITE ibis_gbq_testing.functional_alltypes ( + id INT64, + bool_col BOOLEAN, + tinyint_col INT64, + smallint_col INT64, + int_col INT64, + bigint_col INT64, + float_col FLOAT64, + double_col FLOAT64, + date_string_col STRING, + string_col STRING, + timestamp_col DATETIME, + year INT64, + month INT64 +) +FROM FILES ( + format = 'PARQUET', + uris = ['gs://ibis-ci-data/functional_alltypes.parquet'] +); + +LOAD DATA OVERWRITE ibis_gbq_testing.awards_players +FROM FILES ( + format = 'PARQUET', + uris = ['gs://ibis-ci-data/awards_players.parquet'] +); + +LOAD DATA OVERWRITE ibis_gbq_testing.batting +FROM FILES ( + format = 'PARQUET', + uris = ['gs://ibis-ci-data/batting.parquet'] +); + +LOAD DATA OVERWRITE ibis_gbq_testing.diamonds +FROM FILES ( + format = 'PARQUET', + uris = ['gs://ibis-ci-data/diamonds.parquet'] +); + +LOAD DATA OVERWRITE ibis_gbq_testing.astronauts +FROM FILES ( + format = 'PARQUET', + uris = ['gs://ibis-ci-data/astronauts.parquet'] +); + +LOAD DATA OVERWRITE ibis_gbq_testing.functional_alltypes_parted ( + id INT64, + bool_col BOOLEAN, + tinyint_col INT64, + smallint_col INT64, + int_col INT64, + bigint_col INT64, + float_col FLOAT64, + double_col FLOAT64, + date_string_col STRING, + string_col STRING, + timestamp_col DATETIME, + year INT64, + month INT64 +) +PARTITION BY _PARTITIONDATE +FROM FILES ( + format = 'PARQUET', + uris = ['gs://ibis-ci-data/functional_alltypes.parquet'] +); + +CREATE OR REPLACE TABLE ibis_gbq_testing.timestamp_column_parted ( + my_timestamp_parted_col TIMESTAMP, + string_col STRING, + int_col INT64 +) +PARTITION BY DATE(my_timestamp_parted_col); + +CREATE OR REPLACE TABLE ibis_gbq_testing.date_column_parted ( + my_date_parted_col DATE, + string_col STRING, + int_col 
INT64 +) +PARTITION BY my_date_parted_col; + +CREATE OR REPLACE TABLE ibis_gbq_testing.struct_table ( + array_of_structs_col ARRAY<STRUCT<int_field INTEGER, string_field STRING>>, + nested_struct_col STRUCT<sub_struct STRUCT<timestamp_col TIMESTAMP>>, + struct_col STRUCT<string_field STRING> +); + +INSERT INTO ibis_gbq_testing.struct_table VALUES + ([(12345, 'abcdefg'), (NULL, NULL)], + STRUCT(STRUCT(NULL)), + STRUCT(NULL)), + ([(12345, 'abcdefg'), (NULL, 'hijklmnop')], + STRUCT(STRUCT('2017-10-20 16:37:50.000000')), + STRUCT('a')); diff --git a/conftest.py b/conftest.py index 6769794..35aa289 100644 --- a/conftest.py +++ b/conftest.py @@ -1,8 +1,5 @@ from __future__ import annotations -import concurrent.futures -import contextlib -import io import os from typing import Any @@ -13,20 +10,9 @@ from google.cloud import bigquery as bq import ibis from ibis.backends.bigquery import EXTERNAL_DATA_SCOPES, Backend -from ibis.backends.bigquery.datatypes import BigQuerySchema -from ibis.backends.conftest import TEST_TABLES from ibis.backends.tests.base import BackendTest -from ibis.backends.tests.data import ( - json_types, - non_null_array_types, - struct_types, - topk, - win, -) DATASET_ID = "ibis_gbq_testing" -DATASET_ID_TOKYO = "ibis_gbq_testing_tokyo" -REGION_TOKYO = "asia-northeast1" DEFAULT_PROJECT_ID = "ibis-gbq" PROJECT_ID_ENV_VAR = "GOOGLE_BIGQUERY_PROJECT_ID" @@ -70,226 +56,10 @@ class TestConf(BackendTest): except gexc.Forbidden: pytest.skip("User does not have permission to create dataset") - testing_dataset = bq.DatasetReference(project_id, DATASET_ID) - - with contextlib.suppress(gexc.NotFound): - client.create_dataset(testing_dataset, exists_ok=True) - - testing_dataset_tokyo = bq.Dataset( - bq.DatasetReference(project_id, DATASET_ID_TOKYO) - ) - testing_dataset_tokyo.location = REGION_TOKYO - - with contextlib.suppress(gexc.NotFound): - client.create_dataset(testing_dataset_tokyo, exists_ok=True) - - # day partitioning - functional_alltypes_parted = bq.Table( - bq.TableReference(testing_dataset, "functional_alltypes_parted") - ) - functional_alltypes_parted.require_partition_filter = False - functional_alltypes_parted.time_partitioning = bq.TimePartitioning( - type_=bq.TimePartitioningType.DAY - ) - - # ingestion timestamp partitioning - timestamp_table = bq.Table( - bq.TableReference(testing_dataset, "timestamp_column_parted") - ) - timestamp_table.schema = BigQuerySchema.from_ibis( - ibis.schema( - dict( - my_timestamp_parted_col="timestamp", - string_col="string", - int_col="int", - ) - ) - ) - timestamp_table.time_partitioning = bq.TimePartitioning( - field="my_timestamp_parted_col" - ) - client.create_table(timestamp_table, exists_ok=True) - - # ingestion date partitioning - date_table = bq.Table(bq.TableReference(testing_dataset, "date_column_parted")) - date_table.schema = BigQuerySchema.from_ibis( - ibis.schema( - dict(my_date_parted_col="date", string_col="string", int_col="int") - ) - ) - date_table.time_partitioning = bq.TimePartitioning(field="my_date_parted_col") - client.create_table(date_table, exists_ok=True) - - write_disposition = bq.WriteDisposition.WRITE_TRUNCATE - make_job = lambda func, *a, **kw: func(*a, **kw).result() - - futures = [] - # 10 is because of urllib3 connection pool size - with concurrent.futures.ThreadPoolExecutor(max_workers=10) as e: - futures.append( - e.submit( - make_job, - client.load_table_from_dataframe, - struct_types, - bq.TableReference(testing_dataset, "struct"), - job_config=bq.LoadJobConfig( - write_disposition=write_disposition, 
- schema=BigQuerySchema.from_ibis( - ibis.schema( - dict(abc="struct<a: float64, b: string, c: int64>") - ) - ), - ), - ) - ) - - futures.append( - e.submit( - make_job, - client.load_table_from_dataframe, - non_null_array_types.drop(columns=["multi_dim"]), - bq.TableReference(testing_dataset, "array_types"), - job_config=bq.LoadJobConfig( - write_disposition=write_disposition, - schema=BigQuerySchema.from_ibis( - ibis.schema( - dict( - x="array<int64>", - y="array<string>", - z="array<float64>", - grouper="string", - scalar_column="float64", - ) - ) - ), - ), - ) - ) - - futures.append( - e.submit( - make_job, - client.load_table_from_file, - io.BytesIO( - self.data_dir.joinpath("avro", "struct_table.avro").read_bytes() - ), - bq.TableReference(testing_dataset, "struct_table"), - job_config=bq.LoadJobConfig( - write_disposition=write_disposition, - source_format=bq.SourceFormat.AVRO, - ), - ) - ) - - futures.append( - e.submit( - make_job, - client.load_table_from_file, - io.StringIO( - "\\n".join( # noqa: FLY002 - [ - """{"string_col": "1st value", "numeric_col": 0.999999999}""", - """{"string_col": "2nd value", "numeric_col": 0.000000002}""", - ] - ) - ), - bq.TableReference(testing_dataset, "numeric_table"), - job_config=bq.LoadJobConfig( - write_disposition=write_disposition, - schema=BigQuerySchema.from_ibis( - ibis.schema( - dict(string_col="string", numeric_col="decimal(38, 9)") - ) - ), - source_format=bq.SourceFormat.NEWLINE_DELIMITED_JSON, - ), - ) - ) - - futures.append( - e.submit( - make_job, - client.load_table_from_dataframe, - win, - bq.TableReference(testing_dataset, "win"), - job_config=bq.LoadJobConfig( - write_disposition=write_disposition, - schema=BigQuerySchema.from_ibis( - ibis.schema(dict(g="string", x="!int64", y="int64")) - ), - ), - ) - ) - - futures.append( - e.submit( - make_job, - client.load_table_from_dataframe, - topk.to_pandas(), - bq.TableReference(testing_dataset, "topk"), - job_config=bq.LoadJobConfig( - write_disposition=write_disposition, - schema=BigQuerySchema.from_ibis(ibis.schema(dict(x="int64"))), - ), - ) - ) - - futures.append( - e.submit( - make_job, - client.load_table_from_file, - io.StringIO("\\n".join(f'{{"js": {row}}}' for row in json_types.js)), - bq.TableReference(testing_dataset, "json_t"), - job_config=bq.LoadJobConfig( - write_disposition=write_disposition, - schema=BigQuerySchema.from_ibis(ibis.schema(dict(js="json"))), - source_format=bq.SourceFormat.NEWLINE_DELIMITED_JSON, - ), - ) - ) - - futures.extend( - e.submit( - make_job, - client.load_table_from_file, - io.BytesIO( - self.data_dir.joinpath( - "parquet", f"{table}.parquet" - ).read_bytes() - ), - bq.TableReference(testing_dataset, table), - job_config=bq.LoadJobConfig( - schema=BigQuerySchema.from_ibis(ibis.schema(schema)), - write_disposition=write_disposition, - source_format=bq.SourceFormat.PARQUET, - ), - ) - for table, schema in TEST_TABLES.items() - ) - - # Test regional endpoints with non-US data. 
- - futures.extend( - e.submit( - make_job, - client.load_table_from_file, - io.BytesIO( - self.data_dir.joinpath( - "parquet", f"{table}.parquet" - ).read_bytes() - ), - bq.TableReference(testing_dataset_tokyo, table), - job_config=bq.LoadJobConfig( - schema=BigQuerySchema.from_ibis(ibis.schema(schema)), - write_disposition=write_disposition, - source_format=bq.SourceFormat.PARQUET, - ), - ) - for table, schema in TEST_TABLES.items() - ) - - for fut in concurrent.futures.as_completed(futures): - fut.result() + path = self.script_dir.joinpath(f"{self.name()}.sql") + ddl = path.read_text() + query = client.query(ddl) + query.result() @staticmethod def connect(*, tmpdir, worker_id, **kw) -> Backend: diff --git a/test_client.py b/test_client.py index 47d3385..7f993df 100644 --- a/test_client.py +++ b/test_client.py @@ -94,7 +94,7 @@ def test_cast_string_to_date(alltypes, df): def test_cast_float_to_int(alltypes, df): result = (alltypes.float_col - 2.55).cast("int64").to_pandas().sort_values() expected = (df.float_col - 2.55).astype("int64").sort_values() - tm.assert_series_equal(result, expected, check_names=False) + tm.assert_series_equal(result, expected, check_names=False, check_index=False) def test_has_partitions(alltypes, parted_alltypes, con): diff --git a/test_connect.py b/test_connect.py index 13f5c72..801b631 100644 --- a/test_connect.py +++ b/test_connect.py @@ -202,15 +202,40 @@ def test_client_with_regional_endpoints( client_options=bqstorage_options, credentials=credentials ) - con = ibis.bigquery.connect( + tokyo_con = ibis.bigquery.connect( client=bq_client, storage_client=bqstorage_client, project_id=project_id ) + tokyo_con.raw_sql( + """ + CREATE SCHEMA IF NOT EXISTS ibis_gbq_testing_tokyo OPTIONS ( + location = 'asia-northeast1' + ); + + CREATE OR REPLACE TABLE ibis_gbq_testing_tokyo.functional_alltypes ( + id INT64, + bool_col BOOLEAN, + tinyint_col INT64, + smallint_col INT64, + int_col INT64, + bigint_col INT64, + float_col FLOAT64, + double_col FLOAT64, + date_string_col STRING, + string_col STRING, + timestamp_col DATETIME, + year INT64, + month INT64 + ) + """ + ) + # Fails because dataset not in Tokyo. with pytest.raises(gexc.NotFound, match=dataset_id): - con.table(f"{dataset_id}.functional_alltypes") + tokyo_con.table(f"{dataset_id}.functional_alltypes") # Succeeds because dataset is in Tokyo. 
- alltypes = con.table(f"{dataset_id_tokyo}.functional_alltypes") - df = alltypes.limit(2).execute() - assert len(df.index) == 2 + alltypes = tokyo_con.table(f"{dataset_id_tokyo}.functional_alltypes") + df = alltypes.execute() + assert df.empty + assert not len(alltypes.to_pyarrow()) diff --git a/test_json.py b/test_json.py index e4357fe..b5da189 100644 --- a/test_json.py +++ b/test_json.py @@ -4,11 +4,10 @@ from __future__ import annotations import sqlite3 +import numpy as np import pandas as pd -import pandas.testing as tm import pytest from packaging.version import parse as vparse -from pytest import param pytestmark = [ pytest.mark.never(["impala"], reason="doesn't support JSON and never will"), @@ -17,21 +16,30 @@ pytestmark = [ ] [email protected]( - ("expr_fn", "expected"), - [ - param( - lambda t: t.js["a"].name("res"), - pd.Series([[1, 2, 3, 4], None, "foo"] + [None] * 3, name="res"), - id="object", - ), - param( - lambda t: t.js[1].name("res"), - pd.Series([None] * 4 + [47, None], dtype="object", name="res"), - id="array", - ), - ], [email protected]( + ["sqlite"], + condition=vparse(sqlite3.sqlite_version) < vparse("3.38.0"), + reason="JSON not supported in SQLite < 3.38.0", +) [email protected]( + ["flink"], + reason="https://github.com/ibis-project/ibis/pull/6920#discussion_r1373212503", ) [email protected]( + ["risingwave"], reason="TODO(Kexiang): order mismatch in array", strict=False +) +def test_json_getitem_object(json_t): + expr_fn = lambda t: t.js["a"].name("res") + expected = frozenset([(1, 2, 3, 4), None, "foo"] + [None] * 3) + expr = expr_fn(json_t) + result = frozenset( + expr.execute() + .map(lambda o: tuple(o) if isinstance(o, list) else o) + .replace({np.nan: None}) + ) + assert result == expected + + @pytest.mark.notyet( ["sqlite"], condition=vparse(sqlite3.sqlite_version) < vparse("3.38.0"), @@ -44,10 +52,12 @@ pytestmark = [ @pytest.mark.broken( ["risingwave"], reason="TODO(Kexiang): order mismatch in array", strict=False ) -def test_json_getitem(json_t, expr_fn, expected): +def test_json_getitem_array(json_t): + expr_fn = lambda t: t.js[1].name("res") + expected = frozenset([None] * 4 + [47, None]) expr = expr_fn(json_t) - result = expr.execute() - tm.assert_series_equal(result.fillna(pd.NA), expected.fillna(pd.NA)) + result = frozenset(expr.execute().replace({np.nan: None})) + assert result == expected @pytest.mark.notimpl(["dask", "mysql", "pandas", "risingwave"])
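The conftest rewrite above replaces the thread-pooled per-table loads with a single DDL script executed by BigQuery itself. A minimal sketch of that flow, assuming default application credentials; the project id and script path are placeholders taken from the diff rather than a guaranteed setup.

```python
# Minimal sketch of the new loading path: read the SQL script and let
# BigQuery run it as one multi-statement query.
from pathlib import Path

from google.cloud import bigquery as bq

client = bq.Client(project="ibis-gbq")  # placeholder project id from the diff
ddl = Path("bigquery.sql").read_text()  # the schema/DDL script shown above

# query() accepts a script containing multiple statements; result() blocks
# until every CREATE SCHEMA / LOAD DATA / INSERT has finished.
client.query(ddl).result()
```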
chore: remove extra line at the top of internals
461d634e69bdfd0c045aae92e9ae73aaaa3e5438
chore
https://github.com/rohankumardubey/ibis/commit/461d634e69bdfd0c045aae92e9ae73aaaa3e5438
remove extra line at the top of internals
diff --git a/internals.qmd b/internals.qmd index d6fae80..29ccabe 100644 --- a/internals.qmd +++ b/internals.qmd @@ -74,7 +74,6 @@ defines the shape (scalar or column) and element type of the operation. An example of usage is a node that representats a logarithm operation: ```python - import ibis.expr.rules as rlz from ibis.expr.operations import Value
fix(ir): handle renaming for scalar operations
6f77f1799812f979f004df08e1b152765a3fa730
fix
https://github.com/ibis-project/ibis/commit/6f77f1799812f979f004df08e1b152765a3fa730
handle renaming for scalar operations
diff --git a/generic.py b/generic.py index 7d61791..4c69cf4 100644 --- a/generic.py +++ b/generic.py @@ -1247,9 +1247,15 @@ class Scalar(Value): op = self.op() table = find_first_base_table(op) if table is not None: - return table.to_expr().aggregate([self]) + return table.to_expr().aggregate(**{self.get_name(): self}) else: - return ops.DummyTable(values=(op,)).to_expr() + if isinstance(op, ops.Alias): + value = op + assert value.name == self.get_name() + else: + value = ops.Alias(op, self.get_name()) + + return ops.DummyTable(values=(value,)).to_expr() def __deferred_repr__(self): return f"<scalar[{self.type()}]>"
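The fix above boils down to one rule: when a scalar is promoted to a one-column table, reuse its existing Alias, otherwise wrap the op in one so the column keeps the user-visible name. A stripped-down, self-contained model of that decision (plain dataclasses standing in for ibis's real node types):

```python
# Toy model of the renaming logic in the fix; Alias and DummyTable here are
# stand-ins, not ibis's actual operation classes.
from dataclasses import dataclass
from typing import Any


@dataclass
class Alias:
    op: Any
    name: str


@dataclass
class DummyTable:
    values: tuple


def as_one_column_table(op: Any, name: str) -> DummyTable:
    # Reuse an existing alias rather than double-wrapping; otherwise attach
    # the name so the resulting column is not anonymous.
    if isinstance(op, Alias):
        assert op.name == name
        value = op
    else:
        value = Alias(op, name)
    return DummyTable(values=(value,))


print(as_one_column_table(42, "answer"))
# DummyTable(values=(Alias(op=42, name='answer'),))
```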
build: update version (nightly.0)
a7c174a0e09e5a83aa3f4aa8d633da386e0335ac
build
https://github.com/erg-lang/erg/commit/a7c174a0e09e5a83aa3f4aa8d633da386e0335ac
update version (nightly.0)
diff --git a/Cargo.lock b/Cargo.lock index d9810f8..6cd2a6c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -103,7 +103,7 @@ dependencies = [ [[package]] name = "els" -version = "0.1.31" +version = "0.1.32-nightly.0" dependencies = [ "erg_common", "erg_compiler", @@ -115,7 +115,7 @@ dependencies = [ [[package]] name = "erg" -version = "0.6.19" +version = "0.6.20-nightly.0" dependencies = [ "els", "erg_common", @@ -125,7 +125,7 @@ dependencies = [ [[package]] name = "erg_common" -version = "0.6.19" +version = "0.6.20-nightly.0" dependencies = [ "backtrace-on-stack-overflow", "crossterm", @@ -135,7 +135,7 @@ dependencies = [ [[package]] name = "erg_compiler" -version = "0.6.19" +version = "0.6.20-nightly.0" dependencies = [ "erg_common", "erg_parser", @@ -143,7 +143,7 @@ dependencies = [ [[package]] name = "erg_parser" -version = "0.6.19" +version = "0.6.20-nightly.0" dependencies = [ "erg_common", "unicode-xid", @@ -271,9 +271,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "5486aed0026218e61b8a01d5fbd5a0a134649abb71a0e53b7bc088529dced86e" [[package]] name = "memoffset" @@ -397,9 +397,9 @@ checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "rustix" -version = "0.38.10" +version = "0.38.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6248e1caa625eb708e266e06159f135e8c26f2bb7ceb72dc4b2766d0340964" +checksum = "c0c3dde1fc030af041adc40e79c0e7fbcf431dd24870053d187d7c66e4b87453" dependencies = [ "bitflags 2.4.0", "errno", @@ -422,18 +422,18 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.186" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f5db24220c009de9bd45e69fb2938f4b6d2df856aa9304ce377b3180f83b7c1" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.186" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad697f7e0b65af4983a4ce8f56ed5b357e8d3c36651bf6a7e13639c17b8e670" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", @@ -596,9 +596,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "url" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" dependencies = [ "form_urlencoded", "idna", diff --git a/Cargo.toml b/Cargo.toml index 395e5d2..b1be63a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,7 +2,7 @@ name = "els" description = "An Erg compiler frontend for IDEs, implements LSP." documentation = "http://docs.rs/els" -version = "0.1.31" +version = "0.1.32-nightly.0" authors.workspace = true license.workspace = true edition.workspace = true
refactor(sql): clean up unnecessary use of explicit visit methods
2acaa16a5bbeb744a7a02b348a1e95b961ddd0f9
refactor
https://github.com/ibis-project/ibis/commit/2acaa16a5bbeb744a7a02b348a1e95b961ddd0f9
clean up unnecessary use of explicit visit methods
diff --git a/base.py b/base.py index ef7f357..b68d9d1 100644 --- a/base.py +++ b/base.py @@ -381,6 +381,8 @@ class SQLGlotCompiler(abc.ABC): ops.Translate: "translate", ops.Unnest: "explode", ops.Uppercase: "upper", + ops.RandomUUID: "uuid", + ops.RandomScalar: "rand", } BINARY_INFIX_OPS = { @@ -869,14 +871,6 @@ class SQLGlotCompiler(abc.ABC): def visit_Round(self, op, *, arg, digits): return self.cast(self.f.round(arg, digits), op.dtype) - ### Random Noise - - def visit_RandomScalar(self, op, **kwargs): - return self.f.rand() - - def visit_RandomUUID(self, op, **kwargs): - return self.f.uuid() - ### Dtype Dysmorphia def visit_TryCast(self, op, *, arg, to): diff --git a/__init__.py b/__init__.py index 2749c88..d780d22 100644 --- a/__init__.py +++ b/__init__.py @@ -199,8 +199,7 @@ class BigQueryCompiler(SQLGlotCompiler): ops.TimeFromHMS: "time_from_parts", ops.TimestampNow: "current_timestamp", ops.ExtractHost: "net.host", - ops.ArgMin: "min_by", - ops.ArgMax: "max_by", + ops.RandomUUID: "generate_uuid", } def to_sqlglot( @@ -997,9 +996,6 @@ class BigQueryCompiler(SQLGlotCompiler): arg = self.if_(where, arg, NULL) return self.f.count(sge.Distinct(expressions=[arg])) - def visit_RandomUUID(self, op, **kwargs): - return self.f.generate_uuid() - def visit_ExtractFile(self, op, *, arg): return self._pudf("cw_url_extract_file", arg) diff --git a/clickhouse.py b/clickhouse.py index b717ff4..0a56f33 100644 --- a/clickhouse.py +++ b/clickhouse.py @@ -115,6 +115,8 @@ class ClickHouseCompiler(SQLGlotCompiler): ops.TimestampNow: "now", ops.TypeOf: "toTypeName", ops.Unnest: "arrayJoin", + ops.RandomUUID: "generateUUIDv4", + ops.RandomScalar: "randCanonical", } @staticmethod @@ -719,12 +721,6 @@ class ClickHouseCompiler(SQLGlotCompiler): def visit_RegexSplit(self, op, *, arg, pattern): return self.f.splitByRegexp(pattern, self.cast(arg, dt.String(nullable=False))) - def visit_RandomScalar(self, op, **kwargs): - return self.f.randCanonical() - - def visit_RandomUUID(self, op, **kwargs): - return self.f.generateUUIDv4() - @staticmethod def _generate_groups(groups): return groups diff --git a/datafusion.py b/datafusion.py index 78d39dd..9d906a7 100644 --- a/datafusion.py +++ b/datafusion.py @@ -530,7 +530,7 @@ class DataFusionCompiler(SQLGlotCompiler): def visit_ArrayFlatten(self, op, *, arg): return self.if_(arg.is_(NULL), NULL, self.f.flatten(arg)) - def visit_RandomUUID(self, op, **kw): + def visit_RandomUUID(self, op): return self.f.anon.uuid() diff --git a/duckdb.py b/duckdb.py index 53f3bb5..63185f7 100644 --- a/duckdb.py +++ b/duckdb.py @@ -103,6 +103,7 @@ class DuckDBCompiler(SQLGlotCompiler): ops.GeoWithin: "st_within", ops.GeoX: "st_x", ops.GeoY: "st_y", + ops.RandomScalar: "random", } def to_sqlglot( @@ -608,12 +609,6 @@ class DuckDBCompiler(SQLGlotCompiler): ) return super().visit_StructField(op, arg=arg, field=field) - def visit_RandomScalar(self, op, **kwargs): - return self.f.random() - - def visit_RandomUUID(self, op, **kwargs): - return self.f.uuid() - def visit_TypeOf(self, op, *, arg): return self.f.coalesce(self.f.nullif(self.f.typeof(arg), '"NULL"'), "NULL") diff --git a/impala.py b/impala.py index f01a550..7174ae6 100644 --- a/impala.py +++ b/impala.py @@ -130,7 +130,7 @@ class ImpalaCompiler(SQLGlotCompiler): def visit_Xor(self, op, *, left, right): return sg.and_(sg.or_(left, right), sg.not_(sg.and_(left, right))) - def visit_RandomScalar(self, op, **_): + def visit_RandomScalar(self, op): return self.f.rand(self.f.utc_to_unix_micros(self.f.utc_timestamp())) def 
visit_DayOfWeekIndex(self, op, *, arg): diff --git a/mssql.py b/mssql.py index e721fef..618e69a 100644 --- a/mssql.py +++ b/mssql.py @@ -135,6 +135,7 @@ class MSSQLCompiler(SQLGlotCompiler): ops.TimestampNow: "sysdatetime", ops.Min: "min", ops.Max: "max", + ops.RandomUUID: "newid", } NAN = sg.func("double", sge.convert("NaN")) @@ -177,10 +178,7 @@ class MSSQLCompiler(SQLGlotCompiler): table_expr = table_expr.mutate(**conversions) return super().to_sqlglot(table_expr, limit=limit, params=params) - def visit_RandomUUID(self, op, **_): - return self.f.newid() - - def visit_RandomScalar(self, op, **_): + def visit_RandomScalar(self, op): # By default RAND() will generate the same value for all calls within a # query. The standard way to work around this is to pass in a unique # value per call, which `CHECKSUM(NEWID())` provides. diff --git a/oracle.py b/oracle.py index d84c9ea..8fea5eb 100644 --- a/oracle.py +++ b/oracle.py @@ -214,7 +214,7 @@ class OracleCompiler(SQLGlotCompiler): def visit_IsInf(self, op, *, arg): return arg.isin(self.POS_INF, self.NEG_INF) - def visit_RandomScalar(self, op, **_): + def visit_RandomScalar(self, op): # Not using FuncGen here because of dotted function call return sg.func("dbms_random.value") diff --git a/postgres.py b/postgres.py index 4ae0c75..477e579 100644 --- a/postgres.py +++ b/postgres.py @@ -119,6 +119,7 @@ class PostgresCompiler(SQLGlotCompiler): ops.MapValues: "avals", ops.RegexSearch: "regexp_like", ops.TimeFromHMS: "make_time", + ops.RandomUUID: "gen_random_uuid", } def to_sqlglot( @@ -179,9 +180,6 @@ class PostgresCompiler(SQLGlotCompiler): args=", ".join(argnames), ) - def visit_RandomUUID(self, op, **kwargs): - return self.f.gen_random_uuid() - def visit_Mode(self, op, *, arg, where): expr = self.f.mode() expr = sge.WithinGroup( diff --git a/snowflake.py b/snowflake.py index 11f481f..63b87c8 100644 --- a/snowflake.py +++ b/snowflake.py @@ -374,7 +374,7 @@ $$""", def visit_Log(self, op, *, arg, base): return self.f.log(base, arg) - def visit_RandomScalar(self, op, **_): + def visit_RandomScalar(self, op): return self.f.uniform( self.f.to_double(0.0), self.f.to_double(1.0), self.f.random() ) diff --git a/sqlite.py b/sqlite.py index 07b2d87..368cfe5 100644 --- a/sqlite.py +++ b/sqlite.py @@ -194,11 +194,11 @@ class SQLiteCompiler(SQLGlotCompiler): return arg - def visit_RandomScalar(self, op, **kwargs): + def visit_RandomScalar(self, op): return 0.5 + self.f.random() / sge.Literal.number(float(-1 << 64)) def visit_Cot(self, op, *, arg): - return 1 / self.f.tan(arg) + return 1.0 / self.f.tan(arg) def visit_ArgMin(self, *args, **kwargs): return self._visit_arg_reduction("min", *args, **kwargs)
fix: infinite tyvar recursion bug
8c5d70ca4f3207b1e091f7d839a70f71ae73e40c
fix
https://github.com/erg-lang/erg/commit/8c5d70ca4f3207b1e091f7d839a70f71ae73e40c
infinite tyvar recursion bug
diff --git a/consts.rs b/consts.rs index 93c0bfa..5a73212 100644 --- a/consts.rs +++ b/consts.rs @@ -8,3 +8,4 @@ pub const ERG_MODE: bool = !cfg!(feature = "py_compat"); pub const ELS: bool = cfg!(feature = "els"); pub const DEBUG_MODE: bool = cfg!(feature = "debug"); pub const EXPERIMENTAL_MODE: bool = cfg!(feature = "experimental"); +pub const BACKTRACE_MODE: bool = cfg!(feature = "backtrace"); diff --git a/spawn.rs b/spawn.rs index 831304d..a5a7e8b 100644 --- a/spawn.rs +++ b/spawn.rs @@ -1,4 +1,4 @@ -#[cfg(all(unix, any(feature = "debug", feature = "backtrace")))] +#[cfg(all(unix, feature = "backtrace"))] pub use backtrace_on_stack_overflow; use std::thread::{self, JoinHandle}; @@ -11,7 +11,7 @@ const STACK_SIZE: usize = if cfg!(feature = "large_thread") { #[macro_export] macro_rules! enable_overflow_stacktrace { () => { - #[cfg(all(unix, any(feature = "debug", feature = "backtrace")))] + #[cfg(all(unix, feature = "backtrace"))] unsafe { $crate::spawn::backtrace_on_stack_overflow::enable() }; diff --git a/free.rs b/free.rs index f5ac333..3763586 100644 --- a/free.rs +++ b/free.rs @@ -4,6 +4,7 @@ use std::hash::{Hash, Hasher}; use std::mem; use std::sync::atomic::AtomicUsize; +use erg_common::consts::{BACKTRACE_MODE, DEBUG_MODE}; use erg_common::shared::Forkable; use erg_common::traits::{LimitedDisplay, StructuralEq}; use erg_common::{addr, Str}; @@ -76,7 +77,7 @@ impl LimitedDisplay for Constraint { Self::Sandwiched { sub, sup } => match (sub == &Type::Never, sup == &Type::Obj) { (true, true) => { write!(f, ": Type")?; - if cfg!(feature = "debug") { + if DEBUG_MODE { write!(f, "(:> Never, <: Obj)")?; } Ok(()) @@ -231,9 +232,15 @@ impl Constraint { pub fn eliminate_recursion(self, target: &Type) -> Self { match self { Self::Sandwiched { sub, sup } => { - let sub = sub.eliminate(target); - let sup = sup.eliminate(target); - Self::new_sandwiched(sub, sup) + if sub.addr_eq(target) { + Self::new_subtype_of(sup) + } else if sup.addr_eq(target) { + Self::new_supertype_of(sub) + } else { + let sub = sub.eliminate(target); + let sup = sup.eliminate(target); + Self::new_sandwiched(sub, sup) + } } other => other, } @@ -370,7 +377,7 @@ impl<T: LimitedDisplay> LimitedDisplay for FreeKind<T> { } match self { Self::Linked(t) | Self::UndoableLinked { t, .. 
} => { - if cfg!(feature = "debug") { + if DEBUG_MODE || BACKTRACE_MODE { write!(f, "(")?; t.limited_fmt(f, limit)?; write!(f, ")") @@ -385,14 +392,14 @@ impl<T: LimitedDisplay> LimitedDisplay for FreeKind<T> { } => { if *lev == GENERIC_LEVEL { write!(f, "{name}")?; - if cfg!(feature = "debug") { + if DEBUG_MODE || BACKTRACE_MODE { write!(f, "(")?; constraint.limited_fmt(f, limit - 1)?; write!(f, ")")?; } } else { write!(f, "?{name}")?; - if cfg!(feature = "debug") { + if DEBUG_MODE || BACKTRACE_MODE { write!(f, "(")?; constraint.limited_fmt(f, limit - 1)?; write!(f, ")")?; @@ -408,14 +415,14 @@ impl<T: LimitedDisplay> LimitedDisplay for FreeKind<T> { } => { if *lev == GENERIC_LEVEL { write!(f, "%{id}")?; - if cfg!(feature = "debug") { + if DEBUG_MODE || BACKTRACE_MODE { write!(f, "(")?; constraint.limited_fmt(f, limit - 1)?; write!(f, ")")?; } } else { write!(f, "?{id}")?; - if cfg!(feature = "debug") { + if DEBUG_MODE || BACKTRACE_MODE { write!(f, "(")?; constraint.limited_fmt(f, limit - 1)?; write!(f, ")")?; diff --git a/mod.rs b/mod.rs index 20663bf..9001e86 100644 --- a/mod.rs +++ b/mod.rs @@ -3295,6 +3295,13 @@ impl Type { let t = fv.crack().clone(); t.eliminate(target) } + Self::FreeVar(ref fv) if fv.constraint_is_sandwiched() => { + let (sub, sup) = fv.get_subsup().unwrap(); + let sub = sub.eliminate(target); + let sup = sup.eliminate(target); + self.update_tyvar(sub, sup, None, false); + self + } Self::And(l, r) => { if l.addr_eq(target) { return r.eliminate(target); @@ -3555,9 +3562,10 @@ impl Type { if self.level() == Some(GENERIC_LEVEL) { panic!("{self} is fixed"); } + let to = to.clone().eliminate(self); match self { - Self::FreeVar(fv) => fv.link(to), - Self::Refinement(refine) => refine.t.destructive_link(to), + Self::FreeVar(fv) => fv.link(&to), + Self::Refinement(refine) => refine.t.destructive_link(&to), _ => panic!("{self} is not a free variable"), } }
test: add tests for and, or, and xor
6a1b104be8922fbb77e4270979b4a4fd6077a38b
test
https://github.com/ibis-project/ibis/commit/6a1b104be8922fbb77e4270979b4a4fd6077a38b
add tests for and, or, and xor
diff --git a/test_generic.py b/test_generic.py index b055b1d..601c631 100644 --- a/test_generic.py +++ b/test_generic.py @@ -198,8 +198,29 @@ def test_notin(backend, alltypes, sorted_df, column, elements): lambda t: t['bool_col'], lambda df: df['bool_col'], marks=pytest.mark.notimpl(["datafusion"]), + id="no_op", + ), + param( + lambda t: ~t['bool_col'], lambda df: ~df['bool_col'], id="negate" + ), + param( + lambda t: t.bool_col & t.bool_col, + lambda df: df.bool_col & df.bool_col, + id="and", + marks=pytest.mark.notimpl(["datafusion"]), + ), + param( + lambda t: t.bool_col | t.bool_col, + lambda df: df.bool_col | df.bool_col, + id="or", + marks=pytest.mark.notimpl(["datafusion"]), + ), + param( + lambda t: t.bool_col ^ t.bool_col, + lambda df: df.bool_col ^ df.bool_col, + id="xor", + marks=pytest.mark.notimpl(["datafusion"]), ), - (lambda t: ~t['bool_col'], lambda df: ~df['bool_col']), ], ) def test_filter(backend, alltypes, sorted_df, predicate_fn, expected_fn):
build(docs): downgrade TS to fix docs build
e93140c227a7b5745dde3b527cac5c1074cf11b8
build
https://github.com/mikro-orm/mikro-orm/commit/e93140c227a7b5745dde3b527cac5c1074cf11b8
downgrade TS to fix docs build
diff --git a/package.json b/package.json index 170e5fb..97a48f8 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "react-dom": "16.14.0", "typedoc": "0.20.36", "typedoc-plugin-markdown": "3.8.2", - "typescript": "4.3.2" + "typescript": "4.2.4" }, "browserslist": { "production": [
build: fix wrong parameter placement
dad7b2450119cd616e3c1c41b6cac6d2c902ade6
build
https://github.com/mikro-orm/mikro-orm/commit/dad7b2450119cd616e3c1c41b6cac6d2c902ade6
fix wrong parameter placement
diff --git a/tests.yml b/tests.yml index 4bb6133..1fb05c5 100644 --- a/tests.yml +++ b/tests.yml @@ -11,11 +11,13 @@ jobs: steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Use Node.js ${{ matrix.node-version }} uses: actions/setup-node@v2-beta with: node-version: ${{ matrix.node-version }} - fetch-depth: 0 - name: EditorConfig Lint uses: docker://mstruebing/editorconfig-checker:2.2.0@sha256:cafda9b945dd452928aba85d20b79def419237c6e961a46193dbc2d6fc08f561
chore(deps): bump nix flakes and quarto version (#10406)
9cbbc49362ad4a4745a4d8d7af5d1638637e811d
chore
https://github.com/ibis-project/ibis/commit/9cbbc49362ad4a4745a4d8d7af5d1638637e811d
bump nix flakes and quarto version (#10406)
diff --git a/flake.lock b/flake.lock index 62ba51c..71cc25c 100644 --- a/flake.lock +++ b/flake.lock @@ -46,11 +46,11 @@ ] }, "locked": { - "lastModified": 1720066371, - "narHash": "sha256-uPlLYH2S0ACj0IcgaK9Lsf4spmJoGejR9DotXiXSBZQ=", + "lastModified": 1729742964, + "narHash": "sha256-B4mzTcQ0FZHdpeWcpDYPERtyjJd/NIuaQ9+BV1h+MpA=", "owner": "nix-community", "repo": "nix-github-actions", - "rev": "622f829f5fe69310a866c8a6cd07e747c44ef820", + "rev": "e04df33f62cdcf93d73e9a04142464753a16db67", "type": "github" }, "original": { @@ -61,11 +61,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1730157240, - "narHash": "sha256-P8wF4ag6Srmpb/gwskYpnIsnspbjZlRvu47iN527ABQ=", + "lastModified": 1730359060, + "narHash": "sha256-Hkk0mf4pgvX9Ut0YA397nsFqMLhzFVBdFHc4PhBrxYE=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "75e28c029ef2605f9841e0baa335d70065fe7ae2", + "rev": "e19cfce6f3f08d07653157d8826f5c920c770d7b", "type": "github" }, "original": { @@ -88,11 +88,11 @@ "treefmt-nix": "treefmt-nix" }, "locked": { - "lastModified": 1730205069, - "narHash": "sha256-CI3Nnetbs2J17vMOfkb9JZmIn39JqsbnY/OMcChETVY=", + "lastModified": 1730284601, + "narHash": "sha256-eHYcKVLIRRv3J1vjmxurS6HVdGphB53qxUeAkylYrZY=", "owner": "nix-community", "repo": "poetry2nix", - "rev": "2b84afaf6ff1765bcb4cdd97e53a6914feb82ebf", + "rev": "43a898b4d76f7f3f70df77a2cc2d40096bc9d75e", "type": "github" }, "original": { @@ -146,11 +146,11 @@ ] }, "locked": { - "lastModified": 1727984844, - "narHash": "sha256-xpRqITAoD8rHlXQafYZOLvUXCF6cnZkPfoq67ThN0Hc=", + "lastModified": 1730120726, + "narHash": "sha256-LqHYIxMrl/1p3/kvm2ir925tZ8DkI0KA10djk8wecSk=", "owner": "numtide", "repo": "treefmt-nix", - "rev": "4446c7a6fc0775df028c5a3f6727945ba8400e64", + "rev": "9ef337e492a5555d8e17a51c911ff1f02635be15", "type": "github" }, "original": { diff --git a/default.nix b/default.nix index bb0d476..9e4d727 100644 --- a/default.nix +++ b/default.nix @@ -1,7 +1,6 @@ { stdenv , lib , esbuild -, deno_1 , fetchurl , dart-sass , makeWrapper @@ -17,15 +16,15 @@ let "aarch64-darwin" = "macos"; }; shas = { - "x86_64-linux" = "sha256-4PgvIPExU6FTaGB5gOSt/InJ9wftVas5OSlvtbW4Rm4="; - "aarch64-linux" = "sha256-7hrySIJREoVuqPQfLYxR0cqT82oocYgB1Gbi4Rbh3ns="; - "aarch64-darwin" = "sha256-BcliqrsJQrP2xjTMv+jqQziQTD7nQap4IAIp2R8ZVCM="; + "x86_64-linux" = "sha256-mVoFBQJJHGn5ZbwOtamshEQl9FzmRVEBye3bBXFUlUI="; + "aarch64-linux" = "sha256-TNik4+OdDqGwArw9wkrq4wNHt6tGgYo32V9KNPSsPWo="; + "aarch64-darwin" = "sha256-fjcmyVyPSHyHBICjpweuCnGtMAAlPNNzBMHEk+2emBA="; }; inherit (stdenv.hostPlatform) system; in stdenv.mkDerivation rec { pname = "quarto"; - version = "1.6.25"; + version = "1.6.32"; src = fetchurl { url = "https://github.com/quarto-dev/quarto-cli/releases/download/v${version}/quarto-${version}-${platforms.${system}}.tar.gz"; sha256 = shas.${system}; @@ -46,7 +45,6 @@ stdenv.mkDerivation rec { '' wrapProgram $out/bin/quarto \\ --prefix QUARTO_ESBUILD : ${lib.getExe esbuild} \\ - --prefix QUARTO_DENO : ${lib.getExe deno_1} \\ --prefix QUARTO_R : ${lib.getExe' rEnv "R"} \\ --prefix QUARTO_DART_SASS : ${lib.getExe dart-sass} ''; @@ -56,8 +54,6 @@ stdenv.mkDerivation rec { mkdir -p $out/bin $out/share - rm -r bin/tools/*/deno* - mv bin/* $out/bin mv share/* $out/share
chore: highlight exact place of failure in compiled functions
3fca9a696b2112d437a5544ab88e109aee2c3fb2
chore
https://github.com/mikro-orm/mikro-orm/commit/3fca9a696b2112d437a5544ab88e109aee2c3fb2
highlight exact place of failure in compiled functions
diff --git a/Utils.ts b/Utils.ts index 0a66125..1f4ad69 100644 --- a/Utils.ts +++ b/Utils.ts @@ -783,8 +783,23 @@ export class Utils { return fn(...args); } catch (e: any) { if ([SyntaxError, TypeError, EvalError, ReferenceError].some(t => e instanceof t)) { + const position = e.stack.match(/<anonymous>:(\\d+):(\\d+)/); + let code = fn.toString(); + + if (position) { + const lines = code.split('\\n').map((line, idx) => { + if (idx === +position[1] - 4) { + return '> ' + line; + } + + return ' ' + line; + }); + lines.splice(+position[1] - 3, 0, ' '.repeat(+position[2] - 4) + '^'); + code = lines.join('\\n'); + } + // eslint-disable-next-line no-console - console.error(`JIT runtime error: ${e.message}\\n\\n${fn.toString()}`); + console.error(`JIT runtime error: ${e.message}\\n\\n${code}`); } throw e;
fix(pandas): handle casting to arrays with None elements
382b90fb391b8c8bbf2ae487b8b4d163aefed6bc
fix
https://github.com/ibis-project/ibis/commit/382b90fb391b8c8bbf2ae487b8b4d163aefed6bc
handle casting to arrays with None elements
diff --git a/generic.py b/generic.py index 05b5ea3..ff50f95 100644 --- a/generic.py +++ b/generic.py @@ -129,7 +129,17 @@ def execute_cast_series_array(op, data, type, **kwargs): 'Array value type must be a primitive type ' '(e.g., number, string, or timestamp)' ) - return data.map(lambda array, numpy_type=numpy_type: array.astype(numpy_type)) + + def cast_to_array(array, numpy_type=numpy_type): + elems = [ + el if el is None else np.array(el, dtype=numpy_type).item() for el in array + ] + try: + return np.array(elems, dtype=numpy_type) + except TypeError: + return np.array(elems) + + return data.map(cast_to_array) @execute_node.register(ops.Cast, pd.Series, dt.Timestamp)
fix(exprs): ensure that left_semi and semi are equivalent
bbc1eb7ac2573aa0e762caae1cdc87137da2ac8b
fix
https://github.com/rohankumardubey/ibis/commit/bbc1eb7ac2573aa0e762caae1cdc87137da2ac8b
ensure that left_semi and semi are equivalent
diff --git a/test_join.py b/test_join.py index ef84dc4..e53c94d 100644 --- a/test_join.py +++ b/test_join.py @@ -164,9 +164,20 @@ def test_mutate_then_join_no_column_overlap(batting, awards_players): @pytest.mark.notimpl(["datafusion", "bigquery", "druid"]) @pytest.mark.notyet(["dask"], reason="dask doesn't support descending order by") -def test_semi_join_topk(batting, awards_players): [email protected]( + "func", + [ + param(lambda left, right: left.semi_join(right, "year"), id="method"), + param( + lambda left, right: left.join(right, "year", how="left_semi"), + id="how_left_semi", + ), + param(lambda left, right: left.join(right, "year", how="semi"), id="how_semi"), + ], +) +def test_semi_join_topk(batting, awards_players, func): batting = batting.mutate(year=batting.yearID) - left = batting.semi_join(batting.year.topk(5), "year").select("year", "RBI") + left = func(batting, batting.year.topk(5)).select("year", "RBI") expr = left.join(awards_players, left.year == awards_players.yearID) assert not expr.limit(5).execute().empty diff --git a/relations.py b/relations.py index 7ef98df..b0bd11a 100644 --- a/relations.py +++ b/relations.py @@ -2442,7 +2442,7 @@ class Table(Expr, _FixedTextJupyterMixin): # semi/anti join only give access to the left table's fields, so # there's never overlap - if how in ("semi", "anti"): + if how in ("left_semi", "semi", "anti"): return expr return ops.relations._dedup_join_columns(expr, lname=lname, rname=rname)
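For the `left_semi`/`semi` equivalence fix above, a minimal unexecuted sketch of the three spellings the new test exercises (the `batting` schema here is hypothetical, trimmed to the columns the example needs):

import ibis

# Hypothetical schema, just enough columns for the join predicate.
batting = ibis.table({"yearID": "int64", "RBI": "int64"}, name="batting")
top_years = batting.yearID.topk(5)

# After the fix, all three should build the same semi-join.
a = batting.semi_join(top_years, "yearID")
b = batting.join(top_years, "yearID", how="left_semi")
c = batting.join(top_years, "yearID", how="semi")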
fix: add support for 'any' unit when parsing `<count> <unit> ago`. As in Git, any unit is allowed and defaults to seconds, so `60 flurps ago` means a minute in the past.
34d2fce57e2836f758387b6cb54ee1f11bebd473
fix
https://github.com/Byron/gitoxide/commit/34d2fce57e2836f758387b6cb54ee1f11bebd473
add support for 'any' unit when parsing `<count> <unit> ago`. As in Git, any unit is allowed and defaults to seconds, so `60 flurps ago` means a minute in the past.
diff --git a/parse.rs b/parse.rs index 7bfb979..86d8782 100644 --- a/parse.rs +++ b/parse.rs @@ -184,6 +184,7 @@ mod relative { // For comparison, a few are the same as in: https://github.com/git/git/blob/master/t/t0006-date.sh let cases = [ ("5 seconds ago", 5.seconds()), + ("12345 florx ago", 12_345.seconds()), // Anything parses as seconds ("5 minutes ago", 5.minutes()), ("5 hours ago", 5.hours()), ("5 days ago", 5.days()),
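To illustrate the fallback described in the commit message above (this is not gitoxide's actual Rust implementation, just a minimal Python sketch of the behaviour): any unrecognized unit is treated as seconds, so `60 flurps ago` lands one minute in the past.

# Illustrative sketch only -- not gitoxide's parser. Unknown units fall back to seconds.
UNIT_SECONDS = {"second": 1, "minute": 60, "hour": 3600, "day": 86400, "week": 604800}

def relative_seconds(count: int, unit: str) -> int:
    # "60 flurps ago" -> 60 * 1 second = one minute in the past
    return count * UNIT_SECONDS.get(unit.rstrip("s"), 1)

assert relative_seconds(60, "flurps") == relative_seconds(1, "minute")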
chore: regenerate snapshots
d2216f7fcd134996caa09704418d0d2153520b52
chore
https://github.com/rohankumardubey/ibis/commit/d2216f7fcd134996caa09704418d0d2153520b52
regenerate snapshots
diff --git a/out.sql b/out.sql index a6ee161..66ffaa9 100644 --- a/out.sql +++ b/out.sql @@ -56,7 +56,7 @@ FROM ( ) AS t3 ) AS t4 GROUP BY - 1 + t4.field_of_study ) AS t5 ORDER BY t5.diff DESC @@ -120,7 +120,7 @@ FROM ( ) AS t3 ) AS t4 GROUP BY - 1 + t4.field_of_study ) AS t5 WHERE t5.diff < 0 diff --git a/out1.sql b/out1.sql index f7296b6..ab8d9d2 100644 --- a/out1.sql +++ b/out1.sql @@ -9,4 +9,4 @@ FROM t0 AS t0 WHERE t0.value = 42 GROUP BY - 1 \\ No newline at end of file + t0.key \\ No newline at end of file diff --git a/out2.sql b/out2.sql index c4a45f5..0812a98 100644 --- a/out2.sql +++ b/out2.sql @@ -9,4 +9,4 @@ FROM t0 AS t0 WHERE t0.value = 42 GROUP BY - 1 \\ No newline at end of file + t0.key \\ No newline at end of file
build: working on wordpress plugin
5a8ec8c3e7b55eb922f0e0be70a6129334de8ae4
build
https://github.com/tsparticles/tsparticles/commit/5a8ec8c3e7b55eb922f0e0be70a6129334de8ae4
working on wordpress plugin
diff --git a/edit.js b/edit.js index 0ebf399..50fa464 100644 --- a/edit.js +++ b/edit.js @@ -50,5 +50,7 @@ export default function Edit() { }); }, 500); - return <div id={"tsparticles"} style={{ height: "500px"}} {...useBlockProps()}></div>; + return <p {...useBlockProps()}> + <div id={"tsparticles"} style={{ height: "500px" }}></div> + </p>; }
ci(rebase): get all history for rebasing nightly
796e38e1b3d43973b3dc5ad616019b9d3ff63178
ci
https://github.com/rohankumardubey/ibis/commit/796e38e1b3d43973b3dc5ad616019b9d3ff63178
get all history for rebasing nightly
diff --git a/ibis-rebase-nightly.yml b/ibis-rebase-nightly.yml index 5799134..2a8e197 100644 --- a/ibis-rebase-nightly.yml +++ b/ibis-rebase-nightly.yml @@ -37,6 +37,7 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ matrix.branch }} + fetch-depth: 0 - name: fetch and rebase on top of upstream run: git pull --rebase https://github.com/ibis-project/ibis master
docs: add postgres specific operators to the list of supported ones
8376bc25d403c329373447352b22929f7f42b138
docs
https://github.com/mikro-orm/mikro-orm/commit/8376bc25d403c329373447352b22929f7f42b138
add postgres specific operators to the list of supported ones
diff --git a/query-conditions.md b/query-conditions.md index 88dde46..91a38bc 100644 --- a/query-conditions.md +++ b/query-conditions.md @@ -94,6 +94,10 @@ const res = await orm.em.find(Author, [1, 2, 7]); | `$nin` | not contains | Matches none of the values specified in an array. | | `$like` | like | Uses LIKE operator | | `$re` | regexp | Uses REGEXP operator | +| `$ilike` | ilike | (postgres only) | +| `$overlap` | && | (postgres only) | +| `$contains` | @> | (postgres only) | +| `$contained` | <@ | (postgres only) | ### Logical
docs: bring back versioning policy doc
9dc896694fec270bb679032979262e7660c799bd
docs
https://github.com/rohankumardubey/ibis/commit/9dc896694fec270bb679032979262e7660c799bd
bring back versioning policy doc
diff --git a/versioning.qmd b/versioning.qmd index 7ac2291..7822c17 100644 --- a/versioning.qmd +++ b/versioning.qmd @@ -0,0 +1,29 @@ +# Versioning Policy + +Ibis follows a [Semantic Versioning](https://semver.org/) scheme +(`MAJOR.MINOR.PATCH`, like `6.1.0`). + +- An increase in the `MAJOR` version number will happen when a release contains + breaking changes in the public API. This includes anything documented in the + [reference documentation](../reference), excluding any + features explicitly marked as "experimental". Features not part of the public + API (e.g. anything in `ibis.expr.operations` may make breaking changes at any + time). + +- An increase in the `MINOR` or `PATCH` version number indicate changes to + public APIs that should remain compatible with previous Ibis versions with + the same `MAJOR` version number. + +## Supported Python Versions + +Ibis follows [NEP29](https://numpy.org/neps/nep-0029-deprecation_policy.html) +with respect to supported Python versions. + +This has been in-place [since Ibis version 3.0.0](https://github.com/ibis-project/ibis/blob/5015677d78909473014a61725d371b4bf772cdff/docs/blog/Ibis-version-3.0.0-release.md?plain=1#L83). + +The [support +table](https://numpy.org/neps/nep-0029-deprecation_policy.html#support-table) +shows the schedule for dropping support for Python versions. + +The next major release of Ibis that occurs on or after the NEP29 drop date +removes support for the specified Python version.
build: preparing fireworks bundle
364a125d41b27d907a8f559ed34e377a1059789c
build
https://github.com/tsparticles/tsparticles/commit/364a125d41b27d907a8f559ed34e377a1059789c
preparing fireworks bundle
diff --git a/package.dist.json b/package.dist.json index f706fc0..56a025c 100644 --- a/package.dist.json +++ b/package.dist.json @@ -77,14 +77,6 @@ "types": "types/index.d.ts", "dependencies": { "tsparticles-engine": "^2.8.0", - "tsparticles-interaction-external-trail": "^2.8.0", - "tsparticles-plugin-absorbers": "^2.8.0", - "tsparticles-plugin-emitters": "^2.8.0", - "tsparticles-slim": "^2.8.0", - "tsparticles-updater-destroy": "^2.8.0", - "tsparticles-updater-roll": "^2.8.0", - "tsparticles-updater-tilt": "^2.8.0", - "tsparticles-updater-twinkle": "^2.8.0", - "tsparticles-updater-wobble": "^2.8.0" + "tsparticles-preset-fireworks": "^2.8.0" } } \\ No newline at end of file diff --git a/package.json b/package.json index da789a1..e615223 100644 --- a/package.json +++ b/package.json @@ -111,15 +111,7 @@ }, "dependencies": { "tsparticles-engine": "^2.8.0", - "tsparticles-interaction-external-trail": "^2.8.0", - "tsparticles-plugin-absorbers": "^2.8.0", - "tsparticles-plugin-emitters": "^2.8.0", - "tsparticles-slim": "^2.8.0", - "tsparticles-updater-destroy": "^2.8.0", - "tsparticles-updater-roll": "^2.8.0", - "tsparticles-updater-tilt": "^2.8.0", - "tsparticles-updater-twinkle": "^2.8.0", - "tsparticles-updater-wobble": "^2.8.0" + "tsparticles-preset-fireworks": "^2.8.0" }, "publishConfig": { "directory": "dist", diff --git a/bundle.ts b/bundle.ts index ce91fc0..c6fea5d 100644 --- a/bundle.ts +++ b/bundle.ts @@ -1 +1,2 @@ export * from "."; +export * from "tsparticles-engine"; diff --git a/fireworks.ts b/fireworks.ts index ae98bbd..acf65fc 100644 --- a/fireworks.ts +++ b/fireworks.ts @@ -0,0 +1,62 @@ +import type { Container } from "tsparticles-engine"; +import { loadFireworksPreset } from "tsparticles-preset-fireworks"; +import { tsParticles } from "tsparticles-engine"; + +let initialized = false; +let initializing = false; + +class FireworksInstance { + private readonly _container: Container; + + constructor(container: Container) { + this._container = container; + } + + pause(): void { + this._container.pause(); + } + + play(): void { + this._container.play(); + } + + stop(): void { + this._container.stop(); + } +} + +async function initPlugins(): Promise<void> { + if (initialized) { + return; + } + + if (initializing) { + return new Promise<void>((resolve) => { + const interval = setInterval(() => { + if (initialized) { + clearInterval(interval); + resolve(); + } + }, 100); + }); + } + + initializing = true; + + await loadFireworksPreset(tsParticles); + + initializing = false; + initialized = true; +} + +export async function fireworks(): Promise<FireworksInstance | undefined> { + await initPlugins(); + + const container = await tsParticles.load({ preset: "fireworks" }); + + if (!container) { + return; + } + + return new FireworksInstance(container); +} diff --git a/index.ts b/index.ts index e738f18..f34fab2 100644 --- a/index.ts +++ b/index.ts @@ -1,3 +1 @@ -import { tsParticles } from "tsparticles-engine"; - -tsParticles.load({}); +export * from "./fireworks"; diff --git a/fireworks.js b/fireworks.js index ecc2b72..3ae21b0 100644 --- a/fireworks.js +++ b/fireworks.js @@ -0,0 +1,7 @@ +(async () => { + const f = await fireworks(); + + setTimeout(() => { + f.stop(); + }, 5000); +})(); diff --git a/fireworks.pug b/fireworks.pug index 292be33..a918c22 100644 --- a/fireworks.pug +++ b/fireworks.pug @@ -29,5 +29,4 @@ html(lang="en") script(src="/jsoneditor/jsoneditor.js") script(src="/lodash/lodash.min.js") script(src="/tsparticles-fireworks/tsparticles.fireworks.bundle.js") - script. 
- setInterval(fireworks, 200); + script(src="/javascripts/fireworks.js") diff --git a/Canvas.ts b/Canvas.ts index fd372ed..76c3c0c 100644 --- a/Canvas.ts +++ b/Canvas.ts @@ -125,9 +125,7 @@ export class Canvas { this._resetOriginalStyle(); } - this.draw((ctx) => { - clear(ctx, this.size); - }); + this.stop(); this._preDrawUpdaters = []; this._postDrawUpdaters = []; @@ -402,6 +400,12 @@ export class Canvas { } } + stop(): void { + this.draw((ctx) => { + clear(ctx, this.size); + }); + } + /** * The window resize event handler */ diff --git a/Container.ts b/Container.ts index 4a727fa..0b7d407 100644 --- a/Container.ts +++ b/Container.ts @@ -760,7 +760,7 @@ export class Container { this._eventListeners.removeListeners(); this.pause(); this.particles.clear(); - this.canvas.clear(); + this.canvas.stop(); if (this.interactivity.element instanceof HTMLElement && this._intersectionObserver) { this._intersectionObserver.unobserve(this.interactivity.element); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 49b96ce..aa2ffb8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -141,15 +141,7 @@ importers: rimraf: 4.1.1 terser-webpack-plugin: ^5.3.6 tsparticles-engine: ^2.8.0 - tsparticles-interaction-external-trail: ^2.8.0 - tsparticles-plugin-absorbers: ^2.8.0 - tsparticles-plugin-emitters: ^2.8.0 - tsparticles-slim: ^2.8.0 - tsparticles-updater-destroy: ^2.8.0 - tsparticles-updater-roll: ^2.8.0 - tsparticles-updater-tilt: ^2.8.0 - tsparticles-updater-twinkle: ^2.8.0 - tsparticles-updater-wobble: ^2.8.0 + tsparticles-preset-fireworks: ^2.8.0 typescript: ^4.9.4 webpack: ^5.75.0 webpack-bundle-analyzer: ^4.7.0 @@ -157,15 +149,7 @@ importers: webpack-tsparticles-plugin: ^1.8.1 dependencies: tsparticles-engine: link:../../engine/dist - tsparticles-interaction-external-trail: link:../../interactions/external/trail/dist - tsparticles-plugin-absorbers: link:../../plugins/absorbers/dist - tsparticles-plugin-emitters: link:../../plugins/emitters/dist - tsparticles-slim: link:../slim/dist - tsparticles-updater-destroy: link:../../updaters/destroy/dist - tsparticles-updater-roll: link:../../updaters/roll/dist - tsparticles-updater-tilt: link:../../updaters/tilt/dist - tsparticles-updater-twinkle: link:../../updaters/twinkle/dist - tsparticles-updater-wobble: link:../../updaters/wobble/dist + tsparticles-preset-fireworks: link:../../presets/fireworks/dist devDependencies: '@babel/core': 7.20.7 '@tsparticles/build': [email protected]
chore: add `r` ranges documentation
25b692014c3e75e56e33675fff454719059211a4
chore
https://github.com/rohankumardubey/ibis/commit/25b692014c3e75e56e33675fff454719059211a4
add `r` ranges documentation
diff --git a/selectors.py b/selectors.py index 107ad31..820f3b2 100644 --- a/selectors.py +++ b/selectors.py @@ -668,6 +668,7 @@ class Sliceable(Singleton): r = Sliceable() +"""Ranges of columns.""" @public
fix(ux): make top level set operations work
f5976b2b0f53b8c9c2a0d671ece5ca7945d90d70
fix
https://github.com/rohankumardubey/ibis/commit/f5976b2b0f53b8c9c2a0d671ece5ca7945d90d70
make top level set operations work
diff --git a/select_builder.py b/select_builder.py index efcaa2a..0490cac 100644 --- a/select_builder.py +++ b/select_builder.py @@ -279,15 +279,18 @@ class SelectBuilder: def _collect_Union(self, op, toplevel=False): if toplevel: - raise NotImplementedError() + self.table_set = op + self.select_set = [op] def _collect_Difference(self, op, toplevel=False): if toplevel: - raise NotImplementedError() + self.table_set = op + self.select_set = [op] def _collect_Intersection(self, op, toplevel=False): if toplevel: - raise NotImplementedError() + self.table_set = op + self.select_set = [op] def _collect_Aggregation(self, op, toplevel=False): # The select set includes the grouping keys (if any), and these are diff --git a/test_set_ops.py b/test_set_ops.py index bf24fa6..384e2d3 100644 --- a/test_set_ops.py +++ b/test_set_ops.py @@ -139,6 +139,45 @@ def test_difference(backend, alltypes, df, distinct): @pytest.mark.parametrize("method", ["intersect", "difference", "union"]) -def test_empty_set_op(backend, alltypes, method): +def test_empty_set_op(alltypes, method): with pytest.raises(com.IbisTypeError, match="requires a table or tables"): getattr(alltypes, method)() + + [email protected]("distinct", [True, False]) [email protected](["dask", "pandas"], raises=com.UnboundExpressionError) [email protected](["datafusion", "polars"], raises=com.OperationNotDefinedError) +def test_top_level_union(backend, con, distinct): + t1 = ibis.memtable(dict(a=[1]), name="t1") + t2 = ibis.memtable(dict(a=[2]), name="t2") + expr = t1.union(t2, distinct=distinct).limit(2) + result = con.execute(expr) + expected = pd.DataFrame({"a": [1, 2]}) + backend.assert_frame_equal(result.sort_values("a").reset_index(drop=True), expected) + + [email protected]( + "distinct", + [ + True, + param( + False, + marks=pytest.mark.notimpl(["bigquery", "mssql", "snowflake", "sqlite"]), + ), + ], +) [email protected]( + ("opname", "expected"), + [("intersect", pd.DataFrame({"a": [2]})), ("difference", pd.DataFrame({"a": [1]}))], + ids=["intersect", "difference"], +) [email protected](["dask", "pandas"], raises=com.UnboundExpressionError) [email protected](["datafusion", "polars"], raises=com.OperationNotDefinedError) [email protected](["impala"], reason="doesn't support intersection or difference") +def test_top_level_intersect_difference(backend, con, distinct, opname, expected): + t1 = ibis.memtable(dict(a=[1, 2]), name="t1") + t2 = ibis.memtable(dict(a=[2, 3]), name="t2") + op = getattr(t1, opname) + expr = op(t2, distinct=distinct).limit(2) + result = con.execute(expr) + backend.assert_frame_equal(result, expected)
fix: don't panic during checkouts when submodules or sparse directories are encountered. Now we trace instead.
82ae37d70dc244cdf705d20c617d4b0e6bf3cdbf
fix
https://github.com/Byron/gitoxide/commit/82ae37d70dc244cdf705d20c617d4b0e6bf3cdbf
don't panic during checkouts when submodules or sparse directories are encountered. Now we trace instead.
diff --git a/entry.rs b/entry.rs index 8cb5ca3..fc2514a 100644 --- a/entry.rs +++ b/entry.rs @@ -164,8 +164,20 @@ where entry.stat = Stat::from_fs(&std::fs::symlink_metadata(dest)?)?; obj.data.len() } - gix_index::entry::Mode::DIR => todo!(), - gix_index::entry::Mode::COMMIT => 0, + gix_index::entry::Mode::DIR => { + gix_features::trace::warn!( + "Skipped sparse directory at '{entry_path}' ({id}) as it cannot yet be handled", + id = entry.id + ); + 0 + } + gix_index::entry::Mode::COMMIT => { + gix_features::trace::warn!( + "Skipped submodule at '{entry_path}' ({id}) as it cannot yet be handled", + id = entry.id + ); + 0 + } _ => unreachable!(), }; Ok(Outcome::Written { bytes: object_size })
ci: include only a single macos nix job
cceffebb85c36baab6069c06a6b5b9e3b4edf8c4
ci
https://github.com/rohankumardubey/ibis/commit/cceffebb85c36baab6069c06a6b5b9e3b4edf8c4
include only a single macos nix job
diff --git a/nix-skip-helper.yml b/nix-skip-helper.yml index fa4c65a..79e413c 100644 --- a/nix-skip-helper.yml +++ b/nix-skip-helper.yml @@ -30,11 +30,13 @@ jobs: matrix: os: - ubuntu-latest - - macos-latest python-version: - "3.8" - "3.9" - "3.10" - "3.11" + include: + - os: macos-latest + python-version: "3.10" steps: - run: echo "No build required" diff --git a/nix.yml b/nix.yml index d654728..7105630 100644 --- a/nix.yml +++ b/nix.yml @@ -31,12 +31,14 @@ jobs: matrix: os: - ubuntu-latest - - macos-latest python-version: - "3.8" - "3.9" - "3.10" - "3.11" + include: + - os: macos-latest + python-version: "3.10" steps: - name: checkout uses: actions/checkout@v3
feat: add `format`, `help!`
868609f95ae449e64e6318de7a07081162a0fedb
feat
https://github.com/erg-lang/erg/commit/868609f95ae449e64e6318de7a07081162a0fedb
add `format`, `help!`
diff --git a/classes.rs b/classes.rs index 2b98270..74c14ae 100644 --- a/classes.rs +++ b/classes.rs @@ -778,7 +778,14 @@ impl Context { ); str_.register_builtin_erg_impl( FUNC_FORMAT, - fn_met(Str, vec![], Some(kw(KW_ARGS, Obj)), vec![], None, Str), + fn_met( + Str, + vec![], + Some(kw(KW_ARGS, Obj)), + vec![], + Some(kw(KW_KWARGS, Obj)), + Str, + ), Immutable, Visibility::BUILTIN_PUBLIC, ); diff --git a/funcs.rs b/funcs.rs index df5d7d5..3fa380f 100644 --- a/funcs.rs +++ b/funcs.rs @@ -104,6 +104,7 @@ impl Context { poly(FILTER, vec![ty_tp(T.clone())]), ) .quantify(); + let t_format = no_var_func(vec![kw(KW_VALUE, Obj)], vec![kw(KW_SPEC, Str)], Str); let t_frozenset = nd_func( vec![kw(KW_ITERABLE, poly(ITERABLE, vec![ty_tp(T.clone())]))], None, @@ -339,6 +340,13 @@ impl Context { Some(FUNC_ENUMERATE), ); self.register_builtin_py_impl(FUNC_EXIT, t_exit, Immutable, vis.clone(), Some(FUNC_EXIT)); + self.register_builtin_py_impl( + FUNC_FORMAT, + t_format, + Immutable, + vis.clone(), + Some(FUNC_FORMAT), + ); self.register_builtin_py_impl( FUNC_FILTER, t_filter, diff --git a/mod.rs b/mod.rs index 44999ec..e9e417c 100644 --- a/mod.rs +++ b/mod.rs @@ -525,6 +525,7 @@ const KW_INTO: &str = "into"; const KW_ENCODING: &str = "encoding"; const KW_ERRORS: &str = "errors"; const KW_ARGS: &str = "args"; +const KW_KWARGS: &str = "kwargs"; const KW_IDX: &str = "idx"; const KW_LHS: &str = "lhs"; const KW_RHS: &str = "rhs"; @@ -539,6 +540,7 @@ const KW_OBJECT: &str = "object"; const KW_OBJECTS: &str = "objects"; const KW_TEST: &str = "test"; const KW_MSG: &str = "msg"; +const KW_SPEC: &str = "spec"; const KW_STR: &str = "str"; const KW_I: &str = "i"; const KW_SRC: &str = "src"; diff --git a/procs.rs b/procs.rs index 9efa7b4..76177e2 100644 --- a/procs.rs +++ b/procs.rs @@ -21,8 +21,8 @@ impl Context { let T = mono_q("T", instanceof(Type)); let U = mono_q("U", instanceof(Type)); let t_dir = no_var_proc( - vec![kw("obj", ref_(Obj))], vec![], + vec![kw("object", ref_(Obj))], array_t(Str, TyParam::erased(Nat)), ); let t_print = proc( @@ -62,6 +62,7 @@ impl Context { ) .quantify(); let t_globals = no_var_proc(vec![], vec![], dict! { Str => Obj }.into()); + let t_help = nd_proc(vec![kw("object", ref_(Obj))], None, NoneType); let t_locals = no_var_proc(vec![], vec![], dict! { Str => Obj }.into()); let t_next = nd_proc( vec![kw( @@ -126,6 +127,7 @@ impl Context { vis.clone(), Some("globals"), ); + self.register_builtin_py_impl("help!", t_help, Immutable, vis.clone(), Some("help")); self.register_builtin_py_impl("locals!", t_locals, Immutable, vis.clone(), Some("locals")); self.register_builtin_py_impl("next!", t_next, Immutable, vis.clone(), Some("next")); self.register_py_builtin("open!", t_open, Some("open"), 198);
chore(release): v5.7.4 [skip ci]
fab6bf6c28fcfecb8e6ab6bb6d4339618cc1cd41
chore
https://github.com/mikro-orm/mikro-orm/commit/fab6bf6c28fcfecb8e6ab6bb6d4339618cc1cd41
v5.7.4 [skip ci]
diff --git a/CHANGELOG.md b/CHANGELOG.md index 65a8309..a0c1335 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [5.7.4](https://github.com/mikro-orm/mikro-orm/compare/v5.7.3...v5.7.4) (2023-05-01) + +**Note:** Version bump only for package @mikro-orm/sqlite + + + + + ## [5.7.3](https://github.com/mikro-orm/mikro-orm/compare/v5.7.2...v5.7.3) (2023-04-28) **Note:** Version bump only for package @mikro-orm/sqlite diff --git a/lerna.json b/lerna.json index 7f3d0f4..7fd3881 100644 --- a/lerna.json +++ b/lerna.json @@ -2,7 +2,7 @@ "packages": [ "packages/*" ], - "version": "5.7.3", + "version": "5.7.4", "command": { "version": { "conventionalCommits": true, diff --git a/package.json b/package.json index c60c6b9..d3ebe13 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@mikro-orm/sqlite", - "version": "5.7.3", + "version": "5.7.4", "description": "TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.", "main": "dist/index.js", "module": "dist/index.mjs", @@ -58,13 +58,13 @@ "access": "public" }, "dependencies": { - "@mikro-orm/knex": "~5.7.3", + "@mikro-orm/knex": "^5.7.4", "fs-extra": "11.1.1", "sqlite3": "5.1.6", "sqlstring-sqlite": "0.1.1" }, "devDependencies": { - "@mikro-orm/core": "^5.7.3" + "@mikro-orm/core": "^5.7.4" }, "peerDependencies": { "@mikro-orm/core": "^5.0.0", diff --git a/yarn.lock b/yarn.lock index caaf396..6225cf6 100644 --- a/yarn.lock +++ b/yarn.lock Binary files a/yarn.lock and b/yarn.lock differ
fix(datatypes): ensure that array construction supports literals and infers their shape from its inputs (#8049) We were previously returning `ArrayColumn` from `ibis.array` when any inputs were expressions regardless of their shape. This PR renames `ArrayColumn` -> `Array` and uses the input arguments shapes to determine the output array shape. Fixes #8022. --------- Co-authored-by: Nick Crews <[email protected]>
899dce1b92d08987b3cfbcb29b31729f0328df5f
fix
https://github.com/ibis-project/ibis/commit/899dce1b92d08987b3cfbcb29b31729f0328df5f
ensure that array construction supports literals and infers their shape from its inputs (#8049) We were previously returning `ArrayColumn` from `ibis.array` when any inputs were expressions regardless of their shape. This PR renames `ArrayColumn` -> `Array` and uses the input arguments shapes to determine the output array shape. Fixes #8022. --------- Co-authored-by: Nick Crews <[email protected]>
diff --git a/registry.py b/registry.py index 80651e0..73268b9 100644 --- a/registry.py +++ b/registry.py @@ -43,7 +43,7 @@ class make_array(FunctionElement): pass -@compiles(make_array, "trino") +@compiles(make_array, "default") def compile_make_array(element, compiler, **kw): return f"ARRAY[{compiler.process(element.clauses, **kw)}]" @@ -117,7 +117,7 @@ def _group_concat(t, op): def _array_column(t, op): args = ", ".join( str(t.translate(arg).compile(compile_kwargs={"literal_binds": True})) - for arg in op.cols + for arg in op.exprs ) return sa.literal_column(f"ARRAY[{args}]", type_=t.get_sqla_type(op.dtype)) @@ -431,7 +431,7 @@ operation_registry.update( ops.ArrayIndex: fixed_arity( lambda arg, index: sa.func.element_at(arg, index + 1), 2 ), - ops.ArrayColumn: _array_column, + ops.Array: _array_column, ops.ArrayRepeat: fixed_arity( lambda arg, times: sa.func.flatten(sa.func.repeat(arg, times)), 2 ), diff --git a/values.py b/values.py index 0d588ed..ffe39fa 100644 --- a/values.py +++ b/values.py @@ -733,9 +733,9 @@ def _not_null(op, *, arg, **_): return sg.not_(arg.is_(NULL)) -@translate_val.register(ops.ArrayColumn) -def array_column(op, *, cols, **_): - return F.make_array(*cols) +@translate_val.register(ops.Array) +def array_column(op, *, exprs, **_): + return F.make_array(*exprs) @translate_val.register(ops.ArrayRepeat) diff --git a/arrays.py b/arrays.py index c1ac0a4..f978c38 100644 --- a/arrays.py +++ b/arrays.py @@ -1017,82 +1017,50 @@ class ArrayColumn(Column, ArrayValue): @public @deferrable -def array(values: Iterable[V], type: str | dt.DataType | None = None) -> ArrayValue: +def array(values: Iterable[V]) -> ArrayValue: """Create an array expression. - If the input expressions are all column expressions, then the output will - be an `ArrayColumn`. The input columns will be concatenated row-wise to - produce each array in the output array column. Each array will have length - _n_, where _n_ is the number of input columns. All input columns should be - of the same datatype. - - If the input expressions are Python literals, then the output will be a - single `ArrayScalar` of length _n_, where _n_ is the number of input - values. This is equivalent to - - ```python - values = [1, 2, 3] - ibis.literal(values) - ``` - Parameters ---------- values An iterable of Ibis expressions or a list of Python literals - type - An instance of `ibis.expr.datatypes.DataType` or a string indicating - the ibis type of `value`. Returns ------- ArrayValue - An array column (if the inputs are column expressions), or an array - scalar (if the inputs are Python literals) Examples -------- - Create an array column from column expressions + Create an array from scalar values >>> import ibis >>> ibis.options.interactive = True + >>> ibis.array([1.0, None]) + [1.0, None] + + Create an array from column and scalar expressions + >>> t = ibis.memtable({"a": [1, 2, 3], "b": [4, 5, 6]}) - >>> ibis.array([t.a, t.b]) + >>> ibis.array([t.a, 42, ibis.literal(None)]) ┏━━━━━━━━━━━━━━━━━━━━━━┓ - ┃ ArrayColumn() ┃ + ┃ Array() ┃ ┡━━━━━━━━━━━━━━━━━━━━━━┩ │ array<int64> │ ├──────────────────────┤ - │ [1, 4] │ - │ [2, 5] │ - │ [3, 6] │ + │ [1, 42, ... +1] │ + │ [2, 42, ... +1] │ + │ [3, 42, ... +1] │ └──────────────────────┘ - Create an array scalar from Python literals - - >>> ibis.array([1.0, 2.0, 3.0]) - [1.0, 2.0, ... 
+1] - - Mixing scalar and column expressions is allowed - - >>> ibis.array([t.a, 42]) + >>> ibis.array([t.a, 42 + ibis.literal(5)]) ┏━━━━━━━━━━━━━━━━━━━━━━┓ - ┃ ArrayColumn() ┃ + ┃ Array() ┃ ┡━━━━━━━━━━━━━━━━━━━━━━┩ │ array<int64> │ ├──────────────────────┤ - │ [1, 42] │ - │ [2, 42] │ - │ [3, 42] │ + │ [1, 47] │ + │ [2, 47] │ + │ [3, 47] │ └──────────────────────┘ """ - if any(isinstance(value, Value) for value in values): - return ops.ArrayColumn(values).to_expr() - else: - try: - return literal(list(values), type=type) - except com.IbisTypeError as e: - raise com.IbisTypeError( - "Could not create an array scalar from the values provided " - "to `array`. Ensure that all input values have the same " - "Python type, or can be casted to a single Python type." - ) from e + return ops.Array(tuple(values)).to_expr() diff --git a/generic.py b/generic.py index efd2d8f..d6394c4 100644 --- a/generic.py +++ b/generic.py @@ -145,6 +145,28 @@ def execute_cast_series_array(op, data, type, **kwargs): return data.map(cast_to_array) +@execute_node.register(ops.Cast, list, dt.Array) +def execute_cast_list_array(op, data, type, **kwargs): + value_type = type.value_type + numpy_type = constants.IBIS_TYPE_TO_PANDAS_TYPE.get(value_type, None) + if numpy_type is None: + raise ValueError( + "Array value type must be a primitive type " + "(e.g., number, string, or timestamp)" + ) + + def cast_to_array(array, numpy_type=numpy_type): + elems = [ + el if el is None else np.array(el, dtype=numpy_type).item() for el in array + ] + try: + return np.array(elems, dtype=numpy_type) + except TypeError: + return np.array(elems) + + return cast_to_array(data) + + @execute_node.register(ops.Cast, pd.Series, dt.Timestamp) def execute_cast_series_timestamp(op, data, type, **kwargs): arg = op.arg diff --git a/compiler.py b/compiler.py index 3a9cbc0..eed19f7 100644 --- a/compiler.py +++ b/compiler.py @@ -1634,9 +1634,9 @@ def compile_interval_from_integer(t, op, **kwargs): # -------------------------- Array Operations ---------------------------- -@compiles(ops.ArrayColumn) +@compiles(ops.Array) def compile_array_column(t, op, **kwargs): - cols = [t.translate(col, **kwargs) for col in op.cols] + cols = [t.translate(col, **kwargs) for col in op.exprs] return F.array(cols) diff --git a/test_array.py b/test_array.py index 951d9a3..63f4051 100644 --- a/test_array.py +++ b/test_array.py @@ -1,6 +1,5 @@ from __future__ import annotations -import contextlib import functools from datetime import datetime @@ -15,13 +14,13 @@ from pytest import param import ibis import ibis.common.exceptions as com +import ibis.expr.datashape as ds import ibis.expr.datatypes as dt import ibis.expr.types as ir from ibis.backends.tests.errors import ( ClickHouseDatabaseError, GoogleBadRequest, PolarsComputeError, - Py4JJavaError, PySparkAnalysisException, ) @@ -42,29 +41,21 @@ pytestmark = [ @pytest.mark.notimpl(["flink"], raises=com.OperationNotDefinedError) def test_array_column(backend, alltypes, df): - expr = ibis.array([alltypes["double_col"], alltypes["double_col"]]) + expr = ibis.array( + [alltypes["double_col"], alltypes["double_col"], 5.0, ibis.literal(6.0)] + ) assert isinstance(expr, ir.ArrayColumn) result = expr.execute() expected = df.apply( - lambda row: [row["double_col"], row["double_col"]], + lambda row: [row["double_col"], row["double_col"], 5.0, 6.0], axis=1, ) backend.assert_series_equal(result, expected, check_names=False) -ARRAY_BACKEND_TYPES = { - "clickhouse": "Array(Float64)", - "snowflake": "ARRAY", - "trino": "array(double)", - 
"bigquery": "ARRAY", - "duckdb": "DOUBLE[]", - "postgres": "numeric[]", - "flink": "ARRAY<DECIMAL(2, 1) NOT NULL> NOT NULL", -} - - -def test_array_scalar(con, backend): [email protected](["flink"], raises=com.OperationNotDefinedError) +def test_array_scalar(con): expr = ibis.array([1.0, 2.0, 3.0]) assert isinstance(expr, ir.ArrayScalar) @@ -73,10 +64,6 @@ def test_array_scalar(con, backend): assert np.array_equal(result, expected) - with contextlib.suppress(com.OperationNotDefinedError): - backend_name = backend.name() - assert con.execute(expr.typeof()) == ARRAY_BACKEND_TYPES[backend_name] - @pytest.mark.notimpl(["polars", "flink"], raises=com.OperationNotDefinedError) def test_array_repeat(con): @@ -327,7 +314,7 @@ def test_unnest_default_name(backend): array_types = backend.array_types df = array_types.execute() expr = ( - array_types.x.cast("!array<int64>") + ibis.array([1], type="!array<int64>") + array_types.x.cast("!array<int64>") + ibis.array([1]).cast("!array<int64>") ).unnest() assert expr.get_name().startswith("ArrayConcat(") @@ -1050,15 +1037,16 @@ def test_timestamp_range_zero_step(con, start, stop, step, tzinfo): assert list(result) == [] [email protected](["flink"], raises=Py4JJavaError) [email protected](["datafusion"], raises=Exception) [email protected]( + ["flink"], raises=AssertionError, reason="arrays not yet implemented" +) def test_repr_timestamp_array(con, monkeypatch): monkeypatch.setattr(ibis.options, "interactive", True) monkeypatch.setattr(ibis.options, "default_backend", con) assert ibis.options.interactive is True assert ibis.options.default_backend is con expr = ibis.array(pd.date_range("2010-01-01", "2010-01-03", freq="D").tolist()) - assert repr(expr) + assert "No translation rule" not in repr(expr) @pytest.mark.notyet( @@ -1070,3 +1058,34 @@ def test_unnest_range(con): result = con.execute(expr) expected = pd.DataFrame({"x": np.array([0, 1], dtype="int8"), "y": [1.0, 1.0]}) tm.assert_frame_equal(result, expected) + + [email protected](["flink"], raises=com.OperationNotDefinedError) [email protected]( + ("input", "expected"), + [ + param([1, ibis.literal(2)], [1, 2], id="int-int"), + param([1.0, ibis.literal(2)], [1.0, 2.0], id="float-int"), + param([1.0, ibis.literal(2.0)], [1.0, 2.0], id="float-float"), + param([1, ibis.literal(2.0)], [1.0, 2.0], id="int-float"), + param([ibis.literal(1), ibis.literal(2.0)], [1.0, 2.0], id="int-float-exprs"), + param( + [[1], ibis.literal([2])], + [[1], [2]], + id="array", + marks=[ + pytest.mark.notyet(["bigquery"], raises=GoogleBadRequest), + pytest.mark.broken( + ["polars"], + reason="expression input not supported with nested arrays", + raises=TypeError, + ), + ], + ), + ], +) +def test_array_literal_with_exprs(con, input, expected): + expr = ibis.array(input) + assert expr.op().shape == ds.scalar + result = list(con.execute(expr)) + assert result == expected diff --git a/test_map.py b/test_map.py index 8d42463..195ba16 100644 --- a/test_map.py +++ b/test_map.py @@ -236,7 +236,7 @@ def test_map_construct_dict(con, keys, values): @pytest.mark.notimpl( ["flink"], raises=exc.OperationNotDefinedError, - reason="No translation rule for <class 'ibis.expr.operations.arrays.ArrayColumn'>", + reason="No translation rule for <class 'ibis.expr.operations.arrays.Array'>", ) def test_map_construct_array_column(con, alltypes, df): expr = ibis.map(ibis.array([alltypes.string_col]), ibis.array([alltypes.int_col])) diff --git a/test_param.py b/test_param.py index f99e3e2..d72f7a7 100644 --- a/test_param.py +++ b/test_param.py @@ 
-60,9 +60,7 @@ def test_timestamp_accepts_date_literals(alltypes): assert expr.compile(params=params) is not None [email protected]( - ["dask", "impala", "pandas", "pyspark", "druid", "oracle", "exasol"] -) [email protected](["impala", "pyspark", "druid", "oracle", "exasol"]) @pytest.mark.never( ["mysql", "sqlite", "mssql"], reason="backend will never implement array types" ) diff --git a/test_sql.py b/test_sql.py index e46a7a6..a2fd241 100644 --- a/test_sql.py +++ b/test_sql.py @@ -18,15 +18,10 @@ array_literal = param( ibis.array([1]), marks=[ pytest.mark.never( - ["mysql", "mssql", "oracle"], - raises=sa.exc.CompileError, + ["mysql", "mssql", "oracle", "impala", "sqlite"], + raises=exc.OperationNotDefinedError, reason="arrays not supported in the backend", ), - pytest.mark.notyet( - ["impala", "sqlite"], - raises=NotImplementedError, - reason="backends hasn't implemented array literals", - ), ], id="array_literal", ) diff --git a/test_format.py b/test_format.py index 0f21513..2d89606 100644 --- a/test_format.py +++ b/test_format.py @@ -382,7 +382,7 @@ def test_format_literal(literal, typ, output): def test_format_dummy_table(snapshot): - t = ops.DummyTable([ibis.array([1], type="array<int8>").name("foo")]).to_expr() + t = ops.DummyTable([ibis.array([1]).cast("array<int8>").name("foo")]).to_expr() result = fmt(t) assert "DummyTable" in result
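As a quick illustration of the shape inference described in the commit message above (a sketch assuming an ibis version that includes this change; it mirrors the `ds.scalar` check added in the tests):

import ibis
import ibis.expr.datashape as ds

t = ibis.memtable({"a": [1, 2, 3]})

# All-literal inputs build a scalar array expression...
assert ibis.array([1, ibis.literal(2.0)]).op().shape == ds.scalar

# ...while mixing in a column yields a columnar array instead (one array per row).
assert ibis.array([t.a, 42]).op().shape != ds.scalar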
feat(flink): array sort
ca85ae232c0d0bbcb4d3af1ae3134067b8171a64
feat
https://github.com/ibis-project/ibis/commit/ca85ae232c0d0bbcb4d3af1ae3134067b8171a64
array sort
diff --git a/flink.py b/flink.py index ff9b509..c7f8abd 100644 --- a/flink.py +++ b/flink.py @@ -72,7 +72,6 @@ class FlinkCompiler(SQLGlotCompiler): ops.ArgMax, ops.ArgMin, ops.ArrayFlatten, - ops.ArraySort, ops.ArrayStringJoin, ops.Correlation, ops.CountDistinctStar, @@ -102,6 +101,7 @@ class FlinkCompiler(SQLGlotCompiler): ops.ArrayLength: "cardinality", ops.ArrayPosition: "array_position", ops.ArrayRemove: "array_remove", + ops.ArraySort: "array_sort", ops.ArrayUnion: "array_union", ops.ExtractDayOfYear: "dayofyear", ops.MapKeys: "map_keys", @@ -576,10 +576,20 @@ class FlinkCompiler(SQLGlotCompiler): return self.cast(sge.Struct(expressions=list(values)), op.dtype) def visit_ArrayCollect(self, op, *, arg, where, order_by, include_null): + if order_by: + raise com.UnsupportedOperationError( + "ordering of order-sensitive aggregations via `order_by` is " + "not supported for this backend" + ) + # the only way to get filtering *and* respecting nulls is to use + # `FILTER` syntax, but it's broken in various ways for other aggregates + out = self.f.array_agg(arg) if not include_null: cond = arg.is_(sg.not_(NULL, copy=False)) where = cond if where is None else sge.And(this=cond, expression=where) - return self.agg.array_agg(arg, where=where, order_by=order_by) + if where is not None: + out = sge.Filter(this=out, expression=sge.Where(this=where)) + return out compiler = FlinkCompiler() diff --git a/dialects.py b/dialects.py index d38839d..e159941 100644 --- a/dialects.py +++ b/dialects.py @@ -212,6 +212,7 @@ class Flink(Hive): sge.ArrayConcat: rename_func("array_concat"), sge.ArraySize: rename_func("cardinality"), sge.ArrayAgg: rename_func("array_agg"), + sge.ArraySort: rename_func("array_sort"), sge.Length: rename_func("char_length"), sge.TryCast: lambda self, e: f"TRY_CAST({e.this.sql(self.dialect)} AS {e.to.sql(self.dialect)})", diff --git a/test_aggregation.py b/test_aggregation.py index 77af707..b76bdef 100644 --- a/test_aggregation.py +++ b/test_aggregation.py @@ -1480,13 +1480,13 @@ def test_collect_ordered(alltypes, df, filtered): def test_collect(alltypes, df, filtered, include_null): ibis_cond = (_.id % 13 == 0) if filtered else None pd_cond = (df.id % 13 == 0) if filtered else slice(None) - res = ( + expr = ( alltypes.string_col.nullif("3") .collect(where=ibis_cond, include_null=include_null) .length() - .execute() ) - vals = df.string_col if include_null else df.string_col[(df.string_col != "3")] + res = expr.execute() + vals = df.string_col if include_null else df.string_col[df.string_col != "3"] sol = len(vals[pd_cond]) assert res == sol diff --git a/test_array.py b/test_array.py index a6e35a9..fc9bf64 100644 --- a/test_array.py +++ b/test_array.py @@ -316,13 +316,13 @@ def test_unnest_idempotent(backend): ["scalar_column", array_types.x.cast("!array<int64>").unnest().name("x")] ) .group_by("scalar_column") - .aggregate(x=lambda t: t.x.collect()) + .aggregate(x=lambda t: t.x.collect().sort()) .order_by("scalar_column") ) result = expr.execute().reset_index(drop=True) expected = ( df[["scalar_column", "x"]] - .assign(x=df.x.map(lambda arr: [i for i in arr if not pd.isna(i)])) + .assign(x=df.x.map(lambda arr: sorted(i for i in arr if not pd.isna(i)))) .sort_values("scalar_column") .reset_index(drop=True) ) @@ -718,20 +718,34 @@ def test_array_unique(con, input, expected): @builtin_array [email protected]( - ["flink", "polars"], - raises=com.OperationNotDefinedError, -) [email protected](["polars"], raises=com.OperationNotDefinedError) @pytest.mark.notyet( ["risingwave"], 
raises=AssertionError, reason="Refer to https://github.com/risingwavelabs/risingwave/issues/14735", ) -def test_array_sort(con): - t = ibis.memtable({"a": [[3, 2], [], [42, 42], []], "id": range(4)}) [email protected]( + "data", + ( + param( + [[3, 2], [], [42, 42], []], + marks=[ + pytest.mark.notyet( + ["flink"], + raises=Py4JJavaError, + reason="flink cannot handle empty arrays", + ) + ], + ), + [[3, 2], [42, 42]], + ), + ids=["empty", "nonempty"], +) +def test_array_sort(con, data): + t = ibis.memtable({"a": data, "id": range(len(data))}) expr = t.mutate(a=t.a.sort()).order_by("id") result = con.execute(expr) - expected = pd.Series([[2, 3], [], [42, 42], []], dtype="object") + expected = pd.Series(list(map(sorted, data)), dtype="object") assert frozenset(map(tuple, result["a"].values)) == frozenset( map(tuple, expected.values) diff --git a/test_struct.py b/test_struct.py index dee7d91..04db05c 100644 --- a/test_struct.py +++ b/test_struct.py @@ -116,9 +116,6 @@ def test_struct_column(alltypes, df): @pytest.mark.notimpl(["postgres", "risingwave", "polars"]) @pytest.mark.notyet(["datafusion"], raises=Exception, reason="unsupported syntax") [email protected]( - ["flink"], reason="flink doesn't support creating struct columns from collect" -) def test_collect_into_struct(alltypes): from ibis import _
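Usage sketch for this record (illustrative only; `con` stands for an already configured Flink backend connection, which is not part of the diff): with ops.ArraySort moved into the simple-ops mapping and sge.ArraySort renamed to array_sort in the dialect, the expression exercised by the updated test compiles to Flink's ARRAY_SORT instead of raising OperationNotDefinedError.

import ibis

# Hypothetical sketch, mirroring the test above; data values are the
# non-empty case from the parametrization since Flink cannot handle
# empty arrays per the notyet marker.
t = ibis.memtable({"a": [[3, 2], [42, 42]], "id": [0, 1]})
expr = t.mutate(a=t.a.sort()).order_by("id")
# con.execute(expr)  # should now emit ARRAY_SORT(`a`) in the generated Flink SQL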
feat(core): use custom errors for failHandler and metadata Closes #611
6db22af044aa1181050675ff04184ccb37d38bb5
feat
https://github.com/mikro-orm/mikro-orm/commit/6db22af044aa1181050675ff04184ccb37d38bb5
use custom errors for failHandler and metadata Closes #611
diff --git a/ManyToMany.ts b/ManyToMany.ts index de0e394..a46a29a 100644 --- a/ManyToMany.ts +++ b/ManyToMany.ts @@ -1,7 +1,7 @@ import { ReferenceOptions } from './Property'; -import { MetadataStorage } from '../metadata'; +import { MetadataStorage, MetadataValidator } from '../metadata'; import { Utils } from '../utils'; -import { EntityValidator, ReferenceType } from '../entity'; +import { ReferenceType } from '../entity'; import { EntityName, EntityProperty, AnyEntity } from '../typings'; import { QueryOrder } from '../enums'; @@ -13,7 +13,7 @@ export function ManyToMany<T, O>( return function (target: AnyEntity, propertyName: string) { options = Utils.isObject<ManyToManyOptions<T, O>>(entity) ? entity : { ...options, entity, mappedBy }; const meta = MetadataStorage.getMetadataFromDecorator(target.constructor); - EntityValidator.validateSingleDecorator(meta, propertyName); + MetadataValidator.validateSingleDecorator(meta, propertyName); const property = { name: propertyName, reference: ReferenceType.MANY_TO_MANY } as EntityProperty<T>; meta.properties[propertyName] = Object.assign(property, options); }; diff --git a/ManyToOne.ts b/ManyToOne.ts index b08577c..73c0a02 100644 --- a/ManyToOne.ts +++ b/ManyToOne.ts @@ -1,7 +1,7 @@ import { ReferenceOptions } from './Property'; -import { MetadataStorage } from '../metadata'; +import { MetadataStorage, MetadataValidator } from '../metadata'; import { Utils } from '../utils'; -import { EntityValidator, ReferenceType } from '../entity'; +import { ReferenceType } from '../entity'; import { AnyEntity, EntityName, EntityProperty } from '../typings'; export function ManyToOne<T, O>( @@ -11,7 +11,7 @@ export function ManyToOne<T, O>( return function (target: AnyEntity, propertyName: string) { options = Utils.isObject<ManyToOneOptions<T, O>>(entity) ? entity : { ...options, entity }; const meta = MetadataStorage.getMetadataFromDecorator(target.constructor); - EntityValidator.validateSingleDecorator(meta, propertyName); + MetadataValidator.validateSingleDecorator(meta, propertyName); const property = { name: propertyName, reference: ReferenceType.MANY_TO_ONE } as EntityProperty; meta.properties[propertyName] = Object.assign(property, options); }; diff --git a/OneToMany.ts b/OneToMany.ts index d433c35..e4656b9 100644 --- a/OneToMany.ts +++ b/OneToMany.ts @@ -1,7 +1,7 @@ import { ReferenceOptions } from './Property'; -import { MetadataStorage } from '../metadata'; +import { MetadataStorage, MetadataValidator } from '../metadata'; import { Utils } from '../utils'; -import { EntityValidator, ReferenceType } from '../entity'; +import { ReferenceType } from '../entity'; import { QueryOrder } from '../enums'; import { EntityName, EntityProperty, AnyEntity } from '../typings'; @@ -14,7 +14,7 @@ export function createOneToDecorator<T, O>( return function (target: AnyEntity, propertyName: string) { options = Utils.isObject<OneToManyOptions<T, O>>(entity) ? 
entity : { ...options, entity, mappedBy }; const meta = MetadataStorage.getMetadataFromDecorator(target.constructor); - EntityValidator.validateSingleDecorator(meta, propertyName); + MetadataValidator.validateSingleDecorator(meta, propertyName); const prop = { name: propertyName, reference } as EntityProperty<T>; Object.assign(prop, options); diff --git a/Property.ts b/Property.ts index c48522a..98e4d42 100644 --- a/Property.ts +++ b/Property.ts @@ -1,6 +1,6 @@ -import { MetadataStorage } from '../metadata'; +import { MetadataStorage, MetadataValidator } from '../metadata'; import { Utils } from '../utils'; -import { Cascade, EntityValidator, ReferenceType, LoadStrategy } from '../entity'; +import { Cascade, ReferenceType, LoadStrategy } from '../entity'; import { EntityName, EntityProperty, AnyEntity, Constructor } from '../typings'; import { Type } from '../types'; @@ -8,7 +8,7 @@ export function Property<T>(options: PropertyOptions<T> = {}) { return function (target: AnyEntity, propertyName: string) { const meta = MetadataStorage.getMetadataFromDecorator(target.constructor); const desc = Object.getOwnPropertyDescriptor(target, propertyName) || {}; - EntityValidator.validateSingleDecorator(meta, propertyName); + MetadataValidator.validateSingleDecorator(meta, propertyName); const name = options.name || propertyName; if (propertyName !== name && !(desc.value instanceof Function)) { diff --git a/EntityValidator.ts b/EntityValidator.ts index 686ec3b..c5dcf55 100644 --- a/EntityValidator.ts +++ b/EntityValidator.ts @@ -8,12 +8,6 @@ export class EntityValidator { constructor(private strict: boolean) { } - static validateSingleDecorator(meta: EntityMetadata, propertyName: string): void { - if (meta.properties[propertyName]?.reference) { - throw ValidationError.multipleDecorators(meta.className, propertyName); - } - } - validate<T extends AnyEntity<T>>(entity: T, payload: any, meta: EntityMetadata): void { Object.values(meta.properties).forEach(prop => { if ([ReferenceType.ONE_TO_MANY, ReferenceType.MANY_TO_MANY].includes(prop.reference)) { diff --git a/MetadataDiscovery.ts b/MetadataDiscovery.ts index 2e9fd26..aebf0e3 100644 --- a/MetadataDiscovery.ts +++ b/MetadataDiscovery.ts @@ -3,7 +3,7 @@ import globby from 'globby'; import chalk from 'chalk'; import { AnyEntity, Constructor, Dictionary, EntityClass, EntityClassGroup, EntityMetadata, EntityProperty } from '../typings'; -import { Configuration, Utils, ValidationError } from '../utils'; +import { Configuration, Utils, MetadataError } from '../utils'; import { MetadataValidator } from './MetadataValidator'; import { MetadataStorage } from './MetadataStorage'; import { Cascade, ReferenceType } from '../entity'; @@ -723,7 +723,7 @@ export class MetadataDiscovery { } if (!target) { - throw ValidationError.entityNotFound(name, path.replace(this.config.get('baseDir'), '.')); + throw MetadataError.entityNotFound(name, path.replace(this.config.get('baseDir'), '.')); } return target; diff --git a/MetadataStorage.ts b/MetadataStorage.ts index 3a78db5..15b0397 100644 --- a/MetadataStorage.ts +++ b/MetadataStorage.ts @@ -1,5 +1,5 @@ import { EntityMetadata, AnyEntity, Dictionary } from '../typings'; -import { Utils, ValidationError } from '../utils'; +import { MetadataError, Utils } from '../utils'; import { EntityManager } from '../EntityManager'; import { EntityHelper } from '../entity'; import { EventSubscriber } from '../events'; @@ -52,7 +52,7 @@ export class MetadataStorage { get<T extends AnyEntity<T> = any>(entity: string, init = false, 
validate = true): EntityMetadata<T> { if (entity && !this.metadata[entity] && validate && !init) { - throw ValidationError.missingMetadata(entity); + throw MetadataError.missingMetadata(entity); } if (!this.metadata[entity] && init) { diff --git a/MetadataValidator.ts b/MetadataValidator.ts index 0a9f13f..034fec5 100644 --- a/MetadataValidator.ts +++ b/MetadataValidator.ts @@ -1,16 +1,22 @@ import { EntityMetadata, EntityProperty } from '../typings'; -import { Utils, ValidationError } from '../utils'; +import { Utils, MetadataError } from '../utils'; import { ReferenceType } from '../entity'; import { MetadataStorage } from './MetadataStorage'; export class MetadataValidator { + static validateSingleDecorator(meta: EntityMetadata, propertyName: string): void { + if (meta.properties[propertyName]?.reference) { + throw MetadataError.multipleDecorators(meta.className, propertyName); + } + } + validateEntityDefinition(metadata: MetadataStorage, name: string): void { const meta = metadata.get(name); // entities have PK if (!meta.embeddable && (!meta.primaryKeys || meta.primaryKeys.length === 0)) { - throw ValidationError.fromMissingPrimaryKey(meta); + throw MetadataError.fromMissingPrimaryKey(meta); } this.validateVersionField(meta); @@ -24,29 +30,29 @@ export class MetadataValidator { validateDiscovered(discovered: EntityMetadata[], warnWhenNoEntities: boolean): void { if (discovered.length === 0 && warnWhenNoEntities) { - throw ValidationError.noEntityDiscovered(); + throw MetadataError.noEntityDiscovered(); } const duplicates = Utils.findDuplicates(discovered.map(meta => meta.className)); if (duplicates.length > 0) { - throw ValidationError.duplicateEntityDiscovered(duplicates); + throw MetadataError.duplicateEntityDiscovered(duplicates); } // validate base entities discovered .filter(meta => meta.extends && !discovered.find(m => m.className === meta.extends)) - .forEach(meta => { throw ValidationError.fromUnknownBaseEntity(meta); }); + .forEach(meta => { throw MetadataError.fromUnknownBaseEntity(meta); }); // validate we found at least one entity (not just abstract/base entities) if (discovered.filter(meta => meta.name).length === 0 && warnWhenNoEntities) { - throw ValidationError.onlyAbstractEntitiesDiscovered(); + throw MetadataError.onlyAbstractEntitiesDiscovered(); } // check for not discovered entities discovered.forEach(meta => Object.values(meta.properties).forEach(prop => { if (prop.reference !== ReferenceType.SCALAR && !discovered.find(m => m.className === prop.type)) { - throw ValidationError.fromUnknownEntity(prop.type, `${meta.className}.${prop.name}`); + throw MetadataError.fromUnknownEntity(prop.type, `${meta.className}.${prop.name}`); } })); } @@ -54,12 +60,12 @@ export class MetadataValidator { private validateReference(meta: EntityMetadata, prop: EntityProperty, metadata: MetadataStorage): void { // references do have types if (!prop.type) { - throw ValidationError.fromWrongTypeDefinition(meta, prop); + throw MetadataError.fromWrongTypeDefinition(meta, prop); } // references do have type of known entity if (!metadata.get(prop.type, false, false)) { - throw ValidationError.fromWrongTypeDefinition(meta, prop); + throw MetadataError.fromWrongTypeDefinition(meta, prop); } } @@ -76,34 +82,34 @@ export class MetadataValidator { private validateOwningSide(meta: EntityMetadata, prop: EntityProperty, inverse: EntityProperty): void { // has correct `inversedBy` on owning side if (!inverse) { - throw ValidationError.fromWrongReference(meta, prop, 'inversedBy'); + throw 
MetadataError.fromWrongReference(meta, prop, 'inversedBy'); } // has correct `inversedBy` reference type if (inverse.type !== meta.name) { - throw ValidationError.fromWrongReference(meta, prop, 'inversedBy', inverse); + throw MetadataError.fromWrongReference(meta, prop, 'inversedBy', inverse); } // inversed side is not defined as owner if (inverse.inversedBy) { - throw ValidationError.fromWrongOwnership(meta, prop, 'inversedBy'); + throw MetadataError.fromWrongOwnership(meta, prop, 'inversedBy'); } } private validateInverseSide(meta: EntityMetadata, prop: EntityProperty, owner: EntityProperty): void { // has correct `mappedBy` on inverse side if (prop.mappedBy && !owner) { - throw ValidationError.fromWrongReference(meta, prop, 'mappedBy'); + throw MetadataError.fromWrongReference(meta, prop, 'mappedBy'); } // has correct `mappedBy` reference type if (owner.type !== meta.name) { - throw ValidationError.fromWrongReference(meta, prop, 'mappedBy', owner); + throw MetadataError.fromWrongReference(meta, prop, 'mappedBy', owner); } // owning side is not defined as inverse if (owner.mappedBy) { - throw ValidationError.fromWrongOwnership(meta, prop, 'mappedBy'); + throw MetadataError.fromWrongOwnership(meta, prop, 'mappedBy'); } } @@ -115,14 +121,14 @@ export class MetadataValidator { const props = Object.values(meta.properties).filter(p => p.version); if (props.length > 1) { - throw ValidationError.multipleVersionFields(meta, props.map(p => p.name)); + throw MetadataError.multipleVersionFields(meta, props.map(p => p.name)); } const prop = meta.properties[meta.versionProperty]; const type = prop.type.toLowerCase(); if (type !== 'number' && type !== 'date' && !type.startsWith('timestamp') && !type.startsWith('datetime')) { - throw ValidationError.invalidVersionFieldType(meta); + throw MetadataError.invalidVersionFieldType(meta); } } diff --git a/Configuration.ts b/Configuration.ts index a23dc35..b06fc9f 100644 --- a/Configuration.ts +++ b/Configuration.ts @@ -6,7 +6,7 @@ import { CacheAdapter, FileCacheAdapter, NullCacheAdapter } from '../cache'; import { EntityFactory, EntityRepository } from '../entity'; import { AnyEntity, Constructor, Dictionary, EntityClass, EntityClassGroup, IPrimaryKey } from '../typings'; import { Hydrator, ObjectHydrator } from '../hydration'; -import { Logger, LoggerNamespace, Utils, ValidationError } from '../utils'; +import { Logger, LoggerNamespace, NotFoundError, Utils } from '../utils'; import { EntityManager } from '../EntityManager'; import { EntityOptions, EntitySchema, IDatabaseDriver, MetadataStorage } from '..'; import { Platform } from '../platforms'; @@ -31,7 +31,7 @@ export class Configuration<D extends IDatabaseDriver = IDatabaseDriver> { strict: false, // eslint-disable-next-line no-console logger: console.log.bind(console), - findOneOrFailHandler: (entityName: string, where: Dictionary | IPrimaryKey) => ValidationError.findOneFailed(entityName, where), + findOneOrFailHandler: (entityName: string, where: Dictionary | IPrimaryKey) => NotFoundError.findOneFailed(entityName, where), baseDir: process.cwd(), hydrator: ObjectHydrator, autoJoinOneToOneOwner: true, diff --git a/errors.ts b/errors.ts index 2876e53..58e6694 100644 --- a/errors.ts +++ b/errors.ts @@ -34,45 +34,10 @@ export class ValidationError<T extends AnyEntity = AnyEntity> extends Error { return new ValidationError(msg); } - static fromMissingPrimaryKey(meta: EntityMetadata): ValidationError { - return new ValidationError(`${meta.className} entity is missing @PrimaryKey()`); - } - - static 
fromWrongReference(meta: EntityMetadata, prop: EntityProperty, key: keyof EntityProperty, owner?: EntityProperty): ValidationError { - if (owner) { - return ValidationError.fromMessage(meta, prop, `has wrong '${key}' reference type: ${owner.type} instead of ${meta.className}`); - } - - return ValidationError.fromMessage(meta, prop, `has unknown '${key}' reference: ${prop.type}.${prop[key]}`); - } - - static fromWrongTypeDefinition(meta: EntityMetadata, prop: EntityProperty): ValidationError { - if (!prop.type) { - return ValidationError.fromMessage(meta, prop, `is missing type definition`); - } - - return ValidationError.fromMessage(meta, prop, `has unknown type: ${prop.type}`); - } - - static fromWrongOwnership(meta: EntityMetadata, prop: EntityProperty, key: keyof EntityProperty): ValidationError { - const type = key === 'inversedBy' ? 'owning' : 'inverse'; - const other = key === 'inversedBy' ? 'mappedBy' : 'inversedBy'; - - return new ValidationError(`Both ${meta.className}.${prop.name} and ${prop.type}.${prop[key]} are defined as ${type} sides, use '${other}' on one of them`); - } - static fromMergeWithoutPK(meta: EntityMetadata): void { throw new ValidationError(`You cannot merge entity '${meta.className}' without identifier!`); } - static fromUnknownEntity(className: string, source: string): ValidationError { - return new ValidationError(`Entity '${className}' was not discovered, please make sure to provide it in 'entities' array when initializing the ORM (used in ${source})`); - } - - static fromUnknownBaseEntity(meta: EntityMetadata): ValidationError { - return new ValidationError(`Entity '${meta.className}' extends unknown base entity '${meta.extends}', please make sure to provide it in 'entities' array when initializing the ORM`); - } - static transactionRequired(): ValidationError { return new ValidationError('An open transaction is required for this operation'); } @@ -89,15 +54,6 @@ export class ValidationError<T extends AnyEntity = AnyEntity> extends Error { return new ValidationError(`Cannot obtain optimistic lock on unversioned entity ${meta.className}`); } - static multipleVersionFields(meta: EntityMetadata, fields: string[]): ValidationError { - return new ValidationError(`Entity ${meta.className} has multiple version properties defined: '${fields.join('\\', \\'')}'. Only one version property is allowed per entity.`); - } - - static invalidVersionFieldType(meta: EntityMetadata): ValidationError { - const prop = meta.properties[meta.versionProperty]; - return new ValidationError(`Version property ${meta.className}.${prop.name} has unsupported type '${prop.type}'. Only 'number' and 'Date' are allowed.`); - } - static lockFailed(entityOrName: AnyEntity | string): ValidationError { const name = Utils.isString(entityOrName) ? entityOrName : entityOrName.constructor.name; const entity = Utils.isString(entityOrName) ? 
undefined : entityOrName; @@ -112,38 +68,10 @@ export class ValidationError<T extends AnyEntity = AnyEntity> extends Error { return new ValidationError(`The optimistic lock failed, version ${expectedLockVersion} was expected, but is actually ${actualLockVersion}`, entity); } - static noEntityDiscovered(): ValidationError { - return new ValidationError('No entities were discovered'); - } - - static onlyAbstractEntitiesDiscovered(): ValidationError { - return new ValidationError('Only abstract entities were discovered, maybe you forgot to use @Entity() decorator?'); - } - - static duplicateEntityDiscovered(paths: string[]): ValidationError { - return new ValidationError(`Duplicate entity names are not allowed: ${paths.join(', ')}`); - } - - static entityNotFound(name: string, path: string): ValidationError { - return new ValidationError(`Entity '${name}' not found in ${path}`); - } - - static findOneFailed(name: string, where: Dictionary | IPrimaryKey): ValidationError { - return new ValidationError(`${name} not found (${inspect(where)})`); - } - - static missingMetadata(entity: string): ValidationError { - return new ValidationError(`Metadata for entity ${entity} not found`); - } - static invalidPropertyName(entityName: string, invalid: string): ValidationError { return new ValidationError(`Entity '${entityName}' does not have property '${invalid}'`); } - static multipleDecorators(entityName: string, propertyName: string): ValidationError { - return new ValidationError(`Multiple property decorators used on '${entityName}.${propertyName}' property`); - } - static invalidType(type: Constructor<Type<any>>, value: any, mode: string): ValidationError { const valueType = Utils.getObjectType(value); @@ -175,8 +103,88 @@ export class ValidationError<T extends AnyEntity = AnyEntity> extends Error { return new ValidationError(`Using operators inside embeddables is not allowed, move the operator above. (property: ${className}.${propName}, payload: ${inspect(payload)})`); } - private static fromMessage(meta: EntityMetadata, prop: EntityProperty, message: string): ValidationError { - return new ValidationError(`${meta.className}.${prop.name} ${message}`); +} + +export class MetadataError<T extends AnyEntity = AnyEntity> extends ValidationError { + + static fromMissingPrimaryKey(meta: EntityMetadata): MetadataError { + return new MetadataError(`${meta.className} entity is missing @PrimaryKey()`); + } + + static fromWrongReference(meta: EntityMetadata, prop: EntityProperty, key: keyof EntityProperty, owner?: EntityProperty): MetadataError { + if (owner) { + return MetadataError.fromMessage(meta, prop, `has wrong '${key}' reference type: ${owner.type} instead of ${meta.className}`); + } + + return MetadataError.fromMessage(meta, prop, `has unknown '${key}' reference: ${prop.type}.${prop[key]}`); + } + + static fromWrongTypeDefinition(meta: EntityMetadata, prop: EntityProperty): MetadataError { + if (!prop.type) { + return MetadataError.fromMessage(meta, prop, `is missing type definition`); + } + + return MetadataError.fromMessage(meta, prop, `has unknown type: ${prop.type}`); + } + + static fromWrongOwnership(meta: EntityMetadata, prop: EntityProperty, key: keyof EntityProperty): MetadataError { + const type = key === 'inversedBy' ? 'owning' : 'inverse'; + const other = key === 'inversedBy' ? 
'mappedBy' : 'inversedBy'; + + return new MetadataError(`Both ${meta.className}.${prop.name} and ${prop.type}.${prop[key]} are defined as ${type} sides, use '${other}' on one of them`); + } + + static entityNotFound(name: string, path: string): MetadataError { + return new MetadataError(`Entity '${name}' not found in ${path}`); + } + + static multipleVersionFields(meta: EntityMetadata, fields: string[]): MetadataError { + return new MetadataError(`Entity ${meta.className} has multiple version properties defined: '${fields.join('\\', \\'')}'. Only one version property is allowed per entity.`); + } + + static invalidVersionFieldType(meta: EntityMetadata): MetadataError { + const prop = meta.properties[meta.versionProperty]; + return new MetadataError(`Version property ${meta.className}.${prop.name} has unsupported type '${prop.type}'. Only 'number' and 'Date' are allowed.`); + } + + static fromUnknownEntity(className: string, source: string): MetadataError { + return new MetadataError(`Entity '${className}' was not discovered, please make sure to provide it in 'entities' array when initializing the ORM (used in ${source})`); + } + + static fromUnknownBaseEntity(meta: EntityMetadata): MetadataError { + return new MetadataError(`Entity '${meta.className}' extends unknown base entity '${meta.extends}', please make sure to provide it in 'entities' array when initializing the ORM`); + } + + static noEntityDiscovered(): MetadataError { + return new MetadataError('No entities were discovered'); + } + + static onlyAbstractEntitiesDiscovered(): MetadataError { + return new MetadataError('Only abstract entities were discovered, maybe you forgot to use @Entity() decorator?'); + } + + static duplicateEntityDiscovered(paths: string[]): MetadataError { + return new MetadataError(`Duplicate entity names are not allowed: ${paths.join(', ')}`); + } + + static multipleDecorators(entityName: string, propertyName: string): MetadataError { + return new MetadataError(`Multiple property decorators used on '${entityName}.${propertyName}' property`); + } + + static missingMetadata(entity: string): MetadataError { + return new MetadataError(`Metadata for entity ${entity} not found`); + } + + private static fromMessage(meta: EntityMetadata, prop: EntityProperty, message: string): MetadataError { + return new MetadataError(`${meta.className}.${prop.name} ${message}`); + } + +} + +export class NotFoundError<T extends AnyEntity = AnyEntity> extends ValidationError { + + static findOneFailed(name: string, where: Dictionary | IPrimaryKey): NotFoundError { + return new NotFoundError(`${name} not found (${inspect(where)})`); } } diff --git a/index.ts b/index.ts index 5153772..00cff3d 100644 --- a/index.ts +++ b/index.ts @@ -1,7 +1,7 @@ export * from './Configuration'; export * from './ConfigurationLoader'; -export * from './ValidationError'; export * from './Logger'; export * from './Utils'; export * from './RequestContext'; export * from './SmartQueryHelper'; +export * from './errors';
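Consumer-side sketch for this record (hedged; the import path assumes the new error classes are re-exported from the core package entry point, which this diff only touches via the internal utils index): because MetadataError and NotFoundError extend ValidationError, existing instanceof ValidationError checks keep working, while callers can now branch on the more specific classes, e.g. around findOneOrFail.

// Illustrative only; the 'Author' entity and its identifier are hypothetical.
import { MikroORM, NotFoundError, ValidationError } from '@mikro-orm/core';

async function findAuthorOrNull(orm: MikroORM, id: number) {
  try {
    // the default findOneOrFailHandler now throws NotFoundError
    return await orm.em.findOneOrFail('Author', { id });
  } catch (e) {
    if (e instanceof NotFoundError) {
      return null;   // "not found" handled specifically
    }
    if (e instanceof ValidationError) {
      throw e;       // other validation/metadata problems still surface
    }
    throw e;
  }
}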
test: clean up data loading; remove service spec where possible
87e7dffb7c24c7d05ff7e244d38e83aecdcd1c00
test
https://github.com/rohankumardubey/ibis/commit/87e7dffb7c24c7d05ff7e244d38e83aecdcd1c00
clean up data loading; remove service spec where possible
diff --git a/druid.sql b/druid.sql index b680b56..69143c5 100644 --- a/druid.sql +++ b/druid.sql @@ -3,7 +3,7 @@ OVERWRITE ALL SELECT * FROM TABLE( EXTERN( - '{"type":"local","files":["/opt/shared/diamonds.parquet"]}', + '{"type":"local","files":["/data/diamonds.parquet"]}', '{"type":"parquet"}', '[{"name":"carat","type":"double"},{"name":"cut","type":"string"},{"name":"color","type":"string"},{"name":"clarity","type":"string"},{"name":"depth","type":"double"},{"name":"table","type":"double"},{"name":"price","type":"long"},{"name":"x","type":"double"},{"name":"y","type":"double"},{"name":"z","type":"double"}]' ) @@ -15,7 +15,7 @@ OVERWRITE ALL SELECT * FROM TABLE( EXTERN( - '{"type":"local","files":["/opt/shared/batting.parquet"]}', + '{"type":"local","files":["/data/batting.parquet"]}', '{"type":"parquet"}', '[{"name":"playerID","type":"string"},{"name":"yearID","type":"long"},{"name":"stint","type":"long"},{"name":"teamID","type":"string"},{"name":"lgID","type":"string"},{"name":"G","type":"long"},{"name":"AB","type":"long"},{"name":"R","type":"long"},{"name":"H","type":"long"},{"name":"X2B","type":"long"},{"name":"X3B","type":"long"},{"name":"HR","type":"long"},{"name":"RBI","type":"long"},{"name":"SB","type":"long"},{"name":"CS","type":"long"},{"name":"BB","type":"long"},{"name":"SO","type":"long"},{"name":"IBB","type":"long"},{"name":"HBP","type":"long"},{"name":"SH","type":"long"},{"name":"SF","type":"long"},{"name":"GIDP","type":"long"}]' ) @@ -27,7 +27,7 @@ OVERWRITE ALL SELECT * FROM TABLE( EXTERN( - '{"type":"local","files":["/opt/shared/awards_players.parquet"]}', + '{"type":"local","files":["/data/awards_players.parquet"]}', '{"type":"parquet"}', '[{"name":"playerID","type":"string"},{"name":"awardID","type":"string"},{"name":"yearID","type":"long"},{"name":"lgID","type":"string"},{"name":"tie","type":"string"},{"name":"notes","type":"string"}]' ) @@ -39,7 +39,7 @@ OVERWRITE ALL SELECT * FROM TABLE( EXTERN( - '{"type":"local","files":["/opt/shared/functional_alltypes.parquet"]}', + '{"type":"local","files":["/data/functional_alltypes.parquet"]}', '{"type":"parquet"}', '[{"name":"id","type":"long"},{"name":"bool_col","type":"long"},{"name":"tinyint_col","type":"long"},{"name":"smallint_col","type":"long"},{"name":"int_col","type":"long"},{"name":"bigint_col","type":"long"},{"name":"float_col","type":"double"},{"name":"double_col","type":"double"},{"name":"date_string_col","type":"string"},{"name":"string_col","type":"string"},{"name":"timestamp_col","type":"string"},{"name":"year","type":"long"},{"name":"month","type":"long"}]' ) diff --git a/postgresql.sql b/postgresql.sql index 225d850..23debfd 100644 --- a/postgresql.sql +++ b/postgresql.sql @@ -18,6 +18,8 @@ CREATE TABLE diamonds ( z FLOAT ); +COPY diamonds FROM '/data/diamonds.csv' WITH (FORMAT CSV, HEADER TRUE, DELIMITER ','); + DROP TABLE IF EXISTS batting CASCADE; CREATE TABLE batting ( @@ -45,6 +47,8 @@ CREATE TABLE batting ( "GIDP" BIGINT ); +COPY batting FROM '/data/batting.csv' WITH (FORMAT CSV, HEADER TRUE, DELIMITER ','); + DROP TABLE IF EXISTS awards_players CASCADE; CREATE TABLE awards_players ( @@ -60,6 +64,8 @@ CREATE TABLE awards_players ( simvec VECTOR GENERATED always AS ('[1,2,3]'::VECTOR) STORED ); +COPY awards_players FROM '/data/awards_players.csv' WITH (FORMAT CSV, HEADER TRUE, DELIMITER ','); + DROP TABLE IF EXISTS functional_alltypes CASCADE; CREATE TABLE functional_alltypes ( @@ -78,6 +84,8 @@ CREATE TABLE functional_alltypes ( month INTEGER ); +COPY functional_alltypes FROM 
'/data/functional_alltypes.csv' WITH (FORMAT CSV, HEADER TRUE, DELIMITER ','); + DROP TABLE IF EXISTS tzone CASCADE; CREATE TABLE tzone ( @@ -170,7 +178,7 @@ CREATE TABLE IF NOT EXISTS not_supported_intervals ( DROP TABLE IF EXISTS geo CASCADE; -CREATE TABLE IF NOT EXISTS geo ( +CREATE TABLE geo ( id BIGSERIAL PRIMARY KEY, geo_point GEOMETRY(POINT), geo_linestring GEOMETRY(LINESTRING), @@ -178,6 +186,8 @@ CREATE TABLE IF NOT EXISTS geo ( geo_multipolygon GEOMETRY(MULTIPOLYGON) ); +COPY geo FROM '/data/geo.csv' WITH (FORMAT CSV, HEADER TRUE, DELIMITER ','); + CREATE INDEX IF NOT EXISTS idx_geo_geo_linestring ON geo USING GIST (geo_linestring); CREATE INDEX IF NOT EXISTS idx_geo_geo_multipolygon ON geo USING GIST (geo_multipolygon); CREATE INDEX IF NOT EXISTS idx_geo_geo_point ON geo USING GIST (geo_point); diff --git a/docker-compose.yml b/docker-compose.yml index 7bfc67f..efcbed9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -135,6 +135,8 @@ services: - 5432:5432 networks: - postgres + volumes: + - $PWD/ci/ibis-testing-data/csv:/data:ro mssql: image: mcr.microsoft.com/mssql/server:2022-latest environment: @@ -150,7 +152,7 @@ services: ports: - 1433:1433 volumes: - - mssql:/data + - $PWD/ci/ibis-testing-data/csv:/data:ro networks: - mssql trino-postgres: @@ -172,6 +174,8 @@ services: - 5433:5432 networks: - trino + volumes: + - $PWD/ci/ibis-testing-data/csv:/data:ro trino: depends_on: - trino-postgres @@ -195,8 +199,6 @@ services: druid-postgres: image: postgres:15.2-alpine container_name: druid-postgres - volumes: - - metadata_data:/var/lib/postgresql/data environment: - POSTGRES_PASSWORD=FoolishPassword - POSTGRES_USER=druid @@ -310,6 +312,7 @@ services: volumes: - druid:/opt/shared - middle_var:/opt/druid/var + - $PWD/ci/ibis-testing-data/parquet:/data:ro depends_on: - druid-zookeeper - druid-postgres @@ -367,7 +370,6 @@ networks: druid: volumes: - metadata_data: middle_var: historical_var: broker_var: @@ -375,4 +377,3 @@ volumes: router_var: clickhouse: druid: - mssql: diff --git a/conftest.py b/conftest.py index 46a8872..9ca0426 100644 --- a/conftest.py +++ b/conftest.py @@ -21,7 +21,7 @@ import pytest import sqlalchemy as sa import ibis -from ibis.backends.conftest import TEST_TABLES, init_database +from ibis.backends.conftest import init_database from ibis.backends.tests.base import BackendTest, RoundHalfToEven PG_USER = os.environ.get( @@ -67,30 +67,16 @@ class TestConf(BackendTest, RoundHalfToEven): Location of scripts defining schemas """ with open(script_dir / 'schema' / 'postgresql.sql') as schema: - engine = init_database( + init_database( url=sa.engine.make_url( f"postgresql://{user}:{password}@{host}:{port:d}/{database}" ), database=database, schema=schema, - isolation_level='AUTOCOMMIT', + isolation_level="AUTOCOMMIT", recreate=False, ) - tables = list(TEST_TABLES) + ['geo'] - with engine.begin() as con, con.connection.cursor() as cur: - for table in tables: - # Here we insert rows using COPY table FROM STDIN, using - # psycopg2's `copy_expert` API. - # - # We could use DataFrame.to_sql(method=callable), but that - # incurs an unnecessary round trip and requires more code: the - # `data_iter` argument would have to be turned back into a CSV - # before being passed to `copy_expert`. - sql = f"COPY {table} FROM STDIN WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')" - with data_dir.joinpath("csv", f'{table}.csv').open('r') as file: - cur.copy_expert(sql=sql, file=file) - @staticmethod def connect(data_directory: Path): return ibis.postgres.connect(
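Brief illustration of the new load path in this record (nothing beyond what the diff already adds): each backend's schema script now loads its CSVs directly from the read-only /data volume mount introduced in docker-compose.yml, so the Python fixture no longer needs the psycopg2 copy_expert round trip.

-- The container-side /data path comes from the compose volume
--   $PWD/ci/ibis-testing-data/csv:/data:ro
-- so one COPY per table at schema-init time replaces the Python loading loop.
COPY diamonds FROM '/data/diamonds.csv' WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',');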
chore: relock
39902f0ff297c8238f977aa8430bed9bcb76b450
chore
https://github.com/rohankumardubey/ibis/commit/39902f0ff297c8238f977aa8430bed9bcb76b450
relock
diff --git a/poetry.lock b/poetry.lock index 26a049c..bf628e1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -415,29 +415,29 @@ files = [ [[package]] name = "black" -version = "23.10.1" +version = "23.11.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, - {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, - {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, - {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, - {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, - {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, - {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, - {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, - {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, - {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, - {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, - {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, + {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, + {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, + {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, + {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, + {file = 
"black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, + {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, + {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, + {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, + {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, + {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, + {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, + {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, + {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, + {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, + {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, + {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, + {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, + {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, ] [package.dependencies] @@ -713,91 +713,91 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] [[package]] name = "clickhouse-connect" -version = "0.6.18" +version = "0.6.19" description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" optional = true python-versions = "~=3.7" files = [ - {file = "clickhouse-connect-0.6.18.tar.gz", hash = "sha256:f69814a6fccb7977ab865481fdf8ef80a18aa9db761c3bae043a18712dc2baca"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fe97cd1bd278dfeb72eabcb6a359f4d803576d3247df17664fbfeee4d77ea13c"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4814de4ea7ef04d72974d4d489b6b74e08896786a650296f22f4255b2cf1d763"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61fc85630288c61e81b3ce9cccc54abc05b52ff4644d0b45dca217f7d673ca77"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7d90822a356d3aa02e813f285a79a895221ba96d31ad593bd610b5d14355f15"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd6c09932e1ac571de1e43ef4cf5d8fb912fd728913171e07333b8673ac0c606"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:79d6e5d0679707926585afb4d8e39eb69bc235db56f819a3a1b6b9ba1d2e8d28"}, - {file = 
"clickhouse_connect-0.6.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d00b2b228a0a88e203eeef6a1a4cfe7b061b5835ea2deef2e3ca7a3caaa023ab"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbe07e2dccfdd07ca937e80ee3039a5784d38a24af07c3b2abd87963c708b6c1"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-win32.whl", hash = "sha256:bdc1ed6ee3775542a27298990880c6ee2f6f676ba23d0cb9ab268a080817cf77"}, - {file = "clickhouse_connect-0.6.18-cp310-cp310-win_amd64.whl", hash = "sha256:82e60ea4e86864fe837cb34f64abd21a40afd6cd62afda44763a48890474f168"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86507f75c8b58d899dd3268c1a8497713214eb4731be4e95940e9ff13ac0724b"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:800751502d14d98153012cbb25c77f2b1619e5f9d480524eac1b249f5138ff54"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6834817fd17245ffd3864ec07c98d9f6fa424259188271a6529943fc16a7bfa7"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:450e73ab5058671b1aee064a09dbf91854d5779b13d030d03cfe1982e85f6324"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f69121da34920eadc316549aac3b2388fdd0d3d2a83b95c0f18e83651a59a446"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1e98476b511e184631ae35f7b8069b2b61a3bd94925442eb763fdf250a27249a"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c8fe486cac1b7d8d70e5e4940a6b6165cc2ae0106e236337c71b1e87a227d4dc"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1dd6a8d8d80d30e1fd39de914ef97b6163877cffb3a344f6a988b0404e530c0"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-win32.whl", hash = "sha256:0a401a2f1c361e40e98bc1414b9c0813d1fcfc18abf828923159da5f51541247"}, - {file = "clickhouse_connect-0.6.18-cp311-cp311-win_amd64.whl", hash = "sha256:6bf68d6bebd5646c9487f2bb919afb426524fe8c6f6b9f563ee1f32c14864ef5"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:701dc3547c12431de5518ce322c025c892d8f6838ebf9f297da4dc82d5f1c5b8"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d06abf46e36364baeb0d3465c891992cf4bd12b493d54e027ba4674adc6f45f6"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:268a23f4df19dba18762c40327183882ddda6e1783f8a736d43a72022472a8c7"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d79bdecb3af0c3f7b3ef7b050d27cee11add6114afdc57e77305b3152885ad7"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:185c69f503611539dea9fb653ceab50222eb19ea53b736fa08aee8d2133aebd0"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08276d3a006008e10ed0740799873b9ba3bf00819ed9025232c67238efea50f3"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1d0b41eed17c1bbccb43f5ee89a469fbf7ab7a2afcc2794163f44d6e39e7a465"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:2dfcba94f3aeedf60344be4c53d6567015e2135ed620ffe7ae692ff84627a06f"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-win32.whl", hash = "sha256:b3e5bdff69ffe8e051535722c8965e1385846c8ce9925f73fa21c84d4c117b86"}, - {file = "clickhouse_connect-0.6.18-cp312-cp312-win_amd64.whl", hash = "sha256:d95b6007d7d0e5e5b0d696351ada0263e34e6c279394e05f59d4cff11febef79"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:90af09044989405d3e61f8e16c5d517d58aa9a9cc4d7fffacd23d941611853b4"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40c0367e89023b0397847d6f8b9bfaf72850e9326771187d4f8eab273352dfe2"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:505a204bb7389c20b02d6ad93ded732af4f56d957ac9f3fdd908d84116d0b6c9"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1341b6d3bda2a3d938d58016da3b03be46b1e0eb459743a6dd677269a56ead84"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8106efbef6a1c5d4ce6490594465f5438b98c244d760a3fdbfaf5b2bbdf5998c"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:338191951305f3cb5b3c89f78f24306387fa861365bbaa98b71d47363b6cb6ed"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89001dd41fd8ca029e800186a3bfe43a1b63653405f3ef529cdc1aa9a539fad1"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-win32.whl", hash = "sha256:853f2575c281f4c5e81ac86a60e5f4cecd893a48d055e5d20a1856f2a15f5e9b"}, - {file = "clickhouse_connect-0.6.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2dc82e4829e099c0bfa2fb956dcaf4745405994ce85dd4bb31271e9950039152"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4f49fcb14c0a0418601eefbcaf5426729935b8aca311d6067b38f51b70a0f418"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b17584ca56c6ce1442a1595fb2784ec33d1bf1a7964cbaea91ba13f91bd7569e"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5866585557a261aa6a93373a1f1e1c637cc92a97c364432172fa3550ddedf47"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f592ed089dfec045904a73bfc3ee63ed020207851e29bd8bf574d8ba333702f"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d83dddc093a8f8f2535c48154ed048e23377ed59c64a037d07f452cae7019750"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e4851fe61b5d8d9e3b3e643cbe3215feef34f286696828108e4809ddd6308fd"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:342dd61c788db85c78e47e11aae192cb28297ae462a8dd26c0164977b9daa362"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0bb28ad8800a20a413a9eee9d59bdf3ed812141e5f56f3be54ff2d3636aa1412"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-win32.whl", hash = "sha256:cbae29e17a587303d19d2652688e9bbac93625c10d49d8005a42381214fa1255"}, - {file = "clickhouse_connect-0.6.18-cp38-cp38-win_amd64.whl", hash = "sha256:39344f034372d2f9fe1030540bd5f1bdace68a37cf06ded13d184cfbb9022c9c"}, - {file = 
"clickhouse_connect-0.6.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a851bdbcd5db1603d54ba159545d20465442e7f7b691b73ad09c7ae051b4b891"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:66f8ef5d9043539f65a33ea56283329304b97536c526c17d991670234bc9300b"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b09e3c2471a907c27758db85c904e4ae441dd2d4734869d56c85365b4acead"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55034902f08db3e060c6189d4bafd3f4a859f159de4c6b8575e482d3cc09dcc2"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:098d5334994885352a5fc82aedb06c5ad03588c94861184999e890f1cafb29f4"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7d6c204c3ef3769ae3ab1d1f9b636beb9700b5de5293ddc1d47957cb889b68eb"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2239f65040cd6709c8d7c75c6c4a86a020575397c1aa5867fbd47dd54d0e457e"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:86aee265741d625e7bedf8132f5107c81f0f005705c463d4630409acfc264466"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-win32.whl", hash = "sha256:f76cc9b4d4d749bc20ef2046904b64cd910a56f2ed56e4b93f46b4578b9479c5"}, - {file = "clickhouse_connect-0.6.18-cp39-cp39-win_amd64.whl", hash = "sha256:8661aec683f8436c49033976dbd3eaf9c2465788b8d7364bab5f9d9ce6baf1c1"}, - {file = "clickhouse_connect-0.6.18-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e410bf751f143421f7f65f8ffe3ee22482635773d4f63d69c5aaa608db6ed937"}, - {file = "clickhouse_connect-0.6.18-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:587f98cd49fee9e56339265c4dce428c12239b8c8228b45f041fb8ef357193e0"}, - {file = "clickhouse_connect-0.6.18-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:354565f0e552d4a47907907e3cd8033ed91aa1ef0314fae054f8f937f9612a63"}, - {file = "clickhouse_connect-0.6.18-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090678f47c8d9e1ca30a63150145587a4143095f3144b2f3f55523ac3a185efb"}, - {file = "clickhouse_connect-0.6.18-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dff0f2c6fb3a3ec601a891f3496a409b257339de0049bfd34277778d0a8067da"}, - {file = "clickhouse_connect-0.6.18-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f77fc7bb2907cfef00b0ac9f24d3ad2787162ac7067cea13f526ca73353b1ea4"}, - {file = "clickhouse_connect-0.6.18-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed3d7a2c0e148421309db9d0e45fad2c1bf61f8a8cd3d24c6a6f6c14ea367f5e"}, - {file = "clickhouse_connect-0.6.18-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9afc73f9dc58c29fe09f20256d647fa0ceb2064f7d862aae157412568ce60f"}, - {file = "clickhouse_connect-0.6.18-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89191351b07037320b6a4e29b7607eb290e5f2b4472ea2f16ea25d72c7c8d331"}, - {file = "clickhouse_connect-0.6.18-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:87edd41b484285db58b76b547849bd28d28a81da4f08031da9eea185aef3abff"}, - {file = "clickhouse_connect-0.6.18-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:d15ad5c0e85a7ba71397f9921d42c4bbd84a69fb8b63d5c166f9a8e0f9c2cb0b"}, - {file = "clickhouse_connect-0.6.18-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3baad1eb4d9594257e00f0de562a2c923b1573fdb879cf3dd085f77d8d2cf9a0"}, - {file = "clickhouse_connect-0.6.18-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a504681fd839384b02dc990d544832296f530d54164e140258821924ddb1a5df"}, - {file = "clickhouse_connect-0.6.18-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f4d4428b89aa12a7b4a2785a8280f27b8ed1b1ea7ec730ed9d6fdfa052cc822"}, - {file = "clickhouse_connect-0.6.18-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:90034c4bcd4faca6b55177d9b684bf884f48cee8600fe3699db57a66e6f9e9ae"}, - {file = "clickhouse_connect-0.6.18-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5d3c79ad91b457f4755bd7598946d3afc21b7b38bf3dc0abd54361094876ee4b"}, - {file = "clickhouse_connect-0.6.18-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11fdadf2f6bcb24983a1a3a314f3b928ebde412b71d03dba7e2c8db0aa2169e"}, - {file = "clickhouse_connect-0.6.18-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e8748b734fbf683cda162c9402bc8d3989d67530563cfb5c5eb84e66c56d247"}, - {file = "clickhouse_connect-0.6.18-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425ce4198a4dc78bf70f477af53c067874af57a90817c3c07dd65127e9e44f09"}, - {file = "clickhouse_connect-0.6.18-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1fabc2d3782bf73ab8ec36d1ba1ae96d1de04f1bd2bc6681e10e6b887d511088"}, + {file = "clickhouse-connect-0.6.19.tar.gz", hash = "sha256:863a2bfaae6daa9771830b8c734b628af60e1eccd426528407e70b6055914a18"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5e9f101e6fdb0ed6575f5d0da49d31d9f2e9173c20fbd0183a280a72b75c4d2"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:719cafa48049c04538b757dc68ca7d5c52c7598adf54c0185086d198999ce67d"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35e5217bcea65fde26666fc7fb8119c1fb70e88b1851b149bcf5a9fc526e0c73"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a162900bc2a95b5164a6e296e9ed8b672b37d523fcb0b67f88a4004af234d06c"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:064ad9d8544ec022af82223eef70ce09240ddadc2f0ae2abf277fc694ff88bf4"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:15daec4b71003303edbb5d67df85066e28fffae2b85f83866edb532681a286a1"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9825039c8b335941bb5745a6f2165281ed0c664d22681825a4deceae9caa92e1"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ef29a5bbca8cf376dc055848f331d49812f4fd9a41e9c4d73172817ed22cc5"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-win32.whl", hash = "sha256:4537642d635fd83c5f01acb2f209a88af4b34748ab6b32e8435600634f75f8d6"}, + {file = "clickhouse_connect-0.6.19-cp310-cp310-win_amd64.whl", hash = "sha256:6a666f58addc84ae888c04f4e2c17ca4a0da4425543ff29daa9f8ebf3a60f844"}, + {file = 
"clickhouse_connect-0.6.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dc7e41f0781ec5c18a725ddd1178a8bdad2d1f32e5305435d8b9a780da144a5f"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5dc9223e36c6c90287e170eb5c17ec6e8b05f8de19c37bf1cbab8e9f0295c56"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda6f9073b0b8326958de10cf1a13bebe1d49a0cecb13bea953d27ac1d4a6542"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f22d9a012683c7f19df43af0baa38745e10e95c60887e6b2f79c393cfc241fd"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3a7f4849369c76447449bc59afa8ffb127a0ae7ebde341c90c105c7dc5c490"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6a092734ab7a2bb620c5bd2f36a1b525b3710d5bbad3f83bcaa28c479b08440a"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8d643d7f72621d7888270276f196d40857527427f5a1d594a83cbeffb9d61921"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dfb3c4ede7b8eeac38007895348706fce48709fbd96e9f0cc43b9e9e0807e195"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-win32.whl", hash = "sha256:7047e77159249da658b66219aac1dcdfc56f87e7b83084ea8c43bf51f6744f77"}, + {file = "clickhouse_connect-0.6.19-cp311-cp311-win_amd64.whl", hash = "sha256:dcf79a0ee17c5be7ea359a78bb3929ff96df5f8306fc9d277a08465e7502a527"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:046c952866ebca9eef0718619bec2014736c4dc0531e7240780588ec4b7ab03b"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40c05f565930584018a331ade5b6b4d1c56908fe6d206644b442e118d4c8fd5f"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0219ca2ed41ba9f44cfdf0f661d40fa32634eb2a81e28ef1a72cf63579dceec"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524fbfbcc902f7cd1177e397e601990dfc280f617e0dbc11c90c439c8a2b86d7"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54eb3fede4d3416fbb20089689dbfa7b301d247b7e803a5a725c6409a6ca88e7"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2314959b1d84371fea3c904a3e8b176c7b9f243204ffa18eaf558cbb48b13471"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f3e932c686b039d9880fe6be1f4731d5207a176fa658f528e2adcebdc0499e40"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dac0973e65fca9e693577466843d87d80457b74f4e6c899bc9cbc62edd96058d"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-win32.whl", hash = "sha256:ba15a7fd17c4fc701264e06f2a48eb02eec0a486ee1e6223d25e89529f5c63e5"}, + {file = "clickhouse_connect-0.6.19-cp312-cp312-win_amd64.whl", hash = "sha256:b5ac9bc50a328bf3d27b55aed32a6f1bf37169a56cac002690ba41707fbcfc5a"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:03c2b514505f0e13a25c543abd5c5ef853f5afb22c14fe415fcc7d068e4bab04"}, + {file = 
"clickhouse_connect-0.6.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bafe5ad3871461746be74bc3f5f153b4c37e8bb4f115c94ac788c36f1bfa3e"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:186e9c808607708d956adf239a220bc358ce76e9ac6109d5286544e5e55d72c6"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30293c134b4a78cabca0db9d3177750a966ebc7fe1e6f0e1bde762d210f210bf"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8e58315b90decae32dcd25336d6a81120174b7f31fa6483d9683a250b61d9540"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ae5db8592082c90c654c023a462a3713d05031ee8f2ec46f7c19dae1454c34ec"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0862f51d4a546116ddbf275f5e012c955565fd21a23f7f3f069274e40ae27966"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-win32.whl", hash = "sha256:9df87d232f1669b72499249579ad121ba46e6625cc68812d4b2676214ea35bd0"}, + {file = "clickhouse_connect-0.6.19-cp37-cp37m-win_amd64.whl", hash = "sha256:75c9a7ba73c305e6267781b4100387987a5b8e6c0fba15bbf9d6091638fed8b8"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec913a80b1f659574b9b0406fc0781af46d1124356dd6b695a29ecca5edf9f9e"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8ca9036a0068fac1a5467e8f6994f99b707435fd94f89fdea8f8399b492575fd"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4079ca07e482f948705adc8983b23d7fdf50c24125942946a469fb197fdef3c7"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b09f8b1dda20f5da663c6082f617996413e33e7912906e33e28f60148f032e74"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749a0e491af25646c02cb37d62a1a4448b8943cd5a6fc5e04af1d580f8d34ccd"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5d69e63d76319eb3bb397953b31d7f13c49c8b4a770cb6b88d5376814ccaf2da"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fcc5b3a296a4e95fb704bcaf933b940a44ca000c4dc62082bd27626de51a55ed"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94680f91d7912d67f5f8c44bb12bc609fb45e2d784e1d2e295469e103a20b703"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-win32.whl", hash = "sha256:542d2d64f5b4306845afd5338bcbab5ec9cfbf92c834fc20d1658988d0c93860"}, + {file = "clickhouse_connect-0.6.19-cp38-cp38-win_amd64.whl", hash = "sha256:6ae2c25a88b7836c01bde9b067c1b944884ca3d899c4451070c5925f2e89a670"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c17cf413ba9506e88f49d856420c761eb29300c2cf7154251ce184e75f7f7d4d"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:660c5b915a23be57bc2c462407d4edd1117627e1f908adcda3783529c116f0aa"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c688d9c083e6caa351295af3cd52dcaf3aea99f7c3725325024819aa431a8862"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:47d9d1cb60caf40386971d8f6d6a1d4217888715eaa719ab21e93a2c839873d7"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86bd89335c21a34ebf8049de870ac2e1a29a623bf31e9393ead7fd7d8784d445"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:04ed161761fb7840c34dcafe0f106be918cc94be196ecb9d14099a4b48c14fe4"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e36e60c5cf39803e977bc563f30b1ec17fefc23af2a2028c5383d6eda9944cd5"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:deb16de522d99aef2872e25ab065137d82507fbae6219a762c6f797aa4c073a5"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-win32.whl", hash = "sha256:286ca314fa143d0aca7c7feac01d74d05d9abf213662ac4c8554c064c34b9275"}, + {file = "clickhouse_connect-0.6.19-cp39-cp39-win_amd64.whl", hash = "sha256:9c4792301ac7f8958aa67f5a36b4602382877917fd00c9cbb7bd50331fd834de"}, + {file = "clickhouse_connect-0.6.19-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8857bd6368e27ac51394d4522d1033b105f4efb777dbb412e43fae5814ee86b6"}, + {file = "clickhouse_connect-0.6.19-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9499d119c26d6b0de31cbf17b9ef78a680cf6f95c8f1cdcf96a9ceef303a2c15"}, + {file = "clickhouse_connect-0.6.19-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c954713be14a565f105ebb44b2a871b2a820359373b68ff4b0f5cc16eaa1bb"}, + {file = "clickhouse_connect-0.6.19-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9373e54117755f7f9bbb9f25587c250d94dfdc12e1ad7232cc396d6d5ccc4275"}, + {file = "clickhouse_connect-0.6.19-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3db7bbca628bf5c17791f42b75b19a383dbb3dcc24fd56703fb31ace4d85533"}, + {file = "clickhouse_connect-0.6.19-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ef4da77a3505741f290bce099eb78d8377187402bded93812a36d10390eb2913"}, + {file = "clickhouse_connect-0.6.19-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b555023ac39d62c1279f4dc55e844544637621f04b587cd5d485c3c3e504e9fd"}, + {file = "clickhouse_connect-0.6.19-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bdba4a825cd356af5d2aabbf3bf5033cded4d62ef2df57253c386c1434e75d6"}, + {file = "clickhouse_connect-0.6.19-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeba802ba88a8bd74faaddcd496942a2725313a104b510fa6ba2353784b877b8"}, + {file = "clickhouse_connect-0.6.19-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:828979f71144a4a2b0aee7f0f6e5f611f0a2ee2a46b59bac1104597d414e4653"}, + {file = "clickhouse_connect-0.6.19-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00a99051d1206ca36cb2c07941e2cf76b5e2a09d29a9a7af585c097b749f5777"}, + {file = "clickhouse_connect-0.6.19-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16adc4642257e95f3135ad09e97d2cca888504e7454ed98af49ca3f313956b1d"}, + {file = "clickhouse_connect-0.6.19-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b210f55fb16072cc36d03428355f102d05db49e13b793d4ef542254b009a4362"}, + {file = "clickhouse_connect-0.6.19-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:80adf4b1beb436383c9a050586104e7c4fa60fc7569aaa5973c957b5ce94651b"}, + {file = "clickhouse_connect-0.6.19-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:46a886de996982727126419538739d59431003e204c2a9ee3f89ab31a3614b26"}, + {file = "clickhouse_connect-0.6.19-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9f164b340e65cc8e2175e82add77df90a36e7056636d2f4d323dd115447c44a8"}, + {file = "clickhouse_connect-0.6.19-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b199aa4394742c46d877590ed91370cc8d80837e280500d6a4d0709a8dd2e81f"}, + {file = "clickhouse_connect-0.6.19-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0003aa247c10339622551af651d564d8013a28bc3dda1def6d681767539e642"}, + {file = "clickhouse_connect-0.6.19-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2a6be9d54bee4c37e5aceb5da19918371d84b8e66da3e043801a149c58b117e"}, + {file = "clickhouse_connect-0.6.19-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d4723c3c778ea620800ea428060f99d3e75e86c6c1b13701679e022c133c1538"}, ] [package.dependencies] @@ -879,22 +879,20 @@ files = [ [[package]] name = "comm" -version = "0.1.4" +version = "0.2.0" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, - {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, + {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, + {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, ] [package.dependencies] traitlets = ">=4" [package.extras] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] test = ["pytest"] -typing = ["mypy (>=0.990)"] [[package]] name = "contourpy" @@ -1191,17 +1189,18 @@ files = [ [[package]] name = "deltalake" -version = "0.10.1" +version = "0.13.0" description = "Native Delta Lake Python binding based on delta-rs with Pandas integration" optional = true python-versions = ">=3.7" files = [ - {file = "deltalake-0.10.1-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:f7e4b0fb89d5c1710d954b3a71791dc1f71584e9e4d4129eccc0addc4a3a629c"}, - {file = "deltalake-0.10.1-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:07becd9aff2ba07829af9bf84e61ee238f3756536ace0455c39b7415edb5d49b"}, - {file = "deltalake-0.10.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ea1db94cb0ef152134a63267d2450daf95684017493c85f4f98f5b5b39bac4"}, - {file = "deltalake-0.10.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6511ef9f7b03f5650bbcf4585d8fee02f26da000c518b27a4cb16eb573af226"}, - {file = "deltalake-0.10.1-cp37-abi3-win_amd64.whl", hash = "sha256:c9527c81b2f11ab58def12d84b059675c6a92bb8cb879690426a3aeff43760bb"}, - {file = "deltalake-0.10.1.tar.gz", hash = "sha256:c59e9e206442917be2b5c8bbecf1324c340a37084f663ec9345ab6ed200f0e14"}, + {file = "deltalake-0.13.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:5dd8a7d1e4f4733b743c181b4a83a30283871836a40894f65af7b2e4a1eab907"}, + {file = "deltalake-0.13.0-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = 
"sha256:557bb14f181c59bba85c6c3fbc970c50c3a47e6e99bee266e54c54e9051b4bce"}, + {file = "deltalake-0.13.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:15aa88a24b35042ca7ed0d3cc33bde208e6f1786d7bd89b334453b46aa3afcbf"}, + {file = "deltalake-0.13.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6789ace8208d4ecea2d7a20665d15fb97e9203eeba948a59c46e36005833c6a9"}, + {file = "deltalake-0.13.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc425191fc4813a237d2e8b64581c313246329bb5025ff2310b6d0b70c58d4e"}, + {file = "deltalake-0.13.0-cp37-abi3-win_amd64.whl", hash = "sha256:89ea7292d651a56c4c8dac83ecc77b5e6182e3f1ada0908847fadbf0cff75e18"}, + {file = "deltalake-0.13.0.tar.gz", hash = "sha256:e433215eadbc4b845a5b66fc21d8cd18993b4e9fa3fa604b8cd2915271e4e02e"}, ] [package.dependencies] @@ -1209,7 +1208,7 @@ pyarrow = ">=8" [package.extras] devel = ["black", "mypy", "packaging (>=20)", "pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-timeout", "ruff", "sphinx (<=4.5)", "sphinx-rtd-theme", "toml", "wheel"] -pandas = ["pandas (<2)"] +pandas = ["pandas"] pyspark = ["delta-spark", "numpy (==1.22.2)", "pyspark"] [[package]] @@ -1653,13 +1652,13 @@ shapely = ">=1.8.0" [[package]] name = "google-api-core" -version = "2.12.0" +version = "2.13.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.12.0.tar.gz", hash = "sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553"}, - {file = "google_api_core-2.12.0-py3-none-any.whl", hash = "sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160"}, + {file = "google-api-core-2.13.0.tar.gz", hash = "sha256:abc1da067c9026c6cd15dfbd4f6ad07735a62eeadc541d1cc296314447fc3aad"}, + {file = "google_api_core-2.13.0-py3-none-any.whl", hash = "sha256:44ed591f6c3a0c1ac7a91867d2b3841f92839f860f3d3fe26c464dbd50f97094"}, ] [package.dependencies] @@ -2210,13 +2209,13 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-resources" -version = "6.1.0" +version = "6.1.1" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, - {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, + {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, + {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, ] [package.dependencies] @@ -2412,13 +2411,13 @@ referencing = ">=0.28.0" [[package]] name = "jupyter-client" -version = "8.5.0" +version = "8.6.0" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.5.0-py3-none-any.whl", hash = "sha256:c3877aac7257ec68d79b5c622ce986bd2a992ca42f6ddc9b4dd1da50e89f7028"}, - {file = "jupyter_client-8.5.0.tar.gz", hash = "sha256:e8754066510ce456358df363f97eae64b50860f30dc1fe8c6771440db3be9a63"}, + {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = 
"sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, ] [package.dependencies] @@ -2935,13 +2934,13 @@ files = [ [[package]] name = "nbclient" -version = "0.8.0" +version = "0.9.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false python-versions = ">=3.8.0" files = [ - {file = "nbclient-0.8.0-py3-none-any.whl", hash = "sha256:25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548"}, - {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, + {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, + {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, ] [package.dependencies] @@ -3450,13 +3449,13 @@ tenacity = ">=6.2.0" [[package]] name = "plotnine" -version = "0.12.3" +version = "0.12.4" description = "A Grammar of Graphics for Python" optional = false python-versions = ">=3.8" files = [ - {file = "plotnine-0.12.3-py3-none-any.whl", hash = "sha256:3868a538aecaf44505f7218e2ffedc5611a7b23c2cad4a1e28e1255b403f467b"}, - {file = "plotnine-0.12.3.tar.gz", hash = "sha256:a38dcb3607fc003c1e59ae0c9d535dae7817650d1cbc2e56e56e5b3de88dfe99"}, + {file = "plotnine-0.12.4-py3-none-any.whl", hash = "sha256:12748f346f107c33f3e0658ac46fbb052205ae7e97ffaf52be68310e5d29f799"}, + {file = "plotnine-0.12.4.tar.gz", hash = "sha256:adc41a672503594445a8fa19872799253bd0784cdbd5a1cc16657a1dd20ba905"}, ] [package.dependencies] @@ -3685,6 +3684,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -3757,47 +3758,47 @@ files = [ [[package]] name = "pyarrow" -version = "14.0.0" +version = "14.0.1" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-14.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:4fce1db17efbc453080c5b306f021926de7c636456a128328797e574c151f81a"}, - {file = "pyarrow-14.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:28de7c05b4d7a71ec660360639cc9b65ceb1175e0e9d4dfccd879a1545bc38f7"}, - {file = "pyarrow-14.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1541e9209c094e7f4d7b43fdd9de3a8c71d3069cf6fc03b59bf5774042411849"}, - {file = "pyarrow-14.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c05e6c45d303c80e41ab04996430a0251321f70986ed51213903ea7bc0b7efd"}, - {file 
= "pyarrow-14.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:426ffec63ab9b4dff23dec51be2150e3a4a99eb38e66c10a70e2c48779fe9c9d"}, - {file = "pyarrow-14.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:968844f591902160bd3c9ee240ce8822a3b4e7de731e91daea76ad43fe0ff062"}, - {file = "pyarrow-14.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:dcedbc0b4ea955c530145acfe99e324875c386419a09db150291a24cb01aeb81"}, - {file = "pyarrow-14.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:97993a12aacc781efad9c92d4545a877e803c4d106d34237ec4ce987bec825a3"}, - {file = "pyarrow-14.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80225768d94024d59a31320374f5e6abf8899866c958dfb4f4ea8e2d9ec91bde"}, - {file = "pyarrow-14.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b61546977a8bd7e3d0c697ede723341ef4737e761af2239aef6e1db447f97727"}, - {file = "pyarrow-14.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42509e6c93b4a1c8ae8ccd939a43f437097783fe130a1991497a6a1abbba026f"}, - {file = "pyarrow-14.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3eccce331a1392e46573f2ce849a9ee3c074e0d7008e9be0b44566ac149fd6a1"}, - {file = "pyarrow-14.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ecc463c45f2b6b36431f5f2025842245e8c15afe4d42072230575785f3bb00c6"}, - {file = "pyarrow-14.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:4362ed90def81640addcd521811dd16a13015f0a8255bec324a41262c1524b6c"}, - {file = "pyarrow-14.0.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:2fbb7ab62537782c5ab31aa08db0e1f6de92c2c515fdfc0790128384e919adcb"}, - {file = "pyarrow-14.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad7095f8f0fe0bfa3d3fca1909b8fa15c70e630b0cc1ff8d35e143f5e2704064"}, - {file = "pyarrow-14.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6602272fce71c0fb64f266e7cdbe51b93b00c22fc1bb57f2b0cb681c4aeedf4"}, - {file = "pyarrow-14.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b2b8f87951b08a3e72265c8963da3fe4f737bb81290269037e047dd172aa591"}, - {file = "pyarrow-14.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a1c9675966662a042caebbaafa1ae7fc26291287ebc3da06aa63ad74c323ec30"}, - {file = "pyarrow-14.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:771079fddc0b4440c41af541dbdebc711a7062c93d3c4764476a9442606977db"}, - {file = "pyarrow-14.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:c4096136318de1c4937370c0c365f949961c371201c396d8cc94a353f342069d"}, - {file = "pyarrow-14.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:6c94056fb5f0ee0bae2206c3f776881e1db2bd0d133d06805755ae7ac5145349"}, - {file = "pyarrow-14.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:687d0df1e08876b2d24d42abae129742fc655367e3fe6700aa4d79fcf2e3215e"}, - {file = "pyarrow-14.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f4054e5ee6c88ca256a67fc8b27f9c59bcd385216346265831d462a6069033f"}, - {file = "pyarrow-14.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:768b962e4c042ab2c96576ca0757935472e220d11af855c7d0be3279d7fced5f"}, - {file = "pyarrow-14.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:77293b1319c7044f68ebfa43db8c929a0a5254ce371f1a0873d343f1460171d0"}, - {file = "pyarrow-14.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d2bc7c53941d85f0133b1bd5a814bca0af213922f50d8a8dc0eed4d9ed477845"}, - {file = "pyarrow-14.0.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:378955365dd087c285ef4f34ad939d7e551b7715326710e8cd21cfa2ce511bd7"}, - {file = "pyarrow-14.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:f05e81b4c621e6ad4bcd8f785e3aa1d6c49a935818b809ea6e7bf206a5b1a4e8"}, - {file = "pyarrow-14.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6867f6a8057eaef5a7ac6d27fe5518133f67973c5d4295d79a943458350e7c61"}, - {file = "pyarrow-14.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca54b87c46abdfe027f18f959ca388102bd7326c344838f72244807462d091b2"}, - {file = "pyarrow-14.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35abf61bd0cc9daca3afc715f6ba74ea83d792fa040025352624204bec66bf6a"}, - {file = "pyarrow-14.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:65c377523b369f7ef1ba02be814e832443bb3b15065010838f02dae5bdc0f53c"}, - {file = "pyarrow-14.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8a1e470e4b5f7bda7bede0410291daec55ab69f346d77795d34fd6a45b41579"}, - {file = "pyarrow-14.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:466c1a5a7a4b279cfa363ac34dedd0c3c6af388cec9e6a468ffc095a6627849a"}, - {file = "pyarrow-14.0.0.tar.gz", hash = "sha256:45d3324e1c9871a07de6b4d514ebd73225490963a6dd46c64c465c4b6079fe1e"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, + {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, + {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, + {file = 
"pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, + {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, + {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, + {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, + {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, ] [package.dependencies] @@ -4053,71 +4054,71 @@ plugins = ["importlib-metadata"] [[package]] name = "pyinstrument" -version = "4.6.0" +version = "4.6.1" description = "Call stack profiler for Python. Shows you why your code is slow!" 
optional = false python-versions = ">=3.7" files = [ - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:679b5397e3e6c0d6f56df50ba8c683543df4f1f7c1df2e2eb728e275bde2c85b"}, - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:18479ffa0c922695ba2befab29521b62bfe75debef48d818cea46262cee48a1e"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daba103955d0d0b37b8bc20a4e8cc6477e839ce5984478fcf3f7cee8318e9636"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d93451e9c7650629b0bc12caa7390f81d1a15835c07f7dc170e953d4684ed1e7"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01009a7b58a6f11bf5560c23848ea2881acac974b0841fe5d365ef154baabd6f"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:288ea44da6333dacc77b4ba2149dba3dc1e9fbbebd3d5dc51a66c20839d80ef3"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecc106213146dd90659a1483047b3a1c2e174fb190c0e109234e524a4651e377"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cd8ab30c8dcd1511e9b3b98f601f17f2c5c9df1d28f8298d215c63d68919bdc"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win32.whl", hash = "sha256:40e3656e6ace5a140880bd980a25f6a356c094c36e28ed1bf935d7349a78b1b6"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9623fc3fde47ae90ad5014737e37034b4abc3fbfb455b7b56cc095f9037d5af"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:beaaa3b647b3a4cbd34b71eacaa31e3eb90e1bf53e15ada3ac7e9df09d737239"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c69ab570609ac93b5f4ab2e5ccbf8add4f69a962b06307eea66ba65b5ad9d38"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5992748a74ec7ff445e4b56b5e316673c34b6cdbd3755111f7c023d8a141f001"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb1ba76c4e912cae159ab9729c7b31bb6d7fe8ed1f0fafce74484a4bb159c240"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:674868ebc3663b01d7d059a6f5cdeff6f18b49e217617720a5d645a6b55ead03"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:514a0ced357ff400988f599b0294d05e3b68468f9ab876f204bf12765f7fdb1b"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ccd1f5b4ad35c734dcf2d08d80b5b37205b4e84aa71fe76f95e43bd30c5eef9"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:611c6cd33f42f19e46d99eeef3b84a47d33fe34cdb0ce6e3635d2ee5038706a3"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win32.whl", hash = "sha256:d20b5cf79bca1b3d425a7362457621741393b1d5ce2d920583541b947bc8a368"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ecd8cf03b04dc1b7f151896228993c6aa0fa897cdd517ea127465bc1c826c5b5"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3d4bed520c0f689a75bca4951f6b7fbad96851e8461086c98e03eb726f8a412a"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b74745f1d22133da8d4a38dd0c78c02c00154a5b7683bdd5df56a7c7705a979b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ab698400e8401597e39c4816efa247f2b98c9b4e59e3ec25d534ae6887bd93"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de1a36a083b324dafe5e2880e5e04267a1983beb027f12c3dc361ddbe3acf9af"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8adc4f87d4289c1f04f19451b5133b8e307bd9b08c364c48e007ba663fefbf1b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08fbc75d3615be6259b7af0c173c7bc48acb6e7bd758678d54eb411ba2903052"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d86fea6ce117bcff642e24208eb573c00d78b4c2934eb9bd5f915751980cc9bd"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23a3b21373e0c8bf0d00dda79989fcab0bb1d30094f7b210d40d2226fe20e141"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win32.whl", hash = "sha256:a498c82d93621c5cf736e4660142ac0c3bbcb7b059bcbd4278a6364037128656"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:9116154446b9999f6524e9db29310aee6476a5a471c276928f2b46b6655a2dcc"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704c6d38abef8fca2e1085756c9574ea180f7ac866aab6943b483152c2828c2a"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbebdc11d4fc6f3123c046d84db88c7f605d53247e3f357314d0c5775d1beaf4"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7a7bae4cce5f8d084153857cedbce29ca8274c9924884d0461a5db48619c5d"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03289b10715e261a5c33b267d0a430d1b408f929922fde0a9fd311835c60351b"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7f83544ff9abfacdf64b39498ca3dcd454956e44aedb5f67626b7212291c9160"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:40640f02fe7865540e8a1e51bf7f9d2403e3364c3b7edfdb9dae5eb5596811da"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f3719464888d7303e1081996bc56ab75ef5cdf7ef69ccbb7b29f48eb37d8f8b9"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:46e16de6bd3b74ef01b6457d862fee751515315edb5e9283205e45299a29ac49"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9ded87ae11cb0a95a767c817908833ec0821fe0e81650968b201a031edf4bc15"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8bf16e459a868d9dbaacff4f0a0acd6ad78ce36f2aceabf21e9fd0c3b6aca0d4"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb83e445795431c3d867b298c0583ee27717bbc50e5120a4c98575c979ab3ab8"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29072b1be183e173d7b0f12caf29f8717d273afbf34df950f5fa0d98127cd3fb"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09502af2a383c59e5a0d3bebfab7e5845f79122348358e9e52b2b0187db84a44"}, - 
{file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23c982eb9c4d2f8fe553dacb9bdc0991170a0998b94c84f75c2a052e8af4c74"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f7a38ef482f2151393e729c5582191e4ab05f0ed1fa56b16c2377ff3129107af"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e983e16c2fdfb752387133380859c3414e119e41c14f39f5f869f29dcf6e995c"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d00c87e5cea48a562d67f0436999463b7989cff2e4c196b0e8ba06d515f191a9"}, - {file = "pyinstrument-4.6.0-cp38-cp38-win32.whl", hash = "sha256:a24c95cabf2ca5d79b62dbc8ff17749768b8aafd777841352f59f4ffd6688782"}, - {file = "pyinstrument-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3d88b66dbbcdc6e4c57bd8574ad9d096cd23285eee0f4a5cf74f0e0df6aa190"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2bcfec45cdbb9edf6d5853debac4a792de589e621be07a71dc76acb36e144a3a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e790515a22844bbccaa388c7715b037c45a8d0155c4a6f2990659998a8920501"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93a30e0d93633a28d4adcf7d7e2d158d6331809b95c2c4a155da17ea1e43eaa3"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa554eb8ef1c54849dbf480965b073f39b39b517e466ce241808a00398f9742a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e289898c644cbbb61d931bbcb6505e2a279ad1122612c9098bfb0958ebf5764"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20ce0f1612a019888a6b94fa7f1e7862842f0b5219282e3354d5b35aceb363f6"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4935f3cdb9062fceac65c50de76f07e05cf630bd3a9c663fedc9e88b5efe7d7c"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dc9c4577ef4b06ae1592c920d0a4f0f0db587a16f530c629ad93e125bc79ebb7"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win32.whl", hash = "sha256:3ec6b04d8cfb34aec48de7fa77aeb919e8e7e19909740ab7a5553339f6f4c53a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a6d2e5c15f989629fac41536ec2ca1fe81359fadf4dadf2ff24fe96b389f6df"}, - {file = "pyinstrument-4.6.0.tar.gz", hash = "sha256:3e509e879c853dbc5fdc1757f0cfdbf8bee899c80f53d504a7df28898f0fa8ed"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73476e4bc6e467ac1b2c3c0dd1f0b71c9061d4de14626676adfdfbb14aa342b4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4d1da8efd974cf9df52ee03edaee2d3875105ddd00de35aa542760f7c612bdf7"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507be1ee2f2b0c9fba74d622a272640dd6d1b0c9ec3388b2cdeb97ad1e77125f"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cee6de08eb45754ef4f602ce52b640d1c535d934a6a8733a974daa095def37"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c7873e8cec92321251fdf894a72b3c78f4c5c20afdd1fef0baf9042ec843bb04"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a242f6cac40bc83e1f3002b6b53681846dfba007f366971db0bf21e02dbb1903"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:97c9660cdb4bd2a43cf4f3ab52cffd22f3ac9a748d913b750178fb34e5e39e64"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e304cd0723e2b18ada5e63c187abf6d777949454c734f5974d64a0865859f0f4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win32.whl", hash = "sha256:cee21a2d78187dd8a80f72f5d0f1ddb767b2d9800f8bb4d94b6d11f217c22cdb"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:2000712f71d693fed2f8a1c1638d37b7919124f367b37976d07128d49f1445eb"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a366c6f3dfb11f1739bdc1dee75a01c1563ad0bf4047071e5e77598087df457f"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6be327be65d934796558aa9cb0f75ce62ebd207d49ad1854610c97b0579ad47"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e160d9c5d20d3e4ef82269e4e8b246ff09bdf37af5fb8cb8ccca97936d95ad6"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ffbf56605ef21c2fcb60de2fa74ff81f417d8be0c5002a407e414d6ef6dee43"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92cc4924596d6e8f30a16182bbe90893b1572d847ae12652f72b34a9a17c24a"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f4b48a94d938cae981f6948d9ec603bab2087b178d2095d042d5a48aabaecaab"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7a386392275bdef4a1849712dc5b74f0023483fca14ef93d0ca27d453548982"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:871b131b83e9b1122f2325061c68ed1e861eebcb568c934d2fb193652f077f77"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win32.whl", hash = "sha256:8d8515156dd91f5652d13b5fcc87e634f8fe1c07b68d1d0840348cdd50bf5ace"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb868fbe089036e9f32525a249f4c78b8dc46967612393f204b8234f439c9cc4"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a18cd234cce4f230f1733807f17a134e64a1f1acabf74a14d27f583cf2b183df"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:574cfca69150be4ce4461fb224712fbc0722a49b0dc02fa204d02807adf6b5a0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e02cf505e932eb8ccf561b7527550a67ec14fcae1fe0e25319b09c9c166e914"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832fb2acef9d53701c1ab546564c45fb70a8770c816374f8dd11420d399103c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb57e9607545623ebe462345b3d0c4caee0125d2d02267043ece8aca8f4ea0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9be89e7419bcfe8dd6abb0d959d6d9c439c613a4a873514c43d16b48dae697c9"}, + {file = 
"pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:476785cfbc44e8e1b1ad447398aa3deae81a8df4d37eb2d8bbb0c404eff979cd"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e9cebd90128a3d2fee36d3ccb665c1b9dce75261061b2046203e45c4a8012d54"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win32.whl", hash = "sha256:1d0b76683df2ad5c40eff73607dc5c13828c92fbca36aff1ddf869a3c5a55fa6"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:c4b7af1d9d6a523cfbfedebcb69202242d5bd0cb89c4e094cc73d5d6e38279bd"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79ae152f8c6a680a188fb3be5e0f360ac05db5bbf410169a6c40851dfaebcce9"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cad2745964c174c65aa75f1bf68a4394d1b4d28f33894837cfd315d1e836f0"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb81f66f7f94045d723069cf317453d42375de9ff3c69089cf6466b078ac1db4"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab30ae75969da99e9a529e21ff497c18fdf958e822753db4ae7ed1e67094040"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f36cb5b644762fb3c86289324bbef17e95f91cd710603ac19444a47f638e8e96"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8b45075d9dbbc977dbc7007fb22bb0054c6990fbe91bf48dd80c0b96c6307ba7"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:475ac31477f6302e092463896d6a2055f3e6abcd293bad16ff94fc9185308a88"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:29172ab3d8609fdf821c3f2562dc61e14f1a8ff5306607c32ca743582d3a760e"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:bd176f297c99035127b264369d2bb97a65255f65f8d4e843836baf55ebb3cee4"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:23e9b4526978432e9999021da9a545992cf2ac3df5ee82db7beb6908fc4c978c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2dbcaccc9f456ef95557ec501caeb292119c24446d768cb4fb43578b0f3d572c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2097f63c66c2bc9678c826b9ff0c25acde3ed455590d9dcac21220673fe74fbf"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:205ac2e76bd65d61b9611a9ce03d5f6393e34ec5b41dd38808f25d54e6b3e067"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f414ddf1161976a40fc0a333000e6a4ad612719eac0b8c9bb73f47153187148"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65e62ebfa2cd8fb57eda90006f4505ac4c70da00fc2f05b6d8337d776ea76d41"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d96309df4df10be7b4885797c5f69bb3a89414680ebaec0722d8156fde5268c3"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f3d1ad3bc8ebb4db925afa706aa865c4bfb40d52509f143491ac0df2440ee5d2"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win32.whl", hash = "sha256:dc37cb988c8854eb42bda2e438aaf553536566657d157c4473cc8aad5692a779"}, + {file = 
"pyinstrument-4.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:2cd4ce750c34a0318fc2d6c727cc255e9658d12a5cf3f2d0473f1c27157bdaeb"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ca95b21f022e995e062b371d1f42d901452bcbedd2c02f036de677119503355"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ac1e1d7e1f1b64054c4eb04eb4869a7a5eef2261440e73943cc1b1bc3c828c18"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0711845e953fce6ab781221aacffa2a66dbc3289f8343e5babd7b2ea34da6c90"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b7d28582017de35cb64eb4e4fa603e753095108ca03745f5d17295970ee631f"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7be57db08bd366a37db3aa3a6187941ee21196e8b14975db337ddc7d1490649d"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a0ac0f56860398d2628ce389826ce83fb3a557d0c9a2351e8a2eac6eb869983"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a9045186ff13bc826fef16be53736a85029aae3c6adfe52e666cad00d7ca623b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6c4c56b6eab9004e92ad8a48bb54913fdd71fc8a748ae42a27b9e26041646f8b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win32.whl", hash = "sha256:37e989c44b51839d0c97466fa2b623638b9470d56d79e329f359f0e8fa6d83db"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:5494c5a84fee4309d7d973366ca6b8b9f8ba1d6b254e93b7c506264ef74f2cef"}, + {file = "pyinstrument-4.6.1.tar.gz", hash = "sha256:f4731b27121350f5a983d358d2272fe3df2f538aed058f57217eef7801a89288"}, ] [package.extras] @@ -5020,28 +5021,28 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.1.4" +version = "0.1.5" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.4-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:864958706b669cce31d629902175138ad8a069d99ca53514611521f532d91495"}, - {file = "ruff-0.1.4-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9fdd61883bb34317c788af87f4cd75dfee3a73f5ded714b77ba928e418d6e39e"}, - {file = "ruff-0.1.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4eaca8c9cc39aa7f0f0d7b8fe24ecb51232d1bb620fc4441a61161be4a17539"}, - {file = "ruff-0.1.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9a1301dc43cbf633fb603242bccd0aaa34834750a14a4c1817e2e5c8d60de17"}, - {file = "ruff-0.1.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e8db8ab6f100f02e28b3d713270c857d370b8d61871d5c7d1702ae411df683"}, - {file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:80fea754eaae06335784b8ea053d6eb8e9aac75359ebddd6fee0858e87c8d510"}, - {file = "ruff-0.1.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bc02a480d4bfffd163a723698da15d1a9aec2fced4c06f2a753f87f4ce6969c"}, - {file = "ruff-0.1.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862811b403063765b03e716dac0fda8fdbe78b675cd947ed5873506448acea4"}, - {file = "ruff-0.1.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58826efb8b3efbb59bb306f4b19640b7e366967a31c049d49311d9eb3a4c60cb"}, - {file = "ruff-0.1.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fdfd453fc91d9d86d6aaa33b1bafa69d114cf7421057868f0b79104079d3e66e"}, - {file = "ruff-0.1.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e8791482d508bd0b36c76481ad3117987301b86072158bdb69d796503e1c84a8"}, - {file = "ruff-0.1.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01206e361021426e3c1b7fba06ddcb20dbc5037d64f6841e5f2b21084dc51800"}, - {file = "ruff-0.1.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:645591a613a42cb7e5c2b667cbefd3877b21e0252b59272ba7212c3d35a5819f"}, - {file = "ruff-0.1.4-py3-none-win32.whl", hash = "sha256:99908ca2b3b85bffe7e1414275d004917d1e0dfc99d497ccd2ecd19ad115fd0d"}, - {file = "ruff-0.1.4-py3-none-win_amd64.whl", hash = "sha256:1dfd6bf8f6ad0a4ac99333f437e0ec168989adc5d837ecd38ddb2cc4a2e3db8a"}, - {file = "ruff-0.1.4-py3-none-win_arm64.whl", hash = "sha256:d98ae9ebf56444e18a3e3652b3383204748f73e247dea6caaf8b52d37e6b32da"}, - {file = "ruff-0.1.4.tar.gz", hash = "sha256:21520ecca4cc555162068d87c747b8f95e1e95f8ecfcbbe59e8dd00710586315"}, + {file = "ruff-0.1.5-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:32d47fc69261c21a4c48916f16ca272bf2f273eb635d91c65d5cd548bf1f3d96"}, + {file = "ruff-0.1.5-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:171276c1df6c07fa0597fb946139ced1c2978f4f0b8254f201281729981f3c17"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ef33cd0bb7316ca65649fc748acc1406dfa4da96a3d0cde6d52f2e866c7b39"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2c205827b3f8c13b4a432e9585750b93fd907986fe1aec62b2a02cf4401eee6"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb408e3a2ad8f6881d0f2e7ad70cddb3ed9f200eb3517a91a245bbe27101d379"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f20dc5e5905ddb407060ca27267c7174f532375c08076d1a953cf7bb016f5a24"}, + {file = 
"ruff-0.1.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aafb9d2b671ed934998e881e2c0f5845a4295e84e719359c71c39a5363cccc91"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4894dddb476597a0ba4473d72a23151b8b3b0b5f958f2cf4d3f1c572cdb7af7"}, + {file = "ruff-0.1.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00a7ec893f665ed60008c70fe9eeb58d210e6b4d83ec6654a9904871f982a2a"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8c11206b47f283cbda399a654fd0178d7a389e631f19f51da15cbe631480c5b"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fa29e67b3284b9a79b1a85ee66e293a94ac6b7bb068b307a8a373c3d343aa8ec"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9b97fd6da44d6cceb188147b68db69a5741fbc736465b5cea3928fdac0bc1aeb"}, + {file = "ruff-0.1.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:721f4b9d3b4161df8dc9f09aa8562e39d14e55a4dbaa451a8e55bdc9590e20f4"}, + {file = "ruff-0.1.5-py3-none-win32.whl", hash = "sha256:f80c73bba6bc69e4fdc73b3991db0b546ce641bdcd5b07210b8ad6f64c79f1ab"}, + {file = "ruff-0.1.5-py3-none-win_amd64.whl", hash = "sha256:c21fe20ee7d76206d290a76271c1af7a5096bc4c73ab9383ed2ad35f852a0087"}, + {file = "ruff-0.1.5-py3-none-win_arm64.whl", hash = "sha256:82bfcb9927e88c1ed50f49ac6c9728dab3ea451212693fe40d08d314663e412f"}, + {file = "ruff-0.1.5.tar.gz", hash = "sha256:5cbec0ef2ae1748fb194f420fb03fb2c25c3258c86129af7172ff8f198f125ab"}, ] [[package]] @@ -5408,13 +5409,13 @@ sqlalchemy = ">=1.0.0" [[package]] name = "sqlglot" -version = "19.0.3" +version = "19.1.1" description = "An easily customizable SQL parser and transpiler" optional = false python-versions = ">=3.7" files = [ - {file = "sqlglot-19.0.3-py3-none-any.whl", hash = "sha256:27df5eed15b92426113ac7cbb072c11024f880347ca0e90bb266af456853d20a"}, - {file = "sqlglot-19.0.3.tar.gz", hash = "sha256:69ea28d2e215b96c887952ea43beda8c449d19574b7110ca07e7218c43d109f3"}, + {file = "sqlglot-19.1.1-py3-none-any.whl", hash = "sha256:9f80c39c05312d5bb539e3fbf49ba9a9ca7f0b4a0744aae253f07065cb7fa7c3"}, + {file = "sqlglot-19.1.1.tar.gz", hash = "sha256:88c8b17cd5e6c9244e5ee8a717629e51e2bc76f861dd55fa9cce2e5665df0628"}, ] [package.extras] diff --git a/requirements-dev.txt b/requirements-dev.txt index 3f48504..eb69f05 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,7 +12,7 @@ attrs==23.1.0 ; python_version >= "3.9" and python_version < "4.0" beartype==0.16.4 ; python_version >= "3.10" and python_version < "4" bidict==0.22.1 ; python_version >= "3.9" and python_version < "4.0" bitarray==2.8.2 ; python_version >= "3.9" and python_version < "4.0" -black==23.10.1 ; python_version >= "3.9" and python_version < "4.0" +black==23.11.0 ; python_version >= "3.9" and python_version < "4.0" blackdoc==0.3.9 ; python_version >= "3.9" and python_version < "4.0" cachetools==5.3.2 ; python_version >= "3.9" and python_version < "4.0" certifi==2023.7.22 ; python_version >= "3.9" and python_version < "4.0" @@ -22,12 +22,12 @@ chardet==5.2.0 ; python_version >= "3.9" and python_version < "4.0" charset-normalizer==3.3.2 ; python_version >= "3.9" and python_version < "4.0" click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "4.0" click==8.1.7 ; python_version >= "3.9" and python_version < "4.0" -clickhouse-connect[arrow,numpy,pandas]==0.6.18 ; python_version >= "3.9" and python_version < "4.0" +clickhouse-connect[arrow,numpy,pandas]==0.6.19 ; 
python_version >= "3.9" and python_version < "4.0" cligj==0.7.2 ; python_version >= "3.9" and python_version < "4" cloudpickle==3.0.0 ; python_version >= "3.9" and python_version < "4.0" codespell[hard-encoding-detection,toml]==2.2.6 ; python_version >= "3.9" and python_version < "4.0" colorama==0.4.6 ; python_version < "4.0" and sys_platform == "win32" and python_version >= "3.9" or python_version < "4.0" and platform_system == "Windows" and python_version >= "3.9" or python_version >= "3.10" and python_version < "4.0" -comm==0.1.4 ; python_version >= "3.10" and python_version < "4" +comm==0.2.0 ; python_version >= "3.10" and python_version < "4" contourpy==1.2.0 ; python_version >= "3.10" and python_version < "4" coverage[toml]==7.3.2 ; python_version >= "3.9" and python_version < "4.0" cryptography==41.0.5 ; python_version >= "3.9" and python_version < "4.0" @@ -37,7 +37,7 @@ datafusion==32.0.0 ; python_version >= "3.9" and python_version < "4.0" db-dtypes==1.1.1 ; python_version >= "3.9" and python_version < "4.0" debugpy==1.8.0 ; python_version >= "3.10" and python_version < "4" decorator==5.1.1 ; python_version >= "3.9" and python_version < "4.0" -deltalake==0.10.1 ; python_version >= "3.9" and python_version < "4.0" +deltalake==0.13.0 ; python_version >= "3.9" and python_version < "4.0" distlib==0.3.7 ; python_version >= "3.9" and python_version < "4.0" duckdb-engine==0.9.2 ; python_version >= "3.9" and python_version < "4.0" duckdb==0.9.1 ; python_version >= "3.9" and python_version < "4.0" @@ -54,8 +54,8 @@ fsspec==2023.6.0 ; python_version >= "3.9" and python_version < "4.0" gcsfs==2023.6.0 ; python_version >= "3.9" and python_version < "4.0" geoalchemy2==0.14.2 ; python_version >= "3.9" and python_version < "4.0" geopandas==0.14.0 ; python_version >= "3.9" and python_version < "4.0" -google-api-core==2.12.0 ; python_version >= "3.9" and python_version < "4.0" -google-api-core[grpc]==2.12.0 ; python_version >= "3.9" and python_version < "4.0" +google-api-core==2.13.0 ; python_version >= "3.9" and python_version < "4.0" +google-api-core[grpc]==2.13.0 ; python_version >= "3.9" and python_version < "4.0" google-auth-oauthlib==1.1.0 ; python_version >= "3.9" and python_version < "4.0" google-auth==2.23.4 ; python_version >= "3.9" and python_version < "4.0" google-cloud-bigquery-storage==2.22.0 ; python_version >= "3.9" and python_version < "4.0" @@ -75,7 +75,7 @@ hypothesis==6.88.3 ; python_version >= "3.9" and python_version < "4.0" identify==2.5.31 ; python_version >= "3.9" and python_version < "4.0" idna==3.4 ; python_version >= "3.9" and python_version < "4.0" importlib-metadata==6.8.0 ; python_version >= "3.9" and python_version < "4.0" -importlib-resources==6.1.0 ; python_version >= "3.9" and python_version < "4.0" +importlib-resources==6.1.1 ; python_version >= "3.9" and python_version < "4.0" impyla==0.18.0 ; python_version >= "3.9" and python_version < "4.0" iniconfig==2.0.0 ; python_version >= "3.9" and python_version < "4.0" ipykernel==6.26.0 ; python_version >= "3.10" and python_version < "4" @@ -85,7 +85,7 @@ jinja2==3.1.2 ; python_version >= "3.9" and python_version < "4.0" joblib==1.3.2 ; python_version >= "3.9" and python_version < "4.0" jsonschema-specifications==2023.7.1 ; python_version >= "3.10" and python_version < "4" jsonschema==4.19.2 ; python_version >= "3.10" and python_version < "4" -jupyter-client==8.5.0 ; python_version >= "3.10" and python_version < "4" +jupyter-client==8.6.0 ; python_version >= "3.10" and python_version < "4" jupyter-core==5.5.0 ; 
python_version >= "3.10" and python_version < "4" kiwisolver==1.4.5 ; python_version >= "3.10" and python_version < "4" locket==1.0.0 ; python_version >= "3.9" and python_version < "4.0" @@ -100,7 +100,7 @@ more-itertools==10.1.0 ; python_version >= "3.9" and python_version < "4.0" multidict==6.0.4 ; python_version >= "3.9" and python_version < "4.0" multipledispatch==1.0.0 ; python_version >= "3.9" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.9" and python_version < "4.0" -nbclient==0.8.0 ; python_version >= "3.10" and python_version < "4" +nbclient==0.9.0 ; python_version >= "3.10" and python_version < "4" nbformat==5.9.2 ; python_version >= "3.10" and python_version < "4" nest-asyncio==1.5.8 ; python_version >= "3.10" and python_version < "4" nodeenv==1.8.0 ; python_version >= "3.9" and python_version < "4.0" @@ -119,7 +119,7 @@ pillow==10.1.0 ; python_version >= "3.10" and python_version < "4" pins[gcs]==0.8.3 ; python_version >= "3.9" and python_version < "4.0" platformdirs==3.11.0 ; python_version >= "3.9" and python_version < "4.0" plotly==5.18.0 ; python_version >= "3.10" and python_version < "4" -plotnine==0.12.3 ; python_version >= "3.10" and python_version < "4" +plotnine==0.12.4 ; python_version >= "3.10" and python_version < "4" pluggy==1.3.0 ; python_version >= "3.9" and python_version < "4.0" plum-dispatch==2.2.2 ; python_version >= "3.10" and python_version < "4" poetry-dynamic-versioning==1.1.1 ; python_version >= "3.9" and python_version < "4.0" @@ -137,7 +137,7 @@ pure-sasl==0.6.2 ; python_version >= "3.9" and python_version < "4.0" py-cpuinfo==9.0.0 ; python_version >= "3.9" and python_version < "4.0" py4j==0.10.9.5 ; python_version >= "3.9" and python_version < "4.0" pyarrow-hotfix==0.4 ; python_version >= "3.9" and python_version < "4.0" -pyarrow==14.0.0 ; python_version >= "3.9" and python_version < "4.0" +pyarrow==14.0.1 ; python_version >= "3.9" and python_version < "4.0" pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "4.0" pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "4.0" pycparser==2.21 ; python_version >= "3.9" and python_version < "4.0" @@ -147,7 +147,7 @@ pydata-google-auth==1.8.2 ; python_version >= "3.9" and python_version < "4.0" pydeps==1.12.17 ; python_version >= "3.9" and python_version < "4.0" pydruid[sqlalchemy]==0.6.5 ; python_version >= "3.9" and python_version < "4.0" pygments==2.16.1 ; python_version >= "3.9" and python_version < "4.0" -pyinstrument==4.6.0 ; python_version >= "3.9" and python_version < "4.0" +pyinstrument==4.6.1 ; python_version >= "3.9" and python_version < "4.0" pyjwt==2.8.0 ; python_version >= "3.9" and python_version < "4.0" pymssql==2.2.10 ; python_version >= "3.9" and python_version < "4.0" pymysql==1.1.0 ; python_version >= "3.9" and python_version < "4.0" @@ -178,7 +178,7 @@ requests==2.31.0 ; python_version >= "3.9" and python_version < "4.0" rich==13.6.0 ; python_version >= "3.9" and python_version < "4.0" rpds-py==0.12.0 ; python_version >= "3.10" and python_version < "4" rsa==4.9 ; python_version >= "3.9" and python_version < "4" -ruff==0.1.4 ; python_version >= "3.9" and python_version < "4.0" +ruff==0.1.5 ; python_version >= "3.9" and python_version < "4.0" scikit-learn==1.3.2 ; python_version >= "3.10" and python_version < "4" scipy==1.9.3 ; python_version >= "3.10" and python_version < "4" seaborn==0.13.0 ; python_version >= "3.10" and python_version < "4" @@ -191,7 +191,7 @@ sortedcontainers==2.4.0 ; python_version >= "3.9" and python_version < 
"4.0" sphobjinv==2.3.1 ; python_version >= "3.10" and python_version < "4" sqlalchemy-views==0.3.2 ; python_version >= "3.9" and python_version < "4.0" sqlalchemy==1.4.50 ; python_version >= "3.9" and python_version < "4.0" -sqlglot==19.0.3 ; python_version >= "3.9" and python_version < "4.0" +sqlglot==19.1.1 ; python_version >= "3.9" and python_version < "4.0" stack-data==0.6.3 ; python_version >= "3.9" and python_version < "4.0" statsmodels==0.14.0 ; python_version >= "3.10" and python_version < "4" stdlib-list==0.9.0 ; python_version >= "3.9" and python_version < "4.0"
feat(core): add cursor-based pagination via `em.findByCursor()` (#3975)

As an alternative to the offset-based pagination with `limit` and `offset`, we can paginate based on a cursor. A cursor is an opaque string that defines a specific place in the ordered entity graph. You can use `em.findByCursor()` to access those options. Under the hood, it will call `em.find()` and `em.count()` just like the `em.findAndCount()` method, but will use the cursor options instead.

Supports `before`, `after`, `first` and `last` options while disallowing `limit` and `offset`. An explicit `orderBy` option is required.

Use `first` and `after` for forward pagination, or `last` and `before` for backward pagination.

- `first` and `last` are numbers and serve as an alternative to `offset`; those options are mutually exclusive, use only one at a time
- `before` and `after` specify the previous cursor value; it can be one of the following:
  - a `Cursor` instance
  - an opaque string provided by the `startCursor`/`endCursor` properties
  - a POJO/entity instance

```ts
const currentCursor = await em.findByCursor(User, {}, {
  first: 10,
  after: previousCursor, // cursor instance
  orderBy: { id: 'desc' },
});

// to fetch next page
const nextCursor = await em.findByCursor(User, {}, {
  first: 10,
  after: currentCursor.endCursor, // opaque string
  orderBy: { id: 'desc' },
});

// to fetch next page
const nextCursor2 = await em.findByCursor(User, {}, {
  first: 10,
  after: { id: lastSeenId }, // entity-like POJO
  orderBy: { id: 'desc' },
});
```

The `Cursor` object provides the following interface:

```ts
Cursor<User> {
  items: [
    User { ... },
    User { ... },
    User { ... },
    ...
  ],
  totalCount: 50,
  length: 10,
  startCursor: 'WzRd',
  endCursor: 'WzZd',
  hasPrevPage: true,
  hasNextPage: true,
}
```
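The snippets above only show forward paging with `first`/`after`. As a minimal sketch of the backward direction described in the message (assuming the same `User` entity and only the documented `last`, `before` and `startCursor` options; this is not code taken from the commit itself):

```ts
// Backward pagination sketch: `last` replaces `first`, and `before` takes the
// opaque `startCursor` of the page we are paging back from.
const currentPage = await em.findByCursor(User, {}, {
  first: 10,
  orderBy: { id: 'desc' },
});

// fetch the page that precedes `currentPage`
const previousPage = await em.findByCursor(User, {}, {
  last: 10,
  before: currentPage.startCursor, // opaque string
  orderBy: { id: 'desc' },
});
```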
1e6825f2ff5a7d505b73225b1696b44629a7eebb
feat
https://github.com/mikro-orm/mikro-orm/commit/1e6825f2ff5a7d505b73225b1696b44629a7eebb
add cursor-based pagination via `em.findByCursor()` (#3975)

As an alternative to the offset-based pagination with `limit` and `offset`, we can paginate based on a cursor. A cursor is an opaque string that defines a specific place in the ordered entity graph. You can use `em.findByCursor()` to access those options. Under the hood, it will call `em.find()` and `em.count()` just like the `em.findAndCount()` method, but will use the cursor options instead.

Supports `before`, `after`, `first` and `last` options while disallowing `limit` and `offset`. An explicit `orderBy` option is required.

Use `first` and `after` for forward pagination, or `last` and `before` for backward pagination.

- `first` and `last` are numbers and serve as an alternative to `offset`; those options are mutually exclusive, use only one at a time
- `before` and `after` specify the previous cursor value; it can be one of the following:
  - a `Cursor` instance
  - an opaque string provided by the `startCursor`/`endCursor` properties
  - a POJO/entity instance

```ts
const currentCursor = await em.findByCursor(User, {}, {
  first: 10,
  after: previousCursor, // cursor instance
  orderBy: { id: 'desc' },
});

// to fetch next page
const nextCursor = await em.findByCursor(User, {}, {
  first: 10,
  after: currentCursor.endCursor, // opaque string
  orderBy: { id: 'desc' },
});

// to fetch next page
const nextCursor2 = await em.findByCursor(User, {}, {
  first: 10,
  after: { id: lastSeenId }, // entity-like POJO
  orderBy: { id: 'desc' },
});
```

The `Cursor` object provides the following interface:

```ts
Cursor<User> {
  items: [
    User { ... },
    User { ... },
    User { ... },
    ...
  ],
  totalCount: 50,
  length: 10,
  startCursor: 'WzRd',
  endCursor: 'WzZd',
  hasPrevPage: true,
  hasNextPage: true,
}
```
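For context on what the opaque cursor strings (`'WzRd'`, `'WzZd'`) contain: the `Cursor` helper added in the diff below serializes the values of the `orderBy` keys to JSON and base64url-encodes them. A small stand-alone sketch of that transformation for illustration (not the library API itself, just the same encoding):

```ts
// Stand-alone sketch of the cursor encoding used by the `Cursor` helper:
// the ordered key values are JSON-serialized and base64url-encoded.
const encode = (values: unknown[]): string =>
  Buffer.from(JSON.stringify(values)).toString('base64url');

const decode = (cursor: string): unknown[] =>
  JSON.parse(Buffer.from(cursor, 'base64url').toString('utf8'));

// an entity ordered by `id` with id = 4 yields the cursor value seen above
const cursor = encode([4]);  // 'WzRd'
console.log(decode(cursor)); // [ 4 ]
```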
diff --git a/entity-manager.md b/entity-manager.md index 8818e19..650d35e 100644 --- a/entity-manager.md +++ b/entity-manager.md @@ -284,6 +284,61 @@ console.log(authors.length); // based on limit parameter, e.g. 10 console.log(count); // total count, e.g. 1327 ``` +### Cursor-based pagination + +As an alternative to the offset based pagination with `limit` and `offset`, we can paginate based on a cursor. A cursor is an opaque string that defines specific place in ordered entity graph. You can use `em.findByCursor()` to access those options. Under the hood, it will call `em.find()` and `em.count()` just like the `em.findAndCount()` method, but will use the cursor options instead. + +Supports `before`, `after`, `first` and `last` options while disallowing `limit` and `offset`. Explicit `orderBy` option is required. + +Use `first` and `after` for forward pagination, or `last` and `before` for backward pagination. + +- `first` and `last` are numbers and serve as an alternative to `offset`, those options are mutually exclusive, use only one at a time +- `before` and `after` specify the previous cursor value, it can be one of the: + - `Cursor` instance + - opaque string provided by `startCursor/endCursor` properties + - POJO/entity instance + +```ts +const currentCursor = await em.findByCursor(User, {}, { + first: 10, + after: previousCursor, // cursor instance + orderBy: { id: 'desc' }, +}); + +// to fetch next page +const nextCursor = await em.findByCursor(User, {}, { + first: 10, + after: currentCursor.endCursor, // opaque string + orderBy: { id: 'desc' }, +}); + +// to fetch next page +const nextCursor2 = await em.findByCursor(User, {}, { + first: 10, + after: { id: lastSeenId }, // entity-like POJO + orderBy: { id: 'desc' }, +}); +``` + +The `Cursor` object provides following interface: + +```ts +Cursor<User> { + items: [ + User { ... }, + User { ... }, + User { ... }, + ... + ], + totalCount: 50, + length: 10, + startCursor: 'WzRd', + endCursor: 'WzZd', + hasPrevPage: true, + hasNextPage: true, +} +``` + ### Handling Not Found Entities When we call `em.findOne()` and no entity is found based on our criteria, `null` will be returned. 
If we rather have an `Error` instance thrown, we can use `em.findOneOrFail()`: diff --git a/EntityManager.ts b/EntityManager.ts index a75836a..197dc26 100644 --- a/EntityManager.ts +++ b/EntityManager.ts @@ -1,5 +1,12 @@ import { inspect } from 'util'; -import { type Configuration, getOnConflictReturningFields, QueryHelper, TransactionContext, Utils } from './utils'; +import { + type Configuration, + type Cursor, + getOnConflictReturningFields, + QueryHelper, + TransactionContext, + Utils, +} from './utils'; import { type AssignOptions, EntityAssigner, @@ -17,6 +24,7 @@ import type { DeleteOptions, EntityField, EntityManagerType, + FindByCursorOptions, FindOneOptions, FindOneOrFailOptions, FindOptions, @@ -451,14 +459,84 @@ export class EntityManager<D extends IDatabaseDriver = IDatabaseDriver> { Entity extends object, Hint extends string = never, >(entityName: EntityName<Entity>, where: FilterQuery<Entity>, options: FindOptions<Entity, Hint> = {}): Promise<[Loaded<Entity, Hint>[], number]> { + const em = this.getContext(false); const [entities, count] = await Promise.all([ - this.find<Entity, Hint>(entityName, where, options), - this.count(entityName, where, options), + em.find<Entity, Hint>(entityName, where, options), + em.count(entityName, where, options), ]); return [entities, count]; } + /** + * Calls `em.find()` and `em.count()` with the same arguments (where applicable) and returns the results as {@apilink Cursor} object. + * Supports `before`, `after`, `first` and `last` options while disallowing `limit` and `offset`. Explicit `orderBy` option + * is required. + * + * Use `first` and `after` for forward pagination, or `last` and `before` for backward pagination. + * + * - `first` and `last` are numbers and serve as an alternative to `offset`, those options are mutually exclusive, use only one at a time + * - `before` and `after` specify the previous cursor value, it can be one of the: + * - `Cursor` instance + * - opaque string provided by `startCursor/endCursor` properties + * - POJO/entity instance + * + * ```ts + * const currentCursor = await em.findByCursor(User, {}, { + * first: 10, + * after: previousCursor, // cursor instance + * orderBy: { id: 'desc' }, + * }); + * + * // to fetch next page + * const nextCursor = await em.findByCursor(User, {}, { + * first: 10, + * after: currentCursor.endCursor, // opaque string + * orderBy: { id: 'desc' }, + * }); + * + * // to fetch next page + * const nextCursor2 = await em.findByCursor(User, {}, { + * first: 10, + * after: { id: lastSeenId }, // entity-like POJO + * orderBy: { id: 'desc' }, + * }); + * ``` + * + * The `Cursor` object provides following interface: + * + * ```ts + * Cursor<User> { + * items: [ + * User { ... }, + * User { ... }, + * User { ... 
}, + * ], + * totalCount: 50, + * startCursor: 'WzRd', + * endCursor: 'WzZd', + * hasPrevPage: true, + * hasNextPage: true, + * } + * ``` + */ + async findByCursor< + Entity extends object, + Hint extends string = never, + >(entityName: EntityName<Entity>, where: FilterQuery<Entity>, options: FindByCursorOptions<Entity, Hint> = {}): Promise<Cursor<Entity, Hint>> { + const em = this.getContext(false); + entityName = Utils.className(entityName); + options.overfetch ??= true; + + if (Utils.isEmpty(options.orderBy)) { + throw new Error('Explicit `orderBy` option required'); + } + + const [entities, count] = await em.findAndCount(entityName, where, options); + + return new Cursor<Entity, Hint>(entities, count, options, this.metadata.get(entityName)); + } + /** * Refreshes the persistent state of an entity from the database, overriding any local changes that have not yet been persisted. */ @@ -1033,7 +1111,10 @@ export class EntityManager<D extends IDatabaseDriver = IDatabaseDriver> { } const em = this.getContext(false); - em.transactionContext = await em.getConnection('write').begin({ ...options, eventBroadcaster: new TransactionEventBroadcaster(em) }); + em.transactionContext = await em.getConnection('write').begin({ + ...options, + eventBroadcaster: new TransactionEventBroadcaster(em), + }); } /** @@ -1215,7 +1296,10 @@ export class EntityManager<D extends IDatabaseDriver = IDatabaseDriver> { } }); - return this.merge<Entity>(entityName, data as EntityData<Entity>, { convertCustomTypes: true, refresh: true, ...options }); + return this.merge<Entity>(entityName, data as EntityData<Entity>, { + convertCustomTypes: true, + refresh: true, ...options, + }); } /** @@ -1821,9 +1905,17 @@ export class EntityManager<D extends IDatabaseDriver = IDatabaseDriver> { let data: R; if (Array.isArray(cached) && merge) { - data = cached.map(item => em.entityFactory.create<T>(entityName, item, { merge: true, convertCustomTypes: true, refresh })) as unknown as R; + data = cached.map(item => em.entityFactory.create<T>(entityName, item, { + merge: true, + convertCustomTypes: true, + refresh, + })) as unknown as R; } else if (Utils.isObject<EntityData<T>>(cached) && merge) { - data = em.entityFactory.create<T>(entityName, cached, { merge: true, convertCustomTypes: true, refresh }) as unknown as R; + data = em.entityFactory.create<T>(entityName, cached, { + merge: true, + convertCustomTypes: true, + refresh, + }) as unknown as R; } else { data = cached; } diff --git a/DatabaseDriver.ts b/DatabaseDriver.ts index c49c1b1..71b97c6 100644 --- a/DatabaseDriver.ts +++ b/DatabaseDriver.ts @@ -1,9 +1,32 @@ -import { EntityManagerType, type CountOptions, type LockOptions, type DeleteOptions, type FindOneOptions, type FindOptions, type IDatabaseDriver, type NativeInsertUpdateManyOptions, type NativeInsertUpdateOptions, type DriverMethodOptions } from './IDatabaseDriver'; -import type { ConnectionType, Dictionary, EntityData, EntityDictionary, EntityMetadata, EntityProperty, FilterQuery, PopulateOptions, Primary } from '../typings'; +import { + EntityManagerType, + type CountOptions, + type LockOptions, + type DeleteOptions, + type FindOneOptions, + type FindOptions, + type IDatabaseDriver, + type NativeInsertUpdateManyOptions, + type NativeInsertUpdateOptions, + type DriverMethodOptions, + type OrderDefinition, +} from './IDatabaseDriver'; +import type { + ConnectionType, + Dictionary, + EntityData, + EntityDictionary, + EntityMetadata, + EntityProperty, + FilterObject, + FilterQuery, + PopulateOptions, + Primary, +} from 
'../typings'; import type { MetadataStorage } from '../metadata'; import type { Connection, QueryResult, Transaction } from '../connections'; -import { EntityComparator, Utils, type Configuration, type ConnectionOptions } from '../utils'; -import { QueryOrder, ReferenceType, type QueryOrderMap } from '../enums'; +import { EntityComparator, Utils, type Configuration, type ConnectionOptions, Cursor } from '../utils'; +import { QueryOrder, ReferenceType, type QueryOrderMap, type QueryOrderKeys, QueryOrderNumeric } from '../enums'; import type { Platform } from '../platforms'; import type { Collection } from '../entity/Collection'; import { EntityManager } from '../EntityManager'; @@ -136,6 +159,98 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD return this.dependencies; } + protected processCursorOptions<T extends object, P extends string>(meta: EntityMetadata<T>, options: FindOptions<T, P>, orderBy: OrderDefinition<T>): { orderBy: OrderDefinition<T>[]; where: FilterQuery<T> } { + const { first, last, before, after, overfetch } = options; + const limit = first || last; + const isLast = !first && !!last; + const definition = Cursor.getDefinition(meta, orderBy); + const $and: FilterQuery<T>[] = []; + + // allow POJO as well, we care only about the correct key being present + const isCursor = (val: unknown, key: 'startCursor' | 'endCursor'): val is Cursor<T, any> => { + return !!val && typeof val === 'object' && key in val; + }; + const createCursor = (val: unknown, key: 'startCursor' | 'endCursor', inverse = false) => { + let def = isCursor(val, key) ? val[key] : val; + + if (Utils.isPlainObject<FilterObject<T>>(def)) { + def = Cursor.for<T>(meta, def, orderBy); + } + + const offsets = def ? Cursor.decode(def as string) as Dictionary[] : []; + + if (definition.length === offsets.length) { + return this.createCursorCondition<T>(definition, offsets, inverse); + } + + return {} as FilterQuery<T>; + }; + + if (after) { + $and.push(createCursor(after, 'endCursor')); + } + + if (before) { + $and.push(createCursor(before, 'startCursor', true)); + } + + if (limit) { + options.limit = limit + (overfetch ? 1 : 0); + } + + const createOrderBy = (prop: string, direction: QueryOrderKeys<T>): OrderDefinition<T> => { + if (Utils.isPlainObject(direction)) { + const value = Object.keys(direction).reduce((o, key) => { + Object.assign(o, createOrderBy(key, direction[key])); + return o; + }, {}); + return ({ [prop]: value }) as OrderDefinition<T>; + } + + const desc = direction as unknown === QueryOrderNumeric.DESC || direction.toString().toLowerCase() === 'desc'; + const dir = Utils.xor(desc, isLast) ? 'desc' : 'asc'; + return ({ [prop]: dir }) as OrderDefinition<T>; + }; + + return { + orderBy: definition.map(([prop, direction]) => createOrderBy(prop, direction)), + where: ($and.length > 1 ? 
{ $and } : { ...$and[0] }) as FilterQuery<T>, + }; + } + + protected createCursorCondition<T extends object>(definition: (readonly [keyof T & string, QueryOrder])[], offsets: Dictionary[], inverse = false): FilterQuery<T> { + const createCondition = (prop: string, direction: QueryOrderKeys<T>, offset: Dictionary, eq = false) => { + if (Utils.isPlainObject(direction)) { + const value = Object.keys(direction).reduce((o, key) => { + Object.assign(o, createCondition(key, direction[key], offset[prop][key], eq)); + return o; + }, {}); + return ({ [prop]: value }); + } + + const desc = direction as unknown === QueryOrderNumeric.DESC || direction.toString().toLowerCase() === 'desc'; + const operator = Utils.xor(desc, inverse) ? '$lt' : '$gt'; + + return { [prop]: { [operator + (eq ? 'e' : '')]: offset } } as FilterQuery<T>; + }; + + const [order, ...otherOrders] = definition; + const [offset, ...otherOffsets] = offsets; + const [prop, direction] = order; + + if (!otherOrders.length) { + return createCondition(prop, direction, offset) as FilterQuery<T>; + } + + return { + ...createCondition(prop, direction, offset, true), + $or: [ + createCondition(prop, direction, offset), + this.createCursorCondition(otherOrders, otherOffsets, inverse), + ], + } as FilterQuery<T>; + } + protected inlineEmbeddables<T>(meta: EntityMetadata<T>, data: T, where?: boolean): void { Object.keys(data as Dictionary).forEach(k => { if (Utils.isOperator(k)) { diff --git a/IDatabaseDriver.ts b/IDatabaseDriver.ts index 14a8145..17fe62b 100644 --- a/IDatabaseDriver.ts +++ b/IDatabaseDriver.ts @@ -1,6 +1,6 @@ import type { ConnectionType, EntityData, EntityMetadata, EntityProperty, FilterQuery, Primary, Dictionary, QBFilterQuery, - IPrimaryKey, PopulateOptions, EntityDictionary, ExpandProperty, AutoPath, ObjectQuery, + IPrimaryKey, PopulateOptions, EntityDictionary, ExpandProperty, AutoPath, ObjectQuery, FilterObject, } from '../typings'; import type { Connection, QueryResult, Transaction } from '../connections'; import type { FlushMode, LockMode, QueryOrderMap, QueryFlag, LoadStrategy, PopulateHint } from '../enums'; @@ -10,6 +10,7 @@ import type { Collection } from '../entity/Collection'; import type { EntityManager } from '../EntityManager'; import type { DriverException } from '../exceptions'; import type { Configuration } from '../utils/Configuration'; +import type { Cursor } from '../utils/Cursor'; export const EntityManagerType = Symbol('EntityManagerType'); @@ -94,13 +95,25 @@ export interface IDatabaseDriver<C extends Connection = Connection> { type FieldsMap<T, P extends string = never> = { [K in keyof T]?: EntityField<ExpandProperty<T[K]>>[] }; export type EntityField<T, P extends string = never> = keyof T | '*' | AutoPath<T, P, '*'> | FieldsMap<T, P>; +export type OrderDefinition<T> = (QueryOrderMap<T> & { 0?: never }) | QueryOrderMap<T>[]; + export interface FindOptions<T, P extends string = never> { populate?: readonly AutoPath<T, P>[] | boolean; populateWhere?: ObjectQuery<T> | PopulateHint; - orderBy?: (QueryOrderMap<T> & { 0?: never }) | QueryOrderMap<T>[]; + orderBy?: OrderDefinition<T>; cache?: boolean | number | [string, number]; limit?: number; offset?: number; + /** Fetch items `before` this cursor. */ + before?: string | { startCursor: string | null } | FilterObject<T>; + /** Fetch items `after` this cursor. */ + after?: string | { endCursor: string | null } | FilterObject<T>; + /** Fetch `first` N items. */ + first?: number; + /** Fetch `last` N items. 
*/ + last?: number; + /** Fetch one more item than `first`/`last`, enabled automatically in `em.findByCursor` to check if there is a next page. */ + overfetch?: boolean; refresh?: boolean; convertCustomTypes?: boolean; disableIdentityMap?: boolean; @@ -128,6 +141,9 @@ export interface FindOptions<T, P extends string = never> { hintComments?: string | string[]; } +export interface FindByCursorOptions<T extends object, P extends string = never> extends Omit<FindOptions<T, P>, 'limit' | 'offset'> { +} + export interface FindOneOptions<T extends object, P extends string = never> extends Omit<FindOptions<T, P>, 'limit' | 'lockMode'> { lockMode?: LockMode; lockVersion?: number | Date; diff --git a/EntityRepository.ts b/EntityRepository.ts index f865815..2f4232d 100644 --- a/EntityRepository.ts +++ b/EntityRepository.ts @@ -1,11 +1,22 @@ import type { CreateOptions, EntityManager, MergeOptions } from '../EntityManager'; import type { AssignOptions } from './EntityAssigner'; import type { EntityData, EntityName, AnyEntity, Primary, Loaded, FilterQuery, EntityDictionary, AutoPath, RequiredEntityData } from '../typings'; -import type { CountOptions, DeleteOptions, FindOneOptions, FindOneOrFailOptions, FindOptions, GetReferenceOptions, NativeInsertUpdateOptions, UpdateOptions } from '../drivers/IDatabaseDriver'; +import type { + CountOptions, + DeleteOptions, + FindByCursorOptions, + FindOneOptions, + FindOneOrFailOptions, + FindOptions, + GetReferenceOptions, + NativeInsertUpdateOptions, + UpdateOptions, +} from '../drivers/IDatabaseDriver'; import type { IdentifiedReference, Reference } from './Reference'; import type { EntityLoaderOptions } from './EntityLoader'; import { ValidationError } from '../errors'; import { Utils } from '../utils/Utils'; +import type { Cursor } from '../utils/Cursor'; export class EntityRepository<T extends object> { @@ -128,6 +139,13 @@ export class EntityRepository<T extends object> { return this.getEntityManager().findAndCount<T, P>(this.entityName, where, options); } + /** + * @inheritDoc EntityManager.findByCursor + */ + async findByCursor<P extends string = never>(where: FilterQuery<T>, options?: FindByCursorOptions<T, P>): Promise<Cursor<T, P>> { + return this.em.findByCursor<T, P>(this.entityName, where, options); + } + /** * Finds all entities of given type. You can pass additional options via the `options` parameter. */ diff --git a/index.ts b/index.ts index 124ce6f..4834d9c 100644 --- a/index.ts +++ b/index.ts @@ -1,5 +1,6 @@ export * from './Configuration'; export * from './ConfigurationLoader'; +export * from './Cursor'; export * from './Utils'; export * from './RequestContext'; export * from './TransactionContext'; diff --git a/typings.ts b/typings.ts index 2d4673b..835ee4d 100644 --- a/typings.ts +++ b/typings.ts @@ -90,10 +90,11 @@ export type OperatorMap<T> = { export type FilterValue2<T> = T | ExpandScalar<T> | Primary<T>; export type FilterValue<T> = OperatorMap<FilterValue2<T>> | FilterValue2<T> | FilterValue2<T>[] | null; +export type FilterObject<T> = { -readonly [K in keyof T as ExcludeFunctions<T, K>]?: Query<ExpandProperty<T[K]>> | FilterValue<ExpandProperty<T[K]>> | null }; type ExpandObject<T> = T extends object ? T extends Scalar ? 
never - : { -readonly [K in keyof T as ExcludeFunctions<T, K>]?: Query<ExpandProperty<T[K]>> | FilterValue<ExpandProperty<T[K]>> | null } + : FilterObject<T> : never; export type Query<T> = T extends object diff --git a/Cursor.ts b/Cursor.ts index 1e23e75..6c5c8a3 100644 --- a/Cursor.ts +++ b/Cursor.ts @@ -0,0 +1,169 @@ +import { inspect } from 'util'; +import type { EntityMetadata, FilterObject, Loaded } from '../typings'; +import type { FindByCursorOptions, OrderDefinition } from '../drivers/IDatabaseDriver'; +import { Utils } from './Utils'; +import type { QueryOrder, QueryOrderKeys } from '../enums'; +import { ReferenceType } from '../enums'; +import { Reference } from '../entity/Reference'; + +/** + * As an alternative to the offset based pagination with `limit` and `offset`, we can paginate based on a cursor. + * A cursor is an opaque string that defines specific place in ordered entity graph. You can use `em.findByCursor()` + * to access those options. Under the hood, it will call `em.find()` and `em.count()` just like the `em.findAndCount()` + * method, but will use the cursor options instead. + * + * Supports `before`, `after`, `first` and `last` options while disallowing `limit` and `offset`. Explicit `orderBy` option is required. + * + * Use `first` and `after` for forward pagination, or `last` and `before` for backward pagination. + * + * - `first` and `last` are numbers and serve as an alternative to `offset`, those options are mutually exclusive, use only one at a time + * - `before` and `after` specify the previous cursor value + * + * ```ts + * const currentCursor = await em.findByCursor(User, {}, { + * first: 10, + * after: previousCursor, // can be either string or `Cursor` instance + * orderBy: { id: 'desc' }, + * }); + * + * // to fetch next page + * const nextCursor = await em.findByCursor(User, {}, { + * first: 10, + * after: currentCursor.endCursor, // or currentCursor.endCursor + * orderBy: { id: 'desc' }, + * }); + * ``` + * + * The `Cursor` object provides following interface: + * + * ```ts + * Cursor<User> { + * items: [ + * User { ... }, + * User { ... }, + * User { ... }, + * ... + * ], + * totalCount: 50, + * length: 10, + * startCursor: 'WzRd', + * endCursor: 'WzZd', + * hasPrevPage: true, + * hasNextPage: true, + * } + * ``` + */ +export class Cursor<Entity extends object, Hint extends string = never> { + + readonly hasPrevPage: boolean; + readonly hasNextPage: boolean; + + private readonly definition: (readonly [keyof Entity, QueryOrder])[]; + + constructor( + readonly items: Loaded<Entity, Hint>[], + readonly totalCount: number, + options: FindByCursorOptions<Entity, Hint>, + meta: EntityMetadata<Entity>, + ) { + const { first, last, before, after, orderBy, overfetch } = options; + const limit = first || last; + const isLast = !first && !!last; + const hasMorePages = !!overfetch && limit != null && items.length > limit; + this.hasPrevPage = before || after ? true : (isLast && hasMorePages); + this.hasNextPage = !(isLast && !before && !after) && hasMorePages; + + if (hasMorePages) { + if (isLast) { + items.shift(); + } else { + items.pop(); + } + } + + this.definition = Cursor.getDefinition(meta, orderBy!); + } + + get startCursor(): string | null { + if (this.items.length === 0) { + return null; + } + + return this.from(this.items[0]); + } + + get endCursor(): string | null { + if (this.items.length === 0) { + return null; + } + + return this.from(this.items[this.items.length - 1]); + } + + /** + * Computes the cursor value for given entity. 
+ */ + from(entity: Entity) { + const processEntity = <T> (entity: T, prop: string, direction: QueryOrderKeys<T>, object = false) => { + if (Utils.isPlainObject(direction)) { + const value = Object.keys(direction).reduce((o, key) => { + Object.assign(o, processEntity(Reference.unwrapReference(entity[prop]), key, direction[key], true)); + return o; + }, {}); + return ({ [prop]: value }); + } + + if (object) { + return ({ [prop]: entity[prop] }); + } + + return entity[prop]; + }; + const value = this.definition.map(([key, direction]) => processEntity(entity, key as string, direction)); + return Cursor.encode(value); + } + + * [Symbol.iterator](): IterableIterator<Loaded<Entity, Hint>> { + for (const item of this.items) { + yield item; + } + } + + get length(): number { + return this.items.length; + } + + /** + * Computes the cursor value for given entity and order definition. + */ + static for<Entity extends object>(meta: EntityMetadata<Entity>, entity: FilterObject<Entity>, orderBy: OrderDefinition<Entity>) { + const definition = this.getDefinition(meta, orderBy); + return Cursor.encode(definition.map(([key]) => entity[key as string])); + } + + static encode(value: unknown[]): string { + return Buffer.from(JSON.stringify(value)).toString('base64url'); + } + + static decode(value: string): unknown[] { + return JSON.parse(Buffer.from(value, 'base64url').toString('utf8')); + } + + static getDefinition<Entity extends object>(meta: EntityMetadata<Entity>, orderBy: OrderDefinition<Entity>) { + return Utils.asArray(orderBy).flatMap(order => { + return Object.keys(order) + .map(key => meta.properties[key]) + .filter(prop => [ReferenceType.SCALAR, ReferenceType.MANY_TO_ONE].includes(prop.reference) || (prop.reference === ReferenceType.ONE_TO_ONE && prop.owner)) + .map(prop => [prop.name, order[prop.name]] as const); + }); + } + + /* istanbul ignore next */ + [inspect.custom]() { + const type = this.items[0]?.constructor.name; + const { items, startCursor, endCursor, hasPrevPage, hasNextPage, totalCount, length } = this; + const options = inspect({ startCursor, endCursor, totalCount, hasPrevPage, hasNextPage, items, length }, { depth: 0 }); + return `Cursor${type ? `<${type}>` : ''} ${options.replace('items: [Array]', 'items: [...]')}`; + } + +} diff --git a/Utils.ts b/Utils.ts index 551bcd5..1ec7700 100644 --- a/Utils.ts +++ b/Utils.ts @@ -740,7 +740,7 @@ export class Utils { /** * Checks whether the value is POJO (e.g. 
`{ foo: 'bar' }`, and not instance of `Foo`) */ - static isPlainObject(value: any): value is Dictionary { + static isPlainObject<T extends Dictionary>(value: any): value is T { return ( value !== null && typeof value === 'object' @@ -1188,4 +1188,8 @@ export class Utils { }, {} as Dictionary) as T; } + static xor(a: boolean, b: boolean): boolean { + return (a || b) && !(a && b); + } + } diff --git a/AbstractSqlDriver.ts b/AbstractSqlDriver.ts index 8014a6e..ce12448 100644 --- a/AbstractSqlDriver.ts +++ b/AbstractSqlDriver.ts @@ -19,6 +19,7 @@ import { type EntityName, type EntityProperty, type FilterQuery, + type FindByCursorOptions, type FindOneOptions, type FindOptions, getOnConflictFields, @@ -84,15 +85,18 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection const qb = this.createQueryBuilder<T>(entityName, options.ctx, options.connectionType, false); const fields = this.buildFields(meta, populate, joinedProps, qb, options.fields as Field<T>[]); const joinedPropsOrderBy = this.buildJoinedPropsOrderBy(entityName, qb, meta, joinedProps); + const orderBy = [...Utils.asArray(options.orderBy), ...joinedPropsOrderBy]; if (Utils.isPrimaryKey(where, meta.compositePK)) { where = { [Utils.getPrimaryKeyHash(meta.primaryKeys)]: where } as FilterQuery<T>; } + const { first, last, before, after } = options as FindByCursorOptions<T>; + const isCursorPagination = [first, last, before, after].some(v => v != null); + qb.select(fields) .populate(populate, joinedProps.length > 0 ? options.populateWhere : undefined) .where(where) - .orderBy([...Utils.asArray(options.orderBy), ...joinedPropsOrderBy]) .groupBy(options.groupBy!) .having(options.having!) .indexHint(options.indexHint!) @@ -100,6 +104,13 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection .hintComment(options.hintComments!) 
.withSchema(this.getSchemaName(meta, options)); + if (isCursorPagination) { + const { orderBy: newOrderBy, where } = this.processCursorOptions(meta, options, orderBy); + qb.andWhere(where).orderBy(newOrderBy); + } else { + qb.orderBy(orderBy); + } + if (options.limit !== undefined) { qb.limit(options.limit, options.offset); } @@ -109,7 +120,13 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection } Utils.asArray(options.flags).forEach(flag => qb.setFlag(flag)); - return this.rethrow(qb.execute('all')); + const result = await this.rethrow(qb.execute('all')); + + if (isCursorPagination && !first && !!last) { + result.reverse(); + } + + return result; } async findOne<T extends object, P extends string = never>(entityName: string, where: FilterQuery<T>, options?: FindOneOptions<T, P>): Promise<EntityData<T> | null> { @@ -868,7 +885,7 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection return qb; } - protected resolveConnectionType(args: { ctx?: Transaction<Knex.Transaction>; connectionType?: ConnectionType}) { + protected resolveConnectionType(args: { ctx?: Transaction<Knex.Transaction>; connectionType?: ConnectionType }) { if (args.ctx) { return 'write'; } else if (args.connectionType) { @@ -951,7 +968,11 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection /* istanbul ignore else */ if (this.platform.allowsMultiInsert()) { - await this.nativeInsertMany<T>(prop.pivotEntity, items as EntityData<T>[], { ...options, convertCustomTypes: false, processCollections: false }); + await this.nativeInsertMany<T>(prop.pivotEntity, items as EntityData<T>[], { + ...options, + convertCustomTypes: false, + processCollections: false, + }); } else { await Utils.runSerial(items, item => { return this.createQueryBuilder(prop.pivotEntity, options?.ctx, 'write') diff --git a/MongoDriver.ts b/MongoDriver.ts index 2bfdbd5..2b4b9f3 100644 --- a/MongoDriver.ts +++ b/MongoDriver.ts @@ -10,6 +10,7 @@ import { type Configuration, type FindOneOptions, type FindOptions, + type FindByCursorOptions, type QueryResult, type Transaction, type IDatabaseDriver, @@ -27,7 +28,6 @@ import { import { MongoConnection } from './MongoConnection'; import { MongoPlatform } from './MongoPlatform'; import { MongoEntityManager } from './MongoEntityManager'; -import type { CreateSchemaOptions } from './MongoSchemaGenerator'; export class MongoDriver extends DatabaseDriver<MongoConnection> { @@ -49,8 +49,36 @@ export class MongoDriver extends DatabaseDriver<MongoConnection> { return this.findVirtual(entityName, where, options); } + const { first, last, before, after } = options as FindByCursorOptions<T>; const fields = this.buildFields(entityName, options.populate as unknown as PopulateOptions<T>[] || [], options.fields); where = this.renameFields(entityName, where, true); + const isCursorPagination = [first, last, before, after].some(v => v != null); + + if (isCursorPagination) { + const andWhere = (cond1: FilterQuery<T>, cond2: FilterQuery<T>): FilterQuery<T> => { + if (Utils.isEmpty(cond1)) { + return cond2; + } + + if (Utils.isEmpty(cond2)) { + return cond1; + } + + return { $and: [cond1, cond2] } as FilterQuery<T>; + }; + const meta = this.metadata.find<T>(entityName)!; + const { orderBy: newOrderBy, where: newWhere } = this.processCursorOptions(meta, options, options.orderBy!); + const newWhereConverted = this.renameFields(entityName, newWhere as FilterQuery<T>, true); + const orderBy = Utils.asArray(newOrderBy).map(order => 
this.renameFields(entityName, order)); + const res = await this.rethrow(this.getConnection('read').find(entityName, andWhere(where, newWhereConverted), orderBy, options.limit, options.offset, fields, options.ctx)); + + if (isCursorPagination && !first && !!last) { + res.reverse(); + } + + return res.map(r => this.mapResult<T>(r, this.metadata.find<T>(entityName))!); + } + const orderBy = Utils.asArray(options.orderBy).map(orderBy => this.renameFields(entityName, orderBy, false), ); diff --git a/EntityRepository.test.ts b/EntityRepository.test.ts index 33d7ac6..516d029 100644 --- a/EntityRepository.test.ts +++ b/EntityRepository.test.ts @@ -19,6 +19,7 @@ const methods = { upsertMany: jest.fn(), find: jest.fn(), findAndCount: jest.fn(), + findByCursor: jest.fn(), remove: jest.fn(), removeAndFlush: jest.fn(), removeLater: jest.fn(), @@ -60,6 +61,8 @@ describe('EntityRepository', () => { expect(methods.find.mock.calls[0]).toEqual([Publisher, { name: 'bar' }, undefined]); await repo.findAndCount({ name: 'bar' }); expect(methods.findAndCount.mock.calls[0]).toEqual([Publisher, { name: 'bar' }, undefined]); + await repo.findByCursor({ name: 'bar' }, { first: 10, after: '...' }); + expect(methods.findByCursor.mock.calls[0]).toEqual([Publisher, { name: 'bar' }, { first: 10, after: '...' }]); await repo.findOne('bar'); expect(methods.findOne.mock.calls[0]).toEqual([Publisher, 'bar', undefined]); await repo.findOneOrFail('bar'); diff --git a/complex-cursor.test.ts.snap b/complex-cursor.test.ts.snap index faecd0d..6eea173 100644 --- a/complex-cursor.test.ts.snap +++ b/complex-cursor.test.ts.snap @@ -0,0 +1,131 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`simple cursor based pagination (better-sqlite) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2' order by \\`u0\\`.\\`name\\` desc, \\`u0\\`.\\`age\\` asc, \\`u0\\`.\\`email\\` asc limit 11", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2'", +] +`; + +exports[`simple cursor based pagination (better-sqlite) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2' and \\`u0\\`.\\`name\\` <= 'User 1' and (\\`u0\\`.\\`name\\` < 'User 1' or (\\`u0\\`.\\`age\\` >= 28 and (\\`u0\\`.\\`age\\` > 28 or \\`u0\\`.\\`email\\` > 'email-55'))) order by \\`u0\\`.\\`name\\` desc, \\`u0\\`.\\`age\\` asc, \\`u0\\`.\\`email\\` asc limit 11", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2'", +] +`; + +exports[`simple cursor based pagination (better-sqlite) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3' order by \\`u1\\`.\\`email\\` desc, 
\\`u1\\`.\\`name\\` desc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`age\\` desc, \\`u0\\`.\\`email\\` desc limit 6", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3'", +] +`; + +exports[`simple cursor based pagination (better-sqlite) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3' and \\`u1\\`.\\`email\\` <= 'email-96' and \\`u1\\`.\\`name\\` <= 'User 3' and ((\\`u1\\`.\\`email\\` < 'email-96' and \\`u1\\`.\\`name\\` < 'User 3') or (\\`u0\\`.\\`name\\` >= 'User 3' and (\\`u0\\`.\\`name\\` > 'User 3' or (\\`u0\\`.\\`age\\` <= 38 and (\\`u0\\`.\\`age\\` < 38 or \\`u0\\`.\\`email\\` < 'email-76'))))) order by \\`u1\\`.\\`email\\` desc, \\`u1\\`.\\`name\\` desc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`age\\` desc, \\`u0\\`.\\`email\\` desc limit 6", + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` in (5) order by \\`u0\\`.\\`email\\` asc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`_id\\` asc", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3'", +] +`; + +exports[`simple cursor based pagination (mongo) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] db.getCollection('user').countDocuments({ '$or': [ { termsAccepted: true }, { name: 'User 1' }, { age: { '$lte': 30 } } ], name: { '$ne': 'User 2' } }, {});", + "[query] db.getCollection('user').find({ '$or': [ { termsAccepted: true }, { name: 'User 1' }, { age: { '$lte': 30 } } ], name: { '$ne': 'User 2' } }, {}).sort([ [ 'name', -1 ], [ 'age', 1 ], [ 'email', 1 ] ]).limit(11).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] db.getCollection('user').countDocuments({ '$or': [ { termsAccepted: true }, { name: 'User 1' }, { age: { '$lte': 30 } } ], name: { '$ne': 'User 2' } }, {});", + "[query] db.getCollection('user').find({ '$and': [ { '$or': [ { termsAccepted: true }, { name: 'User 1' }, { age: { '$lte': 30 } } ], name: { '$ne': 'User 2' } }, { name: { '$lte': 'User 1' }, '$or': [ { name: { '$lt': 'User 1' } }, { age: { '$gte': 28 }, '$or': [ [Object], [Object] ] } ] } ] }, {}).sort([ [ 'name', -1 ], [ 'age', 1 ], [ 'email', 1 ] ]).limit(11).toArray();", +] +`; + +exports[`simple cursor based pagination (mysql) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2' order by \\`u0\\`.\\`name\\` desc, \\`u0\\`.\\`age\\` asc, \\`u0\\`.\\`email\\` asc limit 11", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2'", +] +`; + +exports[`simple cursor based pagination (mysql) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select 
\\`u0\\`.* from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2' and \\`u0\\`.\\`name\\` <= 'User 1' and (\\`u0\\`.\\`name\\` < 'User 1' or (\\`u0\\`.\\`age\\` >= 28 and (\\`u0\\`.\\`age\\` > 28 or \\`u0\\`.\\`email\\` > 'email-55'))) order by \\`u0\\`.\\`name\\` desc, \\`u0\\`.\\`age\\` asc, \\`u0\\`.\\`email\\` asc limit 11", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2'", +] +`; + +exports[`simple cursor based pagination (mysql) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3' order by \\`u1\\`.\\`email\\` desc, \\`u1\\`.\\`name\\` desc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`age\\` desc, \\`u0\\`.\\`email\\` desc limit 6", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3'", +] +`; + +exports[`simple cursor based pagination (mysql) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3' and \\`u1\\`.\\`email\\` <= 'email-96' and \\`u1\\`.\\`name\\` <= 'User 3' and ((\\`u1\\`.\\`email\\` < 'email-96' and \\`u1\\`.\\`name\\` < 'User 3') or (\\`u0\\`.\\`name\\` >= 'User 3' and (\\`u0\\`.\\`name\\` > 'User 3' or (\\`u0\\`.\\`age\\` <= 38 and (\\`u0\\`.\\`age\\` < 38 or \\`u0\\`.\\`email\\` < 'email-76'))))) order by \\`u1\\`.\\`email\\` desc, \\`u1\\`.\\`name\\` desc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`age\\` desc, \\`u0\\`.\\`email\\` desc limit 6", + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` in (5) order by \\`u0\\`.\\`email\\` asc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`_id\\` asc", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3'", +] +`; + +exports[`simple cursor based pagination (postgresql) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select "u0".* from "user" as "u0" where ("u0"."terms_accepted" = true or "u0"."name" = 'User 1' or "u0"."age" <= 30) and "u0"."name" != 'User 2' order by "u0"."name" desc, "u0"."age" asc, "u0"."email" asc limit 11", + "[query] select count(*) as "count" from "user" as "u0" where ("u0"."terms_accepted" = true or "u0"."name" = 'User 1' or "u0"."age" <= 30) and "u0"."name" != 'User 2'", +] +`; + +exports[`simple cursor based pagination (postgresql) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select "u0".* from "user" as "u0" where ("u0"."terms_accepted" = true or "u0"."name" = 'User 1' or "u0"."age" <= 30) and "u0"."name" != 'User 2' and "u0"."name" <= 'User 1' and ("u0"."name" < 'User 1' or ("u0"."age" >= 28 and ("u0"."age" > 28 or "u0"."email" > 'email-55'))) order by "u0"."name" desc, "u0"."age" asc, "u0"."email" 
asc limit 11", + "[query] select count(*) as "count" from "user" as "u0" where ("u0"."terms_accepted" = true or "u0"."name" = 'User 1' or "u0"."age" <= 30) and "u0"."name" != 'User 2'", +] +`; + +exports[`simple cursor based pagination (postgresql) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select "u0".* from "user" as "u0" left join "user" as "u1" on "u0"."best_friend__id" = "u1"."_id" where "u1"."name" = 'User 3' order by "u1"."email" desc, "u1"."name" desc, "u0"."name" asc, "u0"."age" desc, "u0"."email" desc limit 6", + "[query] select count(*) as "count" from "user" as "u0" left join "user" as "u1" on "u0"."best_friend__id" = "u1"."_id" where "u1"."name" = 'User 3'", +] +`; + +exports[`simple cursor based pagination (postgresql) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select "u0".* from "user" as "u0" left join "user" as "u1" on "u0"."best_friend__id" = "u1"."_id" where "u1"."name" = 'User 3' and "u1"."email" <= 'email-96' and "u1"."name" <= 'User 3' and (("u1"."email" < 'email-96' and "u1"."name" < 'User 3') or ("u0"."name" >= 'User 3' and ("u0"."name" > 'User 3' or ("u0"."age" <= 38 and ("u0"."age" < 38 or "u0"."email" < 'email-76'))))) order by "u1"."email" desc, "u1"."name" desc, "u0"."name" asc, "u0"."age" desc, "u0"."email" desc limit 6", + "[query] select "u0".* from "user" as "u0" where "u0"."_id" in (5) order by "u0"."email" asc, "u0"."name" asc, "u0"."_id" asc", + "[query] select count(*) as "count" from "user" as "u0" left join "user" as "u1" on "u0"."best_friend__id" = "u1"."_id" where "u1"."name" = 'User 3'", +] +`; + +exports[`simple cursor based pagination (sqlite) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2' order by \\`u0\\`.\\`name\\` desc, \\`u0\\`.\\`age\\` asc, \\`u0\\`.\\`email\\` asc limit 11", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2'", +] +`; + +exports[`simple cursor based pagination (sqlite) complex cursor based pagination using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2' and \\`u0\\`.\\`name\\` <= 'User 1' and (\\`u0\\`.\\`name\\` < 'User 1' or (\\`u0\\`.\\`age\\` >= 28 and (\\`u0\\`.\\`age\\` > 28 or \\`u0\\`.\\`email\\` > 'email-55'))) order by \\`u0\\`.\\`name\\` desc, \\`u0\\`.\\`age\\` asc, \\`u0\\`.\\`email\\` asc limit 11", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` where (\\`u0\\`.\\`terms_accepted\\` = true or \\`u0\\`.\\`name\\` = 'User 1' or \\`u0\\`.\\`age\\` <= 30) and \\`u0\\`.\\`name\\` != 'User 2'", +] +`; + +exports[`simple cursor based pagination (sqlite) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3' order by \\`u1\\`.\\`email\\` desc, \\`u1\\`.\\`name\\` desc, 
\\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`age\\` desc, \\`u0\\`.\\`email\\` desc limit 6", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3'", +] +`; + +exports[`simple cursor based pagination (sqlite) complex joined cursor based pagination using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3' and \\`u1\\`.\\`email\\` <= 'email-96' and \\`u1\\`.\\`name\\` <= 'User 3' and ((\\`u1\\`.\\`email\\` < 'email-96' and \\`u1\\`.\\`name\\` < 'User 3') or (\\`u0\\`.\\`name\\` >= 'User 3' and (\\`u0\\`.\\`name\\` > 'User 3' or (\\`u0\\`.\\`age\\` <= 38 and (\\`u0\\`.\\`age\\` < 38 or \\`u0\\`.\\`email\\` < 'email-76'))))) order by \\`u1\\`.\\`email\\` desc, \\`u1\\`.\\`name\\` desc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`age\\` desc, \\`u0\\`.\\`email\\` desc limit 6", + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` in (5) order by \\`u0\\`.\\`email\\` asc, \\`u0\\`.\\`name\\` asc, \\`u0\\`.\\`_id\\` asc", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\` left join \\`user\\` as \\`u1\\` on \\`u0\\`.\\`best_friend__id\\` = \\`u1\\`.\\`_id\\` where \\`u1\\`.\\`name\\` = 'User 3'", +] +`; diff --git a/simple-cursor.test.ts.snap b/simple-cursor.test.ts.snap index dae5916..73250ec 100644 --- a/simple-cursor.test.ts.snap +++ b/simple-cursor.test.ts.snap @@ -0,0 +1,841 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 3 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id asc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 6 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id asc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 9 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id asc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 49 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id asc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 50 order by \\`u0\\`.\\`_id\\` asc limit 2", + "[query] select count(*) as \\`count\\` 
from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id desc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id desc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 48 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id desc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 45 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id desc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 42 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id desc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 2 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`first\\` and \\`after\\` (id desc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 1 order by \\`u0\\`.\\`_id\\` desc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 48 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id asc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 45 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id asc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 42 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id asc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 2 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor 
based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id asc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 1 order by \\`u0\\`.\\`_id\\` desc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id desc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id desc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 3 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id desc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 6 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id desc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 9 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id desc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 49 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (better-sqlite) using \\`last\\` and \\`before\\` (id desc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 50 order by \\`u0\\`.\\`_id\\` asc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({}, {}).sort([ [ '_id', 1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 3 } }, {}).sort([ [ '_id', 1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id asc) 3`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 6 } }, {}).sort([ [ '_id', 1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id asc) 4`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 9 } }, {}).sort([ [ '_id', 1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id asc) 5`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 49 } 
}, {}).sort([ [ '_id', 1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id asc) 6`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 50 } }, {}).sort([ [ '_id', 1 ] ]).limit(2).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id desc) 1`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({}, {}).sort([ [ '_id', -1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id desc) 2`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 48 } }, {}).sort([ [ '_id', -1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id desc) 3`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 45 } }, {}).sort([ [ '_id', -1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id desc) 4`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 42 } }, {}).sort([ [ '_id', -1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id desc) 5`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 2 } }, {}).sort([ [ '_id', -1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`first\\` and \\`after\\` (id desc) 6`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 1 } }, {}).sort([ [ '_id', -1 ] ]).limit(2).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({}, {}).sort([ [ '_id', -1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 48 } }, {}).sort([ [ '_id', -1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id asc) 3`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 45 } }, {}).sort([ [ '_id', -1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id asc) 4`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 42 } }, {}).sort([ [ '_id', -1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id asc) 5`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 2 } }, {}).sort([ [ '_id', -1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using 
\\`last\\` and \\`before\\` (id asc) 6`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$lt': 1 } }, {}).sort([ [ '_id', -1 ] ]).limit(2).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id desc) 1`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({}, {}).sort([ [ '_id', 1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id desc) 2`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 3 } }, {}).sort([ [ '_id', 1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id desc) 3`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 6 } }, {}).sort([ [ '_id', 1 ] ]).limit(4).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id desc) 4`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 9 } }, {}).sort([ [ '_id', 1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id desc) 5`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 49 } }, {}).sort([ [ '_id', 1 ] ]).limit(41).toArray();", +] +`; + +exports[`simple cursor based pagination (mongo) using \\`last\\` and \\`before\\` (id desc) 6`] = ` +[ + "[query] db.getCollection('user').countDocuments({}, {});", + "[query] db.getCollection('user').find({ _id: { '$gt': 50 } }, {}).sort([ [ '_id', 1 ] ]).limit(2).toArray();", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 3 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id asc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 6 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id asc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 9 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id asc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 49 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and 
\\`after\\` (id asc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 50 order by \\`u0\\`.\\`_id\\` asc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id desc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id desc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 48 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id desc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 45 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id desc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 42 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id desc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 2 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`first\\` and \\`after\\` (id desc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 1 order by \\`u0\\`.\\`_id\\` desc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 48 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id asc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 45 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id asc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 42 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id asc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 2 order by \\`u0\\`.\\`_id\\` desc 
limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id asc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 1 order by \\`u0\\`.\\`_id\\` desc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id desc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id desc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 3 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id desc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 6 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id desc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 9 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id desc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 49 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (mysql) using \\`last\\` and \\`before\\` (id desc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 50 order by \\`u0\\`.\\`_id\\` asc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select "u0".* from "user" as "u0" order by "u0"."_id" asc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 3 order by "u0"."_id" asc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id asc) 3`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 6 order by "u0"."_id" asc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id asc) 4`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 9 order by "u0"."_id" asc limit 41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id asc) 5`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 49 order by "u0"."_id" asc limit 
41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id asc) 6`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 50 order by "u0"."_id" asc limit 2", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id desc) 1`] = ` +[ + "[query] select "u0".* from "user" as "u0" order by "u0"."_id" desc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id desc) 2`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 48 order by "u0"."_id" desc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id desc) 3`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 45 order by "u0"."_id" desc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id desc) 4`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 42 order by "u0"."_id" desc limit 41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id desc) 5`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 2 order by "u0"."_id" desc limit 41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`first\\` and \\`after\\` (id desc) 6`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 1 order by "u0"."_id" desc limit 2", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select "u0".* from "user" as "u0" order by "u0"."_id" desc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 48 order by "u0"."_id" desc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id asc) 3`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 45 order by "u0"."_id" desc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id asc) 4`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 42 order by "u0"."_id" desc limit 41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id asc) 5`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" < 2 order by "u0"."_id" desc limit 41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id asc) 6`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" 
< 1 order by "u0"."_id" desc limit 2", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id desc) 1`] = ` +[ + "[query] select "u0".* from "user" as "u0" order by "u0"."_id" asc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id desc) 2`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 3 order by "u0"."_id" asc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id desc) 3`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 6 order by "u0"."_id" asc limit 4", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id desc) 4`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 9 order by "u0"."_id" asc limit 41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id desc) 5`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 49 order by "u0"."_id" asc limit 41", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (postgresql) using \\`last\\` and \\`before\\` (id desc) 6`] = ` +[ + "[query] select "u0".* from "user" as "u0" where "u0"."_id" > 50 order by "u0"."_id" asc limit 2", + "[query] select count(*) as "count" from "user" as "u0"", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 3 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id asc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 6 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id asc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 9 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id asc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 49 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id asc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 50 order by \\`u0\\`.\\`_id\\` asc limit 2", + "[query] select 
count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id desc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id desc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 48 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id desc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 45 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id desc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 42 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id desc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 2 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`first\\` and \\`after\\` (id desc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 1 order by \\`u0\\`.\\`_id\\` desc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id asc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id asc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 48 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id asc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 45 order by \\`u0\\`.\\`_id\\` desc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id asc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 42 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id asc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 2 order by \\`u0\\`.\\`_id\\` desc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and 
\\`before\\` (id asc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` < 1 order by \\`u0\\`.\\`_id\\` desc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id desc) 1`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id desc) 2`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 3 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id desc) 3`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 6 order by \\`u0\\`.\\`_id\\` asc limit 4", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id desc) 4`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 9 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id desc) 5`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 49 order by \\`u0\\`.\\`_id\\` asc limit 41", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; + +exports[`simple cursor based pagination (sqlite) using \\`last\\` and \\`before\\` (id desc) 6`] = ` +[ + "[query] select \\`u0\\`.* from \\`user\\` as \\`u0\\` where \\`u0\\`.\\`_id\\` > 50 order by \\`u0\\`.\\`_id\\` asc limit 2", + "[query] select count(*) as \\`count\\` from \\`user\\` as \\`u0\\`", +] +`; diff --git a/complex-cursor.test.ts b/complex-cursor.test.ts index 0842c27..734791c 100644 --- a/complex-cursor.test.ts +++ b/complex-cursor.test.ts @@ -0,0 +1,205 @@ +import { + Cursor, + Entity, + FilterQuery, + MikroORM, + ManyToOne, + PrimaryKey, + Property, + SimpleLogger, + Ref, + ref, + serialize, + Options, +} from '@mikro-orm/core'; +import { mockLogger } from '../../helpers'; +import { PLATFORMS } from '../../bootstrap'; + +@Entity() +export class User { + + @PrimaryKey({ name: '_id' }) + id!: number; + + @Property() + name!: string; + + @Property({ unique: true }) + email!: string; + + @Property() + age!: number; + + @Property() + termsAccepted!: boolean; + + @ManyToOne(() => User, { ref: true, nullable: true }) + bestFriend?: Ref<User>; + +} + +describe.each(['sqlite', 'better-sqlite', 'mysql', 'postgresql', 'mongo'] as const)('simple cursor based pagination (%s)', type => { + + let orm: MikroORM; + + beforeAll(async () => { + const options: Options = {}; + + if (type === 'mysql') { + options.port = 3308; + } + + orm = await MikroORM.init({ + entities: [User], + dbName: type.includes('sqlite') ? 
':memory:' : 'mikro_orm_cursor', + driver: PLATFORMS[type], + loggerFactory: options => new SimpleLogger(options), + ...options, + }); + await orm.schema.refreshDatabase(); + const users: User[] = []; + + for (let i = 0; i < 50; i++) { + const u = orm.em.create(User, { + id: i + 1, + name: `User ${Math.round((i * 2) % 5 / 2) + 1}`, + email: `email-${100 - i}`, + termsAccepted: i % 3 === 0, + age: Math.round((100 - i) / 2), + }); + users.push(u); + + if (i % 5 === 4) { + u.bestFriend = ref(users[i % 5]); + } + } + + await orm.em.flush(); + orm.em.clear(); + }); + + afterAll(() => orm.close(true)); + + test('complex cursor based pagination using `first` and `after` (id asc)', async () => { + const mock = mockLogger(orm, ['query', 'query-params']); + const where = { $or: [{ termsAccepted: true }, { name: 'User 1' }, { age: { $lte: 30 } }], name: { $ne: 'User 2' } } satisfies FilterQuery<User>; + const orderBy = { name: 'desc', age: 'asc', email: 'asc' } as const; + + // 1. page + const cursor1 = await orm.em.findByCursor(User, where, { + first: 10, + orderBy, + }); + expect(cursor1).toBeInstanceOf(Cursor); + expect(cursor1.items).toMatchObject([ + { name: 'User 3', age: 26, email: 'email-51', termsAccepted: false, id: 50 }, + { name: 'User 3', age: 27, email: 'email-53', termsAccepted: false, id: 48 }, + { name: 'User 3', age: 28, email: 'email-56', termsAccepted: false, id: 45 }, + { name: 'User 3', age: 29, email: 'email-58', termsAccepted: true, id: 43 }, + { name: 'User 3', age: 31, email: 'email-61', termsAccepted: true, id: 40 }, + { name: 'User 3', age: 37, email: 'email-73', termsAccepted: true, id: 28 }, + { name: 'User 3', age: 38, email: 'email-76', termsAccepted: true, id: 25 }, + { name: 'User 3', age: 44, email: 'email-88', termsAccepted: true, id: 13 }, + { name: 'User 3', age: 46, email: 'email-91', termsAccepted: true, id: 10 }, + { name: 'User 1', age: 28, email: 'email-55', termsAccepted: true, id: 46 }, + ]); + expect(cursor1.totalCount).toBe(19); + expect(cursor1.startCursor).toBe('WyJVc2VyIDMiLDI2LCJlbWFpbC01MSJd'); + expect(cursor1.endCursor).toBe('WyJVc2VyIDEiLDI4LCJlbWFpbC01NSJd'); + expect(cursor1.hasNextPage).toBe(true); + expect(cursor1.hasPrevPage).toBe(false); + let queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 2. 
page + const cursor2 = await orm.em.findByCursor(User, where, { + first: 10, + after: cursor1, + orderBy, + }); + expect(cursor2).toBeInstanceOf(Cursor); + expect(cursor2.items).toMatchObject([ + { name: 'User 1', age: 30, email: 'email-60', termsAccepted: false, id: 41 }, + { name: 'User 1', age: 33, email: 'email-65', termsAccepted: false, id: 36 }, + { name: 'User 1', age: 35, email: 'email-70', termsAccepted: true, id: 31 }, + { name: 'User 1', age: 38, email: 'email-75', termsAccepted: false, id: 26 }, + { name: 'User 1', age: 40, email: 'email-80', termsAccepted: false, id: 21 }, + { name: 'User 1', age: 43, email: 'email-85', termsAccepted: true, id: 16 }, + { name: 'User 1', age: 45, email: 'email-90', termsAccepted: false, id: 11 }, + { name: 'User 1', age: 48, email: 'email-95', termsAccepted: false, id: 6 }, + { name: 'User 1', age: 50, email: 'email-100', termsAccepted: true, id: 1 }, + ]); + expect(cursor2.totalCount).toBe(19); + expect(cursor2.startCursor).toBe('WyJVc2VyIDEiLDMwLCJlbWFpbC02MCJd'); + expect(cursor2.endCursor).toBe('WyJVc2VyIDEiLDUwLCJlbWFpbC0xMDAiXQ'); + expect(cursor2.hasNextPage).toBe(false); + expect(cursor2.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + }); + + test('complex joined cursor based pagination using `last` and `before` (id asc)', async () => { + if (type === 'mongo') { // skip in mongo, there are no joins, so we can't order by a relation property + expect(1).toBe(1); + return; + } + + const mock = mockLogger(orm, ['query', 'query-params']); + const where = { bestFriend: { name: 'User 3' } } satisfies FilterQuery<User>; + const orderBy = { bestFriend: { email: 'asc', name: 'asc' }, name: 'desc', age: 'asc', email: 'asc' } as const; + + // 1. page + const cursor1 = await orm.em.findByCursor(User, where, { + last: 5, + orderBy, + populate: ['bestFriend'], + }); + expect(cursor1).toBeInstanceOf(Cursor); + expect(serialize(cursor1.items, { populate: ['bestFriend'] })).toMatchObject([ + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 38, email: 'email-76' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 41, email: 'email-81' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 43, email: 'email-86' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 46, email: 'email-91' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 48, email: 'email-96' }, + ]); + expect(cursor1.totalCount).toBe(10); + expect(cursor1.startCursor).toBe('W3siYmVzdEZyaWVuZCI6eyJlbWFpbCI6ImVtYWlsLTk2IiwibmFtZSI6IlVzZXIgMyJ9fSwiVXNlciAzIiwzOCwiZW1haWwtNzYiXQ'); + expect(cursor1.endCursor).toBe('W3siYmVzdEZyaWVuZCI6eyJlbWFpbCI6ImVtYWlsLTk2IiwibmFtZSI6IlVzZXIgMyJ9fSwiVXNlciAzIiw0OCwiZW1haWwtOTYiXQ'); + expect(cursor1.hasNextPage).toBe(false); + expect(cursor1.hasPrevPage).toBe(true); + let queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 2. 
page + const cursor2 = await orm.em.findByCursor(User, where, { + last: 5, + before: cursor1, + orderBy, + populate: ['bestFriend'], + }); + expect(cursor2).toBeInstanceOf(Cursor); + expect(serialize(cursor2.items, { populate: ['bestFriend'] })).toMatchObject([ + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 26, email: 'email-51' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 28, email: 'email-56' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 31, email: 'email-61' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 33, email: 'email-66' }, + { bestFriend: { email: 'email-96', name: 'User 3' }, name: 'User 3', age: 36, email: 'email-71' }, + ]); + expect(cursor2.totalCount).toBe(10); + expect(cursor2.startCursor).toBe('W3siYmVzdEZyaWVuZCI6eyJlbWFpbCI6ImVtYWlsLTk2IiwibmFtZSI6IlVzZXIgMyJ9fSwiVXNlciAzIiwyNiwiZW1haWwtNTEiXQ'); + expect(cursor2.endCursor).toBe('W3siYmVzdEZyaWVuZCI6eyJlbWFpbCI6ImVtYWlsLTk2IiwibmFtZSI6IlVzZXIgMyJ9fSwiVXNlciAzIiwzNiwiZW1haWwtNzEiXQ'); + expect(cursor2.hasNextPage).toBe(false); + expect(cursor2.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + + orm.em.clear(); + mock.mockReset(); + }); +}); diff --git a/simple-cursor.test.ts b/simple-cursor.test.ts index 4189852..deb89ac 100644 --- a/simple-cursor.test.ts +++ b/simple-cursor.test.ts @@ -0,0 +1,661 @@ +import { Cursor, Entity, MikroORM, Options, PrimaryKey, Property, SimpleLogger } from '@mikro-orm/core'; +import { mockLogger } from '../../helpers'; +import { PLATFORMS } from '../../bootstrap'; + +@Entity() +class User { + + @PrimaryKey({ name: '_id' }) + id!: number; + + @Property() + name!: string; + + @Property() + email!: string; + + @Property() + age!: number; + + @Property() + termsAccepted!: boolean; + +} + +describe.each(['sqlite', 'better-sqlite', 'mysql', 'postgresql', 'mongo'] as const)('simple cursor based pagination (%s)', type => { + + let orm: MikroORM; + + beforeAll(async () => { + const options: Options = {}; + + if (type === 'mysql') { + options.port = 3308; + } + + orm = await MikroORM.init({ + entities: [User], + dbName: type.includes('sqlite') ? ':memory:' : 'mikro_orm_cursor', + driver: PLATFORMS[type], + loggerFactory: options => new SimpleLogger(options), + ...options, + }); + await orm.schema.refreshDatabase(); + + for (let i = 0; i < 50; i++) { + orm.em.create(User, { + id: i + 1, + name: `User ${i + 1}`, + email: `email-${100 - i}`, + termsAccepted: i % 5 === 0, + age: Math.round((100 - i) / 2), + }); + } + + await orm.em.flush(); + orm.em.clear(); + }); + + afterAll(() => orm.close(true)); + + test('using `first` and `after` (id asc)', async () => { + const mock = mockLogger(orm, ['query', 'query-params']); + + // 1. page + const cursor1 = await orm.em.findByCursor(User, {}, { + first: 3, + orderBy: { id: 'asc' }, + }); + expect(cursor1).toBeInstanceOf(Cursor); + expect(cursor1.items).toMatchObject([ + { id: 1, name: 'User 1' }, + { id: 2, name: 'User 2' }, + { id: 3, name: 'User 3' }, + ]); + expect(cursor1.totalCount).toBe(50); + expect(cursor1.startCursor).toBe('WzFd'); + expect(cursor1.endCursor).toBe('WzNd'); + expect(cursor1.hasNextPage).toBe(true); + expect(cursor1.hasPrevPage).toBe(false); + let queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 2. 
page + const cursor2 = await orm.em.findByCursor(User, {}, { + first: 3, + after: cursor1, + orderBy: { id: 'asc' }, + }); + expect(cursor2).toBeInstanceOf(Cursor); + expect(cursor2.items).toMatchObject([ + { id: 4, name: 'User 4' }, + { id: 5, name: 'User 5' }, + { id: 6, name: 'User 6' }, + ]); + expect(cursor2.totalCount).toBe(50); + expect(cursor2.startCursor).toBe('WzRd'); + expect(cursor2.endCursor).toBe('WzZd'); + expect(cursor2.hasNextPage).toBe(true); + expect(cursor2.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 3. page + const cursor3 = await orm.em.findByCursor(User, {}, { + first: 3, + after: cursor2, + orderBy: { id: 'asc' }, + }); + expect(cursor3).toBeInstanceOf(Cursor); + expect(cursor3.items).toMatchObject([ + { id: 7, name: 'User 7' }, + { id: 8, name: 'User 8' }, + { id: 9, name: 'User 9' }, + ]); + expect(cursor3.totalCount).toBe(50); + expect(cursor3.startCursor).toBe('Wzdd'); + expect(cursor3.endCursor).toBe('Wzld'); + expect(cursor3.hasNextPage).toBe(true); + expect(cursor3.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 4. page + const cursor4 = await orm.em.findByCursor(User, {}, { + first: 40, + after: cursor3, + orderBy: { id: 'asc' }, + }); + expect(cursor4).toBeInstanceOf(Cursor); + expect(cursor4.items).toHaveLength(40); + expect(cursor4.items[0]).toMatchObject({ id: 10, name: 'User 10' }); + expect(cursor4.items[39]).toMatchObject({ id: 49, name: 'User 49' }); + expect(cursor4.totalCount).toBe(50); + expect(cursor4.startCursor).toBe('WzEwXQ'); + expect(cursor4.endCursor).toBe('WzQ5XQ'); + expect(cursor4.hasNextPage).toBe(true); + expect(cursor4.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 5. page (last) + const cursor5 = await orm.em.findByCursor(User, {}, { + first: 40, + after: cursor4, + orderBy: { id: 'asc' }, + }); + expect(cursor5).toBeInstanceOf(Cursor); + expect(cursor5.items).toMatchObject([{ id: 50, name: 'User 50' }]); + expect(cursor5.totalCount).toBe(50); + expect(cursor5.startCursor).toBe('WzUwXQ'); + expect(cursor5.endCursor).toBe('WzUwXQ'); + expect(cursor5.hasNextPage).toBe(false); + expect(cursor5.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 6. page (empty) + const cursor6 = await orm.em.findByCursor(User, {}, { + first: 1, + after: cursor5, + orderBy: { id: 'asc' }, + }); + expect(cursor6).toBeInstanceOf(Cursor); + expect(cursor6.items).toHaveLength(0); + expect(cursor6.totalCount).toBe(50); + expect(cursor6.startCursor).toBeNull(); + expect(cursor6.endCursor).toBeNull(); + expect(cursor6.hasNextPage).toBe(false); + expect(cursor6.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + }); + + test('using `first` and `after` (id desc)', async () => { + const mock = mockLogger(orm, ['query', 'query-params']); + + // 1. 
page + const cursor1 = await orm.em.findByCursor(User, {}, { + first: 3, + orderBy: { id: 'desc' }, + }); + expect(cursor1).toBeInstanceOf(Cursor); + expect(cursor1.items).toMatchObject([ + { id: 50, name: 'User 50' }, + { id: 49, name: 'User 49' }, + { id: 48, name: 'User 48' }, + ]); + expect(cursor1.totalCount).toBe(50); + expect(cursor1.startCursor).toBe('WzUwXQ'); + expect(cursor1.endCursor).toBe('WzQ4XQ'); + expect(cursor1.hasNextPage).toBe(true); + expect(cursor1.hasPrevPage).toBe(false); + let queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 2. page + const cursor2 = await orm.em.findByCursor(User, {}, { + first: 3, + after: cursor1, + orderBy: { id: 'desc' }, + }); + expect(cursor2).toBeInstanceOf(Cursor); + expect(cursor2.items).toMatchObject([ + { id: 47, name: 'User 47' }, + { id: 46, name: 'User 46' }, + { id: 45, name: 'User 45' }, + ]); + expect(cursor2.totalCount).toBe(50); + expect(cursor2.startCursor).toBe('WzQ3XQ'); + expect(cursor2.endCursor).toBe('WzQ1XQ'); + expect(cursor2.hasNextPage).toBe(true); + expect(cursor2.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 3. page + const cursor3 = await orm.em.findByCursor(User, {}, { + first: 3, + after: cursor2, + orderBy: { id: 'desc' }, + }); + expect(cursor3).toBeInstanceOf(Cursor); + expect(cursor3.items).toMatchObject([ + { id: 44, name: 'User 44' }, + { id: 43, name: 'User 43' }, + { id: 42, name: 'User 42' }, + ]); + expect(cursor3.totalCount).toBe(50); + expect(cursor3.startCursor).toBe('WzQ0XQ'); + expect(cursor3.endCursor).toBe('WzQyXQ'); + expect(cursor3.hasNextPage).toBe(true); + expect(cursor3.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 4. page + const cursor4 = await orm.em.findByCursor(User, {}, { + first: 40, + after: cursor3, + orderBy: { id: 'desc' }, + }); + expect(cursor4).toBeInstanceOf(Cursor); + expect(cursor4.items).toHaveLength(40); + expect(cursor4.items[0]).toMatchObject({ id: 41, name: 'User 41' }); + expect(cursor4.items[39]).toMatchObject({ id: 2, name: 'User 2' }); + expect(cursor4.totalCount).toBe(50); + expect(cursor4.startCursor).toBe('WzQxXQ'); + expect(cursor4.endCursor).toBe('WzJd'); + expect(cursor4.hasNextPage).toBe(true); + expect(cursor4.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 5. page (last) + const cursor5 = await orm.em.findByCursor(User, {}, { + first: 40, + after: cursor4, + orderBy: { id: 'desc' }, + }); + expect(cursor5).toBeInstanceOf(Cursor); + expect(cursor5.items).toMatchObject([{ id: 1, name: 'User 1' }]); + expect(cursor5.totalCount).toBe(50); + expect(cursor5.startCursor).toBe('WzFd'); + expect(cursor5.endCursor).toBe('WzFd'); + expect(cursor5.hasNextPage).toBe(false); + expect(cursor5.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 6. 
page (empty) + const cursor6 = await orm.em.findByCursor(User, {}, { + first: 1, + after: cursor5, + orderBy: { id: 'desc' }, + }); + expect(cursor6).toBeInstanceOf(Cursor); + expect(cursor6.items).toHaveLength(0); + expect(cursor6.totalCount).toBe(50); + expect(cursor6.startCursor).toBeNull(); + expect(cursor6.endCursor).toBeNull(); + expect(cursor6.hasNextPage).toBe(false); + expect(cursor6.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + }); + + test('using `last` and `before` (id asc)', async () => { + const mock = mockLogger(orm, ['query', 'query-params']); + + // 1. page + const cursor1 = await orm.em.findByCursor(User, {}, { + last: 3, + orderBy: { id: 'asc' }, + }); + expect(cursor1).toBeInstanceOf(Cursor); + expect(cursor1.items).toMatchObject([ + { id: 48, name: 'User 48' }, + { id: 49, name: 'User 49' }, + { id: 50, name: 'User 50' }, + ]); + expect(cursor1.totalCount).toBe(50); + expect(cursor1.startCursor).toBe('WzQ4XQ'); + expect(cursor1.endCursor).toBe('WzUwXQ'); + expect(cursor1.hasNextPage).toBe(false); + expect(cursor1.hasPrevPage).toBe(true); + let queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 2. page + const cursor2 = await orm.em.findByCursor(User, {}, { + last: 3, + before: cursor1, + orderBy: { id: 'asc' }, + }); + expect(cursor2).toBeInstanceOf(Cursor); + expect(cursor2.items).toMatchObject([ + { id: 45, name: 'User 45' }, + { id: 46, name: 'User 46' }, + { id: 47, name: 'User 47' }, + ]); + expect(cursor2.totalCount).toBe(50); + expect(cursor2.startCursor).toBe('WzQ1XQ'); + expect(cursor2.endCursor).toBe('WzQ3XQ'); + expect(cursor2.hasNextPage).toBe(true); + expect(cursor2.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 3. page + const cursor3 = await orm.em.findByCursor(User, {}, { + last: 3, + before: cursor2, + orderBy: { id: 'asc' }, + }); + expect(cursor3).toBeInstanceOf(Cursor); + expect(cursor3.items).toMatchObject([ + { id: 42, name: 'User 42' }, + { id: 43, name: 'User 43' }, + { id: 44, name: 'User 44' }, + ]); + expect(cursor3.totalCount).toBe(50); + expect(cursor3.startCursor).toBe('WzQyXQ'); + expect(cursor3.endCursor).toBe('WzQ0XQ'); + expect(cursor3.hasNextPage).toBe(true); + expect(cursor3.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 4. page + const cursor4 = await orm.em.findByCursor(User, {}, { + last: 40, + before: cursor3, + orderBy: { id: 'asc' }, + }); + expect(cursor4).toBeInstanceOf(Cursor); + expect(cursor4.items).toHaveLength(40); + expect(cursor4.items[0]).toMatchObject({ id: 2, name: 'User 2' }); + expect(cursor4.items[39]).toMatchObject({ id: 41, name: 'User 41' }); + expect(cursor4.totalCount).toBe(50); + expect(cursor4.startCursor).toBe('WzJd'); + expect(cursor4.endCursor).toBe('WzQxXQ'); + expect(cursor4.hasNextPage).toBe(true); + expect(cursor4.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 5. 
page (last) + const cursor5 = await orm.em.findByCursor(User, {}, { + last: 40, + before: cursor4, + orderBy: { id: 'asc' }, + }); + expect(cursor5).toBeInstanceOf(Cursor); + expect(cursor5.items).toMatchObject([{ id: 1, name: 'User 1' }]); + expect(cursor5.totalCount).toBe(50); + expect(cursor5.startCursor).toBe('WzFd'); + expect(cursor5.endCursor).toBe('WzFd'); + expect(cursor5.hasNextPage).toBe(false); + expect(cursor5.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 6. page (empty) + const cursor6 = await orm.em.findByCursor(User, {}, { + last: 1, + before: cursor5, + orderBy: { id: 'asc' }, + }); + expect(cursor6).toBeInstanceOf(Cursor); + expect(cursor6.items).toHaveLength(0); + expect(cursor6.totalCount).toBe(50); + expect(cursor6.startCursor).toBeNull(); + expect(cursor6.endCursor).toBeNull(); + expect(cursor6.hasNextPage).toBe(false); + expect(cursor6.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + }); + + test('using `last` and `before` (id desc)', async () => { + const mock = mockLogger(orm, ['query', 'query-params']); + + // 1. page + const cursor1 = await orm.em.findByCursor(User, {}, { + last: 3, + orderBy: { id: 'desc' }, + }); + expect(cursor1).toBeInstanceOf(Cursor); + expect(cursor1.items).toMatchObject([ + { id: 3, name: 'User 3' }, + { id: 2, name: 'User 2' }, + { id: 1, name: 'User 1' }, + ]); + expect(cursor1.totalCount).toBe(50); + expect(cursor1.startCursor).toBe('WzNd'); + expect(cursor1.endCursor).toBe('WzFd'); + expect(cursor1.hasNextPage).toBe(false); + expect(cursor1.hasPrevPage).toBe(true); + let queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 2. page + const cursor2 = await orm.em.findByCursor(User, {}, { + last: 3, + before: cursor1, + orderBy: { id: 'desc' }, + }); + expect(cursor2).toBeInstanceOf(Cursor); + expect(cursor2.items).toMatchObject([ + { id: 6, name: 'User 6' }, + { id: 5, name: 'User 5' }, + { id: 4, name: 'User 4' }, + ]); + expect(cursor2.totalCount).toBe(50); + expect(cursor2.startCursor).toBe('WzZd'); + expect(cursor2.endCursor).toBe('WzRd'); + expect(cursor2.hasNextPage).toBe(true); + expect(cursor2.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 3. page + const cursor3 = await orm.em.findByCursor(User, {}, { + last: 3, + before: cursor2, + orderBy: { id: 'desc' }, + }); + expect(cursor3).toBeInstanceOf(Cursor); + expect(cursor3.items).toMatchObject([ + { id: 9, name: 'User 9' }, + { id: 8, name: 'User 8' }, + { id: 7, name: 'User 7' }, + ]); + expect(cursor3.totalCount).toBe(50); + expect(cursor3.startCursor).toBe('Wzld'); + expect(cursor3.endCursor).toBe('Wzdd'); + expect(cursor3.hasNextPage).toBe(true); + expect(cursor3.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 4. 
page + const cursor4 = await orm.em.findByCursor(User, {}, { + last: 40, + before: cursor3, + orderBy: { id: 'desc' }, + }); + expect(cursor4).toBeInstanceOf(Cursor); + expect(cursor4.items).toHaveLength(40); + expect(cursor4.items[0]).toMatchObject({ id: 49, name: 'User 49' }); + expect(cursor4.items[39]).toMatchObject({ id: 10, name: 'User 10' }); + expect(cursor4.totalCount).toBe(50); + expect(cursor4.startCursor).toBe('WzQ5XQ'); + expect(cursor4.endCursor).toBe('WzEwXQ'); + expect(cursor4.hasNextPage).toBe(true); + expect(cursor4.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 5. page (last) + const cursor5 = await orm.em.findByCursor(User, {}, { + last: 40, + before: cursor4, + orderBy: { id: 'desc' }, + }); + expect(cursor5).toBeInstanceOf(Cursor); + expect(cursor5.items).toMatchObject([{ id: 50, name: 'User 50' }]); + expect(cursor5.totalCount).toBe(50); + expect(cursor5.startCursor).toBe('WzUwXQ'); + expect(cursor5.endCursor).toBe('WzUwXQ'); + expect(cursor5.hasNextPage).toBe(false); + expect(cursor5.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + + // 6. page (empty) + const cursor6 = await orm.em.findByCursor(User, {}, { + last: 1, + before: cursor5, + orderBy: { id: 'desc' }, + }); + expect(cursor6).toBeInstanceOf(Cursor); + expect(cursor6.items).toHaveLength(0); + expect(cursor6.totalCount).toBe(50); + expect(cursor6.startCursor).toBeNull(); + expect(cursor6.endCursor).toBeNull(); + expect(cursor6.hasNextPage).toBe(false); + expect(cursor6.hasPrevPage).toBe(true); + queries = mock.mock.calls.map(call => call[0]).sort(); + expect(queries).toMatchSnapshot(); + orm.em.clear(); + mock.mockReset(); + }); + + test('using both `before` and `after` (id desc)', async () => { + const cursor1 = await orm.em.findByCursor(User, {}, { + before: { id: 5 }, + after: { id: 15 }, + orderBy: { id: 'desc' }, + }); + expect([...cursor1]).toMatchObject([ + { id: 14, name: 'User 14' }, + { id: 13, name: 'User 13' }, + { id: 12, name: 'User 12' }, + { id: 11, name: 'User 11' }, + { id: 10, name: 'User 10' }, + { id: 9, name: 'User 9' }, + { id: 8, name: 'User 8' }, + { id: 7, name: 'User 7' }, + { id: 6, name: 'User 6' }, + ]); + + const cursor2 = await orm.em.findByCursor(User, {}, { + before: { id: 5 }, + after: { id: 15 }, + first: 5, + orderBy: { id: 'desc' }, + }); + expect([...cursor2]).toMatchObject([ + { id: 14, name: 'User 14' }, + { id: 13, name: 'User 13' }, + { id: 12, name: 'User 12' }, + { id: 11, name: 'User 11' }, + { id: 10, name: 'User 10' }, + ]); + + const cursor3 = await orm.em.findByCursor(User, {}, { + before: { id: 5 }, + after: { id: 15 }, + last: 5, + orderBy: { id: 'desc' }, + }); + expect([...cursor3]).toMatchObject([ + { id: 10, name: 'User 10' }, + { id: 9, name: 'User 9' }, + { id: 8, name: 'User 8' }, + { id: 7, name: 'User 7' }, + { id: 6, name: 'User 6' }, + ]); + }); + + test('using both `before` and `after` with `em.find` (id desc)', async () => { + const items1 = await orm.em.find(User, {}, { + before: { id: 5 }, + after: { id: 15 }, + orderBy: { id: 'desc' }, + }); + expect(items1).toMatchObject([ + { id: 14, name: 'User 14' }, + { id: 13, name: 'User 13' }, + { id: 12, name: 'User 12' }, + { id: 11, name: 'User 11' }, + { id: 10, name: 'User 10' }, + { id: 9, name: 'User 9' }, + { id: 8, name: 'User 8' }, + { id: 7, name: 'User 7' }, + { 
id: 6, name: 'User 6' }, + ]); + + const items2 = await orm.em.find(User, {}, { + before: { id: 5 }, + after: { id: 15 }, + first: 5, + orderBy: { id: 'desc' }, + }); + expect(items2).toMatchObject([ + { id: 14, name: 'User 14' }, + { id: 13, name: 'User 13' }, + { id: 12, name: 'User 12' }, + { id: 11, name: 'User 11' }, + { id: 10, name: 'User 10' }, + ]); + + const items3 = await orm.em.find(User, {}, { + before: { id: 5 }, + after: { id: 15 }, + last: 5, + orderBy: { id: 'desc' }, + }); + expect(items3).toMatchObject([ + { id: 10, name: 'User 10' }, + { id: 9, name: 'User 9' }, + { id: 8, name: 'User 8' }, + { id: 7, name: 'User 7' }, + { id: 6, name: 'User 6' }, + ]); + }); + + test('validation', async () => { + await expect(orm.em.findByCursor(User, {}, { + before: { id: 5 }, + after: { id: 15 }, + })).rejects.toThrow('Explicit `orderBy` option required'); + }); +});
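Editorial note (not part of the commit): the tests above exercise the `em.findByCursor` API across the SQLite, better-sqlite, MySQL, PostgreSQL and MongoDB drivers. As a quick orientation, the following is a minimal TypeScript sketch of the same API as used in those tests; the in-memory SQLite setup, the trimmed-down `User` entity and the page size of 3 are illustrative assumptions, and it presumes a MikroORM version that ships `findByCursor` (the feature these tests cover).

import { Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/core';
import { SqliteDriver } from '@mikro-orm/sqlite';

@Entity()
class User {

  @PrimaryKey()
  id!: number;

  @Property()
  name!: string;

}

async function main() {
  // Assumption: in-memory SQLite is used here only for illustration; the tests run against several drivers.
  const orm = await MikroORM.init({ driver: SqliteDriver, dbName: ':memory:', entities: [User] });
  await orm.schema.refreshDatabase();

  const em = orm.em.fork();

  // Seed a few rows, mirroring the fixture loop in the tests above.
  for (let i = 1; i <= 10; i++) {
    em.create(User, { id: i, name: `User ${i}` });
  }
  await em.flush();

  // First page: the first 3 users ordered by primary key.
  const page1 = await em.findByCursor(User, {}, {
    first: 3,
    orderBy: { id: 'asc' },
  });
  console.log(page1.items.length, page1.hasNextPage, page1.endCursor);

  // Next page: pass the previous cursor via `after`; the same explicit `orderBy` is repeated.
  const page2 = await em.findByCursor(User, {}, {
    first: 3,
    after: page1,
    orderBy: { id: 'asc' },
  });
  console.log(page2.items.length, page2.hasPrevPage);

  await orm.close(true);
}

main().catch(console.error);

As the validation test at the end of simple-cursor.test.ts shows, `findByCursor` rejects calls without an explicit `orderBy`, since the encoded cursor values are derived from the ordered properties.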
docs: use bug/feat prefix only
2561a29dc689989c987e494518c456e458650580
docs
https://github.com/rohankumardubey/ibis/commit/2561a29dc689989c987e494518c456e458650580
use bug/feat prefix only
diff --git a/bug-report.yml b/bug-report.yml index c0db5be..8004d68 100644 --- a/bug-report.yml +++ b/bug-report.yml @@ -1,6 +1,6 @@ name: Bug Report description: File a bug report -title: "bug: something isn't working ..." +title: "bug: " labels: ["bug"] body: - type: markdown diff --git a/feature-request.yml b/feature-request.yml index 81f9f9b..63194bb 100644 --- a/feature-request.yml +++ b/feature-request.yml @@ -1,6 +1,6 @@ name: Feature Request description: Suggest an idea for ibis -title: "feat: superb idea" +title: "feat: " labels: ["feature"] body: - type: markdown
style(docstrings): format docstrings according to `black` style
9e1109b118a1b82275305e72526c9e8ddfefae4d
style
https://github.com/rohankumardubey/ibis/commit/9e1109b118a1b82275305e72526c9e8ddfefae4d
format docstrings according to `black` style
diff --git a/__init__.py b/__init__.py index e19c3cd..47a301e 100644 --- a/__init__.py +++ b/__init__.py @@ -379,7 +379,9 @@ class Backend(BaseSQLBackend, CanCreateDatabase): Examples -------- - >>> con.create_table('new_table_name', table_expr) # quartodoc: +SKIP # doctest: +SKIP + >>> con.create_table( + ... "new_table_name", table_expr + ... ) # quartodoc: +SKIP # doctest: +SKIP """ import pandas as pd import pyarrow as pa @@ -500,9 +502,11 @@ class Backend(BaseSQLBackend, CanCreateDatabase): Examples -------- - >>> table = 'my_table' - >>> db = 'operations' - >>> con.drop_table_or_view(table, db, force=True) # quartodoc: +SKIP # doctest: +SKIP + >>> table = "my_table" + >>> db = "operations" + >>> con.drop_table_or_view( + ... table, db, force=True + ... ) # quartodoc: +SKIP # doctest: +SKIP """ statement = DropTable(name, database=database, must_exist=not force) self.raw_sql(statement.compile()) @@ -546,11 +550,13 @@ class Backend(BaseSQLBackend, CanCreateDatabase): Examples -------- - >>> table = 'my_table' + >>> table = "my_table" >>> con.insert(table, table_expr) # quartodoc: +SKIP # doctest: +SKIP # Completely overwrite contents - >>> con.insert(table, table_expr, overwrite=True) # quartodoc: +SKIP # doctest: +SKIP + >>> con.insert( + ... table, table_expr, overwrite=True + ... ) # quartodoc: +SKIP # doctest: +SKIP """ table = self.table(table_name, database=database) return table.insert( diff --git a/timecontext.py b/timecontext.py index 32abcb6..d4467bc 100644 --- a/timecontext.py +++ b/timecontext.py @@ -59,9 +59,9 @@ def combine_time_context( -------- >>> import pandas as pd >>> timecontexts = [ - ... (pd.Timestamp('20200102'), pd.Timestamp('20200103')), - ... (pd.Timestamp('20200101'), pd.Timestamp('20200106')), - ... (pd.Timestamp('20200109'), pd.Timestamp('20200110')), + ... (pd.Timestamp("20200102"), pd.Timestamp("20200103")), + ... (pd.Timestamp("20200101"), pd.Timestamp("20200106")), + ... (pd.Timestamp("20200109"), pd.Timestamp("20200110")), ... ] >>> combine_time_context(timecontexts) (Timestamp(...), Timestamp(...)) diff --git a/client.py b/client.py index dd3900a..beee908 100644 --- a/client.py +++ b/client.py @@ -178,8 +178,7 @@ def parse_project_and_dataset(project: str, dataset: str = "") -> tuple[str, str Examples -------- >>> data_project, billing_project, dataset = parse_project_and_dataset( - ... 'ibis-gbq', - ... 'foo-bar.my_dataset' + ... "ibis-gbq", "foo-bar.my_dataset" ... ) >>> data_project 'foo-bar' @@ -188,8 +187,7 @@ def parse_project_and_dataset(project: str, dataset: str = "") -> tuple[str, str >>> dataset 'my_dataset' >>> data_project, billing_project, dataset = parse_project_and_dataset( - ... 'ibis-gbq', - ... 'my_dataset' + ... "ibis-gbq", "my_dataset" ... ) >>> data_project 'ibis-gbq' @@ -197,9 +195,7 @@ def parse_project_and_dataset(project: str, dataset: str = "") -> tuple[str, str 'ibis-gbq' >>> dataset 'my_dataset' - >>> data_project, billing_project, _ = parse_project_and_dataset( - ... 'ibis-gbq' - ... 
) + >>> data_project, billing_project, _ = parse_project_and_dataset("ibis-gbq") >>> data_project 'ibis-gbq' """ diff --git a/find.py b/find.py index 8542bb5..2448fb2 100644 --- a/find.py +++ b/find.py @@ -44,7 +44,7 @@ def find_names(node: ast.AST) -> list[ast.Name]: Examples -------- >>> import ast - >>> node = ast.parse('a + b') + >>> node = ast.parse("a + b") >>> names = find_names(node) >>> names [<....Name object at 0x...>, <....Name object at 0x...>] diff --git a/util.py b/util.py index e420932..5dc5dbf 100644 --- a/util.py +++ b/util.py @@ -205,11 +205,11 @@ def is_iterable(o: Any) -> bool: Examples -------- - >>> is_iterable('1') + >>> is_iterable("1") False - >>> is_iterable(b'1') + >>> is_iterable(b"1") False - >>> is_iterable(iter('1')) + >>> is_iterable(iter("1")) True >>> is_iterable(i for i in range(1)) True @@ -254,17 +254,17 @@ def convert_unit(value, unit, to, floor: bool = True): Examples -------- >>> one_second = 1000 - >>> x = convert_unit(one_second, 'ms', 's') + >>> x = convert_unit(one_second, "ms", "s") >>> x 1 >>> one_second = 1 - >>> x = convert_unit(one_second, 's', 'ms') + >>> x = convert_unit(one_second, "s", "ms") >>> x 1000 - >>> x = convert_unit(one_second, 's', 's') + >>> x = convert_unit(one_second, "s", "s") >>> x 1 - >>> x = convert_unit(one_second, 's', 'M') + >>> x = convert_unit(one_second, "s", "M") Traceback (most recent call last): ... ValueError: Cannot convert to or from unit ... to unit ... diff --git a/aggcontext.py b/aggcontext.py index b7f0dfd..ff1a23e 100644 --- a/aggcontext.py +++ b/aggcontext.py @@ -25,12 +25,14 @@ Pandas :: >>> import pandas as pd >>> import numpy as np - >>> df = pd.DataFrame({ - ... 'key': list('aabc'), - ... 'value': np.random.randn(4), - ... 'time': pd.date_range(periods=4, start='now') - ... }) - >>> s = pd.Series(df.value.sum(), index=df.index, name='sum_value') + >>> df = pd.DataFrame( + ... { + ... "key": list("aabc"), + ... "value": np.random.randn(4), + ... "time": pd.date_range(periods=4, start="now"), + ... } + ... ) + >>> s = pd.Series(df.value.sum(), index=df.index, name="sum_value") >>> s # quartodoc: +SKIP # doctest: +SKIP Ibis @@ -38,9 +40,11 @@ Ibis :: >>> import ibis - >>> schema = dict(time='timestamp', key='string', value='double') - >>> t = ibis.table(schema, name='t') - >>> t[t, t.value.sum().name('sum_value')].sum_value # quartodoc: +SKIP # doctest: +SKIP + >>> schema = dict(time="timestamp", key="string", value="double") + >>> t = ibis.table(schema, name="t") + >>> t[ + ... t, t.value.sum().name("sum_value") + ... ].sum_value # quartodoc: +SKIP # doctest: +SKIP ``group_by``, no ``order_by``: ``context.Transform()`` @@ -62,21 +66,25 @@ Pandas >>> import pandas as pd >>> import numpy as np - >>> df = pd.DataFrame({ - ... 'key': list('aabc'), - ... 'value': np.random.randn(4), - ... 'time': pd.date_range(periods=4, start='now') - ... }) - >>> df.groupby('key').value.transform('sum') # quartodoc: +SKIP # doctest: +SKIP + >>> df = pd.DataFrame( + ... { + ... "key": list("aabc"), + ... "value": np.random.randn(4), + ... "time": pd.date_range(periods=4, start="now"), + ... } + ... 
) + >>> df.groupby("key").value.transform("sum") # quartodoc: +SKIP # doctest: +SKIP Ibis :: >>> import ibis - >>> schema = dict(time='timestamp', key='string', value='double') - >>> t = ibis.table(schema, name='t') - >>> t.value.sum().over(ibis.window(group_by=t.key)) # quartodoc: +SKIP # doctest: +SKIP + >>> schema = dict(time="timestamp", key="string", value="double") + >>> t = ibis.table(schema, name="t") + >>> t.value.sum().over( + ... ibis.window(group_by=t.key) + ... ) # quartodoc: +SKIP # doctest: +SKIP ``order_by``, no ``group_by``: ``context.Cumulative()``/``context.Rolling()`` ----------------------------------------------------------------------------- @@ -104,20 +112,22 @@ Pandas >>> import pandas as pd >>> import numpy as np - >>> df = pd.DataFrame({ - ... 'key': list('aabc'), - ... 'value': np.random.randn(4), - ... 'time': pd.date_range(periods=4, start='now') - ... }) - >>> df.sort_values('time').value.cumsum() # quartodoc: +SKIP # doctest: +SKIP + >>> df = pd.DataFrame( + ... { + ... "key": list("aabc"), + ... "value": np.random.randn(4), + ... "time": pd.date_range(periods=4, start="now"), + ... } + ... ) + >>> df.sort_values("time").value.cumsum() # quartodoc: +SKIP # doctest: +SKIP Ibis :: >>> import ibis - >>> schema = dict(time='timestamp', key='string', value='double') - >>> t = ibis.table(schema, name='t') + >>> schema = dict(time="timestamp", key="string", value="double") + >>> t = ibis.table(schema, name="t") >>> window = ibis.cumulative_window(order_by=t.time) >>> t.value.sum().over(window) # quartodoc: +SKIP # doctest: +SKIP @@ -142,20 +152,24 @@ Pandas >>> import pandas as pd >>> import numpy as np - >>> df = pd.DataFrame({ - ... 'key': list('aabc'), - ... 'value': np.random.randn(4), - ... 'time': pd.date_range(periods=4, start='now') - ... }) - >>> df.sort_values('time').value.rolling(3).sum() # quartodoc: +SKIP # doctest: +SKIP + >>> df = pd.DataFrame( + ... { + ... "key": list("aabc"), + ... "value": np.random.randn(4), + ... "time": pd.date_range(periods=4, start="now"), + ... } + ... ) + >>> df.sort_values("time").value.rolling( + ... 3 + ... ).sum() # quartodoc: +SKIP # doctest: +SKIP Ibis :: >>> import ibis - >>> schema = dict(time='timestamp', key='string', value='double') - >>> t = ibis.table(schema, name='t') + >>> schema = dict(time="timestamp", key="string", value="double") + >>> t = ibis.table(schema, name="t") >>> window = ibis.trailing_window(3, order_by=t.time) >>> t.value.sum().over(window) # quartodoc: +SKIP # doctest: +SKIP @@ -181,15 +195,20 @@ Pandas >>> import pandas as pd >>> import numpy as np - >>> df = pd.DataFrame({ - ... 'key': list('aabc'), - ... 'value': np.random.randn(4), - ... 'time': pd.date_range(periods=4, start='now') - ... }) - >>> sorter = lambda df: df.sort_values('time') - >>> gb = df.groupby('key', group_keys=False).apply(sorter).reset_index( - ... drop=True - ... ).groupby('key') + >>> df = pd.DataFrame( + ... { + ... "key": list("aabc"), + ... "value": np.random.randn(4), + ... "time": pd.date_range(periods=4, start="now"), + ... } + ... ) + >>> sorter = lambda df: df.sort_values("time") + >>> gb = ( + ... df.groupby("key", group_keys=False) + ... .apply(sorter) + ... .reset_index(drop=True) + ... .groupby("key") + ... 
) >>> rolling = gb.value.rolling(2) >>> rolling.sum() # quartodoc: +SKIP # doctest: +SKIP @@ -198,8 +217,8 @@ Ibis :: >>> import ibis - >>> schema = dict(time='timestamp', key='string', value='double') - >>> t = ibis.table(schema, name='t') + >>> schema = dict(time="timestamp", key="string", value="double") + >>> t = ibis.table(schema, name="t") >>> window = ibis.trailing_window(2, order_by=t.time, group_by=t.key) >>> t.value.sum().over(window) # quartodoc: +SKIP # doctest: +SKIP """ diff --git a/strings.py b/strings.py index daece24..cd411c9 100644 --- a/strings.py +++ b/strings.py @@ -619,8 +619,8 @@ class StringValue(Value): Examples -------- >>> import ibis - >>> table = ibis.table(dict(string_col='string')) - >>> result = table.string_col.translate('a', 'b') + >>> table = ibis.table(dict(string_col="string")) + >>> result = table.string_col.translate("a", "b") """ return ops.Translate(self, from_str, to_str).to_expr() @@ -772,8 +772,8 @@ class StringValue(Value): Examples -------- >>> import ibis - >>> table = ibis.table(dict(string_col='string')) - >>> result = table.string_col.find_in_set(['a', 'b']) + >>> table = ibis.table(dict(string_col="string")) + >>> result = table.string_col.find_in_set(["a", "b"]) """ return ops.FindInSet(self, str_list).to_expr() @@ -1254,7 +1254,9 @@ class StringValue(Value): Examples -------- >>> import ibis - >>> url = ibis.literal("https://example.com:80/docs/books/tutorial/index.html?name=networking") + >>> url = ibis.literal( + ... "https://example.com:80/docs/books/tutorial/index.html?name=networking" + ... ) >>> result = url.file() # docs/books/tutorial/index.html?name=networking Returns @@ -1270,7 +1272,9 @@ class StringValue(Value): Examples -------- >>> import ibis - >>> url = ibis.literal("https://example.com:80/docs/books/tutorial/index.html?name=networking") + >>> url = ibis.literal( + ... "https://example.com:80/docs/books/tutorial/index.html?name=networking" + ... ) >>> result = url.path() # docs/books/tutorial/index.html Returns @@ -1294,9 +1298,11 @@ class StringValue(Value): Examples -------- >>> import ibis - >>> url = ibis.literal("https://example.com:80/docs/books/tutorial/index.html?name=networking") + >>> url = ibis.literal( + ... "https://example.com:80/docs/books/tutorial/index.html?name=networking" + ... ) >>> result = url.query() # name=networking - >>> query_name = url.query('name') # networking + >>> query_name = url.query("name") # networking Returns ------- diff --git a/annotations.py b/annotations.py index 964a0e5..88b4f8c 100644 --- a/annotations.py +++ b/annotations.py @@ -573,6 +573,7 @@ def annotated(_1=None, _2=None, _3=None, **kwargs): >>> @annotated(x=instance_of(int), y=instance_of(str)) ... def foo(x, y): ... return float(x) + float(y) + ... 3. With mixing type annotations and patterns where the latter takes precedence @@ -585,6 +586,7 @@ def annotated(_1=None, _2=None, _3=None, **kwargs): >>> @annotated([instance_of(int), instance_of(str)], instance_of(float)) ... def foo(x, y): ... return float(x) + float(y) + ... 
Parameters ---------- diff --git a/patterns.py b/patterns.py index 361f642..c05bc87 100644 --- a/patterns.py +++ b/patterns.py @@ -495,8 +495,8 @@ class Call(Slotted, Builder): >>> from ibis.common.patterns import Call >>> from ibis.expr.operations import Negate >>> - >>> c = Call.namespace('ibis.expr.operations') - >>> x = Variable('x') + >>> c = Call.namespace("ibis.expr.operations") + >>> x = Variable("x") >>> pattern = c.Negate(x) >>> pattern Call(func=<class 'ibis.expr.operations.numeric.Negate'>, args=(Variable(name='x'),), kwargs=FrozenDict({})) @@ -847,13 +847,13 @@ class GenericInstanceOf(Slotted, Pattern): Examples -------- >>> class MyNumber(Generic[T_co]): - ... value: T_co + ... value: T_co ... - ... def __init__(self, value: T_co): - ... self.value = value + ... def __init__(self, value: T_co): + ... self.value = value ... - ... def __eq__(self, other): - ... return type(self) is type(other) and self.value == other.value + ... def __eq__(self, other): + ... return type(self) is type(other) and self.value == other.value ... >>> p = GenericInstanceOf(MyNumber[int]) >>> assert p.match(MyNumber(1), {}) == MyNumber(1) @@ -1770,6 +1770,7 @@ def pattern(obj: AnyType) -> Pattern: >>> @pattern ... def as_int(x, context): ... return int(x) + ... >>> >>> assert as_int.match(1, {}) == 1 @@ -1826,7 +1827,11 @@ def match( >>> assert match(1, 1, context={"x": 1}) == 1 >>> assert match(1, 2, context={"x": 1}) is NoMatch >>> assert match([1, int], [1, 2]) == [1, 2] - >>> assert match([1, int, "a" @ InstanceOf(str)], [1, 2, "three"]) == [1, 2, "three"] + >>> assert match([1, int, "a" @ InstanceOf(str)], [1, 2, "three"]) == [ + ... 1, + ... 2, + ... "three", + ... ] """ if context is None: context = {} diff --git a/typing.py b/typing.py index 4a87024..9439265 100644 --- a/typing.py +++ b/typing.py @@ -94,12 +94,16 @@ def get_type_params(obj: Any) -> dict[str, type]: -------- >>> from typing import Dict, List >>> - >>> class MyList(List[T]): ... + >>> class MyList(List[T]): + ... ... + ... >>> >>> get_type_params(MyList[int]) {'T': <class 'int'>} >>> - >>> class MyDict(Dict[T, U]): ... + >>> class MyDict(Dict[T, U]): + ... ... + ... >>> >>> get_type_params(MyDict[int, str]) {'T': <class 'int'>, 'U': <class 'str'>} @@ -135,18 +139,18 @@ def get_bound_typevars(obj: Any) -> dict[TypeVar, tuple[str, type]]: Examples -------- >>> class MyStruct(Generic[T, U]): - ... a: T - ... b: U + ... a: T + ... b: U ... >>> get_bound_typevars(MyStruct[int, str]) {~T: ('a', <class 'int'>), ~U: ('b', <class 'str'>)} >>> >>> class MyStruct(Generic[T, U]): - ... a: T + ... a: T ... - ... @property - ... def myprop(self) -> U: - ... ... + ... @property + ... def myprop(self) -> U: + ... ... ... 
>>> get_bound_typevars(MyStruct[float, bytes]) {~T: ('a', <class 'float'>), ~U: ('myprop', <class 'bytes'>)} @@ -183,7 +187,7 @@ def evaluate_annotations( Examples -------- - >>> annots = {'a': 'dict[str, float]', 'b': 'int'} + >>> annots = {"a": "dict[str, float]", "b": "int"} >>> evaluate_annotations(annots, __name__) {'a': dict[str, float], 'b': <class 'int'>} """ diff --git a/analysis.py b/analysis.py index 6bf3cee..210bab4 100644 --- a/analysis.py +++ b/analysis.py @@ -86,12 +86,12 @@ def find_immediate_parent_tables(input_node, keep_input=True): Examples -------- >>> import ibis, toolz - >>> t = ibis.table([('a', 'int64')], name='t') + >>> t = ibis.table([("a", "int64")], name="t") >>> expr = t.mutate(foo=t.a + 1) - >>> result, = find_immediate_parent_tables(expr.op()) + >>> (result,) = find_immediate_parent_tables(expr.op()) >>> result.equals(expr.op()) True - >>> result, = find_immediate_parent_tables(expr.op(), keep_input=False) + >>> (result,) = find_immediate_parent_tables(expr.op(), keep_input=False) >>> result.equals(t.op()) True """ @@ -590,8 +590,8 @@ def flatten_predicate(node): Examples -------- >>> import ibis - >>> t = ibis.table([('a', 'int64'), ('b', 'string')], name='t') - >>> filt = (t.a == 1) & (t.b == 'foo') + >>> t = ibis.table([("a", "int64"), ("b", "string")], name="t") + >>> filt = (t.a == 1) & (t.b == "foo") >>> predicates = flatten_predicate(filt.op()) >>> len(predicates) 2 diff --git a/api.py b/api.py index 8bfaf90..706a77c 100644 --- a/api.py +++ b/api.py @@ -213,10 +213,10 @@ def param(type: dt.DataType) -> ir.Scalar: Examples -------- >>> import ibis - >>> start = ibis.param('date') - >>> end = ibis.param('date') - >>> schema = dict(timestamp_col='timestamp', value='double') - >>> t = ibis.table(schema, name='t') + >>> start = ibis.param("date") + >>> end = ibis.param("date") + >>> schema = dict(timestamp_col="timestamp", value="double") + >>> t = ibis.table(schema, name="t") >>> predicates = [t.timestamp_col >= start, t.timestamp_col <= end] >>> t.filter(predicates).value.sum() r0 := UnboundTable: t @@ -256,11 +256,8 @@ def schema( Examples -------- >>> from ibis import schema, Schema - >>> sc = schema([('foo', 'string'), - ... ('bar', 'int64'), - ... ('baz', 'boolean')]) - >>> sc = schema(names=['foo', 'bar', 'baz'], - ... types=['string', 'int64', 'boolean']) + >>> sc = schema([("foo", "string"), ("bar", "int64"), ("baz", "boolean")]) + >>> sc = schema(names=["foo", "bar", "baz"], types=["string", "int64", "boolean"]) >>> sc = schema(dict(foo="string")) >>> sc = schema(Schema(dict(foo="string"))) # no-op """ @@ -873,6 +870,7 @@ def read_csv( ... ''' >>> with open("/tmp/lines.csv", mode="w") as f: ... _ = f.write(lines) + ... >>> t = ibis.read_csv("/tmp/lines.csv") >>> t ┏━━━━━━━┳━━━━━━━━┓ @@ -928,6 +926,7 @@ def read_json( ... ''' >>> with open("/tmp/lines.json", mode="w") as f: ... _ = f.write(lines) + ... >>> t = ibis.read_json("/tmp/lines.json") >>> t ┏━━━━━━━┳━━━━━━━━┓ @@ -1070,7 +1069,9 @@ def set_backend(backend: str | BaseBackend) -> None: Or as a URI - >>> ibis.set_backend("postgres://user:password@hostname:5432") # quartodoc: +SKIP # doctest: +SKIP + >>> ibis.set_backend( + ... "postgres://user:password@hostname:5432" + ... 
) # quartodoc: +SKIP # doctest: +SKIP Or as an existing backend instance diff --git a/analytic.py b/analytic.py index b6e5aaf..facc52c 100644 --- a/analytic.py +++ b/analytic.py @@ -63,10 +63,10 @@ class RowNumber(RankBase): Examples -------- >>> import ibis - >>> t = ibis.table([('values', dt.int64)]) + >>> t = ibis.table([("values", dt.int64)]) >>> w = ibis.window(order_by=t.values) >>> row_num = ibis.row_number().over(w) - >>> result = t[t.values, row_num.name('row_num')] + >>> result = t[t.values, row_num.name("row_num")] Returns ------- diff --git a/udf.py b/udf.py index 7efed5b..ecd6b19 100644 --- a/udf.py +++ b/udf.py @@ -106,7 +106,6 @@ class scalar: >>> @ibis.udf.scalar.python ... def add_one(x: int) -> int: ... return x + 1 - ... >>> expr = add_one(2) >>> con = ibis.connect("duckdb://") >>> con.execute(expr) @@ -158,7 +157,6 @@ class scalar: >>> @ibis.udf.scalar.pandas ... def add_one(x: int) -> int: ... return x + 1 - ... >>> expr = add_one(2) >>> con = ibis.connect(os.environ["SNOWFLAKE_URL"]) # doctest: +SKIP >>> con.execute(expr) # doctest: +SKIP @@ -211,7 +209,6 @@ class scalar: >>> @ibis.udf.scalar.pyarrow ... def add_one(x: int) -> int: ... return pc.add(x, 1) - ... >>> expr = add_one(2) >>> con = ibis.connect("duckdb://") >>> con.execute(expr) @@ -262,7 +259,6 @@ class scalar: >>> @ibis.udf.scalar.builtin ... def hamming(a: str, b: str) -> int: ... '''Compute the Hamming distance between two strings.''' - ... >>> expr = hamming("duck", "luck") >>> con = ibis.connect("duckdb://") >>> con.execute(expr) diff --git a/arrays.py b/arrays.py index 00a032f..c9aa0f6 100644 --- a/arrays.py +++ b/arrays.py @@ -144,7 +144,7 @@ class ArrayValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({"a": [[7], [3] , None]}) + >>> t = ibis.memtable({"a": [[7], [3], None]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ ┃ a ┃ @@ -224,7 +224,7 @@ class ArrayValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({"a": [[7], [3] , None]}) + >>> t = ibis.memtable({"a": [[7], [3], None]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ ┃ a ┃ @@ -274,7 +274,7 @@ class ArrayValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({"a": [[7, 42], [3, 3] , None]}) + >>> t = ibis.memtable({"a": [[7, 42], [3, 3], None]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ ┃ a ┃ @@ -765,7 +765,9 @@ class ArrayValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({"arr1": [[3, 2], [], None], "arr2": [[1, 3], [None], [5]]}) + >>> t = ibis.memtable( + ... {"arr1": [[3, 2], [], None], "arr2": [[1, 3], [None], [5]]} + ... ) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ ┃ arr1 ┃ arr2 ┃ @@ -816,7 +818,9 @@ class ArrayValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({"arr1": [[3, 2], [], None], "arr2": [[1, 3], [None], [5]]}) + >>> t = ibis.memtable( + ... {"arr1": [[3, 2], [], None], "arr2": [[1, 3], [None], [5]]} + ... ) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ ┃ arr1 ┃ arr2 ┃ @@ -860,7 +864,9 @@ class ArrayValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({"numbers": [[3, 2], [], None], "strings": [["a", "c"], None, ["e"]]}) + >>> t = ibis.memtable( + ... {"numbers": [[3, 2], [], None], "strings": [["a", "c"], None, ["e"]]} + ... 
) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ ┃ numbers ┃ strings ┃ @@ -947,7 +953,7 @@ def array(values: Iterable[V], type: str | dt.DataType | None = None) -> ArrayVa >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({'a': [1, 2, 3], 'b': [4, 5, 6]}) + >>> t = ibis.memtable({"a": [1, 2, 3], "b": [4, 5, 6]}) >>> ibis.array([t.a, t.b]) ┏━━━━━━━━━━━━━━━━━━━━━━┓ ┃ ArrayColumn() ┃ diff --git a/core.py b/core.py index d2a05e2..c4244b8 100644 --- a/core.py +++ b/core.py @@ -205,9 +205,9 @@ class Expr(Immutable, Coercible): Examples -------- >>> import ibis - >>> t = ibis.table([('a', 'int64'), ('b', 'string')], name='t') - >>> f = lambda a: (a + 1).name('a') - >>> g = lambda a: (a * 2).name('a') + >>> t = ibis.table([("a", "int64"), ("b", "string")], name="t") + >>> f = lambda a: (a + 1).name("a") + >>> g = lambda a: (a * 2).name("a") >>> result1 = t.a.pipe(f).pipe(g) >>> result1 r0 := UnboundTable: t diff --git a/generic.py b/generic.py index f9c4e01..04271de 100644 --- a/generic.py +++ b/generic.py @@ -136,7 +136,9 @@ class Value(Expr): >>> import ibis >>> from ibis import _ >>> ibis.options.interactive = True - >>> t = ibis.memtable({"numbers": [1, 2, 3, 4], "strings": ["1.0", "2", "hello", "world"]}) + >>> t = ibis.memtable( + ... {"numbers": [1, 2, 3, 4], "strings": ["1.0", "2", "hello", "world"]} + ... ) >>> t ┏━━━━━━━━━┳━━━━━━━━━┓ ┃ numbers ┃ strings ┃ @@ -353,15 +355,15 @@ class Value(Expr): Check whether a column's values are contained in a sequence >>> import ibis - >>> table = ibis.table(dict(string_col='string'), name="t") - >>> table.string_col.isin(['foo', 'bar', 'baz']) + >>> table = ibis.table(dict(string_col="string"), name="t") + >>> table.string_col.isin(["foo", "bar", "baz"]) r0 := UnboundTable: t string_col string InValues(string_col): InValues(...) Check whether a column's values are contained in another table's column - >>> table2 = ibis.table(dict(other_string_col='string'), name="t2") + >>> table2 = ibis.table(dict(other_string_col="string"), name="t2") >>> table.string_col.isin(table2.other_string_col) r0 := UnboundTable: t string_col string @@ -627,13 +629,15 @@ class Value(Expr): Examples -------- >>> import ibis - >>> t = ibis.table([('string_col', 'string')], name='t') + >>> t = ibis.table([("string_col", "string")], name="t") >>> expr = t.string_col - >>> case_expr = (expr.case() - ... .when('a', 'an a') - ... .when('b', 'a b') - ... .else_('null or (not a and not b)') - ... .end()) + >>> case_expr = ( + ... expr.case() + ... .when("a", "an a") + ... .when("b", "a b") + ... .else_("null or (not a and not b)") + ... .end() + ... 
) >>> case_expr r0 := UnboundTable: t string_col string @@ -1409,7 +1413,7 @@ class Column(Value, _FixedTextJupyterMixin): └────────┘ >>> t.chars.first() 'a' - >>> t.chars.first(where=t.chars != 'a') + >>> t.chars.first(where=t.chars != "a") 'b' """ return ops.First(self, where=where).to_expr() @@ -1435,7 +1439,7 @@ class Column(Value, _FixedTextJupyterMixin): └────────┘ >>> t.chars.last() 'd' - >>> t.chars.last(where=t.chars != 'd') + >>> t.chars.last(where=t.chars != "d") 'c' """ return ops.Last(self, where=where).to_expr() @@ -1661,13 +1665,13 @@ def literal(value: Any, type: dt.DataType | str | None = None) -> Scalar: Construct a `float64` literal from an `int` - >>> y = ibis.literal(42, type='double') + >>> y = ibis.literal(42, type="double") >>> y.type() Float64(nullable=True) Ibis checks for invalid types - >>> ibis.literal('foobar', type='int64') # quartodoc: +EXPECTED_FAILURE + >>> ibis.literal("foobar", type="int64") # quartodoc: +EXPECTED_FAILURE Traceback (most recent call last): ... TypeError: Value 'foobar' cannot be safely coerced to int64 diff --git a/groupby.py b/groupby.py index dc1d75b..3732858 100644 --- a/groupby.py +++ b/groupby.py @@ -199,9 +199,12 @@ class GroupedTable: │ … │ … │ … │ … │ … │ … │ └─────────┴───────────┴────────────────┴───────────────┴───────────────────┴───┘ >>> ( - ... t.select("species", "bill_length_mm") - ... .group_by("species") - ... .mutate(centered_bill_len=ibis._.bill_length_mm - ibis._.bill_length_mm.mean()) + ... t.select("species", "bill_length_mm") + ... .group_by("species") + ... .mutate( + ... centered_bill_len=ibis._.bill_length_mm + ... - ibis._.bill_length_mm.mean() + ... ) ... ) ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┓ ┃ species ┃ bill_length_mm ┃ centered_bill_len ┃ diff --git a/maps.py b/maps.py index bf30ab8..28c08ab 100644 --- a/maps.py +++ b/maps.py @@ -23,9 +23,14 @@ class MapValue(Value): >>> import ibis >>> ibis.options.interactive = True >>> import pyarrow as pa - >>> tab = pa.table({ - ... "m": pa.array([[("a", 1), ("b", 2)], [("a", 1)], None], - ... type=pa.map_(pa.utf8(), pa.int64()))}) + >>> tab = pa.table( + ... { + ... "m": pa.array( + ... [[("a", 1), ("b", 2)], [("a", 1)], None], + ... type=pa.map_(pa.utf8(), pa.int64()), + ... ) + ... } + ... ) >>> t = ibis.memtable(tab) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ @@ -39,7 +44,7 @@ class MapValue(Value): └──────────────────────┘ Can use `[]` to access values: - >>> t.m['a'] + >>> t.m["a"] ┏━━━━━━━━━━━━━━━━━━━━━━┓ ┃ MapGet(m, 'a', None) ┃ ┡━━━━━━━━━━━━━━━━━━━━━━┩ @@ -51,7 +56,7 @@ class MapValue(Value): └──────────────────────┘ To provide default values, use `get`: - >>> t.m.get('b', 0) + >>> t.m.get("b", 0) ┏━━━━━━━━━━━━━━━━━━━┓ ┃ MapGet(m, 'b', 0) ┃ ┡━━━━━━━━━━━━━━━━━━━┩ @@ -89,9 +94,14 @@ class MapValue(Value): >>> import ibis >>> import pyarrow as pa >>> ibis.options.interactive = True - >>> tab = pa.table({ - ... "m": pa.array([[("a", 1), ("b", 2)], [("a", 1)], None], - ... type=pa.map_(pa.utf8(), pa.int64()))}) + >>> tab = pa.table( + ... { + ... "m": pa.array( + ... [[("a", 1), ("b", 2)], [("a", 1)], None], + ... type=pa.map_(pa.utf8(), pa.int64()), + ... ) + ... } + ... ) >>> t = ibis.memtable(tab) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ @@ -150,9 +160,14 @@ class MapValue(Value): >>> import ibis >>> import pyarrow as pa >>> ibis.options.interactive = True - >>> tab = pa.table({ - ... "m": pa.array([[("a", 1), ("b", 2)], [("a", 1)], None], - ... type=pa.map_(pa.utf8(), pa.int64()))}) + >>> tab = pa.table( + ... { + ... "m": pa.array( + ... 
[[("a", 1), ("b", 2)], [("a", 1)], None], + ... type=pa.map_(pa.utf8(), pa.int64()), + ... ) + ... } + ... ) >>> t = ibis.memtable(tab) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ @@ -202,9 +217,14 @@ class MapValue(Value): >>> import ibis >>> import pyarrow as pa >>> ibis.options.interactive = True - >>> tab = pa.table({ - ... "m": pa.array([[("a", 1), ("b", 2)], [("a", 1)], None], - ... type=pa.map_(pa.utf8(), pa.int64()))}) + >>> tab = pa.table( + ... { + ... "m": pa.array( + ... [[("a", 1), ("b", 2)], [("a", 1)], None], + ... type=pa.map_(pa.utf8(), pa.int64()), + ... ) + ... } + ... ) >>> t = ibis.memtable(tab) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ @@ -249,9 +269,14 @@ class MapValue(Value): >>> import ibis >>> import pyarrow as pa >>> ibis.options.interactive = True - >>> tab = pa.table({ - ... "m": pa.array([[("a", 1), ("b", 2)], [("a", 1)], None], - ... type=pa.map_(pa.utf8(), pa.int64()))}) + >>> tab = pa.table( + ... { + ... "m": pa.array( + ... [[("a", 1), ("b", 2)], [("a", 1)], None], + ... type=pa.map_(pa.utf8(), pa.int64()), + ... ) + ... } + ... ) >>> t = ibis.memtable(tab) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ @@ -289,9 +314,14 @@ class MapValue(Value): >>> import ibis >>> import pyarrow as pa >>> ibis.options.interactive = True - >>> tab = pa.table({ - ... "m": pa.array([[("a", 1), ("b", 2)], [("a", 1)], None], - ... type=pa.map_(pa.utf8(), pa.int64()))}) + >>> tab = pa.table( + ... { + ... "m": pa.array( + ... [[("a", 1), ("b", 2)], [("a", 1)], None], + ... type=pa.map_(pa.utf8(), pa.int64()), + ... ) + ... } + ... ) >>> t = ibis.memtable(tab) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┓ @@ -431,7 +461,7 @@ def map( >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({'keys': [['a', 'b'], ['b']], 'values': [[1, 2], [3]]}) + >>> t = ibis.memtable({"keys": [["a", "b"], ["b"]], "values": [[1, 2], [3]]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ ┃ keys ┃ values ┃ diff --git a/relations.py b/relations.py index 54438d9..a162ec9 100644 --- a/relations.py +++ b/relations.py @@ -745,7 +745,12 @@ class Table(Expr, _FixedTextJupyterMixin): >>> import ibis >>> from ibis import _ >>> ibis.options.interactive = True - >>> t = ibis.memtable({"fruit": ["apple", "apple", "banana", "orange"], "price": [0.5, 0.5, 0.25, 0.33]}) + >>> t = ibis.memtable( + ... { + ... "fruit": ["apple", "apple", "banana", "orange"], + ... "price": [0.5, 0.5, 0.25, 0.33], + ... } + ... ) >>> t ┏━━━━━━━━┳━━━━━━━━━┓ ┃ fruit ┃ price ┃ @@ -907,7 +912,12 @@ class Table(Expr, _FixedTextJupyterMixin): >>> import ibis >>> from ibis import _ >>> ibis.options.interactive = True - >>> t = ibis.memtable({"fruit": ["apple", "apple", "banana", "orange"], "price": [0.5, 0.5, 0.25, 0.33]}) + >>> t = ibis.memtable( + ... { + ... "fruit": ["apple", "apple", "banana", "orange"], + ... "price": [0.5, 0.5, 0.25, 0.33], + ... } + ... ) >>> t ┏━━━━━━━━┳━━━━━━━━━┓ ┃ fruit ┃ price ┃ @@ -919,7 +929,12 @@ class Table(Expr, _FixedTextJupyterMixin): │ banana │ 0.25 │ │ orange │ 0.33 │ └────────┴─────────┘ - >>> t.aggregate(by=["fruit"], total_cost=_.price.sum(), avg_cost=_.price.mean(), having=_.price.sum() < 0.5) + >>> t.aggregate( + ... by=["fruit"], + ... total_cost=_.price.sum(), + ... avg_cost=_.price.mean(), + ... having=_.price.sum() < 0.5, + ... 
) ┏━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━┓ ┃ fruit ┃ total_cost ┃ avg_cost ┃ ┡━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━┩ @@ -1058,7 +1073,9 @@ class Table(Expr, _FixedTextJupyterMixin): Drop all duplicated rows - >>> expr = t.distinct(on=["species", "island", "year", "bill_length_mm"], keep=None) + >>> expr = t.distinct( + ... on=["species", "island", "year", "bill_length_mm"], keep=None + ... ) >>> expr.count() 273 >>> t.count() @@ -1481,7 +1498,9 @@ class Table(Expr, _FixedTextJupyterMixin): >>> import ibis.selectors as s >>> from ibis import _ >>> ibis.options.interactive = True - >>> t = ibis.examples.penguins.fetch().select("species", "year", "bill_length_mm") + >>> t = ibis.examples.penguins.fetch().select( + ... "species", "year", "bill_length_mm" + ... ) >>> t ┏━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┓ ┃ species ┃ year ┃ bill_length_mm ┃ @@ -1518,7 +1537,9 @@ class Table(Expr, _FixedTextJupyterMixin): Add a new column based on an aggregation. Note the automatic broadcasting. - >>> t.select("species", bill_demean=_.bill_length_mm - _.bill_length_mm.mean()).head() + >>> t.select( + ... "species", bill_demean=_.bill_length_mm - _.bill_length_mm.mean() + ... ).head() ┏━━━━━━━━━┳━━━━━━━━━━━━━┓ ┃ species ┃ bill_demean ┃ ┡━━━━━━━━━╇━━━━━━━━━━━━━┩ @@ -2084,7 +2105,9 @@ class Table(Expr, _FixedTextJupyterMixin): │ Adelie │ Torgersen │ 42.0 │ 20.2 │ 190 │ … │ │ … │ … │ … │ … │ … │ … │ └─────────┴───────────┴────────────────┴───────────────┴───────────────────┴───┘ - >>> t.filter([t.species == "Adelie", t.body_mass_g > 3500]).sex.value_counts().dropna("sex") + >>> t.filter( + ... [t.species == "Adelie", t.body_mass_g > 3500] + ... ).sex.value_counts().dropna("sex") ┏━━━━━━━━┳━━━━━━━━━━━┓ ┃ sex ┃ sex_count ┃ ┡━━━━━━━━╇━━━━━━━━━━━┩ @@ -2357,6 +2380,7 @@ class Table(Expr, _FixedTextJupyterMixin): ... ''' >>> with open("/tmp/lines.json", "w") as f: ... _ = f.write(lines) + ... >>> t = ibis.read_json("/tmp/lines.json") >>> t ┏━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ @@ -2934,6 +2958,7 @@ class Table(Expr, _FixedTextJupyterMixin): >>> with t.mutate(computation="Heavy Computation").cache() as cached_penguins: ... cached_penguins + ... ┏━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━┓ ┃ species ┃ island ┃ bill_length_mm ┃ bill_depth_mm ┃ flipper_length_mm ┃ … ┃ ┡━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━┩ @@ -3024,7 +3049,9 @@ class Table(Expr, _FixedTextJupyterMixin): Here we convert column names not matching the selector for the `religion` column and convert those names into values - >>> relig_income.pivot_longer(~s.c("religion"), names_to="income", values_to="count") + >>> relig_income.pivot_longer( + ... ~s.c("religion"), names_to="income", values_to="count" + ... ) ┏━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┓ ┃ religion ┃ income ┃ count ┃ ┡━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━┩ @@ -3181,7 +3208,12 @@ class Table(Expr, _FixedTextJupyterMixin): ... names_pattern="new_?(.*)_(.)(.*)", ... names_transform=dict( ... gender={"m": 1, "f": 2}.get, - ... age=dict(zip(["014", "1524", "2534", "3544", "4554", "5564", "65"], range(7))).get, + ... age=dict( + ... zip( + ... ["014", "1524", "2534", "3544", "4554", "5564", "65"], + ... range(7), + ... ) + ... ).get, ... ), ... values_to="count", ... ) @@ -3216,7 +3248,9 @@ class Table(Expr, _FixedTextJupyterMixin): `names_transform` must be a mapping or callable - >>> who.pivot_longer(s.r["new_sp_m014":"newrel_f65"], names_transform="upper") # quartodoc: +EXPECTED_FAILURE + >>> who.pivot_longer( + ... 
s.r["new_sp_m014":"newrel_f65"], names_transform="upper" + ... ) # quartodoc: +EXPECTED_FAILURE Traceback (most recent call last): ... ibis.common.exceptions.IbisTypeError: ... Got <class 'str'> @@ -3382,7 +3416,9 @@ class Table(Expr, _FixedTextJupyterMixin): Fill missing pivoted values using `values_fill` - >>> fish_encounters.pivot_wider(names_from="station", values_from="seen", values_fill=0) + >>> fish_encounters.pivot_wider( + ... names_from="station", values_from="seen", values_fill=0 + ... ) ┏━━━━━━━┳━━━━━━━━━┳━━━━━━━┳━━━━━━━━┳━━━━━━━┳━━━━━━━━━┳━━━━━━━┳━━━━━━━┳━━━┓ ┃ fish ┃ Release ┃ I80_1 ┃ Lisbon ┃ Rstr ┃ Base_TD ┃ BCE ┃ BCW ┃ … ┃ ┡━━━━━━━╇━━━━━━━━━╇━━━━━━━╇━━━━━━━━╇━━━━━━━╇━━━━━━━━━╇━━━━━━━╇━━━━━━━╇━━━┩ @@ -3422,7 +3458,9 @@ class Table(Expr, _FixedTextJupyterMixin): │ 06 │ California │ rent │ 1358 │ 3 │ │ … │ … │ … │ … │ … │ └────────┴────────────┴──────────┴──────────┴───────┘ - >>> us_rent_income.pivot_wider(names_from="variable", values_from=["estimate", "moe"]) + >>> us_rent_income.pivot_wider( + ... names_from="variable", values_from=["estimate", "moe"] + ... ) ┏━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━┓ ┃ geoid ┃ name ┃ estimate_income ┃ moe_income ┃ … ┃ ┡━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━┩ @@ -3468,7 +3506,9 @@ class Table(Expr, _FixedTextJupyterMixin): Supply an alternative function to summarize values - >>> warpbreaks = ibis.examples.warpbreaks.fetch().select("wool", "tension", "breaks") + >>> warpbreaks = ibis.examples.warpbreaks.fetch().select( + ... "wool", "tension", "breaks" + ... ) >>> warpbreaks ┏━━━━━━━━┳━━━━━━━━━┳━━━━━━━━┓ ┃ wool ┃ tension ┃ breaks ┃ @@ -3487,7 +3527,9 @@ class Table(Expr, _FixedTextJupyterMixin): │ A │ M │ 18 │ │ … │ … │ … │ └────────┴─────────┴────────┘ - >>> warpbreaks.pivot_wider(names_from="wool", values_from="breaks", values_agg="mean") + >>> warpbreaks.pivot_wider( + ... names_from="wool", values_from="breaks", values_agg="mean" + ... 
) ┏━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┓ ┃ tension ┃ A ┃ B ┃ ┡━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━┩ diff --git a/structs.py b/structs.py index b186992..f2a77eb 100644 --- a/structs.py +++ b/structs.py @@ -48,18 +48,18 @@ def struct( -------- Create a struct literal from a [](`dict`) with the type inferred >>> import ibis - >>> t = ibis.struct(dict(a=1, b='foo')) + >>> t = ibis.struct(dict(a=1, b="foo")) Create a struct literal from a [](`dict`) with a specified type - >>> t = ibis.struct(dict(a=1, b='foo'), type='struct<a: float, b: string>') + >>> t = ibis.struct(dict(a=1, b="foo"), type="struct<a: float, b: string>") Specify a specific type for the struct literal - >>> t = ibis.struct(dict(a=1, b=40), type='struct<a: float, b: int32>') + >>> t = ibis.struct(dict(a=1, b=40), type="struct<a: float, b: int32>") Create a struct array from multiple arrays >>> ibis.options.interactive = True - >>> t = ibis.memtable({'a': [1, 2, 3], 'b': ['foo', 'bar', 'baz']}) - >>> ibis.struct([('a', t.a), ('b', t.b)]) + >>> t = ibis.memtable({"a": [1, 2, 3], "b": ["foo", "bar", "baz"]}) + >>> ibis.struct([("a", t.a), ("b", t.b)]) ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ StructColumn() ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ @@ -71,7 +71,7 @@ def struct( └─────────────────────────────┘ Create a struct array from columns and literals - >>> ibis.struct([('a', t.a), ('b', 'foo')]) + >>> ibis.struct([("a", t.a), ("b", "foo")]) ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ StructColumn() ┃ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ @@ -103,7 +103,7 @@ class StructValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({'s': [{'a': 1, 'b': 'foo'}, {'a': 3, 'b': None}, None]}) + >>> t = ibis.memtable({"s": [{"a": 1, "b": "foo"}, {"a": 3, "b": None}, None]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ s ┃ @@ -127,7 +127,7 @@ class StructValue(Value): │ 3 │ │ NULL │ └───────┘ - >>> t.s['a'] + >>> t.s["a"] ┏━━━━━━━┓ ┃ a ┃ ┡━━━━━━━┩ @@ -166,7 +166,7 @@ class StructValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({'s': [{'a': 1, 'b': 'foo'}, {'a': 3, 'b': None}, None]}) + >>> t = ibis.memtable({"s": [{"a": 1, "b": "foo"}, {"a": 3, "b": None}, None]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ s ┃ @@ -177,7 +177,7 @@ class StructValue(Value): │ {'a': 3, 'b': None} │ │ NULL │ └─────────────────────────────┘ - >>> t.s['a'] + >>> t.s["a"] ┏━━━━━━━┓ ┃ a ┃ ┡━━━━━━━┩ @@ -187,7 +187,7 @@ class StructValue(Value): │ 3 │ │ NULL │ └───────┘ - >>> t.s['b'] + >>> t.s["b"] ┏━━━━━━━━┓ ┃ b ┃ ┡━━━━━━━━┩ @@ -197,7 +197,7 @@ class StructValue(Value): │ NULL │ │ NULL │ └────────┘ - >>> t.s['foo_bar'] + >>> t.s["foo_bar"] Traceback (most recent call last): ... 
KeyError: 'foo_bar' @@ -226,7 +226,7 @@ class StructValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({'s': [{'a': 1, 'b': 'foo'}, {'a': 3, 'b': None}, None]}) + >>> t = ibis.memtable({"s": [{"a": 1, "b": "foo"}, {"a": 3, "b": None}, None]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ s ┃ @@ -353,7 +353,7 @@ class StructValue(Value): -------- >>> import ibis >>> ibis.options.interactive = True - >>> t = ibis.memtable({'s': [{'a': 1, 'b': 'foo'}, {'a': 3, 'b': None}, None]}) + >>> t = ibis.memtable({"s": [{"a": 1, "b": "foo"}, {"a": 3, "b": None}, None]}) >>> t ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ ┃ s ┃ diff --git a/vectorized.py b/vectorized.py index 40d809c..16d5aef 100644 --- a/vectorized.py +++ b/vectorized.py @@ -134,22 +134,32 @@ def _coerce_to_dataframe( Examples -------- >>> import pandas as pd - >>> _coerce_to_dataframe(pd.DataFrame({'a': [1, 2, 3]}), dt.Struct(dict(b="int32"))) # noqa: E501 + >>> _coerce_to_dataframe( + ... pd.DataFrame({"a": [1, 2, 3]}), dt.Struct(dict(b="int32")) + ... ) # noqa: E501 b 0 1 1 2 2 3 - >>> _coerce_to_dataframe(pd.Series([[1, 2, 3]]), dt.Struct(dict.fromkeys('abc', 'int32'))) # noqa: E501 + >>> _coerce_to_dataframe( + ... pd.Series([[1, 2, 3]]), dt.Struct(dict.fromkeys("abc", "int32")) + ... ) # noqa: E501 a b c 0 1 2 3 - >>> _coerce_to_dataframe(pd.Series([range(3), range(3)]), dt.Struct(dict.fromkeys('abc', 'int32'))) # noqa: E501 + >>> _coerce_to_dataframe( + ... pd.Series([range(3), range(3)]), dt.Struct(dict.fromkeys("abc", "int32")) + ... ) # noqa: E501 a b c 0 0 1 2 1 0 1 2 - >>> _coerce_to_dataframe([pd.Series(x) for x in [1, 2, 3]], dt.Struct(dict.fromkeys('abc', 'int32'))) # noqa: E501 + >>> _coerce_to_dataframe( + ... [pd.Series(x) for x in [1, 2, 3]], dt.Struct(dict.fromkeys("abc", "int32")) + ... ) # noqa: E501 a b c 0 1 2 3 - >>> _coerce_to_dataframe([1, 2, 3], dt.Struct(dict.fromkeys('abc', 'int32'))) # noqa: E501 + >>> _coerce_to_dataframe( + ... [1, 2, 3], dt.Struct(dict.fromkeys("abc", "int32")) + ... ) # noqa: E501 a b c 0 1 2 3 """ @@ -277,6 +287,7 @@ def analytic(input_type, output_type): >>> @analytic(input_type=[dt.double], output_type=dt.double) ... def zscore(series): # note the use of aggregate functions ... return (series - series.mean()) / series.std() + ... Define and use an UDF with multiple return columns: @@ -289,10 +300,10 @@ def analytic(input_type, output_type): ... std = v.std() ... return v - mean, (v - mean) / std >>> - >>> win = ibis.window(preceding=None, following=None, group_by='key') + >>> win = ibis.window(preceding=None, following=None, group_by="key") >>> # add two columns "demean" and "zscore" >>> table = table.mutate( # quartodoc: +SKIP # doctest: +SKIP - ... demean_and_zscore(table['v']).over(win).destructure() + ... demean_and_zscore(table["v"]).over(win).destructure() ... ) """ return _udf_decorator(AnalyticVectorizedUDF, input_type, output_type) @@ -318,24 +329,28 @@ def elementwise(input_type, output_type): >>> @elementwise(input_type=[dt.string], output_type=dt.int64) ... def my_string_length(series): ... return series.str.len() * 2 + ... Define an UDF with non-column parameters: >>> @elementwise(input_type=[dt.string], output_type=dt.int64) ... def my_string_length(series, *, times): ... return series.str.len() * times + ... Define and use an UDF with multiple return columns: >>> @elementwise( ... input_type=[dt.string], - ... output_type=dt.Struct(dict(year=dt.string, monthday=dt.string)) + ... 
output_type=dt.Struct(dict(year=dt.string, monthday=dt.string)), ... ) ... def year_monthday(date): ... return date.str.slice(0, 4), date.str.slice(4, 8) >>> >>> # add two columns "year" and "monthday" - >>> table = table.mutate(year_monthday(table['date']).destructure()) # quartodoc: +SKIP # doctest: +SKIP + >>> table = table.mutate( + ... year_monthday(table["date"]).destructure() + ... ) # quartodoc: +SKIP # doctest: +SKIP """ return _udf_decorator(ElementWiseVectorizedUDF, input_type, output_type) @@ -360,19 +375,20 @@ def reduction(input_type, output_type): >>> @reduction(input_type=[dt.string], output_type=dt.int64) ... def my_string_length_agg(series, **kwargs): ... return (series.str.len() * 2).sum() + ... Define and use an UDF with multiple return columns: >>> @reduction( ... input_type=[dt.double], - ... output_type=dt.Struct(dict(mean="double", std="double")) + ... output_type=dt.Struct(dict(mean="double", std="double")), ... ) ... def mean_and_std(v): ... return v.mean(), v.std() >>> >>> # create aggregation columns "mean" and "std" - >>> table = table.group_by('key').aggregate( # quartodoc: +SKIP # doctest: +SKIP - ... mean_and_std(table['v']).destructure() + >>> table = table.group_by("key").aggregate( # quartodoc: +SKIP # doctest: +SKIP + ... mean_and_std(table["v"]).destructure() ... ) """ return _udf_decorator(ReductionVectorizedUDF, input_type, output_type) diff --git a/selectors.py b/selectors.py index 35a9cb5..a3169d4 100644 --- a/selectors.py +++ b/selectors.py @@ -32,7 +32,8 @@ When there are multiple properties to check it gets worse: >>> expr = t.select( ... [ -... t[c] for c in t.columns +... t[c] +... for c in t.columns ... if t[c].type().is_numeric() or t[c].type().is_string() ... if ("a" in c or "b" in c or "cd" in c) ... ] @@ -213,7 +214,9 @@ def of_type(dtype: dt.DataType | str | type[dt.DataType]) -> Predicate: >>> import ibis >>> import ibis.expr.datatypes as dt >>> import ibis.selectors as s - >>> t = ibis.table(dict(name="string", siblings="array<string>", parents="array<int64>")) + >>> t = ibis.table( + ... dict(name="string", siblings="array<string>", parents="array<int64>") + ... ) >>> expr = t.select(s.of_type(dt.Array(dt.string))) >>> expr.columns ['siblings'] @@ -327,7 +330,11 @@ def contains( >>> import ibis >>> import ibis.selectors as s - >>> t = ibis.table(dict(a="int64", b="string", c="float", d="array<int16>", ab="struct<x: int>")) + >>> t = ibis.table( + ... dict( + ... a="int64", b="string", c="float", d="array<int16>", ab="struct<x: int>" + ... ) + ... ) >>> expr = t.select(s.contains(("a", "b"))) >>> expr.columns ['a', 'b', 'ab'] @@ -467,11 +474,7 @@ def across( >>> from ibis import _, selectors as s >>> t = ibis.examples.penguins.fetch() >>> t.select(s.startswith("bill")).mutate( - ... s.across( - ... s.numeric(), - ... dict(centered =_ - _.mean()), - ... names = "{fn}_{col}" - ... ) + ... s.across(s.numeric(), dict(centered=_ - _.mean()), names="{fn}_{col}") ... ) ┏━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━┓ ┃ bill_length_mm ┃ bill_depth_mm ┃ centered_bill_length_mm ┃ … ┃
feat(bigquery): fill out `CREATE TABLE` DDL options including support for `overwrite`
5dac7eccd43fb8fbeb146340864837b5a1084a65
feat
https://github.com/rohankumardubey/ibis/commit/5dac7eccd43fb8fbeb146340864837b5a1084a65
fill out `CREATE TABLE` DDL options including support for `overwrite`
diff --git a/__init__.py b/__init__.py index 23f9fbc..87b2177 100644 --- a/__init__.py +++ b/__init__.py @@ -27,7 +27,7 @@ from ibis.backends.bigquery.client import ( schema_from_bigquery_table, ) from ibis.backends.bigquery.compiler import BigQueryCompiler -from ibis.backends.bigquery.datatypes import BigQuerySchema +from ibis.backends.bigquery.datatypes import BigQuerySchema, BigQueryType from ibis.formats.pandas import PandasData with contextlib.suppress(ImportError): @@ -438,47 +438,119 @@ class Backend(BaseSQLBackend): database: str | None = None, temp: bool | None = None, overwrite: bool = False, + default_collate: str | None = None, + partition_by: str | None = None, + cluster_by: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, ) -> ir.Table: + """Create a table in BigQuery. + + Parameters + ---------- + name + Name of the table to create + obj + The data with which to populate the table; optional, but one of `obj` + or `schema` must be specified + schema + The schema of the table to create; optional, but one of `obj` or + `schema` must be specified + database + The BigQuery *dataset* in which to create the table; optional + temp + This parameter is not yet supported in the BigQuery backend + overwrite + If `True`, replace the table if it already exists, otherwise fail if + the table exists + default_collate + Default collation for string columns. See BigQuery's documentation + for more details: https://cloud.google.com/bigquery/docs/reference/standard-sql/collation-concepts + partition_by + Partition the table by the given expression. See BigQuery's documentation + for more details: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#partition_expression + cluster_by + List of columns to cluster the table by. See BigQuery's documentation + for more details: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#clustering_column_list + options + BigQuery-specific table options; see the BigQuery documentation for + details: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#table_option_list + + Returns + ------- + Table + The table that was just created + """ if obj is None and schema is None: - raise com.IbisError("The schema or obj parameter is required") - if temp is True: - raise NotImplementedError( - "BigQuery backend does not yet support temporary tables" - ) - if overwrite is not False: + raise com.IbisError("One of the `schema` or `obj` parameter is required") + + if temp: + # TODO: these require a BQ session; figure out how to handle that raise NotImplementedError( - "BigQuery backend does not yet support overwriting tables" + "Temporary tables in the BigQuery backend are not yet supported" ) + create_stmt = "CREATE" + + if overwrite: + create_stmt += " OR REPLACE" + + table_ref = self._fully_qualified_name(name, database) + + create_stmt += f" TABLE `{table_ref}`" + if isinstance(obj, ir.Table) and schema is not None: if not schema.equals(obj.schema()): raise com.IbisTypeError( - """Provided schema and Ibis table schema are incompatible. -Please align the two schemas, or provide only one of the two arguments.""" + "Provided schema and Ibis table schema are incompatible. Please " + "align the two schemas, or provide only one of the two arguments." 
) + if schema is not None: + schema_str = ", ".join( + ( + f"{name} {BigQueryType.from_ibis(typ)}" + + " NOT NULL" * (not typ.nullable) + ) + for name, typ in schema.items() + ) + create_stmt += f" ({schema_str})" + + if default_collate is not None: + create_stmt += f" DEFAULT COLLATE {default_collate!r}" + + if partition_by is not None: + create_stmt += f" PARTITION BY {partition_by}" + + if cluster_by is not None: + create_stmt += f" CLUSTER BY {', '.join(cluster_by)}" + + if options: + pairs = ", ".join(f"{k}={v!r}" for k, v in options.items()) + create_stmt += f" OPTIONS({pairs})" + if obj is not None: import pyarrow as pa - project_id, dataset = self._parse_project_and_dataset(database) if isinstance(obj, (pd.DataFrame, pa.Table)): table = ibis.memtable(obj, schema=schema) else: table = obj - sql_select = self.compile(table) - table_ref = f"`{project_id}`.`{dataset}`.`{name}`" - self.raw_sql(f'CREATE TABLE {table_ref} AS ({sql_select})') - elif schema is not None: - table_id = self._fully_qualified_name(name, database) - table = bq.Table(table_id, schema=BigQuerySchema.from_ibis(schema)) - self.client.create_table(table) - return self.table(name, database=database) + + create_stmt += f" AS ({self.compile(table)})" + + self.raw_sql(create_stmt) + + return self.table(table_ref) def drop_table( self, name: str, *, database: str | None = None, force: bool = False ) -> None: table_id = self._fully_qualified_name(name, database) - self.client.delete_table(table_id, not_found_ok=not force) + drop_stmt = "DROP TABLE" + if force: + drop_stmt += " IF EXISTS" + drop_stmt += f" `{table_id}`" + self.raw_sql(drop_stmt) def create_view( self, @@ -491,14 +563,20 @@ Please align the two schemas, or provide only one of the two arguments.""" or_replace = "OR REPLACE " * overwrite sql_select = self.compile(obj) table_id = self._fully_qualified_name(name, database) - code = f"CREATE {or_replace}VIEW {table_id} AS {sql_select}" + code = f"CREATE {or_replace}VIEW `{table_id}` AS {sql_select}" self.raw_sql(code) return self.table(name, database=database) def drop_view( self, name: str, *, database: str | None = None, force: bool = False ) -> None: - self.drop_table(name=name, database=database, force=force) + # default_project, default_dataset = self._parse_project_and_dataset(database) + table_id = self._fully_qualified_name(name, database) + drop_stmt = "DROP VIEW" + if force: + drop_stmt += " IF EXISTS" + drop_stmt += f" `{table_id}`" + self.raw_sql(drop_stmt) def compile(expr, params=None, **kwargs): diff --git a/test_client.py b/test_client.py index 2a99d9e..3c982a7 100644 --- a/test_client.py +++ b/test_client.py @@ -1288,7 +1288,7 @@ def gen_test_name(con: BaseBackend) -> str: @mark.notimpl( - ["bigquery", "datafusion", "polars"], + ["datafusion", "polars"], raises=NotImplementedError, reason="overwriting not implemented in ibis for this backend", )
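A minimal usage sketch of the expanded `create_table` signature recorded in the diff above. The keyword arguments (`overwrite`, `default_collate`, `partition_by`, `cluster_by`, `options`) come straight from the commit; the project, dataset, table name, and schema are placeholder assumptions, and actually running this requires Google Cloud credentials.

import ibis

# Placeholder project/dataset names; a real connection needs BigQuery credentials.
con = ibis.bigquery.connect(project_id="my-project", dataset_id="my_dataset")

schema = ibis.schema(dict(id="int64", name="string", created_at="timestamp"))

# Mirrors the parameters added in this commit; the values are illustrative only.
t = con.create_table(
    "events",
    schema=schema,
    overwrite=True,                   # emits CREATE OR REPLACE TABLE
    partition_by="DATE(created_at)",  # appended as PARTITION BY <expression>
    cluster_by=["name"],              # appended as CLUSTER BY <column list>
    options={"description": "demo"},  # appended as OPTIONS(description='demo')
)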
test(markers): avoid mutating kwargs to prevent incorrect test fixture behavior
71290abdfed9084d710e8c2165c3ffa367c70181
test
https://github.com/rohankumardubey/ibis/commit/71290abdfed9084d710e8c2165c3ffa367c70181
avoid mutating kwargs to prevent incorrect test fixture behavior
diff --git a/conftest.py b/conftest.py index 7d26901..95fc9e3 100644 --- a/conftest.py +++ b/conftest.py @@ -495,7 +495,7 @@ def pytest_runtest_call(item): ) for marker in item.iter_markers(name="xfail_version"): - kwargs = marker.kwargs + kwargs = marker.kwargs.copy() if backend not in kwargs: continue
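A hypothetical sketch of why the `.copy()` matters: pytest collects markers once, so mutating `marker.kwargs` inside one test leaks the mutation into every later test that reads the same marker. Nothing below is from the repository; it only illustrates the aliasing the fix avoids.

# Shared dict standing in for marker.kwargs, which pytest reuses across tests.
marker_kwargs = {"duckdb": "1.2.3", "reason": "upstream bug"}

def consume(kwargs):
    # Popping mutates the dict in place, so an aliased dict loses the key
    # for every subsequent caller.
    return kwargs.pop("reason", None)

print(consume(marker_kwargs.copy()))  # 'upstream bug'; the original dict is untouched
print(consume(marker_kwargs.copy()))  # 'upstream bug' again, each call gets a fresh copy
print(consume(marker_kwargs))         # 'upstream bug', but the shared dict is now mutated
print(marker_kwargs)                  # {'duckdb': '1.2.3'}  -- the key is gone for good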
feat(duckdb): implement `to_timestamp`
26ca1e4f3160de594c8bb3047a3588e607385ad1
feat
https://github.com/ibis-project/ibis/commit/26ca1e4f3160de594c8bb3047a3588e607385ad1
implement `to_timestamp`
diff --git a/registry.py b/registry.py index d0f9637..0bb68b6 100644 --- a/registry.py +++ b/registry.py @@ -233,5 +233,6 @@ operation_registry.update( ops.BitwiseXor: fixed_arity(sa.func.xor, 2), ops.JSONGetItem: _json_get_item, ops.RowID: lambda *_: sa.literal_column('rowid'), + ops.StringToTimestamp: fixed_arity(sa.func.strptime, 2), } )
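A short sketch of what the new mapping enables, assuming the usual ibis string API in which `StringValue.to_timestamp(format)` lowers to `ops.StringToTimestamp` and therefore, with this commit, to DuckDB's `strptime`. The connection is an in-memory DuckDB database; the literal date and format string are illustrative.

import ibis

con = ibis.duckdb.connect()  # in-memory database, no file needed
expr = ibis.literal("2023-09-15").to_timestamp("%Y-%m-%d")
con.execute(expr)  # expected to yield a timestamp for 2023-09-15 00:00:00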
chore(deps): relock
4537e1f3ff4327ce2412e51e52d5adcc0f786e57
chore
https://github.com/ibis-project/ibis/commit/4537e1f3ff4327ce2412e51e52d5adcc0f786e57
relock
diff --git a/poetry.lock b/poetry.lock index 000e192..42360a2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -993,7 +993,7 @@ toml = ["tomli"] name = "cryptography" version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, @@ -5278,13 +5278,11 @@ files = [ {file = "statsmodels-0.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a6a0a1a06ff79be8aa89c8494b33903442859add133f0dda1daf37c3c71682e"}, {file = "statsmodels-0.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77b3cd3a5268ef966a0a08582c591bd29c09c88b4566c892a7c087935234f285"}, {file = "statsmodels-0.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c64ebe9cf376cba0c31aed138e15ed179a1d128612dd241cdf299d159e5e882"}, - {file = "statsmodels-0.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:229b2f676b4a45cb62d132a105c9c06ca8a09ffba060abe34935391eb5d9ba87"}, {file = "statsmodels-0.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb471f757fc45102a87e5d86e87dc2c8c78b34ad4f203679a46520f1d863b9da"}, {file = "statsmodels-0.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:582f9e41092e342aaa04920d17cc3f97240e3ee198672f194719b5a3d08657d6"}, {file = "statsmodels-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7ebe885ccaa64b4bc5ad49ac781c246e7a594b491f08ab4cfd5aa456c363a6f6"}, {file = "statsmodels-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b587ee5d23369a0e881da6e37f78371dce4238cf7638a455db4b633a1a1c62d6"}, {file = "statsmodels-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef7fa4813c7a73b0d8a0c830250f021c102c71c95e9fe0d6877bcfb56d38b8c"}, - {file = "statsmodels-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afe80544ef46730ea1b11cc655da27038bbaa7159dc5af4bc35bbc32982262f2"}, {file = "statsmodels-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:a6ad7b8aadccd4e4dd7f315a07bef1bca41d194eeaf4ec600d20dea02d242fce"}, {file = "statsmodels-0.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3757542c95247e4ab025291a740efa5da91dc11a05990c033d40fce31c450dc9"}, {file = "statsmodels-0.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:de489e3ed315bdba55c9d1554a2e89faa65d212e365ab81bc323fa52681fc60e"}, diff --git a/requirements-dev.txt b/requirements-dev.txt index 2a4b66d..f17ca89 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -30,7 +30,7 @@ colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and (sys_pl comm==0.1.4 ; python_version >= "3.10" and python_version < "4.0" contourpy==1.1.0 ; python_version >= "3.10" and python_version < "4.0" coverage[toml]==7.3.0 ; python_version >= "3.9" and python_version < "4.0" -cryptography==41.0.3 ; python_version >= "3.9" and python_version < "4.0" +cryptography==41.0.4 ; python_version >= "3.9" and python_version < "4.0" cycler==0.11.0 ; python_version >= "3.10" and python_version < "4.0" dask[array,dataframe]==2023.8.1 ; python_version >= "3.9" and python_version < "4.0" datafusion==22.0.0 ; python_version >= "3.9" and python_version < "4.0"
build: updated repo url
04e94286b1113d30a3b8e309542a50e48257d9dc
build
https://github.com/tsparticles/tsparticles/commit/04e94286b1113d30a3b8e309542a50e48257d9dc
updated repo url
diff --git a/CHANGELOG.md b/CHANGELOG.md index 73b3766..9849725 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,67 +13,67 @@ See [Conventional Commits](https://conventionalcommits.org) for commit guideline - **deps:** update dependency eslint-config-prettier to v9 ([e6e2ba7](https://github.com/tsparticles/tsparticles/commit/e6e2ba7309db151c6d0fc6c01a4f01116ecf24b8)) -# [2.12.0](https://github.com/matteobruni/tsparticles/compare/v2.11.1...v2.12.0) (2023-08-03) +# [2.12.0](https://github.com/tsparticles/tsparticles/compare/v2.11.1...v2.12.0) (2023-08-03) **Note:** Version bump only for package @tsparticles/options-updater -## [2.11.1](https://github.com/matteobruni/tsparticles/compare/v2.11.0...v2.11.1) (2023-07-24) +## [2.11.1](https://github.com/tsparticles/tsparticles/compare/v2.11.0...v2.11.1) (2023-07-24) **Note:** Version bump only for package @tsparticles/options-updater -# [2.11.0](https://github.com/matteobruni/tsparticles/compare/v2.10.1...v2.11.0) (2023-07-12) +# [2.11.0](https://github.com/tsparticles/tsparticles/compare/v2.10.1...v2.11.0) (2023-07-12) **Note:** Version bump only for package @tsparticles/options-updater -## [2.10.1](https://github.com/matteobruni/tsparticles/compare/v2.10.0...v2.10.1) (2023-06-04) +## [2.10.1](https://github.com/tsparticles/tsparticles/compare/v2.10.0...v2.10.1) (2023-06-04) **Note:** Version bump only for package @tsparticles/options-updater -# [2.10.0](https://github.com/matteobruni/tsparticles/compare/v2.0.0-alpha.0...v2.10.0) (2023-06-03) +# [2.10.0](https://github.com/tsparticles/tsparticles/compare/v2.0.0-alpha.0...v2.10.0) (2023-06-03) ### Bug Fixes -- **deps:** update dependency fs-extra to v11 ([e82352a](https://github.com/matteobruni/tsparticles/commit/e82352a685960603a58fb222f91d157ee65967de)) -- **deps:** update dependency jsdom to v21 ([85a816a](https://github.com/matteobruni/tsparticles/commit/85a816a2f5389afffc3a75b9e6c3bbd754a48db1)) -- **deps:** update dependency jsdom to v22 ([5f8737a](https://github.com/matteobruni/tsparticles/commit/5f8737a5d3635947da822127d395a971d8feee4d)) -- **deps:** update dependency rimraf to v4.4.1 ([370d1ca](https://github.com/matteobruni/tsparticles/commit/370d1ca4d3bb0ea8bfe5fb3e0f5e1d74f45f4de6)) -- **deps:** update dependency rimraf to v5 ([c29cbc4](https://github.com/matteobruni/tsparticles/commit/c29cbc43ed0d3522b718e7236a48eae9b91cde43)) -- **deps:** update dependency rimraf to v5.0.1 ([6627473](https://github.com/matteobruni/tsparticles/commit/66274734c70b5759c59f7e949c8fcb2c8529bdf2)) +- **deps:** update dependency fs-extra to v11 ([e82352a](https://github.com/tsparticles/tsparticles/commit/e82352a685960603a58fb222f91d157ee65967de)) +- **deps:** update dependency jsdom to v21 ([85a816a](https://github.com/tsparticles/tsparticles/commit/85a816a2f5389afffc3a75b9e6c3bbd754a48db1)) +- **deps:** update dependency jsdom to v22 ([5f8737a](https://github.com/tsparticles/tsparticles/commit/5f8737a5d3635947da822127d395a971d8feee4d)) +- **deps:** update dependency rimraf to v4.4.1 ([370d1ca](https://github.com/tsparticles/tsparticles/commit/370d1ca4d3bb0ea8bfe5fb3e0f5e1d74f45f4de6)) +- **deps:** update dependency rimraf to v5 ([c29cbc4](https://github.com/tsparticles/tsparticles/commit/c29cbc43ed0d3522b718e7236a48eae9b91cde43)) +- **deps:** update dependency rimraf to v5.0.1 ([6627473](https://github.com/tsparticles/tsparticles/commit/66274734c70b5759c59f7e949c8fcb2c8529bdf2)) ### Features -- added error prefix to standardize error messages 
([f735252](https://github.com/matteobruni/tsparticles/commit/f73525291139f45c1b5abda04b604813f9247d9f)) -- added version to engine ([9406873](https://github.com/matteobruni/tsparticles/commit/9406873c6551b59e64edbe3a0e4fe59ef2cde4c6)) -- adding new path plugin, using svg paths as a source ([72316ec](https://github.com/matteobruni/tsparticles/commit/72316ec38ee3556ad2db0af4e84a14529ddb1b9b)) -- implemented delay options in opacity, size and colors updaters ([dfd4e9f](https://github.com/matteobruni/tsparticles/commit/dfd4e9f711a83ff5ef6e1bcf5f6fdf62d61dc157)) +- added error prefix to standardize error messages ([f735252](https://github.com/tsparticles/tsparticles/commit/f73525291139f45c1b5abda04b604813f9247d9f)) +- added version to engine ([9406873](https://github.com/tsparticles/tsparticles/commit/9406873c6551b59e64edbe3a0e4fe59ef2cde4c6)) +- adding new path plugin, using svg paths as a source ([72316ec](https://github.com/tsparticles/tsparticles/commit/72316ec38ee3556ad2db0af4e84a14529ddb1b9b)) +- implemented delay options in opacity, size and colors updaters ([dfd4e9f](https://github.com/tsparticles/tsparticles/commit/dfd4e9f711a83ff5ef6e1bcf5f6fdf62d61dc157)) -## [2.9.3](https://github.com/matteobruni/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-12) +## [2.9.3](https://github.com/tsparticles/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-12) **Note:** Version bump only for package @tsparticles/options-updater -## [2.9.2](https://github.com/matteobruni/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-12) +## [2.9.2](https://github.com/tsparticles/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-12) **Note:** Version bump only for package @tsparticles/options-updater -## [2.9.1](https://github.com/matteobruni/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-11) +## [2.9.1](https://github.com/tsparticles/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-11) **Note:** Version bump only for package @tsparticles/options-updater -# [2.9.0](https://github.com/matteobruni/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-10) +# [2.9.0](https://github.com/tsparticles/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-02-10) ### Features -- added version to engine ([9406873](https://github.com/matteobruni/tsparticles/commit/9406873c6551b59e64edbe3a0e4fe59ef2cde4c6)) +- added version to engine ([9406873](https://github.com/tsparticles/tsparticles/commit/9406873c6551b59e64edbe3a0e4fe59ef2cde4c6)) -# [2.8.0](https://github.com/matteobruni/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-01-18) +# [2.8.0](https://github.com/tsparticles/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2023-01-18) **Note:** Version bump only for package @tsparticles/options-updater -## [2.7.1](https://github.com/matteobruni/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2022-12-25) +## [2.7.1](https://github.com/tsparticles/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2022-12-25) **Note:** Version bump only for package @tsparticles/options-updater -# 
[2.7.0](https://github.com/matteobruni/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2022-12-23) +# [2.7.0](https://github.com/tsparticles/tsparticles/compare/@tsparticles/[email protected]...@tsparticles/[email protected]) (2022-12-23) **Note:** Version bump only for package @tsparticles/options-updater @@ -81,4 +81,4 @@ See [Conventional Commits](https://conventionalcommits.org) for commit guideline ### Bug Fixes -- **deps:** update dependency fs-extra to v11 ([e82352a](https://github.com/matteobruni/tsparticles/commit/e82352a685960603a58fb222f91d157ee65967de)) +- **deps:** update dependency fs-extra to v11 ([e82352a](https://github.com/tsparticles/tsparticles/commit/e82352a685960603a58fb222f91d157ee65967de)) diff --git a/README.md b/README.md index c2836ee..54554c7 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ [![npmjs](https://badge.fury.io/js/@tsparticles/updater-wobble.svg)](https://www.npmjs.com/package/@tsparticles/updater-wobble) [![npmjs](https://img.shields.io/npm/dt/@tsparticles/updater-wobble)](https://www.npmjs.com/package/@tsparticles/updater-wobble) [![GitHub Sponsors](https://img.shields.io/github/sponsors/matteobruni)](https://github.com/sponsors/matteobruni) -[tsParticles](https://github.com/matteobruni/tsparticles) updater plugin for wobble animations. +[tsParticles](https://github.com/tsparticles/tsparticles) updater plugin for wobble animations. ## How to use it diff --git a/package.dist.json b/package.dist.json index 6fbdc9b..c3c88c9 100644 --- a/package.dist.json +++ b/package.dist.json @@ -5,7 +5,7 @@ "homepage": "https://particles.js.org", "repository": { "type": "git", - "url": "git+https://github.com/matteobruni/tsparticles.git", + "url": "git+https://github.com/tsparticles/tsparticles.git", "directory": "utils/configs" }, "keywords": [ @@ -64,7 +64,7 @@ "author": "Matteo Bruni <[email protected]>", "license": "MIT", "bugs": { - "url": "https://github.com/matteobruni/tsparticles/issues" + "url": "https://github.com/tsparticles/tsparticles/issues" }, "funding": [ { diff --git a/package.json b/package.json index 689b072..a9c7887 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ }, "repository": { "type": "git", - "url": "git+https://github.com/matteobruni/tsparticles.git", + "url": "git+https://github.com/tsparticles/tsparticles.git", "directory": "utils/configs" }, "keywords": [ @@ -69,7 +69,7 @@ "author": "Matteo Bruni <[email protected]>", "license": "MIT", "bugs": { - "url": "https://github.com/matteobruni/tsparticles/issues" + "url": "https://github.com/tsparticles/tsparticles/issues" }, "funding": [ { diff --git a/demo.js b/demo.js index 7c68647..5566951 100644 --- a/demo.js +++ b/demo.js @@ -320,7 +320,7 @@ const inputData = document.getElementById("code-pen-data"); const particlesContainer = document.getElementById("tsparticles"); const data = { - html: `<!-- tsParticles - https://particles.js.org - https://github.com/matteobruni/tsparticles --> + html: `<!-- tsParticles - https://particles.js.org - https://github.com/tsparticles/tsparticles --> <div id="tsparticles"></div>`, css: `/* ---- reset ---- */ body { diff --git a/playground.js b/playground.js index d845c7d..f419893 100644 --- a/playground.js +++ b/playground.js @@ -208,7 +208,7 @@ const form = document.getElementById("code-pen-form"), inputData = document.getElementById("code-pen-data"), data = { - html: `<!-- tsParticles - https://particles.js.org - https://github.com/matteobruni/tsparticles --> + html: `<!-- tsParticles - 
https://particles.js.org - https://github.com/tsparticles/tsparticles --> <div id="tsparticles"></div>`, css: "", js: editor.getValue(), diff --git a/index.pug b/index.pug index eace7fb..7f7b78f 100644 --- a/index.pug +++ b/index.pug @@ -64,7 +64,7 @@ html(lang="en") .rectangle.bounce(style="width: 200px; height: 200px; background-color: rgba(255, 255, 255, .5); position: absolute; bottom: 10%; left: 50%; margin-left: -100px; margin-top: -100px; z-index: 200") #tsparticles .github - a.btn.btn-link(href="https://github.com/matteobruni/tsparticles" title="Find more info on GitHub") + a.btn.btn-link(href="https://github.com/tsparticles/tsparticles" title="Find more info on GitHub") img#gh-mark.img-fluid(src="https://particles.js.org/images/GitHub-Mark-120px-plus.png" alt="") span#gh-project tsParticles span.toggle-sidebar diff --git a/playground.pug b/playground.pug index 2705b64..89943e8 100644 --- a/playground.pug +++ b/playground.pug @@ -74,7 +74,7 @@ html(lang="en") #tsparticles-container.col.p-0.h-100 #tsparticles .github - a.btn.btn-link(href="https://github.com/matteobruni/tsparticles" title="Find more info on GitHub") + a.btn.btn-link(href="https://github.com/tsparticles/tsparticles" title="Find more info on GitHub") img#gh-mark.img-fluid(src="https://particles.js.org/images/GitHub-Mark-120px-plus.png" alt="") span#gh-project tsParticles span.toggle-sidebar diff --git a/slim.pug b/slim.pug index cfae6b8..8e96536 100644 --- a/slim.pug +++ b/slim.pug @@ -132,7 +132,7 @@ html(lang="en") .rectangle.bounce(style="width: 200px; height: 200px; background-color: rgba(255, 255, 255, .5); position: absolute; bottom: 10%; left: 50%; margin-left: -100px; margin-top: -100px; z-index: 200") #tsparticles .github - a.btn.btn-link(href="https://github.com/matteobruni/tsparticles" title="Find more info on GitHub") + a.btn.btn-link(href="https://github.com/tsparticles/tsparticles" title="Find more info on GitHub") img#gh-mark.img-fluid(src="https://particles.js.org/images/GitHub-Mark-120px-plus.png" alt="") span#gh-project tsParticles span.toggle-sidebar diff --git a/deploy.docs-json.js b/deploy.docs-json.js index afa0fc1..ff17a55 100644 --- a/deploy.docs-json.js +++ b/deploy.docs-json.js @@ -9,7 +9,7 @@ const ghToken = process.env.GITHUB_TOKEN, gitUser = ghToken ? { ghpages.publish(path.resolve("."), { src: "docs.json", - repo: ghToken ? `https://git:${ghToken}@github.com/matteobruni/tsparticles.git` : `https://github.com/matteobruni/tsparticles.git`, + repo: ghToken ? `https://git:${ghToken}@github.com/tsparticles/tsparticles.git` : `https://github.com/tsparticles/tsparticles.git`, branch: "docs-gen", dotfiles: true, history: false, diff --git a/install.js b/install.js index d4865d8..fcdd945 100644 --- a/install.js +++ b/install.js @@ -8,7 +8,7 @@ try { console.log("You can find more samples on CodePen too: https://codepen.io/collection/DPOage"); console.log("If you need documentation you can find it here: https://particles.js.org/docs"); console.log( - "Don't forget to star the tsParticles repository, if you like the project and want to support it: https://github.com/matteobruni/tsparticles" + "Don't forget to star the tsParticles repository, if you like the project and want to support it: https://github.com/tsparticles/tsparticles" ); const pkgSettings = require(path.join(process.env.INIT_CWD, "package.json")); @@ -62,7 +62,7 @@ try { "Found React installed. Please download react-particles to use tsParticles with a component ready to use and easier to configure." 
); console.log( - "You can read more about the component here: https://github.com/matteobruni/tsparticles/blob/main/components/react/README.md" + "You can read more about the component here: https://github.com/tsparticles/tsparticles/blob/main/components/react/README.md" ); } } @@ -90,7 +90,7 @@ try { "Found Vue 3.x installed. Please Download vue3-particles to use tsParticles with a component ready to use and easier to configure." ); console.log( - "You can read more about the component here: https://github.com/matteobruni/tsparticles/blob/main/components/vue3/README.md" + "You can read more about the component here: https://github.com/tsparticles/tsparticles/blob/main/components/vue3/README.md" ); } } else { @@ -100,7 +100,7 @@ try { "Found Vue 2.x installed. Please Download vue2-particles to use tsParticles with a component ready to use and easier to configure." ); console.log( - "You can read more about the component here: https://github.com/matteobruni/tsparticles/blob/main/components/vue/README.md" + "You can read more about the component here: https://github.com/tsparticles/tsparticles/blob/main/components/vue/README.md" ); } } @@ -113,7 +113,7 @@ try { "Found Svelte installed. Please Download svelte-particles to use tsParticles with a component ready to use and easier to configure." ); console.log( - "You can read more about the component here: https://github.com/matteobruni/tsparticles/blob/main/components/svelte/README.md" + "You can read more about the component here: https://github.com/tsparticles/tsparticles/blob/main/components/svelte/README.md" ); } } @@ -125,7 +125,7 @@ try { "Found Inferno installed. Please Download inferno-particles to use tsParticles with a component ready to use and easier to configure." ); console.log( - "You can read more about the component here: https://github.com/matteobruni/tsparticles/blob/main/components/inferno/README.md" + "You can read more about the component here: https://github.com/tsparticles/tsparticles/blob/main/components/inferno/README.md" ); } } @@ -137,7 +137,7 @@ try { "Found Preact installed. Please Download preact-particles to use tsParticles with a component ready to use and easier to configure." ); console.log( - "You can read more about the component here: https://github.com/matteobruni/tsparticles/blob/main/components/preact/README.md" + "You can read more about the component here: https://github.com/tsparticles/tsparticles/blob/main/components/preact/README.md" ); } } @@ -149,7 +149,7 @@ try { "Found jQuery installed. Please Download jquery-particles to use tsParticles with a plugin ready to use and easier to configure." 
); console.log( - "You can read more about the plugin here: https://github.com/matteobruni/tsparticles/blob/main/components/jquery/README.md" + "You can read more about the plugin here: https://github.com/tsparticles/tsparticles/blob/main/components/jquery/README.md" ); } } diff --git a/Modes.md b/Modes.md index b28e4cb..008fc90 100644 --- a/Modes.md +++ b/Modes.md @@ -14,5 +14,5 @@ | `repulse.duration` | `number` | `1.2` | seconds | | `push.quantity` | `number` | `4` | | | `remove.quantity` | `number` | `4` | | -| `emitter` | `object` / `array` | | See `Emitters` documentation <https://github.com/matteobruni/tsparticles/tree/main/plugins/emitters> | -| `absorber` | `object` / `array` | | See `Absorbers` documentation <https://github.com/matteobruni/tsparticles/tree/main/plugins/absorbers> | +| `emitter` | `object` / `array` | | See `Emitters` documentation <https://github.com/tsparticles/tsparticles/tree/main/plugins/emitters> | +| `absorber` | `object` / `array` | | See `Absorbers` documentation <https://github.com/tsparticles/tsparticles/tree/main/plugins/absorbers> | diff --git a/pathseg.js b/pathseg.js index b87236b..8372bfc 100644 --- a/pathseg.js +++ b/pathseg.js @@ -1794,7 +1794,7 @@ } catch (e) { // ignore, if it's not working we can ignore errors console.warn( - "An error occurred in tsParticles pathseg polyfill. If the Polygon Mask is not working, please open an issue here: https://github.com/matteobruni/tsparticles", + "An error occurred in tsParticles pathseg polyfill. If the Polygon Mask is not working, please open an issue here: https://github.com/tsparticles/tsparticles", e, ); } diff --git a/README.html b/README.html index 709e259..60ccc8f 100644 --- a/README.html +++ b/README.html @@ -46,7 +46,7 @@ <p><a href="https://www.jsdelivr.com/package/npm/tsparticles-shape-rounded-polygon"><img src="https://data.jsdelivr.com/v1/package/npm/tsparticles-shape-rounded-polygon/badge" alt="jsDelivr"></a> <a href="https://www.npmjs.com/package/tsparticles-shape-rounded-polygon"><img src="https://badge.fury.io/js/tsparticles-shape-rounded-polygon.svg" alt="npmjs"></a> <a href="https://www.npmjs.com/package/tsparticles-shape-rounded-polygon"><img src="https://img.shields.io/npm/dt/tsparticles-shape-rounded-polygon" alt="npmjs"></a> <a href="https://github.com/sponsors/matteobruni"><img src="https://img.shields.io/github/sponsors/matteobruni" alt="GitHub Sponsors"></a></p> -<p><a href="https://github.com/matteobruni/tsparticles">tsParticles</a> additional rounded polygon shape.</p> +<p><a href="https://github.com/tsparticles/tsparticles">tsParticles</a> additional rounded polygon shape.</p> <h2 id="how-to-use-it">How to use it</h2> <h3 id="cdn--vanilla-js--jquery">CDN / Vanilla JS / jQuery</h3> <p>The CDN/Vanilla version JS has one required file in vanilla configuration:</p> @@ -95,4 +95,4 @@ <script async src="https://cdn.jsdelivr.net/npm/katex-copytex@latest/dist/katex-copytex.min.js"></script> </body> - </html> \\ No newline at end of file + </html>
ci: skip ruff format in pre-commit ci runner
9117fdedb9b5ce0345c31b3e1fa22ae8554944d4
ci
https://github.com/rohankumardubey/ibis/commit/9117fdedb9b5ce0345c31b3e1fa22ae8554944d4
skip ruff format in pre-commit ci runner
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2c6b461..ebbdd42 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,6 +9,7 @@ ci: - nixpkgs-fmt - prettier - ruff + - ruff-format - shellcheck - shfmt - statix
chore: add pre-commit ci config
0e4545ddf11ad1426334571757c6827d79b4ead6
chore
https://github.com/rohankumardubey/ibis/commit/0e4545ddf11ad1426334571757c6827d79b4ead6
add pre-commit ci config
diff --git a/renovate.json b/renovate.json index 40712b5..fe4e462 100644 --- a/renovate.json +++ b/renovate.json @@ -1,6 +1,6 @@ { "rangeStrategy": "widen", - "extends": ["config:base"], + "extends": ["config:base", ":enablePreCommit"], "schedule": ["after 10pm and before 5am every weekday", "every weekend"], "semanticCommits": "enabled", "lockFileMaintenance": { "enabled": true }, diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6614671..1ebba05 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,79 +1,56 @@ +ci: + autofix_commit_msg: "style: auto fixes from pre-commit.ci hooks" + autofix_prs: false + autoupdate_commit_msg: "style: pre-commit.ci autoupdate" + skip: + - prettier + - shellcheck + - shfmt + - just default_stages: - commit repos: - - hooks: - - entry: black --check - exclude: ^$ - files: "" - id: black - language: system - name: black - pass_filenames: true - stages: - - commit - types: - - python - - file - - python - types_or: [] - - entry: flake8 - exclude: ^$ - files: "" - id: flake8 - language: python - name: flake8 - pass_filenames: true - stages: - - commit + - repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + - repo: https://github.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + - repo: https://github.com/MarcoGorelli/absolufy-imports + rev: v0.3.1 + hooks: + - id: absolufy-imports + - repo: https://github.com/asottile/pyupgrade + rev: v2.32.0 + hooks: + - id: pyupgrade + exclude: setup.py + entry: pyupgrade --py38-plus types: - python - types_or: [] - - entry: absolufy-imports - name: absolufy - id: absolufy - language: python - files: "" - stages: - - commit - types: - - python - - entry: isort --check - exclude: ^$ - files: "" - id: isort - language: python - name: isort - pass_filenames: true - stages: - - commit - types: - - file - - python - types_or: - - cython - - pyi - - python - - entry: pyupgrade --py38-plus - exclude: (setup.py) - files: "" - id: pyupgrade + - repo: local + hooks: + - id: prettier language: system - name: pyupgrade - pass_filenames: true - stages: - - commit - types: - - python - types_or: [] - - entry: shellcheck - exclude: ^$ - files: \\.sh$ - id: shellcheck + name: prettier + entry: prettier --write + exclude: poetry.lock + types_or: + - json + - toml + - yaml + - markdown + - id: shellcheck language: system + entry: shellcheck name: shellcheck - pass_filenames: true - stages: - - commit + files: \\.sh$ types: - shell types_or: @@ -84,44 +61,20 @@ repos: - bats - dash - ksh - - entry: shfmt -i 2 -sr -d -s -l - exclude: ^$ - files: \\.sh$ - id: shfmt - language: system + - id: shfmt name: shfmt - pass_filenames: true - stages: - - commit + language: system + entry: shfmt -i 2 -sr -s + files: \\.sh$ types: - file types_or: - file - - entry: prettier --check - exclude: ^$ - files: "" - id: prettier - language: system - name: prettier - pass_filenames: true - stages: - - commit - types: - - text - types_or: - - json - - toml - - yaml - - markdown - entry: just --fmt --unstable --check - exclude: ^$ files: ^justfile$ id: just language: system name: just pass_filenames: false - stages: - - commit types: - file - repo: local
refactor: respect default column length for date time columns in postgres. `timestamptz` actually means `timestamptz(6)` in postgres, but previously the ORM saw it as `timestamptz(0)` and produced unnecessary alter queries. Closes #2655
10c01460ece664075db91b2b971d10b4ee157d47
refactor
https://github.com/mikro-orm/mikro-orm/commit/10c01460ece664075db91b2b971d10b4ee157d47
respect default column length for date time columns in postgres. `timestamptz` actually means `timestamptz(6)` in postgres, but previously the ORM saw it as `timestamptz(0)` and produced unnecessary alter queries. Closes #2655
diff --git a/Platform.ts b/Platform.ts index cdf5ec2..40117a5 100644 --- a/Platform.ts +++ b/Platform.ts @@ -112,6 +112,10 @@ export abstract class Platform { return 'datetime' + (column.length ? `(${column.length})` : ''); } + getDefaultDateTimeLength(): number { + return 0; + } + getDateTypeDeclarationSQL(length?: number): string { return 'date' + (length ? `(${length})` : ''); } diff --git a/DatabaseTable.ts b/DatabaseTable.ts index 417d0aa..640ff01 100644 --- a/DatabaseTable.ts +++ b/DatabaseTable.ts @@ -1,5 +1,5 @@ import type { Dictionary, EntityMetadata, EntityProperty, NamingStrategy } from '@mikro-orm/core'; -import { Cascade, DecimalType, EntitySchema, ReferenceType, t, Utils } from '@mikro-orm/core'; +import { Cascade, DateTimeType, DecimalType, EntitySchema, ReferenceType, t, Utils } from '@mikro-orm/core'; import type { SchemaHelper } from './SchemaHelper'; import type { Column, ForeignKey, Index } from '../typings'; import type { AbstractSqlPlatform } from '../AbstractSqlPlatform'; @@ -72,6 +72,10 @@ export class DatabaseTable { } } + if (mappedType instanceof DateTimeType) { + prop.length ??= this.platform.getDefaultDateTimeLength(); + } + const primary = !meta.compositePK && !!prop.primary && prop.reference === ReferenceType.SCALAR && this.platform.isNumericColumn(mappedType); this.columns[field] = { name: prop.fieldNames[idx], diff --git a/PostgreSqlPlatform.ts b/PostgreSqlPlatform.ts index a89a4a9..7231122 100644 --- a/PostgreSqlPlatform.ts +++ b/PostgreSqlPlatform.ts @@ -31,8 +31,13 @@ export class PostgreSqlPlatform extends AbstractSqlPlatform { return `current_timestamp(${length})`; } - getDateTimeTypeDeclarationSQL(column: { length: number }): string { - return `timestamptz(${column.length})`; + getDateTimeTypeDeclarationSQL(column: { length?: number }): string { + /* istanbul ignore next */ + return 'timestamptz' + (column.length != null ? 
`(${column.length})` : ''); + } + + getDefaultDateTimeLength(): number { + return 6; // timestamptz actually means timestamptz(6) } getTimeTypeDeclarationSQL(): string { diff --git a/PostgreSqlSchemaHelper.ts b/PostgreSqlSchemaHelper.ts index f2ae47b..22c649a 100644 --- a/PostgreSqlSchemaHelper.ts +++ b/PostgreSqlSchemaHelper.ts @@ -38,8 +38,7 @@ export class PostgreSqlSchemaHelper extends SchemaHelper { column_default, is_nullable, udt_name, - coalesce(datetime_precision, - character_maximum_length) length, + coalesce(datetime_precision, character_maximum_length) length, numeric_precision, numeric_scale, data_type, diff --git a/adding-composite-fk.postgres.test.ts.snap b/adding-composite-fk.postgres.test.ts.snap index 8842a0e..ab22c2f 100644 --- a/adding-composite-fk.postgres.test.ts.snap +++ b/adding-composite-fk.postgres.test.ts.snap @@ -39,10 +39,6 @@ exports[`adding m:1 with composite PK (FK as PK + scalar PK) (GH 1687) schema ge Object { "down": "alter table \\\\"user\\\\" drop constraint \\\\"user_city_state_country_id_city_state_id_city_id_foreign\\\\"; -alter table \\\\"user\\\\" drop constraint if exists \\\\"user_created_check\\\\"; -alter table \\\\"user\\\\" alter column \\\\"created\\\\" type timestamptz using (\\\\"created\\\\"::timestamptz); -alter table \\\\"user\\\\" drop constraint if exists \\\\"user_modified_check\\\\"; -alter table \\\\"user\\\\" alter column \\\\"modified\\\\" type timestamptz using (\\\\"modified\\\\"::timestamptz); drop index \\\\"user_city_state_country_id_city_state_id_city_id_index\\\\"; alter table \\\\"user\\\\" drop column \\\\"city_state_country_id\\\\"; alter table \\\\"user\\\\" drop column \\\\"city_state_id\\\\"; @@ -50,10 +46,6 @@ alter table \\\\"user\\\\" drop column \\\\"city_id\\\\"; ", "up": "alter table \\\\"user\\\\" add column \\\\"city_state_country_id\\\\" int not null, add column \\\\"city_state_id\\\\" int not null, add column \\\\"city_id\\\\" int not null; -alter table \\\\"user\\\\" drop constraint if exists \\\\"user_created_check\\\\"; -alter table \\\\"user\\\\" alter column \\\\"created\\\\" type timestamptz using (\\\\"created\\\\"::timestamptz); -alter table \\\\"user\\\\" drop constraint if exists \\\\"user_modified_check\\\\"; -alter table \\\\"user\\\\" alter column \\\\"modified\\\\" type timestamptz using (\\\\"modified\\\\"::timestamptz); alter table \\\\"user\\\\" add constraint \\\\"user_city_state_country_id_city_state_id_city_id_foreign\\\\" foreign key (\\\\"city_state_country_id\\\\", \\\\"city_state_id\\\\", \\\\"city_id\\\\") references \\\\"city\\\\" (\\\\"state_country_id\\\\", \\\\"state_id\\\\", \\\\"id\\\\") on update cascade; create index \\\\"user_city_state_country_id_city_state_id_city_id_index\\\\" on \\\\"user\\\\" (\\\\"city_state_country_id\\\\", \\\\"city_state_id\\\\", \\\\"city_id\\\\");
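The point this record relies on — that a bare `timestamptz` column in PostgreSQL actually carries microsecond precision, i.e. `timestamptz(6)` — can be checked against `information_schema`. A minimal Python sketch; the `psycopg` (v3) driver, the connection string, and the temporary table name are illustrative assumptions, not part of the commit:

```python
# Minimal sketch: show that a bare "timestamptz" column reports datetime_precision = 6.
# Assumes a reachable PostgreSQL database and psycopg (v3) installed; the DSN and
# table name below are placeholders, not taken from the commit.
import psycopg

with psycopg.connect("dbname=postgres") as conn:
    conn.execute("create temporary table tmp_ts_check (created timestamptz)")
    row = conn.execute(
        """
        select datetime_precision
        from information_schema.columns
        where table_name = 'tmp_ts_check' and column_name = 'created'
        """
    ).fetchone()
    print(row)  # expected: (6,) -- which is why the ORM now defaults the length to 6
```

If the schema tool reads that precision back as 0 instead of 6, every diff run proposes an `alter column ... type timestamptz` statement even though nothing changed, which is exactly the noise removed from the snapshot in this diff.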
feat: Allow printing a tree without prettification, using `--tree-style raw`. This is mainly useful for generating fixtures for the test-suite, and is guaranteed not to add extra bytes to the output.
8dfbb4ba531d86894a59e2b85ec29cd3133c8c4d
feat
https://github.com/Byron/gitoxide/commit/8dfbb4ba531d86894a59e2b85ec29cd3133c8c4d
Allow printing a tree without prettification, using `--tree-style raw`. This is mainly useful for generating fixtures for the test-suite, and is guaranteed not to add extra bytes to the output.
diff --git a/resolve.rs b/resolve.rs index f44c07a..de546f0 100644 --- a/resolve.rs +++ b/resolve.rs @@ -4,6 +4,12 @@ pub struct Options { pub format: OutputFormat, pub explain: bool, pub cat_file: bool, + pub tree_mode: TreeMode, +} + +pub enum TreeMode { + Raw, + Pretty, } pub(crate) mod function { @@ -13,6 +19,7 @@ pub(crate) mod function { use gix::revision::Spec; use super::Options; + use crate::repository::revision::resolve::TreeMode; use crate::{repository::revision, OutputFormat}; pub fn resolve( @@ -23,6 +30,7 @@ pub(crate) mod function { format, explain, cat_file, + tree_mode, }: Options, ) -> anyhow::Result<()> { repo.object_cache_size_if_unset(1024 * 1024); @@ -36,7 +44,7 @@ pub(crate) mod function { let spec = gix::path::os_str_into_bstr(&spec)?; let spec = repo.rev_parse(spec)?; if cat_file { - return display_object(spec, out); + return display_object(spec, tree_mode, out); } writeln!(out, "{spec}", spec = spec.detach())?; } @@ -63,11 +71,11 @@ pub(crate) mod function { Ok(()) } - fn display_object(spec: Spec<'_>, mut out: impl std::io::Write) -> anyhow::Result<()> { + fn display_object(spec: Spec<'_>, tree_mode: TreeMode, mut out: impl std::io::Write) -> anyhow::Result<()> { let id = spec.single().context("rev-spec must resolve to a single object")?; let object = id.object()?; match object.kind { - gix::object::Kind::Tree => { + gix::object::Kind::Tree if matches!(tree_mode, TreeMode::Pretty) => { for entry in object.into_tree().iter() { writeln!(out, "{}", entry?)?; } diff --git a/main.rs b/main.rs index 70f969b..77a274f 100644 --- a/main.rs +++ b/main.rs @@ -925,6 +925,7 @@ pub fn main() -> Result<()> { specs, explain, cat_file, + tree_mode, } => prepare_and_run( "revision-parse", trace, @@ -941,6 +942,12 @@ pub fn main() -> Result<()> { format, explain, cat_file, + tree_mode: match tree_mode.unwrap_or_default() { + revision::resolve::TreeMode::Raw => core::repository::revision::resolve::TreeMode::Raw, + revision::resolve::TreeMode::Pretty => { + core::repository::revision::resolve::TreeMode::Pretty + } + }, }, ) }, diff --git a/mod.rs b/mod.rs index 9878832..4da749f 100644 --- a/mod.rs +++ b/mod.rs @@ -595,6 +595,16 @@ pub mod commitgraph { } pub mod revision { + pub mod resolve { + #[derive(Default, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, clap::ValueEnum)] + pub enum TreeMode { + /// Show the raw bytes - only useful for piping into files for use with tooling. + Raw, + /// Display a tree in human-readable form. + #[default] + Pretty, + } + } #[derive(Debug, clap::Subcommand)] #[clap(visible_alias = "rev", visible_alias = "r")] pub enum Subcommands { @@ -625,6 +635,8 @@ pub mod revision { /// Show the first resulting object similar to how `git cat-file` would, but don't show the resolved spec. #[clap(short = 'c', long, conflicts_with = "explain")] cat_file: bool, + #[clap(short = 't', long)] + tree_mode: Option<resolve::TreeMode>, /// rev-specs like `@`, `@~1` or `HEAD^2`. #[clap(required = true)] specs: Vec<std::ffi::OsString>,
refactor (#301): Avoid adding namespaces to bases and instead put them where they belong: in front of the ref name. This paves the way for handling reflog paths properly.
cebf73655c887d57bbe597938cfa376fac96b44c
refactor
https://github.com/Byron/gitoxide/commit/cebf73655c887d57bbe597938cfa376fac96b44c
in front of the ref name. This paves the way for handling reflog paths properly.
diff --git a/find.rs b/find.rs index b8ef424..b494a54 100644 --- a/find.rs +++ b/find.rs @@ -159,35 +159,38 @@ impl file::Store { } impl file::Store { - fn base_dir_and_rela_path_for_name<'a>(&self, name: FullNameRef<'a>) -> (&Path, Cow<'a, Path>) { + fn to_base_dir_and_relative_name<'a>(&self, name: FullNameRef<'a>) -> (&Path, FullNameRef<'a>) { let commondir = self.common_dir_resolved(); - let (base, relative_path) = name - .category_and_short_name() + name.category_and_short_name() .and_then(|(c, sn)| { use crate::Category::*; + let sn = FullNameRef(sn); Some(match c { - LinkedPseudoRef | Tag | LocalBranch | RemoteBranch | Note => (commondir, name.as_bstr()), + LinkedPseudoRef | Tag | LocalBranch | RemoteBranch | Note => (commondir, name), MainRef | MainPseudoRef => (commondir, sn), - LinkedRef => FullNameRef(sn) + LinkedRef => sn .category() .map_or(false, |cat| cat.is_worktree_private()) - .then(|| (commondir, name.as_bstr())) + .then(|| (commondir, name)) .unwrap_or((commondir, sn)), PseudoRef | Bisect | Rewritten | WorktreePrivate => return None, }) }) - .unwrap_or((self.git_dir.as_path(), name.as_bstr())); - let relative_path = git_path::to_native_path_on_windows(relative_path); - (base, relative_path) + .unwrap_or((self.git_dir.as_path(), name)) } /// Implements the logic required to transform a fully qualified refname into a filesystem path pub(crate) fn reference_path_with_base<'b>(&self, name: FullNameRef<'b>) -> (Cow<'_, Path>, Cow<'b, Path>) { - let (base, relative_path) = self.base_dir_and_rela_path_for_name(name); - match &self.namespace { - None => (base.into(), relative_path), - Some(namespace) => (base.join(namespace.to_path()).into(), relative_path), - } + let (base, name) = self.to_base_dir_and_relative_name(name); + ( + base.into(), + match &self.namespace { + None => git_path::to_native_path_on_windows(name.as_bstr()), + Some(namespace) => { + git_path::to_native_path_on_windows(namespace.to_owned().into_namespaced_name(name).into_inner()) + } + }, + ) } /// Implements the logic required to transform a fully qualified refname into a filesystem path
chore(deps): bump nix deps (#9286)
03811a324f19d71863c873696dedc2b15f242aa4
chore
https://github.com/ibis-project/ibis/commit/03811a324f19d71863c873696dedc2b15f242aa4
bump nix deps (#9286)
diff --git a/flake.lock b/flake.lock index 256cae1..384c859 100644 --- a/flake.lock +++ b/flake.lock @@ -61,11 +61,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1717112898, - "narHash": "sha256-7R2ZvOnvd9h8fDd65p0JnB7wXfUvreox3xFdYWd1BnY=", + "lastModified": 1717216113, + "narHash": "sha256-DniggN0kphCCBpGlS2WyDPoNqxQoRFlhN2GMk35OHiM=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "6132b0f6e344ce2fe34fc051b72fb46e34f668e0", + "rev": "21959d8d44197094aebc74ead6ca4a53bcce0adb", "type": "github" }, "original": { @@ -88,11 +88,11 @@ "treefmt-nix": "treefmt-nix" }, "locked": { - "lastModified": 1717171636, - "narHash": "sha256-SwqzDI7ddN8SkfJ0moYMRu9iastqI25YAHPpmI2PlYM=", + "lastModified": 1717250109, + "narHash": "sha256-MTivB5fsfaPtWwcibBhs20hHYEUe9o9bTRXsGJjQRso=", "owner": "nix-community", "repo": "poetry2nix", - "rev": "3bad7d0f33e6fd09205a19aab01e10af532198f9", + "rev": "11e97e742da5b4e43c27cfe13fca904e82fd4e56", "type": "github" }, "original": { @@ -146,11 +146,11 @@ ] }, "locked": { - "lastModified": 1715940852, - "narHash": "sha256-wJqHMg/K6X3JGAE9YLM0LsuKrKb4XiBeVaoeMNlReZg=", + "lastModified": 1717182148, + "narHash": "sha256-Hi09/RoizxubRf3PHToT2Nm7TL8B/abSVa6q82uEgNI=", "owner": "numtide", "repo": "treefmt-nix", - "rev": "2fba33a182602b9d49f0b2440513e5ee091d838b", + "rev": "03b982b77df58d5974c61c6022085bafe780c1cf", "type": "github" }, "original": {
docs: reorganize install section of contribute.md
6be503ad7b8f0435b0c867c3e6d354a199cc6637
docs
https://github.com/rohankumardubey/ibis/commit/6be503ad7b8f0435b0c867c3e6d354a199cc6637
reorganize install section of contribute.md
diff --git a/contribute.md b/contribute.md index 438d55b..c8fafff 100644 --- a/contribute.md +++ b/contribute.md @@ -11,22 +11,25 @@ There are two primary ways to setup a development environment. **Dependencies:** -- required: [`git`](https://git-scm.com/) -- required: [`gh`](https://github.com/cli/cli) -- optional: [`nix`](https://nixos.org/download.html#nix-quick-install) -- optional: [`conda`](https://docs.conda.io/en/latest/) +#### Required -Installing both `nix` and `conda` is fine, but you should use one or the other for contributing, not both. +- [`git`](https://git-scm.com/) -Use `gh` to fork and clone the `ibis-project/ibis` repository: +#### Package Management - gh repo fork --clone --remote ibis-project/ibis - cd ibis +At least one of `nix` or `conda` is required to contribute to ibis. + +- [`conda`](https://docs.conda.io/en/latest/) ### Nix -1. [Download and install `nix`](https://nixos.org/guides/install-nix.html) -2. Run `nix-shell` in the checkout directory: +1. [Download and install `nix`](https://nixos.org/download.html) +1. Install `gh`: `nix-shell -p gh` **or** `nix-env -iA gh` +1. Fork and clone the ibis repository: + + gh repo fork --clone --remote ibis-project/ibis + +1. Run `nix-shell` in the checkout directory: cd ibis @@ -41,18 +44,21 @@ Use `gh` to fork and clone the `ibis-project/ibis` repository: ### Miniconda 1. [Download](https://docs.conda.io/en/latest/miniconda.html) and install Miniconda -2. [Download the latest `environment.yaml`](https://github.com/ibis-project/ibis/releases/latest/download/environment.yaml) -3. Create a Conda environment suitable for ibis development: +1. Install `gh`: `conda install -c conda-forge gh` +1. Fork and clone the ibis repository: + + gh repo fork --clone --remote ibis-project/ibis + +1. Create a Conda environment suitable for ibis development: cd ibis conda create -n ibis-dev -f conda-lock/<platform-64-pyver>.lock - -4. Activate the environment +1. Activate the environment conda activate ibis-dev -5. Install your local copy of `ibis` into the Conda environment. In the root of the project run: +1. Install your local copy of `ibis` into the Conda environment. In the root of the project run: pip install -e . diff --git a/poetry.lock b/poetry.lock index cff6364..b749277 100644 --- a/poetry.lock +++ b/poetry.lock @@ -840,15 +840,12 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "markdown" -version = "3.3.4" +version = "3.3.5" description = "Python implementation of Markdown." 
category = "dev" optional = false python-versions = ">=3.6" -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \\"3.8\\""} - [package.extras] testing = ["coverage", "pyyaml"] @@ -2029,7 +2026,7 @@ visualization = ["graphviz"] [metadata] lock-version = "1.1" python-versions = ">=3.7.1,<4" -content-hash = "e6224ece8b0e2d6ac289efbdaa99853f445be84b1b3eb26af41ac53031795c96" +content-hash = "f0ee6e2c2045c13bac6542358e1fd3b995095a0c87cf0d64852455683b7e2ac6" [metadata.files] alabaster = [ @@ -2513,8 +2510,8 @@ locket = [ {file = "locket-0.2.1.tar.gz", hash = "sha256:3e1faba403619fe201552f083f1ecbf23f550941bc51985ac6ed4d02d25056dd"}, ] markdown = [ - {file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, - {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, + {file = "Markdown-3.3.5-py3-none-any.whl", hash = "sha256:0d2d09f75cb8d1ffc6770c65c61770b23a61708101f47bda416a002a0edbc480"}, + {file = "Markdown-3.3.5.tar.gz", hash = "sha256:26e9546bfbcde5fcd072bd8f612c9c1b6e2677cb8aadbdf65206674f46dde069"}, ] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, diff --git a/pyproject.toml b/pyproject.toml index e73b163..5731d20 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,6 @@ flake8 = "^4.0.0" ipykernel = "^6" ipython = "^7.27.0" isort = "^5.9.3" -markdown = "<3.3.5" mypy = "^0.921" nbsphinx = "^0.8.7" plumbum = "^1.7.0" diff --git a/shell.nix b/shell.nix index a923b6f..b994e29 100644 --- a/shell.nix +++ b/shell.nix @@ -7,7 +7,6 @@ let cacert cachix commitlint - gh git niv nix-linter @@ -56,4 +55,6 @@ pkgs.mkShell { buildInputs = devDeps ++ libraryDevDeps ++ [ pkgs."ibisDevEnv${pythonShortVersion}" ]; + + PYTHONPATH = builtins.toPath ./.; }
fix(trino,duckdb,postgres): make cumulative `notany`/`notall` aggregations work
c2e985f493fcb3306cc74793ac457661ee942967
fix
https://github.com/ibis-project/ibis/commit/c2e985f493fcb3306cc74793ac457661ee942967
make cumulative `notany`/`notall` aggregations work
diff --git a/registry.py b/registry.py index 6707831..f7497b5 100644 --- a/registry.py +++ b/registry.py @@ -369,8 +369,8 @@ def _window_function(t, window): end = _translate_window_boundary(window.frame.end) additional_params = {how: (start, end)} - result = reduction.over( - partition_by=partition_by, order_by=order_by, **additional_params + result = sa.over( + reduction, partition_by=partition_by, order_by=order_by, **additional_params ) if isinstance(window.func, (ops.RowNumber, ops.DenseRank, ops.MinRank, ops.NTile)): diff --git a/test_window.py b/test_window.py index 380ecb3..1501b06 100644 --- a/test_window.py +++ b/test_window.py @@ -155,14 +155,11 @@ def calc_zscore(s): id='cumnotany', marks=pytest.mark.notyet( ( - "duckdb", 'impala', - 'postgres', 'mssql', 'mysql', 'sqlite', 'snowflake', - 'trino', ), reason="notany() over window not supported", ), @@ -188,14 +185,11 @@ def calc_zscore(s): id='cumnotall', marks=pytest.mark.notyet( ( - "duckdb", 'impala', - 'postgres', 'mssql', 'mysql', 'sqlite', 'snowflake', - 'trino', ), reason="notall() over window not supported", ),
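With the `sa.over(reduction, ...)` change above in place, a cumulative `notany`/`notall` can be written the usual way in ibis. A rough sketch; the table, column names, and the DuckDB backend choice are illustrative assumptions, and the exact API surface may differ between ibis versions:

```python
import ibis

# Toy in-memory table; names are illustrative only.
t = ibis.memtable(
    {
        "grp": [1, 1, 1, 2, 2],
        "idx": [1, 2, 3, 1, 2],
        "flag": [False, False, True, False, False],
    }
)

# Cumulative window per group, ordered by idx.
win = ibis.cumulative_window(group_by=t.grp, order_by=t.idx)

expr = t.mutate(
    cum_notany=t.flag.notany().over(win),  # True until the first True flag appears
    cum_notall=t.flag.notall().over(win),  # True once any False flag has appeared
)

# With this fix the expression should compile and run on DuckDB/Postgres/Trino, e.g.:
# print(ibis.duckdb.connect().execute(expr))
```

This mirrors the test change in the diff, which drops the `notyet` markers for duckdb, postgres, and trino on the `cumnotany`/`cumnotall` cases.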
chore(dev-deps): give dev-deps a bound to make Renovate more automatic
e326864f1dfc92f66a0535d39b5c62b9c787cdb5
chore
https://github.com/ibis-project/ibis/commit/e326864f1dfc92f66a0535d39b5c62b9c787cdb5
give dev-deps a bound to make Renovate more automatic
diff --git a/poetry.lock b/poetry.lock index fbebc5a..0f02113 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2005,7 +2005,7 @@ visualization = ["graphviz"] [metadata] lock-version = "1.1" python-versions = ">=3.8,<3.11" -content-hash = "f3738d776bc84beb2abecf0212e12053e1c29079e4fe4dba48d1fdfadced83b8" +content-hash = "ad083fa93ef7572128cfdb553b742c95d2cb0dcbfb87f94fc50a49e27069f18d" [metadata.files] appnope = [ diff --git a/pyproject.toml b/pyproject.toml index f9c78ea..4bbbd58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,35 +51,34 @@ shapely = { version = ">=1.6,<2", optional = true } sqlalchemy = { version = ">=1.3,<1.5", optional = true } [tool.poetry.dev-dependencies] -black = "^22.1.0" -click = "^8.0.1" -commitizen = "^2.20.3" -flake8 = "^4.0.0" -ipykernel = "^6" +black = ">=22.1.0,<23" +click = ">=8.0.1,<9" +commitizen = ">=2.20.3,<3" +flake8 = ">=4.0.0,<5" +ipykernel = ">=6,<7" ipython = ">=7.27.0,<9" -isort = "^5.9.3" -markdown = "<3.3.5" -mkdocs = "^1.2.3" -mkdocs-jupyter = "^0.20.0" -mkdocs-macros-plugin = "^0.6.3" -mkdocs-material = "^8.1.3" -mkdocstrings = "^0.17.0" -mypy = "^0.931" -pydocstyle = "^6.1.1" -pymdown-extensions = "^9.1" -pytest = "^7.0.0" -pytest-benchmark = "^3.4.1" -pytest-cov = "^3.0.0" -pytest-mock = "^3.6.1" -pytest-randomly = "^3.10.1" -pytest-xdist = "^2.3.0" +isort = ">=5.9.3,<6" +mkdocs = ">=1.2.3,<2" +mkdocs-jupyter = ">=0.20.0,<1" +mkdocs-macros-plugin = ">=0.6.3,<1" +mkdocs-material = ">=8.1.3,<9" +mkdocstrings = ">=0.17.0,<1" +mypy = ">=0.931,<1" +pydocstyle = ">=6.1.1,<7" +pymdown-extensions = ">=9.1,<10" +pytest = ">=7.0.0,<8" +pytest-benchmark = ">=3.4.1,<4" +pytest-cov = ">=3.0.0,<4" +pytest-mock = ">=3.6.1,<4" +pytest-randomly = ">=3.10.1,<4" +pytest-xdist = ">=2.3.0,<3" pytkdocs = { version = ">=0.15.0,<0.17.0", extras = ["numpy-style"] } -pyupgrade = "^2.26.0" +pyupgrade = ">=2.26.0,<3" requests = ">=2,<3" setuptools = ">=57,<61" sqlalchemy = ">=1.3,<1.5" pyarrow = ">=1,<8" -types-requests = "^2.27.8" +types-requests = ">=2.27.8,<3" [tool.poetry.extras] all = [ @@ -131,6 +130,7 @@ addopts = [ "--strict-markers", "--benchmark-skip", ] +empty_parameter_set_mark = "fail_at_collect" norecursedirs = ["site-packages", "dist-packages"] markers = [ "backend: tests specific to a backend",