| commit_message (string, 9–4.28k chars) | sha (string, 40 chars) | type (string, 10 classes) | commit_url (string, 78–90 chars) | masked_commit_message (string, 2–4.26k chars) | author_email (string, 8 classes) | git_diff (string, 128–19.1M chars) |
|---|---|---|---|---|---|---|
feat(mysql): implement _get_schema_from_query
|
456cd44879c32bac7f8a798cb8e7e5851e94b4ec
|
feat
|
https://github.com/rohankumardubey/ibis/commit/456cd44879c32bac7f8a798cb8e7e5851e94b4ec
|
implement _get_schema_from_query
|
diff --git a/__init__.py b/__init__.py index 7ed37e0..7c461d7 100644 --- a/__init__.py +++ b/__init__.py @@ -10,9 +10,11 @@ import sqlalchemy as sa import sqlalchemy.dialects.mysql as mysql import ibis.expr.datatypes as dt +import ibis.expr.schema as sch from ibis.backends.base.sql.alchemy import BaseAlchemyBackend from .compiler import MySQLCompiler +from .datatypes import _type_from_cursor_info class Backend(BaseAlchemyBackend): @@ -121,6 +123,16 @@ class Backend(BaseAlchemyBackend): query = "SET @@session.time_zone = '{}'" bind.execute(query.format(previous_timezone)) + def _get_schema_using_query(self, query: str) -> sch.Schema: + """Infer the schema of `query`.""" + result = self.con.execute(f"SELECT * FROM ({query}) _ LIMIT 0") + cursor = result.cursor + fields = [ + (field.name, _type_from_cursor_info(descr, field)) + for descr, field in zip(cursor.description, cursor._result.fields) + ] + return sch.Schema.from_tuples(fields) + # TODO(kszucs): unsigned integers diff --git a/datatypes.py b/datatypes.py index c627f3f..e92e517 100644 --- a/datatypes.py +++ b/datatypes.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +from functools import partial + +import ibis.expr.datatypes as dt + +# binary character set +# used to distinguish blob binary vs blob text +MY_CHARSET_BIN = 63 + + +def _type_from_cursor_info(descr, field) -> dt.DataType: + """Construct an ibis type from MySQL field descr and field result metadata. + + This method is complex because the MySQL protocol is complex. + + Types are not encoded in a self contained way, meaning you need + multiple pieces of information coming from the result set metadata to + determine the most precise type for a field. Even then, the decoding is + not high fidelity in some cases: UUIDs for example are decoded as + strings, because the protocol does not appear to preserve the logical + type, only the physical type. 
+ """ + from pymysql.connections import TEXT_TYPES + + _, type_code, _, _, field_length, scale, _ = descr + flags = _FieldFlags(field.flags) + typename = _type_codes.get(type_code) + if typename is None: + raise NotImplementedError( + f"MySQL type code {type_code:d} is not supported" + ) + + typ = _type_mapping[typename] + + if typename in ("DECIMAL", "NEWDECIMAL"): + precision = _decimal_length_to_precision( + length=field_length, + scale=scale, + is_unsigned=flags.is_unsigned, + ) + typ = partial(typ, precision=precision, scale=scale) + elif typename == "BIT": + if field_length <= 8: + typ = dt.int8 + elif field_length <= 16: + typ = dt.int16 + elif field_length <= 32: + typ = dt.int32 + elif field_length <= 64: + typ = dt.int64 + else: + assert False, "invalid field length for BIT type" + else: + if flags.is_set: + # sets are limited to strings + typ = dt.Set(dt.string) + elif flags.is_unsigned and flags.is_num: + typ = getattr(dt, f"U{typ.__name__}") + elif type_code in TEXT_TYPES: + # binary text + if field.charsetnr == MY_CHARSET_BIN: + typ = dt.Binary + else: + typ = dt.String + + # projection columns are always nullable + return typ(nullable=True) + + +# ported from my_decimal.h:my_decimal_length_to_precision in mariadb +def _decimal_length_to_precision( + *, + length: int, + scale: int, + is_unsigned: bool, +) -> int: + return length - (scale > 0) - (not (is_unsigned or not length)) + + +_type_codes = { + 0: "DECIMAL", + 1: "TINY", + 2: "SHORT", + 3: "LONG", + 4: "FLOAT", + 5: "DOUBLE", + 6: "NULL", + 7: "TIMESTAMP", + 8: "LONGLONG", + 9: "INT24", + 10: "DATE", + 11: "TIME", + 12: "DATETIME", + 13: "YEAR", + 15: "VARCHAR", + 16: "BIT", + 245: "JSON", + 246: "NEWDECIMAL", + 247: "ENUM", + 248: "SET", + 249: "TINY_BLOB", + 250: "MEDIUM_BLOB", + 251: "LONG_BLOB", + 252: "BLOB", + 253: "VAR_STRING", + 254: "STRING", + 255: "GEOMETRY", +} + + +_type_mapping = { + "DECIMAL": dt.Decimal, + "TINY": dt.Int8, + "SHORT": dt.Int16, + "LONG": dt.Int32, + "FLOAT": dt.Float32, + "DOUBLE": dt.Float64, + "NULL": dt.Null, + "TIMESTAMP": lambda nullable: dt.Timestamp( + timezone="UTC", + nullable=nullable, + ), + "LONGLONG": dt.Int64, + "INT24": dt.Int32, + "DATE": dt.Date, + "TIME": dt.Time, + "DATETIME": dt.Timestamp, + "YEAR": dt.Int16, + "VARCHAR": dt.String, + "BIT": dt.Int8, + "JSON": dt.JSON, + "NEWDECIMAL": dt.Decimal, + "ENUM": dt.String, + "SET": lambda nullable: dt.Set(dt.string, nullable=nullable), + "TINY_BLOB": dt.Binary, + "MEDIUM_BLOB": dt.Binary, + "LONG_BLOB": dt.Binary, + "BLOB": dt.Binary, + "VAR_STRING": dt.String, + "STRING": dt.String, + "GEOMETRY": dt.Geometry, +} + + +class _FieldFlags: + """Flags used to disambiguate field types. + + Gaps in the flag numbers are because we do not map in flags that are of no + use in determining the field's type, such as whether the field is a primary + key or not. 
+ """ + + UNSIGNED = 1 << 5 + SET = 1 << 11 + NUM = 1 << 15 + + __slots__ = ("value",) + + def __init__(self, value: int) -> None: + self.value = value + + @property + def is_unsigned(self) -> bool: + return (self.UNSIGNED & self.value) != 0 + + @property + def is_set(self) -> bool: + return (self.SET & self.value) != 0 + + @property + def is_num(self) -> bool: + return (self.NUM & self.value) != 0 diff --git a/test_client.py b/test_client.py index 15036e0..e26913c 100644 --- a/test_client.py +++ b/test_client.py @@ -4,60 +4,64 @@ from pytest import param import ibis import ibis.expr.datatypes as dt +MYSQL_TYPES = [ + ("tinyint", dt.int8), + ("int1", dt.int8), + ("boolean", dt.int8), + ("smallint", dt.int16), + ("int2", dt.int16), + ("mediumint", dt.int32), + ("int3", dt.int32), + ("int", dt.int32), + ("int4", dt.int32), + ("integer", dt.int32), + ("bigint", dt.int64), + ("decimal", dt.Decimal(10, 0)), + ("decimal(5, 2)", dt.Decimal(5, 2)), + ("dec", dt.Decimal(10, 0)), + ("numeric", dt.Decimal(10, 0)), + ("fixed", dt.Decimal(10, 0)), + ("float", dt.float32), + ("double", dt.float64), + ("timestamp", dt.Timestamp("UTC")), + ("date", dt.date), + ("time", dt.time), + ("datetime", dt.timestamp), + ("year", dt.int16), + ("char(32)", dt.string), + ("char byte", dt.binary), + ("varchar(42)", dt.string), + ("mediumtext", dt.string), + ("text", dt.string), + ("binary(42)", dt.binary), + ("varbinary(42)", dt.binary), + ("bit(1)", dt.int8), + ("bit(9)", dt.int16), + ("bit(17)", dt.int32), + ("bit(33)", dt.int64), + # mariadb doesn't have a distinct json type + ("json", dt.string), + ("enum('small', 'medium', 'large')", dt.string), + ("inet6", dt.string), + ("set('a', 'b', 'c', 'd')", dt.Set(dt.string)), + ("mediumblob", dt.binary), + ("blob", dt.binary), + ("uuid", dt.string), +] + @pytest.mark.parametrize( ("mysql_type", "expected_type"), [ - param(mysql_type, ibis_type, id=mysql_type.lower()) - for mysql_type, ibis_type in [ - ("tinyint", dt.int8), - ("int1", dt.int8), - ("boolean", dt.int8), - ("smallint", dt.int16), - ("int2", dt.int16), - ("mediumint", dt.int32), - ("int3", dt.int32), - ("int", dt.int32), - ("int4", dt.int32), - ("integer", dt.int32), - ("bigint", dt.int64), - ("decimal", dt.Decimal(10, 0)), - ("decimal(5, 2)", dt.Decimal(5, 2)), - ("dec", dt.Decimal(10, 0)), - ("numeric", dt.Decimal(10, 0)), - ("fixed", dt.Decimal(10, 0)), - ("float", dt.float32), - ("double", dt.float64), - ("timestamp", dt.Timestamp("UTC")), - ("date", dt.date), - ("time", dt.time), - ("datetime", dt.timestamp), - ("year", dt.int16), - ("char(32)", dt.string), - ("char byte", dt.binary), - ("varchar(42)", dt.string), - ("mediumtext", dt.string), - ("text", dt.string), - ("binary(42)", dt.binary), - ("varbinary(42)", dt.binary), - ("bit(1)", dt.int8), - ("bit(9)", dt.int16), - ("bit(17)", dt.int32), - ("bit(33)", dt.int64), - # mariadb doesn't have a distinct json type - ("json", dt.string), - ("enum('small', 'medium', 'large')", dt.string), - ("inet6", dt.string), - ("set('a', 'b', 'c', 'd')", dt.Set(dt.string)), - ("mediumblob", dt.binary), - ("blob", dt.binary), - ("uuid", dt.string), - ] + param(mysql_type, ibis_type, id=mysql_type) + for mysql_type, ibis_type in MYSQL_TYPES ], ) def test_get_schema_from_query(con, mysql_type, expected_type): raw_name = ibis.util.guid() name = con.con.dialect.identifier_preparer.quote_identifier(raw_name) + # temporary tables get cleaned up by the db when the session ends, so we + # don't need to explicitly drop the table con.raw_sql( f"CREATE TEMPORARY TABLE {name} (x 
{mysql_type}, y {mysql_type})" )
|
|
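Two pieces of the type-decoding logic in the diff above lend themselves to a standalone check: the MariaDB-ported decimal length-to-precision formula and the BIT width ladder. A minimal Python sketch; function names mirror the commit, and the sample wire lengths are illustrative:

```python
def decimal_length_to_precision(*, length: int, scale: int, is_unsigned: bool) -> int:
    # Ported in the commit from MariaDB's my_decimal_length_to_precision:
    # subtract one character for the decimal point (when scale > 0) and one
    # for the sign (signed types with a nonzero length).
    return length - (scale > 0) - (not (is_unsigned or not length))


def bit_type_width(field_length: int) -> int:
    # BIT(n) columns are decoded into the smallest signed integer that fits,
    # exactly as the field_length ladder in the diff does.
    for width in (8, 16, 32, 64):
        if field_length <= width:
            return width
    raise AssertionError("invalid field length for BIT type")


# decimal(10, 0) is reported with length 11 (10 digits + sign) -> precision 10
assert decimal_length_to_precision(length=11, scale=0, is_unsigned=False) == 10
# decimal(5, 2) is reported with length 7 (5 digits + point + sign) -> precision 5
assert decimal_length_to_precision(length=7, scale=2, is_unsigned=False) == 5
# bit(9) needs an int16, matching the ("bit(9)", dt.int16) test case
assert bit_type_width(9) == 16
```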
chore: remove dead code
|
4e5a9859e3708a93a289d493cd37fc1fd3d1d7b0
|
chore
|
https://github.com/mikro-orm/mikro-orm/commit/4e5a9859e3708a93a289d493cd37fc1fd3d1d7b0
|
remove dead code
|
diff --git a/ObjectHydrator.ts b/ObjectHydrator.ts index 0b525e3..36861d0 100644 --- a/ObjectHydrator.ts +++ b/ObjectHydrator.ts @@ -191,8 +191,6 @@ export class ObjectHydrator extends Hydrator { context.set(`prototype_${convertorKey}`, prop.embeddable.prototype); if (!this.platform.convertsJsonAutomatically() && (prop.object || prop.array)) { - context.set(`convertToJSValue_${convertorKey}`, (val: any) => prop.customType.convertToJSValue(val, this.platform)); - ret.push( ` if (typeof data${dataKey} === 'string') {`, ` data${dataKey} = JSON.parse(data${dataKey});`,
|
|
fix: #510
|
50787a075f5387a3ad1f4c18aac85c38b8f11fa6
|
fix
|
https://github.com/erg-lang/erg/commit/50787a075f5387a3ad1f4c18aac85c38b8f11fa6
|
#510
|
diff --git a/link_hir.rs b/link_hir.rs index 2397e6b..81da5fb 100644 --- a/link_hir.rs +++ b/link_hir.rs @@ -233,6 +233,11 @@ impl<'a> HIRLinker<'a> { match acc { Accessor::Attr(attr) => { self.replace_import(&mut attr.obj); + if attr.ident.inspect() == "__file__" + && attr.ident.vi.def_loc.module.is_none() + { + *expr = self.__file__(); + } } Accessor::Ident(ident) => match &ident.inspect()[..] { "module" => { @@ -241,6 +246,9 @@ impl<'a> HIRLinker<'a> { "global" => { *expr = Expr::from(Identifier::static_public("__builtins__")); } + "__file__" if ident.vi.def_loc.module.is_none() => { + *expr = self.__file__(); + } _ => {} }, } @@ -363,6 +371,22 @@ impl<'a> HIRLinker<'a> { Expr::from(__import__).call1(Expr::from(__name__)) } + fn __file__(&self) -> Expr { + let path = self.cfg.input.path().to_path_buf(); + let token = Token::new_fake( + TokenKind::StrLit, + format!( + "\\"{}\\"", + path.canonicalize().unwrap_or(path).to_string_lossy() + ), + 0, + 0, + 0, + ); + let lit = Literal::try_from(token).unwrap(); + Expr::from(lit) + } + /// ```erg /// x = import "mod" /// ``` diff --git a/dunder.er b/dunder.er index 3ce51bd..e68859f 100644 --- a/dunder.er +++ b/dunder.er @@ -0,0 +1,15 @@ +imp = import "import" + +func() = + __file__ = "foo" + __file__ + +C = Class() +C. + __file__ = "bar" + +assert __file__.endswith "dunder.er" +assert func() == "foo" +assert module::__file__.endswith "dunder.er" +assert C.new().__file__ == "bar" +assert imp.func().endswith "import.er" diff --git a/import.er b/import.er index 5d15160..0a9ac0e 100644 --- a/import.er +++ b/import.er @@ -0,0 +1 @@ +.func() = __file__ diff --git a/test.rs b/test.rs index cf083a4..e814765 100644 --- a/test.rs +++ b/test.rs @@ -131,6 +131,11 @@ fn exec_dict_test() -> Result<(), ()> { expect_success("tests/should_ok/dict.er", 0) } +#[test] +fn exec_dunder() -> Result<(), ()> { + expect_success("tests/should_ok/dunder.er", 0) +} + #[test] fn exec_empty_check() -> Result<(), ()> { expect_success("tests/should_ok/dyn_type_check.er", 0)
|
|
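The linker in this diff substitutes unqualified `__file__` with a string literal built from the canonicalized input path, falling back to the raw path when canonicalization fails. A rough Python rendering of that substitution value (the helper name and sample path are illustrative, not part of the erg codebase):

```python
from pathlib import Path


def file_literal(input_path: str) -> str:
    # Mirrors the commit's `__file__` helper: canonicalize when possible,
    # fall back to the path as given, and wrap the result as a string literal.
    path = Path(input_path)
    try:
        resolved = path.resolve(strict=True)
    except OSError:
        resolved = path
    return f'"{resolved}"'


print(file_literal("tests/should_ok/dunder.er"))
```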
fix(postgres): fix propagation of PKs with custom names
Closes #1990
|
41380de018a4984bbfbc5e4fb27ec399225f88ac
|
fix
|
https://github.com/mikro-orm/mikro-orm/commit/41380de018a4984bbfbc5e4fb27ec399225f88ac
|
fix propagation of PKs with custom names
Closes #1990
|
diff --git a/ChangeSetPersister.ts b/ChangeSetPersister.ts index 023c993..cedf870 100644 --- a/ChangeSetPersister.ts +++ b/ChangeSetPersister.ts @@ -284,6 +284,11 @@ export class ChangeSetPersister { private mapReturnedValues<T extends AnyEntity<T>>(changeSet: ChangeSet<T>, res: QueryResult, meta: EntityMetadata<T>): void { if (this.platform.usesReturningStatement() && res.row && Utils.hasObjectKeys(res.row)) { const data = meta.props.reduce((ret, prop) => { + if (prop.primary && !changeSet.entity.__helper!.hasPrimaryKey()) { + this.mapPrimaryKey(meta, res.row![prop.fieldNames[0]], changeSet); + return ret; + } + if (prop.fieldNames && Utils.isDefined(res.row![prop.fieldNames[0]], true) && !Utils.isDefined(changeSet.entity[prop.name], true)) { ret[prop.name] = changeSet.payload[prop.name] = res.row![prop.fieldNames[0]]; } diff --git a/GH1990.test.ts b/GH1990.test.ts index ef76509..52b6522 100644 --- a/GH1990.test.ts +++ b/GH1990.test.ts @@ -0,0 +1,56 @@ +import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey } from '@mikro-orm/core'; + +@Entity() +class A { + + @PrimaryKey({ fieldName: 'prc_id' }) + id!: number; + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + @OneToMany(() => B, b => b.a) + b = new Collection<B>(this); + +} + +@Entity() +class B { + + @PrimaryKey({ fieldName: 'dec_id' }) + id!: number; + + @ManyToOne({ entity: () => A, fieldName: 'prc_id' }) + a!: A; + +} + +describe('GH issue 1990', () => { + + let orm: MikroORM; + + beforeAll(async () => { + orm = await MikroORM.init({ + entities: [A, B], + dbName: 'mikro_orm_test_1990', + type: 'postgresql', + }); + await orm.getSchemaGenerator().ensureDatabase(); + await orm.getSchemaGenerator().dropSchema(); + await orm.getSchemaGenerator().createSchema(); + }); + + afterAll(async () => { + await orm.close(true); + }); + + test(`GH issue 1990`, async () => { + const a = new A(); + const b = new B(); + a.b.add(b); + await orm.em.persistAndFlush(a); + orm.em.clear(); + + const a1 = await orm.em.findOneOrFail(A, a, { populate: ['b'] }); + expect(a1.b).toHaveLength(1); + }); + +});
|
|
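The shape of the fix, stripped of MikroORM's types: when a `RETURNING` row comes back, a primary-key column must be routed through the PK mapping before the generic returned-column copy, otherwise a custom-named PK never reaches a freshly inserted entity. A schematic sketch only; the names here are illustrative, not MikroORM API:

```python
def map_returned_values(entity: dict, row: dict, pk_field: str) -> None:
    for field, value in row.items():
        if field == pk_field and entity.get(pk_field) is None:
            # previously a custom-named PK fell through to the generic branch
            # below and was never propagated onto new entities
            entity[pk_field] = value
        elif entity.get(field) is None:
            entity[field] = value


entity = {"prc_id": None}
map_returned_values(entity, {"prc_id": 42}, pk_field="prc_id")
assert entity == {"prc_id": 42}
```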
test(risingwave): make unnest test not flaky
|
1e78e813a4cea5edfec912ccf56ff9dc99057f73
|
test
|
https://github.com/ibis-project/ibis/commit/1e78e813a4cea5edfec912ccf56ff9dc99057f73
|
make unnest test not flaky
|
diff --git a/test_array.py b/test_array.py index 0336185..aa3551d 100644 --- a/test_array.py +++ b/test_array.py @@ -251,6 +251,9 @@ def test_array_discovery(backend): raises=GoogleBadRequest, ) @pytest.mark.notimpl(["datafusion"], raises=com.OperationNotDefinedError) [email protected]( + ["risingwave"], raises=AssertionError, reason="ordering is different", strict=False +) def test_unnest_simple(backend): array_types = backend.array_types expected = (
|
|
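The `strict=False` flag in the marker above means both passing and failing runs are tolerated, which is what stops an ordering-dependent assertion from flaking. The same pattern with stock pytest, using a hypothetical backend stand-in whose row order varies:

```python
import random

import pytest


def unnest_rows():
    # Hypothetical stand-in for a backend with nondeterministic row ordering.
    rows = [1, 2, 3]
    random.shuffle(rows)
    return rows


@pytest.mark.xfail(
    raises=AssertionError,
    reason="ordering is different",
    strict=False,  # passing and failing runs are both acceptable: no flakes
)
def test_unnest_simple():
    assert unnest_rows() == [1, 2, 3]
```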
fix: sort_index to satisfy pandas 1.4.x
|
6bac0fc2bec2434f5d2eb8b1c2b0328a0e5a80a3
|
fix
|
https://github.com/rohankumardubey/ibis/commit/6bac0fc2bec2434f5d2eb8b1c2b0328a0e5a80a3
|
sort_index to satisfy pandas 1.4.x
|
diff --git a/aggcontext.py b/aggcontext.py index 7bb9520..903040c 100644 --- a/aggcontext.py +++ b/aggcontext.py @@ -596,7 +596,9 @@ class Window(AggregationContext): indexed_by_ordering = frame[columns].copy() # placeholder column to compute window_sizes below indexed_by_ordering['_placeholder'] = 0 - indexed_by_ordering = indexed_by_ordering.set_index(order_by) + indexed_by_ordering = indexed_by_ordering.set_index( + order_by + ).sort_index(kind="stable") # regroup if needed if group_by:
|
|
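The fix chains `.sort_index(kind="stable")` onto `set_index` so that rows sharing an ordering key keep a deterministic relative order on pandas 1.4.x. A minimal sketch with hypothetical column names:

```python
import pandas as pd

# Hypothetical frame standing in for the aggregation context's window frame.
frame = pd.DataFrame({"order_key": [2, 1, 2, 1], "value": ["a", "b", "c", "d"]})
frame["_placeholder"] = 0  # same trick as the diff: a column for window sizes

# set_index alone does not reorder; the explicit stable sort both orders by
# the key and preserves the original relative order of tied rows.
indexed = frame.set_index("order_key").sort_index(kind="stable")
print(indexed)
# tied key 1 keeps ("b", "d") order; tied key 2 keeps ("a", "c")
```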
feat: add `tree::Recorder::path_clone()` and `tree::Recorder::track_filename()` builder method.
That way, the recorder can be used in other applications to keep track of the full
path *or* the filename.
|
9e391e916402aafa7a20c704d11e21a91bda63b5
|
feat
|
https://github.com/Byron/gitoxide/commit/9e391e916402aafa7a20c704d11e21a91bda63b5
|
add `tree::Recorder::path_clone()` and `tree::Recorder::track_filename()` builder method.
That way, the recorder can be used in other applications to keep track of the full
path *or* the filename.
|
diff --git a/mod.rs b/mod.rs index 3652444..8d6ee88 100644 --- a/mod.rs +++ b/mod.rs @@ -1,5 +1,7 @@ +use gix_object::bstr::BString; use gix_odb::pack::FindExt; use gix_traverse::tree; +use gix_traverse::tree::recorder::Location; use crate::hex_to_id; @@ -10,13 +12,14 @@ fn db() -> crate::Result<gix_odb::Handle> { } #[test] -fn basic_nesting() -> crate::Result<()> { +fn breadth_first_full_path() -> crate::Result<()> { let db = db()?; let mut buf = Vec::new(); let mut buf2 = Vec::new(); let mut commit = db .find_commit_iter(hex_to_id("85df34aa34848b8138b2b3dcff5fb5c2b734e0ce"), &mut buf)? .0; + // Full paths - that's the default. let mut recorder = tree::Recorder::default(); gix_traverse::tree::breadthfirst( db.find_tree_iter(commit.tree_id().expect("a tree is available in a commit"), &mut buf2)? @@ -95,3 +98,53 @@ fn basic_nesting() -> crate::Result<()> { ); Ok(()) } + +#[test] +fn breadth_first_filename_only() -> crate::Result<()> { + let db = db()?; + let mut buf = Vec::new(); + let mut buf2 = Vec::new(); + let mut commit = db + .find_commit_iter(hex_to_id("85df34aa34848b8138b2b3dcff5fb5c2b734e0ce"), &mut buf)? + .0; + let mut recorder = tree::Recorder::default().track_location(Some(Location::FileName)); + gix_traverse::tree::breadthfirst( + db.find_tree_iter(commit.tree_id().expect("a tree is available in a commit"), &mut buf2)? + .0, + tree::breadthfirst::State::default(), + |oid, buf| db.find_tree_iter(oid, buf).ok().map(|t| t.0), + &mut recorder, + )?; + + assert_eq!( + recorder.records.into_iter().map(|e| e.filepath).collect::<Vec<_>>(), + ["a", "b", "c", "d", "e", "f", "a", "b", "c", "d", "z", "x"] + .into_iter() + .map(BString::from) + .collect::<Vec<_>>() + ); + Ok(()) +} + +#[test] +fn breadth_first_no_location() -> crate::Result<()> { + let db = db()?; + let mut buf = Vec::new(); + let mut buf2 = Vec::new(); + let mut commit = db + .find_commit_iter(hex_to_id("85df34aa34848b8138b2b3dcff5fb5c2b734e0ce"), &mut buf)? + .0; + let mut recorder = tree::Recorder::default().track_location(None); + gix_traverse::tree::breadthfirst( + db.find_tree_iter(commit.tree_id().expect("a tree is available in a commit"), &mut buf2)? + .0, + tree::breadthfirst::State::default(), + |oid, buf| db.find_tree_iter(oid, buf).ok().map(|t| t.0), + &mut recorder, + )?; + + for path in recorder.records.into_iter().map(|e| e.filepath) { + assert_eq!(path, "", "path should be empty as it's not tracked at all") + } + Ok(()) +} diff --git a/recorder.rs b/recorder.rs index 975dd63..3a31acf 100644 --- a/recorder.rs +++ b/recorder.rs @@ -6,6 +6,17 @@ use gix_object::{ use crate::tree::{visit::Action, Recorder, Visit}; +/// Describe how to track the location of an entry. +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub enum Location { + /// Track the entire path, relative to the repository. + Path, + /// Keep only the file-name as location, which may be enough for some calculations. + /// + /// This is less expensive than tracking the entire `Path`. + FileName, +} + /// An owned entry as observed by a call to [`visit_(tree|nontree)(…)`][Visit::visit_tree()], enhanced with the full path to it. /// Otherwise similar to [`gix_object::tree::EntryRef`]. 
#[derive(Clone, Debug, PartialEq, Eq)] @@ -30,6 +41,17 @@ impl Entry { } } +impl Default for Recorder { + fn default() -> Self { + Recorder { + path_deque: Default::default(), + path: Default::default(), + location: Location::Path.into(), + records: vec![], + } + } +} + impl Recorder { fn pop_element(&mut self) { if let Some(pos) = self.path.rfind_byte(b'/') { @@ -45,31 +67,64 @@ impl Recorder { } self.path.push_str(name); } +} - fn path_clone(&self) -> BString { +/// Builder +impl Recorder { + /// Obtain a copy of the currently tracked, full path of the entry. + pub fn track_location(mut self, location: Option<Location>) -> Self { + self.location = location; + self + } +} + +/// Access +impl Recorder { + /// Obtain a copy of the currently tracked, full path of the entry. + pub fn path_clone(&self) -> BString { self.path.clone() } + + /// Return the currently set path. + pub fn path(&self) -> &BStr { + self.path.as_ref() + } } impl Visit for Recorder { fn pop_front_tracked_path_and_set_current(&mut self) { - self.path = self - .path_deque - .pop_front() - .expect("every call is matched with push_tracked_path_component"); + if let Some(Location::Path) = self.location { + self.path = self + .path_deque + .pop_front() + .expect("every call is matched with push_tracked_path_component"); + } } fn push_back_tracked_path_component(&mut self, component: &BStr) { - self.push_element(component); - self.path_deque.push_back(self.path.clone()); + if let Some(Location::Path) = self.location { + self.push_element(component); + self.path_deque.push_back(self.path.clone()); + } } fn push_path_component(&mut self, component: &BStr) { - self.push_element(component); + match self.location { + None => {} + Some(Location::Path) => { + self.push_element(component); + } + Some(Location::FileName) => { + self.path.clear(); + self.path.extend_from_slice(component); + } + } } fn pop_path_component(&mut self) { - self.pop_element(); + if let Some(Location::Path) = self.location { + self.pop_element() + } } fn visit_tree(&mut self, entry: &tree::EntryRef<'_>) -> Action {
|
|
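The `Recorder` in this diff stores an `Option<Location>`: `Some(Path)` tracks the full repository-relative path (the default), `Some(FileName)` keeps only the last component, and `None` tracks nothing. A compact Python model of that tri-state builder; names mirror the Rust, but this is not gitoxide API:

```python
from enum import Enum
from typing import Optional


class Location(Enum):
    PATH = "path"
    FILE_NAME = "filename"


class Recorder:
    def __init__(self) -> None:
        self.location: Optional[Location] = Location.PATH  # full path default
        self.path = ""

    def track_location(self, location: Optional[Location]) -> "Recorder":
        # Builder-style setter, like `Recorder::track_location` in the diff.
        self.location = location
        return self

    def push_path_component(self, component: str) -> None:
        if self.location is Location.PATH:
            self.path = f"{self.path}/{component}" if self.path else component
        elif self.location is Location.FILE_NAME:
            self.path = component  # keep only the file name
        # location is None: record nothing, path stays ""


r = Recorder().track_location(Location.FILE_NAME)
r.push_path_component("src")
r.push_path_component("lib.rs")
assert r.path == "lib.rs"
```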
build: adding more segment types to path shape
|
2ecb4e5f1f8e50081880e7ce1fb15de0a5b865d5
|
build
|
https://github.com/tsparticles/tsparticles/commit/2ecb4e5f1f8e50081880e7ce1fb15de0a5b865d5
|
adding more segment types to path shape
|
diff --git a/SegmentType.ts b/SegmentType.ts index e703e55..15bf3b1 100644 --- a/SegmentType.ts +++ b/SegmentType.ts @@ -2,4 +2,6 @@ export const enum SegmentType { line = "line", bezier = "bezier", quadratic = "quadratic", + arc = "arc", + ellipse = "ellipse", } diff --git a/Utils.ts b/Utils.ts index f00d4bf..1851372 100644 --- a/Utils.ts +++ b/Utils.ts @@ -31,6 +31,27 @@ export function drawPath(ctx: CanvasRenderingContext2D, radius: number, path: IP segment.values[2].y * radius ); break; + + case SegmentType.arc: + ctx.arc( + segment.values[0].x * radius, + segment.values[0].y * radius, + segment.values[1].x * radius, + segment.values[2].x, + segment.values[2].y + ); + break; + + case SegmentType.ellipse: + ctx.ellipse( + segment.values[0].x * radius, + segment.values[0].y * radius, + segment.values[1].x * radius, + segment.values[1].y * radius, + segment.values[2].x, + segment.values[3].x, + segment.values[3].y + ); } }
|
|
chore: move website to custom domain and fix repo links after transfer
|
57b2d1747b83cf02b6d5281eb72db88bfec11563
|
chore
|
https://github.com/mikro-orm/mikro-orm/commit/57b2d1747b83cf02b6d5281eb72db88bfec11563
|
move website to custom domain and fix repo links after transfer
|
diff --git a/README.md b/README.md index 99ad7f2..003b389 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ <h1 align="center"> - <a href="https://b4nan.github.io/mikro-orm/"><img src="https://raw.githubusercontent.com/b4nan/mikro-orm/master/docs/assets/img/logo-readme.svg?sanitize=true" alt="MikroORM"></a> + <a href="https://mikro-orm.io"><img src="https://raw.githubusercontent.com/mikro-orm/mikro-orm/master/docs/assets/img/logo-readme.svg?sanitize=true" alt="MikroORM"></a> </h1> Simple TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, @@ -10,47 +10,47 @@ MySQL, PostgreSQL and SQLite databases. [](https://www.npmjs.com/package/mikro-orm) [](https://join.slack.com/t/mikroorm/shared_invite/enQtNTM1ODYzMzM4MDk3LTBmZDNlODBhYjcxNGZlMTkyYzJmODAwMDhjODc0ZTM2MzQ2Y2VkOGM0ODYzYTJjMDRiZDdjMmIxYjI2OTY0Y2U) [](https://www.npmjs.com/package/mikro-orm) -[](https://david-dm.org/B4nan/mikro-orm) -[](https://travis-ci.com/B4nan/mikro-orm) -[](https://coveralls.io/r/B4nan/mikro-orm?branch=master) -[](https://codeclimate.com/github/B4nan/mikro-orm/maintainability) +[](https://david-dm.org/mikro-orm/mikro-orm) +[](https://travis-ci.com/mikro-orm/mikro-orm) +[](https://coveralls.io/r/mikro-orm/mikro-orm?branch=master) +[](https://codeclimate.com/github/mikro-orm/mikro-orm/maintainability) ## 📖 Documentation MikroORM's documentation, included in this repo in the root directory, is built with -[Jekyll](https://jekyllrb.com/) and publicly hosted on GitHub Pages at https://b4nan.github.io/mikro-orm/. +[Jekyll](https://jekyllrb.com/) and publicly hosted on GitHub Pages at https://mikro-orm.io. There is also auto-generated [CHANGELOG.md](CHANGELOG.md) file based on commit messages (via `semantic-release`). ## ✨ Core features -- [Clean and simple entity definition](https://b4nan.github.io/mikro-orm/defining-entities/) -- [Identity Map](https://b4nan.github.io/mikro-orm/identity-map/) -- [Entity references](https://b4nan.github.io/mikro-orm/entity-references/) -- [Using entity constructors](https://b4nan.github.io/mikro-orm/using-entity-constructors/) -- [Collections](https://b4nan.github.io/mikro-orm/collections/) -- [Unit of Work](https://b4nan.github.io/mikro-orm/unit-of-work/) -- [Transactions](https://b4nan.github.io/mikro-orm/transactions/) -- [Cascading persist and remove](https://b4nan.github.io/mikro-orm/cascading/) -- [Using `QueryBuilder`](https://b4nan.github.io/mikro-orm/query-builder/) -- [Preloading deeply nested structures via populate](https://b4nan.github.io/mikro-orm/nested-populate/) -- [Property validation](https://b4nan.github.io/mikro-orm/property-validation/) -- [Lifecycle hooks](https://b4nan.github.io/mikro-orm/lifecycle-hooks/) -- [Vanilla JS support](https://b4nan.github.io/mikro-orm/usage-with-js/) +- [Clean and simple entity definition](https://mikro-orm.io/defining-entities/) +- [Identity Map](https://mikro-orm.io/identity-map/) +- [Entity references](https://mikro-orm.io/entity-references/) +- [Using entity constructors](https://mikro-orm.io/using-entity-constructors/) +- [Collections](https://mikro-orm.io/collections/) +- [Unit of Work](https://mikro-orm.io/unit-of-work/) +- [Transactions](https://mikro-orm.io/transactions/) +- [Cascading persist and remove](https://mikro-orm.io/cascading/) +- [Using `QueryBuilder`](https://mikro-orm.io/query-builder/) +- [Preloading deeply nested structures via populate](https://mikro-orm.io/nested-populate/) +- [Property validation](https://mikro-orm.io/property-validation/) +- [Lifecycle 
hooks](https://mikro-orm.io/lifecycle-hooks/) +- [Vanilla JS support](https://mikro-orm.io/usage-with-js/) ## 📦 Example integrations -You can find example integrations for some popular frameworks in the [`mikro-orm-examples` repository](https://github.com/B4nan/mikro-orm-examples): +You can find example integrations for some popular frameworks in the [`mikro-orm-examples` repository](https://github.com/mikro-orm/mikro-orm-examples): ### TypeScript examples -- [Express + MongoDB](https://github.com/B4nan/mikro-orm-examples/tree/master/express-ts) -- [Nest + MySQL](https://github.com/B4nan/mikro-orm-examples/tree/master/nest) +- [Express + MongoDB](https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-ts) +- [Nest + MySQL](https://github.com/mikro-orm/mikro-orm-examples/tree/master/nest) - [`nestjs-mikro-orm` module](https://github.com/dario1985/nestjs-mikro-orm) ### JavaScript examples -- [Express + MongoDB](https://github.com/B4nan/mikro-orm-examples/tree/master/express-js) +- [Express + MongoDB](https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-js) ## 🚀 Quick start @@ -86,16 +86,16 @@ const orm = await MikroORM.init({ entitiesDirs: ['./dist/entities'], // path to your JS entities (dist), relative to `baseDir` dbName: 'my-db-name', clientUrl: '...', // defaults to 'mongodb://localhost:27017' for mongodb driver - autoFlush: false, // read more here: https://b4nan.github.io/mikro-orm/unit-of-work/ + autoFlush: false, // read more here: https://mikro-orm.io/unit-of-work/ }); console.log(orm.em); // access EntityManager via `em` property ``` There are more ways to configure your entities, take a look at -[installation page](https://b4nan.github.io/mikro-orm/installation/). +[installation page](https://mikro-orm.io/installation/). Then you will need to fork entity manager for each request so their -[identity maps](https://b4nan.github.io/mikro-orm/identity-map/) will not collide. +[identity maps](https://mikro-orm.io/identity-map/) will not collide. To do so, use the `RequestContext` helper: ```typescript @@ -111,7 +111,7 @@ app.use((req, res, next) => { > it before request processing middleware like `queryParser` or `bodyParser`, so definitely > register the context after them. -More info about `RequestContext` is described [here](https://b4nan.github.io/mikro-orm/identity-map/#request-context). +More info about `RequestContext` is described [here](https://mikro-orm.io/identity-map/#request-context). Now you can start defining your entities (in one of the `entitiesDirs` folders): @@ -144,7 +144,7 @@ export interface Book extends IEntity { } ``` More information can be found in -[defining entities section](https://b4nan.github.io/mikro-orm/defining-entities/) in docs. +[defining entities section](https://mikro-orm.io/defining-entities/) in docs. When you have your entities defined, you can start using ORM either via `EntityManager` or via `EntityRepository`s. @@ -207,8 +207,8 @@ const books = await booksRepository.find({ author: '...' }, { console.log(books); // Book[] ``` -Take a look at docs about [working with `EntityManager`](https://b4nan.github.io/mikro-orm/entity-manager/) -or [using `EntityRepository` instead](https://b4nan.github.io/mikro-orm/repositories/). +Take a look at docs about [working with `EntityManager`](https://mikro-orm.io/entity-manager/) +or [using `EntityRepository` instead](https://mikro-orm.io/repositories/). ## 🤝 Contributing @@ -223,7 +223,7 @@ for details on the process for submitting pull requests to us. 
- Twitter: [@B4nan](https://twitter.com/B4nan) - Github: [@b4nan](https://github.com/b4nan) -See also the list of contributors who [participated](https://github.com/b4nan/mikro-orm/contributors) in this project. +See also the list of contributors who [participated](https://github.com/mikro-orm/mikro-orm/contributors) in this project. ## Show your support diff --git a/sidebar.html b/sidebar.html index 27b1ff1..ca27693 100644 --- a/sidebar.html +++ b/sidebar.html @@ -43,9 +43,9 @@ </ul> <h3>Example integrations</h3> <ul> - <li><a href="https://github.com/B4nan/mikro-orm-examples/tree/master/express-ts">Express + MongoDB + TypeScript</a></li> - <li><a href="https://github.com/B4nan/mikro-orm-examples/tree/master/nest">Nest + MySQL + TypeScript</a></li> - <li><a href="https://github.com/B4nan/mikro-orm-examples/tree/master/express-js">Express + MongoDB + JavaScript</a></li> + <li><a href="https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-ts">Express + MongoDB + TypeScript</a></li> + <li><a href="https://github.com/mikro-orm/mikro-orm-examples/tree/master/nest">Nest + MySQL + TypeScript</a></li> + <li><a href="https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-js">Express + MongoDB + JavaScript</a></li> </ul> <p class="repo-owner"><a href="{{ site.github.url | default: site.url }}{{ site.github.repository_url }}">{{ site.github.repository_name }}</a> is maintained by <a href="{{ site.github.url | default: site.url }}{{ site.github.owner_url }}">{{ site.github.owner_name }}</a>.</p> diff --git a/homepage.html b/homepage.html index 7cc97a6..d551943 100644 --- a/homepage.html +++ b/homepage.html @@ -45,7 +45,7 @@ {{ content }} <footer class="site-footer"> - <span class="site-footer-owner"><a href="{{ site.github.repository_url }}">{{ site.github.repository_name }}</a> is maintained by <a href="{{ site.github.owner_url }}">{{ site.github.owner_name }}</a>.</span> + <span class="site-footer-owner"><a href="{{ site.github.repository_url }}">{{ site.github.repository_name }}</a> is maintained by <a href="https://github.com/B4nan">B4nan</a>.</span> Found a typo and want to contribute to the documentation? <a href="{{ site.github.repository_url }}/edit/master/docs/{{ page.path }}">Edit this page on Github!</a> </footer> diff --git a/entity-manager.md b/entity-manager.md index 8349ffb..5b4abc4 100644 --- a/entity-manager.md +++ b/entity-manager.md @@ -126,13 +126,13 @@ As the last one is the least verbose, it should be preferred. ## Entity repositories Although you can use `EntityManager` directly, much more convenient way is to use -[`EntityRepository` instead](https://b4nan.github.io/mikro-orm/repositories/). You can register +[`EntityRepository` instead](https://mikro-orm.io/repositories/). You can register your repositories in dependency injection container like [InversifyJS](http://inversify.io/) so you do not need to get them from `EntityManager` each time. For more examples, take a look at -[`tests/EntityManager.mongo.test.ts`](https://github.com/B4nan/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts) -or [`tests/EntityManager.mysql.test.ts`](https://github.com/B4nan/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts). +[`tests/EntityManager.mongo.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts) +or [`tests/EntityManager.mysql.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts). 
## EntityManager API diff --git a/favicon.ico b/favicon.ico index 03d2d95..49ecf1a 100644 --- a/favicon.ico +++ b/favicon.ico Binary files a/docs/favicon.ico and b/docs/favicon.ico differ diff --git a/index.md b/index.md index c9fd7cb..1819bf0 100644 --- a/index.md +++ b/index.md @@ -5,10 +5,10 @@ layout: homepage [](https://www.npmjs.com/package/mikro-orm) [](https://join.slack.com/t/mikroorm/shared_invite/enQtNTM1ODYzMzM4MDk3LTBmZDNlODBhYjcxNGZlMTkyYzJmODAwMDhjODc0ZTM2MzQ2Y2VkOGM0ODYzYTJjMDRiZDdjMmIxYjI2OTY0Y2U) [](https://www.npmjs.com/package/mikro-orm) -[](https://david-dm.org/B4nan/mikro-orm) -[](https://travis-ci.com/B4nan/mikro-orm) -[](https://coveralls.io/r/B4nan/mikro-orm?branch=master) -[](https://codeclimate.com/github/B4nan/mikro-orm/maintainability) +[](https://david-dm.org/mikro-orm/mikro-orm) +[](https://travis-ci.com/mikro-orm/mikro-orm) +[](https://coveralls.io/r/mikro-orm/mikro-orm?branch=master) +[](https://codeclimate.com/github/mikro-orm/mikro-orm/maintainability) MikroORM is TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. @@ -51,6 +51,6 @@ Currently it supports MongoDB, MySQL, PostgreSQL and SQLite databases, but more - [Usage with Vanilla JS](usage-with-js.md) - [Creating custom driver](custom-driver.md) - Example integrations - - [Express + MongoDB + TypeScript](https://github.com/B4nan/mikro-orm-examples/tree/master/express-ts) - - [Nest + MySQL + TypeScript](https://github.com/B4nan/mikro-orm-examples/tree/master/nest) - - [Express + MongoDB + JavaScript](https://github.com/B4nan/mikro-orm-examples/tree/master/express-js) + - [Express + MongoDB + TypeScript](https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-ts) + - [Nest + MySQL + TypeScript](https://github.com/mikro-orm/mikro-orm-examples/tree/master/nest) + - [Express + MongoDB + JavaScript](https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-js) diff --git a/installation.md b/installation.md index 9be7e2b..bcadc72 100644 --- a/installation.md +++ b/installation.md @@ -36,7 +36,7 @@ const orm = await MikroORM.init({ dbName: 'my-db-name', clientUrl: '...', // defaults to 'mongodb://localhost:27017' for mongodb driver baseDir: __dirname, // defaults to `process.cwd()` - autoFlush: false, // read more here: https://b4nan.github.io/mikro-orm/unit-of-work/ + autoFlush: false, // read more here: https://mikro-orm.io/unit-of-work/ }); console.log(orm.em); // access EntityManager via `em` property ``` diff --git a/repositories.md b/repositories.md index e29c9dd..9c4171c 100644 --- a/repositories.md +++ b/repositories.md @@ -58,8 +58,8 @@ Now you can access your custom repository via `EntityManager.getRepository()` me `customRepository`) globally, via `MikroORM.init({ entityRepository: CustomBaseRepository })` For more examples, take a look at -[`tests/EntityManager.mongo.test.ts`](https://github.com/B4nan/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts) -or [`tests/EntityManager.mysql.test.ts`](https://github.com/B4nan/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts). +[`tests/EntityManager.mongo.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts) +or [`tests/EntityManager.mysql.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts). 
## EntityRepository\\<T\\> API diff --git a/usage-with-js.md b/usage-with-js.md index 4207f9a..d1af6bd 100644 --- a/usage-with-js.md +++ b/usage-with-js.md @@ -100,7 +100,7 @@ const orm = await MikroORM.init({ ``` For more examples of plain JavaScript entity definitions take a look -[at the tests](https://github.com/B4nan/mikro-orm/blob/master/tests/entities-js) or at -[Express JavaScript example](https://github.com/B4nan/mikro-orm-examples/tree/master/express-js). +[at the tests](https://github.com/mikro-orm/mikro-orm/blob/master/tests/entities-js) or at +[Express JavaScript example](https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-js). [← Back to table of contents](index.md#table-of-contents) diff --git a/usage-with-nestjs.md b/usage-with-nestjs.md index 351c9e4..53474ae 100644 --- a/usage-with-nestjs.md +++ b/usage-with-nestjs.md @@ -36,7 +36,7 @@ create the request context for you automatically. entitiesDirsTs: ['src/entities'], dbName: 'my-db-name.sqlite3', type: 'sqlite', - autoFlush: false, // read more here: https://b4nan.github.io/mikro-orm/unit-of-work/ + autoFlush: false, // read more here: https://mikro-orm.io/unit-of-work/ }), // ... your feature modules ], diff --git a/usage-with-sql.md b/usage-with-sql.md index fe6b8c6..dde6e6d 100644 --- a/usage-with-sql.md +++ b/usage-with-sql.md @@ -113,7 +113,7 @@ QueryBuilder.clone(): QueryBuilder; ``` For more examples of how to work with `QueryBuilder`, take a look at `QueryBuilder` tests in -[`tests/QueryBuilder.test.ts`](https://github.com/B4nan/mikro-orm/blob/master/tests/QueryBuilder.test.ts). +[`tests/QueryBuilder.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/QueryBuilder.test.ts). ## Transactions diff --git a/EntityManager.mongo.test.ts b/EntityManager.mongo.test.ts index a56e099..8418806 100644 --- a/EntityManager.mongo.test.ts +++ b/EntityManager.mongo.test.ts @@ -250,6 +250,7 @@ describe('EntityManagerMongo', () => { const fork = orm.em.fork(); expect(fork).not.toBe(orm.em); + // @ts-ignore expect(fork.metadata).toBe(orm.em.metadata); expect(fork.getUnitOfWork().getIdentityMap()).toEqual({}); @@ -1222,11 +1223,13 @@ describe('EntityManagerMongo', () => { const baz2 = FooBaz.create('fz2'); bar.baz = baz1; await orm.em.persist(bar); + // @ts-ignore expect(orm.em.getUnitOfWork().originalEntityData[bar.__uuid].baz).toEqual(baz1._id); // replacing reference with value will trigger orphan removal bar.baz = baz2; await orm.em.persist(bar); + // @ts-ignore expect(orm.em.getUnitOfWork().originalEntityData[bar.__uuid].baz).toEqual(baz2._id); await expect(orm.em.findOne(FooBaz, baz1)).resolves.toBeNull(); await expect(orm.em.findOne(FooBaz, baz2)).resolves.not.toBeNull();
|
|
ci: run macOS in nix build
|
8b6623d2c180beddaedad6de6922b46ae24fbbf9
|
ci
|
https://github.com/ibis-project/ibis/commit/8b6623d2c180beddaedad6de6922b46ae24fbbf9
|
run macOS in nix build
|
diff --git a/nix-skip-helper.yml b/nix-skip-helper.yml index dbcd629..5bda8d6 100644 --- a/nix-skip-helper.yml +++ b/nix-skip-helper.yml @@ -23,10 +23,13 @@ on: jobs: nix: - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: + os: + - ubuntu-latest + - macos-latest python-version: - "3.8" - "3.9" diff --git a/nix.yml b/nix.yml index 92dd3c7..909bcd1 100644 --- a/nix.yml +++ b/nix.yml @@ -25,11 +25,13 @@ concurrency: jobs: nix: - # TODO: we can't use macos-latest here until watchdog is fixed - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: + os: + - ubuntu-latest + - macos-latest python-version: - "3.8" - "3.9" diff --git a/01_environment.md b/01_environment.md index de12470..e3f4acd 100644 --- a/01_environment.md +++ b/01_environment.md @@ -24,7 +24,7 @@ hide: | -----------------------------------------: | :----------------------------------------------------: | :------------------------------------------------: | :------------------------------------------------: | | **Operating System** :material-arrow-down: | | | | | **Linux** | {{ config.extra.support_levels.supported.icon }}[^1] | {{ config.extra.support_levels.supported.icon }} | {{ config.extra.support_levels.supported.icon }} | - | **macOS** | {{ config.extra.support_levels.bug.icon }}[^2] | {{ config.extra.support_levels.bug.icon }} | {{ config.extra.support_levels.bug.icon }} | + | **macOS (x86_64)** | {{ config.extra.support_levels.supported.icon }} | {{ config.extra.support_levels.supported.icon }} | {{ config.extra.support_levels.supported.icon }} | | **Windows** | {{ config.extra.support_levels.unsupported.icon }}[^3] | {{ config.extra.support_levels.unsupported.icon }} | {{ config.extra.support_levels.unsupported.icon }} | 1. 
[Install `nix`](https://nixos.org/download.html) diff --git a/datafusion-macos.patch b/datafusion-macos.patch index d983f00..d27bf98 100644 --- a/datafusion-macos.patch +++ b/datafusion-macos.patch @@ -0,0 +1,170 @@ +diff --git a/Cargo.lock b/Cargo.lock +index d73083f..489f301 100644 +--- a/Cargo.lock ++++ b/Cargo.lock +@@ -277,9 +277,9 @@ dependencies = [ + + [[package]] + name = "datafusion" +-version = "7.0.0" ++version = "7.1.0" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "30cf8e6735817bb021748d72cecc33e468d8775bf749470c52aa7f55ee5cdf9e" ++checksum = "79a0ea0a500cbfb6b683ad8cc6f403faa7c897432cc8ad0da40c09a9a705255f" + dependencies = [ + "ahash", + "arrow", +@@ -384,9 +384,9 @@ dependencies = [ + + [[package]] + name = "flate2" +-version = "1.0.22" ++version = "1.0.23" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" ++checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af" + dependencies = [ + "cfg-if", + "crc32fast", +@@ -701,9 +701,9 @@ dependencies = [ + + [[package]] + name = "libc" +-version = "0.2.121" ++version = "0.2.124" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "efaa7b300f3b5fe8eb6bf21ce3895e1751d9665086af2d64b42f19701015ff4f" ++checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50" + + [[package]] + name = "libmimalloc-sys" +@@ -779,12 +779,11 @@ dependencies = [ + + [[package]] + name = "miniz_oxide" +-version = "0.4.4" ++version = "0.5.1" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" ++checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082" + dependencies = [ + "adler", +- "autocfg", + ] + + [[package]] +@@ -1047,18 +1046,18 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" + + [[package]] + name = "proc-macro2" +-version = "1.0.36" ++version = "1.0.37" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" ++checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1" + dependencies = [ + "unicode-xid", + ] + + [[package]] + name = "pyo3" +-version = "0.15.1" ++version = "0.15.2" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "7cf01dbf1c05af0a14c7779ed6f3aa9deac9c3419606ac9de537a2d649005720" ++checksum = "d41d50a7271e08c7c8a54cd24af5d62f73ee3a6f6a314215281ebdec421d5752" + dependencies = [ + "cfg-if", + "indoc", +@@ -1072,18 +1071,18 @@ dependencies = [ + + [[package]] + name = "pyo3-build-config" +-version = "0.15.1" ++version = "0.15.2" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "dbf9e4d128bfbddc898ad3409900080d8d5095c379632fbbfbb9c8cfb1fb852b" ++checksum = "779239fc40b8e18bc8416d3a37d280ca9b9fb04bda54b98037bb6748595c2410" + dependencies = [ + "once_cell", + ] + + [[package]] + name = "pyo3-macros" +-version = "0.15.1" ++version = "0.15.2" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "67701eb32b1f9a9722b4bc54b548ff9d7ebfded011c12daece7b9063be1fd755" ++checksum = "00b247e8c664be87998d8628e86f282c25066165f1f8dda66100c48202fdb93a" + dependencies = [ + "pyo3-macros-backend", + "quote", +@@ -1092,9 +1091,9 @@ dependencies = [ + + [[package]] + name = "pyo3-macros-backend" +-version = "0.15.1" ++version 
= "0.15.2" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "f44f09e825ee49a105f2c7b23ebee50886a9aee0746f4dd5a704138a64b0218a" ++checksum = "5a8c2812c412e00e641d99eeb79dd478317d981d938aa60325dfa7157b607095" + dependencies = [ + "proc-macro2", + "pyo3-build-config", +@@ -1104,9 +1103,9 @@ dependencies = [ + + [[package]] + name = "quote" +-version = "1.0.17" ++version = "1.0.18" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "632d02bff7f874a36f33ea8bb416cd484b90cc66c1194b1a1110d067a7013f58" ++checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" + dependencies = [ + "proc-macro2", + ] +@@ -1341,9 +1340,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + + [[package]] + name = "syn" +-version = "1.0.90" ++version = "1.0.91" + source = "registry+https://github.com/rust-lang/crates.io-index" +-checksum = "704df27628939572cd88d33f171cd6f896f4eaca85252c6e0a72d8d8287ee86f" ++checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d" + dependencies = [ + "proc-macro2", + "quote", +diff --git a/Cargo.toml b/Cargo.toml +index ba2f337..40cba00 100644 +--- a/Cargo.toml ++++ b/Cargo.toml +@@ -35,7 +35,7 @@ datafusion = { version = "^7.0.0", features = ["pyarrow"] } + datafusion-expr = { version = "^7.0.0" } + datafusion-common = { version = "^7.0.0", features = ["pyarrow"] } + uuid = { version = "0.8", features = ["v4"] } +-mimalloc = { version = "*", default-features = false } ++mimalloc = { version = "*", optional = true, default-features = false } + + [lib] + name = "datafusion_python" +diff --git a/src/lib.rs b/src/lib.rs +index 977d9e8..ca1cd17 100644 +--- a/src/lib.rs ++++ b/src/lib.rs +@@ -15,6 +15,7 @@ + // specific language governing permissions and limitations + // under the License. 
+ ++#[cfg(feature = "mimalloc")] + use mimalloc::MiMalloc; + use pyo3::prelude::*; + +@@ -28,6 +29,7 @@ mod udaf; + mod udf; + pub mod utils; + ++#[cfg(feature = "mimalloc")] + #[global_allocator] + static GLOBAL: MiMalloc = MiMalloc; + diff --git a/watchdog-force-kqueue.patch b/watchdog-force-kqueue.patch index 4b7b6f1..dfc2460 100644 --- a/watchdog-force-kqueue.patch +++ b/watchdog-force-kqueue.patch @@ -0,0 +1,13 @@ +diff --git a/setup.py b/setup.py +index 072dfc8..cb9aa7a 100644 +--- a/setup.py ++++ b/setup.py +@@ -39,7 +39,7 @@ _apple_devices = ('appletv', 'iphone', 'ipod', 'ipad', 'watch') + is_macos = sys.platform == 'darwin' and not machine().lower().startswith(_apple_devices) + + ext_modules = [] +-if is_macos or os.getenv('FORCE_MACOS_MACHINE', '0') == '1': ++if False: + ext_modules = [ + Extension( + name='_watchdog_fsevents', diff --git a/poetry-overrides.nix b/poetry-overrides.nix index 275159c..408a037 100644 --- a/poetry-overrides.nix +++ b/poetry-overrides.nix @@ -16,7 +16,18 @@ self: super: }); }); - datafusion = super.datafusion.overridePythonAttrs (attrs: { + datafusion = super.datafusion.overridePythonAttrs (attrs: rec { + inherit (attrs) version; + src = pkgs.fetchFromGitHub { + owner = "datafusion-contrib"; + repo = "datafusion-python"; + rev = attrs.version; + sha256 = "sha256-IWqlY4Cfil3cyQqXm+X9ViRYLzmNaiM3+i/7EyV5CK4="; + }; + + patches = (attrs.patches or [ ]) + ++ lib.optionals stdenv.isDarwin [ ./patches/datafusion-macos.patch ]; + nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) ++ (with pkgs.rustPlatform; [ cargoSetupHook maturinBuildHook ]); @@ -24,9 +35,11 @@ self: super: ++ lib.optionals stdenv.isDarwin [ pkgs.libiconv ]; cargoDeps = pkgs.rustPlatform.fetchCargoTarball { - inherit (attrs) src; - sourceRoot = "${attrs.pname}-${attrs.version}"; - sha256 = "sha256-SHVJWbQROQVQ9qZDTSvHz/O9irCyEPgcmDowerMPYeI="; + inherit src patches; + sha256 = + if stdenv.isDarwin + then "sha256-qDXfSisgQ4qr8Sky0aNns8LldiHYs/N1cNatNlwEE18=" + else "sha256-bDuCbQYNai/mNrS2BqoW4qe7eLZcBhb7GhsFKn08G/U="; }; }); @@ -54,4 +67,10 @@ self: super: atpublic = super.atpublic.overridePythonAttrs (attrs: { nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) ++ [ self.pdm-pep517 ]; }); + + watchdog = super.watchdog.overrideAttrs (attrs: lib.optionalAttrs + (stdenv.isDarwin && lib.versionAtLeast attrs.version "2") + { + patches = (attrs.patches or [ ]) ++ [ ./patches/watchdog-force-kqueue.patch ]; + }); }
|
|
refactor: split up custom nix code; remove unused derivations
|
57dff1073c8ef060a831df6176b78879a85b512e
|
refactor
|
https://github.com/ibis-project/ibis/commit/57dff1073c8ef060a831df6176b78879a85b512e
|
split up custom nix code; remove unused derivations
|
diff --git a/ibis-docs-lint.yml b/ibis-docs-lint.yml index b009452..df5e25b 100644 --- a/ibis-docs-lint.yml +++ b/ibis-docs-lint.yml @@ -60,7 +60,7 @@ jobs: # run against the full shell.nix on push so it gets pushed to cachix - name: pre-commit checks - run: nix develop --ignore-environment --keep-going -c pre-commit run --all-files + run: nix develop '.#preCommit' --ignore-environment --keep-going -c pre-commit run --all-files benchmarks: runs-on: ubuntu-latest @@ -139,10 +139,10 @@ jobs: fetch-depth: 0 - name: build docs - run: nix develop -c mkdocs build --strict + run: nix develop '.#docs' -c mkdocs build --strict - name: verify internal links - run: nix develop -c just checklinks --offline --no-progress + run: nix develop --ignore-environment '.#links' -c just checklinks --offline --no-progress docs_push: runs-on: ubuntu-latest @@ -188,7 +188,8 @@ jobs: - name: build and push dev docs run: | - nix develop -c mike deploy --push --rebase --prefix docs --message 'docs(dev): ibis@${{ github.sha }}' dev + nix develop '.#docs' -c \\ + mike deploy --push --rebase --prefix docs --message 'docs(dev): ibis@${{ github.sha }}' dev simulate_release: runs-on: ubuntu-latest diff --git a/ibis-docs-release.yml b/ibis-docs-release.yml index 3911989..382a638 100644 --- a/ibis-docs-release.yml +++ b/ibis-docs-release.yml @@ -48,4 +48,5 @@ jobs: - name: build and push docs on tag run: | - nix develop -c mike deploy --push --rebase --update-aliases --prefix docs --message "docs(release): ibis@${GITHUB_REF_NAME}" "${GITHUB_REF_NAME}" latest + nix develop '.#docs' -c \\ + mike deploy --push --rebase --update-aliases --prefix docs --message "docs(release): ibis@${GITHUB_REF_NAME}" "${GITHUB_REF_NAME}" latest diff --git a/nix.yml b/nix.yml index 8077c73..3114636 100644 --- a/nix.yml +++ b/nix.yml @@ -49,6 +49,10 @@ jobs: - name: install nix uses: cachix/install-nix-action@v18 + with: + nix_path: nixpkgs=channel:nixos-unstable-small + extra_nix_config: | + access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} - name: setup cachix uses: cachix/cachix-action@v12 @@ -62,3 +66,14 @@ jobs: version='${{ matrix.python-version }}' nix build ".#ibis${version//./}" --fallback --keep-going --print-build-logs + + # build the whole dev shell when pushing to upstream, so that the cachix cache is populated + - name: nix build devShell + if: github.event_name == 'push' + run: | + set -euo pipefail + + version='${{ matrix.python-version }}' + host_system="$(nix eval --raw 'nixpkgs#stdenv.hostPlatform.system')" + flake=".#devShells.${host_system}.ibis${version//./}" + nix build "$flake" --fallback --keep-going --print-build-logs diff --git a/update-deps.yml b/update-deps.yml index cb84e1f..6aeea9f 100644 --- a/update-deps.yml +++ b/update-deps.yml @@ -37,7 +37,7 @@ jobs: - uses: cachix/install-nix-action@v18 with: extra_nix_config: | - access-tokens = github.com=${{ secrets.github_token }} + access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} - name: setup cachix uses: cachix/cachix-action@v12 diff --git a/dry_run.sh b/dry_run.sh index a490ac8..2f89c14 100644 --- a/dry_run.sh +++ b/dry_run.sh @@ -6,31 +6,35 @@ curdir="$PWD" worktree="$(mktemp -d)" branch="$(basename "$worktree")" -nix develop -c git worktree add "$worktree" +nix develop '.#release' -c git worktree add "$worktree" function cleanup() { cd "$curdir" || exit 1 - nix develop -c git worktree remove --force "$worktree" - nix develop -c git worktree prune - nix develop -c git branch -D "$branch" + nix develop '.#release' -c git worktree remove --force "$worktree" + 
nix develop '.#release' -c git worktree prune + nix develop '.#release' -c git branch -D "$branch" } trap cleanup EXIT ERR cd "$worktree" || exit 1 -nix develop -c node <<< 'console.log(JSON.stringify(require("./.releaserc.js")))' | - jq '.plugins |= [.[] | select(.[0] != "@semantic-release/github")]' > .releaserc.json +nix develop '.#release' -c node <<< 'console.log(JSON.stringify(require("./.releaserc.js")))' | + nix develop '.#release' -c jq '.plugins |= [.[] | select(.[0] != "@semantic-release/github")]' > .releaserc.json -nix develop -c git rm .releaserc.js - -nix develop -c git add .releaserc.json - -nix develop -c git commit -m 'test: semantic-release dry run' --no-verify --no-gpg-sign +nix develop '.#release' -c git rm .releaserc.js +nix develop '.#release' -c git add .releaserc.json +nix develop '.#release' -c git commit -m 'test: semantic-release dry run' --no-verify --no-gpg-sign +# If this is set then semantic-release will assume the release is running +# against a PR. +# +# Normally this would be fine, except that most of the release process that is +# useful to test is prevented from running, even in dry-run mode, so we `unset` +# this variable here and pass `--dry-run` ourselves unset GITHUB_ACTIONS -nix develop -c npx --yes \\ +nix develop '.#release' -c npx --yes \\ -p semantic-release \\ -p "@semantic-release/commit-analyzer" \\ -p "@semantic-release/release-notes-generator" \\ diff --git a/prepare.sh b/prepare.sh index e9c50bd..c9b8413 100644 --- a/prepare.sh +++ b/prepare.sh @@ -5,10 +5,11 @@ set -euo pipefail version="${1}" # set version -nix develop -c poetry version "$version" +nix develop '.#release' -c poetry version "$version" # build artifacts -nix develop -c poetry build +nix develop '.#release' -c poetry build # ensure that the built wheel has the correct version number -nix develop -c unzip -p "dist/ibis_framework-${version}-py3-none-any.whl" ibis/__init__.py | grep -q "__version__ = \\"$version\\"" +nix develop '.#release' -c unzip -p "dist/ibis_framework-${version}-py3-none-any.whl" ibis/__init__.py | \\ + nix develop '.#release' -c grep -q "__version__ = \\"$version\\"" diff --git a/publish.sh b/publish.sh index 5a29828..4c57334 100644 --- a/publish.sh +++ b/publish.sh @@ -2,4 +2,4 @@ set -euo pipefail -nix develop -c poetry publish +nix develop '.#release' -c poetry publish diff --git a/run.sh b/run.sh index f8c8681..e8fad84 100644 --- a/run.sh +++ b/run.sh @@ -2,7 +2,7 @@ set -euo pipefail -nix develop -c npx --yes \\ +nix develop '.#release' -c npx --yes \\ -p semantic-release \\ -p "@semantic-release/commit-analyzer" \\ -p "@semantic-release/release-notes-generator" \\ diff --git a/verify.sh b/verify.sh index c1d6bd3..547559e 100644 --- a/verify.sh +++ b/verify.sh @@ -5,13 +5,13 @@ set -euo pipefail dry_run="${1:-false}" # verify pyproject.toml -nix develop -c poetry check +nix develop '.#release' -c poetry check # verify that the lock file matches pyproject.toml # # the lock file might not be the most fresh, but that's okay: it need only be # consistent with pyproject.toml -nix develop -c poetry lock --check +nix develop '.#release' -c poetry lock --check # verify that we have a token available to push to pypi using set -u if [ "${dry_run}" = "false" ]; then diff --git a/flake.lock b/flake.lock index 181683d..1044529 100644 --- a/flake.lock +++ b/flake.lock @@ -77,11 +77,11 @@ ] }, "locked": { - "lastModified": 1671547822, - "narHash": "sha256-bSYKAFg4kCZqobqAmM6CYZgiOz6dNyCRp4rqLFXjzTE=", + "lastModified": 1671550109, + "narHash": 
"sha256-4Iixlro1t75ze8TqXvfz0HTZ9TOBav3vMTuwuYqsxRE=", "owner": "nix-community", "repo": "poetry2nix", - "rev": "6ae0cf69896dbd52c0ffd72f460c773367238e48", + "rev": "f18984f99c67654a1f9fbde463c88a319a2c843e", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 51a78b6..2e63ed1 100644 --- a/flake.nix +++ b/flake.nix @@ -18,255 +18,149 @@ poetry2nix = { url = "github:nix-community/poetry2nix"; - inputs.nixpkgs.follows = "nixpkgs"; - inputs.flake-utils.follows = "flake-utils"; + inputs = { + nixpkgs.follows = "nixpkgs"; + flake-utils.follows = "flake-utils"; + }; }; }; - outputs = - { self - , flake-utils - , gitignore - , nixpkgs - , poetry2nix - , ... - }: + outputs = { self, flake-utils, gitignore, nixpkgs, poetry2nix, ... }: { + overlays.default = nixpkgs.lib.composeManyExtensions [ + gitignore.overlay + poetry2nix.overlay + (import ./nix/overlay.nix) + ]; + } // flake-utils.lib.eachDefaultSystem ( + localSystem: let - backends = [ - "dask" - "datafusion" - "duckdb" - "pandas" - "polars" - "sqlite" + pkgs = import nixpkgs { + inherit localSystem; + overlays = [ self.overlays.default ]; + }; + inherit (pkgs) lib; + + backendDevDeps = with pkgs; [ + # impala UDFs + clang_12 + cmake + ninja + # snowflake + openssl + # backend test suite + docker-compose + # visualization + graphviz-nox + # duckdb + duckdb + # mysql + mariadb-client + # pyspark + openjdk17_headless + # postgres + postgresql + # sqlite + sqlite-interactive + ]; + shellHook = '' + export IBIS_TEST_DATA_DIRECTORY="$PWD/ci/ibis-testing-data" + + ${pkgs.rsync}/bin/rsync \\ + --chmod=Du+rwx,Fu+rw --archive --delete \\ + "${pkgs.ibisTestingData}/" \\ + "$IBIS_TEST_DATA_DIRECTORY" + + export TEMPDIR + TEMPDIR="$(python -c 'import tempfile; print(tempfile.gettempdir())')" + + # necessary for mkdocs + export PYTHONPATH=''${PWD}''${PYTHONPATH:+:}''${PYTHONPATH} + ''; + + preCommitDeps = with pkgs; [ + actionlint + git + just + nix-linter + nixpkgs-fmt + pre-commit + prettier + shellcheck + shfmt ]; - drv = - { poetry2nix - , python - , lib - , gitignoreSource - , graphviz-nox - , sqlite - , rsync - , ibisTestingData - }: poetry2nix.mkPoetryApplication rec { - inherit python; - - groups = [ ]; - checkGroups = [ "test" ]; - projectDir = gitignoreSource ./.; - preferWheels = true; - src = gitignoreSource ./.; - - overrides = [ - (import ./poetry-overrides.nix) - poetry2nix.defaultPoetryOverrides - ]; - - buildInputs = [ graphviz-nox sqlite ]; - - checkInputs = buildInputs; - - preCheck = '' - set -euo pipefail - - export IBIS_TEST_DATA_DIRECTORY="$PWD/ci/ibis-testing-data" - - ${rsync}/bin/rsync \\ - --chmod=Du+rwx,Fu+rw --archive --delete \\ - "${ibisTestingData}/" \\ - "$IBIS_TEST_DATA_DIRECTORY" - ''; - - checkPhase = '' - set -euo pipefail - - runHook preCheck - - pytest \\ - --numprocesses "$NIX_BUILD_CORES" \\ - --dist loadgroup \\ - -m '${lib.concatStringsSep " or " backends} or core' - - runHook postCheck - ''; - - doCheck = true; - pythonImportsCheck = [ "ibis" ] ++ map (backend: "ibis.backends.${backend}") backends; - }; + mkDevShell = name: env: pkgs.mkShell { + inherit name; + nativeBuildInputs = (with pkgs; [ + # python dev environment + env + # rendering release notes + changelog + glow + # used in the justfile + jq + yj + # linting + commitlint + lychee + # release automation + nodejs + # poetry executable + env.pkgs.poetry + ]) + ++ preCommitDeps + ++ backendDevDeps; + + inherit shellHook; + + PGPASSWORD = "postgres"; + MYSQL_PWD = "ibis"; + MSSQL_SA_PASSWORD = "1bis_Testing!"; + }; in - { - 
overlays.default = nixpkgs.lib.composeManyExtensions [ - gitignore.overlay - poetry2nix.overlay - (pkgs: _: { - ibisTestingData = pkgs.fetchFromGitHub { - owner = "ibis-project"; - repo = "testing-data"; - rev = "master"; - sha256 = "sha256-BZWi4kEumZemQeYoAtlUSw922p+R6opSWp/bmX0DjAo="; - }; - - mkPoetryEnv = groups: python: pkgs.poetry2nix.mkPoetryEnv { - inherit python groups; - preferWheels = true; - projectDir = pkgs.gitignoreSource ./.; - editablePackageSources = { ibis = pkgs.gitignoreSource ./ibis; }; - overrides = [ - (import ./poetry-overrides.nix) - pkgs.poetry2nix.defaultPoetryOverrides - ]; - }; - - mkPoetryDocsEnv = pkgs.mkPoetryEnv [ "docs" ]; - mkPoetryDevEnv = pkgs.mkPoetryEnv [ "dev" "test" ]; - mkPoetryFullDevEnv = pkgs.mkPoetryEnv [ "dev" "docs" "test" ]; + rec { + packages = { + inherit (pkgs) ibis38 ibis39 ibis310; - prettierTOML = pkgs.writeShellScriptBin "prettier" '' - ${pkgs.nodePackages.prettier}/bin/prettier \\ - --plugin-search-dir "${pkgs.nodePackages.prettier-plugin-toml}/lib" "$@" - ''; + default = pkgs.ibis310; - ibis38 = pkgs.callPackage drv { python = pkgs.python38; }; - ibis39 = pkgs.callPackage drv { python = pkgs.python39; }; - ibis310 = pkgs.callPackage drv { python = pkgs.python310; }; + inherit (pkgs) update-lock-files; + }; - ibisDevEnv38 = pkgs.mkPoetryDevEnv pkgs.python38; - ibisDevEnv39 = pkgs.mkPoetryDevEnv pkgs.python39; - ibisDevEnv310 = pkgs.mkPoetryDevEnv pkgs.python310; + devShells = rec { + ibis38 = mkDevShell "ibis38" pkgs.ibisDevEnv38; + ibis39 = mkDevShell "ibis39" pkgs.ibisDevEnv39; + ibis310 = mkDevShell "ibis310" pkgs.ibisDevEnv310; - ibisDevEnv = pkgs.ibisDevEnv310; + default = ibis310; - ibisDocsEnv38 = pkgs.mkPoetryDocsEnv pkgs.python38; - ibisDocsEnv39 = pkgs.mkPoetryDocsEnv pkgs.python39; - ibisDocsEnv310 = pkgs.mkPoetryDocsEnv pkgs.python310; - - ibisDocsEnv = pkgs.ibisDocsEnv310; - - ibisFullDevEnv38 = pkgs.mkPoetryFullDevEnv pkgs.python38; - ibisFullDevEnv39 = pkgs.mkPoetryFullDevEnv pkgs.python39; - ibisFullDevEnv310 = pkgs.mkPoetryFullDevEnv pkgs.python310; - - ibisFullDevEnv = pkgs.ibisFullDevEnv310; - - changelog = pkgs.writeShellApplication { - name = "changelog"; - runtimeInputs = [ pkgs.nodePackages.conventional-changelog-cli ]; - text = "conventional-changelog --config ./.conventionalcommits.js"; - }; - }) - ]; - } // flake-utils.lib.eachDefaultSystem ( - localSystem: - let - pkgs = import nixpkgs { - inherit localSystem; - overlays = [ self.overlays.default ]; + docs = pkgs.mkShell { + name = "docs"; + nativeBuildInputs = [ pkgs.ibisDocsEnv ]; + inherit shellHook; }; - inherit (pkgs) lib; - - backendDevDeps = with pkgs; [ - # impala UDFs - clang_12 - cmake - ninja - # snowflake - openssl - # backend test suite - docker-compose - # visualization - graphviz-nox - # duckdb - duckdb - # mysql - mariadb-client - # pyspark - openjdk17_headless - # postgres - postgresql - # sqlite - sqlite-interactive - ]; - mkDevShell = env: pkgs.mkShell { - nativeBuildInputs = (with pkgs; [ - # python dev environment - env - # rendering release notes - changelog - glow - # used in the justfile - jq - yj - # linting - commitlint - lychee - # release automation - nodejs - # poetry executable - env.pkgs.poetry - # pre-commit deps - actionlint - git - just - nix-linter - nixpkgs-fmt - pre-commit - prettierTOML - shellcheck - shfmt - ]) - # backend development dependencies - ++ backendDevDeps; - - shellHook = '' - export IBIS_TEST_DATA_DIRECTORY="$PWD/ci/ibis-testing-data" - ${pkgs.rsync}/bin/rsync \\ - --chmod=Du+rwx,Fu+rw --archive --delete 
\\ - "${pkgs.ibisTestingData}/" \\ - "$IBIS_TEST_DATA_DIRECTORY" - - export TEMPDIR - TEMPDIR="$(python -c 'import tempfile; print(tempfile.gettempdir())')" - - # necessary for mkdocs - export PYTHONPATH=''${PWD}''${PYTHONPATH:+:}''${PYTHONPATH} - ''; - - PGPASSWORD = "postgres"; - MYSQL_PWD = "ibis"; - MSSQL_SA_PASSWORD = "1bis_Testing!"; + preCommit = pkgs.mkShell { + name = "preCommit"; + nativeBuildInputs = [ pkgs.ibisSmallDevEnv ] ++ preCommitDeps; }; - in - rec { - packages = rec { - ibis38 = pkgs.ibis38; - ibis39 = pkgs.ibis39; - ibis310 = pkgs.ibis310; - default = ibis310; - - update-lock-files = pkgs.writeShellApplication { - name = "update-lock-files"; - runtimeInputs = with pkgs; [ poetry ]; - - text = '' - export PYTHONHASHSEED=0 - TOP="''${PWD}" - - poetry lock --no-update - poetry export --with dev --with test --with docs --without-hashes --no-ansi > "''${TOP}/requirements.txt" - ''; - }; + links = pkgs.mkShell { + name = "links"; + nativeBuildInputs = with pkgs; [ just lychee ]; }; - devShells = rec { - ibis38 = mkDevShell pkgs.ibisFullDevEnv38; - ibis39 = mkDevShell pkgs.ibisFullDevEnv39; - ibis310 = mkDevShell pkgs.ibisFullDevEnv310; - default = ibis310; + release = pkgs.mkShell { + name = "release"; + nativeBuildInputs = with pkgs; [ + git + ibisSmallDevEnv.pkgs.poetry + nodejs + unzip + gnugrep + ]; }; - } - ); + }; + } + ); } diff --git a/ibis.nix b/ibis.nix index 59dfea4..ec83494 100644 --- a/ibis.nix +++ b/ibis.nix @@ -0,0 +1,56 @@ +{ poetry2nix +, python3 +, lib +, gitignoreSource +, graphviz-nox +, sqlite +, rsync +, ibisTestingData +}: +let + backends = [ "dask" "datafusion" "duckdb" "pandas" "polars" "sqlite" ]; +in +poetry2nix.mkPoetryApplication rec { + python = python3; + groups = [ ]; + checkGroups = [ "test" ]; + projectDir = gitignoreSource ../.; + src = gitignoreSource ../.; + extras = backends; + overrides = [ + (import ../poetry-overrides.nix) + poetry2nix.defaultPoetryOverrides + ]; + preferWheels = true; + + buildInputs = [ graphviz-nox sqlite ]; + checkInputs = buildInputs; + + preCheck = '' + set -euo pipefail + + export IBIS_TEST_DATA_DIRECTORY="$PWD/ci/ibis-testing-data" + + ${rsync}/bin/rsync \\ + --chmod=Du+rwx,Fu+rw --archive --delete \\ + "${ibisTestingData}/" \\ + "$IBIS_TEST_DATA_DIRECTORY" + ''; + + checkPhase = '' + set -euo pipefail + + runHook preCheck + + pytest \\ + --numprocesses "$NIX_BUILD_CORES" \\ + --dist loadgroup \\ + -m '${lib.concatStringsSep " or " backends} or core' + + runHook postCheck + ''; + + doCheck = true; + + pythonImportsCheck = [ "ibis" ] ++ (map (backend: "ibis.backends.${backend}") backends); +} diff --git a/overlay.nix b/overlay.nix index b5b6187..cc1ce28 100644 --- a/overlay.nix +++ b/overlay.nix @@ -0,0 +1,71 @@ +pkgs: _: +let + mkPoetryEnv = { groups, python, extras ? 
[ "*" ] }: pkgs.poetry2nix.mkPoetryEnv { + inherit python groups extras; + projectDir = pkgs.gitignoreSource ../.; + editablePackageSources = { ibis = pkgs.gitignoreSource ../ibis; }; + overrides = [ + (import ../poetry-overrides.nix) + pkgs.poetry2nix.defaultPoetryOverrides + ]; + preferWheels = true; + }; + + mkPoetryDevEnv = python: mkPoetryEnv { + inherit python; + groups = [ "dev" "docs" "test" ]; + }; +in +{ + ibisTestingData = pkgs.fetchFromGitHub { + owner = "ibis-project"; + repo = "testing-data"; + rev = "master"; + sha256 = "sha256-BZWi4kEumZemQeYoAtlUSw922p+R6opSWp/bmX0DjAo="; + }; + + rustNightly = pkgs.rust-bin.selectLatestNightlyWith (toolchain: toolchain.minimal); + + prettier = pkgs.writeShellApplication { + name = "prettier"; + runtimeInputs = [ ]; + text = '' + ${pkgs.nodePackages.prettier}/bin/prettier \\ + --plugin-search-dir "${pkgs.nodePackages.prettier-plugin-toml}/lib" "$@" + ''; + }; + + ibis38 = pkgs.python38Packages.callPackage ./ibis.nix { }; + ibis39 = pkgs.python39Packages.callPackage ./ibis.nix { }; + ibis310 = pkgs.python310Packages.callPackage ./ibis.nix { }; + + ibisDevEnv38 = mkPoetryDevEnv pkgs.python38; + ibisDevEnv39 = mkPoetryDevEnv pkgs.python39; + ibisDevEnv310 = mkPoetryDevEnv pkgs.python310; + + ibisSmallDevEnv = mkPoetryEnv { + python = pkgs.python310; + groups = [ "dev" ]; + extras = [ ]; + }; + + ibisDocsEnv = mkPoetryEnv { + python = pkgs.python310; + groups = [ "docs" ]; + }; + + changelog = pkgs.writeShellApplication { + name = "changelog"; + runtimeInputs = [ pkgs.nodePackages.conventional-changelog-cli ]; + text = "conventional-changelog --config ./.conventionalcommits.js"; + }; + + update-lock-files = pkgs.writeShellApplication { + name = "update-lock-files"; + runtimeInputs = [ pkgs.poetry ]; + text = '' + poetry lock --no-update + poetry export --with dev --with test --with docs --without-hashes --no-ansi > requirements.txt + ''; + }; +}
|
|
build: remove no-cache flag due to repeated downloads
|
12504df328dd627ac5cd7fcbebe7bfc52bbc06c7
|
build
|
https://github.com/ibis-project/ibis/commit/12504df328dd627ac5cd7fcbebe7bfc52bbc06c7
|
remove no-cache flag due to repeated downloads
|
diff --git a/update-lock-files.sh b/update-lock-files.sh index a0e9e13..ecb9574 100644 --- a/update-lock-files.sh +++ b/update-lock-files.sh @@ -8,6 +8,6 @@ export PYTHONHASHSEED=0 TOP="${1:-$(dirname "$(dirname "$(readlink -f "$0")")")}" pushd "${TOP}" > /dev/null || exit 1 -poetry lock --no-update --no-cache +poetry lock --no-update poetry export --with dev --with test --with docs --without-hashes --no-ansi > "${TOP}/requirements.txt" popd > /dev/null || exit 1
|
|
test(duckdb): add test for null short-circuiting udf
|
0972f6aac43eb728141cedb7c4b424621914ffa6
|
test
|
https://github.com/ibis-project/ibis/commit/0972f6aac43eb728141cedb7c4b424621914ffa6
|
add test for null short-circuiting udf
|
diff --git a/test_udf.py b/test_udf.py index 4e36ce9..4755924 100644 --- a/test_udf.py +++ b/test_udf.py @@ -1,6 +1,7 @@ from __future__ import annotations import pytest +from pytest import param from ibis import udf @@ -85,3 +86,21 @@ def test_builtin_agg(con, func): ).scalar() assert con.execute(expr) == expected + + [email protected] +def dont_intercept_null(x: int) -> int: + assert x is not None + return x + + [email protected]( + ("expr", "expected"), + [ + param(dont_intercept_null(5), 5, id="notnull"), + param(dont_intercept_null(None), None, id="null"), + param(dont_intercept_null(5) + dont_intercept_null(None), None, id="mixed"), + ], +) +def test_dont_intercept_null(con, expr, expected): + assert con.execute(expr) == expected
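For context on what "short-circuiting" means in this test, here is a minimal runnable sketch against an in-memory DuckDB connection (`add_one` is an illustrative name, not part of the commit): with `@udf.scalar.python`, a NULL input yields NULL without the Python body ever running.

```python
import ibis
from ibis import udf


@udf.scalar.python
def add_one(x: int) -> int:
    # never invoked with None: NULL inputs short-circuit to NULL
    return x + 1


con = ibis.duckdb.connect()  # in-memory DuckDB
assert con.execute(add_one(5)) == 6
assert con.execute(add_one(None)) is None  # function body was skipped entirely
```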
|
|
feat: support for 'generation v2' format for overflow correction in generations.
This leads to greater correctness of generation numbers when dealing with certain kinds of commit-graphs.
|
101dec0adb2def4016f01a102de19a47da6752cc
|
feat
|
https://github.com/Byron/gitoxide/commit/101dec0adb2def4016f01a102de19a47da6752cc
|
support for 'generation v2' format for overflow correction in generations.
This leads to greater correctness of generation numbers when dealing with certain kinds of commit-graphs.
|
diff --git a/Cargo.lock b/Cargo.lock index e6c6525..f63d4d9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1451,6 +1451,7 @@ dependencies = [ "bstr", "document-features", "gix-chunk", + "gix-date 0.5.1", "gix-features 0.30.0", "gix-hash 0.11.2", "gix-testtools", diff --git a/Cargo.toml b/Cargo.toml index f4129c5..19be6f5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,6 +31,7 @@ document-features = { version = "0.2.0", optional = true } [dev-dependencies] gix-testtools = { path = "../tests/tools" } +gix-date = { path = "../gix-date" } [package.metadata.docs.rs] all-features = true diff --git a/mod.rs b/mod.rs index d4099fd..d75c4bc 100644 --- a/mod.rs +++ b/mod.rs @@ -1,25 +1,78 @@ -use gix_commitgraph::Graph; - -use crate::{check_common, inspect_refs, make_readonly_repo}; +use crate::{check_common, graph_and_expected, graph_and_expected_named}; #[test] -fn single_parent() -> crate::Result { - let repo_dir = make_readonly_repo("single_parent.sh"); - let refs = inspect_refs(&repo_dir, &["parent", "child"]); - let cg = Graph::from_info_dir(repo_dir.join(".git").join("objects").join("info"))?; +fn single_parent() { + let (cg, refs) = graph_and_expected("single_parent.sh", &["parent", "child"]); check_common(&cg, &refs); assert_eq!(cg.commit_at(refs["parent"].pos()).generation(), 1); assert_eq!(cg.commit_at(refs["child"].pos()).generation(), 2); +} + +#[test] +fn single_commit_huge_dates_generation_v2_also_do_not_allow_huge_dates() { + let (cg, refs) = graph_and_expected_named("single_commit_huge_dates.sh", "v2", &["HEAD"]); + let info = &refs["HEAD"]; + let actual = cg.commit_by_id(info.id).expect("present"); + assert_eq!( + actual.committer_timestamp(), + 1, + "overflow happened, can't represent huge dates" + ); + assert_eq!( + info.time.seconds, 68719476737, + "this is the value we would want to see, but it's not possible in V2 either, as that is just about generations" + ); + assert_eq!(actual.generation(), 1, "generations are fine though"); +} - Ok(()) +#[test] +fn single_commit_huge_dates_overflow_v1() { + let (cg, refs) = graph_and_expected_named("single_commit_huge_dates.sh", "v1", &["HEAD"]); + let info = &refs["HEAD"]; + let actual = cg.commit_by_id(info.id).expect("present"); + assert_eq!(actual.committer_timestamp(), 1, "overflow happened"); + assert_eq!( + info.time.seconds, 68719476737, + "this is the value we would want to see, but it's not possible in V1" + ); + assert_eq!(actual.generation(), 1, "generations are fine though"); } #[test] -fn octupus_merges() -> crate::Result { - let repo_dir = make_readonly_repo("octopus_merges.sh"); - let refs = inspect_refs( - &repo_dir, +fn single_commit_future_64bit_dates_work() { + let (cg, refs) = graph_and_expected_named("single_commit_huge_dates.sh", "max-date", &["HEAD"]); + let info = &refs["HEAD"]; + let actual = cg.commit_by_id(info.id).expect("present"); + assert_eq!( + actual.committer_timestamp(), + info.time.seconds, + "this is close the the highest representable value in the graph, like year 2500, so we are good for longer than I should care about" + ); + assert_eq!(actual.generation(), 1); +} + +#[test] +fn generation_numbers_overflow_is_handled_in_chained_graph() { + let names = ["extra", "old-2", "future-2", "old-1", "future-1"]; + let (cg, mut refs) = graph_and_expected("generation_number_overflow.sh", &names); + for (r, expected) in names + .iter() + .map(|n| refs.remove(n.to_owned()).expect("present")) + .zip((1..=5).rev()) + { + assert_eq!( + cg.commit_by_id(r.id).expect("present").generation(), + expected, + "actually, 
this test seems to have valid generation numbers from the get-go. How to repro the actual issue?" + ); + } +} + +#[test] +fn octupus_merges() { + let (cg, refs) = graph_and_expected( + "octopus_merges.sh", &[ "root", "parent1", @@ -30,7 +83,6 @@ fn octupus_merges() -> crate::Result { "four_parents", ], ); - let cg = Graph::at(repo_dir.join(".git").join("objects").join("info"))?; check_common(&cg, &refs); assert_eq!(cg.commit_at(refs["root"].pos()).generation(), 1); @@ -40,32 +92,22 @@ fn octupus_merges() -> crate::Result { assert_eq!(cg.commit_at(refs["parent4"].pos()).generation(), 2); assert_eq!(cg.commit_at(refs["three_parents"].pos()).generation(), 3); assert_eq!(cg.commit_at(refs["four_parents"].pos()).generation(), 3); - - Ok(()) } #[test] -fn single_commit() -> crate::Result { - let repo_dir = make_readonly_repo("single_commit.sh"); - let refs = inspect_refs(&repo_dir, &["commit"]); - let cg = gix_commitgraph::at(repo_dir.join(".git").join("objects").join("info"))?; +fn single_commit() { + let (cg, refs) = graph_and_expected("single_commit.sh", &["commit"]); check_common(&cg, &refs); assert_eq!(cg.commit_at(refs["commit"].pos()).generation(), 1); - - Ok(()) } #[test] -fn two_parents() -> crate::Result { - let repo_dir = make_readonly_repo("two_parents.sh"); - let refs = inspect_refs(&repo_dir, &["parent1", "parent2", "child"]); - let cg = Graph::from_info_dir(repo_dir.join(".git").join("objects").join("info"))?; +fn two_parents() { + let (cg, refs) = graph_and_expected("two_parents.sh", &["parent1", "parent2", "child"]); check_common(&cg, &refs); assert_eq!(cg.commit_at(refs["parent1"].pos()).generation(), 1); assert_eq!(cg.commit_at(refs["parent2"].pos()).generation(), 1); assert_eq!(cg.commit_at(refs["child"].pos()).generation(), 2); - - Ok(()) } diff --git a/commitgraph.rs b/commitgraph.rs index 27e36c3..bda9c57 100644 --- a/commitgraph.rs +++ b/commitgraph.rs @@ -8,12 +8,13 @@ use std::{ }; use gix_commitgraph::{Graph, Position as GraphPosition}; - -type Result = std::result::Result<(), Box<dyn std::error::Error>>; +use gix_testtools::scripted_fixture_read_only; mod access; pub fn check_common(cg: &Graph, expected: &HashMap<String, RefInfo, impl BuildHasher>) { + cg.verify_integrity(|_| Ok::<_, std::convert::Infallible>(())) + .expect("graph is valid"); assert_eq!( usize::try_from(cg.num_commits()).expect("an architecture able to hold 32 bits of integer"), expected.len() @@ -39,6 +40,7 @@ pub fn check_common(cg: &Graph, expected: &HashMap<String, RefInfo, impl BuildHa let commit = cg.commit_at(ref_info.pos()); assert_eq!(commit.id(), ref_info.id()); + assert_eq!(commit.committer_timestamp(), ref_info.time.seconds); assert_eq!(commit.root_tree_id(), ref_info.root_tree_id()); assert_eq!( commit.parent1().expect("failed to access commit's parent1"), @@ -59,13 +61,29 @@ pub fn check_common(cg: &Graph, expected: &HashMap<String, RefInfo, impl BuildHa ); } -use gix_testtools::scripted_fixture_read_only; -pub fn make_readonly_repo(script_path: &str) -> std::path::PathBuf { - scripted_fixture_read_only(script_path).expect("script succeeds all the time") +pub fn graph_and_expected( + script_path: &str, + refs: &[&'static str], +) -> (gix_commitgraph::Graph, HashMap<String, RefInfo>) { + graph_and_expected_named(script_path, "", refs) +} + +pub fn graph_and_expected_named( + script_path: &str, + name: &str, + refs: &[&'static str], +) -> (gix_commitgraph::Graph, HashMap<String, RefInfo>) { + let repo_dir = scripted_fixture_read_only(script_path) + .expect("script succeeds all the time") + 
.join(name); + let expected = inspect_refs(&repo_dir, refs); + let cg = Graph::from_info_dir(repo_dir.join(".git").join("objects").join("info")).expect("graph present and valid"); + (cg, expected) } pub struct RefInfo { id: gix_hash::ObjectId, + pub time: gix_date::Time, parent_ids: Vec<gix_hash::ObjectId>, pos: GraphPosition, root_tree_id: gix_hash::ObjectId, @@ -89,13 +107,13 @@ impl RefInfo { } } -pub fn inspect_refs(repo_dir: impl AsRef<Path>, refs: &[&'static str]) -> HashMap<String, RefInfo> { +fn inspect_refs(repo_dir: impl AsRef<Path>, refs: &[&'static str]) -> HashMap<String, RefInfo> { let output = Command::new("git") .arg("-C") .arg(repo_dir.as_ref()) .arg("show") .arg("--no-patch") - .arg("--pretty=format:%S %H %T %P") + .arg("--pretty=format:%S %H %T %ct %P") .args(refs) .arg("--") .env_remove("GIT_DIR") @@ -111,7 +129,8 @@ pub fn inspect_refs(repo_dir: impl AsRef<Path>, refs: &[&'static str]) -> HashMa parts[0].to_string(), gix_hash::ObjectId::from_hex(parts[1].as_bytes()).expect("40 bytes hex"), gix_hash::ObjectId::from_hex(parts[2].as_bytes()).expect("40 bytes hex"), - parts[3..] + gix_date::Time::new(parts[3].parse().expect("valid stamp"), 0), + parts[4..] .iter() .map(|x| gix_hash::ObjectId::from_hex(x.as_bytes()).expect("40 bytes hex")) .collect(), @@ -132,13 +151,14 @@ pub fn inspect_refs(repo_dir: impl AsRef<Path>, refs: &[&'static str]) -> HashMa infos .iter() .cloned() - .map(|(name, id, root_tree_id, parent_ids)| { + .map(|(name, id, root_tree_id, time, parent_ids)| { ( name, RefInfo { id, parent_ids, root_tree_id, + time, pos: get_pos(&id), }, ) diff --git a/generation_number_overflow.tar.xz b/generation_number_overflow.tar.xz index 0ebe803..f22885e 100644 --- a/generation_number_overflow.tar.xz +++ b/generation_number_overflow.tar.xz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:266655a2777562f495bfe6a6f9d57bef7d13fe3ff012a9a97402ca7f8801dccf +size 12504 diff --git a/octopus_merges.tar.xz b/octopus_merges.tar.xz index aabf4e5..548c204 100644 --- a/octopus_merges.tar.xz +++ b/octopus_merges.tar.xz @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:fdb7b214315cabdf81173aed5530c2030d3d4f5f2888ebc194f6d1268fca685a -size 11028 +oid sha256:e52a2e28465e3ac6b64cc7d9dac2486a216a9d99175e4ade52f68ff2602ea108 +size 11104 diff --git a/single_commit_huge_dates.tar.xz b/single_commit_huge_dates.tar.xz index d101b84..bc06ddc 100644 --- a/single_commit_huge_dates.tar.xz +++ b/single_commit_huge_dates.tar.xz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39cd8603f0e58ff9cf05173f2edcd9446128461bb647d8045e6d73c86205b141 +size 10900 diff --git a/single_parent_huge_dates.tar.xz b/single_parent_huge_dates.tar.xz index 4b3a5d0..1f96938 100644 --- a/single_parent_huge_dates.tar.xz +++ b/single_parent_huge_dates.tar.xz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:466157da5dcaae21d46aa5a1aa09e72e0c545eafae1c4513cfa75ad94115062f +size 10128 diff --git a/generation_number_overflow.sh b/generation_number_overflow.sh index 0cc44c6..bc694a1 100644 --- a/generation_number_overflow.sh +++ b/generation_number_overflow.sh @@ -0,0 +1,47 @@ +#!/bin/bash +set -eu -o pipefail + +function tick () { + if test -z "${tick+set}" + then + tick=1112911993 + else + tick=$(($tick + 60)) + fi + GIT_COMMITTER_DATE="$tick -0700" + GIT_AUTHOR_DATE="$tick -0700" + export GIT_COMMITTER_DATE GIT_AUTHOR_DATE +} + +tick +function commit() { + local message=${1:?first argument is the commit message} + local date=${2:-} + 
local file="$message.t" + echo "$1" > "$file" + git add -- "$file" + if [ -n "$date" ]; then + export GIT_COMMITTER_DATE="$date" + else + tick + fi + git commit -m "$message" + git tag "$message" +} + +# adapted from git/t/t5318 'lower layers have overflow chunk' +UNIX_EPOCH_ZERO="@0 +0000" +FUTURE_DATE="@4147483646 +0000" + +git init +git config commitGraph.generationVersion 2 + +commit future-1 "$FUTURE_DATE" +commit old-1 "$UNIX_EPOCH_ZERO" +git commit-graph write --reachable +commit future-2 "$FUTURE_DATE" +commit old-2 "$UNIX_EPOCH_ZERO" +git commit-graph write --reachable --split=no-merge +commit extra +# this makes sure it's actually in chain format. +git commit-graph write --reachable --split=no-merge diff --git a/single_commit_huge_dates.sh b/single_commit_huge_dates.sh index 893869c..ff626a6 100644 --- a/single_commit_huge_dates.sh +++ b/single_commit_huge_dates.sh @@ -0,0 +1,20 @@ +#!/bin/bash +set -eu -o pipefail + +function setup_repo() { + local version=${1:?need generation version} + local time=${2:?timestamp seconds since unix epoch} + git init -q + + # one past the max 32bit date git can represent + export GIT_COMMITTER_DATE="@${time} +0000" + git config commitGraph.generationVersion ${version} + + git commit -q --allow-empty -m c1 + + git commit-graph write --no-progress --reachable +} + +(mkdir v1 && cd v1 && setup_repo 1 68719476737) # the year 4000 something (overflows in graph) +(mkdir v2 && cd v2 && setup_repo 2 68719476737) +(mkdir max-date && cd max-date && setup_repo 1 17147483646) # the year 2500ish
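As background for these fixtures, a minimal sketch (not gitoxide's implementation) of what a generation number is: the v1 scheme assigns each commit one more than the maximum generation of its parents, with parentless commits at 1; v2 additionally stores corrected committer-date offsets, which is why the huge-date fixtures above exercise it.

```python
def generation_numbers(parents: dict[str, list[str]]) -> dict[str, int]:
    """Topological generation numbers: gen(c) = 1 + max(gen(p) for parents p)."""
    gen: dict[str, int] = {}

    def visit(commit: str) -> int:
        if commit not in gen:
            ps = parents.get(commit, [])
            gen[commit] = 1 + max(map(visit, ps), default=0)
        return gen[commit]

    for commit in parents:
        visit(commit)
    return gen


# mirrors the single_parent fixture: parent at generation 1, child at 2
assert generation_numbers({"parent": [], "child": ["parent"]}) == {"parent": 1, "child": 2}
```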
|
|
docs: better comment for `_stopAnimation` method
|
148b7691ced2fb4a7bde53910b06fdcecf56d3fb
|
docs
|
https://github.com/pmndrs/react-spring/commit/148b7691ced2fb4a7bde53910b06fdcecf56d3fb
|
better comment for `_stopAnimation` method
|
diff --git a/Controller.ts b/Controller.ts index a97e08a..4b9b00b 100644 --- a/Controller.ts +++ b/Controller.ts @@ -605,7 +605,12 @@ export class Controller<State extends Indexable = any> { return this } - // Stop an animation by its key + /** + * Stop an animation by its key. + * + * This mutates the `timestamps.to[key]`, `props.to[key]`, and `animations[key]` properties. + * Notably, it does *not* mutate the `configs[key]` or `animated[key]` properties. + */ private _stopAnimation(key: string, isNew?: boolean) { const animated = this.animated[key] if (!animated) return
|
|
perf(core): don't propagate serialization context to hidden relations (#3592)
Co-authored-by: Wybren Kortstra <[email protected]>
|
e706ba276159a547dcfa855801ea7e46abf13212
|
perf
|
https://github.com/mikro-orm/mikro-orm/commit/e706ba276159a547dcfa855801ea7e46abf13212
|
don't propagate serialization context to hidden relations (#3592)
Co-authored-by: Wybren Kortstra <[email protected]>
|
diff --git a/EntityTransformer.ts b/EntityTransformer.ts index 2a5ad3d..0c45e2b 100644 --- a/EntityTransformer.ts +++ b/EntityTransformer.ts @@ -6,6 +6,14 @@ import type { Platform } from '../platforms'; import { Utils } from '../utils/Utils'; import { ReferenceType } from '../enums'; +function isVisible<T>(meta: EntityMetadata<T>, propName: string, ignoreFields: string[]): boolean { + const prop = meta.properties[propName]; + const visible = prop && !prop.hidden; + const prefixed = prop && !prop.primary && propName.startsWith('_'); // ignore prefixed properties, if it's not a PK + + return visible && !prefixed && !ignoreFields.includes(propName); +} + /** * Helper that allows to keep track of where we are currently at when serializing complex entity graph with cycles. * Before we process a property, we call `visit` that checks if it is not a cycle path (but allows to pass cycles that @@ -52,21 +60,25 @@ export class SerializationContext<T> { /** * When initializing new context, we need to propagate it to the whole entity graph recursively. */ - static propagate(root: SerializationContext<AnyEntity>, entity: AnyEntity): void { + static propagate(root: SerializationContext<AnyEntity>, entity: AnyEntity, raw: boolean): void { root.register(entity); + const wrapped = helper(entity); + const meta = wrapped.__meta; const items: AnyEntity[] = []; - Object.keys(entity).forEach(key => { - if (Utils.isEntity(entity[key], true)) { - items.push(entity[key]); - } else if (Utils.isCollection(entity[key])) { - items.push(...(entity[key] as Collection<any>).getItems(false)); - } - }); + Object.keys(entity) + .filter(prop => raw ? meta.properties[prop] : isVisible(meta, prop, [])) + .forEach(key => { + if (Utils.isEntity(entity[key], true)) { + items.push(entity[key]); + } else if (Utils.isCollection(entity[key])) { + items.push(...(entity[key] as Collection<any>).getItems(false)); + } + }); items .filter(item => !item.__helper!.__serializationContext.root) - .forEach(item => this.propagate(root, item)); + .forEach(item => this.propagate(root, item, raw)); } private isMarkedAsPopulated(prop: string): boolean { @@ -106,7 +118,7 @@ export class EntityTransformer { if (!wrapped.__serializationContext.root) { const root = new SerializationContext<T>(wrapped.__serializationContext.populate ?? []); - SerializationContext.propagate(root, entity); + SerializationContext.propagate(root, entity, raw); contextCreated = true; } @@ -132,7 +144,7 @@ export class EntityTransformer { } [...keys] - .filter(prop => raw ? meta.properties[prop] : this.isVisible<T>(meta, prop, ignoreFields)) + .filter(prop => raw ? 
meta.properties[prop] : isVisible<T>(meta, prop, ignoreFields)) .map(prop => { const cycle = root.visit(meta.className, prop); @@ -176,14 +188,6 @@ export class EntityTransformer { return ret; } - private static isVisible<T>(meta: EntityMetadata<T>, propName: string, ignoreFields: string[]): boolean { - const prop = meta.properties[propName]; - const visible = prop && !prop.hidden; - const prefixed = prop && !prop.primary && propName.startsWith('_'); // ignore prefixed properties, if it's not a PK - - return visible && !prefixed && !ignoreFields.includes(propName); - } - private static propertyName<T>(meta: EntityMetadata<T>, prop: keyof T & string, platform?: Platform): string { if (meta.properties[prop].serializedName) { return meta.properties[prop].serializedName as keyof T & string; diff --git a/entities.ts b/entities.ts index 9a57535..a6f2479 100644 --- a/entities.ts +++ b/entities.ts @@ -0,0 +1,98 @@ +import { + Collection, + Entity, + IdentifiedReference, + ManyToMany, + ManyToOne, + OneToMany, + PrimaryKey, + Property, +} from '@mikro-orm/core'; + +@Entity() +export class Project { + + @PrimaryKey() + id!: number; + + @Property() + name!: string; + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + @OneToMany(() => Filter, filters => filters.project) + filters = new Collection<Filter>(this); + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + @OneToMany(() => Risk, e => e.project) + risks = new Collection<Risk>(this); + +} + +@Entity() +export class Risk { + + @PrimaryKey() + id!: number; + + @Property() + title!: string; + + @ManyToOne(() => Project, { serializer: p => p.id, wrappedReference: true }) + project!: IdentifiedReference<Project>; + + @ManyToMany({ + // eslint-disable-next-line @typescript-eslint/no-use-before-define + entity: () => FilterValue, + pivotTable: 'risk_filter_values', + joinColumn: 'risk_id', + inverseJoinColumn: 'filter_value_id', + }) + filterValues = new Collection<FilterValue>(this); + +} + +@Entity() +export class Filter { + + @PrimaryKey() + id!: number; + + @Property() + name!: string; + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + @OneToMany(() => FilterValue, values => values.filter) + values = new Collection<FilterValue>(this); + + @ManyToOne(() => Project, { + serializer: p => p.id, + wrappedReference: true, + onDelete: 'cascade', + }) + project!: IdentifiedReference<Project>; + +} + +@Entity() +export class FilterValue { + + @PrimaryKey() + id!: number; + + @Property({ length: 16000 }) + value!: string; + + @ManyToOne(() => Filter, { + serializer: f => f.id, + wrappedReference: true, + onDelete: 'cascade', + }) + filter!: IdentifiedReference<Filter>; + + @ManyToMany(() => Risk, risk => risk.filterValues, { + hidden: true, + owner: false, + }) + risks = new Collection<Risk>(this); + +} diff --git a/factories.ts b/factories.ts index 5f89295..8a740fa 100644 --- a/factories.ts +++ b/factories.ts @@ -0,0 +1,52 @@ +import { Filter, FilterValue, Project, Risk } from './entities'; +import { Factory, Faker } from '@mikro-orm/seeder'; + +export class ProjectFactory extends Factory<Project> { + + model = Project; + + definition(faker: Faker): Partial<Project> { + return { + name: faker.company.name(), + }; + } + +} + +export class RiskFactory extends Factory<Risk> { + + model = Risk; + + definition(faker: Faker): Partial<Risk> { + return { + title: faker.internet.domainWord(), + }; + } + +} + +export class FilterFactory extends Factory<Filter> { + + model = Filter; + + definition(faker: 
Faker): Partial<Filter> { + return { + name: faker.word.noun(), + }; + } + +} + +export class FilterValueFactory extends Factory<FilterValue> { + + model = FilterValue; + + definition(faker: Faker): Partial<FilterValue> { + return { + value: faker.word.noun(), + }; + } + +} + + diff --git a/seeder.ts b/seeder.ts index d65b32c..d7d4622 100644 --- a/seeder.ts +++ b/seeder.ts @@ -0,0 +1,23 @@ +import { EntityManager } from '@mikro-orm/core'; +import { Seeder } from '@mikro-orm/seeder'; +import { FilterFactory, FilterValueFactory, ProjectFactory, RiskFactory } from './factories'; + +export class DatabaseSeeder extends Seeder { + + async run(em: EntityManager): Promise<void> { + const NUM_FILTER_VALUES = 6; + const filters = new FilterFactory(em).each(entity => { + entity.values.set(new FilterValueFactory(em).make(NUM_FILTER_VALUES)); + }).make(5); + + new ProjectFactory(em).makeOne({ + filters, + risks: new RiskFactory(em).each(risk => { + risk.filterValues.set(filters.map(filter => filter.values.getItems()[Math.floor(Math.random() * NUM_FILTER_VALUES)])); + }).make(100), + }); + + await em.flush(); + } + +} diff --git a/serializing-nested-entities.test.ts b/serializing-nested-entities.test.ts index 0019d58..131eb00 100644 --- a/serializing-nested-entities.test.ts +++ b/serializing-nested-entities.test.ts @@ -0,0 +1,39 @@ +import { MikroORM } from '@mikro-orm/better-sqlite'; +import { Filter, FilterValue, Project, Risk } from './entities'; +import { DatabaseSeeder } from './seeder'; + +let orm: MikroORM; + +beforeAll(async () => { + orm = await MikroORM.init({ + entities: [Project, Risk, Filter, FilterValue], + dbName: ':memory:', + type: 'better-sqlite', + }); + await orm.schema.createSchema(); + const seeder = new DatabaseSeeder(); + await seeder.run(orm.em); + orm.em.clear(); +}); + +afterAll(() => orm.close(true)); + +test('perf: serialize nested entities', async () => { + const risks = await orm.em.find(Risk, {}, { populate: true }); + const project = await orm.em.findOneOrFail(Project, { id: 1 }); + + // Serialize a collection of 150 entities + console.time('perf: serialize risks'); + const stringRisks1 = JSON.stringify(risks); + console.timeEnd('perf: serialize risks'); + + // Serialize an entity with the same collection of 150 entities as above + // Next extract the 150 entities, giving the same result as stringRisks above + console.time('perf: serialize project'); + const stringProject = JSON.stringify(project); + const stringRisks2 = JSON.stringify(JSON.parse(stringProject).risks); + console.timeEnd('perf: serialize project'); + + // See that the stringified results are the same + expect(stringRisks1).toBe(stringRisks2); +});
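The idea behind the change, as a minimal Python sketch (illustrative classes, not mikro-orm's API): propagation registers every reachable entity in the shared serialization context, but a property marked hidden can never appear in the output, so its entire subgraph is pruned from the walk.

```python
class Entity:
    hidden: frozenset = frozenset()  # property names excluded from serialization


def propagate(root: list, entity: Entity, seen: set | None = None) -> None:
    seen = set() if seen is None else seen
    if id(entity) in seen:  # cycles are fine: each entity is visited once
        return
    seen.add(id(entity))
    root.append(entity)  # "register" in the shared serialization context
    for prop, value in vars(entity).items():
        if prop in entity.hidden:
            continue  # hidden relation: never serialized, so don't recurse
        for child in value if isinstance(value, list) else [value]:
            if isinstance(child, Entity):
                propagate(root, child, seen)
```

In the entities above, `FilterValue.risks` is marked `hidden: true`, so serializing a `Project` no longer drags every `Risk` behind each filter value into the context.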
|
|
feat(clickhouse): implement struct field access
|
fff69f32276c30280b7fd17d7457940514293c00
|
feat
|
https://github.com/rohankumardubey/ibis/commit/fff69f32276c30280b7fd17d7457940514293c00
|
implement struct field access
|
diff --git a/registry.py b/registry.py index ac8edb1..127c9bd 100644 --- a/registry.py +++ b/registry.py @@ -635,6 +635,11 @@ def _clip(translator, expr): return arg +def _struct_field(translator, expr): + op = expr.op() + return f"{translator.translate(op.arg)}.`{op.field}`" + + # TODO: clickhouse uses different string functions # for ascii and utf-8 encodings, @@ -790,6 +795,7 @@ operation_registry = { ops.Strftime: _fixed_arity("formatDateTime", 2), ops.ArrayColumn: _array_column, ops.Clip: _clip, + ops.StructField: _struct_field, }
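A hedged usage sketch of the new translation rule (the table and column names are made up): accessing a field of a struct-typed column builds an `ops.StructField` node, which `_struct_field` above renders as dot access with a backtick-quoted field name.

```python
import ibis

t = ibis.table({"s": "struct<a: int64, b: string>"}, name="t")
expr = t.s["a"]  # ops.StructField(arg=t.s, field="a")
# per _struct_field above, the ClickHouse compiler emits: <translated s>.`a`
```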
|
|
perf: improve {Dict, Set}::hash
|
e7ff9f4ea743de0baf9c01cda2e72883896c5d82
|
perf
|
https://github.com/erg-lang/erg/commit/e7ff9f4ea743de0baf9c01cda2e72883896c5d82
|
improve {Dict, Set}::hash
|
diff --git a/dict.rs b/dict.rs index bc10a9b..df70bf0 100644 --- a/dict.rs +++ b/dict.rs @@ -36,14 +36,23 @@ impl<K: Hash + Eq + Immutable, V: Hash + Eq> Eq for Dict<K, V> {} impl<K: Hash, V: Hash> Hash for Dict<K, V> { fn hash<H: Hasher>(&self, state: &mut H) { - let mut v = self - .iter() - .map(|(key, val)| (get_hash(key), key, val)) - .collect::<Vec<_>>(); - v.sort_by_key(|(h, _, _)| *h); - for (_, key, val) in v.iter() { - key.hash(state); - val.hash(state); + let len = self.len(); + len.hash(state); + if len <= 1 { + for (key, val) in self.iter() { + key.hash(state); + val.hash(state); + } + } else { + let mut v = self + .iter() + .map(|(key, val)| (get_hash(key), val)) + .collect::<Vec<_>>(); + v.sort_unstable_by_key(|(h, _)| *h); + for (h, val) in v.iter() { + state.write_usize(*h); + val.hash(state); + } } } } diff --git a/set.rs b/set.rs index e37e973..2fc3d7d 100644 --- a/set.rs +++ b/set.rs @@ -63,9 +63,12 @@ impl<T> Default for Set<T> { impl<T: Hash> Hash for Set<T> { fn hash<H: Hasher>(&self, state: &mut H) { - let mut v = self.iter().map(get_hash).collect::<Vec<_>>(); - v.sort(); - v.hash(state); + self.len().hash(state); + let sum = self + .iter() + .map(get_hash) + .fold(0usize, |acc, x| acc.wrapping_add(x)); + sum.hash(state); } }
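The trick, in a minimal Python sketch (not erg's Rust): replace sort-then-hash with a commutative wrapping sum of element hashes mixed with the length, turning an O(n log n) hash into O(n) while staying order-independent. The `Dict` version above keeps a sort but now sorts only precomputed key hashes; the `Set` version drops sorting entirely, as below.

```python
MASK64 = (1 << 64) - 1  # emulate usize wrapping arithmetic


def set_hash(items) -> int:
    acc = 0
    for x in items:
        acc = (acc + hash(x)) & MASK64  # wrapping add: order cannot matter
    return hash((len(items), acc))  # length first, like len.hash(state)


assert set_hash([1, 2, 3]) == set_hash([3, 1, 2])  # insertion-order independent
```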
|
|
refactor: "reconcileDeleted" helper
|
5ebe66a147f610437586d499b69d2acccf54e279
|
refactor
|
https://github.com/pmndrs/react-spring/commit/5ebe66a147f610437586d499b69d2acccf54e279
|
"reconcileDeleted" helper
|
diff --git a/helpers.test.ts b/helpers.test.ts index 7de9ad0..ad1e694 100644 --- a/helpers.test.ts +++ b/helpers.test.ts @@ -61,8 +61,8 @@ describe('helpers', () => { expect(reconcileWrapper('1:4:|:5:4', '1|2|3')).toEqual([1, 5, 4, 2, 3]) }) - it('should handle interupted chain', () => { - expect(reconcileWrapper('9:4:|4:5:', '1|2|3')).toEqual([4, 5, 1, 2, 3]) + it('should handle interrupted chain', () => { + expect(reconcileWrapper('5:3:|9:5:', '1|2|4')).toEqual([1, 2, 4, 3, 5]) }) it('should handle empty arrays', () => { diff --git a/helpers.ts b/helpers.ts index 5d82157..47d9daa 100644 --- a/helpers.ts +++ b/helpers.ts @@ -147,65 +147,82 @@ export function fillArray<T>(length: number, mapIndex: (index: number) => T) { return arr } +type ItemKey = number | string +interface Item { + key: ItemKey + originalKey: ItemKey + phase: string + item: any + props: object + destroyed?: boolean +} +interface DeletedItem extends Item { + left?: ItemKey + right?: ItemKey +} + /** - * This tries to put deleted items back into out list in correct order. Deleted - * items need to have a left and right property with id of their sibling which - * is used to find the correct placement. - * @param deleted - * @param out + * This tries to put deleted items back into the given `out` list in correct + * order. Deleted items must have a `left` and `right` property with key of + * their sibling which is used to find the correct placement. */ export function reconcileDeleted( - deleted: { left?: number; right?: number }[], - out: { originalKey: number }[] + deleted: DeletedItem[], + current: Item[] ): any[] { // Copy as we will be mutating the arrays deleted = [...deleted] - let result: any[] = [...out] + current = [...current] - // Keep track of how many times we were not able to insert an item + // Used to detect deadlock (when a pass finds 0 siblings) let failedTries = 0 - // Either try to insert all deleted items or bail if we went through whole - // list and did not insert single item. Bailing means the chain was - // interrupted somewhere and we cannot recreate the ordering. - while (deleted.length && failedTries < deleted.length) { - const d = deleted.shift()! 
- let indexToInsert = null - - result.forEach((item, index) => { - // try find a sibling in out array - if (item.originalKey == d.left) { - indexToInsert = index + 1 - return + // Track where the current pass start/ends + let passIndex = 0 + let nextPassIndex = deleted.length + + // Insert all deleted items into `current` + for (let i = 0; i < deleted.length; i++) { + if (i === nextPassIndex) { + // Sanity test: Push to end if somehow no siblings were found + if (passIndex + failedTries === nextPassIndex) { + for (let j = i; j < deleted.length; j++) { + const { left, right, ...deletedItem } = deleted[j] + current.push(deletedItem) + } + break } + // Update local state at the end of each pass + passIndex = nextPassIndex + nextPassIndex = deleted.length + failedTries = 0 + } + + // The index of the deleted item in `current` + let index = -1 - if (item.originalKey == d.right) { - indexToInsert = index - return + // Look for the left or right sibling in `current` + const { left, right, ...deletedItem } = deleted[i] + for (let j = current.length; --j >= 0; ) { + const { originalKey: key } = current[j] + if (key === right) { + index = j + break + } + if (key === left) { + index = j + 1 + break } - }) - - if (indexToInsert === null) { - // we did not find where it should be inserted, probably the sibling is - // in deleted array and we did not insert it yet so put it back on stack - // and try later - deleted.push(d) - failedTries += 1 - } else { - result.splice(Math.max(indexToInsert, 0), 0, d) - indexToInsert = null - failedTries = 0 } - } - // We were not able to recreate the ordering just put them in the beginning. - // We assume deleted item are already ordered properly. There are some - // (not sure if bugs or not) cases where we get here, for example items without - // siblings have left set to their own key so if items are added one by one - // they won't be linked - if (deleted.length) { - result = [...deleted, ...result] + // Items with no index are revisited in the next pass + if (index < 0) { + failedTries++ + deleted.push(deleted[i]) + } else { + current.splice(index, 0, deletedItem) + } } - return result + return current } diff --git a/useTransition.js b/useTransition.js index 92c9769..ead92fc 100644 --- a/useTransition.js +++ b/useTransition.js @@ -239,8 +239,8 @@ function diffItems({ first, current, deleted, prevProps, ...state }, props) { ...current[key], phase, destroyed: true, - left: _keys[Math.max(0, i - 1)], - right: _keys[Math.min(_keys.length, i + 1)], + left: _keys[i - 1], + right: _keys[i + 1], props: { delay: (delay += trail), config: callProp(config, item, phase),
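A simplified Python rendering of the rewritten helper (not the TypeScript above, and without its pass-boundary bookkeeping): each deleted item remembers the keys of its former left and right neighbours; items whose siblings are not placed yet are retried on a later pass, and a pass that places nothing breaks the deadlock by appending the leftovers.

```python
def reconcile_deleted(deleted: list[dict], current: list[dict]) -> list[dict]:
    current, pending = list(current), list(deleted)
    while pending:
        retry = []
        for item in pending:
            keys = [it["key"] for it in current]
            if item.get("right") in keys:
                current.insert(keys.index(item["right"]), item)
            elif item.get("left") in keys:
                current.insert(keys.index(item["left"]) + 1, item)
            else:
                retry.append(item)  # sibling not placed yet; try next pass
        if len(retry) == len(pending):  # deadlock: no sibling was ever found
            current.extend(retry)
            break
        pending = retry
    return current


# mirrors one of the existing tests: 4 goes right of 1, then 5 goes left of 4
current = [{"key": 1}, {"key": 2}, {"key": 3}]
deleted = [{"key": 4, "left": 1}, {"key": 5, "right": 4}]
assert [it["key"] for it in reconcile_deleted(deleted, current)] == [1, 5, 4, 2, 3]
```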
|
|
docs(snowflake): add blog post showing insertion into snowflake from postgres (#8426)
|
3a8c7ccb39a48a567be71c78c23afc8261bb8fb1
|
docs
|
https://github.com/ibis-project/ibis/commit/3a8c7ccb39a48a567be71c78c23afc8261bb8fb1
|
add blog post showing insertion into snowflake from postgres (#8426)
|
diff --git a/html.json b/html.json index 6158534..98a967d 100644 --- a/html.json +++ b/html.json @@ -0,0 +1,16 @@ +{ + "hash": "b777fdee8d50ae460617f4078c6145bf", + "result": { + "engine": "jupyter", + "markdown": "---\\ntitle: \\"Snow IO: loading data from other DBs into Snowflake\\"\\nauthor: \\"Phillip Cloud\\"\\nerror: false\\ndate: \\"2024-03-06\\"\\ncategories:\\n - blog\\n - snowflake\\n - io\\n - productivity\\n---\\n\\n## Recap\\n\\nWe've [blogged about Snowflake IO before](../snowflake-io/index.qmd), in the\\ncontext of getting local files into Snowflake as fast as possible.\\n\\nIn this post, we'll show how to insert query results from another system into\\nSnowflake, using Ibis.\\n\\n## Setup\\n\\n### Connect to your non-Snowflake system\\n\\nWe'll connect to a postgres database running locally in a container. You\\nshould be able to swap in your own connection details as needed.\\n\\n::: {#52dc2246 .cell execution_count=1}\\n``` {.python .cell-code}\\nfrom ibis.interactive import * # <1>\\n\\npg_con = ibis.connect(\\"postgres://postgres:postgres@localhost/postgres\\")\\n```\\n:::\\n\\n\\n1. Import Ibis for maximum productivity in interactive analysis.\\n\\nWe'll use a test dataset that contains some baseball batting statistics.\\n\\nIbis provides that example data, so we can dump that into postgres.\\n\\n::: {#c9ed5f4c .cell execution_count=2}\\n``` {.python .cell-code}\\npg_batting = pg_con.create_table(\\n \\"batting\\",\\n ibis.examples.Batting.fetch().to_pandas(), # <1>\\n temp=True, # <2>\\n)\\n```\\n:::\\n\\n\\n1. Yep, I'm using pandas here!\\n2. Use a temporary table to avoid cluttering up the database.\\n\\n### Connect to Snowflake\\n\\n::: {#5f332c9a .cell execution_count=3}\\n``` {.python .cell-code}\\nimport os\\n\\n# snowflake://user:pass@account/database/schema?warehouse=my_warehouse\\nsnow_con = ibis.connect(os.environ[\\"SNOWFLAKE_URL\\"]) # <1>\\n```\\n:::\\n\\n\\n1. 
Set the `SNOWFLAKE_URL` environment variable to your Snowflake connection string.\\n\\n## Profit\\n\\n### Construct an Ibis expression from the postgres data\\n\\nLet's build an Ibis expression based on the `batting` table in our postgres database.\\n\\n::: {#842f6246 .cell execution_count=4}\\n``` {.python .cell-code}\\npg_batting\\n```\\n\\n[rendered HTML table preview of `pg_batting` elided; columns: player_id, year_id, stint, team_id, lg_id, g, ab, r, h, x2b, x3b, hr, rbi, sb, cs, bb, so, ibb, hbp, sh, sf, gidp; the inline `<pre>`/`<span>` styling markup carried no other information]
style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0.0</span> │\\n│ <span style=\\"color: #008000; text-decoration-color: #008000\\">barrofr01</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1871</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">BS1 </span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">NA </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">18</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">86</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">13</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">13</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">11.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0.0</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">nan</span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">0.0</span> │\\n│ <span style=\\"color: #7f7f7f; 
text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │\\n└───────────┴─────────┴───────┴─────────┴────────┴───────┴───────┴───────┴───────┴───────┴───────┴───────┴─────────┴─────────┴─────────┴───────┴─────────┴─────────┴─────────┴─────────┴─────────┴─────────┘\\n</pre>\\n```\\n:::\\n:::\\n\\n\\nWe can compute the average [RBI](https://en.wikipedia.org/wiki/Run_batted_in) per year per team.\\n\\n::: {#0fe95f00 .cell execution_count=5}\\n``` {.python .cell-code}\\npg_expr = pg_batting.group_by((\\"year_id\\", \\"team_id\\")).agg(avg_rbi=_.rbi.mean())\\npg_expr\\n```\\n\\n::: {.cell-output .cell-output-display execution_count=6}\\n```{=html}\\n<pre style=\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\">┏━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━┓\\n┃<span style=\\"font-weight: bold\\"> year_id </span>┃<span style=\\"font-weight: bold\\"> team_id </span>┃<span style=\\"font-weight: bold\\"> avg_rbi </span>┃\\n┡━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━┩\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">float64</span> │\\n├─────────┼─────────┼───────────┤\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1891</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">PIT </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">22.782609</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1895</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">BSN </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">34.363636</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: 
bold\\">1940</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">SLA </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">22.343750</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1981</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">HOU </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">9.972973</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1913</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">CLE </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">13.512821</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1971</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">MON </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">17.181818</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2008</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">PIT </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">15.000000</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1895</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">WAS </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">23.096774</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2011</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">KCA </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">16.785714</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2007</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">MIL </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">19.350000</span> │\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │\\n└─────────┴─────────┴───────────┘\\n</pre>\\n```\\n:::\\n:::\\n\\n\\nWe can also rename columns to be more consistent with typical Snowflake usage.\\n\\n::: {#c75c8ff3 .cell execution_count=6}\\n``` {.python .cell-code}\\npg_expr = pg_expr.rename(\\"ALL_CAPS\\")\\npg_expr\\n```\\n\\n::: {.cell-output .cell-output-display execution_count=7}\\n```{=html}\\n<pre style=\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\">┏━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━┓\\n┃<span style=\\"font-weight: bold\\"> YEAR_ID </span>┃<span style=\\"font-weight: bold\\"> TEAM_ID </span>┃<span style=\\"font-weight: bold\\"> AVG_RBI </span>┃\\n┡━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━┩\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">float64</span> │\\n├─────────┼─────────┼───────────┤\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1891</span> │ <span 
style=\\"color: #008000; text-decoration-color: #008000\\">PIT </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">22.782609</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1895</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">BSN </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">34.363636</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1940</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">SLA </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">22.343750</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1981</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">HOU </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">9.972973</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1913</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">CLE </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">13.512821</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1971</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">MON </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">17.181818</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2008</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">PIT </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">15.000000</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1895</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">WAS </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">23.096774</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2011</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">KCA </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">16.785714</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2007</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">MIL </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">19.350000</span> │\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │\\n└─────────┴─────────┴───────────┘\\n</pre>\\n```\\n:::\\n:::\\n\\n\\nLet's show how many rows we have in the result.\\n\\n::: {#ac4befe6 .cell execution_count=7}\\n``` {.python .cell-code}\\npg_expr.count()\\n```\\n\\n::: {.cell-output .cell-output-display}\\n```{=html}\\n<pre style=\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\"></pre>\\n```\\n:::\\n\\n::: {.cell-output .cell-output-display execution_count=8}\\n\\n::: {.ansi-escaped-output}\\n```{=html}\\n<pre><span class=\\"ansi-cyan-fg 
ansi-bold\\">3015</span></pre>\\n```\\n:::\\n\\n:::\\n:::\\n\\n\\n### Insert the computed results into Snowflake\\n\\nBecause all Ibis backends implement the `to_pyarrow()` method, we can\\nget data out of another system and into Snowflake with a few lines of code.\\n\\nFirst we'll create a table in Snowflake to hold the data.\\n\\nIbis helps here by providing an API to access the schema from the\\n**postgres**-based expression, and automatically translates postgres types into\\nSnowflake types.\\n\\n::: {#22568a53 .cell execution_count=8}\\n``` {.python .cell-code}\\nsnow_table = snow_con.create_table(\\"pg_batting\\", schema=pg_expr.schema(), temp=True) # <1>\\n```\\n:::\\n\\n\\n1. By default the table will be created in the database and schema of the\\n current connection.\\n\\n We create a temporary table for the same reason we do with postgres above.\\n\\n\\nWe'll show that the table is empty to sanity check ourselves.\\n\\n::: {#dc8f71fa .cell execution_count=9}\\n``` {.python .cell-code}\\nsnow_table\\n```\\n\\n::: {.cell-output .cell-output-display execution_count=10}\\n```{=html}\\n<pre style=\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\">┏━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓\\n┃<span style=\\"font-weight: bold\\"> YEAR_ID </span>┃<span style=\\"font-weight: bold\\"> TEAM_ID </span>┃<span style=\\"font-weight: bold\\"> AVG_RBI </span>┃\\n┡━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">float64</span> │\\n└─────────┴─────────┴─────────┘\\n</pre>\\n```\\n:::\\n:::\\n\\n\\nInsert the expression's result table into Snowflake.\\n\\n::: {#01bcdc29 .cell execution_count=10}\\n``` {.python .cell-code}\\nsnow_con.insert(\\"pg_batting\\", pg_expr.to_pyarrow())\\n```\\n:::\\n\\n\\nTo sanity check what we've done let's peek at the table.\\n\\n::: {#e7a29528 .cell execution_count=11}\\n``` {.python .cell-code}\\nsnow_table\\n```\\n\\n::: {.cell-output .cell-output-display execution_count=12}\\n```{=html}\\n<pre style=\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\">┏━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━┓\\n┃<span style=\\"font-weight: bold\\"> YEAR_ID </span>┃<span style=\\"font-weight: bold\\"> TEAM_ID </span>┃<span style=\\"font-weight: bold\\"> AVG_RBI </span>┃\\n┡━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━┩\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">int64</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">string</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">float64</span> │\\n├─────────┼─────────┼───────────┤\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1891</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">PIT </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">22.782609</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1895</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">BSN </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">34.363636</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1940</span> │ <span 
style=\\"color: #008000; text-decoration-color: #008000\\">SLA </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">22.343750</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1981</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">HOU </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">9.972973</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1913</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">CLE </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">13.512821</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1971</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">MON </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">17.181818</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2008</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">PIT </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">15.000000</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">1895</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">WAS </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">23.096774</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2011</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">KCA </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">16.785714</span> │\\n│ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">2007</span> │ <span style=\\"color: #008000; text-decoration-color: #008000\\">MIL </span> │ <span style=\\"color: #008080; text-decoration-color: #008080; font-weight: bold\\">19.350000</span> │\\n│ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │ <span style=\\"color: #7f7f7f; text-decoration-color: #7f7f7f\\">…</span> │\\n└─────────┴─────────┴───────────┘\\n</pre>\\n```\\n:::\\n:::\\n\\n\\nWe'll count them too, to be extra sure.\\n\\n::: {#0b854a6c .cell execution_count=12}\\n``` {.python .cell-code}\\nsnow_table.count()\\n```\\n\\n::: {.cell-output .cell-output-display}\\n```{=html}\\n<pre style=\\"white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace\\"></pre>\\n```\\n:::\\n\\n::: {.cell-output .cell-output-display execution_count=13}\\n\\n::: {.ansi-escaped-output}\\n```{=html}\\n<pre><span class=\\"ansi-cyan-fg ansi-bold\\">3015</span></pre>\\n```\\n:::\\n\\n:::\\n:::\\n\\n\\n## Conclusion\\n\\nIn this post we show how easy it is to move data from one backend into Snowflake using Ibis.\\n\\nPlease try it out and get in touch on [Zulip](https://ibis-project.zulipchat.com/) or\\n[GitHub](https://github.com/ibis-project/ibis), we'd love to hear from you!\\n\\n", + "supporting": [ + "index_files" + ], + "filters": [], + "includes": { + "include-in-header": [ + "<script src=\\"https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js\\" 
integrity=\\"sha512-c3Nl8+7g4LMSTdrm621y7kf9v3SDPnhxLNhcjFJbKECVnmZHTdo+IRO05sNLTH/D3vA6u1X32ehoLC7WFVdheg==\\" crossorigin=\\"anonymous\\"></script>\\n<script src=\\"https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.min.js\\" integrity=\\"sha512-bLT0Qm9VnAYZDflyKcBaQ2gg0hSYNQrJ8RilYldYQ1FxQYoCLtUjuuRuZo+fjqhx/qtq/1itJ0C2ejDxltZVFg==\\" crossorigin=\\"anonymous\\" data-relocate-top=\\"true\\"></script>\\n<script type=\\"application/javascript\\">define('jquery', [],function() {return window.jQuery;})</script>\\n" + ] + } + } +} \\ No newline at end of file diff --git a/index.qmd b/index.qmd index 5c39895..3c09c74 100644 --- a/index.qmd +++ b/index.qmd @@ -0,0 +1,143 @@ +--- +title: "Snow IO: loading data from other DBs into Snowflake" +author: "Phillip Cloud" +error: false +date: "2024-03-06" +categories: + - blog + - snowflake + - io + - productivity +--- + +## Recap + +We've [blogged about Snowflake IO before](../snowflake-io/index.qmd), in the +context of getting local files into Snowflake as fast as possible. + +In this post, we'll show how to insert query results from another system into +Snowflake, using Ibis. + +## Setup + +### Connect to your non-Snowflake system + +We'll connect to a postgres database running locally in a container. You +should be able to swap in your own connection details as needed. + +```{python} +from ibis.interactive import * # <1> + +pg_con = ibis.connect("postgres://postgres:postgres@localhost/postgres") +``` + +1. Import Ibis for maximum productivity in interactive analysis. + +We'll use a test dataset that contains some baseball batting statistics. + +Ibis provides that example data, so we can dump that into postgres. + + +```{python} +pg_batting = pg_con.create_table( + "batting", + ibis.examples.Batting.fetch().to_pandas(), # <1> + temp=True, # <2> +) +``` + +1. Yep, I'm using pandas here! +2. Use a temporary table to avoid cluttering up the database. + +### Connect to Snowflake + +```{python} +import os + +# snowflake://user:pass@account/database/schema?warehouse=my_warehouse +snow_con = ibis.connect(os.environ["SNOWFLAKE_URL"]) # <1> +``` + +1. Set the `SNOWFLAKE_URL` environment variable to your Snowflake connection string. + +## Profit + +### Construct an Ibis expression from the postgres data + +Let's build an Ibis expression based on the `batting` table in our postgres database. + +```{python} +pg_batting +``` + +We can compute the average [RBI](https://en.wikipedia.org/wiki/Run_batted_in) per year per team. + +```{python} +pg_expr = pg_batting.group_by(("year_id", "team_id")).agg(avg_rbi=_.rbi.mean()) +pg_expr +``` + +We can also rename columns to be more consistent with typical Snowflake usage. + +```{python} +pg_expr = pg_expr.rename("ALL_CAPS") +pg_expr +``` + +Let's show how many rows we have in the result. + +```{python} +pg_expr.count() +``` + +### Insert the computed results into Snowflake + +Because all Ibis backends implement the `to_pyarrow()` method, we can +get data out of another system and into Snowflake with a few lines of code. + +First we'll create a table in Snowflake to hold the data. + +Ibis helps here by providing an API to access the schema from the +**postgres**-based expression, and automatically translates postgres types into +Snowflake types. + +```{python} +snow_table = snow_con.create_table("pg_batting", schema=pg_expr.schema(), temp=True) # <1> +``` + +1. By default the table will be created in the database and schema of the + current connection. 
+ + We create a temporary table for the same reason we do with postgres above. + + +We'll show that the table is empty to sanity check ourselves. + +```{python} +snow_table +``` + +Insert the expression's result table into Snowflake. + +```{python} +snow_con.insert("pg_batting", pg_expr.to_pyarrow()) +``` + +To sanity check what we've done let's peek at the table. + +```{python} +snow_table +``` + +We'll count them too, to be extra sure. + +```{python} +snow_table.count() +``` + +## Conclusion + +In this post we show how easy it is to move data from one backend into Snowflake using Ibis. + +Please try it out and get in touch on [Zulip](https://ibis-project.zulipchat.com/) or +[GitHub](https://github.com/ibis-project/ibis), we'd love to hear from you!
|
|
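Condensed into a single script, the cross-backend copy pattern the post above documents looks roughly like this. This is a sketch, not the post itself: it reuses the post's placeholder connection string and `SNOWFLAKE_URL` environment variable, and assumes the postgres `batting` table already exists.

```python
import os

import ibis
from ibis import _

# Connect to the source (postgres) and destination (Snowflake) backends.
pg_con = ibis.connect("postgres://postgres:postgres@localhost/postgres")
snow_con = ibis.connect(os.environ["SNOWFLAKE_URL"])

# Build the aggregation lazily against postgres, then rename to ALL_CAPS.
batting = pg_con.table("batting")
expr = (
    batting.group_by(("year_id", "team_id"))
    .agg(avg_rbi=_.rbi.mean())
    .rename("ALL_CAPS")
)

# Create an empty Snowflake table with the translated schema, then insert
# the materialized result as a PyArrow table.
snow_con.create_table("pg_batting", schema=expr.schema(), temp=True)
snow_con.insert("pg_batting", expr.to_pyarrow())
```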
fix: build correct path for `$HOME/.config/…` files. (#450)
The special per-user `ignore` and `attributes` files can also be
defaulted when certain environment variables are set, and may then be
accessed. Previously the default derived from `$HOME` was incorrect: it
was missing the intermediate `.config/` directory. That directory is now
included, so paths are built exactly the way git builds them.
|
5c11b84f4e74e3eefdd0f5804976ebfc505e0f2f
|
fix
|
https://github.com/Byron/gitoxide/commit/5c11b84f4e74e3eefdd0f5804976ebfc505e0f2f
|
build correct path for `$HOME/.config/…` files. (#450)
The special per-user `ignore` and `attributes` files can also be
defaulted when certain environment variables are set, and may then be
accessed. Previously the default derived from `$HOME` was incorrect: it
was missing the intermediate `.config/` directory. That directory is now
included, so paths are built exactly the way git builds them.
|
diff --git a/access.rs b/access.rs index b86bde6..401967b 100644 --- a/access.rs +++ b/access.rs @@ -129,7 +129,11 @@ impl Cache { } fn assemble_attribute_globals(me: &Cache) -> Result<git_attributes::MatchGroup, checkout_options::Error> { - let _attributes_file = me.trusted_file_path("core", None, "attributesFile").transpose()?; + let _attributes_file = match me.trusted_file_path("core", None, "attributesFile").transpose()? { + Some(attributes) => Some(attributes.into_owned()), + None => me.xdg_config_path("attributes").ok().flatten(), + }; + // let group = git_attributes::MatchGroup::<git_attributes::Attributes>::from_git_dir() Ok(Default::default()) } @@ -153,13 +157,24 @@ impl Cache { attribute_globals: assemble_attribute_globals(self)?, }) } - pub fn xdg_config_path( + pub(crate) fn xdg_config_path( &self, resource_file_name: &str, ) -> Result<Option<PathBuf>, git_sec::permission::Error<PathBuf>> { std::env::var_os("XDG_CONFIG_HOME") - .map(|path| (path, &self.xdg_config_home_env)) - .or_else(|| std::env::var_os("HOME").map(|path| (path, &self.home_env))) + .map(|path| (PathBuf::from(path), &self.xdg_config_home_env)) + .or_else(|| { + std::env::var_os("HOME").map(|path| { + ( + { + let mut p = PathBuf::from(path); + p.push(".config"); + p + }, + &self.home_env, + ) + }) + }) .and_then(|(base, permission)| { let resource = std::path::PathBuf::from(base).join("git").join(resource_file_name); permission.check(resource).transpose() @@ -171,7 +186,7 @@ impl Cache { /// /// We never fail for here even if the permission is set to deny as we `git-config` will fail later /// if it actually wants to use the home directory - we don't want to fail prematurely. - pub fn home_dir(&self) -> Option<PathBuf> { + pub(crate) fn home_dir(&self) -> Option<PathBuf> { std::env::var_os("HOME") .map(PathBuf::from) .and_then(|path| self.home_env.check_opt(path)) diff --git a/mod.rs b/mod.rs index ae11a51..a348065 100644 --- a/mod.rs +++ b/mod.rs @@ -133,14 +133,15 @@ pub mod excludes { .then(|| git_glob::pattern::Case::Fold) .unwrap_or_default(); let mut buf = Vec::with_capacity(512); + let excludes_file = match repo.config.excludes_file().transpose()? { + Some(user_path) => Some(user_path), + None => repo.config.xdg_config_path("ignore")?, + }; let state = git_worktree::fs::cache::State::IgnoreStack(git_worktree::fs::cache::state::Ignore::new( overrides.unwrap_or_default(), git_attributes::MatchGroup::<git_attributes::Ignore>::from_git_dir( repo.git_dir(), - match repo.config.excludes_file().transpose()?.as_ref() { - Some(user_path) => Some(user_path.to_owned()), - None => repo.config.xdg_config_path("ignore")?, - }, + excludes_file, &mut buf, )?, None,
|
|
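The gitoxide fix is Rust, but the resolution order it implements is easy to state: prefer `$XDG_CONFIG_HOME/git/<name>`, otherwise fall back to `$HOME/.config/git/<name>`, where the `.config/` component was the missing piece. A language-neutral sketch of that order in Python (not the actual gitoxide code, and omitting its permission checks):

```python
import os


def xdg_config_path(name):
    """Resolve $XDG_CONFIG_HOME/git/<name>, else $HOME/.config/git/<name>."""
    xdg = os.environ.get("XDG_CONFIG_HOME")
    if xdg:
        base = xdg
    else:
        home = os.environ.get("HOME")
        if home is None:
            return None
        # The bug: the base used to be $HOME itself, without ".config".
        base = os.path.join(home, ".config")
    return os.path.join(base, "git", name)


# With only HOME=/home/alice set:
#   xdg_config_path("ignore")     -> /home/alice/.config/git/ignore
#   xdg_config_path("attributes") -> /home/alice/.config/git/attributes
```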
docs: use components instead of pieces
|
179ca1e05258fda1424da4f271fed60230c85b3a
|
docs
|
https://github.com/ibis-project/ibis/commit/179ca1e05258fda1424da4f271fed60230c85b3a
|
use components instead of pieces
|
diff --git a/README.md b/README.md index 7a3fbd8..c826daa 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,8 @@ Ibis is a Python library that provides a lightweight, universal interface for data wrangling. It helps Python users explore and transform data of any size, stored anywhere. -Ibis has three primary pieces: +Ibis has three primary components: + 1. **A dataframe API for Python**. This means that Python users can write Ibis code to manipulate tabular data. 2. **Utilities for connecting to 10+ query engines.**
|
|
feat(pandas): implement degrees
|
25b4f69883099f2ad161faefa861a40ca7460cd4
|
feat
|
https://github.com/rohankumardubey/ibis/commit/25b4f69883099f2ad161faefa861a40ca7460cd4
|
implement degrees
|
diff --git a/generic.py b/generic.py index afd8f91..edd8d43 100644 --- a/generic.py +++ b/generic.py @@ -256,6 +256,11 @@ def execute_series_radians(_, data, **kwargs): return np.radians(data) +@execute_node.register(ops.Degrees, (pd.Series, *numeric_types)) +def execute_series_degrees(_, data, **kwargs): + return np.degrees(data) + + @execute_node.register((ops.Ceil, ops.Floor), pd.Series) def execute_series_ceil(op, data, **kwargs): return_type = np.object_ if data.dtype == np.object_ else np.int64 diff --git a/test_numeric.py b/test_numeric.py index b436782..849dae5 100644 --- a/test_numeric.py +++ b/test_numeric.py @@ -161,14 +161,7 @@ def test_isnan_isinf( L(5.556).degrees(), math.degrees(5.556), id='degrees', - marks=pytest.mark.notimpl( - [ - "dask", - "datafusion", - "impala", - "pandas", - ] - ), + marks=pytest.mark.notimpl(["dask", "datafusion", "impala"]), ), param(L(11) % 3, 11 % 3, id='mod'), ],
|
|
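The new pandas kernel simply defers to NumPy's `degrees` ufunc over a Series or scalar. A standalone check of that equivalence (the input values are illustrative, with `5.556` borrowed from the test):

```python
import math

import numpy as np
import pandas as pd

s = pd.Series([0.0, math.pi / 2, math.pi, 5.556])
out = np.degrees(s)  # what execute_series_degrees does with the data

# The ufunc agrees with the stdlib element-wise.
assert math.isclose(out.iloc[3], math.degrees(5.556))
print(out.tolist())  # [0.0, 90.0, 180.0, ~318.3]
```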
feat(clickhouse): implement trim, pad and string predicates
|
a5b72934c38d3a6422274737d5b766d7b4dd9766
|
feat
|
https://github.com/rohankumardubey/ibis/commit/a5b72934c38d3a6422274737d5b766d7b4dd9766
|
implement trim, pad and string predicates
|
diff --git a/registry.py b/registry.py index 3b28b84..2a7db47 100644 --- a/registry.py +++ b/registry.py @@ -566,6 +566,46 @@ def _string_like(translator, expr): ) +def _string_ilike(translator, expr): + op = expr.op() + return 'lower({}) LIKE lower({})'.format( + translator.translate(op.arg), + translator.translate(op.pattern), + ) + + +def _startswith(translator, expr): + op = expr.op() + arg = op.arg + start = op.start + tr_arg = translator.translate(arg) + tr_start = translator.translate(start) + return f"startsWith({tr_arg}, {tr_start})" + + +def _endswith(translator, expr): + op = expr.op() + arg = translator.translate(op.arg) + end = translator.translate(op.end) + return f"endsWith({arg}, {end})" + + +def _lpad(translator, expr): + op = expr.op() + arg = translator.translate(op.arg) + length = translator.translate(op.length) + pad = translator.translate(op.pad) + return f"leftPad({arg}, {length}, {pad})" + + +def _rpad(translator, expr): + op = expr.op() + arg = translator.translate(op.arg) + length = translator.translate(op.length) + pad = translator.translate(op.pad) + return f"rightPad({arg}, {length}, {pad})" + + def _group_concat(translator, expr): arg, sep, where = expr.op().args if where is not None: @@ -646,6 +686,14 @@ operation_registry = { ops.StringJoin: _string_join, ops.StringSplit: _string_split, ops.StringSQLLike: _string_like, + ops.StringSQLILike: _string_ilike, + ops.StartsWith: _startswith, + ops.EndsWith: _endswith, + ops.LPad: _lpad, + ops.RPad: _rpad, + ops.LStrip: _unary('trimLeft'), + ops.RStrip: _unary('trimRight'), + ops.Strip: _unary('trimBoth'), ops.Repeat: _string_repeat, ops.RegexSearch: _fixed_arity('match', 2), # TODO: extractAll(haystack, pattern)[index + 1] diff --git a/test_string.py b/test_string.py index 5a65d88..706cf97 100644 --- a/test_string.py +++ b/test_string.py @@ -48,14 +48,7 @@ def test_string_col_is_unicode(backend, alltypes, df): lambda t: t.string_col.ilike('6%'), lambda t: t.string_col.str.contains('6.*'), id='ilike', - marks=pytest.mark.notimpl( - [ - "clickhouse", - "datafusion", - "impala", - "pyspark", - ] - ), + marks=pytest.mark.notimpl(["datafusion", "impala", "pyspark"]), ), param( lambda t: t.string_col.re_search(r'[[:digit:]]+'), @@ -124,13 +117,11 @@ def test_string_col_is_unicode(backend, alltypes, df): lambda t: t.string_col.lpad(10, 'a'), lambda t: t.string_col.str.pad(10, fillchar='a', side='left'), id='lpad', - marks=pytest.mark.notimpl(["clickhouse"]), ), param( lambda t: t.string_col.rpad(10, 'a'), lambda t: t.string_col.str.pad(10, fillchar='a', side='right'), id='rpad', - marks=pytest.mark.notimpl(["clickhouse"]), ), param( lambda t: t.string_col.find_in_set(['1']), @@ -179,43 +170,28 @@ def test_string_col_is_unicode(backend, alltypes, df): lambda t: t.string_col.startswith('foo'), lambda t: t.string_col.str.startswith('foo'), id='startswith', - marks=pytest.mark.notimpl( - ["clickhouse", "dask", "datafusion", "pandas"] - ), + marks=pytest.mark.notimpl(["dask", "datafusion", "pandas"]), ), param( lambda t: t.string_col.endswith('foo'), lambda t: t.string_col.str.endswith('foo'), id='endswith', - marks=pytest.mark.notimpl( - ["clickhouse", "dask", "datafusion", "pandas"] - ), + marks=pytest.mark.notimpl(["dask", "datafusion", "pandas"]), ), param( lambda t: t.string_col.strip(), lambda t: t.string_col.str.strip(), id='strip', - marks=pytest.mark.notimpl(["clickhouse"]), ), param( lambda t: t.string_col.lstrip(), lambda t: t.string_col.str.lstrip(), id='lstrip', - marks=pytest.mark.notimpl( - [ - "clickhouse", - 
] - ), ), param( lambda t: t.string_col.rstrip(), lambda t: t.string_col.str.rstrip(), id='rstrip', - marks=pytest.mark.notimpl( - [ - "clickhouse", - ] - ), ), param( lambda t: t.string_col.capitalize(),
|
|
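The added ClickHouse helpers are thin string templates over operands the translator has already rendered to SQL. A sketch of the shapes they emit, fed hypothetical pre-translated fragments rather than real expressions:

```python
# Mirrors the templates in the diff; inputs are assumed to be
# already-translated SQL fragments, as in the real translator.
def lpad_sql(arg: str, length: str, pad: str) -> str:
    return f"leftPad({arg}, {length}, {pad})"


def startswith_sql(arg: str, start: str) -> str:
    return f"startsWith({arg}, {start})"


print(lpad_sql("t0.string_col", "10", "'a'"))
# leftPad(t0.string_col, 10, 'a')
print(startswith_sql("t0.string_col", "'foo'"))
# startsWith(t0.string_col, 'foo')
```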
build: fixed wp plugin version
|
b0e2c49a1fe7972161e7298fb4d90df34b7d7145
|
build
|
https://github.com/tsparticles/tsparticles/commit/b0e2c49a1fe7972161e7298fb4d90df34b7d7145
|
fixed wp plugin version
|
diff --git a/package.json b/package.json index fdf2841..1838d67 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "wordpress-particles", - "version": "2.3.1", + "version": "2.3.2", "description": "Official tsParticles WordPress Plugin - Easily create highly customizable particle, confetti and fireworks animations and use them as animated backgrounds for your website. Ready to use components available also for Web Components, Vue.js (2.x and 3.x), Angular, Svelte, jQuery, Preact, React, Riot.js, Solid.js, Inferno.", "author": "Matteo Bruni <[email protected]>", "license": "MIT",
|
|
fix(duckdb): support casting to unsigned integer types
|
066c1582cab394896625b93bf8b05d07c2a321db
|
fix
|
https://github.com/rohankumardubey/ibis/commit/066c1582cab394896625b93bf8b05d07c2a321db
|
support casting to unsigned integer types
|
diff --git a/compiler.py b/compiler.py index f3c58ce..a267661 100644 --- a/compiler.py +++ b/compiler.py @@ -20,12 +20,24 @@ class DuckDBSQLExprTranslator(AlchemyExprTranslator): _dialect_name = "duckdb" -@compiles(sat.UInt64, "duckdb") -@compiles(sat.UInt32, "duckdb") -@compiles(sat.UInt16, "duckdb") @compiles(sat.UInt8, "duckdb") +def compile_uint8(element, compiler, **kw): + return "UTINYINT" + + +@compiles(sat.UInt16, "duckdb") +def compile_uint16(element, compiler, **kw): + return "USMALLINT" + + +@compiles(sat.UInt32, "duckdb") +def compile_uint32(element, compiler, **kw): + return "UINTEGER" + + +@compiles(sat.UInt64, "duckdb") def compile_uint(element, compiler, **kw): - return element.__class__.__name__.upper() + return "UBIGINT" @compiles(sat.ArrayType, "duckdb") diff --git a/out.sql b/out.sql index 2a20dc8..e7e7244 100644 --- a/out.sql +++ b/out.sql @@ -0,0 +1,3 @@ +SELECT + CAST(t0.a AS UTINYINT) AS "Cast(a, uint8)" +FROM t AS t0 \\ No newline at end of file diff --git a/test_datatypes.py b/test_datatypes.py index 70f437e..0d32d0d 100644 --- a/test_datatypes.py +++ b/test_datatypes.py @@ -1,4 +1,6 @@ import pytest +import sqlglot as sg +from packaging.version import parse as vparse from pytest import param import ibis.expr.datatypes as dt @@ -78,3 +80,18 @@ from ibis.backends.duckdb.datatypes import parse def test_parser(typ, expected): ty = parse(typ) assert ty == expected + + [email protected]("uint_type", ["uint8", "uint16", "uint32", "uint64"]) [email protected]( + vparse(sg.__version__) < vparse("11.3.4"), + raises=sg.ParseError, + reason="sqlglot version doesn't support duckdb unsigned integer types", +) +def test_cast_uints(uint_type, snapshot): + import ibis + + t = ibis.table(dict(a="int8"), name="t") + snapshot.assert_match( + str(ibis.to_sql(t.a.cast(uint_type), dialect="duckdb")), "out.sql" + ) diff --git a/poetry.lock b/poetry.lock index 3c56de9..5811879 100644 --- a/poetry.lock +++ b/poetry.lock @@ -5067,14 +5067,14 @@ sqlalchemy = ">=1.0.0" [[package]] name = "sqlglot" -version = "11.3.0" +version = "11.3.6" description = "An easily customizable SQL parser and transpiler" category = "main" optional = false python-versions = "*" files = [ - {file = "sqlglot-11.3.0-py3-none-any.whl", hash = "sha256:a95d22c4d5de61ba3bf96414f5c000525b4345e337d4b73c9736bfd421e354e7"}, - {file = "sqlglot-11.3.0.tar.gz", hash = "sha256:5bd317d8d08c77d7459a3043fe8c4fda942d64054461daa424e717e55642892e"}, + {file = "sqlglot-11.3.6-py3-none-any.whl", hash = "sha256:c16e8889faa09caa43943fa16c0735b8dddcf97f3700b9b1e681227375357aa8"}, + {file = "sqlglot-11.3.6.tar.gz", hash = "sha256:70dcaa528c9c99ef7fc328cfe0dbd7c1ae25843ea3cd5c61bd5c9fda57d1b467"}, ] [package.extras] diff --git a/requirements.txt b/requirements.txt index 7b94442..ed38d0c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -197,7 +197,7 @@ sortedcontainers==2.4.0 ; python_version >= "3.8" and python_version < "4.0" soupsieve==2.4 ; python_version >= "3.8" and python_version < "4" sqlalchemy-views==0.3.2 ; python_version >= "3.8" and python_version < "4.0" sqlalchemy==1.4.46 ; python_version >= "3.8" and python_version < "4.0" -sqlglot==11.3.0 ; python_version >= "3.8" and python_version < "4.0" +sqlglot==11.3.6 ; python_version >= "3.8" and python_version < "4.0" stack-data==0.6.2 ; python_version >= "3.8" and python_version < "4.0" termcolor==2.2.0 ; python_version >= "3.8" and python_version < "4.0" thrift-sasl==0.4.3 ; python_version >= "3.8" and python_version < "4.0"
|
|
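The fix replaces the generic upper-cased class name (`UINT8`, `UINT16`, ...) with DuckDB's own spellings for unsigned integers. A sketch of the mapping plus the usage from the new test; per that test's `xfail`, round-tripping through `ibis.to_sql` needs sqlglot >= 11.3.4:

```python
import ibis

# Each unsigned ibis type now compiles to DuckDB's own type name.
DUCKDB_UNSIGNED = {
    "uint8": "UTINYINT",
    "uint16": "USMALLINT",
    "uint32": "UINTEGER",
    "uint64": "UBIGINT",
}

t = ibis.table(dict(a="int8"), name="t")
print(ibis.to_sql(t.a.cast("uint8"), dialect="duckdb"))
# SELECT
#   CAST(t0.a AS UTINYINT) AS "Cast(a, uint8)"
# FROM t AS t0
```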
fix(core): respect `undefined` when assigning to object properties
Closes #4428
|
217ff8f7321b6ed2551df5214f9f5934bb6d8896
|
fix
|
https://github.com/mikro-orm/mikro-orm/commit/217ff8f7321b6ed2551df5214f9f5934bb6d8896
|
respect `undefined` when assigning to object properties
Closes #4428
|
diff --git a/BetterSqliteConnection.ts b/BetterSqliteConnection.ts index 9a570b1..88d7ff6 100644 --- a/BetterSqliteConnection.ts +++ b/BetterSqliteConnection.ts @@ -32,7 +32,7 @@ export class BetterSqliteConnection extends AbstractSqlConnection { } protected getKnexOptions(type: string): Knex.Config { - return Utils.merge({ + return Utils.mergeConfig({ client: type, connection: { filename: this.config.get('dbName'), diff --git a/MikroORM.ts b/MikroORM.ts index 02697db..72990a5 100644 --- a/MikroORM.ts +++ b/MikroORM.ts @@ -35,7 +35,7 @@ export class MikroORM<D extends IDatabaseDriver = IDatabaseDriver> { } let opts = options instanceof Configuration ? options.getAll() : options; - opts = Utils.merge(opts, env); + opts = Utils.mergeConfig(opts, env); await ConfigurationLoader.commonJSCompat(opts as object); if ('DRIVER' in this && !opts.driver && !opts.type) { diff --git a/Entity.ts b/Entity.ts index a07369d..26ec49f 100644 --- a/Entity.ts +++ b/Entity.ts @@ -6,7 +6,7 @@ import type { FindOptions } from '../drivers/IDatabaseDriver'; export function Entity(options: EntityOptions<any> = {}) { return function <T>(target: T & Dictionary) { const meta = MetadataStorage.getMetadataFromDecorator(target); - Utils.merge(meta, options); + Utils.mergeConfig(meta, options); meta.class = target as unknown as Constructor<T>; if (!options.abstract || meta.discriminatorColumn) { diff --git a/JavaScriptMetadataProvider.ts b/JavaScriptMetadataProvider.ts index ceb2259..78e02f2 100644 --- a/JavaScriptMetadataProvider.ts +++ b/JavaScriptMetadataProvider.ts @@ -16,7 +16,7 @@ export class JavaScriptMetadataProvider extends MetadataProvider { } }); - Utils.merge(meta, schema); + Utils.mergeConfig(meta, schema); Object.entries(meta.properties).forEach(([name, prop]) => { this.initProperty(prop, name); }); @@ -26,7 +26,7 @@ export class JavaScriptMetadataProvider extends MetadataProvider { * Re-hydrates missing attributes like `onUpdate` (functions are lost when caching to JSON) */ loadFromCache(meta: EntityMetadata, cache: EntityMetadata): void { - Utils.merge(meta, cache); + Utils.mergeConfig(meta, cache); const schema = this.getSchema(meta); Object.entries(schema.properties).forEach(([name, prop]) => { diff --git a/MetadataProvider.ts b/MetadataProvider.ts index 9ac2ab5..3d18610 100644 --- a/MetadataProvider.ts +++ b/MetadataProvider.ts @@ -28,7 +28,7 @@ export abstract class MetadataProvider { } }); - Utils.merge(meta, cache); + Utils.mergeConfig(meta, cache); } useCache(): boolean { diff --git a/Configuration.ts b/Configuration.ts index 685c27b..7671d9f 100644 --- a/Configuration.ts +++ b/Configuration.ts @@ -154,7 +154,7 @@ export class Configuration<D extends IDatabaseDriver = IDatabaseDriver> { Utils.setDynamicImportProvider(options.dynamicImportProvider); } - this.options = Utils.merge({}, Configuration.DEFAULTS, options); + this.options = Utils.mergeConfig({}, Configuration.DEFAULTS, options); this.options.baseDir = Utils.absolutePath(this.options.baseDir); if (validate) { diff --git a/ConfigurationLoader.ts b/ConfigurationLoader.ts index f8ee4fc..50bce27 100644 --- a/ConfigurationLoader.ts +++ b/ConfigurationLoader.ts @@ -40,12 +40,12 @@ export class ConfigurationLoader { const esmConfigOptions = await this.isESM() ? 
{ entityGenerator: { esmImport: true } } : {}; - return new Configuration(Utils.merge({}, esmConfigOptions, tmp, options, env), validate); + return new Configuration(Utils.mergeConfig({}, esmConfigOptions, tmp, options, env), validate); } } if (Utils.hasObjectKeys(env)) { - return new Configuration(Utils.merge({}, options, env), validate); + return new Configuration(Utils.mergeConfig({}, options, env), validate); } throw new Error(`MikroORM config file not found in ['${paths.join(`', '`)}']`); diff --git a/Utils.ts b/Utils.ts index 60abf8d..525a22f 100644 --- a/Utils.ts +++ b/Utils.ts @@ -311,6 +311,20 @@ export class Utils { * Merges all sources into the target recursively. */ static merge(target: any, ...sources: any[]): any { + return Utils._merge(target, sources, false); + } + + /** + * Merges all sources into the target recursively. Ignores `undefined` values. + */ + static mergeConfig(target: any, ...sources: any[]): any { + return Utils._merge(target, sources, true); + } + + /** + * Merges all sources into the target recursively. + */ + private static _merge(target: any, sources: any[], ignoreUndefined: boolean): any { if (!sources.length) { return target; } @@ -319,7 +333,7 @@ export class Utils { if (Utils.isObject(target) && Utils.isPlainObject(source)) { Object.entries(source).forEach(([key, value]) => { - if (typeof value === 'undefined') { + if (ignoreUndefined && typeof value === 'undefined') { return; } @@ -333,14 +347,14 @@ export class Utils { Object.assign(target, { [key]: {} }); } - Utils.merge(target[key], value); + Utils._merge(target[key], [value], ignoreUndefined); } else { Object.assign(target, { [key]: value }); } }); } - return Utils.merge(target, ...sources); + return Utils._merge(target, sources, ignoreUndefined); } static getRootEntity(metadata: IMetadataStorage, meta: EntityMetadata): EntityMetadata { diff --git a/AbstractSqlConnection.ts b/AbstractSqlConnection.ts index 68d0aa9..bd21d13 100644 --- a/AbstractSqlConnection.ts +++ b/AbstractSqlConnection.ts @@ -150,7 +150,7 @@ export abstract class AbstractSqlConnection extends Connection { } protected getKnexOptions(type: string): Knex.Config { - const config = Utils.merge({ + const config = Utils.mergeConfig({ client: type, connection: this.getConnectionOptions(), pool: this.config.get('pool'), diff --git a/MongoConnection.ts b/MongoConnection.ts index 4ec5022..378ecc4 100644 --- a/MongoConnection.ts +++ b/MongoConnection.ts @@ -123,7 +123,7 @@ export class MongoConnection extends Connection { ret.maxPoolSize = pool.max; } - return Utils.merge(ret, this.config.get('driverOptions')); + return Utils.mergeConfig(ret, this.config.get('driverOptions')); } getClientUrl(): string { diff --git a/SqliteConnection.ts b/SqliteConnection.ts index 5e3bac2..07b6ef9 100644 --- a/SqliteConnection.ts +++ b/SqliteConnection.ts @@ -31,7 +31,7 @@ export class SqliteConnection extends AbstractSqlConnection { } protected getKnexOptions(type: string): Knex.Config { - return Utils.merge({ + return Utils.mergeConfig({ client: type, connection: { filename: this.config.get('dbName'), diff --git a/Utils.test.ts b/Utils.test.ts index 2f68448..762884c 100644 --- a/Utils.test.ts +++ b/Utils.test.ts @@ -96,8 +96,9 @@ describe('Utils', () => { expect(Utils.merge({ a: 'a', b: ['c'] }, { b: [] })).toEqual({ a: 'a', b: [] }); expect(Utils.merge({ a: 'a', b: ['c'] }, { a: 'b' })).toEqual({ a: 'b', b: ['c'] }); expect(Utils.merge({ a: 'a', b: ['c'] }, { a: null })).toEqual({ a: null, b: ['c'] }); - expect(Utils.merge({ a: 'a', b: ['c'] }, { a: 
undefined })).toEqual({ a: 'a', b: ['c'] }); + expect(Utils.merge({ a: 'a', b: ['c'] }, { a: undefined })).toEqual({ a: undefined, b: ['c'] }); expect(Utils.merge('a', 'b')).toEqual('a'); + expect(Utils.mergeConfig({ a: 'a', b: ['c'] }, { a: undefined })).toEqual({ a: 'a', b: ['c'] }); // GH #4101 const source = { diff --git a/GH4428.test.ts b/GH4428.test.ts index cc1cdea..a33cac3 100644 --- a/GH4428.test.ts +++ b/GH4428.test.ts @@ -0,0 +1,244 @@ +import { Entity, JsonType, PrimaryKey, Property, Utils, wrap } from '@mikro-orm/core'; +import { MikroORM } from '@mikro-orm/better-sqlite'; + +type UnitOfMeasure = 'pcs' | 'gram'; + +interface Ingredient { + name: string; + quantity: { + units: number; + uom: UnitOfMeasure; + }; +} + +enum CookingDevice { + OVEN = 'Oven', + MICRO = 'Microwave' +} + +type CookingInstructions = { + [device in CookingDevice]?: { + degrees: number; + time: number; + } +}; + +interface Instructions { + ingredients: Ingredient[]; + cooking: CookingInstructions; + notes?: string; +} + +@Entity() +class Recipe { + + @PrimaryKey() + id!: number; + + @Property() + name!: string; + + @Property({ type: JsonType }) + instructions!: Instructions; + +} + +let orm: MikroORM; + +beforeAll(async () => { + orm = await MikroORM.init({ + entities: [Recipe], + dbName: ':memory:', + }); + await orm.schema.createSchema(); +}); + +afterAll(async () => { + await orm.close(true); +}); + +test(`GH 4428: issue updating nested props`, async () => { + const e = orm.em.create(Recipe, { + id: 1, + name: 'Pizza', + instructions: { + ingredients: [ + { + name: 'Tomato', + quantity: { + units: 1, + uom: 'pcs', + }, + }, + { + name: 'Salami', + quantity: { + units: 2, + uom: 'pcs', + }, + }, + { + name: 'Cheese', + quantity: { + units: 1, + uom: 'pcs', + }, + }, + ], + cooking: { + Oven : { + degrees: 200, + time: 12, + }, + Microwave: { + degrees: 180, + time: 15, + }, + }, + notes: 'do not cook it too long', + }, + }); + await orm.em.persistAndFlush(e); + + const e1 = await orm.em.findOneOrFail(Recipe, 1); + const updatedRecipe: Recipe = { + id: 1, + name: 'Pizza', + instructions: { + ingredients: [ + { + name: 'Tomato', + quantity: { + units: 1, + uom: 'pcs', + }, + }, + { + name: 'Salami', + quantity: { + units: 2, + uom: 'pcs', + }, + }, + { + name: 'Cheese', + quantity: { + units: 100, + uom: 'gram', + }, + }, + ], + cooking: { + Oven : { + degrees: 200, + time: 12, + }, + Microwave: undefined, + }, + // test only succeeds when providing null, or omitting the next prop + notes: undefined, + }, + }; + wrap(e1).assign(updatedRecipe); + + await orm.em.flush(); + + const reloadedRecipe = await orm.em.fork().findOneOrFail(Recipe, 1); + const finalRecipe = wrap(reloadedRecipe).toObject(); + + Utils.dropUndefinedProperties(updatedRecipe); + expect(finalRecipe).toMatchObject(updatedRecipe); +}); + +test(`GH 4428: issue updating nested props directly`, async () => { + const e = orm.em.create(Recipe, { + id: 1, + name: 'Pizza', + instructions: { + ingredients: [ + { + name: 'Tomato', + quantity: { + units: 1, + uom: 'pcs', + }, + }, + { + name: 'Salami', + quantity: { + units: 2, + uom: 'pcs', + }, + }, + { + name: 'Cheese', + quantity: { + units: 1, + uom: 'pcs', + }, + }, + ], + cooking: { + Oven : { + degrees: 200, + time: 12, + }, + Microwave: { + degrees: 180, + time: 15, + }, + }, + notes: 'do not cook it too long', + }, + }); + await orm.em.persistAndFlush(e); + + const e1 = await orm.em.findOneOrFail(Recipe, 1); + const updatedRecipe: Recipe = { + id: 1, + name: 'Pizza', + instructions: { + 
ingredients: [ + { + name: 'Tomato', + quantity: { + units: 1, + uom: 'pcs', + }, + }, + { + name: 'Salami', + quantity: { + units: 2, + uom: 'pcs', + }, + }, + { + name: 'Cheese', + quantity: { + units: 100, + uom: 'gram', + }, + }, + ], + cooking: { + Oven : { + degrees: 200, + time: 12, + }, + }, + }, + }; + e1.instructions.ingredients[2].quantity = { + units: 100, + uom: 'gram', + }; + e1.instructions.notes = undefined; + + await orm.em.flush(); + + const reloadedRecipe = await orm.em.fork().findOneOrFail(Recipe, 1); + const finalRecipe = wrap(reloadedRecipe).toObject(); + + expect(finalRecipe).toMatchObject(updatedRecipe); +});
|
|
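The mikro-orm change splits `Utils.merge`, which now writes `undefined` through, from `Utils.mergeConfig`, which keeps the old skip-`undefined` behavior for config-style merging. A Python sketch of the same semantics, using a sentinel object since Python has no `undefined` (a paraphrase of the TypeScript, not a port):

```python
UNDEFINED = object()  # stand-in for JS `undefined`


def _merge(target, source, ignore_undefined):
    """Recursively merge `source` into `target`."""
    for key, value in source.items():
        if ignore_undefined and value is UNDEFINED:
            continue  # mergeConfig: treat `undefined` as "not provided"
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            _merge(target[key], value, ignore_undefined)
        else:
            target[key] = value  # merge: `undefined` now overwrites (GH #4428)
    return target


def merge(target, *sources):
    for source in sources:
        _merge(target, source, ignore_undefined=False)
    return target


def merge_config(target, *sources):
    for source in sources:
        _merge(target, source, ignore_undefined=True)
    return target


assert merge({"a": 1, "b": 2}, {"a": UNDEFINED}) == {"a": UNDEFINED, "b": 2}
assert merge_config({"a": 1, "b": 2}, {"a": UNDEFINED}) == {"a": 1, "b": 2}
```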
fix(snowflake): snowflake now likes Tuesdays
|
1bf9d7cc530a22e466dd5e29afa7a32a42824d00
|
fix
|
https://github.com/ibis-project/ibis/commit/1bf9d7cc530a22e466dd5e29afa7a32a42824d00
|
snowflake now likes Tuesdays
|
diff --git a/registry.py b/registry.py index cbb732d..eee5c86 100644 --- a/registry.py +++ b/registry.py @@ -47,7 +47,7 @@ def _day_of_week_name(t, op): whens=[ ("Sun", "Sunday"), ("Mon", "Monday"), - ("Tues", "Tuesday"), + ("Tue", "Tuesday"), ("Wed", "Wednesday"), ("Thu", "Thursday"), ("Fri", "Friday"), diff --git a/test_temporal.py b/test_temporal.py index 9ac6427..a915019 100644 --- a/test_temporal.py +++ b/test_temporal.py @@ -710,17 +710,7 @@ def test_string_to_timestamp_tz_error(alltypes): [ param('2017-01-01', 6, 'Sunday', id="sunday"), param('2017-01-02', 0, 'Monday', id="monday"), - param( - '2017-01-03', - 1, - 'Tuesday', - id="tuesday", - marks=[ - pytest.mark.notyet( - ["snowflake"], reason="snowflake is not a fan of Tuesdays" - ) - ], - ), + param('2017-01-03', 1, 'Tuesday', id="tuesday"), param('2017-01-04', 2, 'Wednesday', id="wednesday"), param('2017-01-05', 3, 'Thursday', id="thursday"), param('2017-01-06', 4, 'Friday', id="friday"),
|
|
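Behind the whimsical message: Snowflake's `DAYNAME` function returns three-letter abbreviations ('Sun', 'Mon', ...), so the generated `CASE` mapping must key on `Tue`; the old `Tues` key never matched. The corrected mapping, written out as a plain dict for illustration:

```python
# Snowflake DAYNAME() abbreviation -> full day name.
ABBREV_TO_NAME = {
    "Sun": "Sunday",
    "Mon": "Monday",
    "Tue": "Tuesday",  # was "Tues", which never matched
    "Wed": "Wednesday",
    "Thu": "Thursday",
    "Fri": "Friday",
    "Sat": "Saturday",
}
```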
chore(canvasMask): renamed image mask plugin to canvas mask
|
505a9e721457ec4a675562c6c5465a22cb1b1d71
|
chore
|
https://github.com/tsparticles/tsparticles/commit/505a9e721457ec4a675562c6c5465a22cb1b1d71
|
renamed image mask plugin to canvas mask
|
diff --git a/.eslintignore b/.eslintignore index 47b3979..49dcf1e 100644 --- a/.eslintignore +++ b/.eslintignore diff --git a/.eslintrc.js b/.eslintrc.js index 6dd3092..84886e7 100644 --- a/.eslintrc.js +++ b/.eslintrc.js diff --git a/.npmignore b/.npmignore index 06c6b11..5834cdc 100644 --- a/.npmignore +++ b/.npmignore diff --git a/LICENSE b/LICENSE index e34672b..faf0053 100644 --- a/LICENSE +++ b/LICENSE diff --git a/README.md b/README.md index 890df68..19d03d8 100644 --- a/README.md +++ b/README.md @@ -1,67 +0,0 @@ -[](https://particles.js.org) - -# tsParticles Image Mask Plugin - -[](https://www.jsdelivr.com/package/npm/tsparticles-plugin-image-mask) -[](https://www.npmjs.com/package/tsparticles-plugin-image-mask) -[](https://www.npmjs.com/package/tsparticles-plugin-image-mask) [](https://github.com/sponsors/matteobruni) - -[tsParticles](https://github.com/matteobruni/tsparticles) plugin for particles image mask effect. - -## How to use it - -### CDN / Vanilla JS / jQuery - -The CDN/Vanilla version JS has one required file in vanilla configuration: - -Including the `tsparticles.plugin.image-mask.min.js` file will export the function to load the plugin: - -```javascript -loadImageMaskPlugin; -``` - -### Usage - -Once the scripts are loaded you can set up `tsParticles` and the plugin like this: - -```javascript -(async () => { - await loadImageMaskPlugin(tsParticles); - - await tsParticles.load("tsparticles", { - /* options */ - }); -})(); -``` - -### ESM / CommonJS - -This package is compatible also with ES or CommonJS modules, firstly this needs to be installed, like this: - -```shell -$ npm install tsparticles-plugin-image-mask -``` - -or - -```shell -$ yarn add tsparticles-plugin-image-mask -``` - -Then you need to import it in the app, like this: - -```javascript -const { tsParticles } = require("tsparticles-engine"); -const { loadImageMaskPlugin } = require("tsparticles-plugin-image-mask"); - -loadImageMaskPlugin(tsParticles); // awaitable -``` - -or - -```javascript -import { tsParticles } from "tsparticles-engine"; -import { loadImageMaskPlugin } from "tsparticles-plugin-image-mask"; - -loadImageMaskPlugin(tsParticles); // awaitable -``` diff --git a/package.dist.json b/package.dist.json index 3bd2747..655b0b3 100644 --- a/package.dist.json +++ b/package.dist.json @@ -1,12 +1,12 @@ { - "name": "tsparticles-plugin-image-mask", + "name": "tsparticles-plugin-canvas-mask", "version": "2.2.4", - "description": "tsParticles image mask plugin", + "description": "tsParticles canvas mask plugin", "homepage": "https://particles.js.org", "repository": { "type": "git", "url": "git+https://github.com/matteobruni/tsparticles.git", - "directory": "plugins/imageMask" + "directory": "plugins/canvasMask" }, "keywords": [ "front-end", @@ -71,11 +71,11 @@ "url": "https://github.com/matteobruni/tsparticles/issues" }, "main": "cjs/index.js", - "jsdelivr": "tsparticles.plugin.image-mask.min.js", - "unpkg": "tsparticles.plugin.image-mask.min.js", + "jsdelivr": "tsparticles.plugin.canvas-mask.min.js", + "unpkg": "tsparticles.plugin.canvas-mask.min.js", "module": "esm/index.js", "types": "types/index.d.ts", "dependencies": { "tsparticles-engine": "^2.2.4" } -} \\ No newline at end of file +} diff --git a/package.json b/package.json index 8a58c0f..3741cdb 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { - "name": "tsparticles-plugin-image-mask", + "name": "tsparticles-plugin-canvas-mask", "version": "2.2.4", - "description": "tsParticles image mask plugin", + "description": "tsParticles 
canvas mask plugin", "homepage": "https://particles.js.org", "scripts": { "build": "tsparticles-build", @@ -13,7 +13,7 @@ "repository": { "type": "git", "url": "git+https://github.com/matteobruni/tsparticles.git", - "directory": "plugins/imageMask" + "directory": "plugins/canvasMask" }, "keywords": [ "front-end", @@ -82,7 +82,7 @@ "types": "dist/types/index.d.ts", "typedoc": { "entryPoint": "./src/index.ts", - "displayName": "tsParticles image mask plugin", + "displayName": "tsParticles canvas mask plugin", "readmeFile": "./README.md" }, "prettier": "@tsparticles/prettier-config", diff --git a/CanvasMaskInstance.ts b/CanvasMaskInstance.ts index ba349d2..47b8979 100644 --- a/CanvasMaskInstance.ts +++ b/CanvasMaskInstance.ts @@ -1,9 +1,9 @@ import type { Container, Engine, IContainerPlugin, RecursivePartial } from "tsparticles-engine"; import { addParticlesFromCanvasPixels, getImageData } from "./utils"; -import { ImageMask } from "./Options/Classes/ImageMask"; -import type { ImageMaskOptions } from "./types"; +import { CanvasMask } from "./Options/Classes/CanvasMask"; +import type { CanvasMaskOptions } from "./types"; -export class ImageMaskInstance implements IContainerPlugin { +export class CanvasMaskInstance implements IContainerPlugin { readonly options; private readonly _container; @@ -12,10 +12,10 @@ export class ImageMaskInstance implements IContainerPlugin { constructor(container: Container, engine: Engine) { this._container = container; this._engine = engine; - this.options = new ImageMask(); + this.options = new CanvasMask(); } - async initAsync(data?: RecursivePartial<ImageMaskOptions>): Promise<void> { + async initAsync(data?: RecursivePartial<CanvasMaskOptions>): Promise<void> { const options = this.options, container = this._container; diff --git a/CanvasMask.ts b/CanvasMask.ts index 0717a4f..d5dc64d 100644 --- a/CanvasMask.ts +++ b/CanvasMask.ts @@ -1,13 +1,13 @@ import type { IOptionLoader, RecursivePartial } from "tsparticles-engine"; -import type { IImageMask } from "../Interfaces/IImageMask"; -import { ImageMaskOverride } from "./ImageMaskOverride"; -import { ImageMaskPixels } from "./ImageMaskPixels"; +import type { ICanvasMask } from "../Interfaces/ICanvasMask"; +import { CanvasMaskOverride } from "./CanvasMaskOverride"; +import { CanvasMaskPixels } from "./CanvasMaskPixels"; /** - * [[include:Options/Plugins/ImageMask.md]] - * @category Image Mask Plugin + * [[include:Options/Plugins/CanvasMask.md]] + * @category Canvas Mask Plugin */ -export class ImageMask implements IImageMask, IOptionLoader<IImageMask> { +export class CanvasMask implements ICanvasMask, IOptionLoader<ICanvasMask> { enable; override; pixels; @@ -15,13 +15,13 @@ export class ImageMask implements IImageMask, IOptionLoader<IImageMask> { src?: string; constructor() { - this.pixels = new ImageMaskPixels(); - this.override = new ImageMaskOverride(); + this.pixels = new CanvasMaskPixels(); + this.override = new CanvasMaskOverride(); this.scale = 1; this.enable = false; } - load(data?: RecursivePartial<IImageMask>): void { + load(data?: RecursivePartial<ICanvasMask>): void { if (!data) { return; } diff --git a/CanvasMaskOverride.ts b/CanvasMaskOverride.ts index 7408d09..0c3001d 100644 --- a/CanvasMaskOverride.ts +++ b/CanvasMaskOverride.ts @@ -1,7 +1,7 @@ import type { IOptionLoader, RecursivePartial } from "tsparticles-engine"; -import type { IImageMaskOverride } from "../Interfaces/IImageMaskOverride"; +import type { ICanvasMaskOverride } from "../Interfaces/ICanvasMaskOverride"; -export class 
ImageMaskOverride implements IImageMaskOverride, IOptionLoader<IImageMaskOverride> { +export class CanvasMaskOverride implements ICanvasMaskOverride, IOptionLoader<ICanvasMaskOverride> { color: boolean; opacity: boolean; @@ -10,7 +10,7 @@ export class ImageMaskOverride implements IImageMaskOverride, IOptionLoader<IIma this.opacity = false; } - load(data?: RecursivePartial<IImageMaskOverride>): void { + load(data?: RecursivePartial<ICanvasMaskOverride>): void { if (!data) { return; } diff --git a/CanvasMaskPixels.ts b/CanvasMaskPixels.ts index eda160c..e010115 100644 --- a/CanvasMaskPixels.ts +++ b/CanvasMaskPixels.ts @@ -1,7 +1,7 @@ import type { IOptionLoader, IRgba, RecursivePartial } from "tsparticles-engine"; -import type { IImageMaskPixels } from "../Interfaces/IImageMaskPixels"; +import type { ICanvasMaskPixels } from "../Interfaces/ICanvasMaskPixels"; -export class ImageMaskPixels implements IImageMaskPixels, IOptionLoader<IImageMaskPixels> { +export class CanvasMaskPixels implements ICanvasMaskPixels, IOptionLoader<ICanvasMaskPixels> { filter: (pixel: IRgba) => boolean; offset: number; @@ -10,7 +10,7 @@ export class ImageMaskPixels implements IImageMaskPixels, IOptionLoader<IImageMa this.offset = 4; } - load(data?: RecursivePartial<IImageMaskPixels> | undefined): void { + load(data?: RecursivePartial<ICanvasMaskPixels> | undefined): void { if (!data) { return; } diff --git a/ICanvasMask.ts b/ICanvasMask.ts index 5e2d22e..f32da22 100644 --- a/ICanvasMask.ts +++ b/ICanvasMask.ts @@ -0,0 +1,13 @@ +import type { ICanvasMaskOverride } from "./ICanvasMaskOverride"; +import type { ICanvasMaskPixels } from "./ICanvasMaskPixels"; + +/** + * @category Canvas Mask Plugin + */ +export interface ICanvasMask { + enable: boolean; + override: ICanvasMaskOverride; + pixels: ICanvasMaskPixels; + scale: number; + src?: string; +} diff --git a/ICanvasMaskOptions.ts b/ICanvasMaskOptions.ts index e1a2681..894fce3 100644 --- a/ICanvasMaskOptions.ts +++ b/ICanvasMaskOptions.ts @@ -0,0 +1,8 @@ +import type { ICanvasMask } from "./ICanvasMask"; + +/** + * @category Canvas Mask Plugin + */ +export interface ICanvasMaskOptions { + image: ICanvasMask; +} diff --git a/ICanvasMaskOverride.ts b/ICanvasMaskOverride.ts index 7752482..3d58b96 100644 --- a/ICanvasMaskOverride.ts +++ b/ICanvasMaskOverride.ts @@ -0,0 +1,7 @@ +/** + * @category Canvas Mask Plugin + */ +export interface ICanvasMaskOverride { + color: boolean; + opacity: boolean; +} diff --git a/ICanvasMaskPixels.ts b/ICanvasMaskPixels.ts index 75c1f49..f5f9960 100644 --- a/ICanvasMaskPixels.ts +++ b/ICanvasMaskPixels.ts @@ -1,9 +1,9 @@ import type { IRgba } from "tsparticles-engine"; /** - * @category Image Mask Plugin + * @category Canvas Mask Plugin */ -export interface IImageMaskPixels { +export interface ICanvasMaskPixels { filter: string | ((pixel: IRgba) => boolean); offset: number; } diff --git a/index.ts b/index.ts index 1b885a8..283b1fa 100644 --- a/index.ts +++ b/index.ts @@ -1,48 +0,0 @@ -import type { Container, Engine, IPlugin, Options, RecursivePartial } from "tsparticles-engine"; -import { ImageMask } from "./Options/Classes/ImageMask"; -import { ImageMaskInstance } from "./ImageMaskInstance"; -import type { ImageMaskOptions } from "./types"; - -/** - * @category Image Mask Plugin - */ -class ImageMaskPlugin implements IPlugin { - readonly id; - - private readonly _engine; - - constructor(engine: Engine) { - this.id = "imageMask"; - - this._engine = engine; - } - - getPlugin(container: Container): ImageMaskInstance { - return new 
ImageMaskInstance(container, this._engine); - } - - loadOptions(options: Options, source?: RecursivePartial<ImageMaskOptions>): void { - if (!this.needsPlugin(source)) { - return; - } - - const optionsCast = options as unknown as ImageMaskOptions; - let imageOptions = optionsCast.image as ImageMask; - - if (imageOptions?.load === undefined) { - optionsCast.image = imageOptions = new ImageMask(); - } - - imageOptions.load(source?.image); - } - - needsPlugin(options?: RecursivePartial<ImageMaskOptions>): boolean { - return options?.image?.enable ?? false; - } -} - -export async function loadImageMaskPlugin(engine: Engine): Promise<void> { - const plugin = new ImageMaskPlugin(engine); - - await engine.addPlugin(plugin); -} diff --git a/types.ts b/types.ts index 114c44a..149feac 100644 --- a/types.ts +++ b/types.ts @@ -1,4 +0,0 @@ -import type { IImageMaskOptions } from "./Options/Interfaces/IImageMaskOptions"; -import type { IOptions } from "tsparticles-engine"; - -export type ImageMaskOptions = IOptions & IImageMaskOptions; diff --git a/utils.ts b/utils.ts index 91e64cc..09c0cf1 100644 --- a/utils.ts +++ b/utils.ts @@ -1,5 +1,5 @@ import type { Container, IDimension, IParticlesOptions, IRgba, RecursivePartial } from "tsparticles-engine"; -import type { IImageMaskOverride } from "./Options/Interfaces/IImageMaskOverride"; +import type { ICanvasMaskOverride } from "./Options/Interfaces/ICanvasMaskOverride"; export type CanvasPixelData = { height: number; @@ -21,7 +21,7 @@ export function addParticlesFromCanvasPixels( container: Container, data: CanvasPixelData, scale: number, - override: IImageMaskOverride, + override: ICanvasMaskOverride, filter: (pixel: IRgba) => boolean ): void { const height = data.height, diff --git a/tsconfig.base.json b/tsconfig.base.json index 7e1e3e3..094fb97 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json diff --git a/tsconfig.browser.json b/tsconfig.browser.json index d5e5ac4..19ce755 100644 --- a/tsconfig.browser.json +++ b/tsconfig.browser.json diff --git a/tsconfig.json b/tsconfig.json index 9873fbd..3983af1 100644 --- a/tsconfig.json +++ b/tsconfig.json diff --git a/tsconfig.module.json b/tsconfig.module.json index 7741c7e..c5e5321 100644 --- a/tsconfig.module.json +++ b/tsconfig.module.json diff --git a/tsconfig.types.json b/tsconfig.types.json index f86aaa8..9b2f257 100644 --- a/tsconfig.types.json +++ b/tsconfig.types.json diff --git a/tsconfig.umd.json b/tsconfig.umd.json index 5852946..4f93a82 100644 --- a/tsconfig.umd.json +++ b/tsconfig.umd.json diff --git a/typedoc.json b/typedoc.json index e338ba3..f4851ea 100644 --- a/typedoc.json +++ b/typedoc.json diff --git a/webpack.config.js b/webpack.config.js index fc34bac..ebc4711 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -1,4 +1,4 @@ const {loadParticlesPlugin} = require("webpack-tsparticles-plugin"); const version = require("./package.json").version; -module.exports = loadParticlesPlugin("image-mask", "Image Mask", version, __dirname); +module.exports = loadParticlesPlugin("canvas-mask", "Canvas Mask", version, __dirname); diff --git a/IImageMask.ts b/IImageMask.ts index ff4ddd9..76f8864 100644 --- a/IImageMask.ts +++ b/IImageMask.ts @@ -1,13 +0,0 @@ -import type { IImageMaskOverride } from "./IImageMaskOverride"; -import type { IImageMaskPixels } from "./IImageMaskPixels"; - -/** - * @category Image Mask Plugin - */ -export interface IImageMask { - enable: boolean; - override: IImageMaskOverride; - pixels: IImageMaskPixels; - scale: number; - src?: string; -} diff --git 
a/IImageMaskOptions.ts b/IImageMaskOptions.ts index 641d657..dcdcf94 100644 --- a/IImageMaskOptions.ts +++ b/IImageMaskOptions.ts @@ -1,8 +0,0 @@ -import type { IImageMask } from "./IImageMask"; - -/** - * @category Image Mask Plugin - */ -export interface IImageMaskOptions { - image: IImageMask; -} diff --git a/IImageMaskOverride.ts b/IImageMaskOverride.ts index e35a97a..1db85c4 100644 --- a/IImageMaskOverride.ts +++ b/IImageMaskOverride.ts @@ -1,7 +0,0 @@ -/** - * @category Image Mask Plugin - */ -export interface IImageMaskOverride { - color: boolean; - opacity: boolean; -}
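The rename itself is mechanical, but the new option shape is fully visible in the `ICanvasMask` and `ICanvasMaskOptions` interfaces above. A minimal sketch of what a consumer would pass after the rename, with the types inlined so the snippet stands alone (the `src` path is hypothetical, and the renamed loader export is not part of this diff):

```typescript
// Option shape per `ICanvasMaskOptions` (top-level `image` key) and
// `ICanvasMask` (enable/override/pixels/scale/src); defaults taken from
// the `CanvasMask` and `CanvasMaskPixels` constructors in the diff.
interface Rgba { r: number; g: number; b: number; a: number; }

const canvasMaskOptions = {
  image: {
    enable: true,
    scale: 1,                               // constructor default
    src: "mask.png",                        // hypothetical asset path
    override: { color: false, opacity: false },
    pixels: {
      offset: 4,                            // constructor default
      filter: (pixel: Rgba) => pixel.a > 0, // keep non-transparent pixels
    },
  },
};
```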
|
|
refactor(sqlalchemy): centralize reduction compilation
|
505352b03e3d28fcc602e659c3a7aca30423b6ad
|
refactor
|
https://github.com/rohankumardubey/ibis/commit/505352b03e3d28fcc602e659c3a7aca30423b6ad
|
centralize reduction compilation
|
diff --git a/translator.py b/translator.py index e295404..3e084c8 100644 --- a/translator.py +++ b/translator.py @@ -1,4 +1,7 @@ +from __future__ import annotations + import ibis +import ibis.expr.datatypes as dt import ibis.expr.operations as ops import ibis.expr.types as ir from ibis import util @@ -40,12 +43,39 @@ class AlchemyExprTranslator(ExprTranslator): context_class = AlchemyContext + _bool_aggs_need_cast_to_int32 = True + _boolean_cast_ops = ops.Sum, ops.Mean, ops.Min, ops.Max + _has_filter_syntax = False + def name(self, translated, name, force=True): return translated.label(name) def get_sqla_type(self, data_type): return to_sqla_type(data_type, type_map=self._type_map) + def _reduction(self, sa_func, expr): + op = expr.op() + arg = op.arg + if ( + self._bool_aggs_need_cast_to_int32 + and isinstance(op, self._boolean_cast_ops) + and isinstance( + type := arg.type(), + dt.Boolean, + ) + ): + arg = arg.cast(dt.Int32(nullable=type.nullable)) + + if (where := op.where) is not None: + if self._has_filter_syntax: + sa_arg = self.translate(arg).filter(self.translate(where)) + else: + sa_arg = self.translate(where.ifelse(arg, None)) + else: + sa_arg = self.translate(arg) + + return sa_func(sa_arg) + rewrites = AlchemyExprTranslator.rewrites diff --git a/compiler.py b/compiler.py index 824379b..124be9d 100644 --- a/compiler.py +++ b/compiler.py @@ -27,6 +27,7 @@ class PostgreSQLExprTranslator(AlchemyExprTranslator): dt.Float64: postgresql.DOUBLE_PRECISION, } ) + _has_filter_syntax = True rewrites = PostgreSQLExprTranslator.rewrites
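Two details of the centralized `_reduction` are worth spelling out: boolean arguments are cast to int32 before `Sum`/`Mean`/`Min`/`Max` on backends that need it, and filtered aggregates compile to either native `FILTER` syntax or a portable `CASE WHEN` rewrite. A minimal TypeScript transposition of that second dispatch (the real code is Python + SQLAlchemy; names here are illustrative):

```typescript
// Backends with `_has_filter_syntax = True` (PostgreSQL in this diff) get
// the native form; everyone else gets the CASE rewrite, which is
// equivalent because aggregates skip NULLs and CASE without ELSE is NULL.
function compileFilteredAgg(
  fn: string,
  arg: string,
  where: string | null,
  hasFilterSyntax: boolean,
): string {
  if (where === null) {
    return `${fn}(${arg})`;
  }
  return hasFilterSyntax
    ? `${fn}(${arg}) FILTER (WHERE ${where})`
    : `${fn}(CASE WHEN ${where} THEN ${arg} END)`;
}

console.log(compileFilteredAgg("sum", "t.x", "t.y > 0", true));
// sum(t.x) FILTER (WHERE t.y > 0)
console.log(compileFilteredAgg("sum", "t.x", "t.y > 0", false));
// sum(CASE WHEN t.y > 0 THEN t.x END)
```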
|
|
fix(core): lock entities in `flush()` to get around race conditions with `Promise.all`
This should fix a flaky test for #2934. It should also help with random
`Transaction query already completed` issues.
Related: #3383
|
b62799a2ee4e0b1dc57207c4fe2700a70e3eb0dc
|
fix
|
https://github.com/mikro-orm/mikro-orm/commit/b62799a2ee4e0b1dc57207c4fe2700a70e3eb0dc
|
lock entities in `flush()` to get around race conditions with `Promise.all`
This should fix a flaky test for #2934. It should also help with random
`Transaction query already completed` issues.
Related: #3383
|
diff --git a/EntityManager.ts b/EntityManager.ts index 187606f..a4a1422 100644 --- a/EntityManager.ts +++ b/EntityManager.ts @@ -454,7 +454,7 @@ export class EntityManager<D extends IDatabaseDriver = IDatabaseDriver> { return fork.getConnection().transactional(async trx => { fork.transactionContext = trx; fork.eventManager.registerSubscriber({ - afterFlush: async (args: FlushEventArgs) => { + afterFlush(args: FlushEventArgs) { args.uow.getChangeSets() .filter(cs => [ChangeSetType.DELETE, ChangeSetType.DELETE_EARLY].includes(cs.type)) .forEach(cs => em.unitOfWork.unsetIdentity(cs.entity)); diff --git a/EntityFactory.ts b/EntityFactory.ts index 9fe18dd..c6e739a 100644 --- a/EntityFactory.ts +++ b/EntityFactory.ts @@ -52,14 +52,17 @@ export class EntityFactory { const exists = this.findEntity<T>(data, meta2, options); if (exists && exists.__helper!.__initialized && !options.refresh) { + exists.__helper!.__processing = true; exists.__helper!.__initialized = options.initialized; this.mergeData(meta2, exists, data, options); + exists.__helper!.__processing = false; return exists as New<T, P>; } data = { ...data }; const entity = exists ?? this.createEntity<T>(data, meta2, options); + entity.__helper!.__processing = true; entity.__helper!.__initialized = options.initialized; this.hydrate(entity, meta2, data, options); entity.__helper!.__touched = false; @@ -80,6 +83,8 @@ export class EntityFactory { this.eventManager.dispatchEvent(EventType.onInit, { entity, em: this.em }); } + entity.__helper!.__processing = false; + return entity as New<T, P>; } @@ -204,6 +209,7 @@ export class EntityFactory { // creates new entity instance, bypassing constructor call as its already persisted entity const entity = Object.create(meta.class.prototype) as T; entity.__helper!.__managed = true; + entity.__helper!.__processing = !meta.embeddable && !meta.virtual; entity.__helper!.__schema = this.driver.getSchemaName(meta, options); if (options.merge && !options.newEntity) { diff --git a/WrappedEntity.ts b/WrappedEntity.ts index a91931d..f6c85bb 100644 --- a/WrappedEntity.ts +++ b/WrappedEntity.ts @@ -25,6 +25,7 @@ export class WrappedEntity<T extends AnyEntity<T>, PK extends keyof T> { __serializationContext: { root?: SerializationContext<T>; populate?: PopulateOptions<T>[] } = {}; __loadedProperties = new Set<string>(); __data: Dictionary = {}; + __processing = false; /** holds last entity data snapshot so we can compute changes when persisting managed entities */ __originalEntityData?: EntityData<T>; diff --git a/EventSubscriber.ts b/EventSubscriber.ts index ff833ee..311ca5a 100644 --- a/EventSubscriber.ts +++ b/EventSubscriber.ts @@ -21,21 +21,21 @@ export interface TransactionEventArgs extends Omit<EventArgs<unknown>, 'entity' export interface EventSubscriber<T = any> { getSubscribedEntities?(): EntityName<T>[]; onInit?(args: EventArgs<T>): void; - onLoad?(args: EventArgs<T>): Promise<void>; - beforeCreate?(args: EventArgs<T>): Promise<void>; - afterCreate?(args: EventArgs<T>): Promise<void>; - beforeUpdate?(args: EventArgs<T>): Promise<void>; - afterUpdate?(args: EventArgs<T>): Promise<void>; - beforeDelete?(args: EventArgs<T>): Promise<void>; - afterDelete?(args: EventArgs<T>): Promise<void>; - beforeFlush?(args: FlushEventArgs): Promise<void>; - onFlush?(args: FlushEventArgs): Promise<void>; - afterFlush?(args: FlushEventArgs): Promise<void>; + onLoad?(args: EventArgs<T>): void | Promise<void>; + beforeCreate?(args: EventArgs<T>): void | Promise<void>; + afterCreate?(args: EventArgs<T>): void | Promise<void>; 
+ beforeUpdate?(args: EventArgs<T>): void | Promise<void>; + afterUpdate?(args: EventArgs<T>): void | Promise<void>; + beforeDelete?(args: EventArgs<T>): void | Promise<void>; + afterDelete?(args: EventArgs<T>): void | Promise<void>; + beforeFlush?(args: FlushEventArgs): void | Promise<void>; + onFlush?(args: FlushEventArgs): void | Promise<void>; + afterFlush?(args: FlushEventArgs): void | Promise<void>; - beforeTransactionStart?(args: TransactionEventArgs): Promise<void>; - afterTransactionStart?(args: TransactionEventArgs): Promise<void>; - beforeTransactionCommit?(args: TransactionEventArgs): Promise<void>; - afterTransactionCommit?(args: TransactionEventArgs): Promise<void>; - beforeTransactionRollback?(args: TransactionEventArgs): Promise<void>; - afterTransactionRollback?(args: TransactionEventArgs): Promise<void>; + beforeTransactionStart?(args: TransactionEventArgs): void | Promise<void>; + afterTransactionStart?(args: TransactionEventArgs): void | Promise<void>; + beforeTransactionCommit?(args: TransactionEventArgs): void | Promise<void>; + afterTransactionCommit?(args: TransactionEventArgs): void | Promise<void>; + beforeTransactionRollback?(args: TransactionEventArgs): void | Promise<void>; + afterTransactionRollback?(args: TransactionEventArgs): void | Promise<void>; } diff --git a/typings.ts b/typings.ts index 8a66f23..6c69c85 100644 --- a/typings.ts +++ b/typings.ts @@ -126,6 +126,7 @@ export interface IWrappedEntityInternal<T, PK extends keyof T | unknown = Primar __loadedProperties: Set<string>; __identifier?: EntityIdentifier; __managed: boolean; + __processing: boolean; __schema?: string; __populated: boolean; __onLoadFired: boolean; diff --git a/UnitOfWork.ts b/UnitOfWork.ts index bd20841..301cf0e 100644 --- a/UnitOfWork.ts +++ b/UnitOfWork.ts @@ -314,6 +314,9 @@ export class UnitOfWork { try { await this.eventManager.dispatchEvent(EventType.beforeFlush, { em: this.em, uow: this }); this.computeChangeSets(); + this.changeSets.forEach(cs => { + cs.entity.__helper.__processing = true; + }); await this.eventManager.dispatchEvent(EventType.onFlush, { em: this.em, uow: this }); // nothing to do, do not start transaction @@ -335,8 +338,9 @@ export class UnitOfWork { } this.resetTransaction(oldTx); - // To allow working with the UoW in after flush handlers we need to unset the `working` flag early. - this.working = false; + this.changeSets.forEach(cs => { + cs.entity.__helper.__processing = false; + }); // To allow flushing via `Promise.all()` while still supporting queries inside after flush handler, // we need to run the flush hooks in a separate async context, as we need to skip flush hooks if they @@ -417,7 +421,9 @@ export class UnitOfWork { } for (const entity of this.orphanRemoveStack) { - this.removeStack.add(entity); + if (!entity.__helper!.__processing) { + this.removeStack.add(entity); + } } // Check insert stack if there are any entities matching something from delete stack. This can happen when recreating entities. 
@@ -430,6 +436,10 @@ export class UnitOfWork { } for (const entity of this.removeStack) { + if (entity.__helper!.__processing) { + continue; + } + const deletePkHash = [entity.__helper!.getSerializedPrimaryKey(), ...this.expandUniqueProps(entity)]; let type = ChangeSetType.DELETE; @@ -478,7 +488,7 @@ export class UnitOfWork { visited.add(entity); const wrapped = entity.__helper!; - if (!wrapped.__initialized || this.removeStack.has(entity) || this.orphanRemoveStack.has(entity)) { + if (!wrapped.__initialized || entity.__helper!.__processing || this.removeStack.has(entity) || this.orphanRemoveStack.has(entity)) { return; } @@ -625,6 +635,7 @@ export class UnitOfWork { } private postCommitCleanup(): void { + this.changeSets.forEach(cs => cs.entity.__helper!.__processing = false); this.persistStack.clear(); this.removeStack.clear(); this.orphanRemoveStack.clear(); diff --git a/EntityManager.postgre.test.ts b/EntityManager.postgre.test.ts index 9e54528..44bf924 100644 --- a/EntityManager.postgre.test.ts +++ b/EntityManager.postgre.test.ts @@ -2072,12 +2072,13 @@ describe('EntityManagerPostgre', () => { }); test('GH #2934', async () => { - const users = [ - { name: 'A', email: 'A' }, - { name: 'B', email: 'B' }, - { name: 'C', email: 'C' }, - { name: 'D', email: 'D' }, - ]; + // This test used to be flaky in CI where it runs with fewer resources. To mimic this behaviour, we can run it with + // larger payload and many times in a row via turning `heavy` to `true`. + const heavy = false; // heavy mode takes around 10 minutes to complete (half a million entities, each doing select + insert) + const length = heavy ? 50 : 4; + const runs = heavy ? 10000 : 3; + + const users = Array.from({ length }).map((_, i) => ({ name: `name ${i}`, email: `email ${i}` })); async function saveUser(options: FilterQuery<Author2>): Promise<Author2> { let user = await orm.em.findOne(Author2, options); @@ -2092,8 +2093,12 @@ describe('EntityManagerPostgre', () => { return user; } - const res = await Promise.all(users.map(userData => saveUser(userData))); - res.forEach(user => expect(user.id).toBeDefined()); + for (let i = 0; i < runs; i++) { + await orm.em.nativeDelete(Author2, {}); + orm.em.clear(); + const res = await Promise.all(users.map(userData => saveUser(userData))); + res.forEach(user => expect(user.id).toBeDefined()); + } }); test('required fields validation', async () => {
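The core of the fix is a per-entity `__processing` flag that marks change sets for the duration of a flush, so a second `flush()` racing in via `Promise.all` neither recomputes nor removes entities that are mid-flight. A condensed sketch of the pattern, not the library's actual control flow:

```typescript
// `__processing` is set on every change set before `onFlush` fires and
// cleared after the transaction (and again in `postCommitCleanup`);
// a try/finally approximates that here.
interface Helper { __processing: boolean; }
interface ChangeSet { entity: { __helper: Helper } }

async function commitChangeSets(
  changeSets: ChangeSet[],
  run: () => Promise<void>,
): Promise<void> {
  changeSets.forEach(cs => (cs.entity.__helper.__processing = true));
  try {
    await run(); // inserts/updates/deletes inside the transaction
  } finally {
    changeSets.forEach(cs => (cs.entity.__helper.__processing = false));
  }
}
```

Elsewhere in the diff the same flag short-circuits `computeChangeSets` and the remove/orphan-remove stacks, which is what stops one concurrent flush from deleting an entity another flush is still persisting.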
|
|
docs(schema): improve cube and view documentation
|
ac564278363320df8d63dd896674ac1e49225bf7
|
docs
|
https://github.com/wzhiqing/cube/commit/ac564278363320df8d63dd896674ac1e49225bf7
|
improve cube and view documentation
|
diff --git a/controlling-access-to-cubes-and-views.md b/controlling-access-to-cubes-and-views.md index b5f9071..959b7d5 100644 --- a/controlling-access-to-cubes-and-views.md +++ b/controlling-access-to-cubes-and-views.md @@ -22,9 +22,7 @@ module.exports = { extendContext: (req) => { const { department } = jwtDecode(req.headers['authorization']); return { - permissions: { - finance: department === 'finance', - }, + isFinance: department === 'finance', }; }, }; @@ -51,13 +49,13 @@ cube(`Users`, { // TotalRevenuePerCustomer.js view('TotalRevenuePerCustomer', { - description: `Total revenue per customer`, - shown: COMPILE_CONTEXT.permissions['finance'], + description: `Total revenue per customer`, + shown: COMPILE_CONTEXT.permissions.isFinance, - includes: [ - Orders.totalRevenue, - Users.company, - ], + includes: [ + Orders.totalRevenue, + Users.company, + ], }); ``` diff --git a/Getting-Started-with-YAML.mdx b/Getting-Started-with-YAML.mdx index b3f54e7..96f1953 100644 --- a/Getting-Started-with-YAML.mdx +++ b/Getting-Started-with-YAML.mdx @@ -197,11 +197,11 @@ Use dot-notation to specify the property from [`COMPILE_CONTEXT` cubes: - name: Orders sql: SELECT * FROM public.orders - shown: COMPILE_CONTEXT.securityContext.property + shown: COMPILE_CONTEXT.securityContext.<PROPERTY_NAME> ``` -If referencing the security context, ensure that you use camel-case (i.e. -`securityContext`). +If referencing the [security context][ref-security-ctx], ensure that you use +camel-case (i.e. `securityContext`). ### <--{"id" : "Using context variables"}--> FILTER_PARAMS @@ -248,3 +248,4 @@ please post an issue on GitHub. [ref-schema-ref-measure-types]: /schema/reference/types-and-formats#measures-types [ref-schema-ref-preaggs]: schema/reference/pre-aggregations +[ref-security-ctx]: /security/context diff --git a/cube.mdx b/cube.mdx index 97bebc4..313ffe1 100644 --- a/cube.mdx +++ b/cube.mdx @@ -374,6 +374,24 @@ cube(`Tickets`, { }); ``` +### <--{"id" : "Parameters"}--> shown + +The `shown` property is used to manage the visibility of a cube. Some data may +be available only to specific roles within an organization or to specific +customers. In the example below, we’re making the `Orders` cube only visible to +users with finance permissions: + +```typescript +cube(`Orders`, { + sql: `SELECT * FROM orders`, + shown: COMPILE_CONTEXT.securityContext.isFinance, +}); +``` + +To learn more about using `shown` to control visibility based on security +context, read the [Controlling access to cubes and views +recipe][ref-recipe-control-access-cubes-views]. + ### <--{"id" : "Parameters"}--> sql The `sql` parameter specifies the SQL that will be used to generate a table that @@ -666,6 +684,8 @@ cube(`Users`, { [ref-config-queryrewrite]: /config#query-rewrite [ref-config-req-ctx]: /config#request-context [ref-dev-playground]: /dev-tools/dev-playground +[ref-recipe-control-access-cubes-views]: + /recipes/controlling-access-to-cubes-and-views [ref-restapi-meta]: /rest-api#v-1-meta [ref-restapi-sql]: /rest-api#v-1-sql [ref-sec-ctx]: /security/context diff --git a/view.mdx b/view.mdx index ccb3c91..804b40f 100644 --- a/view.mdx +++ b/view.mdx @@ -27,6 +27,7 @@ example below, we create a new view `ActiveUsers` which is made up of properties from the `Users` cube: ```typescript +// schema/ActiveUsers.ts view(`ActiveUsers`, { description: `14 days rolling count of active users`, includes: [ @@ -40,10 +41,10 @@ view(`ActiveUsers`, { ``` You also can reference measures and dimensions one-by-one. 
This is helpful when -there is a need to control the join path or to rename measures and dimensions -from the underlying cubes. In the example below, we create a view -`CompletedOrders` which brings in dimensions from the `Users` cube through -nested joins: +there is a need to [control the join path][ref-schema-joins-direction] or to +rename measures and dimensions from the underlying cubes. In the example below, +we create a view `CompletedOrders` which brings in dimensions from the `Users` +cube through nested joins: ```typescript view(`CompletedOrders`, { @@ -124,7 +125,15 @@ users with finance permissions: ```typescript view(`ARR`, { description: `Annual Recurring Revenue`, - shown: COMPILE_CONTEXT.permissions['finance'], + shown: COMPILE_CONTEXT.securityContext.isFinance, includes: [Revenue.arr, Revenue.date, Customers.plan], }); ``` + +To learn more about using `shown` to control visibility based on security +context, read the [Controlling access to cubes and views +recipe][ref-recipe-control-access-cubes-views]. + +[ref-recipe-control-access-cubes-views]: + /recipes/controlling-access-to-cubes-and-views +[ref-schema-joins-direction]: /schema/fundamentals/joins#directions-of-joins
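The docs converge on one pattern: a single security-context flag gates both cubes and views. Putting the two updated examples together (this is schema-file code; the `declare` lines stand in for globals Cube provides at schema compile time, so the sketch type-checks on its own):

```typescript
declare function cube(name: string, definition: object): void;
declare function view(name: string, definition: object): void;
declare const COMPILE_CONTEXT: { securityContext: Record<string, unknown> };
declare const Revenue: any, Customers: any;

// One flag, set from the security context, controls visibility of both
// the cube and the view built on top of it.
cube(`Orders`, {
  sql: `SELECT * FROM orders`,
  shown: COMPILE_CONTEXT.securityContext.isFinance,
});

view(`ARR`, {
  description: `Annual Recurring Revenue`,
  shown: COMPILE_CONTEXT.securityContext.isFinance,
  includes: [Revenue.arr, Revenue.date, Customers.plan],
});
```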
|
|
fix: VarName::hash
|
d230bb7374e0f16c7e80bdf0ecb7cda43f7c6447
|
fix
|
https://github.com/erg-lang/erg/commit/d230bb7374e0f16c7e80bdf0ecb7cda43f7c6447
|
VarName::hash
|
diff --git a/dict.rs b/dict.rs index 48f7b69..acda810 100644 --- a/dict.rs +++ b/dict.rs @@ -394,3 +394,40 @@ impl<K: Hash + Eq, V> Dict<K, V> { self.dict.entry(k) } } + +#[cfg(test)] +mod tests { + use super::*; + + use crate::str::Str; + + #[test] + fn test_dict() { + let mut dict = Dict::new(); + dict.insert(Str::from("a"), 1); + dict.insert(Str::from("b"), 2); + dict.insert(Str::from("c"), 3); + assert_eq!(dict.len(), 3); + assert_eq!(dict.get(&Str::from("a")), Some(&1)); + assert_eq!(dict.get(&Str::from("b")), Some(&2)); + assert_eq!(dict.get(&Str::from("c")), Some(&3)); + assert_eq!(dict.get(&Str::from("d")), None); + assert_eq!(dict.get("a"), Some(&1)); + assert_eq!(dict.get("b"), Some(&2)); + assert_eq!(dict.get("c"), Some(&3)); + assert_eq!(dict.get("d"), None); + assert_eq!(dict.remove(&Str::from("a")), Some(1)); + assert_eq!(dict.remove(&Str::from("a")), None); + assert_eq!(dict.len(), 2); + assert_eq!(dict.get(&Str::from("a")), None); + assert_eq!(dict.get(&Str::from("b")), Some(&2)); + assert_eq!(dict.get(&Str::from("c")), Some(&3)); + assert_eq!(dict.get(&Str::from("d")), None); + dict.clear(); + assert_eq!(dict.len(), 0); + assert_eq!(dict.get(&Str::from("a")), None); + assert_eq!(dict.get(&Str::from("b")), None); + assert_eq!(dict.get(&Str::from("c")), None); + assert_eq!(dict.get(&Str::from("d")), None); + } +} diff --git a/ast.rs b/ast.rs index 31e9963..fff47ad 100644 --- a/ast.rs +++ b/ast.rs @@ -3835,13 +3835,13 @@ impl Immutable for VarName {} impl PartialEq for VarName { fn eq(&self, other: &Self) -> bool { - self.0 == other.0 + self.0.content == other.0.content } } impl std::hash::Hash for VarName { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { - self.0.hash(state) + self.0.content.hash(state) } } @@ -6310,3 +6310,26 @@ impl InlineModule { Self { input, ast, import } } } + +#[cfg(test)] +mod tests { + use super::*; + + use erg_common::dict::Dict; + + #[test] + fn test_dict() { + let mut dict = Dict::new(); + let a = Token::new(TokenKind::Symbol, "a", 1, 1); + let a = VarName::new(a); + let a2 = Token::new(TokenKind::Symbol, "a", 2, 3); + let a2 = VarName::new(a2); + dict.insert(a.clone(), 1); + assert_eq!(dict.len(), 1); + assert_eq!(dict.get(&a2), Some(&1)); + assert_eq!(dict.get("a"), Some(&1)); + assert_eq!(dict.get(&Str::from("a")), Some(&1)); + assert_eq!(dict.remove(&a2), Some(1)); + assert_eq!(dict.remove(&a2), None); + } +}
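The fix restores the key contract hash maps rely on: if two keys compare equal, they must hash equal. Hashing by `content` alone makes two occurrences of the same identifier at different source locations hit the same entry, which is exactly what the new test asserts. A TypeScript transposition of the before/after behaviour (the real code is Rust, where `Hash` and `PartialEq` must agree for map keys):

```typescript
type VarName = { content: string; line: number; col: number };

const keyByToken = (v: VarName) => `${v.content}@${v.line}:${v.col}`; // old: location leaks into the key
const keyByContent = (v: VarName) => v.content;                       // new: name only

const a1: VarName = { content: "a", line: 1, col: 1 };
const a2: VarName = { content: "a", line: 2, col: 3 }; // same name, new location

const before = new Map([[keyByToken(a1), 1]]);
console.log(before.get(keyByToken(a2)));   // undefined — lookup misses

const after = new Map([[keyByContent(a1), 1]]);
console.log(after.get(keyByContent(a2)));  // 1 — mirrors `dict.get(&a2)` in the new test
```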
|
|
chore(release): update internal dependencies to use tilde [skip ci]
|
522c3e583186c98bd6090d734fcc2fae88c520d2
|
chore
|
https://github.com/mikro-orm/mikro-orm/commit/522c3e583186c98bd6090d734fcc2fae88c520d2
|
update internal dependencies to use tilde [skip ci]
|
diff --git a/package.json b/package.json index 43168b3..0b1994c 100644 --- a/package.json +++ b/package.json @@ -58,7 +58,7 @@ "access": "public" }, "dependencies": { - "@mikro-orm/knex": "^5.7.12", + "@mikro-orm/knex": "~5.7.12", "fs-extra": "11.1.1", "sqlite3": "5.1.6", "sqlstring-sqlite": "0.1.1" diff --git a/yarn.lock b/yarn.lock index c347f83..331e09f 100644 --- a/yarn.lock +++ b/yarn.lock Binary files a/yarn.lock and b/yarn.lock differ
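The one-character change has concrete range semantics: `^` floats the minor version while `~` pins it and floats only the patch, so internal monorepo packages can no longer drift onto a newer sibling minor. Verifiable with the npm `semver` package:

```typescript
import * as semver from "semver";

console.log(semver.satisfies("5.8.0", "^5.7.12"));  // true  — caret admits 5.8.x
console.log(semver.satisfies("5.8.0", "~5.7.12"));  // false — tilde stops at 5.7.x
console.log(semver.satisfies("5.7.13", "~5.7.12")); // true  — patch bumps still flow
```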
|
|
refactor: allow cascade merge and remove on not fully init. collections
Only cascade persist needs all collection items to be initialized, while
it is correct to cascade merge and remove those items (references).
|
009df76d3177b9aeab151a785ed5cb930b77d559
|
refactor
|
https://github.com/mikro-orm/mikro-orm/commit/009df76d3177b9aeab151a785ed5cb930b77d559
|
allow cascade merge and remove on not fully init. collections
Only cascade persist needs all collection items to be initialized, while
it is correct to cascade merge and remove those items (references).
|
diff --git a/UnitOfWork.ts b/UnitOfWork.ts index 8cb1c29..963f6f0 100644 --- a/UnitOfWork.ts +++ b/UnitOfWork.ts @@ -259,8 +259,9 @@ export class UnitOfWork { } const collection = entity[prop.name as keyof T] as Collection<IEntity>; + const requireFullyInitialized = type === Cascade.PERSIST; // only cascade persist needs fully initialized items - if ([ReferenceType.ONE_TO_MANY, ReferenceType.MANY_TO_MANY].includes(prop.reference) && collection.isInitialized(true)) { + if ([ReferenceType.ONE_TO_MANY, ReferenceType.MANY_TO_MANY].includes(prop.reference) && collection.isInitialized(requireFullyInitialized)) { collection.getItems().forEach(item => this.cascade(item, type, visited)); } }
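The reasoning in the message maps directly onto the changed condition: persisting must write each item's full data, so it needs every item loaded, while merge and remove only need identities, which partially loaded references already carry. A stand-in sketch of that dispatch (illustrative types, not mikro-orm's actual classes):

```typescript
enum Cascade { PERSIST = "persist", MERGE = "merge", REMOVE = "remove" }

interface Coll<T> {
  // with `fully = true` every item entity must be loaded too,
  // not just the collection itself
  isInitialized(fully: boolean): boolean;
  getItems(): T[];
}

function cascadeCollection<T>(col: Coll<T>, type: Cascade, visit: (item: T) => void): void {
  const requireFullyInitialized = type === Cascade.PERSIST; // only persist reads full item data
  if (col.isInitialized(requireFullyInitialized)) {
    col.getItems().forEach(visit);
  }
}
```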
|
|
fix(core): implement joined filters via `populateFilter` option, separately from `populateWhere` (#6003)
#5893 introduced the concept of nested inner joins, used for the
`populateWhere` conditions when the parent is a to-many relation. This
ensures the collection items are discarded if they conflict with the
child condition. While it makes sense for the `populateWhere` to behave
this way, we also use this option internally for filters on joined
relations, and there we can't just discard items because the children
are not correct.
This PR introduces new `populateFilter` option, which is now used for
the joined filters and will produce simple `left join`s, unlike the
`populateWhere` option.
|
9d0a0227e474d0bcdf5e4757d2be13595df669c7
|
fix
|
https://github.com/mikro-orm/mikro-orm/commit/9d0a0227e474d0bcdf5e4757d2be13595df669c7
|
implement joined filters via `populateFilter` option, separately from `populateWhere` (#6003)
#5893 introduced the concept of nested inner joins, used for the
`populateWhere` conditions when the parent is a to-many relation. This
ensures the collection items are discarded if they conflict with the
child condition. While it makes sense for the `populateWhere` to behave
this way, we also use this option internally for filters on joined
relations, and there we can't just discard items because the children
are not correct.
This PR introduces new `populateFilter` option, which is now used for
the joined filters and will produce simple `left join`s, unlike the
`populateWhere` option.
|
diff --git a/EntityManager.ts b/EntityManager.ts index e522a58..2668d52 100644 --- a/EntityManager.ts +++ b/EntityManager.ts @@ -238,7 +238,8 @@ export class EntityManager<Driver extends IDatabaseDriver = IDatabaseDriver> { options = { ...options }; // save the original hint value so we know it was infer/all (options as Dictionary)._populateWhere = options.populateWhere ?? this.config.get('populateWhere'); - options.populateWhere = await this.applyJoinedFilters(meta, { ...where } as ObjectQuery<Entity>, options); + options.populateWhere = this.createPopulateWhere({ ...where } as ObjectQuery<Entity>, options); + options.populateFilter = await this.getJoinedFilters(meta, { ...where } as ObjectQuery<Entity>, options); const results = await em.driver.find(entityName, where, { ctx: em.transactionContext, ...options }); if (results.length === 0) { @@ -423,7 +424,7 @@ export class EntityManager<Driver extends IDatabaseDriver = IDatabaseDriver> { return where; } - protected async applyJoinedFilters<Entity extends object>(meta: EntityMetadata<Entity>, cond: ObjectQuery<Entity>, options: FindOptions<Entity, any, any, any> | FindOneOptions<Entity, any, any, any>): Promise<ObjectQuery<Entity>> { + protected createPopulateWhere<Entity extends object>(cond: ObjectQuery<Entity>, options: FindOptions<Entity, any, any, any> | FindOneOptions<Entity, any, any, any>): ObjectQuery<Entity> { const ret = {} as ObjectQuery<Entity>; const populateWhere = options.populateWhere ?? this.config.get('populateWhere'); @@ -433,6 +434,12 @@ export class EntityManager<Driver extends IDatabaseDriver = IDatabaseDriver> { Utils.merge(ret, populateWhere); } + return ret; + } + + protected async getJoinedFilters<Entity extends object>(meta: EntityMetadata<Entity>, cond: ObjectQuery<Entity>, options: FindOptions<Entity, any, any, any> | FindOneOptions<Entity, any, any, any>): Promise<ObjectQuery<Entity>> { + const ret = {} as ObjectQuery<Entity>; + if (options.populate) { for (const hint of (options.populate as unknown as PopulateOptions<Entity>[])) { const field = hint.field.split(':')[0] as EntityKey<Entity>; @@ -444,7 +451,7 @@ export class EntityManager<Driver extends IDatabaseDriver = IDatabaseDriver> { } const where = await this.applyFilters<Entity>(prop.type, {}, options.filters ?? {}, 'read', { ...options, populate: hint.children }); - const where2 = await this.applyJoinedFilters<Entity>(prop.targetMeta!, {} as ObjectQuery<Entity>, { ...options, populate: hint.children as any, populateWhere: PopulateHint.ALL }); + const where2 = await this.getJoinedFilters<Entity>(prop.targetMeta!, {} as ObjectQuery<Entity>, { ...options, populate: hint.children as any, populateWhere: PopulateHint.ALL }); if (Utils.hasObjectKeys(where!)) { ret[field] = ret[field] ? { $and: [where, ret[field]] } : where as any; @@ -770,7 +777,8 @@ export class EntityManager<Driver extends IDatabaseDriver = IDatabaseDriver> { options = { ...options }; // save the original hint value so we know it was infer/all (options as Dictionary)._populateWhere = options.populateWhere ?? 
this.config.get('populateWhere'); - options.populateWhere = await this.applyJoinedFilters(meta, { ...where } as ObjectQuery<Entity>, options); + options.populateWhere = this.createPopulateWhere({ ...where } as ObjectQuery<Entity>, options); + options.populateFilter = await this.getJoinedFilters(meta, { ...where } as ObjectQuery<Entity>, options); const data = await em.driver.findOne<Entity, Hint, Fields, Excludes>(entityName, where, { ctx: em.transactionContext, ...options, diff --git a/IDatabaseDriver.ts b/IDatabaseDriver.ts index f5b06b2..e5f6134 100644 --- a/IDatabaseDriver.ts +++ b/IDatabaseDriver.ts @@ -118,22 +118,59 @@ export interface FindOptions< Fields extends string = PopulatePath.ALL, Excludes extends string = never, > extends LoadHint<Entity, Hint, Fields, Excludes> { + /** + * Where condition for populated relations. This will have no effect on the root entity. + * With `select-in` strategy, this is applied only to the populate queries. + * With `joined` strategy, those are applied as `join on` conditions. + * When you use a nested condition on a to-many relation, it will produce a nested inner join, + * discarding the collection items based on the child condition. + */ populateWhere?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`; + + /** + * Filter condition for populated relations. This is similar to `populateWhere`, but will produce a `left join` + * when nesting the condition. This is used for implementation of joined filters. + */ + populateFilter?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`; + + /** Used for ordering of the populate queries. If not specified, the value of `options.orderBy` is used. */ populateOrderBy?: OrderDefinition<Entity>; + + /** Ordering of the results.Can be an object or array of objects, keys are property names, values are ordering (asc/desc) */ orderBy?: OrderDefinition<Entity>; + + /** Control result caching for this query. Result cache is by default disabled, not to be confused with the identity map. */ cache?: boolean | number | [string, number]; + + /** + * Limit the number of returned results. If you try to use limit/offset on a query that joins a to-many relation, pagination mechanism + * will be triggered, resulting in a subquery condition, to apply this limit only to the root entities + * instead of the cartesian product you get from a database in this case. + */ limit?: number; + + /** + * Sets the offset. If you try to use limit/offset on a query that joins a to-many relation, pagination mechanism + * will be triggered, resulting in a subquery condition, to apply this limit only to the root entities + * instead of the cartesian product you get from a database in this case. + */ offset?: number; + /** Fetch items `before` this cursor. */ before?: string | { startCursor: string | null } | FilterObject<Entity>; + /** Fetch items `after` this cursor. */ after?: string | { endCursor: string | null } | FilterObject<Entity>; + /** Fetch `first` N items. */ first?: number; + /** Fetch `last` N items. */ last?: number; + /** Fetch one more item than `first`/`last`, enabled automatically in `em.findByCursor` to check if there is a next page. 
*/ overfetch?: boolean; + refresh?: boolean; convertCustomTypes?: boolean; disableIdentityMap?: boolean; diff --git a/AbstractSqlDriver.ts b/AbstractSqlDriver.ts index 603411c..dba0ab5 100644 --- a/AbstractSqlDriver.ts +++ b/AbstractSqlDriver.ts @@ -112,7 +112,11 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection qb.__populateWhere = (options as Dictionary)._populateWhere; qb.select(fields) // only add populateWhere if we are populate-joining, as this will be used to add `on` conditions - .populate(populate, joinedProps.length > 0 ? populateWhere : undefined) + .populate( + populate, + joinedProps.length > 0 ? populateWhere : undefined, + joinedProps.length > 0 ? options.populateFilter : undefined, + ) .where(where) .groupBy(options.groupBy!) .having(options.having!) diff --git a/QueryBuilder.ts b/QueryBuilder.ts index f9f6c24..e528317 100644 --- a/QueryBuilder.ts +++ b/QueryBuilder.ts @@ -161,6 +161,8 @@ export class QueryBuilder< /** @internal */ _populateWhere?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`; /** @internal */ + _populateFilter?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`; + /** @internal */ __populateWhere?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`; /** @internal */ _populateMap: Dictionary<string> = {}; @@ -627,10 +629,15 @@ export class QueryBuilder< /** * @internal */ - populate(populate: PopulateOptions<Entity>[], populateWhere?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`): this { + populate( + populate: PopulateOptions<Entity>[], + populateWhere?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`, + populateFilter?: ObjectQuery<Entity> | PopulateHint | `${PopulateHint}`, + ): this { this.ensureNotFinalized(); this._populate = populate; this._populateWhere = populateWhere; + this._populateFilter = populateFilter; return this; } @@ -1111,7 +1118,7 @@ export class QueryBuilder< // clone array/object properties const properties = [ - 'flags', '_populate', '_populateWhere', '__populateWhere', '_populateMap', '_joins', '_joinedProps', '_cond', '_data', '_orderBy', + 'flags', '_populate', '_populateWhere', '_populateFilter', '__populateWhere', '_populateMap', '_joins', '_joinedProps', '_cond', '_data', '_orderBy', '_schema', '_indexHint', '_cache', 'subQueries', 'lockMode', 'lockTables', '_groupBy', '_having', '_returning', '_comments', '_hintComments', 'rawFragments', 'aliasCounter', ]; @@ -1551,7 +1558,8 @@ export class QueryBuilder< .forEach(field => this._fields!.push(raw(field))); } - this.processPopulateWhere(); + this.processPopulateWhere(false); + this.processPopulateWhere(true); QueryHelper.processObjectParams(this._data); QueryHelper.processObjectParams(this._cond); @@ -1573,61 +1581,67 @@ export class QueryBuilder< this.finalized = true; } - private processPopulateWhere() { - if (this._populateWhere == null || this._populateWhere === PopulateHint.ALL) { + private processPopulateWhere(filter: boolean) { + const key = filter ? 
'_populateFilter' : '_populateWhere'; + + if (this[key] == null || this[key] === PopulateHint.ALL) { return; } let joins = Object.values(this._joins); - joins.forEach(join => { - join.cond_ = join.cond; - join.cond = {}; - }); - const replaceOnConditions = (cond: Dictionary, op?: string) => { - Object.keys(cond).forEach(k => { - if (Utils.isOperator(k)) { - if (Array.isArray(cond[k])) { - return cond[k].forEach((c: Dictionary) => replaceOnConditions(c, k)); - } + for (const join of joins) { + join.cond_ ??= join.cond; + // join.cond = {}; + join.cond = filter ? { ...join.cond } : {}; + } - /* istanbul ignore next */ - return replaceOnConditions(cond[k], k); + if (typeof this[key] === 'object') { + const cond = CriteriaNodeFactory + .createNode<Entity>(this.metadata, this.mainAlias.entityName, this[key]) + .process(this, { matchPopulateJoins: true, ignoreBranching: true, preferNoBranch: true }); + // there might be new joins created by processing the `populateWhere` object + joins = Object.values(this._joins); + this.mergeOnConditions(joins, cond, filter); + } + } + + private mergeOnConditions(joins: JoinOptions[], cond: Dictionary, filter: boolean, op?: string) { + for (const k of Object.keys(cond)) { + if (Utils.isOperator(k)) { + if (Array.isArray(cond[k])) { + cond[k].forEach((c: Dictionary) => this.mergeOnConditions(joins, c, filter, k)); } - const [alias] = this.helper.splitField(k as EntityKey<Entity>); - const join = joins.find(j => j.alias === alias); + /* istanbul ignore next */ + this.mergeOnConditions(joins, cond[k], filter, k); + } - if (join) { - const parentJoin = joins.find(j => j.alias === join.ownerAlias); + const [alias] = this.helper.splitField(k as EntityKey<Entity>); + const join = joins.find(j => j.alias === alias); - // https://stackoverflow.com/a/56815807/3665878 - if (parentJoin) { - const nested = (parentJoin!.nested ??= new Set()); - join.type = join.type === JoinType.innerJoin || [ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(parentJoin.prop.kind) - ? JoinType.nestedInnerJoin - : JoinType.nestedLeftJoin; - nested.add(join); - } + if (join) { + const parentJoin = joins.find(j => j.alias === join.ownerAlias); - if (join.cond[k]) { - join.cond = { [op ?? '$and']: [join.cond, { [k]: cond[k] }] }; - } else if (op === '$or') { - join.cond.$or ??= []; - join.cond.$or.push({ [k]: cond[k] }); - } else { - join.cond = { ...join.cond, [k]: cond[k] }; - } + // https://stackoverflow.com/a/56815807/3665878 + if (parentJoin && !filter) { + const nested = (parentJoin!.nested ??= new Set()); + join.type = join.type === JoinType.innerJoin || ([ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(parentJoin.prop.kind)) + ? JoinType.nestedInnerJoin + : JoinType.nestedLeftJoin; + nested.add(join); } - }); - }; - if (typeof this._populateWhere === 'object') { - const cond = CriteriaNodeFactory - .createNode<Entity>(this.metadata, this.mainAlias.entityName, this._populateWhere) - .process(this, { matchPopulateJoins: true, ignoreBranching: true, preferNoBranch: true }); - joins = Object.values(this._joins); // there might be new joins created by processing the `populateWhere` object - replaceOnConditions(cond); + if (join.cond[k]) { + /* istanbul ignore next */ + join.cond = { [op ?? 
'$and']: [join.cond, { [k]: cond[k] }] }; + } else if (op === '$or') { + join.cond.$or ??= []; + join.cond.$or.push({ [k]: cond[k] }); + } else { + join.cond = { ...join.cond, [k]: cond[k] }; + } + } } } diff --git a/EntityManager.mysql.test.ts b/EntityManager.mysql.test.ts index c8dfd34..f11f1d1 100644 --- a/EntityManager.mysql.test.ts +++ b/EntityManager.mysql.test.ts @@ -1867,7 +1867,7 @@ describe('EntityManagerMySql', () => { expect(mock.mock.calls[0][0]).toMatch('select `b0`.*, `b1`.`uuid_pk` as `b1__uuid_pk`, `b1`.`created_at` as `b1__created_at`, `b1`.`isbn` as `b1__isbn`, `b1`.`title` as `b1__title`, `b1`.`perex` as `b1__perex`, `b1`.`price` as `b1__price`, `b1`.price * 1.19 as `b1__price_taxed`, `b1`.`double` as `b1__double`, `b1`.`meta` as `b1__meta`, `b1`.`author_id` as `b1__author_id`, `b1`.`publisher_id` as `b1__publisher_id` ' + 'from `book_tag2` as `b0` ' + 'left join (`book_to_tag_unordered` as `b2` ' + - 'inner join `book2` as `b1` on `b2`.`book2_uuid_pk` = `b1`.`uuid_pk` and `b1`.`author_id` is not null and `b1`.`title` != ?) on `b0`.`id` = `b2`.`book_tag2_id` ' + + 'inner join `book2` as `b1` on `b2`.`book2_uuid_pk` = `b1`.`uuid_pk` and `b1`.`title` != ? and `b1`.`author_id` is not null) on `b0`.`id` = `b2`.`book_tag2_id` ' + 'where `b1`.`title` != ? ' + 'order by `b0`.`name` asc'); }); diff --git a/dataloader.test.ts.snap b/dataloader.test.ts.snap index 6fc9b1a..481db7b 100644 --- a/dataloader.test.ts.snap +++ b/dataloader.test.ts.snap @@ -50,7 +50,7 @@ exports[`Dataloader Collection.load 1`] = ` "[query] select \\`m0\\`.* from \\`message\\` as \\`m0\\` where (\\`m0\\`.\\`chat_owner_id\\`, \\`m0\\`.\\`chat_recipient_id\\`) in ( values (1, 2), (1, 3))", ], [ - "[query] select \\`a0\\`.*, \\`b1\\`.\\`id\\` as \\`b1__id\\`, \\`b1\\`.\\`name\\` as \\`b1__name\\`, \\`b1\\`.\\`age\\` as \\`b1__age\\`, \\`b1\\`.\\`email\\` as \\`b1__email\\` from \\`author\\` as \\`a0\\` left join (\\`author_buddies\\` as \\`a2\\` inner join \\`author\\` as \\`b1\\` on \\`a2\\`.\\`author_1_id\\` = \\`b1\\`.\\`id\\` and \\`b1\\`.\\`age\\` < 80) on \\`a0\\`.\\`id\\` = \\`a2\\`.\\`author_2_id\\` left join \\`author_buddies\\` as \\`a3\\` on \\`a0\\`.\\`id\\` = \\`a3\\`.\\`author_2_id\\` where \\`a0\\`.\\`age\\` < 80 and \\`a3\\`.\\`author_1_id\\` in (1, 2, 3)", + "[query] select \\`a0\\`.*, \\`b1\\`.\\`id\\` as \\`b1__id\\`, \\`b1\\`.\\`name\\` as \\`b1__name\\`, \\`b1\\`.\\`age\\` as \\`b1__age\\`, \\`b1\\`.\\`email\\` as \\`b1__email\\` from \\`author\\` as \\`a0\\` left join \\`author_buddies\\` as \\`a2\\` on \\`a0\\`.\\`id\\` = \\`a2\\`.\\`author_2_id\\` left join \\`author\\` as \\`b1\\` on \\`a2\\`.\\`author_1_id\\` = \\`b1\\`.\\`id\\` and \\`b1\\`.\\`age\\` < 80 left join \\`author_buddies\\` as \\`a3\\` on \\`a0\\`.\\`id\\` = \\`a3\\`.\\`author_2_id\\` where \\`a0\\`.\\`age\\` < 80 and \\`a3\\`.\\`author_1_id\\` in (1, 2, 3)", ], [ "[query] select \\`b0\\`.*, \\`a1\\`.\\`id\\` as \\`a1__id\\` from \\`book\\` as \\`b0\\` inner join \\`author\\` as \\`a1\\` on \\`b0\\`.\\`author_id\\` = \\`a1\\`.\\`id\\` and \\`a1\\`.\\`age\\` < 80 where (\\`b0\\`.\\`author_id\\` in (1, 2, 3) or \\`b0\\`.\\`publisher_id\\` in (1, 2))", @@ -105,7 +105,7 @@ exports[`Dataloader Dataloader can be globally enabled for Collections with true "[query] select \\`m0\\`.* from \\`message\\` as \\`m0\\` where (\\`m0\\`.\\`chat_owner_id\\`, \\`m0\\`.\\`chat_recipient_id\\`) in ( values (1, 2), (1, 3))", ], [ - "[query] select \\`a0\\`.*, \\`b1\\`.\\`id\\` as \\`b1__id\\`, \\`b1\\`.\\`name\\` as \\`b1__name\\`, 
\\`b1\\`.\\`age\\` as \\`b1__age\\`, \\`b1\\`.\\`email\\` as \\`b1__email\\` from \\`author\\` as \\`a0\\` left join (\\`author_buddies\\` as \\`a2\\` inner join \\`author\\` as \\`b1\\` on \\`a2\\`.\\`author_1_id\\` = \\`b1\\`.\\`id\\` and \\`b1\\`.\\`age\\` < 80) on \\`a0\\`.\\`id\\` = \\`a2\\`.\\`author_2_id\\` left join \\`author_buddies\\` as \\`a3\\` on \\`a0\\`.\\`id\\` = \\`a3\\`.\\`author_2_id\\` where \\`a0\\`.\\`age\\` < 80 and \\`a3\\`.\\`author_1_id\\` in (1, 2, 3)", + "[query] select \\`a0\\`.*, \\`b1\\`.\\`id\\` as \\`b1__id\\`, \\`b1\\`.\\`name\\` as \\`b1__name\\`, \\`b1\\`.\\`age\\` as \\`b1__age\\`, \\`b1\\`.\\`email\\` as \\`b1__email\\` from \\`author\\` as \\`a0\\` left join \\`author_buddies\\` as \\`a2\\` on \\`a0\\`.\\`id\\` = \\`a2\\`.\\`author_2_id\\` left join \\`author\\` as \\`b1\\` on \\`a2\\`.\\`author_1_id\\` = \\`b1\\`.\\`id\\` and \\`b1\\`.\\`age\\` < 80 left join \\`author_buddies\\` as \\`a3\\` on \\`a0\\`.\\`id\\` = \\`a3\\`.\\`author_2_id\\` where \\`a0\\`.\\`age\\` < 80 and \\`a3\\`.\\`author_1_id\\` in (1, 2, 3)", ], [ "[query] select \\`b0\\`.*, \\`a1\\`.\\`id\\` as \\`a1__id\\` from \\`book\\` as \\`b0\\` inner join \\`author\\` as \\`a1\\` on \\`b0\\`.\\`author_id\\` = \\`a1\\`.\\`id\\` and \\`a1\\`.\\`age\\` < 80 where (\\`b0\\`.\\`author_id\\` in (1, 2, 3) or \\`b0\\`.\\`publisher_id\\` in (1, 2))", @@ -252,7 +252,7 @@ exports[`Dataloader getColBatchLoadFn 1`] = ` "[query] select \\`m0\\`.* from \\`message\\` as \\`m0\\` where (\\`m0\\`.\\`chat_owner_id\\`, \\`m0\\`.\\`chat_recipient_id\\`) in ( values (1, 2), (1, 3))", ], [ - "[query] select \\`a0\\`.*, \\`b1\\`.\\`id\\` as \\`b1__id\\`, \\`b1\\`.\\`name\\` as \\`b1__name\\`, \\`b1\\`.\\`age\\` as \\`b1__age\\`, \\`b1\\`.\\`email\\` as \\`b1__email\\` from \\`author\\` as \\`a0\\` left join (\\`author_buddies\\` as \\`a2\\` inner join \\`author\\` as \\`b1\\` on \\`a2\\`.\\`author_1_id\\` = \\`b1\\`.\\`id\\` and \\`b1\\`.\\`age\\` < 80) on \\`a0\\`.\\`id\\` = \\`a2\\`.\\`author_2_id\\` left join \\`author_buddies\\` as \\`a3\\` on \\`a0\\`.\\`id\\` = \\`a3\\`.\\`author_2_id\\` where \\`a0\\`.\\`age\\` < 80 and \\`a3\\`.\\`author_1_id\\` in (1, 2, 3)", + "[query] select \\`a0\\`.*, \\`b1\\`.\\`id\\` as \\`b1__id\\`, \\`b1\\`.\\`name\\` as \\`b1__name\\`, \\`b1\\`.\\`age\\` as \\`b1__age\\`, \\`b1\\`.\\`email\\` as \\`b1__email\\` from \\`author\\` as \\`a0\\` left join \\`author_buddies\\` as \\`a2\\` on \\`a0\\`.\\`id\\` = \\`a2\\`.\\`author_2_id\\` left join \\`author\\` as \\`b1\\` on \\`a2\\`.\\`author_1_id\\` = \\`b1\\`.\\`id\\` and \\`b1\\`.\\`age\\` < 80 left join \\`author_buddies\\` as \\`a3\\` on \\`a0\\`.\\`id\\` = \\`a3\\`.\\`author_2_id\\` where \\`a0\\`.\\`age\\` < 80 and \\`a3\\`.\\`author_1_id\\` in (1, 2, 3)", ], [ "[query] select \\`b0\\`.*, \\`a1\\`.\\`id\\` as \\`a1__id\\` from \\`book\\` as \\`b0\\` inner join \\`author\\` as \\`a1\\` on \\`b0\\`.\\`author_id\\` = \\`a1\\`.\\`id\\` and \\`a1\\`.\\`age\\` < 80 where (\\`b0\\`.\\`author_id\\` in (1, 2, 3) or \\`b0\\`.\\`publisher_id\\` in (1, 2))", diff --git a/GH5325.test.ts b/GH5325.test.ts index 644df1f..4782a58 100644 --- a/GH5325.test.ts +++ b/GH5325.test.ts @@ -175,8 +175,8 @@ test('5325', async () => { ['[query] commit'], ['[query] select `d0`.*, `d1`.`id` as `d1__id`, `d1`.`entity_state` as `d1__entity_state`, `d1`.`drug_info_ingredients` as `d1__drug_info_ingredients`, `d1`.`clinic_id` as `d1__clinic_id` ' + 'from `drug` as `d0` ' + - 'left join (`drug_info` as `d1` left join `ingredient` as `i2` on 
json_extract(`d1`.`drug_info_ingredients`, \\'$.ingredient_id\\') = `i2`.`id` and `i2`.`entity_state` = \\'Available\\') on `d0`.`drug_info_id` = `d1`.`id` and `d1`.`entity_state` = \\'Available\\' ' + + "left join `drug_info` as `d1` on `d0`.`drug_info_id` = `d1`.`id` and `d1`.`entity_state` = 'Available' left join `ingredient` as `i2` on json_extract(`d1`.`drug_info_ingredients`, '$.ingredient_id') = `i2`.`id` and `i2`.`entity_state` = 'Available' " + 'where `d0`.`entity_state` = \\'Available\\''], - ["[query] select `i0`.* from `ingredient` as `i0` where `i0`.`entity_state` = 'Available' and `i0`.`id` in ('11', '22') and `i0`.`entity_state` = 'Available'"], + ["[query] select `i0`.* from `ingredient` as `i0` where `i0`.`entity_state` = 'Available' and `i0`.`id` in ('11', '22')"], ]); }); diff --git a/filters.postgres.test.ts b/filters.postgres.test.ts index e18b34b..fbf1f54 100644 --- a/filters.postgres.test.ts +++ b/filters.postgres.test.ts @@ -197,7 +197,9 @@ describe('filters [postgres]', () => { const e2 = await orm.em.findOneOrFail(Employee, employee.id, { populate: ['benefits.details'], strategy: 'joined' }); expect(mock.mock.calls[0][0]).toMatch('select "e0".*, "b1"."id" as "b1__id", "b1"."benefit_status" as "b1__benefit_status", "b1"."name" as "b1__name", "d3"."id" as "d3__id", "d3"."description" as "d3__description", "d3"."benefit_id" as "d3__benefit_id", "d3"."active" as "d3__active" ' + 'from "employee" as "e0" ' + - 'left join ("employee_benefits" as "e2" inner join ("public"."benefit" as "b1" left join "public"."benefit_detail" as "d3" on "b1"."id" = "d3"."benefit_id" and "d3"."active" = $1) on "e2"."benefit_id" = "b1"."id" and "b1"."benefit_status" = $2) on "e0"."id" = "e2"."employee_id" ' + + 'left join "employee_benefits" as "e2" on "e0"."id" = "e2"."employee_id" ' + + 'left join "public"."benefit" as "b1" on "e2"."benefit_id" = "b1"."id" and "b1"."benefit_status" = $1 ' + + 'left join "benefit_detail" as "d3" on "b1"."id" = "d3"."benefit_id" and "d3"."active" = $2 ' + 'where "e0"."id" = $3'); expect(e2.benefits).toHaveLength(1); expect(e2.benefits[0].details).toHaveLength(1); diff --git a/joined-strategy.postgre.test.ts b/joined-strategy.postgre.test.ts index 0e1c8b6..5cdfbdb 100644 --- a/joined-strategy.postgre.test.ts +++ b/joined-strategy.postgre.test.ts @@ -423,7 +423,8 @@ describe('Joined loading strategy', () => { '"p4"."id" as "p4__id", "p4"."name" as "p4__name", "p4"."type" as "p4__type", "p4"."type2" as "p4__type2", "p4"."enum1" as "p4__enum1", "p4"."enum2" as "p4__enum2", "p4"."enum3" as "p4__enum3", "p4"."enum4" as "p4__enum4", "p4"."enum5" as "p4__enum5", ' + '"t5"."id" as "t5__id", "t5"."name" as "t5__name", "t5"."book_uuid_pk" as "t5__book_uuid_pk", "t5"."parent_id" as "t5__parent_id", "t5"."version" as "t5__version" ' + 'from "book_tag2" as "b0" ' + - 'left join ("book2_tags" as "b2" inner join "public"."book2" as "b1" on "b2"."book2_uuid_pk" = "b1"."uuid_pk" and "b1"."author_id" is not null) on "b0"."id" = "b2"."book_tag2_id" ' + + 'left join "book2_tags" as "b2" on "b0"."id" = "b2"."book_tag2_id" ' + + 'left join "public"."book2" as "b1" on "b2"."book2_uuid_pk" = "b1"."uuid_pk" and "b1"."author_id" is not null ' + 'left join "author2" as "a3" on "b1"."author_id" = "a3"."id" ' + 'left join "publisher2" as "p4" on "b1"."publisher_id" = "p4"."id" ' + 'left join "publisher2_tests" as "p6" on "p4"."id" = "p6"."publisher2_id" ' + @@ -508,11 +509,12 @@ describe('Joined loading strategy', () => { expect(mock.mock.calls.length).toBe(1); 
expect(mock.mock.calls[0][0]).toMatch('select "b0".*, "b0".price * 1.19 as "price_taxed", ' + '"a1"."id" as "a1__id", "a1"."created_at" as "a1__created_at", "a1"."updated_at" as "a1__updated_at", "a1"."name" as "a1__name", "a1"."email" as "a1__email", "a1"."age" as "a1__age", "a1"."terms_accepted" as "a1__terms_accepted", "a1"."optional" as "a1__optional", "a1"."identities" as "a1__identities", "a1"."born" as "a1__born", "a1"."born_time" as "a1__born_time", "a1"."favourite_book_uuid_pk" as "a1__favourite_book_uuid_pk", "a1"."favourite_author_id" as "a1__favourite_author_id", "a1"."identity" as "a1__identity", ' + - '"f2"."uuid_pk" as "f2__uuid_pk", "f2"."created_at" as "f2__created_at", "f2"."isbn" as "f2__isbn", "f2"."title" as "f2__title", "f2"."price" as "f2__price", "f2".price * 1.19 as "f2__price_taxed", "f2"."double" as "f2__double", "f2"."meta" as "f2__meta", "f2"."author_id" as "f2__author_id", "f2"."publisher_id" as "f2__publisher_id", ' + - '"a3"."id" as "a3__id", "a3"."created_at" as "a3__created_at", "a3"."updated_at" as "a3__updated_at", "a3"."name" as "a3__name", "a3"."email" as "a3__email", "a3"."age" as "a3__age", "a3"."terms_accepted" as "a3__terms_accepted", "a3"."optional" as "a3__optional", "a3"."identities" as "a3__identities", "a3"."born" as "a3__born", "a3"."born_time" as "a3__born_time", "a3"."favourite_book_uuid_pk" as "a3__favourite_book_uuid_pk", "a3"."favourite_author_id" as "a3__favourite_author_id", "a3"."identity" as "a3__identity" ' + + '"f2"."uuid_pk" as "f2__uuid_pk", "f2"."created_at" as "f2__created_at", "f2"."isbn" as "f2__isbn", "f2"."title" as "f2__title", "f2"."price" as "f2__price", "f2".price * 1.19 as "f2__price_taxed", "f2"."double" as "f2__double", "f2"."meta" as "f2__meta", "f2"."author_id" as "f2__author_id", "f2"."publisher_id" as "f2__publisher_id", "a3"."id" as "a3__id", "a3"."created_at" as "a3__created_at", "a3"."updated_at" as "a3__updated_at", "a3"."name" as "a3__name", "a3"."email" as "a3__email", "a3"."age" as "a3__age", ' + + '"a3"."terms_accepted" as "a3__terms_accepted", "a3"."optional" as "a3__optional", "a3"."identities" as "a3__identities", "a3"."born" as "a3__born", "a3"."born_time" as "a3__born_time", "a3"."favourite_book_uuid_pk" as "a3__favourite_book_uuid_pk", "a3"."favourite_author_id" as "a3__favourite_author_id", "a3"."identity" as "a3__identity" ' + 'from "book2" as "b0" ' + // populateHint: all - 'left join ("author2" as "a1" left join "book2" as "f2" on "a1"."favourite_book_uuid_pk" = "f2"."uuid_pk" and "f2"."author_id" is not null) on "b0"."author_id" = "a1"."id" ' + + 'left join "author2" as "a1" on "b0"."author_id" = "a1"."id" ' + + 'left join "book2" as "f2" on "a1"."favourite_book_uuid_pk" = "f2"."uuid_pk" and "f2"."author_id" is not null ' + 'left join "author2" as "a3" on "f2"."author_id" = "a3"."id" ' + // where joins 'left join "author2" as "a4" on "b0"."author_id" = "a4"."id" ' + @@ -544,7 +546,8 @@ describe('Joined loading strategy', () => { '"f2"."uuid_pk" as "f2__uuid_pk", "f2"."created_at" as "f2__created_at", "f2"."isbn" as "f2__isbn", "f2"."title" as "f2__title", "f2"."price" as "f2__price", "f2".price * 1.19 as "f2__price_taxed", "f2"."double" as "f2__double", "f2"."meta" as "f2__meta", "f2"."author_id" as "f2__author_id", "f2"."publisher_id" as "f2__publisher_id", ' + '"a3"."id" as "a3__id", "a3"."created_at" as "a3__created_at", "a3"."updated_at" as "a3__updated_at", "a3"."name" as "a3__name", "a3"."email" as "a3__email", "a3"."age" as "a3__age", "a3"."terms_accepted" as "a3__terms_accepted", 
"a3"."optional" as "a3__optional", "a3"."identities" as "a3__identities", "a3"."born" as "a3__born", "a3"."born_time" as "a3__born_time", "a3"."favourite_book_uuid_pk" as "a3__favourite_book_uuid_pk", "a3"."favourite_author_id" as "a3__favourite_author_id", "a3"."identity" as "a3__identity" ' + 'from "book2" as "b0" ' + - 'left join ("author2" as "a1" left join ("book2" as "f2" left join "author2" as "a3" on "f2"."author_id" = "a3"."id" and "a3"."name" = $1) on "a1"."favourite_book_uuid_pk" = "f2"."uuid_pk" and "f2"."author_id" is not null) on "b0"."author_id" = "a1"."id" ' + + 'left join "author2" as "a1" on "b0"."author_id" = "a1"."id" ' + + 'left join ("book2" as "f2" left join "author2" as "a3" on "f2"."author_id" = "a3"."id" and "a3"."name" = $1) on "a1"."favourite_book_uuid_pk" = "f2"."uuid_pk" and "f2"."author_id" is not null ' + 'where "b0"."author_id" is not null and "a3"."name" = $2'); }); diff --git a/GHx16.test.ts b/GHx16.test.ts index e04a958..b8a96c5 100644 --- a/GHx16.test.ts +++ b/GHx16.test.ts @@ -0,0 +1,85 @@ +import { Collection, Entity, Filter, ManyToOne, MikroORM, OneToMany, PrimaryKey, Property } from '@mikro-orm/sqlite'; + +@Filter({ name: 'softDelete', cond: { removedAt: null }, default: true }) +@Entity() +class MicroCloud { + + @PrimaryKey() + id!: number; + + @Property({ length: 6, nullable: true }) + removedAt?: Date; + +} + +@Entity() +class DatacenterTask { + + @PrimaryKey() + id!: number; + + @OneToMany(() => DatacenterTaskDevice, x => x.datacenterTask) + datacenterTaskDevices = new Collection<DatacenterTaskDevice>(this); + +} + +@Entity() +class DatacenterTaskDevice { + + @PrimaryKey() + id!: number; + + @ManyToOne(() => DatacenterTask) + datacenterTask!: DatacenterTask; + + @ManyToOne(() => MicroCloud, { nullable: true }) + microCloud?: MicroCloud; + +} + +let orm: MikroORM; + +beforeAll(async () => { + orm = await MikroORM.init({ + entities: [DatacenterTaskDevice], + dbName: ':memory:', + populateWhere: 'infer', + forceUndefined: true, + }); + await orm.schema.createSchema(); +}); + +afterAll(() => orm.close(true)); + +test('filters and nested populate of to-many relations', async () => { + orm.em.create(DatacenterTask, { + datacenterTaskDevices: [ + { + microCloud: undefined, + }, + { + microCloud: {}, + }, + ], + }); + + await orm.em.flush(); + orm.em.clear(); + + const datacenterTask1 = await orm.em.findOneOrFail(DatacenterTask, 1, { + populate: ['datacenterTaskDevices'], + }); + + // Should be 2 devices + expect(datacenterTask1.datacenterTaskDevices).toHaveLength(2); + orm.em.clear(); + + const datacenterTask2 = await orm.em.findOneOrFail(DatacenterTask, 1, { + populate: ['datacenterTaskDevices.microCloud'], + // populateWhere: { datacenterTaskDevices: { microCloud: { removedAt: null } } }, + // populateFilter: { datacenterTaskDevices: { microCloud: { removedAt: null } } }, + }); + + // Should be 2 devices, even if some does not have (nullable field) microCloud + expect(datacenterTask2.datacenterTaskDevices).toHaveLength(2); +});
|
|
test: try to fix flaky test
|
926be5735c8144519999d1071d36b9ff40d0842f
|
test
|
https://github.com/Hardeepex/crawlee/commit/926be5735c8144519999d1071d36b9ff40d0842f
|
try to fix flaky test
|
diff --git a/snapshotter.test.js b/snapshotter.test.js index 3619aea..ba47de2 100644 --- a/snapshotter.test.js +++ b/snapshotter.test.js @@ -88,7 +88,7 @@ describe('Snapshotter', () => { }; const snapshotter = new Snapshotter(options); await snapshotter.start(); - await new Promise((resolve) => setTimeout(resolve, 500)); + await new Promise((resolve) => setTimeout(resolve, 510)); await snapshotter.stop(); // const memorySnapshots = snapshotter.getMemorySample(); const eventLoopSnapshots = snapshotter.getEventLoopSample();
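The one-character bump works around a timing race: sleeping for exactly the sampling window can stop the snapshotter before its final tick fires. A hedged sketch of the race (the 100 ms interval is an assumption for illustration, not taken from the Snapshotter source):

```python
import time

INTERVAL_S = 0.1  # hypothetical snapshot interval
RUN_S = 0.5       # sleeping exactly 5 intervals races the 5th tick

# Padding the sleep slightly past the boundary makes the final snapshot
# land reliably before stop() is called.
time.sleep(RUN_S + 0.01)
```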
|
|
fix: prevent the popup of terminal windows on any command invocation.
|
f6cd9191af9d80cd5a1c37a58d4879451c8faddc
|
fix
|
https://github.com/Byron/gitoxide/commit/f6cd9191af9d80cd5a1c37a58d4879451c8faddc
|
prevent the popup of terminal windows on any command invocation.
|
diff --git a/lib.rs b/lib.rs index 046e9c3..ea520aa 100644 --- a/lib.rs +++ b/lib.rs @@ -237,6 +237,13 @@ mod prepare { } else { Command::new(prep.command) }; + // We never want to have terminals pop-up on Windows if this runs from a GUI application. + #[cfg(windows)] + { + use std::os::windows::process::CommandExt; + const CREATE_NO_WINDOW: u32 = 0x08000000; + cmd.creation_flags(CREATE_NO_WINDOW); + } cmd.stdin(prep.stdin) .stdout(prep.stdout) .stderr(prep.stderr) @@ -401,6 +408,8 @@ pub mod shebang { /// - `stdout` is captured for consumption by the caller /// - `stderr` is inherited to allow the command to provide context to the user /// +/// On Windows, terminal Windows will be suppressed automatically. +/// /// ### Warning /// /// When using this method, be sure that the invoked program doesn't rely on the current working dir and/or
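For readers more familiar with Python, the standard library exposes the same Win32 flag; a rough equivalent of the suppression, guarded by a platform check since the attribute only exists on Windows (the invoked command here is illustrative):

```python
import subprocess
import sys

kwargs = {}
if sys.platform == "win32":
    # The same 0x08000000 flag the Rust change passes via creation_flags.
    kwargs["creationflags"] = subprocess.CREATE_NO_WINDOW

# Invoke a command without a console window popping up from a GUI app.
subprocess.run(["git", "--version"], capture_output=True, **kwargs)
```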
|
|
feat(api): add API for unwrapping JSON values into backend-native values (#8958)
Adds `str`, `int`, `float`, and `bool` properties to `JSONValue` as well
as an `unwrap_as` method for easier programmatic usage and more
fine-grained casting.
Unless someone really hates the static property names, I'd prefer to
keep them as they are. Open to alternative names for `unwrap_as` though.
In theory this can all be done with casting, but if you look at what's
being done in the various backends it's typically a lot more involved
than that. Trino in particular requires queries over JSON to be
`VARCHAR` inputs, which then have to be cast **back** to its `JSON` type
to be able to cast _that_ to the desired output type.
Complicating the cast branching _just_ for the `JSON -> not JSON` case
seemed like the wrong tradeoff.
I went with these names to match the `map` and `array` APIs, and to
match the short type names we have for the specific types (`str`, `int`,
`float`, and `bool`), which exist to match the equivalent Python types.
|
aebb5cfa8d683903495efc1a1aa849335b1a2cf7
|
feat
|
https://github.com/ibis-project/ibis/commit/aebb5cfa8d683903495efc1a1aa849335b1a2cf7
|
add API for unwrapping JSON values into backend-native values (#8958)
Adds `str`, `int`, `float`, and `bool` properties to `JSONValue` as well
as an `unwrap_as` method for easier programmatic usage and more
fine-grained casting.
Unless someone really hates the static property names, I'd prefer to
keep them as they are. Open to alternative names for `unwrap_as` though.
In theory this can all be done with casting, but if you look at what's
being done in the various backends it's typically a lot more involved
than that. Trino in particular requires queries over JSON to be
`VARCHAR` inputs, which then have to be cast **back** to its `JSON` type
to be able to cast _that_ to the desired output type.
Complicating the cast branching _just_ for the `JSON -> not JSON` case
seemed like the wrong tradeoff.
I went with these names to match the `map` and `array` APIs, and to
match the short type names we have for the specific types (`str`, `int`,
`float`, and `bool`), which exist to match the equivalent Python types.
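A minimal sketch of how the new API reads in practice (the table and column values here are illustrative, not taken from the change):

```python
import ibis

# A JSON column holding values of mixed JSON types.
t = ibis.memtable(
    {"js": ['"a"', "1", "37.37", "true", "{}"]},
    schema={"js": "json"},
)

# Each property unwraps into a backend-native value, yielding NULL on
# a type mismatch instead of raising.
t.select(
    s=t.js.str,    # "a",  NULL, NULL,  NULL, NULL
    i=t.js.int,    # NULL, 1,    NULL,  NULL, NULL
    f=t.js.float,  # NULL, 1.0,  37.37, NULL, NULL  (lax about integers)
    b=t.js.bool,   # NULL, NULL, NULL,  True, NULL
)

# `unwrap_as` dispatches to the same properties, adding a cast when a
# narrower type is requested.
expr = t.js.unwrap_as("float32")
```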
|
diff --git a/bigquery.sql b/bigquery.sql index 6af257f..ed0db15 100644 --- a/bigquery.sql +++ b/bigquery.sql @@ -65,7 +65,15 @@ INSERT INTO {dataset}.json_t VALUES (JSON '{{"a":"foo", "c":null}}'), (JSON 'null'), (JSON '[42,47,55]'), - (JSON '[]'); + (JSON '[]'), + (JSON '"a"'), + (JSON '""'), + (JSON '"b"'), + (NULL), + (JSON 'true'), + (JSON 'false'), + (JSON '42'), + (JSON '37.37'); LOAD DATA OVERWRITE {dataset}.functional_alltypes ( diff --git a/duckdb.sql b/duckdb.sql index 3bc850d..561afc9 100644 --- a/duckdb.sql +++ b/duckdb.sql @@ -29,7 +29,7 @@ INSERT INTO struct VALUES (NULL), ({'a': 3.0, 'b': 'orange', 'c': NULL}); -CREATE OR REPLACE TABLE json_t (js TEXT); +CREATE OR REPLACE TABLE json_t (js JSON); INSERT INTO json_t VALUES ('{"a": [1,2,3,4], "b": 1}'), @@ -37,7 +37,15 @@ INSERT INTO json_t VALUES ('{"a":"foo", "c":null}'), ('null'), ('[42,47,55]'), - ('[]'); + ('[]'), + ('"a"'), + ('""'), + ('"b"'), + (NULL), + ('true'), + ('false'), + ('42'), + ('37.37'); CREATE OR REPLACE TABLE win (g TEXT, x BIGINT NOT NULL, y BIGINT); INSERT INTO win VALUES diff --git a/mysql.sql b/mysql.sql index acf36de..72f1d3b 100644 --- a/mysql.sql +++ b/mysql.sql @@ -108,7 +108,15 @@ INSERT INTO json_t VALUES ('{"a":"foo", "c":null}'), ('null'), ('[42,47,55]'), - ('[]'); + ('[]'), + ('"a"'), + ('""'), + ('"b"'), + (NULL), + ('true'), + ('false'), + ('42'), + ('37.37'); DROP TABLE IF EXISTS win CASCADE; diff --git a/postgres.sql b/postgres.sql index 6824929..f2d0906 100644 --- a/postgres.sql +++ b/postgres.sql @@ -273,7 +273,15 @@ INSERT INTO json_t VALUES ('{"a":"foo", "c":null}'), ('null'), ('[42,47,55]'), - ('[]'); + ('[]'), + ('"a"'), + ('""'), + ('"b"'), + (NULL), + ('true'), + ('false'), + ('42'), + ('37.37'); DROP TABLE IF EXISTS win CASCADE; CREATE TABLE win (g TEXT, x BIGINT NOT NULL, y BIGINT); diff --git a/risingwave.sql b/risingwave.sql index ce7df4e..6f77c8c 100644 --- a/risingwave.sql +++ b/risingwave.sql @@ -165,7 +165,15 @@ INSERT INTO "json_t" VALUES ('{"a":"foo", "c":null}'), ('null'), ('[42,47,55]'), - ('[]'); + ('[]'), + ('"a"'), + ('""'), + ('"b"'), + (NULL), + ('true'), + ('false'), + ('42'), + ('37.37'); DROP TABLE IF EXISTS "win" CASCADE; CREATE TABLE "win" ("g" TEXT, "x" BIGINT, "y" BIGINT); diff --git a/snowflake.sql b/snowflake.sql index dcceb5a..b7d0887 100644 --- a/snowflake.sql +++ b/snowflake.sql @@ -131,7 +131,15 @@ INSERT INTO "json_t" ("js") SELECT parse_json('{"a":"foo", "c":null}') UNION SELECT parse_json('null') UNION SELECT parse_json('[42,47,55]') UNION - SELECT parse_json('[]'); + SELECT parse_json('[]') UNION + SELECT parse_json('"a"') UNION + SELECT parse_json('""') UNION + SELECT parse_json('"b"') UNION + SELECT NULL UNION + SELECT parse_json('true') UNION + SELECT parse_json('false') UNION + SELECT parse_json('42') UNION + SELECT parse_json('37.37'); CREATE OR REPLACE TABLE "win" ("g" TEXT, "x" BIGINT NOT NULL, "y" BIGINT); INSERT INTO "win" VALUES diff --git a/sqlite.sql b/sqlite.sql index bc1e996..35bf41d 100644 --- a/sqlite.sql +++ b/sqlite.sql @@ -109,7 +109,15 @@ INSERT INTO json_t VALUES ('{"a":"foo", "c":null}'), ('null'), ('[42,47,55]'), - ('[]'); + ('[]'), + ('"a"'), + ('""'), + ('"b"'), + (NULL), + ('true'), + ('false'), + ('42'), + ('37.37'); DROP TABLE IF EXISTS win; CREATE TABLE win (g TEXT, x BIGINT NOT NULL, y BIGINT); diff --git a/trino.sql b/trino.sql index 724522d..cf4f18b 100644 --- a/trino.sql +++ b/trino.sql @@ -168,7 +168,15 @@ INSERT INTO memory.default.json_t VALUES (JSON '{"a":"foo", "c":null}'), (JSON 'null'), (JSON '[42,47,55]'), - 
(JSON '[]'); + (JSON '[]'), + (JSON '"a"'), + (JSON '""'), + (JSON '"b"'), + (NULL), + (JSON 'true'), + (JSON 'false'), + (JSON '42'), + (JSON '37.37'); DROP TABLE IF EXISTS win; CREATE TABLE win (g VARCHAR, x BIGINT, y BIGINT); diff --git a/compiler.py b/compiler.py index 12dd47e..b81b213 100644 --- a/compiler.py +++ b/compiler.py @@ -173,6 +173,35 @@ class TrinoCompiler(SQLGlotCompiler): fmt = "%d" if op.index.dtype.is_integer() else '"%s"' return self.f.json_extract(arg, self.f.format(f"$[{fmt}]", index)) + def visit_UnwrapJSONString(self, op, *, arg): + return self.f.json_value( + self.f.json_format(arg), 'strict $?($.type() == "string")' + ) + + def visit_UnwrapJSONInt64(self, op, *, arg): + value = self.f.json_value( + self.f.json_format(arg), 'strict $?($.type() == "number")' + ) + return self.cast( + self.if_(self.f.regexp_like(value, r"^\\d+$"), value, NULL), op.dtype + ) + + def visit_UnwrapJSONFloat64(self, op, *, arg): + return self.cast( + self.f.json_value( + self.f.json_format(arg), 'strict $?($.type() == "number")' + ), + op.dtype, + ) + + def visit_UnwrapJSONBoolean(self, op, *, arg): + return self.cast( + self.f.json_value( + self.f.json_format(arg), 'strict $?($.type() == "boolean")' + ), + op.dtype, + ) + def visit_DayOfWeekIndex(self, op, *, arg): return self.cast( sge.paren(self.f.day_of_week(arg) + 6, copy=False) % 7, op.dtype diff --git a/__init__.py b/__init__.py index 04ef8ea..e26c198 100644 --- a/__init__.py +++ b/__init__.py @@ -11,6 +11,7 @@ import sqlglot.expressions as sge from pyspark import SparkConf from pyspark.sql import DataFrame, SparkSession from pyspark.sql.functions import PandasUDFType, pandas_udf +from pyspark.sql.types import BooleanType, DoubleType, LongType, StringType import ibis.common.exceptions as com import ibis.config @@ -40,6 +41,47 @@ def normalize_filenames(source_list): return list(map(util.normalize_filename, source_list)) +@pandas_udf(returnType=DoubleType(), functionType=PandasUDFType.SCALAR) +def unwrap_json_float(s: pd.Series) -> pd.Series: + import json + + import pandas as pd + + def nullify_type_mismatched_value(raw): + if pd.isna(raw): + return None + + value = json.loads(raw) + # exact type check because we want to distinguish between integer + # and booleans and bool is a subclass of int + return value if type(value) in (float, int) else None + + return s.map(nullify_type_mismatched_value) + + +def unwrap_json(typ): + import json + + import pandas as pd + + type_mapping = {str: StringType(), int: LongType(), bool: BooleanType()} + + @pandas_udf(returnType=type_mapping[typ], functionType=PandasUDFType.SCALAR) + def unwrap(s: pd.Series) -> pd.Series: + def nullify_type_mismatched_value(raw): + if pd.isna(raw): + return None + + value = json.loads(raw) + # exact type check because we want to distinguish between integer + # and booleans and bool is a subclass of int + return value if type(value) == typ else None + + return s.map(nullify_type_mismatched_value) + + return unwrap + + class _PySparkCursor: """Spark cursor. 
@@ -252,6 +294,10 @@ class Backend(SQLBackend, CanCreateDatabase): spark_udf = pandas_udf(udf_func, udf_return, PandasUDFType.GROUPED_AGG) self._session.udf.register(udf_name, spark_udf) + for typ in (str, int, bool): + self._session.udf.register(f"unwrap_json_{typ.__name__}", unwrap_json(typ)) + self._session.udf.register("unwrap_json_float", unwrap_json_float) + def _register_in_memory_table(self, op: ops.InMemoryTable) -> None: schema = PySparkSchema.from_ibis(op.schema) df = self._session.createDataFrame(data=op.data.to_frame(), schema=schema) diff --git a/data.py b/data.py index b086bc1..d007649 100644 --- a/data.py +++ b/data.py @@ -100,6 +100,14 @@ json_types = pd.DataFrame( "null", "[42,47,55]", "[]", + '"a"', + '""', + '"b"', + None, + "true", + "false", + "42", + "37.37", ] } ) diff --git a/test_client.py b/test_client.py index 03f39d2..bf2472a 100644 --- a/test_client.py +++ b/test_client.py @@ -1585,6 +1585,14 @@ def test_json_to_pyarrow(con): None, [42, 47, 55], [], + "a", + "", + "b", + None, + True, + False, + 42, + 37.37, ] expected = {json.dumps(val) for val in expected} @@ -1592,5 +1600,10 @@ def test_json_to_pyarrow(con): # loads and dumps so the string representation is the same json.dumps(json.loads(val)) for val in js.to_pylist() + # proper null values must be ignored because they cannot be + # deserialized as JSON + # + # they exist in the json_t table, so the `js` value contains them + if val is not None } assert result == expected diff --git a/test_json.py b/test_json.py index e814311..9cc69a5 100644 --- a/test_json.py +++ b/test_json.py @@ -9,6 +9,8 @@ import pandas as pd import pytest from packaging.version import parse as vparse +import ibis.expr.types as ir + pytestmark = [ pytest.mark.never(["impala"], reason="doesn't support JSON and never will"), pytest.mark.notyet(["clickhouse"], reason="upstream is broken"), @@ -74,10 +76,8 @@ def test_json_map(backend, json_t): {"a": [1, 2, 3, 4], "b": 1}, {"a": None, "b": 2}, {"a": "foo", "c": None}, - None, - None, - None, - ], + ] + + [None] * 11, dtype="object", name="res", ) @@ -94,6 +94,42 @@ def test_json_array(backend, json_t): expr = json_t.js.array.name("res") result = expr.execute() expected = pd.Series( - [None, None, None, None, [42, 47, 55], []], name="res", dtype="object" + [None, None, None, None, [42, 47, 55], []] + [None] * 8, + name="res", + dtype="object", ) backend.assert_series_equal(result, expected) + + [email protected]( + ["sqlite"], + condition=vparse(sqlite3.sqlite_version) < vparse("3.38.0"), + reason="JSON not supported in SQLite < 3.38.0", +) [email protected](["dask", "pandas", "risingwave"]) [email protected](["flink"], reason="should work but doesn't deserialize JSON") [email protected]( + ("typ", "expected_data"), + [ + ("str", [None] * 6 + ["a", "", "b"] + [None] * 5), + ("int", [None] * 12 + [42, None]), + ("float", [None] * 12 + [42.0, 37.37]), + ("bool", [None] * 10 + [True, False, None, None]), + ], + ids=["str", "int", "float", "bool"], +) [email protected]( + "expr_fn", [getattr, ir.JSONValue.unwrap_as], ids=["getattr", "unwrap_as"] +) +def test_json_unwrap(backend, json_t, typ, expected_data, expr_fn): + expr = expr_fn(json_t.js, typ).name("res") + result = expr.execute() + expected = pd.Series(expected_data, name="res", dtype="object") + backend.assert_series_equal( + result.replace(np.nan, None).fillna(pd.NA).sort_values().reset_index(drop=True), + expected.replace(np.nan, None) + .fillna(pd.NA) + .sort_values() + .reset_index(drop=True), + check_dtype=False, + ) diff --git 
a/json.py b/json.py index 82a0b20..13699a5 100644 --- a/json.py +++ b/json.py @@ -6,6 +6,8 @@ from typing import TYPE_CHECKING from public import public +import ibis.common.exceptions as exc +import ibis.expr.datatypes as dt import ibis.expr.operations as ops from ibis.expr.types import Column, Scalar, Value @@ -98,6 +100,155 @@ class JSONValue(Value): """ return ops.JSONGetItem(self, key).to_expr() + def unwrap_as(self, dtype: dt.DataType | str) -> ir.Value: + """Unwrap JSON into a specific data type. + + Returns + ------- + Value + An Ibis expression of a more specific type than JSON + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> data = { + ... "jstring": ['"a"', '""', None, "null"], + ... "jbool": ["true", "false", "null", None], + ... "jint": ["1", "null", None, "2"], + ... "jfloat": ["42.42", None, "null", "37.37"], + ... "jmap": ['{"a": 1}', "null", None, "{}"], + ... "jarray": ["[]", "null", None, '[{},"1",2]'], + ... } + >>> t = ibis.memtable(data, schema=dict.fromkeys(data.keys(), "json")) + >>> t + ┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┳━━━┓ + ┃ jstring ┃ jbool ┃ jint ┃ … ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━╇━━━┩ + │ json │ json │ json │ … │ + ├──────────────────────┼──────────────────────┼──────────────────────┼───┤ + │ 'a' │ True │ 1 │ … │ + │ '' │ False │ None │ … │ + │ NULL │ None │ NULL │ … │ + │ None │ NULL │ 2 │ … │ + └──────────────────────┴──────────────────────┴──────────────────────┴───┘ + >>> t.select(unwrapped=t.jstring.unwrap_as(str), original=t.jstring) + ┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ unwrapped ┃ original ┃ + ┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ json │ + ├───────────┼──────────────────────┤ + │ a │ 'a' │ + │ ~ │ '' │ + │ NULL │ NULL │ + │ NULL │ None │ + └───────────┴──────────────────────┘ + >>> t.select(unwrapped=t.jbool.unwrap_as("bool"), original=t.jbool) + ┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ unwrapped ┃ original ┃ + ┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ + │ boolean │ json │ + ├───────────┼──────────────────────┤ + │ True │ True │ + │ False │ False │ + │ NULL │ None │ + │ NULL │ NULL │ + └───────────┴──────────────────────┘ + >>> t.select( + ... unwrapped_int64=t.jint.unwrap_as("int64"), + ... unwrapped_int32=t.jint.unwrap_as("int32"), + ... original=t.jint, + ... ) + ┏━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ unwrapped_int64 ┃ unwrapped_int32 ┃ original ┃ + ┡━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ + │ int64 │ int32 │ json │ + ├─────────────────┼─────────────────┼──────────────────────┤ + │ 1 │ 1 │ 1 │ + │ NULL │ NULL │ None │ + │ NULL │ NULL │ NULL │ + │ 2 │ 2 │ 2 │ + └─────────────────┴─────────────────┴──────────────────────┘ + + You can cast to a more specific type than the types available in standards-compliant JSON. 
+ + Here's an example of casting JSON numbers to `float32`: + + >>> t.select(unwrapped=t.jfloat.unwrap_as("float32"), original=t.jfloat) + ┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ unwrapped ┃ original ┃ + ┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ + │ float32 │ json │ + ├───────────┼──────────────────────┤ + │ 42.419998 │ 42.42 │ + │ NULL │ NULL │ + │ NULL │ None │ + │ 37.369999 │ 37.37 │ + └───────────┴──────────────────────┘ + + You can cast JSON objects to a more specific `map` type: + + >>> t.select(unwrapped=t.jmap.unwrap_as("map<string, int>"), original=t.jmap) + ┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ unwrapped ┃ original ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ + │ map<string, int64> │ json │ + ├──────────────────────┼──────────────────────┤ + │ {'a': 1} │ {'a': 1} │ + │ NULL │ None │ + │ NULL │ NULL │ + │ {} │ {} │ + └──────────────────────┴──────────────────────┘ + + You can cast JSON arrays to an array type as well. In this case the + array values don't have a single element type so we cast to + `array<json>`. + + >>> t.select(unwrapped=t.jarray.unwrap_as("array<json>"), original=t.jarray) + ┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ unwrapped ┃ original ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ + │ array<json> │ json │ + ├───────────────────────┼──────────────────────┤ + │ [] │ [] │ + │ NULL │ None │ + │ NULL │ NULL │ + │ ['{}', '"1"', ... +1] │ [{...}, '1', ... +1] │ + └───────────────────────┴──────────────────────┘ + + See Also + -------- + [`JSONValue.str`](#ibis.expr.types.json.JSONValue.str) + [`JSONValue.int`](#ibis.expr.types.json.JSONValue.int) + [`JSONValue.float`](#ibis.expr.types.json.JSONValue.float) + [`JSONValue.bool`](#ibis.expr.types.json.JSONValue.bool) + [`JSONValue.map`](#ibis.expr.types.json.JSONValue.map) + [`JSONValue.array`](#ibis.expr.types.json.JSONValue.array) + [`Value.cast`](#ibis.expr.types.generic.Value.cast) + """ + dtype = dt.dtype(dtype) + if dtype.is_string(): + return self.str + elif dtype.is_boolean(): + return self.bool + elif dtype.is_integer(): + i = self.int + return i.cast(dtype) if i.type() != dtype else i + elif dtype.is_floating(): + f = self.float + return f.cast(dtype) if f.type() != dtype else f + elif dtype.is_map(): + m = self.map + return m.cast(dtype) if m.type() != dtype else m + elif dtype.is_array(): + a = self.array + return a.cast(dtype) if a.type() != dtype else a + else: + raise exc.IbisTypeError( + f"Data type {dtype} is unsupported for unwrapping JSON values. Supported " + "data types are strings, integers, floats, booleans, maps, and arrays." + ) + @property def map(self) -> ir.MapValue: """Cast JSON to a map of string to JSON. @@ -124,6 +275,226 @@ class JSONValue(Value): """ return ops.ToJSONArray(self).to_expr() + @property + def int(self) -> ir.IntegerValue: + """Unwrap a JSON value into a backend-native int. + + Any non-float JSON values are returned as `NULL`. + + Examples + -------- + >>> import json, ibis + >>> ibis.options.interactive = True + >>> data = [ + ... {"name": "Alice", "json_data": '{"last_name":"Smith","age":40}'}, + ... {"name": "Bob", "json_data": '{"last_name":"Jones", "age":39}'}, + ... {"name": "Charlie", "json_data": '{"last_name":"Davies","age":54}'}, + ... 
] + >>> t = ibis.memtable(data, schema={"name": "string", "json_data": "json"}) + >>> t + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ name ┃ json_data ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ json │ + ├─────────┼────────────────────────────────────┤ + │ Alice │ {'last_name': 'Smith', 'age': 40} │ + │ Bob │ {'last_name': 'Jones', 'age': 39} │ + │ Charlie │ {'last_name': 'Davies', 'age': 54} │ + └─────────┴────────────────────────────────────┘ + >>> t.mutate(age=t.json_data["age"].int) + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┓ + ┃ name ┃ json_data ┃ age ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━┩ + │ string │ json │ int64 │ + ├─────────┼────────────────────────────────────┼───────┤ + │ Alice │ {'last_name': 'Smith', 'age': 40} │ 40 │ + │ Bob │ {'last_name': 'Jones', 'age': 39} │ 39 │ + │ Charlie │ {'last_name': 'Davies', 'age': 54} │ 54 │ + └─────────┴────────────────────────────────────┴───────┘ + """ + return ops.UnwrapJSONInt64(self).to_expr() + + @property + def float(self) -> ir.FloatingValue: + """Unwrap a JSON value into a backend-native float. + + Any non-float JSON values are returned as `NULL`. + + ::: {.callout-warning} + ## The `float` property is lax with respect to integers + + The `float` property will attempt to coerce integers to floating point numbers. + ::: + + Examples + -------- + >>> import json, ibis + >>> ibis.options.interactive = True + >>> data = [ + ... {"name": "Alice", "json_data": '{"last_name":"Smith","salary":42.42}'}, + ... {"name": "Bob", "json_data": '{"last_name":"Jones", "salary":37.37}'}, + ... {"name": "Charlie", "json_data": '{"last_name":"Davies","salary":"NA"}'}, + ... {"name": "Joan", "json_data": '{"last_name":"Davies","salary":78}'}, + ... ] + >>> t = ibis.memtable(data, schema={"name": "string", "json_data": "json"}) + >>> t + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ name ┃ json_data ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ json │ + ├─────────┼─────────────────────────────────────────┤ + │ Alice │ {'last_name': 'Smith', 'salary': 42.42} │ + │ Bob │ {'last_name': 'Jones', 'salary': 37.37} │ + │ Charlie │ {'last_name': 'Davies', 'salary': 'NA'} │ + │ Joan │ {'last_name': 'Davies', 'salary': 78} │ + └─────────┴─────────────────────────────────────────┘ + >>> t.mutate(salary=t.json_data["salary"].float) + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┓ + ┃ name ┃ json_data ┃ salary ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━┩ + │ string │ json │ float64 │ + ├─────────┼─────────────────────────────────────────┼─────────┤ + │ Alice │ {'last_name': 'Smith', 'salary': 42.42} │ 42.42 │ + │ Bob │ {'last_name': 'Jones', 'salary': 37.37} │ 37.37 │ + │ Charlie │ {'last_name': 'Davies', 'salary': 'NA'} │ NULL │ + │ Joan │ {'last_name': 'Davies', 'salary': 78} │ 78.00 │ + └─────────┴─────────────────────────────────────────┴─────────┘ + """ + return ops.UnwrapJSONFloat64(self).to_expr() + + @property + def bool(self) -> ir.BooleanValue: + """Unwrap a JSON value into a backend-native boolean. + + Any non-boolean JSON values are returned as `NULL`. + + Examples + -------- + >>> import json, ibis + >>> ibis.options.interactive = True + >>> data = [ + ... {"name": "Alice", "json_data": '{"last_name":"Smith","is_bot":false}'}, + ... {"name": "Bob", "json_data": '{"last_name":"Jones","is_bot":true}'}, + ... {"name": "Charlie", "json_data": '{"last_name":"Davies","is_bot":false}'}, + ... 
] + >>> t = ibis.memtable(data, schema={"name": "string", "json_data": "json"}) + >>> t + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ name ┃ json_data ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ json │ + ├─────────┼──────────────────────────────────────────┤ + │ Alice │ {'last_name': 'Smith', 'is_bot': False} │ + │ Bob │ {'last_name': 'Jones', 'is_bot': True} │ + │ Charlie │ {'last_name': 'Davies', 'is_bot': False} │ + └─────────┴──────────────────────────────────────────┘ + >>> t.mutate(is_bot=t.json_data["is_bot"].bool) + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┓ + ┃ name ┃ json_data ┃ is_bot ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━┩ + │ string │ json │ boolean │ + ├─────────┼──────────────────────────────────────────┼─────────┤ + │ Alice │ {'last_name': 'Smith', 'is_bot': False} │ False │ + │ Bob │ {'last_name': 'Jones', 'is_bot': True} │ True │ + │ Charlie │ {'last_name': 'Davies', 'is_bot': False} │ False │ + └─────────┴──────────────────────────────────────────┴─────────┘ + """ + return ops.UnwrapJSONBoolean(self).to_expr() + + @property + def str(self) -> ir.StringValue: + """Unwrap a JSON string into a backend-native string. + + Any non-string JSON values are returned as `NULL`. + + Returns + ------- + StringValue + A string expression + + Examples + -------- + >>> import json, ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable( + ... {"js": ['"a"', '"b"', "1", "{}", '[{"a": 1}]']}, + ... schema=ibis.schema(dict(js="json")), + ... ) + >>> t + ┏━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ js ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━┩ + │ json │ + ├──────────────────────┤ + │ 'a' │ + │ 'b' │ + │ 1 │ + │ {} │ + │ [{...}] │ + └──────────────────────┘ + >>> t.js.str + ┏━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ UnwrapJSONString(js) ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├──────────────────────┤ + │ a │ + │ b │ + │ NULL │ + │ NULL │ + │ NULL │ + └──────────────────────┘ + + Note the difference between `.string` and `.cast("string")`. + + The latter preserves quotes for JSON string values and returns a valid + JSON string. + + >>> t.js.cast("string") + ┏━━━━━━━━━━━━━━━━━━┓ + ┃ Cast(js, string) ┃ + ┡━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├──────────────────┤ + │ "a" │ + │ "b" │ + │ 1 │ + │ {} │ + │ [{"a": 1}] │ + └──────────────────┘ + + Here's a more complex example with a table containing a JSON column + with nested fields. + + >>> data = [ + ... {"name": "Alice", "json_data": '{"last_name":"Smith"}'}, + ... {"name": "Bob", "json_data": '{"last_name":"Jones"}'}, + ... {"name": "Charlie", "json_data": '{"last_name":"Davies"}'}, + ... 
] + >>> t = ibis.memtable(data, schema={"name": "string", "json_data": "json"}) + >>> t + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ name ┃ json_data ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ json │ + ├─────────┼─────────────────────────┤ + │ Alice │ {'last_name': 'Smith'} │ + │ Bob │ {'last_name': 'Jones'} │ + │ Charlie │ {'last_name': 'Davies'} │ + └─────────┴─────────────────────────┘ + >>> t.mutate(last_name=t.json_data["last_name"].str) + ┏━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━┓ + ┃ name ┃ json_data ┃ last_name ┃ + ┡━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━┩ + │ string │ json │ string │ + ├─────────┼─────────────────────────┼───────────┤ + │ Alice │ {'last_name': 'Smith'} │ Smith │ + │ Bob │ {'last_name': 'Jones'} │ Jones │ + │ Charlie │ {'last_name': 'Davies'} │ Davies │ + └─────────┴─────────────────────────┴───────────┘ + """ + return ops.UnwrapJSONString(self).to_expr() + @public class JSONScalar(Scalar, JSONValue):
|
|
feat(core): add `connect: boolean` param to `MikroORM.init()`
|
43a9ce9c108945b1b48677919113e4a2c1c3886e
|
feat
|
https://github.com/mikro-orm/mikro-orm/commit/43a9ce9c108945b1b48677919113e4a2c1c3886e
|
add `connect: boolean` param to `MikroORM.init()`
|
diff --git a/MikroORM.ts b/MikroORM.ts index 176f193..da14834 100644 --- a/MikroORM.ts +++ b/MikroORM.ts @@ -22,7 +22,7 @@ export class MikroORM<D extends IDatabaseDriver = IDatabaseDriver> { * Initialize the ORM, load entity metadata, create EntityManager and connect to the database. * If you omit the `options` parameter, your CLI config will be used. */ - static async init<D extends IDatabaseDriver = IDatabaseDriver>(options?: Options<D> | Configuration<D>): Promise<MikroORM<D>> { + static async init<D extends IDatabaseDriver = IDatabaseDriver>(options?: Options<D> | Configuration<D>, connect = true): Promise<MikroORM<D>> { if (!options) { options = await ConfigurationLoader.getConfiguration<D>(); } @@ -34,10 +34,13 @@ export class MikroORM<D extends IDatabaseDriver = IDatabaseDriver> { orm.em = orm.driver.createEntityManager<D>(); orm.metadata.decorate(orm.em); orm.driver.setMetadata(orm.metadata); - await orm.connect(); - if (orm.config.get('ensureIndexes')) { - await orm.driver.ensureIndexes(); + if (connect) { + await orm.connect(); + + if (orm.config.get('ensureIndexes')) { + await orm.driver.ensureIndexes(); + } } return orm; diff --git a/MongoConnection.ts b/MongoConnection.ts index ac309b8..11cdeda 100644 --- a/MongoConnection.ts +++ b/MongoConnection.ts @@ -20,7 +20,8 @@ export class MongoConnection extends Connection { } async isConnected(): Promise<boolean> { - return this.client.isConnected(); + const ret = this.client?.isConnected(); + return !!ret; } getCollection(name: EntityName<AnyEntity>): Collection { diff --git a/MikroORM.test.ts b/MikroORM.test.ts index b802cee..2cf8df1 100644 --- a/MikroORM.test.ts +++ b/MikroORM.test.ts @@ -91,11 +91,14 @@ describe('MikroORM', () => { const pkg = { 'mikro-orm': { useTsNode: true } } as any; jest.mock('../package.json', () => pkg, { virtual: true }); - const orm = await MikroORM.init(); + const orm = await MikroORM.init(undefined, false); expect(orm).toBeInstanceOf(MikroORM); expect(orm.em).toBeInstanceOf(EntityManager); expect(Object.keys(orm.getMetadata().getAll()).sort()).toEqual(['Test']); + expect(await orm.isConnected()).toBe(false); + + await orm.connect(); expect(await orm.isConnected()).toBe(true); await orm.close();
|
|
docs: add 'Dynamic Schema Creation' page
|
38ff87aabd14a62882aff4ddb33b7af7edaf0138
|
docs
|
https://github.com/wzhiqing/cube/commit/38ff87aabd14a62882aff4ddb33b7af7edaf0138
|
add 'Dynamic Schema Creation' page
|
diff --git a/Dynamic-Schema-Creation.md b/Dynamic-Schema-Creation.md index 8d71dd9..6923f26 100644 --- a/Dynamic-Schema-Creation.md +++ b/Dynamic-Schema-Creation.md @@ -0,0 +1,268 @@ +--- +title: Dynamic Schema Creation +permalink: /schema/dynamic-schema-creation +category: Data Schema +menuOrder: 2 +--- + +Cube.js allows schemas to be created on-the-fly using a special +[`asyncModule()`][ref-async-module] function only available in the [schema +execution environment][ref-schema-env]. `asyncModule()` allows registering an +async function to be executed at the end of the data schema compile phase so +additional definitions can be added. This is often useful in situations where +schema properties can be dynamically updated through an API, for example. + +<!-- prettier-ignore-start --> +[[warning | Note]] +| Each `asyncModule` call will be invoked only once per schema compilation. +<!-- prettier-ignore-end --> + +[ref-schema-env]: /schema-execution-environment +[ref-async-module]: /schema-execution-environment#asyncmodule + +When creating schemas via `asyncModule()`, it is important to be aware of the +following differences compared to statically defining schemas with `cube()`: + +- The `sql` and `drillMembers` properties for both dimensions and measures must + be of type `() => string` + +Cube.js supports importing JavaScript logic from other files in a schema, so it +is useful to declare utility functions for handling the above differences in a +separate file: + +[ref-import-export]: /export-import + +```javascript +// schema/utils.js +export const convertStringPropToFunction = (propNames, dimensionDefinition) => { + let newResult = { ...dimensionDefinition }; + propNames.forEach((propName) => { + const propValue = newResult[propName]; + + if (!propValue) { + return; + } + + newResult[propName] = () => propValue; + }); + return newResult; +}; + +export const transformDimensions = (dimensions) => { + return Object.keys(dimensions).reduce((result, dimensionName) => { + const dimensionDefinition = dimensions[dimensionName]; + return { + ...result, + [dimensionName]: convertStringPropToFunction( + ['sql'], + dimensionDefinition + ), + }; + }, {}); +}; + +export const transformMeasures = (measures) => { + return Object.keys(measures).reduce((result, dimensionName) => { + const dimensionDefinition = measures[dimensionName]; + return { + ...result, + [dimensionName]: convertStringPropToFunction( + ['sql, drillMembers'], + dimensionDefinition + ), + }; + }, {}); +}; +``` + +## Generation + +In the following example, we retrieve a JSON object representing all our cubes +using `fetch()`, transform some of the properties to be functions that return a +string, and then finally use the [`cube()` global function][ref-globals] to +generate schemas from that data: + +[ref-globals]: /schema-execution-environment#cube-js-globals-cube-and-others + +```javascript +// schema/DynamicSchema.js +const fetch = require('node-fetch'); +import { + convertStringPropToFunction, + transformDimensions, + transformMeasures, +} from './utils'; + +asyncModule(async () => { + const dynamicCubes = await ( + await fetch('http://your-api-endpoint/dynamicCubes') + ).json(); + + console.log(dynamicCubes); + // [ + // { + // dimensions: { + // color: { + // sql: `color`, + // type: `string`, + // }, + // }, + // measures: { + // price: { + // sql: `price`, + // type: `number`, + // } + // }, + // title: 'DynamicCubeSchema', + // sql: 'SELECT * FROM public.my_table', + // }, + // ] + + dynamicCubes.forEach((dynamicCube) => { + const 
dimensions = transformDimensions(dynamicCube.dimensions); + const measures = transformMeasures(dynamicCube.measures); + + cube(dynamicCube.title, { + sql: dynamicCube.sql, + dimensions, + measures, + preAggregations: { + main: { + type: `originalSql`, + }, + }, + }); + }); +}); +``` + +## Usage with `schemaVersion` + +It is also useful to be able to recompile the schema when there are changes in +the underlying input data. For this purpose, the [`schemaVersion` +][link-config-schema-version] value in the `cube.js` configuration options can +be specified as an asynchronous function: + +```javascript +// cube.js +module.exports = { + schemaVersion: async ({ authInfo }) => { + const schemaVersions = await ( + await fetch('http://your-api-endpoint/schemaVersion') + ).json(); + + return schemaVersions[authInfo.tenantId]; + }, +}; +``` + +[link-config-schema-version]: /config#options-reference-schema-version + +## Usage with `COMPILE_CONTEXT` + +The `COMPILE_CONTEXT` global object can also be used in conjunction with async +schema creation to allow for multi-tenant deployments of Cube.js. + +In an example scenario where all tenants share the same cube, but see different +dimensions and measures, you could do the following: + +```javascript +// schema/DynamicSchema.js +const fetch = require('node-fetch'); +import { convertStringPropToFunction, transformDimensions, transformMeasures } from './utils'; + +asyncModule(async () => { + const { + authInfo: { tenantId }, + } = COMPILE_CONTEXT; + + const dynamicCubes = await ( + await fetch(`http://your-api-endpoint/dynamicCubes`) + ).json(); + + const allowedDimensions = await ( + await fetch(`http://your-api-endpoint/dynamicDimensions/${tenantId}`) + ).json(); + + const allowedMeasures = await ( + await fetch(`http://your-api-endpoint/dynamicMeasures/${tenantId}`) + ).json(); + + dynamicCubes.forEach((dynamicCube) => { + const dimensions = transformDimensions(allowedDimensions); + const measures = transformMeasures(allowedMeasures); + + cube(dynamicCube.title, { + sql: dynamicCube.sql, + title: `${dynamicCube.title}-${tenantId}`, + dimensions, + measures, + preAggregations: { + main: { + type: `originalSql`, + }, + }, + }); + }); +}); +``` + +## Usage with `dataSource` + +When using multiple databases, you'll need to ensure you set the +[`dataSource`][ref-schema-datasource] property for any asynchronously-created +schemas, as well as ensuring the corresponding database drivers are set up with +[`driverFactory()`][ref-config-driverfactory] in your [`cube.js` configuration +file][ref-config]. 
+ +[ref-schema-datasource]: /cube#parameters-data-source +[ref-config-driverfactory]: /config#options-reference-driver-factory +[ref-config]: /config + +For an example scenario where schemas may use either MySQL or Postgres +databases, you could do the following: + +```javascript +// schema/DynamicSchema.js +const fetch = require('node-fetch'); +import { convertStringPropToFunction, transformDimensions, transformMeasures } from './utils'; + +asyncModule(async () => { + const dynamicCubes = await ( + await fetch('http://your-api-endpoint/dynamicCubes') + ).json(); + + dynamicCubes.forEach((dynamicCube) => { + const dimensions = transformDimensions(dynamicCube.dimensions); + const measures = transformMeasures(dynamicCube.measures); + + cube(dynamicCube.title, { + dataSource: dynamicCube.dataSource, + sql: dynamicCube.sql, + dimensions, + measures, + preAggregations: { + main: { + type: `originalSql`, + }, + }, + }); + }); +}); +``` + +```javascript +// cube.js +const MySQLDriver = require('@cubejs-backend/mysql-driver'); +const PostgresDriver = require('@cubejs-backend/postgres-driver'); + +module.exports = { + driverFactory: ({ dataSource }) => { + if (dataSource === 'mysql') { + return new MySQLDriver({ database: dataSource }); + } + + return new PostgresDriver({ database: dataSource }); + }, +}; +``` diff --git a/schema-execution-environment.md b/schema-execution-environment.md index 8ca1079..d82aec5 100644 --- a/schema-execution-environment.md +++ b/schema-execution-environment.md @@ -110,89 +110,11 @@ cube(`Users`, { ## asyncModule -<!-- prettier-ignore-start --> -[[warning | Note]] -| Each `asyncModule` call will be invoked only once per schema compilation. -<!-- prettier-ignore-end --> +Schemas can be externally stored and retrieved through an asynchronous operation +using the `asyncModule()`. For more information, consult the [Dynamic Schema +Creation][ref-dynamic-schemas] page. -To trigger a schema recompile based on changes of underlying input data, -[schemaVersion][link-config-schema-version] value should change accordingly. - -[link-config-schema-version]: - https://cube.dev/docs/config#options-reference-schema-version - -If there's a need to generate schema based on values from external API or -database, the `asyncModule` method can be used for such scenario. `asyncModule` -allows registering an async function to be executed at the end of the data -schema file compile phase so additional definitions can be added. - -For example: - -```javascript -const fetch = require('node-fetch'); -const Funnels = require('Funnels'); - -asyncModule(async () => { - const funnels = await ( - await fetch('http://your-api-endpoint/funnels') - ).json(); - - class Funnel { - constructor({ title, steps }) { - this.title = title; - this.steps = steps; - } - - get transformedSteps() { - return Object.keys(this.steps).map((key, index) => { - const value = this.steps[key]; - let where = null; - if (value[0] === PAGE_VIEW_EVENT) { - if (value.length === 1) { - where = `event = '${value[0]}'`; - } else { - where = `event = '${value[0]}' AND page_title = '${value[1]}'`; - } - } else { - where = `event = 'se' AND se_category = '${value[0]}' AND se_action = '${value[1]}'`; - } - - return { - name: key, - eventsView: { - sql: () => `select * from (${eventsSQl}) WHERE ${where}`, - }, - timeToConvert: index > 0 ? 
'30 day' : null, - }; - }); - } - - get config() { - return { - userId: { - sql: () => `user_id`, - }, - time: { - sql: () => `time`, - }, - steps: this.transformedSteps, - }; - } - } - - funnels.forEach((funnel) => { - const funnelObject = new Funnel(funnel); - cube(funnelObject.title, { - extends: Funnels.eventFunnel(funnelObject.config), - preAggregations: { - main: { - type: `originalSql`, - }, - }, - }); - }); -}); -``` +[ref-dynamic-schemas]: /schema/dynamic-schema-creation ## Context symbols transpile
|
|
fix(datafusion): fix incorrect order of predicate -> select compilation
|
009230421b2bc1f86591e8b850d37a489e8e4f06
|
fix
|
https://github.com/rohankumardubey/ibis/commit/009230421b2bc1f86591e8b850d37a489e8e4f06
|
fix incorrect order of predicate -> select compilation
|
diff --git a/compiler.py b/compiler.py index c8343b3..1a31f4f 100644 --- a/compiler.py +++ b/compiler.py @@ -77,6 +77,11 @@ def sort_key(op): def selection(op): plan = translate(op.table) + if op.predicates: + predicates = map(translate, op.predicates) + predicate = functools.reduce(operator.and_, predicates) + plan = plan.filter(predicate) + selections = [] for arg in op.selections or [op.table]: # TODO(kszucs) it would be nice if we wouldn't need to handle the @@ -100,11 +105,6 @@ def selection(op): plan = plan.select(*selections) - if op.predicates: - predicates = map(translate, op.predicates) - predicate = functools.reduce(operator.and_, predicates) - plan = plan.filter(predicate) - if op.sort_keys: sort_keys = map(translate, op.sort_keys) plan = plan.sort(*sort_keys)
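The ordering matters because a predicate may reference a column that the projection then drops; building the filter on top of the projected plan would leave that column out of scope. A small illustration of the shape of such a query (table and column names are mine, not from the commit):

```python
import ibis

t = ibis.table({"a": "int64", "b": "int64"}, name="t")

# The predicate uses `b`, but the selection keeps only `a`. Compiled
# as select-then-filter, `b > 0` would be evaluated against a plan that
# no longer carries `b`; filtering first keeps it in scope.
expr = t.filter(t.b > 0).select(t.a)
```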
|
|
docs: remove underscores that are not deferreds in doctest
|
5d300a98fe3259cd0c8c3acf593dcbebc9cab5e9
|
docs
|
https://github.com/ibis-project/ibis/commit/5d300a98fe3259cd0c8c3acf593dcbebc9cab5e9
|
remove underscores that are not deferreds in doctest
|
diff --git a/client.py b/client.py index 692f082..1b642b8 100644 --- a/client.py +++ b/client.py @@ -195,7 +195,7 @@ def parse_project_and_dataset(project: str, dataset: str = "") -> tuple[str, str 'ibis-gbq' >>> dataset 'my_dataset' - >>> data_project, billing_project, _ = parse_project_and_dataset("ibis-gbq") + >>> data_project, billing_project, _dataset = parse_project_and_dataset("ibis-gbq") >>> data_project 'ibis-gbq' """ diff --git a/__init__.py b/__init__.py index 431d1a0..7fc507e 100644 --- a/__init__.py +++ b/__init__.py @@ -747,10 +747,13 @@ WHERE catalog_name = :database""" >>> import sqlite3 >>> ibis.options.interactive = True >>> with sqlite3.connect("/tmp/sqlite.db") as con: - ... _ = con.execute("DROP TABLE IF EXISTS t") - ... _ = con.execute("CREATE TABLE t (a INT, b TEXT)") - ... _ = con.execute("INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c')") + ... con.execute("DROP TABLE IF EXISTS t") # doctest: +ELLIPSIS + ... con.execute("CREATE TABLE t (a INT, b TEXT)") # doctest: +ELLIPSIS + ... con.execute( + ... "INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c')" + ... ) # doctest: +ELLIPSIS ... + <...> >>> con = ibis.connect("duckdb://") >>> t = con.read_sqlite("/tmp/sqlite.db", table_name="t") >>> t @@ -798,10 +801,13 @@ WHERE catalog_name = :database""" >>> import ibis >>> import sqlite3 >>> with sqlite3.connect("/tmp/attach_sqlite.db") as con: - ... _ = con.execute("DROP TABLE IF EXISTS t") - ... _ = con.execute("CREATE TABLE t (a INT, b TEXT)") - ... _ = con.execute("INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c')") + ... con.execute("DROP TABLE IF EXISTS t") # doctest: +ELLIPSIS + ... con.execute("CREATE TABLE t (a INT, b TEXT)") # doctest: +ELLIPSIS + ... con.execute( + ... "INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c')" + ... ) # doctest: +ELLIPSIS ... + <...> >>> con = ibis.connect("duckdb://") >>> con.list_tables() [] diff --git a/api.py b/api.py index 8eb7674..020ccca 100644 --- a/api.py +++ b/api.py @@ -1058,7 +1058,7 @@ def read_csv( ... ,f ... ''' >>> with open("/tmp/lines.csv", mode="w") as f: - ... _ = f.write(lines) + ... nbytes = f.write(lines) # nbytes is unused ... >>> t = ibis.read_csv("/tmp/lines.csv") >>> t @@ -1114,7 +1114,7 @@ def read_json( ... {"a": null, "b": "f"} ... ''' >>> with open("/tmp/lines.json", mode="w") as f: - ... _ = f.write(lines) + ... nbytes = f.write(lines) # nbytes is unused ... >>> t = ibis.read_json("/tmp/lines.json") >>> t diff --git a/relations.py b/relations.py index 6991ddd..dd628cd 100644 --- a/relations.py +++ b/relations.py @@ -2379,7 +2379,7 @@ class Table(Expr, _FixedTextJupyterMixin): ... {"name": "c", "pos": {"lat": 10.3, "lon": 30.1}} ... ''' >>> with open("/tmp/lines.json", "w") as f: - ... _ = f.write(lines) + ... nbytes = f.write(lines) # nbytes is unused ... >>> t = ibis.read_json("/tmp/lines.json") >>> t diff --git a/temporal.py b/temporal.py index 59ac1fa..557bb8f 100644 --- a/temporal.py +++ b/temporal.py @@ -291,7 +291,7 @@ class TimeValue(_TimeComponentMixin, TemporalValue): ... 2016-02-01T00:55:11,2016-02-01T01:24:34 ... 2016-02-01T00:11:13,2016-02-01T00:16:59''' >>> with open("/tmp/triptimes.csv", "w") as f: - ... _ = f.write(data) + ... nbytes = f.write(data) # nbytes is unused ... >>> taxi = ibis.read_csv("/tmp/triptimes.csv") >>> ride_duration = ( @@ -591,7 +591,7 @@ class TimestampValue(_DateComponentMixin, _TimeComponentMixin, TemporalValue): ... 2016-02-01T00:55:11,2016-02-01T01:24:34 ... 2016-02-01T00:11:13,2016-02-01T00:16:59''' >>> with open("/tmp/triptimes.csv", "w") as f: - ... 
_ = f.write(data) + ... nbytes = f.write(data) # nbytes is unused ... >>> taxi = ibis.read_csv("/tmp/triptimes.csv") >>> ride_duration = taxi.tpep_dropoff_datetime.delta(
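The motivation is that in ibis `_` is itself an API: the deferred expression builder. A throwaway `_ = ...` assignment in a doctest therefore looks like it rebinds the deferred. A minimal sketch of the clash (the file path is illustrative):

```python
from ibis import _

expr = _.a + 1  # `_` is ibis's deferred-expression placeholder

with open("/tmp/example.txt", "w") as f:
    _ = f.write("data")  # shadows the deferred; `_` is now an int
```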
|
|
test(bigquery): remove unnecessary use of `s.c` selector in tests
|
af0550a51ae2c5097089de02dfdab9e93de07c19
|
test
|
https://github.com/ibis-project/ibis/commit/af0550a51ae2c5097089de02dfdab9e93de07c19
|
remove unnecessary use of `s.c` selector in tests
|
diff --git a/conftest.py b/conftest.py index 9467505..0b74bb3 100644 --- a/conftest.py +++ b/conftest.py @@ -14,7 +14,6 @@ from google.cloud import bigquery as bq import ibis import ibis.expr.datatypes as dt -import ibis.selectors as s from ibis.backends.bigquery import EXTERNAL_DATA_SCOPES, Backend from ibis.backends.bigquery.datatypes import BigQueryType from ibis.backends.conftest import TEST_TABLES @@ -24,8 +23,6 @@ from ibis.backends.tests.data import json_types, non_null_array_types, struct_ty if TYPE_CHECKING: from collections.abc import Mapping - import ibis.expr.types as ir - DATASET_ID = "ibis_gbq_testing" DATASET_ID_TOKYO = "ibis_gbq_testing_tokyo" REGION_TOKYO = "asia-northeast1" @@ -302,11 +299,6 @@ class TestConf(UnorderedComparator, BackendTest, RoundAwayFromZero): for fut in concurrent.futures.as_completed(futures): fut.result() - @property - def functional_alltypes(self) -> ir.Table: - t = super().functional_alltypes - return t.select(~s.c("index", "Unnamed_0")) - @staticmethod def connect(*, tmpdir, worker_id, **kw) -> Backend: """Connect to the test project and dataset.""" diff --git a/out.sql b/out.sql index 074cda1..ed47015 100644 --- a/out.sql +++ b/out.sql @@ -1,7 +1,7 @@ -SELECT t0.`index`, t0.`Unnamed_0`, t0.`id`, t0.`bool_col`, t0.`tinyint_col`, - t0.`smallint_col`, t0.`int_col`, t0.`bigint_col`, t0.`float_col`, - t0.`double_col`, t0.`date_string_col`, t0.`string_col`, - t0.`timestamp_col`, t0.`year`, t0.`month` +SELECT t0.`id`, t0.`bool_col`, t0.`tinyint_col`, t0.`smallint_col`, + t0.`int_col`, t0.`bigint_col`, t0.`float_col`, t0.`double_col`, + t0.`date_string_col`, t0.`string_col`, t0.`timestamp_col`, + t0.`year`, t0.`month` FROM ( SELECT t1.* FROM functional_alltypes t1 diff --git a/test_compiler.py b/test_compiler.py index e99c381..7469ee1 100644 --- a/test_compiler.py +++ b/test_compiler.py @@ -23,8 +23,6 @@ def alltypes(): return ibis.table( ibis.schema( dict( - index="int64", - Unnamed_0="int64", id="int32", bool_col="boolean", tinyint_col="int8",
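For context, `s.c` is the ibis selector that matches columns by exact name, and `~` inverts the selection; the removed override was dropping the two load-artifact columns like so (schema trimmed for illustration):

```python
import ibis
import ibis.selectors as s

t = ibis.table(
    {"index": "int64", "Unnamed_0": "int64", "id": "int32"},
    name="functional_alltypes",
)

# Keep everything except the two load-artifact columns.
t.select(~s.c("index", "Unnamed_0"))
```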
|
|
test: localize table creation instead of altering existing batting table
|
2ab2205184a8ff582768bb651dcd13a753e489aa
|
test
|
https://github.com/ibis-project/ibis/commit/2ab2205184a8ff582768bb651dcd13a753e489aa
|
localize table creation instead of altering existing batting table
|
diff --git a/__init__.py b/__init__.py index 9872787..a7a47aa 100644 --- a/__init__.py +++ b/__init__.py @@ -93,10 +93,9 @@ class BaseAlchemyBackend(BaseSQLBackend): return '.'.join(map(str, self.con.dialect.server_version_info)) def list_tables(self, like=None, database=None): - tables = self._inspector.get_table_names( - schema=database - ) + self._inspector.get_view_names(schema=database) - return self._filter_with_like(tables, like) + tables = self.inspector.get_table_names(schema=database) + views = self.inspector.get_view_names(schema=database) + return self._filter_with_like(tables + views, like) def list_databases(self, like=None): """List databases in the current server.""" diff --git a/conftest.py b/conftest.py index f3c8aca..a68e07d 100644 --- a/conftest.py +++ b/conftest.py @@ -15,8 +15,6 @@ import pyspark.sql.functions as F # noqa: E402 import pyspark.sql.types as pt # noqa: E402 from pyspark.sql import SparkSession # noqa: E402 -_pyspark_testing_client = None - def get_common_spark_testing_client(data_directory, connect): spark = ( @@ -115,9 +113,7 @@ def get_common_spark_testing_client(data_directory, connect): .repartition(num_partitions) .sort('playerID') ) - df_batting.write.saveAsTable( - "batting", format="parquet", mode="overwrite", database="default" - ) + df_batting.createOrReplaceTempView("batting") df_awards_players = ( s.read.csv( @@ -203,13 +199,10 @@ def get_common_spark_testing_client(data_directory, connect): def get_pyspark_testing_client(data_directory): - global _pyspark_testing_client - if _pyspark_testing_client is None: - _pyspark_testing_client = get_common_spark_testing_client( - data_directory, - lambda session: ibis.backends.pyspark.Backend().connect(session), - ) - return _pyspark_testing_client + return get_common_spark_testing_client( + data_directory, + lambda session: ibis.backends.pyspark.Backend().connect(session), + ) class TestConf(BackendTest, RoundAwayFromZero): @@ -219,9 +212,6 @@ class TestConf(BackendTest, RoundAwayFromZero): def connect(data_directory): return get_pyspark_testing_client(data_directory) - def cleanup(self): - self.connection._session.sql("DROP TABLE IF EXISTS batting") - @pytest.fixture(scope='session') def client(data_directory): @@ -274,10 +264,7 @@ def client(data_directory): df_time_indexed.createTempView('time_indexed_table') - try: - yield client - finally: - client._session.sql("DROP TABLE IF EXISTS batting") + return client class IbisWindow: diff --git a/base.py b/base.py index 4719e86..6c171ea 100644 --- a/base.py +++ b/base.py @@ -150,6 +150,3 @@ class BackendTest(abc.ABC): self, params: Optional[Mapping[ir.ValueExpr, Any]] = None ): return self.api.compiler.make_context(params=params) - - def cleanup(self): - pass diff --git a/test_dot_sql.py b/test_dot_sql.py index 40a2ff5..21c7408 100644 --- a/test_dot_sql.py +++ b/test_dot_sql.py @@ -1,6 +1,9 @@ import pandas as pd import pytest +import ibis +from ibis import util + dot_sql_notimpl = pytest.mark.notimpl( ["clickhouse", "datafusion", "impala", "sqlite"] ) @@ -116,8 +119,12 @@ def test_dot_sql_repr(con): @dot_sql_notimpl @dot_sql_never def test_dot_sql_does_not_clobber_existing_tables(con): - expr = con.table("functional_alltypes").sql( - "SELECT 1 as x FROM functional_alltypes" - ) - with pytest.raises(ValueError): - expr.alias("batting") + name = f"ibis_{util.guid()}" + con.create_table(name, schema=ibis.schema(dict(a="string"))) + try: + expr = con.table(name).sql("SELECT 1 as x FROM functional_alltypes") + with pytest.raises(ValueError): + 
expr.alias(name) + finally: + con.drop_table(name, force=True) + assert name not in con.list_tables()
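The key difference is persistence: `createOrReplaceTempView` only registers session-scoped metadata, while `saveAsTable` writes files and catalog entries that outlive the test session and need an explicit `DROP TABLE` cleanup. A hedged sketch of the contrast (the local session and toy data are mine):

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[1]").getOrCreate()
df = spark.createDataFrame([(1, "a")], ["id", "val"])

# Session-scoped: vanishes with the session, nothing written to disk.
df.createOrReplaceTempView("batting")

# Persistent: writes parquet files plus catalog metadata that a later
# run must drop explicitly; this was the source of the removed cleanup
# hooks.
# df.write.saveAsTable("batting", format="parquet", mode="overwrite")

spark.sql("SELECT count(*) FROM batting").show()
```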
|
|
docs(workspace): update Query Inspection page
|
c546222396e282da5ac8f71c6516a8aeb7c543c2
|
docs
|
https://github.com/wzhiqing/cube/commit/c546222396e282da5ac8f71c6516a8aeb7c543c2
|
update Query Inspection page
|
diff --git a/Inspecting-Queries.mdx b/Inspecting-Queries.mdx index 637fdcb..09a9faf 100644 --- a/Inspecting-Queries.mdx +++ b/Inspecting-Queries.mdx @@ -5,55 +5,177 @@ category: Workspace menuOrder: 4 --- -You can inspect Cube queries in a web-based interface and improve their performance with pre-aggregations in Cube Cloud. +The Queries page in Cube Cloud is a one-stop shop for all performance and +diagnostic information about queries issued for a deployment. It is kept +up-to-date in real time and provides a quick way to check whether queries are +being accelerated with [pre-aggregations][ref-caching-gs-preaggs], how long they +took to execute, and if they failed. -<SuccessBox> +<Screenshot src="https://ucarecdn.com/44bb6d8d-5943-4978-8b15-73e1e43fd988/" /> -Query inspection is available in Cube Cloud on [all tiers](https://cube.dev/pricing). +## Setting the time range -</SuccessBox> +By default, Cube Cloud shows you a live feed of queries made to the API and +connected [data sources][ref-conf-db]. You can see query history for the last 72 +hours by using the date picker in the top right corner of the page and selecting +a time period: -<div style="text-align: center"> - <img - src="https://ucarecdn.com/815b2afb-3d8d-40d7-bfe6-ce2f917b421b/" - style="border: none" - width="100%" - /> -</div> +<Screenshot + highlight="inset(15% 1% 60% 21% round 10px)" + src="https://ucarecdn.com/691e7cfa-e7fd-4df1-8cfa-f8f0d74634f6/" +/> -## Find slow queries +To go back to live mode, click <Btn>▶</Btn>: -You can use filters to find slow and not pre-aggregated queries. +<Screenshot + highlight="inset(15% 6% 79% 88% round 10px)" + src="https://ucarecdn.com/691e7cfa-e7fd-4df1-8cfa-f8f0d74634f6/" +/> -<div style="text-align: center"> - <img - src="https://ucarecdn.com/a242026b-3a99-46fc-8803-147559985543/" - style="border: none" - width="100%" - /> -</div> +## Exploring queries -## Pre-aggregations +You can switch between [queries made to the API](#api-queries) and +[queries made to connected data sources](#database-queries) by switching between +the <Btn>API</Btn> and <Btn>Database</Btn> tabs. -When inspecting queries on the <Btn>Queries</Btn> page, you can now also see -whether the query used a pre-aggregation: +<Screenshot + highlight="inset(22% 64% 70% 20% round 10px)" + src="https://ucarecdn.com/44bb6d8d-5943-4978-8b15-73e1e43fd988/" +/> - +### <--{"id" : "API and Database"}--> All Queries and Top Queries -If you click a query from the list, we now provide more relevant information, -like if the query built a pre-aggregation or not: +Clicking <Btn>All Queries</Btn> will show all queries in order of recency, +while <Btn>Top Queries</Btn> will show the most-frequently executed +queries: - +<Screenshot + highlight="inset(30% 50% 64% 20% round 10px)" + src="https://ucarecdn.com/44bb6d8d-5943-4978-8b15-73e1e43fd988/" +/> -Cube Cloud also provides useful tips for pre-aggregation strategies and -recommends the best pre-aggregation for a specific query. Click on any query on -the <Btn>Queries</Btn> page and navigate to the <Btn>Pre-Aggregations</Btn> tab -inside the query details page. 
+### <--{"id" : "API and Database"}--> Filtering -<div style="text-align: center"> - <img - src="https://ucarecdn.com/aefcb733-12c2-4446-9520-c968062dd500/" - style="border: none" - width="100%" - /> -</div> +You can also use filters to find problematic queries; you can filter by query +duration, cache status, whether the query was accelerated, and by failing +queries: + +<Screenshot + highlight="inset(36% 8% 58% 20% round 10px)" + src="https://ucarecdn.com/44bb6d8d-5943-4978-8b15-73e1e43fd988/" +/> + +## Inspecting API Queries + +To see an expanded view of a query, click on <Btn>❯</Btn> next to any query: + +<Screenshot + highlight="inset(-1% 87% -1% 9% round 10px)" + src="https://ucarecdn.com/7ec40dc3-813d-4e3e-881b-dc01a6fdfb50/" +/> + +This will show us how long the query took, whether the query was accelerated, +and even view the raw query by clicking <Btn>❮/❯</Btn>: + +<Screenshot + highlight="inset(1% 2% 93% 68% round 10px)" + src="https://ucarecdn.com/e3c00115-49f9-48a6-8f2b-949c53523c9f/" +/> + +To drill down on a specific query, click it to see more information. + +### <--{"id" : "Inspecting API Queries"}--> Query + +The <Btn>Query</Btn> tab shows the raw JSON query sent to the Cube Cloud +deployment. + +<Screenshot src="https://ucarecdn.com/3c0490be-757d-46cd-ba2d-d7c93a7acf4e/" /> + +### <--{"id" : "Inspecting API Queries"}--> Errors + +If the query failed, the <Btn>Errors</Btn> tab will show you the error message +and stacktrace: + +<Screenshot src="https://ucarecdn.com/858633a0-4ee3-4dec-adb6-1d91ca30919f/" /> + +### <--{"id" : "Inspecting API Queries"}--> SQL + +The <Btn>SQL</Btn> tab shows the generated SQL query sent by Cube to either your +data source **or** Cube Store if the query was accelerated with a +pre-aggregation: + +<Screenshot src="https://ucarecdn.com/537f7269-654d-479c-a0cc-abc2941f9b5a/" /> + +### <--{"id" : "Inspecting API Queries"}--> Pre-Aggregations + +The <Btn>Pre-Aggregations</Btn> tab shows the +[pre-aggregation][ref-caching-gs-preaggs] used to accelerate this query, if one +was used: + +<Screenshot src="https://ucarecdn.com/e0a8d722-d43a-4d6f-8d3c-dbdd3173638e/" /> + +If no pre-aggregations were used for this query, you should see the following +screen: + +<Screenshot src="https://ucarecdn.com/b5cadb89-fbda-404a-806c-da99dc046845/" /> + +Clicking <Btn>Accelerate</Btn> takes you to the Rollup Designer, where you can +add a pre-aggregation to accelerate similar queries in the future. + +### <--{"id" : "Inspecting API Queries"}--> Queue Graph + +The <Btn>Queue Graph</Btn> tab details any activity in the query queue while +processing the query. This may include other queries that were being processed +or were waiting in the queue by Cube Cloud at the same time as this query: + +<Screenshot src="https://ucarecdn.com/dbac27fa-5722-415f-9374-11682b4159d7/" /> + +A large number of queries in the queue may indicate that your deployment is +under-provisioned, and you may want to consider scaling up your deployment. + +### <--{"id" : "Inspecting API Queries"}--> Flame Graph + +The <Btn>Flame Graph</Btn> tab shows a [flame graph][datadog-kb-flamegraph] of a +query's execution time across resources in the Cube Cloud deployment. This is +extremely useful for diagnosing where time is being spent in a query, and can +help identify bottlenecks in your Cube deployment or data source. 
+ +<Screenshot src="https://ucarecdn.com/56188be4-0614-48e1-97c8-6d8d89639849/" /> + +## Inspecting Database Queries + +### <--{"id" : "Inspecting Database Queries"}--> Query + +For Database requests, the <Btn>Query</Btn> tab shows the SQL query compiled by +Cube that is executed on the data source: + +<Screenshot src="https://ucarecdn.com/5a5779f6-b484-49bc-9269-6e145b89f5eb/" /> + +This can be useful for debugging queries that are failing or taking a long time, +as you can copy the query and run it directly against your data source. + +### <--{"id" : "Inspecting Database Queries"}--> Errors + +If the query failed, the <Btn>Errors</Btn> tab will show you the error message +and stacktrace: + +<Screenshot src="https://ucarecdn.com/efd45e3a-8b44-4ee9-9f9e-284f0fd8eb96/" /> + +Errors here generally indicate a problem with querying the data source. The +generated SQL query can be copied from the +<Btn>[Query](#inspecting-database-queries-query)</Btn> tab and run directly +against your data source to debug the issue. + +### <--{"id" : "Inspecting Database Queries"}--> Events + +The <Btn>Events</Btn> tab shows all data source-related events that occurred +while the query was in the query execution queue: + +<Screenshot src="https://ucarecdn.com/24eb11a1-4312-4d06-ac1b-6a0575c7dbe6/" /> + +[datadog-kb-flamegraph]: + https://www.datadoghq.com/knowledge-center/distributed-tracing/flame-graph/ +[ref-caching-gs-preaggs]: /caching/pre-aggregations/getting-started +[ref-conf-db]: /config/databases +[ref-deployment-api-instance]: /deployment/overview#api-instances +[ref-deployment-refresh-worker]: /deployment/overview#refresh-worker
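Since the raw JSON query shown on the <Btn>Query</Btn> tab can be copied out, one way to replay it for debugging is against Cube's REST API. A hypothetical sketch — the deployment URL, the token placeholder, and the `Orders` cube members are assumptions, not from the commit:

```python
# Hypothetical replay of a copied JSON query via Cube's REST API /load endpoint.
import requests

query = {
    "measures": ["Orders.count"],
    "dimensions": ["Orders.status"],
    "timeDimensions": [
        {
            "dimension": "Orders.createdAt",
            "granularity": "day",
            "dateRange": "last 7 days",
        }
    ],
}

resp = requests.post(
    "https://your-deployment.example.com/cubejs-api/v1/load",  # assumed URL
    headers={"Authorization": "REPLACE_WITH_API_TOKEN"},  # Cube API token
    json={"query": query},
)
resp.raise_for_status()
print(resp.json()["data"])
```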
|
|
build: website updated
|
e56389dc364f4fd4f3426a36486475d496c6870f
|
build
|
https://github.com/tsparticles/tsparticles/commit/e56389dc364f4fd4f3426a36486475d496c6870f
|
website updated
|
diff --git a/404.html b/404.html index 6e96a35..ccbfc5a 100644 --- a/404.html +++ b/404.html @@ -48,8 +48,8 @@ crossorigin="anonymous"></script> <script src="https://cdn.jsdelivr.net/npm/@fortawesome/[email protected]/js/fontawesome.min.js" integrity="sha256-NP9NujdEzS5m4ZxvNqkcbxyHB0dTRy9hG13RwTVBGwo=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.min.js" - integrity="sha256-DwlHFiVCFC/Js3K90eGBepzCPPAahB+mIz20FKcWUyU=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.min.js" + integrity="sha256-FCz5ToEA27payrGYaVGRidiIA+68Z31TBXFzcIT1/gU=" crossorigin="anonymous"></script> <script src="/js/404.min.js"> </script> </body> diff --git a/index.html b/index.html index e8700fb..63975ce 100644 --- a/index.html +++ b/index.html @@ -394,28 +394,28 @@ integrity="sha256-ur/YlHMU96MxHEsy3fHGszZHas7NzH4RQlD4tDVvFhw=" crossorigin="anonymous"></script> <script async defer src="https://buttons.github.io/buttons.js"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.min.js" - integrity="sha256-DwlHFiVCFC/Js3K90eGBepzCPPAahB+mIz20FKcWUyU=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.plugin.infection.min.js" - integrity="sha256-XRAJo21rJOZrNeor9sJxIl+yNwek/+fxFRjkJB0zZN4=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.interaction.light.min.js" - integrity="sha256-MCFSnwvJF6+rm7890kCNOpaqxecN2J1F87UXRmydRW8=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.interaction.particles.repulse.min.js" - integrity="sha256-BHvByJOYJq7p5YrPVLulFtbbWjE3SGfo+r+ikIq+hqQ=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.updater.gradient.min.js" - integrity="sha256-jU3+26SB4AgSQzW/SJiGw/x6UNOU0gd+ZJHfoU8sTx4=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.updater.orbit.min.js" - integrity="sha256-vV5R83k8aXX3bcNpasUQ2H46IRw1NerQeG+u2BdOnHs=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.curves.min.js" - integrity="sha256-f4Xbw2FvfHFGaVKo9aTXevHAd7YfDjBuGVL6VOKiMtE=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.polygon.min.js" - integrity="sha256-R5z0ZVN0QUAOeJL/HYorFGOsUii7TlqI5TOrE5ffvfU=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.perlin.noise.min.js" - integrity="sha256-RwXqXRcbCXDFM1o65eQq65WYhXouFB9S8idkfno0d1M=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.simplex.noise.min.js" - integrity="sha256-vntbegrvc2nU/VzqcMnNJwV1500NilA32pPmIC1yOfM=" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.links.min.js" - integrity="sha256-J3JN+Yu5HGPTNpsCwdDGXyZxqpzYl1l906QYkRLOQtk=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.min.js" + integrity="sha256-FCz5ToEA27payrGYaVGRidiIA+68Z31TBXFzcIT1/gU=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.plugin.infection.min.js" + integrity="sha256-4uE3SpYkEkw/0uWi33MCb4/VpyNZ5Wf9adtWDophuEw=" 
crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.interaction.light.min.js" + integrity="sha256-q85bR28UO+xCmwTs0MbhxQiBI+/CBX1VZUfBM7IOlpg=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.interaction.particles.repulse.min.js" + integrity="sha256-i4uc6HJXAY2QssX/X8UHyxgUXf9gOvOfgYe3+XjWCLo=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.updater.gradient.min.js" + integrity="sha256-0kqGJVZLNg/mBr8/rJUtlwWqOFbvmhOhOn/45Mav570=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.updater.orbit.min.js" + integrity="sha256-dnBA9y2Ph4TEs3//1EKFqo6lIhvm/1ULV4w4KKNc4Vg=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.curves.min.js" + integrity="sha256-9eI1MHgjzuapo54JotvNyOUU0PP6uNsHAH8syd9zSGo=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.polygon.min.js" + integrity="sha256-Myn75s/Kkn2Ez9VYK1b1/ULnv+8izbyyyO3WQVDAPGI=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.perlin.noise.min.js" + integrity="sha256-L0cOacI2irvill143OysaCDFztZ5KkUU9OIOLZzGH9s=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.path.simplex.noise.min.js" + integrity="sha256-FBYfSr6YYIh6PBVcA7ErIExUJZcdOTd9MmVdnSjEk/4=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.links.min.js" + integrity="sha256-3/iC2vcldUt6Qp8EFQBA6Bk7SC6cWmTnFJhdqrv1jIM=" crossorigin="anonymous"></script> <script src="../js/demo.min.js"></script> </body> diff --git a/demo.js b/demo.js index 22c05d2..db5e2dd 100644 --- a/demo.js +++ b/demo.js @@ -323,7 +323,7 @@ canvas { background-position: ${particlesContainer.style.backgroundPosition}; }`, js: `tsParticles.load("tsparticles", ${JSON.stringify(container.options)});`, - js_external: "https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.min.js", + js_external: "https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.min.js", title: "tsParticles example", description: "This pen was created with tsParticles from https://particles.js.org", tags: "tsparticles, javascript, typescript, design, animation", diff --git a/bigCircles.html b/bigCircles.html index f27fb82..a9615ad 100644 --- a/bigCircles.html +++ b/bigCircles.html @@ -66,8 +66,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.bigCircles.bundle.min.js" - integrity="sha256-d/P8suAOoEqUV0ynAWlttIgBhZ5+dyb+4nOlaRpHfPU=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.bigCircles.bundle.min.js" + integrity="sha256-tqL4leQGABOblheBwJ6wcMW3PZCW/yoHF1ZZ26SzSko=" crossorigin="anonymous"></script> <script src="https://cdn.jsdelivr.net/gh/highlightjs/[email protected]/build/highlight.min.js"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "bigCircles" }); diff --git a/bubbles.html b/bubbles.html index ffae723..d156742 100644 --- a/bubbles.html +++ b/bubbles.html @@ -64,8 +64,8 @@ </script> <script async 
src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.bubbles.bundle.min.js" - integrity="sha256-cHGFgV/sUCHUIrwgGjzq09SXBPDvwlS/Jg6fxhw1Nk4=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.bubbles.bundle.min.js" + integrity="sha256-Xj7IdPelA51XCQaDsBODLcgMJhKdIUdYiLe3KzCtiLM=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "bubbles" }); </script> diff --git a/confetti.html b/confetti.html index 67f296b..7cf6058 100644 --- a/confetti.html +++ b/confetti.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.confetti.bundle.min.js" - integrity="sha256-SrbMmICzFcW0/FcigJ67JVo9WhyMc395HQn+ufH9ros=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.confetti.bundle.min.js" + integrity="sha256-KQ+7q8kkJ4UoIBt78S69JPh9DWsvBTCr5k2cci6z9v8=" crossorigin="anonymous"></script> <script type="text/javascript"> confetti("tsparticles", {}); </script> diff --git a/fire.html b/fire.html index fe95d22..3851f21 100644 --- a/fire.html +++ b/fire.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.fire.bundle.min.js" - integrity="sha256-gkn0J5TJduWbP0+VVku0vndbTuMlKQFp8INOdnZgIJA=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.fire.bundle.min.js" + integrity="sha256-Or+vl56HP6fB7/J/SRcMNfGjlo9c6tPPyphzBrLGG9Y=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "fire" }); </script> diff --git a/firefly.html b/firefly.html index 50de5fd..7185790 100644 --- a/firefly.html +++ b/firefly.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.firefly.bundle.min.js" - integrity="sha256-ftgGPFZPX3+tJ7uN8Gt+ZQ0RjpO1tZM2lCNtO8XT380=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.firefly.bundle.min.js" + integrity="sha256-qD3H6JtSCFzZ89FcndhFOANuyt/9AkZ7sM8Y5R768fY=" crossorigin="anonymous"></script> <script> tsParticles.load("tsparticles", { preset: "firefly" }); </script> diff --git a/fireworks.html b/fireworks.html index 2c25f68..3807ee7 100644 --- a/fireworks.html +++ b/fireworks.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.fireworks.bundle.min.js" - integrity="sha256-1muNpE6olU461Kq6khIgNhRc5ki1TzkJaFIV040utAg=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.fireworks.bundle.min.js" + 
integrity="sha256-OSPU6sLJVUdOHRjzN1jwSdwpiF1ias0Jbc4WQyRcBwA=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "fireworks" }); </script> diff --git a/fountain.html b/fountain.html index 90c2f32..985e0b0 100644 --- a/fountain.html +++ b/fountain.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.fountain.bundle.min.js" - integrity="sha256-F0TARc7V1GZ7mw/ntZxz6pafne0IMUCx0WJw6PCEKFE=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.fountain.bundle.min.js" + integrity="sha256-vr/UgdwL+Lswcm01r0WAJE06GwH+bREOpKyQ8Icl2jw=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "fountain" }); </script> diff --git a/links.html b/links.html index 6e9a3e2..bb972f0 100644 --- a/links.html +++ b/links.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.links.bundle.min.js" - integrity="sha256-z6WulfWbP63pSLnqoXxVPzebWluq5v7xF8Ijee08nVI=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.links.bundle.min.js" + integrity="sha256-uN01MDTbiC8Vop2wbdWXEOaJM7cc+j95qScCDUdO2iY=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "links" }); </script> diff --git a/seaAnemone.html b/seaAnemone.html index 63c4085..f28da1a 100644 --- a/seaAnemone.html +++ b/seaAnemone.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.seaAnemone.bundle.min.js" - integrity="sha256-7XFPU55oH1FYYKH9/Og9WnALcR6hTRszm/9NNrxLpuE=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.seaAnemone.bundle.min.js" + integrity="sha256-dlFr1p7z4/7Wlw4N3J7Au/8nn6SA2svcsKX4hWJIcHw=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "seaAnemone" }); </script> diff --git a/snow.html b/snow.html index 9f9f325..976df93 100644 --- a/snow.html +++ b/snow.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.snow.bundle.min.js" - integrity="sha256-gOaYcEuwqZYujAzM2dHCjzcPe8LGES5w4803I0xGIzE=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.snow.bundle.min.js" + integrity="sha256-O6be0+aM+Qy9lPYFfPjz57av6yRWgMyFnYU9hZHUc0I=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "snow" }); </script> diff --git a/stars.html b/stars.html index 02fc803..565ffae 100644 --- a/stars.html +++ b/stars.html @@ -64,8 +64,8 @@ </script> <script async 
src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.stars.bundle.min.js" - integrity="sha256-TFyNhcAXSTVqVG6+EKtcIyC/78o4EvAgNiclZVdoxjQ=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.stars.bundle.min.js" + integrity="sha256-Jum6EFp/yzLvXK0TjudmmmahzRPkkSNquTEVtmly6JY=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "stars" }); </script> diff --git a/triangles.html b/triangles.html index 5479f98..07652a5 100644 --- a/triangles.html +++ b/triangles.html @@ -64,8 +64,8 @@ </script> <script async src="https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-1784552607103901" crossorigin="anonymous"></script> -<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.triangles.bundle.min.js" - integrity="sha256-5HWIar9PubjfVClmOS/0ZZPGvARZz25Xc4T0d0OXha8=" crossorigin="anonymous"></script> +<script src="https://cdn.jsdelivr.net/npm/[email protected]/tsparticles.preset.triangles.bundle.min.js" + integrity="sha256-o04P6LuC3pTjvNHUUQ2lmdgJ6jWK96R89yHk/69I3K0=" crossorigin="anonymous"></script> <script type="text/javascript"> tsParticles.load("tsparticles", { preset: "triangles" }); </script>
|
|
build: updates
|
a6cd85e72f08c0c3ebff2edcbdd7e4dae38ec37a
|
build
|
https://github.com/tsparticles/tsparticles/commit/a6cd85e72f08c0c3ebff2edcbdd7e4dae38ec37a
|
updates
|
diff --git a/lerna.json b/lerna.json index 399e90e..a39c894 100644 --- a/lerna.json +++ b/lerna.json @@ -17,7 +17,6 @@ ], "version": "2.10.1", "npmClient": "pnpm", - "useWorkspaces": true, "conventionalCommits": true, "command": { "version": { diff --git a/package.json b/package.json index 3a1add4..02a55b8 100644 --- a/package.json +++ b/package.json @@ -23,9 +23,8 @@ "@babel/core": "^7.22.1", "@commitlint/cli": "^17.6.5", "@commitlint/config-conventional": "^17.6.5", - "@nrwl/devkit": "^15.9.4", - "@nrwl/nx-cloud": "^15.3.5", - "@nrwl/workspace": "^15.9.4", + "@nrwl/devkit": "^16.3.2", + "@nrwl/workspace": "^16.3.2", "@skypack/package-check": "^0.2.2", "@tsparticles/cli": "^1.5.1", "@tsparticles/eslint-config": "^1.12.1", @@ -55,10 +54,10 @@ "install": "^0.13.0", "jsdom": "^22.1.0", "jsdom-global": "^3.0.2", - "lerna": "^6.6.2", + "lerna": "^7.0.1", "mocha": "^10.2.0", - "nx": "^15.9.4", - "nx-cloud": "^15.3.5", + "nx": "^16.3.2", + "nx-cloud": "^16.0.5", "nyc": "^15.1.0", "prettier": "^2.8.8", "reflect-metadata": "^0.1.13", @@ -81,4 +80,3 @@ "yorkie": "^2.0.0" } } - diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9362302..7b212ab 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -18,14 +18,11 @@ importers: specifier: ^17.6.5 version: 17.6.5 '@nrwl/devkit': - specifier: ^15.9.4 - version: 15.9.4([email protected]) - '@nrwl/nx-cloud': - specifier: ^15.3.5 - version: 15.3.5 + specifier: ^16.3.2 + version: 16.3.2([email protected]) '@nrwl/workspace': - specifier: ^15.9.4 - version: 15.9.4 + specifier: ^16.3.2 + version: 16.3.2 '@skypack/package-check': specifier: ^0.2.2 version: 0.2.2 @@ -114,17 +111,17 @@ importers: specifier: ^3.0.2 version: 3.0.2([email protected]) lerna: - specifier: ^6.6.2 - version: 6.6.2 + specifier: ^7.0.1 + version: 7.0.1 mocha: specifier: ^10.2.0 version: 10.2.0 nx: - specifier: ^15.9.4 - version: 15.9.4 + specifier: ^16.3.2 + version: 16.3.2 nx-cloud: - specifier: ^15.3.5 - version: 15.3.5 + specifier: ^16.0.5 + version: 16.0.5 nyc: specifier: ^15.1.0 version: 15.1.0 @@ -4180,10 +4177,6 @@ packages: wrap-ansi: 8.1.0 wrap-ansi-cjs: /[email protected] - /@isaacs/[email protected]: - resolution: {integrity: sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ==} - dev: true - /@istanbuljs/[email protected]: resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} engines: {node: '>=8'} @@ -4253,110 +4246,37 @@ packages: '@jridgewell/sourcemap-codec': 1.4.15 dev: true - /@lerna/[email protected]: - resolution: {integrity: sha512-QyKIWEnKQFnYu2ey+SAAm1A5xjzJLJJj3bhIZd3QKyXKKjaJ0hlxam/OsWSltxTNbcyH1jRJjC6Cxv31usv0Ag==} + /@lerna/[email protected]: + resolution: {integrity: sha512-lov3hXcX+g76fjQ5kg6+QdffL6kFw/aH3sG7NGk61mZfsWCDum9kcp9biqIRAmD6xJbrQsr1i0i9YeCMnHJ6pA==} engines: {node: ^14.17.0 || >=16.0.0} dependencies: chalk: 4.1.2 - execa: 5.0.0 + execa: 5.1.1 strong-log-transformer: 2.1.0 dev: true - /@lerna/[email protected]: - resolution: {integrity: sha512-xQ+1Y7D+9etvUlE+unhG/TwmM6XBzGIdFBaNoW8D8kyOa9M2Jf3vdEtAxVa7mhRz66CENfhL/+I/QkVaa7pwbQ==} + /@lerna/[email protected]: + resolution: {integrity: sha512-vv9gtbrn/gBwQLdDlUeatO3uY58nxMktv9h9/5GEFcBesV9MAeQ0zRz8zkr2C0DX/m25lE+J5KTzJqZtMb03kw==} engines: {node: ^14.17.0 || >=16.0.0} dependencies: - '@lerna/child-process': 6.6.2 + '@lerna/child-process': 7.0.1 dedent: 0.7.0 - fs-extra: 9.1.0 - init-package-json: 3.0.2 + fs-extra: 11.1.1 + init-package-json: 5.0.0 npm-package-arg: 8.1.1 p-reduce: 2.1.0 - 
pacote: 15.1.1 + pacote: 15.2.0 pify: 5.0.0 semver: 7.5.1 slash: 3.0.0 validate-npm-package-license: 3.0.4 - validate-npm-package-name: 4.0.0 + validate-npm-package-name: 5.0.0 yargs-parser: 20.2.4 transitivePeerDependencies: - bluebird - supports-color dev: true - /@lerna/[email protected]([email protected]): - resolution: {integrity: sha512-0hZxUPKnHwehUO2xC4ldtdX9bW0W1UosxebDIQlZL2STnZnA2IFmIk2lJVUyFW+cmTPQzV93jfS0i69T9Z+teg==} - engines: {node: ^14.17.0 || >=16.0.0} - dependencies: - '@npmcli/arborist': 6.2.3 - '@npmcli/run-script': 4.1.7 - '@nrwl/devkit': 15.9.4([email protected]) - '@octokit/rest': 19.0.3 - byte-size: 7.0.0 - chalk: 4.1.0 - clone-deep: 4.0.1 - cmd-shim: 5.0.0 - columnify: 1.6.0 - config-chain: 1.1.12 - conventional-changelog-core: 4.2.4 - conventional-recommended-bump: 6.1.0 - cosmiconfig: 7.0.0 - dedent: 0.7.0 - dot-prop: 6.0.1 - execa: 5.0.0 - file-url: 3.0.0 - find-up: 5.0.0 - fs-extra: 9.1.0 - get-port: 5.1.1 - get-stream: 6.0.0 - git-url-parse: 13.1.0 - glob-parent: 5.1.2 - globby: 11.1.0 - graceful-fs: 4.2.10 - has-unicode: 2.0.1 - inquirer: 8.2.4 - is-ci: 2.0.0 - is-stream: 2.0.0 - libnpmpublish: 7.1.4 - load-json-file: 6.2.0 - make-dir: 3.1.0 - minimatch: 3.0.5 - multimatch: 5.0.0 - node-fetch: 2.6.7 - npm-package-arg: 8.1.1 - npm-packlist: 5.1.1 - npm-registry-fetch: 14.0.3 - npmlog: 6.0.2 - p-map: 4.0.0 - p-map-series: 2.1.0 - p-queue: 6.6.2 - p-waterfall: 2.1.1 - pacote: 15.1.1 - pify: 5.0.0 - pretty-format: 29.4.3 - read-cmd-shim: 3.0.0 - read-package-json: 5.0.1 - resolve-from: 5.0.0 - semver: 7.3.8 - signal-exit: 3.0.7 - slash: 3.0.0 - ssri: 9.0.1 - strong-log-transformer: 2.1.0 - tar: 6.1.11 - temp-dir: 1.0.0 - tempy: 1.0.0 - upath: 2.0.1 - uuid: 8.3.2 - write-file-atomic: 4.0.1 - write-pkg: 4.0.0 - yargs: 16.2.0 - transitivePeerDependencies: - - bluebird - - encoding - - nx - - supports-color - dev: true - /@mapbox/[email protected]: resolution: {integrity: sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==} hasBin: true @@ -4403,49 +4323,6 @@ packages: '@nodelib/fs.scandir': 2.1.5 fastq: 1.15.0 - /@npmcli/[email protected]: - resolution: {integrity: sha512-lpGOC2ilSJXcc2zfW9QtukcCTcMbl3fVI0z4wvFB2AFIl0C+Q6Wv7ccrpdrQa8rvJ1ZVuc6qkX7HVTyKlzGqKA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - hasBin: true - dependencies: - '@isaacs/string-locale-compare': 1.1.0 - '@npmcli/fs': 3.1.0 - '@npmcli/installed-package-contents': 2.0.2 - '@npmcli/map-workspaces': 3.0.4 - '@npmcli/metavuln-calculator': 5.0.1 - '@npmcli/name-from-folder': 2.0.0 - '@npmcli/node-gyp': 3.0.0 - '@npmcli/package-json': 3.1.0 - '@npmcli/query': 3.0.0 - '@npmcli/run-script': 6.0.2 - bin-links: 4.0.1 - cacache: 17.1.3 - common-ancestor-path: 1.0.1 - hosted-git-info: 6.1.1 - json-parse-even-better-errors: 3.0.0 - json-stringify-nice: 1.1.4 - minimatch: 6.2.0 - nopt: 7.1.0 - npm-install-checks: 6.1.1 - npm-package-arg: 10.1.0 - npm-pick-manifest: 8.0.1 - npm-registry-fetch: 14.0.5 - npmlog: 7.0.1 - pacote: 15.1.1 - parse-conflict-json: 3.0.1 - proc-log: 3.0.0 - promise-all-reject-late: 1.0.1 - promise-call-limit: 1.0.2 - read-package-json-fast: 3.0.2 - semver: 7.5.1 - ssri: 10.0.4 - treeverse: 3.0.0 - walk-up-path: 1.0.0 - transitivePeerDependencies: - - bluebird - - supports-color - dev: true - /@npmcli/[email protected]: resolution: {integrity: sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} @@ -4486,29 +4363,6 @@ packages: 
npm-normalize-package-bin: 3.0.1 dev: true - /@npmcli/[email protected]: - resolution: {integrity: sha512-Z0TbvXkRbacjFFLpVpV0e2mheCh+WzQpcqL+4xp49uNJOxOnIAPZyXtUxZ5Qn3QBTGKA11Exjd9a5411rBrhDg==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - '@npmcli/name-from-folder': 2.0.0 - glob: 10.2.5 - minimatch: 9.0.0 - read-package-json-fast: 3.0.2 - dev: true - - /@npmcli/[email protected]: - resolution: {integrity: sha512-qb8Q9wIIlEPj3WeA1Lba91R4ZboPL0uspzV0F9uwP+9AYMVB2zOoa7Pbk12g6D2NHAinSbHh6QYmGuRyHZ874Q==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - cacache: 17.1.3 - json-parse-even-better-errors: 3.0.0 - pacote: 15.1.1 - semver: 7.5.1 - transitivePeerDependencies: - - bluebird - - supports-color - dev: true - /@npmcli/[email protected]: resolution: {integrity: sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} @@ -4518,38 +4372,11 @@ packages: rimraf: 3.0.2 dev: true - /@npmcli/[email protected]: - resolution: {integrity: sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dev: true - - /@npmcli/[email protected]: - resolution: {integrity: sha512-doNI35wIe3bBaEgrlPfdJPaCpUR89pJWep4Hq3aRdh6gKazIVWfs0jHttvSSoq47ZXgC7h73kDsUl8AoIQUB+A==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dev: true - /@npmcli/[email protected]: resolution: {integrity: sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dev: true - /@npmcli/[email protected]: - resolution: {integrity: sha512-qNPy6Yf9ruFST99xcrl5EWAvrb7qFrwgVbwdzcTJlIgxbArKOq5e/bgZ6rTL1X9hDgAdPbvL8RWx/OTLSB0ToA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - glob: 10.2.5 - json-parse-even-better-errors: 3.0.0 - normalize-package-data: 5.0.0 - npm-normalize-package-bin: 3.0.1 - dev: true - - /@npmcli/[email protected]: - resolution: {integrity: sha512-s9SgS+p3a9Eohe68cSI3fi+hpcZUmXq5P7w0kMlAsWVtR7XbK3ptkZqKT2cK1zLDObJ3sR+8P59sJE0w/KTL1g==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - infer-owner: 1.0.4 - dev: true - /@npmcli/[email protected]: resolution: {integrity: sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -4557,27 +4384,6 @@ packages: which: 3.0.1 dev: true - /@npmcli/[email protected]: - resolution: {integrity: sha512-MFNDSJNgsLZIEBVZ0Q9w9K7o07j5N4o4yjtdz2uEpuCZlXGMuPENiRaFYk0vRqAA64qVuUQwC05g27fRtfUgnA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - postcss-selector-parser: 6.0.13 - dev: true - - /@npmcli/[email protected]: - resolution: {integrity: sha512-WXr/MyM4tpKA4BotB81NccGAv8B48lNH0gRoILucbcAhTQXLCoi6HflMV3KdXubIqvP9SuLsFn68Z7r4jl+ppw==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - '@npmcli/node-gyp': 2.0.0 - '@npmcli/promise-spawn': 3.0.0 - node-gyp: 9.3.1 - read-package-json-fast: 2.0.3 - which: 2.0.2 - transitivePeerDependencies: - - bluebird - - supports-color - dev: true - /@npmcli/[email protected]: resolution: {integrity: sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -4592,47 +4398,59 @@ packages: - supports-color dev: true - /@nrwl/[email protected]: - resolution: {integrity: 
sha512-FoiGFCLpb/r4HXCM3KYqT0xteP+MRV6bIHjz3bdPHIDLmBNQQnRRaV2K47jtJ6zjh1eOU5UHKyDtDDYf80Idpw==} + /@nrwl/[email protected]([email protected]): + resolution: {integrity: sha512-EiDwVIvh6AcClXv22Q7auQh7Iy/ONISEFWzTswy/J6ZmVGCQesbiwg4cGV0MKiScr+awdVzqyNey+wD6IR5Lkw==} + dependencies: + '@nx/devkit': 16.3.2([email protected]) + transitivePeerDependencies: + - nx + dev: true + + /@nrwl/[email protected]: + resolution: {integrity: sha512-1p82ym8WE9ziejwgPslstn19iV/VkHfHfKr/5YOnfCHQS+NxUf92ogcYhHXtqWLblVZ9Zs4W4pkSXK4e04wCmQ==} + dependencies: + nx-cloud: 16.0.5 + transitivePeerDependencies: + - debug + dev: true + + /@nrwl/[email protected]: + resolution: {integrity: sha512-2Kg7dtv6JcQagCZPSq+okceI81NqmXGGgbKWqS7sOfdmp1otxS9uiUFNXw+Pdtnw38mdRviMtSOXScntu4sUKg==} + hasBin: true dependencies: - nx: 15.9.4 + nx: 16.3.2 transitivePeerDependencies: - '@swc-node/register' - '@swc/core' - debug dev: true - /@nrwl/[email protected]([email protected]): - resolution: {integrity: sha512-mUX1kXTuPMdTzFxIzH+MsSNvdppOmstPDOEtiGFZJTuJ625ki0HhNJILO3N2mJ7MeMrLqIlAiNdvelQaObxYsQ==} + /@nrwl/[email protected]: + resolution: {integrity: sha512-ORVzEEJIMOFYEOtOQHLU7N4vT4mYZ/JzKiwHZrHkCaVhgkiGBLoX3tOwVZjafKaa/24cGISv0J7WRtnfRKl2cA==} + dependencies: + '@nx/workspace': 16.3.2 + transitivePeerDependencies: + - '@swc-node/register' + - '@swc/core' + - debug + dev: true + + /@nx/[email protected]([email protected]): + resolution: {integrity: sha512-1ev3EDm2Sx/ibziZroL1SheqxDR7UgC49tkBgJz1GrQLQnfdhBYroCPSyBSWGPMLHjIuHb3+hyGSV1Bz+BIYOA==} peerDependencies: - nx: '>= 14.1 <= 16' + nx: '>= 15 <= 17' dependencies: + '@nrwl/devkit': 16.3.2([email protected]) ejs: 3.1.9 ignore: 5.2.4 - nx: 15.9.4 + nx: 16.3.2 semver: 7.3.4 tmp: 0.2.1 tslib: 2.5.1 dev: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-JMKLY0HhdzQ/6jEvfL/EecPPdsdBIM0SyFrWAjikSJAh5MqhpFJWnr6FfTc5P57PJZ+IUNLkJ21VMuoTrA4+4w==} - hasBin: true - dependencies: - axios: 0.21.4 - chalk: 4.1.0 - dotenv: 10.0.0 - fs-extra: 10.1.0 - node-machine-id: 1.1.12 - strip-json-comments: 3.1.1 - tar: 6.1.11 - yargs-parser: 21.1.1 - transitivePeerDependencies: - - debug - dev: true - - /@nrwl/[email protected]: - resolution: {integrity: sha512-XnvrnT9BJsgThY/4xUcYtE077ERq/img8CkRj7MOOBNOh0/nVcR4LGbBKDHtwE3HPk0ikyS/SxRyNa9msvi3QQ==} + /@nx/[email protected]: + resolution: {integrity: sha512-YfYVNfsJBzBcBnJUU4AcA6A4QMkgnVlETfp4KGL36Otq542mRY1ISGHdox63ocI5AKh5gay5AaGcR4wR9PU9Vg==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] @@ -4640,8 +4458,8 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-WKSfSlpVMLchpXkax0geeUNyhvNxwO7qUz/s0/HJWBekt8fizwKDwDj1gP7fOu+YWb/tHiSscbR1km8PtdjhQw==} + /@nx/[email protected]: + resolution: {integrity: sha512-bJtpozz0zSRVRrcQ76GrlT3TWEGTymLYWrVG51bH5KZ46t6/a4EQBI3uL3vubMmOZ0jR4ywybOcPBBhxmBJ68w==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] @@ -4649,8 +4467,17 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-a/b4PP7lP/Cgrh0LjC4O2YTt5pyf4DQTGtuE8qlo8o486UiofCtk4QGJX72q80s23L0ejCaKY2ULKx/3zMLjuA==} + /@nx/[email protected]: + resolution: {integrity: sha512-ZvufI0bWqT67nLbBo6ejrIGxypdoedRQTP/tudWbs/4isvxLe1uVku1BfKCTQUsJG367SqNOU1H5kzI/MRr3ow==} + engines: {node: '>= 10'} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /@nx/[email protected]: + resolution: {integrity: sha512-IQL4kxdiZLvifar7+SIum3glRuVsxtE0dL8RvteSDXrxDQnaTUrjILC+VGhalRmk7ngBbGKNrhWOeeL7390CzQ==} engines: {node: '>= 10'} cpu: [arm] 
os: [linux] @@ -4658,8 +4485,8 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-ibBV8fMhSfLVd/2WzcDuUm32BoZsattuKkvMmOoyU6Pzoznc3AqyDjJR4xCIoAn5Rf+Nu1oeQONr5FAtb1Ugow==} + /@nx/[email protected]: + resolution: {integrity: sha512-f6AWgPVu3mfUEoOBa0rY2/7QY0Or9eR0KtLFpcPh7RUpxPw2EXzIbjD/0RGipdpspSrgiMKbZpsUjo6mXBFsQA==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] @@ -4667,8 +4494,8 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-iIjvVYd7+uM4jVD461+PvU5XTALgSvJOODUaMRGOoDl0KlMuTe6pQZlw0eXjl5rcTd6paKaVFWT5j6awr8kj7w==} + /@nx/[email protected]: + resolution: {integrity: sha512-AvrWcYz7021E3b5P9/0i26p60XMZfw86Epks51L6AhlflarlOH4AcEChc7APMtb1ELAIbDWx2S6oIDRbQ7rtVA==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] @@ -4676,8 +4503,8 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-q4OyH72mdrE4KellBWtwpr5EwfxHKNoFP9//7FAILO68ROh0rpMd7YQMlTB7T04UEUHjKEEsFGTlVXIee3Viwg==} + /@nx/[email protected]: + resolution: {integrity: sha512-K2pWGAcbCNm6b7UZI9cc8z4Rb540QcuepBXD7akjPjWerzXriT6VCn4i9mVKsCg2mwSfknTJJVJ1PZwJSmTl/Q==} engines: {node: '>= 10'} cpu: [x64] os: [linux] @@ -4685,8 +4512,8 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-67+/XNMR1CgLPyeGX8jqSG6l8yYD0iiwUgcu1Vaxq6N05WwnqVisIW8XzLSRUtKt4WyVQgOWk3aspImpMVOG3Q==} + /@nx/[email protected]: + resolution: {integrity: sha512-sY1QDuQlqyYiRPJZanrtV07tU0DOXiCrWb0pDsGiO0qHuUSmW5Vw17GWEY4z3rt0/5U8fJ+/9WQrneviOmsOKg==} engines: {node: '>= 10'} cpu: [x64] os: [linux] @@ -4694,8 +4521,8 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-2rEsq3eOGVCYpYJn2tTJkOGNJm/U8rP/FmqtZXYa6VJv/00XP3Gl00IXFEDaYV6rZo7SWqLxtEPUbjK5LwPzZA==} + /@nx/[email protected]: + resolution: {integrity: sha512-wBfohT2hjrLKn9WFHvG0MFVk7uYhgYNiptnTLdTouziHgFyZ08vyl7XYBq55BwHPMQ5iswVoEfjn/5ZBfCPscg==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] @@ -4703,8 +4530,8 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-bogVju4Z/hy1jbppqaTNbmV1R4Kg0R5fKxXAXC2LaL7FL0dup31wPumdV+mXttXBNOBDjV8V/Oz1ZqdmxpOJUw==} + /@nx/[email protected]: + resolution: {integrity: sha512-QC0sWrfQm0/WdvvM//7UAgm+otbak6bznZ0zawTeqmLBh1hLjNeweyzSVKQEtZtlzDMKpzCVuuwkJq+VKBLvmw==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -4712,21 +4539,11 @@ packages: dev: true optional: true - /@nrwl/[email protected]: - resolution: {integrity: sha512-m90iz8UsXx1rgPm1dxsBQjSrCViWYZIrp8bpwjSCW24j3kifyilYSXGuKaRwZwUn7eNmH/kZcI9/8qeGIPF4Sg==} - hasBin: true - dependencies: - nx: 15.9.4 - transitivePeerDependencies: - - '@swc-node/register' - - '@swc/core' - - debug - dev: true - - /@nrwl/[email protected]: - resolution: {integrity: sha512-CvF6Bv0WetYD4eurTiLKyGz3LOLoEVur81RMvpijPeM2tKOhG3DrgX+x55a5NVbXimTU2hJcxk7GSGEtZmJvZg==} + /@nx/[email protected]: + resolution: {integrity: sha512-gFrJEv3+Jn2leu3RKFTakPHY8okI8hjOg8RO4OWA2ZemFXRyh9oIm/xsCsOyqYlGt06eqV2mD3GUun/05z1nhg==} dependencies: - '@nrwl/devkit': 15.9.4([email protected]) + '@nrwl/workspace': 16.3.2 + '@nx/devkit': 16.3.2([email protected]) '@parcel/watcher': 2.0.4 chalk: 4.1.2 chokidar: 3.5.3 @@ -4735,13 +4552,12 @@ packages: dotenv: 10.0.0 figures: 3.2.0 flat: 5.0.2 - glob: 7.1.4 ignore: 5.2.4 minimatch: 3.0.5 npm-run-path: 4.0.1 - nx: 15.9.4 + nx: 16.3.2 open: 8.4.2 - rxjs: 6.6.7 + rxjs: 7.8.1 tmp: 0.2.1 tslib: 2.5.1 yargs: 17.7.2 @@ -4756,18 +4572,18 @@ 
packages: resolution: {integrity: sha512-/aFM2M4HVDBT/jjDBa84sJniv1t9Gm/rLkalaz9htOm+L+8JMj1k9w0CkUdcxNyNxZPlTxKPVko+m1VlM58ZVA==} engines: {node: '>= 14'} dependencies: - '@octokit/types': 9.2.2 + '@octokit/types': 9.2.3 dev: true - /@octokit/[email protected]: - resolution: {integrity: sha512-AgvDRUg3COpR82P7PBdGZF/NNqGmtMq2NiPqeSsDIeCfYFOZ9gddqWNQHnFdEUf+YwOj4aZYmJnlPp7OXmDIDg==} + /@octokit/[email protected]: + resolution: {integrity: sha512-tEDxFx8E38zF3gT7sSMDrT1tGumDgsw5yPG6BBh/X+5ClIQfMH/Yqocxz1PnHx6CHyF6pxmovUTOfZAUvQ0Lvw==} engines: {node: '>= 14'} dependencies: '@octokit/auth-token': 3.0.3 '@octokit/graphql': 5.0.5 '@octokit/request': 6.2.4 '@octokit/request-error': 3.0.3 - '@octokit/types': 9.2.2 + '@octokit/types': 9.2.3 before-after-hook: 2.2.3 universal-user-agent: 6.0.0 transitivePeerDependencies: @@ -4778,7 +4594,7 @@ packages: resolution: {integrity: sha512-LG4o4HMY1Xoaec87IqQ41TQ+glvIeTKqfjkCEmt5AIwDZJwQeVZFIEYXrYY6yLwK+pAScb9Gj4q+Nz2qSw1roA==} engines: {node: '>= 14'} dependencies: - '@octokit/types': 9.2.2 + '@octokit/types': 9.2.3 is-plain-object: 5.0.0 universal-user-agent: 6.0.0 dev: true @@ -4788,20 +4604,12 @@ packages: engines: {node: '>= 14'} dependencies: '@octokit/request': 6.2.4 - '@octokit/types': 9.2.2 + '@octokit/types': 9.2.3 universal-user-agent: 6.0.0 transitivePeerDependencies: - encoding dev: true - /@octokit/[email protected]: - resolution: {integrity: sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ==} - dev: true - - /@octokit/[email protected]: - resolution: {integrity: sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==} - dev: true - /@octokit/[email protected]: resolution: {integrity: sha512-MazrFNx4plbLsGl+LFesMo96eIXkFgEtaKbnNpdh4aQ0VM10aoylFsTYP1AEjkeoRNZiiPe3T6Gl2Hr8dJWdlQ==} dev: true @@ -4810,40 +4618,40 @@ packages: resolution: {integrity: sha512-93uGjlhUD+iNg1iWhUENAtJata6w5nE+V4urXOAlIXdco6xNZtUSfYY8dzp3Udy74aqO/B5UZL80x/YMa5PKRw==} dev: true - /@octokit/[email protected](@octokit/[email protected]): - resolution: {integrity: sha512-+cfc40pMzWcLkoDcLb1KXqjX0jTGYXjKuQdFQDc6UAknISJHnZTiBqld6HDwRJvD4DsouDKrWXNbNV0lE/3AXA==} + /@octokit/[email protected](@octokit/[email protected]): + resolution: {integrity: sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==} engines: {node: '>= 14'} peerDependencies: '@octokit/core': '>=4' dependencies: - '@octokit/core': 4.2.0 - '@octokit/types': 6.41.0 + '@octokit/core': 4.2.1 + '@octokit/tsconfig': 1.0.2 + '@octokit/types': 9.2.3 dev: true - /@octokit/[email protected](@octokit/[email protected]): + /@octokit/[email protected](@octokit/[email protected]): resolution: {integrity: sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==} peerDependencies: '@octokit/core': '>=3' dependencies: - '@octokit/core': 4.2.0 + '@octokit/core': 4.2.1 dev: true - /@octokit/[email protected](@octokit/[email protected]): - resolution: {integrity: sha512-QrlaTm8Lyc/TbU7BL/8bO49vp+RZ6W3McxxmmQTgYxf2sWkO8ZKuj4dLhPNJD6VCUW1hetCmeIM0m6FTVpDiEg==} + /@octokit/[email protected](@octokit/[email protected]): + resolution: {integrity: sha512-0aoPd4f1k/KXPTGSX0NbxcBrShBHArgcW3pujEvLa6wUfcfA1BehxQ2Ifwa6CbJ4SfzaO79FvGgaUipoxDsgjA==} engines: {node: '>= 14'} peerDependencies: '@octokit/core': '>=3' dependencies: - '@octokit/core': 4.2.0 - '@octokit/types': 8.2.1 - deprecation: 2.3.1 + '@octokit/core': 4.2.1 + '@octokit/types': 9.2.3 dev: true /@octokit/[email 
protected]: resolution: {integrity: sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==} engines: {node: '>= 14'} dependencies: - '@octokit/types': 9.2.2 + '@octokit/types': 9.2.3 deprecation: 2.3.1 once: 1.4.0 dev: true @@ -4854,40 +4662,32 @@ packages: dependencies: '@octokit/endpoint': 7.0.5 '@octokit/request-error': 3.0.3 - '@octokit/types': 9.2.2 + '@octokit/types': 9.2.3 is-plain-object: 5.0.0 - node-fetch: 2.6.7 + node-fetch: 2.6.11 universal-user-agent: 6.0.0 transitivePeerDependencies: - encoding dev: true - /@octokit/[email protected]: - resolution: {integrity: sha512-5arkTsnnRT7/sbI4fqgSJ35KiFaN7zQm0uQiQtivNQLI8RQx8EHwJCajcTUwmaCMNDg7tdCvqAnc7uvHHPxrtQ==} + /@octokit/[email protected]: + resolution: {integrity: sha512-m2a9VhaP5/tUw8FwfnW2ICXlXpLPIqxtg3XcAiGMLj/Xhw3RSBfZ8le/466ktO1Gcjr8oXudGnHhxV1TXJgFxw==} engines: {node: '>= 14'} dependencies: - '@octokit/core': 4.2.0 - '@octokit/plugin-paginate-rest': 3.1.0(@octokit/[email protected]) - '@octokit/plugin-request-log': 1.0.4(@octokit/[email protected]) - '@octokit/plugin-rest-endpoint-methods': 6.8.1(@octokit/[email protected]) + '@octokit/core': 4.2.1 + '@octokit/plugin-paginate-rest': 6.1.2(@octokit/[email protected]) + '@octokit/plugin-request-log': 1.0.4(@octokit/[email protected]) + '@octokit/plugin-rest-endpoint-methods': 7.1.3(@octokit/[email protected]) transitivePeerDependencies: - encoding dev: true - /@octokit/[email protected]: - resolution: {integrity: sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==} - dependencies: - '@octokit/openapi-types': 12.11.0 - dev: true - - /@octokit/[email protected]: - resolution: {integrity: sha512-8oWMUji8be66q2B9PmEIUyQm00VPDPun07umUWSaCwxmeaquFBro4Hcc3ruVoDo3zkQyZBlRvhIMEYS3pBhanw==} - dependencies: - '@octokit/openapi-types': 14.0.0 + /@octokit/[email protected]: + resolution: {integrity: sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==} dev: true - /@octokit/[email protected]: - resolution: {integrity: sha512-9BjDxjgQIvCjNWZsbqyH5QC2Yni16oaE6xL+8SUBMzcYPF4TGQBXGA97Cl3KceK9mwiNMb1mOYCz6FbCCLEL+g==} + /@octokit/[email protected]: + resolution: {integrity: sha512-MMeLdHyFIALioycq+LFcA71v0S2xpQUX2cw6pPbHQjaibcHYwLnmK/kMZaWuGfGfjBJZ3wRUq+dOaWsvrPJVvA==} dependencies: '@octokit/openapi-types': 17.2.0 dev: true @@ -5313,10 +5113,6 @@ packages: resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==} dev: true - /@types/[email protected]: - resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} - dev: true - /@types/[email protected]: resolution: {integrity: sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg==} dev: true @@ -5996,18 +5792,6 @@ packages: /[email protected]: resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==} - /[email protected]: - resolution: {integrity: sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dev: true - - /[email protected]: - resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} - engines: {node: '>=6.5'} - dependencies: - event-target-shim: 5.0.1 - dev: true - /[email protected]: resolution: {integrity: 
sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} engines: {node: '>= 0.6'} @@ -6249,14 +6033,6 @@ packages: readable-stream: 3.6.2 dev: true - /[email protected]: - resolution: {integrity: sha512-nSXlV+u3vtVjRgihdTzbfWYzxPWGo424zPgQbHD0ZqIla3jqYAewDcvee0Ua2hjS5IfTAmjGlx1Jf0PKwjZDEw==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - delegates: 1.0.0 - readable-stream: 4.4.0 - dev: true - /[email protected]: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} dev: true @@ -6316,15 +6092,12 @@ packages: /[email protected]: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - /[email protected]: - resolution: {integrity: sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==} - engines: {node: '>= 4.0.0'} - dev: true - - /[email protected]: - resolution: {integrity: sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==} + /[email protected]: + resolution: {integrity: sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA==} dependencies: follow-redirects: 1.15.2 + form-data: 4.0.0 + proxy-from-env: 1.1.0 transitivePeerDependencies: - debug dev: true @@ -6476,16 +6249,6 @@ packages: resolution: {integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==} dev: true - /[email protected]: - resolution: {integrity: sha512-bmFEM39CyX336ZGGRsGPlc6jZHriIoHacOQcTt72MktIjpPhZoP4te2jOyUXF3BLILmJ8aNLncoPVeIIFlrDeA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - cmd-shim: 6.0.1 - npm-normalize-package-bin: 3.0.1 - read-cmd-shim: 4.0.0 - write-file-atomic: 5.0.1 - dev: true - /[email protected]: resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} engines: {node: '>=8'} @@ -6588,13 +6351,6 @@ packages: ieee754: 1.2.1 dev: true - /[email protected]: - resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - dev: true - /[email protected]: resolution: {integrity: sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==} dev: true @@ -6605,9 +6361,9 @@ packages: semver: 7.5.1 dev: true - /[email protected]: - resolution: {integrity: sha512-NNiBxKgxybMBtWdmvx7ZITJi4ZG+CYUgwOSZTfqB1qogkRHrhbQE/R2r5Fh94X+InN5MCYz6SvB/ejHMj/HbsQ==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-tUkzZWK0M/qdoLEqikxBWe4kumyuwjl3HO6zHTr4yEI23EojPtLYXdG1+AQY7MN0cGyNDvEaJ8wiYQm6P2bPxg==} + engines: {node: '>=12.17'} dev: true /[email protected]: @@ -6840,10 +6596,6 @@ packages: resolution: {integrity: sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==} dev: true - /[email protected]: - resolution: {integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==} - dev: true - /[email protected]: resolution: {integrity: sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==} engines: {node: '>=8'} @@ -6941,13 +6693,6 @@ packages: engines: {node: '>=0.8'} dev: true - /[email protected]: - resolution: {integrity: 
sha512-qkCtZ59BidfEwHltnJwkyVZn+XQojdAySM1D1gSeh11Z4pW1Kpolkyo53L5noc0nrxmIvyFwTmJRo4xs7FFLPw==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - mkdirp-infer-owner: 2.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-S9iI9y0nKR4hwEQsVWpyxld/6kRfGepGfzff83FcaiEBpmvlbA2nnGe7Cylgrx2f/p1P5S5wpRm9oL8z1PbS3Q==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -7011,10 +6756,6 @@ packages: resolution: {integrity: sha512-B52sN2VNghyq5ofvUsqZjmk6YkihBX5vMSChmSK9v4ShjKf3Vk5Xcmgpw4o+iIgtrnM/u5FiMpz9VKb8lpBveA==} engines: {node: '>= 12.0.0'} - /[email protected]: - resolution: {integrity: sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w==} - dev: true - /[email protected]: resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==} @@ -7058,13 +6799,6 @@ packages: yargs: 17.7.2 dev: true - /[email protected]: - resolution: {integrity: sha512-a1eOIcu8+7lUInge4Rpf/n4Krkf3Dd9lqhljRzII1/Zno/kRtUWnznPO3jOKBmTEktkt3fkxisUcivoj0ebzoA==} - dependencies: - ini: 1.3.8 - proto-list: 1.2.4 - dev: true - /[email protected]: resolution: {integrity: sha512-3R0kMOdL7CjJpU66fzAkCe6HNtd3AavCS4m+uW4KtJjrdGPT0SQEZieAYd+cm+lJoBznNQ4lqipYWkhBMgk00g==} dev: true @@ -7100,20 +6834,19 @@ packages: engines: {node: '>= 0.6'} dev: true - /[email protected]: - resolution: {integrity: sha512-5GLsbnkR/7A89RyHLvvoExbiGbd9xKdKqDTrArnPbOqBqG/2wIosu0fHwpeIRI8Tl94MhVNBXcLJZl92ZQ5USw==} + /[email protected]: + resolution: {integrity: sha512-i/gipMxs7s8L/QeuavPF2hLnJgH6pEZAttySB6aiQLWcX3puWDL3ACVmvBhJGxnAy52Qc15ua26BufY6KpmrVA==} engines: {node: '>=10'} dependencies: compare-func: 2.0.0 q: 1.5.1 dev: true - /[email protected]: - resolution: {integrity: sha512-i/gipMxs7s8L/QeuavPF2hLnJgH6pEZAttySB6aiQLWcX3puWDL3ACVmvBhJGxnAy52Qc15ua26BufY6KpmrVA==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-6qLgrBF4gueoC7AFVHu51nHL9pF9FRjXrH+ceVf7WmAfH3gs+gEYOkvxhjMPjZu57I4AGUGoNTY8V7Hrgf1uqg==} + engines: {node: '>=14'} dependencies: compare-func: 2.0.0 - q: 1.5.1 dev: true /[email protected]: @@ -7125,50 +6858,45 @@ packages: q: 1.5.1 dev: true - /[email protected]: - resolution: {integrity: sha512-gDVS+zVJHE2v4SLc6B0sLsPiloR0ygU7HaDW14aNJE1v4SlqJPILPl/aJC7YdtRE4CybBf8gDwObBvKha8Xlyg==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-Rvi5pH+LvgsqGwZPZ3Cq/tz4ty7mjijhr3qR4m9IBXNbxGGYgTVVO+duXzz9aArmHxFtwZ+LRkrNIMDQzgoY4A==} + engines: {node: '>=14'} dependencies: add-stream: 1.0.0 - conventional-changelog-writer: 5.0.1 - conventional-commits-parser: 3.2.4 + conventional-changelog-writer: 6.0.0 + conventional-commits-parser: 4.0.0 dateformat: 3.0.3 get-pkg-repo: 4.2.1 - git-raw-commits: 2.0.11 + git-raw-commits: 3.0.0 git-remote-origin-url: 2.0.0 - git-semver-tags: 4.1.1 - lodash: 4.17.21 + git-semver-tags: 5.0.0 normalize-package-data: 3.0.3 - q: 1.5.1 read-pkg: 3.0.0 read-pkg-up: 3.0.0 - through2: 4.0.2 dev: true - /[email protected]: - resolution: {integrity: sha512-GEKRWkrSAZeTq5+YjUZOYxdHq+ci4dNwHvpaBC3+ENalzFWuCWa9EZXSuZBpkr72sMdKB+1fyDV4takK1Lf58g==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-qy9XbdSLmVnwnvzEisjxdDiLA4OmV3o8db+Zdg4WiFw14fP3B6XNz98X0swPPpkTd/pc1K7+adKgEDM1JCUMiA==} + engines: {node: '>=14'} dev: true - /[email protected]: - resolution: {integrity: sha512-5WsuKUfxW7suLblAbFnxAcrvf6r+0b7GvNaWUwUIk0bXMnENP/PEieGKVUQrjPqwPT4o3EPAASBXiY6iHooLOQ==} - engines: {node: '>=10'} + /[email 
protected]: + resolution: {integrity: sha512-8PyWTnn7zBIt9l4hj4UusFs1TyG+9Ulu1zlOAc72L7Sdv9Hsc8E86ot7htY3HXCVhXHB/NO0pVGvZpwsyJvFfw==} + engines: {node: '>=14'} hasBin: true dependencies: - conventional-commits-filter: 2.0.7 + conventional-commits-filter: 3.0.0 dateformat: 3.0.3 handlebars: 4.7.7 json-stringify-safe: 5.0.1 - lodash: 4.17.21 meow: 8.1.2 semver: 6.3.0 split: 1.0.1 - through2: 4.0.2 dev: true - /[email protected]: - resolution: {integrity: sha512-ASS9SamOP4TbCClsRHxIHXRfcGCnIoQqkvAzCSbZzTFLfcTqJVugB0agRgsEELsqaeWgsXv513eS116wnlSSPA==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==} + engines: {node: '>=14'} dependencies: lodash.ismatch: 4.4.0 modify-values: 1.0.1 @@ -7187,19 +6915,29 @@ packages: through2: 4.0.2 dev: true - /[email protected]: - resolution: {integrity: sha512-uiApbSiNGM/kkdL9GTOLAqC4hbptObFo4wW2QRyHsKciGAfQuLU1ShZ1BIVI/+K2BE/W1AWYQMCXAsv4dyKPaw==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-WRv5j1FsVM5FISJkoYMR6tPk07fkKT0UodruX4je86V4owk451yjXAKzKAPOs9l7y59E2viHUS9eQ+dfUA9NSg==} + engines: {node: '>=14'} + hasBin: true + dependencies: + JSONStream: 1.3.5 + is-text-path: 1.0.1 + meow: 8.1.2 + split2: 3.2.2 + dev: true + + /[email protected]: + resolution: {integrity: sha512-Ft79FF4SlOFvX4PkwFDRnaNiIVX7YbmqGU0RwccUaiGvgp3S0a8ipR2/Qxk31vclDNM+GSdJOVs2KrsUCjblVA==} + engines: {node: '>=14'} hasBin: true dependencies: concat-stream: 2.0.0 - conventional-changelog-preset-loader: 2.3.4 - conventional-commits-filter: 2.0.7 - conventional-commits-parser: 3.2.4 - git-raw-commits: 2.0.11 - git-semver-tags: 4.1.1 + conventional-changelog-preset-loader: 3.0.0 + conventional-commits-filter: 3.0.0 + conventional-commits-parser: 4.0.0 + git-raw-commits: 3.0.0 + git-semver-tags: 5.0.0 meow: 8.1.2 - q: 1.5.1 dev: true /[email protected]: @@ -7255,19 +6993,18 @@ packages: typescript: 5.1.3 dev: true - /[email protected]: - resolution: {integrity: sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-/UkO2JKI18b5jVMJUp0lvKFMpa/Gye+ZgZjKD+DGEN9y7NRcf/nK1A0sp67ONmKtnDCNMS44E6jrk0Yc3bDuUw==} + engines: {node: '>=14'} dependencies: - '@types/parse-json': 4.0.0 import-fresh: 3.3.0 + js-yaml: 4.1.0 parse-json: 5.2.0 path-type: 4.0.0 - yaml: 1.10.2 dev: true - /[email protected]: - resolution: {integrity: sha512-/UkO2JKI18b5jVMJUp0lvKFMpa/Gye+ZgZjKD+DGEN9y7NRcf/nK1A0sp67ONmKtnDCNMS44E6jrk0Yc3bDuUw==} + /[email protected]: + resolution: {integrity: sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ==} engines: {node: '>=14'} dependencies: import-fresh: 3.3.0 @@ -7296,22 +7033,11 @@ packages: shebang-command: 2.0.0 which: 2.0.2 - /[email protected]: - resolution: {integrity: sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==} - engines: {node: '>=8'} - dev: true - /[email protected]: resolution: {integrity: sha512-TgQBEdP07adhrDfXvI5o6bHGukKBNMzp2Ngckc/6d09zpjD2gc1Hl3Ca1CKgb8FXjHi88+Phv2Uegs2kTL4zjg==} hasBin: true dev: true - /[email protected]: - resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} - engines: {node: '>=4'} - hasBin: true - dev: true - /[email protected]: resolution: {integrity: 
sha512-N4u2ABATi3Qplzf0hWbVCdjenim8F3ojEXpBDF5hBpjzW182MjNGLqfmQ0SkSPeQ+V86ZXgeH8aXj6kayd4jgg==} engines: {node: '>=14'} @@ -7457,20 +7183,6 @@ packages: dev: true optional: true - /[email protected]: - resolution: {integrity: sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==} - engines: {node: '>=10'} - dependencies: - globby: 11.1.0 - graceful-fs: 4.2.11 - is-glob: 4.0.3 - is-path-cwd: 2.2.0 - is-path-inside: 3.0.3 - p-map: 4.0.0 - rimraf: 3.0.2 - slash: 3.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} @@ -7506,6 +7218,11 @@ packages: dev: true optional: true + /[email protected]: + resolution: {integrity: sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dev: true + /[email protected]: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} @@ -7552,13 +7269,6 @@ packages: is-obj: 2.0.0 dev: true - /[email protected]: - resolution: {integrity: sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==} - engines: {node: '>=10'} - dependencies: - is-obj: 2.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==} engines: {node: '>=10'} @@ -7988,11 +7698,6 @@ packages: engines: {node: '>= 0.6'} dev: true - /[email protected]: - resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} - engines: {node: '>=6'} - dev: true - /[email protected]: resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} dev: true @@ -8021,7 +7726,7 @@ packages: cross-spawn: 7.0.3 get-stream: 6.0.1 human-signals: 2.1.0 - is-stream: 2.0.0 + is-stream: 2.0.1 merge-stream: 2.0.0 npm-run-path: 4.0.1 onetime: 5.1.2 @@ -8173,11 +7878,6 @@ packages: dependencies: flat-cache: 3.0.4 - /[email protected]: - resolution: {integrity: sha512-g872QGsHexznxkIAdK8UiZRe7SkE6kvylShU4Nsj8NvfvZag7S0QuQ4IgvPDkk75HxgjIVDwycFTDAgIiO4nDA==} - engines: {node: '>=8'} - dev: true - /[email protected]: resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} dependencies: @@ -8308,15 +8008,6 @@ packages: resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} dev: true - /[email protected]: - resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} - engines: {node: '>=12'} - dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ==} engines: {node: '>=14.14'} @@ -8326,22 +8017,12 @@ packages: universalify: 2.0.0 /[email protected]: - resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} - engines: {node: '>=6 <7 || >=8'} - dependencies: - graceful-fs: 4.2.11 - jsonfile: 4.0.0 - universalify: 0.1.2 - dev: true - - /[email protected]: - resolution: {integrity: 
sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==} - engines: {node: '>=10'} + resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} + engines: {node: '>=6 <7 || >=8'} dependencies: - at-least-node: 1.0.0 graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.0 + jsonfile: 4.0.0 + universalify: 0.1.2 dev: true /[email protected]: @@ -8399,20 +8080,6 @@ packages: wide-align: 1.1.5 dev: true - /[email protected]: - resolution: {integrity: sha512-CmykPMJGuNan/3S4kZOpvvPYSNqSHANiWnh9XcMU2pSjtBfF0XzZ2p1bFAxTbnFxyBuPxQYHhzwaoOmUdqzvxQ==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - aproba: 2.0.0 - color-support: 1.1.3 - console-control-strings: 1.1.0 - has-unicode: 2.0.1 - signal-exit: 4.0.2 - string-width: 4.2.3 - strip-ansi: 6.0.1 - wide-align: 1.1.5 - dev: true - /[email protected]: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} @@ -8489,6 +8156,16 @@ packages: through2: 4.0.2 dev: true + /[email protected]: + resolution: {integrity: sha512-b5OHmZ3vAgGrDn/X0kS+9qCfNKWe4K/jFnhwzVWWg0/k5eLa3060tZShrRg8Dja5kPc+YjS0Gc6y7cRr44Lpjw==} + engines: {node: '>=14'} + hasBin: true + dependencies: + dargs: 7.0.0 + meow: 8.1.2 + split2: 3.2.2 + dev: true + /[email protected]: resolution: {integrity: sha512-eU+GGrZgccNJcsDH5LkXR3PB9M958hxc7sbA8DFJjrv9j4L2P/eZfKhM+QD6wyzpiv+b1BpK0XrYCxkovtjSLw==} engines: {node: '>=4'} @@ -8497,9 +8174,9 @@ packages: pify: 2.3.0 dev: true - /[email protected]: - resolution: {integrity: sha512-OWyMt5zBe7xFs8vglMmhM9lRQzCWL3WjHtxNNfJTMngGym7pC1kh8sP6jevfydJ6LP3ZvGxfb6ABYgPUM0mtsA==} - engines: {node: '>=10'} + /[email protected]: + resolution: {integrity: sha512-fZ+tmZ1O5aXW/T5nLzZLbxWAHdQTLLXalOECMNAmhoEQSfqZjtaeMjpsXH4C5qVhrICTkVQeQFujB1lKzIHljA==} + engines: {node: '>=14'} hasBin: true dependencies: meow: 8.1.2 @@ -8671,10 +8348,6 @@ packages: responselike: 2.0.1 dev: true - /[email protected]: - resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} - dev: true - /[email protected]: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -8793,13 +8466,6 @@ packages: lru-cache: 6.0.0 dev: true - /[email protected]: - resolution: {integrity: sha512-xIcQYMnhcx2Nr4JTjsFmwwnr9vldugPy9uVm0o87bjqqWMv9GaqsTeT+i99wTl0mk1uLxJtHxLb8kymqTENQsw==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - lru-cache: 7.18.3 - dev: true - /[email protected]: resolution: {integrity: sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -8979,38 +8645,17 @@ packages: engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dev: true - /[email protected]: - resolution: {integrity: sha512-YhlQPEjNFqlGdzrBfDNRLhvoSgX7iQRgSxgsNknRQ9ITXFT7UMfVMWhBTOh2Y+25lRnGrv5Xz8yZwQ3ACR6T3A==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + /[email protected]: + resolution: {integrity: sha512-kBhlSheBfYmq3e0L1ii+VKe3zBTLL5lDCDWR+f9dLmEGSB3MqLlMlsolubSsyI88Bg6EA+BIMlomAnQ1SwgQBw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dependencies: - npm-package-arg: 9.1.2 - promzard: 0.3.0 - read: 1.0.7 - read-package-json: 5.0.1 + npm-package-arg: 10.1.0 + promzard: 1.0.0 + read: 2.1.0 + read-package-json: 6.0.4 semver: 7.5.1 
validate-npm-package-license: 3.0.4 - validate-npm-package-name: 4.0.0 - dev: true - - /[email protected]: - resolution: {integrity: sha512-nn4F01dxU8VeKfq192IjLsxu0/OmMZ4Lg3xKAns148rCaXP6ntAoEkVYZThWjwON8AlzdZZi6oqnhNbxUG9hVg==} - engines: {node: '>=12.0.0'} - dependencies: - ansi-escapes: 4.3.2 - chalk: 4.1.2 - cli-cursor: 3.1.0 - cli-width: 3.0.0 - external-editor: 3.1.0 - figures: 3.2.0 - lodash: 4.17.21 - mute-stream: 0.0.8 - ora: 5.4.1 - run-async: 2.4.1 - rxjs: 7.8.1 - string-width: 4.2.3 - strip-ansi: 6.0.1 - through: 2.3.8 - wrap-ansi: 7.0.0 + validate-npm-package-name: 5.0.0 dev: true /[email protected]: @@ -9074,11 +8719,11 @@ packages: ci-info: 1.6.0 dev: true - /[email protected]: - resolution: {integrity: sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==} + /[email protected]: + resolution: {integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==} hasBin: true dependencies: - ci-info: 2.0.0 + ci-info: 3.8.0 dev: true /[email protected]: @@ -9138,11 +8783,6 @@ packages: engines: {node: '>=8'} dev: true - /[email protected]: - resolution: {integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==} - engines: {node: '>=6'} - dev: true - /[email protected]: resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} engines: {node: '>=8'} @@ -9332,6 +8972,21 @@ packages: resolution: {integrity: sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==} dev: true + /[email protected]: + resolution: {integrity: sha512-LtxijLLZBduXnHSniy0WMdaHjmQnt3g5sa16W4p0HqukYTTsyTW3GD1q41TyGl5YFXj/5B2U6dlh5FM1LIMgxw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + chalk: 4.1.2 + diff-sequences: 29.4.3 + jest-get-type: 29.4.3 + pretty-format: 29.5.0 + dev: true + + /[email protected]: + resolution: {integrity: sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dev: true + /[email protected]: resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} engines: {node: '>= 10.13.0'} @@ -9505,10 +9160,6 @@ packages: /[email protected]: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} - /[email protected]: - resolution: {integrity: sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw==} - dev: true - /[email protected]: resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==} dev: true @@ -9566,14 +9217,6 @@ packages: promise: 7.3.1 dev: true - /[email protected]: - resolution: {integrity: sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw==} - dev: true - - /[email protected]: - resolution: {integrity: sha512-S59eriX5u3/QhMNq3v/gm8Kd0w8OS6Tz2FS1NG4blv+z0MuQcBRJyFWjdovM0Rad4/P4aUPFtnkNjMjyMlMSYA==} - dev: true - /[email protected]: resolution: {integrity: sha512-5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g==} dependencies: @@ -9610,49 +9253,47 @@ packages: engines: {node: '>=0.10.0'} dev: true - /[email protected]: - resolution: {integrity: 
sha512-W4qrGhcdutkRdHEaDf9eqp7u4JvI+1TwFy5woX6OI8WPe4PYBdxuILAsvhp614fUG41rKSGDKlOh+AWzdSidTg==} + /[email protected]: + resolution: {integrity: sha512-kX279o8N/L2URwoR3Pf4TdIl5P8G443qAFy095ZD+Vu1tOMo8U6xOc221EgHoMuYhdqlT3f0vgn5bMMr/xNYhQ==} engines: {node: ^14.17.0 || >=16.0.0} hasBin: true dependencies: - '@lerna/child-process': 6.6.2 - '@lerna/create': 6.6.2 - '@lerna/legacy-package-management': 6.6.2([email protected]) - '@npmcli/arborist': 6.2.3 - '@npmcli/run-script': 4.1.7 - '@nrwl/devkit': 15.9.4([email protected]) + '@lerna/child-process': 7.0.1 + '@lerna/create': 7.0.1 + '@npmcli/run-script': 6.0.2 + '@nx/devkit': 16.3.2([email protected]) '@octokit/plugin-enterprise-rest': 6.0.1 - '@octokit/rest': 19.0.3 - byte-size: 7.0.0 + '@octokit/rest': 19.0.11 + byte-size: 8.1.1 chalk: 4.1.0 clone-deep: 4.0.1 - cmd-shim: 5.0.0 + cmd-shim: 6.0.1 columnify: 1.6.0 - config-chain: 1.1.12 - conventional-changelog-angular: 5.0.12 - conventional-changelog-core: 4.2.4 - conventional-recommended-bump: 6.1.0 - cosmiconfig: 7.0.0 + conventional-changelog-angular: 6.0.0 + conventional-changelog-core: 5.0.1 + conventional-recommended-bump: 7.0.1 + cosmiconfig: 8.2.0 dedent: 0.7.0 - dot-prop: 6.0.1 envinfo: 7.8.1 execa: 5.0.0 - fs-extra: 9.1.0 + fs-extra: 11.1.1 get-port: 5.1.1 get-stream: 6.0.0 git-url-parse: 13.1.0 glob-parent: 5.1.2 globby: 11.1.0 - graceful-fs: 4.2.10 + graceful-fs: 4.2.11 has-unicode: 2.0.1 import-local: 3.1.0 - init-package-json: 3.0.2 + ini: 1.3.8 + init-package-json: 5.0.0 inquirer: 8.2.5 - is-ci: 2.0.0 + is-ci: 3.0.1 is-stream: 2.0.0 + jest-diff: 29.5.0 js-yaml: 4.1.0 - libnpmaccess: 6.0.4 - libnpmpublish: 7.1.4 + libnpmaccess: 7.0.2 + libnpmpublish: 7.3.0 load-json-file: 6.2.0 make-dir: 3.1.0 minimatch: 3.0.5 @@ -9662,17 +9303,17 @@ packages: npm-packlist: 5.1.1 npm-registry-fetch: 14.0.5 npmlog: 6.0.2 - nx: 15.9.4 + nx: 16.3.2 p-map: 4.0.0 p-map-series: 2.1.0 p-pipe: 3.1.0 p-queue: 6.6.2 p-reduce: 2.1.0 p-waterfall: 2.1.1 - pacote: 15.1.1 + pacote: 15.2.0 pify: 5.0.0 - read-cmd-shim: 3.0.0 - read-package-json: 5.0.1 + read-cmd-shim: 4.0.0 + read-package-json: 6.0.4 resolve-from: 5.0.0 rimraf: 4.4.1 semver: 7.5.1 @@ -9682,12 +9323,12 @@ packages: strong-log-transformer: 2.1.0 tar: 6.1.11 temp-dir: 1.0.0 - typescript: 4.9.5 + typescript: 5.1.3 upath: 2.0.1 - uuid: 8.3.2 + uuid: 9.0.0 validate-npm-package-license: 3.0.4 - validate-npm-package-name: 4.0.0 - write-file-atomic: 4.0.1 + validate-npm-package-name: 5.0.0 + write-file-atomic: 5.0.1 write-pkg: 4.0.0 yargs: 16.2.0 yargs-parser: 20.2.4 @@ -9707,21 +9348,18 @@ packages: prelude-ls: 1.2.1 type-check: 0.4.0 - /[email protected]: - resolution: {integrity: sha512-qZ3wcfIyUoW0+qSFkMBovcTrSGJ3ZeyvpR7d5N9pEYv/kXs8sHP2wiqEIXBKLFrZlmM0kR0RJD7mtfLngtlLag==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + /[email protected]: + resolution: {integrity: sha512-vHBVMw1JFMTgEk15zRsJuSAg7QtGGHpUSEfnbcRL1/gTBag9iEfJbyjpDmdJmwMhvpoLoNBtdAUCdGnaP32hhw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dependencies: - aproba: 2.0.0 - minipass: 3.3.6 - npm-package-arg: 9.1.2 - npm-registry-fetch: 13.3.1 + npm-package-arg: 10.1.0 + npm-registry-fetch: 14.0.5 transitivePeerDependencies: - - bluebird - supports-color dev: true - /[email protected]: - resolution: {integrity: sha512-mMntrhVwut5prP4rJ228eEbEyvIzLWhqFuY90j5QeXBCTT2pWSMno7Yo2S2qplPUr02zPurGH4heGLZ+wORczg==} + /[email protected]: + resolution: {integrity: sha512-fHUxw5VJhZCNSls0KLNEG0mCD2PN1i14gH5elGOgiVnU3VgTcRahagYP2LKI1m0tFCJ+XrAm0zVYyF5RCbXzcg==} engines: {node: ^14.17.0 
|| ^16.13.0 || >=18.0.0} dependencies: ci-info: 3.8.0 @@ -10147,13 +9785,6 @@ packages: brace-expansion: 2.0.1 dev: true - /[email protected]: - resolution: {integrity: sha512-sauLxniAmvnhhRjFwPNnJKaPFYyddAgbYdeUpHULtCT/GhzdCx/MDNy+Y40lBxTQUrMzDE8e0S43Z5uqfO0REg==} - engines: {node: '>=10'} - dependencies: - brace-expansion: 2.0.1 - dev: true - /[email protected]: resolution: {integrity: sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA==} engines: {node: '>=16 || 14 >=14.17'} @@ -10263,15 +9894,6 @@ packages: minipass: 3.3.6 yallist: 4.0.0 - /[email protected]: - resolution: {integrity: sha512-sdqtiFt3lkOaYvTXSRIUjkIdPTcxgv5+fgqYE/5qgwdw12cOrAuzzgzvVExIkH/ul1oeHN3bCLOWSG3XOqbKKw==} - engines: {node: '>=10'} - dependencies: - chownr: 2.0.0 - infer-owner: 1.0.4 - mkdirp: 1.0.4 - dev: true - /[email protected]: resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} hasBin: true @@ -10351,6 +9973,11 @@ packages: resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} dev: true + /[email protected]: + resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dev: true + /[email protected]: resolution: {integrity: sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==} @@ -10492,14 +10119,6 @@ packages: abbrev: 1.1.1 dev: true - /[email protected]: - resolution: {integrity: sha512-ZFPLe9Iu0tnx7oWhFxAo4s7QTn8+NNDDxYNaKLjE7Dp0tbakQ3M1QhQzsnzXHQBTUO3K9BmwaxnyO8Ayn2I95Q==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - hasBin: true - dependencies: - abbrev: 2.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} dependencies: @@ -10519,16 +10138,6 @@ packages: validate-npm-package-license: 3.0.4 dev: true - /[email protected]: - resolution: {integrity: sha512-EBk5QKKuocMJhB3BILuKhmaPjI8vNRSpIfO9woLC6NyHVkKKdVEdAO1mrT0ZfxNR1lKwCcTkuZfmGIFdizZ8Pg==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - hosted-git-info: 5.2.1 - is-core-module: 2.12.1 - semver: 7.5.1 - validate-npm-package-license: 3.0.4 - dev: true - /[email protected]: resolution: {integrity: sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -10602,16 +10211,6 @@ packages: validate-npm-package-name: 3.0.0 dev: true - /[email protected]: - resolution: {integrity: sha512-pzd9rLEx4TfNJkovvlBSLGhq31gGu2QDexFPWT19yCDh0JgnRhlBLNo5759N0AJmBk+kQ9Y/hXoLnlgFD+ukmg==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - hosted-git-info: 5.2.1 - proc-log: 2.0.1 - semver: 7.5.1 - validate-npm-package-name: 4.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-UfpSvQ5YKwctmodvPPkK6Fwk603aoVsf8AEbmVKAEECrfvL8SSe1A2YIwrJ6xmTHAITKPwwZsWo7WwEbNk0kxw==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} @@ -10640,37 +10239,6 @@ packages: semver: 7.5.1 dev: true - /[email protected]: - resolution: {integrity: sha512-eukJPi++DKRTjSBRcDZSDDsGqRK3ehbxfFUcgaRd0Yp6kRwOwh2WVn0r+8rMB4nnuzvAk6rQVzl6K5CkYOmnvw==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - make-fetch-happen: 10.2.1 - minipass: 3.3.6 - minipass-fetch: 2.1.2 - minipass-json-stream: 1.0.1 - 
minizlib: 2.1.2 - npm-package-arg: 9.1.2 - proc-log: 2.0.1 - transitivePeerDependencies: - - bluebird - - supports-color - dev: true - - /[email protected]: - resolution: {integrity: sha512-YaeRbVNpnWvsGOjX2wk5s85XJ7l1qQBGAp724h8e2CZFFhMSuw9enom7K1mWVUtvXO1uUSFIAPofQK0pPN0ZcA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - make-fetch-happen: 11.1.1 - minipass: 4.2.8 - minipass-fetch: 3.0.3 - minipass-json-stream: 1.0.1 - minizlib: 2.1.2 - npm-package-arg: 10.1.0 - proc-log: 3.0.0 - transitivePeerDependencies: - - supports-color - dev: true - /[email protected]: resolution: {integrity: sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -10718,28 +10286,20 @@ packages: set-blocking: 2.0.0 dev: true - /[email protected]: - resolution: {integrity: sha512-uJ0YFk/mCQpLBt+bxN88AKd+gyqZvZDbtiNxk6Waqcj2aPRyfVx8ITawkyQynxUagInjdYT1+qj4NfA5KJJUxg==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - are-we-there-yet: 4.0.0 - console-control-strings: 1.1.0 - gauge: 5.0.1 - set-blocking: 2.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-NHj4rzRo0tQdijE9ZqAx6kYDcoRwYwSYzCA8MY3JzfxlrvEU0jhnhJT9BhqhJs7I/dKcrDm6TyulaRqZPIhN5g==} - /[email protected]: - resolution: {integrity: sha512-z4oW3RbPK2P3VphPOzaSIkXfuXpFkHf/M0q2/cGEB7cMQraXdlc/yw1x+xX8qAF/iI2gL27lnhIHvEozKe4A7g==} + /[email protected]: + resolution: {integrity: sha512-13P7r0aKikjBtmdZrNorwXzVPeVIV4MLEwqGY+DEG6doLBtI5KqEQk/d5B5l2dCF2BEi/LXEmLYCmf9gwbOJ+Q==} hasBin: true dependencies: - axios: 0.21.4 - chalk: 4.1.0 + '@nrwl/nx-cloud': 16.0.5 + axios: 1.1.3 + chalk: 4.1.2 dotenv: 10.0.0 - fs-extra: 10.1.0 + fs-extra: 11.1.1 node-machine-id: 1.1.12 + open: 8.4.2 strip-json-comments: 3.1.1 tar: 6.1.11 yargs-parser: 21.1.1 @@ -10747,8 +10307,8 @@ packages: - debug dev: true - /[email protected]: - resolution: {integrity: sha512-P1G4t59UvE/lkHyruLeSOB5ZuNyh01IwU0tTUOi8f9s/NbP7+OQ8MYVwDV74JHTr6mQgjlS+n+4Eox8tVm9itA==} + /[email protected]: + resolution: {integrity: sha512-fOzCVL7qoCJAcYTJwvJ9j+PSaL791ro4AICWuLxaphZsp2jcLoav4Ev7ONPks2Wlkt8FS9bee3nqQ3w1ya36Og==} hasBin: true requiresBuild: true peerDependencies: @@ -10760,8 +10320,7 @@ packages: '@swc/core': optional: true dependencies: - '@nrwl/cli': 15.9.4 - '@nrwl/tao': 15.9.4 + '@nrwl/tao': 16.3.2 '@parcel/watcher': 2.0.4 '@yarnpkg/lockfile': 1.1.0 '@yarnpkg/parsers': 3.0.0-rc.44 @@ -10796,15 +10355,16 @@ packages: yargs: 17.7.2 yargs-parser: 21.1.1 optionalDependencies: - '@nrwl/nx-darwin-arm64': 15.9.4 - '@nrwl/nx-darwin-x64': 15.9.4 - '@nrwl/nx-linux-arm-gnueabihf': 15.9.4 - '@nrwl/nx-linux-arm64-gnu': 15.9.4 - '@nrwl/nx-linux-arm64-musl': 15.9.4 - '@nrwl/nx-linux-x64-gnu': 15.9.4 - '@nrwl/nx-linux-x64-musl': 15.9.4 - '@nrwl/nx-win32-arm64-msvc': 15.9.4 - '@nrwl/nx-win32-x64-msvc': 15.9.4 + '@nx/nx-darwin-arm64': 16.3.2 + '@nx/nx-darwin-x64': 16.3.2 + '@nx/nx-freebsd-x64': 16.3.2 + '@nx/nx-linux-arm-gnueabihf': 16.3.2 + '@nx/nx-linux-arm64-gnu': 16.3.2 + '@nx/nx-linux-arm64-musl': 16.3.2 + '@nx/nx-linux-x64-gnu': 16.3.2 + '@nx/nx-linux-x64-musl': 16.3.2 + '@nx/nx-win32-arm64-msvc': 16.3.2 + '@nx/nx-win32-x64-msvc': 16.3.2 transitivePeerDependencies: - debug dev: true @@ -11058,8 +10618,8 @@ packages: release-zalgo: 1.0.0 dev: true - /[email protected]: - resolution: {integrity: sha512-eeqEe77QrA6auZxNHIp+1TzHQ0HBKf5V6c8zcaYZ134EJe1lCi+fjXATkNiEEfbG+e50nu02GLvUtmZcGOYabQ==} + /[email protected]: + resolution: {integrity: 
sha512-rJVZeIwHTUta23sIZgEIM62WYwbmGbThdbnkt81ravBplQv+HjyroqnLRNH2+sLJHcGZmLRmhPwACqhfTcOmnA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} hasBin: true dependencies: @@ -11069,14 +10629,14 @@ packages: '@npmcli/run-script': 6.0.2 cacache: 17.1.3 fs-minipass: 3.0.2 - minipass: 4.2.8 + minipass: 5.0.0 npm-package-arg: 10.1.0 npm-packlist: 7.0.4 npm-pick-manifest: 8.0.1 npm-registry-fetch: 14.0.5 proc-log: 3.0.0 promise-retry: 2.0.1 - read-package-json: 6.0.3 + read-package-json: 6.0.4 read-package-json-fast: 3.0.2 sigstore: 1.5.2 ssri: 10.0.4 @@ -11099,15 +10659,6 @@ packages: dependencies: callsites: 3.1.0 - /[email protected]: - resolution: {integrity: sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dependencies: - json-parse-even-better-errors: 3.0.0 - just-diff: 6.0.2 - just-diff-apply: 5.5.0 - dev: true - /[email protected]: resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} engines: {node: '>=4'} @@ -11256,14 +10807,6 @@ packages: dependencies: find-up: 4.1.0 - /[email protected]: - resolution: {integrity: sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ==} - engines: {node: '>=4'} - dependencies: - cssesc: 3.0.0 - util-deprecate: 1.0.2 - dev: true - /[email protected]: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -11273,8 +10816,8 @@ packages: engines: {node: '>=10.13.0'} hasBin: true - /[email protected]: - resolution: {integrity: sha512-cvpcHTc42lcsvOOAzd3XuNWTcvk1Jmnzqeu+WsOuiPmxUJTnkbAcFNsRKvEpBEUFVUgy/GTZLulZDcDEi+CIlA==} + /[email protected]: + resolution: {integrity: sha512-V2mGkI31qdttvTFX7Mt4efOqHXqJWMu4/r66Xh3Z3BwZaPfPJgp6/gbwoujRpPUtfEF6AUUWx3Jim3GCw5g/Qw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@jest/schemas': 29.4.3 @@ -11282,11 +10825,6 @@ packages: react-is: 18.2.0 dev: true - /[email protected]: - resolution: {integrity: sha512-Kcmo2FhfDTXdcbfDH76N7uBYHINxc/8GW7UAVuVP9I+Va3uHSerrnKV6dLooga/gh7GlgzuCCr/eoldnL1muGw==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dev: true - /[email protected]: resolution: {integrity: sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -11302,24 +10840,11 @@ packages: fromentries: 1.3.2 dev: true - /[email protected]: - resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} - engines: {node: '>= 0.6.0'} - dev: true - /[email protected]: resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==} engines: {node: '>=0.4.0'} dev: true - /[email protected]: - resolution: {integrity: sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==} - dev: true - - /[email protected]: - resolution: {integrity: sha512-1vTUnfI2hzui8AEIixbdAJlFY4LFDXqQswy/2eOlThAscXCY4It8FdVuI0fMJGAB2aWGbdQf/gv0skKYXmdrHA==} - dev: true - /[email protected]: resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} peerDependencies: @@ -11351,14 +10876,11 @@ packages: sisteransi: 1.0.5 dev: true - /[email protected]: - resolution: {integrity: 
sha512-JZeYqd7UAcHCwI+sTOeUDYkvEU+1bQ7iE0UT1MgB/tERkAPkesW46MrpIySzODi+owTjZtiF8Ay5j9m60KmMBw==} + /[email protected]: + resolution: {integrity: sha512-KQVDEubSUHGSt5xLakaToDFrSoZhStB8dXLzk2xvwR67gJktrHFvpR63oZgHyK19WKbHFLXJqCPXdVR3aBP8Ig==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dependencies: - read: 1.0.7 - dev: true - - /[email protected]: - resolution: {integrity: sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==} + read: 2.1.0 dev: true /[email protected]: @@ -11645,24 +11167,11 @@ packages: resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} dev: true - /[email protected]: - resolution: {integrity: sha512-KQDVjGqhZk92PPNRj9ZEXEuqg8bUobSKRw+q0YQ3TKI5xkce7bUJobL4Z/OtiEbAAv70yEpYIXp4iQ9L8oPVog==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dev: true - /[email protected]: resolution: {integrity: sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dev: true - /[email protected]: - resolution: {integrity: sha512-W/BKtbL+dUjTuRL2vziuYhp76s5HZ9qQhd/dKfWIZveD0O40453QNyZhC0e63lqZrAQ4jiOapVoeJ7JrszenQQ==} - engines: {node: '>=10'} - dependencies: - json-parse-even-better-errors: 2.3.1 - npm-normalize-package-bin: 1.0.1 - dev: true - /[email protected]: resolution: {integrity: sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -11671,18 +11180,8 @@ packages: npm-normalize-package-bin: 3.0.1 dev: true - /[email protected]: - resolution: {integrity: sha512-MALHuNgYWdGW3gKzuNMuYtcSSZbGQm94fAp16xt8VsYTLBjUSc55bLMKe6gzpWue0Tfi6CBgwCSdDAqutGDhMg==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - glob: 8.1.0 - json-parse-even-better-errors: 2.3.1 - normalize-package-data: 4.0.1 - npm-normalize-package-bin: 1.0.1 - dev: true - - /[email protected]: - resolution: {integrity: sha512-4QbpReW4kxFgeBQ0vPAqh2y8sXEB3D4t3jsXbJKIhBiF80KT6XRo45reqwtftju5J6ru1ax06A2Gb/wM1qCOEQ==} + /[email protected]: + resolution: {integrity: sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dependencies: glob: 10.2.5 @@ -11727,11 +11226,11 @@ packages: type-fest: 0.6.0 dev: true - /[email protected]: - resolution: {integrity: sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==} - engines: {node: '>=0.8'} + /[email protected]: + resolution: {integrity: sha512-bvxi1QLJHcaywCAEsAk4DG3nVoqiY2Csps3qzWalhj5hFqRn1d/OixkFXtLO1PrgHUcAP0FNaSY/5GYNfENFFQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dependencies: - mute-stream: 0.0.8 + mute-stream: 1.0.0 dev: true /[email protected]: @@ -11761,16 +11260,6 @@ packages: string_decoder: 1.3.0 util-deprecate: 1.0.2 - /[email protected]: - resolution: {integrity: sha512-kDMOq0qLtxV9f/SQv522h8cxZBqNZXuXNyjyezmfAAuribMyVXziljpQ/uQhfE1XLg2/TLTW2DsnoE4VAi/krg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - dependencies: - abort-controller: 3.0.0 - buffer: 6.0.3 - events: 3.3.0 - process: 0.11.10 - dev: true - /[email protected]: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -11990,13 +11479,6 @@ packages: dependencies: queue-microtask: 1.2.3 - /[email protected]: - resolution: {integrity: 
sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==} - engines: {npm: '>=2.0.0'} - dependencies: - tslib: 1.14.1 - dev: true - /[email protected]: resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==} dependencies: @@ -12081,14 +11563,6 @@ packages: lru-cache: 6.0.0 dev: true - /[email protected]: - resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==} - engines: {node: '>=10'} - hasBin: true - dependencies: - lru-cache: 6.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==} engines: {node: '>=10'} @@ -12596,22 +12070,6 @@ packages: engines: {node: '>=4'} dev: true - /[email protected]: - resolution: {integrity: sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==} - engines: {node: '>=8'} - dev: true - - /[email protected]: - resolution: {integrity: sha512-eLXG5B1G0mRPHmgH2WydPl5v4jH35qEn3y/rA/aahKhIa91Pn119SsU7n7v/433gtT9ONzC8ISvNHIh2JSTm0w==} - engines: {node: '>=10'} - dependencies: - del: 6.1.1 - is-stream: 2.0.0 - temp-dir: 2.0.0 - type-fest: 0.16.0 - unique-string: 2.0.0 - dev: true - /[email protected]([email protected]): resolution: {integrity: sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==} engines: {node: '>= 10.13.0'} @@ -12778,11 +12236,6 @@ packages: hasBin: true dev: true - /[email protected]: - resolution: {integrity: sha512-gcANaAnd2QDZFmHFEOF4k7uc1J/6a6z3DJMd/QwEyxLoKGiptJRwid582r7QIsFlFMIZ3SnxfS52S4hm2DHkuQ==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - dev: true - /[email protected]: resolution: {integrity: sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==} engines: {node: '>=8'} @@ -12938,11 +12391,6 @@ packages: dev: true optional: true - /[email protected]: - resolution: {integrity: sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==} - engines: {node: '>=10'} - dev: true - /[email protected]: resolution: {integrity: sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==} engines: {node: '>=10'} @@ -13159,13 +12607,6 @@ packages: imurmurhash: 0.1.4 dev: true - /[email protected]: - resolution: {integrity: sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==} - engines: {node: '>=8'} - dependencies: - crypto-random-string: 2.0.0 - dev: true - /[email protected]: resolution: {integrity: sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==} dev: true @@ -13242,6 +12683,11 @@ packages: hasBin: true dev: true + /[email protected]: + resolution: {integrity: sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==} + hasBin: true + dev: true + /[email protected]: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} dev: true @@ -13263,13 +12709,6 @@ packages: builtins: 1.0.3 dev: true - /[email protected]: - resolution: {integrity: sha512-mzR0L8ZDktZjpX4OB46KT+56MAhl4EIazWP/+G/HPGuvfdaqg4YsCdtOm6U9+LOFyYDoh4dpnpxZRB9MQQns5Q==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - dependencies: - builtins: 5.0.1 - dev: true - /[email protected]: resolution: {integrity: 
sha512-YuKoXDAhBYxY7SfOKxHBDoSyENFeW5VvIIQp2TGQuit8gpK6MnWaQelBKxso72DoxTZfZdcP3W90LqpSkgPzLQ==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -13312,10 +12751,6 @@ packages: dependencies: xml-name-validator: 4.0.0 - /[email protected]: - resolution: {integrity: sha512-hwj/qMDUEjCU5h0xr90KGCf0tg0/LgJbmOWgrWKYlcJZM7XvquvUJZ0G/HMGr7F7OQMOUuPHWP9JpriinkAlkg==} - dev: true - /[email protected]: resolution: {integrity: sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==} engines: {node: '>=10.13.0'} @@ -13750,14 +13185,6 @@ packages: typedarray-to-buffer: 3.1.5 dev: true - /[email protected]: - resolution: {integrity: sha512-nSKUxgAbyioruk6hU87QzVbY279oYT6uiwgDoujth2ju4mJ+TZau7SQBhtbTmUyuNYTuXnSyRn66FV0+eCgcrQ==} - engines: {node: ^12.13.0 || ^14.15.0 || >=16} - dependencies: - imurmurhash: 0.1.4 - signal-exit: 3.0.7 - dev: true - /[email protected]: resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} @@ -13840,11 +13267,6 @@ packages: /[email protected]: resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} - /[email protected]: - resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} - engines: {node: '>= 6'} - dev: true - /[email protected]: resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} engines: {node: '>=6'}
|
|
chore: fix tests
|
0b0cdab68d7d7693115ab613aaa0e137b689c341
|
chore
|
https://github.com/mikro-orm/mikro-orm/commit/0b0cdab68d7d7693115ab613aaa0e137b689c341
|
fix tests
|
diff --git a/Configuration.ts b/Configuration.ts index 338a147..7268f72 100644 --- a/Configuration.ts +++ b/Configuration.ts @@ -252,7 +252,7 @@ export class Configuration<D extends IDatabaseDriver = IDatabaseDriver> { throw new Error('No platform type specified, please fill in `type` or provide custom driver class in `driver` option. Available platforms types: ' + inspect(Object.keys(Configuration.PLATFORMS))); } - if (!(this.options.type! in Configuration.PLATFORMS)) { + if (this.options.type && !(this.options.type in Configuration.PLATFORMS)) { throw new Error(`Invalid platform type specified: '${this.options.type}', please fill in valid \\`type\\` or provide custom driver class in \\`driver\\` option. Available platforms types: ${inspect(Object.keys(Configuration.PLATFORMS))}`); }
|
|
fix: prevent terminal windows from popping up on Windows when run from a GUI application.
|
cacc8af96a41e601c5136cba4b1f9b3b9d5e6844
|
fix
|
https://github.com/Byron/gitoxide/commit/cacc8af96a41e601c5136cba4b1f9b3b9d5e6844
|
prevent terminal windows from popping up on Windows when run from a GUI application.
|
diff --git a/mod.rs b/mod.rs index f6cf042..d3d1703 100644 --- a/mod.rs +++ b/mod.rs @@ -71,7 +71,7 @@ impl Program { let git_program = gix_path::env::exe_invocation(); let mut cmd = match &self.kind { Kind::Builtin => { - let mut cmd = Command::new(git_program); + let mut cmd = Command::from(gix_command::prepare(git_program)); cmd.arg("credential").arg(action.as_arg(false)); cmd }
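On Windows, the standard way to keep a spawned child from allocating a console is the CREATE_NO_WINDOW process-creation flag; routing the invocation through `gix_command::prepare` presumably applies that kind of setup. A Python sketch of the same idea (the flag is an assumption about the mechanism, not a claim about `gix_command`'s exact internals):

```python
import subprocess
import sys

kwargs = {}
if sys.platform == "win32":
    # Spawn the child without allocating a console, so no terminal
    # window pops up when the parent is a GUI application.
    kwargs["creationflags"] = subprocess.CREATE_NO_WINDOW

# Stand-in for the patched code path, which runs `git credential <action>`.
subprocess.run(["git", "--version"], **kwargs)
```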
|
|
feat(duckdb): implement `RegexSplit`
|
229a1f4e5d5153dea95375c22fadf339cba3f8c6
|
feat
|
https://github.com/ibis-project/ibis/commit/229a1f4e5d5153dea95375c22fadf339cba3f8c6
|
implement `RegexSplit`
|
diff --git a/registry.py b/registry.py index 25f78e4..e2015a9 100644 --- a/registry.py +++ b/registry.py @@ -562,6 +562,7 @@ operation_registry.update( ops.GeoConvert: _geo_convert, # other ops ops.TimestampRange: fixed_arity(sa.func.range, 3), + ops.RegexSplit: fixed_arity(sa.func.str_split_regex, 2), } )
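For context, a minimal sketch of what a registry entry like this does, assuming a SQLAlchemy-based translator in the style of this backend: `fixed_arity` binds an ibis operation to a SQL function applied to a fixed number of translated arguments. The `t`/`op` parameter names and the user-facing `re_split` method are assumptions for illustration, not the backend's exact code.

```python
import sqlalchemy as sa

def fixed_arity(sa_func, arity):
    # Build a translator that compiles an ibis op node into a call of
    # `sa_func` over the node's translated arguments.
    def translator(t, op):
        args = op.args
        assert len(args) == arity, "unexpected number of arguments"
        return sa_func(*map(t.translate, args))
    return translator

# With the entry above, an expression such as t.s.re_split(",") would
# compile to DuckDB's str_split_regex(s, ',').
```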
|
|
docs: add a bunch of string expression examples
|
18d3112b984bc9cd7b295e6c40ce8ee2232bcf57
|
docs
|
https://github.com/ibis-project/ibis/commit/18d3112b984bc9cd7b295e6c40ce8ee2232bcf57
|
add a bunch of string expression examples
|
diff --git a/strings.py b/strings.py index dea7176..9f399e9 100644 --- a/strings.py +++ b/strings.py @@ -17,7 +17,74 @@ if TYPE_CHECKING: @public class StringValue(Value): - def __getitem__(self, key: slice | int | ir.IntegerValue) -> StringValue: + def __getitem__(self, key: slice | int | ir.IntegerScalar) -> StringValue: + """Index or slice a string expression. + + Parameters + ---------- + key + [`int`][int], [`slice`][slice] or integer scalar expression + + Returns + ------- + StringValue + Indexed or sliced string value + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"food": ["bread", "cheese", "rice"], "idx": [1, 2, 4]}) + ┏━━━━━━━━┳━━━━━━━┓ + ┃ food ┃ idx ┃ + ┡━━━━━━━━╇━━━━━━━┩ + │ string │ int64 │ + ├────────┼───────┤ + │ bread │ 1 │ + │ cheese │ 2 │ + │ rice │ 4 │ + └────────┴───────┘ + >>> t.food[0] + ┏━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ Substring(food, 0, 1) ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├───────────────────────┤ + │ b │ + │ c │ + │ r │ + └───────────────────────┘ + >>> t.food[:3] + ┏━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ Substring(food, 0, 3) ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├───────────────────────┤ + │ bre │ + │ che │ + │ ric │ + └───────────────────────┘ + >>> t.food[3:5] + ┏━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ Substring(food, 3, 2) ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├───────────────────────┤ + │ ad │ + │ es │ + │ e │ + └───────────────────────┘ + >>> t.food[7] + ┏━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ Substring(food, 7, 1) ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├───────────────────────┤ + │ ~ │ + │ ~ │ + │ ~ │ + └───────────────────────┘ + """ from ibis.expr import types as ir if isinstance(key, slice): @@ -55,7 +122,23 @@ class StringValue(Value): Returns ------- IntegerValue - The length of the input + The length of each string in the expression + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["aaa", "a", "aa"]}) + >>> t.s.length() + ┏━━━━━━━━━━━━━━━━━┓ + ┃ StringLength(s) ┃ + ┡━━━━━━━━━━━━━━━━━┩ + │ int32 │ + ├─────────────────┤ + │ 3 │ + │ 1 │ + │ 2 │ + └─────────────────┘ """ return ops.StringLength(self).to_expr() @@ -66,6 +149,32 @@ class StringValue(Value): ------- StringValue Lowercase string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["AAA", "a", "AA"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ AAA │ + │ a │ + │ AA │ + └────────┘ + >>> t.s.lower() + ┏━━━━━━━━━━━━━━┓ + ┃ Lowercase(s) ┃ + ┡━━━━━━━━━━━━━━┩ + │ string │ + ├──────────────┤ + │ aaa │ + │ a │ + │ aa │ + └──────────────┘ """ return ops.Lowercase(self).to_expr() @@ -76,6 +185,32 @@ class StringValue(Value): ------- StringValue Uppercase string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["aaa", "A", "aa"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ aaa │ + │ A │ + │ aa │ + └────────┘ + >>> t.s.upper() + ┏━━━━━━━━━━━━━━┓ + ┃ Uppercase(s) ┃ + ┡━━━━━━━━━━━━━━┩ + │ string │ + ├──────────────┤ + │ AAA │ + │ A │ + │ AA │ + └──────────────┘ """ return ops.Uppercase(self).to_expr() @@ -86,6 +221,32 @@ class StringValue(Value): ------- StringValue Reversed string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "def", "ghi"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ abc │ + │ def │ + │ ghi │ + 
└────────┘ + >>> t.s.reverse() + ┏━━━━━━━━━━━━┓ + ┃ Reverse(s) ┃ + ┡━━━━━━━━━━━━┩ + │ string │ + ├────────────┤ + │ cba │ + │ fed │ + │ ihg │ + └────────────┘ """ return ops.Reverse(self).to_expr() @@ -96,16 +257,58 @@ class StringValue(Value): ------- IntegerValue ASCII code of the first character of the input + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "def", "ghi"]}) + >>> t.s.ascii_str() + ┏━━━━━━━━━━━━━━━━┓ + ┃ StringAscii(s) ┃ + ┡━━━━━━━━━━━━━━━━┩ + │ int32 │ + ├────────────────┤ + │ 97 │ + │ 100 │ + │ 103 │ + └────────────────┘ """ return ops.StringAscii(self).to_expr() def strip(self) -> StringValue: - """Remove whitespace from left and right sides of a string. + r"""Remove whitespace from left and right sides of a string. Returns ------- StringValue Stripped string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["\\ta\\t", "\\nb\\n", "\\vc\\t"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ \\ta\\t │ + │ \\nb\\n │ + │ \\vc\\t │ + └────────┘ + >>> t.s.strip() + ┏━━━━━━━━━━┓ + ┃ Strip(s) ┃ + ┡━━━━━━━━━━┩ + │ string │ + ├──────────┤ + │ a │ + │ b │ + │ c │ + └──────────┘ """ return ops.Strip(self).to_expr() @@ -116,6 +319,32 @@ class StringValue(Value): ------- StringValue Left-stripped string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["\\ta\\t", "\\nb\\n", "\\vc\\t"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ \\ta\\t │ + │ \\nb\\n │ + │ \\vc\\t │ + └────────┘ + >>> t.s.lstrip() + ┏━━━━━━━━━━━┓ + ┃ LStrip(s) ┃ + ┡━━━━━━━━━━━┩ + │ string │ + ├───────────┤ + │ a\\t │ + │ b\\n │ + │ c\\t │ + └───────────┘ """ return ops.LStrip(self).to_expr() @@ -126,6 +355,32 @@ class StringValue(Value): ------- StringValue Right-stripped string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["\\ta\\t", "\\nb\\n", "\\vc\\t"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ \\ta\\t │ + │ \\nb\\n │ + │ \\vc\\t │ + └────────┘ + >>> t.s.rstrip() + ┏━━━━━━━━━━━┓ + ┃ RStrip(s) ┃ + ┡━━━━━━━━━━━┩ + │ string │ + ├───────────┤ + │ \\ta │ + │ \\nb │ + │ \\vc │ + └───────────┘ """ return ops.RStrip(self).to_expr() @@ -136,6 +391,22 @@ class StringValue(Value): ------- StringValue Capitalized string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "def", "ghi"]}) + >>> t.s.capitalize() + ┏━━━━━━━━━━━━━━━┓ + ┃ Capitalize(s) ┃ + ┡━━━━━━━━━━━━━━━┩ + │ string │ + ├───────────────┤ + │ Abc │ + │ Def │ + │ Ghi │ + └───────────────┘ """ return ops.Capitalize(self).to_expr() @@ -156,6 +427,22 @@ class StringValue(Value): ------- BooleanValue Boolean indicating the presence of `substr` in the expression + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["bab", "ddd", "eaf"]}) + >>> t.s.contains("a") + ┏━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringContains(s, 'a') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ boolean │ + ├────────────────────────┤ + │ True │ + │ False │ + │ True │ + └────────────────────────┘ """ return ops.StringContains(self, substr).to_expr() @@ -196,6 +483,22 @@ class StringValue(Value): ------- StringValue Found substring + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "defg", 
"hijlk"]}) + >>> t.s.substr(2) + ┏━━━━━━━━━━━━━━━━━┓ + ┃ Substring(s, 2) ┃ + ┡━━━━━━━━━━━━━━━━━┩ + │ string │ + ├─────────────────┤ + │ c │ + │ fg │ + │ jlk │ + └─────────────────┘ """ return ops.Substring(self, start, length).to_expr() @@ -211,6 +514,22 @@ class StringValue(Value): ------- StringValue Characters from the start + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "defg", "hijlk"]}) + >>> t.s.left(2) + ┏━━━━━━━━━━━━━━━━━━━━┓ + ┃ Substring(s, 0, 2) ┃ + ┡━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────────┤ + │ ab │ + │ de │ + │ hi │ + └────────────────────┘ """ return self.substr(0, length=nchars) @@ -226,6 +545,22 @@ class StringValue(Value): ------- StringValue Characters from the end + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "defg", "hijlk"]}) + >>> t.s.right(2) + ┏━━━━━━━━━━━━━━━━┓ + ┃ StrRight(s, 2) ┃ + ┡━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────┤ + │ bc │ + │ fg │ + │ lk │ + └────────────────┘ """ return ops.StrRight(self, nchars).to_expr() @@ -241,6 +576,22 @@ class StringValue(Value): ------- StringValue Repeated string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["a", "b", "c"]}) + >>> t.s.repeat(5) + ┏━━━━━━━━━━━━━━┓ + ┃ Repeat(s, 5) ┃ + ┡━━━━━━━━━━━━━━┩ + │ string │ + ├──────────────┤ + │ aaaaa │ + │ bbbbb │ + │ ccccc │ + └──────────────┘ """ return ops.Repeat(self, n).to_expr() @@ -294,9 +645,35 @@ class StringValue(Value): ------- IntegerValue Position of `substr` in `arg` starting from `start` + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "bac", "bca"]}) + >>> t.s.find("a") + ┏━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringFind(s, 'a') ┃ + ┡━━━━━━━━━━━━━━━━━━━━┩ + │ int64 │ + ├────────────────────┤ + │ 0 │ + │ 1 │ + │ 2 │ + └────────────────────┘ + >>> t.s.find("z") + ┏━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringFind(s, 'z') ┃ + ┡━━━━━━━━━━━━━━━━━━━━┩ + │ int64 │ + ├────────────────────┤ + │ -1 │ + │ -1 │ + │ -1 │ + └────────────────────┘ """ if end is not None: - raise NotImplementedError + raise NotImplementedError("`end` parameter is not yet implemented") return ops.StringFind(self, substr, start, end).to_expr() def lpad( @@ -316,15 +693,23 @@ class StringValue(Value): Returns ------- StringValue - Padded string + Left-padded string Examples -------- >>> import ibis - >>> short_str = ibis.literal("a") - >>> result = short_str.lpad(5, "-") # ----a - >>> long_str = ibis.literal("abcdefg") - >>> result = long_str.lpad(5, "-") # abcde + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "def", "ghi"]}) + >>> t.s.lpad(5, "-") + ┏━━━━━━━━━━━━━━━━━┓ + ┃ LPad(s, 5, '-') ┃ + ┡━━━━━━━━━━━━━━━━━┩ + │ string │ + ├─────────────────┤ + │ --abc │ + │ --def │ + │ --ghi │ + └─────────────────┘ """ return ops.LPad(self, length, pad).to_expr() @@ -344,18 +729,26 @@ class StringValue(Value): pad Pad character - Examples - -------- - >>> import ibis - >>> short_str = ibis.literal("a") - >>> result = short_str.lpad(5, "-") # a---- - >>> long_str = ibis.literal("abcdefg") - >>> result = long_str.lpad(5, "-") # abcde - Returns ------- StringValue - Padded string + Right-padded string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "def", "ghi"]}) + >>> t.s.rpad(5, "-") + ┏━━━━━━━━━━━━━━━━━┓ + ┃ RPad(s, 5, '-') ┃ + 
┡━━━━━━━━━━━━━━━━━┩ + │ string │ + ├─────────────────┤ + │ abc-- │ + │ def-- │ + │ ghi-- │ + └─────────────────┘ """ return ops.RPad(self, length, pad).to_expr() @@ -369,17 +762,17 @@ class StringValue(Value): str_list Sequence of strings - Examples - -------- - >>> import ibis - >>> table = ibis.table(dict(string_col='string')) - >>> result = table.string_col.find_in_set(['a', 'b']) - Returns ------- IntegerValue Position of `str_list` in `self`. Returns -1 if `self` isn't found or if `self` contains `','`. + + Examples + -------- + >>> import ibis + >>> table = ibis.table(dict(string_col='string')) + >>> result = table.string_col.find_in_set(['a', 'b']) """ return ops.FindInSet(self, str_list).to_expr() @@ -391,12 +784,6 @@ class StringValue(Value): strings Strings to join with `arg` - Examples - -------- - >>> import ibis - >>> sep = ibis.literal(',') - >>> result = sep.join(['a', 'b', 'c']) - Returns ------- StringValue @@ -450,16 +837,25 @@ class StringValue(Value): start prefix to check for - Examples - -------- - >>> import ibis - >>> text = ibis.literal('Ibis project') - >>> result = text.startswith('Ibis') - Returns ------- BooleanValue Boolean indicating whether `self` starts with `start` + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["Ibis project", "GitHub"]}) + >>> t.s.startswith("Ibis") + ┏━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ StartsWith(s, 'Ibis') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━┩ + │ boolean │ + ├───────────────────────┤ + │ True │ + │ False │ + └───────────────────────┘ """ return ops.StartsWith(self, start).to_expr() @@ -471,16 +867,25 @@ class StringValue(Value): end Suffix to check for - Examples - -------- - >>> import ibis - >>> text = ibis.literal('Ibis project') - >>> result = text.endswith('project') - Returns ------- BooleanValue Boolean indicating whether `self` ends with `end` + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["Ibis project", "GitHub"]}) + >>> t.s.endswith("project") + ┏━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ EndsWith(s, 'project') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ boolean │ + ├────────────────────────┤ + │ True │ + │ False │ + └────────────────────────┘ """ return ops.EndsWith(self, end).to_expr() @@ -505,6 +910,21 @@ class StringValue(Value): ------- BooleanValue Column indicating matches + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["Ibis project", "GitHub"]}) + >>> t.s.like("%project") + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringSQLLike(s, '%project') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ boolean │ + ├──────────────────────────────┤ + │ True │ + │ False │ + └──────────────────────────────┘ """ return functools.reduce( operator.or_, @@ -535,6 +955,21 @@ class StringValue(Value): ------- BooleanValue Column indicating matches + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["Ibis project", "GitHub"]}) + >>> t.s.ilike("%PROJect") + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringSQLILike(s, '%PROJect') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ boolean │ + ├───────────────────────────────┤ + │ True │ + │ False │ + └───────────────────────────────┘ """ return functools.reduce( operator.or_, @@ -561,6 +996,21 @@ class StringValue(Value): ------- BooleanValue Indicator of matches + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["Ibis project", "GitHub"]}) + >>> 
t.s.re_search(".+Hub") + ┏━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ RegexSearch(s, '.+Hub') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ boolean │ + ├─────────────────────────┤ + │ False │ + │ True │ + └─────────────────────────┘ """ return ops.RegexSearch(self, pattern).to_expr() @@ -592,6 +1042,38 @@ class StringValue(Value): ------- StringValue Extracted match or whole string if `index` is zero + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "bac", "bca"]}) + + Extract a specific group + + >>> t.s.re_extract(r"^(a)bc", 1) + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ RegexExtract(s, '^(a)', 1) ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────────────────┤ + │ a │ + │ ~ │ + │ ~ │ + └────────────────────────────┘ + + Extract the entire match + + >>> t.s.re_extract(r"^(a)bc", 0) + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ RegexExtract(s, '^(a)bc', 0) ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├──────────────────────────────┤ + │ abc │ + │ ~ │ + │ ~ │ + └──────────────────────────────┘ """ return ops.RegexExtract(self, pattern, index).to_expr() @@ -612,16 +1094,26 @@ class StringValue(Value): replacement Replacement string or regular expression - Examples - -------- - >>> import ibis - >>> str_literal = ibis.literal("aaabbbaaa") - >>> result = str_literal.re_replace("(b+)", r"<\\1>") # aaa<bbb>aaa - Returns ------- StringValue Modified string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "bac", "bca"]}) + >>> t.s.re_replace("^(a)", "b") + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ RegexReplace(s, '^(a)', 'b') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├──────────────────────────────┤ + │ bbc │ + │ bac │ + │ bca │ + └──────────────────────────────┘ """ return ops.RegexReplace(self, pattern, replacement).to_expr() @@ -639,16 +1131,26 @@ class StringValue(Value): replacement String replacement - Examples - -------- - >>> import ibis - >>> str_literal = ibis.literal("aaabbbaaa") - >>> result = str_literal.replace("aaa", "ccc") # cccbbbccc - Returns ------- StringValue Replaced string + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "bac", "bca"]}) + >>> t.s.replace("b", "z") + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringReplace(s, 'b', 'z') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────────────────┤ + │ azc │ + │ zac │ + │ zca │ + └────────────────────────────┘ """ return ops.StringReplace(self, pattern, replacement).to_expr() @@ -660,16 +1162,24 @@ class StringValue(Value): format_str Format string in `strptime` format - Examples - -------- - >>> import ibis - >>> date_as_str = ibis.literal('20170206') - >>> result = date_as_str.to_timestamp('%Y%m%d') - Returns ------- TimestampValue Parsed timestamp value + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"ts": ["20170206"]}) + >>> t.ts.to_timestamp("%Y%m%d") + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringToTimestamp(ts, '%Y%m%d') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ timestamp('UTC') │ + ├─────────────────────────────────┤ + │ 2017-02-06 00:00:00+00:00 │ + └─────────────────────────────────┘ """ return ops.StringToTimestamp(self, format_str).to_expr() @@ -813,6 +1323,8 @@ class StringValue(Value): def split(self, delimiter: str | StringValue) -> ir.ArrayValue: """Split as string on `delimiter`. + !!! note "This API only works on backends with array support." 
+ Parameters ---------- delimiter @@ -822,14 +1334,36 @@ class StringValue(Value): ------- ArrayValue The string split by `delimiter` + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"col": ["a,b,c", "d,e", "f"]}) + >>> t + ┏━━━━━━━━┓ + ┃ col ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ a,b,c │ + │ d,e │ + │ f │ + └────────┘ + >>> t.col.split(",") + ┏━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ StringSplit(col, ',') ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━┩ + │ array<string> │ + ├───────────────────────┤ + │ ['a', 'b', ... +1] │ + │ ['d', 'e'] │ + │ ['f'] │ + └───────────────────────┘ """ return ops.StringSplit(self, delimiter).to_expr() - def concat( - self, - other: str | StringValue, - *args: str | StringValue, - ) -> StringValue: + def concat(self, other: str | StringValue, *args: str | StringValue) -> StringValue: """Concatenate strings. Parameters @@ -843,6 +1377,22 @@ class StringValue(Value): ------- StringValue All strings concatenated + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "bac", "bca"]}) + >>> t.s.concat("xyz") + ┏━━━━━━━━━━━━━━━━┓ + ┃ StringConcat() ┃ + ┡━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────┤ + │ abcxyz │ + │ bacxyz │ + │ bcaxyz │ + └────────────────┘ """ return ops.StringConcat((self, other, *args)).to_expr() @@ -858,8 +1408,44 @@ class StringValue(Value): ------- StringValue All strings concatenated + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "bac", "bca"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ abc │ + │ bac │ + │ bca │ + └────────┘ + >>> t.s + "z" + ┏━━━━━━━━━━━━━━━━┓ + ┃ StringConcat() ┃ + ┡━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────┤ + │ abcz │ + │ bacz │ + │ bcaz │ + └────────────────┘ + >>> t.s + t.s + ┏━━━━━━━━━━━━━━━━┓ + ┃ StringConcat() ┃ + ┡━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────┤ + │ abcabc │ + │ bacbac │ + │ bcabca │ + └────────────────┘ """ - return ops.StringConcat((self, other)).to_expr() + return self.concat(other) def __radd__(self, other: str | StringValue) -> StringValue: """Concatenate strings. @@ -873,6 +1459,32 @@ class StringValue(Value): ------- StringValue All strings concatenated + + Examples + -------- + >>> import ibis + >>> ibis.options.interactive = True + >>> t = ibis.memtable({"s": ["abc", "bac", "bca"]}) + >>> t + ┏━━━━━━━━┓ + ┃ s ┃ + ┡━━━━━━━━┩ + │ string │ + ├────────┤ + │ abc │ + │ bac │ + │ bca │ + └────────┘ + >>> "z" + t.s + ┏━━━━━━━━━━━━━━━━┓ + ┃ StringConcat() ┃ + ┡━━━━━━━━━━━━━━━━┩ + │ string │ + ├────────────────┤ + │ zabc │ + │ zbac │ + │ zbca │ + └────────────────┘ """ return ops.StringConcat((other, self)).to_expr()
|
|
fix(pyspark): make sure `ibis.connect` works with pyspark
|
a7ab107200effd7a1f71fd2ae0af33ae2845232f
|
fix
|
https://github.com/ibis-project/ibis/commit/a7ab107200effd7a1f71fd2ae0af33ae2845232f
|
make sure `ibis.connect` works with pyspark
|
diff --git a/__init__.py b/__init__.py index bbc0180..611697a 100644 --- a/__init__.py +++ b/__init__.py @@ -1,6 +1,5 @@ from __future__ import annotations -import itertools from pathlib import Path from typing import TYPE_CHECKING, Any, Mapping @@ -8,6 +7,7 @@ import pandas as pd import pyspark import sqlalchemy as sa from pydantic import Field +from pyspark import SparkConf from pyspark.sql import DataFrame, SparkSession from pyspark.sql.column import Column @@ -101,12 +101,16 @@ class Backend(BaseSQLBackend): def _from_url(self, url: str) -> Backend: """Construct a PySpark backend from a URL `url`.""" url = sa.engine.make_url(url) - params = list(itertools.chain.from_iterable(url.query.items())) + + conf = SparkConf().setAll(url.query.items()) + if database := url.database: - params.append("spark.sql.warehouse.dir") - params.append(str(Path(database).absolute())) + conf = conf.set( + "spark.sql.warehouse.dir", + str(Path(database).absolute()), + ) - builder = SparkSession.builder.config(*params) + builder = SparkSession.builder.config(conf=conf) session = builder.getOrCreate() return self.connect(session) diff --git a/test_client.py b/test_client.py index 0a00994..fa900f6 100644 --- a/test_client.py +++ b/test_client.py @@ -432,6 +432,11 @@ def test_unsigned_integer_type(alchemy_con, coltype): marks=mark.pyspark, id="pyspark_with_warehouse", ), + param( + "pyspark://my-warehouse-dir", + marks=mark.pyspark, + id="pyspark_with_warehouse_no_params", + ), ], ) def test_connect_url(url):
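A quick usage sketch of the URL form exercised by the new test case: the path component becomes `spark.sql.warehouse.dir` and any query parameters are applied as `SparkConf` settings. The warehouse directory and the config key below are illustrative, not taken from the commit.

```python
import ibis

# Hypothetical warehouse directory; query parameters map onto SparkConf
# (spark.app.name is a standard Spark key). The new test also covers the
# parameter-free form, e.g. "pyspark://my-warehouse-dir".
con = ibis.connect("pyspark://my-warehouse-dir?spark.app.name=ibis-demo")
```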
|
|
refactor: make sure negation on a boolean cannot be `Negate`
|
6ae48a7d4a544c2706a486f9d3105ccdf6b2d3f7
|
refactor
|
https://github.com/ibis-project/ibis/commit/6ae48a7d4a544c2706a486f9d3105ccdf6b2d3f7
|
make sure negation on a boolean cannot be `Negate`
|
diff --git a/logical.py b/logical.py index 258db59..a15e8ff 100644 --- a/logical.py +++ b/logical.py @@ -75,6 +75,12 @@ class BooleanValue(NumericValue): def __invert__(self) -> BooleanValue: return self.negate() + @staticmethod + def __negate_op__(): + from ibis.expr import operations as ops + + return ops.Not + @public class BooleanScalar(NumericScalar, BooleanValue): diff --git a/numeric.py b/numeric.py index 073e74c..255a16e 100644 --- a/numeric.py +++ b/numeric.py @@ -14,6 +14,12 @@ if TYPE_CHECKING: @public class NumericValue(Value): + @staticmethod + def __negate_op__(): + from ibis.expr import operations as ops + + return ops.Negate + def negate(self) -> NumericValue: """Negate a numeric expression. @@ -22,13 +28,12 @@ class NumericValue(Value): NumericValue A numeric value expression """ - from ibis.expr import operations as ops - op = self.op() - if hasattr(op, 'negate'): + try: result = op.negate() - else: - result = ops.Negate(self) + except AttributeError: + op_class = self.__negate_op__() + result = op_class(self) return result.to_expr()
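Sketched at the expression level, the intent is that inverting a boolean lowers to `Not` while numeric negation still lowers to `Negate`. The assertions below are illustrative, assuming the usual `ibis.table` schema syntax:

```python
import ibis

t = ibis.table({"b": "boolean", "x": "int64"}, name="t")

# Boolean negation should now go through BooleanValue.__negate_op__ ...
assert type((~t.b).op()).__name__ == "Not"
# ... while numeric negation keeps constructing ops.Negate.
assert type((-t.x).op()).__name__ == "Negate"
```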
|
|
ci: run datafusion tests in series to avoid high memory usage (#10158)
|
e35d639978aa0337c9dba6de480c62bbb05ae634
|
ci
|
https://github.com/rohankumardubey/ibis/commit/e35d639978aa0337c9dba6de480c62bbb05ae634
|
run datafusion tests in series to avoid high memory usage (#10158)
|
diff --git a/ibis-backends.yml b/ibis-backends.yml index 23d8677..8d1b64a 100644 --- a/ibis-backends.yml +++ b/ibis-backends.yml @@ -123,6 +123,7 @@ jobs: - sqlite - name: datafusion title: DataFusion + serial: true extras: - datafusion - name: polars
|
|
test(postgres): remove redundant timestamp literal test
|
fbb0eaf64477fb6a5690961190dcedad0213021f
|
test
|
https://github.com/rohankumardubey/ibis/commit/fbb0eaf64477fb6a5690961190dcedad0213021f
|
remove redundant timestamp literal test
|
diff --git a/test_functions.py b/test_functions.py index 404cbae..fbbd12d 100644 --- a/test_functions.py +++ b/test_functions.py @@ -131,29 +131,6 @@ def test_timestamp_cast_noop(alltypes, at, translate): assert str(translate(result2)) == str(expected2) [email protected]( - ('func', 'expected'), - [ - param(operator.methodcaller('year'), 2015, id='year'), - param(operator.methodcaller('month'), 9, id='month'), - param(operator.methodcaller('day'), 1, id='day'), - param(operator.methodcaller('hour'), 14, id='hour'), - param(operator.methodcaller('minute'), 48, id='minute'), - param(operator.methodcaller('second'), 5, id='second'), - param(operator.methodcaller('millisecond'), 359, id='millisecond'), - param(lambda x: x.day_of_week.index(), 1, id='day_of_week_index'), - param( - lambda x: x.day_of_week.full_name(), - 'Tuesday', - id='day_of_week_full_name', - ), - ], -) -def test_simple_datetime_operations(con, func, expected, translate): - value = ibis.timestamp('2015-09-01 14:48:05.359') - assert con.execute(func(value)) == expected - - @pytest.mark.parametrize( 'pattern', [
|
|
chore(deps): fix macos build with scipy>=1.14 by bumping poetry2nix (#9493)
|
cec03749e271d6f4490a20e5c97667ec4f2f21f7
|
chore
|
https://github.com/rohankumardubey/ibis/commit/cec03749e271d6f4490a20e5c97667ec4f2f21f7
|
fix macos build with scipy>=1.14 by bumping poetry2nix (#9493)
|
diff --git a/flake.lock b/flake.lock index 36b24f5..dada7a0 100644 --- a/flake.lock +++ b/flake.lock @@ -88,11 +88,11 @@ "treefmt-nix": "treefmt-nix" }, "locked": { - "lastModified": 1719395064, - "narHash": "sha256-SsutCU+IytywS9HHGKtVZaGINcm6lpHXcyJzy7Rv0Co=", + "lastModified": 1719850884, + "narHash": "sha256-UU/lVTHFx0GpEkihoLJrMuM9DcuhZmNe3db45vshSyI=", "owner": "nix-community", "repo": "poetry2nix", - "rev": "0e52508053e3dcb568bf432a144bff367978d199", + "rev": "42262f382c68afab1113ebd1911d0c93822d756e", "type": "github" }, "original": { @@ -146,11 +146,11 @@ ] }, "locked": { - "lastModified": 1718522839, - "narHash": "sha256-ULzoKzEaBOiLRtjeY3YoGFJMwWSKRYOic6VNw2UyTls=", + "lastModified": 1719749022, + "narHash": "sha256-ddPKHcqaKCIFSFc/cvxS14goUhCOAwsM1PbMr0ZtHMg=", "owner": "numtide", "repo": "treefmt-nix", - "rev": "68eb1dc333ce82d0ab0c0357363ea17c31ea1f81", + "rev": "8df5ff62195d4e67e2264df0b7f5e8c9995fd0bd", "type": "github" }, "original": {
|
|
test(windows): give int64 dtype to series constructor
|
6fa4e8157d7546b28fd6a18968cbec3be0bf5912
|
test
|
https://github.com/rohankumardubey/ibis/commit/6fa4e8157d7546b28fd6a18968cbec3be0bf5912
|
give int64 dtype to series constructor
|
diff --git a/test_generic.py b/test_generic.py index 745a39a..465be1a 100644 --- a/test_generic.py +++ b/test_generic.py @@ -621,6 +621,7 @@ def test_where_column(backend, alltypes, df): expected = pd.Series( np.where(df.int_col == 0, 42, -1), name="where_col", + dtype="int64", ) backend.assert_series_equal(result, expected)
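The dtype pin matters because NumPy's default integer is the platform C `long`: 32-bit on Windows, 64-bit elsewhere, so `np.where` over Python ints yields different dtypes per platform. Pinning makes the expected series platform-independent; a minimal illustration:

```python
import numpy as np
import pandas as pd

cond = np.array([True, False, True])
# Without dtype=..., np.where(cond, 42, -1) is int32 on Windows, int64 elsewhere.
expected = pd.Series(np.where(cond, 42, -1), name="where_col", dtype="int64")
assert expected.dtype == np.dtype("int64")  # now holds on every platform
```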
|
|
fix: fixed issues with links colors and themes, fixes #4841
|
34b570d780aa305d6e731a8c0b66375d65d40835
|
fix
|
https://github.com/tsparticles/tsparticles/commit/34b570d780aa305d6e731a8c0b66375d65d40835
|
fixed issues with links colors and themes, fixes #4841
|
diff --git a/themes.pug b/themes.pug index 0dd0204..f3ed87e 100644 --- a/themes.pug +++ b/themes.pug @@ -111,6 +111,9 @@ html(lang="en") }, options: { particles: { + links: { + color: "#ff0000" + }, shape: { options: { images: { @@ -127,6 +130,9 @@ html(lang="en") name: "image2", options: { particles: { + links: { + color: "#00ff00" + }, shape: { options: { images: { @@ -145,6 +151,9 @@ html(lang="en") }, fpsLimit: 120, particles: { + links: { + enable: true + }, number: { value: 30, density: { diff --git a/Linker.ts b/Linker.ts index 85518b8..f9500f0 100644 --- a/Linker.ts +++ b/Linker.ts @@ -69,6 +69,7 @@ class Linker extends ParticlesInteractorBase { } init(): void { + this.linkContainer.particles.linksColor = undefined; this.linkContainer.particles.linksColors = new Map<string, IRgb | string | undefined>(); } diff --git a/WhatsApp.png b/WhatsApp.png index 8393d91..4dc040a 100644 --- a/WhatsApp.png +++ b/WhatsApp.png Binary files /dev/null and b/websites/particles.js.org/images/WhatsApp.png differ diff --git a/WhatsApp.svg b/WhatsApp.svg index b839ab0..7d5f951 100644 --- a/WhatsApp.svg +++ b/WhatsApp.svg @@ -0,0 +1 @@ +<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 175.216 175.552"><defs><linearGradient id="b" x1="85.915" x2="86.535" y1="32.567" y2="137.092" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#57d163"/><stop offset="1" stop-color="#23b33a"/></linearGradient><filter id="a" width="1.115" height="1.114" x="-.057" y="-.057" color-interpolation-filters="sRGB"><feGaussianBlur stdDeviation="3.531"/></filter></defs><path fill="#b3b3b3" d="m54.532 138.45 2.235 1.324c9.387 5.571 20.15 8.518 31.126 8.523h.023c33.707 0 61.139-27.426 61.153-61.135.006-16.335-6.349-31.696-17.895-43.251A60.75 60.75 0 0 0 87.94 25.983c-33.733 0-61.166 27.423-61.178 61.13a60.98 60.98 0 0 0 9.349 32.535l1.455 2.312-6.179 22.558zm-40.811 23.544L24.16 123.88c-6.438-11.154-9.825-23.808-9.821-36.772.017-40.556 33.021-73.55 73.578-73.55 19.681.01 38.154 7.669 52.047 21.572s21.537 32.383 21.53 52.037c-.018 40.553-33.027 73.553-73.578 73.553h-.032c-12.313-.005-24.412-3.094-35.159-8.954zm0 0" filter="url(#a)"/><path fill="#fff" d="m12.966 161.238 10.439-38.114a73.42 73.42 0 0 1-9.821-36.772c.017-40.556 33.021-73.55 73.578-73.55 19.681.01 38.154 7.669 52.047 21.572s21.537 32.383 21.53 52.037c-.018 40.553-33.027 73.553-73.578 73.553h-.032c-12.313-.005-24.412-3.094-35.159-8.954z"/><path fill="url(#linearGradient1780)" d="M87.184 25.227c-33.733 0-61.166 27.423-61.178 61.13a60.98 60.98 0 0 0 9.349 32.535l1.455 2.312-6.179 22.559 23.146-6.069 2.235 1.324c9.387 5.571 20.15 8.518 31.126 8.524h.023c33.707 0 61.14-27.426 61.153-61.135a60.75 60.75 0 0 0-17.895-43.251 60.75 60.75 0 0 0-43.235-17.929z"/><path fill="url(#b)" d="M87.184 25.227c-33.733 0-61.166 27.423-61.178 61.13a60.98 60.98 0 0 0 9.349 32.535l1.455 2.313-6.179 22.558 23.146-6.069 2.235 1.324c9.387 5.571 20.15 8.517 31.126 8.523h.023c33.707 0 61.14-27.426 61.153-61.135a60.75 60.75 0 0 0-17.895-43.251 60.75 60.75 0 0 0-43.235-17.928z"/><path fill="#fff" fill-rule="evenodd" d="M68.772 55.603c-1.378-3.061-2.828-3.123-4.137-3.176l-3.524-.043c-1.226 0-3.218.46-4.902 2.3s-6.435 6.287-6.435 15.332 6.588 17.785 7.506 19.013 12.718 20.381 31.405 27.75c15.529 6.124 18.689 4.906 22.061 4.6s10.877-4.447 12.408-8.74 1.532-7.971 1.073-8.74-1.685-1.226-3.525-2.146-10.877-5.367-12.562-5.981-2.91-.919-4.137.921-4.746 5.979-5.819 7.206-2.144 1.381-3.984.462-7.76-2.861-14.784-9.124c-5.465-4.873-9.154-10.891-10.228-12.73s-.114-2.835.808-3.751c.825-.824 1.838-2.147 
2.759-3.22s1.224-1.84 1.836-3.065.307-2.301-.153-3.22-4.032-10.011-5.666-13.647"/></svg>
|
|
fix(impala): replace `time_mapping` with `TIME_MAPPING` and backwards compatible check
|
4c3ca2003c03639e6bc78ca75ec78fe38708c019
|
fix
|
https://github.com/ibis-project/ibis/commit/4c3ca2003c03639e6bc78ca75ec78fe38708c019
|
replace `time_mapping` with `TIME_MAPPING` and backwards compatible check
|
diff --git a/timestamp.py b/timestamp.py index 6d1884e..2faf990 100644 --- a/timestamp.py +++ b/timestamp.py @@ -93,7 +93,10 @@ def day_of_week_index(t, op): def strftime(t, op): import sqlglot as sg - reverse_hive_mapping = {v: k for k, v in sg.dialects.hive.Hive.time_mapping.items()} + hive_dialect = sg.dialects.hive.Hive + if (time_mapping := getattr(hive_dialect, "TIME_MAPPING", None)) is None: + time_mapping = hive_dialect.time_mapping + reverse_hive_mapping = {v: k for k, v in time_mapping.items()} format_str = sg.time.format_time(op.format_str.value, reverse_hive_mapping) targ = t.translate(ops.Cast(op.arg, to=dt.string)) return f"from_unixtime(unix_timestamp({targ}), {format_str!r})"
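The guard above handles a sqlglot rename: newer releases expose the dialect's time mapping as `TIME_MAPPING`, older ones as `time_mapping`. The probe-then-fallback pattern in isolation, using a stand-in class:

```python
class Hive:
    time_mapping = {"%Y": "yyyy"}  # old-style attribute only, as in older sqlglot

# Try the new uppercase name first; fall back to the legacy lowercase one.
if (time_mapping := getattr(Hive, "TIME_MAPPING", None)) is None:
    time_mapping = Hive.time_mapping

reverse_mapping = {v: k for k, v in time_mapping.items()}
assert reverse_mapping == {"yyyy": "%Y"}
```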
|
|
build: added twinkle updater to full bundle
|
ef283040de19a22a0e1f77a4646defee2d866e73
|
build
|
https://github.com/tsparticles/tsparticles/commit/ef283040de19a22a0e1f77a4646defee2d866e73
|
added twinkle updater to full bundle
|
diff --git a/package.dist.json b/package.dist.json index c938309..55c456e 100644 --- a/package.dist.json +++ b/package.dist.json @@ -71,6 +71,7 @@ "tsparticles-slim": "^2.0.3", "tsparticles-updater-roll": "^2.0.3", "tsparticles-updater-tilt": "^2.0.3", + "tsparticles-updater-twinkle": "^2.0.3", "tsparticles-updater-wobble": "^2.0.3" } } \\ No newline at end of file diff --git a/package.json b/package.json index 53110ec..1fdeafe 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,6 @@ "react": "18.0.0", "react-dom": "18.0.0", "react-tsparticles": "^2.0.3", - "tsparticles": "^2.0.3", "tsparticles-preset-big-circles": "^2.0.3" }, "devDependencies": { diff --git a/index.ts b/index.ts index b0ab7b0..5fa34dc 100644 --- a/index.ts +++ b/index.ts @@ -6,13 +6,15 @@ import { loadPolygonMaskPlugin } from "tsparticles-plugin-polygon-mask"; import { loadRollUpdater } from "tsparticles-updater-roll"; import { loadSlim } from "tsparticles-slim"; import { loadTiltUpdater } from "tsparticles-updater-tilt"; +import { loadTwinkleUpdater } from "tsparticles-updater-twinkle"; import { loadWobbleUpdater } from "tsparticles-updater-wobble"; export async function loadFull(engine: Engine): Promise<void> { await loadSlim(engine); - await loadTiltUpdater(engine); await loadRollUpdater(engine); + await loadTiltUpdater(engine); + await loadTwinkleUpdater(engine); await loadWobbleUpdater(engine); await loadExternalTrailInteraction(engine); diff --git a/yarn.lock b/yarn.lock index 07a35d8..9166853 100644 --- a/yarn.lock +++ b/yarn.lock @@ -649,33 +649,33 @@ tslib@^2.0.3: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== -tsparticles-editor@^1.43.0: - version "1.43.0" - resolved "https://registry.yarnpkg.com/tsparticles-editor/-/tsparticles-editor-1.43.0.tgz#09d29a995636b55af0652e68013d2cbf65416ae8" - integrity sha512-nTXwjqNJgJPqhYA+brHh8+B3rAAMJ1NbGk/jk7A3tbBfB730jcqxhqFhw4kNLwV4uIzVALX+hiWRpFtWexKnqA== +tsparticles-editor@^1.43.1: + version "1.43.1" + resolved "https://registry.yarnpkg.com/tsparticles-editor/-/tsparticles-editor-1.43.1.tgz#f53c3193cab149f6876196b012616cc75bb9ff5d" + integrity sha512-OkXT889jvVVPRgr7v3/BXaqK1nMVTWAtan0f7vkklf+tTwi7jZOLUnYqRFnLbdcYi/RiamxX2P3if1/5YofXzQ== dependencies: object-gui "^1.1.1" - tsparticles "^1.43.0" - tsparticles-plugin-infection "^1.43.0" + tsparticles "^1.43.1" + tsparticles-plugin-infection "^1.43.1" -tsparticles-plugin-infection@^1.43.0: - version "1.43.0" - resolved "https://registry.yarnpkg.com/tsparticles-plugin-infection/-/tsparticles-plugin-infection-1.43.0.tgz#fb5277d71d39b0558cf954c315d9937a4d313e83" - integrity sha512-AolH0XadD5bphIHOTziGkrLCZAWKDCROZ3klQuiCVHO5lKfHjwWpiabtYAp0LpkLZs+wGBoM0Ug3NN9Vp87UwA== +tsparticles-plugin-infection@^1.43.1: + version "1.43.1" + resolved "https://registry.yarnpkg.com/tsparticles-plugin-infection/-/tsparticles-plugin-infection-1.43.1.tgz#7fea1dbd5b792ab10273aa833642953e098bd649" + integrity sha512-1PNrTNrSd+b1SzSe/Yv7NuXuR7caw32ldhOcFec5fay8H4sRno6WufkmEkfbMZmJ+Yi4kaEovpYVCl3VNQ/5bA== dependencies: - tsparticles "^1.43.0" + tsparticles "^1.43.1" -tsparticles-preset-links@^1.43.0: - version "1.43.0" - resolved "https://registry.yarnpkg.com/tsparticles-preset-links/-/tsparticles-preset-links-1.43.0.tgz#b153e238c9289907138e712fa8edccc509c6f7c2" - integrity sha512-x4ckkHi6sf4HqYAPVDYtwaQXYnY+AyflEmJnvIMD7NOc38DxPquuRFc5P1A4NZe9jZ2vSM/kwoyRVfyPz3Fk5Q== 
+tsparticles-preset-links@^1.43.1: + version "1.43.1" + resolved "https://registry.yarnpkg.com/tsparticles-preset-links/-/tsparticles-preset-links-1.43.1.tgz#ed0fac1b8c4d74a9bef7c2034638464f9284efc0" + integrity sha512-3izVtbbZvXcmXRHopNQ3QqNGBXbXh5VvBJyVjc1yQvxk5bH/jJdzmNQZuPwBt3MfDavra1eYg9mw4kF+yIpXJg== dependencies: - tsparticles "^1.43.0" + tsparticles "^1.43.1" -tsparticles@^1.43.0: - version "1.43.0" - resolved "https://registry.yarnpkg.com/tsparticles/-/tsparticles-1.43.0.tgz#bb26ca852ae85f870fd15bbcb71b83d52b396068" - integrity sha512-0r9hdGaImm0cxp6FraLgKUBJy03utZ2j2LaBZus6hSpPMnsP48UrJLQKmcgRRlGWeOUq6YI6WywpbSbSoA/cKA== +tsparticles@^1.43.1: + version "1.43.1" + resolved "https://registry.yarnpkg.com/tsparticles/-/tsparticles-1.43.1.tgz#1f6b37ee41b6ab043d66a0e20bfe0855a1aea006" + integrity sha512-6EuHncwqzoyTlUxc11YH8LVlwVUgpYaZD0yMOeA2OvRqFZ9VQV8EjjQ6ZfXt6pfGA1ObPwU929jveFatxwTQkg== universalify@^0.1.0: version "0.1.2"
|
|
feat(snowflake): use upstream `array_sort`
|
96243416771109fbaf383167470ac2481fe938ef
|
feat
|
https://github.com/rohankumardubey/ibis/commit/96243416771109fbaf383167470ac2481fe938ef
|
use upstream `array_sort`
|
diff --git a/__init__.py b/__init__.py index e1b8e2d..bcf494b 100644 --- a/__init__.py +++ b/__init__.py @@ -110,11 +110,6 @@ return longest.map((_, i) => { return Object.assign(...keys.map((key, j) => ({[key]: arrays[j][i]}))); })""", }, - "ibis_udfs.public.array_sort": { - "inputs": {"array": ARRAY}, - "returns": ARRAY, - "source": """return array.sort();""", - }, "ibis_udfs.public.array_repeat": { # Integer inputs are not allowed because JavaScript only supports # doubles diff --git a/registry.py b/registry.py index 98e15d8..142bd58 100644 --- a/registry.py +++ b/registry.py @@ -420,7 +420,7 @@ operation_registry.update( ops.ArrayRemove: fixed_arity(sa.func.array_remove, 2), ops.ArrayIntersect: fixed_arity(sa.func.array_intersection, 2), ops.ArrayZip: _array_zip, - ops.ArraySort: unary(sa.func.ibis_udfs.public.array_sort), + ops.ArraySort: unary(sa.func.array_sort), ops.ArrayRepeat: fixed_arity(sa.func.ibis_udfs.public.array_repeat, 2), ops.StringSplit: fixed_arity(sa.func.split, 2), # snowflake typeof only accepts VARIANT, so we cast
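Since Snowflake now ships a built-in `ARRAY_SORT`, the registry can point at it directly instead of the JavaScript UDF. `sa.func` renders whatever dotted name it is given, so the compiled call changes shape as below (a sketch, not the full ibis compile pipeline):

```python
import sqlalchemy as sa

col = sa.column("arr")

# new: Snowflake's native function
print(sa.func.array_sort(col))                   # array_sort(arr)

# old: the JavaScript UDF lived under a qualified name
print(sa.func.ibis_udfs.public.array_sort(col))  # ibis_udfs.public.array_sort(arr)
```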
|
|
test(bigquery): fix struct/schema deprecation warnings
|
3b1ce442d53696c6583fc01692b28d662f3032c8
|
test
|
https://github.com/ibis-project/ibis/commit/3b1ce442d53696c6583fc01692b28d662f3032c8
|
fix struct/schema deprecation warnings
|
diff --git a/client.py b/client.py index bccd6a4..e63af6c 100644 --- a/client.py +++ b/client.py @@ -48,7 +48,7 @@ def bigquery_field_to_ibis_dtype(field): assert fields, "RECORD fields are empty" names = [el.name for el in fields] ibis_types = list(map(dt.dtype, fields)) - ibis_type = dt.Struct(names, ibis_types) + ibis_type = dt.Struct(dict(zip(names, ibis_types))) else: ibis_type = _LEGACY_TO_STANDARD.get(typ, typ) ibis_type = _DTYPE_TO_IBIS_TYPE.get(ibis_type, ibis_type) diff --git a/test_compiler.py b/test_compiler.py index c1983a1..df8914a 100644 --- a/test_compiler.py +++ b/test_compiler.py @@ -253,7 +253,7 @@ def test_large_compile(): pass names = [f"col_{i}" for i in range(num_columns)] - schema = ibis.Schema(names, ["string"] * num_columns) + schema = ibis.Schema(dict.fromkeys(names, "string")) ibis_client = MockBackend() table = ops.SQLQueryResult("select * from t", schema, ibis_client).to_expr() for _ in range(num_joins): # noqa: F402 diff --git a/schema.py b/schema.py index 227294e..ff8ac0f 100644 --- a/schema.py +++ b/schema.py @@ -56,7 +56,7 @@ class Schema(Concrete): return super().__create__(fields=names) else: warn_deprecated( - "Struct(names, types)", + "Schema(names, types)", as_of="4.1", removed_in="5.0", instead=(
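Both fixes move from the deprecated parallel names/types constructors to a single mapping argument. A short sketch of old-vs-new spellings, assuming an ibis version from this era (4.x):

```python
import ibis
import ibis.expr.datatypes as dt

names = ["a", "b"]

# deprecated: dt.Struct(names, types) / ibis.Schema(names, types)
# current: pass one mapping of name -> type
struct = dt.Struct(dict(zip(names, [dt.string, dt.int64])))
schema = ibis.Schema(dict(zip(names, ["string", "int64"])))

# dict.fromkeys is handy when every column shares a type
uniform = ibis.Schema(dict.fromkeys(names, "string"))
```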
|
|
chore: add TEMPDIR to shell.nix hook
|
40019b1c20b588485bab6b3053e4db19c8c1b368
|
chore
|
https://github.com/ibis-project/ibis/commit/40019b1c20b588485bab6b3053e4db19c8c1b368
|
add TEMPDIR to shell.nix hook
|
diff --git a/elementwise.ipynb b/elementwise.ipynb index e623e85..4eb138f 100644 --- a/elementwise.ipynb +++ b/elementwise.ipynb @@ -159,7 +159,9 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "!curl -LsS -o $TEMPDIR/geography.db 'https://storage.googleapis.com/ibis-tutorial-data/geography.db'" + ] }, { "cell_type": "code", diff --git a/shell.nix b/shell.nix index 45b1580..9bab9b3 100644 --- a/shell.nix +++ b/shell.nix @@ -49,6 +49,8 @@ pkgs.mkShell { name = "ibis${pythonShortVersion}"; shellHook = '' + set -euo pipefail + data_dir="$PWD/ci/ibis-testing-data" mkdir -p "$data_dir" chmod u+rwx "$data_dir" @@ -56,6 +58,9 @@ pkgs.mkShell { chmod --recursive u+rw "$data_dir" export IBIS_TEST_DATA_DIRECTORY="$data_dir" + + export TEMPDIR + TEMPDIR="$(python -c 'import tempfile; print(tempfile.gettempdir())')" ''; buildInputs = devDeps ++ libraryDevDeps ++ [
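The hook leans on Python's `tempfile` module to pick the platform's temporary directory, which the notebook then reaches via `$TEMPDIR`. The one-liner expanded:

```python
import tempfile

# Respects TMPDIR/TEMP/TMP if set; otherwise a platform default such as /tmp.
print(tempfile.gettempdir())
```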
|
|
build: moved the CircleWarp class out to the linker since it's the only one using it
|
ce97dcb0078d03e7278a9b179a5ec9cc14b7fb32
|
build
|
https://github.com/tsparticles/tsparticles/commit/ce97dcb0078d03e7278a9b179a5ec9cc14b7fb32
|
moved the CircleWarp class out to the linker since it's the only one using it
|
diff --git a/QuadTree.ts b/QuadTree.ts index 59f8131..866f90f 100644 --- a/QuadTree.ts +++ b/QuadTree.ts @@ -1,4 +1,4 @@ -import { Circle, CircleWarp, Point, Rectangle } from "../src"; +import { Circle, Point, Rectangle } from "../src"; import { describe, it } from "mocha"; import { QuadTree } from "../src/Core/Utils/QuadTree"; import { TestContainer } from "./Fixture/TestContainer"; @@ -33,7 +33,7 @@ describe("QuadTree tests", () => { }); }); - describe("CircleWarp (0, 0, 30) in canvas (200, 200) tests", () => { + /*describe("CircleWarp (0, 0, 30) in canvas (200, 200) tests", () => { const circle1 = new CircleWarp(0, 0, 30, canvasSize); it("should intersect with a (180, 180, 20, 20) rectangle", () => { @@ -47,7 +47,7 @@ describe("QuadTree tests", () => { expect(circle1.intersects(rect2)).to.be.true; }); - }); + });*/ describe("Quad Tree (200x200) tests", () => { const quadTree = new QuadTree(new Rectangle(0, 0, 200, 200), 4); @@ -65,7 +65,7 @@ describe("QuadTree tests", () => { quadTree.insert(new Point(pos1, p1.particle)); - it("query (radius 10) with p1 (5, 5) center should have at least p2 (10, 10)", () => { + /*it("query (radius 10) with p1 (5, 5) center should have at least p2 (10, 10)", () => { const p2 = new TestParticle(testContainer.container, { x: 10, y: 10 }); expect(p2.particle).to.not.be.undefined; @@ -146,6 +146,7 @@ describe("QuadTree tests", () => { expect(quadTree.queryCircleWarp(pos1, 10, canvasSize)).to.be.not.empty; }); + */ }); describe("Particle (100, 5) tests", () => { @@ -161,7 +162,7 @@ describe("QuadTree tests", () => { quadTree.insert(new Point(pos1, p1.particle)); - it("query (radius 10) with p1 (100, 5) center should have at least p2 (100, 199)", () => { + /*it("query (radius 10) with p1 (100, 5) center should have at least p2 (100, 199)", () => { const p2 = new TestParticle(testContainer.container, { x: 100, y: 199 }); expect(p2.particle).to.not.be.undefined; @@ -175,7 +176,7 @@ describe("QuadTree tests", () => { quadTree.insert(new Point(pos2, p2.particle)); expect(quadTree.queryCircleWarp(pos1, 10, canvasSize)).to.be.not.empty; - }); + });*/ }); describe("Particle (5, 100) tests", () => { @@ -193,7 +194,7 @@ describe("QuadTree tests", () => { quadTree.insert(new Point(pos1, p1.particle)); - it("query (radius 10) with p1 (5, 100) center should have at least p2 (199, 100)", () => { + /*it("query (radius 10) with p1 (5, 100) center should have at least p2 (199, 100)", () => { const p2 = new TestParticle(testContainer.container, { x: 199, y: 100 }); expect(p2.particle).to.not.be.undefined; @@ -207,7 +208,7 @@ describe("QuadTree tests", () => { quadTree.insert(new Point(pos2, p2.particle)); expect(quadTree.queryCircleWarp(pos1, 10, canvasSize)).to.be.not.empty; - }); + });*/ }); }); }); diff --git a/bundle.ts b/bundle.ts index 44c51bc..9691f93 100644 --- a/bundle.ts +++ b/bundle.ts @@ -19,7 +19,6 @@ const tsParticles = new Engine(); tsParticles.init(); export * from "./Core/Utils/Circle"; -export * from "./Core/Utils/CircleWarp"; export * from "./Core/Utils/Constants"; export * from "./Core/Utils/ExternalInteractorBase"; export * from "./Core/Utils/ParticlesInteractorBase"; diff --git a/index.ts b/index.ts index 2b98e13..a4d74c8 100644 --- a/index.ts +++ b/index.ts @@ -52,7 +52,6 @@ export * from "./Core/Interfaces/IShapeDrawer"; export * from "./Core/Interfaces/IShapeValues"; export * from "./Core/Interfaces/ISlowParticleData"; export * from "./Core/Utils/Circle"; -export * from "./Core/Utils/CircleWarp"; export * from "./Core/Utils/Constants"; export * from 
"./Core/Utils/ExternalInteractorBase"; export * from "./Core/Utils/ParticlesInteractorBase"; diff --git a/CircleWarp.ts b/CircleWarp.ts index 41894e5..1f295c1 100644 --- a/CircleWarp.ts +++ b/CircleWarp.ts @@ -1,8 +1,5 @@ -import { Circle } from "./Circle"; -import type { ICoordinates } from "../Interfaces/ICoordinates"; -import type { IDimension } from "../Interfaces/IDimension"; -import type { Range } from "./Range"; -import { Rectangle } from "./Rectangle"; +import { Circle, Rectangle } from "tsparticles-engine"; +import type { ICoordinates, IDimension, Range } from "tsparticles-engine"; /** * @category Utils diff --git a/Linker.ts b/Linker.ts index 22ecf14..45568e7 100644 --- a/Linker.ts +++ b/Linker.ts @@ -1,10 +1,11 @@ -import { Circle, CircleWarp, ParticlesInteractorBase, getDistance, getLinkRandomColor } from "tsparticles-engine"; +import { Circle, ParticlesInteractorBase, getDistance, getLinkRandomColor } from "tsparticles-engine"; import type { ICoordinates, IDimension, IRgb, RecursivePartial } from "tsparticles-engine"; import type { IParticlesLinkOptions } from "./Options/Interfaces/IParticlesLinkOptions"; import type { LinkContainer } from "./LinkContainer"; import type { LinkParticle } from "./LinkParticle"; import { Links } from "./Options/Classes/Links"; import type { ParticlesLinkOptions } from "./Options/Classes/ParticlesLinkOptions"; +import { CircleWarp } from "./CircleWarp"; function getLinkDistance( pos1: ICoordinates,
|
|
test(polars): skip broken functional test
|
b1137f70679eb3dca05b794f717383afe66117da
|
test
|
https://github.com/rohankumardubey/ibis/commit/b1137f70679eb3dca05b794f717383afe66117da
|
skip broken functional test
|
diff --git a/test_array.py b/test_array.py index b1f65e4..3e197df 100644 --- a/test_array.py +++ b/test_array.py @@ -996,7 +996,11 @@ timestamp_range_tzinfos = pytest.mark.parametrize( ibis.interval(hours=-1), "-1H", id="neg_inner", - marks=[pytest.mark.notyet(["polars"], raises=PolarsComputeError)], + marks=[ + pytest.mark.broken( + ["polars"], raises=AssertionError, reason="returns an empty array" + ) + ], ), param( datetime(2017, 1, 2),
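`pytest.mark.broken` is an ibis-defined marker; the point of the change is that `pytest.param(..., marks=...)` flags only the polars case while the rest of the parametrization keeps running. The same shape with stock pytest's `xfail` (hypothetical test, not from the diff):

```python
import pytest

@pytest.mark.parametrize(
    "value",
    [
        pytest.param(
            -1,
            marks=pytest.mark.xfail(raises=AssertionError, reason="known broken"),
            id="neg",
        ),
        pytest.param(1, id="pos"),
    ],
)
def test_value_is_positive(value):
    assert value > 0  # the "neg" case xfails; "pos" passes
```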
|
|
feat(sql): add support for computed properties via `@Formula()` (#553)
`@Formula()` decorator can be used to map some SQL snippet to your entity.
The SQL fragment can be as complex as you want and even include subselects.
```typescript
@Formula('obj_length * obj_height * obj_width')
objectVolume?: number;
```
Formulas will be added to the select clause automatically. In case you are facing
problems with `NonUniqueFieldNameException`, you can define the formula as a
callback that receives the entity alias as a parameter:
```typescript
@Formula(alias => `${alias}.obj_length * ${alias}.obj_height * ${alias}.obj_width`)
objectVolume?: number;
```
|
68b9336aed3f098dea9c91fc3a060fb87449f0e0
|
feat
|
https://github.com/mikro-orm/mikro-orm/commit/68b9336aed3f098dea9c91fc3a060fb87449f0e0
|
add support for computed properties via `@Formula()` (#553)
`@Formula()` decorator can be used to map some SQL snippet to your entity.
The SQL fragment can be as complex as you want and even include subselects.
```typescript
@Formula('obj_length * obj_height * obj_width')
objectVolume?: number;
```
Formulas will be added to the select clause automatically. In case you are facing
problems with `NonUniqueFieldNameException`, you can define the formula as a
callback that receives the entity alias as a parameter:
```typescript
@Formula(alias => `${alias}.obj_length * ${alias}.obj_height * ${alias}.obj_width`)
objectVolume?: number;
```
|
diff --git a/ROADMAP.md b/ROADMAP.md index 7c8b570..77e0d24 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -31,8 +31,8 @@ discuss specifics. - [x] Use custom errors for specific cases (unique constraint violation, db not accessible, ...) - [x] Paginator helper or something similar ([doctrine docs](https://www.doctrine-project.org/projects/doctrine-orm/en/latest/tutorials/pagination.html)) - [x] Add `groupBy` and `distinct` to `FindOptions` and `FindOneOptions` +- [x] Support computed properties via `@Formula()` decorator - [ ] Lazy scalar properties (allow having props that won't be loaded by default, but can be populated) -- [ ] Support computed properties - [ ] Association scopes/filters ([hibernate docs](https://docs.jboss.org/hibernate/orm/3.6/reference/en-US/html/filters.html)) - [ ] Support external hooks when using EntitySchema (hooks outside of entity) - [ ] Cache metadata only with ts-morph provider diff --git a/decorators.md b/decorators.md index 90f649a..7865392 100644 --- a/decorators.md +++ b/decorators.md @@ -137,6 +137,22 @@ enum3 = 3; enum4 = 'a'; ``` +### @Formula() + +`@Formula()` decorator can be used to map some SQL snippet to your entity. +The SQL fragment can be as complex as you want and even include subselects. + +See [Defining Entities](defining-entities.md#formulas). + +| Parameter | Type | Optional | Description | +|-----------|------|----------|-------------| +| `formula` | `string` | `() => string` | no | SQL fragment that will be part of the select clause. | + +```typescript +@Formula('obj_length * obj_height * obj_width') +objectVolume?: number; +``` + ### @Index() and @Unique() Use `@Index()` to create an index, or `@Unique()` to create unique constraint. You can diff --git a/defining-entities.md b/defining-entities.md index 2deb5ec..e371d3d 100644 --- a/defining-entities.md +++ b/defining-entities.md @@ -256,6 +256,25 @@ export const enum UserStatus { // export { OutsideEnum } from './OutsideEnum.ts'; ``` +## Formulas + +`@Formula()` decorator can be used to map some SQL snippet to your entity. +The SQL fragment can be as complex as you want and even include subselects. + +```typescript +@Formula('obj_length * obj_height * obj_width') +objectVolume?: number; +``` + +Formulas will be added to the select clause automatically. In case you are facing +problems with `NonUniqueFieldNameException`, you can define the formula as a +callback that will receive the entity alias in the parameter: + +```typescript +@Formula(alias => `${alias}.obj_length * ${alias}.obj_height * ${alias}.obj_width`) +objectVolume?: number; +``` + ## Indexes You can define indexes via `@Index()` decorator, for unique indexes, use `@Unique()` decorator. 
diff --git a/Formula.ts b/Formula.ts index d5d01ae..5350537 100644 --- a/Formula.ts +++ b/Formula.ts @@ -0,0 +1,10 @@ +import { MetadataStorage } from '../metadata'; +import { ReferenceType } from '../entity'; +import { EntityProperty, AnyEntity } from '../typings'; + +export function Formula(formula: string | ((alias: string) => string)): Function { + return function (target: AnyEntity, propertyName: string) { + const meta = MetadataStorage.getMetadataFromDecorator(target.constructor); + meta.properties[propertyName] = { name: propertyName, reference: ReferenceType.SCALAR, persist: false, formula } as EntityProperty; + }; +} diff --git a/Property.ts b/Property.ts index 8ba4b80..de10e39 100644 --- a/Property.ts +++ b/Property.ts @@ -44,6 +44,7 @@ export type PropertyOptions = { onUpdate?: () => any; default?: string | number | boolean | null; defaultRaw?: string; + formula?: string | ((alias: string) => string); nullable?: boolean; unsigned?: boolean; persist?: boolean; diff --git a/index.ts b/index.ts index 683426c..fb52cc7 100644 --- a/index.ts +++ b/index.ts @@ -6,6 +6,7 @@ export * from './ManyToMany'; export { OneToMany, OneToManyOptions } from './OneToMany'; export * from './Property'; export * from './Enum'; +export * from './Formula'; export * from './Indexed'; export * from './Repository'; export * from './Embeddable'; diff --git a/EntitySchema.ts b/EntitySchema.ts index da70e26..5105c34 100644 --- a/EntitySchema.ts +++ b/EntitySchema.ts @@ -66,6 +66,11 @@ export class EntitySchema<T extends AnyEntity<T> = AnyEntity, U extends AnyEntit prop.type = type as string; } + if (Utils.isString(prop.formula)) { + const formula = prop.formula as string; // tmp var is needed here + prop.formula = () => formula; + } + this._meta.properties[name] = prop; } diff --git a/typings.ts b/typings.ts index c510ba4..c8ef9f2 100644 --- a/typings.ts +++ b/typings.ts @@ -110,6 +110,7 @@ export interface EntityProperty<T extends AnyEntity<T> = any> { fieldNames: string[]; default?: string | number | boolean | null; defaultRaw?: string; + formula?: (alias: string) => string; prefix?: string | boolean; embedded?: [string, string]; embeddable: Constructor<T>; diff --git a/QueryBuilder.ts b/QueryBuilder.ts index 30387a0..348a504 100644 --- a/QueryBuilder.ts +++ b/QueryBuilder.ts @@ -458,6 +458,16 @@ export class QueryBuilder<T extends AnyEntity<T> = AnyEntity> { } }); + if (this.metadata.has(this.entityName) && (this._fields?.includes('*') || this._fields?.includes(`${this.alias}.*`))) { + Object.values(meta.properties) + .filter(prop => prop.formula) + .forEach(prop => { + const formula = this.knex.ref(this.alias).toString(); + const alias = this.knex.ref(prop.fieldNames[0]).toString(); + this.addSelect(`${prop.formula!(formula)} as ${alias}`); + }); + } + SmartQueryHelper.processParams([this._data, this._cond, this._having]); this.finalized = true; diff --git a/EntityHelper.mysql.test.ts b/EntityHelper.mysql.test.ts index 81cf8cb..c18f544 100644 --- a/EntityHelper.mysql.test.ts +++ b/EntityHelper.mysql.test.ts @@ -114,6 +114,7 @@ describe('EntityHelperMySql', () => { fooBar: null, id: 1, name: 'fb', + random: 123, version: a.version, }); }); diff --git a/EntityManager.mysql.test.ts b/EntityManager.mysql.test.ts index 83df96f..1620d6b 100644 --- a/EntityManager.mysql.test.ts +++ b/EntityManager.mysql.test.ts @@ -816,7 +816,7 @@ describe('EntityManagerMySql', () => { const b1 = (await orm.em.findOne(FooBaz2, { id: baz.id }, ['bar']))!; expect(mock.mock.calls[1][0]).toMatch('select `e0`.*, `e1`.`id` as `bar_id` 
from `foo_baz2` as `e0` left join `foo_bar2` as `e1` on `e0`.`id` = `e1`.`baz_id` where `e0`.`id` = ? limit ?'); - expect(mock.mock.calls[2][0]).toMatch('select `e0`.* from `foo_bar2` as `e0` where `e0`.`id` in (?) order by `e0`.`id` asc'); + expect(mock.mock.calls[2][0]).toMatch('select `e0`.*, (select 123) as `random` from `foo_bar2` as `e0` where `e0`.`id` in (?) order by `e0`.`id` asc'); expect(b1.bar).toBeInstanceOf(FooBar2); expect(b1.bar!.id).toBe(bar.id); expect(wrap(b1).toJSON()).toMatchObject({ bar: wrap(bar).toJSON() }); @@ -824,7 +824,7 @@ describe('EntityManagerMySql', () => { const b2 = (await orm.em.findOne(FooBaz2, { bar: bar.id }, ['bar']))!; expect(mock.mock.calls[3][0]).toMatch('select `e0`.*, `e1`.`id` as `bar_id` from `foo_baz2` as `e0` left join `foo_bar2` as `e1` on `e0`.`id` = `e1`.`baz_id` where `e1`.`id` = ? limit ?'); - expect(mock.mock.calls[4][0]).toMatch('select `e0`.* from `foo_bar2` as `e0` where `e0`.`id` in (?) order by `e0`.`id` asc'); + expect(mock.mock.calls[4][0]).toMatch('select `e0`.*, (select 123) as `random` from `foo_bar2` as `e0` where `e0`.`id` in (?) order by `e0`.`id` asc'); expect(b2.bar).toBeInstanceOf(FooBar2); expect(b2.bar!.id).toBe(bar.id); expect(wrap(b2).toJSON()).toMatchObject({ bar: wrap(bar).toJSON() }); @@ -1293,7 +1293,7 @@ describe('EntityManagerMySql', () => { orm.em.clear(); const books = await orm.em.find(Book2, { tagsUnordered: { name: { $ne: 'funny' } } }, ['tagsUnordered'], { title: QueryOrder.DESC }); - expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e3`.`id` as `test_id` from `book2` as `e0` ' + + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e3`.`id` as `test_id` from `book2` as `e0` ' + 'left join `book_to_tag_unordered` as `e2` on `e0`.`uuid_pk` = `e2`.`book2_uuid_pk` ' + 'left join `book_tag2` as `e1` on `e2`.`book_tag2_id` = `e1`.`id` ' + 'left join `test2` as `e3` on `e0`.`uuid_pk` = `e3`.`book_uuid_pk` ' + @@ -1310,7 +1310,7 @@ describe('EntityManagerMySql', () => { orm.em.clear(); mock.mock.calls.length = 0; const tags = await orm.em.find(BookTag2, { booksUnordered: { title: { $ne: 'My Life on The Wall, part 3' } } }, ['booksUnordered'], { name: QueryOrder.ASC }); - expect(mock.mock.calls[1][0]).toMatch('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id`, `e2`.`id` as `test_id` from `book2` as `e0` ' + + expect(mock.mock.calls[1][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id`, `e2`.`id` as `test_id` from `book2` as `e0` ' + 'left join `book_to_tag_unordered` as `e1` on `e0`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + 'left join `test2` as `e2` on `e0`.`uuid_pk` = `e2`.`book_uuid_pk` ' + 'where `e0`.`title` != ? 
and `e1`.`book_tag2_id` in (?, ?, ?, ?, ?, ?)'); @@ -1590,7 +1590,7 @@ describe('EntityManagerMySql', () => { expect(res1[0].test).toBeInstanceOf(Test2); expect(wrap(res1[0].test).isInitialized()).toBe(false); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e2`.`id` as `test_id` ' + + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e2`.`id` as `test_id` ' + 'from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `test2` as `e2` on `e0`.`uuid_pk` = `e2`.`book_uuid_pk` ' + // auto-joined 1:1 to get test id as book is inverse side @@ -1603,7 +1603,7 @@ describe('EntityManagerMySql', () => { expect(res2[0].test).toBeInstanceOf(Test2); expect(wrap(res2[0].test).isInitialized()).toBe(false); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e4`.`id` as `test_id` ' + + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e4`.`id` as `test_id` ' + 'from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `book2` as `e2` on `e1`.`favourite_book_uuid_pk` = `e2`.`uuid_pk` ' + @@ -1618,7 +1618,7 @@ describe('EntityManagerMySql', () => { expect(res3[0].test).toBeInstanceOf(Test2); expect(wrap(res3[0].test).isInitialized()).toBe(false); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e2`.`id` as `test_id` ' + + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e2`.`id` as `test_id` ' + 'from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `test2` as `e2` on `e0`.`uuid_pk` = `e2`.`book_uuid_pk` ' + @@ -1631,7 +1631,7 @@ describe('EntityManagerMySql', () => { expect(res4[0].test).toBeInstanceOf(Test2); expect(wrap(res4[0].test).isInitialized()).toBe(false); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e4`.`id` as `test_id` ' + + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e4`.`id` as `test_id` ' + 'from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `book2` as `e2` on `e1`.`favourite_book_uuid_pk` = `e2`.`uuid_pk` ' + @@ -1815,8 +1815,8 @@ describe('EntityManagerMySql', () => { const res1 = await orm.em.find(Book2, { publisher: { $ne: null } }, { schema: 'mikro_orm_test_schema_2' }); const res2 = await orm.em.find(Book2, { publisher: { $ne: null } }); - expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e1`.`id` as `test_id` from `mikro_orm_test_schema_2`.`book2` as `e0` left join `mikro_orm_test_schema_2`.`test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e0`.`publisher_id` is not null'); - expect(mock.mock.calls[1][0]).toMatch('select `e0`.*, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e0`.`publisher_id` is not null'); + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e1`.`id` as `test_id` from `mikro_orm_test_schema_2`.`book2` as `e0` left join `mikro_orm_test_schema_2`.`test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e0`.`publisher_id` is not null'); + expect(mock.mock.calls[1][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on 
`e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e0`.`publisher_id` is not null'); expect(res1.length).toBe(0); expect(res2.length).toBe(1); }); @@ -2033,6 +2033,27 @@ describe('EntityManagerMySql', () => { expect(res3.map(a => a.name)).toEqual(['God 01', 'God 02', 'God 03', 'God 04', 'God 05']); }); + test('formulas', async () => { + const god = new Author2('God', '[email protected]'); + const bible = new Book2('Bible', god); + bible.price = 1000; + await orm.em.persistAndFlush(bible); + orm.em.clear(); + + const mock = jest.fn(); + const logger = new Logger(mock, true); + Object.assign(orm.em.config, { logger }); + + const b = await orm.em.findOneOrFail(Book2, { author: { name: 'God' } }); + expect(b.price).toBe(1000); + expect(b.priceTaxed).toBe(1190); + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e0`.price * 1.19 as `price_taxed`, `e2`.`id` as `test_id` ' + + 'from `book2` as `e0` ' + + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + + 'left join `test2` as `e2` on `e0`.`uuid_pk` = `e2`.`book_uuid_pk` ' + + 'where `e1`.`name` = ? limit ?'); + }); + test('exceptions', async () => { await orm.em.nativeInsert(Author2, { name: 'author', email: 'email' }); await expect(orm.em.nativeInsert(Author2, { name: 'author', email: 'email' })).rejects.toThrow(UniqueConstraintViolationException); diff --git a/EntityManager.postgre.test.ts b/EntityManager.postgre.test.ts index 8d0ec22..57c900d 100644 --- a/EntityManager.postgre.test.ts +++ b/EntityManager.postgre.test.ts @@ -642,7 +642,7 @@ describe('EntityManagerPostgre', () => { const b1 = await orm.em.findOneOrFail(FooBaz2, { id: baz.id }, ['bar']); expect(mock.mock.calls[1][0]).toMatch('select "e0".*, "e1"."id" as "bar_id" from "foo_baz2" as "e0" left join "foo_bar2" as "e1" on "e0"."id" = "e1"."baz_id" where "e0"."id" = $1 limit $2'); - expect(mock.mock.calls[2][0]).toMatch('select "e0".* from "foo_bar2" as "e0" where "e0"."baz_id" in ($1) order by "e0"."baz_id" asc'); + expect(mock.mock.calls[2][0]).toMatch('select "e0".*, (select 123) as "random" from "foo_bar2" as "e0" where "e0"."baz_id" in ($1) order by "e0"."baz_id" asc'); expect(b1.bar).toBeInstanceOf(FooBar2); expect(b1.bar!.id).toBe(bar.id); expect(wrap(b1).toJSON()).toMatchObject({ bar: wrap(bar).toJSON() }); @@ -650,7 +650,7 @@ describe('EntityManagerPostgre', () => { const b2 = await orm.em.findOneOrFail(FooBaz2, { bar: bar.id }, ['bar']); expect(mock.mock.calls[3][0]).toMatch('select "e0".*, "e1"."id" as "bar_id" from "foo_baz2" as "e0" left join "foo_bar2" as "e1" on "e0"."id" = "e1"."baz_id" where "e1"."id" = $1 limit $2'); - expect(mock.mock.calls[4][0]).toMatch('select "e0".* from "foo_bar2" as "e0" where "e0"."baz_id" in ($1) order by "e0"."baz_id" asc'); + expect(mock.mock.calls[4][0]).toMatch('select "e0".*, (select 123) as "random" from "foo_bar2" as "e0" where "e0"."baz_id" in ($1) order by "e0"."baz_id" asc'); expect(b2.bar).toBeInstanceOf(FooBar2); expect(b2.bar!.id).toBe(bar.id); expect(wrap(b2).toJSON()).toMatchObject({ bar: wrap(bar).toJSON() }); @@ -1126,7 +1126,7 @@ describe('EntityManagerPostgre', () => { expect(res1).toHaveLength(3); expect(res1[0].test).toBeUndefined(); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select "e0".* ' + + expect(mock.mock.calls[0][0]).toMatch('select "e0".*, "e0".price * 1.19 as "price_taxed" ' + 'from "book2" as "e0" ' + 'left join "author2" as "e1" on "e0"."author_id" = "e1"."id" ' + 'where "e1"."name" = $1'); @@ -1136,7 +1136,7 @@ describe('EntityManagerPostgre', () => { const 
res2 = await orm.em.find(Book2, { author: { favouriteBook: { author: { name: 'Jon Snow' } } } }); expect(res2).toHaveLength(3); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select "e0".* ' + + expect(mock.mock.calls[0][0]).toMatch('select "e0".*, "e0".price * 1.19 as "price_taxed" ' + 'from "book2" as "e0" ' + 'left join "author2" as "e1" on "e0"."author_id" = "e1"."id" ' + 'left join "book2" as "e2" on "e1"."favourite_book_uuid_pk" = "e2"."uuid_pk" ' + @@ -1148,7 +1148,7 @@ describe('EntityManagerPostgre', () => { const res3 = await orm.em.find(Book2, { author: { favouriteBook: book3 } }); expect(res3).toHaveLength(3); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select "e0".* ' + + expect(mock.mock.calls[0][0]).toMatch('select "e0".*, "e0".price * 1.19 as "price_taxed" ' + 'from "book2" as "e0" ' + 'left join "author2" as "e1" on "e0"."author_id" = "e1"."id" ' + 'where "e1"."favourite_book_uuid_pk" = $1'); @@ -1158,7 +1158,7 @@ describe('EntityManagerPostgre', () => { const res4 = await orm.em.find(Book2, { author: { favouriteBook: { $or: [{ author: { name: 'Jon Snow' } }] } } }); expect(res4).toHaveLength(3); expect(mock.mock.calls.length).toBe(1); - expect(mock.mock.calls[0][0]).toMatch('select "e0".* ' + + expect(mock.mock.calls[0][0]).toMatch('select "e0".*, "e0".price * 1.19 as "price_taxed" ' + 'from "book2" as "e0" ' + 'left join "author2" as "e1" on "e0"."author_id" = "e1"."id" ' + 'left join "book2" as "e2" on "e1"."favourite_book_uuid_pk" = "e2"."uuid_pk" ' + diff --git a/QueryBuilder.test.ts b/QueryBuilder.test.ts index f9ff454..6f925d8 100644 --- a/QueryBuilder.test.ts +++ b/QueryBuilder.test.ts @@ -159,7 +159,7 @@ describe('QueryBuilder', () => { .leftJoin('fb.baz', 'fz') .where({ 'fz.name': 'test 123' }) .limit(2, 1); - const sql = 'select `fb`.*, `fz`.* from `foo_bar2` as `fb` ' + + const sql = 'select `fb`.*, `fz`.*, (select 123) as `random` from `foo_bar2` as `fb` ' + 'left join `foo_baz2` as `fz` on `fb`.`baz_id` = `fz`.`id` ' + 'where `fz`.`name` = ? ' + 'limit ? offset ?'; @@ -187,7 +187,7 @@ describe('QueryBuilder', () => { .leftJoin('b.author', 'a') .where({ 'a.name': 'test 123' }) .limit(2, 1); - const sql = 'select `a`.*, `b`.* from `book2` as `b` ' + + const sql = 'select `a`.*, `b`.*, `b`.price * 1.19 as `price_taxed` from `book2` as `b` ' + 'left join `author2` as `a` on `b`.`author_id` = `a`.`id` ' + 'where `a`.`name` = ? ' + 'limit ? offset ?'; @@ -241,7 +241,7 @@ describe('QueryBuilder', () => { .leftJoin('b.tags', 't') .where({ 't.name': 'test 123' }) .limit(2, 1); - const sql = 'select `b`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` from `book2` as `b` ' + + const sql = 'select `b`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id`, `b`.price * 1.19 as `price_taxed` from `book2` as `b` ' + 'left join `book2_tags` as `e1` on `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + 'left join `book_tag2` as `t` on `e1`.`book_tag2_id` = `t`.`id` ' + 'where `t`.`name` = ? 
' + @@ -294,12 +294,12 @@ describe('QueryBuilder', () => { test('select with custom expression', async () => { const qb1 = orm.em.createQueryBuilder(Book2); qb1.select('*').where({ 'json_contains(`e0`.`meta`, ?)': [{ foo: 'bar' }] }); - expect(qb1.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where json_contains(`e0`.`meta`, ?)'); + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where json_contains(`e0`.`meta`, ?)'); expect(qb1.getParams()).toEqual(['{"foo":"bar"}']); const qb2 = orm.em.createQueryBuilder(Book2); qb2.select('*').where({ 'json_contains(`e0`.`meta`, ?) = ?': [{ foo: 'baz' }, false] }); - expect(qb2.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where json_contains(`e0`.`meta`, ?) = ?'); + expect(qb2.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where json_contains(`e0`.`meta`, ?) = ?'); expect(qb2.getParams()).toEqual(['{"foo":"baz"}', false]); }); @@ -391,7 +391,7 @@ describe('QueryBuilder', () => { test('select by 1:1', async () => { const qb = orm.em.createQueryBuilder(FooBar2); qb.select('*').where({ baz: 123 }); - expect(qb.getQuery()).toEqual('select `e0`.* from `foo_bar2` as `e0` where `e0`.`baz_id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, (select 123) as `random` from `foo_bar2` as `e0` where `e0`.`baz_id` = ?'); expect(qb.getParams()).toEqual([123]); }); @@ -419,28 +419,28 @@ describe('QueryBuilder', () => { test('select by 1:1 inversed (uuid pk)', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').where({ test: 123 }); - expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id`, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:1 inversed with populate (uuid pk)', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').where({ test: 123 }).populate(['test']); - expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id`, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:1 inversed with populate() before where() (uuid pk)', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').populate(['test']).where({ test: 123 }); - expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id`, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by m:n', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').where({ tags: 123 }); - expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` from `book2` as `e0` ' + + 
expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id`, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` ' + 'left join `book2_tags` as `e1` on `e0`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + 'where `e1`.`book_tag2_id` = ?'); expect(qb.getParams()).toEqual([123]); @@ -699,12 +699,12 @@ describe('QueryBuilder', () => { test('select where (not) null via $eq/$ne operators', async () => { const qb1 = orm.em.createQueryBuilder(Book2); qb1.select('*').where({ publisher: { $ne: null } }); - expect(qb1.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`publisher_id` is not null'); + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where `e0`.`publisher_id` is not null'); expect(qb1.getParams()).toEqual([]); const qb2 = orm.em.createQueryBuilder(Book2); qb2.select('*').where({ publisher: { $eq: null } }); - expect(qb2.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`publisher_id` is null'); + expect(qb2.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where `e0`.`publisher_id` is null'); expect(qb2.getParams()).toEqual([]); }); @@ -737,15 +737,15 @@ describe('QueryBuilder', () => { await orm.em.transactional(async em => { const qb2 = em.createQueryBuilder(Book2); qb2.select('*').where({ title: 'test 123' }).setLockMode(LockMode.NONE); - expect(qb2.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`title` = ?'); + expect(qb2.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where `e0`.`title` = ?'); const qb3 = em.createQueryBuilder(Book2); qb3.select('*').where({ title: 'test 123' }).setLockMode(LockMode.PESSIMISTIC_READ); - expect(qb3.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`title` = ? lock in share mode'); + expect(qb3.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where `e0`.`title` = ? lock in share mode'); const qb4 = em.createQueryBuilder(Book2); qb4.select('*').where({ title: 'test 123' }).setLockMode(LockMode.PESSIMISTIC_WRITE); - expect(qb4.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`title` = ? for update'); + expect(qb4.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` where `e0`.`title` = ? for update'); }); }); @@ -753,7 +753,7 @@ describe('QueryBuilder', () => { // m:1 const qb1 = orm.em.createQueryBuilder(Book2); qb1.select('*').where({ author: { name: 'Jon Snow', termsAccepted: true } }); - expect(qb1.getQuery()).toEqual('select `e0`.* from `book2` as `e0` left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` where `e1`.`name` = ? and `e1`.`terms_accepted` = ?'); + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` where `e1`.`name` = ? 
and `e1`.`terms_accepted` = ?'); expect(qb1.getParams()).toEqual(['Jon Snow', true]); // 1:m @@ -783,7 +783,7 @@ describe('QueryBuilder', () => { // m:n owner pivot join const qb5 = orm.em.createQueryBuilder(Book2); qb5.select('*').where({ tags: [1, 2, 3] }); - expect(qb5.getQuery()).toEqual('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` ' + + expect(qb5.getQuery()).toEqual('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id`, `e0`.price * 1.19 as `price_taxed` ' + 'from `book2` as `e0` ' + 'left join `book2_tags` as `e1` on `e0`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + 'where `e1`.`book_tag2_id` in (?, ?, ?)'); @@ -792,7 +792,7 @@ describe('QueryBuilder', () => { // m:n owner const qb6 = orm.em.createQueryBuilder(Book2); qb6.select('*').where({ tags: { name: 'Tag 3' } }); - expect(qb6.getQuery()).toEqual('select `e0`.* ' + + expect(qb6.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` ' + 'from `book2` as `e0` ' + 'left join `book2_tags` as `e2` on `e0`.`uuid_pk` = `e2`.`book2_uuid_pk` ' + 'left join `book_tag2` as `e1` on `e2`.`book_tag2_id` = `e1`.`id` ' + @@ -821,7 +821,7 @@ describe('QueryBuilder', () => { // 1:1 -> 1:1 self-reference -> 1:1 const qb9 = orm.em.createQueryBuilder(FooBar2); qb9.select('*').where({ fooBar: { baz: { name: 'Foo Baz' } } }); - expect(qb9.getQuery()).toEqual('select `e0`.* from `foo_bar2` as `e0` ' + + expect(qb9.getQuery()).toEqual('select `e0`.*, (select 123) as `random` from `foo_bar2` as `e0` ' + 'left join `foo_bar2` as `e1` on `e0`.`foo_bar_id` = `e1`.`id` ' + 'left join `foo_baz2` as `e2` on `e1`.`baz_id` = `e2`.`id` ' + 'where `e2`.`name` = ?'); @@ -830,7 +830,7 @@ describe('QueryBuilder', () => { // m:1 -> m:1 -> m:1 self-reference const qb10 = orm.em.createQueryBuilder(Book2); qb10.select('*').where({ author: { favouriteBook: { author: { name: 'Jon Snow' } } } }); - expect(qb10.getQuery()).toEqual('select `e0`.* from `book2` as `e0` ' + + expect(qb10.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `book2` as `e2` on `e1`.`favourite_book_uuid_pk` = `e2`.`uuid_pk` ' + 'left join `author2` as `e3` on `e2`.`author_id` = `e3`.`id` ' + @@ -849,7 +849,7 @@ describe('QueryBuilder', () => { test('select with deep where condition with self-reference', async () => { const qb1 = orm.em.createQueryBuilder(Book2); qb1.select('*').where({ author: { favouriteAuthor: { name: 'Jon Snow', termsAccepted: true } } }); - expect(qb1.getQuery()).toEqual('select `e0`.* from `book2` as `e0` ' + + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `author2` as `e2` on `e1`.`favourite_author_id` = `e2`.`id` ' + 'where `e2`.`name` = ? 
and `e2`.`terms_accepted` = ?'); @@ -859,7 +859,7 @@ describe('QueryBuilder', () => { test('select with deep order by', async () => { const qb1 = orm.em.createQueryBuilder(Book2); qb1.select('*').orderBy({ author: { name: QueryOrder.DESC } }); - expect(qb1.getQuery()).toEqual('select `e0`.* from `book2` as `e0` left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` order by `e1`.`name` desc'); + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` order by `e1`.`name` desc'); const qb2 = orm.em.createQueryBuilder(Author2); qb2.select('*').orderBy({ books: { title: QueryOrder.ASC } }); @@ -881,7 +881,7 @@ describe('QueryBuilder', () => { const qb5 = orm.em.createQueryBuilder(Book2); qb5.select('*').orderBy({ tags: { name: QueryOrder.DESC } }); - expect(qb5.getQuery()).toEqual('select `e0`.* ' + + expect(qb5.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` ' + 'from `book2` as `e0` ' + 'left join `book2_tags` as `e2` on `e0`.`uuid_pk` = `e2`.`book2_uuid_pk` ' + 'left join `book_tag2` as `e1` on `e2`.`book_tag2_id` = `e1`.`id` ' + @@ -899,7 +899,7 @@ describe('QueryBuilder', () => { test('select with populate and join of m:n', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').populate(['tags']).leftJoin('tags', 't'); - expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` ' + + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id`, `e0`.price * 1.19 as `price_taxed` ' + 'from `book2` as `e0` ' + 'left join `book2_tags` as `e1` on `e0`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + 'left join `book_tag2` as `t` on `e1`.`book_tag2_id` = `t`.`id`'); @@ -908,7 +908,7 @@ describe('QueryBuilder', () => { test('select with deep where and deep order by', async () => { const qb1 = orm.em.createQueryBuilder(Book2); qb1.select('*').where({ author: { name: 'Jon Snow' } }).orderBy({ author: { name: QueryOrder.DESC } }); - expect(qb1.getQuery()).toEqual('select `e0`.* from `book2` as `e0` left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` where `e1`.`name` = ? order by `e1`.`name` desc'); + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` where `e1`.`name` = ? 
order by `e1`.`name` desc'); expect(qb1.getParams()).toEqual(['Jon Snow']); const qb2 = orm.em.createQueryBuilder(Author2); @@ -935,7 +935,7 @@ describe('QueryBuilder', () => { const qb5 = orm.em.createQueryBuilder(Book2); qb5.select('*').where({ tags: { name: 'Tag 3' } }).orderBy({ tags: { name: QueryOrder.DESC } }); - expect(qb5.getQuery()).toEqual('select `e0`.* ' + + expect(qb5.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` ' + 'from `book2` as `e0` ' + 'left join `book2_tags` as `e2` on `e0`.`uuid_pk` = `e2`.`book2_uuid_pk` ' + 'left join `book_tag2` as `e1` on `e2`.`book_tag2_id` = `e1`.`id` ' + @@ -946,14 +946,14 @@ describe('QueryBuilder', () => { test('select with deep where condition with operators', async () => { const qb0 = orm.em.createQueryBuilder(Book2); qb0.select('*').where({ author: { $or: [{ name: 'Jon Snow 1' }, { email: /^snow@/ }] } }); - expect(qb0.getQuery()).toEqual('select `e0`.* from `book2` as `e0` ' + + expect(qb0.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'where (`e1`.`name` = ? or `e1`.`email` like ?)'); expect(qb0.getParams()).toEqual(['Jon Snow 1', 'snow@%']); const qb1 = orm.em.createQueryBuilder(Book2); qb1.select('*').where({ $or: [{ author: { name: 'Jon Snow 1', termsAccepted: true } }, { author: { name: 'Jon Snow 2' } }] }); - expect(qb1.getQuery()).toEqual('select `e0`.* ' + + expect(qb1.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` ' + 'from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'where ((`e1`.`name` = ? and `e1`.`terms_accepted` = ?) or `e1`.`name` = ?)'); @@ -961,7 +961,7 @@ describe('QueryBuilder', () => { const qb2 = orm.em.createQueryBuilder(Book2); qb2.select('*').where({ $or: [{ author: { $or: [{ name: 'Jon Snow 1' }, { email: /^snow@/ }] } }, { publisher: { name: 'My Publisher' } }] }); - expect(qb2.getQuery()).toEqual('select `e0`.* from `book2` as `e0` ' + + expect(qb2.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `publisher2` as `e2` on `e0`.`publisher_id` = `e2`.`id` ' + 'where ((`e1`.`name` = ? or `e1`.`email` like ?) or `e2`.`name` = ?)'); @@ -969,7 +969,7 @@ describe('QueryBuilder', () => { const qb3 = orm.em.createQueryBuilder(Book2); qb3.select('*').where({ $or: [{ author: { $or: [{ name: { $in: ['Jon Snow 1', 'Jon Snow 2'] } }, { email: /^snow@/ }] } }, { publisher: { name: 'My Publisher' } }] }); - expect(qb3.getQuery()).toEqual('select `e0`.* from `book2` as `e0` ' + + expect(qb3.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `publisher2` as `e2` on `e0`.`publisher_id` = `e2`.`id` ' + 'where ((`e1`.`name` in (?, ?) or `e1`.`email` like ?) 
or `e2`.`name` = ?)'); @@ -977,7 +977,7 @@ describe('QueryBuilder', () => { const qb4 = orm.em.createQueryBuilder(Book2); qb4.select('*').where({ $or: [{ author: { $or: [{ $not: { name: 'Jon Snow 1' } }, { email: /^snow@/ }] } }, { publisher: { name: 'My Publisher' } }] }); - expect(qb4.getQuery()).toEqual('select `e0`.* from `book2` as `e0` ' + + expect(qb4.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` from `book2` as `e0` ' + 'left join `author2` as `e1` on `e0`.`author_id` = `e1`.`id` ' + 'left join `publisher2` as `e2` on `e0`.`publisher_id` = `e2`.`id` ' + 'where ((not (`e1`.`name` = ?) or `e1`.`email` like ?) or `e2`.`name` = ?)'); @@ -1001,7 +1001,7 @@ describe('QueryBuilder', () => { const qb7 = orm.em.createQueryBuilder(Book2); qb7.select('*').where({ tags: { name: { $in: ['Tag 1', 'Tag 2'] } } }); - expect(qb7.getQuery()).toEqual('select `e0`.* ' + + expect(qb7.getQuery()).toEqual('select `e0`.*, `e0`.price * 1.19 as `price_taxed` ' + 'from `book2` as `e0` ' + 'left join `book2_tags` as `e2` on `e0`.`uuid_pk` = `e2`.`book2_uuid_pk` ' + 'left join `book_tag2` as `e1` on `e2`.`book_tag2_id` = `e1`.`id` ' + @@ -1227,7 +1227,7 @@ describe('QueryBuilder', () => { }).orderBy({ author: { email: 'ASC' }, }); - expect(qb1.getQuery()).toEqual('select `a`.* from `book2` as `a` ' + + expect(qb1.getQuery()).toEqual('select `a`.*, `a`.price * 1.19 as `price_taxed` from `book2` as `a` ' + 'left join `author2` as `e1` on `a`.`author_id` = `e1`.`id` ' + 'left join `publisher2` as `e2` on `a`.`publisher_id` = `e2`.`id` ' + 'where (`e1`.`name` = ? or `e2`.`name` = ?) ' + diff --git a/Book2.ts b/Book2.ts index 9b58128..f4106c5 100644 --- a/Book2.ts +++ b/Book2.ts @@ -1,5 +1,5 @@ import { v4 } from 'uuid'; -import { Cascade, Collection, Entity, IdentifiedReference, ManyToMany, ManyToOne, OneToOne, PrimaryKey, Property, QueryOrder } from '@mikro-orm/core'; +import { Cascade, Collection, Entity, Formula, IdentifiedReference, ManyToMany, ManyToOne, OneToOne, PrimaryKey, Property, QueryOrder } from '@mikro-orm/core'; import { Publisher2 } from './Publisher2'; import { Author2 } from './Author2'; import { BookTag2 } from './BookTag2'; @@ -23,6 +23,9 @@ export class Book2 { @Property({ type: 'float', nullable: true }) price?: number; + @Formula(alias => `${alias}.price * 1.19`) + priceTaxed?: number; + @Property({ type: 'double', nullable: true }) double?: number; diff --git a/FooBar2.ts b/FooBar2.ts index 966e4e4..902f3df 100644 --- a/FooBar2.ts +++ b/FooBar2.ts @@ -1,4 +1,4 @@ -import { Entity, OneToOne, PrimaryKey, Property } from '@mikro-orm/core'; +import { Entity, Formula, OneToOne, PrimaryKey, Property } from '@mikro-orm/core'; import { BaseEntity22 } from './BaseEntity22'; import { FooBaz2 } from './FooBaz2'; @@ -20,6 +20,9 @@ export class FooBar2 extends BaseEntity22 { @Property({ version: true, length: 0 }) version!: Date; + @Formula(`(select 123)`) + random?: number; + static create(name: string) { const bar = new FooBar2(); bar.name = name;
|
|
feat(clickhouse): properly support native boolean types
|
31cc7ba3a3e542cc626804c4c3eb6d67fa357ba8
|
feat
|
https://github.com/rohankumardubey/ibis/commit/31cc7ba3a3e542cc626804c4c3eb6d67fa357ba8
|
properly support native boolean types
|
diff --git a/__init__.py b/__init__.py index 2898630..a018412 100644 --- a/__init__.py +++ b/__init__.py @@ -81,9 +81,12 @@ class Backend(BaseBackend): ---------- temp_db : str Database to use for temporary objects. + bool_type : str + Type to use for boolean columns """ temp_db: str = "__ibis_tmp" + bool_type: str = "Boolean" def __init__(self, *args, external_tables=None, **kwargs): super().__init__(*args, **kwargs) diff --git a/datatypes.py b/datatypes.py index 794b96e..c453171 100644 --- a/datatypes.py +++ b/datatypes.py @@ -4,6 +4,7 @@ import functools import parsy +import ibis import ibis.expr.datatypes as dt from ibis.common.parsing import ( COMMA, @@ -19,6 +20,10 @@ from ibis.common.parsing import ( ) +def _bool_type(): + return getattr(getattr(ibis.options, "clickhouse", None), "bool_type", "Boolean") + + def parse(text: str) -> dt.DataType: @parsy.generate def datetime(): @@ -35,8 +40,9 @@ def parse(text: str) -> dt.DataType: | spaceless_string("smallint", "int16", "int2").result(dt.Int16(nullable=False)) | spaceless_string("date32", "date").result(dt.Date(nullable=False)) | spaceless_string("time").result(dt.Time(nullable=False)) - | spaceless_string("tinyint", "int8", "int1", "boolean", "bool").result( - dt.Int8(nullable=False) + | spaceless_string("tinyint", "int8", "int1").result(dt.Int8(nullable=False)) + | spaceless_string("boolean", "bool").result( + getattr(dt, _bool_type())(nullable=False) ) | spaceless_string("integer", "int32", "int4", "int").result( dt.Int32(nullable=False) @@ -223,6 +229,11 @@ def _(ty: dt.DataType) -> str: return type(ty).__name__.capitalize() +@serialize_raw.register(dt.Boolean) +def _(_: dt.Boolean) -> str: + return _bool_type() + + @serialize_raw.register(dt.Array) def _(ty: dt.Array) -> str: return f"Array({serialize(ty.value_type)})" diff --git a/conftest.py b/conftest.py index 3d0e293..8ba2749 100644 --- a/conftest.py +++ b/conftest.py @@ -26,7 +26,7 @@ class TestConf(BackendTest, RoundHalfToEven): returned_timestamp_unit = 's' supports_arrays = False supports_arrays_outside_of_select = supports_arrays - bool_is_int = True + native_bool = False supports_structs = False def __init__(self, data_directory: Path) -> None: diff --git a/test_operators.py b/test_operators.py index 08e176f..322e3b6 100644 --- a/test_operators.py +++ b/test_operators.py @@ -5,6 +5,7 @@ import numpy as np import pandas as pd import pandas.testing as tm import pytest +from pytest import param import ibis import ibis.expr.datatypes as dt @@ -156,11 +157,17 @@ def test_field_in_literals(con, alltypes, translate, container): assert len(con.execute(expr)) [email protected]('column', ['int_col', 'float_col', 'bool_col']) -def test_negate(con, alltypes, translate, column): - # clickhouse represent boolean as UInt8 [email protected]( + ("column", "operator"), + [ + param("int_col", "-", id="int_col"), + param("float_col", "-", id="float_col"), + param("bool_col", "NOT ", id="bool_col"), + ], +) +def test_negate(con, alltypes, translate, column, operator): expr = -alltypes[column] - assert translate(expr.op()) == f'-{column}' + assert translate(expr.op()) == f"{operator}{column}" assert len(con.execute(expr)) diff --git a/base.py b/base.py index 9f6f962..17c327b 100644 --- a/base.py +++ b/base.py @@ -72,7 +72,7 @@ class BackendTest(abc.ABC): returned_timestamp_unit = 'us' supported_to_timestamp_units = {'s', 'ms', 'us'} supports_floating_modulus = True - bool_is_int = False + native_bool = True supports_structs = True supports_json = True reduction_tolerance = 1e-7 @@ -163,7 
+163,7 @@ class BackendTest(abc.ABC): @property def functional_alltypes(self) -> ir.Table: t = self.connection.table('functional_alltypes') - if self.bool_is_int: + if not self.native_bool: return t.mutate(bool_col=t.bool_col == 1) return t
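A minimal standalone sketch of the option-driven lookup introduced above. `options` here is a `SimpleNamespace` standing in for `ibis.options`, so everything outside the diff itself is illustrative, not the real ibis API:

```python
from types import SimpleNamespace

# Stand-in for ibis.options; the real backend reads ibis.options.clickhouse.
options = SimpleNamespace(clickhouse=SimpleNamespace(bool_type="Boolean"))


def _bool_type() -> str:
    # Same getattr chain as the diff: fall back to "Boolean" when the
    # clickhouse namespace or its bool_type attribute is absent.
    return getattr(getattr(options, "clickhouse", None), "bool_type", "Boolean")


def parse_scalar(text: str) -> str:
    # Booleans now resolve through _bool_type() instead of folding into Int8.
    lowered = text.lower()
    if lowered in ("boolean", "bool"):
        return _bool_type()
    if lowered in ("tinyint", "int8", "int1"):
        return "Int8"
    raise NotImplementedError(f"unsupported type: {text}")


assert parse_scalar("Bool") == "Boolean"
options.clickhouse.bool_type = "Int8"  # opt back into the legacy mapping
assert parse_scalar("Bool") == "Int8"
```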
|
|
build: update version (nightly.3)
|
ca140680db879111a623642437a12d162e1c628a
|
build
|
https://github.com/erg-lang/erg/commit/ca140680db879111a623642437a12d162e1c628a
|
update version (nightly.3)
|
diff --git a/Cargo.lock b/Cargo.lock index 66ed0c7..73fd754 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -57,12 +57,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "cc" -version = "1.0.83" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" -dependencies = [ - "libc", -] +checksum = "a0ba8f7aaa012f30d5b2861462f6708eccd49c3c39863fe083a308035f63d723" [[package]] name = "cfg-if" @@ -97,7 +94,7 @@ dependencies = [ [[package]] name = "els" -version = "0.1.44-nightly.2" +version = "0.1.44-nightly.3" dependencies = [ "erg_common", "erg_compiler", @@ -110,7 +107,7 @@ dependencies = [ [[package]] name = "erg" -version = "0.6.32-nightly.2" +version = "0.6.32-nightly.3" dependencies = [ "els", "erg_common", @@ -120,7 +117,7 @@ dependencies = [ [[package]] name = "erg_common" -version = "0.6.32-nightly.2" +version = "0.6.32-nightly.3" dependencies = [ "backtrace-on-stack-overflow", "crossterm", @@ -132,7 +129,7 @@ dependencies = [ [[package]] name = "erg_compiler" -version = "0.6.32-nightly.2" +version = "0.6.32-nightly.3" dependencies = [ "erg_common", "erg_parser", @@ -141,7 +138,7 @@ dependencies = [ [[package]] name = "erg_parser" -version = "0.6.32-nightly.2" +version = "0.6.32-nightly.3" dependencies = [ "erg_common", "erg_proc_macros", @@ -151,7 +148,7 @@ dependencies = [ [[package]] name = "erg_proc_macros" -version = "0.6.32-nightly.2" +version = "0.6.32-nightly.3" dependencies = [ "quote", "syn 1.0.109", @@ -218,9 +215,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "lsp-types" @@ -270,9 +267,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", @@ -348,6 +345,12 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + [[package]] name = "proc-macro2" version = "1.0.78" @@ -359,15 +362,16 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0" +checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" dependencies = [ "cfg-if", "indoc", "libc", "memoffset 0.9.0", "parking_lot", + "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", @@ -376,9 +380,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be" +checksum = 
"deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" dependencies = [ "once_cell", "target-lexicon", @@ -386,9 +390,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1" +checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" dependencies = [ "libc", "pyo3-build-config", @@ -396,26 +400,27 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3" +checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.49", + "syn 2.0.52", ] [[package]] name = "pyo3-macros-backend" -version = "0.20.2" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f" +checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" dependencies = [ "heck", "proc-macro2", + "pyo3-build-config", "quote", - "syn 2.0.49", + "syn 2.0.52", ] [[package]] @@ -444,9 +449,9 @@ checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" [[package]] name = "scopeguard" @@ -456,29 +461,29 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.52", ] [[package]] name = "serde_json" -version = "1.0.113" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", @@ -493,7 +498,7 @@ checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.49", + "syn 2.0.52", ] [[package]] @@ -545,9 +550,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.49" +version = "2.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" dependencies = [ "proc-macro2", "quote", @@ -556,15 +561,15 @@ dependencies = [ [[package]] name = "target-lexicon" -version = "0.12.13" +version = 
"0.12.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69758bda2e78f098e4ccb393021a0963bb3442eac05f135c30f61b7370bbafae" +checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f" [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", @@ -599,9 +604,9 @@ checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] diff --git a/Cargo.toml b/Cargo.toml index 437b9e9..c40b6b5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,7 +2,7 @@ name = "els" description = "An Erg compiler frontend for IDEs, implements LSP." documentation = "http://docs.rs/els" -version = "0.1.44-nightly.2" +version = "0.1.44-nightly.3" authors.workspace = true license.workspace = true edition.workspace = true
|
|
feat(druid): add re_search support
|
946202b8058ee8df83a8cb4ac326d4c9c94ff1a5
|
feat
|
https://github.com/ibis-project/ibis/commit/946202b8058ee8df83a8cb4ac326d4c9c94ff1a5
|
add re_search support
|
diff --git a/registry.py b/registry.py index bd7efa6..fdbd1e3 100644 --- a/registry.py +++ b/registry.py @@ -43,6 +43,7 @@ operation_registry.update( ops.Log10: fixed_arity(sa.func.log10, 1), ops.Sign: _sign, ops.StringJoin: _join, + ops.RegexSearch: fixed_arity(sa.func.regexp_like, 2), } ) diff --git a/test_string.py b/test_string.py index ca1684f..aaf9a7e 100644 --- a/test_string.py +++ b/test_string.py @@ -171,8 +171,7 @@ def test_string_col_is_unicode(alltypes, df): id='re_search', marks=[ pytest.mark.notimpl( - ["datafusion", "mssql", "druid"], - raises=com.OperationNotDefinedError, + ["datafusion", "mssql"], raises=com.OperationNotDefinedError ), pytest.mark.notimpl(["impala"], raises=AssertionError), ], @@ -183,10 +182,15 @@ def test_string_col_is_unicode(alltypes, df): id='re_search_posix', marks=[ pytest.mark.notimpl( - ["datafusion", "mssql", "druid"], + ["datafusion", "mssql"], raises=com.OperationNotDefinedError, ), pytest.mark.broken(["pyspark"], raises=AssertionError), + pytest.mark.never( + ["druid"], + reason="No posix support; regex is interpreted literally", + raises=AssertionError, + ), ], ), param(
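For context, `fixed_arity(sa.func.regexp_like, 2)` simply forwards the column and pattern to Druid's `regexp_like` function. A minimal SQLAlchemy sketch (table and column names invented) of the SQL shape it produces:

```python
import sqlalchemy as sa

# Hypothetical table standing in for an ibis-managed one.
t = sa.table("t", sa.column("string_col"))

# What the new RegexSearch rule boils down to: regexp_like(column, pattern).
stmt = sa.select(sa.func.regexp_like(t.c.string_col, "^foo"))
print(stmt)  # roughly: SELECT regexp_like(t.string_col, :param) ... FROM t
```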
|
|
chore: improve templates and upgrading guide
|
e0f6e628596dcca3da14e31a68d04b7b11316fb2
|
chore
|
https://github.com/Hardeepex/crawlee/commit/e0f6e628596dcca3da14e31a68d04b7b11316fb2
|
improve templates and upgrading guide
|
diff --git a/upgrading_v3.md b/upgrading_v3.md index ebb5913..d66fc57 100644 --- a/upgrading_v3.md +++ b/upgrading_v3.md @@ -20,18 +20,18 @@ Up until version 3 of `apify`, the package contained both scraping related tools The [`crawlee`](https://www.npmjs.com/package/crawlee) package consists of several smaller packages, released separately under `@crawlee` namespace: -- `@crawlee/core`: the base for all the crawler implementations, also contains things like `Request`, `RequestQueue`, `RequestList` or `Dataset` classes -- `@crawlee/basic`: exports `BasicCrawler` -- `@crawlee/http`: exports `HttpCrawler` (which is used for creating `@crawlee/jsdom` and `@crawlee/cheerio`) -- `@crawlee/jsdom`: exports `JSDOMCrawler` -- `@crawlee/cheerio`: exports `CheerioCrawler` -- `@crawlee/browser`: exports `BrowserCrawler` (which is used for creating `@crawlee/playwright` and `@crawlee/puppeteer`) -- `@crawlee/playwright`: exports `PlaywrightCrawler` -- `@crawlee/puppeteer`: exports `PuppeteerCrawler` -- `@crawlee/memory-storage`: `@apify/storage-local` alternative -- `@crawlee/browser-pool`: previously `browser-pool` package -- `@crawlee/utils`: utility methods -- `@crawlee/types`: holds TS interfaces mainly about the `StorageClient` +- [`@crawlee/core`](https://crawlee.dev/api/core): the base for all the crawler implementations, also contains things like `Request`, `RequestQueue`, `RequestList` or `Dataset` classes +- [`@crawlee/cheerio`](https://crawlee.dev/api/cheerio-crawler): exports `CheerioCrawler` +- [`@crawlee/playwright`](https://crawlee.dev/api/playwright-crawler): exports `PlaywrightCrawler` +- [`@crawlee/puppeteer`](https://crawlee.dev/api/puppeteer-crawler): exports `PuppeteerCrawler` +- [`@crawlee/jsdom`](https://crawlee.dev/api/jsdom-crawler): exports `JSDOMCrawler` +- [`@crawlee/basic`](https://crawlee.dev/api/basic-crawler): exports `BasicCrawler` +- [`@crawlee/http`](https://crawlee.dev/api/http-crawler): exports `HttpCrawler` (which is used for creating [`@crawlee/jsdom`](https://crawlee.dev/api/jsdom-crawler) and [`@crawlee/cheerio`](https://crawlee.dev/api/cheerio-crawler)) +- [`@crawlee/browser`](https://crawlee.dev/api/browser-crawler): exports `BrowserCrawler` (which is used for creating [`@crawlee/playwright`](https://crawlee.dev/api/playwright-crawler) and [`@crawlee/puppeteer`](https://crawlee.dev/api/puppeteer-crawler)) +- [`@crawlee/memory-storage`](https://crawlee.dev/api/memory-storage): [`@apify/storage-local`](https://npmjs.com/package/@apify/storage-local) alternative +- [`@crawlee/browser-pool`](https://crawlee.dev/api/browser-pool): previously [`browser-pool`](https://npmjs.com/package/browser-pool) package +- [`@crawlee/utils`](https://crawlee.dev/api/utils): utility methods +- [`@crawlee/types`](https://crawlee.dev/api/types): holds TS interfaces mainly about the [`StorageClient`](https://crawlee.dev/api/core/interface/StorageClient) ### Installing Crawlee diff --git a/manifest.json b/manifest.json index ad89cef..dc0efdc 100644 --- a/manifest.json +++ b/manifest.json @@ -30,7 +30,7 @@ }, { "name": "puppeteer-js", - "description": "PlaywrightCrawler template project [JavaScript]" + "description": "PuppeteerCrawler template project [JavaScript]" } ] } diff --git a/routes.js b/routes.js index ccd702e..121f8ce 100644 --- a/routes.js +++ b/routes.js @@ -5,7 +5,7 @@ export const router = createPuppeteerRouter(); router.addDefaultHandler(async ({ enqueueLinks, log }) => { log.info(`enqueueing new URLs`); await enqueueLinks({ - globs: ['https://crawlee.dev/*'], + globs: 
['https://crawlee.dev/**'], label: 'detail', }); }); diff --git a/routes.ts b/routes.ts index 4f12109..6ae4c62 100644 --- a/routes.ts +++ b/routes.ts @@ -5,7 +5,7 @@ export const router = createPuppeteerRouter(); router.addDefaultHandler(async ({ enqueueLinks, log }) => { log.info(`enqueueing new URLs`); await enqueueLinks({ - globs: ['https://crawlee.dev/*'], + globs: ['https://crawlee.dev/**'], label: 'detail', }); });
|
|
feat: fixed fountain presets
|
3b478673153181396446f510d7ca5ad09abfcd4f
|
feat
|
https://github.com/tsparticles/tsparticles/commit/3b478673153181396446f510d7ca5ad09abfcd4f
|
fixed fountain presets
|
diff --git a/fountain.pug b/fountain.pug index a139c51..fc0bf2e 100644 --- a/fountain.pug +++ b/fountain.pug @@ -20,6 +20,7 @@ html(lang="en") link(rel="stylesheet" href="/stylesheets/presets.css") body + #stats #tsparticles script(src="/preset-fountain/tsparticles.preset.fountain.bundle.min.js") diff --git a/links.pug b/links.pug index 69a0d96..c774a6e 100644 --- a/links.pug +++ b/links.pug @@ -20,6 +20,7 @@ html(lang="en") link(rel="stylesheet" href="/stylesheets/presets.css") body + #stats #tsparticles script(src="/preset-links/tsparticles.preset.links.bundle.min.js") diff --git a/seaAnemone.pug b/seaAnemone.pug index e1a4a2e..6e5ddbf 100644 --- a/seaAnemone.pug +++ b/seaAnemone.pug @@ -20,6 +20,7 @@ html(lang="en") link(rel="stylesheet" href="/stylesheets/presets.css") body + #stats #tsparticles script(src="/preset-sea-anemone/tsparticles.preset.seaAnemone.bundle.min.js") diff --git a/snow.pug b/snow.pug index b73d3e3..f90982c 100644 --- a/snow.pug +++ b/snow.pug @@ -20,6 +20,7 @@ html(lang="en") link(rel="stylesheet" href="/stylesheets/presets.css") body + #stats #tsparticles script(src="/preset-snow/tsparticles.preset.snow.bundle.min.js") diff --git a/stars.pug b/stars.pug index 2a793fb..d8772d2 100644 --- a/stars.pug +++ b/stars.pug @@ -20,6 +20,7 @@ html(lang="en") link(rel="stylesheet" href="/stylesheets/presets.css") body + #stats #tsparticles script(src="/preset-stars/tsparticles.preset.stars.bundle.min.js") diff --git a/triangles.pug b/triangles.pug index ce050de..e68f173 100644 --- a/triangles.pug +++ b/triangles.pug @@ -20,6 +20,7 @@ html(lang="en") link(rel="stylesheet" href="/stylesheets/presets.css") body + #stats #tsparticles script(src="/preset-triangles/tsparticles.preset.triangles.bundle.min.js") diff --git a/package.dist.json b/package.dist.json index fea4064..20afb69 100644 --- a/package.dist.json +++ b/package.dist.json @@ -45,6 +45,12 @@ "module": "index.js", "types": "index.d.ts", "dependencies": { - "tsparticles-engine": "^1.35.2" + "tsparticles-engine": "^1.35.2", + "tsparticles-plugin-emitters": "^1.35.2", + "tsparticles-shape-circle": "^1.20.2", + "tsparticles-updater-color": "^1.35.2", + "tsparticles-updater-opacity": "^1.35.2", + "tsparticles-updater-out-modes": "^1.35.2", + "tsparticles-updater-size": "^1.35.2" } } \\ No newline at end of file diff --git a/package.json b/package.json index bd4d51d..c9ff693 100644 --- a/package.json +++ b/package.json @@ -71,6 +71,12 @@ "webpack-tsparticles-plugin": "^1.0.0" }, "dependencies": { - "tsparticles-engine": "^1.35.2" + "tsparticles-engine": "^1.35.2", + "tsparticles-plugin-emitters": "^1.35.2", + "tsparticles-shape-circle": "^1.20.2", + "tsparticles-updater-color": "^1.35.2", + "tsparticles-updater-opacity": "^1.35.2", + "tsparticles-updater-out-modes": "^1.35.2", + "tsparticles-updater-size": "^1.35.2" } } diff --git a/index.ts b/index.ts index 53980bb..c4f8a0d 100644 --- a/index.ts +++ b/index.ts @@ -1,6 +1,19 @@ import type { Main } from "tsparticles-engine"; import { options } from "./options"; +import { loadCircleShape } from "tsparticles-shape-circle"; +import { loadOpacityUpdater } from "tsparticles-updater-opacity"; +import { loadColorUpdater } from "tsparticles-updater-color"; +import { loadSizeUpdater } from "tsparticles-updater-size"; +import { loadOutModesUpdater } from "tsparticles-updater-out-modes"; +import { loadEmittersPlugin } from "tsparticles-plugin-emitters"; export function loadFountainPreset(tsParticles: Main): void { + loadCircleShape(tsParticles); + loadEmittersPlugin(tsParticles); + 
loadColorUpdater(tsParticles); + loadOpacityUpdater(tsParticles); + loadOutModesUpdater(tsParticles); + loadSizeUpdater(tsParticles); + tsParticles.addPreset("fountain", options); } diff --git a/options.ts b/options.ts index 73f6ac9..9b7cdc4 100644 --- a/options.ts +++ b/options.ts @@ -42,9 +42,6 @@ export const options: ISourceOptions = { }, }, }, - shape: { - type: "circle", - }, opacity: { value: 0.5, },
|
|
feat: Add `Repository::has_object()` as a high-level alternative.
Previously, one would have to call `repo.objects.contains()`, which
is fine, but this method is necessary for symmetry of the API
and one shouldn't have to drop down a level to do this.
This method also handles the empty tree as a special case.
|
787a9aa91c1abaa7572f5d19f8a2acbb7ecc0732
|
feat
|
https://github.com/Byron/gitoxide/commit/787a9aa91c1abaa7572f5d19f8a2acbb7ecc0732
|
Add `Repository::has_object()` as a high-level alternative.
Previously, one would have to call `repo.objects.contains()`, which
is fine, but this method is necessary for symmetry of the API
and one shouldn't have to drop down a level to do this.
This method also handles the empty tree as a special case.
|
diff --git a/object.rs b/object.rs index bdb2ae4..0419eef 100644 --- a/object.rs +++ b/object.rs @@ -68,6 +68,7 @@ fn writes_avoid_io_using_duplicate_check() -> crate::Result { for id in repo.objects.iter()? { let id = id?; + assert!(repo.has_object(id)); let obj = repo.find_object(id)?; let header = repo.find_header(id)?; assert_eq!(obj.kind, header.kind(), "header and object agree"); @@ -156,6 +157,7 @@ mod find { let repo = basic_repo()?; let empty_tree = gix::hash::ObjectId::empty_tree(repo.object_hash()); assert_eq!(repo.find_object(empty_tree)?.into_tree().iter().count(), 0); + assert!(repo.has_object(empty_tree)); assert_eq!( repo.find_header(empty_tree)?, gix_odb::find::Header::Loose {
|
|
test(bigquery): update generated code from sqlglot update
|
db449567711c20e3b4bfc68d5186892bd570f71a
|
test
|
https://github.com/rohankumardubey/ibis/commit/db449567711c20e3b4bfc68d5186892bd570f71a
|
update generated code from sqlglot update
|
diff --git a/out.sql b/out.sql index 3c8d08a..5953ae2 100644 --- a/out.sql +++ b/out.sql @@ -14,4 +14,4 @@ SELECT t0.`month` FROM `ibis-gbq.ibis_gbq_testing`.functional_alltypes AS t0 WHERE - REGEXP_CONTAINS(t0.`string_col`, '0') \\ No newline at end of file + REGEXP_CONTAINS(t0.`string_col`, r'0') \\ No newline at end of file
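The `'0'` to `r'0'` change is a no-op for this particular pattern; raw strings only start to matter once a regex contains backslashes. The analogous Python behaviour, for intuition:

```python
import re

# No escapes involved: both spellings denote the same one-character string.
assert "0" == r"0"

# With backslashes, the raw form avoids doubling every backslash.
assert "\\d" == r"\d"
assert re.search(r"\d", "string_col_0") is not None
```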
|
|
docs(product): add API quotas to Limits page (#6979)
|
f804cc707ce4cff42ee6687a4ffd9d64eb9e7aac
|
docs
|
https://github.com/wzhiqing/cube/commit/f804cc707ce4cff42ee6687a4ffd9d64eb9e7aac
|
add API quotas to Limits page (#6979)
|
diff --git a/limits.mdx b/limits.mdx index e77f56e..423e4d1 100644 --- a/limits.mdx +++ b/limits.mdx @@ -67,6 +67,18 @@ for all deployments within an account. When a threshold is hit, query processing will be stopped. Please [contact support][cube-contact-us] for further assistance. +## Quotas + +The [REST][ref-rest-api] and [GraphQL][ref-gql-api] APIs both have a standard +quota of 100 requests per second per deployment; this can go up to 1000 requests +per second for short bursts of traffic. These limits can be raised on request, +[contact support][cube-contact-us] for more details. + +When the quota is exceeded, the API will return a `429 Too Many Requests` +response. + +[ref-rest-api]: /product/apis-integrations/rest-api +[ref-gql-api]: /product/apis-integrations/graphql-api [ref-deployment-types]: /product/deployment/cloud/deployment-types [ref-pricing]: /product/deployment/cloud/pricing [ref-query-history]: /product/workspace/query-history
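A hedged client-side sketch for the documented `429 Too Many Requests` behaviour; the URL is a placeholder and the exponential backoff policy is an assumption, not something the docs prescribe:

```python
import time

import requests


def get_with_backoff(url: str, attempts: int = 5) -> requests.Response:
    """Retry with exponential backoff while the API answers 429."""
    for attempt in range(attempts):
        resp = requests.get(url)
        if resp.status_code != 429:  # quota not exceeded: hand the response back
            return resp
        time.sleep(2**attempt)  # wait 1s, 2s, 4s, ... before retrying
    raise RuntimeError("rate limit persisted after retries")


# Usage with a placeholder deployment URL:
# get_with_backoff("https://example.cubecloud.dev/cubejs-api/v1/load")
```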
|
|
build: restoring nx cloud
|
430efd45795c37f11338943864b20a967471af6f
|
build
|
https://github.com/tsparticles/tsparticles/commit/430efd45795c37f11338943864b20a967471af6f
|
restoring nx cloud
|
diff --git a/nodejs.yml b/nodejs.yml index a906f20..ae80053 100644 --- a/nodejs.yml +++ b/nodejs.yml @@ -135,7 +135,7 @@ jobs: - run: pnpm run --filter @tsparticles/build build:ci - run: pnpm install --no-frozen-lockfile - run: npx nx-cloud start-ci-run - - run: npx lerna run build:ci #--concurrency 3 + - run: npx lerna run build:ci --concurrency 3 - run: pnpm run prettify:ci:readme - run: npx nx-cloud stop-all-agents - run: echo ${{ github.repository_owner }}
|
|
fix(docs): surround executable code blocks with interactive mode on/off
|
4c660e0aec8a6b369ebd79ae6bc86c246e177c0d
|
fix
|
https://github.com/ibis-project/ibis/commit/4c660e0aec8a6b369ebd79ae6bc86c246e177c0d
|
surround executable code blocks with interactive mode on/off
|
diff --git a/.gitignore b/.gitignore index b34e81c..f36b8fc 100644 --- a/.gitignore +++ b/.gitignore @@ -134,3 +134,4 @@ ibis/examples/descriptions # automatically generated odbc file for ci ci/odbc/odbc.ini +*-citibike-tripdata.tar.xz diff --git a/_renderer.py b/_renderer.py index fb4804b..7dcb4f9 100644 --- a/_renderer.py +++ b/_renderer.py @@ -28,8 +28,6 @@ class Renderer(qd.MdRenderer): lambda line: quartodoc_skip_doctest in line or skip_doctest in line ) - has_executed_chunks = False - for chunk in toolz.partitionby(chunker, lines): first, *rest = chunk @@ -39,11 +37,22 @@ class Renderer(qd.MdRenderer): # check whether to skip execution and if so, render the code # block as `python` (not `{python}`) if it's marked with # skip_doctest, expect_failure or quartodoc_skip_doctest - if any(map(should_skip, chunk)): + if skipped := any(map(should_skip, chunk)): start = end = "" else: - has_executed_chunks = True start, end = "{}" + result.append( + dedent( + """ + ```{python} + #| echo: false + + import ibis + ibis.options.interactive = True + ``` + """ + ) + ) result.append(f"```{start}python{end}") @@ -67,22 +76,16 @@ class Renderer(qd.MdRenderer): result.extend(rest) result.append("```\\n") - examples = "\\n".join(result) - - if has_executed_chunks: - # turn off interactive mode before rendering - return ( - dedent( - """ - ```{python} - #| echo: false - - import ibis - ibis.options.interactive = False - ``` - """ - ) - + examples - ) - else: - return examples + if not skipped: + result.append( + dedent( + """ + ```{python} + #| echo: false + ibis.options.interactive = False + ``` + """ + ) + ) + + return "\\n".join(result) diff --git a/api.py b/api.py index 7454133..ded087c 100644 --- a/api.py +++ b/api.py @@ -326,8 +326,7 @@ def table( Create a table with no data backing it >>> import ibis - >>> ibis.options.interactive - False + >>> ibis.options.interactive = False >>> t = ibis.table(schema=dict(a="int", b="string"), name="t") >>> t UnboundTable: t diff --git a/selectors.py b/selectors.py index a84da70..c9298af 100644 --- a/selectors.py +++ b/selectors.py @@ -180,11 +180,8 @@ def numeric() -> Predicate: >>> import ibis >>> import ibis.selectors as s >>> t = ibis.table(dict(a="int", b="string", c="array<string>"), name="t") - >>> t - UnboundTable: t - a int64 - b string - c array<string> + >>> t.columns + ['a', 'b', 'c'] >>> expr = t.select(s.numeric()) # `a` has integer type, so it's numeric >>> expr.columns ['a']
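The renderer relies on `toolz.partitionby` to split docstring lines into alternating prompt/output chunks before deciding which ones execute. A tiny sketch of that primitive, with a simplified chunker predicate:

```python
import toolz

lines = [">>> 1 + 1", "2", ">>> 'a'.upper()", "'A'"]

# partitionby starts a new chunk every time the key function's value flips.
chunks = list(toolz.partitionby(lambda line: line.startswith(">>>"), lines))
assert chunks == [(">>> 1 + 1",), ("2",), (">>> 'a'.upper()",), ("'A'",)]
```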
|
|
fix(sqlalchemy): use indexed group by key references everywhere possible
|
9f1ddd8328a9c10155ff934b0fa157d58b63e4fe
|
fix
|
https://github.com/ibis-project/ibis/commit/9f1ddd8328a9c10155ff934b0fa157d58b63e4fe
|
use indexed group by key references everywhere possible
|
diff --git a/query_builder.py b/query_builder.py index 28ec0a9..081c527 100644 --- a/query_builder.py +++ b/query_builder.py @@ -251,13 +251,18 @@ class AlchemySelect(Select): def _add_group_by(self, fragment): # GROUP BY and HAVING - if not len(self.group_by): + nkeys = len(self.group_by) + if not nkeys: return fragment - group_keys = [self._translate(arg) for arg in self.group_by] + if self.context.compiler.supports_indexed_grouping_keys: + group_keys = map(sa.literal_column, map(str, range(1, nkeys + 1))) + else: + group_keys = map(self._translate, self.group_by) + fragment = fragment.group_by(*group_keys) - if len(self.having) > 0: + if self.having: having_args = [self._translate(arg) for arg in self.having] having_clause = functools.reduce(sql.and_, having_args) fragment = fragment.having(having_clause) @@ -265,7 +270,7 @@ class AlchemySelect(Select): return fragment def _add_where(self, fragment): - if not len(self.where): + if not self.where: return fragment args = [self._translate(pred, permit_subquery=True) for pred in self.where] @@ -273,7 +278,7 @@ class AlchemySelect(Select): return fragment.where(clause) def _add_order_by(self, fragment): - if not len(self.order_by): + if not self.order_by: return fragment clauses = [] @@ -358,6 +363,8 @@ class AlchemyCompiler(Compiler): intersect_class = AlchemyIntersection difference_class = AlchemyDifference + supports_indexed_grouping_keys = True + @classmethod def to_sql(cls, expr, context=None, params=None, exists=False): if context is None: diff --git a/relations.py b/relations.py index 043dc2d..e2a4b38 100644 --- a/relations.py +++ b/relations.py @@ -78,7 +78,7 @@ def _aggregation(op: ops.Aggregation, *, table, **kw): sel = sg.select(*selections).from_(table) if by: - sel = sel.group_by(*by, dialect="clickhouse") + sel = sel.group_by(*map(str, range(1, len(by) + 1)), dialect="clickhouse") if predicates := op.predicates: sel = sel.where(*map(tr_val, predicates), dialect="clickhouse") diff --git a/out.sql b/out.sql index 7f247f7..8a50823 100644 --- a/out.sql +++ b/out.sql @@ -1,3 +1,3 @@ SELECT t0.foo_id, t0.total, t1.value1 FROM (SELECT t2.foo_id AS foo_id, sum(t2.f) AS total -FROM star1 AS t2 GROUP BY t2.foo_id) AS t0 JOIN star2 AS t1 ON t0.foo_id = t1.foo_id \\ No newline at end of file +FROM star1 AS t2 GROUP BY 1) AS t0 JOIN star2 AS t1 ON t0.foo_id = t1.foo_id \\ No newline at end of file diff --git a/out1.sql b/out1.sql index 067ecb3..909d3ac 100644 --- a/out1.sql +++ b/out1.sql @@ -9,4 +9,4 @@ FROM t0 AS t0 WHERE t0.value = 42 GROUP BY - t0.key \\ No newline at end of file + 1 \\ No newline at end of file diff --git a/out2.sql b/out2.sql index a785587..77ecfa5 100644 --- a/out2.sql +++ b/out2.sql @@ -9,4 +9,4 @@ FROM t0 AS t0 WHERE t0.value = 42 GROUP BY - t0.key \\ No newline at end of file + 1 \\ No newline at end of file diff --git a/compiler.py b/compiler.py index fe0f85e..17379e5 100644 --- a/compiler.py +++ b/compiler.py @@ -28,3 +28,5 @@ rewrites = MsSqlExprTranslator.rewrites
class MsSqlCompiler(AlchemyCompiler):
translator_class = MsSqlExprTranslator
+
+ supports_indexed_grouping_keys = False diff --git a/test_functions.py b/test_functions.py index 0a3b322..8f037f5 100644 --- a/test_functions.py +++ b/test_functions.py @@ -526,11 +526,8 @@ def test_category_label(alltypes, df): tm.assert_series_equal(result, expected) [email protected]( - ('distinct', 'union'), - [(True, 'UNION'), (False, 'UNION ALL')], -) -def test_union_cte(alltypes, distinct, union): [email protected]("distinct", [True, False]) +def test_union_cte(alltypes, distinct, snapshot): t = alltypes expr1 = t.group_by(t.string_col).aggregate(metric=t.double_col.sum()) expr2 = expr1.view() @@ -542,22 +539,7 @@ def test_union_cte(alltypes, distinct, union): expr.compile().compile(compile_kwargs={'literal_binds': True}) ).splitlines() ) - expected = ( - "WITH anon_1 AS " - "(SELECT t0.string_col AS string_col, sum(t0.double_col) AS metric " - "FROM functional_alltypes AS t0 GROUP BY t0.string_col), " - "anon_2 AS " - "(SELECT t0.string_col AS string_col, sum(t0.double_col) AS metric " - "FROM functional_alltypes AS t0 GROUP BY t0.string_col), " - "anon_3 AS " - "(SELECT t0.string_col AS string_col, sum(t0.double_col) AS metric " - "FROM functional_alltypes AS t0 GROUP BY t0.string_col) " - "SELECT anon_1.string_col, anon_1.metric " - f"FROM anon_1 {union} SELECT anon_2.string_col, anon_2.metric " - f"FROM anon_2 {union} SELECT anon_3.string_col, anon_3.metric " - "FROM anon_3" - ) - assert str(result) == expected + snapshot.assert_match(result, "out.sql") @pytest.mark.parametrize( diff --git a/test_sql.py b/test_sql.py index 2e40063..e9b7882 100644 --- a/test_sql.py +++ b/test_sql.py @@ -88,3 +88,29 @@ no_sql_extraction = mark.notimpl( @no_sql_extraction def test_literal(backend, expr): assert ibis.to_sql(expr, dialect=backend.name()) + + [email protected]( + ["pandas", "dask", "datafusion", "polars", "pyspark"], reason="not SQL" +) [email protected](["mssql"], reason="sqlglot doesn't support an mssql dialect") +def test_group_by_has_index(backend, snapshot): + countries = ibis.table( + dict(continent="string", population="int64"), name="countries" + ) + expr = countries.group_by( + cont=( + _.continent.case() + .when("NA", "North America") + .when("SA", "South America") + .when("EU", "Europe") + .when("AF", "Africa") + .when("AS", "Asia") + .when("OC", "Oceania") + .when("AN", "Antarctica") + .else_("Unknown continent") + .end() + ) + ).agg(total_pop=_.population.sum()) + sql = str(ibis.to_sql(expr, dialect=backend.name())) + snapshot.assert_match(sql, "out.sql") diff --git a/test_sqlalchemy.py b/test_sqlalchemy.py index 0661322..242c654 100644 --- a/test_sqlalchemy.py +++ b/test_sqlalchemy.py @@ -358,51 +358,33 @@ def test_where_simple_comparisons(sa_star1, star1, snapshot): @pytest.mark.parametrize( - ("expr_fn", "expected_fn"), + "expr_fn", [ - ( - lambda t: t.aggregate([t['f'].sum().name('total')], [t['foo_id']]), - lambda st: sa.select(st.c.foo_id, F.sum(st.c.f).label('total')).group_by( - st.c.foo_id - ), + param( + lambda t: t.agg([t['f'].sum().name('total')], [t['foo_id']]), + id="single_key", ), - ( - lambda t: t.aggregate([t['f'].sum().name('total')], ['foo_id', 'bar_id']), - lambda st: sa.select( - st.c.foo_id, st.c.bar_id, F.sum(st.c.f).label('total') - ).group_by(st.c.foo_id, st.c.bar_id), + param( + lambda t: t.agg([t['f'].sum().name('total')], ['foo_id', 'bar_id']), + id="two_keys", ), - ( - lambda t: t.aggregate( - [t.f.sum().name("total")], - by=["foo_id"], - having=[t.f.sum() > 10], - ), - lambda st: ( - sa.select(st.c.foo_id, F.sum(st.c.f).label("total")) - 
.group_by(st.c.foo_id) - .having(F.sum(st.c.f).label("total") > L(10)) + param( + lambda t: t.agg( + [t.f.sum().name("total")], by=["foo_id"], having=[t.f.sum() > 10] ), + id="having_sum", ), - ( - lambda t: t.aggregate( - [t.f.sum().name("total")], - by=["foo_id"], - having=[t.count() > 100], - ), - lambda st: ( - sa.select(st.c.foo_id, F.sum(st.c.f).label("total")) - .group_by(st.c.foo_id) - .having(F.count() > L(100)) + param( + lambda t: t.agg( + [t.f.sum().name("total")], by=["foo_id"], having=[t.count() > 100] ), + id="having_count", ), ], ) -def test_aggregate(con, star1, sa_star1, expr_fn, expected_fn): - st = sa_star1.alias('t0') +def test_aggregate(star1, expr_fn, snapshot): expr = expr_fn(star1) - expected = expected_fn(st) - _check(expr, expected) + snapshot.assert_match(to_sql(expr), "out.sql") @pytest.mark.parametrize( @@ -479,7 +461,11 @@ def test_cte_factor_distinct_but_equal(con, sa_alltypes, snapshot): # t2 = sa_alltypes.alias('t2') - t0 = sa.select(t2.c.g, F.sum(t2.c.f).label('metric')).group_by(t2.c.g).cte('t0') + t0 = ( + sa.select(t2.c.g, F.sum(t2.c.f).label('metric')) + .group_by(sa.literal_column("1")) + .cte('t0') + ) t1 = t0.alias('t1') table_set = t0.join(t1, t0.c.g == t1.c.g) @@ -570,7 +556,7 @@ def test_subquery_aliased(con, star1, star2, snapshot): agged = ( sa.select(s1.c.foo_id, F.sum(s1.c.f).label('total')) - .group_by(s1.c.foo_id) + .group_by(sa.literal_column("1")) .alias('t0') ) @@ -581,37 +567,14 @@ def test_subquery_aliased(con, star1, star2, snapshot): snapshot.assert_match(to_sql(expr), "out.sql") -def test_lower_projection_sort_key(con, star1, star2, snapshot): +def test_lower_projection_sort_key(star1, star2, snapshot): t1 = star1 t2 = star2 agged = t1.aggregate([t1.f.sum().name('total')], by=['foo_id']) expr = agged.inner_join(t2, [agged.foo_id == t2.foo_id])[agged, t2.value1] - # - t4 = con.meta.tables["star1"].alias("t4") - t3 = con.meta.tables["star2"].alias("t3") - - t2 = ( - sa.select(t4.c.foo_id, F.sum(t4.c.f).label('total')) - .group_by(t4.c.foo_id) - .alias('t2') - ) - t1 = ( - sa.select(t2.c.foo_id, t2.c.total, t3.c.value1) - .select_from(t2.join(t3, t2.c.foo_id == t3.c.foo_id)) - .alias('t1') - ) - t0 = ( - sa.select(t1.c.foo_id, t1.c.total, t1.c.value1) - .where(t1.c.total > L(100)) - .alias('t0') - ) - expected = sa.select(t0.c.foo_id, t0.c.total, t0.c.value1).order_by( - t0.c.total.desc() - ) expr2 = expr[expr.total > 100].order_by(ibis.desc('total')) - _check(expr2, expected) snapshot.assert_match(to_sql(expr2), "out.sql") assert_decompile_roundtrip(expr2, snapshot) @@ -661,41 +624,27 @@ def test_not_exists(con, not_exists, snapshot): @pytest.mark.parametrize( - ("expr_fn", "expected_fn"), + "expr_fn", [ - (lambda t: t.distinct(), lambda sat: sa.select(sat).distinct()), - ( - lambda t: t['string_col', 'int_col'].distinct(), - lambda sat: sa.select(sat.c.string_col, sat.c.int_col).distinct(), + param(lambda t: t.distinct(), id="table_distinct"), + param( + lambda t: t['string_col', 'int_col'].distinct(), id="projection_distinct" ), - ( - lambda t: t[t.string_col].distinct(), - lambda sat: sa.select(sat.c.string_col.distinct()), - ), - ( - lambda t: t.int_col.nunique().name('nunique'), - lambda sat: sa.select(F.count(sat.c.int_col.distinct()).label('nunique')), + param( + lambda t: t[t.string_col].distinct(), id="single_column_projection_distinct" ), - ( + param(lambda t: t.int_col.nunique().name('nunique'), id="count_distinct"), + param( lambda t: t.group_by('string_col').aggregate( t.int_col.nunique().name('nunique') ), - lambda sat: 
sa.select( - sat.c.string_col, - F.count(sat.c.int_col.distinct()).label('nunique'), - ).group_by(sat.c.string_col), + id="group_by_count_distinct", ), ], ) -def test_distinct( - sa_functional_alltypes, - functional_alltypes, - expr_fn, - expected_fn, -): +def test_distinct(functional_alltypes, expr_fn, snapshot): expr = expr_fn(functional_alltypes) - expected = expected_fn(sa_functional_alltypes) - _check(expr, expected) + snapshot.assert_match(to_sql(expr), "out.sql") def test_sort_aggregation_translation_failure( @@ -713,7 +662,7 @@ def test_sort_aggregation_translation_failure( sat = sa_functional_alltypes.alias("t1") base = ( sa.select(sat.c.string_col, F.max(sat.c.double_col).label('foo')).group_by( - sat.c.string_col + sa.literal_column("1") ) ).alias('t0') @@ -824,7 +773,7 @@ def test_mutate_filter_join_no_cross_join(): _check(expr, ex) -def test_filter_group_by_agg_with_same_name(): +def test_filter_group_by_agg_with_same_name(snapshot): # GH 2907 t = ibis.table([("int_col", "int32"), ("bigint_col", "int64")], name="t") expr = ( @@ -832,18 +781,7 @@ def test_filter_group_by_agg_with_same_name(): .aggregate(bigint_col=lambda t: t.bigint_col.sum()) .filter(lambda t: t.bigint_col == 60) ) - - t1 = sa.table("t", sa.column("int_col"), sa.column("bigint_col")).alias("t1") - t0 = ( - sa.select( - t1.c.int_col.label("int_col"), - sa.func.sum(t1.c.bigint_col).label("bigint_col"), - ) - .group_by(t1.c.int_col) - .alias("t0") - ) - ex = sa.select(t0).where(t0.c.bigint_col == 60) - _check(expr, ex) + snapshot.assert_match(to_sql(expr), "out.sql") @pytest.fixture @@ -1075,7 +1013,7 @@ def test_tpc_h11(h11): ) ) .where(t4.c.n_name == NATION) - .group_by(t2.c.ps_partkey) + .group_by(sa.literal_column("1")) ).alias("t1") anon_1 = ( @@ -1109,7 +1047,7 @@ def test_to_sqla_type_array_of_non_primitive(): assert isinstance(result, ArrayType) -def test_no_cart_join(con, snapshot): +def test_no_cart_join(snapshot): facts = ibis.table(dict(product_id="!int32"), name="facts") products = ibis.table( dict( @@ -1134,7 +1072,7 @@ def test_no_cart_join(con, snapshot): agg = gb.aggregate(n=ibis.literal(1)) ob = agg.order_by(products.ancestor_node_sort_order) - out = str(con.compile(ob).compile(compile_kwargs=dict(literal_binds=True))) + out = to_sql(ob) snapshot.assert_match(out, "out.sql")
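A minimal sketch of the ordinal-key emission that `AlchemySelect._add_group_by` now performs, in plain SQLAlchemy with invented table and column names:

```python
import sqlalchemy as sa

t = sa.table("star1", sa.column("foo_id"), sa.column("f"))

# One grouping key -> GROUP BY 1, matching the snapshot updates above.
nkeys = 1
stmt = sa.select(t.c.foo_id, sa.func.sum(t.c.f).label("total")).group_by(
    *map(sa.literal_column, map(str, range(1, nkeys + 1)))
)
print(stmt)  # roughly: SELECT star1.foo_id, sum(star1.f) AS total FROM star1 GROUP BY 1
```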
|
|
refactor (#222)
Use semver::Version everywhere instead of str/String. Strongly typed
things are definitely better; I don't know how that even happened.
|
03c7dbabff14bd5dd150bd5174f53148d4ee0fec
|
refactor
|
https://github.com/Byron/gitoxide/commit/03c7dbabff14bd5dd150bd5174f53148d4ee0fec
|
:Version everywhere instead of str/String. Strongly typed
things are definitely better; I don't know how that even happened.
|
diff --git a/changelog.rs b/changelog.rs index 0e91dec..abc1ea4 100644 --- a/changelog.rs +++ b/changelog.rs @@ -1,14 +1,13 @@ use std::io::Write; -use crate::utils::package_by_name; -use crate::version::BumpSpec; use crate::{ bat, changelog::write::{Components, Linkables}, command::changelog::Options, git, traverse::dependency, - utils::will, + utils::{package_by_name, will}, + version::BumpSpec, ChangeLog, }; diff --git a/git.rs b/git.rs index d8c5d8a..b551247 100644 --- a/git.rs +++ b/git.rs @@ -32,7 +32,7 @@ pub(in crate::command::release_impl) fn commit_changes( pub(in crate::command::release_impl) fn create_version_tag<'repo>( publishee: &Package, - new_version: &str, + new_version: &semver::Version, commit_id: Option<Oid<'repo>>, tag_message: Option<String>, ctx: &'repo crate::Context, diff --git a/github.rs b/github.rs index 05c5369..211fb4a 100644 --- a/github.rs +++ b/github.rs @@ -30,7 +30,7 @@ impl Support { pub fn create_release( publishee: &Package, - new_version: &str, + new_version: &semver::Version, notes: &str, Options { dry_run, .. }: Options, ctx: &Context, diff --git a/manifest.rs b/manifest.rs index f66fc85..c28fa21 100644 --- a/manifest.rs +++ b/manifest.rs @@ -4,6 +4,7 @@ use anyhow::bail; use cargo_metadata::{camino::Utf8PathBuf, Metadata, Package}; use semver::{Op, Version, VersionReq}; +use super::{cargo, git, Context, Oid, Options}; use crate::{ changelog, changelog::write::Linkables, @@ -11,17 +12,15 @@ use crate::{ version, ChangeLog, }; -use super::{cargo, git, Context, Oid, Options}; - pub struct Outcome<'repo, 'meta> { pub commit_id: Option<Oid<'repo>>, pub section_by_package: BTreeMap<&'meta str, changelog::Section>, - pub safety_bumped_packages: Vec<(&'meta Package, String)>, + pub safety_bumped_packages: Vec<(&'meta Package, semver::Version)>, } pub(in crate::command::release_impl) fn edit_version_and_fixup_dependent_crates_and_handle_changelog<'repo, 'meta>( meta: &'meta Metadata, - publishees: &[(&'meta Package, String)], + publishees: &[(&'meta Package, semver::Version)], opts: Options, ctx: &'repo Context, ) -> anyhow::Result<Outcome<'repo, 'meta>> { @@ -72,7 +71,6 @@ pub(in crate::command::release_impl) fn edit_version_and_fixup_dependent_crates_ } else if recent_release_section_in_log.is_probably_lacking_user_edits() { changelog_ids_probably_lacking_user_edits.push(pending_changelogs.len()); } - let new_version: semver::Version = new_version.parse()?; match recent_release_section_in_log { changelog::Section::Release { name: name @ changelog::Version::Unreleased, @@ -93,7 +91,7 @@ pub(in crate::command::release_impl) fn edit_version_and_fixup_dependent_crates_ match log .sections .iter_mut() - .find(|s| matches!(s, changelog::Section::Release {name: changelog::Version::Semantic(v), ..} if *v == new_version)) + .find(|s| matches!(s, changelog::Section::Release {name: changelog::Version::Semantic(v), ..} if v == new_version)) { Some(version_section) => { version_section.merge(recent_section); @@ -116,7 +114,7 @@ pub(in crate::command::release_impl) fn edit_version_and_fixup_dependent_crates_ date, .. } => { - if *recent_version != new_version { + if recent_version != new_version { anyhow::bail!( "'{}' does not have an unreleased version, and most recent release is unexpected. 
Wanted {}, got {}.", publishee.name, @@ -167,7 +165,7 @@ pub(in crate::command::release_impl) fn edit_version_and_fixup_dependent_crates_ .expect("lock available"); made_change |= set_version_and_update_package_dependency( publishee, - Some(&new_version.to_string()), + Some(&new_version), &publishees_and_bumped_dependent_packages, &mut lock, opts, @@ -180,7 +178,7 @@ pub(in crate::command::release_impl) fn edit_version_and_fixup_dependent_crates_ .expect("lock written once"); made_change |= set_version_and_update_package_dependency( dependant_on_publishee, - possibly_new_version.as_deref(), + possibly_new_version.as_ref(), &publishees_and_bumped_dependent_packages, &mut lock, opts, @@ -420,7 +418,7 @@ pub(in crate::command::release_impl) fn edit_version_and_fixup_dependent_crates_ /// Packages that depend on any of the publishees, where publishee is used by them, and possibly propose a new version. fn collect_directly_dependent_packages<'a>( meta: &'a Metadata, - publishees: &[(&Package, String)], + publishees: &[(&Package, semver::Version)], locks_by_manifest_path: &mut BTreeMap<&'a Utf8PathBuf, git_repository::lock::File>, ctx: &Context, Options { @@ -429,8 +427,8 @@ fn collect_directly_dependent_packages<'a>( verbose, .. }: Options, -) -> anyhow::Result<Vec<(&'a Package, Option<String>)>> { - let mut packages_to_fix = Vec::<(&Package, Option<String>)>::new(); +) -> anyhow::Result<Vec<(&'a Package, Option<semver::Version>)>> { + let mut packages_to_fix = Vec::<(&Package, Option<semver::Version>)>::new(); let mut dependent_packages_this_round = Vec::new(); let publishees_backing = publishees .iter() @@ -464,7 +462,7 @@ fn collect_directly_dependent_packages<'a>( for (publishee_as_dependency, new_version) in publishees_and_dependents.iter().filter_map(|(publishee, new_version)| { new_version - .as_deref() + .as_ref() .and_then(|v| package_eq_dependency(publishee, dep).then(|| (*publishee, v))) }) { @@ -487,7 +485,7 @@ fn collect_directly_dependent_packages<'a>( let greatest_version = desired_versions.pop().expect("at least one version"); let new_version = version::rhs_is_breaking_bump_for_lhs(&workspace_package.version, &greatest_version) - .then(|| greatest_version.to_string()); + .then(|| greatest_version); if locks_by_manifest_path.contains_key(&workspace_package.manifest_path) { if let Some(previous_version) = packages_to_fix @@ -527,7 +525,7 @@ fn collect_directly_dependent_packages<'a>( Ok(packages_to_fix) } -fn is_direct_dependency_of(publishees: &[(&Package, String)], package_to_fix: &Package) -> bool { +fn is_direct_dependency_of(publishees: &[(&Package, semver::Version)], package_to_fix: &Package) -> bool { package_to_fix.dependencies.iter().any(|dep| { publishees .iter() @@ -537,8 +535,8 @@ fn is_direct_dependency_of(publishees: &[(&Package, String)], package_to_fix: &P fn set_version_and_update_package_dependency( package_to_update: &Package, - new_package_version: Option<&str>, - publishees: &[(&Package, String)], + new_package_version: Option<&semver::Version>, + publishees: &[(&Package, semver::Version)], mut out: impl std::io::Write, Options { conservative_pre_release_version_handling, @@ -549,18 +547,18 @@ fn set_version_and_update_package_dependency( let mut doc = toml_edit::Document::from_str(&manifest)?; if let Some(new_version) = new_package_version { - if doc["package"]["version"].as_str() != Some(new_version) { - doc["package"]["version"] = toml_edit::value(new_version); + let new_version = new_version.to_string(); + if doc["package"]["version"].as_str() != 
Some(new_version.as_str()) { log::trace!( "Pending '{}' manifest version update: \\"{}\\"", package_to_update.name, new_version ); + doc["package"]["version"] = toml_edit::value(new_version); } } for dep_type in &["dependencies", "dev-dependencies", "build-dependencies"] { for (name_to_find, new_version) in publishees.iter().map(|(p, nv)| (&p.name, nv)) { - let new_version = Version::parse(new_version)?; for name_to_find in package_to_update .dependencies .iter() diff --git a/mod.rs b/mod.rs index 6531ad8..72f6ca0 100644 --- a/mod.rs +++ b/mod.rs @@ -21,7 +21,7 @@ pub enum PackageChangeKind { } pub fn change_since_last_release(package: &Package, ctx: &crate::Context) -> anyhow::Result<Option<PackageChangeKind>> { - let version_tag_name = tag_name(package, &package.version.to_string(), &ctx.repo); + let version_tag_name = tag_name(package, &package.version, &ctx.repo); let mut tag_ref = match ctx.repo.try_find_reference(&version_tag_name)? { None => { return Ok(Some(PackageChangeKind::Untagged { diff --git a/context.rs b/context.rs index 52a0419..eaca0ac 100644 --- a/context.rs +++ b/context.rs @@ -1,4 +1,3 @@ -use crate::version::BumpSpec; use cargo_metadata::{ camino::{Utf8Path, Utf8PathBuf}, Metadata, Package, @@ -7,6 +6,8 @@ use crates_index::Index; use git_repository as git; use git_repository::prelude::CacheAccessExt; +use crate::version::BumpSpec; + pub struct Context { pub root: Utf8PathBuf, pub meta: Metadata, diff --git a/utils.rs b/utils.rs index b18e3e1..6fa50b9 100644 --- a/utils.rs +++ b/utils.rs @@ -81,7 +81,7 @@ pub fn package_for_dependency<'a>(meta: &'a Metadata, dep: &Dependency) -> &'a P .expect("dependency always available as package") } -pub fn names_and_versions(publishees: &[(&Package, String)]) -> String { +pub fn names_and_versions(publishees: &[(&Package, semver::Version)]) -> String { publishees .iter() .map(|(p, nv)| format!("{} v{}", p.name, nv)) @@ -104,11 +104,11 @@ pub fn tag_prefix<'p>(package: &'p Package, repo: &git::Easy) -> Option<&'p str> } } -pub fn tag_name(package: &Package, version: &str, repo: &git::Easy) -> String { +pub fn tag_name(package: &Package, version: &semver::Version, repo: &git::Easy) -> String { tag_name_inner(tag_prefix(package, repo), version) } -fn tag_name_inner(package_name: Option<&str>, version: &str) -> String { +fn tag_name_inner(package_name: Option<&str>, version: &semver::Version) -> String { match package_name { Some(name) => format!("{}-v{}", name, version), None => format!("v{}", version), @@ -156,6 +156,8 @@ pub fn component_to_bytes(c: Utf8Component<'_>) -> &[u8] { mod tests { mod parse_possibly_prefixed_tag_version { mod matches { + use std::str::FromStr; + use git_repository::bstr::ByteSlice; use semver::Version; @@ -166,7 +168,9 @@ mod tests { assert_eq!( parse_possibly_prefixed_tag_version( "git-test".into(), - tag_name_inner("git-test".into(), "1.0.1").as_bytes().as_bstr() + tag_name_inner("git-test".into(), &Version::from_str("1.0.1").unwrap()) + .as_bytes() + .as_bstr() ), Version::parse("1.0.1").expect("valid").into() ); @@ -174,13 +178,20 @@ mod tests { assert_eq!( parse_possibly_prefixed_tag_version( "single".into(), - tag_name_inner("single".into(), "0.0.1-beta.1").as_bytes().as_bstr() + tag_name_inner("single".into(), &Version::from_str("0.0.1-beta.1").unwrap()) + .as_bytes() + .as_bstr() ), Version::parse("0.0.1-beta.1").expect("valid").into() ); assert_eq!( - parse_possibly_prefixed_tag_version(None, tag_name_inner(None, "0.0.1+123.x").as_bytes().as_bstr()), + parse_possibly_prefixed_tag_version( + None, 
+ tag_name_inner(None, &Version::from_str("0.0.1+123.x").unwrap()) + .as_bytes() + .as_bstr() + ), Version::parse("0.0.1+123.x").expect("valid").into() ); } @@ -189,7 +200,10 @@ mod tests { mod is_tag_name { mod no_match { + use std::str::FromStr; + use git_repository::bstr::ByteSlice; + use semver::Version; use crate::utils::{is_tag_name, tag_name_inner}; @@ -197,12 +211,17 @@ mod tests { fn due_to_crate_name() { assert!(!is_tag_name( "foo", - tag_name_inner("bar".into(), "0.0.1-beta.1").as_bytes().as_bstr() + tag_name_inner("bar".into(), &Version::from_str("0.0.1-beta.1").unwrap()) + .as_bytes() + .as_bstr() )); } } mod matches { + use std::str::FromStr; + use git_repository::bstr::ByteSlice; + use semver::Version; use crate::utils::{is_tag_name, tag_name_inner}; @@ -210,12 +229,16 @@ mod tests { fn whatever_tag_name_would_return() { assert!(is_tag_name( "git-test", - tag_name_inner("git-test".into(), "1.0.1").as_bytes().as_bstr() + tag_name_inner("git-test".into(), &Version::from_str("1.0.1").unwrap()) + .as_bytes() + .as_bstr() )); assert!(is_tag_name( "single", - tag_name_inner("single".into(), "0.0.1-beta.1").as_bytes().as_bstr() + tag_name_inner("single".into(), &Version::from_str("0.0.1-beta.1").unwrap()) + .as_bytes() + .as_bstr() )); } } diff --git a/version.rs b/version.rs index 559031e..e0baa62 100644 --- a/version.rs +++ b/version.rs @@ -333,13 +333,12 @@ pub(crate) fn is_pre_release(semver: &Version) -> bool { pub(crate) fn conservative_dependent_version( publishee: &Package, - new_publishee_version: &str, + new_publishee_version: &semver::Version, dependent: &Package, ctx: &Context, bump_when_needed: bool, verbose: bool, ) -> Option<Version> { - let new_publishee_version: Version = new_publishee_version.parse().expect("new versions are always valid"); if !rhs_is_breaking_bump_for_lhs(&publishee.version, &new_publishee_version) { return None; }
|
|
fix(backends): fix notall/notany translation
|
56b56b363cea406df67d807a2c2067bb71c226ab
|
fix
|
https://github.com/rohankumardubey/ibis/commit/56b56b363cea406df67d807a2c2067bb71c226ab
|
fix notall/notany translation
|
diff --git a/translator.py b/translator.py index ededa25..069b5a2 100644 --- a/translator.py +++ b/translator.py @@ -323,22 +323,24 @@ def _bucket(op): @rewrites(ops.Any) def _any_expand(op): - return ops.Max(op.arg) + return ops.Max(op.arg, where=op.where) @rewrites(ops.NotAny) def _notany_expand(op): - return ops.Equals(ops.Max(op.arg), ops.Literal(0, dtype=op.arg.output_dtype)) + zero = ops.Literal(0, dtype=op.arg.output_dtype) + return ops.Min(ops.Equals(op.arg, zero), where=op.where) @rewrites(ops.All) def _all_expand(op): - return ops.Min(op.arg) + return ops.Min(op.arg, where=op.where) @rewrites(ops.NotAll) def _notall_expand(op): - return ops.Equals(ops.Min(op.arg), ops.Literal(0, dtype=op.arg.output_dtype)) + zero = ops.Literal(0, dtype=op.arg.output_dtype) + return ops.Max(ops.Equals(op.arg, zero), where=op.where) @rewrites(ops.Cast) diff --git a/compiler.py b/compiler.py index ee98bd5..8379667 100644 --- a/compiler.py +++ b/compiler.py @@ -99,6 +99,16 @@ class BigQueryExprTranslator(sql_compiler.ExprTranslator): compiles = BigQueryExprTranslator.compiles [email protected](ops.NotAll) +def _rewrite_notall(op): + return ops.Any(ops.Not(op.arg), where=op.where) + + [email protected](ops.NotAny) +def _rewrite_notany(op): + return ops.All(ops.Not(op.arg), where=op.where) + + class BigQueryTableSetFormatter(sql_compiler.TableSetFormatter): def _quote_identifier(self, name): if re.match(r"^[A-Za-z][A-Za-z_0-9]*$", name): diff --git a/registry.py b/registry.py index 0682ed8..be20a3c 100644 --- a/registry.py +++ b/registry.py @@ -235,8 +235,8 @@ operation_registry.update( # boolean reductions ops.Any: reduction(sa.func.bool_or), ops.All: reduction(sa.func.bool_and), - ops.NotAny: reduction(lambda x: sa.not_(sa.func.bool_or(x))), - ops.NotAll: reduction(lambda x: sa.not_(sa.func.bool_and(x))), + ops.NotAny: reduction(lambda x: sa.func.bool_and(~x)), + ops.NotAll: reduction(lambda x: sa.func.bool_or(~x)), ops.ArgMin: reduction(sa.func.min_by), ops.ArgMax: reduction(sa.func.max_by), # array ops diff --git a/values.py b/values.py index 2139977..f05a02a 100644 --- a/values.py +++ b/values.py @@ -177,12 +177,12 @@ def _count_star(op, **kw): @translate_val.register(ops.NotAny) def _not_any(op, **kw): - return translate_val(ops.Not(ops.Any(op.arg)), **kw) + return translate_val(ops.All(ops.Not(op.arg), where=op.where), **kw) @translate_val.register(ops.NotAll) def _not_all(op, **kw): - return translate_val(ops.Not(ops.All(op.arg)), **kw) + return translate_val(ops.Any(ops.Not(op.arg), where=op.where), **kw) def _quantile_like(func_name: str, op: ops.Node, quantile: str, **kw): diff --git a/out.sql b/out.sql index bca1e4a..f0a0037 100644 --- a/out.sql +++ b/out.sql @@ -1 +1 @@ -max(`f` = 0) = FALSE \\ No newline at end of file +min((`f` = 0) = FALSE) \\ No newline at end of file diff --git a/generic.py b/generic.py index 0906f2d..96ce20a 100644 --- a/generic.py +++ b/generic.py @@ -804,6 +804,8 @@ def execute_any_all_series(op, data, mask, aggcontext=None, **kwargs): @execute_node.register((ops.Any, ops.All), SeriesGroupBy, type(None)) def execute_any_all_series_group_by(op, data, mask, aggcontext=None, **kwargs): + if mask is not None: + data = data.obj.loc[mask].groupby(get_grouping(data.grouper.groupings)) if isinstance(aggcontext, (agg_ctx.Summarize, agg_ctx.Transform)): result = aggcontext.agg(data, type(op).__name__.lower()) else: @@ -819,6 +821,8 @@ def execute_any_all_series_group_by(op, data, mask, aggcontext=None, **kwargs): @execute_node.register((ops.NotAny, ops.NotAll), pd.Series, 
(pd.Series, type(None))) def execute_notany_notall_series(op, data, mask, aggcontext=None, **kwargs): name = type(op).__name__.lower()[len("Not") :] + if mask is not None: + data = data.loc[mask] if isinstance(aggcontext, (agg_ctx.Summarize, agg_ctx.Transform)): result = ~aggcontext.agg(data, name) else: @@ -833,6 +837,8 @@ def execute_notany_notall_series(op, data, mask, aggcontext=None, **kwargs): @execute_node.register((ops.NotAny, ops.NotAll), SeriesGroupBy, type(None)) def execute_notany_notall_series_group_by(op, data, mask, aggcontext=None, **kwargs): name = type(op).__name__.lower()[len("Not") :] + if mask is not None: + data = data.obj.loc[mask].groupby(get_grouping(data.grouper.groupings)) if isinstance(aggcontext, (agg_ctx.Summarize, agg_ctx.Transform)): result = ~aggcontext.agg(data, name) else: diff --git a/test_aggregation.py b/test_aggregation.py index 30c79c0..41b5294 100644 --- a/test_aggregation.py +++ b/test_aggregation.py @@ -286,8 +286,8 @@ def test_aggregate_multikey_group_reduction_udf(backend, alltypes, df): ), ), param( - lambda t, _: t.bool_col.any(), - lambda t, _: t.bool_col.any(), + lambda t, where: t.bool_col.any(where=where), + lambda t, where: t.bool_col[where].any(), id='any', marks=[ pytest.mark.notimpl( @@ -302,18 +302,14 @@ def test_aggregate_multikey_group_reduction_udf(backend, alltypes, df): ], ), param( - lambda t, _: t.bool_col.notany(), - lambda t, _: ~t.bool_col.any(), + lambda t, where: t.bool_col.notany(where=where), + lambda t, where: ~t.bool_col[where].any(), id='notany', marks=[ pytest.mark.notimpl( ["polars", "datafusion"], raises=com.OperationNotDefinedError, ), - pytest.mark.notimpl( - ['mssql'], - raises=sa.exc.ProgrammingError, - ), pytest.mark.broken( ["druid"], raises=AttributeError, @@ -322,19 +318,14 @@ def test_aggregate_multikey_group_reduction_udf(backend, alltypes, df): ], ), param( - lambda t, _: -t.bool_col.any(), - lambda t, _: ~t.bool_col.any(), + lambda t, where: -t.bool_col.any(where=where), + lambda t, where: ~t.bool_col[where].any(), id='any_negate', marks=[ pytest.mark.notimpl( ["polars", "datafusion"], raises=com.OperationNotDefinedError, ), - pytest.mark.broken( - ['mssql'], - raises=sa.exc.ProgrammingError, - reason="Incorrect syntax near '='", - ), pytest.mark.broken( ["druid"], raises=AttributeError, @@ -343,8 +334,8 @@ def test_aggregate_multikey_group_reduction_udf(backend, alltypes, df): ], ), param( - lambda t, _: t.bool_col.all(), - lambda t, _: t.bool_col.all(), + lambda t, where: t.bool_col.all(where=where), + lambda t, where: t.bool_col[where].all(), id='all', marks=[ pytest.mark.notimpl( @@ -359,8 +350,8 @@ def test_aggregate_multikey_group_reduction_udf(backend, alltypes, df): ], ), param( - lambda t, _: t.bool_col.notall(), - lambda t, _: ~t.bool_col.all(), + lambda t, where: t.bool_col.notall(where=where), + lambda t, where: ~t.bool_col[where].all(), id='notall', marks=[ pytest.mark.notimpl( @@ -372,27 +363,17 @@ def test_aggregate_multikey_group_reduction_udf(backend, alltypes, df): raises=AttributeError, reason="'IntegerColumn' object has no attribute 'notall'", ), - pytest.mark.broken( - ['mssql'], - raises=sa.exc.ProgrammingError, - reason="Incorrect syntax near '='", - ), ], ), param( - lambda t, _: -t.bool_col.all(), - lambda t, _: ~t.bool_col.all(), + lambda t, where: -t.bool_col.all(where=where), + lambda t, where: ~t.bool_col[where].all(), id='all_negate', marks=[ pytest.mark.notimpl( ["polars", "datafusion"], raises=com.OperationNotDefinedError, ), - pytest.mark.broken( - ['mssql'], - 
raises=sa.exc.ProgrammingError, - reason="Incorrect syntax near '='", - ), pytest.mark.broken( ["druid"], raises=AttributeError, diff --git a/test_window.py b/test_window.py index 34632e9..5a33b91 100644 --- a/test_window.py +++ b/test_window.py @@ -211,19 +211,7 @@ def calc_zscore(s): ), id='cumnotany', marks=[ - pytest.mark.notyet( - ["sqlite"], - reason="notany() over window not supported", - raises=sa.exc.OperationalError, - ), - pytest.mark.notyet( - ["impala"], - reason="notany() over window not supported", - raises=HiveServer2Error, - ), - pytest.mark.broken( - ["mssql", "mysql", "snowflake"], raises=sa.exc.ProgrammingError - ), + pytest.mark.broken(["mssql"], raises=sa.exc.ProgrammingError), pytest.mark.notimpl(["dask"], raises=NotImplementedError), ], ), @@ -248,19 +236,7 @@ def calc_zscore(s): ), id='cumnotall', marks=[ - pytest.mark.notyet( - ["sqlite"], - reason="notall() over window not supported", - raises=sa.exc.OperationalError, - ), - pytest.mark.notyet( - ["impala"], - reason="notall() over window not supported", - raises=HiveServer2Error, - ), - pytest.mark.broken( - ["mssql", "mysql", "snowflake"], raises=sa.exc.ProgrammingError - ), + pytest.mark.broken(["mssql"], raises=sa.exc.ProgrammingError), pytest.mark.notimpl(["dask"], raises=NotImplementedError), ], ),
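These rewrites all encode the same De Morgan identity, now with the `where` filter threaded through: `NotAny(x, where)` becomes `All(Not(x), where)` and `NotAll(x, where)` becomes `Any(Not(x), where)`, so each backend can reuse its plain `any`/`all` reductions. A minimal plain-Python sketch of those semantics (illustrative helpers only, not ibis internals):

```python
def filtered(values, where):
    """Apply an optional boolean mask before reducing, like `where=` in ibis."""
    if where is None:
        return values
    return [v for v, keep in zip(values, where) if keep]

def not_any(values, where=None):
    # NotAny(x, where) rewritten as All(Not(x), where)
    return all(not v for v in filtered(values, where))

def not_all(values, where=None):
    # NotAll(x, where) rewritten as Any(Not(x), where)
    return any(not v for v in filtered(values, where))

vals = [True, False, True]
mask = [True, True, False]
assert not_any(vals, mask) == (not any(filtered(vals, mask)))
assert not_all(vals, mask) == (not all(filtered(vals, mask)))
```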
|
|
ci: run arm64 job on slightly beefier runner (#9974)
|
4e50250ef18f4dab5b1bc8343d5e243880567110
|
ci
|
https://github.com/ibis-project/ibis/commit/4e50250ef18f4dab5b1bc8343d5e243880567110
|
run arm64 job on slightly beefier runner (#9974)
|
diff --git a/actionlint.yaml b/actionlint.yaml index 88aefa8..8916e6a 100644 --- a/actionlint.yaml +++ b/actionlint.yaml @@ -1,3 +1,3 @@ self-hosted-runner: labels: - - ubuntu-arm64-small + - ubuntu-arm64-24.04 diff --git a/nix-skip-helper.yml b/nix-skip-helper.yml index 371aba3..2a9e37b 100644 --- a/nix-skip-helper.yml +++ b/nix-skip-helper.yml @@ -37,7 +37,7 @@ jobs: - "3.11" - "3.12" include: - - os: ubuntu-arm64-small + - os: ubuntu-arm64-24.04 python-version: "3.12" - os: macos-14 python-version: "3.10" diff --git a/nix.yml b/nix.yml index 77c97ad..382ea6b 100644 --- a/nix.yml +++ b/nix.yml @@ -38,7 +38,7 @@ jobs: - "3.11" - "3.12" include: - - os: ubuntu-arm64-small + - os: ubuntu-arm64-24.04 python-version: "3.12" - os: macos-14 python-version: "3.10"
|
|
fix: add default "mass" and "clamp" props
|
7621b5096d9ca7ca2ca84871216e03834682a76b
|
fix
|
https://github.com/pmndrs/react-spring/commit/7621b5096d9ca7ca2ca84871216e03834682a76b
|
add default "mass" and "clamp" props
|
diff --git a/SpringValue.ts b/SpringValue.ts index 2b15f93..d80e0ab 100644 --- a/SpringValue.ts +++ b/SpringValue.ts @@ -116,11 +116,13 @@ const ACTIVE = 4 as Phase const noop = () => {} -const defaultConfig: SpringConfig = { +const BASE_CONFIG: SpringConfig = { ...config.default, + mass: 1, velocity: 0, progress: 0, easing: t => t, + clamp: false, } /** An observer of a `SpringValue` */ @@ -478,7 +480,7 @@ export class SpringValue<T = any, P extends string = string> if (config && (diff('config') || changed)) { config = callProp(config as any, key) if (config) { - config = { ...defaultConfig, ...config } + config = { ...BASE_CONFIG, ...config } // Cache the angular frequency in rad/ms config.w0 = Math.sqrt(config.tension / config.mass) / 1000
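The `mass: 1` default is load-bearing: the code below the merge caches the angular frequency as `w0 = Math.sqrt(config.tension / config.mass) / 1000`, and in JavaScript a missing `mass` silently turns that into `NaN`. A rough Python sketch of the same computation (the tension value of 170 is assumed from react-spring's `config.default`, not taken from this diff):

```python
import math

def angular_frequency(tension: float, mass: float) -> float:
    """Angular frequency in rad/ms, mirroring w0 = sqrt(tension / mass) / 1000."""
    return math.sqrt(tension / mass) / 1000

# With the new default mass of 1 the cached value is always well-defined
print(angular_frequency(tension=170, mass=1))  # ~0.0130 rad/ms
```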
|
|
refactor(postgres): use alchemy backend approx_nunique implementation
|
072b85b77d3ecbfa20a238567c817b42366a7f23
|
refactor
|
https://github.com/rohankumardubey/ibis/commit/072b85b77d3ecbfa20a238567c817b42366a7f23
|
use alchemy backend approx_nunique implementation
|
diff --git a/registry.py b/registry.py index 78bbec1..6a31e0e 100644 --- a/registry.py +++ b/registry.py @@ -504,14 +504,6 @@ def _identical_to(t, expr): return left.op('IS NOT DISTINCT FROM')(right) -def _hll_cardinality(t, expr): - # postgres doesn't have a builtin HLL algorithm, so we default to standard - # count distinct for now - arg, _ = expr.op().args - sa_arg = t.translate(arg) - return sa.func.count(sa.distinct(sa_arg)) - - def _table_column(t, expr): op = expr.op() ctx = t.context @@ -720,7 +712,6 @@ operation_registry.update( ), ops.ArrayRepeat: _array_repeat, ops.IdenticalTo: _identical_to, - ops.HLLCardinality: _hll_cardinality, ops.Unnest: unary(sa.func.unnest), } )
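The deleted `_hll_cardinality` duplicated what the shared Alchemy backend already provides: on backends without a native HyperLogLog function, `HLLCardinality` falls back to an exact `COUNT(DISTINCT ...)`. A minimal SQLAlchemy sketch of that fallback (generic illustration, not the ibis registry itself):

```python
import sqlalchemy as sa

events = sa.table("events", sa.column("user_id"))

# Exact-count fallback for approximate distinct counts
stmt = sa.select(sa.func.count(sa.distinct(events.c.user_id)))
print(stmt)  # SELECT count(DISTINCT events.user_id) FROM events
```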
|
|
chore: typo
|
aaa9457bec78384e6c0573d34cc609a442605a4c
|
chore
|
https://github.com/ibis-project/ibis/commit/aaa9457bec78384e6c0573d34cc609a442605a4c
|
typo
|
diff --git a/_quarto.yml b/_quarto.yml index 3750635..d20bbdd 100644 --- a/_quarto.yml +++ b/_quarto.yml @@ -143,7 +143,7 @@ quartodoc: path: top_level summary: name: Top-level APIs - desc: These methods and objecst are available directly on the `ibis` module. + desc: These methods and objects are available directly on the `ibis` module. contents: - name: and_ dynamic: true
|
|
fix: instead of erroring if loose iteration is performed on missing base, correctly yield zero references. (#595)
Previously it reported an error; now it performs no iteration instead,
which is more helpful to users of the API: they won't randomly fail
just because somebody deleted the `refs` folder.
|
e9853dd640cf4545134aa6e0d093e560af090a2b
|
fix
|
https://github.com/Byron/gitoxide/commit/e9853dd640cf4545134aa6e0d093e560af090a2b
|
instead of erroring if loose iteration is performed on missing base, correctly yield zero references. (#595)
Previously it reported an error; now it performs no iteration instead,
which is more helpful to users of the API: they won't randomly fail
just because somebody deleted the `refs` folder.
|
diff --git a/iter.rs b/iter.rs index bc63ef1..6fcb524 100644 --- a/iter.rs +++ b/iter.rs @@ -9,28 +9,19 @@ use crate::{file::iter::LooseThenPacked, store_impl::file, BString, FullName}; pub(in crate::store_impl::file) struct SortedLoosePaths { pub(crate) base: PathBuf, filename_prefix: Option<BString>, - file_walk: DirEntryIter, + file_walk: Option<DirEntryIter>, } impl SortedLoosePaths { - pub fn at( - path: impl AsRef<Path>, - base: impl Into<PathBuf>, - filename_prefix: Option<BString>, - ) -> std::io::Result<Self> { + pub fn at(path: impl AsRef<Path>, base: impl Into<PathBuf>, filename_prefix: Option<BString>) -> Self { let path = path.as_ref(); - if !path.is_dir() { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - format!("loose reference iteration path does not exist: \\"{}\\"", path.display()), - )); - } - let file_walk = git_features::fs::walkdir_sorted_new(path).into_iter(); - Ok(SortedLoosePaths { + SortedLoosePaths { base: base.into(), filename_prefix, - file_walk, - }) + file_walk: path + .is_dir() + .then(|| git_features::fs::walkdir_sorted_new(path).into_iter()), + } } } @@ -38,7 +29,7 @@ impl Iterator for SortedLoosePaths { type Item = std::io::Result<(PathBuf, FullName)>; fn next(&mut self) -> Option<Self::Item> { - for entry in self.file_walk.by_ref() { + for entry in self.file_walk.as_mut()?.by_ref() { match entry { Ok(entry) => { if !entry.file_type().is_file() { diff --git a/overlay_iter.rs b/overlay_iter.rs index 373806b..34874f6 100644 --- a/overlay_iter.rs +++ b/overlay_iter.rs @@ -253,23 +253,23 @@ impl<'a> IterInfo<'a> { } } - fn into_iter(self) -> std::io::Result<Peekable<SortedLoosePaths>> { - Ok(match self { - IterInfo::Base { base } => SortedLoosePaths::at(base.join("refs"), base, None)?, + fn into_iter(self) -> Peekable<SortedLoosePaths> { + match self { + IterInfo::Base { base } => SortedLoosePaths::at(base.join("refs"), base, None), IterInfo::BaseAndIterRoot { base, iter_root, prefix: _, - } => SortedLoosePaths::at(iter_root, base, None)?, - IterInfo::PrefixAndBase { base, prefix } => SortedLoosePaths::at(base.join(prefix), base, None)?, + } => SortedLoosePaths::at(iter_root, base, None), + IterInfo::PrefixAndBase { base, prefix } => SortedLoosePaths::at(base.join(prefix), base, None), IterInfo::ComputedIterationRoot { iter_root, base, prefix: _, remainder, - } => SortedLoosePaths::at(iter_root, base, remainder)?, + } => SortedLoosePaths::at(iter_root, base, remainder), } - .peekable()) + .peekable() } fn from_prefix(base: &'a Path, prefix: Cow<'a, Path>) -> std::io::Result<Self> { @@ -397,8 +397,8 @@ impl file::Store { ), None => None, }, - iter_git_dir: git_dir_info.into_iter()?, - iter_common_dir: common_dir_info.map(IterInfo::into_iter).transpose()?, + iter_git_dir: git_dir_info.into_iter(), + iter_common_dir: common_dir_info.map(IterInfo::into_iter), buf: Vec::new(), namespace: self.namespace.as_ref(), }) diff --git a/create_or_update.rs b/create_or_update.rs index 6373a27..b951dd8 100644 --- a/create_or_update.rs +++ b/create_or_update.rs @@ -77,8 +77,9 @@ mod collisions { "packed-refs can store everything in case-insensitive manner" ); - assert!( - store.loose_iter().is_err(), + assert_eq!( + store.loose_iter()?.count(), + 0, "refs/ directory isn't present as there is no loose ref - it removed every up to the base dir" );
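The shape of the fix is to store the directory walker as an `Option<DirEntryIter>`: when the base directory is missing, `next()` short-circuits through `as_mut()?` and the iterator simply yields nothing instead of erroring. A loose Python analogue of that pattern (illustrative only; the real code is Rust):

```python
import os

def sorted_loose_paths(path):
    """Yield file paths under `path`; yield nothing if the directory is absent."""
    if not os.path.isdir(path):
        return  # empty iteration, mirroring file_walk = None in the Rust code
    for root, _dirs, files in os.walk(path):
        for name in sorted(files):
            yield os.path.join(root, name)

assert list(sorted_loose_paths("/no/such/refs")) == []
```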
|
|
refactor(ux): return expression from `Table.info`
BREAKING CHANGE: `Table.info` now returns an expression
|
71cc0e092695435fe4be79e2b12002388af86a97
|
refactor
|
https://github.com/ibis-project/ibis/commit/71cc0e092695435fe4be79e2b12002388af86a97
|
return expression from `Table.info`
BREAKING CHANGE: `Table.info` now returns an expression
|
diff --git a/test_generic.py b/test_generic.py index 666d29c..c84f84c 100644 --- a/test_generic.py +++ b/test_generic.py @@ -1,13 +1,11 @@ import contextlib import decimal -import io -from contextlib import redirect_stdout from operator import invert, methodcaller, neg import numpy as np import pandas as pd import pytest -import sqlalchemy.exc +import sqlalchemy as sa import toolz from pytest import param @@ -548,25 +546,26 @@ def test_order_by_random(alltypes): assert not r1.equals(r2) -def check_table_info(buf, schema): - info_str = buf.getvalue() - - assert "Null" in info_str - assert all(type.__class__.__name__ in info_str for type in schema.types) - assert all(name in info_str for name in schema.names) - - -def test_table_info_buf(alltypes): - buf = io.StringIO() - alltypes.info(buf=buf) - check_table_info(buf, alltypes.schema()) - - -def test_table_info_no_buf(alltypes): - buf = io.StringIO() - with redirect_stdout(buf): - alltypes.info() - check_table_info(buf, alltypes.schema()) [email protected]( + ["druid"], + raises=sa.exc.ProgrammingError, + reason="Druid only supports trivial unions", +) [email protected](["datafusion"], raises=com.OperationNotDefinedError) +def test_table_info(alltypes): + expr = alltypes.info() + df = expr.execute() + assert alltypes.columns == list(df.name) + assert expr.columns == [ + "name", + "type", + "nullable", + "nulls", + "non_nulls", + "null_frac", + "pos", + ] + assert expr.columns == list(df.columns) @pytest.mark.parametrize( @@ -750,7 +749,7 @@ def test_select_filter_select(backend, alltypes, df): @pytest.mark.notimpl(["datafusion"], raises=com.OperationNotDefinedError) [email protected](["mssql"], raises=sqlalchemy.exc.OperationalError) [email protected](["mssql"], raises=sa.exc.OperationalError) def test_between(backend, alltypes, df): expr = alltypes.double_col.between(5, 10) result = expr.execute().rename("double_col") diff --git a/relations.py b/relations.py index 1918895..ae6a677 100644 --- a/relations.py +++ b/relations.py @@ -5,10 +5,9 @@ import contextlib import functools import itertools import re -import sys import warnings from keyword import iskeyword -from typing import IO, TYPE_CHECKING, Callable, Iterable, Literal, Mapping, Sequence +from typing import TYPE_CHECKING, Callable, Iterable, Literal, Mapping, Sequence from public import public @@ -1936,105 +1935,55 @@ class Table(Expr, _FixedTextJupyterMixin): result_columns.append(column) return self[result_columns] - def info(self, buf: IO[str] | None = None) -> None: - """Show summary information about a table. - - Parameters - ---------- - buf - A writable buffer, defaults to stdout + def info(self) -> Table: + """Return summary information about a table. Returns ------- - None - This method prints to a buffer (stdout by default) and returns nothing. 
+ Table + Summary of `self` Examples -------- >>> import ibis >>> ibis.options.interactive = True >>> t = ibis.examples.penguins.fetch(table_name="penguins") - >>> t - ┏━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━┓ - ┃ species ┃ island ┃ bill_length_mm ┃ bill_depth_mm ┃ flipper_length_mm ┃ … ┃ - ┡━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━┩ - │ string │ string │ float64 │ float64 │ int64 │ … │ - ├─────────┼───────────┼────────────────┼───────────────┼───────────────────┼───┤ - │ Adelie │ Torgersen │ 39.1 │ 18.7 │ 181 │ … │ - │ Adelie │ Torgersen │ 39.5 │ 17.4 │ 186 │ … │ - │ Adelie │ Torgersen │ 40.3 │ 18.0 │ 195 │ … │ - │ Adelie │ Torgersen │ nan │ nan │ ∅ │ … │ - │ Adelie │ Torgersen │ 36.7 │ 19.3 │ 193 │ … │ - │ Adelie │ Torgersen │ 39.3 │ 20.6 │ 190 │ … │ - │ Adelie │ Torgersen │ 38.9 │ 17.8 │ 181 │ … │ - │ Adelie │ Torgersen │ 39.2 │ 19.6 │ 195 │ … │ - │ Adelie │ Torgersen │ 34.1 │ 18.1 │ 193 │ … │ - │ Adelie │ Torgersen │ 42.0 │ 20.2 │ 190 │ … │ - │ … │ … │ … │ … │ … │ … │ - └─────────┴───────────┴────────────────┴───────────────┴───────────────────┴───┘ - - Default implementation prints to stdout - - >>> t.info() # doctest: +SKIP - Summary of penguins - 344 rows - ┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓ - ┃ Name ┃ Type ┃ # Nulls ┃ % Nulls ┃ - ┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩ - │ species │ String(nullable=True) │ 0 │ 0.00 │ - │ island │ String(nullable=True) │ 0 │ 0.00 │ - │ bill_length_mm │ Float64(nullable=True) │ 2 │ 0.58 │ - │ bill_depth_mm │ Float64(nullable=True) │ 2 │ 0.58 │ - │ flipper_length_mm │ Int64(nullable=True) │ 2 │ 0.58 │ - │ body_mass_g │ Int64(nullable=True) │ 2 │ 0.58 │ - │ sex │ String(nullable=True) │ 11 │ 3.20 │ - │ year │ Int64(nullable=True) │ 0 │ 0.00 │ - └───────────────────┴────────────────────────┴─────────┴─────────┘ - - Store the info into a buffer - - >>> import io - >>> buf = io.StringIO() - >>> t.info(buf=buf) - >>> "Summary of penguins" in buf.getvalue() - True + >>> t.info() + ┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━┳━━━┓ + ┃ name ┃ type ┃ nullable ┃ nulls ┃ non_nulls ┃ null_frac ┃ … ┃ + ┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━╇━━━┩ + │ string │ string │ boolean │ int64 │ int64 │ float64 │ … │ + ├───────────────────┼─────────┼──────────┼───────┼───────────┼───────────┼───┤ + │ species │ string │ True │ 0 │ 344 │ 0.000000 │ … │ + │ island │ string │ True │ 0 │ 344 │ 0.000000 │ … │ + │ bill_length_mm │ float64 │ True │ 2 │ 342 │ 0.005814 │ … │ + │ bill_depth_mm │ float64 │ True │ 2 │ 342 │ 0.005814 │ … │ + │ flipper_length_mm │ int64 │ True │ 2 │ 342 │ 0.005814 │ … │ + │ body_mass_g │ int64 │ True │ 2 │ 342 │ 0.005814 │ … │ + │ sex │ string │ True │ 11 │ 333 │ 0.031977 │ … │ + │ year │ int64 │ True │ 0 │ 344 │ 0.000000 │ … │ + └───────────────────┴─────────┴──────────┴───────┴───────────┴───────────┴───┘ """ - import rich - import rich.table - from rich.pretty import Pretty - - if buf is None: - buf = sys.stdout - - metrics = [self[col].count().name(col) for col in self.columns] - metrics.append(self.count().name("nrows")) - - schema = self.schema() - - *items, (_, n) = self.aggregate(metrics).execute().squeeze().items() - - op = self.op() - title = getattr(op, "name", type(op).__name__) - - table = rich.table.Table(title=f"Summary of {title}\\n{n:d} rows") - - table.add_column("Name", justify="left") - table.add_column("Type", justify="left") - 
table.add_column("# Nulls", justify="right") - table.add_column("% Nulls", justify="right") - - for column, non_nulls in items: - table.add_row( - column, - Pretty(schema[column]), - str(n - non_nulls), - f"{100 * (1.0 - non_nulls / n):>3.2f}", + from ibis import literal as lit + + aggs = [] + + for pos, colname in enumerate(self.columns): + col = self[colname] + typ = col.type() + agg = self.select( + isna=ibis.case().when(col.isnull(), 1).else_(0).end() + ).agg( + name=lit(colname), + type=lit(str(typ)), + nullable=lit(int(typ.nullable)).cast("bool"), + nulls=lambda t: t.isna.sum(), + non_nulls=lambda t: (1 - t.isna).sum(), + null_frac=lambda t: t.isna.mean(), + pos=lit(pos), ) - - console = rich.get_console() - with console.capture() as capture: - console.print(table) - buf.write(capture.get()) + aggs.append(agg) + return ibis.union(*aggs).order_by(ibis.asc("pos")) def set_column(self, name: str, expr: ir.Value) -> Table: """Replace an existing column with a new expression. diff --git a/pyproject.toml b/pyproject.toml index 141bef5..ac4eaf6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -300,6 +300,10 @@ filterwarnings = [ "ignore:'cgi' is deprecated and slated for removal in Python 3\\\\.13:DeprecationWarning", # warnings from google's use of pkg_resources "ignore:pkg_resources is deprecated as an API:DeprecationWarning", + # sqlalchemy warns about mysql's inability to cast to bool; + # this has no effect on ibis's output because we convert types after + # execution + "ignore:Datatype BOOL does not support CAST on MySQL/MariaDB; the cast will be skipped:sqlalchemy.exc.SAWarning" ] empty_parameter_set_mark = "fail_at_collect" markers = [
|
|
test(trino): setup `memory` catalog and enable should-pass tests
|
602999d42721690c8d3b066dac92c1a017110fc8
|
test
|
https://github.com/ibis-project/ibis/commit/602999d42721690c8d3b066dac92c1a017110fc8
|
setup `memory` catalog and enable should-pass tests
|
diff --git a/docker-compose.yml b/docker-compose.yml index 36f584d..29c9921 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -166,6 +166,7 @@ services: - trino volumes: - $PWD/docker/trino/catalog/postgresql.properties:/etc/trino/catalog/postgresql.properties:ro + - $PWD/docker/trino/catalog/memory.properties:/etc/trino/catalog/memory.properties:ro - $PWD/docker/trino/jvm.config:/etc/trino/jvm.config:ro networks: diff --git a/memory.properties b/memory.properties index fab3078..baee8fe 100644 --- a/memory.properties +++ b/memory.properties @@ -0,0 +1 @@ +connector.name=memory diff --git a/test_client.py b/test_client.py index 5ba5367..10c9581 100644 --- a/test_client.py +++ b/test_client.py @@ -144,13 +144,12 @@ def test_rename_table(con, temp_table, new_schema): @mark.notimpl(["bigquery", "clickhouse", "datafusion", "polars"]) @mark.never(["impala", "pyspark"], reason="No non-nullable datatypes") [email protected]( + ["trino"], reason="trino doesn't support NOT NULL in its in-memory catalog" +) def test_nullable_input_output(con, temp_table): sch = ibis.schema( - [ - ('foo', 'int64'), - ('bar', ibis.expr.datatypes.int64(nullable=False)), - ('baz', 'boolean'), - ] + [('foo', 'int64'), ('bar', dt.int64(nullable=False)), ('baz', 'boolean')] ) con.create_table(temp_table, schema=sch) diff --git a/test_param.py b/test_param.py index cd85448..3abcdb3 100644 --- a/test_param.py +++ b/test_param.py @@ -70,15 +70,7 @@ def test_scalar_param_array(con): @pytest.mark.notimpl( - [ - "clickhouse", - "datafusion", - "impala", - "postgres", - "pyspark", - "snowflake", - "trino", - ] + ["clickhouse", "datafusion", "impala", "postgres", "pyspark", "snowflake"] ) @pytest.mark.never( ["mysql", "sqlite", "mssql"], diff --git a/test_sql.py b/test_sql.py index e7ae9d7..9db948c 100644 --- a/test_sql.py +++ b/test_sql.py @@ -59,7 +59,7 @@ no_structs = mark.never( reason="structs not supported in the backend", ) no_struct_literals = mark.notimpl( - ["bigquery", "postgres", "snowflake", "mssql", "trino"], + ["bigquery", "postgres", "snowflake", "mssql"], reason="struct literals are not yet implemented", ) not_sql = mark.never( diff --git a/test_struct.py b/test_struct.py index 69d5b96..28ab317 100644 --- a/test_struct.py +++ b/test_struct.py @@ -10,7 +10,7 @@ import ibis.expr.datatypes as dt pytestmark = [ pytest.mark.never(["mysql", "sqlite", "mssql"], reason="No struct support"), pytest.mark.notyet(["impala"]), - pytest.mark.notimpl(["datafusion", "pyspark", "trino"]), + pytest.mark.notimpl(["datafusion", "pyspark"]), ] @@ -18,10 +18,15 @@ pytestmark = [ @pytest.mark.parametrize("field", ["a", "b", "c"]) def test_single_field(backend, struct, struct_df, field): expr = struct.abc[field] - result = expr.execute() - expected = struct_df.abc.map( - lambda value: value[field] if isinstance(value, dict) else value - ).rename(field) + result = expr.execute().sort_values().reset_index(drop=True) + expected = ( + struct_df.abc.map( + lambda value: value[field] if isinstance(value, dict) else value + ) + .rename(field) + .sort_values() + .reset_index(drop=True) + ) backend.assert_series_equal(result, expected) @@ -29,7 +34,9 @@ def test_single_field(backend, struct, struct_df, field): def test_all_fields(struct, struct_df): result = struct.abc.execute() expected = struct_df.abc - tm.assert_series_equal(result, expected) + assert set(row if pd.isna(row) else tuple(row.items()) for row in result) == set( + row if pd.isna(row) else tuple(row.items()) for row in expected + ) _SIMPLE_DICT = dict(a=1, b="2", c=3.0) 
diff --git a/conftest.py b/conftest.py index 2befe47..ca2f491 100644 --- a/conftest.py +++ b/conftest.py @@ -2,11 +2,13 @@ import os from pathlib import Path from typing import Any, Generator +import pandas as pd import pytest import ibis from ibis.backends.conftest import _random_identifier from ibis.backends.tests.base import BackendTest, RoundHalfToEven +from ibis.backends.tests.data import struct_types TRINO_USER = os.environ.get( 'IBIS_TEST_TRINO_USER', os.environ.get('TRINO_USER', 'user') @@ -20,7 +22,7 @@ TRINO_HOST = os.environ.get( TRINO_PORT = os.environ.get('IBIS_TEST_TRINO_PORT', os.environ.get('TRINO_PORT', 8080)) IBIS_TEST_TRINO_DB = os.environ.get( 'IBIS_TEST_TRINO_DATABASE', - os.environ.get('TRINO_DATABASE', 'postgresql'), + os.environ.get('TRINO_DATABASE', 'memory'), ) @@ -29,7 +31,7 @@ class TestConf(BackendTest, RoundHalfToEven): # for numeric and decimal returned_timestamp_unit = 's' - supports_structs = False + supports_structs = True @staticmethod def _load_data(data_dir: Path, script_dir: Path, **_: Any) -> None: @@ -42,9 +44,49 @@ class TestConf(BackendTest, RoundHalfToEven): script_dir Location of scripts defining schemas """ - from ibis.backends.postgres.tests.conftest import TestConf + from ibis.backends.postgres.tests.conftest import ( + IBIS_TEST_POSTGRES_DB, + PG_HOST, + PG_PASS, + PG_USER, + ) + from ibis.backends.postgres.tests.conftest import TestConf as PostgresTestConf + + PostgresTestConf._load_data(data_dir, script_dir, port=5433) + pgcon = ibis.postgres.connect( + host=PG_HOST, + port=5433, + user=PG_USER, + password=PG_PASS, + database=IBIS_TEST_POSTGRES_DB, + ) - TestConf._load_data(data_dir, script_dir, port=5433) + con = TestConf.connect(data_dir) + + # mirror the existing tables + unsupported_memory_tables = {"intervals", "not_supported_intervals"} + for table in pgcon.list_tables(): + if table not in unsupported_memory_tables: + source = f"postgresql.public.{table}" + dest = f"memory.default.{table}" + with con.begin() as c: + c.execute(f"DROP TABLE IF EXISTS {dest}") + c.execute(f"CREATE TABLE {dest} AS SELECT * FROM {source}") + + selects = [] + for row in struct_types.abc: + if pd.isna(row): + datarow = "NULL" + else: + datarow = ", ".join( + "NULL" if pd.isna(val) else repr(val) for val in row.values() + ) + datarow = f"CAST(ROW({datarow}) AS ROW(a DOUBLE, b VARCHAR, c BIGINT))" + selects.append(f"SELECT {datarow} AS abc") + + with con.begin() as c: + c.execute("DROP TABLE IF EXISTS struct") + c.execute(f"CREATE TABLE struct AS {' UNION ALL '.join(selects)}") @staticmethod def connect(data_directory: Path): @@ -54,7 +96,7 @@ class TestConf(BackendTest, RoundHalfToEven): user=TRINO_USER, password=TRINO_PASS, database=IBIS_TEST_TRINO_DB, - schema="public", + schema="default", ) @property
|
|
test(datafusion): execute udf
|
9c58b299650e58c8d9e9c628d4583861dd89e282
|
test
|
https://github.com/ibis-project/ibis/commit/9c58b299650e58c8d9e9c628d4583861dd89e282
|
execute udf
|
diff --git a/test_udf.py b/test_udf.py index fa75e08..02818e1 100644 --- a/test_udf.py +++ b/test_udf.py @@ -74,4 +74,4 @@ def test_builtin_agg_udf_filtered(con): def median(a: float, where: bool = True) -> float: """Median of a column.""" - median(con.tables.batting.G) + median(con.tables.batting.G).execute()
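The tiny change matters because ibis expressions are lazy: `median(con.tables.batting.G)` only builds an expression, so without `.execute()` the DataFusion backend never runs the UDF and the test passes vacuously. A hedged sketch of the full pattern (the `@udf.agg.builtin` decorator is assumed from ibis's builtin-UDF API of this era; `con` stands for the test's backend connection fixture):

```python
from ibis import udf

@udf.agg.builtin
def median(a: float, where: bool = True) -> float:
    """Median of a column."""

expr = median(con.tables.batting.G)  # lazy: only constructs an expression
expr.execute()                       # forces the backend to actually run the UDF
```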
|
|
docs: use current version from core package
As lerna does not update the root package.json version.
|
9dd345dec8bb08c8217060f10f76a056b9d705ee
|
docs
|
https://github.com/mikro-orm/mikro-orm/commit/9dd345dec8bb08c8217060f10f76a056b9d705ee
|
use current version from core package
As lerna does not update the root package.json version.
|
diff --git a/docusaurus.config.js b/docusaurus.config.js index c4b3bf7..53761f0 100644 --- a/docusaurus.config.js +++ b/docusaurus.config.js @@ -5,7 +5,7 @@ * LICENSE file in the root directory of this source tree. */ -const pkg = require('../package.json'); +const pkg = require('../packages/core/package.json'); const versions = require('./versions.json'); module.exports = {
|
|
fix(core): try to fix merging of large collections loaded via joined strategy
Closes #4694
|
faae84e19b40f0a5fcbf057cce5370602b34ec80
|
fix
|
https://github.com/mikro-orm/mikro-orm/commit/faae84e19b40f0a5fcbf057cce5370602b34ec80
|
try to fix merging of large collections loaded via joined strategy
Closes #4694
|
diff --git a/EntityLoader.ts b/EntityLoader.ts index 219237b..355c3a3 100644 --- a/EntityLoader.ts +++ b/EntityLoader.ts @@ -234,14 +234,20 @@ export class EntityLoader { if (mapToPk) { children.forEach(child => { const pk = child.__helper.__data[prop.mappedBy] ?? child[prop.mappedBy]; - const key = helper(this.em.getReference(prop.type, pk)).getSerializedPrimaryKey(); - map[key].push(child as T); + + if (pk) { + const key = helper(this.em.getReference(prop.type, pk)).getSerializedPrimaryKey(); + map[key].push(child as T); + } }); } else { children.forEach(child => { const entity = child.__helper.__data[prop.mappedBy] ?? child[prop.mappedBy]; - const key = helper(entity).getSerializedPrimaryKey(); - map[key].push(child as T); + + if (entity) { + const key = helper(entity).getSerializedPrimaryKey(); + map[key].push(child as T); + } }); } diff --git a/AbstractSqlDriver.ts b/AbstractSqlDriver.ts index 4486e0e..805526c 100644 --- a/AbstractSqlDriver.ts +++ b/AbstractSqlDriver.ts @@ -780,7 +780,7 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection if (map[pk]) { joinedProps.forEach(hint => { if (Array.isArray(map[pk][hint.field]) && Array.isArray(item[hint.field])) { - map[pk][hint.field].push(...item[hint.field]); + item[hint.field].forEach((el: T) => map[pk][hint.field].push(el)); } }); } else { diff --git a/adding-composite-fk.postgres.test.ts b/adding-composite-fk.postgres.test.ts index 3544dde..38f23ae 100644 --- a/adding-composite-fk.postgres.test.ts +++ b/adding-composite-fk.postgres.test.ts @@ -114,7 +114,7 @@ describe('adding m:1 with composite PK (FK as PK + scalar PK) (GH 1687)', () => beforeAll(async () => { orm = await MikroORM.init({ entities: [City, User, Country, State], - dbName: `mikro_orm_test_gh_1687`, + dbName: `mikro_orm_test_composite_fks`, driver: PostgreSqlDriver, }); await orm.schema.ensureDatabase();
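Two independent guards make up this fix: children whose foreign key or entity reference is unset are now skipped instead of being passed to `helper(...)`, and merged rows are pushed one by one, since spreading a very large array into `push(...)` can exceed the JavaScript call-stack limit (the #4694 symptom). A rough Python sketch of the grouping guard only (the stack-limit issue is JS-specific):

```python
from collections import defaultdict

def group_children_by_parent(children, mapped_by):
    """Group child rows under their parent's PK, skipping rows with no FK set."""
    groups = defaultdict(list)
    for child in children:
        pk = child.get(mapped_by)
        if pk:  # mirrors the new `if (pk)` / `if (entity)` truthiness guards
            groups[pk].append(child)
    return groups

rows = [{"author": 1}, {"author": None}, {"author": 1}]
assert dict(group_children_by_parent(rows, "author")) == {1: [{"author": 1}, {"author": 1}]}
```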
|
|
feat(els): improve doc-comment display
|
1ef11fc2b9cc30918670cea1fd155e2fae9722f6
|
feat
|
https://github.com/erg-lang/erg/commit/1ef11fc2b9cc30918670cea1fd155e2fae9722f6
|
improve doc-comment display
|
diff --git a/hover.rs b/hover.rs index 4896d0b..62851d8 100644 --- a/hover.rs +++ b/hover.rs @@ -246,13 +246,13 @@ impl<Checker: BuildRunnable> Server<Checker> { LanguageCode::Erg => { MarkedString::from_language_code("erg".into(), code_block) } - LanguageCode::Python => { + LanguageCode::Python | LanguageCode::ErgOrPython => { MarkedString::from_language_code("python".into(), code_block) } _ => MarkedString::from_markdown(code_block), }; contents.push(marked); - if lang.is_erg() { + if lang.is_pl() { next!(def_pos, default_code_block, contents, prev_token, token); } else { break; diff --git a/lang.rs b/lang.rs index 3b1384a..e1fa7ee 100644 --- a/lang.rs +++ b/lang.rs @@ -8,6 +8,7 @@ pub enum LanguageCode { TraditionalChinese, Erg, Python, + ErgOrPython, } impl FromStr for LanguageCode { @@ -20,6 +21,7 @@ impl FromStr for LanguageCode { "traditional_chinese" | "zh-TW" => Ok(Self::TraditionalChinese), "erg" => Ok(Self::Erg), "python" => Ok(Self::Python), + "erg,python" | "python,erg" => Ok(Self::ErgOrPython), _ => Err(()), } } @@ -34,6 +36,7 @@ impl From<LanguageCode> for &str { LanguageCode::TraditionalChinese => "traditional_chinese", LanguageCode::Erg => "erg", LanguageCode::Python => "python", + LanguageCode::ErgOrPython => "erg,python", } } } @@ -57,6 +60,9 @@ impl LanguageCode { pub const fn python_patterns() -> [&'static str; 2] { ["python", "python"] } + pub const fn erg_or_python_patterns() -> [&'static str; 2] { + ["erg,python", "python,erg"] + } pub const fn patterns(&self) -> [&'static str; 2] { match self { Self::English => Self::en_patterns(), @@ -65,6 +71,7 @@ impl LanguageCode { Self::TraditionalChinese => Self::zh_tw_patterns(), Self::Erg => Self::erg_patterns(), Self::Python => Self::python_patterns(), + Self::ErgOrPython => Self::erg_or_python_patterns(), } } @@ -81,10 +88,13 @@ impl LanguageCode { matches!(self, Self::TraditionalChinese) } pub const fn is_erg(&self) -> bool { - matches!(self, Self::Erg) + matches!(self, Self::Erg | Self::ErgOrPython) } pub const fn is_python(&self) -> bool { - matches!(self, Self::Python) + matches!(self, Self::Python | Self::ErgOrPython) + } + pub const fn is_pl(&self) -> bool { + matches!(self, Self::Erg | Self::Python | Self::ErgOrPython) } pub const fn matches_feature(&self) -> bool { @@ -97,8 +107,9 @@ impl LanguageCode { Self::Japanese => cfg!(feature = "japanese"), Self::SimplifiedChinese => cfg!(feature = "simplified_chinese"), Self::TraditionalChinese => cfg!(feature = "traditional_chinese"), - Self::Erg => true, + Self::Erg => !cfg!(feature = "py_compat"), Self::Python => cfg!(feature = "py_compat"), + Self::ErgOrPython => true, } } pub fn as_str(&self) -> &str { diff --git a/classes.rs b/classes.rs index 80dcd02..871566f 100644 --- a/classes.rs +++ b/classes.rs @@ -740,7 +740,7 @@ impl Context { Immutable, Visibility::BUILTIN_PUBLIC, ); - str_.register_builtin_erg_impl( + str_.register_py_builtin( FUNC_ENCODE, fn_met( Str, @@ -749,8 +749,8 @@ impl Context { vec![kw(KW_ENCODING, Str), kw(KW_ERRORS, Str)], mono(BYTES), ), - Immutable, - Visibility::BUILTIN_PUBLIC, + Some(FUNC_ENCODE), + 60, ); str_.register_builtin_erg_impl( FUNC_FORMAT, @@ -783,12 +783,11 @@ impl Context { Visibility::BUILTIN_PUBLIC, Some(FUNC_STARTSWITH), ); - str_.register_builtin_py_impl( + str_.register_py_builtin( FUNC_ENDSWITH, fn1_met(Str, Str, Bool), - Immutable, - Visibility::BUILTIN_PUBLIC, Some(FUNC_ENDSWITH), + 69, ); str_.register_builtin_py_impl( FUNC_SPLIT, @@ -818,12 +817,12 @@ impl Context { ); str_.register_builtin_py_impl( FUNC_JOIN, - 
fn1_met(unknown_len_array_t(Str), Str, Str), + fn1_met(Str, poly(ITERABLE, vec![ty_tp(Str)]), Str), Immutable, Visibility::BUILTIN_PUBLIC, Some(FUNC_JOIN), ); - str_.register_builtin_py_impl( + str_.register_py_builtin( FUNC_INDEX, fn_met( Str, @@ -832,9 +831,8 @@ impl Context { vec![kw(KW_START, Nat), kw(KW_END, Nat)], or(Nat, Never), ), - Immutable, - Visibility::BUILTIN_PUBLIC, Some(FUNC_INDEX), + 126, ); str_.register_builtin_py_impl( FUNC_RINDEX, @@ -849,7 +847,7 @@ impl Context { Visibility::BUILTIN_PUBLIC, Some(FUNC_RINDEX), ); - str_.register_builtin_py_impl( + str_.register_py_builtin( FUNC_FIND, fn_met( Str, @@ -858,9 +856,8 @@ impl Context { vec![kw(KW_START, Nat), kw(KW_END, Nat)], or(Nat, v_enum(set! {(-1).into()})), ), - Immutable, - Visibility::BUILTIN_PUBLIC, Some(FUNC_FIND), + 93, ); str_.register_builtin_py_impl( FUNC_RFIND, @@ -875,7 +872,7 @@ impl Context { Visibility::BUILTIN_PUBLIC, Some(FUNC_RFIND), ); - str_.register_builtin_py_impl( + str_.register_py_builtin( FUNC_COUNT, fn_met( Str, @@ -884,9 +881,8 @@ impl Context { vec![kw(KW_START, Nat), kw(KW_END, Nat)], Nat, ), - Immutable, - Visibility::BUILTIN_PUBLIC, Some(FUNC_COUNT), + 43, ); str_.register_py_builtin( FUNC_CAPITALIZE, diff --git a/Str.d.er b/Str.d.er index be666b6..e91699b 100644 --- a/Str.d.er +++ b/Str.d.er @@ -6,7 +6,7 @@ More specifically, make the first character have upper case and the rest lower case. ''' - '''erg + '''erg,python assert "hello".capitalize() == "Hello" assert "HELLO".capitalize() == "Hello" ''' @@ -14,7 +14,7 @@ ''' Return a version of the string suitable for caseless comparisons. ''' - '''erg + '''erg,python assert "camelCase".casefold() == "camelcase" assert "CamelCase".casefold() == "camelcase" assert "FULLCAPS".casefold() == "fullcaps" @@ -26,8 +26,158 @@ Padding is done using the specified fill character (default is a space). ''' - '''erg + '''erg,python assert "hello".center(10) == " hello " assert "hello".center(10, "-") == "--hello---" ''' - center: (self: .Str, width: Int, fillchar := .Str) -> .Str + center: (self: .Str, width: Nat, fillchar := .Str) -> .Str + ''' + Return the number of non-overlapping occurrences of substring `sub` in + string `S[start:end]`. Optional arguments `start` and `end` are + interpreted as in slice notation. + ''' + '''erg,python + assert "hello".count("l") == 2 + assert "hello".count("l", 0, 3) == 1 + ''' + count: (self: .Str, sub: .Str, start := Nat, end := Nat) -> Nat + ''' + Encode the string using the codec registered for encoding. + + * `encoding`: + The encoding in which to encode the string. + * `errors`: + The error handling scheme to use for encoding errors. + The default is `"strict"` meaning that encoding errors raise a + `UnicodeEncodeError`. Other possible values are `"ignore"`, `"replace"` and + `"xmlcharrefreplace"` as well as any other name registered with + `codecs.register_error` that can handle `UnicodeEncodeErrors`. + ''' + '''erg + assert "hello".encode() == bytes "hello", "utf-8" + assert "hello".encode("utf-8") == bytes "hello", "utf-8" + ''' + encode: (self: .Str, encoding := .Str, errors := .Str) -> Bytes + ''' + Return True if the string ends with the specified suffix, False otherwise. + ''' + '''erg,python + assert "hello".endswith("o") + assert "hello".endswith("llo") + assert not "hello".endswith("llo", 0, 2) + ''' + endswith: (self: .Str, suffix: .Str, start := Nat, end := Nat) -> Bool + ''' + Return a copy where all tab characters are expanded using spaces. 
+ + If `tabsize` is not given, a tab size of 8 characters is assumed. + ''' + '''erg,python + assert "hello\\tworld".expandtabs() == "hello world" + assert "hello\\tworld".expandtabs(4) == "hello world" + ''' + expandtabs: (self: .Str, tabsize := Nat) -> .Str + ''' + Return the lowest index in `S` where substring `sub` is found, + such that sub is contained within `S[start:end]`. + + Optional arguments `start` and `end` are interpreted as in slice notation. + + Return -1 on failure. + ''' + '''erg,python + assert "hello".find("l") == 2 + assert "hello".find("l", 3) == 3 + assert "hello".find("l", 3, 4) == -1 + ''' + find: (self: .Str, sub: .Str, start := Nat, end := Nat) -> Nat or {-1} + ''' + Return a formatted version of `S`, using substitutions from `args` and `kwargs`. + The substitutions are identified by braces ('{' and '}'). + ''' + '''erg + assert "hello".format() == "hello" + assert "hello {}".format("world") == "hello world" + assert "hello {0}".format("world") == "hello world" + assert "hello {0} {1}".format("world", "again") == "hello world again" + assert "hello {1} {0}".format("world", "again") == "hello again world" + ''' + format: (self: .Str, *args: Obj) -> .Str + ''' + Return a formatted version of `S`, using substitutions from `mapping`. + The substitutions are identified by braces ('{' and '}'). + ''' + '''erg,python + assert "hello {name}".format_map({"name": "world"}) == "hello world" + ''' + format_map: (self: .Str, mapping: .Dict) -> .Str + ''' + Return the lowest index in `S` where substring `sub` is found, + such that `sub` is contained within `S[start:end]`. + + Optional arguments `start` and `end` are interpreted as in slice notation. + + Raises `ValueError` when the substring is not found. + ''' + '''erg,python + assert "hello".index("l") == 2 + assert "hello".index("l", 3) == 3 + ''' + index: (self: .Str, sub: .Str, start := Nat, end := Nat) -> Nat + isalnum: (self: .Str) -> Bool + isalpha: (self: .Str) -> Bool + isascii: (self: .Str) -> Bool + isdecimal: (self: .Str) -> Bool + isdigit: (self: .Str) -> Bool + isidentifier: (self: .Str) -> Bool + islower: (self: .Str) -> Bool + isnumeric: (self: .Str) -> Bool + isprintable: (self: .Str) -> Bool + isspace: (self: .Str) -> Bool + istitle: (self: .Str) -> Bool + isupper: (self: .Str) -> Bool + join: (self: .Str, iterable: Iterable Str) -> .Str + ljust: (self: .Str, width: Nat, fillchar := .Str) -> .Str + lower: (self: .Str) -> .Str + lstrip: (self: .Str, chars := .Str) -> .Str + ''' + Return a translation table usable for `str.translate()`. + + If there is only one argument, it must be a dictionary mapping Unicode + ordinals (integers) or characters to Unicode ordinals, strings or None. + Character keys will be then converted to ordinals. + If there are two arguments, they must be strings of equal length, and + in the resulting dictionary, each character in `x` will be mapped to the + character at the same position in `y`. If there is a third argument, it + must be a string, whose characters will be mapped to None in the result. + ''' + maketrans: ((self: .Str, x: {Str: Str}) -> {Nat: Str}) and ((self: .Str, x: .Str, y: .Str) -> {Nat: Nat}) and (self: .Str, x: .Str, y: .Str, z := .Str) -> {Nat: Nat, Nat: NoneType} + ''' + Partition the string into three parts using the given separator. + + This will search for the separator in the string. If the separator is found, + returns a 3-tuple containing the part before the separator, the separator + itself, and the part after it. 
+ + If the separator is not found, returns a 3-tuple containing the original string + and two empty strings. + ''' + partition: (self: .Str, sep: .Str) -> (.Str, .Str, .Str) + removeprefix: (self: .Str, prefix: .Str) -> .Str + removesuffix: (self: .Str, suffix: .Str) -> .Str + replace: (self: .Str, old: .Str, new: .Str, count := Nat) -> .Str + rfind: (self: .Str, sub: .Str, start := Nat, end := Nat) -> Nat or {-1} + rindex: (self: .Str, sub: .Str, start := Nat, end := Nat) -> Nat + rjust: (self: .Str, width: Nat, fillchar := .Str) -> .Str + rpartition: (self: .Str, sep: .Str) -> (.Str, .Str, .Str) + rsplit: (self: .Str, sep := .Str, maxsplit := Nat) -> [Str; _] + rstrip: (self: .Str, chars := .Str) -> .Str + split: (self: .Str, sep := .Str, maxsplit := Nat) -> [Str; _] + splitlines: (self: .Str, keepends := Bool) -> [Str; _] + startswith: (self: .Str, prefix: .Str, start := Nat, end := Nat) -> Bool + strip: (self: .Str, chars := .Str) -> .Str + swapcase: (self: .Str) -> .Str + title: (self: .Str) -> .Str + translate: (self: .Str, table: {Nat: Nat or NoneType}) -> .Str + upper: (self: .Str) -> .Str + zfill: (self: .Str, width: Nat) -> .Str
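The new `ErgOrPython` variant lets a single doc-comment code fence tagged `erg,python` serve both modes, with the `py_compat` feature deciding which one matches at build time. A tiny Python sketch of the same fence-tag dispatch (names are illustrative, not the Rust API):

```python
def language_code(tag: str) -> str:
    """Map a doc-comment fence tag to a language code, like LanguageCode::from_str."""
    if tag in ("erg,python", "python,erg"):
        return "ErgOrPython"
    if tag == "erg":
        return "Erg"
    if tag == "python":
        return "Python"
    raise ValueError(f"unknown language code: {tag}")

assert language_code("python,erg") == "ErgOrPython"
```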
|
|
refactor: inline and simplify subtables
|
be558f63072c1c1ee07d132dfb691bdddb26faaf
|
refactor
|
https://github.com/ibis-project/ibis/commit/be558f63072c1c1ee07d132dfb691bdddb26faaf
|
inline and simplify subtables
|
diff --git a/select_builder.py b/select_builder.py index af7e4db..9d045a3 100644 --- a/select_builder.py +++ b/select_builder.py @@ -223,28 +223,6 @@ class SelectBuilder: f'Do not know how to execute: {type(expr)}' ) - @staticmethod - def _get_subtables(expr): - subtables = [] - - stack = [expr] - seen = set() - - while stack: - e = stack.pop() - op = e.op() - - if op not in seen: - seen.add(op) - - if isinstance(op, ops.Join): - stack.append(op.right) - stack.append(op.left) - else: - subtables.append(e) - - return subtables - @classmethod def _blocking_base(cls, expr): node = expr.op() @@ -585,7 +563,11 @@ class SelectBuilder: self.table_set = subbed self.select_set = [subbed] - subtables = self._get_subtables(expr) + subtables = [ + op.to_expr() + for op in util.to_op_dag(expr) + if isinstance(op, ops.TableNode) and not isinstance(op, ops.Join) + ] # If any of the joined tables are non-blocking modified versions of the # same table, then it's not safe to continue walking down the tree (see
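The explicit stack-and-seen-set traversal collapses into a single comprehension over the op DAG: keep every table node that is not a join. A schematic Python sketch of that filtering step (stand-in node classes; `util.to_op_dag` is ibis's own helper and is not reproduced here):

```python
class TableNode: ...
class Join(TableNode): ...
class Filter(TableNode): ...

def subtables(op_dag):
    # The inlined logic: every table node in the DAG that is not a join
    return [
        op for op in op_dag
        if isinstance(op, TableNode) and not isinstance(op, Join)
    ]

dag = [Filter(), Join(), TableNode()]
assert len(subtables(dag)) == 2
```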
|
|
fix: `match` type check
|
e6b56d44699edda5868d4c14fbe3b4f93ed4c349
|
fix
|
https://github.com/erg-lang/erg/commit/e6b56d44699edda5868d4c14fbe3b4f93ed4c349
|
`match` type check
|
diff --git a/inquire.rs b/inquire.rs index c87981b..589a9bf 100644 --- a/inquire.rs +++ b/inquire.rs @@ -372,33 +372,40 @@ impl Context { kind: SubrKind, pos_args: &[hir::PosArg], kw_args: &[hir::KwArg], - ) -> TyCheckResult<VarInfo> { + ) -> FailableOption<VarInfo> { + let mut errs = TyCheckErrors::empty(); if !kw_args.is_empty() { // TODO: this error desc is not good - return Err(TyCheckErrors::from(TyCheckError::default_param_error( - self.cfg.input.clone(), - line!() as usize, - kw_args[0].loc(), - self.caused_by(), - "match", - ))); + return Err(( + None, + TyCheckErrors::from(TyCheckError::default_param_error( + self.cfg.input.clone(), + line!() as usize, + kw_args[0].loc(), + self.caused_by(), + "match", + )), + )); } for pos_arg in pos_args.iter().skip(1) { let t = pos_arg.expr.ref_t(); // Allow only anonymous functions to be passed as match arguments (for aesthetic reasons) if !matches!(&pos_arg.expr, hir::Expr::Lambda(_)) { - return Err(TyCheckErrors::from(TyCheckError::type_mismatch_error( - self.cfg.input.clone(), - line!() as usize, - pos_arg.loc(), - self.caused_by(), - "match", + return Err(( None, - &mono("LambdaFunc"), - t, - self.get_candidates(t), - self.get_simple_type_mismatch_hint(&mono("LambdaFunc"), t), - ))); + TyCheckErrors::from(TyCheckError::type_mismatch_error( + self.cfg.input.clone(), + line!() as usize, + pos_arg.loc(), + self.caused_by(), + "match", + None, + &mono("LambdaFunc"), + t, + self.get_candidates(t), + self.get_simple_type_mismatch_hint(&mono("LambdaFunc"), t), + )), + )); } } let match_target_expr_t = pos_args[0].expr.ref_t(); @@ -408,36 +415,48 @@ impl Context { for (i, pos_arg) in pos_args.iter().skip(1).enumerate() { let lambda = erg_common::enum_unwrap!(&pos_arg.expr, hir::Expr::Lambda); // already checked if !lambda.params.defaults.is_empty() { - return Err(TyCheckErrors::from(TyCheckError::default_param_error( - self.cfg.input.clone(), - line!() as usize, - pos_args[i + 1].loc(), - self.caused_by(), - "match", - ))); + return Err(( + None, + TyCheckErrors::from(TyCheckError::default_param_error( + self.cfg.input.clone(), + line!() as usize, + pos_args[i + 1].loc(), + self.caused_by(), + "match", + )), + )); } if lambda.params.len() != 1 { - return Err(TyCheckErrors::from(TyCheckError::param_error( - self.cfg.input.clone(), - line!() as usize, - pos_args[i + 1].loc(), - self.caused_by(), - 1, - lambda.params.len(), - ))); + return Err(( + None, + TyCheckErrors::from(TyCheckError::param_error( + self.cfg.input.clone(), + line!() as usize, + pos_args[i + 1].loc(), + self.caused_by(), + 1, + lambda.params.len(), + )), + )); } let mut dummy_tv_cache = TyVarCache::new(self.level, self); - let rhs = self - .instantiate_param_sig_t( - &lambda.params.non_defaults[0].raw, - None, - &mut dummy_tv_cache, - Normal, - ParamKind::NonDefault, - false, - ) - // TODO: continue - .map_err(|(_, errs)| errs)?; + let rhs = match self.instantiate_param_sig_t( + &lambda.params.non_defaults[0].raw, + None, + &mut dummy_tv_cache, + Normal, + ParamKind::NonDefault, + false, + ) { + Ok(ty) => ty, + Err((ty, es)) => { + errs.extend(es); + ty + } + }; + if lambda.params.non_defaults[0].raw.t_spec.is_none() && rhs.is_free_var() { + rhs.link(&Obj, None); + } union_pat_t = self.union(&union_pat_t, &rhs); arm_ts.push(rhs); } @@ -447,7 +466,7 @@ impl Context { if cfg!(feature = "debug") { eprintln!("match error: {err}"); } - return Err(TyCheckErrors::from(TyCheckError::match_error( + errs.push(TyCheckError::match_error( self.cfg.input.clone(), line!() as usize, 
pos_args[0].loc(), @@ -455,7 +474,8 @@ impl Context { match_target_expr_t, &union_pat_t, arm_ts, - ))); + )); + return Err((None, errs)); } let branch_ts = pos_args .iter() @@ -466,17 +486,17 @@ impl Context { .get(0) .and_then(|branch| branch.typ().return_t().cloned()) else { - return Err(TyCheckErrors::from(TyCheckError::args_missing_error( + errs.push(TyCheckError::args_missing_error( self.cfg.input.clone(), line!() as usize, pos_args[0].loc(), "match", self.caused_by(), vec![Str::ever("obj")], - ))); + )); + return Err((None, errs)); }; for arg_t in branch_ts.iter().skip(1) { - // TODO: handle unwrap errors return_t = self.union(&return_t, arg_t.typ().return_t().unwrap_or(&Type::Never)); } let param_ty = ParamTy::Pos(match_target_expr_t.clone()); @@ -486,10 +506,15 @@ impl Context { } else { proc(param_ts, None, vec![], return_t) }; - Ok(VarInfo { + let vi = VarInfo { t, ..VarInfo::default() - }) + }; + if errs.is_empty() { + Ok(vi) + } else { + Err((Some(vi), errs)) + } } pub(crate) fn rec_get_var_info( @@ -2149,14 +2174,10 @@ impl Context { if local.vis().is_private() { match &local.inspect()[..] { "match" => { - return self - .get_match_call_t(SubrKind::Func, pos_args, kw_args) - .map_err(|errs| (None, errs)); + return self.get_match_call_t(SubrKind::Func, pos_args, kw_args); } "match!" => { - return self - .get_match_call_t(SubrKind::Proc, pos_args, kw_args) - .map_err(|errs| (None, errs)); + return self.get_match_call_t(SubrKind::Proc, pos_args, kw_args); } _ => {} } diff --git a/match.er b/match.er index 7a34c09..bef36e5 100644 --- a/match.er +++ b/match.er @@ -0,0 +1,9 @@ +f x: Obj = + match x: + (s: Str) -> s + "a" + { foo; bar } -> foo + bar + a -> a + +assert f("a") == "aa" +assert f({ foo = "a"; bar = "b" }) == "ab" +assert str(f(1)) == "1" diff --git a/test.rs b/test.rs index 2bf35d5..e0700fe 100644 --- a/test.rs +++ b/test.rs @@ -211,6 +211,11 @@ fn exec_map() -> Result<(), ()> { expect_success("tests/should_ok/map.er", 0) } +#[test] +fn exec_match() -> Result<(), ()> { + expect_success("tests/should_ok/match.er", 0) +} + #[test] fn exec_method() -> Result<(), ()> { expect_success("tests/should_ok/method.er", 0)
|
|
refactor: improve `EntityData` type with scalars
|
37c7db3026816587ccc87ca9ce8e7a9fff77d17e
|
refactor
|
https://github.com/mikro-orm/mikro-orm/commit/37c7db3026816587ccc87ca9ce8e7a9fff77d17e
|
improve `EntityData` type with scalars
|
diff --git a/typings.ts b/typings.ts index 9359b92..772ff4e 100644 --- a/typings.ts +++ b/typings.ts @@ -92,7 +92,7 @@ export type AnyEntity<T = any> = { [K in keyof T]?: T[K] } & { [PrimaryKeyType]? export type EntityClass<T extends AnyEntity<T>> = Function & { prototype: T }; export type EntityClassGroup<T extends AnyEntity<T>> = { entity: EntityClass<T>; schema: EntityMetadata<T> | EntitySchema<T> }; export type EntityName<T extends AnyEntity<T>> = string | EntityClass<T> | EntitySchema<T, any>; -export type EntityDataProp<T> = T extends Scalar ? T : (T | EntityData<T> | Primary<T>); +export type EntityDataProp<T> = T extends Scalar ? ExpandScalar<T> : (T | EntityData<T> | Primary<T>); export type CollectionItem<T> = T extends Collection<any> | undefined ? EntityDataProp<ExpandProperty<T>>[] : EntityDataProp<T>; export type EntityData<T> = T | { [K in keyof T | NonFunctionPropertyNames<T>]?: CollectionItem<T[K]> } & Dictionary; export type GetRepository<T extends AnyEntity<T>, U> = T[typeof EntityRepositoryType] extends EntityRepository<any> | undefined ? NonNullable<T[typeof EntityRepositoryType]> : U;
|
|
fix: rename default storage folder to just `storage` (#1403)
|
c253985e534b5106ffa5bd8d214afae2eb34de58
|
fix
|
https://github.com/Hardeepex/crawlee/commit/c253985e534b5106ffa5bd8d214afae2eb34de58
|
rename default storage folder to just `storage` (#1403)
|
diff --git a/.gitignore b/.gitignore index 8bee5b7..36af18d 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,5 @@ node_modules package-lock.json apify_storage +crawlee_storage +storage diff --git a/MIGRATIONS.md b/MIGRATIONS.md index 5008d9b..697e6b4 100644 --- a/MIGRATIONS.md +++ b/MIGRATIONS.md @@ -1,3 +1,6 @@ +# Migration from 2.x.x to 3.0.0 +Check the v3 [upgrading guide](https://crawlee.dev/docs/upgrading/upgrading-to-v3). + # Migration from 1.x.x to 2.0.0 There should be no changes needed apart from upgrading your Node.js version to >= 15.10. If you encounter issues with `cheerio`, [read their CHANGELOG](https://github.com/cheeriojs/cheerio/releases). We bumped it from `rc.3` to `rc.10`. diff --git a/README.md b/README.md index 208df31..5acbcb7 100644 --- a/README.md +++ b/README.md @@ -100,7 +100,7 @@ When you run the example, you should see Crawlee automating a Chrome browser.  -By default, Crawlee stores data to `./crawlee_storage` in the current working directory. You can override this directory via `CRAWLEE_STORAGE_DIR` env var. For details, see [Environment variables](https://crawlee.dev/docs/guides/environment-variables), [Request storage](https://crawlee.dev/docs/guides/request-storage) and [Result storage](https://crawlee.dev/docs/guides/result-storage). +By default, Crawlee stores data to `./storage` in the current working directory. You can override this directory via `CRAWLEE_STORAGE_DIR` env var. For details, see [Environment variables](https://crawlee.dev/docs/guides/environment-variables), [Request storage](https://crawlee.dev/docs/guides/request-storage) and [Result storage](https://crawlee.dev/docs/guides/result-storage). ### Local usage with Crawlee command-line interface (CLI) @@ -119,7 +119,7 @@ cd my-hello-world npx crawlee run ``` -By default, the crawling data will be stored in a local directory at `./crawlee_storage`. For example, the input JSON file for the actor is expected to be in the default key-value store in `./crawlee_storage/key_value_stores/default/INPUT.json`. +By default, the crawling data will be stored in a local directory at `./storage`. For example, the input JSON file for the actor is expected to be in the default key-value store in `./storage/key_value_stores/default/INPUT.json`. ### Usage on the Apify platform diff --git a/accept_user_input.mdx b/accept_user_input.mdx index 710d998..50552d4 100644 --- a/accept_user_input.mdx +++ b/accept_user_input.mdx @@ -15,7 +15,7 @@ This example accepts and logs user input: To provide the actor with input, create a `INPUT.json` file inside the "default" key-value store: ```bash -{PROJECT_FOLDER}/crawlee_storage/key_value_stores/default/INPUT.json +{PROJECT_FOLDER}/storage/key_value_stores/default/INPUT.json ``` Anything in this file will be available to the actor when it runs. 
diff --git a/add_data_to_dataset.mdx b/add_data_to_dataset.mdx index c55b9ef..f907bc7 100644 --- a/add_data_to_dataset.mdx +++ b/add_data_to_dataset.mdx @@ -17,5 +17,5 @@ You can save data to custom datasets by using <ApiLink to="core/class/Dataset#op Each item in this dataset will be saved to its own file in the following directory: ```bash -{PROJECT_FOLDER}/crawlee_storage/datasets/default/ +{PROJECT_FOLDER}/storage/datasets/default/ ``` diff --git a/basic_crawler.mdx b/basic_crawler.mdx index 09993ad..fd2127a 100644 --- a/basic_crawler.mdx +++ b/basic_crawler.mdx @@ -12,7 +12,7 @@ like <ApiLink to="cheerio-crawler/class/CheerioCrawler">`CheerioCrawler`</ApiLin The script simply downloads several web pages with plain HTTP requests using the <ApiLink to="basic-crawler/interface/BasicCrawlingContext#sendRequest">`sendRequest`</ApiLink> utility function (which uses the [`got-scraping`](https://github.com/apify/got-scraping) npm module internally) and stores their raw HTML and URL in the default dataset. In local configuration, the data will be stored as JSON files in -`./crawlee_storage/datasets/default`. +`./storage/datasets/default`. <CodeBlock className="language-js"> {BasicCrawlerSource} diff --git a/cheerio_crawler.ts b/cheerio_crawler.ts index 80cd1fc..443d41a 100644 --- a/cheerio_crawler.ts +++ b/cheerio_crawler.ts @@ -44,7 +44,7 @@ const crawler = new CheerioCrawler({ }); // Store the results to the dataset. In local configuration, - // the data will be stored as JSON files in ./crawlee_storage/datasets/default + // the data will be stored as JSON files in ./storage/datasets/default await dataset.pushData({ url: request.url, title, diff --git a/forms.mdx b/forms.mdx index c643aa5..4182f77 100644 --- a/forms.mdx +++ b/forms.mdx @@ -11,7 +11,7 @@ This example demonstrates how to use <ApiLink to="puppeteer-crawler/class/Puppet automatically fill and submit a search form to look up repositories on [GitHub](https://github.com) using headless Chrome / Puppeteer. The crawler first fills in the search term, repository owner, start date and language of the repository, then submits the form and prints out the results. Finally, the results are saved either on the Apify platform to the -default <ApiLink to="core/class/Dataset">`dataset`</ApiLink> or on the local machine as JSON files in `./crawlee_storage/datasets/default`. +default <ApiLink to="core/class/Dataset">`dataset`</ApiLink> or on the local machine as JSON files in `./storage/datasets/default`. :::tip diff --git a/map_and_reduce.mdx b/map_and_reduce.mdx index 8cea39d..bbadebe 100644 --- a/map_and_reduce.mdx +++ b/map_and_reduce.mdx @@ -18,7 +18,7 @@ the dataset in any way. Examples for both methods are demonstrated on a simple dataset containing the results scraped from a page: the `URL` and a hypothetical number of `h1` - `h3` header elements under the `headingCount` key. -This data structure is stored in the default dataset under `{PROJECT_FOLDER}/crawlee_storage/datasets/default/`. If you want to simulate the +This data structure is stored in the default dataset under `{PROJECT_FOLDER}/storage/datasets/default/`. If you want to simulate the functionality, you can use the <ApiLink to="core/class/Dataset#pushData">`dataset.pushData()`</ApiLink> method to save the example `JSON array` to your dataset. 
diff --git a/playwright_crawler.mdx b/playwright_crawler.mdx index f01d968..4537e7b 100644 --- a/playwright_crawler.mdx +++ b/playwright_crawler.mdx @@ -9,7 +9,7 @@ import CrawlSource from '!!raw-loader!./playwright_crawler.ts'; This example demonstrates how to use <ApiLink to="playwright-crawler/class/PlaywrightCrawler">`PlaywrightCrawler`</ApiLink> in combination with <ApiLink to="core/class/RequestQueue">`RequestQueue`</ApiLink> to recursively scrape the [Hacker News website](https://news.ycombinator.com) using headless Chrome / Playwright. -The crawler starts with a single URL, finds links to next pages, enqueues them and continues until no more desired links are available. The results are stored to the default dataset. In local configuration, the results are stored as JSON files in `./crawlee_storage/datasets/default`. +The crawler starts with a single URL, finds links to next pages, enqueues them and continues until no more desired links are available. The results are stored to the default dataset. In local configuration, the results are stored as JSON files in `./storage/datasets/default`. :::tip diff --git a/puppeteer_crawler.mdx b/puppeteer_crawler.mdx index 335e389..bf384f3 100644 --- a/puppeteer_crawler.mdx +++ b/puppeteer_crawler.mdx @@ -12,7 +12,7 @@ with <ApiLink to="core/class/RequestQueue">`RequestQueue`</ApiLink> to recursively scrape the [Hacker News website](https://news.ycombinator.com) using headless Chrome / Puppeteer. The crawler starts with a single URL, finds links to next pages, enqueues them and continues until no more desired links are available. The results -are stored to the default dataset. In local configuration, the results are stored as JSON files in `./crawlee_storage/datasets/default` +are stored to the default dataset. In local configuration, the results are stored as JSON files in `./storage/datasets/default` :::tip diff --git a/environment_variables.mdx b/environment_variables.mdx index 8ff86c2..7bb6727 100644 --- a/environment_variables.mdx +++ b/environment_variables.mdx @@ -17,7 +17,7 @@ can be changed significantly by setting or unsetting them. ### `CRAWLEE_STORAGE_DIR` -Defines the path to a local directory where <ApiLink to="core/class/KeyValueStore">`KeyValueStore`</ApiLink>, <ApiLink to="core/class/Dataset">`Dataset`</ApiLink>, and <ApiLink to="core/class/RequestQueue">`RequestQueue`</ApiLink> store their data. By default, it is set to `./crawlee_storage`. +Defines the path to a local directory where <ApiLink to="core/class/KeyValueStore">`KeyValueStore`</ApiLink>, <ApiLink to="core/class/Dataset">`Dataset`</ApiLink>, and <ApiLink to="core/class/RequestQueue">`RequestQueue`</ApiLink> store their data. By default, it is set to `./storage`. ### `CRAWLEE_DEFAULT_DATASET_ID` diff --git a/request_storage.mdx b/request_storage.mdx index 972dafd..45f3e48 100644 --- a/request_storage.mdx +++ b/request_storage.mdx @@ -17,7 +17,7 @@ import CrawlerSource from '!!raw-loader!./request_storage_queue_crawler.ts'; import RequestQueueListSource from '!!raw-loader!./request_storage_queue_list.ts'; import RequestQueueAddRequestsSource from '!!raw-loader!./request_storage_queue_only.ts'; -Crawlee has several request storage types that are useful for specific tasks. The requests are stored on local disk to a directory defined by the `CRAWLEE_STORAGE_DIR` environment variable. If this variable is not defined, by default Crawlee sets `CRAWLEE_STORAGE_DIR` to `./crawlee_storage` in the current working directory. 
+Crawlee has several request storage types that are useful for specific tasks. The requests are stored on local disk to a directory defined by the `CRAWLEE_STORAGE_DIR` environment variable. If this variable is not defined, by default Crawlee sets `CRAWLEE_STORAGE_DIR` to `./storage` in the current working directory. ## Request queue diff --git a/result_storage.mdx b/result_storage.mdx index 6d53b1e..688d3cf 100644 --- a/result_storage.mdx +++ b/result_storage.mdx @@ -6,7 +6,7 @@ description: Where are you going to store all of that juicy scraped data?! import ApiLink from '@site/src/components/ApiLink'; -Crawlee has several result storage types that are useful for specific tasks. The data is stored on a local disk to the directory defined by the `CRAWLEE_STORAGE_DIR` environment variable. If this variable is not defined, by default Crawlee sets `CRAWLEE_STORAGE_DIR` to `./crawlee_storage` in the current working directory. +Crawlee has several result storage types that are useful for specific tasks. The data is stored on a local disk to the directory defined by the `CRAWLEE_STORAGE_DIR` environment variable. If this variable is not defined, by default Crawlee sets `CRAWLEE_STORAGE_DIR` to `./storage` in the current working directory. Crawlee storage is managed by <ApiLink to="memory-storage/class/MemoryStorage">`MemoryStorage`</ApiLink> class. During the crawler run all information is stored in memory, while also being off-loaded to the local files in respective storage type folders. diff --git a/05-realworld-example.mdx b/05-realworld-example.mdx index 1d45b9d..c27b9c2 100644 --- a/05-realworld-example.mdx +++ b/05-realworld-example.mdx @@ -535,10 +535,10 @@ await crawler.run(); #### Finding your saved data -It might not be perfectly obvious where the data you saved using the previous command went. Unless you changed the environment variables that Crawlee uses locally, which would suggest that you knew what you were doing, and you didn't need this tutorial anyway, you'll find your data in the `crawlee_storage` directory: +It might not be perfectly obvious where the data you saved using the previous command went. Unless you changed the environment variables that Crawlee uses locally, which would suggest that you knew what you were doing, and you didn't need this tutorial anyway, you'll find your data in the `storage` directory: ``` -{PROJECT_FOLDER}/crawlee_storage/datasets/default/ +{PROJECT_FOLDER}/storage/datasets/default/ ``` The above folder will hold all your saved data in numbered files, as they were pushed into the dataset. Each file represents one invocation of `Dataset.pushData()` or one table row. @@ -562,7 +562,7 @@ const input = await KeyValueStore.getInput(); You need to place an `INPUT.json` file in your default key-value store for this to work. ``` -{PROJECT_FOLDER}/crawlee_storage/key_value_stores/default/INPUT.json +{PROJECT_FOLDER}/storage/key_value_stores/default/INPUT.json ``` #### Use `INPUT` to seed your crawler with users diff --git a/index.mdx b/index.mdx index 044ddd4..a8b3f83 100644 --- a/index.mdx +++ b/index.mdx @@ -84,7 +84,7 @@ Besides the logs, you should also see Crawlee automating the browser: </TabItem> </Tabs> -By default, Crawlee stores data to `./crawlee_storage` in the current working directory. You can override this behavior by setting the `CRAWLEE_STORAGE_DIR` environment variable. +By default, Crawlee stores data to `./storage` in the current working directory. You can override this behavior by setting the `storage_DIR` environment variable. 
More examples showcasing various features of Crawlee could be found in [Examples](./examples) section of the documentation. @@ -116,4 +116,4 @@ cd my-cheerio-crawler npm start ``` -By default, the crawling data will be stored in a local directory at `./crawlee_storage`. For example, the input JSON file for the actor is expected to be in the default key-value store in `./crawlee_storage/key_value_stores/default/INPUT.json`. +By default, the crawling data will be stored in a local directory at `./storage`. For example, the input JSON file for the actor is expected to be in the default key-value store in `./storage/key_value_stores/default/INPUT.json`. diff --git a/lerna.json b/lerna.json index e365d9b..6122645 100644 --- a/lerna.json +++ b/lerna.json @@ -2,7 +2,7 @@ "packages": [ "packages/*" ], - "version": "3.0.0", + "version": "3.0.1", "command": { "version": { "conventionalCommits": true, diff --git a/package-lock.json b/package-lock.json index a527ec9..09ed00e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "crawlee", - "version": "3.0.0", + "version": "3.0.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "crawlee", - "version": "3.0.0", + "version": "3.0.1", "hasInstallScript": true, "license": "Apache-2.0", "workspaces": [ diff --git a/package.json b/package.json index ef21d4a..c30914e 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "crawlee", "private": true, - "version": "3.0.0", + "version": "3.0.1", "description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.", "workspaces": [ "packages/*" diff --git a/utils.ts b/utils.ts index bd19389..562edf4 100644 --- a/utils.ts +++ b/utils.ts @@ -2,7 +2,7 @@ import type { StorageClient } from '@crawlee/types'; import { Configuration } from '../configuration'; /** - * Cleans up the local storage folder (defaults to `./crawlee_storage`) created when running code locally. + * Cleans up the local storage folder (defaults to `./storage`) created when running code locally. * Purging will remove all the files in all storages except for INPUT.json in the default KV store. * * Purging of storages is happening automatically when we run our crawler (or when we open some storage diff --git a/memory-storage.ts b/memory-storage.ts index ef6a712..56f758b 100644 --- a/memory-storage.ts +++ b/memory-storage.ts @@ -2,7 +2,7 @@ import type * as storage from '@crawlee/types'; import type { Dictionary } from '@crawlee/types'; import { s } from '@sapphire/shapeshift'; -import { pathExists } from 'fs-extra'; +import { pathExistsSync } from 'fs-extra'; import { readdir, rm } from 'node:fs/promises'; import { join, resolve } from 'node:path'; import { DatasetClient } from './resource-clients/dataset'; @@ -16,9 +16,10 @@ import { initWorkerIfNeeded } from './workers/instance'; export interface MemoryStorageOptions { /** * Path to directory where the data will also be saved. - * @default process.env.CRAWLEE_STORAGE_DIR ?? './crawlee_storage' + * @default process.env.CRAWLEE_STORAGE_DIR ?? './storage' */ localDataDirectory?: string; + /** * Whether to also write optional metadata files when storing to disk. * @default process.env.DEBUG?.includes('*') ?? process.env.DEBUG?.includes('crawlee:memory-storage') ?? 
false @@ -43,7 +44,18 @@ export class MemoryStorage implements storage.StorageClient { writeMetadata: s.boolean.optional, }).parse(options); - this.localDataDirectory = options.localDataDirectory ?? process.env.CRAWLEE_STORAGE_DIR ?? './crawlee_storage'; + // v3.0.0 used `crawlee_storage` as the default, we changed this in v3.0.1 to just `storage`, + // this function handles it without making BC breaks - it respects existing `crawlee_storage` + // directories, and uses the `storage` only if it's not there. + const defaultStorageDir = () => { + if (pathExistsSync(resolve('./crawlee_storage'))) { + return './crawlee_storage'; + } + + return './storage'; + }; + + this.localDataDirectory = options.localDataDirectory ?? process.env.CRAWLEE_STORAGE_DIR ?? defaultStorageDir(); this.datasetsDirectory = resolve(this.localDataDirectory, 'datasets'); this.keyValueStoresDirectory = resolve(this.localDataDirectory, 'key_value_stores'); this.requestQueuesDirectory = resolve(this.localDataDirectory, 'request_queues'); @@ -113,7 +125,7 @@ export class MemoryStorage implements storage.StorageClient { } private async removeFiles(folder: string): Promise<void> { - const storagePathExists = await pathExists(folder); + const storagePathExists = pathExistsSync(folder); if (storagePathExists) { const direntNames = await readdir(folder); diff --git a/copy-templates.mjs b/copy-templates.mjs index 9c075dd..0a7af94 100644 --- a/copy-templates.mjs +++ b/copy-templates.mjs @@ -6,7 +6,7 @@ const templates = await readdir('./templates'); await copy('./manifest.json', './dist/manifest.json', { override: true }); console.info(`Successfully copied 'manifest.json' to dist`); -const ignoreFolders = ['node_modules', 'dist', 'crawlee_storage', 'apify_storage', 'package-lock.json']; +const ignoreFolders = ['node_modules', 'dist', 'crawlee_storage', 'storage', 'apify_storage', 'package-lock.json']; for (const tpl of templates) { console.info(tpl); diff --git a/copy.ts b/copy.ts index 9410f40..a07cc09 100644 --- a/copy.ts +++ b/copy.ts @@ -23,36 +23,49 @@ function rewrite(path: string, replacer: (from: string) => string): void { } } +let rootVersion: string; + +function getRootVersion(): string { + if (rootVersion) { + return rootVersion; + } + + // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-dynamic-require,global-require + rootVersion = require(resolve(root, './packages/core/package.json')).version.replace(/^(\\d+\\.\\d+\\.\\d+)-?.*$/, '$1'); + + return rootVersion; +} + /** * Checks next dev version number based on the `@crawlee/core` package via `npm show`. * We always use this package, so we ensure the version is the same for each package in the monorepo. */ function getNextVersion() { const versions: string[] = []; - // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-dynamic-require,global-require - const pkgJson = require(resolve(root, 'package.json')); try { - const versionString = execSync(`npm show ${pkgJson.name} versions --json`, { encoding: 'utf8', stdio: 'pipe' }); + const versionString = execSync(`npm show crawlee versions --json`, { encoding: 'utf8', stdio: 'pipe' }); const parsed = JSON.parse(versionString) as string[]; versions.push(...parsed); } catch { // the package might not have been published yet } - if (versions.some((v) => v === pkgJson.version)) { + const version = getRootVersion(); + + if (versions.some((v) => v === version)) { // eslint-disable-next-line no-console - console.error(`before-deploy: A release with version ${pkgJson.version} already exists. 
Please increment version accordingly.`); + console.error(`before-deploy: A release with version ${version} already exists. Please increment version accordingly.`); process.exit(1); } const preid = options.preid ?? 'alpha'; const prereleaseNumbers = versions - .filter((v) => v.startsWith(`${pkgJson.version}-${preid}.`)) + .filter((v) => v.startsWith(`${version}-${preid}.`)) .map((v) => Number(v.match(/\\.(\\d+)$/)?.[1])); const lastPrereleaseNumber = Math.max(-1, ...prereleaseNumbers); - return `${pkgJson.version}-${preid}.${lastPrereleaseNumber + 1}`; + return `${version}-${preid}.${lastPrereleaseNumber + 1}`; } // as we publish only the dist folder, we need to copy some meta files inside (readme/license/package.json) diff --git a/run.mjs b/run.mjs index 4dc541b..304b9ae 100644 --- a/run.mjs +++ b/run.mjs @@ -16,7 +16,7 @@ process.env.APIFY_CONTAINER_PORT ??= '8000'; /** * Depending on STORAGE_IMPLEMENTATION the workflow of the tests slightly differs: - * - for 'MEMORY': the 'crawlee_storage' folder should be removed after the test actor finishes; + * - for 'MEMORY': the 'storage' folder should be removed after the test actor finishes; * - for 'LOCAL': the 'apify_storage' folder should be removed after the test actor finishes; * - for 'PLATFORM': SDK packages should be copied to respective test actor folders * (and also should be removed after pushing the actor to platform and starting the test run there) diff --git a/tools.mjs b/tools.mjs index b6b147f..07d94d8 100644 --- a/tools.mjs +++ b/tools.mjs @@ -25,7 +25,7 @@ export const colors = { export function getStorage(dirName) { let folderName; if (process.env.STORAGE_IMPLEMENTATION === 'LOCAL') folderName = 'apify_storage'; - if (process.env.STORAGE_IMPLEMENTATION === 'MEMORY') folderName = 'crawlee_storage'; + if (process.env.STORAGE_IMPLEMENTATION === 'MEMORY') folderName = 'storage'; return join(dirName, folderName); } @@ -161,7 +161,7 @@ export async function clearPackages(dirName) { export async function clearStorage(dirName) { let folderName; if (process.env.STORAGE_IMPLEMENTATION === 'LOCAL') folderName = 'apify_storage'; - if (process.env.STORAGE_IMPLEMENTATION === 'MEMORY') folderName = 'crawlee_storage'; + if (process.env.STORAGE_IMPLEMENTATION === 'MEMORY') folderName = 'storage'; const destPackagesDir = join(dirName, 'actor', folderName); await fs.remove(destPackagesDir); }
|
|
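The row above records Crawlee v3.0.1 renaming the default local storage directory from `./crawlee_storage` to `./storage`, with a fallback in `MemoryStorage` that keeps using a pre-existing `./crawlee_storage` directory. As a hedged illustration of where the renamed directory shows up in practice, here is a minimal Crawlee v3 crawler sketch; the start URL is a placeholder, and the paths assume the defaults described in the diff (no `CRAWLEE_STORAGE_DIR` set, no leftover `./crawlee_storage` directory):

```ts
import { CheerioCrawler, Dataset } from 'crawlee';

const crawler = new CheerioCrawler({
    async requestHandler({ request, $ }) {
        // Each call writes a numbered JSON file into the default dataset,
        // which with the new defaults lives in ./storage/datasets/default.
        await Dataset.pushData({
            url: request.url,
            title: $('title').text(),
        });
    },
});

// Placeholder start URL; results land under ./storage unless
// CRAWLEE_STORAGE_DIR overrides it, as the environment_variables.mdx
// change in the diff notes.
await crawler.run(['https://example.com']);
```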
feat(bigquery): implement JSON getitem functionality
|
9c0e7755ea0157aa3473e8699a04277b795c8a9c
|
feat
|
https://github.com/ibis-project/ibis/commit/9c0e7755ea0157aa3473e8699a04277b795c8a9c
|
implement JSON getitem functionality
|
diff --git a/datatypes.py b/datatypes.py index 24ac550..5722767 100644 --- a/datatypes.py +++ b/datatypes.py @@ -114,3 +114,8 @@ def trans_numeric(t, context): @ibis_type_to_bigquery_type.register(dt.Decimal, UDFContext) def trans_numeric_udf(t, context): raise TypeError("Decimal types are not supported in BigQuery UDFs") + + +@ibis_type_to_bigquery_type.register(dt.JSON, TypeTranslationContext) +def trans_json(t, context): + return "JSON" diff --git a/registry.py b/registry.py index 9d29a18..50318f6 100644 --- a/registry.py +++ b/registry.py @@ -680,6 +680,7 @@ OPERATION_REGISTRY = { ops.E: lambda *_: "EXP(1)", ops.RandomScalar: fixed_arity("RAND", 0), ops.NthValue: _nth_value, + ops.JSONGetItem: lambda t, op: f"{t.translate(op.arg)}[{t.translate(op.index)}]", } _invalid_operations = { diff --git a/conftest.py b/conftest.py index 6d1eb01..308d551 100644 --- a/conftest.py +++ b/conftest.py @@ -19,7 +19,7 @@ from ibis.backends.bigquery import EXTERNAL_DATA_SCOPES, Backend from ibis.backends.bigquery.datatypes import ibis_type_to_bigquery_type from ibis.backends.conftest import TEST_TABLES from ibis.backends.tests.base import BackendTest, RoundAwayFromZero, UnorderedComparator -from ibis.backends.tests.data import non_null_array_types, struct_types, win +from ibis.backends.tests.data import json_types, non_null_array_types, struct_types, win DATASET_ID = "ibis_gbq_testing" DEFAULT_PROJECT_ID = "ibis-gbq" @@ -73,7 +73,7 @@ class TestConf(UnorderedComparator, BackendTest, RoundAwayFromZero): supports_floating_modulus = False returned_timestamp_unit = "us" supports_structs = True - supports_json = False + supports_json = True check_names = False @staticmethod @@ -241,6 +241,22 @@ class TestConf(UnorderedComparator, BackendTest, RoundAwayFromZero): ) ) + futures.append( + e.submit( + make_job, + client.load_table_from_file, + io.StringIO( + "\\n".join(f"{{\\"js\\": {row}}}" for row in json_types.js) + ), + bq.TableReference(testing_dataset, "json_t"), + job_config=bq.LoadJobConfig( + write_disposition=write_disposition, + schema=ibis_schema_to_bq_schema(dict(js="json")), + source_format=bq.SourceFormat.NEWLINE_DELIMITED_JSON, + ), + ) + ) + for table, schema in TEST_TABLES.items(): futures.append( e.submit( diff --git a/test_json.py b/test_json.py index eb6eb02..6f80ab0 100644 --- a/test_json.py +++ b/test_json.py @@ -1,11 +1,12 @@ """Tests for JSON operations.""" import pandas as pd +import pandas.testing as tm import pytest from pytest import param [email protected](["bigquery", "datafusion", "mssql"]) [email protected](["datafusion", "mssql"]) @pytest.mark.notyet(["clickhouse"], reason="upstream is broken") @pytest.mark.never(["impala"], reason="doesn't support JSON and never will") @pytest.mark.parametrize( @@ -33,7 +34,7 @@ from pytest import param ), ], ) -def test_json_getitem(backend, json_t, expr_fn, expected): +def test_json_getitem(json_t, expr_fn, expected): expr = expr_fn(json_t) result = expr.execute() - backend.assert_series_equal(result, expected) + tm.assert_series_equal(result, expected)
|
|
fix(ts): avoid "React.CSSProperties" as much as possible
|
4e30a7c08097d1a2b3bb2c359a4bdc11016280ba
|
fix
|
https://github.com/pmndrs/react-spring/commit/4e30a7c08097d1a2b3bb2c359a4bdc11016280ba
|
avoid "React.CSSProperties" as much as possible
|
diff --git a/animated.ts b/animated.ts index 118e028..66807aa 100644 --- a/animated.ts +++ b/animated.ts @@ -8,7 +8,7 @@ import { withExtend, WithExtend, } from '@react-spring/animated' -import { AssignableKeys, SpringValue } from 'shared' +import { SpringValue } from 'shared' type JSXElements = keyof JSX.IntrinsicElements @@ -175,6 +175,11 @@ export type AnimatedProps<Props extends object> = { [P in keyof Props]: (P extends 'ref' ? Props[P] : AnimatedProp<Props[P]>) } +type CSSPropertyNames = keyof CSSProperties +type CSSValidProperties<T extends object> = { + [P in keyof T & CSSPropertyNames]: T[P] extends CSSProperties[P] ? P : never +}[keyof T & CSSPropertyNames] + // The animated prop value of a React element type AnimatedProp<T> = [T, T] extends [infer T, infer DT] ? [DT] extends [never] @@ -182,7 +187,7 @@ type AnimatedProp<T> = [T, T] extends [infer T, infer DT] : DT extends void ? undefined : DT extends object - ? [AssignableKeys<DT, CSSProperties>] extends [never] + ? [CSSValidProperties<DT>] extends [never] ? DT extends ReadonlyArray<any> ? AnimatedStyles<DT> : DT @@ -194,7 +199,7 @@ type AnimatedProp<T> = [T, T] extends [infer T, infer DT] type AnimatedStyles<T extends ReadonlyArray<any>> = { [P in keyof T]: [T[P]] extends [infer DT] ? DT extends object - ? [AssignableKeys<DT, CSSProperties>] extends [never] + ? [CSSValidProperties<DT>] extends [never] ? DT extends ReadonlyArray<any> ? AnimatedStyles<DT> : DT
|
|
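The patch above replaces react-spring's `AssignableKeys<DT, CSSProperties>` test with a per-property `CSSValidProperties` mapped type. A self-contained sketch of that helper follows, assuming React's `CSSProperties` stands in for the library's own type; the probe types at the end are hypothetical and not from the repo:

```ts
import type { CSSProperties } from 'react';

// Mirrors the CSSValidProperties helper from the diff: for each key of T
// that is also a CSS property name, keep the key if T's value type is
// assignable to that CSS property's value type, otherwise map it to never.
type CSSValidProperties<T extends object> = {
  [P in keyof T & keyof CSSProperties]: T[P] extends CSSProperties[P] ? P : never;
}[keyof T & keyof CSSProperties];

// Hypothetical probes (not from the repo):
type HasCSSKeys = CSSValidProperties<{ opacity: number; foo: string }>; // "opacity"
type NoCSSKeys = CSSValidProperties<{ foo: string }>;                   // never
```

Objects with no compatible CSS keys collapse to `never`, which is exactly what the `[CSSValidProperties<DT>] extends [never]` guard in the diff branches on.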
test(duckdb): regenerate SQL snapshot for pivot/distinct test case
|
3b0cb7c97698ae2e80a259b6c0d815d6a10d9261
|
test
|
https://github.com/rohankumardubey/ibis/commit/3b0cb7c97698ae2e80a259b6c0d815d6a10d9261
|
regenerate SQL snapshot for pivot/distinct test case
|
diff --git a/out.sql b/out.sql index 58657c0..48e20e8 100644 --- a/out.sql +++ b/out.sql @@ -1,102 +1,86 @@ WITH t0 AS ( SELECT - t6.field_of_study AS field_of_study, + t7.field_of_study AS field_of_study, UNNEST( CAST(LIST_VALUE( - '1970-71', - '1975-76', - '1980-81', - '1985-86', - '1990-91', - '1995-96', - '2000-01', - '2005-06', - '2010-11', - '2011-12', - '2012-13', - '2013-14', - '2014-15', - '2015-16', - '2016-17', - '2017-18', - '2018-19', - '2019-20' - ) AS TEXT[]) - ) AS years, - UNNEST( - CAST(LIST_VALUE( - t6."1970-71", - t6."1975-76", - t6."1980-81", - t6."1985-86", - t6."1990-91", - t6."1995-96", - t6."2000-01", - t6."2005-06", - t6."2010-11", - t6."2011-12", - t6."2012-13", - t6."2013-14", - t6."2014-15", - t6."2015-16", - t6."2016-17", - t6."2017-18", - t6."2018-19", - t6."2019-20" - ) AS BIGINT[]) - ) AS degrees - FROM humanities AS t6 + {'years': '1970-71', 'degrees': t7."1970-71"}, + {'years': '1975-76', 'degrees': t7."1975-76"}, + {'years': '1980-81', 'degrees': t7."1980-81"}, + {'years': '1985-86', 'degrees': t7."1985-86"}, + {'years': '1990-91', 'degrees': t7."1990-91"}, + {'years': '1995-96', 'degrees': t7."1995-96"}, + {'years': '2000-01', 'degrees': t7."2000-01"}, + {'years': '2005-06', 'degrees': t7."2005-06"}, + {'years': '2010-11', 'degrees': t7."2010-11"}, + {'years': '2011-12', 'degrees': t7."2011-12"}, + {'years': '2012-13', 'degrees': t7."2012-13"}, + {'years': '2013-14', 'degrees': t7."2013-14"}, + {'years': '2014-15', 'degrees': t7."2014-15"}, + {'years': '2015-16', 'degrees': t7."2015-16"}, + {'years': '2016-17', 'degrees': t7."2016-17"}, + {'years': '2017-18', 'degrees': t7."2017-18"}, + {'years': '2018-19', 'degrees': t7."2018-19"}, + {'years': '2019-20', 'degrees': t7."2019-20"} + ) AS STRUCT(years TEXT, degrees BIGINT)[]) + ) AS __pivoted__ + FROM humanities AS t7 ), t1 AS ( SELECT t0.field_of_study AS field_of_study, - t0.years AS years, - t0.degrees AS degrees, - FIRST_VALUE(t0.degrees) OVER (PARTITION BY t0.field_of_study ORDER BY t0.years ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS earliest_degrees, - LAST_VALUE(t0.degrees) OVER (PARTITION BY t0.field_of_study ORDER BY t0.years ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS latest_degrees + STRUCT_EXTRACT(t0.__pivoted__, 'years') AS years, + STRUCT_EXTRACT(t0.__pivoted__, 'degrees') AS degrees FROM t0 ), t2 AS ( SELECT t1.field_of_study AS field_of_study, t1.years AS years, t1.degrees AS degrees, - t1.earliest_degrees AS earliest_degrees, - t1.latest_degrees AS latest_degrees, - t1.latest_degrees - t1.earliest_degrees AS diff + FIRST_VALUE(t1.degrees) OVER (PARTITION BY t1.field_of_study ORDER BY t1.years ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS earliest_degrees, + LAST_VALUE(t1.degrees) OVER (PARTITION BY t1.field_of_study ORDER BY t1.years ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS latest_degrees FROM t1 ), t3 AS ( SELECT t2.field_of_study AS field_of_study, - FIRST(t2.diff) AS diff + t2.years AS years, + t2.degrees AS degrees, + t2.earliest_degrees AS earliest_degrees, + t2.latest_degrees AS latest_degrees, + t2.latest_degrees - t2.earliest_degrees AS diff FROM t2 +), t4 AS ( + SELECT + t3.field_of_study AS field_of_study, + FIRST(t3.diff) AS diff + FROM t3 GROUP BY 1 ), anon_1 AS ( SELECT - t3.field_of_study AS field_of_study, - t3.diff AS diff - FROM t3 + t4.field_of_study AS field_of_study, + t4.diff AS diff + FROM t4 ORDER BY - t3.diff DESC + t4.diff DESC LIMIT 10 -), t4 AS ( - SELECT - t3.field_of_study AS field_of_study, 
- t3.diff AS diff - FROM t3 - WHERE - t3.diff < CAST(0 AS SMALLINT) -), anon_2 AS ( +), t5 AS ( SELECT t4.field_of_study AS field_of_study, t4.diff AS diff FROM t4 + WHERE + t4.diff < CAST(0 AS SMALLINT) +), anon_2 AS ( + SELECT + t5.field_of_study AS field_of_study, + t5.diff AS diff + FROM t5 ORDER BY - t4.diff + t5.diff LIMIT 10 ) SELECT - t5.field_of_study, - t5.diff + t6.field_of_study, + t6.diff FROM ( SELECT anon_1.field_of_study AS field_of_study, @@ -107,4 +91,4 @@ FROM ( anon_2.field_of_study AS field_of_study, anon_2.diff AS diff FROM anon_2 -) AS t5 \\ No newline at end of file +) AS t6 \\ No newline at end of file
|
|
feat: `describe()` aborts search early if there is no input name in the name map.
|
df62f5081291f65f994b2aa66f0599f47eea8d4d
|
feat
|
https://github.com/Byron/gitoxide/commit/df62f5081291f65f994b2aa66f0599f47eea8d4d
|
`describe()` aborts search early if there is no input name in the name map.
|
diff --git a/describe.rs b/describe.rs index 8d79dcb..580fa09 100644 --- a/describe.rs +++ b/describe.rs @@ -177,18 +177,8 @@ pub(crate) mod function { Find: for<'b> FnMut(&oid, &'b mut Vec<u8>) -> Result<Option<CommitRefIter<'b>>, E>, E: std::error::Error + Send + Sync + 'static, { - if let Some(name) = name_by_oid.get(commit) { - return Ok(Some(Outcome { - name: name.clone().into(), - id: commit.to_owned(), - depth: 0, - name_by_oid, - commits_seen: 0, - })); - } - max_candidates = max_candidates.min(MAX_CANDIDATES); - if max_candidates == 0 { + if max_candidates == 0 || name_by_oid.is_empty() { return if fallback_to_oid { Ok(Some(Outcome { id: commit.to_owned(), @@ -202,6 +192,16 @@ pub(crate) mod function { }; } + if let Some(name) = name_by_oid.get(commit) { + return Ok(Some(Outcome { + name: name.clone().into(), + id: commit.to_owned(), + depth: 0, + name_by_oid, + commits_seen: 0, + })); + } + let mut buf = Vec::new(); let mut parent_buf = Vec::new(); diff --git a/mod.rs b/mod.rs index 33d7f77..d3d9149 100644 --- a/mod.rs +++ b/mod.rs @@ -24,7 +24,7 @@ fn option_none_if_no_tag_found() -> crate::Result { } #[test] -fn fallback_if_configured_in_options_but_no_candidate() -> crate::Result { +fn fallback_if_configured_in_options_but_no_candidate_or_names() -> crate::Result { let repo = repo(); let commit = repo.head_commit()?; let res = git_revision::describe( @@ -38,7 +38,10 @@ fn fallback_if_configured_in_options_but_no_candidate() -> crate::Result { .expect("fallback activated"); assert!(res.name.is_none(), "no name can be found"); assert_eq!(res.depth, 0, "just a default, not relevant as there is no name"); - assert_eq!(res.commits_seen, 8, "a traversal is performed"); + assert_eq!( + res.commits_seen, 0, + "a traversal is isn't performed as name map is empty, and that's the whole point" + ); assert_eq!(res.into_format(7).to_string(), "01ec18a"); Ok(()) } @@ -91,6 +94,7 @@ fn not_enough_candidates() -> crate::Result { assert_eq!(res.name, Some(name), "it finds the youngest/most-recent name"); assert_eq!(res.id, commit.id); + assert_eq!(res.commits_seen, 6, "it has to traverse commits"); assert_eq!( res.depth, 3, "it calculates the final number of commits even though it aborted early" @@ -146,6 +150,7 @@ fn typical_usecases() { ); assert_eq!(res.id, commit.id); assert_eq!(res.depth, 3); + assert_eq!(res.commits_seen, 6); let res = git_revision::describe( &commit.id,
|
|
docs: fix card grids (#4893)
|
3fc5a5c0d6841d31de898869933afc574e9eebc4
|
docs
|
https://github.com/wzhiqing/cube/commit/3fc5a5c0d6841d31de898869933afc574e9eebc4
|
fix card grids (#4893)
|
diff --git a/util.d.ts b/util.d.ts index 023d23a..0feb092 100644 --- a/util.d.ts +++ b/util.d.ts @@ -1,3 +1,5 @@ +declare module "*.module.css"; + declare module '*.scss' { const content: { [className: string]: string }; export default content; diff --git a/Grid.tsx b/Grid.tsx index 55c9911..208d2f3 100644 --- a/Grid.tsx +++ b/Grid.tsx @@ -3,8 +3,7 @@ import React from 'react'; export type GridProps = { children: React.ReactNode; cols?: number; - imageSize?: [height: number, width: number]; - slim?: boolean; + imageSize?: [ width?: number, height?: number]; }; export const GridContext = React.createContext('grid'); @@ -12,27 +11,19 @@ export const GridContext = React.createContext('grid'); const defaultProps = { cols: 3, imageSize: [], - slim: false, }; - -export const COL_CLASS_MAP: Record<number, string> = { - 2: 'gettingStarted', - 3: 'connectingToDatabase', -}; - export const Grid = ({ children, ...restProps }: GridProps) => { - const normalizedProps = { ...defaultProps, ...restProps }; const settingsString = JSON.stringify(normalizedProps); - const wrapperClassName = `${COL_CLASS_MAP[normalizedProps.cols]}Grid`; + const className = `grid__col-${normalizedProps.cols}`; return ( <GridContext.Provider value={settingsString}> - <div className={wrapperClassName}> - <div className="ant-row"> + <div className="ant-row"> + <div className={className}> {children} </div> </div> diff --git a/GridItem.tsx b/GridItem.tsx index afb7709..f9d0f9b 100644 --- a/GridItem.tsx +++ b/GridItem.tsx @@ -1,6 +1,6 @@ import React from 'react'; -import { COL_CLASS_MAP, GridContext } from './Grid'; +import { GridContext } from './Grid'; export type GridItemProps = { imageUrl: string; @@ -8,30 +8,22 @@ export type GridItemProps = { url: string; }; -const WRAPPER_CLASS_MAP: Record<number, string> = { - 2: 'ant-col ant-col-xs-24 ant-col-sm-24 ant-col-md-24 ant-col-xl-12', - 3: 'ant-col ant-col-xs-24 ant-col-sm-24 ant-col-md-24 ant-col-lg-12 ant-col-xl-8', -} - -export const GridItem = ({ imageUrl, title, url }: GridItemProps) => { +export const GridItem = ({ + imageUrl, + title, + url, +}: GridItemProps) => { return ( <GridContext.Consumer> {(value) => { const settings = JSON.parse(value); - const classPrefix = COL_CLASS_MAP[settings.cols]; - const [ height, width ] = settings.imageSize; - - const wrapperClassName = [ - `${classPrefix}Item`, - WRAPPER_CLASS_MAP[settings.cols], - settings.slim ? 
`${classPrefix}ItemSlim` : '' - ].join(' '); + const [ width, height ] = settings.imageSize; return ( - <div className={wrapperClassName}> + <div className="grid-item"> <a href={url}> - <div className={`${classPrefix}ItemContent`}> - <div className={`${classPrefix}ItemImage`}> + <div> + <div className="grid-item-image"> <img src={imageUrl} alt={title} @@ -39,11 +31,11 @@ export const GridItem = ({ imageUrl, title, url }: GridItemProps) => { height={height} /> </div> - <div className={`${classPrefix}ItemLink`}>{title}</div> + <div className="grid-item-title">{title}</div> </div> </a> </div> - ) + ); }} </GridContext.Consumer> ); diff --git a/_grids.scss b/_grids.scss index 8ae48c8..b747e4f 100644 --- a/_grids.scss +++ b/_grids.scss @@ -0,0 +1,80 @@ +@import 'variables'; + +@mixin create-grid($cols) { + width: 100%; + display: grid; + grid-template-columns: repeat($cols, 1fr); + grid-template-rows: auto; + grid-row-gap: 1.25rem; + grid-column-gap: 1.25rem; + margin-bottom: 1.25rem; +} + +:global { + .grid__col-3 { + @include create-grid(3); + + @media screen and (max-width: $bp-xl) { + @include create-grid(2); + } + + @media screen and (max-width: $bp-lg) { + @include create-grid(1); + } + } + + .grid__col-2 { + @include create-grid(2); + + @media screen and (max-width: $bp-lg) { + @include create-grid(1); + } + } + + .grid-item { + display: flex; + align-items: center; + justify-content: center; + padding: 1.25rem; + border: 1px solid #ECECF0; + border-radius: 4px; + box-sizing: border-box; + box-shadow: none; + transition: all .2s ease; + cursor: pointer; + + &:hover { + position: relative; + top: -3px; + box-shadow: 0px 2px 10px rgba(177, 183, 197, 0.2); + } + + a { + border: none; + + img { + // @TODO I'm sorry future me + box-shadow: none !important; + border: none !important; + max-height: 160px; + } + } + } + + .grid-item-image { + display: flex; + flex: 1; + justify-content: center; + } + + .grid-item-title { + font-family: $header-font; + font-weight: bold; + line-height: 32px; + font-size: 19px; + letter-spacing: 0.02em; + color: $brand-color; + text-align: center; + } +} + diff --git a/_layout.scss b/_layout.scss index 65933f5..1da745f 100644 --- a/_layout.scss +++ b/_layout.scss @@ -983,119 +983,3 @@ section[type=h2] h2 { } } } - - -:global { - .gettingStartedGrid { - @include card-grid; - - .gettingStartedItem { - @include card-grid-item; - - a { - border-bottom: none; - } - - &.originalWidth { - width: calc(100% / 3); - } - - &:nth-child(even) { - @include card-grid-item-adjacent; - } - - &.gettingStartedItemSlim { - .gettingStartedItemContent { - min-height: unset; - text-align: center;; - - .gettingStartedItemImage { - margin-right: 1.25rem; - - > img { - margin: unset; - } - } - - .gettingStartedItemImage, - .gettingStartedItemLink { - display: inline-block; - } - } - } - - .gettingStartedItemContent { - @include card-grid-item-content; - margin: 1.5rem; - - .gettingStartedItemImage { - @include card-grid-item-image; - position: unset; - display: flex; - justify-content: center; - - > img { - box-shadow: none !important; - border: none; - max-height: 160px; - } - } - - .gettingStartedItemLink { - @include card-grid-item-link; - margin-top: 0.5rem; - text-align: center; - } - - .gettingStartedItemText { - @include card-grid-item-text; - } - } - } - } - - .connectingToDatabaseGrid { - @include card-grid; - - > .ant-row { - display: flex; - flex-flow: row wrap; - } - - .connectingToDatabaseItem { - @include card-grid-item-without-width; - - a { - border-bottom: none; - } - - 
.connectingToDatabaseItemContent { - @include card-grid-item-content; - margin: 1.5rem; - - .connectingToDatabaseItemImage { - @include card-grid-item-image; - position: unset; - display: flex; - justify-content: center; - - > img { - box-shadow: none !important; - border: none; - max-height: 160px; - } - } - - .connectingToDatabaseItemLink { - @include card-grid-item-link; - margin-top: 0.5rem; - text-align: center; - } - - .connectingToDatabaseItemText { - @include card-grid-item-text; - } - } - } - } -} diff --git a/index.module.scss b/index.module.scss index 41710c0..5fc34a9 100644 --- a/index.module.scss +++ b/index.module.scss @@ -66,3 +66,4 @@ @import "stb-js-syntax"; @import "layout"; @import 'blockquote'; +@import "grids"; diff --git a/variables.scss b/variables.scss index 15bc1d3..247f189 100644 --- a/variables.scss +++ b/variables.scss @@ -34,3 +34,10 @@ $space: 16px; $header-font: "Cera Pro", "Avenir Next", Avenir, "Helvetica Neue", Helvetica, Ubuntu, "DejaVu Sans", Arial, sans-serif; $body-font: "Inter", -apple-system, BlinkMacSystemFont, "Segoe UI", "Avenir Next", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; $code-font: "Source Code Pro", Inconsolata, Consolas, Courier, "Courier New", "Liberation Mono", monospace; + +// @see https://ant.design/components/grid/#Col +$bp-sm: 576px; +$bp-md: 768px; +$bp-lg: 992px; +$bp-xl: 1200px; +$bp-xxl: 1600px;
|
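The cube docs record above swaps the per-column class maps for generated `grid__col-{n}` CSS classes and flips `imageSize` from `[height, width]` to `[width, height]`. Based solely on the prop types visible in the diff, a hedged usage sketch; the import paths, URLs, and image files are placeholders:

```tsx
import React from 'react';
import { Grid } from './Grid';
import { GridItem } from './GridItem';

// Hypothetical usage based on the prop types in the diff: `cols` selects the
// grid__col-{n} class, and imageSize is now [width, height] (both optional).
export const DatabaseCards = () => (
  <Grid cols={2} imageSize={[160, 160]}>
    <GridItem
      url="/docs/config/databases/postgres" // placeholder URL
      imageUrl="/images/logos/postgres.svg" // placeholder image
      title="Postgres"
    />
    <GridItem
      url="/docs/config/databases/mysql" // placeholder URL
      imageUrl="/images/logos/mysql.svg" // placeholder image
      title="MySQL"
    />
  </Grid>
);
```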