commit_message
stringlengths 9
4.28k
| sha
stringlengths 40
40
| type
stringclasses 10
values | commit_url
stringlengths 78
90
| masked_commit_message
stringlengths 2
4.26k
| author_email
stringclasses 8
values | git_diff
stringlengths 129
19.1M
|
|---|---|---|---|---|---|---|
ci: disable doc publishing until geo blog can be fixed
|
989ad4f1e7a11b06af7e0e3ef840022687fbd7af
|
ci
|
https://github.com/ibis-project/ibis/commit/989ad4f1e7a11b06af7e0e3ef840022687fbd7af
|
disable doc publishing until geo blog can be fixed
|
diff --git a/ibis-docs-main.yml b/ibis-docs-main.yml
index b98f80c..66d8192 100644
--- a/ibis-docs-main.yml
+++ b/ibis-docs-main.yml
@@ -51,7 +51,8 @@ jobs:
- name: verify internal links
run: nix develop --ignore-environment '.#links' -c just checklinks --offline --no-progress
- - name: build and push quarto docs
- run: nix develop --ignore-environment --keep NETLIFY_AUTH_TOKEN -c just docs-deploy
- env:
- NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
+ # TODO: re-enable when geo blog is fixed (to_array)
+ # - name: build and push quarto docs
+ # run: nix develop --ignore-environment --keep NETLIFY_AUTH_TOKEN -c just docs-deploy
+ # env:
+ # NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
|
test: remove unused `spread_type` function
|
527b7501e7b458d092baf18b6605f8a7f4595036
|
test
|
https://github.com/ibis-project/ibis/commit/527b7501e7b458d092baf18b6605f8a7f4595036
|
remove unused `spread_type` function
|
diff --git a/datatypes.py b/datatypes.py
index 5caadfd..57f3dd8 100644
--- a/datatypes.py
+++ b/datatypes.py
@@ -65,21 +65,3 @@ class BigQuerySchema(SchemaMapper):
@classmethod
def to_ibis(cls, fields: list[bq.SchemaField]) -> sch.Schema:
return sch.Schema({f.name: cls._dtype_from_bigquery_field(f) for f in fields})
-
-
-# TODO(kszucs): we can eliminate this function by making dt.DataType traversible
-# using ibis.common.graph.Node, similarly to how we traverse ops.Node instances:
-# node.find(types)
-def spread_type(dt: dt.DataType):
- """Returns a generator that contains all the types in the given type.
-
- For complex types like set and array, it returns the types of the elements.
- """
- if dt.is_array():
- yield from spread_type(dt.value_type)
- elif dt.is_struct():
- for type_ in dt.types:
- yield from spread_type(type_)
- elif dt.is_map():
- raise NotImplementedError("Maps are not supported in BigQuery")
- yield dt
diff --git a/test_datatypes.py b/test_datatypes.py
index cab721f..37eb49c 100644
--- a/test_datatypes.py
+++ b/test_datatypes.py
@@ -5,10 +5,7 @@ import sqlglot as sg
from pytest import param
import ibis.expr.datatypes as dt
-from ibis.backends.bigquery.datatypes import (
- BigQueryType,
- spread_type,
-)
+from ibis.backends.bigquery.datatypes import BigQueryType
@pytest.mark.parametrize(
@@ -79,31 +76,6 @@ def test_simple_failure_mode(datatype):
BigQueryType.to_string(datatype)
[email protected](
- ("type_", "expected"),
- [
- param(
- dt.int64,
- [dt.int64],
- ),
- param(
- dt.Array(dt.int64),
- [dt.int64, dt.Array(value_type=dt.int64)],
- ),
- param(
- dt.Struct.from_tuples([("a", dt.Array(dt.int64))]),
- [
- dt.int64,
- dt.Array(value_type=dt.int64),
- dt.Struct.from_tuples([("a", dt.Array(value_type=dt.int64))]),
- ],
- ),
- ],
-)
-def test_spread_type(type_, expected):
- assert list(spread_type(type_)) == expected
-
-
def test_struct_type():
dtype = dt.Array(dt.int64)
parsed_type = sg.parse_one("BIGINT[]", into=sg.exp.DataType, read="duckdb")
|
|
chore(engine): removed fallbacks for rAF, it's useless
|
d6151fe959532afc5228aa63fd963406d9de777d
|
chore
|
https://github.com/tsparticles/tsparticles/commit/d6151fe959532afc5228aa63fd963406d9de777d
|
removed fallbacks for rAF, it's useless
|
diff --git a/Container.ts b/Container.ts
index 5a1c134..ed57c94 100644
--- a/Container.ts
+++ b/Container.ts
@@ -1,4 +1,3 @@
-import { animate, cancelAnimation, isFunction } from "../Utils/Utils";
import { Canvas } from "./Canvas";
import type { ClickMode } from "../Enums/Modes/ClickMode";
import type { Engine } from "../engine";
@@ -17,6 +16,7 @@ import { Particles } from "./Particles";
import { Retina } from "./Retina";
import type { Vector } from "./Utils/Vector";
import { getRangeValue } from "../Utils/NumberUtils";
+import { isFunction } from "../Utils/Utils";
import { loadOptions } from "../Utils/OptionsUtils";
/**
@@ -400,7 +400,7 @@ export class Container {
let refreshTime = force;
- this._drawAnimationFrame = animate()(async (timestamp) => {
+ this._drawAnimationFrame = requestAnimationFrame(async (timestamp) => {
if (refreshTime) {
this.lastFrameTime = undefined;
@@ -562,7 +562,7 @@ export class Container {
}
if (this._drawAnimationFrame !== undefined) {
- cancelAnimation()(this._drawAnimationFrame);
+ cancelAnimationFrame(this._drawAnimationFrame);
delete this._drawAnimationFrame;
}
diff --git a/Utils.ts b/Utils.ts
index 5ae52e3..975a51f 100644
--- a/Utils.ts
+++ b/Utils.ts
@@ -169,26 +169,6 @@ export function safeMatchMedia(query: string): MediaQueryList | undefined {
return matchMedia(query);
}
-/**
- * Calls the requestAnimationFrame function or a polyfill
- * @returns the animation callback id, so it can be canceled
- */
-export function animate(): (callback: FrameRequestCallback) => number {
- return isSsr()
- ? (callback: FrameRequestCallback): number => setTimeout(callback)
- : (callback: FrameRequestCallback): number => (requestAnimationFrame || setTimeout)(callback);
-}
-
-/**
- * Cancels the requestAnimationFrame function or a polyfill
- * @returns the animation cancelling function
- */
-export function cancelAnimation(): (handle: number) => void {
- return isSsr()
- ? (handle: number): void => clearTimeout(handle)
- : (handle: number): void => (cancelAnimationFrame || clearTimeout)(handle);
-}
-
/**
* Checks if a value is equal to the destination, if same type, or is in the provided array
* @param value - the value to check
|
|
docs: fix broken links from Semrush report (#7025)
|
39eebf622666fdf220f889850fe7e4981cd90d08
|
docs
|
https://github.com/wzhiqing/cube/commit/39eebf622666fdf220f889850fe7e4981cd90d08
|
fix broken links from Semrush report (#7025)
|
diff --git a/AlertBox.tsx b/AlertBox.tsx
index 5cfb33d..3a3c6bd 100644
--- a/AlertBox.tsx
+++ b/AlertBox.tsx
@@ -1,13 +1,13 @@
-import React from 'react';
-import classes from './AlertBox.module.css';
-import classnames from 'classnames/bind';
+import React from "react";
+import classes from "./AlertBox.module.css";
+import classnames from "classnames/bind";
const cn = classnames.bind(classes);
export enum AlertBoxTypes {
- DANGER = 'danger',
- INFO = 'info',
- SUCCESS = 'success',
- WARNING = 'warning',
+ DANGER = "danger",
+ INFO = "info",
+ SUCCESS = "success",
+ WARNING = "warning",
}
declare const TypeToEmoji: {
@@ -19,55 +19,64 @@ declare const TypeToEmoji: {
type CalloutType = keyof typeof TypeToEmoji;
export type AlertBoxProps = {
- children: string;
+ children: React.ReactNode;
heading?: string;
type: AlertBoxTypes;
-}
+};
const typeMapping: Record<AlertBoxTypes, CalloutType> = {
- 'danger': 'error',
- info: 'info',
- warning: 'warning',
- success: 'default',
-}
+ danger: "error",
+ info: "info",
+ warning: "warning",
+ success: "default",
+};
const iconMapping: Record<string, any> = {
- 'danger': '🚫',
- info: 'ℹ️',
- warning: '⚠️',
- success: '✅',
+ danger: "🚫",
+ info: "ℹ️",
+ warning: "⚠️",
+ success: "✅",
};
export const AlertBox = ({ children, heading, type }: AlertBoxProps) => {
- const header = heading
- ? (
- <div className={classes.AlertBox__header}>
- <span className={cn('AlertBox__HeaderIcon')}>{iconMapping[type]}</span>
- {heading}
- </div>
- )
- : null;
+ const header = heading ? (
+ <div className={classes.AlertBox__header}>
+ <span className={cn("AlertBox__HeaderIcon")}>{iconMapping[type]}</span>
+ {heading}
+ </div>
+ ) : null;
return (
- <div className={cn('AlertBox__Wrapper', `AlertBox__Wrapper--${typeMapping[type]}`)}>
+ <div
+ className={cn(
+ "AlertBox__Wrapper",
+ `AlertBox__Wrapper--${typeMapping[type]}`
+ )}
+ >
{header}
- <div className={classes.AlertBox__content}>
- {children}
- </div>
+ <div className={classes.AlertBox__content}>{children}</div>
</div>
- )
-}
+ );
+};
-export type AlertBoxSubclass = Omit<AlertBoxProps, 'type'>;
+export type AlertBoxSubclass = Omit<AlertBoxProps, "type">;
export type DangerBoxProps = AlertBoxSubclass;
-export const DangerBox = (props: DangerBoxProps) => <AlertBox type={AlertBoxTypes.DANGER} {...props} />;
+export const DangerBox = (props: DangerBoxProps) => (
+ <AlertBox type={AlertBoxTypes.DANGER} {...props} />
+);
export type InfoBoxProps = AlertBoxSubclass;
-export const InfoBox = (props: InfoBoxProps) => <AlertBox type={AlertBoxTypes.INFO} {...props} />;
+export const InfoBox = (props: InfoBoxProps) => (
+ <AlertBox type={AlertBoxTypes.INFO} {...props} />
+);
export type SuccessBoxProps = AlertBoxSubclass;
-export const SuccessBox = (props: SuccessBoxProps) => <AlertBox type={AlertBoxTypes.SUCCESS} {...props} />;
+export const SuccessBox = (props: SuccessBoxProps) => (
+ <AlertBox type={AlertBoxTypes.SUCCESS} {...props} />
+);
export type WarningBoxProps = AlertBoxSubclass;
-export const WarningBox = (props: WarningBoxProps) => <AlertBox type={AlertBoxTypes.WARNING} {...props} />;
+export const WarningBox = (props: WarningBoxProps) => (
+ <AlertBox type={AlertBoxTypes.WARNING} {...props} />
+);
diff --git a/CommunitySupportedDriver.tsx b/CommunitySupportedDriver.tsx
index 482bd24..7702ed3 100644
--- a/CommunitySupportedDriver.tsx
+++ b/CommunitySupportedDriver.tsx
@@ -0,0 +1,20 @@
+import { WarningBox } from "@/components/mdx/AlertBox/AlertBox";
+import { Link } from "@/components/overrides/Anchor/Link";
+
+export interface CommunitySupportedDriverProps {
+ dataSource: string;
+}
+
+export const CommunitySupportedDriver = ({
+ dataSource,
+}: CommunitySupportedDriverProps) => {
+ return (
+ <WarningBox>
+ The driver for {dataSource} is{" "}
+ <Link href="/product/configuration/data-sources#driver-support">
+ community-supported
+ </Link>{" "}
+ and is not supported by Cube or the vendor.
+ </WarningBox>
+ );
+};
diff --git a/index.ts b/index.ts
index 567209e..e1ac6f6 100644
--- a/index.ts
+++ b/index.ts
@@ -26,6 +26,7 @@ import { Table } from '@/components/overrides/Table/Table';
import { Td } from '@/components/overrides/Table/Td';
import { Th } from '@/components/overrides/Table/Th';
import { Tr } from '@/components/overrides/Table/Tr';
+import { CommunitySupportedDriver } from '@/components/mdx/Banners/CommunitySupportedDriver';
export const components = {
...Buttons,
@@ -54,6 +55,8 @@ export const components = {
Diagram,
YouTubeVideo,
+ CommunitySupportedDriver,
+
// Overrides
h1: H1,
a: Link,
diff --git a/real-time-data-fetch.mdx b/real-time-data-fetch.mdx
index 9ca5f31..4b8fe95 100644
--- a/real-time-data-fetch.mdx
+++ b/real-time-data-fetch.mdx
@@ -108,9 +108,9 @@ const Chart = ({ query }) => {
## Refresh Rate
As in the case of a regular data fetch, real-time data fetch obeys
-[`refresh_key` refresh rules](caching#refresh-keys). In order to provide a
-desired refresh rate, `refresh_key` should reflect the rate of change of the
-underlying data set; the querying time should also be much less than the desired
-refresh rate. Please use the
+[`refresh_key` refresh rules](/product/caching#refresh-keys). In order to
+provide a desired refresh rate, `refresh_key` should reflect the rate of change
+of the underlying data set; the querying time should also be much less than the
+desired refresh rate. Please use the
[`every`](/product/data-modeling/reference/cube#refresh_key) parameter to adjust
the refresh interval.
diff --git a/_meta.js b/_meta.js
index b927493..f2b3c90 100644
--- a/_meta.js
+++ b/_meta.js
@@ -7,7 +7,7 @@ module.exports = {
"elasticsearch": "Elasticsearch",
"firebolt": "Firebolt",
"google-bigquery": "Google BigQuery",
- "hive": "Hive",
+ "hive": "Hive / SparkSQL",
"ksqldb": "ksqlDB",
"materialize": "Materialize",
"mongodb": "MongoDB",
diff --git a/druid.mdx b/druid.mdx
index 353d9e4..874095c 100644
--- a/druid.mdx
+++ b/druid.mdx
@@ -5,11 +5,7 @@ redirect_from:
# Druid
-<WarningBox>
- The driver for Druid is{" "}
- <a href="../databases#driver-support">community-supported</a> and is not
- supported by Cube or the vendor.
-</WarningBox>
+<CommunitySupportedDriver dataSource="Druid" />
## Prerequisites
diff --git a/elasticsearch.mdx b/elasticsearch.mdx
index cc0ec04..5cbc32f 100644
--- a/elasticsearch.mdx
+++ b/elasticsearch.mdx
@@ -5,11 +5,7 @@ redirect_from:
# Elasticsearch
-<WarningBox>
- The driver for Elasticsearch is{" "}
- <a href="../databases#driver-support">community-supported</a> and is not
- supported by Cube or the vendor.
-</WarningBox>
+<CommunitySupportedDriver dataSource="Elasticsearch" />
## Prerequisites
diff --git a/hive.mdx b/hive.mdx
index 20c9170..8b2b734 100644
--- a/hive.mdx
+++ b/hive.mdx
@@ -3,13 +3,9 @@ redirect_from:
- /config/databases/hive-sparksql
---
-# Hive
+# Hive / SparkSQL
-<WarningBox>
- The driver for Hive/SparkSQL is{" "}
- <a href="../databases#driver-support">community-supported</a> and is not
- supported by Cube or the vendor.
-</WarningBox>
+<CommunitySupportedDriver dataSource="Hive / SparkSQL" />
## Prerequisites
diff --git a/mongodb.mdx b/mongodb.mdx
index 75779f4..9872170 100644
--- a/mongodb.mdx
+++ b/mongodb.mdx
@@ -5,11 +5,7 @@ redirect_from:
# MongoDB
-<WarningBox>
- The driver for MongoDB is{" "}
- <a href="../databases#driver-support">community-supported</a> and is not
- supported by Cube or the vendor.
-</WarningBox>
+<CommunitySupportedDriver dataSource="MongoDB" />
## Prerequisites
diff --git a/oracle.mdx b/oracle.mdx
index 47d0201..2840b42 100644
--- a/oracle.mdx
+++ b/oracle.mdx
@@ -5,11 +5,7 @@ redirect_from:
# Oracle
-<WarningBox>
- The driver for Oracle is{" "}
- <a href="../databases#driver-support">community-supported</a> and is not
- supported by Cube or the vendor.
-</WarningBox>
+<CommunitySupportedDriver dataSource="Oracle" />
## Prerequisites
diff --git a/sqlite.mdx b/sqlite.mdx
index 30b1013..e295c76 100644
--- a/sqlite.mdx
+++ b/sqlite.mdx
@@ -5,11 +5,7 @@ redirect_from:
# SQLite
-<WarningBox>
- The driver for SQLite is{" "}
- <a href="../databases#driver-support">community-supported</a> and is not
- supported by Cube or the vendor.
-</WarningBox>
+<CommunitySupportedDriver dataSource="SQLite" />
## Prerequisites
diff --git a/visualization-tools.mdx b/visualization-tools.mdx
index 8685612..8fa5073 100644
--- a/visualization-tools.mdx
+++ b/visualization-tools.mdx
@@ -135,17 +135,17 @@ Cube provides integration libraries for popular front-end frameworks:
<Grid imageSize={[56, 56]}>
<GridItem
- url="../frontend-introduction/react"
+ url="/product/apis-integrations/javascript-sdk/react"
imageUrl="https://static.cube.dev/icons/react.svg"
title="React"
/>
<GridItem
- url="../frontend-introduction/vue"
+ url="/product/apis-integrations/javascript-sdk/vue"
imageUrl="https://static.cube.dev/icons/vue.svg"
title="Vue"
/>
<GridItem
- url="../frontend-introduction/angular"
+ url="/product/apis-integrations/javascript-sdk/angular"
imageUrl="https://static.cube.dev/icons/angular.svg"
title="Angular"
/>
@@ -159,17 +159,17 @@ out REST and GraphQL APIs.
<Grid imageSize={[56, 56]}>
<GridItem
- url="../backend/sql"
+ url="/product/apis-integrations/sql-api"
imageUrl="https://raw.githubusercontent.com/cube-js/cube.js/master/docs/static/icons/sql.svg"
title="SQL API"
/>
<GridItem
- url="../rest-api"
+ url="/product/apis-integrations/rest-api"
imageUrl="https://raw.githubusercontent.com/cube-js/cube.js/master/docs/static/icons/rest.svg"
title="REST API"
/>
<GridItem
- url="../backend/graphql"
+ url="/product/apis-integrations/graphql-api"
imageUrl="https://raw.githubusercontent.com/cube-js/cube.js/master/docs/static/icons/graphql.svg"
title="GraphQL API"
/>
diff --git a/observable.mdx b/observable.mdx
index c4aa4e4..1872486 100644
--- a/observable.mdx
+++ b/observable.mdx
@@ -211,4 +211,4 @@ You can also create a visualization of the executed REST API request.
[ref-getting-started]: /product/getting-started/cloud
[ref-sql-api]: /product/apis-integrations/sql-api
-[ref-rest-api]: /backend/rest-api
+[ref-rest-api]: /product/apis-integrations/rest-api
diff --git a/concepts.mdx b/concepts.mdx
index a57cfcc..b4e71be 100644
--- a/concepts.mdx
+++ b/concepts.mdx
@@ -536,7 +536,8 @@ Pre-Aggregations][ref-caching-preaggs-intro].
/product/data-modeling/reference/joins#relationship
[ref-schema-ref-sql]: /product/data-modeling/reference/cube#sql
[ref-schema-ref-sql-table]: /product/data-modeling/reference/cube#sql_table
-[ref-tutorial-incremental-preagg]: /incremental-pre-aggregations
+[ref-tutorial-incremental-preagg]:
+ /product/data-modeling/reference/pre-aggregations#incremental
[self-dimensions]: #dimensions
[self-measures]: #measures
[wiki-olap]: https://en.wikipedia.org/wiki/Online_analytical_processing
diff --git a/learn-more.mdx b/learn-more.mdx
index 5d70713..ecbe7d7 100644
--- a/learn-more.mdx
+++ b/learn-more.mdx
@@ -24,7 +24,7 @@ Cube can be queried in a variety of ways. Explore how to use
## Caching
-Learn more about the [two-level cache](/docs/caching) and how
+Learn more about the [two-level cache](/product/caching) and how
[pre-aggregations help speed up queries](/product/caching/getting-started-pre-aggregations).
For a deeper dive, take a look at the
[related recipes](/guides/recipes/overview#recipes-query-acceleration).
diff --git a/integrations.mdx b/integrations.mdx
index 59754a9..a4c681f 100644
--- a/integrations.mdx
+++ b/integrations.mdx
@@ -38,12 +38,12 @@ following guides and configuration examples to get tool-specific instructions:
<Grid imageSize={[56, 56]}>
<GridItem
- url="datadog"
+ url="integrations/datadog"
imageUrl="https://static.cube.dev/icons/datadog.svg"
title="Datadog"
/>
<GridItem
- url="grafana-cloud"
+ url="integrations/grafana-cloud"
imageUrl="https://static.cube.dev/icons/grafana.svg"
title="Grafana Cloud"
/>
diff --git a/index.d.ts b/index.d.ts
index 0e32eef..8cd9ab8 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -116,12 +116,12 @@ declare module '@cubejs-client/react' {
type QueryRendererProps = {
/**
- * Analytic query. [Learn more about it's format](query-format)
+ * Analytic query. [Learn more about it's format](/product/apis-integrations/rest-api/query-format)
*/
query: Query | Query[];
queries?: { [key: string]: Query };
/**
- * Indicates whether the generated by `Cube.js` SQL Code should be requested. See [rest-api#sql](rest-api#api-reference-v-1-sql). When set to `only` then only the request to [/v1/sql](rest-api#api-reference-v-1-sql) will be performed. When set to `true` the sql request will be performed along with the query request. Will not be performed if set to `false`
+ * Indicates whether the generated by `Cube.js` SQL Code should be requested. See [rest-api#sql](/reference/rest-api#v1sql). When set to `only` then only the request to [/v1/sql](/reference/rest-api#v1sql) will be performed. When set to `true` the sql request will be performed along with the query request. Will not be performed if set to `false`
*/
loadSql?: 'only' | boolean;
/**
@@ -459,7 +459,7 @@ declare module '@cubejs-client/react' {
*/
skip?: boolean;
/**
- * Use continuous fetch behavior. See [Real-Time Data Fetch](real-time-data-fetch)
+ * Use continuous fetch behavior. See [Real-Time Data Fetch](/product/apis-integrations/rest-api/real-time-data-fetch)
*/
subscribe?: boolean;
/**
|
|
feat: octal Debug representation of `tree::EntryMode`.
This makes it easier to reason about.
|
cd61c25369d3e39b6160bac4b332b177dabddf4b
|
feat
|
https://github.com/Byron/gitoxide/commit/cd61c25369d3e39b6160bac4b332b177dabddf4b
|
octal Debug representation of `tree::EntryMode`.
This makes it easier to reason about.
|
diff --git a/tree_with_rewrites.rs b/tree_with_rewrites.rs
index e34cde3..c51922c 100644
--- a/tree_with_rewrites.rs
+++ b/tree_with_rewrites.rs
@@ -14,25 +14,19 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {
Addition {
location: "a",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
location: "b",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
location: "d",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -42,9 +36,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(587ff082e0b98914788500eae5dd6a33f04883c9),
},
Addition {
@@ -54,9 +46,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
]
@@ -76,25 +66,19 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {
Addition {
location: "a",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
location: "b",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
location: "d",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -104,9 +88,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(587ff082e0b98914788500eae5dd6a33f04883c9),
},
Addition {
@@ -116,9 +98,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
]
@@ -153,35 +133,23 @@ fn changes_against_modified_tree_with_filename_tracking() -> crate::Result {
[
Modification {
location: "a",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(78981922613b2afb6025042ff6bd878ac1994e85),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(b4f17b61de71d9b2e54ac9e62b1629ae2d97a6a7),
},
Modification {
location: "dir",
- previous_entry_mode: EntryMode(
- 16384,
- ),
+ previous_entry_mode: EntryMode(0o40000),
previous_id: Sha1(e5c63aefe4327cb1c780c71966b678ce8e4225da),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(c7ac5f82f536976f3561c9999b5f11e5893358be),
},
Modification {
location: "dir/c",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(6695780ceb14b05e076a99bbd2babf34723b3464),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),
},
]
@@ -198,35 +166,23 @@ fn changes_against_modified_tree_with_filename_tracking() -> crate::Result {
[
Modification {
location: "a",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(78981922613b2afb6025042ff6bd878ac1994e85),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(b4f17b61de71d9b2e54ac9e62b1629ae2d97a6a7),
},
Modification {
location: "dir",
- previous_entry_mode: EntryMode(
- 16384,
- ),
+ previous_entry_mode: EntryMode(0o40000),
previous_id: Sha1(e5c63aefe4327cb1c780c71966b678ce8e4225da),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(c7ac5f82f536976f3561c9999b5f11e5893358be),
},
Modification {
location: "c",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(6695780ceb14b05e076a99bbd2babf34723b3464),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),
},
]
@@ -340,40 +296,28 @@ fn rename_by_similarity() -> crate::Result {
[
Modification {
location: "b",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(61780798228d17af2d34fce4cfbdf35556832472),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),
},
Modification {
location: "dir",
- previous_entry_mode: EntryMode(
- 16384,
- ),
+ previous_entry_mode: EntryMode(0o40000),
previous_id: Sha1(d1622e275dbb2cb3215a0bdcd2fc77273891f360),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(6602e61ea053525e4907e155c0b3da3a269e1385),
},
Deletion {
location: "dir/c",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),
},
Addition {
location: "dir/c-moved",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(f01e8ddf5adc56985b9a1cda6d7c7ef9e3abe034),
},
]
@@ -404,31 +348,21 @@ fn rename_by_similarity() -> crate::Result {
[
Modification {
location: "b",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(61780798228d17af2d34fce4cfbdf35556832472),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),
},
Modification {
location: "dir",
- previous_entry_mode: EntryMode(
- 16384,
- ),
+ previous_entry_mode: EntryMode(0o40000),
previous_id: Sha1(d1622e275dbb2cb3215a0bdcd2fc77273891f360),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(6602e61ea053525e4907e155c0b3da3a269e1385),
},
Rewrite {
source_location: "dir/c",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),
diff: Some(
@@ -440,9 +374,7 @@ fn rename_by_similarity() -> crate::Result {
similarity: 0.65,
},
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(f01e8ddf5adc56985b9a1cda6d7c7ef9e3abe034),
location: "dir/c-moved",
relation: None,
@@ -508,26 +440,18 @@ fn copies_by_identity() -> crate::Result {
[
Modification {
location: "dir",
- previous_entry_mode: EntryMode(
- 16384,
- ),
+ previous_entry_mode: EntryMode(0o40000),
previous_id: Sha1(6602e61ea053525e4907e155c0b3da3a269e1385),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(f01fd5b4d733a4ae749cbb58a828cdb3f342f298),
},
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(f00c965d8307308469e537302baa73048488f162),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(f00c965d8307308469e537302baa73048488f162),
location: "c1",
relation: None,
@@ -535,15 +459,11 @@ fn copies_by_identity() -> crate::Result {
},
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(f00c965d8307308469e537302baa73048488f162),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(f00c965d8307308469e537302baa73048488f162),
location: "c2",
relation: None,
@@ -551,15 +471,11 @@ fn copies_by_identity() -> crate::Result {
},
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(f00c965d8307308469e537302baa73048488f162),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(f00c965d8307308469e537302baa73048488f162),
location: "dir/c3",
relation: None,
@@ -592,26 +508,18 @@ fn copies_by_similarity() -> crate::Result {
[
Modification {
location: "dir",
- previous_entry_mode: EntryMode(
- 16384,
- ),
+ previous_entry_mode: EntryMode(0o40000),
previous_id: Sha1(f01fd5b4d733a4ae749cbb58a828cdb3f342f298),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(1d7e20e07562a54af0408fd2669b0c56a6faa6f0),
},
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
location: "c4",
relation: None,
@@ -619,9 +527,7 @@ fn copies_by_similarity() -> crate::Result {
},
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
diff: Some(
@@ -633,9 +539,7 @@ fn copies_by_similarity() -> crate::Result {
similarity: 0.8888889,
},
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(08fe19ca4d2f79624f35333157d610811efc1aed),
location: "c5",
relation: None,
@@ -643,9 +547,7 @@ fn copies_by_similarity() -> crate::Result {
},
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
diff: Some(
@@ -657,9 +559,7 @@ fn copies_by_similarity() -> crate::Result {
similarity: 0.8888889,
},
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),
location: "dir/c6",
relation: None,
@@ -729,15 +629,11 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {
[
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
location: "c6",
relation: None,
@@ -745,15 +641,11 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {
},
Rewrite {
source_location: "dir/c6",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),
location: "c7",
relation: None,
@@ -761,9 +653,7 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {
},
Rewrite {
source_location: "c5",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(08fe19ca4d2f79624f35333157d610811efc1aed),
diff: Some(
@@ -775,9 +665,7 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {
similarity: 0.75,
},
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(97b3d1a5707f8a11fa5fa8bc6c3bd7b3965601fd),
location: "newly-added",
relation: None,
@@ -785,13 +673,9 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {
},
Modification {
location: "b",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(f198d0640214092732566fb00543163845c8252c),
},
]
@@ -828,15 +712,11 @@ fn copies_in_entire_tree_by_similarity_with_limit() -> crate::Result {
[
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
location: "c6",
relation: None,
@@ -844,15 +724,11 @@ fn copies_in_entire_tree_by_similarity_with_limit() -> crate::Result {
},
Rewrite {
source_location: "dir/c6",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),
location: "c7",
relation: None,
@@ -860,21 +736,15 @@ fn copies_in_entire_tree_by_similarity_with_limit() -> crate::Result {
},
Modification {
location: "b",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(f198d0640214092732566fb00543163845c8252c),
},
Addition {
location: "newly-added",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(97b3d1a5707f8a11fa5fa8bc6c3bd7b3965601fd),
},
]
@@ -910,26 +780,18 @@ fn copies_by_similarity_with_limit() -> crate::Result {
[
Modification {
location: "dir",
- previous_entry_mode: EntryMode(
- 16384,
- ),
+ previous_entry_mode: EntryMode(0o40000),
previous_id: Sha1(f01fd5b4d733a4ae749cbb58a828cdb3f342f298),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(1d7e20e07562a54af0408fd2669b0c56a6faa6f0),
},
Rewrite {
source_location: "base",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),
location: "c4",
relation: None,
@@ -938,17 +800,13 @@ fn copies_by_similarity_with_limit() -> crate::Result {
Addition {
location: "c5",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(08fe19ca4d2f79624f35333157d610811efc1aed),
},
Addition {
location: "dir/c6",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),
},
]
@@ -984,15 +842,11 @@ fn realistic_renames_by_identity() -> crate::Result {
[
Rewrite {
source_location: "git-index/src/file.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "git-index/src/file/mod.rs",
relation: None,
@@ -1001,20 +855,14 @@ fn realistic_renames_by_identity() -> crate::Result {
Addition {
location: "git-index/tests/index/file/access.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Modification {
location: "git-index/tests/index/file/mod.rs",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(8ba3a16384aacc37d01564b28401755ce8053f51),
},
]
@@ -1070,36 +918,26 @@ fn realistic_renames_disabled() -> crate::Result {
Deletion {
location: "git-index/src/file.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
location: "git-index/src/file/mod.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
location: "git-index/tests/index/file/access.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Modification {
location: "git-index/tests/index/file/mod.rs",
- previous_entry_mode: EntryMode(
- 33188,
- ),
+ previous_entry_mode: EntryMode(0o100644),
previous_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(8ba3a16384aacc37d01564b28401755ce8053f51),
},
]
@@ -1161,9 +999,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(0026010e87631065a2739f627622feb14f903fd4),
},
Addition {
@@ -1173,9 +1009,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(0026010e87631065a2739f627622feb14f903fd4),
},
Deletion {
@@ -1185,9 +1019,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
@@ -1197,9 +1029,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1209,9 +1039,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1221,9 +1049,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
@@ -1233,9 +1059,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
@@ -1245,9 +1069,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
@@ -1257,9 +1079,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
@@ -1269,9 +1089,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
@@ -1281,9 +1099,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1293,9 +1109,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1305,9 +1119,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1317,9 +1129,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1329,9 +1139,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1341,9 +1149,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
@@ -1353,9 +1159,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
1,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
@@ -1365,9 +1169,7 @@ fn realistic_renames_disabled_2() -> crate::Result {
2,
),
),
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
]
@@ -1456,33 +1258,25 @@ fn realistic_renames_disabled_3() -> crate::Result {
Addition {
location: "src/ein.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Addition {
location: "src/gix.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
location: "src/plumbing-cli.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
Deletion {
location: "src/porcelain-cli.rs",
relation: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
},
]
@@ -1539,15 +1333,11 @@ fn realistic_renames_by_identity_3() -> crate::Result {
[
Rewrite {
source_location: "src/plumbing-cli.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "src/ein.rs",
relation: None,
@@ -1555,15 +1345,11 @@ fn realistic_renames_by_identity_3() -> crate::Result {
},
Rewrite {
source_location: "src/porcelain-cli.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: None,
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "src/gix.rs",
relation: None,
@@ -1629,9 +1415,7 @@ fn realistic_renames_2() -> crate::Result {
[
Rewrite {
source_location: "git-sec",
- source_entry_mode: EntryMode(
- 16384,
- ),
+ source_entry_mode: EntryMode(0o40000),
source_relation: Some(
Parent(
1,
@@ -1639,9 +1423,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(0026010e87631065a2739f627622feb14f903fd4),
diff: None,
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(0026010e87631065a2739f627622feb14f903fd4),
location: "gix-sec",
relation: Some(
@@ -1653,9 +1435,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/CHANGELOG.md",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1663,9 +1443,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/CHANGELOG.md",
relation: Some(
@@ -1677,9 +1455,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/Cargo.toml",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1687,9 +1463,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/Cargo.toml",
relation: Some(
@@ -1701,9 +1475,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/src/identity.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1711,9 +1483,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/src/identity.rs",
relation: Some(
@@ -1725,9 +1495,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/src/lib.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1735,9 +1503,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/src/lib.rs",
relation: Some(
@@ -1749,9 +1515,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/src/permission.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1759,9 +1523,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/src/permission.rs",
relation: Some(
@@ -1773,9 +1535,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/src/trust.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1783,9 +1543,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/src/trust.rs",
relation: Some(
@@ -1797,9 +1555,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/tests/sec.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1807,9 +1563,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/tests/sec.rs",
relation: Some(
@@ -1821,9 +1575,7 @@ fn realistic_renames_2() -> crate::Result {
},
Rewrite {
source_location: "git-sec/tests/identity/mod.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
1,
@@ -1831,9 +1583,7 @@ fn realistic_renames_2() -> crate::Result {
),
source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),
location: "gix-sec/tests/identity/mod.rs",
relation: Some(
@@ -1927,9 +1677,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
[
Rewrite {
source_location: "src/plumbing/options.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
2,
@@ -1937,9 +1685,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
),
source_id: Sha1(00750edc07d6415dcc07ae0351e9397b0222b7ba),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(00750edc07d6415dcc07ae0351e9397b0222b7ba),
location: "src/plumbing-renamed/options/mod.rs",
relation: Some(
@@ -1951,9 +1697,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
},
Rewrite {
source_location: "src/plumbing/mod.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
2,
@@ -1961,9 +1705,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
),
source_id: Sha1(0cfbf08886fca9a91cb753ec8734c84fcbe52c9f),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(0cfbf08886fca9a91cb753ec8734c84fcbe52c9f),
location: "src/plumbing-renamed/mod.rs",
relation: Some(
@@ -1975,9 +1717,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
},
Rewrite {
source_location: "src/plumbing/main.rs",
- source_entry_mode: EntryMode(
- 33188,
- ),
+ source_entry_mode: EntryMode(0o100644),
source_relation: Some(
ChildOfParent(
2,
@@ -1985,9 +1725,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
),
source_id: Sha1(d00491fd7e5bb6fa28c517a0bb32b8b506539d4d),
diff: None,
- entry_mode: EntryMode(
- 33188,
- ),
+ entry_mode: EntryMode(0o100644),
id: Sha1(d00491fd7e5bb6fa28c517a0bb32b8b506539d4d),
location: "src/plumbing-renamed/main.rs",
relation: Some(
@@ -1999,9 +1737,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
},
Rewrite {
source_location: "src/plumbing",
- source_entry_mode: EntryMode(
- 16384,
- ),
+ source_entry_mode: EntryMode(0o40000),
source_relation: Some(
Parent(
2,
@@ -2009,9 +1745,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {
),
source_id: Sha1(b9d41dcdbd92fcab2fb6594d04f2ad99b3472621),
diff: None,
- entry_mode: EntryMode(
- 16384,
- ),
+ entry_mode: EntryMode(0o40000),
id: Sha1(202702465d7bb291153629dc2e8b353afe9cbdae),
location: "src/plumbing-renamed",
relation: Some(
diff --git a/mod.rs b/mod.rs
index c1e3b0a..fd3de08 100644
--- a/mod.rs
+++ b/mod.rs
@@ -42,10 +42,16 @@ pub struct Editor<'a> {
///
/// Note that even though it can be created from any `u16`, it should be preferable to
/// create it by converting [`EntryKind`] into `EntryMode`.
-#[derive(Clone, Copy, PartialEq, Eq, Debug, Ord, PartialOrd, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EntryMode(pub u16);
+impl std::fmt::Debug for EntryMode {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "EntryMode({:#o})", self.0)
+ }
+}
+
/// A discretized version of ideal and valid values for entry modes.
///
/// Note that even though it can represent every valid [mode](EntryMode), it might
|
|
chore: mark query condition keys ('id:in' etc) as deprecated
|
7248762178d3c0386e78a72fd5e4cfa9701de882
|
chore
|
https://github.com/mikro-orm/mikro-orm/commit/7248762178d3c0386e78a72fd5e4cfa9701de882
|
mark query condition keys ('id:in' etc) as deprecated
|
diff --git a/query-conditions.md b/query-conditions.md
index ec1fee9..66828d7 100644
--- a/query-conditions.md
+++ b/query-conditions.md
@@ -35,6 +35,8 @@ const res = await orm.em.find(Author, {
Another way to do this by including the operator in your keys:
+> This approach is deprecated and will be removed in future versions.
+
```typescript
const res = await orm.em.find(Author, { $and: [
{ 'id:in': [1, 2, 7] },
|
|
build(docker): simplify risingwave docker setup (#8126)
Remove risingwave-specific minio service in favor of existing minio
service.
|
f2ff173c1467c5921edfb0ac9790ff8b0340bfc9
|
build
|
https://github.com/rohankumardubey/ibis/commit/f2ff173c1467c5921edfb0ac9790ff8b0340bfc9
|
simplify risingwave docker setup (#8126)
Remove risingwave-specific minio service in favor of existing minio
service.
|
diff --git a/compose.yaml b/compose.yaml
index 0f6808b..153b4c7 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -104,9 +104,10 @@ services:
retries: 20
test:
- CMD-SHELL
- - mc ready data && mc mb --ignore-existing data/trino
+ - mc ready data && mc mb --ignore-existing data/trino data/risingwave
networks:
- trino
+ - risingwave
volumes:
- $PWD/docker/minio/config.json:/.mc/config.json:ro
@@ -537,74 +538,26 @@ services:
networks:
- impala
- risingwave-minio:
- image: "quay.io/minio/minio:latest"
- command:
- - server
- - "--address"
- - "0.0.0.0:9301"
- - "--console-address"
- - "0.0.0.0:9400"
- - /data
- expose:
- - "9301"
- - "9400"
- ports:
- - "9301:9301"
- - "9400:9400"
- depends_on: []
- volumes:
- - "risingwave-minio:/data"
- entrypoint: /bin/sh -c "set -e; mkdir -p \\"/data/hummock001\\"; /usr/bin/docker-entrypoint.sh \\"$$0\\" \\"$$@\\" "
- environment:
- MINIO_CI_CD: "1"
- MINIO_ROOT_PASSWORD: hummockadmin
- MINIO_ROOT_USER: hummockadmin
- MINIO_DOMAIN: "risingwave-minio"
- container_name: risingwave-minio
- healthcheck:
- test:
- - CMD-SHELL
- - bash -c 'printf \\"GET / HTTP/1.1\\n\\n\\" > /dev/tcp/127.0.0.1/9301; exit $$?;'
- interval: 5s
- timeout: 5s
- retries: 20
- restart: always
- networks:
- - risingwave
-
risingwave:
image: ghcr.io/risingwavelabs/risingwave:nightly-20240122
command: "standalone --meta-opts=\\" \\
--advertise-addr 0.0.0.0:5690 \\
--backend mem \\
- --state-store hummock+minio://hummockadmin:hummockadmin@risingwave-minio:9301/hummock001 \\
- --data-directory hummock_001 \\
- --config-path /risingwave.toml\\" \\
- --compute-opts=\\" \\
- --config-path /risingwave.toml \\
- --advertise-addr 0.0.0.0:5688 \\
- --role both \\" \\
- --frontend-opts=\\" \\
- --config-path /risingwave.toml \\
- --listen-addr 0.0.0.0:4566 \\
- --advertise-addr 0.0.0.0:4566 \\" \\
- --compactor-opts=\\" \\
- --advertise-addr 0.0.0.0:6660 \\""
- expose:
- - "4566"
+ --state-store hummock+minio://accesskey:secretkey@minio:9000/risingwave \\
+ --data-directory hummock_001\\" \\
+ --compute-opts=\\"--advertise-addr 0.0.0.0:5688 --role both\\" \\
+ --frontend-opts=\\"--listen-addr 0.0.0.0:4566 --advertise-addr 0.0.0.0:4566\\" \\
+ --compactor-opts=\\"--advertise-addr 0.0.0.0:6660\\""
ports:
- - "4566:4566"
+ - 4566:4566
depends_on:
- - risingwave-minio
+ minio:
+ condition: service_healthy
volumes:
- - "./docker/risingwave/risingwave.toml:/risingwave.toml"
- risingwave:/data
environment:
RUST_BACKTRACE: "1"
- # If ENABLE_TELEMETRY is not set, telemetry will start by default
- ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}
- container_name: risingwave
+ ENABLE_TELEMETRY: "false"
healthcheck:
test:
- CMD-SHELL
@@ -612,10 +565,9 @@ services:
- bash -c 'printf \\"GET / HTTP/1.1\\n\\n\\" > /dev/tcp/127.0.0.1/5688; exit $$?;'
- bash -c 'printf \\"GET / HTTP/1.1\\n\\n\\" > /dev/tcp/127.0.0.1/4566; exit $$?;'
- bash -c 'printf \\"GET / HTTP/1.1\\n\\n\\" > /dev/tcp/127.0.0.1/5690; exit $$?;'
- interval: 5s
- timeout: 5s
+ interval: 1s
retries: 20
- restart: always
+ restart: on-failure
networks:
- risingwave
@@ -646,5 +598,4 @@ volumes:
postgres:
exasol:
impala:
- risingwave-minio:
risingwave:
diff --git a/risingwave.toml b/risingwave.toml
index ed50e5b..95208b3 100644
--- a/risingwave.toml
+++ b/risingwave.toml
@@ -1,2 +0,0 @@
-# RisingWave config file to be mounted into the Docker containers.
-# See https://github.com/risingwavelabs/risingwave/blob/main/src/config/example.toml for example
diff --git a/test_json.py b/test_json.py
index f9c2f52..0d0e2fa 100644
--- a/test_json.py
+++ b/test_json.py
@@ -41,8 +41,7 @@ pytestmark = [
reason="https://github.com/ibis-project/ibis/pull/6920#discussion_r1373212503",
)
@pytest.mark.broken(
- ["risingwave"],
- reason="TODO(Kexiang): order mismatch in array",
+ ["risingwave"], reason="TODO(Kexiang): order mismatch in array", strict=False
)
def test_json_getitem(json_t, expr_fn, expected):
expr = expr_fn(json_t)
|
|
test: dont export entities from single file tests
|
c49db6414b6b6416c16d0d0590e43bbf1162f0a7
|
test
|
https://github.com/mikro-orm/mikro-orm/commit/c49db6414b6b6416c16d0d0590e43bbf1162f0a7
|
dont export entities from single file tests
|
diff --git a/custom-pivot-entity-auto-discovery.sqlite.test.ts b/custom-pivot-entity-auto-discovery.sqlite.test.ts
index 4f2975f..0723ffc 100644
--- a/custom-pivot-entity-auto-discovery.sqlite.test.ts
+++ b/custom-pivot-entity-auto-discovery.sqlite.test.ts
@@ -13,7 +13,7 @@ import {
import { SqliteDriver } from '@mikro-orm/sqlite';
@Entity()
-export class Order {
+class Order {
@PrimaryKey()
id!: number;
@@ -33,7 +33,7 @@ export class Order {
}
@Entity()
-export class Product {
+class Product {
@PrimaryKey()
id!: number;
@@ -55,7 +55,7 @@ export class Product {
}
@Entity()
-export class OrderItem {
+class OrderItem {
[OptionalProps]?: 'amount';
diff --git a/GH725.test.ts b/GH725.test.ts
index 217ca4f..f1129d1 100644
--- a/GH725.test.ts
+++ b/GH725.test.ts
@@ -1,5 +1,4 @@
import { EntitySchema, MikroORM, sql, Type, ValidationError } from '@mikro-orm/core';
-import type { AbstractSqlDriver } from '@mikro-orm/knex';
import { SqliteDriver } from '@mikro-orm/sqlite';
import { PostgreSqlDriver } from '@mikro-orm/postgresql';
@@ -105,13 +104,12 @@ export const TestSchema2 = new EntitySchema<Test2>({
describe('GH issue 725', () => {
test('mapping values from returning statement to custom types', async () => {
- const orm = await MikroORM.init<AbstractSqlDriver>({
+ const orm = await MikroORM.init({
entities: [TestSchema],
- dbName: `mikro_orm_test_gh_725`,
+ dbName: 'mikro_orm_test_gh_725',
driver: PostgreSqlDriver,
});
await orm.schema.ensureDatabase();
- await orm.schema.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
await orm.schema.dropSchema();
await orm.schema.createSchema();
@@ -142,7 +140,7 @@ describe('GH issue 725', () => {
});
test('validation when trying to persist not discovered entity', async () => {
- const orm = await MikroORM.init<AbstractSqlDriver>({
+ const orm = await MikroORM.init({
entities: [TestSchema2],
dbName: `:memory:`,
driver: SqliteDriver,
diff --git a/GH4242.test.ts b/GH4242.test.ts
index 10d7bd9..a1c9aec 100644
--- a/GH4242.test.ts
+++ b/GH4242.test.ts
@@ -47,7 +47,6 @@ beforeAll(async () => {
});
await orm.schema.ensureDatabase();
- await orm.schema.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
await orm.schema.refreshDatabase();
});
diff --git a/GH1003.test.ts b/GH1003.test.ts
index 6bc4a65..9e81e2f 100644
--- a/GH1003.test.ts
+++ b/GH1003.test.ts
@@ -2,7 +2,7 @@ import { BaseEntity, Collection, MikroORM, Entity, ManyToOne, OneToMany, Primary
import type { Ref } from '@mikro-orm/sqlite';
@Entity()
-export class Parent extends BaseEntity {
+class Parent extends BaseEntity {
@PrimaryKey()
id!: string;
@@ -13,7 +13,7 @@ export class Parent extends BaseEntity {
}
@Entity()
-export class Child extends BaseEntity {
+class Child extends BaseEntity {
@PrimaryKey()
id!: string;
diff --git a/GH1009.test.ts b/GH1009.test.ts
index 5e45cef..002df9e 100644
--- a/GH1009.test.ts
+++ b/GH1009.test.ts
@@ -1,7 +1,7 @@
import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Property } from '@mikro-orm/sqlite';
@Entity({ tableName: 'brands' })
-export class Brand {
+class Brand {
@PrimaryKey()
id!: number;
@@ -12,7 +12,7 @@ export class Brand {
}
@Entity({ tableName: 'brand_site_restrictions' })
-export class BrandSiteRestriction {
+class BrandSiteRestriction {
@PrimaryKey()
id!: number;
@@ -26,7 +26,7 @@ export class BrandSiteRestriction {
}
@Entity({ tableName: 'placements' })
-export class Placement {
+class Placement {
@PrimaryKey()
id!: number;
@@ -40,7 +40,7 @@ export class Placement {
}
@Entity({ tableName: 'publishers' })
-export class Publisher {
+class Publisher {
@OneToMany({ entity: () => Site, mappedBy: 'publisher' })
sites = new Collection<Site>(this);
@@ -51,7 +51,7 @@ export class Publisher {
}
@Entity({ tableName: 'sites' })
-export class Site {
+class Site {
@ManyToOne({ entity: () => Publisher, nullable: true })
publisher?: Publisher;
diff --git a/GH1041.test.ts b/GH1041.test.ts
index 4177777..be45f27 100644
--- a/GH1041.test.ts
+++ b/GH1041.test.ts
@@ -2,7 +2,7 @@ import { Collection, Entity, LoadStrategy, ManyToMany, MikroORM, PopulateHint, P
import { mockLogger } from '../helpers';
@Entity()
-export class App {
+class App {
@PrimaryKey()
id!: number;
@@ -16,7 +16,7 @@ export class App {
}
@Entity()
-export class User {
+class User {
@PrimaryKey()
id!: number;
diff --git a/GH1115.test.ts b/GH1115.test.ts
index 58abc3a..28fdfcc 100644
--- a/GH1115.test.ts
+++ b/GH1115.test.ts
@@ -1,7 +1,7 @@
import { Entity, ManyToOne, MikroORM, PrimaryKey, Property } from '@mikro-orm/sqlite';
@Entity()
-export class B {
+class B {
@PrimaryKey()
id!: number;
@@ -12,7 +12,7 @@ export class B {
}
@Entity()
-export class A {
+class A {
@PrimaryKey()
id!: number;
diff --git a/GH1171.test.ts b/GH1171.test.ts
index fb7786e..142210b 100644
--- a/GH1171.test.ts
+++ b/GH1171.test.ts
@@ -2,7 +2,7 @@ import { Entity, MikroORM, OneToOne, PrimaryKey, Property } from '@mikro-orm/sql
import { v4 } from 'uuid';
@Entity()
-export class B {
+class B {
@PrimaryKey()
id: string = v4();
@@ -13,7 +13,7 @@ export class B {
}
@Entity()
-export class A {
+class A {
@PrimaryKey()
id!: string;
diff --git a/GH1395.test.ts b/GH1395.test.ts
index 8ac3015..4402642 100644
--- a/GH1395.test.ts
+++ b/GH1395.test.ts
@@ -6,7 +6,7 @@ export interface EmailMessageTest {
}
@Entity()
-export class TestTemplate {
+class TestTemplate {
@PrimaryKey()
_id!: ObjectId;
diff --git a/GH1616.test.ts b/GH1616.test.ts
index 2c15e9e..ba88c10 100644
--- a/GH1616.test.ts
+++ b/GH1616.test.ts
@@ -1,7 +1,7 @@
import { Embeddable, Embedded, Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/sqlite';
@Embeddable()
-export class D {
+class D {
@Property({ type: 'boolean', nullable: true })
test?: boolean = false;
@@ -9,7 +9,7 @@ export class D {
}
@Embeddable()
-export class C {
+class C {
@Embedded(() => D, { object: true, nullable: false })
d!: D;
@@ -17,7 +17,7 @@ export class C {
}
@Embeddable()
-export class B {
+class B {
@Embedded(() => C, { object: true, nullable: false })
c!: C;
@@ -28,7 +28,7 @@ export class B {
}
@Entity()
-export class A {
+class A {
@PrimaryKey()
id!: number;
diff --git a/GH1626.test.ts b/GH1626.test.ts
index 46a9075..15c02c1 100644
--- a/GH1626.test.ts
+++ b/GH1626.test.ts
@@ -6,7 +6,7 @@ import {
Property,
} from '@mikro-orm/sqlite';
import { mockLogger } from '../helpers';
-export class NativeBigIntType extends BigIntType {
+class NativeBigIntType extends BigIntType {
override convertToJSValue(value: any): any {
if (!value) {
@@ -19,7 +19,7 @@ export class NativeBigIntType extends BigIntType {
}
@Entity()
-export class Author {
+class Author {
@PrimaryKey({ type: NativeBigIntType, comment: 'PK' })
id!: bigint;
diff --git a/GH1704.test.ts b/GH1704.test.ts
index ba6eb7a..23c9d06 100644
--- a/GH1704.test.ts
+++ b/GH1704.test.ts
@@ -2,7 +2,7 @@ import { Entity, PrimaryKey, Property, OneToOne, MikroORM } from '@mikro-orm/sql
import { mockLogger } from '../helpers';
@Entity()
-export class Profile {
+class Profile {
@PrimaryKey()
id!: number;
diff --git a/GH1721.test.ts b/GH1721.test.ts
index 3b51754..ece501a 100644
--- a/GH1721.test.ts
+++ b/GH1721.test.ts
@@ -2,7 +2,7 @@ import { Entity, MikroORM, PrimaryKey, Property, Type } from '@mikro-orm/sqlite'
import { Guid } from 'guid-typescript';
import { mockLogger } from '../helpers';
-export class GuidType extends Type<Guid | undefined, string | undefined> {
+class GuidType extends Type<Guid | undefined, string | undefined> {
override convertToDatabaseValue(value: Guid | undefined): string | undefined {
if (!value) {
@@ -27,7 +27,7 @@ export class GuidType extends Type<Guid | undefined, string | undefined> {
}
@Entity()
-export class Couch {
+class Couch {
@PrimaryKey({ type: GuidType })
id!: Guid;
diff --git a/GH1902.test.ts b/GH1902.test.ts
index 18dd82c..1aeb903 100644
--- a/GH1902.test.ts
+++ b/GH1902.test.ts
@@ -14,7 +14,7 @@ import {
} from '@mikro-orm/sqlite';
@Entity({ tableName: 'users' })
-export class UserEntity {
+class UserEntity {
@PrimaryKey({ type: 'number' })
id!: number;
@@ -32,7 +32,7 @@ export class UserEntity {
}
@Entity({ tableName: 'tenants' })
-export class TenantEntity {
+class TenantEntity {
[OptionalProps]?: 'isEnabled';
diff --git a/GH1910.test.ts b/GH1910.test.ts
index fc96336..46e5b81 100644
--- a/GH1910.test.ts
+++ b/GH1910.test.ts
@@ -2,7 +2,7 @@ import type { EntityManager } from '@mikro-orm/postgresql';
import { Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/postgresql';
@Entity()
-export class A {
+class A {
@PrimaryKey({ type: 'number' })
id!: number;
diff --git a/GH1927.test.ts b/GH1927.test.ts
index 20e0184..3e02620 100644
--- a/GH1927.test.ts
+++ b/GH1927.test.ts
@@ -2,7 +2,7 @@ import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Propert
import { mockLogger } from '../helpers';
@Entity()
-export class Author {
+class Author {
@PrimaryKey()
id!: number;
@@ -20,7 +20,7 @@ export class Author {
}
@Entity()
-export class Book {
+class Book {
@PrimaryKey()
id!: number;
diff --git a/GH2273.test.ts b/GH2273.test.ts
index a9d67fa..28e957c 100644
--- a/GH2273.test.ts
+++ b/GH2273.test.ts
@@ -1,7 +1,7 @@
import { Entity, LoadStrategy, MikroORM, OneToOne, PrimaryKey, Property } from '@mikro-orm/sqlite';
@Entity()
-export class Checkout {
+class Checkout {
@PrimaryKey()
id!: number;
@@ -14,7 +14,7 @@ export class Checkout {
}
@Entity()
-export class Discount {
+class Discount {
@PrimaryKey()
id!: number;
@@ -35,7 +35,7 @@ export class Discount {
}
@Entity()
-export class Checkout2 {
+class Checkout2 {
@PrimaryKey()
id!: number;
@@ -49,7 +49,7 @@ export class Checkout2 {
}
@Entity()
-export class Discount2 {
+class Discount2 {
@PrimaryKey()
id!: number;
diff --git a/GH228.test.ts b/GH228.test.ts
index 592a1f9..4f8741d 100644
--- a/GH228.test.ts
+++ b/GH228.test.ts
@@ -2,7 +2,7 @@ import { Entity, ManyToOne, MikroORM, PrimaryKey, Property } from '@mikro-orm/sq
import { mockLogger } from '../helpers';
@Entity()
-export class B {
+class B {
@PrimaryKey({ type: 'number' })
id!: number;
diff --git a/GH2379.test.ts b/GH2379.test.ts
index 82b1570..b9c1768 100644
--- a/GH2379.test.ts
+++ b/GH2379.test.ts
@@ -2,7 +2,7 @@ import { Collection, Entity, Ref, ManyToOne, MikroORM, OneToMany, OptionalProps,
import { performance } from 'perf_hooks';
@Entity()
-export class VendorBuyerRelationship {
+class VendorBuyerRelationship {
[OptionalProps]?: 'created';
@@ -24,7 +24,7 @@ export class VendorBuyerRelationship {
}
@Entity()
-export class Member {
+class Member {
[OptionalProps]?: 'created';
@@ -49,7 +49,7 @@ export class Member {
}
@Entity()
-export class Job {
+class Job {
[OptionalProps]?: 'rejected';
diff --git a/GH2395.test.ts b/GH2395.test.ts
index 345862b..5739875 100644
--- a/GH2395.test.ts
+++ b/GH2395.test.ts
@@ -1,7 +1,7 @@
import { Cascade, Collection, Entity, Ref, ManyToOne, MikroORM, OneToMany, PrimaryKey } from '@mikro-orm/sqlite';
@Entity()
-export class Parent {
+class Parent {
@PrimaryKey()
id!: number;
@@ -18,7 +18,7 @@ export class Parent {
}
@Entity()
-export class Child {
+class Child {
@PrimaryKey()
id!: number;
@@ -29,7 +29,7 @@ export class Child {
}
@Entity()
-export class Child2 {
+class Child2 {
@PrimaryKey()
id!: number;
diff --git a/GH2406.test.ts b/GH2406.test.ts
index ada83b2..5ee50a3 100644
--- a/GH2406.test.ts
+++ b/GH2406.test.ts
@@ -1,7 +1,7 @@
import { Collection, Entity, Ref, ManyToOne, MikroORM, OneToMany, PrimaryKey } from '@mikro-orm/sqlite';
@Entity({ forceConstructor: true })
-export class Parent {
+class Parent {
@PrimaryKey()
id!: number;
@@ -12,7 +12,7 @@ export class Parent {
}
@Entity({ forceConstructor: true })
-export class Child {
+class Child {
@PrimaryKey()
id!: number;
diff --git a/GH2583.test.ts b/GH2583.test.ts
index 7627406..3c75950 100644
--- a/GH2583.test.ts
+++ b/GH2583.test.ts
@@ -7,7 +7,7 @@ export enum WithEnumArrayValue {
}
@Entity()
-export class WithEnumArray {
+class WithEnumArray {
@PrimaryKey()
id!: number;
diff --git a/GH2675.test.ts b/GH2675.test.ts
index e6433d0..e5f893f 100644
--- a/GH2675.test.ts
+++ b/GH2675.test.ts
@@ -1,7 +1,7 @@
import { Entity, LoadStrategy, ManyToOne, MikroORM, PrimaryKey, wrap } from '@mikro-orm/postgresql';
@Entity()
-export class A {
+class A {
@PrimaryKey()
id!: number;
@@ -9,7 +9,7 @@ export class A {
}
@Entity()
-export class B {
+class B {
@PrimaryKey()
id!: number;
diff --git a/GH2774.test.ts b/GH2774.test.ts
index 9e387bf..7fd6d5c 100644
--- a/GH2774.test.ts
+++ b/GH2774.test.ts
@@ -1,7 +1,7 @@
import { Embeddable, Embedded, Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/sqlite';
@Embeddable()
-export class Nested {
+class Nested {
@Property({ nullable: true })
value: string | null = null;
@@ -9,7 +9,7 @@ export class Nested {
}
@Embeddable()
-export class Name {
+class Name {
@Property({ nullable: true })
value: string | null = null;
@@ -20,7 +20,7 @@ export class Name {
}
@Entity()
-export class User {
+class User {
@PrimaryKey()
id!: number;
diff --git a/GH2781.test.ts b/GH2781.test.ts
index 4a69fab..db88bce 100644
--- a/GH2781.test.ts
+++ b/GH2781.test.ts
@@ -1,7 +1,7 @@
import { Entity, ManyToOne, MikroORM, PrimaryKey, Property } from '@mikro-orm/postgresql';
@Entity()
-export class Address {
+class Address {
@PrimaryKey()
id!: number;
@@ -22,7 +22,7 @@ export class Address {
}
@Entity()
-export class Customer {
+class Customer {
@PrimaryKey()
id!: number;
diff --git a/GH2784.test.ts b/GH2784.test.ts
index d8a385c..fb07cab 100644
--- a/GH2784.test.ts
+++ b/GH2784.test.ts
@@ -1,7 +1,7 @@
import { Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/postgresql';
@Entity()
-export class Address {
+class Address {
@PrimaryKey()
id!: number;
diff --git a/GH2815.test.ts b/GH2815.test.ts
index a0bda20..6d9a93a 100644
--- a/GH2815.test.ts
+++ b/GH2815.test.ts
@@ -1,7 +1,7 @@
import { Entity, MikroORM, OneToOne, PrimaryKey } from '@mikro-orm/sqlite';
@Entity()
-export class Position {
+class Position {
@PrimaryKey()
id!: number;
@@ -12,7 +12,7 @@ export class Position {
}
@Entity()
-export class Leg {
+class Leg {
@PrimaryKey()
id!: number;
@@ -23,7 +23,7 @@ export class Leg {
}
@Entity()
-export class Position2 {
+class Position2 {
@PrimaryKey()
id!: number;
@@ -34,7 +34,7 @@ export class Position2 {
}
@Entity()
-export class Leg2 {
+class Leg2 {
@PrimaryKey()
id!: number;
diff --git a/GH2821.test.ts b/GH2821.test.ts
index e71adcc..c0374ec 100644
--- a/GH2821.test.ts
+++ b/GH2821.test.ts
@@ -1,7 +1,7 @@
import { Entity, MikroORM, OneToOne, PrimaryKey } from '@mikro-orm/sqlite';
@Entity()
-export class Position {
+class Position {
@PrimaryKey()
id!: number;
@@ -15,7 +15,7 @@ export class Position {
}
@Entity()
-export class Leg {
+class Leg {
@PrimaryKey()
id!: number;
diff --git a/GH2882.test.ts b/GH2882.test.ts
index b1151a0..b2f1cd6 100644
--- a/GH2882.test.ts
+++ b/GH2882.test.ts
@@ -1,7 +1,7 @@
import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, wrap } from '@mikro-orm/sqlite';
@Entity()
-export class Parent {
+class Parent {
@PrimaryKey()
id!: number;
diff --git a/GH2974.test.ts b/GH2974.test.ts
index 7e6aa86..0a2b0b0 100644
--- a/GH2974.test.ts
+++ b/GH2974.test.ts
@@ -1,7 +1,7 @@
import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Property, wrap } from '@mikro-orm/better-sqlite';
@Entity()
-export class SomeMany {
+class SomeMany {
@PrimaryKey()
id!: number;
@@ -15,7 +15,7 @@ export class SomeMany {
}
@Entity()
-export class Test {
+class Test {
@PrimaryKey()
id!: number;
diff --git a/GH302.test.ts b/GH302.test.ts
index 386268c..a3b9f6e 100644
--- a/GH302.test.ts
+++ b/GH302.test.ts
@@ -1,7 +1,7 @@
import { Entity, Ref, MikroORM, PrimaryKey, Property, Reference, ManyToOne, OneToMany, Collection } from '@mikro-orm/sqlite';
@Entity()
-export class A {
+class A {
@PrimaryKey({ type: 'number' })
id: number;
@@ -20,7 +20,7 @@ export class A {
}
@Entity()
-export class B {
+class B {
@PrimaryKey({ type: 'number' })
id!: number;
diff --git a/GH3026.test.ts b/GH3026.test.ts
index 726a2c2..234e35d 100644
--- a/GH3026.test.ts
+++ b/GH3026.test.ts
@@ -2,7 +2,7 @@ import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Propert
import { mockLogger } from '../helpers';
@Entity()
-export class Ingredient {
+class Ingredient {
@PrimaryKey()
id!: number;
@@ -16,7 +16,7 @@ export class Ingredient {
}
@Entity()
-export class Recipe {
+class Recipe {
@PrimaryKey()
id!: number;
@@ -30,7 +30,7 @@ export class Recipe {
}
@Entity()
-export class RecipeIngredient {
+class RecipeIngredient {
@PrimaryKey()
id!: number;
diff --git a/GH3240.test.ts b/GH3240.test.ts
index c4dbc90..1ecf341 100644
--- a/GH3240.test.ts
+++ b/GH3240.test.ts
@@ -3,7 +3,7 @@ import { Collection, Entity, ManyToMany, MikroORM, PrimaryKey, Property } from '
type SquadType = 'GROUND' | 'AIR';
@Entity()
-export class Soldier {
+class Soldier {
@PrimaryKey()
id!: number;
@@ -20,7 +20,7 @@ export class Soldier {
}
@Entity()
-export class Squad {
+class Squad {
@PrimaryKey()
id!: number;
diff --git a/GH3287.test.ts b/GH3287.test.ts
index aa4c627..fde217e 100644
--- a/GH3287.test.ts
+++ b/GH3287.test.ts
@@ -1,7 +1,7 @@
import { Collection, Entity, LoadStrategy, ManyToMany, MikroORM, PrimaryKey } from '@mikro-orm/better-sqlite';
@Entity()
-export class Group {
+class Group {
@PrimaryKey()
id!: number;
@@ -15,7 +15,7 @@ export class Group {
}
@Entity()
-export class Participant {
+class Participant {
@PrimaryKey()
id!: number;
diff --git a/GH3490.test.ts b/GH3490.test.ts
index 861411f..acaa95d 100644
--- a/GH3490.test.ts
+++ b/GH3490.test.ts
@@ -1,7 +1,7 @@
import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey } from '@mikro-orm/sqlite';
@Entity()
-export class Contract {
+class Contract {
@PrimaryKey()
id!: number;
@@ -12,7 +12,7 @@ export class Contract {
}
@Entity()
-export class Customer {
+class Customer {
@PrimaryKey()
id!: number;
diff --git a/GH3548.test.ts b/GH3548.test.ts
index dc350c7..7f68e74 100644
--- a/GH3548.test.ts
+++ b/GH3548.test.ts
@@ -1,7 +1,7 @@
import { MikroORM, ObjectId, Entity, PrimaryKey, Property, OneToOne } from '@mikro-orm/mongodb';
@Entity()
-export class Author {
+class Author {
@PrimaryKey()
_id!: ObjectId;
@@ -15,7 +15,7 @@ export class Author {
}
@Entity()
-export class AuthorDetail {
+class AuthorDetail {
@PrimaryKey()
_id!: ObjectId;
diff --git a/GH3696.test.ts b/GH3696.test.ts
index 9489710..e4e1473 100644
--- a/GH3696.test.ts
+++ b/GH3696.test.ts
@@ -2,7 +2,7 @@ import { FullTextType, MikroORM, Collection, Entity, Index, ManyToMany, PrimaryK
@Entity()
@Unique({ properties: ['name'] })
-export class Artist {
+class Artist {
@PrimaryKey()
id!: number;
@@ -23,7 +23,7 @@ export class Artist {
}
@Entity()
-export class Song {
+class Song {
@PrimaryKey()
id!: number;
diff --git a/GH3738.test.ts b/GH3738.test.ts
index 28959b4..5f4f3b3 100644
--- a/GH3738.test.ts
+++ b/GH3738.test.ts
@@ -12,7 +12,7 @@ import {
import { randomUUID } from 'crypto';
@Entity()
-export class Question {
+class Question {
[OptionalProps]?: 'createdAt';
@@ -31,7 +31,7 @@ export class Question {
}
@Entity()
-export class Answer {
+class Answer {
[OptionalProps]?: 'createdAt' | 'question';
diff --git a/GH3844.test.ts b/GH3844.test.ts
index b2e57bb..aa9fdf8 100644
--- a/GH3844.test.ts
+++ b/GH3844.test.ts
@@ -2,7 +2,7 @@ import { Entity, PrimaryKey, Property, OneToOne, Ref, ref } from '@mikro-orm/cor
import { MikroORM } from '@mikro-orm/sqlite';
@Entity()
-export class GamePoolEntity {
+class GamePoolEntity {
@PrimaryKey()
contract_address!: string;
@@ -36,7 +36,7 @@ export class GamePoolEntity {
}
@Entity()
-export class GamePoolScannerEntity {
+class GamePoolScannerEntity {
@OneToOne(() => GamePoolEntity, e => e.scanner, {
primary: true,
diff --git a/GH4295.test.ts b/GH4295.test.ts
index add3565..5541a53 100644
--- a/GH4295.test.ts
+++ b/GH4295.test.ts
@@ -14,7 +14,7 @@ class RunScheduleEntity {
}
@Entity()
-export class AEntity {
+class AEntity {
@PrimaryKey()
id!: number;
diff --git a/GH4343.test.ts b/GH4343.test.ts
index 40e1425..2350b6a 100644
--- a/GH4343.test.ts
+++ b/GH4343.test.ts
@@ -3,7 +3,7 @@ import { Entity, ManyToOne, PrimaryKey, Property, ref, Ref } from '@mikro-orm/co
import { v4 } from 'uuid';
@Entity()
-export class LocalizedString {
+class LocalizedString {
@PrimaryKey({ type: 'uuid' })
id = v4();
@@ -21,7 +21,7 @@ export class LocalizedString {
}
@Entity()
-export class Book {
+class Book {
@PrimaryKey({ type: 'uuid' })
id = v4();
diff --git a/GH4533.test.ts b/GH4533.test.ts
index 49428ea..494fdac 100644
--- a/GH4533.test.ts
+++ b/GH4533.test.ts
@@ -15,7 +15,7 @@ import { SqliteDriver } from '@mikro-orm/sqlite';
import { mockLogger } from '../helpers';
@Entity({ tableName: 'core_users' })
-export class User {
+class User {
@PrimaryKey()
id!: number;
@@ -33,7 +33,7 @@ export class User {
}
@Entity({ tableName: 'core_roles' })
-export class Role {
+class Role {
@PrimaryKey()
id!: number;
diff --git a/GH4973.test.ts b/GH4973.test.ts
index 3f1237a..ffdabd8 100644
--- a/GH4973.test.ts
+++ b/GH4973.test.ts
@@ -2,7 +2,7 @@ import { Collection, Entity, OneToMany, MikroORM, PrimaryKey, Property, ManyToOn
import { mockLogger } from '../helpers';
@Entity()
-export class User {
+class User {
@PrimaryKey()
id!: number;
@@ -13,7 +13,7 @@ export class User {
}
@Entity()
-export class Book {
+class Book {
@PrimaryKey()
id!: number;
diff --git a/GH557.test.ts b/GH557.test.ts
index d087073..024fad2 100644
--- a/GH557.test.ts
+++ b/GH557.test.ts
@@ -1,7 +1,7 @@
import { MikroORM, Entity, ManyToOne, OneToOne, PrimaryKey, Property } from '@mikro-orm/sqlite';
@Entity()
-export class Rate {
+class Rate {
@PrimaryKey()
id!: number;
@@ -22,7 +22,7 @@ export class Rate {
}
@Entity()
-export class Application {
+class Application {
@PrimaryKey()
id!: number;
diff --git a/GH572.test.ts b/GH572.test.ts
index 969d8cf..e706846 100644
--- a/GH572.test.ts
+++ b/GH572.test.ts
@@ -2,7 +2,7 @@ import { Entity, Ref, MikroORM, OneToOne, PrimaryKey, Property, QueryOrder } fro
import { mockLogger } from '../helpers';
@Entity()
-export class A {
+class A {
@PrimaryKey()
id!: number;
@@ -13,7 +13,7 @@ export class A {
}
@Entity()
-export class B {
+class B {
@PrimaryKey()
id!: number;
diff --git a/GH755.test.ts b/GH755.test.ts
index bf858e4..3371cfe 100644
--- a/GH755.test.ts
+++ b/GH755.test.ts
@@ -1,6 +1,6 @@
import { EntitySchema, MikroORM } from '@mikro-orm/sqlite';
-export class Test {
+class Test {
id!: string;
createdAt!: Date;
diff --git a/GH811.test.ts b/GH811.test.ts
index 9178f7b..8cdc022 100644
--- a/GH811.test.ts
+++ b/GH811.test.ts
@@ -2,7 +2,7 @@ import { Entity, helper, MikroORM, OneToOne, PrimaryKey, Property } from '@mikro
import { v4 } from 'uuid';
@Entity()
-export class Address {
+class Address {
@PrimaryKey({ type: 'uuid' })
id = v4();
@@ -13,7 +13,7 @@ export class Address {
}
@Entity()
-export class Contact {
+class Contact {
@PrimaryKey({ type: 'uuid' })
id = v4();
@@ -27,7 +27,7 @@ export class Contact {
}
@Entity()
-export class Employee {
+class Employee {
@PrimaryKey({ type: 'uuid' })
id = v4();
diff --git a/sqlite-constraints.test.ts b/sqlite-constraints.test.ts
index 2cf8d92..f133a4c 100644
--- a/sqlite-constraints.test.ts
+++ b/sqlite-constraints.test.ts
@@ -2,9 +2,8 @@ import { Entity, type EntityManager, ManyToOne, MikroORM, PrimaryKey, Property,
import { SqliteDriver } from '@mikro-orm/sqlite';
import { BetterSqliteDriver } from '@mikro-orm/better-sqlite';
-
@Entity()
-export class Author {
+class Author {
@PrimaryKey({ type: 'string' })
id!: string;
@@ -14,9 +13,8 @@ export class Author {
}
-
@Entity()
-export class Book {
+class Book {
@PrimaryKey({ type: 'string' })
id!: string;
@@ -29,7 +27,6 @@ export class Book {
}
-
async function createEntities(em: EntityManager) {
const author = new Author();
author.id = '1';
@@ -44,7 +41,6 @@ async function createEntities(em: EntityManager) {
return author;
}
-
describe('sqlite driver', () => {
let orm: MikroORM<SqliteDriver>;
@@ -70,7 +66,6 @@ describe('sqlite driver', () => {
});
});
-
describe('better-sqlite driver', () => {
let orm: MikroORM<BetterSqliteDriver>;
@@ -95,4 +90,3 @@ describe('better-sqlite driver', () => {
}
});
});
-
|
|
chore: impl some error conversions
|
ed0f8e1d57380fe5b76248bf8dd88973898718c4
|
chore
|
https://github.com/erg-lang/erg/commit/ed0f8e1d57380fe5b76248bf8dd88973898718c4
|
impl some error conversions
|
diff --git a/mod.rs b/mod.rs
index 0629fba..754c1be 100644
--- a/mod.rs
+++ b/mod.rs
@@ -186,6 +186,12 @@ impl From<ParserRunnerError> for CompileError {
}
}
+impl From<CompileError> for ParserRunnerErrors {
+ fn from(err: CompileError) -> Self {
+ Self::new(vec![err.into()])
+ }
+}
+
impl From<CompileError> for ParserRunnerError {
fn from(err: CompileError) -> Self {
Self {
diff --git a/error.rs b/error.rs
index 5ebc324..10b9d16 100644
--- a/error.rs
+++ b/error.rs
@@ -609,6 +609,12 @@ impl ParserRunnerError {
}
}
+impl From<ParserRunnerError> for LexError {
+ fn from(err: ParserRunnerError) -> Self {
+ Self::new(err.core)
+ }
+}
+
#[derive(Debug)]
pub struct ParserRunnerErrors(Vec<ParserRunnerError>);
@@ -618,6 +624,12 @@ impl_stream!(ParserRunnerErrors, ParserRunnerError);
impl MultiErrorDisplay<ParserRunnerError> for ParserRunnerErrors {}
+impl From<ParserRunnerErrors> for LexErrors {
+ fn from(errs: ParserRunnerErrors) -> Self {
+ Self(errs.0.into_iter().map(LexError::from).collect())
+ }
+}
+
impl fmt::Display for ParserRunnerErrors {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.fmt_all(f)
|
|
chore(deps): relock
|
94959b143e68b92360441c7383e1930ff986e5e5
|
chore
|
https://github.com/rohankumardubey/ibis/commit/94959b143e68b92360441c7383e1930ff986e5e5
|
relock
|
diff --git a/poetry.lock b/poetry.lock
index 4da84bd..acf1f70 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -4855,83 +4855,6 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte
docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
-[[package]]
-name = "pymssql"
-version = "2.2.11"
-description = "DB-API interface to Microsoft SQL Server for Python. (new Cython-based version)"
-optional = true
-python-versions = "*"
-files = [
- {file = "pymssql-2.2.11-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:692ab328ac290bd2031bc4dd6deae32665dfffda1b12aaa92928d3ebc667d5ad"},
- {file = "pymssql-2.2.11-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:723a4612421027a01b51e42e786678a18c4a27613a3ccecf331c026e0cc41353"},
- {file = "pymssql-2.2.11-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:34ab2373ca607174ad7244cfe955c07b6bc77a1e21d3c3143dbe934dec82c3a4"},
- {file = "pymssql-2.2.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bc0ba19b4426c57509f065a03748d9ac230f1543ecdac57175e6ebd213a7bc0"},
- {file = "pymssql-2.2.11-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8d9d42a50f6e8e6b356e4e8b2fa1da725344ec0be6f8a6107b7196e5bd74906"},
- {file = "pymssql-2.2.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aec64022a2419fad9f496f8e310522635e39d092970e1d55375ea0be86725174"},
- {file = "pymssql-2.2.11-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c389c8041c94d4058827faf5735df5f8e4c1c1eebdd051859536dc393925a667"},
- {file = "pymssql-2.2.11-cp310-cp310-win32.whl", hash = "sha256:6452326cecd4dcee359a6f8878b827118a8c8523cd24de5b3a971a7a172e4275"},
- {file = "pymssql-2.2.11-cp310-cp310-win_amd64.whl", hash = "sha256:c1bde266dbc91b100abd0311102a6585df09cc963599421cc12fd6b4cfa8e3d3"},
- {file = "pymssql-2.2.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6ddaf0597138179517bdbf5b5aa3caffee65987316dc906359a5d0801d0847ee"},
- {file = "pymssql-2.2.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c26af25991715431559cb5b37f243b8ff676540f504ed0317774dfc71827af1"},
- {file = "pymssql-2.2.11-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:410e8c40b7c1b421e750cf80ccf2da8d802ed815575758ac9a78c5f6cd995723"},
- {file = "pymssql-2.2.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1767239ed45e1fa91d82fc0c63305750530787cd64089cabbe183eb538a35b"},
- {file = "pymssql-2.2.11-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:9a644e4158fed30ae9f3846f2f1c74d36fa1610eb552de35b7f611d063fa3c85"},
- {file = "pymssql-2.2.11-cp311-cp311-win32.whl", hash = "sha256:1956c111debe67f69a9c839b33ce420f0e8def1ef5ff9831c03d8ac840f82376"},
- {file = "pymssql-2.2.11-cp311-cp311-win_amd64.whl", hash = "sha256:0bdd1fb49b0e331e47e83f39d4af784c857e230bfc73519654bab29285c51c63"},
- {file = "pymssql-2.2.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2609bbd3b715822bb4fa6d457b2985d32ad6ab9580fdb61ae6e0eee251791d24"},
- {file = "pymssql-2.2.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c382aea9adaaee189f352d7a493e3f76c13f9337ec2b6aa40e76b114fa13ebac"},
- {file = "pymssql-2.2.11-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5928324a09de7466368c15ece1de4ab5ea968d24943ceade758836f9fc7149f5"},
- {file = "pymssql-2.2.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee8b10f797d0bfec626b803891cf9e98480ee11f2e8459a7616cdb7e4e4bf2de"},
- {file = "pymssql-2.2.11-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:1d5aa1a090b17f4ba75ffac3bb371f6c8c869692b653689396f9b470fde06981"},
- {file = "pymssql-2.2.11-cp312-cp312-win32.whl", hash = "sha256:1f7ba71cf81af65c005173f279928bf86700d295f97e4965e169b5764bc6c4f2"},
- {file = "pymssql-2.2.11-cp312-cp312-win_amd64.whl", hash = "sha256:a0ebb0e40c93f8f1e40aad80f512ae4aa89cb1ec8a96964b9afedcff1d5813fd"},
- {file = "pymssql-2.2.11-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:e0ed115902956efaca9d9a20fa9b2b604e3e11d640416ca74900d215cdcbf3ab"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1a75afa17746972bb61120fb6ea907657fc1ab68250bbbd8b21a00d0720ed0f4"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2ae69d8e46637a203cfb48e05439fc9e2ff7646fa1f5396aa3577ce52810031"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13710240457ace5b8c9cca7f4971504656f5703b702895a86386e87c7103801"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7234b0f61dd9ccb2304171b5fd7ed9db133b4ea7c835c9942c9dc5bfc00c1cb"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcd76a8cc757c7cfe2d235f232a20d74ac8cebf9feabcdcbda5ef33157d14b1"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:84aff3235ad1289c4079c548cfcdf7eaaf2475b9f81557351deb42e8f45a9c2d"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b081aa7b02911e3f299f7d1f68ce8ca585a5119d44601bf4483da0aae8c2181"},
- {file = "pymssql-2.2.11-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d315f08c106c884d6b42f70c9518e765a5bc23f6d3a587346bc4e6f198768c7a"},
- {file = "pymssql-2.2.11-cp36-cp36m-win32.whl", hash = "sha256:c8b35b3d5e326729e5edb73d593103d2dbfb474bd36ee95b4e85e1f8271ba98a"},
- {file = "pymssql-2.2.11-cp36-cp36m-win_amd64.whl", hash = "sha256:139c5032e0a2765764987803f1266132fcc5da572848ccc4d29cebba794a4260"},
- {file = "pymssql-2.2.11-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:7bac28aed1d625a002e0289e0c18d1808cecbdc12e2a1a3927dbbaff66e5fff3"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4eeaacc1dbbc678f4e80c6fd6fc279468021fdf2e486adc8631ec0de6b6c0e62"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:428e32e53c554798bc2d0682a169fcb681df6b68544c4aedd1186018ea7e0447"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b621c5e32136dabc2fea25696beab0647ec336d25c04ab6d8eb8c8ee92f0e52"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658c85474ea01ca3a30de769df06f46681e882524b05c6994cd6fd985c485f27"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:070181361ab94bdaeb14b591a35d853f327bc90c660b04047d474274fbb80357"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:492e49616b58b2d6caf4a2598cb344572870171a7b65ba1ac61a5e248b6a8e1c"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:803122aec31fbd52f5d65ef3b30b3bd2dc7b2a9e3a8223d16078a25805155c45"},
- {file = "pymssql-2.2.11-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:09075e129655ab1178d2d60efb9b3fbf5cdb6da2338ecdb3a92c53a4ad7efa0c"},
- {file = "pymssql-2.2.11-cp37-cp37m-win32.whl", hash = "sha256:b4a8377527702d746c490c2ce67d17f1c351d182b49b82fae6e67ae206bf9663"},
- {file = "pymssql-2.2.11-cp37-cp37m-win_amd64.whl", hash = "sha256:167313d91606dc7a3c05b2ad60491a138b7408a8779599ab6430a48a67f133f0"},
- {file = "pymssql-2.2.11-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8d418f4dca245421242ed9df59d3bcda0cd081650df6deb1bef7f157b6a6f9dd"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f0c44169df8d23c7ce172bd90ef5deb44caf19f15990e4db266e3193071988a4"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78032e45ea33c55d430b93e55370b900479ea324fae5d5d32486cc0fdc0fedd"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:984d99ee6a2579f86c536b1b0354ad3dc9701e98a4b3953f1301b4695477cd2f"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:287c8f79a7eca0c6787405797bac0f7c502d9be151f3f823aae12042235f8426"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ea4ea296afcae34bc61e4e0ef2f503270fd4bb097b308a07a9194f1f063aa1"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a114633fa02b7eb5bc63520bf07954106c0ed0ce032449c871abb8b8c435a872"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7332db36a537cbc16640a0c3473a2e419aa5bc1f9953cada3212e7b2587de658"},
- {file = "pymssql-2.2.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cd7292d872948c1f67c8cc12158f2c8ed9873d54368139ce1f67b2262ac34029"},
- {file = "pymssql-2.2.11-cp38-cp38-win32.whl", hash = "sha256:fbca115e11685b5891755cc22b3db4348071b8d100a41e1ce93526d9c3dbf2d5"},
- {file = "pymssql-2.2.11-cp38-cp38-win_amd64.whl", hash = "sha256:452b88a4ceca7efb934b5babb365851a3c52e723642092ebc92777397c2cacdb"},
- {file = "pymssql-2.2.11-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:001242cedc73587cbb10aec4069de50febbff3c4c50f9908a215476496b3beab"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:da492482b923b9cc9ad37f0f5592c776279299db2a89c0b7fc931aaefec652d4"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:139a833e6e72a624e4f2cde803a34a616d5661dd9a5b2ae0402d9d8a597b2f1f"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e57fbfad252434d64bdf4b6a935e4241616a4cf8df7af58b9772cd91fce9309a"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5308507c2c4e94ede7e5b164870c1ba2be55abab6daf795b5529e2da4e838b6"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdca43c42d5f370358535b2107140ed550d74f9ef0fc95d2d7fa8c4e40ee48c2"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:fe0cc975aac87b364fdb55cb89642435c3e859dcd99d7260f48af94111ba2673"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4551f50c8a3b6ffbd71f794ee1c0c0134134c5d6414302c2fa28b67fe4470d07"},
- {file = "pymssql-2.2.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ae9818df40588d5a49e7476f05e31cc83dea630d607178d66762ca8cf32e9f77"},
- {file = "pymssql-2.2.11-cp39-cp39-win32.whl", hash = "sha256:15257c7bd89c0283f70d6eaafd9b872201818572b8ba1e8576408ae23ef50c7c"},
- {file = "pymssql-2.2.11-cp39-cp39-win_amd64.whl", hash = "sha256:65bb674c0ba35379bf93d1b2cf06fdc5e7ec56e1d0e9de525bdcf977190b2865"},
- {file = "pymssql-2.2.11.tar.gz", hash = "sha256:15815bf1ff9edb475ec4ef567f23e23c4e828ce119ff5bf98a072b66b8d0ac1b"},
-]
-
[[package]]
name = "pymysql"
version = "1.1.0"
@@ -7416,7 +7339,7 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \\"PyPy\\
cffi = ["cffi (>=1.11)"]
[extras]
-all = ["black", "clickhouse-connect", "dask", "datafusion", "db-dtypes", "deltalake", "duckdb", "duckdb-engine", "fsspec", "geoalchemy2", "geopandas", "google-cloud-bigquery", "google-cloud-bigquery-storage", "graphviz", "impyla", "oracledb", "packaging", "polars", "psycopg2", "pydata-google-auth", "pydruid", "pymssql", "pymysql", "pyspark", "regex", "requests", "shapely", "snowflake-connector-python", "snowflake-sqlalchemy", "sqlalchemy", "sqlalchemy-exasol", "sqlalchemy-views", "trino"]
+all = ["black", "clickhouse-connect", "dask", "datafusion", "db-dtypes", "deltalake", "duckdb", "duckdb-engine", "geoalchemy2", "geopandas", "google-cloud-bigquery", "google-cloud-bigquery-storage", "graphviz", "impyla", "oracledb", "packaging", "polars", "psycopg2", "pydata-google-auth", "pydruid", "pymysql", "pyodbc", "pyspark", "regex", "shapely", "snowflake-connector-python", "snowflake-sqlalchemy", "sqlalchemy", "sqlalchemy-exasol", "sqlalchemy-views", "trino"]
bigquery = ["db-dtypes", "google-cloud-bigquery", "google-cloud-bigquery-storage", "pydata-google-auth"]
clickhouse = ["clickhouse-connect", "sqlalchemy"]
dask = ["dask", "regex"]
@@ -7428,8 +7351,8 @@ duckdb = ["duckdb", "duckdb-engine", "sqlalchemy", "sqlalchemy-views"]
exasol = ["sqlalchemy", "sqlalchemy-exasol", "sqlalchemy-views"]
flink = []
geospatial = ["geoalchemy2", "geopandas", "shapely"]
-impala = ["fsspec", "impyla", "requests", "sqlalchemy"]
-mssql = ["pymssql", "sqlalchemy", "sqlalchemy-views"]
+impala = ["impyla", "sqlalchemy"]
+mssql = ["pyodbc", "sqlalchemy", "sqlalchemy-views"]
mysql = ["pymysql", "sqlalchemy", "sqlalchemy-views"]
oracle = ["oracledb", "packaging", "sqlalchemy", "sqlalchemy-views"]
pandas = ["regex"]
@@ -7444,4 +7367,4 @@ visualization = ["graphviz"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
-content-hash = "e33849b55adc9ca33aa5b98b94dbeca72c6cef7e7150890fe3c3adba206b3892"
+content-hash = "7cdedb3e9657196bfe4485e8cdb35998c826cca681595b38f39e1ba253c2886c"
|
|
refactor: make EntityProperty interface generic (use keyof T on name)
|
8c9ee4d0c15200b4f2a2a93abc6885caf3e6f419
|
refactor
|
https://github.com/mikro-orm/mikro-orm/commit/8c9ee4d0c15200b4f2a2a93abc6885caf3e6f419
|
make EntityProperty interface generic (use keyof T on name)
|
diff --git a/Entity.ts b/Entity.ts
index 7d7e4d8..6bad43b 100644
--- a/Entity.ts
+++ b/Entity.ts
@@ -53,10 +53,10 @@ export type EntityName<T extends IEntityType<T>> = string | EntityClass<T>;
export type EntityData<T extends IEntityType<T>> = { [P in keyof T]?: T[P] | IPrimaryKey; } & Record<string, any>;
-export interface EntityProperty {
- name: string;
+export interface EntityProperty<T extends IEntityType<T> = any> {
+ name: string & keyof T;
fk: string;
- entity: () => EntityName<IEntity>;
+ entity: () => EntityName<T>;
type: string;
primary: boolean;
length?: any;
@@ -84,7 +84,7 @@ export interface EntityMetadata<T extends IEntityType<T> = any> {
path: string;
primaryKey: keyof T & string;
serializedPrimaryKey: keyof T & string;
- properties: { [K in keyof T & string]: EntityProperty };
+ properties: { [K in keyof T & string]: EntityProperty<T> };
customRepository: () => { new (em: EntityManager, entityName: EntityName<T>): EntityRepository<T> };
hooks: Record<string, string[]>;
prototype: EntityClass<T> & IEntity;
|
|
fix: infinite recursion bugs
|
c31e93052b3d3390d53340590e78b24e786a4efb
|
fix
|
https://github.com/erg-lang/erg/commit/c31e93052b3d3390d53340590e78b24e786a4efb
|
infinite recursion bugs
|
diff --git a/eval.rs b/eval.rs
index 8cfb16b..a687055 100644
--- a/eval.rs
+++ b/eval.rs
@@ -1337,6 +1337,8 @@ impl Context {
}
TyParam::FreeVar(fv) if fv.is_linked() => self.convert_tp_into_type(fv.crack().clone()),
TyParam::Type(t) => Ok(t.as_ref().clone()),
+ TyParam::Mono(name) => Ok(Type::Mono(name)),
+ // TyParam::Erased(_t) => Ok(Type::Obj),
TyParam::Value(v) => self.convert_value_into_type(v).map_err(TyParam::Value),
// TODO: Dict, Set
other => Err(other),
@@ -1672,7 +1674,7 @@ impl Context {
line!() as usize,
().loc(),
self.caused_by(),
- &tp.qual_name().unwrap_or("_".into()),
+ &tp.to_string(),
)
})?;
if qt.is_generalized() {
diff --git a/inquire.rs b/inquire.rs
index d853dd4..96bbce8 100644
--- a/inquire.rs
+++ b/inquire.rs
@@ -4,14 +4,15 @@ use std::path::{Path, PathBuf};
use erg_common::config::Input;
use erg_common::consts::{ERG_MODE, PYTHON_MODE};
-use erg_common::dict;
use erg_common::error::{ErrorCore, Location, SubMessage};
use erg_common::levenshtein;
use erg_common::set::Set;
use erg_common::traits::{Locational, NoTypeDisplay, Stream};
use erg_common::triple::Triple;
use erg_common::Str;
-use erg_common::{fmt_option, fmt_slice, log, option_enum_unwrap, set, switch_lang};
+use erg_common::{
+ dict, fmt_option, fmt_slice, get_hash, log, option_enum_unwrap, set, switch_lang,
+};
use erg_parser::ast::{self, Identifier, VarName};
use erg_parser::token::Token;
@@ -1024,20 +1025,23 @@ impl Context {
let coerced = self
.coerce(obj.t(), &())
.map_err(|mut errs| errs.remove(0))?;
- if &coerced == obj.ref_t() {
- Err(TyCheckError::no_attr_error(
- self.cfg.input.clone(),
- line!() as usize,
- attr_name.loc(),
- namespace.name.to_string(),
- obj.ref_t(),
- attr_name.inspect(),
- self.get_similar_attr(obj.ref_t(), attr_name.inspect()),
- ))
- } else {
+ if &coerced != obj.ref_t() {
+ let hash = get_hash(obj.ref_t());
obj.ref_t().coerce();
- self.search_method_info(obj, attr_name, pos_args, kw_args, input, namespace)
+ if get_hash(obj.ref_t()) != hash {
+ return self
+ .search_method_info(obj, attr_name, pos_args, kw_args, input, namespace);
+ }
}
+ Err(TyCheckError::no_attr_error(
+ self.cfg.input.clone(),
+ line!() as usize,
+ attr_name.loc(),
+ namespace.name.to_string(),
+ obj.ref_t(),
+ attr_name.inspect(),
+ self.get_similar_attr(obj.ref_t(), attr_name.inspect()),
+ ))
}
fn validate_visibility(
@@ -1263,12 +1267,13 @@ impl Context {
return Err(self.not_callable_error(obj, attr_name, instance, None));
}
if sub != Never {
+ let hash = get_hash(instance);
instance.coerce();
if instance.is_quantified_subr() {
let instance = self.instantiate(instance.clone(), obj)?;
self.substitute_call(obj, attr_name, &instance, pos_args, kw_args)?;
return Ok(SubstituteResult::Coerced(instance));
- } else {
+ } else if get_hash(instance) != hash {
return self
.substitute_call(obj, attr_name, instance, pos_args, kw_args);
}
|
|
fix: Parse refs from bytes, not from String.
The latter can cause issues around ill-formed UTF-8 which wouldn't
bother git either.
This comes at the expense of not parsing line by line anymore, but
instead reading as fast as possible and parsing afterwards.
Performance wise I think it doesn't matter, but it will cause
more memory to be used. If this ever becomes a problem,
for example during pushes where we are stuck with V1, we can consider
implementing our own streaming approach that works with packet lines
instead - they are just not exposed here even though they could.
|
806b8c2ef392137f3a6ebd0f28da2a3a07a9f3eb
|
fix
|
https://github.com/Byron/gitoxide/commit/806b8c2ef392137f3a6ebd0f28da2a3a07a9f3eb
|
Parse refs from bytes, not from String.
The latter can cause issues around ill-formed UTF-8 which wouldn't
bother git either.
This comes at the expense of not parsing line by line anymore, but
instead reading as fast as possible and parsing afterwards.
Performance wise I think it doesn't matter, but it will cause
more memory to be used. If this ever becomes a problem,
for example during pushes where we are stuck with V1, we can consider
implementing our own streaming approach that works with packet lines
instead - they are just not exposed here even though they could.
|
diff --git a/Cargo.toml b/Cargo.toml
index 808b693..fb9b828 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -46,7 +46,7 @@ git-credentials = { version = "^0.7.0", path = "../git-credentials" }
thiserror = "1.0.32"
serde = { version = "1.0.114", optional = true, default-features = false, features = ["derive"]}
-bstr = { version = "1.0.1", default-features = false, features = ["std"] }
+bstr = { version = "1.0.1", default-features = false, features = ["std", "unicode"] }
nom = { version = "7", default-features = false, features = ["std"]}
btoi = "0.4.2"
diff --git a/tests.rs b/tests.rs
index f5ad7cd..1776007 100644
--- a/tests.rs
+++ b/tests.rs
@@ -15,6 +15,8 @@ unborn refs/heads/symbolic symref-target:refs/heads/target
"
.as_bytes();
+ #[cfg(feature = "blocking-client")]
+ let input = &mut Fixture(input);
let out = refs::from_v2_refs(input).await.expect("no failure on valid input");
assert_eq!(
@@ -56,6 +58,7 @@ unborn refs/heads/symbolic symref-target:refs/heads/target
#[maybe_async::test(feature = "blocking-client", async(feature = "async-client", async_std::test))]
async fn extract_references_from_v1_refs() {
+ #[cfg_attr(feature = "blocking-client", allow(unused_mut))]
let input = &mut "73a6868963993a3328e7d8fe94e5a6ac5078a944 HEAD
21c9b7500cb144b3169a6537961ec2b9e865be81 MISSING_NAMESPACE_TARGET
73a6868963993a3328e7d8fe94e5a6ac5078a944 refs/heads/main
@@ -63,6 +66,8 @@ async fn extract_references_from_v1_refs() {
dce0ea858eef7ff61ad345cc5cdac62203fb3c10 refs/tags/git-commitgraph-v0.0.0
21c9b7500cb144b3169a6537961ec2b9e865be81 refs/tags/git-commitgraph-v0.0.0^{}"
.as_bytes();
+ #[cfg(feature = "blocking-client")]
+ let input = &mut Fixture(input);
let out = refs::from_v1_refs_received_as_part_of_handshake_and_capabilities(
input,
Capabilities::from_bytes(b"\\0symref=HEAD:refs/heads/main symref=MISSING_NAMESPACE_TARGET:(null)")
@@ -106,7 +111,7 @@ fn extract_symbolic_references_from_capabilities() -> Result<(), client::Error>
let caps = client::Capabilities::from_bytes(
b"\\0unrelated symref=HEAD:refs/heads/main symref=ANOTHER:refs/heads/foo symref=MISSING_NAMESPACE_TARGET:(null) agent=git/2.28.0",
)?
- .0;
+ .0;
let out = refs::shared::from_capabilities(caps.iter()).expect("a working example");
assert_eq!(
@@ -128,3 +133,38 @@ fn extract_symbolic_references_from_capabilities() -> Result<(), client::Error>
);
Ok(())
}
+
+#[cfg(feature = "blocking-client")]
+struct Fixture<'a>(&'a [u8]);
+
+#[cfg(feature = "blocking-client")]
+impl<'a> std::io::Read for Fixture<'a> {
+ fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+ self.0.read(buf)
+ }
+}
+
+#[cfg(feature = "blocking-client")]
+impl<'a> std::io::BufRead for Fixture<'a> {
+ fn fill_buf(&mut self) -> std::io::Result<&[u8]> {
+ self.0.fill_buf()
+ }
+
+ fn consume(&mut self, amt: usize) {
+ self.0.consume(amt)
+ }
+}
+
+#[cfg(feature = "blocking-client")]
+impl<'a> git_transport::client::ReadlineBufRead for Fixture<'a> {
+ fn readline(
+ &mut self,
+ ) -> Option<std::io::Result<Result<git_packetline::PacketLineRef<'_>, git_packetline::decode::Error>>> {
+ use bstr::{BStr, ByteSlice};
+ let bytes: &BStr = self.0.into();
+ let mut lines = bytes.lines();
+ let res = lines.next()?;
+ self.0 = lines.as_bytes();
+ Some(Ok(Ok(git_packetline::PacketLineRef::Data(res))))
+ }
+}
diff --git a/arguments.rs b/arguments.rs
index f79f4a6..1e560a6 100644
--- a/arguments.rs
+++ b/arguments.rs
@@ -1,333 +0,0 @@
-use bstr::ByteSlice;
-use git_transport::Protocol;
-
-use crate::fetch;
-
-fn arguments_v1(features: impl IntoIterator<Item = &'static str>) -> fetch::Arguments {
- fetch::Arguments::new(Protocol::V1, features.into_iter().map(|n| (n, None)).collect())
-}
-
-fn arguments_v2(features: impl IntoIterator<Item = &'static str>) -> fetch::Arguments {
- fetch::Arguments::new(Protocol::V2, features.into_iter().map(|n| (n, None)).collect())
-}
-
-struct Transport<T> {
- inner: T,
- stateful: bool,
-}
-
-#[cfg(feature = "blocking-client")]
-mod impls {
- use std::borrow::Cow;
-
- use bstr::BStr;
- use git_transport::{
- client,
- client::{Error, MessageKind, RequestWriter, SetServiceResponse, WriteMode},
- Protocol, Service,
- };
-
- use crate::fetch::tests::arguments::Transport;
-
- impl<T: client::TransportWithoutIO> client::TransportWithoutIO for Transport<T> {
- fn set_identity(&mut self, identity: client::Account) -> Result<(), Error> {
- self.inner.set_identity(identity)
- }
-
- fn request(&mut self, write_mode: WriteMode, on_into_read: MessageKind) -> Result<RequestWriter<'_>, Error> {
- self.inner.request(write_mode, on_into_read)
- }
-
- fn to_url(&self) -> Cow<'_, BStr> {
- self.inner.to_url()
- }
-
- fn supported_protocol_versions(&self) -> &[Protocol] {
- self.inner.supported_protocol_versions()
- }
-
- fn connection_persists_across_multiple_requests(&self) -> bool {
- self.stateful
- }
-
- fn configure(
- &mut self,
- config: &dyn std::any::Any,
- ) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
- self.inner.configure(config)
- }
- }
-
- impl<T: client::Transport> client::Transport for Transport<T> {
- fn handshake<'a>(
- &mut self,
- service: Service,
- extra_parameters: &'a [(&'a str, Option<&'a str>)],
- ) -> Result<SetServiceResponse<'_>, Error> {
- self.inner.handshake(service, extra_parameters)
- }
- }
-}
-
-#[cfg(feature = "async-client")]
-mod impls {
- use std::borrow::Cow;
-
- use async_trait::async_trait;
- use bstr::BStr;
- use git_transport::{
- client,
- client::{Error, MessageKind, RequestWriter, SetServiceResponse, WriteMode},
- Protocol, Service,
- };
-
- use crate::fetch::tests::arguments::Transport;
- impl<T: client::TransportWithoutIO + Send> client::TransportWithoutIO for Transport<T> {
- fn set_identity(&mut self, identity: client::Account) -> Result<(), Error> {
- self.inner.set_identity(identity)
- }
-
- fn request(&mut self, write_mode: WriteMode, on_into_read: MessageKind) -> Result<RequestWriter<'_>, Error> {
- self.inner.request(write_mode, on_into_read)
- }
-
- fn to_url(&self) -> Cow<'_, BStr> {
- self.inner.to_url()
- }
-
- fn supported_protocol_versions(&self) -> &[Protocol] {
- self.inner.supported_protocol_versions()
- }
-
- fn connection_persists_across_multiple_requests(&self) -> bool {
- self.stateful
- }
-
- fn configure(
- &mut self,
- config: &dyn std::any::Any,
- ) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
- self.inner.configure(config)
- }
- }
-
- #[async_trait(?Send)]
- impl<T: client::Transport + Send> client::Transport for Transport<T> {
- async fn handshake<'a>(
- &mut self,
- service: Service,
- extra_parameters: &'a [(&'a str, Option<&'a str>)],
- ) -> Result<SetServiceResponse<'_>, Error> {
- self.inner.handshake(service, extra_parameters).await
- }
- }
-}
-
-fn transport(
- out: &mut Vec<u8>,
- stateful: bool,
-) -> Transport<git_transport::client::git::Connection<&'static [u8], &mut Vec<u8>>> {
- Transport {
- inner: git_transport::client::git::Connection::new(
- &[],
- out,
- Protocol::V1, // does not matter
- b"does/not/matter".as_bstr().to_owned(),
- None::<(&str, _)>,
- git_transport::client::git::ConnectMode::Process, // avoid header to be sent
- ),
- stateful,
- }
-}
-
-fn id(hex: &str) -> git_hash::ObjectId {
- git_hash::ObjectId::from_hex(hex.as_bytes()).expect("expect valid hex id")
-}
-
-mod v1 {
- use bstr::ByteSlice;
-
- use crate::fetch::tests::arguments::{arguments_v1, id, transport};
-
- #[maybe_async::test(feature = "blocking-client", async(feature = "async-client", async_std::test))]
- async fn haves_and_wants_for_clone() {
- let mut out = Vec::new();
- let mut t = transport(&mut out, true);
- let mut arguments = arguments_v1(["feature-a", "feature-b"].iter().cloned());
-
- arguments.want(id("7b333369de1221f9bfbbe03a3a13e9a09bc1c907"));
- arguments.want(id("ff333369de1221f9bfbbe03a3a13e9a09bc1ffff"));
- arguments.send(&mut t, true).await.expect("sending to buffer to work");
- assert_eq!(
- out.as_bstr(),
- b"0046want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a feature-b
-0032want ff333369de1221f9bfbbe03a3a13e9a09bc1ffff
-00000009done
-"
- .as_bstr()
- );
- }
-
- #[maybe_async::test(feature = "blocking-client", async(feature = "async-client", async_std::test))]
- async fn haves_and_wants_for_fetch_stateless() {
- let mut out = Vec::new();
- let mut t = transport(&mut out, false);
- let mut arguments = arguments_v1(["feature-a", "shallow", "deepen-since", "deepen-not"].iter().copied());
-
- arguments.deepen(1);
- arguments.shallow(id("7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff"));
- arguments.want(id("7b333369de1221f9bfbbe03a3a13e9a09bc1c907"));
- arguments.deepen_since(12345);
- arguments.deepen_not("refs/heads/main".into());
- arguments.have(id("0000000000000000000000000000000000000000"));
- arguments.send(&mut t, false).await.expect("sending to buffer to work");
-
- arguments.have(id("1111111111111111111111111111111111111111"));
- arguments.send(&mut t, true).await.expect("sending to buffer to work");
- assert_eq!(
- out.as_bstr(),
- b"005cwant 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a shallow deepen-since deepen-not
-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff
-000ddeepen 1
-0017deepen-since 12345
-001fdeepen-not refs/heads/main
-00000032have 0000000000000000000000000000000000000000
-0000005cwant 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a shallow deepen-since deepen-not
-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff
-000ddeepen 1
-0017deepen-since 12345
-001fdeepen-not refs/heads/main
-00000032have 1111111111111111111111111111111111111111
-0009done
-"
- .as_bstr()
- );
- }
-
- #[maybe_async::test(feature = "blocking-client", async(feature = "async-client", async_std::test))]
- async fn haves_and_wants_for_fetch_stateful() {
- let mut out = Vec::new();
- let mut t = transport(&mut out, true);
- let mut arguments = arguments_v1(["feature-a", "shallow"].iter().copied());
-
- arguments.deepen(1);
- arguments.want(id("7b333369de1221f9bfbbe03a3a13e9a09bc1c907"));
- arguments.have(id("0000000000000000000000000000000000000000"));
- arguments.send(&mut t, false).await.expect("sending to buffer to work");
-
- arguments.have(id("1111111111111111111111111111111111111111"));
- arguments.send(&mut t, true).await.expect("sending to buffer to work");
- assert_eq!(
- out.as_bstr(),
- b"0044want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a shallow
-000ddeepen 1
-00000032have 0000000000000000000000000000000000000000
-00000032have 1111111111111111111111111111111111111111
-0009done
-"
- .as_bstr()
- );
- }
-}
-
-mod v2 {
- use bstr::ByteSlice;
-
- use crate::fetch::tests::arguments::{arguments_v2, id, transport};
-
- #[maybe_async::test(feature = "blocking-client", async(feature = "async-client", async_std::test))]
- async fn haves_and_wants_for_clone_stateful() {
- let mut out = Vec::new();
- let mut t = transport(&mut out, true);
- let mut arguments = arguments_v2(["feature-a", "shallow"].iter().copied());
-
- arguments.deepen(1);
- arguments.deepen_relative();
- arguments.want(id("7b333369de1221f9bfbbe03a3a13e9a09bc1c907"));
- arguments.want(id("ff333369de1221f9bfbbe03a3a13e9a09bc1ffff"));
- arguments.send(&mut t, true).await.expect("sending to buffer to work");
- assert_eq!(
- out.as_bstr(),
- b"0012command=fetch
-0001000ethin-pack
-0010include-tag
-000eofs-delta
-000ddeepen 1
-0014deepen-relative
-0032want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907
-0032want ff333369de1221f9bfbbe03a3a13e9a09bc1ffff
-0009done
-0000"
- .as_bstr(),
- "we filter features/capabilities without value as these apparently shouldn't be listed (remote dies otherwise)"
- );
- }
-
- #[maybe_async::test(feature = "blocking-client", async(feature = "async-client", async_std::test))]
- async fn haves_and_wants_for_fetch_stateless_and_stateful() {
- for is_stateful in &[false, true] {
- let mut out = Vec::new();
- let mut t = transport(&mut out, *is_stateful);
- let mut arguments = arguments_v2(Some("shallow"));
-
- arguments.deepen(1);
- arguments.deepen_since(12345);
- arguments.shallow(id("7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff"));
- arguments.want(id("7b333369de1221f9bfbbe03a3a13e9a09bc1c907"));
- arguments.deepen_not("refs/heads/main".into());
- arguments.have(id("0000000000000000000000000000000000000000"));
- arguments.send(&mut t, false).await.expect("sending to buffer to work");
-
- arguments.have(id("1111111111111111111111111111111111111111"));
- arguments.send(&mut t, true).await.expect("sending to buffer to work");
- assert_eq!(
- out.as_bstr(),
- b"0012command=fetch
-0001000ethin-pack
-0010include-tag
-000eofs-delta
-000ddeepen 1
-0017deepen-since 12345
-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff
-0032want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907
-001fdeepen-not refs/heads/main
-0032have 0000000000000000000000000000000000000000
-00000012command=fetch
-0001000ethin-pack
-0010include-tag
-000eofs-delta
-000ddeepen 1
-0017deepen-since 12345
-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff
-0032want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907
-001fdeepen-not refs/heads/main
-0032have 1111111111111111111111111111111111111111
-0009done
-0000"
- .as_bstr(),
- "V2 is stateless by default, so it repeats all but 'haves' in each request"
- );
- }
- }
-
- #[maybe_async::test(feature = "blocking-client", async(feature = "async-client", async_std::test))]
- async fn ref_in_want() {
- let mut out = Vec::new();
- let mut t = transport(&mut out, false);
- let mut arguments = arguments_v2(["ref-in-want"].iter().copied());
-
- arguments.want_ref(b"refs/heads/main".as_bstr());
- arguments.send(&mut t, true).await.expect("sending to buffer to work");
- assert_eq!(
- out.as_bstr(),
- b"0012command=fetch
-0001000ethin-pack
-0010include-tag
-000eofs-delta
-001dwant-ref refs/heads/main
-0009done
-0000"
- .as_bstr()
- )
- }
-}
diff --git a/mod.rs b/mod.rs
index ccad119..f7dab7d 100644
--- a/mod.rs
+++ b/mod.rs
@@ -13,17 +13,19 @@ pub mod parse {
#[error(transparent)]
Io(#[from] std::io::Error),
#[error(transparent)]
+ DecodePacketline(#[from] git_transport::packetline::decode::Error),
+ #[error(transparent)]
Id(#[from] git_hash::decode::Error),
#[error("{symref:?} could not be parsed. A symref is expected to look like <NAME>:<target>.")]
MalformedSymref { symref: BString },
#[error("{0:?} could not be parsed. A V1 ref line should be '<hex-hash> <path>'.")]
- MalformedV1RefLine(String),
+ MalformedV1RefLine(BString),
#[error(
"{0:?} could not be parsed. A V2 ref line should be '<hex-hash> <path>[ (peeled|symref-target):<value>'."
)]
- MalformedV2RefLine(String),
+ MalformedV2RefLine(BString),
#[error("The ref attribute {attribute:?} is unknown. Found in line {line:?}")]
- UnkownAttribute { attribute: String, line: String },
+ UnkownAttribute { attribute: BString, line: BString },
#[error("{message}")]
InvariantViolation { message: &'static str },
}
@@ -65,3 +67,6 @@ pub use async_io::{from_v1_refs_received_as_part_of_handshake_and_capabilities,
mod blocking_io;
#[cfg(feature = "blocking-client")]
pub use blocking_io::{from_v1_refs_received_as_part_of_handshake_and_capabilities, from_v2_refs};
+
+#[cfg(test)]
+mod tests;
diff --git a/async_io.rs b/async_io.rs
index d3ba694..8b35c91 100644
--- a/async_io.rs
+++ b/async_io.rs
@@ -1,19 +1,17 @@
use futures_io::AsyncBufRead;
-use futures_lite::AsyncBufReadExt;
+use futures_lite::AsyncReadExt;
use crate::handshake::{refs, refs::parse::Error, Ref};
+use bstr::ByteSlice;
/// Parse refs from the given input line by line. Protocol V2 is required for this to succeed.
pub async fn from_v2_refs(in_refs: &mut (dyn AsyncBufRead + Unpin)) -> Result<Vec<Ref>, Error> {
let mut out_refs = Vec::new();
- let mut line = String::new();
- loop {
- line.clear();
- let bytes_read = in_refs.read_line(&mut line).await?;
- if bytes_read == 0 {
- break;
- }
- out_refs.push(refs::shared::parse_v2(&line)?);
+ let mut buf = Vec::new();
+
+ in_refs.read_to_end(&mut buf).await?;
+ for line in ByteSlice::lines(buf.as_slice()) {
+ out_refs.push(refs::shared::parse_v2(line.into())?);
}
Ok(out_refs)
}
@@ -32,14 +30,11 @@ pub async fn from_v1_refs_received_as_part_of_handshake_and_capabilities<'a>(
) -> Result<Vec<Ref>, refs::parse::Error> {
let mut out_refs = refs::shared::from_capabilities(capabilities)?;
let number_of_possible_symbolic_refs_for_lookup = out_refs.len();
- let mut line = String::new();
- loop {
- line.clear();
- let bytes_read = in_refs.read_line(&mut line).await?;
- if bytes_read == 0 {
- break;
- }
- refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, &line)?;
+
+ let mut buf = Vec::new();
+ in_refs.read_to_end(&mut buf).await?;
+ for line in buf.as_slice().lines() {
+ refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, line.into())?;
}
Ok(out_refs.into_iter().map(Into::into).collect())
}
diff --git a/blocking_io.rs b/blocking_io.rs
index 69e4acc..dc11b9e 100644
--- a/blocking_io.rs
+++ b/blocking_io.rs
@@ -1,18 +1,10 @@
-use std::io;
-
use crate::handshake::{refs, refs::parse::Error, Ref};
/// Parse refs from the given input line by line. Protocol V2 is required for this to succeed.
-pub fn from_v2_refs(in_refs: &mut dyn io::BufRead) -> Result<Vec<Ref>, Error> {
+pub fn from_v2_refs(in_refs: &mut dyn git_transport::client::ReadlineBufRead) -> Result<Vec<Ref>, Error> {
let mut out_refs = Vec::new();
- let mut line = String::new();
- loop {
- line.clear();
- let bytes_read = in_refs.read_line(&mut line)?;
- if bytes_read == 0 {
- break;
- }
- out_refs.push(refs::shared::parse_v2(&line)?);
+ while let Some(line) = in_refs.readline().transpose()?.transpose()?.and_then(|l| l.as_bstr()) {
+ out_refs.push(refs::shared::parse_v2(line)?);
}
Ok(out_refs)
}
@@ -26,19 +18,14 @@ pub fn from_v2_refs(in_refs: &mut dyn io::BufRead) -> Result<Vec<Ref>, Error> {
/// Symbolic refs are shoe-horned into server capabilities whereas refs (without symbolic ones) are sent automatically as
/// part of the handshake. Both symbolic and peeled refs need to be combined to fit into the [`Ref`] type provided here.
pub fn from_v1_refs_received_as_part_of_handshake_and_capabilities<'a>(
- in_refs: &mut dyn io::BufRead,
+ in_refs: &mut dyn git_transport::client::ReadlineBufRead,
capabilities: impl Iterator<Item = git_transport::client::capabilities::Capability<'a>>,
) -> Result<Vec<Ref>, Error> {
let mut out_refs = refs::shared::from_capabilities(capabilities)?;
let number_of_possible_symbolic_refs_for_lookup = out_refs.len();
- let mut line = String::new();
- loop {
- line.clear();
- let bytes_read = in_refs.read_line(&mut line)?;
- if bytes_read == 0 {
- break;
- }
- refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, &line)?;
+
+ while let Some(line) = in_refs.readline().transpose()?.transpose()?.and_then(|l| l.as_bstr()) {
+ refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, line)?;
}
Ok(out_refs.into_iter().map(Into::into).collect())
}
diff --git a/shared.rs b/shared.rs
index 14f6119..4d24d11 100644
--- a/shared.rs
+++ b/shared.rs
@@ -1,4 +1,4 @@
-use bstr::{BString, ByteSlice};
+use bstr::{BStr, BString, ByteSlice};
use crate::handshake::{refs::parse::Error, Ref};
@@ -70,7 +70,7 @@ impl InternalRef {
_ => None,
}
}
- fn lookup_symbol_has_path(&self, predicate_path: &str) -> bool {
+ fn lookup_symbol_has_path(&self, predicate_path: &BStr) -> bool {
matches!(self, InternalRef::SymbolicForLookup { path, .. } if path == predicate_path)
}
}
@@ -109,19 +109,19 @@ pub(crate) fn from_capabilities<'a>(
pub(in crate::handshake::refs) fn parse_v1(
num_initial_out_refs: usize,
out_refs: &mut Vec<InternalRef>,
- line: &str,
+ line: &BStr,
) -> Result<(), Error> {
let trimmed = line.trim_end();
let (hex_hash, path) = trimmed.split_at(
trimmed
- .find(' ')
- .ok_or_else(|| Error::MalformedV1RefLine(trimmed.to_owned()))?,
+ .find(b" ")
+ .ok_or_else(|| Error::MalformedV1RefLine(trimmed.to_owned().into()))?,
);
let path = &path[1..];
if path.is_empty() {
- return Err(Error::MalformedV1RefLine(trimmed.to_owned()));
+ return Err(Error::MalformedV1RefLine(trimmed.to_owned().into()));
}
- match path.strip_suffix("^{}") {
+ match path.strip_suffix(b"^{}") {
Some(stripped) => {
let (previous_path, tag) =
out_refs
@@ -146,7 +146,7 @@ pub(in crate::handshake::refs) fn parse_v1(
match out_refs
.iter()
.take(num_initial_out_refs)
- .position(|r| r.lookup_symbol_has_path(path))
+ .position(|r| r.lookup_symbol_has_path(path.into()))
{
Some(position) => match out_refs.swap_remove(position) {
InternalRef::SymbolicForLookup { path: _, target } => out_refs.push(InternalRef::Symbolic {
@@ -166,36 +166,36 @@ pub(in crate::handshake::refs) fn parse_v1(
Ok(())
}
-pub(in crate::handshake::refs) fn parse_v2(line: &str) -> Result<Ref, Error> {
+pub(in crate::handshake::refs) fn parse_v2(line: &BStr) -> Result<Ref, Error> {
let trimmed = line.trim_end();
- let mut tokens = trimmed.splitn(3, ' ');
+ let mut tokens = trimmed.splitn(3, |b| *b == b' ');
match (tokens.next(), tokens.next()) {
(Some(hex_hash), Some(path)) => {
- let id = if hex_hash == "unborn" {
+ let id = if hex_hash == b"unborn" {
None
} else {
Some(git_hash::ObjectId::from_hex(hex_hash.as_bytes())?)
};
if path.is_empty() {
- return Err(Error::MalformedV2RefLine(trimmed.to_owned()));
+ return Err(Error::MalformedV2RefLine(trimmed.to_owned().into()));
}
Ok(if let Some(attribute) = tokens.next() {
- let mut tokens = attribute.splitn(2, ':');
+ let mut tokens = attribute.splitn(2, |b| *b == b':');
match (tokens.next(), tokens.next()) {
(Some(attribute), Some(value)) => {
if value.is_empty() {
- return Err(Error::MalformedV2RefLine(trimmed.to_owned()));
+ return Err(Error::MalformedV2RefLine(trimmed.to_owned().into()));
}
match attribute {
- "peeled" => Ref::Peeled {
+ b"peeled" => Ref::Peeled {
full_ref_name: path.into(),
object: git_hash::ObjectId::from_hex(value.as_bytes())?,
tag: id.ok_or(Error::InvariantViolation {
message: "got 'unborn' as tag target",
})?,
},
- "symref-target" => match value {
- "(null)" => Ref::Direct {
+ b"symref-target" => match value {
+ b"(null)" => Ref::Direct {
full_ref_name: path.into(),
object: id.ok_or(Error::InvariantViolation {
message: "got 'unborn' while (null) was a symref target",
@@ -215,13 +215,13 @@ pub(in crate::handshake::refs) fn parse_v2(line: &str) -> Result<Ref, Error> {
},
_ => {
return Err(Error::UnkownAttribute {
- attribute: attribute.to_owned(),
- line: trimmed.to_owned(),
+ attribute: attribute.to_owned().into(),
+ line: trimmed.to_owned().into(),
})
}
}
}
- _ => return Err(Error::MalformedV2RefLine(trimmed.to_owned())),
+ _ => return Err(Error::MalformedV2RefLine(trimmed.to_owned().into())),
}
} else {
Ref::Direct {
@@ -232,6 +232,6 @@ pub(in crate::handshake::refs) fn parse_v2(line: &str) -> Result<Ref, Error> {
}
})
}
- _ => Err(Error::MalformedV2RefLine(trimmed.to_owned())),
+ _ => Err(Error::MalformedV2RefLine(trimmed.to_owned().into())),
}
}
|
|
test: add "describeMethods" scope
|
761adace7c9680c7e16a0f69096cb3b4f66d7410
|
test
|
https://github.com/pmndrs/react-spring/commit/761adace7c9680c7e16a0f69096cb3b4f66d7410
|
add "describeMethods" scope
|
diff --git a/SpringValue.test.ts b/SpringValue.test.ts
index 940c43d..9ee539d 100644
--- a/SpringValue.test.ts
+++ b/SpringValue.test.ts
@@ -15,41 +15,7 @@ describe('SpringValue', () => {
})
describeProps()
-
- describe('"set" method', () => {
- it('stops the active animation', async () => {
- const spring = new SpringValue(0)
- const promise = spring.start(1)
-
- await advanceUntilValue(spring, 0.5)
- spring.set(2)
-
- expect(spring.idle).toBeTruthy()
- expect(await promise).toMatchObject({
- finished: false,
- value: 2,
- })
- })
-
- describe('when a new value is passed', () => {
- it('calls the "onChange" prop', () => {
- const onChange = jest.fn()
- const spring = new SpringValue(0, { onChange })
- spring.set(1)
- expect(onChange).toBeCalledWith(1, spring)
- })
- it.todo('wraps the "onChange" call with "batchedUpdates"')
- })
-
- describe('when the current value is passed', () => {
- it('skips the "onChange" call', () => {
- const onChange = jest.fn()
- const spring = new SpringValue(0, { onChange })
- spring.set(0)
- expect(onChange).not.toBeCalled()
- })
- })
- })
+ describeMethods()
describeTarget('another SpringValue', from => {
const node = new SpringValue(from)
@@ -128,6 +94,43 @@ function describeConfigProp() {
})
}
+function describeMethods() {
+ describe('"set" method', () => {
+ it('stops the active animation', async () => {
+ const spring = new SpringValue(0)
+ const promise = spring.start(1)
+
+ await advanceUntilValue(spring, 0.5)
+ spring.set(2)
+
+ expect(spring.idle).toBeTruthy()
+ expect(await promise).toMatchObject({
+ finished: false,
+ value: 2,
+ })
+ })
+
+ describe('when a new value is passed', () => {
+ it('calls the "onChange" prop', () => {
+ const onChange = jest.fn()
+ const spring = new SpringValue(0, { onChange })
+ spring.set(1)
+ expect(onChange).toBeCalledWith(1, spring)
+ })
+ it.todo('wraps the "onChange" call with "batchedUpdates"')
+ })
+
+ describe('when the current value is passed', () => {
+ it('skips the "onChange" call', () => {
+ const onChange = jest.fn()
+ const spring = new SpringValue(0, { onChange })
+ spring.set(0)
+ expect(onChange).not.toBeCalled()
+ })
+ })
+ })
+}
+
/** The minimum requirements for testing a dynamic target */
type OpaqueTarget = {
node: FrameValue
|
|
test(benchmarks): add `to_pyarrow` benchmark for duckdb
|
a80cac77f749a03d04c5f37edc152ce15ea0c43e
|
test
|
https://github.com/rohankumardubey/ibis/commit/a80cac77f749a03d04c5f37edc152ce15ea0c43e
|
add `to_pyarrow` benchmark for duckdb
|
diff --git a/test_benchmarks.py b/test_benchmarks.py
index 2534322..4fc1c9f 100644
--- a/test_benchmarks.py
+++ b/test_benchmarks.py
@@ -753,3 +753,59 @@ def test_parse_many_duckdb_types(benchmark):
types = ["VARCHAR", "INTEGER", "DOUBLE", "BIGINT"] * 1000
benchmark(parse_many, types)
+
+
[email protected](scope="session")
+def sql() -> str:
+ return """
+ SELECT t1.id as t1_id, x, t2.id as t2_id, y
+ FROM t1 INNER JOIN t2
+ ON t1.id = t2.id
+ """
+
+
[email protected](scope="session")
+def ddb(tmp_path_factory):
+ duckdb = pytest.importorskip("duckdb")
+
+ N = 20_000_000
+
+ con = duckdb.connect()
+
+ path = str(tmp_path_factory.mktemp("duckdb") / "data.ddb")
+ sql = (
+ lambda var, table, n=N: f"""
+ CREATE TABLE {table} AS
+ SELECT ROW_NUMBER() OVER () AS id, {var}
+ FROM (
+ SELECT {var}
+ FROM RANGE({n}) _ ({var})
+ ORDER BY RANDOM()
+ )
+ """
+ )
+
+ with duckdb.connect(path) as con:
+ con.execute(sql("x", table="t1"))
+ con.execute(sql("y", table="t2"))
+ return path
+
+
+def test_duckdb_to_pyarrow(benchmark, sql, ddb) -> None:
+ # yes, we're benchmarking duckdb here, not ibis
+ #
+ # we do this to get a baseline for comparison
+ duckdb = pytest.importorskip("duckdb")
+ con = duckdb.connect(ddb, read_only=True)
+
+ benchmark(lambda sql: con.sql(sql).to_arrow_table(), sql)
+
+
+def test_ibis_duckdb_to_pyarrow(benchmark, sql, ddb) -> None:
+ pytest.importorskip("duckdb")
+ pytest.importorskip("duckdb_engine")
+
+ con = ibis.duckdb.connect(ddb, read_only=True)
+
+ expr = con.sql(sql)
+ benchmark(expr.to_pyarrow)
|
|
ci: reenable doctest builds (#9353)
|
4769ee5ecb8f10f17d10749a0228b6eb4d094a86
|
ci
|
https://github.com/ibis-project/ibis/commit/4769ee5ecb8f10f17d10749a0228b6eb4d094a86
|
reenable doctest builds (#9353)
|
diff --git a/ibis-main.yml b/ibis-main.yml
index 6c8952e..7384c88 100644
--- a/ibis-main.yml
+++ b/ibis-main.yml
@@ -128,8 +128,6 @@ jobs:
run: poetry run python -c 'import shapely.geometry, duckdb'
test_doctests:
- # FIXME(kszucs): re-enable this build
- if: false
name: Doctests
runs-on: ubuntu-latest
steps:
@@ -147,7 +145,7 @@ jobs:
uses: actions/setup-python@v5
id: install_python
with:
- python-version: "3.12"
+ python-version: "3.10"
- name: install poetry
run: pip install 'poetry==1.8.3'
|
|
fix: `where -> were` typo fix. (#560)
|
0eca94d84bd82f2083b41acdb316edce54365f11
|
fix
|
https://github.com/Byron/gitoxide/commit/0eca94d84bd82f2083b41acdb316edce54365f11
|
`where -> were` typo fix. (#560)
|
diff --git a/write.rs b/write.rs
index 1118b14..292556c 100644
--- a/write.rs
+++ b/write.rs
@@ -327,7 +327,7 @@ impl section::Segment {
if *conventional_count == 1 { "was" } else { "were" }
)?;
if unique_issues.is_empty() {
- writeln!(out, " - 0 issues like '(#ID)' where seen in commit messages")?;
+ writeln!(out, " - 0 issues like '(#ID)' were seen in commit messages")?;
} else {
writeln!(
out,
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a10ab12..e27af59 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,7 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- 1 commit contributed to the release.
- 0 commits were understood as [conventional](https://www.conventionalcommits.org).
- - 0 issues like '(#ID)' where seen in commit messages
+ - 0 issues like '(#ID)' were seen in commit messages
### Commit Details
|
|
chore: update dependencies
|
2ee33a66feacd52b3fa651a1dfbd32b0412949ab
|
chore
|
https://github.com/mikro-orm/mikro-orm/commit/2ee33a66feacd52b3fa651a1dfbd32b0412949ab
|
update dependencies
|
diff --git a/package.json b/package.json
index 89d60f1..8e7a6fa 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "mikro-orm",
- "version": "2.0.0-alpha.13",
+ "version": "2.0.0-rc",
"description": "Simple typescript ORM for node.js based on data-mapper, unit-of-work and identity-map patterns. Supports MongoDB, MySQL and SQLite databases as well as usage with vanilla JS.",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
@@ -76,7 +76,7 @@
"fast-deep-equal": "^2.0.1",
"globby": "^9.1.0",
"node-request-context": "^1.0.5",
- "ts-morph": "^1.2.0",
+ "ts-morph": "^1.3.0",
"typescript": "^3.3.3",
"uuid": "^3.3.2"
},
@@ -91,15 +91,15 @@
"@types/clone": "^0.1.30",
"@types/globby": "^8.0.0",
"@types/jest": "^24.0.9",
- "@types/mongodb": "^3.1.19",
+ "@types/mongodb": "^3.1.20",
"@types/mysql2": "types/mysql2",
- "@types/node": "^11.9.6",
+ "@types/node": "^11.10.4",
"@types/uuid": "^3.4.4",
"codacy-coverage": "^3.4.0",
"coveralls": "^3.0.3",
"husky": "^1.3.1",
"jest": "^24.1.0",
- "lint-staged": "^8.1.4",
+ "lint-staged": "^8.1.5",
"mongodb": "^3.1.13",
"mysql2": "^1.6.5",
"rimraf": "^2.6.3",
@@ -107,6 +107,6 @@
"sqlite": "^3.0.2",
"ts-jest": "^24.0.0",
"ts-node": "^8.0.2",
- "tslint": "^5.13.0"
+ "tslint": "^5.13.1"
}
}
|
|
chore: replace `quick-error` with `thiserror`
This increases the compile time of the crate alone if there is no proc-macro
in the dependency tree, but will ever so slightly improve compile times for `gix`
as a whole.
|
cce96ee1382d3d56d77820a2aba6e2d17b52f91c
|
chore
|
https://github.com/Byron/gitoxide/commit/cce96ee1382d3d56d77820a2aba6e2d17b52f91c
|
replace `quick-error` with `thiserror`
This increases the compile time of the crate alone if there is no proc-macro
in the dependency tree, but will ever so slightly improve compile times for `gix`
as a whole.
|
diff --git a/Cargo.lock b/Cargo.lock
index 7fe410e..9958d11 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1622,9 +1622,9 @@ dependencies = [
"once_cell",
"parking_lot 0.12.1",
"prodash",
- "quick-error 2.0.1",
"sha1",
"sha1_smol",
+ "thiserror",
"walkdir",
]
diff --git a/Cargo.toml b/Cargo.toml
index 8a18211..52bd064 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -43,7 +43,7 @@ crc32 = ["crc32fast"]
## and reduced performance is acceptable. **zlib-stock** can be used if dynamic linking of an external zlib library is desired or if cmake is not available.
## Note that a competitive Zlib implementation is critical to `gitoxide's` object database performance.
## Additional backends are supported, each of which overriding the default Rust backend.
-zlib = ["flate2", "flate2/rust_backend", "quick-error"]
+zlib = ["flate2", "flate2/rust_backend", "thiserror"]
## Use zlib-ng (libz-ng-sys) with native API (no compat mode) that can co-exist with system libz.
zlib-ng= ["flate2/zlib-ng"]
## Use a C-based backend which can compress and decompress significantly faster than the other options.
@@ -125,7 +125,7 @@ bytes = { version = "1.0.0", optional = true }
# zlib module
flate2 = { version = "1.0.17", optional = true, default-features = false }
-quick-error = { version = "2.0.0", optional = true }
+thiserror = { version = "1.0.38", optional = true }
## If enabled, OnceCell will be made available for interior mutability either in sync or unsync forms.
once_cell = { version = "1.13.0", optional = true }
diff --git a/mod.rs b/mod.rs
index b48df2b..0bc1bd3 100644
--- a/mod.rs
+++ b/mod.rs
@@ -2,24 +2,16 @@ pub use flate2::{Decompress, Status};
/// non-streaming interfaces for decompression
pub mod inflate {
- use quick_error::quick_error;
- quick_error! {
- /// The error returned by various [Inflate methods][super::Inflate]
- #[allow(missing_docs)]
- #[derive(Debug)]
- pub enum Error {
- WriteInflated(err: std::io::Error) {
- display("Could not write all bytes when decompressing content")
- from()
- }
- Inflate(err: flate2::DecompressError) {
- display("Could not decode zip stream, status was '{:?}'", err)
- from()
- }
- Status(status: flate2::Status) {
- display("The zlib status indicated an error, status was '{:?}'", status)
- }
- }
+ /// The error returned by various [Inflate methods][super::Inflate]
+ #[derive(Debug, thiserror::Error)]
+ #[allow(missing_docs)]
+ pub enum Error {
+ #[error("Could not write all bytes when decompressing content")]
+ WriteInflated(#[from] std::io::Error),
+ #[error("Could not decode zip stream, status was '{0:?}'")]
+ Inflate(#[from] flate2::DecompressError),
+ #[error("The zlib status indicated an error, status was '{0:?}'")]
+ Status(flate2::Status),
}
}
|
|
build: updated versions
|
0b9cb35626036ccc4d41909c1d6c0e43c5f15c60
|
build
|
https://github.com/tsparticles/tsparticles/commit/0b9cb35626036ccc4d41909c1d6c0e43c5f15c60
|
updated versions
|
diff --git a/package.dist.json b/package.dist.json
index 8c7fa20..440b37c 100644
--- a/package.dist.json
+++ b/package.dist.json
@@ -99,7 +99,7 @@
"./package.json": "./package.json"
},
"dependencies": {
- "@tsparticles/engine": "^"
+ "@tsparticles/engine": "^3.5.0"
},
"publishConfig": {
"access": "public"
|
|
fix: test failure
|
3df896e1c699dfcf6f206081c1f8c2b12b8f1a84
|
fix
|
https://github.com/erg-lang/erg/commit/3df896e1c699dfcf6f206081c1f8c2b12b8f1a84
|
test failure
|
diff --git a/test.rs b/test.rs
index d3d6852..57891a6 100644
--- a/test.rs
+++ b/test.rs
@@ -184,7 +184,7 @@ fn test_tolerant_completion() -> Result<(), Box<dyn std::error::Error>> {
let resp = client.request_completion(uri.raw(), 2, 10, ".")?;
if let Some(CompletionResponse::Array(items)) = resp {
assert!(items.len() >= 10);
- assert!(items.iter().any(|item| item.label == "tqdm"));
+ assert!(items.iter().any(|item| item.label == "pi"));
Ok(())
} else {
Err(format!("not items: {resp:?}").into())
diff --git a/tolerant_completion.er b/tolerant_completion.er
index f3ac76b..c9877a0 100644
--- a/tolerant_completion.er
+++ b/tolerant_completion.er
@@ -1,6 +1,6 @@
-tqdm = pyimport "tqdm"
+math = pyimport "math"
-f _: tqdm
+f _: math
i = 1
s = "a"
g() = None + i s i
diff --git a/build.rs b/build.rs
index 37eba19..02933af 100644
--- a/build.rs
+++ b/build.rs
@@ -31,6 +31,12 @@ fn main() -> std::io::Result<()> {
copy_dir(&erg_path, "lib").unwrap_or_else(|_| {
eprintln!("failed to copy the std library to {erg_path}");
});
+ let pkgs_path = path::Path::new(&erg_path).join("lib").join("pkgs");
+ if !pkgs_path.exists() {
+ fs::create_dir(&pkgs_path).unwrap_or_else(|_| {
+ eprintln!("failed to create the directory: {}", pkgs_path.display());
+ });
+ }
Ok(())
}
|
|
build: try fixing publish issues
|
e14dc55a9fdf5368faaaf8ab2c01012eda8c2a39
|
build
|
https://github.com/tsparticles/tsparticles/commit/e14dc55a9fdf5368faaaf8ab2c01012eda8c2a39
|
try fixing publish issues
|
diff --git a/package.json b/package.json
index 445fc32..891a523 100644
--- a/package.json
+++ b/package.json
@@ -19,6 +19,9 @@
"ini": "^2.0.0",
"lerna": "^4.0.0"
},
+ "resolutions": {
+ "npm-packlist": "1.1.12"
+ },
"husky": {
"hooks": {
"commit-msg": "commitlint -E HUSKY_GIT_PARAMS"
|
|
fix: make `_stopAnimation` clear keys from the prop cache
This ensures that future `_diff` calls return true, which is required for starting animations.
|
0e7d65d367fc3d6af7555e079f6306591972d0d7
|
fix
|
https://github.com/pmndrs/react-spring/commit/0e7d65d367fc3d6af7555e079f6306591972d0d7
|
make `_stopAnimation` clear keys from the prop cache
This ensures that future `_diff` calls return true, which is required for starting animations.
|
diff --git a/Controller.ts b/Controller.ts
index 192affe..60c36c7 100644
--- a/Controller.ts
+++ b/Controller.ts
@@ -573,11 +573,20 @@ class Controller<State extends object = any> {
animatedValues = toArray(animated.getPayload() as any)
}
+ // Replace the animation config with a lighter object
this.animations[key] = { key, animated, animatedValues } as any
+
+ // Tell the frameloop: "these animations are done"
animatedValues.forEach(v => (v.done = true))
- // Prevent delayed updates to this key.
+ // Prevent delayed updates to this key
this.timestamps['to.' + key] = now()
+ this.timestamps['from.' + key] = now()
+
+ // Clear this key from the prop cache, so future diffs are guaranteed
+ const { to, from } = this.props
+ if (is.obj(to)) delete to[key]
+ if (from) delete from[key]
}
}
|
End of preview. Expand
in Data Studio
No dataset card yet
- Downloads last month
- 5