Dataset Viewer
Auto-converted to Parquet
| Column | Type | Min length | Max length |
| :----- | :--- | ---------: | ---------: |
| Repository Name | string | 1 | 28 |
| Git URL | string | 26 | 64 |
| SHA | string | 40 | 40 |
| Author | string | 2 | 37 |
| Commit Date | string | 19 | 19 |
| Description | string | 13 | 160 |
| Body | string | 8 | 30.2k |
| Full Commit Message | string | 36 | 42.6k |
| Git Diff | string | 22 | 22.6M |
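Because the dataset is auto-converted to Parquet, it can be read directly with standard tooling. Below is a minimal loading sketch; the repo id `org/commit-diffs` is hypothetical, and the column names are assumed to match the schema table above.

```python
# Minimal loading sketch. The repo id "org/commit-diffs" is hypothetical;
# substitute the real dataset id. Column names are assumed to match the
# schema table above.
from datasets import load_dataset

ds = load_dataset("org/commit-diffs", split="train")

# Each row mirrors the schema: repository name, Git URL, SHA, author,
# commit date, description, body, full commit message, and diff.
row = ds[0]
print(row["Repository Name"], row["SHA"], row["Commit Date"])
```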
cloud-hypervisor
https://github.com/cloud-hypervisor/cloud-hypervisor
0eba2d1b817fd6fb0c2691b1ee3f950b66ce76a2
dependabot[bot]
2024-01-04 05:24:23
build: Bump micro_http from `a4d632f` to `e75dfa1`
Bumps [micro_http](https://github.com/firecracker-microvm/micro-http) from `a4d632f` to `e75dfa1`. - [Commits](https://github.com/firecracker-microvm/micro-http/compare/a4d632f2c5ea45712c0d2002dc909a63879e85c3...e75dfa1eeea23b69caa7407bc2c3a76d7b7262fb) ---
build: Bump micro_http from `a4d632f` to `e75dfa1` Bumps [micro_http](https://github.com/firecracker-microvm/micro-http) from `a4d632f` to `e75dfa1`. - [Commits](https://github.com/firecracker-microvm/micro-http/compare/a4d632f2c5ea45712c0d2002dc909a63879e85c3...e75dfa1eeea23b69caa7407bc2c3a76d7b7262fb) --- updated-dependencies: - dependency-name: micro_http dependency-type: direct:production ... Signed-off-by: dependabot[bot] <[email protected]>
diff --git a/Cargo.lock b/Cargo.lock index acd64a2173..42829099de 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -92,7 +92,7 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" name = "api_client" version = "0.1.0" dependencies = [ - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -120,7 +120,7 @@ dependencies = [ "vm-fdt", "vm-memory", "vm-migration", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -354,7 +354,7 @@ dependencies = [ "virtio-queue", "vm-memory", "vm-virtio", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -463,7 +463,7 @@ dependencies = [ "tracer", "vm-memory", "vmm", - "vmm-sys-util", + "vmm-sys-util 0.11.1", "wait-timeout", "zbus", ] @@ -603,7 +603,7 @@ dependencies = [ "vm-device", "vm-memory", "vm-migration", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1023,7 +1023,7 @@ dependencies = [ "thiserror", "vfio-ioctls", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1150,7 +1150,7 @@ source = "git+https://github.com/cloud-hypervisor/kvm-bindings?branch=ch-v0.6.0- dependencies = [ "serde", "serde_derive", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1160,7 +1160,7 @@ source = "git+https://github.com/rust-vmm/kvm-ioctls?branch=main#23a3bb045a467e6 dependencies = [ "kvm-bindings", "libc", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1262,10 +1262,10 @@ dependencies = [ [[package]] name = "micro_http" version = "0.1.0" -source = "git+https://github.com/firecracker-microvm/micro-http?branch=main#a4d632f2c5ea45712c0d2002dc909a63879e85c3" +source = "git+https://github.com/firecracker-microvm/micro-http?branch=main#e75dfa1eeea23b69caa7407bc2c3a76d7b7262fb" dependencies = [ "libc", - "vmm-sys-util", + "vmm-sys-util 0.12.1", ] [[package]] @@ -1295,7 +1295,7 @@ dependencies = [ "libc", "serde", "serde_derive", - "vmm-sys-util", + "vmm-sys-util 0.11.1", "zerocopy", ] @@ -1306,7 +1306,7 @@ source = "git+https://github.com/rust-vmm/mshv?branch=main#0dd4d3452a7f2e95199f4 dependencies = [ "libc", "mshv-bindings", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1322,7 +1322,7 @@ dependencies = [ name = "net_gen" version = "0.1.0" dependencies = [ - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1347,7 +1347,7 @@ dependencies = [ "virtio-queue", "vm-memory", "vm-virtio", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1534,7 +1534,7 @@ dependencies = [ "vm-device", "vm-memory", "vm-migration", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -1784,7 +1784,7 @@ dependencies = [ "libc", "log", "thiserror", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2162,7 +2162,7 @@ dependencies = [ "serde", "serde_json", "ssh2", - "vmm-sys-util", + "vmm-sys-util 0.11.1", "wait-timeout", ] @@ -2219,7 +2219,7 @@ dependencies = [ "log", "net_gen", "thiserror", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2327,7 +2327,7 @@ dependencies = [ "serde_derive", "syn 1.0.109", "versionize_derive", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2345,7 +2345,7 @@ name = "vfio-bindings" version = "0.4.0" source = "git+https://github.com/rust-vmm/vfio?branch=main#59c604fa6e42080f0a47c124ba29454fe4cb7475" dependencies = [ - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2363,7 +2363,7 @@ dependencies = [ "thiserror", "vfio-bindings", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2380,7 +2380,7 @@ dependencies = [ "thiserror", "vfio-bindings", "vm-memory", - 
"vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2392,7 +2392,7 @@ dependencies = [ "bitflags 2.4.1", "libc", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2407,7 +2407,7 @@ dependencies = [ "virtio-bindings", "virtio-queue", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2426,7 +2426,7 @@ dependencies = [ "virtio-bindings", "virtio-queue", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2444,7 +2444,7 @@ dependencies = [ "vhost-user-backend", "virtio-bindings", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2484,7 +2484,7 @@ dependencies = [ "vm-memory", "vm-migration", "vm-virtio", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2496,7 +2496,7 @@ dependencies = [ "log", "virtio-bindings", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2518,7 +2518,7 @@ dependencies = [ "thiserror", "vfio-ioctls", "vm-memory", - "vmm-sys-util", + "vmm-sys-util 0.11.1", ] [[package]] @@ -2614,7 +2614,7 @@ dependencies = [ "vm-memory", "vm-migration", "vm-virtio", - "vmm-sys-util", + "vmm-sys-util 0.11.1", "zbus", "zerocopy", ] @@ -2631,6 +2631,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "vmm-sys-util" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1435039746e20da4f8d507a72ee1b916f7b4b05af7a91c093d2c6561934ede" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + [[package]] name = "wait-timeout" version = "0.2.0"
revanced-patches
https://github.com/ReVanced/revanced-patches
184b403df3d5285aabbe8481fbc52ea32d6e94cb
semantic-release-bot
2023-01-28 21:53:20
chore(release): 2.157.1-dev.2 [skip ci]
## [2.157.1-dev.2](https://github.com/revanced/revanced-patches/compare/v2.157.1-dev.1...v2.157.1-dev.2) (2023-01-28) ### Bug Fixes * **youtube:** resolve duplicate preference keys ([#1550](https://github.com/revanced/revanced-patches/issues/1550)) ([aafdb89](https://github.com/revanced/revanced-patches/commit/aafdb891b2f0f243cb2d997a38ab3e6a7b46aba8))
chore(release): 2.157.1-dev.2 [skip ci] ## [2.157.1-dev.2](https://github.com/revanced/revanced-patches/compare/v2.157.1-dev.1...v2.157.1-dev.2) (2023-01-28) ### Bug Fixes * **youtube:** resolve duplicate preference keys ([#1550](https://github.com/revanced/revanced-patches/issues/1550)) ([aafdb89](https://github.com/revanced/revanced-patches/commit/aafdb891b2f0f243cb2d997a38ab3e6a7b46aba8))
diff --git a/CHANGELOG.md b/CHANGELOG.md index d1ca8c2de5..becd387e8c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,10 @@ +## [2.157.1-dev.2](https://github.com/revanced/revanced-patches/compare/v2.157.1-dev.1...v2.157.1-dev.2) (2023-01-28) + + +### Bug Fixes + +* **youtube:** resolve duplicate preference keys ([#1550](https://github.com/revanced/revanced-patches/issues/1550)) ([aafdb89](https://github.com/revanced/revanced-patches/commit/aafdb891b2f0f243cb2d997a38ab3e6a7b46aba8)) + ## [2.157.1-dev.1](https://github.com/revanced/revanced-patches/compare/v2.157.0...v2.157.1-dev.1) (2023-01-28) diff --git a/gradle.properties b/gradle.properties index 423153d618..eeda220f11 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,2 @@ kotlin.code.style = official -version = 2.157.1-dev.1 +version = 2.157.1-dev.2
node
https://github.com/nodejs/node
b9a460e6ebff2f255495396884cb1d1d49838f4f
Michael Dawson
2022-02-18 01:49:39
build: allow manual run of feature request action
add "workflow_dispatch:" so that action can be run manuallyl for debugging.
build: allow manual run of feature request action add "workflow_dispatch:" so that the action can be run manually for debugging. Signed-off-by: Michael Dawson <[email protected]> PR-URL: https://github.com/nodejs/node/pull/42037 Reviewed-By: Mestery <[email protected]> Reviewed-By: Richard Lau <[email protected]>
diff --git a/.github/workflows/close-stale-feature-requests.yml b/.github/workflows/close-stale-feature-requests.yml index 898dcbdecbb4b0..3e9147fbdd371a 100644 --- a/.github/workflows/close-stale-feature-requests.yml +++ b/.github/workflows/close-stale-feature-requests.yml @@ -1,5 +1,6 @@ name: Close stale feature requests on: + workflow_dispatch: schedule: # Run every day at 1:00 AM UTC. - cron: 0 1 * * *
vault
https://github.com/hashicorp/vault
357f5f7a0c6e0669b411b4aaa641aea41481218a
Brian Shumate
2019-12-03 00:25:59
Docs: Update Telemetry (#7959)
- Add wal_loadWAL metric - Create section for Merkle Tree and WAL metrics - Remove trailing spaces
Docs: Update Telemetry (#7959) - Add wal_loadWAL metric - Create section for Merkle Tree and WAL metrics - Remove trailing spaces
diff --git a/website/source/docs/internals/telemetry.html.md b/website/source/docs/internals/telemetry.html.md index 6fdc642babd1..424396b27d12 100644 --- a/website/source/docs/internals/telemetry.html.md +++ b/website/source/docs/internals/telemetry.html.md @@ -128,13 +128,11 @@ These metrics relate to policies and tokens. | `vault.expire.renew-token` | Time taken to renew a token which does not need to invoke a logical backend | ms | summary | | `vault.expire.register` | Time taken for register operations | ms | summary | -Thes operations take a request and response with an associated lease and register a lease entry with lease ID +These operations take a request and response with an associated lease and register a lease entry with lease ID | Metric | Description | Unit | Type | | :----- | :---------- | :--- | :--- | | `vault.expire.register-auth` | Time taken for register authentication operations which create lease entries without lease ID | ms | summary | -| `vault.merkle_flushdirty` | Time taken to flush any dirty pages to cold storage | ms | summary | -| `vault.merkle_savecheckpoint` | Time taken to save the checkpoint | ms | summary | | `vault.policy.get_policy` | Time taken to get a policy | ms | summary | | `vault.policy.list_policies` | Time taken to list policies | ms | summary | | `vault.policy.delete_policy` | Time taken to delete a policy | ms | summary | @@ -145,11 +143,6 @@ Thes operations take a request and response with an associated lease and registe | `vault.token.revoke` | Time taken to revoke a token | ms | summary | | `vault.token.revoke-tree` | Time taken to revoke a token tree | ms | summary | | `vault.token.store` | Time taken to store an updated token entry without writing to the secondary index | ms | summary | -| `vault.wal_deletewals` | Time taken to delete a Write Ahead Log (WAL) | ms | summary | -| `vault.wal_gc_deleted` | Number of Write Ahead Logs (WAL) deleted during each garbage collection run | WAL | counter | -| `vault.wal_gc_total` | Total Number of Write Ahead Logs (WAL) on disk | WAL | counter | -| `vault.wal_persistwals` | Time taken to persist a Write Ahead Log (WAL) | ms | summary | -| `vault.wal_flushready` | Time taken to flush a ready Write Ahead Log (WAL) to storage | ms | summary | ## Auth Methods Metrics @@ -168,13 +161,28 @@ These metrics relate to supported authentication methods. 
| `vault.route.rollback.secret` | Time taken to perform a route rollback operation for the [K/V secret backend][kv-secrets-engine] | ms | summary | | `vault.route.rollback.sys` | Time taken to perform a route rollback operation for the system backend | ms | summary | +## Merkle Tree and Write Ahead Log Metrics + +These metrics relate to internal operations on Merkle Trees and Write Ahead Logs (WAL) + +| Metric | Description | Unit | Type | +| :----- | :---------- | :--- | :--- | +| `vault.merkle_flushdirty` | Time taken to flush any dirty pages to cold storage | ms | summary | +| `vault.merkle_savecheckpoint` | Time taken to save the checkpoint | ms | summary | +| `vault.wal_deletewals` | Time taken to delete a Write Ahead Log (WAL) | ms | summary | +| `vault.wal_gc_deleted` | Number of Write Ahead Logs (WAL) deleted during each garbage collection run | WAL | counter | +| `vault.wal_gc_total` | Total Number of Write Ahead Logs (WAL) on disk | WAL | counter | +| `vault.wal_loadWAL` | Time taken to load a Write Ahead Log (WAL) | ms | summary | +| `vault.wal_persistwals` | Time taken to persist a Write Ahead Log (WAL) | ms | summary | +| `vault.wal_flushready` | Time taken to flush a ready Write Ahead Log (WAL) to storage | ms | summary | + ## Replication Metrics These metrics relate to [Vault Enterprise Replication](https://www.vaultproject.io/docs/enterprise/replication/index.html). | Metric | Description | Unit | Type | | :----- | :---------- | :--- | :--- | -| `logshipper.streamWALs.missing_guard` | Number of incidences where the starting Merkle Tree index used to begin streaming WAL entries is not matched/found | missing guards | counter | +| `logshipper.streamWALs.missing_guard` | Number of incidences where the starting Merkle Tree index used to begin streaming WAL entries is not matched/found | missing guards | counter | | `logshipper.streamWALs.guard_found` | Number of incidences where the starting Merkle Tree index used to begin streaming WAL entries is matched/found | found guards | counter | | `replication.fetchRemoteKeys` | Time taken to fetch keys from a remote cluster participating in replication prior to Merkle Tree based delta generation | ms | summary | | `replication.merkleDiff` | Time taken to perform a Merkle Tree based delta generation between the clusters participating in replication | ms | summary |
unleash
https://github.com/Unleash/unleash
f8e34564aa9e2348878262296da00bfbeda36a39
Nuno Góis
2023-01-27 20:32:39
fix: delete variant button tooltip (#3014)
Small fix on the tooltip for the "delete variant" button in the new environment variants form. ![image](https://user-images.githubusercontent.com/14320932/215116642-0e78a2a7-71d6-4fa1-9138-6133d71ef091.png)
fix: delete variant button tooltip (#3014) Small fix on the tooltip for the "delete variant" button in the new environment variants form. ![image](https://user-images.githubusercontent.com/14320932/215116642-0e78a2a7-71d6-4fa1-9138-6133d71ef091.png)
diff --git a/frontend/src/component/feature/FeatureView/FeatureVariants/FeatureEnvironmentVariants/EnvironmentVariantsModal/VariantForm/VariantForm.tsx b/frontend/src/component/feature/FeatureView/FeatureVariants/FeatureEnvironmentVariants/EnvironmentVariantsModal/VariantForm/VariantForm.tsx index 6caf70b213bb..b8a05773abe6 100644 --- a/frontend/src/component/feature/FeatureView/FeatureVariants/FeatureEnvironmentVariants/EnvironmentVariantsModal/VariantForm/VariantForm.tsx +++ b/frontend/src/component/feature/FeatureView/FeatureVariants/FeatureEnvironmentVariants/EnvironmentVariantsModal/VariantForm/VariantForm.tsx @@ -10,6 +10,7 @@ import { InputAdornment, styled, Switch, + Tooltip, } from '@mui/material'; import { ConditionallyRender } from 'component/common/ConditionallyRender/ConditionallyRender'; import { IPayload } from 'interfaces/featureToggle'; @@ -30,7 +31,7 @@ const StyledVariantForm = styled('div')(({ theme }) => ({ borderRadius: theme.shape.borderRadiusLarge, })); -const StyledDeleteButton = styled(IconButton)(({ theme }) => ({ +const StyledDeleteButtonTooltip = styled(Tooltip)(({ theme }) => ({ position: 'absolute', top: theme.spacing(2), right: theme.spacing(2), @@ -293,12 +294,23 @@ export const VariantForm = ({ return ( <StyledVariantForm> - <StyledDeleteButton - onClick={() => removeVariant(variant.id)} - disabled={isProtectedVariant(variant)} + <StyledDeleteButtonTooltip + arrow + title={ + isProtectedVariant(variant) + ? 'You need to have at least one variable variant' + : 'Delete variant' + } > - <Delete /> - </StyledDeleteButton> + <div> + <IconButton + onClick={() => removeVariant(variant.id)} + disabled={isProtectedVariant(variant)} + > + <Delete /> + </IconButton> + </div> + </StyledDeleteButtonTooltip> <StyledTopRow> <StyledNameContainer> <StyledLabel>Variant name</StyledLabel>
rspack
https://github.com/web-infra-dev/rspack
b89df1d8c8c9c6cb5c60acf89bb8e6d54a1568c6
neverland
2024-11-04 13:53:55
perf(cli): enable Node.js compile cache (#8331)
* perf(cli): enable Node.js compile cache * fix: no need for NODE_DISABLE_COMPILE_CACHE
perf(cli): enable Node.js compile cache (#8331) * perf(cli): enable Node.js compile cache * fix: no need for NODE_DISABLE_COMPILE_CACHE
diff --git a/packages/rspack-cli/bin/rspack.js b/packages/rspack-cli/bin/rspack.js index 1b1c557b82ac..1510c6b18dc5 100755 --- a/packages/rspack-cli/bin/rspack.js +++ b/packages/rspack-cli/bin/rspack.js @@ -1,4 +1,17 @@ #!/usr/bin/env node +const nodeModule = require("node:module"); + +// enable on-disk code caching of all modules loaded by Node.js +// requires Nodejs >= 22.8.0 +const { enableCompileCache } = nodeModule; +if (enableCompileCache) { + try { + enableCompileCache(); + } catch { + // ignore errors + } +} + const { RspackCLI } = require("../dist/index"); async function runCLI() {
redwood
https://github.com/redwoodjs/redwood
fb306fab56e0ab86717e9f5823c5874fe0b39883
Dominic Saadi
2023-05-09 05:46:14
chore(docs): backport recent fixes to v5 (#8263)
* backport #8252 * backport #8243
chore(docs): backport recent fixes to v5 (#8263) * backport #8252 * backport #8243
diff --git a/docs/versioned_docs/version-5.0/how-to/build-dashboards-fast-with-tremor.md b/docs/versioned_docs/version-5.0/how-to/build-dashboards-fast-with-tremor.md index c772a6a32723..36a149b5de4b 100644 --- a/docs/versioned_docs/version-5.0/how-to/build-dashboards-fast-with-tremor.md +++ b/docs/versioned_docs/version-5.0/how-to/build-dashboards-fast-with-tremor.md @@ -1,6 +1,6 @@ --- -description: "Learn how to build dashboards fast using the termor React library of data visualization components." +description: "Learn how to build dashboards fast using the Tremor React library of data visualization components." --- # Build Dashboards Fast with Tremor diff --git a/docs/versioned_docs/version-5.0/tutorial/chapter4/authentication.md b/docs/versioned_docs/version-5.0/tutorial/chapter4/authentication.md index 89aa7591595e..0ad9d366f940 100644 --- a/docs/versioned_docs/version-5.0/tutorial/chapter4/authentication.md +++ b/docs/versioned_docs/version-5.0/tutorial/chapter4/authentication.md @@ -490,7 +490,7 @@ And after clicking "Signup" you should end up back on the homepage, where everyt ![Posts admin](https://user-images.githubusercontent.com/300/146465485-c169a4b8-f398-47ec-8412-4fc15a666976.png) -Awesome! Signing up will automatically log you in (although this behavior [can be changed](../../authentication.md#signuphandler)) and if you look in the code for the `SignupPage` you'll see where the redirect to the homepage takes place (hint: check out line 21). +Awesome! Signing up will automatically log you in (although this behavior [can be changed](../../auth/dbauth.md#signuphandler)) and if you look in the code for the `SignupPage` you'll see where the redirect to the homepage takes place (hint: check out line 21). ## Add a Logout Link
zitadel
https://github.com/zitadel/zitadel
c37d55b069f268a87c4ec2c2287345ab2c261324
Silvan
2020-09-11 18:21:50
fix: User grant id key (#731)
* fix: log id and error message * fix: user grant by id correct search field
fix: User grant id key (#731) * fix: log id and error message * fix: user grant by id correct search field
diff --git a/internal/token/repository/view/token.go b/internal/token/repository/view/token.go index 5a90ad9e1fc..24ae43d8a64 100644 --- a/internal/token/repository/view/token.go +++ b/internal/token/repository/view/token.go @@ -1,16 +1,15 @@ package view import ( - global_model "github.com/caos/zitadel/internal/model" "time" - "github.com/jinzhu/gorm" - "github.com/lib/pq" - "github.com/caos/zitadel/internal/errors" + global_model "github.com/caos/zitadel/internal/model" token_model "github.com/caos/zitadel/internal/token/model" "github.com/caos/zitadel/internal/token/repository/view/model" "github.com/caos/zitadel/internal/view/repository" + "github.com/jinzhu/gorm" + "github.com/lib/pq" ) func TokenByID(db *gorm.DB, table, tokenID string) (*model.Token, error) { @@ -18,7 +17,7 @@ func TokenByID(db *gorm.DB, table, tokenID string) (*model.Token, error) { query := repository.PrepareGetByKey(table, model.TokenSearchKey(token_model.TokenSearchKeyTokenID), tokenID) err := query(db, token) if errors.IsNotFound(err) { - return nil, errors.ThrowNotFound(nil, "VIEW-Nqwf1", "Errors.Token.NotFound") + return nil, errors.ThrowNotFound(nil, "VIEW-6ub3p", "Errors.Token.NotFound") } return token, err } diff --git a/internal/usergrant/model/user_grant_view.go b/internal/usergrant/model/user_grant_view.go index c75495cc75b..e4776ea0b47 100644 --- a/internal/usergrant/model/user_grant_view.go +++ b/internal/usergrant/model/user_grant_view.go @@ -48,6 +48,7 @@ const ( UserGrantSearchKeyGrantID UserGrantSearchKeyOrgName UserGrantSearchKeyRoleKey + UserGrantSearchKeyID ) type UserGrantSearchQuery struct { diff --git a/internal/usergrant/repository/view/model/user_grant_query.go b/internal/usergrant/repository/view/model/user_grant_query.go index b712d81d2d4..a3c3e063005 100644 --- a/internal/usergrant/repository/view/model/user_grant_query.go +++ b/internal/usergrant/repository/view/model/user_grant_query.go @@ -65,6 +65,8 @@ func (key UserGrantSearchKey) ToColumnName() string { return UserGrantKeyOrgName case grant_model.UserGrantSearchKeyRoleKey: return UserGrantKeyRole + case grant_model.UserGrantSearchKeyID: + return UserGrantKeyID default: return "" } diff --git a/internal/usergrant/repository/view/user_grant_view.go b/internal/usergrant/repository/view/user_grant_view.go index 57047479362..a3d2d87d356 100644 --- a/internal/usergrant/repository/view/user_grant_view.go +++ b/internal/usergrant/repository/view/user_grant_view.go @@ -10,13 +10,13 @@ import ( ) func UserGrantByID(db *gorm.DB, table, grantID string) (*model.UserGrantView, error) { - user := new(model.UserGrantView) - query := repository.PrepareGetByKey(table, model.UserGrantSearchKey(grant_model.UserGrantSearchKeyGrantID), grantID) - err := query(db, user) + grant := new(model.UserGrantView) + query := repository.PrepareGetByKey(table, model.UserGrantSearchKey(grant_model.UserGrantSearchKeyID), grantID) + err := query(db, grant) if caos_errs.IsNotFound(err) { - return nil, caos_errs.ThrowNotFound(nil, "VIEW-Nqwf1", "Errors.Token.NotFound") + return nil, caos_errs.ThrowNotFound(nil, "VIEW-Nqwf1", "Errors.UserGrant.NotFound") } - return user, err + return grant, err } func UserGrantByIDs(db *gorm.DB, table, resourceOwnerID, projectID, userID string) (*model.UserGrantView, error) {
components
https://github.com/angular/components
46a4db2d4fb6b23278901e6dcbd7d94a4b8b7374
Miles Malerba
2022-12-14 02:52:41
fix(material/form-field): allow getting harness by validity (#26232)
* fix(material/form-field): allow getting harness by validity * fixup! fix(material/form-field): allow getting harness by validity
fix(material/form-field): allow getting harness by validity (#26232) * fix(material/form-field): allow getting harness by validity * fixup! fix(material/form-field): allow getting harness by validity
diff --git a/src/material/form-field/testing/form-field-harness-filters.ts b/src/material/form-field/testing/form-field-harness-filters.ts index 64809f875db5..ead796b38adf 100644 --- a/src/material/form-field/testing/form-field-harness-filters.ts +++ b/src/material/form-field/testing/form-field-harness-filters.ts @@ -14,4 +14,6 @@ export interface FormFieldHarnessFilters extends BaseHarnessFilters { floatingLabelText?: string | RegExp; /** Filters based on whether the form field has error messages. */ hasErrors?: boolean; + /** Filters based on whether the form field value is valid. */ + isValid?: boolean; } diff --git a/src/material/form-field/testing/form-field-harness.ts b/src/material/form-field/testing/form-field-harness.ts index cd523d483061..3793c5f4a4ba 100644 --- a/src/material/form-field/testing/form-field-harness.ts +++ b/src/material/form-field/testing/form-field-harness.ts @@ -248,6 +248,11 @@ export class MatFormFieldHarness extends _MatFormFieldHarnessBase< 'hasErrors', options.hasErrors, async (harness, hasErrors) => (await harness.hasErrors()) === hasErrors, + ) + .addOption( + 'isValid', + options.isValid, + async (harness, isValid) => (await harness.isControlValid()) === isValid, ); } diff --git a/src/material/form-field/testing/shared.spec.ts b/src/material/form-field/testing/shared.spec.ts index a0379870a745..b4e5e0a786f9 100644 --- a/src/material/form-field/testing/shared.spec.ts +++ b/src/material/form-field/testing/shared.spec.ts @@ -197,6 +197,17 @@ export function runHarnessTests( ); }); + it('should be able to get form-field by validity', async () => { + let invalid = await loader.getAllHarnesses(formFieldHarness.with({isValid: false})); + expect(invalid.length).toBe(0); + + fixture.componentInstance.requiredControl.setValue(''); + dispatchFakeEvent(fixture.nativeElement.querySelector('#with-errors input'), 'blur'); + + invalid = await loader.getAllHarnesses(formFieldHarness.with({isValid: false})); + expect(invalid.length).toBe(1); + }); + it('should be able to get error harnesses from the form-field harness', async () => { const formFields = await loader.getAllHarnesses(formFieldHarness); expect(await formFields[1].getErrors()).toEqual([]); diff --git a/src/material/legacy-form-field/testing/form-field-harness.ts b/src/material/legacy-form-field/testing/form-field-harness.ts index a38344ddc094..535763a5cdc7 100644 --- a/src/material/legacy-form-field/testing/form-field-harness.ts +++ b/src/material/legacy-form-field/testing/form-field-harness.ts @@ -57,6 +57,11 @@ export class MatLegacyFormFieldHarness extends _MatFormFieldHarnessBase< 'hasErrors', options.hasErrors, async (harness, hasErrors) => (await harness.hasErrors()) === hasErrors, + ) + .addOption( + 'isValid', + options.isValid, + async (harness, isValid) => (await harness.isControlValid()) === isValid, ); } diff --git a/tools/public_api_guard/material/form-field-testing.md b/tools/public_api_guard/material/form-field-testing.md index 2e92006fbc4c..63bc03460e3c 100644 --- a/tools/public_api_guard/material/form-field-testing.md +++ b/tools/public_api_guard/material/form-field-testing.md @@ -28,6 +28,7 @@ export type FormFieldControlHarness = MatInputHarness | MatSelectHarness | MatDa export interface FormFieldHarnessFilters extends BaseHarnessFilters { floatingLabelText?: string | RegExp; hasErrors?: boolean; + isValid?: boolean; } // @public
Files
https://github.com/JohnSundell/Files
886e170593a8ed9c9fd9b17b87885eb906e960a2
Marco Gavelli
2022-11-27 21:13:11
Fix: Fixed issue where tags loaded slowly (#10577)
* Geee * Downgrade to version 4 * Remove upgrade from connection string * Recreate DB if version > 4
Fix: Fixed issue where tags loaded slowly (#10577) * Geee * Downgrade to version 4 * Remove upgrade from connection string * Recreate DB if version > 4
⚠️ HTTP 404: Not Found
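Note that this row's `Git Diff` cell holds a fetch-error placeholder rather than an actual patch, so downstream processing should guard against such values. A minimal filtering sketch, reusing the `ds` object from the loading example above (the helper name is hypothetical):

```python
# Drop rows whose "Git Diff" cell is an error placeholder such as
# "⚠️ HTTP 404: Not Found" instead of an actual patch.
def has_real_diff(row: dict) -> bool:
    return row.get("Git Diff", "").startswith("diff --git")

# ds = ds.filter(has_real_diff)  # `ds` from the loading sketch above
```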
deno
https://github.com/denoland/deno
8f854782b13658f169920a1a7a21b8e6b64a0c00
Bartek Iwańczuk
2023-08-10 09:31:35
fix(ext/timers): some timers are not resolved (#20055)
Fixes https://github.com/denoland/deno/issues/19866
fix(ext/timers): some timers are not resolved (#20055) Fixes https://github.com/denoland/deno/issues/19866
diff --git a/cli/tests/unit/timers_test.ts b/cli/tests/unit/timers_test.ts index c50cb779c646ff..5c076ad0903351 100644 --- a/cli/tests/unit/timers_test.ts +++ b/cli/tests/unit/timers_test.ts @@ -727,3 +727,32 @@ Deno.test({ assertEquals(output, ""); }, }); + +// Regression test for https://github.com/denoland/deno/issues/19866 +Deno.test({ + name: "regression for #19866", + fn: async () => { + const timeoutsFired = []; + + // deno-lint-ignore require-await + async function start(n: number) { + let i = 0; + const intervalId = setInterval(() => { + i++; + if (i > 2) { + clearInterval(intervalId!); + } + timeoutsFired.push(n); + }, 20); + } + + for (let n = 0; n < 100; n++) { + start(n); + } + + // 3s should be plenty of time for all the intervals to fire + // but it might still be flaky on CI. + await new Promise((resolve) => setTimeout(resolve, 3000)); + assertEquals(timeoutsFired.length, 300); + }, +}); diff --git a/ext/web/02_timers.js b/ext/web/02_timers.js index cfabdeb98b7ffd..ade1c7123cd0c4 100644 --- a/ext/web/02_timers.js +++ b/ext/web/02_timers.js @@ -243,6 +243,7 @@ function runAfterTimeout(task, millis, timerInfo) { resolved: false, prev: scheduledTimers.tail, next: null, + task, }; // Add timerObject to the end of the list. @@ -286,7 +287,7 @@ function runAfterTimeout(task, millis, timerInfo) { while (currentEntry !== null) { if (currentEntry.millis <= timerObject.millis) { currentEntry.resolved = true; - ArrayPrototypePush(timerTasks, task); + ArrayPrototypePush(timerTasks, currentEntry.task); removeFromScheduledTimers(currentEntry); if (currentEntry === timerObject) {
zinit
https://github.com/zdharma-continuum/zinit
64fa4aef3ae517afe5444b24df9603e9d1a77a55
vladislav doster
2022-11-21 04:38:42
fix: remove broken gh-r zunit test for "warp" (#430)
* remove broken gh-r zunit test for "warp" * set LC_CTYPE & LANG env variables for macOS
fix: remove broken gh-r zunit test for "warp" (#430) * remove broken gh-r zunit test for "warp" * set LC_CTYPE & LANG env variables for macOS Signed-off-by: Vladislav Doster <[email protected]>
diff --git a/tests/gh-r.zunit b/tests/gh-r.zunit index 0e6b3f6f3..a32168565 100755 --- a/tests/gh-r.zunit +++ b/tests/gh-r.zunit @@ -645,12 +645,6 @@ $kopia --version; assert $state equals 0 local volta="$ZBIN/volta"; assert "$volta" is_executable run $volta --version; assert $state equals 0 } -@test 'warp' { # Secure and simple terminal sharing - ! [[ $OSTYPE =~ 'darwin*' ]] && skip 'Warp test skipped on Linux' - run zinit lbin'!* -> warp' for spolu/warp; assert $state equals 0 - local warp="$ZBIN/warp"; assert "$warp" is_executable - run $warp --version; assert $state equals 0 -} @test 'whalebrew' { # Homebrew, but with Docker images run zinit lbin'!* -> whalebrew' for whalebrew/whalebrew; assert $state equals 0 local whalebrew="$ZBIN/whalebrew"; assert "$whalebrew" is_executable diff --git a/tests/plugins.zunit b/tests/plugins.zunit index 59f7c1c7e..e30bda8cd 100755 --- a/tests/plugins.zunit +++ b/tests/plugins.zunit @@ -4,6 +4,8 @@ @setup { zinit default-ice as'null' light-mode nocompile nocompletions ZBIN=$ZPFX/bin + export LC_CTYPE=C + export LANG=C } # @test 'nnn' {
rspack
https://github.com/web-infra-dev/rspack
061cd7879d030a40195dc36278ff45d4db1daaf2
CPunisher
2024-07-02 11:33:56
fix: align source name conflict handling (#6993)
* Remove useless intermediate var * Fix source name conflict detection * Add testcase * Fix clippy * Fix typo * Sort module_to_source_name before handling conflict * Update and align testcase with mini-css-extract-plugin * fix: directly collect HashMap ---------
fix: align source name conflict handling (#6993) * Remove useless intermediate var * Fix source name conflict detection * Add testcase * Fix clippy * Fix typo * Sort module_to_source_name before handling conflict * Update and align testcase with mini-css-extract-plugin * fix: directly collect HashMap --------- Co-authored-by: Cong-Cong Pan <[email protected]>
diff --git a/Cargo.lock b/Cargo.lock index ee22a1669730..c7a1195a9d60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3480,6 +3480,7 @@ dependencies = [ "dashmap", "derivative", "futures", + "itertools 0.12.1", "once_cell", "pathdiff", "rayon", diff --git a/crates/rspack_plugin_devtool/Cargo.toml b/crates/rspack_plugin_devtool/Cargo.toml index 588bd61728fe..35977ac2d201 100644 --- a/crates/rspack_plugin_devtool/Cargo.toml +++ b/crates/rspack_plugin_devtool/Cargo.toml @@ -12,6 +12,7 @@ async-trait = { workspace = true } dashmap = { workspace = true } derivative = { workspace = true } futures = { workspace = true } +itertools = { workspace = true } once_cell = { workspace = true } pathdiff = { workspace = true } rayon = { workspace = true } diff --git a/crates/rspack_plugin_devtool/src/lib.rs b/crates/rspack_plugin_devtool/src/lib.rs index 95ac7e6b1dce..d8ba84d3e4fd 100644 --- a/crates/rspack_plugin_devtool/src/lib.rs +++ b/crates/rspack_plugin_devtool/src/lib.rs @@ -34,6 +34,7 @@ pub struct ModuleFilenameTemplateFnCtx { pub namespace: String, } +#[derive(Debug, PartialEq, Eq, Hash)] enum ModuleOrSource { Source(String), Module(ModuleIdentifier), diff --git a/crates/rspack_plugin_devtool/src/source_map_dev_tool_plugin.rs b/crates/rspack_plugin_devtool/src/source_map_dev_tool_plugin.rs index 74add6384a03..10aff0eafa8f 100644 --- a/crates/rspack_plugin_devtool/src/source_map_dev_tool_plugin.rs +++ b/crates/rspack_plugin_devtool/src/source_map_dev_tool_plugin.rs @@ -2,6 +2,7 @@ use std::{borrow::Cow, path::Path}; use derivative::Derivative; use futures::future::{join_all, BoxFuture}; +use itertools::Itertools; use once_cell::sync::Lazy; use pathdiff::diff_paths; use rayon::prelude::*; @@ -169,8 +170,9 @@ impl SourceMapDevToolPlugin { let output_options = &compilation.options.output; let map_options = MapOptions::new(self.columns); - let mapped_sources = raw_assets - .par_iter() + let mut mapped_asstes: Vec<MappedAsset> = Vec::with_capacity(raw_assets.len()); + let mut mapped_sources = raw_assets + .into_par_iter() .map(|(file, asset)| { let is_match = match &self.test { Some(test) => test(file.to_owned()), @@ -191,167 +193,166 @@ impl SourceMapDevToolPlugin { .flatten() .collect::<Vec<_>>(); - let mut used_names_set = HashSet::<String>::default(); - let mut mapped_buffer: Vec<(String, Vec<u8>, Option<Vec<u8>>)> = - Vec::with_capacity(mapped_sources.len()); - - let mut default_filenames = match &self.module_filename_template { - ModuleFilenameTemplate::String(s) => mapped_sources - .iter() - .filter_map(|(_file, _asset, source_map)| source_map.as_ref()) - .flat_map(|source_map| source_map.sources()) - .collect::<Vec<_>>() - .par_iter() - .map(|source| { - let module_or_source = if let Some(stripped) = source.strip_prefix("webpack://") { - let source = make_paths_absolute(compilation.options.context.as_str(), stripped); - let identifier = ModuleIdentifier::from(source.clone()); - match compilation - .get_module_graph() - .module_by_identifier(&identifier) - { - Some(module) => ModuleOrSource::Module(module.identifier()), - None => ModuleOrSource::Source(source), - } - } else { - ModuleOrSource::Source(source.to_string()) - }; - Some(( - ModuleFilenameHelpers::create_filename_of_string_template( - &module_or_source, - compilation, - s, - output_options, - self.namespace.as_str(), - ), + let source_map_modules = mapped_sources + .par_iter() + .filter_map(|(_file, _asset, source_map)| source_map.as_ref()) + .flat_map(|source_map| source_map.sources()) + .map(|source| { + let module_or_source = if let 
Some(stripped) = source.strip_prefix("webpack://") { + let source = make_paths_absolute(compilation.options.context.as_str(), stripped); + let identifier = ModuleIdentifier::from(source.clone()); + match compilation + .get_module_graph() + .module_by_identifier(&identifier) + { + Some(module) => ModuleOrSource::Module(module.identifier()), + None => ModuleOrSource::Source(source), + } + } else { + ModuleOrSource::Source(source.to_string()) + }; + (source.to_string(), module_or_source) + }) + .collect::<HashMap<_, _>>(); + + let module_source_names = source_map_modules.values().collect::<Vec<_>>(); + let mut module_to_source_name = match &self.module_filename_template { + ModuleFilenameTemplate::String(s) => module_source_names + .into_par_iter() + .map(|module_or_source| { + let source_name = ModuleFilenameHelpers::create_filename_of_string_template( module_or_source, - )) + compilation, + s, + output_options, + self.namespace.as_str(), + ); + (module_or_source, source_name) }) - .collect::<Vec<_>>(), + .collect::<HashMap<_, _>>(), ModuleFilenameTemplate::Fn(f) => { - let features = mapped_sources - .iter() - .filter_map(|(_file, _asset, source_map)| source_map.as_ref()) - .flat_map(|source_map| source_map.sources()) - .map(|source| async { - let module_or_source = if let Some(stripped) = source.strip_prefix("webpack://") { - let source = make_paths_absolute(compilation.options.context.as_str(), stripped); - let identifier = ModuleIdentifier::from(source.clone()); - match compilation - .get_module_graph() - .module_by_identifier(&identifier) - { - Some(module) => ModuleOrSource::Module(module.identifier()), - None => ModuleOrSource::Source(source), - } - } else { - ModuleOrSource::Source(source.to_string()) - }; - - let filename = ModuleFilenameHelpers::create_filename_of_fn_template( - &module_or_source, + let features = module_source_names + .into_par_iter() + .map(|module_or_source| async move { + let source_name = ModuleFilenameHelpers::create_filename_of_fn_template( + module_or_source, compilation, f, output_options, self.namespace.as_str(), ) - .await; - - match filename { - Ok(filename) => Ok(Some((filename, module_or_source))), - Err(err) => Err(err), - } + .await?; + Ok((module_or_source, source_name)) }) .collect::<Vec<_>>(); join_all(features) .await .into_iter() - .collect::<Result<Vec<_>>>()? + .collect::<Result<HashMap<_, _>>>()? } }; - let mut default_filenames_index = 0; - - for (filename, asset, source_map) in mapped_sources { - let source_map_buffer = match source_map { - Some(mut source_map) => { - source_map.set_file(Some(filename.clone())); - - let sources = source_map.sources_mut(); - for source in sources { - let (source_name, module_or_source) = default_filenames[default_filenames_index] - .take() - .expect("expected a filename at the given index but found None"); - default_filenames_index += 1; - - let mut has_name = used_names_set.contains(&source_name); - if !has_name { - used_names_set.insert(source_name.clone()); - *source = Cow::from(source_name); - continue; - } - // Try the fallback name first - let mut source_name = match &self.fallback_module_filename_template { - ModuleFilenameTemplate::String(s) => { - ModuleFilenameHelpers::create_filename_of_string_template( - &module_or_source, - compilation, - s, - output_options, - self.namespace.as_str(), - ) - } - ModuleFilenameTemplate::Fn(f) => { - ModuleFilenameHelpers::create_filename_of_fn_template( - &module_or_source, - compilation, - f, - output_options, - self.namespace.as_str(), - ) - .await? 
- } - }; - - has_name = used_names_set.contains(&source_name); - if !has_name { - used_names_set.insert(source_name.clone()); - *source = Cow::from(source_name); - continue; - } + let mut used_names_set = HashSet::<String>::default(); + for (module_or_source, source_name) in + module_to_source_name + .iter_mut() + .sorted_by(|(key_a, _), (key_b, _)| { + let ident_a = match key_a { + ModuleOrSource::Module(identifier) => identifier, + ModuleOrSource::Source(source) => source.as_str(), + }; + let ident_b = match key_b { + ModuleOrSource::Module(identifier) => identifier, + ModuleOrSource::Source(source) => source.as_str(), + }; + ident_a.len().cmp(&ident_b.len()) + }) + { + let mut has_name = used_names_set.contains(source_name); + if !has_name { + used_names_set.insert(source_name.clone()); + continue; + } - // Otherwise, append stars until we have a valid name - while has_name { - source_name.push('*'); - has_name = used_names_set.contains(&source_name); - } - used_names_set.insert(source_name.clone()); - *source = Cow::from(source_name); - } - if self.no_sources { - for content in source_map.sources_content_mut() { - *content = Cow::from(String::default()); - } - } - if let Some(source_root) = &self.source_root { - source_map.set_source_root(Some(source_root.clone())); - } - let mut source_map_buffer = Vec::new(); - source_map - .to_writer(&mut source_map_buffer) - .unwrap_or_else(|e| panic!("{}", e.to_string())); - Some(source_map_buffer) + // Try the fallback name first + let mut new_source_name = match &self.fallback_module_filename_template { + ModuleFilenameTemplate::String(s) => { + ModuleFilenameHelpers::create_filename_of_string_template( + module_or_source, + compilation, + s, + output_options, + self.namespace.as_str(), + ) + } + ModuleFilenameTemplate::Fn(f) => { + ModuleFilenameHelpers::create_filename_of_fn_template( + module_or_source, + compilation, + f, + output_options, + self.namespace.as_str(), + ) + .await? 
} - None => None, }; - let mut code_buffer = Vec::new(); - asset.to_writer(&mut code_buffer).into_diagnostic()?; - mapped_buffer.push((filename.to_owned(), code_buffer, source_map_buffer)); + has_name = used_names_set.contains(&new_source_name); + if !has_name { + used_names_set.insert(new_source_name.clone()); + *source_name = new_source_name; + continue; + } + + // Otherwise, append stars until we have a valid name + while has_name { + new_source_name.push('*'); + has_name = used_names_set.contains(&new_source_name); + } + used_names_set.insert(new_source_name.clone()); + *source_name = new_source_name; } - let mut mapped_asstes: Vec<MappedAsset> = Vec::with_capacity(raw_assets.len()); - for (filename, code_buffer, source_map_buffer) in mapped_buffer { + for (filename, _asset, source_map) in mapped_sources.iter_mut() { + if let Some(source_map) = source_map { + source_map.set_file(Some(filename.clone())); + + let sources = source_map.sources_mut(); + for source in sources { + let module_or_source = source_map_modules + .get(source.as_ref()) + .expect("expected a module or source"); + let source_name = module_to_source_name + .get(module_or_source) + .expect("expected a filename at the given index but found None") + .clone(); + *source = Cow::from(source_name); + } + if self.no_sources { + for content in source_map.sources_content_mut() { + *content = Cow::from(String::default()); + } + } + if let Some(source_root) = &self.source_root { + source_map.set_source_root(Some(source_root.clone())); + } + } + } + + for (filename, asset, source_map) in mapped_sources { + let code_buffer = { + let mut code_buffer = Vec::new(); + asset.to_writer(&mut code_buffer).into_diagnostic()?; + code_buffer + }; + let source_map_buffer = source_map.map(|source_map| { + let mut source_map_buffer = Vec::new(); + source_map + .to_writer(&mut source_map_buffer) + .unwrap_or_else(|e| panic!("{}", e.to_string())); + source_map_buffer + }); + let mut asset = compilation .assets() .get(&filename) diff --git a/packages/rspack-test-tools/tests/configCases/source-map/conflict/a.js b/packages/rspack-test-tools/tests/configCases/source-map/conflict/a.js new file mode 100644 index 000000000000..59c9329c5d4f --- /dev/null +++ b/packages/rspack-test-tools/tests/configCases/source-map/conflict/a.js @@ -0,0 +1,3 @@ +import test from "./common"; +console.log("a"); +console.log(test()); diff --git a/packages/rspack-test-tools/tests/configCases/source-map/conflict/b.js b/packages/rspack-test-tools/tests/configCases/source-map/conflict/b.js new file mode 100644 index 000000000000..0425a41680a1 --- /dev/null +++ b/packages/rspack-test-tools/tests/configCases/source-map/conflict/b.js @@ -0,0 +1,3 @@ +import test from "./common"; +console.log("b"); +console.log(test()); diff --git a/packages/rspack-test-tools/tests/configCases/source-map/conflict/common.js b/packages/rspack-test-tools/tests/configCases/source-map/conflict/common.js new file mode 100644 index 000000000000..fc20ead70ac2 --- /dev/null +++ b/packages/rspack-test-tools/tests/configCases/source-map/conflict/common.js @@ -0,0 +1,3 @@ +export default function test() { + console.log("test"); +} diff --git a/packages/rspack-test-tools/tests/configCases/source-map/conflict/index.js b/packages/rspack-test-tools/tests/configCases/source-map/conflict/index.js new file mode 100644 index 000000000000..c2e0e754c622 --- /dev/null +++ b/packages/rspack-test-tools/tests/configCases/source-map/conflict/index.js @@ -0,0 +1,18 @@ +import("./a"); +import("./b"); + +it("conflict", () => { + 
const fs = require("fs"); + const source_a = fs.readFileSync(__dirname + "/a_js.bundle0.js.map", "utf-8"); + const source_b = fs.readFileSync(__dirname + "/b_js.bundle0.js.map", "utf-8"); + const map_a = JSON.parse(source_a); + const map_b = JSON.parse(source_b); + expect(map_a.sources).toStrictEqual([ + "webpack:///./a.js", + "webpack:///./common.js", + ]); + expect(map_b.sources).toStrictEqual([ + "webpack:///./b.js", + "webpack:///./common.js", + ]); +}); diff --git a/packages/rspack-test-tools/tests/configCases/source-map/conflict/rspack.config.js b/packages/rspack-test-tools/tests/configCases/source-map/conflict/rspack.config.js new file mode 100644 index 000000000000..5505ff5d26ec --- /dev/null +++ b/packages/rspack-test-tools/tests/configCases/source-map/conflict/rspack.config.js @@ -0,0 +1,9 @@ +/** + * @type {import('webpack').Configuration | import('@rspack/cli').Configuration} + */ +module.exports = { + mode: "development", + devtool: "source-map", + externals: ["source-map"], + entry: "./index.js", +}; \ No newline at end of file diff --git a/tests/plugin-test/css-extract/cases/devtool-source-map-from-loaders/expected/main.css.map b/tests/plugin-test/css-extract/cases/devtool-source-map-from-loaders/expected/main.css.map index 3eb623ccfa8f..a7a0a9293b6a 100644 --- a/tests/plugin-test/css-extract/cases/devtool-source-map-from-loaders/expected/main.css.map +++ b/tests/plugin-test/css-extract/cases/devtool-source-map-from-loaders/expected/main.css.map @@ -1 +1 @@ -{"version":3,"file":"main.css","sources":["webpack:///./style.css?86c4"],"sourcesContent":["body {\n background: red;\n}\n"],"names":[],"mappings":"AAAA;EACE,eAAe;AACjB"} \ No newline at end of file +{"version":3,"file":"main.css","sources":["webpack:///./style.css"],"sourcesContent":["body {\n background: red;\n}\n"],"names":[],"mappings":"AAAA;EACE,eAAe;AACjB"} \ No newline at end of file diff --git a/tests/plugin-test/css-extract/cases/devtool-source-map/expected/main.css.map b/tests/plugin-test/css-extract/cases/devtool-source-map/expected/main.css.map index 3eb623ccfa8f..a7a0a9293b6a 100644 --- a/tests/plugin-test/css-extract/cases/devtool-source-map/expected/main.css.map +++ b/tests/plugin-test/css-extract/cases/devtool-source-map/expected/main.css.map @@ -1 +1 @@ -{"version":3,"file":"main.css","sources":["webpack:///./style.css?86c4"],"sourcesContent":["body {\n background: red;\n}\n"],"names":[],"mappings":"AAAA;EACE,eAAe;AACjB"} \ No newline at end of file +{"version":3,"file":"main.css","sources":["webpack:///./style.css"],"sourcesContent":["body {\n background: red;\n}\n"],"names":[],"mappings":"AAAA;EACE,eAAe;AACjB"} \ No newline at end of file diff --git a/tests/plugin-test/css-extract/cases/pathinfo-devtool-source-map/expected/main.css.map b/tests/plugin-test/css-extract/cases/pathinfo-devtool-source-map/expected/main.css.map index d3c9dd3454e1..dd33a0cd7b12 100644 --- a/tests/plugin-test/css-extract/cases/pathinfo-devtool-source-map/expected/main.css.map +++ b/tests/plugin-test/css-extract/cases/pathinfo-devtool-source-map/expected/main.css.map @@ -1 +1 @@ -{"version":3,"file":"main.css","sources":["webpack:///./style.css?86c4","webpack:///./other.css?264c","webpack:///./extra.css?9c40"],"sourcesContent":["body {\n background: red;\n}\n","body {\n background: blue;\n}\n","body {\n background: yellow;\n}\n"],"names":[],"mappings":";;;AAAA;EACE,eAAe;AACjB;;;;;ACFA;EACE,gBAAgB;AAClB;;;;;ACFA;EACE,kBAAkB;AACpB"} \ No newline at end of file 
+{"version":3,"file":"main.css","sources":["webpack:///./style.css","webpack:///./other.css","webpack:///./extra.css"],"sourcesContent":["body {\n background: red;\n}\n","body {\n background: blue;\n}\n","body {\n background: yellow;\n}\n"],"names":[],"mappings":";;;AAAA;EACE,eAAe;AACjB;;;;;ACFA;EACE,gBAAgB;AAClB;;;;;ACFA;EACE,kBAAkB;AACpB"} \ No newline at end of file
skaffold
https://github.com/GoogleContainerTools/skaffold
71dd5be381080641112b69285a9d959b0ad99627
ericzzzzzzz
2023-06-23 03:29:15
fix: Clean up dev images except the last (#8897)
* fix: clean-up-images-in-devloop * fix: wrap cleaning up logic * chore: log * chore: move query artifact back to the main goroutine * chore: add test * chore: increase timeout * chore: delegate clean up job to localpruner to avoid warning logs when cache disabled
fix: Clean up dev images except the last (#8897) * fix: clean-up-images-in-devloop * fix: wrap cleaning up logic * chore: log * chore: move query artifact back to the main goroutine * chore: add test * chore: increase timeout * chore: delegate clean up job to localpruner to avoid warning logs when cache disabled
diff --git a/integration/dev_test.go b/integration/dev_test.go index a9ed6d72161..df7b1e177ea 100644 --- a/integration/dev_test.go +++ b/integration/dev_test.go @@ -368,6 +368,50 @@ func TestDevPortForward(t *testing.T) { } } +func TestDevDeletePreviousBuiltImages(t *testing.T) { + tests := []struct { + name string + dir string + }{ + { + name: "microservices", + dir: "examples/microservices"}, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + MarkIntegrationTest(t, CanRunWithoutGcp) + // Run skaffold build first to fail quickly on a build failure + skaffold.Build().InDir(test.dir).RunOrFail(t) + + ns, k8sClient := SetupNamespace(t) + + rpcAddr := randomPort() + skaffold.Dev("--status-check=false", "--port-forward", "--rpc-port", rpcAddr).InDir(test.dir).InNs(ns.Name).RunBackground(t) + + _, entries := apiEvents(t, rpcAddr) + + waitForPortForwardEvent(t, entries, "leeroy-app", "service", ns.Name, "leeroooooy app!!\n") + deployment := k8sClient.GetDeployment("leeroy-app") + image := deployment.Spec.Template.Spec.Containers[0].Image + + original, perms, fErr := replaceInFile("leeroooooy app!!", "test string", fmt.Sprintf("%s/leeroy-app/app.go", test.dir)) + failNowIfError(t, fErr) + defer func() { + if original != nil { + os.WriteFile(fmt.Sprintf("%s/leeroy-app/app.go", test.dir), original, perms) + } + }() + + waitForPortForwardEvent(t, entries, "leeroy-app", "service", ns.Name, "test string\n") + client := SetupDockerClient(t) + ctx := context.TODO() + wait.Poll(3*time.Second, time.Minute*2, func() (done bool, err error) { + return !client.ImageExists(ctx, image), nil + }) + }) + } +} + func TestDevPortForwardDefaultNamespace(t *testing.T) { MarkIntegrationTest(t, CanRunWithoutGcp) diff --git a/pkg/skaffold/build/local/local.go b/pkg/skaffold/build/local/local.go index f1cc930ffc3..368894b367b 100644 --- a/pkg/skaffold/build/local/local.go +++ b/pkg/skaffold/build/local/local.go @@ -57,6 +57,21 @@ func (b *Builder) PostBuild(ctx context.Context, _ io.Writer) error { return nil } +// Prune uses the docker API client to remove all images built with Skaffold +func (b *Builder) Prune(ctx context.Context, _ io.Writer) error { + var toPrune []string + seen := make(map[string]bool) + + for _, img := range b.builtImages { + if !seen[img] && !b.localPruner.isPruned(img) { + toPrune = append(toPrune, img) + seen[img] = true + } + } + _, err := b.localDocker.Prune(ctx, toPrune, b.pruneChildren) + return err +} + func (b *Builder) Concurrency() *int { return b.local.Concurrency } func (b *Builder) PushImages() bool { @@ -89,6 +104,20 @@ func (b *Builder) buildArtifact(ctx context.Context, out io.Writer, a *latest.Ar } imageID := digestOrImageID + if b.mode == config.RunModes.Dev { + artifacts, err := b.artifactStore.GetArtifacts([]*latest.Artifact{a}) + if err != nil { + log.Entry(ctx).Debugf("failed to get artifacts from store, err: %v", err) + } + // delete previous built images asynchronously + go func() { + if len(artifacts) > 0 { + bgCtx := context.Background() + id, _ := b.getImageIDForTag(bgCtx, artifacts[0].Tag) + b.localPruner.runPrune(bgCtx, []string{id}) + } + }() + } b.builtImages = append(b.builtImages, imageID) return build.TagWithImageID(ctx, tag, imageID, b.localDocker) } diff --git a/pkg/skaffold/build/local/types.go b/pkg/skaffold/build/local/types.go index 94893324846..fef419ae43a 100644 --- a/pkg/skaffold/build/local/types.go +++ b/pkg/skaffold/build/local/types.go @@ -123,21 +123,6 @@ func NewBuilder(ctx context.Context, bCtx BuilderContext, buildCfg 
*latest.Local }, nil } -// Prune uses the docker API client to remove all images built with Skaffold -func (b *Builder) Prune(ctx context.Context, _ io.Writer) error { - var toPrune []string - seen := make(map[string]bool) - - for _, img := range b.builtImages { - if !seen[img] && !b.localPruner.isPruned(img) { - toPrune = append(toPrune, img) - seen[img] = true - } - } - _, err := b.localDocker.Prune(ctx, toPrune, b.pruneChildren) - return err -} - // artifactBuilder represents a per artifact builder interface type artifactBuilder interface { Build(ctx context.Context, out io.Writer, a *latest.Artifact, tag string, platforms platform.Matcher) (string, error)
influxdb
https://github.com/influxdata/influxdb
efbc4ae7c18cc10f2bae7e1cef6a05b0b6720bf9
Jonathan A. Sternberg
2020-06-02 04:27:50
feat(storage/flux): implement create empty for the window table reader (#18288)
This implements create empty for the window table reader and allows this table read function to be used when it is specified. It will pass down the create empty flag from the original window call into the storage read function. This also fixes the window table reader so it properly creates individual tables for each window. Previously, it was constructing one table for an entire series instead of one table per window. Tests have been added to verify three edge case behaviors. The first is the normal read operation where all values are present. The second is when create empty is specified so null values may be created. The third is with truncated boundaries to ensure that storage is read from and the start and stop timestamps get correctly truncated.
feat(storage/flux): implement create empty for the window table reader (#18288) This implements create empty for the window table reader and allows this table read function to be used when it is specified. It will pass down the create empty flag from the original window call into the storage read function. This also fixes the window table reader so it properly creates individual tables for each window. Previously, it was constructing one table for an entire series instead of one table per window. Tests have been added to verify three edge case behaviors. The first is the normal read operation where all values are present. The second is when create empty is specified so null values may be created. The third is with truncated boundaries to ensure that storage is read from and the start and stop timestamps get correctly truncated.
diff --git a/query/stdlib/influxdata/influxdb/operators.go b/query/stdlib/influxdata/influxdb/operators.go index 102155a511b..e18577287ed 100644 --- a/query/stdlib/influxdata/influxdb/operators.go +++ b/query/stdlib/influxdata/influxdb/operators.go @@ -111,6 +111,7 @@ type ReadWindowAggregatePhysSpec struct { WindowEvery int64 Aggregates []plan.ProcedureKind + CreateEmpty bool } func (s *ReadWindowAggregatePhysSpec) Kind() plan.ProcedureKind { @@ -123,6 +124,7 @@ func (s *ReadWindowAggregatePhysSpec) Copy() plan.ProcedureSpec { ns.ReadRangePhysSpec = *s.ReadRangePhysSpec.Copy().(*ReadRangePhysSpec) ns.WindowEvery = s.WindowEvery ns.Aggregates = s.Aggregates + ns.CreateEmpty = s.CreateEmpty return ns } diff --git a/query/stdlib/influxdata/influxdb/rules.go b/query/stdlib/influxdata/influxdb/rules.go index f8d8b2fae15..92d28024448 100644 --- a/query/stdlib/influxdata/influxdb/rules.go +++ b/query/stdlib/influxdata/influxdb/rules.go @@ -766,8 +766,7 @@ func (PushDownWindowAggregateRule) Rewrite(ctx context.Context, pn plan.Node) (p !window.Offset.IsZero() || windowSpec.TimeColumn != "_time" || windowSpec.StartColumn != "_start" || - windowSpec.StopColumn != "_stop" || - windowSpec.CreateEmpty { + windowSpec.StopColumn != "_stop" { return pn, false, nil } @@ -776,6 +775,7 @@ func (PushDownWindowAggregateRule) Rewrite(ctx context.Context, pn plan.Node) (p ReadRangePhysSpec: *fromSpec.Copy().(*ReadRangePhysSpec), Aggregates: []plan.ProcedureKind{fnNode.Kind()}, WindowEvery: window.Every.Nanoseconds(), + CreateEmpty: windowSpec.CreateEmpty, }), true, nil } diff --git a/query/stdlib/influxdata/influxdb/rules_test.go b/query/stdlib/influxdata/influxdb/rules_test.go index a42d28cff0f..78442acb904 100644 --- a/query/stdlib/influxdata/influxdb/rules_test.go +++ b/query/stdlib/influxdata/influxdb/rules_test.go @@ -1224,13 +1224,14 @@ func TestPushDownWindowAggregateRule(t *testing.T) { } // construct a simple result - simpleResult := func(proc plan.ProcedureKind) *plantest.PlanSpec { + simpleResult := func(proc plan.ProcedureKind, createEmpty bool) *plantest.PlanSpec { return &plantest.PlanSpec{ Nodes: []plan.Node{ plan.CreatePhysicalNode("ReadWindowAggregate", &influxdb.ReadWindowAggregatePhysSpec{ ReadRangePhysSpec: readRange, Aggregates: []plan.ProcedureKind{proc}, WindowEvery: 60000000000, + CreateEmpty: createEmpty, }), }, } @@ -1268,7 +1269,7 @@ func TestPushDownWindowAggregateRule(t *testing.T) { Name: "SimplePassMin", Rules: []plan.Rule{influxdb.PushDownWindowAggregateRule{}}, Before: simplePlanWithWindowAgg(window1m, "min", minProcedureSpec()), - After: simpleResult("min"), + After: simpleResult("min", false), }) // ReadRange -> window -> max => ReadWindowAggregate @@ -1277,7 +1278,7 @@ func TestPushDownWindowAggregateRule(t *testing.T) { Name: "SimplePassMax", Rules: []plan.Rule{influxdb.PushDownWindowAggregateRule{}}, Before: simplePlanWithWindowAgg(window1m, "max", maxProcedureSpec()), - After: simpleResult("max"), + After: simpleResult("max", false), }) // ReadRange -> window -> mean => ReadWindowAggregate @@ -1286,7 +1287,7 @@ func TestPushDownWindowAggregateRule(t *testing.T) { Name: "SimplePassMean", Rules: []plan.Rule{influxdb.PushDownWindowAggregateRule{}}, Before: simplePlanWithWindowAgg(window1m, "mean", meanProcedureSpec()), - After: simpleResult("mean"), + After: simpleResult("mean", false), }) // ReadRange -> window -> count => ReadWindowAggregate @@ -1295,7 +1296,7 @@ func TestPushDownWindowAggregateRule(t *testing.T) { Name: "SimplePassCount", Rules: 
[]plan.Rule{influxdb.PushDownWindowAggregateRule{}}, Before: simplePlanWithWindowAgg(window1m, "count", countProcedureSpec()), - After: simpleResult("count"), + After: simpleResult("count", false), }) // ReadRange -> window -> sum => ReadWindowAggregate @@ -1304,7 +1305,7 @@ func TestPushDownWindowAggregateRule(t *testing.T) { Name: "SimplePassSum", Rules: []plan.Rule{influxdb.PushDownWindowAggregateRule{}}, Before: simplePlanWithWindowAgg(window1m, "sum", sumProcedureSpec()), - After: simpleResult("sum"), + After: simpleResult("sum", false), }) // Rewrite with successors @@ -1384,10 +1385,16 @@ func TestPushDownWindowAggregateRule(t *testing.T) { badWindow5.StopColumn = "_stappp" simpleMinUnchanged("BadStop", badWindow5) - // Condition not met: createEmpty is not false - badWindow6 := window1m - badWindow6.CreateEmpty = true - simpleMinUnchanged("BadCreateEmpty", badWindow6) + // Condition met: createEmpty is true. + window6 := window1m + window6.CreateEmpty = true + tests = append(tests, plantest.RuleTestCase{ + Context: haveCaps, + Name: "CreateEmptyPassMin", + Rules: []plan.Rule{influxdb.PushDownWindowAggregateRule{}}, + Before: simplePlanWithWindowAgg(window6, "min", minProcedureSpec()), + After: simpleResult("min", true), + }) // Condition not met: duration too long. simpleMinUnchanged("WindowTooLarge", window1y) @@ -1554,7 +1561,7 @@ func TestPushDownWindowAggregateRule(t *testing.T) { Name: "FailNoCaps", Rules: []plan.Rule{influxdb.PushDownWindowAggregateRule{}}, Before: simplePlanWithWindowAgg(window1m, "count", countProcedureSpec()), - After: simpleResult("count"), + After: simpleResult("count", false), NoChange: true, }) diff --git a/query/stdlib/influxdata/influxdb/source.go b/query/stdlib/influxdata/influxdb/source.go index 7bbbbe9ad1b..c992b0da49b 100644 --- a/query/stdlib/influxdata/influxdb/source.go +++ b/query/stdlib/influxdata/influxdb/source.go @@ -351,6 +351,7 @@ func createReadWindowAggregateSource(s plan.ProcedureSpec, id execute.DatasetID, }, WindowEvery: spec.WindowEvery, Aggregates: spec.Aggregates, + CreateEmpty: spec.CreateEmpty, }, a, ), nil diff --git a/query/storage.go b/query/storage.go index 17e235237ff..5dda7643592 100644 --- a/query/storage.go +++ b/query/storage.go @@ -73,6 +73,7 @@ type ReadWindowAggregateSpec struct { ReadFilterSpec WindowEvery int64 Aggregates []plan.ProcedureKind + CreateEmpty bool } // TableIterator is a table iterator that also keeps track of cursor statistics from the storage engine. 
diff --git a/storage/flux/reader.go b/storage/flux/reader.go index 921d7c61e4f..b1b07ccb943 100644 --- a/storage/flux/reader.go +++ b/storage/flux/reader.go @@ -592,10 +592,13 @@ func (wai *windowAggregateIterator) Do(f func(flux.Table) error) error { if rs == nil { return nil } - return wai.handleRead(f, rs, req.WindowEvery) + return wai.handleRead(f, rs) } -func (wai *windowAggregateIterator) handleRead(f func(flux.Table) error, rs storage.ResultSet, windowEvery int64) error { +func (wai *windowAggregateIterator) handleRead(f func(flux.Table) error, rs storage.ResultSet) error { + windowEvery := wai.spec.WindowEvery + createEmpty := wai.spec.CreateEmpty + // these resources must be closed if not nil on return var ( cur cursors.Cursor @@ -627,19 +630,19 @@ READ: switch typedCur := cur.(type) { case cursors.IntegerArrayCursor: cols, defs := determineTableColsForWindowAggregate(rs.Tags(), flux.TInt) - table = newIntegerWindowTable(done, typedCur, bnds, windowEvery, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) + table = newIntegerWindowTable(done, typedCur, bnds, windowEvery, createEmpty, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) case cursors.FloatArrayCursor: cols, defs := determineTableColsForWindowAggregate(rs.Tags(), flux.TFloat) - table = newFloatWindowTable(done, typedCur, bnds, windowEvery, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) + table = newFloatWindowTable(done, typedCur, bnds, windowEvery, createEmpty, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) case cursors.UnsignedArrayCursor: cols, defs := determineTableColsForWindowAggregate(rs.Tags(), flux.TUInt) - table = newUnsignedWindowTable(done, typedCur, bnds, windowEvery, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) + table = newUnsignedWindowTable(done, typedCur, bnds, windowEvery, createEmpty, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) case cursors.BooleanArrayCursor: cols, defs := determineTableColsForWindowAggregate(rs.Tags(), flux.TBool) - table = newBooleanWindowTable(done, typedCur, bnds, windowEvery, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) + table = newBooleanWindowTable(done, typedCur, bnds, windowEvery, createEmpty, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) case cursors.StringArrayCursor: cols, defs := determineTableColsForWindowAggregate(rs.Tags(), flux.TString) - table = newStringWindowTable(done, typedCur, bnds, windowEvery, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) + table = newStringWindowTable(done, typedCur, bnds, windowEvery, createEmpty, key, cols, rs.Tags(), defs, wai.cache, wai.alloc) default: panic(fmt.Sprintf("unreachable: %T", typedCur)) } @@ -647,7 +650,7 @@ READ: cur = nil if !table.Empty() { - if err := f(table); err != nil { + if err := splitWindows(wai.ctx, table, f); err != nil { table.Close() table = nil return err diff --git a/storage/flux/table.gen.go b/storage/flux/table.gen.go index 490e9b80968..81c5c9101b8 100644 --- a/storage/flux/table.gen.go +++ b/storage/flux/table.gen.go @@ -9,6 +9,7 @@ package storageflux import ( "sync" + "github.com/apache/arrow/go/arrow/array" "github.com/influxdata/flux" "github.com/influxdata/flux/arrow" "github.com/influxdata/flux/execute" @@ -102,7 +103,9 @@ type floatWindowTable struct { floatTable windowEvery int64 arr *cursors.FloatArray + nextTS int64 idxInArr int + createEmpty bool } func newFloatWindowTable( @@ -110,6 +113,7 @@ func newFloatWindowTable( cur cursors.FloatArrayCursor, bounds execute.Bounds, every int64, + createEmpty bool, key flux.GroupKey, cols []flux.ColMeta, tags models.Tags, @@ 
-123,6 +127,11 @@ func newFloatWindowTable( cur: cur, }, windowEvery: every, + createEmpty: createEmpty, + } + if t.createEmpty { + start := int64(bounds.Start) + t.nextTS = start + (every - start%every) } t.readTags(tags) t.advance() @@ -130,40 +139,126 @@ func newFloatWindowTable( return t } -func (t *floatWindowTable) advance() bool { +func (t *floatWindowTable) Do(f func(flux.ColReader) error) error { + return t.do(f, t.advance) +} + +// createNextWindow will read the timestamps from the array +// cursor and construct the values for the next window. +func (t *floatWindowTable) createNextWindow() (start, stop *array.Int64, ok bool) { + var stopT int64 + if t.createEmpty { + stopT = t.nextTS + t.nextTS += t.windowEvery + } else { + if !t.nextBuffer() { + return nil, nil, false + } + stopT = t.arr.Timestamps[t.idxInArr] + } + + // Regain the window start time from the window end time. + startT := stopT - t.windowEvery + if startT < int64(t.bounds.Start) { + startT = int64(t.bounds.Start) + } + if stopT > int64(t.bounds.Stop) { + stopT = int64(t.bounds.Stop) + } + + // If the start time is after our stop boundary, + // we exit here when create empty is true. + if t.createEmpty && startT >= int64(t.bounds.Stop) { + return nil, nil, false + } + start = arrow.NewInt([]int64{startT}, t.alloc) + stop = arrow.NewInt([]int64{stopT}, t.alloc) + return start, stop, true +} + +// nextAt will retrieve the next value that can be used with +// the given stop timestamp. If no values can be used with the timestamp, +// it will return the default value and false. +func (t *floatWindowTable) nextAt(ts int64) (v float64, ok bool) { + if !t.nextBuffer() { + return + } else if !t.isInWindow(ts, t.arr.Timestamps[t.idxInArr]) { + return + } + v, ok = t.arr.Values[t.idxInArr], true + t.idxInArr++ + return v, ok +} + +// isInWindow will check if the given time at stop can be used within +// the window stop time for ts. The ts may be a truncated stop time +// because of a restricted boundary while stop will be the true +// stop time returned by storage. +func (t *floatWindowTable) isInWindow(ts int64, stop int64) bool { + // This method checks if the stop time is a valid stop time for + // that interval. This calculation is different from the calculation + // of the window itself. For example, for a 10 second window that + // starts at 20 seconds, we would include points between [20, 30). + // The stop time for this interval would be 30, but because the stop + // time can be truncated, valid stop times range from anywhere between + // (20, 30]. The storage engine will always produce 30 as the end time + // but we may have truncated the stop time because of the boundary + // and this is why we are checking for this range instead of checking + // if the two values are equal. + start := stop - t.windowEvery + return start < ts && ts <= stop +} + +// nextBuffer will ensure the array cursor is filled +// and will return true if there is at least one value +// that can be read from it. +func (t *floatWindowTable) nextBuffer() bool { + // Discard the current array cursor if we have + // exceeded it. + if t.arr != nil && t.idxInArr >= t.arr.Len() { + t.arr = nil + } + + // Retrieve the next array cursor if needed. 
if t.arr == nil { - t.arr = t.cur.Next() - if t.arr.Len() == 0 { - t.arr = nil + arr := t.cur.Next() + if arr.Len() == 0 { return false } - t.idxInArr = 0 + t.arr, t.idxInArr = arr, 0 + } + return true +} + +// appendValues will scan the timestamps and append values +// that match those timestamps from the buffer. +func (t *floatWindowTable) appendValues(intervals []int64, appendValue func(v float64), appendNull func()) { + for i := 0; i < len(intervals); i++ { + if v, ok := t.nextAt(intervals[i]); ok { + appendValue(v) + continue + } + appendNull() } +} + +func (t *floatWindowTable) advance() bool { + // Create the timestamps for the next window. + start, stop, ok := t.createNextWindow() + if !ok { + return false + } + values := t.mergeValues(stop.Int64Values()) // Retrieve the buffer for the data to avoid allocating // additional slices. If the buffer is still being used // because the references were retained, then we will // allocate a new buffer. - columnReader := t.allocateBuffer(1) - // regain the window start time from the window end time - rangeStart := int64(t.bounds.Start) - rangeEnd := int64(t.bounds.Stop) - stop := t.arr.Timestamps[t.idxInArr] - start := stop - t.windowEvery - if start < rangeStart { - start = rangeStart - } - if stop > rangeEnd { - stop = rangeEnd - } - columnReader.cols[startColIdx] = arrow.NewInt([]int64{start}, t.alloc) - columnReader.cols[stopColIdx] = arrow.NewInt([]int64{stop}, t.alloc) - columnReader.cols[windowedValueColIdx] = t.toArrowBuffer(t.arr.Values[t.idxInArr : t.idxInArr+1]) - t.appendTags(columnReader) - t.idxInArr++ - if t.idxInArr == t.arr.Len() { - t.arr = nil - } + cr := t.allocateBuffer(stop.Len()) + cr.cols[startColIdx] = start + cr.cols[stopColIdx] = stop + cr.cols[windowedValueColIdx] = values + t.appendTags(cr) return true } @@ -363,7 +458,9 @@ type integerWindowTable struct { integerTable windowEvery int64 arr *cursors.IntegerArray + nextTS int64 idxInArr int + createEmpty bool } func newIntegerWindowTable( @@ -371,6 +468,7 @@ func newIntegerWindowTable( cur cursors.IntegerArrayCursor, bounds execute.Bounds, every int64, + createEmpty bool, key flux.GroupKey, cols []flux.ColMeta, tags models.Tags, @@ -384,6 +482,11 @@ func newIntegerWindowTable( cur: cur, }, windowEvery: every, + createEmpty: createEmpty, + } + if t.createEmpty { + start := int64(bounds.Start) + t.nextTS = start + (every - start%every) } t.readTags(tags) t.advance() @@ -391,40 +494,126 @@ func newIntegerWindowTable( return t } -func (t *integerWindowTable) advance() bool { +func (t *integerWindowTable) Do(f func(flux.ColReader) error) error { + return t.do(f, t.advance) +} + +// createNextWindow will read the timestamps from the array +// cursor and construct the values for the next window. +func (t *integerWindowTable) createNextWindow() (start, stop *array.Int64, ok bool) { + var stopT int64 + if t.createEmpty { + stopT = t.nextTS + t.nextTS += t.windowEvery + } else { + if !t.nextBuffer() { + return nil, nil, false + } + stopT = t.arr.Timestamps[t.idxInArr] + } + + // Regain the window start time from the window end time. + startT := stopT - t.windowEvery + if startT < int64(t.bounds.Start) { + startT = int64(t.bounds.Start) + } + if stopT > int64(t.bounds.Stop) { + stopT = int64(t.bounds.Stop) + } + + // If the start time is after our stop boundary, + // we exit here when create empty is true. 
+ if t.createEmpty && startT >= int64(t.bounds.Stop) { + return nil, nil, false + } + start = arrow.NewInt([]int64{startT}, t.alloc) + stop = arrow.NewInt([]int64{stopT}, t.alloc) + return start, stop, true +} + +// nextAt will retrieve the next value that can be used with +// the given stop timestamp. If no values can be used with the timestamp, +// it will return the default value and false. +func (t *integerWindowTable) nextAt(ts int64) (v int64, ok bool) { + if !t.nextBuffer() { + return + } else if !t.isInWindow(ts, t.arr.Timestamps[t.idxInArr]) { + return + } + v, ok = t.arr.Values[t.idxInArr], true + t.idxInArr++ + return v, ok +} + +// isInWindow will check if the given time at stop can be used within +// the window stop time for ts. The ts may be a truncated stop time +// because of a restricted boundary while stop will be the true +// stop time returned by storage. +func (t *integerWindowTable) isInWindow(ts int64, stop int64) bool { + // This method checks if the stop time is a valid stop time for + // that interval. This calculation is different from the calculation + // of the window itself. For example, for a 10 second window that + // starts at 20 seconds, we would include points between [20, 30). + // The stop time for this interval would be 30, but because the stop + // time can be truncated, valid stop times range from anywhere between + // (20, 30]. The storage engine will always produce 30 as the end time + // but we may have truncated the stop time because of the boundary + // and this is why we are checking for this range instead of checking + // if the two values are equal. + start := stop - t.windowEvery + return start < ts && ts <= stop +} + +// nextBuffer will ensure the array cursor is filled +// and will return true if there is at least one value +// that can be read from it. +func (t *integerWindowTable) nextBuffer() bool { + // Discard the current array cursor if we have + // exceeded it. + if t.arr != nil && t.idxInArr >= t.arr.Len() { + t.arr = nil + } + + // Retrieve the next array cursor if needed. if t.arr == nil { - t.arr = t.cur.Next() - if t.arr.Len() == 0 { - t.arr = nil + arr := t.cur.Next() + if arr.Len() == 0 { return false } - t.idxInArr = 0 + t.arr, t.idxInArr = arr, 0 } + return true +} + +// appendValues will scan the timestamps and append values +// that match those timestamps from the buffer. +func (t *integerWindowTable) appendValues(intervals []int64, appendValue func(v int64), appendNull func()) { + for i := 0; i < len(intervals); i++ { + if v, ok := t.nextAt(intervals[i]); ok { + appendValue(v) + continue + } + appendNull() + } +} + +func (t *integerWindowTable) advance() bool { + // Create the timestamps for the next window. + start, stop, ok := t.createNextWindow() + if !ok { + return false + } + values := t.mergeValues(stop.Int64Values()) // Retrieve the buffer for the data to avoid allocating // additional slices. If the buffer is still being used // because the references were retained, then we will // allocate a new buffer. 
- columnReader := t.allocateBuffer(1) - // regain the window start time from the window end time - rangeStart := int64(t.bounds.Start) - rangeEnd := int64(t.bounds.Stop) - stop := t.arr.Timestamps[t.idxInArr] - start := stop - t.windowEvery - if start < rangeStart { - start = rangeStart - } - if stop > rangeEnd { - stop = rangeEnd - } - columnReader.cols[startColIdx] = arrow.NewInt([]int64{start}, t.alloc) - columnReader.cols[stopColIdx] = arrow.NewInt([]int64{stop}, t.alloc) - columnReader.cols[windowedValueColIdx] = t.toArrowBuffer(t.arr.Values[t.idxInArr : t.idxInArr+1]) - t.appendTags(columnReader) - t.idxInArr++ - if t.idxInArr == t.arr.Len() { - t.arr = nil - } + cr := t.allocateBuffer(stop.Len()) + cr.cols[startColIdx] = start + cr.cols[stopColIdx] = stop + cr.cols[windowedValueColIdx] = values + t.appendTags(cr) return true } @@ -624,7 +813,9 @@ type unsignedWindowTable struct { unsignedTable windowEvery int64 arr *cursors.UnsignedArray + nextTS int64 idxInArr int + createEmpty bool } func newUnsignedWindowTable( @@ -632,6 +823,7 @@ func newUnsignedWindowTable( cur cursors.UnsignedArrayCursor, bounds execute.Bounds, every int64, + createEmpty bool, key flux.GroupKey, cols []flux.ColMeta, tags models.Tags, @@ -645,6 +837,11 @@ func newUnsignedWindowTable( cur: cur, }, windowEvery: every, + createEmpty: createEmpty, + } + if t.createEmpty { + start := int64(bounds.Start) + t.nextTS = start + (every - start%every) } t.readTags(tags) t.advance() @@ -652,40 +849,126 @@ func newUnsignedWindowTable( return t } -func (t *unsignedWindowTable) advance() bool { +func (t *unsignedWindowTable) Do(f func(flux.ColReader) error) error { + return t.do(f, t.advance) +} + +// createNextWindow will read the timestamps from the array +// cursor and construct the values for the next window. +func (t *unsignedWindowTable) createNextWindow() (start, stop *array.Int64, ok bool) { + var stopT int64 + if t.createEmpty { + stopT = t.nextTS + t.nextTS += t.windowEvery + } else { + if !t.nextBuffer() { + return nil, nil, false + } + stopT = t.arr.Timestamps[t.idxInArr] + } + + // Regain the window start time from the window end time. + startT := stopT - t.windowEvery + if startT < int64(t.bounds.Start) { + startT = int64(t.bounds.Start) + } + if stopT > int64(t.bounds.Stop) { + stopT = int64(t.bounds.Stop) + } + + // If the start time is after our stop boundary, + // we exit here when create empty is true. + if t.createEmpty && startT >= int64(t.bounds.Stop) { + return nil, nil, false + } + start = arrow.NewInt([]int64{startT}, t.alloc) + stop = arrow.NewInt([]int64{stopT}, t.alloc) + return start, stop, true +} + +// nextAt will retrieve the next value that can be used with +// the given stop timestamp. If no values can be used with the timestamp, +// it will return the default value and false. +func (t *unsignedWindowTable) nextAt(ts int64) (v uint64, ok bool) { + if !t.nextBuffer() { + return + } else if !t.isInWindow(ts, t.arr.Timestamps[t.idxInArr]) { + return + } + v, ok = t.arr.Values[t.idxInArr], true + t.idxInArr++ + return v, ok +} + +// isInWindow will check if the given time at stop can be used within +// the window stop time for ts. The ts may be a truncated stop time +// because of a restricted boundary while stop will be the true +// stop time returned by storage. +func (t *unsignedWindowTable) isInWindow(ts int64, stop int64) bool { + // This method checks if the stop time is a valid stop time for + // that interval. This calculation is different from the calculation + // of the window itself. 
For example, for a 10 second window that + // starts at 20 seconds, we would include points between [20, 30). + // The stop time for this interval would be 30, but because the stop + // time can be truncated, valid stop times range from anywhere between + // (20, 30]. The storage engine will always produce 30 as the end time + // but we may have truncated the stop time because of the boundary + // and this is why we are checking for this range instead of checking + // if the two values are equal. + start := stop - t.windowEvery + return start < ts && ts <= stop +} + +// nextBuffer will ensure the array cursor is filled +// and will return true if there is at least one value +// that can be read from it. +func (t *unsignedWindowTable) nextBuffer() bool { + // Discard the current array cursor if we have + // exceeded it. + if t.arr != nil && t.idxInArr >= t.arr.Len() { + t.arr = nil + } + + // Retrieve the next array cursor if needed. if t.arr == nil { - t.arr = t.cur.Next() - if t.arr.Len() == 0 { - t.arr = nil + arr := t.cur.Next() + if arr.Len() == 0 { return false } - t.idxInArr = 0 + t.arr, t.idxInArr = arr, 0 + } + return true +} + +// appendValues will scan the timestamps and append values +// that match those timestamps from the buffer. +func (t *unsignedWindowTable) appendValues(intervals []int64, appendValue func(v uint64), appendNull func()) { + for i := 0; i < len(intervals); i++ { + if v, ok := t.nextAt(intervals[i]); ok { + appendValue(v) + continue + } + appendNull() + } +} + +func (t *unsignedWindowTable) advance() bool { + // Create the timestamps for the next window. + start, stop, ok := t.createNextWindow() + if !ok { + return false } + values := t.mergeValues(stop.Int64Values()) // Retrieve the buffer for the data to avoid allocating // additional slices. If the buffer is still being used // because the references were retained, then we will // allocate a new buffer. 
- columnReader := t.allocateBuffer(1) - // regain the window start time from the window end time - rangeStart := int64(t.bounds.Start) - rangeEnd := int64(t.bounds.Stop) - stop := t.arr.Timestamps[t.idxInArr] - start := stop - t.windowEvery - if start < rangeStart { - start = rangeStart - } - if stop > rangeEnd { - stop = rangeEnd - } - columnReader.cols[startColIdx] = arrow.NewInt([]int64{start}, t.alloc) - columnReader.cols[stopColIdx] = arrow.NewInt([]int64{stop}, t.alloc) - columnReader.cols[windowedValueColIdx] = t.toArrowBuffer(t.arr.Values[t.idxInArr : t.idxInArr+1]) - t.appendTags(columnReader) - t.idxInArr++ - if t.idxInArr == t.arr.Len() { - t.arr = nil - } + cr := t.allocateBuffer(stop.Len()) + cr.cols[startColIdx] = start + cr.cols[stopColIdx] = stop + cr.cols[windowedValueColIdx] = values + t.appendTags(cr) return true } @@ -885,7 +1168,9 @@ type stringWindowTable struct { stringTable windowEvery int64 arr *cursors.StringArray + nextTS int64 idxInArr int + createEmpty bool } func newStringWindowTable( @@ -893,6 +1178,7 @@ func newStringWindowTable( cur cursors.StringArrayCursor, bounds execute.Bounds, every int64, + createEmpty bool, key flux.GroupKey, cols []flux.ColMeta, tags models.Tags, @@ -906,6 +1192,11 @@ func newStringWindowTable( cur: cur, }, windowEvery: every, + createEmpty: createEmpty, + } + if t.createEmpty { + start := int64(bounds.Start) + t.nextTS = start + (every - start%every) } t.readTags(tags) t.advance() @@ -913,40 +1204,126 @@ func newStringWindowTable( return t } -func (t *stringWindowTable) advance() bool { +func (t *stringWindowTable) Do(f func(flux.ColReader) error) error { + return t.do(f, t.advance) +} + +// createNextWindow will read the timestamps from the array +// cursor and construct the values for the next window. +func (t *stringWindowTable) createNextWindow() (start, stop *array.Int64, ok bool) { + var stopT int64 + if t.createEmpty { + stopT = t.nextTS + t.nextTS += t.windowEvery + } else { + if !t.nextBuffer() { + return nil, nil, false + } + stopT = t.arr.Timestamps[t.idxInArr] + } + + // Regain the window start time from the window end time. + startT := stopT - t.windowEvery + if startT < int64(t.bounds.Start) { + startT = int64(t.bounds.Start) + } + if stopT > int64(t.bounds.Stop) { + stopT = int64(t.bounds.Stop) + } + + // If the start time is after our stop boundary, + // we exit here when create empty is true. + if t.createEmpty && startT >= int64(t.bounds.Stop) { + return nil, nil, false + } + start = arrow.NewInt([]int64{startT}, t.alloc) + stop = arrow.NewInt([]int64{stopT}, t.alloc) + return start, stop, true +} + +// nextAt will retrieve the next value that can be used with +// the given stop timestamp. If no values can be used with the timestamp, +// it will return the default value and false. +func (t *stringWindowTable) nextAt(ts int64) (v string, ok bool) { + if !t.nextBuffer() { + return + } else if !t.isInWindow(ts, t.arr.Timestamps[t.idxInArr]) { + return + } + v, ok = t.arr.Values[t.idxInArr], true + t.idxInArr++ + return v, ok +} + +// isInWindow will check if the given time at stop can be used within +// the window stop time for ts. The ts may be a truncated stop time +// because of a restricted boundary while stop will be the true +// stop time returned by storage. +func (t *stringWindowTable) isInWindow(ts int64, stop int64) bool { + // This method checks if the stop time is a valid stop time for + // that interval. This calculation is different from the calculation + // of the window itself. 
For example, for a 10 second window that + // starts at 20 seconds, we would include points between [20, 30). + // The stop time for this interval would be 30, but because the stop + // time can be truncated, valid stop times range from anywhere between + // (20, 30]. The storage engine will always produce 30 as the end time + // but we may have truncated the stop time because of the boundary + // and this is why we are checking for this range instead of checking + // if the two values are equal. + start := stop - t.windowEvery + return start < ts && ts <= stop +} + +// nextBuffer will ensure the array cursor is filled +// and will return true if there is at least one value +// that can be read from it. +func (t *stringWindowTable) nextBuffer() bool { + // Discard the current array cursor if we have + // exceeded it. + if t.arr != nil && t.idxInArr >= t.arr.Len() { + t.arr = nil + } + + // Retrieve the next array cursor if needed. if t.arr == nil { - t.arr = t.cur.Next() - if t.arr.Len() == 0 { - t.arr = nil + arr := t.cur.Next() + if arr.Len() == 0 { return false } - t.idxInArr = 0 + t.arr, t.idxInArr = arr, 0 + } + return true +} + +// appendValues will scan the timestamps and append values +// that match those timestamps from the buffer. +func (t *stringWindowTable) appendValues(intervals []int64, appendValue func(v string), appendNull func()) { + for i := 0; i < len(intervals); i++ { + if v, ok := t.nextAt(intervals[i]); ok { + appendValue(v) + continue + } + appendNull() + } +} + +func (t *stringWindowTable) advance() bool { + // Create the timestamps for the next window. + start, stop, ok := t.createNextWindow() + if !ok { + return false } + values := t.mergeValues(stop.Int64Values()) // Retrieve the buffer for the data to avoid allocating // additional slices. If the buffer is still being used // because the references were retained, then we will // allocate a new buffer. 
- columnReader := t.allocateBuffer(1) - // regain the window start time from the window end time - rangeStart := int64(t.bounds.Start) - rangeEnd := int64(t.bounds.Stop) - stop := t.arr.Timestamps[t.idxInArr] - start := stop - t.windowEvery - if start < rangeStart { - start = rangeStart - } - if stop > rangeEnd { - stop = rangeEnd - } - columnReader.cols[startColIdx] = arrow.NewInt([]int64{start}, t.alloc) - columnReader.cols[stopColIdx] = arrow.NewInt([]int64{stop}, t.alloc) - columnReader.cols[windowedValueColIdx] = t.toArrowBuffer(t.arr.Values[t.idxInArr : t.idxInArr+1]) - t.appendTags(columnReader) - t.idxInArr++ - if t.idxInArr == t.arr.Len() { - t.arr = nil - } + cr := t.allocateBuffer(stop.Len()) + cr.cols[startColIdx] = start + cr.cols[stopColIdx] = stop + cr.cols[windowedValueColIdx] = values + t.appendTags(cr) return true } @@ -1146,7 +1523,9 @@ type booleanWindowTable struct { booleanTable windowEvery int64 arr *cursors.BooleanArray + nextTS int64 idxInArr int + createEmpty bool } func newBooleanWindowTable( @@ -1154,6 +1533,7 @@ func newBooleanWindowTable( cur cursors.BooleanArrayCursor, bounds execute.Bounds, every int64, + createEmpty bool, key flux.GroupKey, cols []flux.ColMeta, tags models.Tags, @@ -1167,6 +1547,11 @@ func newBooleanWindowTable( cur: cur, }, windowEvery: every, + createEmpty: createEmpty, + } + if t.createEmpty { + start := int64(bounds.Start) + t.nextTS = start + (every - start%every) } t.readTags(tags) t.advance() @@ -1174,40 +1559,126 @@ func newBooleanWindowTable( return t } -func (t *booleanWindowTable) advance() bool { +func (t *booleanWindowTable) Do(f func(flux.ColReader) error) error { + return t.do(f, t.advance) +} + +// createNextWindow will read the timestamps from the array +// cursor and construct the values for the next window. +func (t *booleanWindowTable) createNextWindow() (start, stop *array.Int64, ok bool) { + var stopT int64 + if t.createEmpty { + stopT = t.nextTS + t.nextTS += t.windowEvery + } else { + if !t.nextBuffer() { + return nil, nil, false + } + stopT = t.arr.Timestamps[t.idxInArr] + } + + // Regain the window start time from the window end time. + startT := stopT - t.windowEvery + if startT < int64(t.bounds.Start) { + startT = int64(t.bounds.Start) + } + if stopT > int64(t.bounds.Stop) { + stopT = int64(t.bounds.Stop) + } + + // If the start time is after our stop boundary, + // we exit here when create empty is true. + if t.createEmpty && startT >= int64(t.bounds.Stop) { + return nil, nil, false + } + start = arrow.NewInt([]int64{startT}, t.alloc) + stop = arrow.NewInt([]int64{stopT}, t.alloc) + return start, stop, true +} + +// nextAt will retrieve the next value that can be used with +// the given stop timestamp. If no values can be used with the timestamp, +// it will return the default value and false. +func (t *booleanWindowTable) nextAt(ts int64) (v bool, ok bool) { + if !t.nextBuffer() { + return + } else if !t.isInWindow(ts, t.arr.Timestamps[t.idxInArr]) { + return + } + v, ok = t.arr.Values[t.idxInArr], true + t.idxInArr++ + return v, ok +} + +// isInWindow will check if the given time at stop can be used within +// the window stop time for ts. The ts may be a truncated stop time +// because of a restricted boundary while stop will be the true +// stop time returned by storage. +func (t *booleanWindowTable) isInWindow(ts int64, stop int64) bool { + // This method checks if the stop time is a valid stop time for + // that interval. This calculation is different from the calculation + // of the window itself. 
For example, for a 10 second window that + // starts at 20 seconds, we would include points between [20, 30). + // The stop time for this interval would be 30, but because the stop + // time can be truncated, valid stop times range from anywhere between + // (20, 30]. The storage engine will always produce 30 as the end time + // but we may have truncated the stop time because of the boundary + // and this is why we are checking for this range instead of checking + // if the two values are equal. + start := stop - t.windowEvery + return start < ts && ts <= stop +} + +// nextBuffer will ensure the array cursor is filled +// and will return true if there is at least one value +// that can be read from it. +func (t *booleanWindowTable) nextBuffer() bool { + // Discard the current array cursor if we have + // exceeded it. + if t.arr != nil && t.idxInArr >= t.arr.Len() { + t.arr = nil + } + + // Retrieve the next array cursor if needed. if t.arr == nil { - t.arr = t.cur.Next() - if t.arr.Len() == 0 { - t.arr = nil + arr := t.cur.Next() + if arr.Len() == 0 { return false } - t.idxInArr = 0 + t.arr, t.idxInArr = arr, 0 } + return true +} + +// appendValues will scan the timestamps and append values +// that match those timestamps from the buffer. +func (t *booleanWindowTable) appendValues(intervals []int64, appendValue func(v bool), appendNull func()) { + for i := 0; i < len(intervals); i++ { + if v, ok := t.nextAt(intervals[i]); ok { + appendValue(v) + continue + } + appendNull() + } +} + +func (t *booleanWindowTable) advance() bool { + // Create the timestamps for the next window. + start, stop, ok := t.createNextWindow() + if !ok { + return false + } + values := t.mergeValues(stop.Int64Values()) // Retrieve the buffer for the data to avoid allocating // additional slices. If the buffer is still being used // because the references were retained, then we will // allocate a new buffer. 
- columnReader := t.allocateBuffer(1) - // regain the window start time from the window end time - rangeStart := int64(t.bounds.Start) - rangeEnd := int64(t.bounds.Stop) - stop := t.arr.Timestamps[t.idxInArr] - start := stop - t.windowEvery - if start < rangeStart { - start = rangeStart - } - if stop > rangeEnd { - stop = rangeEnd - } - columnReader.cols[startColIdx] = arrow.NewInt([]int64{start}, t.alloc) - columnReader.cols[stopColIdx] = arrow.NewInt([]int64{stop}, t.alloc) - columnReader.cols[windowedValueColIdx] = t.toArrowBuffer(t.arr.Values[t.idxInArr : t.idxInArr+1]) - t.appendTags(columnReader) - t.idxInArr++ - if t.idxInArr == t.arr.Len() { - t.arr = nil - } + cr := t.allocateBuffer(stop.Len()) + cr.cols[startColIdx] = start + cr.cols[stopColIdx] = stop + cr.cols[windowedValueColIdx] = values + t.appendTags(cr) return true } diff --git a/storage/flux/table.gen.go.tmpl b/storage/flux/table.gen.go.tmpl index f4455d7862e..4b3c0c66c4b 100644 --- a/storage/flux/table.gen.go.tmpl +++ b/storage/flux/table.gen.go.tmpl @@ -3,6 +3,7 @@ package storageflux import ( "sync" + "github.com/apache/arrow/go/arrow/array" "github.com/influxdata/flux" "github.com/influxdata/flux/arrow" "github.com/influxdata/flux/execute" @@ -96,7 +97,9 @@ type {{.name}}WindowTable struct { {{.name}}Table windowEvery int64 arr *cursors.{{.Name}}Array + nextTS int64 idxInArr int + createEmpty bool } func new{{.Name}}WindowTable( @@ -104,6 +107,7 @@ func new{{.Name}}WindowTable( cur cursors.{{.Name}}ArrayCursor, bounds execute.Bounds, every int64, + createEmpty bool, key flux.GroupKey, cols []flux.ColMeta, tags models.Tags, @@ -117,6 +121,11 @@ func new{{.Name}}WindowTable( cur: cur, }, windowEvery: every, + createEmpty: createEmpty, + } + if t.createEmpty { + start := int64(bounds.Start) + t.nextTS = start + (every - start % every) } t.readTags(tags) t.advance() @@ -124,40 +133,126 @@ func new{{.Name}}WindowTable( return t } -func (t *{{.name}}WindowTable) advance() bool { +func (t *{{.name}}WindowTable) Do(f func(flux.ColReader) error) error { + return t.do(f, t.advance) +} + +// createNextWindow will read the timestamps from the array +// cursor and construct the values for the next window. +func (t *{{.name}}WindowTable) createNextWindow() (start, stop *array.Int64, ok bool) { + var stopT int64 + if t.createEmpty { + stopT = t.nextTS + t.nextTS += t.windowEvery + } else { + if !t.nextBuffer() { + return nil, nil, false + } + stopT = t.arr.Timestamps[t.idxInArr] + } + + // Regain the window start time from the window end time. + startT := stopT - t.windowEvery + if startT < int64(t.bounds.Start) { + startT = int64(t.bounds.Start) + } + if stopT > int64(t.bounds.Stop) { + stopT = int64(t.bounds.Stop) + } + + // If the start time is after our stop boundary, + // we exit here when create empty is true. + if t.createEmpty && startT >= int64(t.bounds.Stop) { + return nil, nil, false + } + start = arrow.NewInt([]int64{startT}, t.alloc) + stop = arrow.NewInt([]int64{stopT}, t.alloc) + return start, stop, true +} + +// nextAt will retrieve the next value that can be used with +// the given stop timestamp. If no values can be used with the timestamp, +// it will return the default value and false. 
+func (t *{{.name}}WindowTable) nextAt(ts int64) (v {{.Type}}, ok bool) { + if !t.nextBuffer() { + return + } else if !t.isInWindow(ts, t.arr.Timestamps[t.idxInArr]) { + return + } + v, ok = t.arr.Values[t.idxInArr], true + t.idxInArr++ + return v, ok +} + +// isInWindow will check if the given time at stop can be used within +// the window stop time for ts. The ts may be a truncated stop time +// because of a restricted boundary while stop will be the true +// stop time returned by storage. +func (t *{{.name}}WindowTable) isInWindow(ts int64, stop int64) bool { + // This method checks if the stop time is a valid stop time for + // that interval. This calculation is different from the calculation + // of the window itself. For example, for a 10 second window that + // starts at 20 seconds, we would include points between [20, 30). + // The stop time for this interval would be 30, but because the stop + // time can be truncated, valid stop times range from anywhere between + // (20, 30]. The storage engine will always produce 30 as the end time + // but we may have truncated the stop time because of the boundary + // and this is why we are checking for this range instead of checking + // if the two values are equal. + start := stop - t.windowEvery + return start < ts && ts <= stop +} + +// nextBuffer will ensure the array cursor is filled +// and will return true if there is at least one value +// that can be read from it. +func (t *{{.name}}WindowTable) nextBuffer() bool { + // Discard the current array cursor if we have + // exceeded it. + if t.arr != nil && t.idxInArr >= t.arr.Len() { + t.arr = nil + } + + // Retrieve the next array cursor if needed. if t.arr == nil { - t.arr = t.cur.Next() - if t.arr.Len() == 0 { - t.arr = nil + arr := t.cur.Next() + if arr.Len() == 0 { return false } - t.idxInArr = 0 + t.arr, t.idxInArr = arr, 0 } + return true +} + +// appendValues will scan the timestamps and append values +// that match those timestamps from the buffer. +func (t *{{.name}}WindowTable) appendValues(intervals []int64, appendValue func(v {{.Type}}), appendNull func()) { + for i := 0; i < len(intervals); i++ { + if v, ok := t.nextAt(intervals[i]); ok { + appendValue(v) + continue + } + appendNull() + } +} + +func (t *{{.name}}WindowTable) advance() bool { + // Create the timestamps for the next window. + start, stop, ok := t.createNextWindow() + if !ok { + return false + } + values := t.mergeValues(stop.Int64Values()) // Retrieve the buffer for the data to avoid allocating // additional slices. If the buffer is still being used // because the references were retained, then we will // allocate a new buffer. 
- columnReader := t.allocateBuffer(1) - // regain the window start time from the window end time - rangeStart := int64(t.bounds.Start) - rangeEnd := int64(t.bounds.Stop) - stop := t.arr.Timestamps[t.idxInArr] - start := stop - t.windowEvery - if start < rangeStart { - start = rangeStart - } - if stop > rangeEnd { - stop = rangeEnd - } - columnReader.cols[startColIdx] = arrow.NewInt([]int64{start}, t.alloc) - columnReader.cols[stopColIdx] = arrow.NewInt([]int64{stop}, t.alloc) - columnReader.cols[windowedValueColIdx] = t.toArrowBuffer(t.arr.Values[t.idxInArr : t.idxInArr+1]) - t.appendTags(columnReader) - t.idxInArr++ - if t.idxInArr == t.arr.Len() { - t.arr = nil - } + cr := t.allocateBuffer(stop.Len()) + cr.cols[startColIdx] = start + cr.cols[stopColIdx] = stop + cr.cols[windowedValueColIdx] = values + t.appendTags(cr) return true } diff --git a/storage/flux/table.go b/storage/flux/table.go index 67950924e8c..063700f7558 100644 --- a/storage/flux/table.go +++ b/storage/flux/table.go @@ -224,27 +224,57 @@ func (t *floatTable) toArrowBuffer(vs []float64) *array.Float64 { func (t *floatGroupTable) toArrowBuffer(vs []float64) *array.Float64 { return arrow.NewFloat(vs, t.alloc) } +func (t *floatWindowTable) mergeValues(intervals []int64) *array.Float64 { + b := arrow.NewFloatBuilder(t.alloc) + b.Resize(len(intervals)) + t.appendValues(intervals, b.Append, b.AppendNull) + return b.NewFloat64Array() +} func (t *integerTable) toArrowBuffer(vs []int64) *array.Int64 { return arrow.NewInt(vs, t.alloc) } func (t *integerGroupTable) toArrowBuffer(vs []int64) *array.Int64 { return arrow.NewInt(vs, t.alloc) } +func (t *integerWindowTable) mergeValues(intervals []int64) *array.Int64 { + b := arrow.NewIntBuilder(t.alloc) + b.Resize(len(intervals)) + t.appendValues(intervals, b.Append, b.AppendNull) + return b.NewInt64Array() +} func (t *unsignedTable) toArrowBuffer(vs []uint64) *array.Uint64 { return arrow.NewUint(vs, t.alloc) } func (t *unsignedGroupTable) toArrowBuffer(vs []uint64) *array.Uint64 { return arrow.NewUint(vs, t.alloc) } +func (t *unsignedWindowTable) mergeValues(intervals []int64) *array.Uint64 { + b := arrow.NewUintBuilder(t.alloc) + b.Resize(len(intervals)) + t.appendValues(intervals, b.Append, b.AppendNull) + return b.NewUint64Array() +} func (t *stringTable) toArrowBuffer(vs []string) *array.Binary { return arrow.NewString(vs, t.alloc) } func (t *stringGroupTable) toArrowBuffer(vs []string) *array.Binary { return arrow.NewString(vs, t.alloc) } +func (t *stringWindowTable) mergeValues(intervals []int64) *array.Binary { + b := arrow.NewStringBuilder(t.alloc) + b.Resize(len(intervals)) + t.appendValues(intervals, b.AppendString, b.AppendNull) + return b.NewBinaryArray() +} func (t *booleanTable) toArrowBuffer(vs []bool) *array.Boolean { return arrow.NewBool(vs, t.alloc) } func (t *booleanGroupTable) toArrowBuffer(vs []bool) *array.Boolean { return arrow.NewBool(vs, t.alloc) } +func (t *booleanWindowTable) mergeValues(intervals []int64) *array.Boolean { + b := arrow.NewBoolBuilder(t.alloc) + b.Resize(len(intervals)) + t.appendValues(intervals, b.Append, b.AppendNull) + return b.NewBooleanArray() +} diff --git a/storage/flux/table_test.go b/storage/flux/table_test.go index 56a34f0ffec..6af995a09d2 100644 --- a/storage/flux/table_test.go +++ b/storage/flux/table_test.go @@ -7,13 +7,19 @@ import ( "math/rand" "os" "path/filepath" + "sort" "testing" "time" + "github.com/google/go-cmp/cmp" "github.com/influxdata/flux" "github.com/influxdata/flux/execute" + 
"github.com/influxdata/flux/execute/executetest" "github.com/influxdata/flux/memory" + "github.com/influxdata/flux/plan" + "github.com/influxdata/flux/stdlib/universe" "github.com/influxdata/flux/values" + "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/cmd/influxd/generate" "github.com/influxdata/influxdb/v2/mock" "github.com/influxdata/influxdb/v2/models" @@ -25,104 +31,498 @@ import ( "go.uber.org/zap/zaptest" ) -func BenchmarkReadFilter(b *testing.B) { +type SetupFunc func(org, bucket influxdb.ID) (gen.SeriesGenerator, gen.TimeRange) + +type StorageReader struct { + Org influxdb.ID + Bucket influxdb.ID + Bounds execute.Bounds + Close func() + query.StorageReader +} + +func NewStorageReader(tb testing.TB, setupFn SetupFunc) *StorageReader { + logger := zaptest.NewLogger(tb) + rootDir, err := ioutil.TempDir("", "storage-flux-test") + if err != nil { + tb.Fatal(err) + } + close := func() { _ = os.RemoveAll(rootDir) } + idgen := mock.NewMockIDGenerator() - tagsSpec := &gen.TagsSpec{ - Tags: []*gen.TagValuesSpec{ - { - TagKey: "t0", - Values: func() gen.CountableSequence { - return gen.NewCounterByteSequence("a-%d", 0, 5) + org, bucket := idgen.ID(), idgen.ID() + sg, tr := setupFn(org, bucket) + + generator := generate.Generator{} + if _, err := generator.Run(context.Background(), rootDir, sg); err != nil { + tb.Fatal(err) + } + + enginePath := filepath.Join(rootDir, "engine") + engine := storage.NewEngine(enginePath, storage.NewConfig()) + engine.WithLogger(logger) + + if err := engine.Open(context.Background()); err != nil { + tb.Fatal(err) + } + reader := storageflux.NewReader(readservice.NewStore(engine)) + return &StorageReader{ + Org: org, + Bucket: bucket, + Bounds: execute.Bounds{ + Start: values.ConvertTime(tr.Start), + Stop: values.ConvertTime(tr.End), + }, + Close: close, + StorageReader: reader, + } +} + +func (r *StorageReader) ReadWindowAggregate(ctx context.Context, spec query.ReadWindowAggregateSpec, alloc *memory.Allocator) (query.TableIterator, error) { + wr := r.StorageReader.(query.WindowAggregateReader) + return wr.ReadWindowAggregate(ctx, spec, alloc) +} + +func TestStorageReader_ReadWindowAggregate(t *testing.T) { + reader := NewStorageReader(t, func(org, bucket influxdb.ID) (gen.SeriesGenerator, gen.TimeRange) { + tagsSpec := &gen.TagsSpec{ + Tags: []*gen.TagValuesSpec{ + { + TagKey: "t0", + Values: func() gen.CountableSequence { + return gen.NewCounterByteSequence("a-%s", 0, 3) + }, }, }, - { - TagKey: "t1", - Values: func() gen.CountableSequence { - return gen.NewCounterByteSequence("b-%d", 0, 1000) + } + spec := gen.Spec{ + OrgID: org, + BucketID: bucket, + Measurements: []gen.MeasurementSpec{ + { + Name: "m0", + TagsSpec: tagsSpec, + FieldValuesSpec: &gen.FieldValuesSpec{ + Name: "f0", + TimeSequenceSpec: gen.TimeSequenceSpec{ + Count: math.MaxInt32, + Delta: 10 * time.Second, + }, + DataType: models.Float, + Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { + return gen.NewTimeFloatValuesSequence( + spec.Count, + gen.NewTimestampSequenceFromSpec(spec), + gen.NewFloatArrayValuesSequence([]float64{1.0, 2.0, 3.0, 4.0}), + ) + }, + }, }, }, + } + tr := gen.TimeRange{ + Start: mustParseTime("2019-11-25T00:00:00Z"), + End: mustParseTime("2019-11-25T00:02:00Z"), + } + return gen.NewSeriesGeneratorFromSpec(&spec, tr), tr + }) + defer reader.Close() + + mem := &memory.Allocator{} + ti, err := reader.ReadWindowAggregate(context.Background(), query.ReadWindowAggregateSpec{ + ReadFilterSpec: query.ReadFilterSpec{ + OrganizationID: 
reader.Org, + BucketID: reader.Bucket, + Bounds: reader.Bounds, + }, + WindowEvery: int64(30 * time.Second), + Aggregates: []plan.ProcedureKind{ + universe.CountKind, }, + }, mem) + if err != nil { + t.Fatal(err) } - spec := gen.Spec{ - OrgID: idgen.ID(), - BucketID: idgen.ID(), - Measurements: []gen.MeasurementSpec{ - { - Name: "m0", - TagsSpec: tagsSpec, - FieldValuesSpec: &gen.FieldValuesSpec{ - Name: "f0", - TimeSequenceSpec: gen.TimeSequenceSpec{ - Count: math.MaxInt32, - Delta: time.Minute, - }, - DataType: models.Float, - Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { - r := rand.New(rand.NewSource(10)) - return gen.NewTimeFloatValuesSequence( - spec.Count, - gen.NewTimestampSequenceFromSpec(spec), - gen.NewFloatRandomValuesSequence(0, 90, r), - ) + + windowEvery := values.ConvertDuration(30 * time.Second) + makeWindowTable := func(t0 string, start execute.Time, value interface{}) *executetest.Table { + valueType := flux.ColumnType(values.New(value).Type()) + stop := start.Add(windowEvery) + return &executetest.Table{ + KeyCols: []string{"_start", "_stop", "_field", "_measurement", "t0"}, + ColMeta: []flux.ColMeta{ + {Label: "_start", Type: flux.TTime}, + {Label: "_stop", Type: flux.TTime}, + {Label: "_value", Type: valueType}, + {Label: "_field", Type: flux.TString}, + {Label: "_measurement", Type: flux.TString}, + {Label: "t0", Type: flux.TString}, + }, + Data: [][]interface{}{ + {start, stop, value, "f0", "m0", t0}, + }, + } + } + + var want []*executetest.Table + for _, t0 := range []string{"a-0", "a-1", "a-2"} { + for i := 0; i < 4; i++ { + offset := windowEvery.Mul(i) + start := reader.Bounds.Start.Add(offset) + want = append(want, makeWindowTable(t0, start, int64(3))) + } + } + executetest.NormalizeTables(want) + sort.Sort(executetest.SortedTables(want)) + + var got []*executetest.Table + if err := ti.Do(func(table flux.Table) error { + t, err := executetest.ConvertTable(table) + if err != nil { + return err + } + got = append(got, t) + return nil + }); err != nil { + t.Fatal(err) + } + executetest.NormalizeTables(got) + sort.Sort(executetest.SortedTables(got)) + + // compare these two + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("unexpected results -want/+got:\n%s", diff) + } +} + +func TestStorageReader_ReadWindowAggregate_CreateEmpty(t *testing.T) { + reader := NewStorageReader(t, func(org, bucket influxdb.ID) (gen.SeriesGenerator, gen.TimeRange) { + tagsSpec := &gen.TagsSpec{ + Tags: []*gen.TagValuesSpec{ + { + TagKey: "t0", + Values: func() gen.CountableSequence { + return gen.NewCounterByteSequence("a-%s", 0, 3) }, }, }, - { - Name: "m0", - TagsSpec: tagsSpec, - FieldValuesSpec: &gen.FieldValuesSpec{ - Name: "f1", - TimeSequenceSpec: gen.TimeSequenceSpec{ - Count: math.MaxInt32, - Delta: time.Minute, - }, - DataType: models.Float, - Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { - r := rand.New(rand.NewSource(11)) - return gen.NewTimeFloatValuesSequence( - spec.Count, - gen.NewTimestampSequenceFromSpec(spec), - gen.NewFloatRandomValuesSequence(0, 180, r), - ) + } + spec := gen.Spec{ + OrgID: org, + BucketID: bucket, + Measurements: []gen.MeasurementSpec{ + { + Name: "m0", + TagsSpec: tagsSpec, + FieldValuesSpec: &gen.FieldValuesSpec{ + Name: "f0", + TimeSequenceSpec: gen.TimeSequenceSpec{ + Count: math.MaxInt32, + Delta: 15 * time.Second, + }, + DataType: models.Float, + Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { + return gen.NewTimeFloatValuesSequence( + spec.Count, + 
gen.NewTimestampSequenceFromSpec(spec), + gen.NewFloatArrayValuesSequence([]float64{1.0, 2.0, 3.0, 4.0}), + ) + }, }, }, }, - { - Name: "m0", - TagsSpec: tagsSpec, - FieldValuesSpec: &gen.FieldValuesSpec{ - Name: "f1", - TimeSequenceSpec: gen.TimeSequenceSpec{ - Count: math.MaxInt32, - Delta: time.Minute, + } + tr := gen.TimeRange{ + Start: mustParseTime("2019-11-25T00:00:00Z"), + End: mustParseTime("2019-11-25T00:02:00Z"), + } + return gen.NewSeriesGeneratorFromSpec(&spec, tr), tr + }) + defer reader.Close() + + mem := &memory.Allocator{} + ti, err := reader.ReadWindowAggregate(context.Background(), query.ReadWindowAggregateSpec{ + ReadFilterSpec: query.ReadFilterSpec{ + OrganizationID: reader.Org, + BucketID: reader.Bucket, + Bounds: reader.Bounds, + }, + WindowEvery: int64(10 * time.Second), + Aggregates: []plan.ProcedureKind{ + universe.CountKind, + }, + CreateEmpty: true, + }, mem) + if err != nil { + t.Fatal(err) + } + + windowEvery := values.ConvertDuration(10 * time.Second) + makeWindowTable := func(t0 string, start execute.Time, value interface{}, isNull bool) *executetest.Table { + valueType := flux.ColumnType(values.New(value).Type()) + stop := start.Add(windowEvery) + if isNull { + value = nil + } + return &executetest.Table{ + KeyCols: []string{"_start", "_stop", "_field", "_measurement", "t0"}, + ColMeta: []flux.ColMeta{ + {Label: "_start", Type: flux.TTime}, + {Label: "_stop", Type: flux.TTime}, + {Label: "_value", Type: valueType}, + {Label: "_field", Type: flux.TString}, + {Label: "_measurement", Type: flux.TString}, + {Label: "t0", Type: flux.TString}, + }, + Data: [][]interface{}{ + {start, stop, value, "f0", "m0", t0}, + }, + } + } + + var want []*executetest.Table + for _, t0 := range []string{"a-0", "a-1", "a-2"} { + for i := 0; i < 12; i++ { + offset := windowEvery.Mul(i) + start := reader.Bounds.Start.Add(offset) + isNull := (i+1)%3 == 0 + want = append(want, makeWindowTable(t0, start, int64(1), isNull)) + } + } + executetest.NormalizeTables(want) + sort.Sort(executetest.SortedTables(want)) + + var got []*executetest.Table + if err := ti.Do(func(table flux.Table) error { + t, err := executetest.ConvertTable(table) + if err != nil { + return err + } + got = append(got, t) + return nil + }); err != nil { + t.Fatal(err) + } + executetest.NormalizeTables(got) + sort.Sort(executetest.SortedTables(got)) + + // compare these two + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("unexpected results -want/+got:\n%s", diff) + } +} + +func TestStorageReader_ReadWindowAggregate_TruncatedBounds(t *testing.T) { + reader := NewStorageReader(t, func(org, bucket influxdb.ID) (gen.SeriesGenerator, gen.TimeRange) { + tagsSpec := &gen.TagsSpec{ + Tags: []*gen.TagValuesSpec{ + { + TagKey: "t0", + Values: func() gen.CountableSequence { + return gen.NewCounterByteSequence("a-%s", 0, 3) }, - DataType: models.Float, - Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { - r := rand.New(rand.NewSource(12)) - return gen.NewTimeFloatValuesSequence( - spec.Count, - gen.NewTimestampSequenceFromSpec(spec), - gen.NewFloatRandomValuesSequence(10, 10000, r), - ) + }, + }, + } + spec := gen.Spec{ + OrgID: org, + BucketID: bucket, + Measurements: []gen.MeasurementSpec{ + { + Name: "m0", + TagsSpec: tagsSpec, + FieldValuesSpec: &gen.FieldValuesSpec{ + Name: "f0", + TimeSequenceSpec: gen.TimeSequenceSpec{ + Count: math.MaxInt32, + Delta: 5 * time.Second, + }, + DataType: models.Float, + Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { + return 
gen.NewTimeFloatValuesSequence( + spec.Count, + gen.NewTimestampSequenceFromSpec(spec), + gen.NewFloatArrayValuesSequence([]float64{1.0, 2.0, 3.0, 4.0}), + ) + }, }, }, }, + } + tr := gen.TimeRange{ + Start: mustParseTime("2019-11-25T00:00:00Z"), + End: mustParseTime("2019-11-25T00:01:00Z"), + } + return gen.NewSeriesGeneratorFromSpec(&spec, tr), tr + }) + defer reader.Close() + + mem := &memory.Allocator{} + ti, err := reader.ReadWindowAggregate(context.Background(), query.ReadWindowAggregateSpec{ + ReadFilterSpec: query.ReadFilterSpec{ + OrganizationID: reader.Org, + BucketID: reader.Bucket, + Bounds: execute.Bounds{ + Start: values.ConvertTime(mustParseTime("2019-11-25T00:00:05Z")), + Stop: values.ConvertTime(mustParseTime("2019-11-25T00:00:25Z")), + }, + }, + WindowEvery: int64(10 * time.Second), + Aggregates: []plan.ProcedureKind{ + universe.CountKind, }, + }, mem) + if err != nil { + t.Fatal(err) } - tr := gen.TimeRange{ - Start: mustParseTime("2019-11-25T00:00:00Z"), - End: mustParseTime("2019-11-26T00:00:00Z"), + + makeWindowTable := func(t0 string, start, stop time.Duration, value interface{}) *executetest.Table { + startT := reader.Bounds.Start.Add(values.ConvertDuration(start)) + stopT := reader.Bounds.Start.Add(values.ConvertDuration(stop)) + valueType := flux.ColumnType(values.New(value).Type()) + return &executetest.Table{ + KeyCols: []string{"_start", "_stop", "_field", "_measurement", "t0"}, + ColMeta: []flux.ColMeta{ + {Label: "_start", Type: flux.TTime}, + {Label: "_stop", Type: flux.TTime}, + {Label: "_value", Type: valueType}, + {Label: "_field", Type: flux.TString}, + {Label: "_measurement", Type: flux.TString}, + {Label: "t0", Type: flux.TString}, + }, + Data: [][]interface{}{ + {startT, stopT, value, "f0", "m0", t0}, + }, + } + } + + var want []*executetest.Table + for _, t0 := range []string{"a-0", "a-1", "a-2"} { + want = append(want, + makeWindowTable(t0, 5*time.Second, 10*time.Second, int64(1)), + makeWindowTable(t0, 10*time.Second, 20*time.Second, int64(2)), + makeWindowTable(t0, 20*time.Second, 25*time.Second, int64(1)), + ) } - sg := gen.NewSeriesGeneratorFromSpec(&spec, tr) - benchmarkRead(b, sg, func(r query.StorageReader) error { + executetest.NormalizeTables(want) + sort.Sort(executetest.SortedTables(want)) + + var got []*executetest.Table + if err := ti.Do(func(table flux.Table) error { + t, err := executetest.ConvertTable(table) + if err != nil { + return err + } + got = append(got, t) + return nil + }); err != nil { + t.Fatal(err) + } + executetest.NormalizeTables(got) + sort.Sort(executetest.SortedTables(got)) + + // compare these two + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("unexpected results -want/+got:\n%s", diff) + } +} + +func BenchmarkReadFilter(b *testing.B) { + setupFn := func(org, bucket influxdb.ID) (gen.SeriesGenerator, gen.TimeRange) { + tagsSpec := &gen.TagsSpec{ + Tags: []*gen.TagValuesSpec{ + { + TagKey: "t0", + Values: func() gen.CountableSequence { + return gen.NewCounterByteSequence("a-%s", 0, 5) + }, + }, + { + TagKey: "t1", + Values: func() gen.CountableSequence { + return gen.NewCounterByteSequence("b-%s", 0, 1000) + }, + }, + }, + } + spec := gen.Spec{ + OrgID: org, + BucketID: bucket, + Measurements: []gen.MeasurementSpec{ + { + Name: "m0", + TagsSpec: tagsSpec, + FieldValuesSpec: &gen.FieldValuesSpec{ + Name: "f0", + TimeSequenceSpec: gen.TimeSequenceSpec{ + Count: math.MaxInt32, + Delta: time.Minute, + }, + DataType: models.Float, + Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { + r := 
rand.New(rand.NewSource(10)) + return gen.NewTimeFloatValuesSequence( + spec.Count, + gen.NewTimestampSequenceFromSpec(spec), + gen.NewFloatRandomValuesSequence(0, 90, r), + ) + }, + }, + }, + { + Name: "m0", + TagsSpec: tagsSpec, + FieldValuesSpec: &gen.FieldValuesSpec{ + Name: "f1", + TimeSequenceSpec: gen.TimeSequenceSpec{ + Count: math.MaxInt32, + Delta: time.Minute, + }, + DataType: models.Float, + Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { + r := rand.New(rand.NewSource(11)) + return gen.NewTimeFloatValuesSequence( + spec.Count, + gen.NewTimestampSequenceFromSpec(spec), + gen.NewFloatRandomValuesSequence(0, 180, r), + ) + }, + }, + }, + { + Name: "m0", + TagsSpec: tagsSpec, + FieldValuesSpec: &gen.FieldValuesSpec{ + Name: "f1", + TimeSequenceSpec: gen.TimeSequenceSpec{ + Count: math.MaxInt32, + Delta: time.Minute, + }, + DataType: models.Float, + Values: func(spec gen.TimeSequenceSpec) gen.TimeValuesSequence { + r := rand.New(rand.NewSource(12)) + return gen.NewTimeFloatValuesSequence( + spec.Count, + gen.NewTimestampSequenceFromSpec(spec), + gen.NewFloatRandomValuesSequence(10, 10000, r), + ) + }, + }, + }, + }, + } + tr := gen.TimeRange{ + Start: mustParseTime("2019-11-25T00:00:00Z"), + End: mustParseTime("2019-11-26T00:00:00Z"), + } + return gen.NewSeriesGeneratorFromSpec(&spec, tr), tr + } + benchmarkRead(b, setupFn, func(r *StorageReader) error { mem := &memory.Allocator{} tables, err := r.ReadFilter(context.Background(), query.ReadFilterSpec{ - OrganizationID: spec.OrgID, - BucketID: spec.BucketID, - Bounds: execute.Bounds{ - Start: values.ConvertTime(tr.Start), - Stop: values.ConvertTime(tr.End), - }, + OrganizationID: r.Org, + BucketID: r.Bucket, + Bounds: r.Bounds, }, mem) if err != nil { return err @@ -134,27 +534,9 @@ func BenchmarkReadFilter(b *testing.B) { }) } -func benchmarkRead(b *testing.B, sg gen.SeriesGenerator, f func(r query.StorageReader) error) { - logger := zaptest.NewLogger(b) - rootDir, err := ioutil.TempDir("", "storage-reads-test") - if err != nil { - b.Fatal(err) - } - defer func() { _ = os.RemoveAll(rootDir) }() - - generator := generate.Generator{} - if _, err := generator.Run(context.Background(), rootDir, sg); err != nil { - b.Fatal(err) - } - - enginePath := filepath.Join(rootDir, "engine") - engine := storage.NewEngine(enginePath, storage.NewConfig()) - engine.WithLogger(logger) - - if err := engine.Open(context.Background()); err != nil { - b.Fatal(err) - } - reader := storageflux.NewReader(readservice.NewStore(engine)) +func benchmarkRead(b *testing.B, setupFn SetupFunc, f func(r *StorageReader) error) { + reader := NewStorageReader(b, setupFn) + defer reader.Close() b.ResetTimer() b.ReportAllocs() diff --git a/storage/flux/window.go b/storage/flux/window.go new file mode 100644 index 00000000000..c8994d68770 --- /dev/null +++ b/storage/flux/window.go @@ -0,0 +1,183 @@ +package storageflux + +import ( + "context" + "fmt" + "sync/atomic" + + "github.com/apache/arrow/go/arrow/array" + "github.com/influxdata/flux" + "github.com/influxdata/flux/arrow" + "github.com/influxdata/flux/execute" + "github.com/influxdata/flux/values" + "github.com/influxdata/influxdb/v2" +) + +// splitWindows will split a windowTable by creating a new table from each +// row and modifying the group key to use the start and stop values from +// that row. 
+func splitWindows(ctx context.Context, in flux.Table, f func(t flux.Table) error) error { + wts := &windowTableSplitter{ + ctx: ctx, + in: in, + } + return wts.Do(f) +} + +type windowTableSplitter struct { + ctx context.Context + in flux.Table +} + +func (w *windowTableSplitter) Do(f func(flux.Table) error) error { + defer w.in.Done() + + startIdx, err := w.getTimeColumnIndex(execute.DefaultStartColLabel) + if err != nil { + return err + } + + stopIdx, err := w.getTimeColumnIndex(execute.DefaultStopColLabel) + if err != nil { + return err + } + + return w.in.Do(func(cr flux.ColReader) error { + // Retrieve the start and stop columns for splitting + // the windows. + start := cr.Times(startIdx) + stop := cr.Times(stopIdx) + + // Iterate through each time to produce a table + // using the start and stop values. + arrs := make([]array.Interface, len(cr.Cols())) + for j := range cr.Cols() { + arrs[j] = getColumnValues(cr, j) + } + + for i, n := 0, cr.Len(); i < n; i++ { + startT, stopT := start.Value(i), stop.Value(i) + + // Rewrite the group key using the new time. + key := groupKeyForWindow(cr.Key(), startT, stopT) + + // Produce a slice for each column into a new + // table buffer. + buffer := arrow.TableBuffer{ + GroupKey: key, + Columns: cr.Cols(), + Values: make([]array.Interface, len(cr.Cols())), + } + for j, arr := range arrs { + buffer.Values[j] = arrow.Slice(arr, int64(i), int64(i+1)) + } + + // Wrap these into a single table and execute. + done := make(chan struct{}) + table := &windowTableRow{ + buffer: buffer, + done: done, + } + if err := f(table); err != nil { + return err + } + + select { + case <-done: + case <-w.ctx.Done(): + return w.ctx.Err() + } + } + return nil + }) +} + +func (w *windowTableSplitter) getTimeColumnIndex(label string) (int, error) { + j := execute.ColIdx(label, w.in.Cols()) + if j < 0 { + return -1, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("missing %q column from window splitter", label), + } + } else if c := w.in.Cols()[j]; c.Type != flux.TTime { + return -1, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("%q column must be of type time", label), + } + } + return j, nil +} + +type windowTableRow struct { + used int32 + buffer arrow.TableBuffer + done chan struct{} +} + +func (w *windowTableRow) Key() flux.GroupKey { + return w.buffer.GroupKey +} + +func (w *windowTableRow) Cols() []flux.ColMeta { + return w.buffer.Columns +} + +func (w *windowTableRow) Do(f func(flux.ColReader) error) error { + if !atomic.CompareAndSwapInt32(&w.used, 0, 1) { + return &influxdb.Error{ + Code: influxdb.EInternal, + Msg: "table already read", + } + } + defer close(w.done) + + err := f(&w.buffer) + w.buffer.Release() + return err +} + +func (w *windowTableRow) Done() { + if atomic.CompareAndSwapInt32(&w.used, 0, 1) { + w.buffer.Release() + close(w.done) + } +} + +func (w *windowTableRow) Empty() bool { + return false +} + +func groupKeyForWindow(key flux.GroupKey, start, stop int64) flux.GroupKey { + cols := key.Cols() + vs := make([]values.Value, len(cols)) + for j, c := range cols { + if c.Label == execute.DefaultStartColLabel { + vs[j] = values.NewTime(values.Time(start)) + } else if c.Label == execute.DefaultStopColLabel { + vs[j] = values.NewTime(values.Time(stop)) + } else { + vs[j] = key.Value(j) + } + } + return execute.NewGroupKey(cols, vs) +} + +// getColumnValues returns the array from the column reader as an array.Interface. 
+func getColumnValues(cr flux.ColReader, j int) array.Interface { + switch typ := cr.Cols()[j].Type; typ { + case flux.TInt: + return cr.Ints(j) + case flux.TUInt: + return cr.UInts(j) + case flux.TFloat: + return cr.Floats(j) + case flux.TString: + return cr.Strings(j) + case flux.TBool: + return cr.Bools(j) + case flux.TTime: + return cr.Times(j) + default: + panic(fmt.Errorf("unimplemented column type: %s", typ)) + } +}
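The doc comment on splitWindows in this diff describes the core trick: every row of a windowed table becomes its own one-row table whose group key is rewritten to that row's _start/_stop values. As a minimal, self-contained sketch of just the key-rewrite step — assuming the flux APIs exactly as they appear in the diff (execute.NewGroupKey, values.NewTime, key.Cols()/key.Value()); rewriteWindowKey here is a hypothetical standalone stand-in for the diff's unexported groupKeyForWindow helper:

```go
package main

import (
	"fmt"

	"github.com/influxdata/flux"
	"github.com/influxdata/flux/execute"
	"github.com/influxdata/flux/values"
)

// rewriteWindowKey copies the input key, substituting only the _start and
// _stop columns with the bounds of a single window row (nanosecond timestamps).
func rewriteWindowKey(key flux.GroupKey, start, stop int64) flux.GroupKey {
	cols := key.Cols()
	vs := make([]values.Value, len(cols))
	for j, c := range cols {
		switch c.Label {
		case execute.DefaultStartColLabel:
			vs[j] = values.NewTime(values.Time(start))
		case execute.DefaultStopColLabel:
			vs[j] = values.NewTime(values.Time(stop))
		default:
			vs[j] = key.Value(j) // every other column is carried over unchanged
		}
	}
	return execute.NewGroupKey(cols, vs)
}

func main() {
	cols := []flux.ColMeta{
		{Label: execute.DefaultStartColLabel, Type: flux.TTime},
		{Label: execute.DefaultStopColLabel, Type: flux.TTime},
		{Label: "t0", Type: flux.TString},
	}
	// A key spanning a full minute, as the storage engine would emit it.
	key := execute.NewGroupKey(cols, []values.Value{
		values.NewTime(values.Time(0)),
		values.NewTime(values.Time(60_000_000_000)),
		values.NewString("a-0"),
	})
	// Narrow it to one 10-second window, [5s, 15s).
	fmt.Println(rewriteWindowKey(key, 5_000_000_000, 15_000_000_000))
}
```

Carrying every other column over unchanged is what keeps the split one-row tables grouped under the original series tags while only the window bounds differ.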
tabby
https://github.com/TabbyML/tabby
19bade6e45fc6c240659bc0d2a4fb0854cc7ce9f
Meng Zhang
2024-10-24 08:27:04
chore(ui): tune thread-feeds component (#3308)
* chore(ui): enhance thread-feeds component with dynamic class names and add margin to ThreadFeeds in MainPanel * refactor(ui): replace IconMessagesSquare with IconFiles in thread-feeds component * [autofix.ci] apply automated fixes ---------
chore(ui): tune thread-feeds component (#3308) * chore(ui): enhance thread-feeds component with dynamic class names and add margin to ThreadFeeds in MainPanel * refactor(ui): replace IconMessagesSquare with IconFiles in thread-feeds component * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
diff --git a/ee/tabby-ui/app/(home)/components/thread-feeds.tsx b/ee/tabby-ui/app/(home)/components/thread-feeds.tsx index 7a8efa5b6208..6f90375cf8cd 100644 --- a/ee/tabby-ui/app/(home)/components/thread-feeds.tsx +++ b/ee/tabby-ui/app/(home)/components/thread-feeds.tsx @@ -10,8 +10,8 @@ import { graphql } from '@/lib/gql/generates' import { ContextSource, ListThreadsQuery } from '@/lib/gql/generates/graphql' import { Member, useAllMembers } from '@/lib/hooks/use-all-members' import { contextInfoQuery, listThreadMessages } from '@/lib/tabby/query' -import { getTitleFromMessages } from '@/lib/utils' -import { IconMessagesSquare, IconSpinner } from '@/components/ui/icons' +import { cn, getTitleFromMessages } from '@/lib/utils' +import { IconFiles, IconSpinner } from '@/components/ui/icons' import { Separator } from '@/components/ui/separator' import { Skeleton } from '@/components/ui/skeleton' import { LoadMoreIndicator } from '@/components/load-more-indicator' @@ -134,7 +134,7 @@ export function ThreadFeeds({ onNavigateToThread }} > - <div className="w-full"> + <div className={cn('w-full', className)}> <AnimationWrapper delay={0.4} style={{ width: '100%' }}> <LoadingWrapper loading={fetching || fetchingUsers} @@ -229,7 +229,7 @@ function ThreadItem({ data }: ThreadItemProps) { > <div className="transform-bg group flex-1 overflow-hidden rounded-lg px-3 py-2 hover:bg-accent"> <div className="mb-1.5 flex items-center gap-2"> - <IconMessagesSquare className="shrink-0" /> + <IconFiles className="shrink-0" /> <LoadingWrapper loading={fetching} fallback={ diff --git a/ee/tabby-ui/app/(home)/page.tsx b/ee/tabby-ui/app/(home)/page.tsx index dd769124b9ba..5121717b4bfe 100644 --- a/ee/tabby-ui/app/(home)/page.tsx +++ b/ee/tabby-ui/app/(home)/page.tsx @@ -138,6 +138,7 @@ function MainPanel() { )} <Stats /> <ThreadFeeds + className="lg:mt-8" onNavigateToThread={() => { if (!scroller.current) return setHomeScrollPosition(scroller.current.scrollTop) diff --git a/ee/tabby-ui/components/ui/icons.tsx b/ee/tabby-ui/components/ui/icons.tsx index f307428339ec..f5cf6ae27260 100644 --- a/ee/tabby-ui/components/ui/icons.tsx +++ b/ee/tabby-ui/components/ui/icons.tsx @@ -11,6 +11,7 @@ import { Bug, ChevronsDownUp, CirclePlay, + Files, FileText, Filter, GitFork, @@ -1619,11 +1620,11 @@ function IconHash({ className, ...props }: React.ComponentProps<typeof Hash>) { return <Hash className={cn('h-4 w-4', className)} {...props} /> } -function IconMessagesSquare({ +function IconFiles({ className, ...props }: React.ComponentProps<typeof MessagesSquare>) { - return <MessagesSquare className={cn('h-4 w-4', className)} {...props} /> + return <Files className={cn('h-4 w-4', className)} {...props} /> } export { @@ -1723,5 +1724,5 @@ export { IconWrapText, IconAlignJustify, IconHash, - IconMessagesSquare + IconFiles }
RSSHub
https://github.com/DIYgod/RSSHub
c2a886f91d1c38ca5c69cc08b229828e74c36d3c
Ethan Shen
2022-02-02 01:07:46
feat(route): add Whoscall (#8478)
* feat(route): add Whoscall * fix: add docs
feat(route): add Whoscall (#8478) * feat(route): add Whoscall * fix: add docs
diff --git a/docs/blog.md b/docs/blog.md index 4b0901963e6f68..631ddf2d73bdc6 100644 --- a/docs/blog.md +++ b/docs/blog.md @@ -90,6 +90,33 @@ pageClass: routes <Route author="CitrusIce" example="/phrack" path="/phrack" /> +## Whoscall + +### 最新文章 + +<Route author="nczitzk" example="/whoscall" path="/whoscall"/> + +</Route> + +### 分類 + +<Route author="nczitzk" example="/whoscall/categories/5-Whoscall 百科" path="/whoscall/categories/:category?" :paramsDesc="['分类,见下表,可在对应分類页 URL 中找到,默认为最新文章']"> + +| News | Whoscall 百科 | 防詐小學堂 | Whoscall 日常 | +| ------ | --------------- | -------------- | --------------- | +| 1-News | 5-Whoscall 百科 | 4 - 防詐小學堂 | 6-Whoscall 日常 | + +</Route> + +### 標籤 + +<Route author="nczitzk" example="/whoscall/tags/whoscall小百科" path="/whoscall/tags/:tag?" :paramsDesc="['標籤,见下表,可在对应標籤页 URL 中找到,默认为最新文章']"> + +| 防疫也防詐 | 防詐專家 | 來電辨識 | whoscall 日常 | +| ---------- | -------- | -------- | ------------- | + +</Route> + ## WordPress <Route author="Lonor" example="/blogs/wordpress/lawrence.code.blog" path="/blogs/wordpress/:domain/:https?" :paramsDesc="['WordPress 博客域名', '默认 https 协议。填写 `http`或`https`']"/> diff --git a/lib/v2/whoscall/index.js b/lib/v2/whoscall/index.js new file mode 100644 index 00000000000000..80a7846be22e19 --- /dev/null +++ b/lib/v2/whoscall/index.js @@ -0,0 +1,55 @@ +const got = require('@/utils/got'); +const cheerio = require('cheerio'); +const { parseDate } = require('@/utils/parse-date'); + +module.exports = async (ctx) => { + const id = ctx.params.id ?? ''; + const what = ctx.params.what ?? ''; + + const rootUrl = 'https://whoscall.com'; + const currentUrl = `${rootUrl}/zh-hant/blog/${id ? `${what}/${id}` : 'articles'}`; + + const response = await got({ + method: 'get', + url: currentUrl, + }); + + const $ = cheerio.load(response.data); + + const list = $('.post-card__title a') + .map((_, item) => { + item = $(item); + + return { + title: item.text(), + link: `${rootUrl}${item.attr('href')}`, + }; + }) + .get(); + + const items = await Promise.all( + list.map((item) => + ctx.cache.tryGet(item.link, async () => { + const detailResponse = await got({ + method: 'get', + url: item.link, + }); + + const content = cheerio.load(detailResponse.data); + + item.description = content('.blog-article__body').html(); + item.pubDate = parseDate(detailResponse.data.match(/"datePublished":"(.*)","dateModified"/)[1]); + + return item; + }) + ) + ); + + ctx.state.data = { + title: $('title') + .text() + .replace(/ - 第\d+頁/, ''), + link: currentUrl, + item: items, + }; +}; diff --git a/lib/v2/whoscall/maintainer.js b/lib/v2/whoscall/maintainer.js new file mode 100644 index 00000000000000..dd4f9e28d470f2 --- /dev/null +++ b/lib/v2/whoscall/maintainer.js @@ -0,0 +1,5 @@ +module.exports = { + '/': ['nczitzk'], + '/tags/:tag?': ['nczitzk'], + '/categories/:category?': ['nczitzk'], +}; diff --git a/lib/v2/whoscall/radar.js b/lib/v2/whoscall/radar.js new file mode 100644 index 00000000000000..e13c42e266c69b --- /dev/null +++ b/lib/v2/whoscall/radar.js @@ -0,0 +1,25 @@ +module.exports = { + 'whoscall.com': { + _name: 'Whoscall', + '.': [ + { + title: '最新文章', + docs: 'https://docs.rsshub.app/blog.html#whoscall-zui-xin-wen-zhang', + source: ['/zh-hant/blog/articles', '/'], + target: '/whoscall', + }, + { + title: '分類', + docs: 'https://docs.rsshub.app/blog.html#whoscall-fen-lei', + source: ['/zh-hant/blog/categories/:category', '/'], + target: '/whoscall/categories/:category?', + }, + { + title: '標籤', + docs: 'https://docs.rsshub.app/blog.html#whoscall-biao-qian', + source: 
['/zh-hant/blog/tags/:tag', '/'], + target: '/whoscall/tags/:tag?', + }, + ], + }, +}; diff --git a/lib/v2/whoscall/router.js b/lib/v2/whoscall/router.js new file mode 100644 index 00000000000000..73d31c4be5affc --- /dev/null +++ b/lib/v2/whoscall/router.js @@ -0,0 +1,3 @@ +module.exports = function (router) { + router.get('/:what?/:id?', require('./index')); +};
tilt
https://github.com/tilt-dev/tilt
f5b6d303e01c4e980f1b174a460f006b912afa4e
dependabot[bot]
2022-12-08 06:43:11
build(deps): bump decode-uri-component from 0.2.0 to 0.2.2 in /web (#5985)
Bumps [decode-uri-component](https://github.com/SamVerschueren/decode-uri-component) from 0.2.0 to 0.2.2. - [Release notes](https://github.com/SamVerschueren/decode-uri-component/releases) - [Commits](https://github.com/SamVerschueren/decode-uri-component/compare/v0.2.0...v0.2.2) ---
build(deps): bump decode-uri-component from 0.2.0 to 0.2.2 in /web (#5985) Bumps [decode-uri-component](https://github.com/SamVerschueren/decode-uri-component) from 0.2.0 to 0.2.2. - [Release notes](https://github.com/SamVerschueren/decode-uri-component/releases) - [Commits](https://github.com/SamVerschueren/decode-uri-component/compare/v0.2.0...v0.2.2) --- updated-dependencies: - dependency-name: decode-uri-component dependency-type: indirect ... Signed-off-by: dependabot[bot] <[email protected]> Signed-off-by: dependabot[bot] <[email protected]> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
diff --git a/web/yarn.lock b/web/yarn.lock index 4e903f0261..6432a0d6fe 100644 --- a/web/yarn.lock +++ b/web/yarn.lock @@ -6034,9 +6034,9 @@ decimal.js@^10.2.1, decimal.js@^10.3.1: integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ== decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" - integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + version "0.2.2" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" + integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== dedent@^0.7.0: version "0.7.0"
angular
https://github.com/angular/angular
182e2c7449a347b50142d1d462578abb59a42e51
Alex Rickabaugh
2019-04-19 23:45:25
feat(ivy): add backwards compatibility config to template type-checking (#29698)
View Engine's implementation of naive template type-checking is less advanced than the current Ivy implementation. As a result, Ivy catches lots of typing bugs which VE does not. As a result, it's necessary to tone down the Ivy template type-checker in the default case. This commit introduces a mechanism for doing that, by passing a config to the template type-checking engine. Through this configuration, particular checks can be loosened or disabled entirely. Testing strategy: TCB tests included. PR Close #29698
feat(ivy): add backwards compatibility config to template type-checking (#29698) View Engine's implementation of naive template type-checking is less advanced than the current Ivy implementation. As a result, Ivy catches lots of typing bugs which VE does not. As a result, it's necessary to tone down the Ivy template type-checker in the default case. This commit introduces a mechanism for doing that, by passing a config to the template type-checking engine. Through this configuration, particular checks can be loosened or disabled entirely. Testing strategy: TCB tests included. PR Close #29698
diff --git a/packages/compiler-cli/src/ngtsc/program.ts b/packages/compiler-cli/src/ngtsc/program.ts index 3ce128cf5e8e6..52e2490b8cfe7 100644 --- a/packages/compiler-cli/src/ngtsc/program.ts +++ b/packages/compiler-cli/src/ngtsc/program.ts @@ -31,7 +31,7 @@ import {FactoryGenerator, FactoryInfo, GeneratedShimsHostWrapper, ShimGenerator, import {ivySwitchTransform} from './switch'; import {IvyCompilation, declarationTransformFactory, ivyTransformFactory} from './transform'; import {aliasTransformFactory} from './transform/src/alias'; -import {TypeCheckContext, TypeCheckProgramHost} from './typecheck'; +import {TypeCheckContext, TypeCheckProgramHost, TypeCheckingConfig} from './typecheck'; import {normalizeSeparators} from './util/src/path'; import {getRootDirs, isDtsPath} from './util/src/typescript'; @@ -191,7 +191,12 @@ export class NgtscProgram implements api.Program { const compilation = this.ensureAnalyzed(); const diagnostics = [...compilation.diagnostics]; if (!!this.options.fullTemplateTypeCheck) { - const ctx = new TypeCheckContext(this.refEmitter !); + const config: TypeCheckingConfig = { + applyTemplateContextGuards: true, + checkTemplateBodies: true, + checkTypeOfBindings: true, + }; + const ctx = new TypeCheckContext(config, this.refEmitter !); compilation.typeCheck(ctx); diagnostics.push(...this.compileTypeCheckProgram(ctx)); } diff --git a/packages/compiler-cli/src/ngtsc/typecheck/src/api.ts b/packages/compiler-cli/src/ngtsc/typecheck/src/api.ts index 65225aefdd487..3f4269b4d1da0 100644 --- a/packages/compiler-cli/src/ngtsc/typecheck/src/api.ts +++ b/packages/compiler-cli/src/ngtsc/typecheck/src/api.ts @@ -54,3 +54,25 @@ export interface TypeCtorMetadata { */ fields: {inputs: string[]; outputs: string[]; queries: string[];}; } + +export interface TypeCheckingConfig { + /** + * Whether to check the left-hand side type of binding operations. + * + * For example, if this is `false` then the expression `[input]="expr"` will have `expr` type- + * checked, but not the assignment of the resulting type to the `input` property of whichever + * directive or component is receiving the binding. If set to `true`, both sides of the assignment + * are checked. + */ + checkTypeOfBindings: boolean; + + /** + * Whether to narrow the types of template contexts. + */ + applyTemplateContextGuards: boolean; + + /** + * Whether to descend into template bodies and check any bindings there. + */ + checkTemplateBodies: boolean; +} diff --git a/packages/compiler-cli/src/ngtsc/typecheck/src/context.ts b/packages/compiler-cli/src/ngtsc/typecheck/src/context.ts index 1942b5440b2de..71035f4809cb9 100644 --- a/packages/compiler-cli/src/ngtsc/typecheck/src/context.ts +++ b/packages/compiler-cli/src/ngtsc/typecheck/src/context.ts @@ -13,7 +13,7 @@ import {NoopImportRewriter, ReferenceEmitter} from '../../imports'; import {ClassDeclaration} from '../../reflection'; import {ImportManager} from '../../translator'; -import {TypeCheckBlockMetadata, TypeCheckableDirectiveMeta, TypeCtorMetadata} from './api'; +import {TypeCheckBlockMetadata, TypeCheckableDirectiveMeta, TypeCheckingConfig, TypeCtorMetadata} from './api'; import {generateTypeCheckBlock} from './type_check_block'; import {generateTypeCtor} from './type_constructor'; @@ -27,7 +27,7 @@ import {generateTypeCtor} from './type_constructor'; * checking code. 
*/ export class TypeCheckContext { - constructor(private refEmitter: ReferenceEmitter) {} + constructor(private config: TypeCheckingConfig, private refEmitter: ReferenceEmitter) {} /** * A `Map` of `ts.SourceFile`s that the context has seen to the operations (additions of methods @@ -141,7 +141,7 @@ export class TypeCheckContext { this.opMap.set(sf, []); } const ops = this.opMap.get(sf) !; - ops.push(new TcbOp(node, tcbMeta)); + ops.push(new TcbOp(node, tcbMeta, this.config)); } } @@ -171,8 +171,8 @@ interface Op { */ class TcbOp implements Op { constructor( - readonly node: ClassDeclaration<ts.ClassDeclaration>, readonly meta: TypeCheckBlockMetadata) { - } + readonly node: ClassDeclaration<ts.ClassDeclaration>, readonly meta: TypeCheckBlockMetadata, + readonly config: TypeCheckingConfig) {} /** * Type check blocks are inserted immediately after the end of the component class. @@ -181,7 +181,7 @@ class TcbOp implements Op { execute(im: ImportManager, sf: ts.SourceFile, refEmitter: ReferenceEmitter, printer: ts.Printer): string { - const tcb = generateTypeCheckBlock(this.node, this.meta, im, refEmitter); + const tcb = generateTypeCheckBlock(this.node, this.meta, this.config, im, refEmitter); return printer.printNode(ts.EmitHint.Unspecified, tcb, sf); } } diff --git a/packages/compiler-cli/src/ngtsc/typecheck/src/expression.ts b/packages/compiler-cli/src/ngtsc/typecheck/src/expression.ts index 32d7946a61356..15e24fe3276cb 100644 --- a/packages/compiler-cli/src/ngtsc/typecheck/src/expression.ts +++ b/packages/compiler-cli/src/ngtsc/typecheck/src/expression.ts @@ -9,6 +9,7 @@ import {AST, ASTWithSource, Binary, Conditional, Interpolation, KeyedRead, LiteralArray, LiteralMap, LiteralPrimitive, MethodCall, NonNullAssert, PropertyRead} from '@angular/compiler'; import * as ts from 'typescript'; +import {TypeCheckingConfig} from './api'; const BINARY_OPS = new Map<string, ts.SyntaxKind>([ ['+', ts.SyntaxKind.PlusToken], ['-', ts.SyntaxKind.MinusToken], @@ -34,7 +35,8 @@ const BINARY_OPS = new Map<string, ts.SyntaxKind>([ * AST. */ export function astToTypescript( - ast: AST, maybeResolve: (ast: AST) => ts.Expression | null): ts.Expression { + ast: AST, maybeResolve: (ast: AST) => ts.Expression | null, + config: TypeCheckingConfig): ts.Expression { const resolved = maybeResolve(ast); if (resolved !== null) { return resolved; @@ -42,17 +44,17 @@ export function astToTypescript( // Branch based on the type of expression being processed. if (ast instanceof ASTWithSource) { // Fall through to the underlying AST. - return astToTypescript(ast.ast, maybeResolve); + return astToTypescript(ast.ast, maybeResolve, config); } else if (ast instanceof PropertyRead) { // This is a normal property read - convert the receiver to an expression and emit the correct // TypeScript expression to read the property. 
- const receiver = astToTypescript(ast.receiver, maybeResolve); + const receiver = astToTypescript(ast.receiver, maybeResolve, config); return ts.createPropertyAccess(receiver, ast.name); } else if (ast instanceof Interpolation) { - return astArrayToExpression(ast.expressions, maybeResolve); + return astArrayToExpression(ast.expressions, maybeResolve, config); } else if (ast instanceof Binary) { - const lhs = astToTypescript(ast.left, maybeResolve); - const rhs = astToTypescript(ast.right, maybeResolve); + const lhs = astToTypescript(ast.left, maybeResolve, config); + const rhs = astToTypescript(ast.right, maybeResolve, config); const op = BINARY_OPS.get(ast.operation); if (op === undefined) { throw new Error(`Unsupported Binary.operation: ${ast.operation}`); @@ -67,30 +69,30 @@ export function astToTypescript( return ts.createLiteral(ast.value); } } else if (ast instanceof MethodCall) { - const receiver = astToTypescript(ast.receiver, maybeResolve); + const receiver = astToTypescript(ast.receiver, maybeResolve, config); const method = ts.createPropertyAccess(receiver, ast.name); - const args = ast.args.map(expr => astToTypescript(expr, maybeResolve)); + const args = ast.args.map(expr => astToTypescript(expr, maybeResolve, config)); return ts.createCall(method, undefined, args); } else if (ast instanceof Conditional) { - const condExpr = astToTypescript(ast.condition, maybeResolve); - const trueExpr = astToTypescript(ast.trueExp, maybeResolve); - const falseExpr = astToTypescript(ast.falseExp, maybeResolve); + const condExpr = astToTypescript(ast.condition, maybeResolve, config); + const trueExpr = astToTypescript(ast.trueExp, maybeResolve, config); + const falseExpr = astToTypescript(ast.falseExp, maybeResolve, config); return ts.createParen(ts.createConditional(condExpr, trueExpr, falseExpr)); } else if (ast instanceof LiteralArray) { - const elements = ast.expressions.map(expr => astToTypescript(expr, maybeResolve)); + const elements = ast.expressions.map(expr => astToTypescript(expr, maybeResolve, config)); return ts.createArrayLiteral(elements); } else if (ast instanceof LiteralMap) { const properties = ast.keys.map(({key}, idx) => { - const value = astToTypescript(ast.values[idx], maybeResolve); + const value = astToTypescript(ast.values[idx], maybeResolve, config); return ts.createPropertyAssignment(ts.createStringLiteral(key), value); }); return ts.createObjectLiteral(properties, true); } else if (ast instanceof KeyedRead) { - const receiver = astToTypescript(ast.obj, maybeResolve); - const key = astToTypescript(ast.key, maybeResolve); + const receiver = astToTypescript(ast.obj, maybeResolve, config); + const key = astToTypescript(ast.key, maybeResolve, config); return ts.createElementAccess(receiver, key); } else if (ast instanceof NonNullAssert) { - const expr = astToTypescript(ast.expression, maybeResolve); + const expr = astToTypescript(ast.expression, maybeResolve, config); return ts.createNonNullExpression(expr); } else { throw new Error(`Unknown node type: ${Object.getPrototypeOf(ast).constructor}`); @@ -102,13 +104,14 @@ export function astToTypescript( * and separating them with commas. */ function astArrayToExpression( - astArray: AST[], maybeResolve: (ast: AST) => ts.Expression | null): ts.Expression { + astArray: AST[], maybeResolve: (ast: AST) => ts.Expression | null, + config: TypeCheckingConfig): ts.Expression { // Reduce the `asts` array into a `ts.Expression`. Multiple expressions are combined into a // `ts.BinaryExpression` with a comma separator. 
First make a copy of the input array, as // it will be modified during the reduction. const asts = astArray.slice(); return asts.reduce( - (lhs, ast) => - ts.createBinary(lhs, ts.SyntaxKind.CommaToken, astToTypescript(ast, maybeResolve)), - astToTypescript(asts.pop() !, maybeResolve)); + (lhs, ast) => ts.createBinary( + lhs, ts.SyntaxKind.CommaToken, astToTypescript(ast, maybeResolve, config)), + astToTypescript(asts.pop() !, maybeResolve, config)); } diff --git a/packages/compiler-cli/src/ngtsc/typecheck/src/type_check_block.ts b/packages/compiler-cli/src/ngtsc/typecheck/src/type_check_block.ts index 3a2e4732e61f1..3b0dd0a2bf4db 100644 --- a/packages/compiler-cli/src/ngtsc/typecheck/src/type_check_block.ts +++ b/packages/compiler-cli/src/ngtsc/typecheck/src/type_check_block.ts @@ -13,7 +13,7 @@ import {NOOP_DEFAULT_IMPORT_RECORDER, Reference, ReferenceEmitter} from '../../i import {ClassDeclaration} from '../../reflection'; import {ImportManager, translateExpression, translateType} from '../../translator'; -import {TypeCheckBlockMetadata, TypeCheckableDirectiveMeta} from './api'; +import {TypeCheckBlockMetadata, TypeCheckableDirectiveMeta, TypeCheckingConfig} from './api'; import {astToTypescript} from './expression'; /** @@ -29,8 +29,10 @@ import {astToTypescript} from './expression'; */ export function generateTypeCheckBlock( node: ClassDeclaration<ts.ClassDeclaration>, meta: TypeCheckBlockMetadata, - importManager: ImportManager, refEmitter: ReferenceEmitter): ts.FunctionDeclaration { - const tcb = new Context(meta.boundTarget, node.getSourceFile(), importManager, refEmitter); + config: TypeCheckingConfig, importManager: ImportManager, + refEmitter: ReferenceEmitter): ts.FunctionDeclaration { + const tcb = + new Context(config, meta.boundTarget, node.getSourceFile(), importManager, refEmitter); const scope = Scope.forNodes(tcb, null, tcb.boundTarget.target.template !); return ts.createFunctionDeclaration( @@ -187,7 +189,7 @@ class TcbTemplateBodyOp extends TcbOp { // The second kind of guard is a template context guard. This guard narrows the template // rendering context variable `ctx`. - if (dir.hasNgTemplateContextGuard) { + if (dir.hasNgTemplateContextGuard && this.tcb.config.applyTemplateContextGuards) { const ctx = this.scope.resolve(this.template); const guardInvoke = tsCallMethod(dirId, 'ngTemplateContextGuard', [dirInstId, ctx]); directiveGuards.push(guardInvoke); @@ -288,7 +290,13 @@ class TcbUnclaimedInputsOp extends TcbOp { continue; } - const expr = tcbExpression(binding.value, this.tcb, this.scope); + let expr = tcbExpression(binding.value, this.tcb, this.scope); + + // If checking the type of bindings is disabled, cast the resulting expression to 'any' before + // the assignment. + if (!this.tcb.config.checkTypeOfBindings) { + expr = tsCastToAny(expr); + } if (binding.type === BindingType.Property) { if (binding.name !== 'style' && binding.name !== 'class') { @@ -331,6 +339,7 @@ class Context { private nextId = 1; constructor( + readonly config: TypeCheckingConfig, readonly boundTarget: BoundTarget<TypeCheckableDirectiveMeta>, private sourceFile: ts.SourceFile, private importManager: ImportManager, private refEmitter: ReferenceEmitter) {} @@ -605,9 +614,11 @@ class Scope { } else if (node instanceof TmplAstTemplate) { // Template children are rendered in a child scope. 
this.appendDirectivesAndInputsOfNode(node); - const ctxIndex = this.opQueue.push(new TcbTemplateContextOp(this.tcb, this)) - 1; - this.templateCtxOpMap.set(node, ctxIndex); - this.opQueue.push(new TcbTemplateBodyOp(this.tcb, this, node)); + if (this.tcb.config.checkTemplateBodies) { + const ctxIndex = this.opQueue.push(new TcbTemplateContextOp(this.tcb, this)) - 1; + this.templateCtxOpMap.set(node, ctxIndex); + this.opQueue.push(new TcbTemplateBodyOp(this.tcb, this, node)); + } } else if (node instanceof TmplAstBoundText) { this.opQueue.push(new TcbTextInterpolationOp(this.tcb, this, node)); } @@ -681,7 +692,7 @@ function tcbExpression(ast: AST, tcb: Context, scope: Scope): ts.Expression { // `astToTypescript` actually does the conversion. A special resolver `tcbResolve` is passed which // interprets specific expression nodes that interact with the `ImplicitReceiver`. These nodes // actually refer to identifiers within the current scope. - return astToTypescript(ast, (ast) => tcbResolve(ast, tcb, scope)); + return astToTypescript(ast, (ast) => tcbResolve(ast, tcb, scope), tcb.config); } /** @@ -695,7 +706,12 @@ function tcbCallTypeCtor( // Construct an array of `ts.PropertyAssignment`s for each input of the directive that has a // matching binding. - const members = bindings.map(b => ts.createPropertyAssignment(b.field, b.expression)); + const members = bindings.map(({field, expression}) => { + if (!tcb.config.checkTypeOfBindings) { + expression = tsCastToAny(expression); + } + return ts.createPropertyAssignment(field, expression); + }); // Call the `ngTypeCtor` method on the directive class, with an object literal argument created // from the matched inputs. @@ -874,3 +890,8 @@ function tcbResolve(ast: AST, tcb: Context, scope: Scope): ts.Expression|null { return null; } } + +function tsCastToAny(expr: ts.Expression): ts.Expression { + return ts.createParen( + ts.createAsExpression(expr, ts.createKeywordTypeNode(ts.SyntaxKind.AnyKeyword))); +} diff --git a/packages/compiler-cli/src/ngtsc/typecheck/test/type_check_block_spec.ts b/packages/compiler-cli/src/ngtsc/typecheck/test/type_check_block_spec.ts index 0b31ee3ea689c..f874654f8a1da 100644 --- a/packages/compiler-cli/src/ngtsc/typecheck/test/type_check_block_spec.ts +++ b/packages/compiler-cli/src/ngtsc/typecheck/test/type_check_block_spec.ts @@ -12,7 +12,7 @@ import * as ts from 'typescript'; import {ImportMode, Reference, ReferenceEmitStrategy, ReferenceEmitter} from '../../imports'; import {ClassDeclaration, isNamedClassDeclaration} from '../../reflection'; import {ImportManager} from '../../translator'; -import {TypeCheckBlockMetadata, TypeCheckableDirectiveMeta} from '../src/api'; +import {TypeCheckBlockMetadata, TypeCheckableDirectiveMeta, TypeCheckingConfig} from '../src/api'; import {generateTypeCheckBlock} from '../src/type_check_block'; @@ -74,6 +74,66 @@ describe('type check blocks', () => { expect(block).not.toContain('.class = '); expect(block).not.toContain('.style = '); }); + + describe('config', () => { + const DIRECTIVES: TestDirective[] = [{ + name: 'Dir', + selector: '[dir]', + exportAs: ['dir'], + inputs: {'dirInput': 'dirInput'}, + hasNgTemplateContextGuard: true, + }]; + const BASE_CONFIG: TypeCheckingConfig = { + applyTemplateContextGuards: true, + checkTemplateBodies: true, + checkTypeOfBindings: true, + }; + + describe('config.applyTemplateContextGuards', () => { + const TEMPLATE = `<div *dir></div>`; + const GUARD_APPLIED = 'if (i0.Dir.ngTemplateContextGuard('; + + it('should apply template context guards when 
enabled', () => { + const block = tcb(TEMPLATE, DIRECTIVES); + expect(block).toContain(GUARD_APPLIED); + }); + it('should not apply template context guards when disabled', () => { + const DISABLED_CONFIG = {...BASE_CONFIG, applyTemplateContextGuards: false}; + const block = tcb(TEMPLATE, DIRECTIVES, DISABLED_CONFIG); + expect(block).not.toContain(GUARD_APPLIED); + }); + }); + + describe('config.checkTemplateBodies', () => { + const TEMPLATE = `<ng-template>{{a}}</ng-template>`; + + it('should descend into template bodies when enabled', () => { + const block = tcb(TEMPLATE, DIRECTIVES); + expect(block).toContain('ctx.a;'); + }); + it('should not descend into template bodies when disabled', () => { + const DISABLED_CONFIG = {...BASE_CONFIG, checkTemplateBodies: false}; + const block = tcb(TEMPLATE, DIRECTIVES, DISABLED_CONFIG); + expect(block).not.toContain('ctx.a;'); + }); + }); + + describe('config.checkTypeOfBindings', () => { + const TEMPLATE = `<div dir [dirInput]="a" [nonDirInput]="a"></div>`; + + it('should check types of bindings when enabled', () => { + const block = tcb(TEMPLATE, DIRECTIVES); + expect(block).toContain('i0.Dir.ngTypeCtor({ dirInput: ctx.a })'); + expect(block).toContain('.nonDirInput = ctx.a;'); + }); + it('should not check types of bindings when disabled', () => { + const DISABLED_CONFIG = {...BASE_CONFIG, checkTypeOfBindings: false}; + const block = tcb(TEMPLATE, DIRECTIVES, DISABLED_CONFIG); + expect(block).toContain('i0.Dir.ngTypeCtor({ dirInput: (ctx.a as any) })'); + expect(block).toContain('.nonDirInput = (ctx.a as any);'); + }); + }); + }); }); it('should generate a circular directive reference correctly', () => { @@ -128,7 +188,8 @@ type TestDirective = Partial<Pick<TypeCheckableDirectiveMeta, Exclude<keyof TypeCheckableDirectiveMeta, 'ref'>>>& {selector: string, name: string}; -function tcb(template: string, directives: TestDirective[] = []): string { +function tcb( + template: string, directives: TestDirective[] = [], config?: TypeCheckingConfig): string { const classes = ['Test', ...directives.map(dir => dir.name)]; const code = classes.map(name => `class ${name} {}`).join('\n'); @@ -161,9 +222,15 @@ function tcb(template: string, directives: TestDirective[] = []): string { fnName: 'Test_TCB', }; + config = config || { + applyTemplateContextGuards: true, + checkTypeOfBindings: true, + checkTemplateBodies: true, + }; + const im = new ImportManager(undefined, 'i'); - const tcb = - generateTypeCheckBlock(clazz, meta, im, new ReferenceEmitter([new FakeReferenceStrategy()])); + const tcb = generateTypeCheckBlock( + clazz, meta, config, im, new ReferenceEmitter([new FakeReferenceStrategy()])); const res = ts.createPrinter().printNode(ts.EmitHint.Unspecified, tcb, sf); return res.replace(/\s+/g, ' '); diff --git a/packages/compiler-cli/src/ngtsc/typecheck/test/type_constructor_spec.ts b/packages/compiler-cli/src/ngtsc/typecheck/test/type_constructor_spec.ts index ab64dded323ed..e8363d3945d4c 100644 --- a/packages/compiler-cli/src/ngtsc/typecheck/test/type_constructor_spec.ts +++ b/packages/compiler-cli/src/ngtsc/typecheck/test/type_constructor_spec.ts @@ -13,6 +13,7 @@ import {LogicalFileSystem} from '../../path'; import {isNamedClassDeclaration} from '../../reflection'; import {getDeclaration, makeProgram} from '../../testing/in_memory_typescript'; import {getRootDirs} from '../../util/src/typescript'; +import {TypeCheckingConfig} from '../src/api'; import {TypeCheckContext} from '../src/context'; import {TypeCheckProgramHost} from '../src/host'; @@ -24,6 
+25,12 @@ const LIB_D_TS = { type NonNullable<T> = T extends null | undefined ? never : T;` }; +const ALL_ENABLED_CONFIG: TypeCheckingConfig = { + applyTemplateContextGuards: true, + checkTemplateBodies: true, + checkTypeOfBindings: true, +}; + describe('ngtsc typechecking', () => { describe('ctors', () => { it('compiles a basic type constructor', () => { @@ -47,7 +54,7 @@ TestClass.ngTypeCtor({value: 'test'}); new AbsoluteModuleStrategy(program, checker, options, host), new LogicalProjectStrategy(checker, logicalFs), ]); - const ctx = new TypeCheckContext(emitter); + const ctx = new TypeCheckContext(ALL_ENABLED_CONFIG, emitter); const TestClass = getDeclaration(program, 'main.ts', 'TestClass', isNamedClassDeclaration); ctx.addTypeCtor(program.getSourceFile('main.ts') !, TestClass, { fnName: 'ngTypeCtor',
ant-design
https://github.com/ant-design/ant-design
39e37bf1b41102354e23ebeb072748429aef1f5e
偏右
2020-10-30 12:57:37
fix: Input[allowClear] className (#27462)
* fix: Input[allowClear] className missing
fix: Input[allowClear] className (#27462) * fix: Input[allowClear] className missing close #27444 * fix snapshot
diff --git a/components/form/__tests__/__snapshots__/demo.test.js.snap b/components/form/__tests__/__snapshots__/demo.test.js.snap index a6ec07b9f409..e2570853b0b1 100644 --- a/components/form/__tests__/__snapshots__/demo.test.js.snap +++ b/components/form/__tests__/__snapshots__/demo.test.js.snap @@ -7623,7 +7623,7 @@ exports[`renders ./components/form/demo/validate-static.md correctly 1`] = ` class="ant-form-item-control-input-content" > <span - class="ant-input-affix-wrapper" + class="ant-input-affix-wrapper ant-input-password" > <input action="click" diff --git a/components/input/ClearableLabeledInput.tsx b/components/input/ClearableLabeledInput.tsx index c049a72a636e..0ac0afe1ef36 100644 --- a/components/input/ClearableLabeledInput.tsx +++ b/components/input/ClearableLabeledInput.tsx @@ -12,6 +12,10 @@ export function hasPrefixSuffix(props: InputProps | ClearableInputProps) { return !!(props.prefix || props.suffix || props.allowClear); } +function hasAddon(props: InputProps | ClearableInputProps) { + return !!(props.addonBefore || props.addonAfter); +} + /** * This basic props required for input and textarea. */ @@ -121,8 +125,8 @@ class ClearableLabeledInput extends React.Component<ClearableInputProps> { [`${prefixCls}-affix-wrapper-rtl`]: direction === 'rtl', [`${prefixCls}-affix-wrapper-readonly`]: readOnly, [`${prefixCls}-affix-wrapper-borderless`]: !bordered, - // https://github.com/ant-design/ant-design/issues/27258 - [`${className}`]: !allowClear && className, + // className will go to addon wrapper + [`${className}`]: !hasAddon(this.props) && className, }); return ( <span @@ -145,7 +149,7 @@ class ClearableLabeledInput extends React.Component<ClearableInputProps> { renderInputWithLabel(prefixCls: string, labeledElement: React.ReactElement) { const { addonBefore, addonAfter, style, size, className, direction } = this.props; // Not wrap when there is not addons - if (!addonBefore && !addonAfter) { + if (!hasAddon(this.props)) { return labeledElement; } @@ -156,8 +160,7 @@ class ClearableLabeledInput extends React.Component<ClearableInputProps> { ) : null; const addonAfterNode = addonAfter ? 
<span className={addonClassName}>{addonAfter}</span> : null; - const mergedWrapperClassName = classNames(`${prefixCls}-wrapper`, { - [wrapperClassName]: addonBefore || addonAfter, + const mergedWrapperClassName = classNames(`${prefixCls}-wrapper`, wrapperClassName, { [`${wrapperClassName}-rtl`]: direction === 'rtl', }); @@ -197,8 +200,9 @@ class ClearableLabeledInput extends React.Component<ClearableInputProps> { { [`${prefixCls}-affix-wrapper-rtl`]: direction === 'rtl', [`${prefixCls}-affix-wrapper-borderless`]: !bordered, + // className will go to addon wrapper + [`${className}`]: !hasAddon(this.props) && className, }, - className, ); return ( <span className={affixWrapperCls} style={style}> diff --git a/components/input/__tests__/Search.test.js b/components/input/__tests__/Search.test.js index 49f0546be61a..9c1f936e0b3c 100644 --- a/components/input/__tests__/Search.test.js +++ b/components/input/__tests__/Search.test.js @@ -22,10 +22,8 @@ describe('Input.Search', () => { }); it('should support ReactNode suffix without error', () => { - const fn = () => { - mount(<Search suffix={<div>ok</div>} />); - }; - expect(fn).not.toThrow(); + const wrapper = mount(<Search suffix={<div>ok</div>} />); + expect(wrapper.render()).toMatchSnapshot(); }); it('should disable enter button when disabled prop is true', () => { diff --git a/components/input/__tests__/__snapshots__/Search.test.js.snap b/components/input/__tests__/__snapshots__/Search.test.js.snap index a2746d8e5d29..b5936dcce558 100644 --- a/components/input/__tests__/__snapshots__/Search.test.js.snap +++ b/components/input/__tests__/__snapshots__/Search.test.js.snap @@ -45,6 +45,62 @@ exports[`Input.Search rtl render component should be rendered correctly in RTL d </span> `; +exports[`Input.Search should support ReactNode suffix without error 1`] = ` +<span + class="ant-input-group-wrapper ant-input-search" +> + <span + class="ant-input-wrapper ant-input-group" + > + <span + class="ant-input-affix-wrapper" + > + <input + class="ant-input" + type="text" + value="" + /> + <span + class="ant-input-suffix" + > + <div> + ok + </div> + </span> + </span> + <span + class="ant-input-group-addon" + > + <button + class="ant-btn ant-btn-icon-only ant-input-search-button" + type="button" + > + <span + aria-label="search" + class="anticon anticon-search" + role="img" + > + <svg + aria-hidden="true" + class="" + data-icon="search" + fill="currentColor" + focusable="false" + height="1em" + viewBox="64 64 896 896" + width="1em" + > + <path + d="M909.6 854.5L649.9 594.8C690.2 542.7 712 479 712 412c0-80.2-31.3-155.4-87.9-212.1-56.6-56.7-132-87.9-212.1-87.9s-155.5 31.3-212.1 87.9C143.2 256.5 112 331.8 112 412c0 80.1 31.3 155.5 87.9 212.1C256.5 680.8 331.8 712 412 712c67 0 130.6-21.8 182.7-62l259.7 259.6a8.2 8.2 0 0011.6 0l43.6-43.5a8.2 8.2 0 000-11.6zM570.4 570.4C528 612.7 471.8 636 412 636s-116-23.3-158.4-65.6C211.3 528 188 471.8 188 412s23.3-116.1 65.6-158.4C296 211.3 352.2 188 412 188s116.1 23.2 158.4 65.6S636 352.2 636 412s-23.3 116.1-65.6 158.4z" + /> + </svg> + </span> + </button> + </span> + </span> +</span> +`; + exports[`Input.Search should support addonAfter 1`] = ` <span class="ant-input-group-wrapper ant-input-search" @@ -149,7 +205,7 @@ exports[`Input.Search should support addonAfter and suffix for loading 1`] = ` class="ant-input-wrapper ant-input-group" > <span - class="ant-input-affix-wrapper ant-input-search" + class="ant-input-affix-wrapper" > <input class="ant-input" @@ -208,7 +264,7 @@ exports[`Input.Search should support addonAfter and 
suffix for loading 2`] = ` class="ant-input-wrapper ant-input-group" > <span - class="ant-input-affix-wrapper ant-input-search ant-input-search-with-button" + class="ant-input-affix-wrapper" > <input class="ant-input" @@ -365,7 +421,7 @@ exports[`Input.Search should support invalid suffix 1`] = ` class="ant-input-wrapper ant-input-group" > <span - class="ant-input-affix-wrapper ant-input-search" + class="ant-input-affix-wrapper" > <input class="ant-input" diff --git a/components/input/__tests__/__snapshots__/demo.test.js.snap b/components/input/__tests__/__snapshots__/demo.test.js.snap index 047141581bff..481fdc7203d8 100644 --- a/components/input/__tests__/__snapshots__/demo.test.js.snap +++ b/components/input/__tests__/__snapshots__/demo.test.js.snap @@ -2737,7 +2737,7 @@ Array [ class="ant-input-wrapper ant-input-group" > <span - class="ant-input-affix-wrapper ant-input-affix-wrapper-lg ant-input-search ant-input-search-large ant-input-search-with-button" + class="ant-input-affix-wrapper ant-input-affix-wrapper-lg" > <input class="ant-input ant-input-lg" diff --git a/components/input/__tests__/__snapshots__/index.test.js.snap b/components/input/__tests__/__snapshots__/index.test.js.snap index 28a6d8740e3b..0dfbbe2c2cd2 100644 --- a/components/input/__tests__/__snapshots__/index.test.js.snap +++ b/components/input/__tests__/__snapshots__/index.test.js.snap @@ -354,57 +354,3 @@ exports[`Input should support size in form 1`] = ` </div> </form> `; - -exports[`Input.Search should support suffix 1`] = ` -<span - class="ant-input-group-wrapper ant-input-search" -> - <span - class="ant-input-wrapper ant-input-group" - > - <span - class="ant-input-affix-wrapper ant-input-search" - > - <input - class="ant-input" - type="text" - value="" - /> - <span - class="ant-input-suffix" - > - suffix - </span> - </span> - <span - class="ant-input-group-addon" - > - <button - class="ant-btn ant-btn-icon-only ant-input-search-button" - type="button" - > - <span - aria-label="search" - class="anticon anticon-search" - role="img" - > - <svg - aria-hidden="true" - class="" - data-icon="search" - fill="currentColor" - focusable="false" - height="1em" - viewBox="64 64 896 896" - width="1em" - > - <path - d="M909.6 854.5L649.9 594.8C690.2 542.7 712 479 712 412c0-80.2-31.3-155.4-87.9-212.1-56.6-56.7-132-87.9-212.1-87.9s-155.5 31.3-212.1 87.9C143.2 256.5 112 331.8 112 412c0 80.1 31.3 155.5 87.9 212.1C256.5 680.8 331.8 712 412 712c67 0 130.6-21.8 182.7-62l259.7 259.6a8.2 8.2 0 0011.6 0l43.6-43.5a8.2 8.2 0 000-11.6zM570.4 570.4C528 612.7 471.8 636 412 636s-116-23.3-158.4-65.6C211.3 528 188 471.8 188 412s23.3-116.1 65.6-158.4C296 211.3 352.2 188 412 188s116.1 23.2 158.4 65.6S636 352.2 636 412s-23.3 116.1-65.6 158.4z" - /> - </svg> - </span> - </button> - </span> - </span> -</span> -`; diff --git a/components/input/__tests__/index.test.js b/components/input/__tests__/index.test.js index fce7dbf55853..0268a9627b67 100644 --- a/components/input/__tests__/index.test.js +++ b/components/input/__tests__/index.test.js @@ -76,6 +76,20 @@ describe('Input', () => { }); }); +describe('prefix and suffix', () => { + it('should support className when has suffix', () => { + const wrapper = mount(<Input suffix="suffix" className="my-class-name" />); + expect(wrapper.getDOMNode().className.includes('my-class-name')).toBe(true); + expect(wrapper.find('input').getDOMNode().className.includes('my-class-name')).toBe(false); + }); + + it('should support className when has prefix', () => { + const wrapper = mount(<Input prefix="prefix" 
className="my-class-name" />); + expect(wrapper.getDOMNode().className.includes('my-class-name')).toBe(true); + expect(wrapper.find('input').getDOMNode().className.includes('my-class-name')).toBe(false); + }); +}); + describe('As Form Control', () => { it('should be reset when wrapped in form.getFieldDecorator without initialValue', () => { const Demo = () => { @@ -110,13 +124,6 @@ describe('As Form Control', () => { }); }); -describe('Input.Search', () => { - it('should support suffix', () => { - const wrapper = mount(<Input.Search suffix="suffix" />); - expect(wrapper.render()).toMatchSnapshot(); - }); -}); - describe('Input allowClear', () => { it('should change type when click', () => { const wrapper = mount(<Input allowClear />); @@ -189,4 +196,11 @@ describe('Input allowClear', () => { expect(wrapper.find('.ant-input-clear-icon-hidden').exists()).toBeTruthy(); }); }); + + // https://github.com/ant-design/ant-design/issues/27444 + it('should support className', () => { + const wrapper = mount(<Input allowClear className="my-class-name" />); + expect(wrapper.getDOMNode().className.includes('my-class-name')).toBe(true); + expect(wrapper.find('input').getDOMNode().className.includes('my-class-name')).toBe(false); + }); });
tabby
https://github.com/TabbyML/tabby
3f1fefbfa1a3436d0a973810b8e96b2645a418c1
Meng Zhang
2023-12-26 13:08:53
feat(core): add TABBY_MODEL_CACHE_ROOT to allow individually set tabb… (#1121)
* feat(core): add TABBY_MODEL_CACHE_ROOT to allow individually set tabby model cache dir * docs: update changelog * [autofix.ci] apply automated fixes ---------
feat(core): add TABBY_MODEL_CACHE_ROOT to allow individually set tabb… (#1121) * feat(core): add TABBY_MODEL_CACHE_ROOT to allow individually set tabby model cache dir * docs: update changelog * [autofix.ci] apply automated fixes --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
diff --git a/CHANGELOG.md b/CHANGELOG.md index 44792bca161c..c4999ea24006 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ * Add windows cpu binary distribution. * Add linux rocm (AMD GPU) binary distribution. * Fix cached permanent redirection on certain browsers(e.g chrome) when `--webserver` is not enabled. +* Add environment variable `TABBY_MODEL_CACHE_ROOT` to override models cache directory individually. # v0.7.0 (12/15/2023) diff --git a/crates/tabby-common/src/path.rs b/crates/tabby-common/src/path.rs index 2dc5305fe542..823d64f91ce4 100644 --- a/crates/tabby-common/src/path.rs +++ b/crates/tabby-common/src/path.rs @@ -9,6 +9,8 @@ lazy_static! { Err(_) => home::home_dir().unwrap().join(".tabby"), })) }; + static ref TABBY_MODEL_CACHE_ROOT: Option<PathBuf> = + env::var("TABBY_MODEL_CACHE_ROOT").ok().map(PathBuf::from); } #[cfg(feature = "testutils")] @@ -48,7 +50,11 @@ pub fn dataset_dir() -> PathBuf { } pub fn models_dir() -> PathBuf { - tabby_root().join("models") + if let Some(cache_root) = &*TABBY_MODEL_CACHE_ROOT { + cache_root.clone() + } else { + tabby_root().join("models") + } } pub fn events_dir() -> PathBuf {
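The Rust change above boils down to an environment-variable override with a fallback. For illustration only — not tabby's actual code, which is the Rust models_dir() shown in the diff — the same lookup order rendered in Go (paths are hypothetical):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// modelsDir mirrors the lookup order of the patched Rust models_dir():
// TABBY_MODEL_CACHE_ROOT wins when set; otherwise fall back to <root>/models.
func modelsDir(tabbyRoot string) string {
	if cacheRoot := os.Getenv("TABBY_MODEL_CACHE_ROOT"); cacheRoot != "" {
		return cacheRoot
	}
	return filepath.Join(tabbyRoot, "models")
}

func main() {
	fmt.Println(modelsDir("/home/user/.tabby")) // -> /home/user/.tabby/models

	os.Setenv("TABBY_MODEL_CACHE_ROOT", "/mnt/big-disk/tabby-models")
	fmt.Println(modelsDir("/home/user/.tabby")) // -> /mnt/big-disk/tabby-models
}
```

One nuance: the Rust version resolves the variable once into a lazy_static at startup, whereas this sketch re-reads the environment on every call.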
angular
https://github.com/angular/angular
9e10af45c5da64ed1f40e8e346c85c03c4f38274
nikvarma
2024-01-25 01:53:51
docs: typo Chat to chart fix (#53892)
PR Close #53892
docs: typo Chat to chart fix (#53892) PR Close #53892
diff --git a/adev/src/content/tools/devtools.md b/adev/src/content/tools/devtools.md index 3a4b9c061107f7..77e7bad221c5bf 100644 --- a/adev/src/content/tools/devtools.md +++ b/adev/src/content/tools/devtools.md @@ -129,7 +129,7 @@ When you select a bar, DevTools displays useful information about it including: ### Understand component execution -The bar chat displayed after clicking on a change detection cycle displays a detailed view about how much time your application spent running change detection in that particular component or directive. +The bar chart displayed after clicking on a change detection cycle displays a detailed view about how much time your application spent running change detection in that particular component or directive. This example shows the total time spent by the `NgForOf` directive and which method was called on it.
clair
https://github.com/quay/clair
d565775c190a4262ce049cb06a9c1842c42e00b8
crozzy
2023-02-23 20:18:52
chore: Add back GIT_HASH as needed for image name
We need the short git hash for the image name.
chore: Add back GIT_HASH as needed for image name We need the short git hash for the image name. Signed-off-by: crozzy <[email protected]>
diff --git a/contrib/openshift/build_and_deploy.sh b/contrib/openshift/build_and_deploy.sh index f0fab89478..af4470b34c 100755 --- a/contrib/openshift/build_and_deploy.sh +++ b/contrib/openshift/build_and_deploy.sh @@ -3,6 +3,7 @@ set -exv REPOSITORY="quay.io/app-sre" IMAGE="${REPOSITORY}/clair" +GIT_HASH=`git rev-parse --short=7 HEAD` git archive HEAD| docker build -t clair-service:latest -
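The restored GIT_HASH line is a plain `git rev-parse --short=7 HEAD`. A hypothetical Go equivalent of that one shell assignment — the image name is taken from the script's IMAGE variable; clair's actual build stays in shell:

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// shortGitHash shells out the same way the script does:
// `git rev-parse --short=7 HEAD`, with the trailing newline trimmed.
func shortGitHash() (string, error) {
	out, err := exec.Command("git", "rev-parse", "--short=7", "HEAD").Output()
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(out)), nil
}

func main() {
	hash, err := shortGitHash()
	if err != nil {
		panic(err)
	}
	fmt.Printf("quay.io/app-sre/clair:%s\n", hash)
}
```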