Dataset schema (column, dtype, and observed length range, value range, or number of distinct values):

Column             Type     Range / values
Hash               string   length 40
Date               string   length 19 to 20
Author             string   length 2 to 30
commit_message     string   length 3 to 28.8k
IsMerge            bool     1 class
Additions          int64    0 to 55.2k
Deletions          int64    0 to 991
Total Changes      int64    -3 to 55.2k
git_diff           string   length 23 to 47.3k
Repository Name    string   159 distinct values
Owner              string   85 distinct values
Primary Language   string   20 distinct values
Language           string   19 distinct values
Stars              float64  218 to 411k
Forks              float64  8 to 79k
Description        string   96 distinct values
Repository         string   161 distinct values
type               string   6 distinct values
Comment            string   length 7 to 156
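
The schema above describes one record per commit: the diff, repository metadata, a change-type label (`type`), and an annotator comment. A minimal sketch of loading and inspecting such a dataset, assuming it is published via the Hugging Face `datasets` library; the name "user/commit-classification" is a placeholder, since this dump does not name its source:

```python
# Minimal sketch: load and inspect a commit-classification dataset with the
# schema listed above. The dataset name below is a placeholder, not the
# actual source of this dump.
from collections import Counter

from datasets import load_dataset

ds = load_dataset("user/commit-classification", split="train")  # hypothetical name

# The columns should match the schema table: Hash, Date, Author,
# commit_message, IsMerge, Additions, Deletions, Total Changes, git_diff, ...
print(ds.column_names)

# Distribution of the change-type label. The records below show BUG_FIX,
# DOC_CHANGE, CONFIG_CHANGE, CODE_IMPROVEMENT, and NEW_FEAT; the schema
# says the column has 6 distinct values.
print(Counter(ds["type"]))
```
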
Hash: ff6f1720c47b595d52fdd5f03ed1294f580fd5a9
Date: 2022-11-18 10:23:36
Author: bannedbook
commit_message: update
IsMerge: false
Additions: 0
Deletions: 0
Total Changes: 0
git_diff:
--- docs/vsp-cn.py Binary files a/docs/vsp-cn.py and b/docs/vsp-cn.py differ --- docs/vsp-en.py Binary files a/docs/vsp-en.py and b/docs/vsp-en.py differ
Repository Name: fanqiang
Owner: bannedbook
Primary Language: Kotlin
Language: Kotlin
Stars: 39,286
Forks: 7,317
Description: 翻墙-科学上网 (roughly: "circumventing the firewall / accessing the open internet")
Repository: bannedbook_fanqiang
type: BUG_FIX
Comment: Correcting a typo in the README

Hash: aad27753137e0211b706e767c50ff8a0d9eb5ce8
Date: 2023-09-28 14:03:12
Author: bannedbook
commit_message: update
IsMerge: false
Additions: 0
Deletions: 0
Total Changes: 0
git_diff:
--- ChromeGoMac/README.md Binary files a/ChromeGoMac/README.md and b/ChromeGoMac/README.md differ --- MAC允许未知来源的应用.md Binary files "a/MAC\345\205\201\350\256\270\346\234\252\347\237\245\346\235\245\346\272\220\347\232\204\345\272\224\347\224\250.md" and /dev/null differ --- v2ss/images/mac-allow-unkown-app.webp Binary files a/v2ss/images/mac-allow-unkown-app.webp and /dev/null differ --- v2ss/images/mac01.jpg Binary files a/v2ss/images/mac01.jpg and /dev/null differ --- v2ss/images/mac02.jpg Binary files a/v2ss/images/mac02.jpg and /dev/null differ --- v2ss/images/mac03.jpg Binary files a/v2ss/images/mac03.jpg and /dev/null differ
Repository Name: fanqiang
Owner: bannedbook
Primary Language: Kotlin
Language: Kotlin
Stars: 39,286
Forks: 7,317
Description: 翻墙-科学上网 (roughly: "circumventing the firewall / accessing the open internet")
Repository: bannedbook_fanqiang
type: DOC_CHANGE
Comment: Obvious

Hash: 88f99a0853b4311a0d43748f7a837ce163e69cd8
Date: 2023-01-11 04:36:34
Author: Kebin Liu
commit_message: Fixes privoxy linking
IsMerge: false
Additions: 2
Deletions: 2
Total Changes: 4
git_diff:
--- deps/Makefile @@ -111,8 +111,8 @@ privoxy: && patch -Ru configure.in < $${PWD}/../patch/privoxy/configure.in.patch \ && autoreconf -fi \ && LDFLAGS="-target $(TARGET) -L$${PWD}/../dist/$(ARCH)/pcre/lib" \ - CPPFLAGS="-target $(TARGET) -Dunix -I$${PWD}/../dist/$(ARCH)/pcre/include" \ - CFLAGS="-target $(TARGET) -Dunix -I$${PWD}/../dist/$(ARCH)/pcre/include" \ + CPPFLAGS="-target $(TARGET) -Dunix" \ + CFLAGS="-target $(TARGET) -Dunix" \ ./configure --prefix $${PWD}/../dist/$(ARCH)/privoxy \ --host=$(TARGET) \ --disable-debug \
Repository Name: shadowsocksx-ng
Owner: shadowsocks
Primary Language: Swift
Language: Swift
Stars: 32,651
Forks: 7,935
Description: Next Generation of ShadowsocksX
Repository: shadowsocks_shadowsocksx-ng
type: BUG_FIX
Comment: obvious

Hash: 956b486cacff05ebbe580eb9d89a1c508c4fa3a3
Date: 2024-12-30 16:34:46
Author: Peter Krefting
commit_message: l10n: sv.po: Update Swedish translation Signed-off-by: Peter Krefting <[email protected]>
IsMerge: false
Additions: 212
Deletions: 63
Total Changes: 275
git_diff:
--- po/sv.po @@ -5,10 +5,10 @@ # msgid "" msgstr "" -"Project-Id-Version: git 2.48.0\n" +"Project-Id-Version: git 2.47.0\n" "Report-Msgid-Bugs-To: Git Mailing List <[email protected]>\n" -"POT-Creation-Date: 2024-12-30 11:57+0100\n" -"PO-Revision-Date: 2024-12-30 12:03+0100\n" +"POT-Creation-Date: 2024-09-19 02:06+0000\n" +"PO-Revision-Date: 2024-09-28 15:45+0100\n" "Last-Translator: Peter Krefting <[email protected]>\n" "Language-Team: Svenska <[email protected]>\n" "Language: sv\n" @@ -624,10 +624,10 @@ msgstr "Endast binära filer ändrade." #, c-format msgid "" "\n" -"Disable this message with \"git config set advice.%s false\"" +"Disable this message with \"git config advice.%s false\"" msgstr "" "\n" -"Slå av meddelandet med ”git config set advice.%s false”" +"Slå av meddelandet med ”git config advice.%s false”" #, c-format msgid "%shint:%s%.*s%s\n" @@ -1357,10 +1357,6 @@ msgstr "objektnamnet är inte giltigt: %s" msgid "not a tree object: %s" msgstr "inte ett trädobjekt: %s" -#, c-format -msgid "failed to unpack tree object %s" -msgstr "misslyckades packa upp trädobjektet %s" - #, c-format msgid "File not found: %s" msgstr "Hittar inte filen: %s" @@ -3829,8 +3825,9 @@ msgstr "ny ofödd gren" msgid "update ignored files (default)" msgstr "uppdatera ignorerade filer (standard)" -msgid "do not check if another worktree is using this branch" -msgstr "kontrollera inte om en annan arbetskatalog använder grenen" +msgid "do not check if another worktree is holding the given ref" +msgstr "" +"kontrollera inte om en annan arbetskatalog håller den angivna referensen" msgid "checkout our version for unmerged files" msgstr "checka ut vår version för ej sammanslagna filer" @@ -4128,11 +4125,11 @@ msgstr "skapa en grund klon på detta djup" msgid "create a shallow clone since a specific time" msgstr "skapa en grund klon från en angiven tidpunkt" -msgid "ref" -msgstr "ref" +msgid "revision" +msgstr "revision" -msgid "deepen history of shallow clone, excluding ref" -msgstr "fördjupa historik för grund klon, exkludera ref" +msgid "deepen history of shallow clone, excluding rev" +msgstr "fördjupa historik för grund klon, exkludera revisionen" msgid "clone only one branch, HEAD or --branch" msgstr "klona endast en gren, HEAD eller --branch" @@ -5057,10 +5054,10 @@ msgstr "" msgid "" "git config unset [<file-option>] [--all] [--value=<value>] [--fixed-value] " -"<name>" +"<name> <value>" msgstr "" "git config unset [<filflagga>] [--all] [--value=<värde>] [--fixed-value] " -"<namn>" +"<namn> <värde>" msgid "git config rename-section [<file-option>] <old-name> <new-name>" msgstr "git config rename-section [<filflagga>] <gammalt-namn> <nytt-namn>" @@ -5493,8 +5490,12 @@ msgid "traversed %lu commits\n" msgstr "traverserade %lu incheckningar\n" #, c-format -msgid "found %i tags; gave up search at %s\n" -msgstr "hittade %i taggar; gav upp sökning vid %s\n" +msgid "" +"more than %i tags found; listed %i most recent\n" +"gave up search at %s\n" +msgstr "" +"mer än %i taggar hittades; listar de %i senaste\n" +"gav upp sökningen vid %s\n" #, c-format msgid "describe %s\n" @@ -5926,20 +5927,6 @@ msgstr "%s är inte ett giltigt objekt" msgid "the object %s does not exist" msgstr "objektet %s finns inte" -#, c-format -msgid "" -"Run 'git remote set-head %s %s' to follow the change, or set\n" -"'remote.%s.followRemoteHEAD' configuration option to a different value\n" -"if you do not want to see this message. 
Specifically running\n" -"'git config set remote.%s.followRemoteHEAD %s' will disable the warning\n" -"until the remote changes HEAD to something else." -msgstr "" -"Kör ”git remote set-head %s %s” för att följa ändringen, eller sätt\n" -"konfigurationsflaggan ”remote %s.followRemoteHEAD” till ett annat värde\n" -"om du inte vill se det här meddelandet. Du kan specifikt inaktivera\n" -"varningen till fjärren ändrar HEAD till något annat genom att köra\n" -"”git config set remote %s.followRemoteHEAD %s”." - msgid "multiple branches detected, incompatible with --set-upstream" msgstr "flera grenar upptäcktes, inkompatibelt med --set-upstream" @@ -6077,9 +6064,6 @@ msgstr "referenskarta" msgid "specify fetch refmap" msgstr "ange referenskarta för ”fetch”" -msgid "revision" -msgstr "revision" - msgid "report that we have only objects reachable from this object" msgstr "rapportera att vi bara har objekt nåbara från detta objektet" @@ -6805,25 +6789,8 @@ msgstr "varken systemd-timer eller crontab är tillgänglig" msgid "%s scheduler is not available" msgstr "%s-schemaläggare är inte tillgänglig" -#, c-format -msgid "" -"unable to create '%s.lock': %s.\n" -"\n" -"Another scheduled git-maintenance(1) process seems to be running in this\n" -"repository. Please make sure no other maintenance processes are running and\n" -"then try again. If it still fails, a git-maintenance(1) process may have\n" -"crashed in this repository earlier: remove the file manually to continue." -msgstr "" -"Kunde inte skapa ”%s.lock”: %s.\n" -"\n" -"Det verkar som en annan schemalagd git-maintenance(1)-process kör i det\n" -"här arkivet. Se till att inga andra underhållsprocesser körs och försök\n" -"sedan igen. Om det fortfarande misslyckas kanske en git-maintenance(1)-\n" -"process har kraschat i det här arkivet tidigare: ta bort filen manuellt\n" -"för att fortsätta." - -msgid "cannot acquire lock for scheduled background maintenance" -msgstr "kan inte erhålla låset för schemalagt bakgrundsunderhåll" +msgid "another process is scheduling background maintenance" +msgstr "en annan process schemalägger bakgrundsunderhåll" msgid "git maintenance start [--scheduler=<scheduler>]" msgstr "git maintenance start [--scheduler=<schemaläggare>]" @@ -7353,7 +7320,7 @@ msgstr "paketfilnamnet ”%s” slutar inte med ”.%s”" #, c-format msgid "cannot write %s file '%s'" -msgstr "kan inte skriva %s-fil ”%s”" +msgstr "kan inte ta skriva %s-fil ”%s”" #, c-format msgid "cannot close written %s file '%s'" @@ -7390,19 +7357,6 @@ msgid_plural "chain length = %d: %lu objects" msgstr[0] "kedjelängd = %d: %lu objekt" msgstr[1] "kedjelängd = %d: %lu objekt" -msgid "could not start pack-objects to repack local links" -msgstr "kunde inte starta pack-objects för att packa om lokala länkar" - -msgid "failed to feed local object to pack-objects" -msgstr "misslyckades sända lokala objekt till pack-objects" - -msgid "index-pack: Expecting full hex object ID lines only from pack-objects." -msgstr "" -"index-pack: Förväntar kompletta hex-objekt-ID-rader endast från pack-objects." 
- -msgid "could not finish pack-objects to repack local links" -msgstr "kunde inte avsluta pack-objects för att packa om lokala länkar" - msgid "Cannot come back to cwd" msgstr "Kan inte gå tillbaka till arbetskatalogen (cwd)" @@ -7414,9 +7368,6 @@ msgstr "felaktig %s" msgid "unknown hash algorithm '%s'" msgstr "okänd hashningsalgoritm ”%s”" -msgid "--promisor cannot be used with a pack name" -msgstr "--promisor kan inte användas med ett paketnamn" - msgid "--stdin requires a git repository" msgstr "--stdin kräver ett git-arkiv" @@ -8757,11 +8708,11 @@ msgstr "git notes [--ref <anteckningsref>] [list [<objekt>]]" msgid "" "git notes [--ref <notes-ref>] add [-f] [--allow-empty] [--[no-]separator|--" "separator=<paragraph-break>] [--[no-]stripspace] [-m <msg> | -F <file> | (-c " -"| -C) <object>] [<object>] [-e]" +"| -C) <object>] [<object>]" msgstr "" "git notes [--ref <anteckningsref>] add [-f] [--allow-empty] [--" "[no-]separator|--separator=<styckebrytning>] [--[no-]stripspace] [-m <medd> " -"| -F <fil> | (-c | -C) <objekt>] [<objekt>] [-e]" +"| -F <fil> | (-c | -C) <objekt>] [<objekt>]" msgid "git notes [--ref <notes-ref>] copy [-f] <from-object> <to-object>" msgstr "" @@ -8770,11 +8721,11 @@ msgstr "" msgid "" "git notes [--ref <notes-ref>] append [--allow-empty] [--[no-]separator|--" "separator=<paragraph-break>] [--[no-]stripspace] [-m <msg> | -F <file> | (-c " -"| -C) <object>] [<object>] [-e]" +"| -C) <object>] [<object>]" msgstr "" "git notes [--ref <anteckningsref>] append [--allow-empty] [--" "[no-]separator|--separator=<styckebrytning>] [--[no-]stripspace] [-m <medd> " -"| -F <fil> | (-c | -C) <objekt>] [<objekt>] [-e]" +"| -F <fil> | (-c | -C) <objekt>] [<objekt>]" msgid "git notes [--ref <notes-ref>] edit [--allow-empty] [<object>]" msgstr "git notes [--ref <anteckningsref>] edit [--allow-empty] [<objekt>]" @@ -8893,9 +8844,6 @@ msgstr "anteckningsinnehåll i en fil" msgid "reuse and edit specified note object" msgstr "återanvänd och redigera angivet anteckningsobjekt" -msgid "edit note message in editor" -msgstr "redigera anteckning i textredigeringsprogram" - msgid "reuse specified note object" msgstr "återanvänd angivet anteckningsobjekt" @@ -9388,9 +9336,6 @@ msgstr "hantering av saknade objekt" msgid "do not pack objects in promisor packfiles" msgstr "packa inte objekt i kontraktspackfiler" -msgid "implies --missing=allow-any" -msgstr "implicerar --missing=allow-any" - msgid "respect islands during delta compression" msgstr "respektera öar under deltakomprimering" @@ -10974,30 +10919,6 @@ msgid_plural " Local refs configured for 'git push'%s:" msgstr[0] " Lokal referens konfigurerad för ”git push”%s:" msgstr[1] " Lokala referenser konfigurerade för ”git push”%s:" -#, c-format -msgid "'%s/HEAD' is unchanged and points to '%s'\n" -msgstr "”%s/HEAD” är oförändrad och pekar på ”%s”\n" - -#, c-format -msgid "'%s/HEAD' has changed from '%s' and now points to '%s'\n" -msgstr "”%s/HEAD” har ändrats från ”%s” och pekar nu på ”%s”\n" - -#, c-format -msgid "'%s/HEAD' is now created and points to '%s'\n" -msgstr "”%s/HEAD” har nu skapats och pekar på ”%s”\n" - -#, c-format -msgid "'%s/HEAD' was detached at '%s' and now points to '%s'\n" -msgstr "”%s/HEAD” kopplades från vid ”%s” och pekar nu på ”%s”\n" - -#, c-format -msgid "" -"'%s/HEAD' used to point to '%s' (which is not a remote branch), but now " -"points to '%s'\n" -msgstr "" -"”%s/HEAD” pekade tidigare på ”%s” (som inte är en fjärrgren), men pekar nu " -"på ”%s”\n" - msgid "set refs/remotes/<name>/HEAD according to remote" 
msgstr "sätt refs/remotes/<namn>/HEAD enligt fjärren" @@ -11019,7 +10940,7 @@ msgid "Not a valid ref: %s" msgstr "Inte en giltig referens: %s" #, c-format -msgid "Could not set up %s" +msgid "Could not setup %s" msgstr "Kunde inte ställa in %s" #, c-format @@ -13720,9 +13641,6 @@ msgstr "ställ in spårningsläge (se git-branch(1))" msgid "try to match the new branch name with a remote-tracking branch" msgstr "försök träffa namn på ny gren mot en fjärrspårande gren" -msgid "use relative paths for worktrees" -msgstr "använd relativa sökvägar för arbetskataloger" - #, c-format msgid "options '%s', '%s', and '%s' cannot be used together" msgstr "flaggorna ”%s”, ”%s” och ”%s” kan inte användas samtidigt" @@ -13994,26 +13912,6 @@ msgstr "kan inte skapa ”%s”" msgid "index-pack died" msgstr "index-pack dog" -#, c-format -msgid "directory '%s' is present in index, but not sparse" -msgstr "katalogen ”%s” finns i indexet, men inte glest" - -msgid "corrupted cache-tree has entries not present in index" -msgstr "trasigt cacheträd innehåller poster som inte finns i indexet" - -#, c-format -msgid "%s with flags 0x%x should not be in cache-tree" -msgstr "%s med flaggorna 0x%x borde inte finnas i cacheträdet" - -#, c-format -msgid "bad subtree '%.*s'" -msgstr "felaktigt underträd ”%.*s”" - -#, c-format -msgid "cache-tree for path %.*s does not match. Expected %s got %s" -msgstr "" -"cacheträd för sökvägen %.*s stämmer inte överens. Förväntade %s fick %s" - msgid "terminating chunk id appears earlier than expected" msgstr "avslutande stycke-id förekommer tidigare än förväntat" @@ -14879,7 +14777,7 @@ msgid "" "to convert the grafts into replace refs.\n" "\n" "Turn this message off by running\n" -"\"git config set advice.graftFileDeprecated false\"" +"\"git config advice.graftFileDeprecated false\"" msgstr "" "Stöd för <GIT_DIR>/info/grafts avråds från och\n" "kommer tas bort i en framtida version av Git.\n" @@ -14888,7 +14786,7 @@ msgstr "" "för att omvandla grafts till ersättningsreferenser.\n" "\n" "Slå av detta meddelande genom att skriva\n" -"”git config set advice.graftFileDeprecated false”" +"”git config advice.graftFileDeprecated false”" #, c-format msgid "commit %s exists in commit-graph but not in the object database" @@ -15699,19 +15597,6 @@ msgstr "url saknar protokoll: %s" msgid "credential url cannot be parsed: %s" msgstr "kan inte tolka url för inloggingsuppgifter: %s" -#, c-format -msgid "invalid timeout '%s', expecting a non-negative integer" -msgstr "felaktig tidsgräns ”%s”, förväntade ett icke-negativt heltal" - -#, c-format -msgid "invalid init-timeout '%s', expecting a non-negative integer" -msgstr "" -"felaktig värde för init-timeout ”%s”, förväntade ett icke-negativt heltal" - -#, c-format -msgid "invalid max-connections '%s', expecting an integer" -msgstr "felaktigt värde för max-connections ”%s”, förväntade ett heltal" - msgid "in the future" msgstr "i framtiden" @@ -16411,20 +16296,6 @@ msgstr "felaktig git-namnrymdssökväg ”%s”" msgid "too many args to run %s" msgstr "för många flaggor för att köra %s" -#, c-format -msgid "" -"You are attempting to fetch %s, which is in the commit graph file but not in " -"the object database.\n" -"This is probably due to repo corruption.\n" -"If you are attempting to repair this repo corruption by refetching the " -"missing object, use 'git fetch --refetch' with the missing object." 
-msgstr "" -"Du försöker hämta %s som är i incheckningsgrafen men inte i " -"objektdatabasen.\n" -"Det händer antagligen på grund av att arkivet är trasigt.\n" -"Om du försöker reparera det trasiga arkivet genom att hämta om det saknade " -"objektet, använd ”git fetch --refetch” med det saknade objektet." - msgid "git fetch-pack: expected shallow list" msgstr "git fetch-pack: förväntade grund lista" @@ -17009,10 +16880,10 @@ msgstr[1] "" #, c-format msgid "" "The '%s' hook was ignored because it's not set as executable.\n" -"You can disable this warning with `git config set advice.ignoredHook false`." +"You can disable this warning with `git config advice.ignoredHook false`." msgstr "" "Kroken ”%s” ignorerades eftersom den inte är markerad som körbar.\n" -"Du kan inaktivera varningen med ”git config set advice.ignoredHook false”." +"Du kan inaktivera varningen med ”git config advice.ignoredHook false”." msgid "not a git repository" msgstr "inte ett git-arkiv" @@ -17029,9 +16900,15 @@ msgstr "http.postBuffer har negativt värde; använder förvalet %d" msgid "Delegation control is not supported with cURL < 7.22.0" msgstr "Delegerad styrning stöds inte av cURL < 7.22.0" +msgid "Public key pinning not supported with cURL < 7.39.0" +msgstr "Fastnålning av öppen nyckel stöds inte av cURL < 7.39.0" + msgid "Unknown value for http.proactiveauth" msgstr "Okänt värde för http.proactiveauth" +msgid "CURLSSLOPT_NO_REVOKE not supported with cURL < 7.44.0" +msgstr "CURLSSLOPT_NO_REVOKE stöds inte av cURL < 7.44.0" + #, c-format msgid "Unsupported SSL backend '%s'. Supported SSL backends:" msgstr "SSL-bakändan ”%s” stöds inte. Dessa SSL-bakändor stöds:" @@ -17214,10 +17091,6 @@ msgstr "citerad CRLF upptäcktes" msgid "unable to format message: %s" msgstr "kan inte formatera meddelandet: %s" -#, c-format -msgid "invalid marker-size '%s', expecting an integer" -msgstr "felaktigt värde för marker-size ”%s”, förväntade ett heltal" - #, c-format msgid "Failed to merge submodule %s (not checked out)" msgstr "Misslyckades slå ihop undermodulen %s (ej utcheckad)" @@ -18122,14 +17995,6 @@ msgstr "packat objekt %s (lagrat i %s) är trasigt" msgid "missing mapping of %s to %s" msgstr "saknar koppling av %s till %s" -#, c-format -msgid "unable to open %s" -msgstr "kan inte öppna %s" - -#, c-format -msgid "files '%s' and '%s' differ in contents" -msgstr "filerna ”%s” och ”%s” har olika innehåll" - #, c-format msgid "unable to write file %s" msgstr "kan inte skriva filen %s" @@ -18215,6 +18080,10 @@ msgstr "%s: filtypen stöds ej" msgid "%s is not a valid '%s' object" msgstr "%s är inte ett giltigt ”%s”-objekt" +#, c-format +msgid "unable to open %s" +msgstr "kan inte öppna %s" + #, c-format msgid "hash mismatch for %s (expected %s)" msgstr "hash stämmer inte för %s (förväntade %s)" @@ -18316,7 +18185,7 @@ msgid "" "\n" "where \"$br\" is somehow empty and a 40-hex ref is created. Please\n" "examine these refs and maybe delete them. Turn this message off by\n" -"running \"git config set advice.objectNameWarning false\"" +"running \"git config advice.objectNameWarning false\"" msgstr "" "Git skapar normalt aldrig referenser som slutar med 40 hexadecimala\n" "tecken, då detta ignoreras när du anger 40-hex enbart. Dessa\n" @@ -18326,7 +18195,7 @@ msgstr "" "\n" "där ”$br” på något sätt blivit tomt och en 40-hex-referens skapats.\n" "Undersök referenserna och ta kanske bort dem. 
Stäng av meddelandet\n" -"genom att köra ”git config set advice.objectNameWarning false”" +"genom att köra ”git config advice.objectNameWarning false”" #, c-format msgid "log for '%.*s' only goes back to %s" @@ -18485,6 +18354,13 @@ msgstr "flerpaketsbitkarta saknar nödvändigt omvänt index" msgid "could not open pack %s" msgstr "kunde inte öppna paketfilen %s" +msgid "could not determine MIDX preferred pack" +msgstr "kunde inte bestämma det föredragna MIDX-paketet" + +#, c-format +msgid "preferred pack (%s) is invalid" +msgstr "föredragen paketfil (%s) är ogiltig" + msgid "corrupt bitmap lookup table: triplet position out of index" msgstr "trasig bitkarteuppslagstabell: trippelposition utanför indexet" @@ -19599,24 +19475,16 @@ msgstr "loggen för referensen %s slutade oväntat på %s" msgid "log for %s is empty" msgstr "loggen för %s är tom" -#, c-format -msgid "refusing to update reflog for pseudoref '%s'" -msgstr "vägrar uppdatera referenslogg för pseudoreferensen ”%s”" - -#, c-format -msgid "refusing to update pseudoref '%s'" -msgstr "vägrar uppdatera pseudoreferensen ”%s”" - -#, c-format -msgid "refusing to update reflog with bad name '%s'" -msgstr "vägrar uppdatera referenslogg med trasigt namn ”%s”" +msgid "refusing to force and skip creation of reflog" +msgstr "vägrar att tvinga och hoppa över skapande av reflogg" #, c-format msgid "refusing to update ref with bad name '%s'" msgstr "vägrar uppdatera referens med trasigt namn ”%s”" -msgid "refusing to force and skip creation of reflog" -msgstr "vägrar att tvinga och hoppa över skapande av reflogg" +#, c-format +msgid "refusing to update pseudoref '%s'" +msgstr "vägrar uppdatera pseudoreferensen ”%s”" #, c-format msgid "update_ref failed for ref '%s': %s" @@ -19667,10 +19535,6 @@ msgstr "" "kan inte läsa referensen ”%s”: förväntade symbolisk referens med målet ”%s”: " "men är en vanlig referens" -#, c-format -msgid "cannot read ref file '%s'" -msgstr "kan inte läsa ref-fil ”%s”" - #, c-format msgid "cannot open directory %s" msgstr "kunde inte öppna katalogen %s" @@ -19876,10 +19740,6 @@ msgstr "mer än en receivepack angavs, använder den första" msgid "more than one uploadpack given, using the first" msgstr "mer än en uploadpack angavs, använder den första" -#, c-format -msgid "unrecognized followRemoteHEAD value '%s' ignored" -msgstr "okänt värde ”%s” för followRemoteHEAD ignorerades" - #, c-format msgid "unrecognized value transfer.credentialsInUrl: '%s'" msgstr "okänt värde transfer.credentialsInUrl: ”%s”" @@ -21805,9 +21665,6 @@ msgstr "incheckning %s är inte märkt nåbar" msgid "too many commits marked reachable" msgstr "för många incheckningar markerade nåbara" -msgid "could not determine MIDX preferred pack" -msgstr "kunde inte bestämma det föredragna MIDX-paketet" - msgid "test-tool serve-v2 [<options>]" msgstr "test-tool serve-v2 [<flaggor>]" @@ -22462,9 +22319,6 @@ msgstr ".git-filen är trasig" msgid ".git file incorrect" msgstr ".git-filen är felaktig" -msgid ".git file absolute/relative path mismatch" -msgstr "absolut/relativ sökväg för .git-fil stämmer inte överens" - msgid "not a valid path" msgstr "inte en giltig sökväg" @@ -22480,9 +22334,6 @@ msgstr "kan inte hitta arkivet; ”.git”-filen är trasig" msgid "gitdir unreadable" msgstr "gitdir är oläsbar" -msgid "gitdir absolute/relative path mismatch" -msgstr "absolut/relativ sökväg för git-katalog stämmer inte överens" - msgid "gitdir incorrect" msgstr "gitdir är felaktig" @@ -22517,13 +22368,6 @@ msgstr "kan inte slå av %s i ”%s”" msgid "failed to set 
extensions.worktreeConfig setting" msgstr "misslyckades ändra inställningen extensions.worktreeConfig" -msgid "unable to upgrade repository format to support relative worktrees" -msgstr "" -"kunde inte uppgradera arkivformat till att stöda relativa arbetskataloger" - -msgid "unable to set extensions.relativeWorktrees setting" -msgstr "misslyckades ändra inställningen extensions.relativeWorktrees" - #, c-format msgid "could not setenv '%s'" msgstr "kunde inte lagra miljövariabeln ”%s”" @@ -23373,3 +23217,10 @@ msgstr "" #, perl-format msgid "Do you really want to send %s? [y|N]: " msgstr "Vill du verkligen sända %s? [y=ja, n=nej]: " + +#~ msgid "revision walk setup failed\n" +#~ msgstr "misslyckades starta revisionstraversering\n" + +#, c-format +#~ msgid "unable to parse contact: %s" +#~ msgstr "kan inte tolka kontakt: %s"
Repository Name: git
Owner: null
Primary Language: C
Language: C
Stars: null
Forks: null
Description: Version control
Repository: _git
type: DOC_CHANGE
Comment: Obvious

Hash: d9b5d80bf38be314dd0d09f65dc44ff34c2a843f
Date: 2022-11-13 05:22:56
Author: low-batt
commit_message: Fix unable to hide Quick Panel with OSC, #4071 This commit will change the `MainWindowController` methods `showPlaylistSidebar` and `showSettingsSidebar` to hide or show the panel when no tab is specified.
IsMerge: false
Additions: 2
Deletions: 2
Total Changes: 4
git_diff:
--- iina/MainWindowController.swift @@ -2616,7 +2616,7 @@ class MainWindowController: PlayerWindowController { self.showSideBar(viewController: view, type: .settings) } case .settings: - if view.currentTab == tab || tab == nil { + if view.currentTab == tab { if hideIfAlreadyShown { hideSideBar() } @@ -2645,7 +2645,7 @@ class MainWindowController: PlayerWindowController { self.showSideBar(viewController: view, type: .playlist) } case .playlist: - if view.currentTab == tab || tab == nil { + if view.currentTab == tab { if hideIfAlreadyShown { hideSideBar() }
Repository Name: iina
Owner: iina
Primary Language: Swift
Language: Swift
Stars: 39,591
Forks: 2,605
Description: The modern video player for macOS.
Repository: iina_iina
type: BUG_FIX
Comment: this commit fixes/polishes an earlier feature

Hash: 5c86fd82f7a982dfb24b35d117a18ca7ab98f12e
Date: 2023-11-01 02:46:35
Author: Vinicius Souza
commit_message: Update main.yml fix github actions
IsMerge: false
Additions: 16
Deletions: 3
Total Changes: 19
git_diff:
--- .github/workflows/main.yml @@ -1,16 +1,3 @@ -name: website-deploy -on: - push: - branches: - - 'master' -permissions: - contents: read # to fetch code (actions/checkout) -jobs: - Job: - runs-on: windows-latest - steps: - - name: Convert Markdown to HTML - uses: natescherer/markdown-to-html-with-github-style-action@v1 - with: - path: README.md - outputpath: out +- run: npm i markdown-to-html-cli -g +- run: markdown-to-html --output coverage/index.html +- run: markdown-to-html --source README.md --output out/index.html
Repository Name: awesome-ios
Owner: vsouza
Primary Language: Swift
Language: Swift
Stars: 48,363
Forks: 6,877
Description: A curated list of awesome iOS ecosystem, including Objective-C and Swift Projects
Repository: vsouza_awesome-ios
type: CONFIG_CHANGE
Comment: changes in yml file

Hash: 4c40d2d99080bbfe862363fde5ac99da47a548e5
Date: 2023-10-07 17:32:39
Author: dependabot[bot]
commit_message: chore(deps): bump hooks_riverpod from 2.4.0 to 2.4.3 (#786) Bumps [hooks_riverpod](https://github.com/rrousselGit/riverpod) from 2.4.0 to 2.4.3. - [Commits](https://github.com/rrousselGit/riverpod/compare/hooks_riverpod-v2.4.0...hooks_riverpod-v2.4.3) --- updated-dependencies: - dependency-name: hooks_riverpod dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] <[email protected]> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
IsMerge: false
Additions: 7
Deletions: 7
Total Changes: 14
git_diff:
--- pubspec.lock @@ -813,10 +813,10 @@ packages: dependency: "direct main" description: name: flutter_riverpod - sha256: e667e406a74d67715f1fa0bd941d9ded49aff72f3a9f4440a36aece4e8d457a7 + sha256: "1bd39b04f1bcd217a969589777ca6bd642d116e3e5de65c3e6a8e8bdd8b178ec" url: "https://pub.dev" source: hosted - version: "2.4.3" + version: "2.4.0" flutter_rust_bridge: dependency: transitive description: @@ -1020,10 +1020,10 @@ packages: dependency: "direct main" description: name: hooks_riverpod - sha256: "69dcb88acbc68c81fc27ec15a89a4e24b7812c83c13a6307a1a9366ada758541" + sha256: ad7b877c3687e38764633d221a1f65491bc7a540e724101e9a404a84db2a4276 url: "https://pub.dev" source: hosted - version: "2.4.3" + version: "2.4.0" html: dependency: "direct main" description: @@ -1673,10 +1673,10 @@ packages: dependency: transitive description: name: riverpod - sha256: "494bf2cfb4df30000273d3052bdb1cc1de738574c6b678f0beb146ea56f5e208" + sha256: a600120d6f213a9922860eea1abc32597436edd5b2c4e73b91410f8c2af67d22 url: "https://pub.dev" source: hosted - version: "2.4.3" + version: "2.4.0" rxdart: dependency: transitive description: --- pubspec.yaml @@ -59,7 +59,7 @@ dependencies: go_router: ^10.0.0 hive: ^2.2.3 hive_flutter: ^1.1.0 - hooks_riverpod: ^2.4.3 + hooks_riverpod: ^2.1.1 html: ^0.15.1 http: ^1.1.0 image_picker: ^1.0.4
Repository Name: spotube
Owner: krtirtho
Primary Language: Dart
Language: Dart
Stars: 35,895
Forks: 1,491
Description: 🎧 Open source Spotify client that doesn't require Premium nor uses Electron! Available for both desktop & mobile!
Repository: krtirtho_spotube
type: CONFIG_CHANGE
Comment: dependency version update

Hash: 312b186939cfd021b29528122065863febaf9604
Date: 2025-04-05T22:34:40Z
Author: chromium-autoroll
commit_message: Roll Chrome Mac Arm PGO Profile Roll Chrome Mac Arm PGO profile from chrome-mac-arm-main-1743883035-e01ce0b88df1827e888693315ac475dfd77aba72-6179c5840b51135cf4b22cc823b29a6e11978989.profdata to chrome-mac-arm-main-1743889861-4be2fca48adc7eec9905a54fad8f13206c3ae5e4-a3df226fa3c63112c3cf8003d11f56d62a4372de.profdata If this roll has caused a breakage, revert this CL and stop the roller using the controls here: https://autoroll.skia.org/r/pgo-mac-arm-chromium Please CC [email protected],[email protected] on the revert to ensure that a human is aware of the problem. To file a bug in Chromium main branch: https://bugs.chromium.org/p/chromium/issues/entry To report a problem with the AutoRoller itself, please file a bug: https://issues.skia.org/issues/new?component=1389291&template=1850622 Documentation for the AutoRoller is here: https://skia.googlesource.com/buildbot/+doc/main/autoroll/README.md Cq-Include-Trybots: luci.chrome.try:mac-chrome Tbr: [email protected] Merge-Approval-Bypass: Chrome autoroller Change-Id: If2c662dd4a06a8bb088ac485dee13261ec210ef6 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6435998 Commit-Queue: chromium-autoroll <[email protected]> Bot-Commit: chromium-autoroll <[email protected]> Cr-Commit-Position: refs/heads/main@{#1443137}
IsMerge: false
Additions: 1
Deletions: 1
Total Changes: 2
git_diff:
--- chrome/build/mac-arm.pgo.txt @@ -1 +1 @@ -chrome-mac-arm-main-1743883035-e01ce0b88df1827e888693315ac475dfd77aba72-6179c5840b51135cf4b22cc823b29a6e11978989.profdata +chrome-mac-arm-main-1743889861-4be2fca48adc7eec9905a54fad8f13206c3ae5e4-a3df226fa3c63112c3cf8003d11f56d62a4372de.profdata
Repository Name: chromium
Owner: null
Primary Language: C
Language: C
Stars: null
Forks: null
Description: Browser
Repository: _chromium
type: CONFIG_CHANGE
Comment: version/id change of roll media app

Hash: 294eb3c53f3f8d13b7ce4cbd1ebcd90c9688d6f0
Date: 2022-01-28 01:34:41
Author: 서다솔
commit_message: Add merge sort document in Korean (#632) * Add merge sort doc in Korean * Update README.md
IsMerge: false
Additions: 37
Deletions: 12
Total Changes: 49
git_diff:
--- src/algorithms/sorting/merge-sort/README.ko-KR.md @@ -1,22 +0,0 @@ -# 병합 정렬 - -컴퓨터과학에서, 병합 정렬(일반적으로 mergesort라고 쓰는)은 효율적이고, 범용적인, 비교 기반의 정렬 알고리즘입니다. 대부분의 구현들은 안정적인 정렬을 만들어내며, 이는 정렬된 산출물에서 동일한 요소들의 입력 순서가 유지된다는 것을 의미합니다. 병합 정렬은 1945년에 John von Neumann이 만든 분할 정복 알고리즘입니다. - -병합 정렬의 예시입니다. 우선 리스트를 가장 작은 단위로 나누고(한 개의 요소), 두 개의 인접한 리스트를 정렬하고 병합하기 위해 각 요소와 인접한 리스트를 비교합니다. 마지막으로 모든 요소들은 정렬되고 병합됩니다. - -![Merge Sort](https://upload.wikimedia.org/wikipedia/commons/c/cc/Merge-sort-example-300px.gif) - -재귀적인 병합 정렬 알고리즘은 7개의 정수값을 가진 배열을 정렬하는데 사용됩니다. 다음은 합병 정렬을 모방하기 위해 사람이 취하는 단계입니다.(하향식) - -![Merge Sort](https://upload.wikimedia.org/wikipedia/commons/e/e6/Merge_sort_algorithm_diagram.svg) - -## 복잡도 - -| Name | Best | Average | Worst | Memory | Stable | Comments | -| --------------------- | :-------------: | :-----------------: | :-----------------: | :-------: | :-------: | :-------- | -| **Merge sort** | n&nbsp;log(n) | n&nbsp;log(n) | n&nbsp;log(n) | n | Yes | | - -## 참조 - -- [Wikipedia](https://en.wikipedia.org/wiki/Merge_sort) -- [YouTube](https://www.youtube.com/watch?v=KF2j-9iSf4Q&index=27&list=PLLXdhg_r2hKA7DPDsunoDZ-Z769jWn4R8) --- src/algorithms/sorting/merge-sort/README.md @@ -1,26 +1,23 @@ # Merge Sort -_Read this in other languages:_ -[_한국어_](README.ko-KR.md) - -In computer science, merge sort (also commonly spelled -mergesort) is an efficient, general-purpose, -comparison-based sorting algorithm. Most implementations -produce a stable sort, which means that the implementation -preserves the input order of equal elements in the sorted -output. Mergesort is a divide and conquer algorithm that +In computer science, merge sort (also commonly spelled +mergesort) is an efficient, general-purpose, +comparison-based sorting algorithm. Most implementations +produce a stable sort, which means that the implementation +preserves the input order of equal elements in the sorted +output. Mergesort is a divide and conquer algorithm that was invented by John von Neumann in 1945. -An example of merge sort. First divide the list into -the smallest unit (1 element), then compare each -element with the adjacent list to sort and merge the -two adjacent lists. Finally all the elements are sorted +An example of merge sort. First divide the list into +the smallest unit (1 element), then compare each +element with the adjacent list to sort and merge the +two adjacent lists. Finally all the elements are sorted and merged. ![Merge Sort](https://upload.wikimedia.org/wikipedia/commons/c/cc/Merge-sort-example-300px.gif) -A recursive merge sort algorithm used to sort an array of 7 -integer values. These are the steps a human would take to +A recursive merge sort algorithm used to sort an array of 7 +integer values. These are the steps a human would take to emulate merge sort (top-down). ![Merge Sort](https://upload.wikimedia.org/wikipedia/commons/e/e6/Merge_sort_algorithm_diagram.svg)
Repository Name: javascript-algorithms
Owner: trekhleb
Primary Language: JavaScript
Language: JavaScript
Stars: 190,336
Forks: 30,518
Description: 📝 Algorithms and data structures implemented in JavaScript with explanations and links to further readings
Repository: trekhleb_javascript-algorithms
type: DOC_CHANGE
Comment: Matched \bdoc(umentation)?\b in message

Hash: c66c3aef8126bf943b2746afb6a9c2a536990db5
Date: null
Author: Fice-T
commit_message: Fix graphical bug in cypher theme when return status is non-zero - When the previous command's return status is non-zero and the prompt is redrawn, graphical errors sometimes arise due to the fg{red} not being escaped.
IsMerge: false
Additions: 1
Deletions: 1
Total Changes: 0
git_diff:
--- cypher.zsh-theme @@ -1,4 +1,4 @@ # Based on evan's prompt # Shows the exit status of the last command if non-zero # Uses "#" instead of "»" when running with elevated privileges -PROMPT="%m %{${fg_bold[red]}%}:: %{${fg[green]}%}%3~%(0?. . ${fg[red]}%? )%{${fg[blue]}%}»%{${reset_color}%} " +PROMPT="%m %{${fg_bold[red]}%}:: %{${fg[green]}%}%3~%(0?. . %{${fg[red]}%}%? )%{${fg[blue]}%}»%{${reset_color}%} "
Repository Name: ohmyzsh_ohmyzsh.json
Owner: null
Primary Language: null
Language: null
Stars: null
Forks: null
Description: null
Repository: ohmyzsh_ohmyzsh.json
type: BUG_FIX
Comment: 5, fix written in commit msg

Hash: 1b526ff3b84d74a7aa65d7f9348176af8911df16
Date: 2025-03-18 14:56:03
Author: Adrian Kuegel
commit_message: Reverts f5c96c35985a9475ee69fbf4c9c8c056c97ec7cd PiperOrigin-RevId: 737915974
IsMerge: false
Additions: 10
Deletions: 1
Total Changes: 11
git_diff:
--- third_party/xla/xla/service/scan_loop_accumulator_input_unification.cc @@ -33,7 +33,6 @@ limitations under the License. #include "xla/hlo/ir/hlo_opcode.h" #include "xla/hlo/transforms/simplifiers/tuple_simplifier.h" #include "xla/literal_util.h" -#include "xla/service/call_graph.h" #include "xla/service/pattern_matcher.h" #include "xla/service/while_loop_simplifier.h" #include "xla/service/while_loop_unroller.h" @@ -210,16 +209,8 @@ FindAccumulatorInputPairs(const HloAliasAnalysis& alias_analysis, absl::StatusOr<bool> UnifyAccumulatorWithInput( const HloAliasAnalysis& alias_analysis, std::vector<std::pair<HloInstruction*, WhileLoopConfig>> unrollable_loops) { - // TODO(b/333521102): Helper function to check if a computation is a body of a - // while call. Currently, IsWhileBodyComputation api call does not work - // properly so we check it ourself. We should switch to IsWhileBodyComputation - // when it's fixed. - std::unique_ptr<CallGraph> call_graph = - CallGraph::Build(&alias_analysis.dataflow_analysis().module()); auto is_while_body = [&](HloComputation* comp) { - std::vector<HloInstruction*> callers = - call_graph->GetComputationCallers(comp); - return !callers.empty() && callers.at(0)->opcode() == HloOpcode::kWhile; + return comp->GetUniqueCaller(HloOpcode::kWhile).has_value(); }; std::vector<HloInstruction*> changed_loops;
Repository Name: tensorflow
Owner: tensorflow
Primary Language: C++
Language: C++
Stars: 188,388
Forks: 74,565
Description: An Open Source Machine Learning Framework for Everyone
Repository: tensorflow_tensorflow
type: CONFIG_CHANGE
Comment: Obvious

Hash: 553149732aef68da28208eb81e163b066485bf1c
Date: 2025-02-08 06:16:32
Author: Ruifeng Zheng
commit_message: [SPARK-50945][ML][PYTHON][CONNECT] Support Summarizer and SummaryBuilder on Connect ### What changes were proposed in this pull request? Support Summarizer and SummaryBuilder on Connect ### Why are the changes needed? For feature parity ### Does this PR introduce _any_ user-facing change? yes, new feature supported ``` In [2]: data = [ ...: [Vectors.dense([1, 0, 0, -2]), 1.0], ...: [Vectors.dense([4, 5, 0, 3]), 2.0], ...: [Vectors.dense([6, 7, 0, 8]), 1.0], ...: [Vectors.dense([9, 0, 0, 1]), 1.0], ...: ] ...: df = spark.createDataFrame(data, ["features", "weight"]) ...: ...: summarizer = Summarizer.metrics("mean", "count") In [3]: df.select(summarizer.summary(df.features)).show(truncate=False) +--------------------------------+ |aggregate_metrics(features, 1.0)| +--------------------------------+ |{[5.0,3.0,0.0,2.5], 4} | +--------------------------------+ ``` ### How was this patch tested? new tests ### Was this patch authored or co-authored using generative AI tooling? no Closes #49847 from zhengruifeng/ml_connect_summarizer. Authored-by: Ruifeng Zheng <[email protected]> Signed-off-by: Ruifeng Zheng <[email protected]>
IsMerge: false
Additions: 130
Deletions: 7
Total Changes: 137
git_diff:
--- mllib/src/main/scala/org/apache/spark/ml/stat/Summarizer.scala @@ -26,15 +26,13 @@ import org.apache.spark.ml.linalg.{Vector, Vectors, VectorUDT} import org.apache.spark.rdd.RDD import org.apache.spark.sql.Column import org.apache.spark.sql.catalyst.InternalRow -import org.apache.spark.sql.catalyst.expressions.{Expression, ImplicitCastInputTypes, Literal} +import org.apache.spark.sql.catalyst.expressions.{Expression, ImplicitCastInputTypes} import org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate import org.apache.spark.sql.catalyst.trees.BinaryLike -import org.apache.spark.sql.catalyst.util.ArrayData import org.apache.spark.sql.classic.ClassicConversions.ColumnConstructorExt import org.apache.spark.sql.classic.ExpressionUtils.expression import org.apache.spark.sql.functions.lit import org.apache.spark.sql.types._ -import org.apache.spark.unsafe.types.UTF8String import org.apache.spark.util.Utils /** @@ -292,14 +290,6 @@ private[spark] object SummaryBuilderImpl extends Logging { StructType(fields) } - private def extractRequestedMetrics(metrics: Expression): (Seq[Metric], Seq[ComputeMetric]) = { - metrics.eval() match { - case arrayData: ArrayData => - val requested = arrayData.toSeq[UTF8String](StringType) - getRelevantMetrics(requested.map(_.toString)) - } - } - private val vectorUDT = new VectorUDT /** @@ -352,7 +342,7 @@ private[spark] object SummaryBuilderImpl extends Logging { private[stat] case object ComputeMin extends ComputeMetric - private[spark] case class MetricsAggregate( + private case class MetricsAggregate( requestedMetrics: Seq[Metric], requestedComputeMetrics: Seq[ComputeMetric], featuresExpr: Expression, @@ -363,27 +353,6 @@ private[spark] object SummaryBuilderImpl extends Logging { with ImplicitCastInputTypes with BinaryLike[Expression] { - // helper constructor - def this( - metrics: (Seq[Metric], Seq[ComputeMetric]), - featuresExpr: Expression, - weightExpr: Expression) = { - this(metrics._1, metrics._2, featuresExpr, weightExpr, 0, 0) - } - - def this( - requestedMetrics: Expression, - featuresExpr: Expression, - weightExpr: Expression) = { - this(extractRequestedMetrics(requestedMetrics), featuresExpr, weightExpr) - } - - def this( - requestedMetrics: Expression, - featuresExpr: Expression) = { - this(requestedMetrics, featuresExpr, Literal(1.0)) - } - override def eval(state: SummarizerBuffer): Any = { val metrics = requestedMetrics.map { case Mean => vectorUDT.serialize(state.mean) --- mllib/src/main/scala/org/apache/spark/sql/ml/InternalFunctionRegistration.scala @@ -17,7 +17,6 @@ package org.apache.spark.sql.ml import org.apache.spark.ml.linalg.{SparseVector, Vector, Vectors} -import org.apache.spark.ml.stat._ import org.apache.spark.mllib.linalg.{SparseVector => OldSparseVector, Vector => OldVector} import org.apache.spark.sql.{SparkSessionExtensions, SparkSessionExtensionsProvider} import org.apache.spark.sql.catalyst.analysis.FunctionRegistry @@ -97,9 +96,6 @@ object InternalFunctionRegistration { case exprs => throw QueryCompilationErrors.wrongNumArgsError("array_to_vector", "1", exprs.size) } - - FunctionRegistry - .registerInternalExpression[SummaryBuilderImpl.MetricsAggregate]("aggregate_metrics") } class InternalFunctionRegistration extends SparkSessionExtensionsProvider { --- python/pyspark/ml/stat.py @@ -416,17 +416,6 @@ class Summarizer: @staticmethod def _get_single_metric(col: Column, weightCol: Optional[Column], metric: str) -> Column: col, weightCol = Summarizer._check_param(col, weightCol) - - if is_remote(): - # 
The alias name maybe different from the one in Spark Classic, - # because we cannot get the same string representation of the Column object. - return ( - Summarizer.metrics(metric) - .summary(col, weightCol) - .getField(metric) - .alias(f"{metric}({col._expr})") - ) - return Column( JavaWrapper._new_java_obj( "org.apache.spark.ml.stat.Summarizer." + metric, col._jc, weightCol._jc @@ -468,12 +457,6 @@ class Summarizer: ------- :py:class:`pyspark.ml.stat.SummaryBuilder` """ - if is_remote(): - builder = SummaryBuilder(None) - builder._metrics = [m for m in metrics] # type: ignore[attr-defined] - builder._java_obj = None - return builder - from pyspark.core.context import SparkContext from pyspark.sql.classic.column import _to_seq @@ -498,8 +481,7 @@ class SummaryBuilder(JavaWrapper): """ def __init__(self, jSummaryBuilder: "JavaObject"): - if not is_remote(): - super(SummaryBuilder, self).__init__(jSummaryBuilder) + super(SummaryBuilder, self).__init__(jSummaryBuilder) def summary(self, featuresCol: Column, weightCol: Optional[Column] = None) -> Column: """ @@ -521,16 +503,6 @@ class SummaryBuilder(JavaWrapper): an aggregate column that contains the statistics. The exact content of this structure is determined during the creation of the builder. """ - if is_remote(): - from pyspark.sql.connect.functions import builtin as F - - return F._invoke_function( - "aggregate_metrics", - F.array([F.lit(m) for m in self._metrics]), # type: ignore[attr-defined] - featuresCol, - weightCol if weightCol is not None else F.lit(1.0), - ) - featuresCol, weightCol = Summarizer._check_param(featuresCol, weightCol) assert self._java_obj is not None --- python/pyspark/ml/tests/test_stat.py @@ -18,73 +18,13 @@ import numpy as np import unittest -from pyspark.ml.linalg import Vectors, DenseVector -from pyspark.ml.stat import ( - ChiSquareTest, - Correlation, - KolmogorovSmirnovTest, - Summarizer, - SummaryBuilder, -) -from pyspark.sql import functions as F -from pyspark.sql import DataFrame, Row +from pyspark.ml.linalg import Vectors +from pyspark.ml.stat import ChiSquareTest, Correlation, KolmogorovSmirnovTest +from pyspark.sql import DataFrame from pyspark.testing.sqlutils import ReusedSQLTestCase class StatTestsMixin: - def test_summarizer(self): - spark = self.spark - data = [ - [Vectors.dense([1, 0, 0, -2]), 1.0], - [Vectors.dense([4, 5, 0, 3]), 2.0], - [Vectors.dense([6, 7, 0, 8]), 1.0], - [Vectors.dense([9, 0, 0, 1]), 1.0], - ] - df = spark.createDataFrame(data, ["features", "weight"]) - - summarizer = Summarizer.metrics("mean", "count") - self.assertIsInstance(summarizer, SummaryBuilder) - - res1 = df.select(summarizer.summary(df.features)) - self.assertEqual(res1.columns, ["aggregate_metrics(features, 1.0)"]) - self.assertEqual(res1.count(), 1) - self.assertEqual(res1.head()[0], Row(mean=DenseVector([5.0, 3.0, 0.0, 2.5]), count=4)) - - res2 = df.select(summarizer.summary(F.col("features"), df.weight)) - self.assertEqual(res2.columns, ["aggregate_metrics(features, weight)"]) - self.assertEqual(res2.count(), 1) - self.assertEqual( - res2.head()[0], - Row(mean=DenseVector([4.8, 3.4, 0.0, 2.6]), count=4), - res2.head()[0][0].toArray(), - ) - - res3 = df.select(Summarizer.max(df.features, df.weight)) - self.assertEqual(res3.columns, ["max(features)"]) - self.assertEqual(res3.count(), 1) - self.assertEqual(res3.head()[0], DenseVector([9.0, 7.0, 0.0, 8.0])) - - res4 = df.select(Summarizer.numNonZeros(F.col("features"))) - self.assertEqual(res4.columns, ["numNonZeros(features)"]) - self.assertEqual(res4.count(), 
1) - self.assertEqual(res4.head()[0], DenseVector([4.0, 2.0, 0.0, 4.0])) - - res5 = df.select(Summarizer.normL1(F.col("features"))) - self.assertEqual(res5.columns, ["normL1(features)"]) - self.assertEqual(res5.count(), 1) - self.assertEqual(res5.head()[0], DenseVector([20.0, 12.0, 0.0, 14.0])) - - res6 = df.select(Summarizer.normL2(F.col("features"))) - self.assertEqual(res6.columns, ["normL2(features)"]) - self.assertEqual(res6.count(), 1) - self.assertTrue( - np.allclose( - res6.head()[0].toArray(), - [11.5758369, 8.60232527, 0.0, 8.83176087], - atol=1e-4, - ), - ) - def test_chisquaretest(self): spark = self.spark data = [ --- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala @@ -905,7 +905,7 @@ object FunctionRegistry { /** Registry for internal functions used by Connect and the Column API. */ private[sql] val internal: SimpleFunctionRegistry = new SimpleFunctionRegistry - private[spark] def registerInternalExpression[T <: Expression : ClassTag]( + private def registerInternalExpression[T <: Expression : ClassTag]( name: String, setAlias: Boolean = false): Unit = { val (info, builder) = FunctionRegistryBase.build[T](name, None)
Repository Name: apache-spark
Owner: null
Primary Language: Scala
Language: Scala
Stars: null
Forks: null
Description: Apache Spark - A unified analytics engine for large-scale data processing
Repository: _apache-spark
type: NEW_FEAT
Comment: obvious

Hash: 02bdd68e7daf59a0081ad9b05b5a17624d620dc3
Date: 2025-03-21 17:03:07
Author: Philip Herron
commit_message: gccrs: Remove bad assertion in name resolution This was a handy debug assertion but only works for valid rust code. This needs to handle the case where the type is not resolved which is a valid case. Fixes Rust-GCC#2423 gcc/rust/ChangeLog: * resolve/rust-ast-resolve-item.cc (ResolveItem::visit): remove assertions gcc/testsuite/ChangeLog: * rust/compile/nr2/exclude: nr2 can't handle this * rust/compile/issue-2423.rs: New test. Signed-off-by: Philip Herron <[email protected]>
IsMerge: false
Additions: 39
Deletions: 6
Total Changes: 45
git_diff:
--- gcc/rust/resolve/rust-ast-resolve-item.cc @@ -582,14 +582,7 @@ ResolveItem::visit (AST::InherentImpl &impl_block) // Setup paths CanonicalPath self_cpath = CanonicalPath::create_empty (); bool ok = ResolveTypeToCanonicalPath::go (impl_block.get_type (), self_cpath); - if (!ok) - { - resolver->get_name_scope ().pop (); - resolver->get_type_scope ().pop (); - resolver->get_label_scope ().pop (); - return; - } - + rust_assert (ok); rust_debug ("AST::InherentImpl resolve Self: {%s}", self_cpath.get ().c_str ()); @@ -678,17 +671,12 @@ ResolveItem::visit (AST::TraitImpl &impl_block) return; } + bool ok; // setup paths CanonicalPath canonical_trait_type = CanonicalPath::create_empty (); - bool ok = ResolveTypeToCanonicalPath::go (impl_block.get_trait_path (), - canonical_trait_type); - if (!ok) - { - resolver->get_name_scope ().pop (); - resolver->get_type_scope ().pop (); - resolver->get_label_scope ().pop (); - return; - } + ok = ResolveTypeToCanonicalPath::go (impl_block.get_trait_path (), + canonical_trait_type); + rust_assert (ok); rust_debug ("AST::TraitImpl resolve trait type: {%s}", canonical_trait_type.get ().c_str ()); @@ -696,13 +684,7 @@ ResolveItem::visit (AST::TraitImpl &impl_block) CanonicalPath canonical_impl_type = CanonicalPath::create_empty (); ok = ResolveTypeToCanonicalPath::go (impl_block.get_type (), canonical_impl_type); - if (!ok) - { - resolver->get_name_scope ().pop (); - resolver->get_type_scope ().pop (); - resolver->get_label_scope ().pop (); - return; - } + rust_assert (ok); rust_debug ("AST::TraitImpl resolve self: {%s}", canonical_impl_type.get ().c_str ()); --- gcc/testsuite/rust/compile/issue-2423.rs @@ -1,14 +0,0 @@ -impl NonExistant { - // { dg-error "failed to resolve" "" { target *-*-* } .-1 } - fn test() {} -} - -impl NotFound for NonExistant { - // { dg-error "failed to resolve" "" { target *-*-* } .-1 } - fn test() {} -} - -trait A {} - -impl A for NotFound {} -// { dg-error "failed to resolve" "" { target *-*-* } .-1 } --- gcc/testsuite/rust/compile/nr2/exclude @@ -206,5 +206,4 @@ issue-1773.rs issue-2905-1.rs issue-2905-2.rs issue-2907.rs -issue-2423.rs # please don't delete the trailing newline
Repository Name: gcc
Owner: gcc-mirror
Primary Language: C
Language: C
Stars: null
Forks: null
Description: Compiler
Repository: gcc-mirror_gcc
type: BUG_FIX
Comment: obvious

Hash: 2a9a7a0e4a64d813c9863d70f2203e06202ef010
Date: 2023-12-21 19:08:32
Author: fatedier
commit_message: fix login retry interval (#3879)
IsMerge: false
Additions: 2
Deletions: 2
Total Changes: 4
git_diff:
--- client/service.go @@ -332,7 +332,7 @@ func (svr *Service) loopLoginUntilSuccess(maxInterval time.Duration, firstLoginE // try to reconnect to server until success wait.BackoffUntil(loginFunc, wait.NewFastBackoffManager( wait.FastBackoffOptions{ - Duration: time.Second, + Duration: time.Millisecond, Factor: 2, Jitter: 0.1, MaxDuration: maxInterval, --- pkg/util/version/version.go @@ -19,7 +19,7 @@ import ( "strings" ) -var version = "0.53.2" +var version = "0.53.1" func Full() string { return version
Repository Name: frp
Owner: fatedier
Primary Language: Go
Language: Go
Stars: 91,116
Forks: 13,769
Description: A fast reverse proxy to help you expose a local server behind a NAT or firewall to the internet.
Repository: fatedier_frp
type: CODE_IMPROVEMENT
Comment: Code change: type annotation added

Hash: 3398d9f35f9952e9e5f1ca8b565ccd452ed4e64e
Date: 2022-09-28 23:19:11
Author: david-gary
commit_message: Updated link for Cuckoo Hashing paper (#691)
IsMerge: false
Additions: 1
Deletions: 1
Total Changes: 2
git_diff:
--- data_structures/README.md @@ -3,7 +3,7 @@ * [Dynamic Hash Tables](http://www.csd.uoc.gr/~hy460/pdf/Dynamic%20Hash%20Tables.pdf) * [Simple, Fast, and Practical Non-Blocking and Blocking Concurrent Queue Algorithms](https://www.cs.rochester.edu/u/scott/papers/1996_PODC_queues.pdf) * [RRB-Trees: Efficient Immutable Vectors](http://infoscience.epfl.ch/record/169879/files/RMTrees.pdf) -* [Cuckoo Hashing](https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.104.9191&rep=rep1&type=pdf) +* [Cuckoo Hashing](https://www.cs.tau.ac.il/~shanir/advanced-seminar-data-structures-2009/bib/pagh01cuckoo.pdf) * [Fenwick Tree](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.14.8917&rep=rep1&type=pdf) * [Hopscotch Hashing](http://mcg.cs.tau.ac.il/papers/disc2008-hopscotch.pdf)
Repository Name: papers-we-love
Owner: papers-we-love
Primary Language: Shell
Language: Shell
Stars: 91,347
Forks: 5,859
Description: Papers from the computer science community to read and discuss.
Repository: papers-we-love_papers-we-love
type: CONFIG_CHANGE
Comment: Very small changes

Hash: b5a6ff5bdd70b5d35db473b3519866537fec0088
Date: null
Author: Adrian Holovaty
commit_message: Fixed #488 -- removetags template filter now removes tags without a space before the final slash git-svn-id: http://code.djangoproject.com/svn/django/trunk@1018 bcc190cf-cafb-0310-a4f2-bffc1f526a37
IsMerge: false
Additions: 1
Deletions: 1
Total Changes: 0
git_diff:
--- defaultfilters.py @@ -175,7 +175,7 @@ def removetags(value, tags): "Removes a space separated list of [X]HTML tags from the output" tags = [re.escape(tag) for tag in tags.split()] tags_re = '(%s)' % '|'.join(tags) - starttag_re = re.compile('<%s(>|(\s+[^>]*>))' % tags_re) + starttag_re = re.compile(r'<%s(/?>|(\s+[^>]*>))' % tags_re) endtag_re = re.compile('</%s>' % tags_re) value = starttag_re.sub('', value) value = endtag_re.sub('', value)
Repository Name: django_django.json
Owner: null
Primary Language: null
Language: null
Stars: null
Forks: null
Description: null
Repository: django_django.json
type: BUG_FIX
Comment: 4, Fix written in commit msg which clearly fixes a code error

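The one-character regex change in this record is easy to misread, so here is a minimal sketch of the before-and-after behavior. It mirrors the `starttag_re` patterns from the diff above; the tag list and sample string are illustrative, not taken from Django:

```python
import re

# Mirror the two starttag_re patterns from the django diff above.
tags = ["br", "img"]
tags_re = "(%s)" % "|".join(re.escape(t) for t in tags)

old_start = re.compile('<%s(>|(\\s+[^>]*>))' % tags_re)    # before the fix
new_start = re.compile(r'<%s(/?>|(\s+[^>]*>))' % tags_re)  # after: /? allows "<br/>"

sample = 'line one<br/>line two<br >line three'
print(old_start.sub('', sample))  # "<br/>" survives: no space before the slash
print(new_start.sub('', sample))  # both start tags removed
```
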
Hash: a836006a5b81e01931a42b17deb31d89eef40afa
Date: 2025-02-19 16:29:29
Author: Jiachi Liu
commit_message: [dev-overlay] refactor to group the middleware response utils (#76199)
IsMerge: false
Additions: 69
Deletions: 83
Total Changes: 152
git_diff:
--- packages/next/src/client/components/react-dev-overlay/_experimental/font/get-dev-overlay-font-middleware.ts @@ -1,9 +1,9 @@ +import { internalServerError } from '../../server/shared' +import { notFound } from '../../../not-found' import type { ServerResponse, IncomingMessage } from 'http' import path from 'path' import * as fs from 'fs/promises' import { constants } from 'fs' -import * as Log from '../../../../../build/output/log' -import { middlewareResponse } from '../../server/middleware-response' const FONT_PREFIX = '/__nextjs_font/' @@ -34,14 +34,14 @@ export function getDevOverlayFontMiddleware() { const fontFile = pathname.replace(FONT_PREFIX, '') if (!VALID_FONTS.includes(fontFile)) { - return middlewareResponse.notFound(res) + return notFound() } const fontPath = path.resolve(__dirname, fontFile) const fileExists = await checkFileExists(fontPath) if (!fileExists) { - return middlewareResponse.notFound(res) + return notFound() } const fontData = await fs.readFile(fontPath) @@ -50,11 +50,11 @@ export function getDevOverlayFontMiddleware() { }) res.end(fontData) } catch (err) { - Log.error( + console.error( 'Failed to serve font:', err instanceof Error ? err.message : err ) - return middlewareResponse.internalServerError(res) + return internalServerError(res) } } } --- packages/next/src/client/components/react-dev-overlay/server/get-next-error-feedback-middleware.ts @@ -1,5 +1,6 @@ import { eventErrorFeedback } from '../../../../telemetry/events/error-feedback' -import { middlewareResponse } from './middleware-response' +import { badRequest, internalServerError, noContent } from './shared' + import type { ServerResponse, IncomingMessage } from 'http' import type { Telemetry } from '../../../../telemetry/storage' @@ -21,7 +22,7 @@ export function getNextErrorFeedbackMiddleware(telemetry: Telemetry) { const wasHelpful = searchParams.get('wasHelpful') if (!errorCode || !wasHelpful) { - return middlewareResponse.badRequest(res) + return badRequest(res) } await telemetry.record( @@ -31,9 +32,9 @@ export function getNextErrorFeedbackMiddleware(telemetry: Telemetry) { }) ) - return middlewareResponse.noContent(res) + return noContent(res) } catch (error) { - return middlewareResponse.internalServerError(res) + return internalServerError(res) } } } --- packages/next/src/client/components/react-dev-overlay/server/middleware-response.ts @@ -1,34 +0,0 @@ -import type { ServerResponse } from 'http' -import { inspect } from 'util' - -export const middlewareResponse = { - noContent(res: ServerResponse) { - res.statusCode = 204 - res.end('No Content') - }, - notFound(res: ServerResponse) { - res.statusCode = 404 - res.end('Not Found') - }, - badRequest(res: ServerResponse) { - res.statusCode = 400 - res.end('Bad Request') - }, - internalServerError(res: ServerResponse, error?: unknown) { - res.statusCode = 500 - res.setHeader('Content-Type', 'text/plain') - res.end( - error !== undefined - ? 
inspect(error, { colors: false }) - : 'Internal Server Error' - ) - }, - json(res: ServerResponse, data: any) { - res - .setHeader('Content-Type', 'application/json') - .end(Buffer.from(JSON.stringify(data))) - }, - jsonString(res: ServerResponse, data: string) { - res.setHeader('Content-Type', 'application/json').end(Buffer.from(data)) - }, -} --- packages/next/src/client/components/react-dev-overlay/server/middleware-turbopack.ts @@ -1,11 +1,17 @@ import type { IncomingMessage, ServerResponse } from 'http' import { + badRequest, getOriginalCodeFrame, + internalServerError, + json, + jsonString, + noContent, + notFound, type OriginalStackFrameResponse, type OriginalStackFramesRequest, type OriginalStackFramesResponse, } from './shared' -import { middlewareResponse } from './middleware-response' + import fs, { constants as FS } from 'fs/promises' import path from 'path' import url from 'url' @@ -370,7 +376,7 @@ export function getOverlayMiddleware(project: Project) { if (pathname === '/__nextjs_original-stack-frames') { if (req.method !== 'POST') { - return middlewareResponse.badRequest(res) + return badRequest(res) } const body = await new Promise<string>((resolve, reject) => { @@ -404,26 +410,26 @@ export function getOverlayMiddleware(project: Project) { }) ) - return middlewareResponse.json(res, result) + return json(res, result) } else if (pathname === '/__nextjs_launch-editor') { const frame = createStackFrame(searchParams) - if (!frame) return middlewareResponse.badRequest(res) + if (!frame) return badRequest(res) const fileExists = await fs.access(frame.file, FS.F_OK).then( () => true, () => false ) - if (!fileExists) return middlewareResponse.notFound(res) + if (!fileExists) return notFound(res) try { launchEditor(frame.file, frame.line ?? 1, frame.column ?? 1) } catch (err) { console.log('Failed to launch editor:', err) - return middlewareResponse.internalServerError(res) + return internalServerError(res) } - return middlewareResponse.noContent(res) + return noContent(res) } return next() @@ -445,7 +451,7 @@ export function getSourceMapMiddleware(project: Project) { let filename = searchParams.get('filename') if (!filename) { - return middlewareResponse.badRequest(res) + return badRequest(res) } // TODO(veil): Always try the native version first. 
@@ -457,10 +463,10 @@ export function getSourceMapMiddleware(project: Project) { const sourceMap = findSourceMap(filename) if (sourceMap) { - return middlewareResponse.json(res, sourceMap.payload) + return json(res, sourceMap.payload) } - return middlewareResponse.noContent(res) + return noContent(res) } try { @@ -474,20 +480,20 @@ export function getSourceMapMiddleware(project: Project) { const sourceMapString = await project.getSourceMap(filename) if (sourceMapString) { - return middlewareResponse.jsonString(res, sourceMapString) + return jsonString(res, sourceMapString) } if (filename.startsWith('file:')) { const sourceMap = await getSourceMapFromFile(filename) if (sourceMap) { - return middlewareResponse.json(res, sourceMap) + return json(res, sourceMap) } } } catch (error) { console.error('Failed to get source map:', error) } - middlewareResponse.noContent(res) + noContent(res) } } --- packages/next/src/client/components/react-dev-overlay/server/middleware-webpack.ts @@ -10,12 +10,16 @@ import type { StackFrame } from 'next/dist/compiled/stacktrace-parser' import { getSourceMapFromFile } from '../internal/helpers/get-source-map-from-file' import { launchEditor } from '../internal/helpers/launchEditor' import { + badRequest, getOriginalCodeFrame, + internalServerError, + json, + noContent, + notFound, type OriginalStackFrameResponse, type OriginalStackFramesRequest, type OriginalStackFramesResponse, } from './shared' -import { middlewareResponse } from './middleware-response' export { getServerError } from '../internal/helpers/node-stack-frames' export { parseStack } from '../internal/helpers/parse-stack' export { getSourceMapFromFile } @@ -518,7 +522,7 @@ export function getOverlayMiddleware(options: { if (pathname === '/__nextjs_original-stack-frames') { if (req.method !== 'POST') { - return middlewareResponse.badRequest(res) + return badRequest(res) } const body = await new Promise<string>((resolve, reject) => { @@ -535,7 +539,7 @@ export function getOverlayMiddleware(options: { body ) as OriginalStackFramesRequest - return middlewareResponse.json( + return json( res, await getOriginalStackFrames({ isServer, @@ -553,7 +557,7 @@ export function getOverlayMiddleware(options: { }) ) } catch (err) { - return middlewareResponse.badRequest(res) + return badRequest(res) } } else if (pathname === '/__nextjs_launch-editor') { const frame = { @@ -564,7 +568,7 @@ export function getOverlayMiddleware(options: { arguments: searchParams.getAll('arguments').filter(Boolean), } satisfies StackFrame - if (!frame.file) return middlewareResponse.badRequest(res) + if (!frame.file) return badRequest(res) // frame files may start with their webpack layer, like (middleware)/middleware.js const filePath = path.resolve( @@ -575,16 +579,16 @@ export function getOverlayMiddleware(options: { () => true, () => false ) - if (!fileExists) return middlewareResponse.notFound(res) + if (!fileExists) return notFound(res) try { launchEditor(filePath, frame.lineNumber, frame.column ?? 
1) } catch (err) { console.log('Failed to launch editor:', err) - return middlewareResponse.internalServerError(res) + return internalServerError(res) } - return middlewareResponse.noContent(res) + return noContent(res) } return next() @@ -612,7 +616,7 @@ export function getSourceMapMiddleware(options: { const filename = searchParams.get('filename') if (!filename) { - return middlewareResponse.badRequest(res) + return badRequest(res) } let source: Source | undefined @@ -636,13 +640,13 @@ export function getSourceMapMiddleware(options: { }, }) } catch (error) { - return middlewareResponse.internalServerError(res, error) + return internalServerError(res, error) } if (!source) { - return middlewareResponse.noContent(res) + return noContent(res) } - return middlewareResponse.json(res, source.sourceMap) + return json(res, source.sourceMap) } } --- packages/next/src/client/components/react-dev-overlay/server/shared.ts @@ -1,4 +1,6 @@ import type { StackFrame } from 'stacktrace-parser' +import type { ServerResponse } from 'http' +import { inspect } from 'util' import { codeFrameColumns } from 'next/dist/compiled/babel/code-frame' import isInternal, { nextInternalsRe, @@ -80,3 +82,38 @@ export function getOriginalCodeFrame( { forceColor: colors } ) } + +export function noContent(res: ServerResponse) { + res.statusCode = 204 + res.end('No Content') +} + +export function badRequest(res: ServerResponse) { + res.statusCode = 400 + res.end('Bad Request') +} + +export function notFound(res: ServerResponse) { + res.statusCode = 404 + res.end('Not Found') +} + +export function internalServerError(res: ServerResponse, error?: unknown) { + res.statusCode = 500 + res.setHeader('Content-Type', 'text/plain') + res.end( + error !== undefined + ? inspect(error, { colors: false }) + : 'Internal Server Error' + ) +} + +export function json(res: ServerResponse, data: any) { + res + .setHeader('Content-Type', 'application/json') + .end(Buffer.from(JSON.stringify(data))) +} + +export function jsonString(res: ServerResponse, data: string) { + res.setHeader('Content-Type', 'application/json').end(Buffer.from(data)) +}
next.js
vercel
JavaScript
JavaScript
129,891
27,821
The React Framework
vercel_next.js
CODE_IMPROVEMENT
obvious
9257682b01d71bf9cb554c1e7ba6b48e9b6c5dc1
null
Eric Seidel
Remove stray period added from web editor.
false
1
1
0
--- analyze.dart @@ -12,7 +12,7 @@ import 'package:path/path.dart' as path; import '../artifacts.dart'; import '../base/logging.dart'; import '../base/process.dart'; -import '../build_configuration.dart';. +import '../build_configuration.dart'; import '../runner/flutter_command.dart'; class AnalyzeCommand extends FlutterCommand {
flutter_flutter.json
null
null
null
null
null
null
flutter_flutter.json
BUG_FIX
5, removed a stray period which might have been added by mistake
f03bd9a1505c1b3d4ec7a816155c1c8ecf6659fb
2025-04-06 05:45:17
chromium-autoroll
Roll Chrome Android ARM64 PGO Profile Roll Chrome Android ARM64 PGO profile from chrome-android64-main-1743905357-7c20a3c8985d42cbc34bcbbe73908b6a600698cb-a205f17afa78eff1b6409fe5af1490cae4580143.profdata to chrome-android64-main-1743910398-b21a73d7b7a75bf822dd616e602f725ae152d5cb-b6a0e007e2e543a3d77226a228544e17d4a4b3e2.profdata If this roll has caused a breakage, revert this CL and stop the roller using the controls here: https://autoroll.skia.org/r/pgo-android-arm64-chromium Please CC [email protected],[email protected] on the revert to ensure that a human is aware of the problem. To file a bug in Chromium main branch: https://bugs.chromium.org/p/chromium/issues/entry To report a problem with the AutoRoller itself, please file a bug: https://issues.skia.org/issues/new?component=1389291&template=1850622 Documentation for the AutoRoller is here: https://skia.googlesource.com/buildbot/+doc/main/autoroll/README.md Tbr: [email protected] Merge-Approval-Bypass: Chrome autoroller Change-Id: Iad40580241af20bfbfc7761aebdc05995a30ca0e Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6436449 Commit-Queue: chromium-autoroll <[email protected]> Bot-Commit: chromium-autoroll <[email protected]> Cr-Commit-Position: refs/heads/main@{#1443168}
false
1
1
2
--- chrome/build/android-arm64.pgo.txt @@ -1 +1 @@ -chrome-android64-main-1743905357-7c20a3c8985d42cbc34bcbbe73908b6a600698cb-a205f17afa78eff1b6409fe5af1490cae4580143.profdata +chrome-android64-main-1743910398-b21a73d7b7a75bf822dd616e602f725ae152d5cb-b6a0e007e2e543a3d77226a228544e17d4a4b3e2.profdata
chromium
null
C
C
null
null
Browser
_chromium
CONFIG_CHANGE
Just a PGO profile version bump in a config file
e527a1843e00ca077806480a9961925e4fcf4d85
2024-11-08 07:43:17
Lucas
chore: bump version 0.7.3 (#6743) * chore: update changelog * feat: add 'custom namespace' in plan description
false
31
1
32
--- CHANGELOG.md @@ -1,18 +1,4 @@ # Release Notes -## Version 0.7.3 - 07/11/2024 -### New Features -- Enable custom URLs for published pages -- Support toggling headings -- Create a subpage by typing in the document -- Turn selected blocks into a subpage -- Add a manual date picker for the Date property - -### Bug Fixes -- Fixed an issue where the workspace owner was unable to delete spaces created by others -- Fixed cursor height inconsistencies with text height -- Fixed editing issues in Kanban cards -- Fixed an issue preventing images or files from being dropped into empty paragraphs - ## Version 0.7.2 - 22/10/2024 ### New Features - Copy link to block --- frontend/appflowy_flutter/lib/workspace/presentation/settings/pages/settings_plan_comparison_dialog.dart @@ -670,13 +670,6 @@ final _planLabels = [ _PlanItem( label: LocaleKeys.settings_comparePlanDialog_planLabels_itemFileUpload.tr(), ), - _PlanItem( - label: - LocaleKeys.settings_comparePlanDialog_planLabels_customNamespace.tr(), - tooltip: LocaleKeys - .settings_comparePlanDialog_planLabels_customNamespaceTooltip - .tr(), - ), ]; class _CellItem { @@ -715,9 +708,6 @@ final List<_CellItem> _freeLabels = [ _CellItem( label: LocaleKeys.settings_comparePlanDialog_freeLabels_itemFileUpload.tr(), ), - const _CellItem( - label: '', - ), ]; final List<_CellItem> _proLabels = [ @@ -749,8 +739,4 @@ final List<_CellItem> _proLabels = [ _CellItem( label: LocaleKeys.settings_comparePlanDialog_proLabels_itemFileUpload.tr(), ), - const _CellItem( - label: '', - icon: FlowySvgs.check_m, - ), ]; --- frontend/resources/translations/en.json @@ -935,11 +935,9 @@ "itemFive": "Mobile app", "itemSix": "AI Responses", "itemFileUpload": "File uploads", - "customNamespace": "Custom namespace", "tooltipSix": "Lifetime means the number of responses never reset", "intelligentSearch": "Intelligent search", - "tooltipSeven": "Allows you to customize part of the URL for your workspace", - "customNamespaceTooltip": "Custom published site URL" + "tooltipSeven": "Allows you to customize part of the URL for your workspace" }, "freeLabels": { "itemOne": "Charged per workspace",
appflowy
appflowy-io
Dart
Dart
61,077
4,078
Bring projects, wikis, and teams together with AI. AppFlowy is the AI collaborative workspace where you achieve more without losing control of your data. The leading open source Notion alternative.
appflowy-io_appflowy
NEW_FEAT
Obvious
4ac4ce3e4001114ee35d9e762f3fbdf94ba0ea7b
null
Vadim Pisarevsky
replaced 8x6 pattern with 9x6, which orientation can be determined without any ambiguity
false
0
0
0
--- pattern.pdf Binary files a/doc/pattern.pdf and /dev/null differ --- pattern.png Binary files /dev/null and b/doc/pattern.png differ
opencv_opencv.json
null
null
null
null
null
null
opencv_opencv.json
BUG_FIX
3, previous pattern was ambiguous
e81f7155154c0f5d40363e84a8f24a5b559b5eed
2025-01-29 20:40:35
Filippo Valsorda
lib/fips140: freeze v1.0.0 FIPS 140 module zip file make v1.0.0.zip make v1.0.0.test make updatesum Changed the v%.zip Makefile target to use the default of origin/master, as per its comment and intention, instead of the local master. Change-Id: I6a6a4656c097d11b8cdc96766394c984f9c47f82 Reviewed-on: https://go-review.googlesource.com/c/go/+/644645 Reviewed-by: Carlos Amedee <[email protected]> Reviewed-by: Roland Shoemaker <[email protected]> Auto-Submit: Filippo Valsorda <[email protected]> LUCI-TryBot-Result: Go LUCI <[email protected]>
false
2
1
3
--- lib/fips140/Makefile @@ -27,7 +27,7 @@ default: # copy and edit the 'go run' command by hand to use a different branch. v%.zip: git fetch origin master - go run ../../src/cmd/go/internal/fips140/mkzip.go v$* + go run ../../src/cmd/go/internal/fips140/mkzip.go -b master v$* # normally mkzip refuses to overwrite an existing zip file. # make v1.2.3.rm removes the zip file and and unpacked --- lib/fips140/fips140.sum @@ -9,4 +9,3 @@ # # go test cmd/go/internal/fips140 -update # -v1.0.0.zip b50508feaeff05d22516b21e1fd210bbf5d6a1e422eaf2cfa23fe379342713b8 --- lib/fips140/v1.0.0.zip Binary files a/lib/fips140/v1.0.0.zip and /dev/null differ
go
golang
Go
Go
126,191
17,926
The Go programming language
golang_go
CONFIG_CHANGE
Only config file changes have been made
384c5bed2d921027085cd1f2d4eff9a2652b6513
null
Rob Pike
update golden.out for two versions of hello, world SVN=121479
false
2
0
2
--- golden.out @@ -18,6 +18,7 @@ BUG: known to fail incorrectly hashmap.go:46: fatal error: optoas: no entry LSH-<uint32>UINT32 BUG: known to fail incorrectly =========== helloworld.go +hello, world =========== if.go =========== int_lit.go int_lit.go:5: syntax error @@ -47,3 +48,4 @@ test0.go:54: illegal types for operand (<Point2>{}) AS ({}) BUG: known to fail incorrectly =========== turing.go +Hello World!
golang_go.json
null
null
null
null
null
null
golang_go.json
CONFIG_CHANGE
5, version change
8740ffa760a098363863de18e3adff0f0d819fbd
2025-04-02 02:24:18
Michael Lazos
[Hierarchical Compile] Add cycle detection to graph region expansion (#150305) Pull Request resolved: https://github.com/pytorch/pytorch/pull/150305 Approved by: https://github.com/anijain2305 ghstack dependencies: #150303, #150304
false
78
28
106
--- torch/_dynamo/graph_deduplication.py @@ -15,9 +15,9 @@ from typing import Any import torch.fx from torch._dynamo import config from torch._higher_order_ops.utils import has_potential_input_alias_or_mutation +from torch.utils._pytree import tree_flatten from .graph_region_tracker import Node, Region -from .graph_utils import _flatten_args_kwargs log = logging.getLogger(__name__) @@ -87,6 +87,30 @@ when they are created in output_graph. return output_replacements +# flattens with support for slices +# Note: a better way to do this would +# be register/unregister slices as pytree nodes +# but there is no unregister API in the pytorch +# pytree impl +def _flatten_args_kwargs(args: Any) -> list[Node]: + fully_flattened = [] + + def flatten(args: Any) -> None: + flattened, _ = tree_flatten(args) + for arg in flattened: + if isinstance(arg, slice): + start = arg.start + stop = arg.stop + step = arg.step + flatten((start, stop, step)) + else: + fully_flattened.append(arg) + + flatten(args) + + return fully_flattened + + def _replace_region_with_subgraph( graph: torch.fx.Graph, region: Region, --- torch/_dynamo/graph_region_tracker.py @@ -27,8 +27,6 @@ import torch.fx from torch._subclasses.fake_tensor import FakeTensor from torch.utils._pytree import tree_flatten -from .graph_utils import _flatten_args_kwargs - T = TypeVar("T") @@ -255,8 +253,6 @@ class GraphRegionTracker: """ topological_ranking = {node: i for i, node in enumerate(graph.nodes)} region_groups_with_rank = [] - # needed to detect if replacing a region will create cycles - node_to_recursive_ancestors = _populate_recursive_ancestor_map(graph) # Create region groups; a region group is a group # of regions that are all identical. In this initial state @@ -285,12 +281,7 @@ class GraphRegionTracker: # overlap. 
seen_nodes: set[Node] = set() for region_group in region_groups: - fully_expand_region_group( - region_group, - seen_nodes, - node_to_recursive_ancestors, - self._is_identical, - ) + fully_expand_region_group(region_group, seen_nodes, self._is_identical) # sort topologically for region in region_group: region.sort(key=lambda n: topological_ranking[n]) @@ -306,7 +297,6 @@ class GraphRegionTracker: def fully_expand_region_group( regions: list[Region], seen_nodes: set[Node], - node_to_recursive_ancestors: dict[Node, set[Node]], is_identical_fn: Callable[[Node, Node], bool], ) -> None: debug_log("--------------------------------------------------") @@ -337,14 +327,11 @@ def fully_expand_region_group( # regions are only expanded if the node to add is valid # for ALL regions while current_node: - add_node = not _will_create_cycle( - current_node, regions[0], node_to_recursive_ancestors - ) + add_node = True nodes_to_add.clear() nodes_to_add.append(current_node) nodes_to_add_set = set(nodes_to_add) - for ind, region_it in enumerate(region_iters[1:]): - ind += 1 # compensate for the 0th region + for region_it in region_iters[1:]: node = region_it.next() debug_log("--------------------") @@ -357,9 +344,6 @@ def fully_expand_region_group( and node not in nodes_to_add_set and node.op != "placeholder" and is_identical_fn(node, current_node) - and not _will_create_cycle( - node, regions[ind], node_to_recursive_ancestors - ) ) nodes_to_add.append(node) nodes_to_add_set.add(node) @@ -384,35 +368,3 @@ def fully_expand_region_group( debug_log("end expand new region group: %s", regions) debug_log("--------------------------------------------------") - - -def _populate_recursive_ancestor_map(graph: torch.fx.Graph) -> dict[Node, set[Node]]: - node_to_recursive_ancestors: dict[Node, set[Node]] = {} - for node in graph.nodes: - node_to_recursive_ancestors[node] = set() - for node in graph.nodes: - all_args = _flatten_args_kwargs((node.args, node.kwargs)) - for arg in all_args: - if isinstance(arg, Node): - node_to_recursive_ancestors[node].update( - node_to_recursive_ancestors[arg] - ) - node_to_recursive_ancestors[node].add(node) - return node_to_recursive_ancestors - - -def _will_create_cycle( - node_to_add: Node, - region: Region, - node_to_recursive_ancestors: dict[Node, set[Node]], -) -> bool: - region_set: set[Node] = set(region) - region_ancestors: set[Node] = set( - tree_flatten([list(node_to_recursive_ancestors[node]) for node in region])[0] - ) - external_users = [user for user in node_to_add.users if user not in region_set] - for user in external_users: - if user in region_ancestors: - return True - - return False --- torch/_dynamo/graph_utils.py @@ -1,32 +1,6 @@ from collections import deque -from typing import Any from torch.fx import Graph, Node -from torch.utils._pytree import tree_flatten - - -# flattens with support for slices -# Note: a better way to do this would -# be register/unregister slices as pytree nodes -# but there is no unregister API in the pytorch -# pytree impl -def _flatten_args_kwargs(args: Any) -> list[Node]: - fully_flattened = [] - - def flatten(args: Any) -> None: - flattened, _ = tree_flatten(args) - for arg in flattened: - if isinstance(arg, slice): - start = arg.start - stop = arg.stop - step = arg.step - flatten((start, stop, step)) - else: - fully_flattened.append(arg) - - flatten(args) - - return fully_flattened def _detect_cycles(graph: Graph) -> str:
pytorch
null
python
Python
null
null
Tensors and Dynamic neural networks in Python with strong GPU acceleration
_pytorch
NEW_FEAT
Code change: new Python function
8aa2f6a4a2293129cda44f9cd7994fcc0145b882
2024-01-29 22:45:24
Erik Wilde
fixing typo (#108)
false
1
1
2
--- spec/standard-webhooks.md @@ -299,7 +299,7 @@ Some webhook consumers have firewalls (or other security mechanisms) in front of #### Server side request forgery (SSRF) -A server-side request forgery (SSRF) attack is when an attacker abuses functionality on the server to read or update internal resources. In the attack, the attacker supplies or modifies a URL which the server will then make a call to. By carefully selecting the URLs, the attacker may be able to read server configuration such as AWS metadata, connect to internal services like HTTP-enabled databases or perform post requests towards internal services which are not intended to be exposed. +A server-side request forgery (SSRF) attack is when an attacker abuses functionality on the server to read or update internal resources. In the attack, the attacker supplies or modifies a URL which the server will then make a call to. By carefully selecting the URLs, the attacker may be able to read server configuration such as AWS metadata, connect to internal services like http enabled databases or perform post requests towards internal services which are not intended to be exposed. Webhooks implementations are especially vulnerable to SSRF as they let their consumers (customers) add any URLs they want, which will be called from the internal webhook system.
standard-webhooks
standard-webhooks
Elixir
Elixir
1,390
37
The Standard Webhooks specification
standard-webhooks_standard-webhooks
DOC_CHANGE
Obvious
eb430570925c98b1d875c3a5a5d33fb42090cee1
2024-05-10 11:05:30
Vaivaswatha N
Add function deduplication in debug mode, but considering metadata equality (debug info) (#5977) Closes #5890 --------- Co-authored-by: IGI-111 <[email protected]> Co-authored-by: Joshua Batty <[email protected]>
false
359
21
380
--- sway-core/src/lib.rs @@ -43,9 +43,9 @@ use sway_ast::AttributeDecl; use sway_error::handler::{ErrorEmitted, Handler}; use sway_ir::{ create_o1_pass_group, register_known_passes, Context, Kind, Module, PassGroup, PassManager, - ARGDEMOTION_NAME, CONSTDEMOTION_NAME, DCE_NAME, FNDEDUP_DEBUG_PROFILE_NAME, FUNC_DCE_NAME, - INLINE_MODULE_NAME, MEM2REG_NAME, MEMCPYOPT_NAME, MISCDEMOTION_NAME, MODULEPRINTER_NAME, - RETDEMOTION_NAME, SIMPLIFYCFG_NAME, SROA_NAME, + ARGDEMOTION_NAME, CONSTDEMOTION_NAME, DCE_NAME, FUNC_DCE_NAME, INLINE_MODULE_NAME, + MEM2REG_NAME, MEMCPYOPT_NAME, MISCDEMOTION_NAME, MODULEPRINTER_NAME, RETDEMOTION_NAME, + SIMPLIFYCFG_NAME, SROA_NAME, }; use sway_types::constants::DOC_COMMENT_ATTRIBUTE_NAME; use sway_types::SourceEngine; @@ -883,10 +883,6 @@ pub(crate) fn compile_ast_to_ir_to_asm( // Inlining is necessary until #4899 is resolved. pass_group.append_pass(INLINE_MODULE_NAME); - // We run a function deduplication pass that only removes duplicate - // functions when everything, including the metadata are identical. - pass_group.append_pass(FNDEDUP_DEBUG_PROFILE_NAME); - // Do DCE so other optimizations run faster. pass_group.append_pass(FUNC_DCE_NAME); pass_group.append_pass(DCE_NAME); --- sway-ir/src/optimize/fn_dedup.rs @@ -13,28 +13,17 @@ use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; use crate::{ build_call_graph, callee_first_order, AnalysisResults, Block, Context, Function, InstOp, - Instruction, IrError, MetadataIndex, Metadatum, Module, Pass, PassMutability, ScopedPass, - Value, + Instruction, IrError, Module, Pass, PassMutability, ScopedPass, Value, }; -pub const FNDEDUP_DEBUG_PROFILE_NAME: &str = "fndedup-debug-profile"; -pub const FNDEDUP_RELEASE_PROFILE_NAME: &str = "fndedup-release-profile"; +pub const FNDEDUP_NAME: &str = "fndedup"; -pub fn create_fn_dedup_release_profile_pass() -> Pass { +pub fn create_fn_dedup_pass() -> Pass { Pass { - name: FNDEDUP_RELEASE_PROFILE_NAME, - descr: "Deduplicate functions, ignore metadata", + name: FNDEDUP_NAME, + descr: "Deduplicate functions.", deps: vec![], - runner: ScopedPass::ModulePass(PassMutability::Transform(dedup_fns_release_profile)), - } -} - -pub fn create_fn_dedup_debug_profile_pass() -> Pass { - Pass { - name: FNDEDUP_DEBUG_PROFILE_NAME, - descr: "Deduplicate functions, consider metadata also", - deps: vec![], - runner: ScopedPass::ModulePass(PassMutability::Transform(dedup_fns_debug_profile)), + runner: ScopedPass::ModulePass(PassMutability::Transform(dedup_fns)), } } @@ -46,20 +35,13 @@ struct EqClass { function_hash_map: FxHashMap<Function, u64>, } -fn hash_fn( - context: &Context, - function: Function, - eq_class: &mut EqClass, - ignore_metadata: bool, -) -> u64 { +fn hash_fn(context: &Context, function: Function, eq_class: &mut EqClass) -> u64 { let state = &mut FxHasher::default(); // A unique, but only in this function, ID for values. let localised_value_id: &mut FxHashMap<Value, u64> = &mut FxHashMap::default(); // A unique, but only in this function, ID for blocks. let localised_block_id: &mut FxHashMap<Block, u64> = &mut FxHashMap::default(); - // A unique, but only in this function, ID for MetadataIndex. - let metadata_hashes: &mut FxHashMap<MetadataIndex, u64> = &mut FxHashMap::default(); // TODO: We could do a similar localised ID'ing of local variable names // and ASM block arguments too, thereby slightly relaxing the equality check. 
@@ -72,9 +54,7 @@ fn hash_fn( context: &Context, v: Value, localised_value_id: &mut FxHashMap<Value, u64>, - metadata_hashes: &mut FxHashMap<MetadataIndex, u64>, hasher: &mut FxHasher, - ignore_metadata: bool, ) { match &context.values.get(v.0).unwrap().value { crate::ValueDatum::Argument(_) | crate::ValueDatum::Instruction(_) => { @@ -82,60 +62,6 @@ fn hash_fn( } crate::ValueDatum::Configurable(c) | crate::ValueDatum::Constant(c) => c.hash(hasher), } - if let Some(m) = &context.values.get(v.0).unwrap().metadata { - if !ignore_metadata { - hash_metadata(context, *m, metadata_hashes, hasher) - } - } - } - - fn hash_metadata( - context: &Context, - m: MetadataIndex, - metadata_hashes: &mut FxHashMap<MetadataIndex, u64>, - hasher: &mut FxHasher, - ) { - if let Some(hash) = metadata_hashes.get(&m) { - return hash.hash(hasher); - } - - let md_contents = context - .metadata - .get(m.0) - .expect("Orphan / missing metadata"); - let descr = std::mem::discriminant(md_contents); - let state = &mut FxHasher::default(); - // We temporarily set the discriminant as the hash. - descr.hash(state); - metadata_hashes.insert(m, state.finish()); - - fn internal( - context: &Context, - m: &Metadatum, - metadata_hashes: &mut FxHashMap<MetadataIndex, u64>, - hasher: &mut FxHasher, - ) { - match m { - Metadatum::Integer(i) => i.hash(hasher), - Metadatum::Index(mdi) => hash_metadata(context, *mdi, metadata_hashes, hasher), - Metadatum::String(s) => s.hash(hasher), - Metadatum::SourceId(sid) => sid.hash(hasher), - Metadatum::Struct(name, fields) => { - name.hash(hasher); - fields - .iter() - .for_each(|field| internal(context, field, metadata_hashes, hasher)); - } - Metadatum::List(l) => l - .iter() - .for_each(|i| hash_metadata(context, *i, metadata_hashes, hasher)), - } - } - internal(context, md_contents, metadata_hashes, hasher); - - let m_hash = state.finish(); - metadata_hashes.insert(m, m_hash); - m_hash.hash(hasher); } // Start with the function return type. @@ -163,14 +89,7 @@ fn hash_fn( std::mem::discriminant(&inst.op).hash(state); // Hash value inputs to instructions in one-go. for v in inst.op.get_operands() { - hash_value( - context, - v, - localised_value_id, - metadata_hashes, - state, - ignore_metadata, - ); + hash_value(context, v, localised_value_id, state); } // Hash non-value inputs. 
match &inst.op { @@ -271,7 +190,6 @@ pub fn dedup_fns( context: &mut Context, _: &AnalysisResults, module: Module, - ignore_metadata: bool, ) -> Result<bool, IrError> { let mut modified = false; let eq_class = &mut EqClass { @@ -281,7 +199,7 @@ pub fn dedup_fns( let cg = build_call_graph(context, &context.modules.get(module.0).unwrap().functions); let callee_first = callee_first_order(&cg); for function in callee_first { - let hash = hash_fn(context, function, eq_class, ignore_metadata); + let hash = hash_fn(context, function, eq_class); eq_class .hash_set_map .entry(hash) @@ -334,19 +252,3 @@ pub fn dedup_fns( Ok(modified) } - -fn dedup_fns_debug_profile( - context: &mut Context, - analysis_results: &AnalysisResults, - module: Module, -) -> Result<bool, IrError> { - dedup_fns(context, analysis_results, module, false) -} - -fn dedup_fns_release_profile( - context: &mut Context, - analysis_results: &AnalysisResults, - module: Module, -) -> Result<bool, IrError> { - dedup_fns(context, analysis_results, module, true) -} --- sway-ir/src/pass_manager.rs @@ -1,13 +1,12 @@ use crate::{ create_arg_demotion_pass, create_const_combine_pass, create_const_demotion_pass, create_dce_pass, create_dom_fronts_pass, create_dominators_pass, create_escaped_symbols_pass, - create_fn_dedup_debug_profile_pass, create_fn_dedup_release_profile_pass, create_func_dce_pass, - create_inline_in_main_pass, create_inline_in_module_pass, create_mem2reg_pass, - create_memcpyopt_pass, create_misc_demotion_pass, create_module_printer_pass, - create_module_verifier_pass, create_postorder_pass, create_ret_demotion_pass, - create_simplify_cfg_pass, create_sroa_pass, Context, Function, IrError, Module, - CONSTCOMBINE_NAME, DCE_NAME, FNDEDUP_RELEASE_PROFILE_NAME, FUNC_DCE_NAME, INLINE_MODULE_NAME, - MEM2REG_NAME, SIMPLIFYCFG_NAME, + create_fn_dedup_pass, create_func_dce_pass, create_inline_in_main_pass, + create_inline_in_module_pass, create_mem2reg_pass, create_memcpyopt_pass, + create_misc_demotion_pass, create_module_printer_pass, create_module_verifier_pass, + create_postorder_pass, create_ret_demotion_pass, create_simplify_cfg_pass, create_sroa_pass, + Context, Function, IrError, Module, CONSTCOMBINE_NAME, DCE_NAME, FNDEDUP_NAME, FUNC_DCE_NAME, + INLINE_MODULE_NAME, MEM2REG_NAME, SIMPLIFYCFG_NAME, }; use downcast_rs::{impl_downcast, Downcast}; use rustc_hash::FxHashMap; @@ -307,8 +306,7 @@ pub fn register_known_passes(pm: &mut PassManager) { pm.register(create_module_printer_pass()); pm.register(create_module_verifier_pass()); // Optimization passes. - pm.register(create_fn_dedup_release_profile_pass()); - pm.register(create_fn_dedup_debug_profile_pass()); + pm.register(create_fn_dedup_pass()); pm.register(create_mem2reg_pass()); pm.register(create_sroa_pass()); pm.register(create_inline_in_module_pass()); @@ -330,7 +328,7 @@ pub fn create_o1_pass_group() -> PassGroup { // Configure to run our passes. 
o1.append_pass(MEM2REG_NAME); o1.append_pass(INLINE_MODULE_NAME); - o1.append_pass(FNDEDUP_RELEASE_PROFILE_NAME); + o1.append_pass(FNDEDUP_NAME); o1.append_pass(CONSTCOMBINE_NAME); o1.append_pass(SIMPLIFYCFG_NAME); o1.append_pass(CONSTCOMBINE_NAME); --- sway-ir/tests/fn_dedup/debug/debug-dce.ir @@ -1,67 +0,0 @@ -// regex: FOONAME=fn (foo_1|foo_3) - -script { - entry fn main() -> bool, !1 { - entry(): - v0 = const u64 1, !2 - v1 = const u64 1, !3 - v2 = call foo_1(v0, v1), !4 - cbr v2, block0(), block1(v2), !5 - - block0(): - v3 = const u64 1, !6 - v4 = const u64 2, !7 - v5 = call foo_3(v3, v4), !8 - br block1(v5), !5 - - block1(v6: bool): - ret bool v6 - } - - // check: $(FOONAME) - fn foo_1(t1 !9: u64, t2 !10: u64) -> bool, !13 { - entry(t1: u64, t2: u64): - v0 = call eq_2(t1, t2), !14 - ret bool v0 - } - - pub fn eq_2(self !16: u64, other !17: u64) -> bool, !18 { - entry(self: u64, other: u64): - v0 = cmp eq self other - ret bool v0 - } - - // not: $(FOONAME) - fn foo_3(t1 !9: u64, t2 !10: u64) -> bool, !19 { - entry(t1: u64, t2: u64): - v0 = call eq_4(t1, t2), !14 - ret bool v0 - } - - pub fn eq_4(self !16: u64, other !17: u64) -> bool, !18 { - entry(self: u64, other: u64): - v0 = cmp eq self other - ret bool v0 - } -} - -!0 = "sway_test/src/main.sw" -!1 = span !0 9 55 -!2 = span !0 35 36 -!3 = span !0 38 39 -!4 = span !0 31 40 -!5 = span !0 31 53 -!6 = span !0 48 49 -!7 = span !0 51 52 -!8 = span !0 44 53 -!9 = span !0 84 86 -!10 = span !0 91 93 -!11 = span !0 74 134 -!12 = inline "never" -!13 = (!11 !12) -!14 = span !0 123 132 -!15 = "sway/sway-lib-core/src/ops.sw" -!16 = span !15 12645 12649 -!17 = span !15 12651 12656 -!18 = span !15 12639 12705 -!19 = (!11 !12) --- sway-ir/tests/fn_dedup/debug/debug-nodce.ir @@ -1,68 +0,0 @@ -// regex: FOONAME=fn (foo_1|foo_3) - -script { - entry fn main() -> bool, !1 { - entry(): - v0 = const u64 1, !2 - v1 = const u64 1, !3 - v2 = call foo_1(v0, v1), !4 - cbr v2, block0(), block1(v2), !5 - - block0(): - v3 = const u64 1, !6 - v4 = const u64 2, !7 - v5 = call foo_3(v3, v4), !8 - br block1(v5), !5 - - block1(v6: bool): - ret bool v6 - } - - // check: $(FOONAME) - fn foo_1(t1 !9: u64, t2 !10: u64) -> bool, !13 { - entry(t1: u64, t2: u64): - v0 = call eq_2(t1, t2), !14 - ret bool v0 - } - - pub fn eq_2(self !16: u64, other !17: u64) -> bool, !18 { - entry(self: u64, other: u64): - v0 = cmp eq self other - ret bool v0 - } - - // check: $(FOONAME) - fn foo_3(t1 !9: u64, t2 !10: u64) -> bool, !19 { - entry(t1: u64, t2: u64): - v0 = call eq_4(t1, t2), !20 - ret bool v0 - } - - pub fn eq_4(self !16: u64, other !17: u64) -> bool, !18 { - entry(self: u64, other: u64): - v0 = cmp eq self other - ret bool v0 - } -} - -!0 = "sway_test/src/main.sw" -!1 = span !0 9 55 -!2 = span !0 35 36 -!3 = span !0 38 39 -!4 = span !0 31 40 -!5 = span !0 31 53 -!6 = span !0 48 49 -!7 = span !0 51 52 -!8 = span !0 44 53 -!9 = span !0 84 86 -!10 = span !0 91 93 -!11 = span !0 74 134 -!12 = inline "never" -!13 = (!11 !12) -!14 = span !0 123 132 -!15 = "sway/sway-lib-core/src/ops.sw" -!16 = span !15 12645 12649 -!17 = span !15 12651 12656 -!18 = span !15 12639 12705 -!19 = (!11 !12) -!20 = span !0 133 142 --- sway-ir/tests/fn_dedup/release/release-dce.ir @@ -1,71 +0,0 @@ -// regex: FOONAME=fn foo_[0-9]+ -// regex: EQNAME=fn eq_[0-9]+ - -script { - entry fn main() -> bool, !1 { - entry(): - v0 = const u64 1, !2 - v1 = const u64 1, !3 - v2 = call foo_1(v0, v1), !4 - cbr v2, block0(), block1(v2), !5 - - block0(): - v3 = const u64 1, !6 - v4 = const u64 2, !7 - v5 = call foo_3(v3, 
v4), !8 - br block1(v5), !5 - - block1(v6: bool): - ret bool v6 - } - - // check: $(FOONAME) - fn foo_1(t1 !9: u64, t2 !10: u64) -> bool, !13 { - entry(t1: u64, t2: u64): - v0 = call eq_2(t1, t2), !14 - ret bool v0 - } - - // check: $(EQNAME) - pub fn eq_2(self !16: u64, other !17: u64) -> bool, !18 { - entry(self: u64, other: u64): - v0 = cmp eq self other - ret bool v0 - } - - // not: $(FOONAME) - fn foo_3(t1 !9: u64, t2 !10: u64) -> bool, !19 { - entry(t1: u64, t2: u64): - v0 = call eq_4(t1, t2), !20 - ret bool v0 - } - - // not: $(EQNAME) - pub fn eq_4(self !16: u64, other !17: u64) -> bool, !18 { - entry(self: u64, other: u64): - v0 = cmp eq self other - ret bool v0 - } -} - -!0 = "sway_test/src/main.sw" -!1 = span !0 9 55 -!2 = span !0 35 36 -!3 = span !0 38 39 -!4 = span !0 31 40 -!5 = span !0 31 53 -!6 = span !0 48 49 -!7 = span !0 51 52 -!8 = span !0 44 53 -!9 = span !0 84 86 -!10 = span !0 91 93 -!11 = span !0 74 134 -!12 = inline "never" -!13 = (!11 !12) -!14 = span !0 123 132 -!15 = "sway/sway-lib-core/src/ops.sw" -!16 = span !15 12645 12649 -!17 = span !15 12651 12656 -!18 = span !15 12639 12705 -!19 = (!11 !12) -!20 = span !0 133 142 --- sway-ir/tests/tests.rs @@ -5,8 +5,7 @@ use sway_ir::{ create_dce_pass, create_dom_fronts_pass, create_dominators_pass, create_escaped_symbols_pass, create_mem2reg_pass, create_memcpyopt_pass, create_misc_demotion_pass, create_postorder_pass, create_ret_demotion_pass, create_simplify_cfg_pass, optimize as opt, register_known_passes, - Context, ExperimentalFlags, PassGroup, PassManager, DCE_NAME, FNDEDUP_DEBUG_PROFILE_NAME, - FNDEDUP_RELEASE_PROFILE_NAME, FUNC_DCE_NAME, MEM2REG_NAME, SROA_NAME, + Context, ExperimentalFlags, PassGroup, PassManager, DCE_NAME, MEM2REG_NAME, SROA_NAME, }; use sway_types::SourceEngine; @@ -276,33 +275,6 @@ fn sroa() { // ------------------------------------------------------------------------------------------------- -#[allow(clippy::needless_collect)] -#[test] -fn fndedup_debug() { - run_tests("fn_dedup/debug", |_first_line, ir: &mut Context| { - let mut pass_mgr = PassManager::default(); - let mut pass_group = PassGroup::default(); - register_known_passes(&mut pass_mgr); - pass_group.append_pass(FNDEDUP_DEBUG_PROFILE_NAME); - pass_group.append_pass(FUNC_DCE_NAME); - pass_mgr.run(ir, &pass_group).unwrap() - }) -} - -#[allow(clippy::needless_collect)] -#[test] -fn fndedup_release() { - run_tests("fn_dedup/release", |_first_line, ir: &mut Context| { - let mut pass_mgr = PassManager::default(); - let mut pass_group = PassGroup::default(); - register_known_passes(&mut pass_mgr); - pass_group.append_pass(FNDEDUP_RELEASE_PROFILE_NAME); - pass_group.append_pass(FUNC_DCE_NAME); - pass_mgr.run(ir, &pass_group).unwrap() - }) -} - -// ------------------------------------------------------------------------------------------------- #[test] fn serialize() { // This isn't running a pass, it's just confirming that the IR can be loaded and printed, and
sway
fuellabs
Rust
Rust
62,435
5,382
🌴 Empowering everyone to build reliable and efficient smart contracts.
fuellabs_sway
BUG_FIX
Function deduplication is added for debug builds, merging only functions that are identical in everything including metadata (debug info), unlike the release-profile pass that ignores metadata equality
27ec5610598ce51d792f9adadde8eab727481e7c
null
SimaQ
feat: add form to antd.
false
2
1
1
--- index.js @@ -53,7 +53,8 @@ const antd = { Icon: require('./components/iconfont'), Row: require('./components/layout').Row, Col: require('./components/layout').Col, - Spin: require('./components/spin') + Spin: require('./components/spin'), + Form: require('./components/form'), }; antd.version = require('./package.json').version;
ant-design_ant-design.json
null
null
null
null
null
null
ant-design_ant-design.json
NEW_FEAT
5, feat written in commit msg
b1e3f43d267d9f8776aafb86c6ea584f721874d4
2025-03-19 00:36:53
Sandeep Somavarapu
remove unused code (#243897)
false
0
149
149
--- src/vs/workbench/contrib/configExporter/electron-sandbox/configurationExportHelper.contribution.ts @@ -0,0 +1,26 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import { IWorkbenchContribution, IWorkbenchContributionsRegistry, Extensions as WorkbenchExtensions } from '../../../common/contributions.js'; +import { Registry } from '../../../../platform/registry/common/platform.js'; +import { IInstantiationService } from '../../../../platform/instantiation/common/instantiation.js'; +import { LifecyclePhase } from '../../../services/lifecycle/common/lifecycle.js'; +import { INativeWorkbenchEnvironmentService } from '../../../services/environment/electron-sandbox/environmentService.js'; +import { DefaultConfigurationExportHelper } from './configurationExportHelper.js'; + +export class ExtensionPoints implements IWorkbenchContribution { + + constructor( + @IInstantiationService instantiationService: IInstantiationService, + @INativeWorkbenchEnvironmentService environmentService: INativeWorkbenchEnvironmentService + ) { + // Config Exporter + if (environmentService.args['export-default-configuration']) { + instantiationService.createInstance(DefaultConfigurationExportHelper); + } + } +} + +Registry.as<IWorkbenchContributionsRegistry>(WorkbenchExtensions.Workbench).registerWorkbenchContribution(ExtensionPoints, LifecyclePhase.Restored); --- src/vs/workbench/contrib/configExporter/electron-sandbox/configurationExportHelper.ts @@ -0,0 +1,120 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { INativeWorkbenchEnvironmentService } from '../../../services/environment/electron-sandbox/environmentService.js'; +import { Registry } from '../../../../platform/registry/common/platform.js'; +import { IConfigurationNode, IConfigurationRegistry, Extensions, IConfigurationPropertySchema } from '../../../../platform/configuration/common/configurationRegistry.js'; +import { IExtensionService } from '../../../services/extensions/common/extensions.js'; +import { ICommandService } from '../../../../platform/commands/common/commands.js'; +import { IFileService } from '../../../../platform/files/common/files.js'; +import { VSBuffer } from '../../../../base/common/buffer.js'; +import { URI } from '../../../../base/common/uri.js'; +import { IProductService } from '../../../../platform/product/common/productService.js'; + +interface IExportedConfigurationNode { + name: string; + description: string; + default: any; + type?: string | string[]; + enum?: any[]; + enumDescriptions?: string[]; +} + +interface IConfigurationExport { + settings: IExportedConfigurationNode[]; + buildTime: number; + commit?: string; + buildNumber?: number; +} + +export class DefaultConfigurationExportHelper { + + constructor( + @INativeWorkbenchEnvironmentService environmentService: INativeWorkbenchEnvironmentService, + @IExtensionService private readonly extensionService: IExtensionService, + @ICommandService private readonly commandService: ICommandService, + @IFileService private readonly fileService: IFileService, + @IProductService private readonly productService: IProductService + ) { + const exportDefaultConfigurationPath = environmentService.args['export-default-configuration']; + if (exportDefaultConfigurationPath) { + this.writeConfigModelAndQuit(URI.file(exportDefaultConfigurationPath)); + } + } + + private async writeConfigModelAndQuit(target: URI): Promise<void> { + try { + await this.extensionService.whenInstalledExtensionsRegistered(); + await this.writeConfigModel(target); + } finally { + this.commandService.executeCommand('workbench.action.quit'); + } + } + + private async writeConfigModel(target: URI): Promise<void> { + const config = this.getConfigModel(); + + const resultString = JSON.stringify(config, undefined, ' '); + await this.fileService.writeFile(target, VSBuffer.fromString(resultString)); + } + + private getConfigModel(): IConfigurationExport { + const configRegistry = Registry.as<IConfigurationRegistry>(Extensions.Configuration); + const configurations = configRegistry.getConfigurations().slice(); + const settings: IExportedConfigurationNode[] = []; + const processedNames = new Set<string>(); + + const processProperty = (name: string, prop: IConfigurationPropertySchema) => { + if (processedNames.has(name)) { + console.warn('Setting is registered twice: ' + name); + return; + } + + processedNames.add(name); + const propDetails: IExportedConfigurationNode = { + name, + description: prop.description || prop.markdownDescription || '', + default: prop.default, + type: prop.type + }; + + if (prop.enum) { + propDetails.enum = prop.enum; + } + + if (prop.enumDescriptions || prop.markdownEnumDescriptions) { + propDetails.enumDescriptions = prop.enumDescriptions || prop.markdownEnumDescriptions; + } + + settings.push(propDetails); + }; + + const processConfig = (config: IConfigurationNode) => { + if (config.properties) { + for (const name in config.properties) { + processProperty(name, 
config.properties[name]); + } + } + + config.allOf?.forEach(processConfig); + }; + + configurations.forEach(processConfig); + + const excludedProps = configRegistry.getExcludedConfigurationProperties(); + for (const name in excludedProps) { + processProperty(name, excludedProps[name]); + } + + const result: IConfigurationExport = { + settings: settings.sort((a, b) => a.name.localeCompare(b.name)), + buildTime: Date.now(), + commit: this.productService.commit, + buildNumber: this.productService.settingsSearchBuildId + }; + + return result; + } +} --- src/vs/workbench/workbench.desktop.main.ts @@ -126,6 +126,9 @@ import './contrib/issue/electron-sandbox/process.contribution.js'; // Remote import './contrib/remote/electron-sandbox/remote.contribution.js'; +// Configuration Exporter +import './contrib/configExporter/electron-sandbox/configurationExportHelper.contribution.js'; + // Terminal import './contrib/terminal/electron-sandbox/terminal.contribution.js';
vscode
microsoft
TypeScript
TypeScript
168,072
30,802
Visual Studio Code
microsoft_vscode
CODE_IMPROVEMENT
unused code removed
970567deb89a5431d37d11b8f43ddbdb0eaee872
2024-02-24 06:54:07
soya_daizu
i18n: update Japanese (#1154)
false
3
3
6
--- app/assets/i18n/_missing_translations_ja.json @@ -5,11 +5,11 @@ ], "settingsTab": { "send": { - "title": "送信", - "shareViaLinkAutoAccept": "リンク経由で共有時: 自動で承諾" + "title": "Send", + "shareViaLinkAutoAccept": "Share via link: Auto accept" } }, "webSharePage": { - "autoAccept": "自動でリクエストを承諾する" + "autoAccept": "Automatically accept requests" } }
localsend
localsend
Dart
Dart
58,423
3,136
An open-source cross-platform alternative to AirDrop
localsend_localsend
CONFIG_CHANGE
Japanese translations updated in a JSON file
c9d84da18d1e0d28a7e16ca6df8e6d47570501d4
2025-03-25 22:10:46
Easwar Hariharan
Bluetooth: L2CAP: convert timeouts to secs_to_jiffies() Commit b35108a51cf7 ("jiffies: Define secs_to_jiffies()") introduced secs_to_jiffies(). As the value here is a multiple of 1000, use secs_to_jiffies() instead of msecs_to_jiffies() for readability. Signed-off-by: Easwar Hariharan <[email protected]> Signed-off-by: Luiz Augusto von Dentz <[email protected]>
false
4
4
8
--- include/net/bluetooth/l2cap.h @@ -38,8 +38,8 @@ #define L2CAP_DEFAULT_TX_WINDOW 63 #define L2CAP_DEFAULT_EXT_WINDOW 0x3FFF #define L2CAP_DEFAULT_MAX_TX 3 -#define L2CAP_DEFAULT_RETRANS_TO 2 /* seconds */ -#define L2CAP_DEFAULT_MONITOR_TO 12 /* seconds */ +#define L2CAP_DEFAULT_RETRANS_TO 2000 /* 2 seconds */ +#define L2CAP_DEFAULT_MONITOR_TO 12000 /* 12 seconds */ #define L2CAP_DEFAULT_MAX_PDU_SIZE 1492 /* Sized for AMP packet */ #define L2CAP_DEFAULT_ACK_TO 200 #define L2CAP_DEFAULT_MAX_SDU_SIZE 0xFFFF --- net/bluetooth/l2cap_core.c @@ -282,7 +282,7 @@ static void __set_retrans_timer(struct l2cap_chan *chan) if (!delayed_work_pending(&chan->monitor_timer) && chan->retrans_timeout) { l2cap_set_timer(chan, &chan->retrans_timer, - secs_to_jiffies(chan->retrans_timeout)); + msecs_to_jiffies(chan->retrans_timeout)); } } @@ -291,7 +291,7 @@ static void __set_monitor_timer(struct l2cap_chan *chan) __clear_retrans_timer(chan); if (chan->monitor_timeout) { l2cap_set_timer(chan, &chan->monitor_timer, - secs_to_jiffies(chan->monitor_timeout)); + msecs_to_jiffies(chan->monitor_timeout)); } }
linux
torvalds
C
C
189,022
55,340
Linux kernel source tree
torvalds_linux
BUG_FIX
This commit fixes/polishes an earlier feature
3fccede1a712173661410dc95f60b747bb8655a3
2024-10-29 17:07:30
hiddenSharp429
fix: update release.yml for build configuration
false
2
0
2
--- .github/workflows/release.yml @@ -21,8 +21,6 @@ jobs: - name: Build run: | brew install automake - brew install autoconf - brew install libtool make VERSION="${GITHUB_REF_NAME}" release make release-dmg shasum -a 256 build/Release/ShadowsocksX-NG.dmg > build/Release/ShadowsocksX-NG.dmg.checksum
shadowsocksx-ng
shadowsocks
Swift
Swift
32,651
7,935
Next Generation of ShadowsocksX
shadowsocks_shadowsocksx-ng
CONFIG_CHANGE
Changes in a YAML workflow file
37cf1984b9a97278ea83589dde712547a1438c3c
2023-05-18 14:46:46
Hanna Fadida
Add BITFIELD_RO basic tests for non-repl use cases (#12187) Current tests for BITFIELD_RO command are skipped in the external mode, and therefore reply-schemas-validator reports a coverage error. This PR adds basic tests to increase coverage.
false
12
2
14
--- tests/unit/bitfield.tcl @@ -214,16 +214,6 @@ start_server {tags {"bitops"}} { } r del mystring } - - test {BITFIELD_RO with only key as argument} { - set res [r bitfield_ro bits] - assert {$res eq {}} - } - - test {BITFIELD_RO fails when write option is used} { - catch {r bitfield_ro bits set u8 0 100 get u8 0} err - assert_match {*ERR BITFIELD_RO only supports the GET subcommand*} $err - } } start_server {tags {"repl external:skip"}} { @@ -250,12 +240,12 @@ start_server {tags {"repl external:skip"}} { assert_equal 100 [$slave bitfield_ro bits get u8 0] } - test {BITFIELD_RO with only key as argument on read-only replica} { + test {BITFIELD_RO with only key as argument} { set res [$slave bitfield_ro bits] assert {$res eq {}} } - test {BITFIELD_RO fails when write option is used on read-only replica} { + test {BITFIELD_RO fails when write option is used} { catch {$slave bitfield_ro bits set u8 0 100 get u8 0} err assert_match {*ERR BITFIELD_RO only supports the GET subcommand*} $err }
redis
redis
C
C
68,201
23,916
Redis is an in-memory database that persists on disk. The data model is key-value, but many different kind of values are supported: Strings, Lists, Sets, Sorted Sets, Hashes, Streams, HyperLogLogs, Bitmaps.
redis_redis
NEW_FEAT
null
7d3aac2d7111138b288a83005171b7900b8bcddd
null
Eliseo Martínez
Fix warnings: spell.c: find_word(): Dead assignment: HI. Problem : Dead assignment @ 1602. Diagnostic : Harmless issue. Rationale : Code using this assignment (line 1666) was disabled. Vim's tip at Wed Nov 12 13:07:54 2014 +0100 (changeset 6352:2f7bf5f90f57) hasn't changed this yet. Resolution : Disable assignment. Directive processors are used for that in order to match the way the other code was disabled.
false
2
0
2
--- spell.c @@ -1599,7 +1599,9 @@ static void find_word(matchinf_T *mip, int mode) mip->mi_compoff = (int)(p - mip->mi_fword); } } +#if 0 c = mip->mi_compoff; +#endif ++mip->mi_complen; if (flags & WF_COMPROOT) ++mip->mi_compextra;
neovim_neovim.json
null
null
null
null
null
null
neovim_neovim.json
BUG_FIX
5, fixed warnings.
041ada15867b4ed24193f2023747f275806bcf10
2025-03-07 02:34:18
Robo
build: match upstream with unsafe buffer paths (#45853) * build: match upstream with unsafe buffer paths * Don't assume STL iterators are pointers Refs https://issues.chromium.org/issues/328308661 * chore: spanify process_singleton_win.cc
false
44
126
170
--- build/args/all.gn @@ -73,3 +73,5 @@ enterprise_cloud_content_analysis = false # TODO: remove dependency on legacy ipc # https://issues.chromium.org/issues/40943039 content_enable_legacy_ipc = true + +clang_unsafe_buffers_paths = "//electron/electron_unsafe_buffers_paths.txt" --- electron_unsafe_buffers_paths.txt @@ -0,0 +1,34 @@ +# Copyright 2024 The Electron Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# The set of path prefixes that should be checked for unsafe buffer usage (see +# -Wunsafe-buffer-usage in Clang). +# +# *** +# Paths should be written as relative to the root of the source tree with +# unix-style path separators. Directory prefixes should end with `/`, such +# as `base/`. +# *** +# +# Files in this set are known to not use pointer arithmetic/subscripting, and +# make use of constructs like base::span or containers like std::vector instead. +# +# See `docs/unsafe_buffers.md`. + +# These directories are excluded because they come from outside Electron and +# we don't have control over their contents. +-base/ +-chrome/ +-components/ +-device/ +-extensions/ +-google_apis/ +-net/ +-services/ +-skia/ +-third_party/ +-tools/ +-ui/ +-url/ +-v8/ --- patches/chromium/.patches @@ -140,4 +140,4 @@ ignore_parse_errors_for_pkey_appusermodel_toastactivatorclsid.patch feat_add_signals_when_embedder_cleanup_callbacks_run_for.patch feat_separate_content_settings_callback_for_sync_and_async_clipboard.patch fix_win32_synchronous_spellcheck.patch -fix_enable_wrap_iter_in_string_view_and_array.patch +chore_remove_conflicting_allow_unsafe_libc_calls.patch --- patches/chromium/chore_remove_conflicting_allow_unsafe_libc_calls.patch @@ -0,0 +1,49 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: Charles Kerr <[email protected]> +Date: Sat, 22 Feb 2025 13:15:39 -0600 +Subject: chore: remove conflicting allow_unsafe_libc_calls + +We want builds to fail if a buffer warning comes from Electron code but +not from code that we don't maintain (e.g. upstream Chromium code), so +//electron/electron_unsafe_buffer_paths.txt turns off Chromium warnings. + +There are some upstream files that generate warnings *and* also have +pragmas that override //electron/electron_unsafe_buffer_paths.txt, +forcing them to be tested. This breaks our build. + +Files can be removed from this patch when upstream either removes the +pragma or fixes the other warnings. This patch can be removed when no +files are left. + +diff --git a/net/cookies/parsed_cookie.cc b/net/cookies/parsed_cookie.cc +index 7d5d0106a3675b3fa21b0e00a755f5c0ed11c87b..d26c645d70b54b31815c8140954ee6d0a34fa8af 100644 +--- a/net/cookies/parsed_cookie.cc ++++ b/net/cookies/parsed_cookie.cc +@@ -2,11 +2,6 @@ + // Use of this source code is governed by a BSD-style license that can be + // found in the LICENSE file. + +-#ifdef UNSAFE_BUFFERS_BUILD +-// TODO(crbug.com/390223051): Remove C-library calls to fix the errors. 
+-#pragma allow_unsafe_libc_calls +-#endif +- + // Portions of this code based on Mozilla: + // (netwerk/cookie/src/nsCookieService.cpp) + /* ***** BEGIN LICENSE BLOCK ***** +diff --git a/net/http/http_response_headers.cc b/net/http/http_response_headers.cc +index 813f2f7f274bf02b6679b9321ae83948ab634697..2c61297669ba7d513f8493dfb6f478245f5c7c58 100644 +--- a/net/http/http_response_headers.cc ++++ b/net/http/http_response_headers.cc +@@ -2,11 +2,6 @@ + // Use of this source code is governed by a BSD-style license that can be + // found in the LICENSE file. + +-#ifdef UNSAFE_BUFFERS_BUILD +-// TODO(crbug.com/390223051): Remove C-library calls to fix the errors. +-#pragma allow_unsafe_libc_calls +-#endif +- + // The rules for header parsing were borrowed from Firefox: + // http://lxr.mozilla.org/seamonkey/source/netwerk/protocol/http/src/nsHttpResponseHead.cpp + // The rules for parsing content-types were also borrowed from Firefox: --- patches/chromium/feat_add_data_parameter_to_processsingleton.patch @@ -179,7 +179,7 @@ index 08cbe32a258bf478f1da0a07064d3e9ef14c44a5..b9f2a43cb90fac4b031a4b4da38d6435 if (!WriteToSocket(socket.fd(), to_send.data(), to_send.length())) { // Try to kill the other process, because it might have been dead. diff --git a/chrome/browser/process_singleton_win.cc b/chrome/browser/process_singleton_win.cc -index d91f58ebe3a024bc41ed72121c49172f68e0d862..7b85ba5ed8d0c2a152899ad65f275e6680a93dba 100644 +index d91f58ebe3a024bc41ed72121c49172f68e0d862..255160d6bd6b2ea1cd640fde8f4b4ce598148418 100644 --- a/chrome/browser/process_singleton_win.cc +++ b/chrome/browser/process_singleton_win.cc @@ -81,10 +81,12 @@ BOOL CALLBACK BrowserWindowEnumeration(HWND window, LPARAM param) { @@ -197,7 +197,7 @@ index d91f58ebe3a024bc41ed72121c49172f68e0d862..7b85ba5ed8d0c2a152899ad65f275e66 static const int min_message_size = 7; if (cds->cbData < min_message_size * sizeof(wchar_t) || cds->cbData % sizeof(wchar_t) != 0) { -@@ -134,6 +136,23 @@ bool ParseCommandLine(const COPYDATASTRUCT* cds, +@@ -134,6 +136,37 @@ bool ParseCommandLine(const COPYDATASTRUCT* cds, const std::wstring cmd_line = msg.substr(second_null + 1, third_null - second_null); *parsed_command_line = base::CommandLine::FromString(cmd_line); @@ -210,18 +210,32 @@ index d91f58ebe3a024bc41ed72121c49172f68e0d862..7b85ba5ed8d0c2a152899ad65f275e66 + return true; + } + ++ // Get length of the additional data. ++ const std::wstring additional_data_length_string = ++ msg.substr(third_null + 1, fourth_null - third_null); ++ size_t additional_data_length; ++ base::StringToSizeT(additional_data_length_string, &additional_data_length); ++ ++ const std::wstring::size_type fifth_null = ++ msg.find_first_of(L'\0', fourth_null + 1); ++ if (fifth_null == std::wstring::npos || ++ fifth_null == msg.length()) { ++ LOG(WARNING) << "Invalid format for start command, we need a string in 6 " ++ "parts separated by NULLs"; ++ } ++ + // Get the actual additional data. 
+ const std::wstring additional_data = -+ msg.substr(third_null + 1, fourth_null - third_null); -+ base::span<const uint8_t> additional_data_bytes = -+ base::as_byte_span(additional_data); -+ *parsed_additional_data = std::vector<uint8_t>( -+ additional_data_bytes.begin(), additional_data_bytes.end()); ++ msg.substr(fourth_null + 1, fifth_null - fourth_null); ++ const uint8_t* additional_data_bytes = ++ reinterpret_cast<const uint8_t*>(additional_data.c_str()); ++ *parsed_additional_data = std::vector<uint8_t>(additional_data_bytes, ++ additional_data_bytes + additional_data_length); + return true; } return false; -@@ -155,13 +174,14 @@ bool ProcessLaunchNotification( +@@ -155,13 +188,14 @@ bool ProcessLaunchNotification( base::CommandLine parsed_command_line(base::CommandLine::NO_PROGRAM); base::FilePath current_directory; @@ -239,7 +253,7 @@ index d91f58ebe3a024bc41ed72121c49172f68e0d862..7b85ba5ed8d0c2a152899ad65f275e66 return true; } -@@ -265,9 +285,11 @@ bool ProcessSingleton::EscapeVirtualization( +@@ -265,9 +299,11 @@ bool ProcessSingleton::EscapeVirtualization( ProcessSingleton::ProcessSingleton( const std::string& program_name, const base::FilePath& user_data_dir, @@ -251,7 +265,7 @@ index d91f58ebe3a024bc41ed72121c49172f68e0d862..7b85ba5ed8d0c2a152899ad65f275e66 program_name_(program_name), is_app_sandboxed_(is_app_sandboxed), is_virtualized_(false), -@@ -294,7 +316,7 @@ ProcessSingleton::NotifyResult ProcessSingleton::NotifyOtherProcess() { +@@ -294,7 +330,7 @@ ProcessSingleton::NotifyResult ProcessSingleton::NotifyOtherProcess() { return PROCESS_NONE; } @@ -261,7 +275,7 @@ index d91f58ebe3a024bc41ed72121c49172f68e0d862..7b85ba5ed8d0c2a152899ad65f275e66 return PROCESS_NOTIFIED; case NotifyChromeResult::NOTIFY_FAILED: diff --git a/chrome/browser/win/chrome_process_finder.cc b/chrome/browser/win/chrome_process_finder.cc -index 019ac7e93e009a713ce56ee8bcacf467b4fe769d..283693966c041340983aa78a95f8a274db601fb4 100644 +index 019ac7e93e009a713ce56ee8bcacf467b4fe769d..9417403bb9cacd0572b37493ab2d98130313db4d 100644 --- a/chrome/browser/win/chrome_process_finder.cc +++ b/chrome/browser/win/chrome_process_finder.cc @@ -39,7 +39,9 @@ HWND FindRunningChromeWindow(const base::FilePath& user_data_dir) { @@ -275,13 +289,13 @@ index 019ac7e93e009a713ce56ee8bcacf467b4fe769d..283693966c041340983aa78a95f8a274 TRACE_EVENT0("startup", "AttemptToNotifyRunningChrome"); DCHECK(remote_window); -@@ -68,12 +70,24 @@ NotifyChromeResult AttemptToNotifyRunningChrome(HWND remote_window) { +@@ -68,12 +70,29 @@ NotifyChromeResult AttemptToNotifyRunningChrome(HWND remote_window) { new_command_line.AppendSwitchNative(switches::kSourceShortcut, si.lpTitle); // Send the command line to the remote chrome window. - // Format is "START\0<<<current directory>>>\0<<<commandline>>>". + // Format is -+ // "START\0<current-directory>\0<command-line>\0<additional-data>". ++ // "START\0<current-directory>\0<command-line>\0<additional-data-length>\0<additional-data>". std::wstring to_send = base::StrCat( {std::wstring_view{L"START\0", 6}, cur_dir.value(), std::wstring_view{L"\0", 1}, new_command_line.GetCommandLineString(), @@ -289,6 +303,11 @@ index 019ac7e93e009a713ce56ee8bcacf467b4fe769d..283693966c041340983aa78a95f8a274 + size_t additional_data_size = additional_data.size_bytes(); + if (additional_data_size) { ++ // Send over the size, because the reinterpret cast to wchar_t could ++ // add padding. 
++ to_send.append(base::UTF8ToWide(base::NumberToString(additional_data_size))); ++ to_send.append(L"\0", 1); // Null separator. ++ + size_t padded_size = additional_data_size / sizeof(wchar_t); + if (additional_data_size % sizeof(wchar_t) != 0) { + padded_size++; --- patches/chromium/fix_enable_wrap_iter_in_string_view_and_array.patch @@ -1,22 +0,0 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: deepak1556 <[email protected]> -Date: Sat, 1 Mar 2025 05:11:41 +0900 -Subject: fix: enable __wrap_iter in string_view and array - -Refs https://github.com/electron/electron/issues/45810#issuecomment-2691417213 - -Patch can be removed when build_make_libcxx_abi_unstable_false_for_electron.patch is removed. - -diff --git a/buildtools/third_party/libc++/__config_site b/buildtools/third_party/libc++/__config_site -index e240ff6fff94a6cebf8662996712fe7eb22e5fff..ddf1693002aa171b3d91aa4ef08f5b360e4adddc 100644 ---- a/buildtools/third_party/libc++/__config_site -+++ b/buildtools/third_party/libc++/__config_site -@@ -19,6 +19,8 @@ - #define _LIBCPP_ABI_NAMESPACE __Cr - - #define _LIBCPP_ABI_VERSION 1 -+#define _LIBCPP_ABI_USE_WRAP_ITER_IN_STD_ARRAY -+#define _LIBCPP_ABI_USE_WRAP_ITER_IN_STD_STRING_VIEW - - #define _LIBCPP_ABI_FORCE_ITANIUM 0 - #define _LIBCPP_ABI_FORCE_MICROSOFT 0 --- shell/browser/api/electron_api_app.cc @@ -405,7 +405,7 @@ int GetPathConstant(std::string_view name) { {"videos", chrome::DIR_USER_VIDEOS}, }); // clang-format on - auto iter = Lookup.find(name); + const auto* iter = Lookup.find(name); return iter != Lookup.end() ? iter->second : -1; } --- shell/browser/api/electron_api_system_preferences_win.cc @@ -129,7 +129,7 @@ std::string SystemPreferences::GetColor(gin_helper::ErrorThrower thrower, {"window-text", COLOR_WINDOWTEXT}, }); - if (auto iter = Lookup.find(color); iter != Lookup.end()) + if (const auto* iter = Lookup.find(color); iter != Lookup.end()) return ToRGBAHex(color_utils::GetSysSkColor(iter->second)); thrower.ThrowError("Unknown color: " + color); --- shell/browser/api/electron_api_web_request.cc @@ -80,7 +80,7 @@ struct UserData : public base::SupportsUserData::Data { }; extensions::WebRequestResourceType ParseResourceType(std::string_view value) { - if (auto iter = ResourceTypes.find(value); iter != ResourceTypes.end()) + if (const auto* iter = ResourceTypes.find(value); iter != ResourceTypes.end()) return iter->second; return extensions::WebRequestResourceType::OTHER; --- shell/browser/ui/views/client_frame_view_linux.cc @@ -416,7 +416,7 @@ void ClientFrameViewLinux::LayoutButtonsOnSide( } for (views::FrameButton frame_button : frame_buttons) { - auto button = + auto* button = std::ranges::find_if(nav_buttons_, [&](const NavButton& test) { return test.type != skip_type && test.frame_button == frame_button; }); --- shell/common/api/electron_api_url_loader.cc @@ -568,7 +568,7 @@ gin::Handle<SimpleURLLoaderWrapper> SimpleURLLoaderWrapper::Create( {"no-cors", Val::kNoCors}, {"same-origin", Val::kSameOrigin}, }); - if (auto iter = Lookup.find(mode); iter != Lookup.end()) + if (auto* iter = Lookup.find(mode); iter != Lookup.end()) request->mode = iter->second; } @@ -597,7 +597,7 @@ gin::Handle<SimpleURLLoaderWrapper> SimpleURLLoaderWrapper::Create( {"worker", Val::kWorker}, {"xslt", Val::kXslt}, }); - if (auto iter = Lookup.find(destination); iter != Lookup.end()) + if (auto* iter = Lookup.find(destination); iter != Lookup.end()) request->destination = iter->second; } --- shell/common/gin_converters/std_converter.h @@ -224,7 +224,7 @@ 
bool FromV8WithLookup(v8::Isolate* isolate, if (key_transform) key_transform(key); - if (auto iter = table.find(key); iter != table.end()) { + if (const auto* iter = table.find(key); iter != table.end()) { *out = iter->second; return true; } --- shell/common/keyboard_util.cc @@ -108,7 +108,7 @@ CodeAndShiftedChar KeyboardCodeFromKeyIdentifier(const std::string_view str) { {"volumeup", {ui::VKEY_VOLUME_UP, {}}}, }); - if (auto iter = Lookup.find(str); iter != Lookup.end()) + if (auto* const iter = Lookup.find(str); iter != Lookup.end()) return iter->second; return {ui::VKEY_UNKNOWN, {}};
electron
electron
C++
C++
115,677
15,852
:electron: Build cross-platform desktop apps with JavaScript, HTML, and CSS
electron_electron
BUG_FIX
Code change: fixes a keyword in the code
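The process-singleton patch above settles on the wire format `START\0<current-directory>\0<command-line>\0<additional-data-length>\0<additional-data>`, sending the byte length explicitly because the payload may be padded to a whole number of wide characters. As a rough, hypothetical sketch of parsing that shape (not Electron's actual C++ code; all names are illustrative):

```rust
// Hypothetical sketch: parse a NUL-separated singleton launch message of the
// form "START\0<cur-dir>\0<cmd-line>\0<len>\0<additional-data>". The length
// is sent explicitly so the receiver can trim wide-character padding off the
// payload.
fn parse_launch_message(msg: &str) -> Option<(&str, &str, Vec<u8>)> {
    let mut parts = msg.splitn(5, '\0');
    if parts.next()? != "START" {
        return None; // unknown message type
    }
    let cur_dir = parts.next()?; // working directory of the second instance
    let cmd_line = parts.next()?; // its full command line
    let len: usize = parts.next()?.parse().ok()?; // declared payload byte length
    let payload = parts.next()?.as_bytes();
    Some((cur_dir, cmd_line, payload.get(..len)?.to_vec()))
}

fn main() {
    let msg = format!("START\0/tmp\0app --flag\0{}\0hello", "hello".len());
    let (dir, cmd, data) = parse_launch_message(&msg).unwrap();
    assert_eq!(dir, "/tmp");
    assert_eq!(cmd, "app --flag");
    assert_eq!(data, b"hello".to_vec());
    println!("ok");
}
```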
cff6e280c77afb0bc42a10348eeef5360db8f361
2024-11-18 04:19:35
Bhuwan Pandit
feat(cli): support multiple env file argument (#26527) Closes #26425 ## Overview This PR adds support for specifying multiple environment files as arguments when using the Deno CLI. Subsequent files override pre-existing variables defined in previous files. If the same variable is defined in the environment and in the file, the value from the environment takes precedence. ## Example Usage ```bash deno run --allow-env --env-file --env-file=".env.one" --env-file=".env.two" script.ts ``` --------- Co-authored-by: Bartek Iwańczuk <[email protected]>
false
107
58
165
--- cli/args/flags.rs @@ -613,7 +613,7 @@ pub struct Flags { pub internal: InternalFlags, pub ignore: Vec<String>, pub import_map_path: Option<String>, - pub env_file: Option<Vec<String>>, + pub env_file: Option<String>, pub inspect_brk: Option<SocketAddr>, pub inspect_wait: Option<SocketAddr>, pub inspect: Option<SocketAddr>, @@ -3775,14 +3775,12 @@ fn env_file_arg() -> Arg { .help(cstr!( "Load environment variables from local file <p(245)>Only the first environment variable with a given key is used. - Existing process environment variables are not overwritten, so if variables with the same names already exist in the environment, their values will be preserved. - Where multiple declarations for the same environment variable exist in your .env file, the first one encountered is applied. This is determined by the order of the files you pass as arguments.</>" + Existing process environment variables are not overwritten.</>" )) .value_hint(ValueHint::FilePath) .default_missing_value(".env") .require_equals(true) .num_args(0..=1) - .action(ArgAction::Append) } fn reload_arg() -> Arg { @@ -5489,9 +5487,7 @@ fn import_map_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) { } fn env_file_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) { - flags.env_file = matches - .get_many::<String>("env-file") - .map(|values| values.cloned().collect()); + flags.env_file = matches.remove_one::<String>("env-file"); } fn reload_arg_parse( @@ -7427,7 +7423,7 @@ mod tests { allow_all: true, ..Default::default() }, - env_file: Some(vec![".example.env".to_owned()]), + env_file: Some(".example.env".to_owned()), ..Flags::default() } ); @@ -7521,7 +7517,7 @@ mod tests { allow_all: true, ..Default::default() }, - env_file: Some(vec![".example.env".to_owned()]), + env_file: Some(".example.env".to_owned()), unsafely_ignore_certificate_errors: Some(vec![]), ..Flags::default() } @@ -8169,7 +8165,7 @@ mod tests { subcommand: DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(vec![".env".to_owned()]), + env_file: Some(".env".to_owned()), code_cache_enabled: true, ..Flags::default() } @@ -8185,7 +8181,7 @@ mod tests { subcommand: DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(vec![".env".to_owned()]), + env_file: Some(".env".to_owned()), code_cache_enabled: true, ..Flags::default() } @@ -8218,7 +8214,7 @@ mod tests { subcommand: DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(vec![".another_env".to_owned()]), + env_file: Some(".another_env".to_owned()), code_cache_enabled: true, ..Flags::default() } @@ -8239,29 +8235,7 @@ mod tests { subcommand: DenoSubcommand::Run(RunFlags::new_default( "script.ts".to_string(), )), - env_file: Some(vec![".another_env".to_owned()]), - code_cache_enabled: true, - ..Flags::default() - } - ); - } - - #[test] - fn run_multiple_env_file_defined() { - let r = flags_from_vec(svec![ - "deno", - "run", - "--env-file", - "--env-file=.two_env", - "script.ts" - ]); - assert_eq!( - r.unwrap(), - Flags { - subcommand: DenoSubcommand::Run(RunFlags::new_default( - "script.ts".to_string(), - )), - env_file: Some(vec![".env".to_owned(), ".two_env".to_owned()]), + env_file: Some(".another_env".to_owned()), code_cache_enabled: true, ..Flags::default() } @@ -8404,7 +8378,7 @@ mod tests { allow_read: Some(vec![]), ..Default::default() }, - env_file: Some(vec![".example.env".to_owned()]), + env_file: Some(".example.env".to_owned()), ..Flags::default() } ); @@ -10079,7 +10053,7 @@ mod 
tests { unsafely_ignore_certificate_errors: Some(vec![]), v8_flags: svec!["--help", "--random-seed=1"], seed: Some(1), - env_file: Some(vec![".example.env".to_owned()]), + env_file: Some(".example.env".to_owned()), ..Flags::default() } ); --- cli/args/mod.rs @@ -1128,7 +1128,7 @@ impl CliOptions { self.flags.otel_config() } - pub fn env_file_name(&self) -> Option<&Vec<String>> { + pub fn env_file_name(&self) -> Option<&String> { self.flags.env_file.as_ref() } @@ -1935,22 +1935,19 @@ pub fn config_to_deno_graph_workspace_member( }) } -fn load_env_variables_from_env_file(filename: Option<&Vec<String>>) { - let Some(env_file_names) = filename else { +fn load_env_variables_from_env_file(filename: Option<&String>) { + let Some(env_file_name) = filename else { return; }; - - for env_file_name in env_file_names.iter().rev() { - match from_filename(env_file_name) { - Ok(_) => (), - Err(error) => { - match error { + match from_filename(env_file_name) { + Ok(_) => (), + Err(error) => { + match error { dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",colors::yellow("Warning"), env_file_name, index, line), dotenvy::Error::Io(_)=> log::info!("{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",colors::yellow("Warning"),env_file_name), dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",colors::yellow("Warning"),env_file_name), _ => log::info!("{} Unknown failure occurred with the specified environment file: {}", colors::yellow("Warning"), env_file_name), } - } } } } --- cli/standalone/binary.rs @@ -659,15 +659,9 @@ impl<'a> DenoCompileBinaryWriter<'a> { remote_modules_store.add_redirects(&graph.redirects); let env_vars_from_env_file = match cli_options.env_file_name() { - Some(env_filenames) => { - let mut aggregated_env_vars = IndexMap::new(); - for env_filename in env_filenames.iter().rev() { - log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename); - - let env_vars = get_file_env_vars(env_filename.to_string())?; - aggregated_env_vars.extend(env_vars); - } - aggregated_env_vars + Some(env_filename) => { + log::info!("{} Environment variables from the file \"{}\" were embedded in the generated executable file", crate::colors::yellow("Warning"), env_filename); + get_file_env_vars(env_filename.to_string())? 
} None => Default::default(), }; --- tests/integration/run_tests.rs @@ -418,6 +418,16 @@ fn permissions_cache() { }); } +itest!(env_file { + args: "run --env=env --allow-env run/env_file.ts", + output: "run/env_file.out", +}); + +itest!(env_file_missing { + args: "run --env=missing --allow-env run/env_file.ts", + output: "run/env_file_missing.out", +}); + itest!(lock_write_fetch { args: "run --quiet --allow-import --allow-read --allow-write --allow-env --allow-run run/lock_write_fetch/main.ts", --- tests/specs/run/env_file/__test__.jsonc @@ -1,20 +0,0 @@ -{ - "tests": { - "basic": { - "args": "run --env=./env --allow-env env_file.ts", - "output": "env_file.out" - }, - "missing": { - "args": "run --env=./missing --allow-env env_file.ts", - "output": "env_file_missing.out" - }, - "multiple": { - "args": "run --env=./env --env=./env_one --env=./env_two --allow-env env_file.ts", - "output": "multiple_env_file.out" - }, - "unparseable": { - "args": "run --env=./env_unparseable --allow-env env_file.ts", - "output": "env_unparseable.out" - } - } -} --- tests/specs/run/env_file/env @@ -1,4 +0,0 @@ -FOO=BAR -ANOTHER_FOO=ANOTHER_${FOO} -MULTILINE="First Line -Second Line" \ No newline at end of file --- tests/specs/run/env_file/env_one @@ -1,2 +0,0 @@ -FOO=BARBAR -ANOTHER_FOO=OVERRIDEN_BY_ENV_ONE --- tests/specs/run/env_file/env_two @@ -1 +0,0 @@ -FOO=OVERRIDEN_BY_ENV_TWO --- tests/specs/run/env_file/env_unparseable.out @@ -1,4 +0,0 @@ -Warning Parsing failed within the specified environment file: ./env_unparseable at index: 3 of the value: c:\path -valid -undefined -undefined --- tests/specs/run/env_file/multiple_env_file.out @@ -1,4 +0,0 @@ -OVERRIDEN_BY_ENV_TWO -OVERRIDEN_BY_ENV_ONE -First Line -Second Line --- tests/specs/run/env_unparsable_file/__test__.jsonc @@ -0,0 +1,4 @@ +{ + "args": "run --env=../../../testdata/env_unparsable --allow-env main.js", + "output": "main.out" +} --- tests/specs/run/env_unparsable_file/main.out @@ -0,0 +1,4 @@ +Warning Parsing failed within the specified environment file: ../../../testdata/env_unparsable at index: 3 of the value: c:\path +valid +undefined +undefined --- tests/testdata/run/env_file.ts @@ -0,0 +1,3 @@ +console.log(Deno.env.get("FOO")); +console.log(Deno.env.get("ANOTHER_FOO")); +console.log(Deno.env.get("MULTILINE")); --- tests/specs/run/env_file/env_file_missing.out @@ -1,4 +1,4 @@ -Warning The `--env-file` flag was used, but the environment file specified './missing' was not found. +Warning The `--env-file` flag was used, but the environment file specified 'missing' was not found. undefined undefined undefined --- tools/lint.js @@ -219,7 +219,7 @@ async function ensureNoNewITests() { "pm_tests.rs": 0, "publish_tests.rs": 0, "repl_tests.rs": 0, - "run_tests.rs": 18, + "run_tests.rs": 20, "shared_library_tests.rs": 0, "task_tests.rs": 2, "test_tests.rs": 0,
deno
denoland
Rust
Rust
102,021
5,502
A modern runtime for JavaScript and TypeScript.
denoland_deno
NEW_FEAT
obvious
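The commit message spells out the precedence rule: later `--env-file` arguments override earlier ones, but variables already present in the process environment always win. A minimal Rust sketch of that merge order, assuming a loader that never overwrites an already-set variable (which is how dotenvy's `from_filename` behaves, and why the real loop walks the file list in reverse):

```rust
use std::collections::HashMap;

// Minimal sketch of the precedence rule from the commit message: process env
// wins over every file, and later --env-file arguments beat earlier ones.
fn merge_env(
    process_env: &HashMap<String, String>,
    files: &[HashMap<String, String>],
) -> HashMap<String, String> {
    let mut out = process_env.clone(); // variables already in the process win
    for file in files.iter().rev() {
        // last file first, so earlier files cannot override it
        for (k, v) in file {
            out.entry(k.clone()).or_insert_with(|| v.clone()); // never overwrite
        }
    }
    out
}

fn main() {
    let process: HashMap<String, String> =
        HashMap::from([("HOME".to_string(), "/root".to_string())]);
    let one: HashMap<String, String> =
        HashMap::from([("FOO".to_string(), "one".to_string())]);
    let two: HashMap<String, String> = HashMap::from([
        ("FOO".to_string(), "two".to_string()),
        ("HOME".to_string(), "/tmp".to_string()),
    ]);
    let merged = merge_env(&process, &[one, two]);
    assert_eq!(merged["FOO"], "two"); // .env.two overrides .env.one
    assert_eq!(merged["HOME"], "/root"); // the process variable still wins
    println!("ok");
}
```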
372cc4a0100efd3dbd80193b74ee65752d139ea8
null
Lanco
Fix typo (#2772)
false
1
1
0
--- context.go @@ -732,7 +732,7 @@ func (c *Context) ShouldBindBodyWith(obj interface{}, bb binding.BindingBody) (e // If the headers are nots syntactically valid OR the remote IP does not correspong to a trusted proxy, // the remote IP (coming form Request.RemoteAddr) is returned. func (c *Context) ClientIP() string { - // Check if we're running on a tursted platform + // Check if we're running on a trusted platform switch c.engine.TrustedPlatform { case PlatformGoogleAppEngine: if addr := c.requestHeader("X-Appengine-Remote-Addr"); addr != "" {
gin-gonic_gin.json
null
null
null
null
null
null
gin-gonic_gin.json
CONFIG_CHANGE
4, written as a fix, but it is only a typo fix in a comment, not a code fix.
b9a836515fad5df57a86412b2cd41c49869ec0d6
null
Filippo Valsorda
Update the Vimeo test vector md5 confirmed that this is indeed the first 10241 (we went off by one with byte range 0-10240) of the full, playing mp4, so they probably reencoded or something
false
1
1
0
--- vimeo.py @@ -27,7 +27,7 @@ class VimeoIE(InfoExtractor): { u'url': u'http://vimeo.com/56015672#at=0', u'file': u'56015672.mp4', - u'md5': u'ae7a1d8b183758a0506b0622f37dfa14', + u'md5': u'8879b6cc097e987f02484baf890129e5', u'info_dict': { u"upload_date": u"20121220", u"description": u"This is a test case for youtube-dl.\nFor more information, see github.com/rg3/youtube-dl\nTest chars: \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550",
yt-dlp_yt-dlp.json
null
null
null
null
null
null
yt-dlp_yt-dlp.json
BUG_FIX
4, previous test vector no longer matched, so it was updated
0122938a7ab49b531b71602584da524ffebdd2f9
2025-03-26 20:06:39
Tomas Glozar
rtla: Always set all tracer options rtla currently only sets tracer options that are explicitly set by the user, with the exception of OSNOISE_WORKLOAD. This leads to improper behavior in case rtla is run with those options not set to the default value. rtla does reset them to the original value upon exiting, but that does not protect it from starting with non-default values set either by an improperly exited rtla or by another user of the tracers. For example, after running this command: $ echo 1 > /sys/kernel/tracing/osnoise/stop_tracing_us all runs of rtla will stop at the 1us threshold, even if not requested by the user: $ rtla osnoise hist Index CPU-000 CPU-001 1 8 5 2 5 9 3 1 2 4 6 1 5 2 1 6 0 1 8 1 1 12 0 1 14 1 0 15 1 0 over: 0 0 count: 25 21 min: 1 1 avg: 3.68 3.05 max: 15 12 rtla osnoise hit stop tracing Fix the problem by setting the default value for all tracer options if the user has not provided their own value. For most of the options, it's enough to just drop the if clause checking for the value being set. For cpus, "all" is used as the default value, and for osnoise default period and runtime, default values of the osnoise_data variable in trace_osnoise.c are used. Cc: Luis Goncalves <[email protected]> Link: https://lore.kernel.org/[email protected] Fixes: 1eceb2fc2ca5 ("rtla/osnoise: Add osnoise top mode") Fixes: 829a6c0b5698 ("rtla/osnoise: Add the hist mode") Fixes: a828cd18bc4a ("rtla: Add timerlat tool and timelart top mode") Fixes: 1eeb6328e8b3 ("rtla/timerlat: Add timerlat hist mode") Signed-off-by: Tomas Glozar <[email protected]> Reviewed-by: John Kacur <[email protected]> Signed-off-by: Steven Rostedt (Google) <[email protected]>
false
56
59
115
--- tools/tracing/rtla/src/osnoise.c @@ -17,9 +17,6 @@ #include "osnoise.h" -#define DEFAULT_SAMPLE_PERIOD 1000000 /* 1s */ -#define DEFAULT_SAMPLE_RUNTIME 1000000 /* 1s */ - /* * osnoise_get_cpus - return the original "osnoise/cpus" content * @@ -1130,43 +1127,46 @@ osnoise_apply_config(struct osnoise_tool *tool, struct osnoise_params *params) if (!params->sleep_time) params->sleep_time = 1; - retval = osnoise_set_cpus(tool->context, params->cpus ? params->cpus : "all"); - if (retval) { - err_msg("Failed to apply CPUs config\n"); - goto out_err; + if (params->cpus) { + retval = osnoise_set_cpus(tool->context, params->cpus); + if (retval) { + err_msg("Failed to apply CPUs config\n"); + goto out_err; + } } if (params->runtime || params->period) { retval = osnoise_set_runtime_period(tool->context, params->runtime, params->period); - } else { - retval = osnoise_set_runtime_period(tool->context, - DEFAULT_SAMPLE_PERIOD, - DEFAULT_SAMPLE_RUNTIME); - } - - if (retval) { - err_msg("Failed to set runtime and/or period\n"); - goto out_err; + if (retval) { + err_msg("Failed to set runtime and/or period\n"); + goto out_err; + } } - retval = osnoise_set_stop_us(tool->context, params->stop_us); - if (retval) { - err_msg("Failed to set stop us\n"); - goto out_err; + if (params->stop_us) { + retval = osnoise_set_stop_us(tool->context, params->stop_us); + if (retval) { + err_msg("Failed to set stop us\n"); + goto out_err; + } } - retval = osnoise_set_stop_total_us(tool->context, params->stop_total_us); - if (retval) { - err_msg("Failed to set stop total us\n"); - goto out_err; + if (params->stop_total_us) { + retval = osnoise_set_stop_total_us(tool->context, params->stop_total_us); + if (retval) { + err_msg("Failed to set stop total us\n"); + goto out_err; + } } - retval = osnoise_set_tracing_thresh(tool->context, params->threshold); - if (retval) { - err_msg("Failed to set tracing_thresh\n"); - goto out_err; + if (params->threshold) { + retval = osnoise_set_tracing_thresh(tool->context, params->threshold); + if (retval) { + err_msg("Failed to set tracing_thresh\n"); + goto out_err; + } } if (params->hk_cpus) { --- tools/tracing/rtla/src/timerlat.c @@ -16,8 +16,6 @@ #include "timerlat.h" -#define DEFAULT_TIMERLAT_PERIOD 1000 /* 1ms */ - /* * timerlat_apply_config - apply common configs to the initialized tool */ @@ -29,44 +27,49 @@ timerlat_apply_config(struct osnoise_tool *tool, struct timerlat_params *params) if (!params->sleep_time) params->sleep_time = 1; - retval = osnoise_set_cpus(tool->context, params->cpus ? 
params->cpus : "all"); - if (retval) { - err_msg("Failed to apply CPUs config\n"); - goto out_err; - } - - if (!params->cpus) { + if (params->cpus) { + retval = osnoise_set_cpus(tool->context, params->cpus); + if (retval) { + err_msg("Failed to apply CPUs config\n"); + goto out_err; + } + } else { for (i = 0; i < sysconf(_SC_NPROCESSORS_CONF); i++) CPU_SET(i, &params->monitored_cpus); } - retval = osnoise_set_stop_us(tool->context, params->stop_us); - if (retval) { - err_msg("Failed to set stop us\n"); - goto out_err; + if (params->stop_us) { + retval = osnoise_set_stop_us(tool->context, params->stop_us); + if (retval) { + err_msg("Failed to set stop us\n"); + goto out_err; + } } - retval = osnoise_set_stop_total_us(tool->context, params->stop_total_us); - if (retval) { - err_msg("Failed to set stop total us\n"); - goto out_err; + if (params->stop_total_us) { + retval = osnoise_set_stop_total_us(tool->context, params->stop_total_us); + if (retval) { + err_msg("Failed to set stop total us\n"); + goto out_err; + } } - retval = osnoise_set_timerlat_period_us(tool->context, - params->timerlat_period_us ? - params->timerlat_period_us : - DEFAULT_TIMERLAT_PERIOD); - if (retval) { - err_msg("Failed to set timerlat period\n"); - goto out_err; + if (params->timerlat_period_us) { + retval = osnoise_set_timerlat_period_us(tool->context, params->timerlat_period_us); + if (retval) { + err_msg("Failed to set timerlat period\n"); + goto out_err; + } } - retval = osnoise_set_print_stack(tool->context, params->print_stack); - if (retval) { - err_msg("Failed to set print stack\n"); - goto out_err; + if (params->print_stack) { + retval = osnoise_set_print_stack(tool->context, params->print_stack); + if (retval) { + err_msg("Failed to set print stack\n"); + goto out_err; + } } if (params->hk_cpus) {
linux
torvalds
C
C
189,022
55,340
Linux kernel source tree
torvalds_linux
BUG_FIX
fixed an exception
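The fix boils down to writing every tracer option on startup, falling back to a default when the user supplied nothing, so stale values left in tracefs by a previous (or improperly exited) run cannot leak into a new one. A minimal sketch of that pattern, not the rtla C code itself: 1000us mirrors the patch's DEFAULT_TIMERLAT_PERIOD, while 0 for stop_tracing_us (meaning "disabled") is an assumption for illustration.

```rust
// Illustrative sketch: always write every tracer option, substituting a
// default when the user set nothing, instead of only writing user-set ones.
struct Params {
    stop_us: Option<u64>,
    period_us: Option<u64>,
}

fn apply_config(p: &Params, write_option: &mut dyn FnMut(&str, u64)) {
    // Hypothetical defaults: 0 means "stop tracing disabled"; 1000us mirrors
    // the DEFAULT_TIMERLAT_PERIOD constant removed by the patch.
    write_option("stop_tracing_us", p.stop_us.unwrap_or(0));
    write_option("timerlat_period_us", p.period_us.unwrap_or(1000));
}

fn main() {
    let mut applied = Vec::new();
    let params = Params { stop_us: None, period_us: Some(500) };
    apply_config(&params, &mut |name, val| applied.push(format!("{name}={val}")));
    // Both options are written even though the user set only one of them.
    assert_eq!(applied, ["stop_tracing_us=0", "timerlat_period_us=500"]);
    println!("ok");
}
```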
6046d49ad14b39bae67f4f74e151e065db159c60
2023-05-06 11:35:41
adams549659584
feat: add loading indicator, cache reset, and version notice features
false
206
91
297
--- web/chat.html @@ -7,8 +7,7 @@ <head> <title>Bing AI</title> <meta content="text/html; charset=utf-8" http-equiv="content-type" /> - <meta name="viewport" - content="width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0, viewport-fit=cover" /> + <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0, viewport-fit=cover" /> <meta name="referrer" content="origin-when-cross-origin" /> <link rel="icon" type="image/svg+xml" href="/web/img/logo.svg" /> <link rel="manifest" href="/web/manifest.webmanifest"> @@ -52,15 +51,6 @@ <div id="b_sydBgCover"></div> </div> - <!-- loading start --> - <div class="loading-spinner"> - <div class="bounce1"></div> - <div class="bounce2"></div> - <div class="bounce3"></div> - </div> - <!-- loading end --> - - <!-- chat nav start --> <div class="chat-nav"> <a class="nav__title-github" href="https://github.com/adams549659584/go-proxy-bingai" target="_blank"> <svg height="32" aria-hidden="true" viewBox="0 0 16 16" version="1.1" width="32" data-view-component="true" @@ -88,19 +78,15 @@ <div class="chat-login-title">设置用户</div> <input class="chat-login-inp-cookie" type="text" placeholder="用户 Cookie ,仅需要 _U 的值"> <div class="chat-login-btn-group"> - <div class="chat-login-btn chat-login-btn-cancel"> + <div class="chat-login-btn-cancel"> 取消 </div> - <div class="chat-login-btn chat-login-btn-save"> + <div class="chat-login-btn-save"> 保存 </div> - <div class="chat-login-btn chat-login-btn-reset"> - 重置 - </div> </div> </div> </div> - <!-- chat nav end --> <script type="text/javascript">//<![CDATA[ (function (n, t) { --- web/css/index.css @@ -15,16 +15,13 @@ form, table, tr, th, -td, -input, -div { +td { border: 0; border-collapse: collapse; border-spacing: 0; list-style: none; margin: 0; - padding: 0; - box-sizing: border-box; + padding: 0 } html { @@ -112,10 +109,10 @@ html { .chat-login-bg .chat-login { width: 90vw; + /* height: 40vh; */ background-image: url(/cdx/bg-sprite.png); background-position: 0% 0%; border-radius: 1vmin; - padding: 0 4vmin; } .chat-login .chat-login-title { @@ -123,142 +120,81 @@ html { font-weight: 600; text-align: center; color: #111; - font-size: 8vmin; - margin: 4vmin 0; + font-size: 6vmin; + height: 14vmin; + line-height: 14vmin; } .chat-login .chat-login-inp-cookie { display: block; text-align: center; - font-size: 5vmin; + width: 80vw; + height: 14vmin; + line-height: 14vmin; + margin: 0 auto; + font-size: 6vmin; border-radius: 1vmin; border: none; outline: none; - padding: 4vmin; - margin: 0; - width: 100%; + padding: 0; + margin: 0 auto; } .chat-login-btn-group { display: flex; - justify-content: space-between; + justify-content: center; align-items: center; } -.chat-login-btn-group .chat-login-btn { - background: #fff; - color: #111; +.chat-login-btn-group .chat-login-btn-save, +.chat-login-btn-group .chat-login-btn-cancel { + border-image: linear-gradient(81.62deg, #2870ea 8.72%, #1b4aef 85.01%); + background: linear-gradient(81.62deg, #2870ea 8.72%, #1b4aef 85.01%); border-radius: 1vmin; text-decoration: none; - /* height: 12vmin; */ - /* line-height: 12vmin; */ + width: 30vmin; + height: 14vmin; + line-height: 14vmin; box-sizing: border-box; font-style: normal; - font-size: 5vmin; + font-weight: 600; + font-size: 6vmin; + color: #fff; text-align: center; + margin: 4vmin auto; cursor: pointer; user-select: none; - margin: 4vmin 0; - padding: 3vmin 6vmin; } -.chat-login-btn.chat-login-btn-save { - border-image: linear-gradient(81.62deg, #2870ea 8.72%, #1b4aef 
85.01%); - background: linear-gradient(81.62deg, #2870ea 8.72%, #1b4aef 85.01%); - color: #fff; -} - -.chat-login-btn.chat-login-btn-reset { - background: #f56c6c; - color: #fff; +.chat-login-btn-group .chat-login-btn-cancel { + background: #fff; + color: #111; } @media screen and (min-width: 750px) { .chat-login-bg .chat-login { - width: 550px; - padding: 0 20px; + width: 40vw; } .chat-login .chat-login-title { font-size: 36px; - margin: 20px 0; + height: 80px; + line-height: 80px; } .chat-login .chat-login-inp-cookie { - font-size: 18px; - padding: 20px; + width: 36vw; + height: 50px; + line-height: 50px; + font-size: 16px; } - .chat-login-btn.chat-login-btn-save, - .chat-login-btn.chat-login-btn-cancel, - .chat-login-btn.chat-login-btn-reset { - font-size: 20px; - margin: 20px 0; - padding: 16px 40px; + .chat-login-btn-group .chat-login-btn-save, + .chat-login-btn-group .chat-login-btn-cancel { + width: 104px; + height: 50px; + line-height: 50px; + font-size: 16px; + margin: 20px auto; } -} - -/* loading start */ -.loading-spinner { - display: flex; - justify-content: center; - align-items: center; - height: 100vh; - opacity: 1; - transition: opacity 2s ease-out; -} - -.loading-spinner.hidden { - opacity: 0; -} - -.loading-spinner>div { - width: 30px; - height: 30px; - background: linear-gradient(90deg, #2870EA 10.79%, #1B4AEF 87.08%); - - border-radius: 100%; - display: inline-block; - -webkit-animation: sk-bouncedelay 1.4s infinite ease-in-out both; - animation: sk-bouncedelay 1.4s infinite ease-in-out both; -} - -.loading-spinner .bounce1 { - -webkit-animation-delay: -0.32s; - animation-delay: -0.32s; -} - -.loading-spinner .bounce2 { - -webkit-animation-delay: -0.16s; - animation-delay: -0.16s; -} - -@-webkit-keyframes sk-bouncedelay { - - 0%, - 80%, - 100% { - -webkit-transform: scale(0) - } - - 40% { - -webkit-transform: scale(1.0) - } -} - -@keyframes sk-bouncedelay { - - 0%, - 80%, - 100% { - -webkit-transform: scale(0); - transform: scale(0); - } - - 40% { - -webkit-transform: scale(1.0); - transform: scale(1.0); - } -} - -/* loading end */ \ No newline at end of file +} \ No newline at end of file --- web/js/index.js @@ -100,26 +100,28 @@ function setCookie(name, value, minutes = 0, path = '/', domain = '') { async function registerSW() { if ('serviceWorker' in navigator && workbox) { - window.addEventListener('load', async function () { - const wb = new workbox.Workbox('sw.js'); - wb.addEventListener('installed', async function (event) { - console.log('Service Worker 安装成功:', event); - const swVersion = await wb.messageSW({ type: 'GET_VERSION' }); - alert(`新版本 ${swVersion} 已就绪,刷新后即可体验 !`); - window.location.reload(); - }); - - wb.addEventListener('activated', function (event) { - console.log('Service Worker 激活成功:', event); - }); - - wb.addEventListener('updated', function (event) { - console.log('Service Worker 更新成功:', event); - }); - const swRegistration = await wb.register(); - const swVersion = await wb.messageSW({ type: 'GET_VERSION' }); - console.log('Service Worker Version:', swVersion); + const wb = new workbox.Workbox('sw.js'); + wb.addEventListener('activated', (event) => { + // console.log(`离线就绪 : `); + // console.log(`activated : `, event); + // console.log(`activated isUpdate : `, event.isUpdate); + if (event.isUpdate) { + // 更新 + console.log(`sw 有更新,reload() `); + wb.messageSkipWaiting(); + window.location.reload(true); + } + }); + wb.addEventListener('waiting', (event) => { + console.log(`waiting : `, event); }); + wb.addEventListener('externalwaiting', (event) 
=> { + console.log(`externalwaiting : `, event); + }); + const swRegistration = await wb.register({ immediate: false }); + + const swVersion = await wb.messageSW({ type: 'GET_VERSION' }); + console.log('Service Worker Version:', swVersion); } } registerSW(); @@ -151,19 +153,10 @@ async function tryCreateConversationId(trycount = 0) { await sleep(300); trycount += 1; console.log(`开始第 ${trycount} 次重试创建会话ID`); - setCookie('BingAI_Rand_IP', '', -1); tryCreateConversationId(trycount); } } -function hideLoading() { - const loadingEle = document.querySelector('.loading-spinner'); - loadingEle.addEventListener('transitionend', function () { - loadingEle.remove(); - }); - loadingEle.classList.add('hidden'); -} - (function () { var config = { cookLoc: {} }; sj_evt.bind( @@ -173,9 +166,6 @@ function hideLoading() { if (SydFSCModule && SydFSCModule.initWithWaitlistUpdate) { SydFSCModule.initWithWaitlistUpdate(config, 10); - // 隐藏加载中 - hideLoading(); - // todo 反馈暂时无法使用,先移除 document .querySelector('cib-serp') @@ -194,12 +184,13 @@ function hideLoading() { // 用户 cookie const userCookieName = '_U'; - const randIpCookieName = 'BingAI_Rand_IP'; const userCookieVal = getCookie(userCookieName); const chatLoginBgEle = document.querySelector('.chat-login-bg'); if (!userCookieVal) { // chatLoginBgEle.style.display = 'flex'; tryCreateConversationId(); + } else { + document.querySelector('.chat-login-inp-cookie').value = userCookieVal; } document.querySelector('.chat-login-btn-save').onclick = function () { const cookie = document.querySelector('.chat-login-inp-cookie').value; @@ -214,32 +205,7 @@ function hideLoading() { document.querySelector('.chat-login-btn-cancel').onclick = function () { chatLoginBgEle.style.display = 'none'; }; - document.querySelector('.chat-login-btn-reset').onclick = async function () { - // del cookie - setCookie(userCookieName, '', -1); - setCookie(randIpCookieName, '', -1); - // del storage - localStorage.clear(); - sessionStorage.clear(); - // del sw - const cacheKeys = await caches.keys(); - for (const cacheKey of cacheKeys) { - await caches.open(cacheKey).then(async (cache) => { - const requests = await cache.keys(); - return await Promise.all( - requests.map((request) => { - console.log(`del cache : `, request.url); - return cache.delete(request); - }) - ); - }); - } - chatLoginBgEle.style.display = 'none'; - window.location.reload(); - }; document.querySelector('.nav__title-setting').onclick = function () { - const userCookieVal = getCookie(userCookieName); - document.querySelector('.chat-login-inp-cookie').value = userCookieVal; chatLoginBgEle.style.display = 'flex'; }; } --- web/sw.js @@ -1,7 +1,7 @@ // 引入workbox 框架 importScripts('./js/sw/workbox-sw.js'); -const SW_VERSION = 'v1.3.2'; +const SW_VERSION = '1.3.0'; const CACHE_PREFIX = 'BingAI'; workbox.setConfig({ debug: false, logLevel: 'warn' }); @@ -19,7 +19,7 @@ workbox.precaching.precacheAndRoute([ // css { url: '/web/css/index.css', - revision: '2023.05.06.14', + revision: '2023.05.06', }, // js { @@ -56,12 +56,12 @@ workbox.precaching.precacheAndRoute([ }, { url: '/web/js/index.js', - revision: '2023.05.06.14', + revision: '2023.05.06.11', }, // html { url: '/web/chat.html', - revision: '2023.05.06.14', + revision: '2023.05.06', }, // ico { @@ -117,30 +117,27 @@ self.addEventListener('message', (event) => { const replyPort = event.ports[0]; const message = event.data; // console.log(`sw message : `, message); - if (message.type === 'SKIP_WAITING') { - self.skipWaiting(); - } if (replyPort && message && message.type === 
'GET_VERSION') { replyPort.postMessage(SW_VERSION); } }); // 安装阶段可删除旧缓存等等 -// self.addEventListener('install', async (event) => { -// const cacheKeys = await caches.keys(); -// for (const cacheKey of cacheKeys) { -// await caches.open(cacheKey).then(async (cache) => { -// const requests = await cache.keys(); -// return await Promise.all( -// requests.map((request) => { -// if (true || request.url.includes('xxx')) { -// console.log(`del cache : `, request.url); -// return cache.delete(request); -// } else { -// return Promise.resolve(); -// } -// }) -// ); -// }); -// } -// }); +self.addEventListener('install', async (event) => { + const cacheKeys = await caches.keys(); + for (const cacheKey of cacheKeys) { + await caches.open(cacheKey).then(async (cache) => { + const requests = await cache.keys(); + return await Promise.all( + requests.map((request) => { + if (true || request.url.includes('xxx')) { + console.log(`del cache : `, request.url); + return cache.delete(request); + } else { + return Promise.resolve(); + } + }) + ); + }); + } +});
go-proxy-bingai
adams549659584
HTML
HTML
8,773
13,135
用 Vue3 和 Go 搭建的微软 New Bing 演示站点,拥有一致的 UI 体验,支持 ChatGPT 提示词,国内可用。
adams549659584_go-proxy-bingai
NEW_FEAT
Code change: new js function
c1466eb66eda14ade0b7d0728c73f9753291385f
2024-04-08 23:31:38
Ilkka Seppälä
dcos: update license years
false
1
1
2
--- LICENSE.md @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright � 2014-2024 Ilkka Seppälä +Copyright 2014-2021 Ilkka Seppl Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal
java-design-patterns
iluwatar
Java
Java
90,911
26,831
Design patterns implemented in Java
iluwatar_java-design-patterns
DOC_CHANGE
Obvious
5434242af764d1525bd6ddf6e53ee5567042e381
2025-03-16 06:46:09
Zalathar
Build `UserTypeProjections` lazily when visiting bindings
false
179
112
291
--- compiler/rustc_middle/src/mir/mod.rs @@ -33,8 +33,8 @@ use crate::ty::codec::{TyDecoder, TyEncoder}; use crate::ty::print::{FmtPrinter, Printer, pretty_print_const, with_no_trimmed_paths}; use crate::ty::{ - self, GenericArg, GenericArgsRef, Instance, InstanceKind, List, Ty, TyCtxt, TypeVisitableExt, - TypingEnv, UserTypeAnnotationIndex, + self, AdtDef, GenericArg, GenericArgsRef, Instance, InstanceKind, List, Ty, TyCtxt, + TypeVisitableExt, TypingEnv, UserTypeAnnotationIndex, }; mod basic_blocks; @@ -1482,10 +1482,53 @@ pub struct UserTypeProjections { pub contents: Vec<UserTypeProjection>, } -impl UserTypeProjections { +impl<'tcx> UserTypeProjections { + pub fn none() -> Self { + UserTypeProjections { contents: vec![] } + } + + pub fn is_empty(&self) -> bool { + self.contents.is_empty() + } + pub fn projections(&self) -> impl Iterator<Item = &UserTypeProjection> + ExactSizeIterator { self.contents.iter() } + + pub fn push_user_type(mut self, base_user_type: UserTypeAnnotationIndex) -> Self { + self.contents.push(UserTypeProjection { base: base_user_type, projs: vec![] }); + self + } + + fn map_projections(mut self, f: impl FnMut(UserTypeProjection) -> UserTypeProjection) -> Self { + self.contents = self.contents.into_iter().map(f).collect(); + self + } + + pub fn index(self) -> Self { + self.map_projections(|pat_ty_proj| pat_ty_proj.index()) + } + + pub fn subslice(self, from: u64, to: u64) -> Self { + self.map_projections(|pat_ty_proj| pat_ty_proj.subslice(from, to)) + } + + pub fn deref(self) -> Self { + self.map_projections(|pat_ty_proj| pat_ty_proj.deref()) + } + + pub fn leaf(self, field: FieldIdx) -> Self { + self.map_projections(|pat_ty_proj| pat_ty_proj.leaf(field)) + } + + pub fn variant( + self, + adt_def: AdtDef<'tcx>, + variant_index: VariantIdx, + field_index: FieldIdx, + ) -> Self { + self.map_projections(|pat_ty_proj| pat_ty_proj.variant(adt_def, variant_index, field_index)) + } } /// Encodes the effect of a user-supplied type annotation on the @@ -1510,6 +1553,42 @@ pub struct UserTypeProjection { pub projs: Vec<ProjectionKind>, } +impl UserTypeProjection { + pub(crate) fn index(mut self) -> Self { + self.projs.push(ProjectionElem::Index(())); + self + } + + pub(crate) fn subslice(mut self, from: u64, to: u64) -> Self { + self.projs.push(ProjectionElem::Subslice { from, to, from_end: true }); + self + } + + pub(crate) fn deref(mut self) -> Self { + self.projs.push(ProjectionElem::Deref); + self + } + + pub(crate) fn leaf(mut self, field: FieldIdx) -> Self { + self.projs.push(ProjectionElem::Field(field, ())); + self + } + + pub(crate) fn variant( + mut self, + adt_def: AdtDef<'_>, + variant_index: VariantIdx, + field_index: FieldIdx, + ) -> Self { + self.projs.push(ProjectionElem::Downcast( + Some(adt_def.variant(variant_index).name), + variant_index, + )); + self.projs.push(ProjectionElem::Field(field_index, ())); + self + } +} + rustc_index::newtype_index! { #[derive(HashStable)] #[encodable] --- compiler/rustc_mir_build/src/builder/matches/mod.rs @@ -5,11 +5,6 @@ //! This also includes code for pattern bindings in `let` statements and //! function parameters. 
-use std::assert_matches::assert_matches; -use std::borrow::Borrow; -use std::mem; -use std::sync::Arc; - use rustc_abi::VariantIdx; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::stack::ensure_sufficient_stack; @@ -24,7 +19,6 @@ use crate::builder::ForGuard::{self, OutsideGuard, RefWithinGuard}; use crate::builder::expr::as_place::PlaceBuilder; -use crate::builder::matches::user_ty::ProjectedUserTypesNode; use crate::builder::scope::DropKind; use crate::builder::{ BlockAnd, BlockAndExtension, Builder, GuardFrame, GuardFrameLocal, LocalsForNode, @@ -33,9 +27,13 @@ // helper functions, broken out by category: mod match_pair; mod test; -mod user_ty; mod util; +use std::assert_matches::assert_matches; +use std::borrow::Borrow; +use std::mem; +use std::sync::Arc; + /// Arguments to [`Builder::then_else_break_inner`] that are usually forwarded /// to recursive invocations. #[derive(Clone, Copy)] @@ -759,12 +757,11 @@ pub(crate) fn declare_bindings( ) -> Option<SourceScope> { self.visit_primary_bindings_special( pattern, - &ProjectedUserTypesNode::None, - &mut |this, name, mode, var, span, ty, user_tys| { + UserTypeProjections::none(), + &mut |this, name, mode, var, span, ty, user_ty| { let vis_scope = *visibility_scope .get_or_insert_with(|| this.new_source_scope(scope_span, LintLevel::Inherited)); let source_info = SourceInfo { span, scope: this.source_scope }; - let user_tys = user_tys.build_user_type_projections(); this.declare_binding( source_info, @@ -773,7 +770,7 @@ pub(crate) fn declare_bindings( mode, var, ty, - user_tys, + user_ty, ArmHasGuard(guard.is_some()), opt_match_place.map(|(x, y)| (x.cloned(), y)), pattern.span, @@ -877,7 +874,7 @@ pub(super) fn visit_primary_bindings( fn visit_primary_bindings_special( &mut self, pattern: &Pat<'tcx>, - user_tys: &ProjectedUserTypesNode<'_>, + pattern_user_ty: UserTypeProjections, f: &mut impl FnMut( &mut Self, Symbol, @@ -885,21 +882,21 @@ fn visit_primary_bindings_special( LocalVarId, Span, Ty<'tcx>, - &ProjectedUserTypesNode<'_>, + UserTypeProjections, ), ) { // Avoid having to write the full method name at each recursive call. - let visit_subpat = |this: &mut Self, subpat, user_tys: &_, f: &mut _| { + let visit_subpat = |this: &mut Self, subpat, user_tys, f: &mut _| { this.visit_primary_bindings_special(subpat, user_tys, f) }; match pattern.kind { PatKind::Binding { name, mode, var, ty, ref subpattern, is_primary, .. 
} => { if is_primary { - f(self, name, mode, var, pattern.span, ty, user_tys); + f(self, name, mode, var, pattern.span, ty, pattern_user_ty.clone()); } if let Some(subpattern) = subpattern.as_ref() { - visit_subpat(self, subpattern, user_tys, f); + visit_subpat(self, subpattern, pattern_user_ty, f); } } @@ -908,13 +905,13 @@ fn visit_primary_bindings_special( let from = u64::try_from(prefix.len()).unwrap(); let to = u64::try_from(suffix.len()).unwrap(); for subpattern in prefix.iter() { - visit_subpat(self, subpattern, &user_tys.index(), f); + visit_subpat(self, subpattern, pattern_user_ty.clone().index(), f); } if let Some(subpattern) = slice { - visit_subpat(self, subpattern, &user_tys.subslice(from, to), f); + visit_subpat(self, subpattern, pattern_user_ty.clone().subslice(from, to), f); } for subpattern in suffix.iter() { - visit_subpat(self, subpattern, &user_tys.index(), f); + visit_subpat(self, subpattern, pattern_user_ty.clone().index(), f); } } @@ -925,11 +922,11 @@ fn visit_primary_bindings_special( | PatKind::Error(_) => {} PatKind::Deref { ref subpattern } => { - visit_subpat(self, subpattern, &user_tys.deref(), f); + visit_subpat(self, subpattern, pattern_user_ty.deref(), f); } PatKind::DerefPattern { ref subpattern, .. } => { - visit_subpat(self, subpattern, &ProjectedUserTypesNode::None, f); + visit_subpat(self, subpattern, UserTypeProjections::none(), f); } PatKind::AscribeUserType { @@ -945,31 +942,28 @@ fn visit_primary_bindings_special( // Note that the variance doesn't apply here, as we are tracking the effect // of `user_ty` on any bindings contained with subpattern. - // Caution: Pushing this user type here is load-bearing even for - // patterns containing no bindings, to ensure that the type ends - // up represented in MIR _somewhere_. let base_user_ty = self.canonical_user_type_annotations.push(annotation.clone()); - let subpattern_user_tys = user_tys.push_user_type(base_user_ty); - visit_subpat(self, subpattern, &subpattern_user_tys, f) + let subpattern_user_ty = pattern_user_ty.push_user_type(base_user_ty); + visit_subpat(self, subpattern, subpattern_user_ty, f) } PatKind::ExpandedConstant { ref subpattern, .. 
} => { - visit_subpat(self, subpattern, user_tys, f) + visit_subpat(self, subpattern, pattern_user_ty, f) } PatKind::Leaf { ref subpatterns } => { for subpattern in subpatterns { - let subpattern_user_tys = user_tys.leaf(subpattern.field); - debug!("visit_primary_bindings: subpattern_user_tys={subpattern_user_tys:?}"); - visit_subpat(self, &subpattern.pattern, &subpattern_user_tys, f); + let subpattern_user_ty = pattern_user_ty.clone().leaf(subpattern.field); + debug!("visit_primary_bindings: subpattern_user_ty={:?}", subpattern_user_ty); + visit_subpat(self, &subpattern.pattern, subpattern_user_ty, f); } } PatKind::Variant { adt_def, args: _, variant_index, ref subpatterns } => { for subpattern in subpatterns { - let subpattern_user_tys = - user_tys.variant(adt_def, variant_index, subpattern.field); - visit_subpat(self, &subpattern.pattern, &subpattern_user_tys, f); + let subpattern_user_ty = + pattern_user_ty.clone().variant(adt_def, variant_index, subpattern.field); + visit_subpat(self, &subpattern.pattern, subpattern_user_ty, f); } } PatKind::Or { ref pats } => { @@ -978,7 +972,7 @@ fn visit_primary_bindings_special( // `let (x | y) = ...`, the primary binding of `y` occurs in // the right subpattern for subpattern in pats.iter() { - visit_subpat(self, subpattern, user_tys, f); + visit_subpat(self, subpattern, pattern_user_ty.clone(), f); } } } @@ -2770,7 +2764,7 @@ fn declare_binding( mode: BindingMode, var_id: LocalVarId, var_ty: Ty<'tcx>, - user_ty: Option<Box<UserTypeProjections>>, + user_ty: UserTypeProjections, has_guard: ArmHasGuard, opt_match_place: Option<(Option<Place<'tcx>>, Span)>, pat_span: Span, @@ -2780,7 +2774,7 @@ fn declare_binding( let local = LocalDecl { mutability: mode.1, ty: var_ty, - user_ty, + user_ty: if user_ty.is_empty() { None } else { Some(Box::new(user_ty)) }, source_info, local_info: ClearCrossCrate::Set(Box::new(LocalInfo::User(BindingForm::Var( VarBindingForm { --- compiler/rustc_mir_build/src/builder/matches/user_ty.rs @@ -1,140 +0,0 @@ -//! Helper code for building a linked list of user-type projections on the -//! stack while visiting a THIR pattern. -//! -//! This avoids having to repeatedly clone a partly-built [`UserTypeProjections`] -//! at every step of the traversal, which is what the previous code was doing. - -use std::assert_matches::assert_matches; -use std::iter; - -use rustc_abi::{FieldIdx, VariantIdx}; -use rustc_middle::mir::{ProjectionElem, UserTypeProjection, UserTypeProjections}; -use rustc_middle::ty::{AdtDef, UserTypeAnnotationIndex}; -use rustc_span::Symbol; - -/// One of a list of "operations" that can be used to lazily build projections -/// of user-specified types. -#[derive(Clone, Debug)] -pub(crate) enum ProjectedUserTypesOp { - PushUserType { base: UserTypeAnnotationIndex }, - - Index, - Subslice { from: u64, to: u64 }, - Deref, - Leaf { field: FieldIdx }, - Variant { name: Symbol, variant: VariantIdx, field: FieldIdx }, -} - -#[derive(Debug)] -pub(crate) enum ProjectedUserTypesNode<'a> { - None, - Chain { parent: &'a Self, op: ProjectedUserTypesOp }, -} - -impl<'a> ProjectedUserTypesNode<'a> { - pub(crate) fn push_user_type(&'a self, base: UserTypeAnnotationIndex) -> Self { - // Pushing a base user type always causes the chain to become non-empty. - Self::Chain { parent: self, op: ProjectedUserTypesOp::PushUserType { base } } - } - - /// Push another projection op onto the chain, but only if it is already non-empty. 
- fn maybe_push(&'a self, op_fn: impl FnOnce() -> ProjectedUserTypesOp) -> Self { - match self { - Self::None => Self::None, - Self::Chain { .. } => Self::Chain { parent: self, op: op_fn() }, - } - } - - pub(crate) fn index(&'a self) -> Self { - self.maybe_push(|| ProjectedUserTypesOp::Index) - } - - pub(crate) fn subslice(&'a self, from: u64, to: u64) -> Self { - self.maybe_push(|| ProjectedUserTypesOp::Subslice { from, to }) - } - - pub(crate) fn deref(&'a self) -> Self { - self.maybe_push(|| ProjectedUserTypesOp::Deref) - } - - pub(crate) fn leaf(&'a self, field: FieldIdx) -> Self { - self.maybe_push(|| ProjectedUserTypesOp::Leaf { field }) - } - - pub(crate) fn variant( - &'a self, - adt_def: AdtDef<'_>, - variant: VariantIdx, - field: FieldIdx, - ) -> Self { - self.maybe_push(|| { - let name = adt_def.variant(variant).name; - ProjectedUserTypesOp::Variant { name, variant, field } - }) - } - - /// Traverses the chain of nodes to yield each op in the chain. - /// Because this walks from child node to parent node, the ops are - /// naturally yielded in "reverse" order. - fn iter_ops_reversed(&'a self) -> impl Iterator<Item = &'a ProjectedUserTypesOp> { - let mut next = self; - iter::from_fn(move || match next { - Self::None => None, - Self::Chain { parent, op } => { - next = parent; - Some(op) - } - }) - } - - /// Assembles this chain of user-type projections into a proper data structure. - pub(crate) fn build_user_type_projections(&self) -> Option<Box<UserTypeProjections>> { - // If we know there's nothing to do, just return None immediately. - if matches!(self, Self::None) { - return None; - } - - let ops_reversed = self.iter_ops_reversed().cloned().collect::<Vec<_>>(); - // The "first" op should always be `PushUserType`. - // Other projections are only added if there is at least one user type. - assert_matches!(ops_reversed.last(), Some(ProjectedUserTypesOp::PushUserType { .. })); - - let mut projections = vec![]; - for op in ops_reversed.into_iter().rev() { - match op { - ProjectedUserTypesOp::PushUserType { base } => { - projections.push(UserTypeProjection { base, projs: vec![] }) - } - - ProjectedUserTypesOp::Index => { - for p in &mut projections { - p.projs.push(ProjectionElem::Index(())) - } - } - ProjectedUserTypesOp::Subslice { from, to } => { - for p in &mut projections { - p.projs.push(ProjectionElem::Subslice { from, to, from_end: true }) - } - } - ProjectedUserTypesOp::Deref => { - for p in &mut projections { - p.projs.push(ProjectionElem::Deref) - } - } - ProjectedUserTypesOp::Leaf { field } => { - for p in &mut projections { - p.projs.push(ProjectionElem::Field(field, ())) - } - } - ProjectedUserTypesOp::Variant { name, variant, field } => { - for p in &mut projections { - p.projs.push(ProjectionElem::Downcast(Some(name), variant)); - p.projs.push(ProjectionElem::Field(field, ())); - } - } - } - } - - Some(Box::new(UserTypeProjections { contents: projections })) - } -}
rust
rust-lang
Rust
Rust
101,693
13,172
Empowering everyone to build reliable and efficient software.
rust-lang_rust
PERF_IMPROVEMENT
simplify decoder draining logic
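The core trick in the removed helper is a parent-pointer chain built on the stack during pattern traversal: each recursion step borrows its parent node instead of cloning a growing vector, and the ops are replayed in reverse at the end to recover source order. A simplified, hypothetical Rust sketch of that shape (string ops stand in for the real MIR projection elements):

```rust
// Sketch of a stack-allocated parent-pointer chain: pushing borrows the
// parent instead of cloning, and collect() replays the ops in source order.
enum Node<'a> {
    None,
    Chain { parent: &'a Node<'a>, op: &'static str },
}

impl<'a> Node<'a> {
    fn push(&'a self, op: &'static str) -> Node<'a> {
        Node::Chain { parent: self, op }
    }

    // Walk child -> parent, then reverse to recover source order.
    fn collect(&self) -> Vec<&'static str> {
        let mut ops = Vec::new();
        let mut cur = self;
        while let Node::Chain { parent, op } = cur {
            ops.push(*op);
            cur = *parent;
        }
        ops.reverse();
        ops
    }
}

fn main() {
    let root = Node::None;
    let deref = root.push("deref"); // borrows `root`, no allocation
    let field = deref.push("field(0)"); // borrows `deref`
    assert_eq!(field.collect(), ["deref", "field(0)"]);
    println!("ok");
}
```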
5912dadf08fa2b29f723ec2c2b2315399a75c06e
2025-03-19 17:20:47
Mara Bos
Add test.
false
22
0
22
--- tests/ui/thread-local/spawn-hook-atexit.rs @@ -1,22 +0,0 @@ -// Regression test for https://github.com/rust-lang/rust/issues/138696 -//@ run-pass - -#![feature(rustc_private)] - -extern crate libc; - -fn main() { - std::thread::spawn(|| { - unsafe { libc::atexit(spawn_in_atexit) }; - }) - .join() - .unwrap(); -} - -extern "C" fn spawn_in_atexit() { - std::thread::spawn(|| { - println!("Thread spawned in atexit"); - }) - .join() - .unwrap(); -}
rust
rust-lang
Rust
Rust
101,693
13,172
Empowering everyone to build reliable and efficient software.
rust-lang_rust
BUG_FIX
Obvious
df9ac58f8b73f7cdddad67396f1992acb0706b02
2025-03-12 01:46:58
Jordan Harband
[Fix] `reinstall-packages`: do not reinstall corepack Fixes #3544
false
14
5
19
--- nvm.sh @@ -2740,7 +2740,7 @@ nvm_npm_global_modules() { NPMLIST=$(nvm use "${VERSION}" >/dev/null && npm list -g --depth=0 2>/dev/null | command sed -e '1d' -e '/UNMET PEER DEPENDENCY/d') local INSTALLS - INSTALLS=$(nvm_echo "${NPMLIST}" | command sed -e '/ -> / d' -e '/\(empty\)/ d' -e 's/^.* \(.*@[^ ]*\).*/\1/' -e '/^npm@[^ ]*.*$/ d' -e '/^corepack@[^ ]*.*$/ d' | command xargs) + INSTALLS=$(nvm_echo "${NPMLIST}" | command sed -e '/ -> / d' -e '/\(empty\)/ d' -e 's/^.* \(.*@[^ ]*\).*/\1/' -e '/^npm@[^ ]*.*$/ d' | command xargs) local LINKS LINKS="$(nvm_echo "${NPMLIST}" | command sed -n 's/.* -> \(.*\)/\1/ p')" --- test/slow/nvm reinstall-packages/should work as expected @@ -16,7 +16,7 @@ EXPECTED_PACKAGES_INSTALL="autoprefixer bower david@11 grunt-cli grunth-cli http echo "$EXPECTED_PACKAGES_INSTALL" | sed -e 's/test-npmlink //' | xargs npm install -g --quiet get_packages() { - npm list -g --depth=0 | \sed -e '1 d' -e 's/^.* \(.*\)@.*/\1/' -e '/^npm$/ d' -e '/^corepack$/ d' | xargs + npm list -g --depth=0 | \sed -e '1 d' -e 's/^.* \(.*\)@.*/\1/' -e '/^npm$/ d' | xargs } nvm use 0.10.29 --- test/slow/nvm reinstall-packages/works with no installs @@ -5,7 +5,7 @@ die () { echo "$@" ; exit 1; } \. ../../../nvm.sh get_packages() { - npm list -g --depth=0 | \sed -e '1 d' -e 's/^.* \(.*\)@.*/\1/' -e '/^npm$/ d' -e '/^corepack$/ d' | xargs + npm list -g --depth=0 | \sed -e '1 d' -e 's/^.* \(.*\)@.*/\1/' -e '/^npm$/ d' | xargs } nvm use 4.7.2 @@ -14,14 +14,5 @@ ORIGINAL_PACKAGES=$(get_packages) nvm reinstall-packages 4.7.1 FINAL_PACKAGES=$(get_packages) -[ -z "${ORIGINAL_PACKAGES}" ] || die "v4: original packages were not empty: ${ORIGINAL_PACKAGES}" -[ -z "${FINAL_PACKAGES}" ] || die "v4: final packages were not empty: ${FINAL_PACKAGES}" - -nvm use 23.8.20 -ORIGINAL_PACKAGES=$(get_packages) - -nvm reinstall-packages 23.8.0 -FINAL_PACKAGES=$(get_packages) - -[ -z "${ORIGINAL_PACKAGES}" ] || die "v23: original packages were not empty: ${ORIGINAL_PACKAGES}" -[ -z "${FINAL_PACKAGES}" ] || die "v23: final packages were not empty: ${FINAL_PACKAGES}" +[ -z "${ORIGINAL_PACKAGES}" ] || die "original packages were not empty: ${ORIGINAL_PACKAGES}" +[ -z "${FINAL_PACKAGES}" ] || die "final packages were not empty: ${FINAL_PACKAGES}"
nvm
nvm-sh
Shell
Shell
82,623
8,249
Node Version Manager - POSIX-compliant bash script to manage multiple active node.js versions
nvm-sh_nvm
BUG_FIX
Obvious
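The actual change is a sed expression in nvm.sh, but the filtering idea is simple to state: when collecting global packages to reinstall into a new node version, drop the bundled `npm` and `corepack` entries and skip linked packages. A hedged Rust sketch of that filter over `npm list -g --depth=0` output (the sample line format is assumed):

```rust
// Hedged sketch of the filtering idea behind the sed change in nvm.sh.
fn reinstall_candidates(npm_list: &str) -> Vec<&str> {
    npm_list
        .lines()
        .skip(1) // first line is the install prefix printed by `npm list -g`
        .filter(|line| !line.contains(" -> ")) // linked packages are handled separately
        .filter_map(|line| line.rsplit(' ').next()) // keep the "name@version" column
        .filter(|pkg| !pkg.starts_with("npm@") && !pkg.starts_with("corepack@"))
        .collect()
}

fn main() {
    let out = "/usr/local/lib\n\
               ├── npm@10.2.0\n\
               ├── corepack@0.29.0\n\
               ├── grunt-cli@1.4.3\n";
    assert_eq!(reinstall_candidates(out), ["grunt-cli@1.4.3"]);
    println!("ok");
}
```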
58a0cc9373d3e75883082d02681361e3557294c5
2023-05-22 22:56:43
Serhiy Mytrovtsiy
lang: added missing translations
false
66
0
66
--- Stats/Supporting Files/bg.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Утилизацията на диска е %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Температурна единица"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Ниво"; --- Stats/Supporting Files/ca.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Unitat de temperatura"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Nivell"; --- Stats/Supporting Files/cs.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Využití disku je %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Jednotka teploty"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Úroveň"; --- Stats/Supporting Files/da.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Temperaturenhed"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Niveau"; --- Stats/Supporting Files/de.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Auslastung ist %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Temperatureinheit"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Ladezustand"; --- Stats/Supporting Files/el.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Χρώμα ανάγνωσης"; "Write color" = "Χρώμα εγγραφής"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Μονάδα θερμοκρασίας"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Επίπεδο"; --- Stats/Supporting Files/en.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Temperature unit"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Level"; --- Stats/Supporting Files/es.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Unidad de temperatura"; @@ -329,7 
+328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Nivel"; --- Stats/Supporting Files/fa.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "مصرف دیسک %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "واحد دما"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "سطح"; --- Stats/Supporting Files/fr.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "L'utilisation du disque est de %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Unité de température"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Niveau"; --- Stats/Supporting Files/he.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "יחידת טמפרטורה"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "רמה"; --- Stats/Supporting Files/hr.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Korištenje diska je %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Jedinica temperature"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Stanje"; --- Stats/Supporting Files/hu.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "%0 a lemez használat"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Hőmérséklet mértékegysége"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Töltöttségi szint"; --- Stats/Supporting Files/id.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Unit suhu"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Level"; --- Stats/Supporting Files/it.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Utilizzo disco %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Unità di temperatura"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Livello"; --- Stats/Supporting Files/ja.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization 
is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "温度単位"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "充電残量"; --- Stats/Supporting Files/ko.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "디스크 사용률 %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "온도 단위"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "잔량"; --- Stats/Supporting Files/nb.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk-bruk er %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Temperaturenhet"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Nivå"; --- Stats/Supporting Files/nl.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Temperatuureenheid"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Niveau"; --- Stats/Supporting Files/pl.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Wykorzystanie dysku %0"; "Read color" = "Kolor odczytu"; "Write color" = "Kolor zapisu"; -"Disk usage" = "Użycie dysku"; // Sensors "Temperature unit" = "Jednostka temperatury"; @@ -329,7 +328,6 @@ "Every hour" = "Co godzinę"; "Every 12 hours" = "Co 12 godzin"; "Every 24 hours" = "Co 24 godzin"; -"Network activity" = "Aktywność sieciowa"; // Battery "Level" = "Poziom naładowania"; --- Stats/Supporting Files/pt-BR.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Utilização de disco é de %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Unidade de temperatura"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Nível"; --- Stats/Supporting Files/pt-PT.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Utilização do disco é %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Unidade de temperatura"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Nível"; --- Stats/Supporting Files/ro.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk utilization is %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Temperatura unității"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network 
activity"; // Battery "Level" = "Nivel"; --- Stats/Supporting Files/ru.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Использование диска %0"; "Read color" = "Цвет чтения"; "Write color" = "Цвет записи"; -"Disk usage" = "Использование диска"; // Sensors "Temperature unit" = "Единица измерения температуры"; @@ -329,7 +328,6 @@ "Every hour" = "Каждый час"; "Every 12 hours" = "Каждые 12 часов"; "Every 24 hours" = "Каждые 24 часа"; -"Network activity" = "Сетевая активность"; // Battery "Level" = "Уровень заряда"; --- Stats/Supporting Files/sk.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Využitie disku je %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Jednotka teploty"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Úroveň"; --- Stats/Supporting Files/sl.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Izkoriščenost diska je %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Enota za temperaturo"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Raven"; --- Stats/Supporting Files/sv.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Skivanvändning %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Temperaturenhet"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Nivå"; --- Stats/Supporting Files/th.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "ปริมาณการใช้งานดิสก์คือ %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "หน่วยอุณหภูมิ"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "ระดับ"; --- Stats/Supporting Files/tr.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk kullanımı %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "Sıcaklık birimi"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Doluluk"; --- Stats/Supporting Files/uk.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Використання диска %0"; "Read color" = "Колір зчитування"; "Write color" = "Колір запису"; -"Disk usage" = "Використання диска"; // Sensors "Temperature unit" = "Одиниця виміру температури"; @@ -329,7 +328,6 @@ "Every hour" = "Щогодини"; "Every 12 hours" = "Кожні 12 годин"; "Every 24 hours" = "Кожні 24 години"; -"Network activity" = "Мережева активність"; // Battery "Level" = "Рівень заряду"; --- Stats/Supporting Files/vi.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Mức sử dụng đĩa là %0"; "Read color" = "Read color"; "Write color" = "Write color"; -"Disk usage" = "Disk 
usage"; // Sensors "Temperature unit" = "Đơn vị nhiệt độ"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "Dung lượng Pin"; --- Stats/Supporting Files/zh-Hans.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "Disk 利用率是 %0"; "Read color" = "读取颜色"; "Write color" = "写入颜色"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "温度单位"; @@ -329,7 +328,6 @@ "Every hour" = "每小时"; "Every 12 hours" = "每 12 小时"; "Every 24 hours" = "每 24 小时"; -"Network activity" = "Network activity"; // Battery "Level" = "电量"; --- Stats/Supporting Files/zh-Hant.lproj/Localizable.strings @@ -274,7 +274,6 @@ "Disk utilization is" = "磁碟利用率為:%0"; "Read color" = "讀取色彩"; "Write color" = "寫入色彩"; -"Disk usage" = "Disk usage"; // Sensors "Temperature unit" = "溫度單位"; @@ -329,7 +328,6 @@ "Every hour" = "Every hour"; "Every 12 hours" = "Every 12 hours"; "Every 24 hours" = "Every 24 hours"; -"Network activity" = "Network activity"; // Battery "Level" = "電量";
stats
exelban
Swift
Swift
29,655
950
macOS system monitor in your menu bar
exelban_stats
DOC_CHANGE
Obvious
10d9d173903e0bcb53da13633a2cf897317585e4
2025-03-26 19:01:32
Adam Semenenko
[Gradle] Add kdoc for KGP JS npm dependency management ^KT-76299
false
94
8
102
--- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/targets/js/npm/resolver/GradleDependencies.kt @@ -11,102 +11,32 @@ import org.gradle.api.initialization.IncludedBuild import java.io.File import java.io.Serializable -/** - * _This is an internal KGP utility and should not be used in user buildscripts._ - * - * Represents a dependency on a Kotlin/JS project from a remote repository. - * - * The npm dependencies are specified in the `package.json` file that is packed into the `.klib`. - * The `package.json` is extracted in - * [org.jetbrains.kotlin.gradle.targets.js.npm.resolver.KotlinCompilationNpmResolution.createPreparedResolution]. - * - * Gradle might not have downloaded the file yet. - * To avoid eagerly downloading, the file is downloaded later and this - * dependency will be transformed to [FileExternalGradleDependency]. - * - * KBT should look at removing this and replacing it with an artifact transformer, - * but there is no planned work yet. - * - * @see KotlinCompilationNpmResolver - * @see FileExternalGradleDependency - * @see org.jetbrains.kotlin.gradle.targets.js.npm.resolver.KotlinCompilationNpmResolver.ConfigurationVisitor.visitArtifact - * @see org.gradle.api.artifacts.ResolvedDependency - */ data class ExternalGradleDependency( val dependency: ResolvedDependency, - val artifact: ResolvedArtifact, + val artifact: ResolvedArtifact ) : Serializable - -/** - * _This is an internal KGP utility and should not be used in user buildscripts._ - * - * Represents a dependency declared in a buildscript on some existing files on disk. - * - * E.g. `implementation(files("some-dir"))` - * - * We expect that the files are `.klib` files that contain `package.json` files. - * - * The `package.json` files are extracted in - * [org.jetbrains.kotlin.gradle.targets.js.npm.resolver.KotlinCompilationNpmResolution.createPreparedResolution]. - * - * Note that Gradle are deprecating - * [org.gradle.api.artifacts.SelfResolvingDependency] - * (which [org.gradle.api.artifacts.FileCollectionDependency] implements), - * so KBT might need to reimplement this. - * - * @see KotlinCompilationNpmResolver - * @see org.gradle.api.artifacts.FileCollectionDependency - */ data class FileCollectionExternalGradleDependency( val files: Collection<File>, - val dependencyVersion: String?, + val dependencyVersion: String? ) : Serializable -/** - * _This is an internal KGP utility and should not be used in user buildscripts._ - * - * Represents a **downloaded** dependency on a Kotlin/JS project from a remote repository. - * - * @see KotlinCompilationNpmResolver - * @see ExternalGradleDependency - */ data class FileExternalGradleDependency( val dependencyName: String, val dependencyVersion: String, - val file: File, + val file: File ) : Serializable -/** - * _This is an internal KGP utility and should not be used in user buildscripts._ - * - * Represents a dependency on a Kotlin/JS project from a Composite build. - * - * Used to manually declare task dependencies for - * [org.jetbrains.kotlin.gradle.targets.js.npm.tasks.KotlinPackageJsonTask]. - * - * @see KotlinCompilationNpmResolver - */ data class CompositeDependency( val dependencyName: String, val dependencyVersion: String, val includedBuildDir: File, @Transient - val includedBuild: IncludedBuild?, + val includedBuild: IncludedBuild? ) : Serializable -/** - * _This is an internal KGP utility and should not be used in user buildscripts._ - * - * Represents a dependency on another Kotlin/JS project from within the same Gradle build. 
- * - * E.g. `implementation(project(":some-other-project))` - * - * @see KotlinCompilationNpmResolver - * @see org.jetbrains.kotlin.gradle.targets.js.npm.resolver.KotlinCompilationNpmResolution.createPreparedResolution - */ data class InternalDependency( val projectPath: String, val compilationName: String, - val projectName: String, -) : Serializable + val projectName: String +) : Serializable \ No newline at end of file --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/targets/js/npm/resolver/KotlinCompilationNpmResolution.kt @@ -15,13 +15,6 @@ import org.jetbrains.kotlin.gradle.targets.js.npm.resolved.PreparedKotlinCompila import org.jetbrains.kotlin.gradle.utils.getFile import java.io.Serializable -/** - * _This is an internal KGP utility and should not be used in user buildscripts._ - * - * Contains details about the npm dependencies that can affect a Kotlin Compilation. - * - * The resolution is produced by [KotlinCompilationNpmResolver]. - */ class KotlinCompilationNpmResolution( var internalDependencies: Collection<InternalDependency>, var internalCompositeDependencies: Collection<CompositeDependency>, @@ -175,4 +168,4 @@ class KotlinCompilationNpmResolution( } return direct + unique } -} +} \ No newline at end of file --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/targets/js/npm/resolver/KotlinCompilationNpmResolver.kt @@ -38,15 +38,6 @@ import org.jetbrains.kotlin.gradle.targets.wasm.nodejs.WasmNodeJsRootPlugin.Comp import org.jetbrains.kotlin.gradle.targets.wasm.nodejs.WasmNodeJsRootPlugin.Companion.kotlinNpmResolutionManager as wasmKotlinNpmResolutionManager /** - * _This is an internal KGP utility and should not be used in user buildscripts._ - * - * Extracts transitive npm dependencies from dependencies used by a Kotlin Compilation. - * - * KGP JS uses these dependencies to manually extract transitive npm dependencies. - * The npm dependencies are not used directly, but instead - * to configure task inputs for Gradle's up-to-date checks - * and manually configuring task dependencies. - * * See [KotlinNpmResolutionManager] for details about resolution process. */ class KotlinCompilationNpmResolver( @@ -330,4 +321,4 @@ class KotlinCompilationNpmResolver( const val PUBLIC_PACKAGE_JSON_ATTR_VALUE = "public-package-json" } -} +} \ No newline at end of file
kotlin
jetbrains
Kotlin
Kotlin
50,115
5,861
The Kotlin Programming Language.
jetbrains_kotlin
DOC_CHANGE
The `fix:` prefix suggests a bug fix, but the actual change does not fix code behavior; it improves documentation rendering
a3b087fe42d6f7c70932082e25be8e4415687d2e
2023-08-29 19:05:20
Jack Louvton
Update index.html with jackishere signature
false
5
0
5
--- index.html @@ -4032,11 +4032,6 @@ <td>Individual</td> <td>Development; open-source community efforts</td> </tr> - <tr> - <td><a href="https://github.com/jackishere">Jack Louvton</a></td> - <td>Individual</td> - <td>DevOps; Cloud Architect; open-source community efforts</td> - </tr> </tbody> </table> </div>
manifesto
opentofu
HTML
HTML
36,134
1,083
The OpenTF Manifesto expresses concern over HashiCorp's switch of the Terraform license from open-source to the Business Source License (BSL) and calls for the tool's return to a truly open-source license.
opentofu_manifesto
DOC_CHANGE
Obvious
6255f54f413ae9784d9bfb94151b59aeca72c0ec
null
kaisawind
fix[TagsView]: fixed refresh affixed-tag bug (#1653)
false
1
0
1
--- TagsView.vue @@ -76,6 +76,7 @@ export default { routes.forEach(route => { if (route.meta && route.meta.affix) { tags.push({ + fullPath: path.resolve(basePath, route.path), path: path.resolve(basePath, route.path), name: route.name, meta: { ...route.meta }
PanJiaChen_vue-element-admin.json
null
null
null
null
null
null
PanJiaChen_vue-element-admin.json
BUG_FIX
5, fixed a bug
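Note on the fix above: affixed tags were built without a `fullPath`, so a refreshed affixed tag could not resolve its full route. A minimal TypeScript sketch of the corrected tag builder — the `RouteRecord` and `Tag` shapes are assumptions for illustration, and only the `fullPath` line mirrors the actual one-line fix:

import * as path from 'node:path'

interface RouteRecord { path: string; name?: string; meta?: { affix?: boolean } }
interface Tag { fullPath: string; path: string; name?: string; meta: Record<string, unknown> }

function filterAffixTags(routes: RouteRecord[], basePath = '/'): Tag[] {
  const tags: Tag[] = []
  for (const route of routes) {
    if (route.meta?.affix) {
      const resolved = path.resolve(basePath, route.path)
      tags.push({
        fullPath: resolved, // the actual fix: keep fullPath alongside path
        path: resolved,
        name: route.name,
        meta: { ...route.meta },
      })
    }
  }
  return tags
}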
5b9c7e62337142f665d3c4e73a35db5b60ce75b6
2023-08-28 20:13:40
Ali Sajid Imami
feat: add Ali Sajid Imami to the signatories
false
5
0
5
--- index.html @@ -3506,11 +3506,6 @@ <td>Individual</td> <td>Development; Documentation; Testing; open-source community efforts</td> </tr> - <tr> - <td><a href="https://github.com/AliSajid">Ali Sajid Imami</a></td> - <td>Individual</td> - <td>Development; Documentation; Testing; open-source community efforts</td> - </tr> </tbody> </table> </div>
manifesto
opentofu
HTML
HTML
36,134
1,083
The OpenTF Manifesto expresses concern over HashiCorp's switch of the Terraform license from open-source to the Business Source License (BSL) and calls for the tool's return to a truly open-source license.
opentofu_manifesto
NEW_FEAT
Obvious
b3212cece2d3509fa4dbf4fe9ff0af5cc739e99a
2024-07-09 17:30:14
Tien Do Nam
fix: bump version in Inno script
false
1
1
2
--- scripts/compile_windows_exe-inno.iss @@ -4,7 +4,7 @@ ; Copy app/assets/packaging/logo-256.ico to D:\inno\logo-256.ico #define MyAppName "LocalSend" -#define MyAppVersion "1.15.0" +#define MyAppVersion "1.14.0" #define MyAppPublisher "Tien Do Nam" #define MyAppURL "https://localsend.org" #define MyAppExeName "localsend_app.exe"
localsend
localsend
Dart
Dart
58,423
3,136
An open-source cross-platform alternative to AirDrop
localsend_localsend
CONFIG_CHANGE
Version bump only; no functional change
31ec7a71f2742da7155e1365b9b48b1fb956b9bd
null
Reuben Morais
Fix undefined variable when saving test samples to --test_output_file X-DeepSpeech: NOBUILD
false
1
1
0
--- DeepSpeech.py @@ -669,7 +669,7 @@ def train(): def test(): - evaluate(FLAGS.test_files.split(','), create_model, try_loading) + samples = evaluate(FLAGS.test_files.split(','), create_model, try_loading) if FLAGS.test_output_file: # Save decoded tuples as JSON, converting NumPy floats to Python floats json.dump(samples, open(FLAGS.test_output_file, 'w'), default=float)
mozilla_DeepSpeech.json
null
null
null
null
null
null
mozilla_DeepSpeech.json
BUG_FIX
5, fix written in commit msg
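The bug class here is a discarded return value: `evaluate(...)` was called for its side effects while the later JSON dump referenced a `samples` variable that never existed. A short TypeScript sketch of the same pattern; the `evaluate` and `writeJson` signatures are hypothetical stand-ins for the Python originals:

declare function evaluate(testFiles: string[]): object[]
declare function writeJson(filePath: string, data: unknown): void

function test(testFiles: string, testOutputFile?: string): void {
  // fix: bind the result; previously the return value was dropped
  const samples = evaluate(testFiles.split(','))
  if (testOutputFile) {
    writeJson(testOutputFile, samples) // `samples` is now defined here
  }
}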
fc16281cf5bd1506e8cfe62eee1ef02dac82bad5
2022-07-16 20:15:31
Evan You
release: v2.7.7
false
17
4
21
--- CHANGELOG.md @@ -1,16 +1,3 @@ -## [2.7.7](https://github.com/vuejs/vue/compare/v2.7.6...v2.7.7) (2022-07-16) - - -### Bug Fixes - -* **codegen:** script setup should not attempt to resolve native elements as component ([e8d3a7d](https://github.com/vuejs/vue/commit/e8d3a7d7a17f9e66d592fb1ddc4a603af9958d36)), closes [#12674](https://github.com/vuejs/vue/issues/12674) -* **inject:** fix edge case of provided with async-mutated getters ([ea5d0f3](https://github.com/vuejs/vue/commit/ea5d0f3fbfd983cb0275457cbcef344f926381ea)), closes [#12667](https://github.com/vuejs/vue/issues/12667) -* **setup:** ensure setup context slots can be accessed immediately ([67760f8](https://github.com/vuejs/vue/commit/67760f8d30778f58afeada3589d4ac4493329ad5)), closes [#12672](https://github.com/vuejs/vue/issues/12672) -* **types:** vue.d.ts should use relative import to v3-component-public-instance ([#12668](https://github.com/vuejs/vue/issues/12668)) ([46ec648](https://github.com/vuejs/vue/commit/46ec64869479393f95b6abda7a4adecf19867d06)), closes [#12666](https://github.com/vuejs/vue/issues/12666) -* **watch:** fix queueing multiple post watchers ([25ffdb6](https://github.com/vuejs/vue/commit/25ffdb62d22fe8688aca144d945671d5c82a8887)), closes [#12664](https://github.com/vuejs/vue/issues/12664) - - - ## [2.7.6](https://github.com/vuejs/vue/compare/v2.7.5...v2.7.6) (2022-07-15) --- package.json @@ -1,6 +1,6 @@ { "name": "vue", - "version": "2.7.7", + "version": "2.7.6", "packageManager": "[email protected]", "description": "Reactive, component-oriented view layer for modern web interfaces.", "main": "dist/vue.runtime.common.js", --- packages/compiler-sfc/package.json @@ -1,6 +1,6 @@ { "name": "@vue/compiler-sfc", - "version": "2.7.7", + "version": "2.7.6", "description": "compiler-sfc for Vue 2", "main": "dist/compiler-sfc.js", "types": "dist/compiler-sfc.d.ts", --- packages/server-renderer/package.json @@ -1,6 +1,6 @@ { "name": "vue-server-renderer", - "version": "2.7.7", + "version": "2.7.6", "description": "server renderer for Vue 2.0", "main": "index.js", "types": "types/index.d.ts", --- packages/template-compiler/package.json @@ -1,6 +1,6 @@ { "name": "vue-template-compiler", - "version": "2.7.7", + "version": "2.7.6", "description": "template compiler for Vue 2.0", "main": "index.js", "unpkg": "browser.js",
vue
vuejs
TypeScript
TypeScript
208,427
33,725
This is the repo for Vue 2. For Vue 3, go to https://github.com/vuejs/core
vuejs_vue
DOC_CHANGE
Changelog changes in md file, plus version bumps in the package.json files
fd46b2c3c3be3c592b9dd2e1a1f9efb415407603
2023-02-02 13:21:28
ИEØ_ΙΙØZ
Update languages.json (#2204) add & correct some strings
false
8
8
16
--- LANGUAGES/languages.json @@ -1780,7 +1780,7 @@ "STR_WAIT_PROCESS":"目前仍有執行緒正在運作中,請稍候...", "STR_MENU_OPTION":"選項", "STR_MENU_SECURE_BOOT":"支援安全開機", - "STR_MENU_PART_CFG":"設定分割磁區", + "STR_MENU_PART_CFG":"分割區配置", "STR_BTN_OK":"確定", "STR_BTN_CANCEL":"取消", "STR_PRESERVE_SPACE":"在磁區最後保留一部分空間", @@ -1789,7 +1789,7 @@ "STR_CLEAR_SUCCESS":"Ventoy 已成功從裝置中清除", "STR_CLEAR_FAILED":"清除 Ventoy 的過程中發生錯誤,請再次重新插入磁碟重試,詳細訊息請檢視 log.txt 檔案。", "STR_MENU_PART_STYLE":"分割表格式", - "STR_DISK_2TB_MBR_ERROR":"超過 2TB 的磁碟請選擇 GPT 格式分割表", + "STR_DISK_2TB_MBR_ERROR":"超過 2TB 的磁碟請選擇 GPT 分割表格式", "STR_SHOW_ALL_DEV":"顯示所有裝置", "STR_PART_ALIGN_4KB":"磁碟分割區按照 4KB 對齊", "STR_WEB_COMMUNICATION_ERR":"通訊錯誤:", @@ -1799,7 +1799,7 @@ "STR_WEB_TOKEN_MISMATCH":"伺服器狀態已更新", "STR_WEB_SERVICE_BUSY":"伺服器忙碌中,請稍後再試", "STR_MENU_VTSI_CREATE":"建立 VTSI 檔案", - "STR_VTSI_CREATE_TIP":"這項操作將只會在原地目錄建立一個 VTSI 的檔案#@請問是否繼續?", + "STR_VTSI_CREATE_TIP":"這個操作將只會在原地目錄建立一個 VTSI 的檔案#@請問是否繼續?", "STR_VTSI_CREATE_SUCCESS":"VTSI 檔案建立完成!#@您可以使用 Rufus(3.15+) 將 VTSI 檔案寫入指定的裝置以完成 Ventoy 的安裝。", "STR_VTSI_CREATE_FAILED":"VTSI 檔案建立失敗。", "STR_MENU_PART_RESIZE":"無損安裝", @@ -1810,11 +1810,11 @@ "STR_INSTALL_YES_TIP1":"警告:資料將會遺失!", "STR_INSTALL_YES_TIP2":"請在下方的文字輸入框輸入 YES 來證實您想要進行全新安裝取代升級。", "STR_PART_VENTOY_FS":"Ventoy 分割區文件系統", - "STR_PART_FS":"檔案系統", - "STR_PART_CLUSTER":"磁區單位大小", - "STR_PART_CLUSTER_DEFAULT":"系統預設值", - "STR_DONATE":"贊助", - "STR_4KN_UNSUPPORTED":"目前 Ventoy 不支援單位磁區格式為4K的硬碟。", + "STR_PART_FS":"文件系統", + "STR_PART_CLUSTER":"簇的大小", + "STR_PART_CLUSTER_DEFAULT":"系統默認值", + "STR_DONATE":"捐助", + "STR_4KN_UNSUPPORTED":"Currently Ventoy does not support 4K native device.", "STRXXX":"" },
ventoy
ventoy
C
C
65,265
4,197
A new bootable USB solution.
ventoy_ventoy
CONFIG_CHANGE
Very small changes
7b9096955bd033bd57e1665d01372a4c65ac812f
2024-11-22 16:56:17
simonhamp
Fix styling
false
0
2
2
--- tests/Fakes/FakeChildProcessTest.php @@ -215,3 +215,5 @@ it('asserts message using callable', function () { $this->fail('Expected assertion to fail'); }); + +
laravel
nativephp
PHP
PHP
3,498
182
Laravel wrapper for the NativePHP framework
nativephp_laravel
CODE_IMPROVEMENT
Obvious
301515d2e86e30904dc5ca9cd81057049d7de8ff
2024-05-21 09:30:35
fatedier
update the default value of transport.tcpMuxKeepaliveInterval (#4231)
false
9
5
14
--- Release.md @@ -1,7 +1,3 @@ ### Fixes * Fixed an issue where HTTP/2 was not enabled for https2http and https2https plugins. - -### Changes - -* Updated the default value of `transport.tcpMuxKeepaliveInterval` from 60 to 30. --- conf/frpc_full_example.toml @@ -76,7 +76,7 @@ transport.poolCount = 5 # Specify keep alive interval for tcp mux. # only valid if tcpMux is enabled. -# transport.tcpMuxKeepaliveInterval = 30 +# transport.tcpMuxKeepaliveInterval = 60 # Communication protocol used to connect to server # supports tcp, kcp, quic, websocket and wss now, default is tcp --- conf/frps_full_example.toml @@ -34,7 +34,7 @@ transport.maxPoolCount = 5 # Specify keep alive interval for tcp mux. # only valid if tcpMux is true. -# transport.tcpMuxKeepaliveInterval = 30 +# transport.tcpMuxKeepaliveInterval = 60 # tcpKeepalive specifies the interval between keep-alive probes for an active network connection between frpc and frps. # If negative, keep-alive probes are disabled. --- pkg/config/v1/client.go @@ -135,7 +135,7 @@ func (c *ClientTransportConfig) Complete() { c.ProxyURL = util.EmptyOr(c.ProxyURL, os.Getenv("http_proxy")) c.PoolCount = util.EmptyOr(c.PoolCount, 1) c.TCPMux = util.EmptyOr(c.TCPMux, lo.ToPtr(true)) - c.TCPMuxKeepaliveInterval = util.EmptyOr(c.TCPMuxKeepaliveInterval, 30) + c.TCPMuxKeepaliveInterval = util.EmptyOr(c.TCPMuxKeepaliveInterval, 60) if lo.FromPtr(c.TCPMux) { // If TCPMux is enabled, heartbeat of application layer is unnecessary because we can rely on heartbeat in tcpmux. c.HeartbeatInterval = util.EmptyOr(c.HeartbeatInterval, -1) --- pkg/config/v1/server.go @@ -176,7 +176,7 @@ type ServerTransportConfig struct { func (c *ServerTransportConfig) Complete() { c.TCPMux = util.EmptyOr(c.TCPMux, lo.ToPtr(true)) - c.TCPMuxKeepaliveInterval = util.EmptyOr(c.TCPMuxKeepaliveInterval, 30) + c.TCPMuxKeepaliveInterval = util.EmptyOr(c.TCPMuxKeepaliveInterval, 60) c.TCPKeepAlive = util.EmptyOr(c.TCPKeepAlive, 7200) c.MaxPoolCount = util.EmptyOr(c.MaxPoolCount, 5) if lo.FromPtr(c.TCPMux) { --- pkg/util/version/version.go @@ -14,7 +14,7 @@ package version -var version = "0.58.1" +var version = "0.58.0" func Full() string { return version
frp
fatedier
Go
Go
91,116
13,769
A fast reverse proxy to help you expose a local server behind a NAT or firewall to the internet.
fatedier_frp
CONFIG_CHANGE
Very small changes
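The default in question flows through frp's `util.EmptyOr` helper, which substitutes a fallback when the configured value is unset. A rough TypeScript equivalent of that defaulting step — the helper and config shape are assumptions (frp's Go version also treats zero values as empty):

function emptyOr<T>(value: T | undefined | null, fallback: T): T {
  return value === undefined || value === null ? fallback : value
}

interface ServerTransportConfig { tcpMuxKeepaliveInterval?: number }

function complete(c: ServerTransportConfig): void {
  // the default value this commit changes (30 vs. 60 seconds)
  c.tcpMuxKeepaliveInterval = emptyOr(c.tcpMuxKeepaliveInterval, 30)
}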
050873fc287f2f006675fc4a1c6b8ba4d6300901
2023-08-21 23:26:55
MartinGC94
Prevent fallback to file completion when tab completing type names (#20084)
false
7
1
8
--- src/System.Management.Automation/engine/CommandCompletion/CompletionAnalysis.cs @@ -1128,7 +1128,7 @@ namespace System.Management.Automation replacementIndex = typeNameToComplete.Extent.StartOffset; replacementLength = typeNameToComplete.Extent.EndOffset - replacementIndex; completionContext.WordToComplete = typeNameToComplete.FullName; - return CompletionCompleters.CompleteType(completionContext); + result = CompletionCompleters.CompleteType(completionContext); } } --- test/powershell/Host/TabCompletion/TabCompletion.Tests.ps1 @@ -2626,12 +2626,6 @@ function MyFunction ($param1, $param2) $res = TabExpansion2 -inputScript 'using module @{' $res.CompletionMatches.CompletionText -join ' ' | Should -BeExactly "GUID MaximumVersion ModuleName ModuleVersion RequiredVersion" } - - It 'Should not fallback to file completion when completing typenames' { - $Text = '[abcdefghijklmnopqrstuvwxyz]' - $res = TabExpansion2 -inputScript $Text -cursorColumn ($Text.Length - 1) - $res.CompletionMatches | Should -HaveCount 0 - } } Describe "Tab completion tests with remote Runspace" -Tags Feature,RequireAdminOnWindows {
powershell
powershell
C#
C#
46,656
7,522
PowerShell for every system!
powershell_powershell
PERF_IMPROVEMENT
Obvious
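The behavioral point of the change is replacing an early `return` with an assignment to `result`, so an empty type-completion result is treated as final instead of falling through to file completion. A hedged TypeScript sketch of that control-flow shape; the completer functions are hypothetical stand-ins for the C# code:

declare function completeType(wordToComplete: string): string[]
declare function completeFileName(wordToComplete: string): string[]

function getCompletions(word: string, isTypeName: boolean): string[] {
  let result: string[] | null = null
  if (isTypeName) {
    result = completeType(word) // assign instead of returning early
  }
  // File completion runs only when no completer handled the token;
  // an empty array from completeType now suppresses the fallback.
  return result ?? completeFileName(word)
}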
6fbcef3ccc1ed99e48be7fb3817ed0be324d17be
2025-03-18 15:40:07
Andrey Yastrebov
[Gradle] Swift Export enabled by default ^KT-75921
false
60
83
143
--- libraries/tools/kotlin-gradle-plugin-integration-tests/src/test/kotlin/org/jetbrains/kotlin/gradle/native/SwiftExportDslIT.kt @@ -35,6 +35,9 @@ class SwiftExportDslIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -70,6 +73,9 @@ class SwiftExportDslIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -114,6 +120,9 @@ class SwiftExportDslIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -154,6 +163,9 @@ class SwiftExportDslIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -188,6 +200,9 @@ class SwiftExportDslIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -204,6 +219,9 @@ class SwiftExportDslIT : KGPBaseTest() { dependencyManagement = DependencyManagement.DefaultDependencyManagement(setOf(mavenUrl.absolutePathString())), buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { projectPath.resolve("shared/build.gradle.kts").replaceText( @@ -241,6 +259,9 @@ class SwiftExportDslIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { projectPath.resolve("shared/build.gradle.kts").replaceText( --- libraries/tools/kotlin-gradle-plugin-integration-tests/src/test/kotlin/org/jetbrains/kotlin/gradle/native/SwiftExportIT.kt @@ -34,6 +34,9 @@ class SwiftExportIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { buildAndFail( @@ -57,6 +60,9 @@ class SwiftExportIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -97,6 +103,9 @@ class SwiftExportIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -154,6 +163,9 @@ class SwiftExportIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -191,6 +203,9 @@ class SwiftExportIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = 
BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( @@ -226,6 +241,9 @@ class SwiftExportIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { build( --- libraries/tools/kotlin-gradle-plugin-integration-tests/src/test/kotlin/org/jetbrains/kotlin/gradle/native/SwiftExportXCIT.kt @@ -30,12 +30,16 @@ class SwiftExportXCIT : KGPBaseTest() { gradleVersion, buildOptions = defaultBuildOptions.copy( configurationCache = BuildOptions.ConfigurationCacheValue.ENABLED, + nativeOptions = BuildOptions.NativeOptions().copy( + swiftExportEnabled = true, + ) ) ) { buildXcodeProject( xcodeproj = projectPath.resolve("iosApp/iosApp.xcodeproj"), destination = "platform=iOS Simulator,id=${simulator.udid}", - action = XcodeBuildAction.Test + action = XcodeBuildAction.Test, + appendToProperties = { "kotlin.experimental.swift-export.enabled=true" } ) } } --- libraries/tools/kotlin-gradle-plugin-integration-tests/src/test/kotlin/org/jetbrains/kotlin/gradle/testbase/BuildOptions.kt @@ -164,6 +164,7 @@ data class BuildOptions( val cocoapodsPlatform: String? = null, val cocoapodsConfiguration: String? = null, val cocoapodsArchs: String? = null, + val swiftExportEnabled: Boolean? = null, val distributionType: String? = null, val distributionDownloadFromMaven: Boolean? = true, val reinstall: Boolean? = null, @@ -365,6 +366,9 @@ data class BuildOptions( nativeOptions.cocoapodsConfiguration?.let { arguments.add("-Pkotlin.native.cocoapods.configuration=${it}") } + nativeOptions.swiftExportEnabled?.let { + arguments.add("-Pkotlin.experimental.swift-export.enabled=${it}") + } nativeOptions.distributionDownloadFromMaven?.let { arguments.add("-Pkotlin.native.distribution.downloadFromMaven=${it}") } --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/PropertiesProvider.kt @@ -436,6 +436,9 @@ internal class PropertiesProvider private constructor(private val project: Proje val appleAllowEmbedAndSignWithCocoapods: Boolean get() = booleanProperty(PropertyNames.KOTLIN_APPLE_ALLOW_EMBED_AND_SIGN_WITH_COCOAPODS) ?: false + val swiftExportEnabled: Boolean + get() = booleanProperty(PropertyNames.KOTLIN_SWIFT_EXPORT_ENABLED) ?: false + val appleIgnoreXcodeVersionCompatibility: Boolean get() = booleanProperty(PropertyNames.KOTLIN_APPLE_XCODE_COMPATIBILITY_NOWARN) ?: false @@ -725,6 +728,7 @@ internal class PropertiesProvider private constructor(private val project: Proje val KOTLIN_APPLE_XCODE_COMPATIBILITY_NOWARN = property("kotlin.apple.xcodeCompatibility.nowarn") val KOTLIN_APPLE_COCOAPODS_EXECUTABLE = property("kotlin.apple.cocoapods.bin") val KOTLIN_APPLE_ALLOW_EMBED_AND_SIGN_WITH_COCOAPODS = property("kotlin.apple.deprecated.allowUsingEmbedAndSignWithCocoaPodsDependencies") + val KOTLIN_SWIFT_EXPORT_ENABLED = property("kotlin.experimental.swift-export.enabled") val KOTLIN_NATIVE_ENABLE_KLIBS_CROSSCOMPILATION = property("kotlin.native.enableKlibsCrossCompilation") val KOTLIN_ARCHIVES_TASK_OUTPUT_AS_FRIEND_ENABLED = property("kotlin.build.archivesTaskOutputAsFriendModule") val KOTLIN_KMP_ISOLATED_PROJECT_SUPPORT = property("kotlin.kmp.isolated-projects.support") --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/diagnostics/checkers/GradleDeprecatedPropertyChecker.kt @@ -33,7 +33,6 @@ internal object 
GradleDeprecatedPropertyChecker : KotlinGradleProjectChecker { DeprecatedProperty(KotlinJsCompilerType.jsCompilerProperty), DeprecatedProperty("${KotlinJsCompilerType.jsCompilerProperty}.nowarn"), DeprecatedProperty("kotlin.mpp.androidGradlePluginCompatibility.nowarn"), // Since 2.1.0 - DeprecatedProperty("kotlin.experimental.swift-export.enabled"), ) private val errorDeprecatedProperties: List<DeprecatedProperty> = listOf( --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/diagnostics/checkers/SwiftExportModuleNameChecker.kt @@ -18,6 +18,8 @@ import org.jetbrains.kotlin.gradle.plugin.mpp.apple.swiftexport.internal.validat internal object SwiftExportModuleNameChecker : KotlinGradleProjectChecker { override suspend fun KotlinGradleProjectCheckerContext.runChecks(collector: KotlinToolingDiagnosticsCollector) { + if (!kotlinPropertiesProvider.swiftExportEnabled) return + AfterFinaliseDsl.await() val extension = multiplatformExtension?.getExtension<SwiftExportExtension>( --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/mpp/apple/AppleXcodeTasks.kt @@ -30,6 +30,7 @@ import org.jetbrains.kotlin.gradle.tasks.registerTask import org.jetbrains.kotlin.gradle.utils.getFile import org.jetbrains.kotlin.gradle.utils.lowerCamelCaseName import org.jetbrains.kotlin.gradle.utils.mapToFile +import org.jetbrains.kotlin.gradle.swiftexport.ExperimentalSwiftExportDsl import java.io.File import javax.inject.Inject @@ -175,6 +176,7 @@ private fun fireEnvException(frameworkTaskName: String, envBuildType: NativeBuil } } +@ExperimentalSwiftExportDsl internal fun Project.registerEmbedSwiftExportTask( target: KotlinNativeTarget, environment: XcodeEnvironment, --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/mpp/apple/swiftexport/ExperimentalSwiftExportDsl.kt @@ -11,5 +11,4 @@ package org.jetbrains.kotlin.gradle.swiftexport "This API is experimental and can be unstable. 
Add @OptIn(org.jetbrains.kotlin.gradle.swiftexport.ExperimentalSwiftExportDsl::class) annotation.", level = RequiresOptIn.Level.WARNING ) -@Target(AnnotationTarget.CLASS, AnnotationTarget.FUNCTION, AnnotationTarget.PROPERTY) annotation class ExperimentalSwiftExportDsl \ No newline at end of file --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/mpp/apple/swiftexport/SetupSwiftExportDSL.kt @@ -9,17 +9,24 @@ import org.gradle.api.Project import org.jetbrains.kotlin.gradle.dsl.multiplatformExtension import org.jetbrains.kotlin.gradle.dsl.supportedAppleTargets import org.jetbrains.kotlin.gradle.plugin.KotlinProjectSetupAction +import org.jetbrains.kotlin.gradle.plugin.PropertiesProvider.Companion.kotlinPropertiesProvider import org.jetbrains.kotlin.gradle.plugin.addExtension +import org.jetbrains.kotlin.gradle.plugin.diagnostics.KotlinToolingDiagnostics +import org.jetbrains.kotlin.gradle.plugin.diagnostics.reportDiagnosticOncePerBuild import org.jetbrains.kotlin.gradle.plugin.mpp.KotlinNativeTarget import org.jetbrains.kotlin.gradle.plugin.mpp.apple.XcodeEnvironment import org.jetbrains.kotlin.gradle.plugin.mpp.apple.registerEmbedSwiftExportTask +import org.jetbrains.kotlin.gradle.swiftexport.ExperimentalSwiftExportDsl internal object SwiftExportDSLConstants { const val SWIFT_EXPORT_EXTENSION_NAME = "swiftExport" const val TASK_GROUP = "SwiftExport" } +@ExperimentalSwiftExportDsl internal val SetUpSwiftExportAction = KotlinProjectSetupAction { + if (!kotlinPropertiesProvider.swiftExportEnabled) return@KotlinProjectSetupAction + warnAboutExperimentalSwiftExportFeature() val swiftExportExtension = objects.SwiftExportExtension(dependencies) multiplatformExtension.addExtension( @@ -30,6 +37,12 @@ internal val SetUpSwiftExportAction = KotlinProjectSetupAction { registerSwiftExportPipeline(swiftExportExtension) } +private fun Project.warnAboutExperimentalSwiftExportFeature() { + reportDiagnosticOncePerBuild( + KotlinToolingDiagnostics.ExperimentalFeatureWarning("Swift Export", "https://kotl.in/1cr522") + ) +} + private fun Project.registerSwiftExportPipeline( swiftExportExtension: SwiftExportExtension, ) { --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/mpp/apple/swiftexport/SwiftExportExtension.kt @@ -15,7 +15,6 @@ import org.gradle.api.provider.* import org.gradle.api.tasks.Input import org.gradle.api.tasks.Internal import org.gradle.api.tasks.Optional -import org.jetbrains.kotlin.gradle.dsl.KotlinGradlePluginDsl import org.jetbrains.kotlin.gradle.plugin.mpp.AbstractNativeLibrary import org.jetbrains.kotlin.gradle.plugin.mpp.apple.swiftexport.internal.exportedSwiftExportApiConfigurationName import org.jetbrains.kotlin.gradle.plugin.mpp.getCoordinatesFromGroupNameAndVersion @@ -27,14 +26,6 @@ import org.jetbrains.kotlin.gradle.utils.getValue import org.jetbrains.kotlin.gradle.utils.namedDomainObjectSet import javax.inject.Inject -/** - * Represents metadata for a Swift exported module in a project. - * This interface provides configuration options for defining the - * exported module's name and package collapsing rules. - * - * This API is experimental and may change in future versions. - */ -@ExperimentalSwiftExportDsl interface SwiftExportedModuleMetadata { /** * Configure name of the swift export module from this project. @@ -51,12 +42,6 @@ interface SwiftExportedModuleMetadata { val flattenPackage: Property<String> } -/** - * Represents the advanced configuration for exporting Swift code. 
- * - * This API is experimental and may change in future versions. - */ -@ExperimentalSwiftExportDsl interface SwiftExportAdvancedConfiguration { /** * Configure SwiftExportConfig.settings parameters @@ -73,12 +58,6 @@ interface SwiftExportAdvancedConfiguration { val freeCompilerArgs: ListProperty<String> } -/** - * Represents metadata for a specific version of a Swift exported module. - * - * This API is experimental and may change in future versions. - */ -@ExperimentalSwiftExportDsl interface SwiftExportedModuleVersionMetadata : SwiftExportedModuleMetadata { /** * Module version identifier @@ -90,34 +69,8 @@ interface SwiftExportedModuleVersionMetadata : SwiftExportedModuleMetadata { internal fun ObjectFactory.SwiftExportExtension(dependencies: DependencyHandler): SwiftExportExtension = newInstance(SwiftExportExtension::class.java, dependencies) -/** - * An *experimental* plugin DSL extension to configure Swift Export. - * - * Swift Export is a part of the Kotlin toolset designed to generate Swift code from Kotlin source files. - * You can use this tool to create Swift bindings for your Kotlin multiplatform libraries. - * - * This extension is available inside the `kotlin {}` block in your build script: - * - * ```kotlin - * kotlin { - * swiftExport { - * // Your Swift Export configuration - * } - * } - * ``` - * - * Note that this DSL is experimental, and it will likely change in future versions until it is stable. - * - * @since 2.1.0 - */ -/* -We can't mark top level extensions with @ExperimentalSwiftExportDsl because -in buildSrc Gradle always creates accessors for these extensions which cause the opt-in error, -which cannot be suppressed. - -See Gradle issue https://github.com/gradle/gradle/issues/32019 - */ -@KotlinGradlePluginDsl +@ExperimentalSwiftExportDsl +@Suppress("unused", "MemberVisibilityCanBePrivate") // Public API abstract class SwiftExportExtension @Inject constructor( private val objectFactory: ObjectFactory, private val providerFactory: ProviderFactory, @@ -127,7 +80,6 @@ abstract class SwiftExportExtension @Inject constructor( /** * Configure Link task. */ - @ExperimentalSwiftExportDsl fun linkTask(configure: KotlinNativeLink.() -> Unit = {}) { forAllSwiftExportBinaries { linkTaskProvider.configure { linkTask -> @@ -139,7 +91,6 @@ abstract class SwiftExportExtension @Inject constructor( /** * Configure Link task. */ - @ExperimentalSwiftExportDsl fun linkTask(configure: Action<KotlinNativeLink>) = linkTask { configure.execute(this) } @@ -147,7 +98,6 @@ abstract class SwiftExportExtension @Inject constructor( /** * Configure Swift Export Advanced parameters. */ - @ExperimentalSwiftExportDsl fun configure(configure: SwiftExportAdvancedConfiguration.() -> Unit = {}) { advancedConfiguration.configure() } @@ -155,7 +105,6 @@ abstract class SwiftExportExtension @Inject constructor( /** * Configure Swift Export Advanced parameters. */ - @ExperimentalSwiftExportDsl fun configure(configure: Action<SwiftExportAdvancedConfiguration>) = configure { configure.execute(this) } @@ -163,7 +112,6 @@ abstract class SwiftExportExtension @Inject constructor( /** * Configure Swift Export modules export. */ - @ExperimentalSwiftExportDsl fun export(dependency: Any, configure: SwiftExportedModuleMetadata.() -> Unit = {}) { val dependencyProvider: Provider<Dependency> = when (dependency) { is Provider<*> -> dependency.map { dep -> @@ -207,7 +155,6 @@ abstract class SwiftExportExtension @Inject constructor( /** * Configure Swift Export modules export. 
*/ - @ExperimentalSwiftExportDsl fun export(dependency: Any, configure: Action<SwiftExportedModuleMetadata>) = export(dependency) { configure.execute(this) } --- libraries/tools/kotlin-gradle-plugin/src/common/kotlin/org/jetbrains/kotlin/gradle/plugin/mpp/apple/swiftexport/internal/SwiftExportInit.kt @@ -11,6 +11,7 @@ import org.gradle.api.attributes.Category import org.gradle.api.attributes.LibraryElements import org.gradle.api.attributes.Usage import org.jetbrains.kotlin.gradle.internal.KOTLIN_MODULE_GROUP +import org.jetbrains.kotlin.gradle.plugin.PropertiesProvider.Companion.kotlinPropertiesProvider import org.jetbrains.kotlin.gradle.plugin.categoryByName import org.jetbrains.kotlin.gradle.plugin.diagnostics.KotlinToolingDiagnostics import org.jetbrains.kotlin.gradle.plugin.diagnostics.KotlinToolingDiagnosticsCollector @@ -31,8 +32,10 @@ private const val SWIFT_EXPORT_EMBEDDABLE_MODULE = "swift-export-embeddable" internal const val SWIFT_EXPORT_MODULE_NAME_PATTERN = "^[A-Za-z0-9_]+$" internal fun Project.initSwiftExportClasspathConfigurations() { - maybeCreateSwiftExportClasspathDependenciesConfiguration() - SwiftExportClasspathResolvableConfiguration + if (project.kotlinPropertiesProvider.swiftExportEnabled) { + maybeCreateSwiftExportClasspathDependenciesConfiguration() + SwiftExportClasspathResolvableConfiguration + } } private fun Project.maybeCreateSwiftExportClasspathDependenciesConfiguration(): Configuration { --- libraries/tools/kotlin-gradle-plugin/src/functionalTest/kotlin/org/jetbrains/kotlin/gradle/unitTests/SwiftExportUnitTests.kt @@ -607,6 +607,7 @@ private fun swiftExportProject( archs = archs, ) configureRepositoriesForTests() + enableSwiftExport() }, code = { kotlin { @@ -632,6 +633,7 @@ private fun ProjectInternal.setupForSwiftExport( sdk = sdk, archs = archs, ) + enableSwiftExport() applyMultiplatformPlugin() kotlin { multiplatform() --- libraries/tools/kotlin-gradle-plugin/src/functionalTest/kotlin/org/jetbrains/kotlin/gradle/util/buildProject.kt @@ -156,6 +156,11 @@ fun Project.enableDefaultJsDomApiDependency(enabled: Boolean = true) { project.propertiesExtension.set(PropertiesProvider.PropertyNames.KOTLIN_JS_STDLIB_DOM_API_INCLUDED, enabled.toString()) } + +fun Project.enableSwiftExport(enabled: Boolean = true) { + project.propertiesExtension.set(PropertiesProvider.PropertyNames.KOTLIN_SWIFT_EXPORT_ENABLED, enabled.toString()) +} + fun Project.setMultiplatformAndroidSourceSetLayoutVersion(version: Int) { project.propertiesExtension.set(PropertiesProvider.PropertyNames.KOTLIN_MPP_ANDROID_SOURCE_SET_LAYOUT_VERSION, version.toString()) }
kotlin
jetbrains
Kotlin
Kotlin
50,115
5,861
The Kotlin Programming Language.
jetbrains_kotlin
NEW_FEAT
Probably a new feature to enable Swift Export by default
a522a02ba24977881900619ede25e2dffaf696ca
2024-02-29 07:09:12
2dust
Temporarily add compatibility to VLESS
false
4
0
4
--- v2rayN/v2rayN/Handler/ConfigHandler.cs @@ -920,10 +920,6 @@ namespace v2rayN.Handler { return -1; } - if (!Utile.IsNullOrEmpty(profileItem.security) && profileItem.security != "none") - { - profileItem.security = "none"; - } AddServerCommon(config, profileItem, toFile);
v2rayn
2dust
C#
C#
75,986
12,289
A GUI client for Windows, Linux and macOS, support Xray and sing-box and others
2dust_v2rayn
CONFIG_CHANGE
Very small changes
de0b97b3eadae120eda505b45df2de2115dcb6f0
2023-12-06 13:22:18
Adam Hines
fix(types): fix type augmentation and compiler-sfc types w/moduleResolution: bundler (#13107) close #13106
false
4
3
7
--- package.json @@ -18,20 +18,19 @@ ], "exports": { ".": { - "types": "./types/index.d.ts", "import": { "node": "./dist/vue.runtime.mjs", "default": "./dist/vue.runtime.esm.js" }, - "require": "./dist/vue.runtime.common.js" + "require": "./dist/vue.runtime.common.js", + "types": "./types/index.d.ts" }, "./compiler-sfc": { - "types": "./compiler-sfc/index.d.ts", "import": "./compiler-sfc/index.mjs", "require": "./compiler-sfc/index.js" }, "./dist/*": "./dist/*", - "./types/*": ["./types/*.d.ts", "./types/*"], + "./types/*": "./types/*", "./package.json": "./package.json" }, "sideEffects": false,
vue
vuejs
TypeScript
TypeScript
208,427
33,725
This is the repo for Vue 2. For Vue 3, go to https://github.com/vuejs/core
vuejs_vue
PERF_IMPROVEMENT
Code change: package.json exports conditions reordered ("types" placement) for TS module resolution
3a27024e22a58798513baa69637a63cecf34ca58
2023-11-28 05:29:36
Tony Dang
add tailwind build step to alias
false
3
1
4
--- README.md @@ -103,13 +103,11 @@ defp aliases do [ setup: ["deps.get", "ecto.setup", "cmd --cd assets npm install"], ..., - "assets.deploy": ["tailwind default --minify", "cmd --cd assets node build.js --deploy", "phx.digest"] + "assets.deploy": ["cmd --cd assets node build.js --deploy", "phx.digest"] ] end ``` -Note: `tailwind default --minify` is only required in the `assets.deploy` alias if you're using Tailwind. If you are not using Tailwind, you can remove it from the list. - 3. Run the following in your terminal ```bash
live_svelte
woutdp
Elixir
Elixir
1,416
58
Svelte inside Phoenix LiveView with seamless end-to-end reactivity
woutdp_live_svelte
DOC_CHANGE
Obvious
9f55600c89db6c18fc7b300a759e9a16a359209c
2023-04-15 19:54:21
buech
fix: Updated test report to use html tags so we can format CSS
false
55
39
94
--- MatGPT.mlapp Binary files a/MatGPT.mlapp and b/MatGPT.mlapp differ --- contents/styles.css @@ -48,7 +48,7 @@ margin: 0px 0px 0px 0px; } -.test-block-green { +.test-block { display: block; font-size: 1.15em; background-color: #dcf3dc; @@ -57,7 +57,7 @@ margin: 0px 0px 0px 0px; } -.test-block-red { +.test-block-error { display: block; font-size: 1.15em; background-color: #f3dcdc; --- helpers/CodeChecker.m @@ -117,11 +117,8 @@ function runCodeFiles(obj) % RUNCODEFILES - Tries to run all the generated scripts and % captures outputs/figures - % Before tests, make existing figure handles invisible + % Before tests, get lists of current figures figsBefore = findobj('Type','figure'); - for i = 1:length(figsBefore) - figsBefore(i).HandleVisibility = "off"; - end % Also check for open and closed Simulink files addOns = matlab.addons.installedAddons; @@ -147,7 +144,7 @@ function runCodeFiles(obj) end end - % Find newly Simulink models + % Find newly created figures and Simulink models if hasSimulink BDsNew = setdiff(find_system("SearchDepth",0),BDsBefore); BDsNew(BDsNew=="simulink") = []; @@ -169,14 +166,11 @@ function runCodeFiles(obj) end end - % Save new figures as png and reset the old figures as visible - figsAfter = findobj("Type","figure"); - for i = 1:length(figsAfter) - saveas(figsAfter(i),"Figure" + i + ".png"); - end - for i = 1:length(figsBefore) - figsBefore(i).HandleVisibility = "on"; - end + % Save new figures as PNG + figsNew = setdiff(findobj("Type","figure"),figsBefore); + for i = 1:length(figsNew) + saveas(figsNew(i),"Figure" + i + ".png"); + end end function processResults(obj) @@ -191,53 +185,28 @@ function processResults(obj) % Get the error messages from the Results table obj.ErrorMessages = obj.Results.ScriptOutput(obj.Results.IsError); - % Set up the report header - numBlocks = height(obj.Results); - numErrors = length(obj.ErrorMessages); - reportHeader = sprintf(['<div class="test-report">Here are the test results. ' ... - 'There were <b>%d code blocks</b> tested and <b>%d errors</b>.</p>'], ... - numBlocks,numErrors); - - % Handle plurals - if numBlocks == 1 - reportHeader = replace(reportHeader,'were','was'); - reportHeader = replace(reportHeader,'blocks','block'); - end - if numErrors == 1 - reportHeader = replace(reportHeader,'errors','error'); - end - % Loop through results table to make the report - testReport = reportHeader; + testReport = ''; for i = 1:height(obj.Results) % Start the report with the script name - testReport = [testReport sprintf('<div class="test"><p><b>Test:</b> %s</p>', ... - obj.Results.ScriptName(i))]; %#ok<AGROW> + testReport = [testReport sprintf('%s Test: %s %s\n\n',repelem('-',15), ... + obj.Results.ScriptName(i),repelem('-',15))]; %#ok<AGROW> - % Use error style if code produced an error - codeBlockClass = "test-block-green"; - if obj.Results.IsError(i) - codeBlockClass = "test-block-red"; - end - % Add code - testReport = [testReport sprintf('<code class=%s>%%%% Code: \n\n%s\n\n', ... - codeBlockClass,obj.Results.ScriptContents(i))]; %#ok<AGROW> + testReport = [testReport sprintf('%%%% Code: \n%s\n\n', ... + obj.Results.ScriptContents(i))]; %#ok<AGROW> % Add output - testReport = [testReport sprintf('%%%% Command Window Output: \n\n%s', ... + testReport = [testReport sprintf('%%%% Command Window Output: \n%s\n\n', ... 
obj.Results.ScriptOutput(i))]; %#ok<AGROW> - - % Add the closing tags - testReport = [testReport '</code></div>']; %#ok<AGROW> end % Show any image artifacts imageFiles = obj.Artifacts(contains(obj.Artifacts,".png")); folders = split(obj.OutputFolder,filesep); if ~isempty(imageFiles) - testReport = [testReport '<div class="figures"><p><b>Figures</b></p>']; + testReport = [testReport sprintf('%s Figures %s\n\n',repelem('-',30),repelem('-',30))]; for i = 1:length(imageFiles) % Get the relative path of the image. Assumes that the HTML % file is at the same level as the "GeneratedCode" folder @@ -245,21 +214,36 @@ function processResults(obj) relativePath = replace(relativePath,'\','/'); % Assemble the html code for displaying the image - testReport = [testReport sprintf('<div class="figure"><img src="%s" class="ml-figure"/></div>',relativePath)]; %#ok<AGROW> + testReport = [testReport sprintf('<img src="%s" class="ml-figure"/>\n\n',relativePath)]; %#ok<AGROW> end - testReport = [testReport '</div>']; end % List the artifacts - testReport = [testReport '<div class="artifacts"><p><b>Artifacts</b><p>']; - testReport = [testReport sprintf('<code class="code-block">The following artifacts were saved to: %s\n\n',obj.OutputFolder)]; + testReport = [testReport sprintf('%s Artifacts %s\n\n',repelem('-',30),repelem('-',30))]; + testReport = [testReport sprintf('The following artifacts were saved to: %s\n\n',obj.OutputFolder)]; for i = 1:length(obj.Artifacts) - testReport = [testReport sprintf(' %s\n',obj.Artifacts(i))]; %#ok<AGROW> + testReport = [testReport sprintf(' %s\n',obj.Artifacts(i))]; %#ok<AGROW> + end + + % Remove trailing newlines + testReport = strip(testReport,"right"); + + % Set up the report format string + numBlocks = height(obj.Results); + numErrors = length(obj.ErrorMessages); + reportFormat = 'Here are the test results. There were <b>%d code blocks</b> tested and <b>%d errors</b>.\n\n```\n%s\n```'; + + % Handle plurals + if numBlocks == 1 + reportFormat = replace(reportFormat,'were','was'); + reportFormat = replace(reportFormat,'blocks','block'); end - testReport = [testReport '</code></div>']; - + if numErrors == 1 + reportFormat = replace(reportFormat,'errors','error'); + end + % Assign testReport to Report property - obj.Report = testReport; + obj.Report = sprintf(reportFormat,numBlocks,numErrors,testReport); end end end \ No newline at end of file
matgpt
toshiakit
MATLAB
MATLAB
218
33
MATLAB app to access ChatGPT API from OpenAI
toshiakit_matgpt
CODE_IMPROVEMENT
although the commit msg says fix, the code changes seem like refactoring
de962973b8c67127e18a98b8b8a25407dfb5f5d7
2025-01-30 19:37:33
Henri Devieux
Update op-geth to v1.101411.6 (#400)
false
2
2
4
--- geth/Dockerfile @@ -20,8 +20,8 @@ FROM golang:1.22 AS geth WORKDIR /app ENV REPO=https://github.com/ethereum-optimism/op-geth.git -ENV VERSION=v1.101411.6 -ENV COMMIT=50b3422b9ac682a8fa503c4f409339a9bff69717 +ENV VERSION=v1.101411.4 +ENV COMMIT=efa05b1bf5c22a60745e638ad9d4adadfe3daba9 RUN git clone $REPO --branch $VERSION --single-branch . && \ git switch -c branch-$VERSION && \ bash -c '[ "$(git rev-parse HEAD)" = "$COMMIT" ]'
node
base
Shell
Shell
68,555
2,658
Everything required to run your own Base node
base_node
CODE_IMPROVEMENT
Code change: op-geth version and commit pin updated in the Dockerfile
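The Dockerfile pattern in this record pins a release by tag and then asserts the checked-out commit hash, so a silently retagged release fails the build. A small TypeScript (Node) sketch of the same pin-and-verify step; the helper itself is illustrative:

import { execSync } from 'node:child_process'

function cloneVerified(repo: string, tag: string, expectedCommit: string, dir: string): void {
  // clone only the release tag, as the Dockerfile does
  execSync(`git clone ${repo} --branch ${tag} --single-branch ${dir}`)
  const head = execSync('git rev-parse HEAD', { cwd: dir }).toString().trim()
  if (head !== expectedCommit) {
    throw new Error(`tag ${tag} resolves to ${head}, expected ${expectedCommit}`)
  }
}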
f477dbcfdedabd991c9829f14a926d4fe7677983
2025-03-27 22:10:35
Pieter De Baets
Rename ReactBridge to BridgeSoLoader (#50315) Summary: Pull Request resolved: https://github.com/facebook/react-native/pull/50315 Make it clearer that the only purpose of this class is to manage the soloading of the core bridge so file. Changelog: [Internal] Reviewed By: Abbondanzo Differential Revision: D71965759 fbshipit-source-id: d333f3e768a2359b082d4df279548cbd4b58ec76
false
59
76
135
--- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/CatalystInstanceImpl.java @@ -50,7 +50,7 @@ import java.util.concurrent.atomic.AtomicInteger; @LegacyArchitecture public class CatalystInstanceImpl implements CatalystInstance { static { - BridgeSoLoader.staticInit(); + ReactBridge.staticInit(); LegacyArchitectureLogger.assertWhenLegacyArchitectureMinifyingEnabled( "CatalystInstanceImpl", LegacyArchitectureLogLevel.WARNING); } --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/CxxModuleWrapperBase.kt @@ -48,7 +48,7 @@ protected constructor( private companion object { init { - BridgeSoLoader.staticInit() + ReactBridge.staticInit() LegacyArchitectureLogger.assertWhenLegacyArchitectureMinifyingEnabled( "CxxModuleWrapperBase", LegacyArchitectureLogLevel.WARNING) } --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/Inspector.java @@ -18,7 +18,7 @@ import java.util.List; @DoNotStrip public class Inspector { static { - BridgeSoLoader.staticInit(); + ReactBridge.staticInit(); } private final HybridData mHybridData; --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/NativeArray.kt @@ -19,7 +19,7 @@ public abstract class NativeArray protected constructor() : private companion object { init { - BridgeSoLoader.staticInit() + ReactBridge.staticInit() } } } --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/NativeMap.kt @@ -17,7 +17,7 @@ public abstract class NativeMap : HybridClassBase() { private companion object { init { - BridgeSoLoader.staticInit() + ReactBridge.staticInit() } } } --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/BridgeSoLoader.kt @@ -7,6 +7,7 @@ package com.facebook.react.bridge +import android.os.SystemClock import com.facebook.react.common.annotations.internal.LegacyArchitecture import com.facebook.react.common.annotations.internal.LegacyArchitectureLogger import com.facebook.soloader.SoLoader @@ -14,28 +15,41 @@ import com.facebook.systrace.Systrace import com.facebook.systrace.Systrace.TRACE_TAG_REACT_JAVA_BRIDGE @LegacyArchitecture -internal object BridgeSoLoader { +internal object ReactBridge { init { - LegacyArchitectureLogger.assertWhenLegacyArchitectureMinifyingEnabled("BridgeSoLoader") + LegacyArchitectureLogger.assertWhenLegacyArchitectureMinifyingEnabled("ReactBridge") } + @Volatile private var _loadStartTime: Long = 0 + @Volatile private var _loadEndTime: Long = 0 + @Volatile private var _didInit: Boolean = false + @JvmStatic @Synchronized fun staticInit() { - if (initialized) { + if (_didInit) { return } - Systrace.beginSection(TRACE_TAG_REACT_JAVA_BRIDGE, "BridgeSoLoader") + _loadStartTime = SystemClock.uptimeMillis() + Systrace.beginSection( + TRACE_TAG_REACT_JAVA_BRIDGE, "ReactBridge.staticInit::load:reactnativejni") ReactMarker.logMarker(ReactMarkerConstants.LOAD_REACT_NATIVE_SO_FILE_START) SoLoader.loadLibrary("reactnativejni") ReactMarker.logMarker(ReactMarkerConstants.LOAD_REACT_NATIVE_SO_FILE_END) Systrace.endSection(TRACE_TAG_REACT_JAVA_BRIDGE) - initialized = true + _loadEndTime = SystemClock.uptimeMillis() + _didInit = true } - @get:JvmStatic - @get:JvmName("isInitialized") - @Volatile - var initialized: Boolean = false - private set + @JvmStatic + val loadStartTime: Long + get() = _loadStartTime + + @JvmStatic + val loadEndTime: Long + get() = _loadEndTime + + @JvmStatic + val initialized: Boolean + @JvmName("isInitialized") get() = _didInit } --- 
packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/ReactInstanceManagerInspectorTarget.java @@ -70,6 +70,6 @@ public class ReactInstanceManagerInspectorTarget implements AutoCloseable { static { LegacyArchitectureLogger.assertWhenLegacyArchitectureMinifyingEnabled( "ReactInstanceManagerInspectorTarget", LegacyArchitectureLogLevel.WARNING); - BridgeSoLoader.staticInit(); + ReactBridge.staticInit(); } } --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/bridge/ReactMarker.java @@ -211,7 +211,7 @@ public class ReactMarker { now = SystemClock.uptimeMillis(); } - if (BridgeSoLoader.isInitialized()) { + if (ReactBridge.isInitialized()) { // First send the current marker nativeLogMarker(name.name(), now); --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/defaults/DefaultSoLoader.kt @@ -9,16 +9,18 @@ package com.facebook.react.defaults import com.facebook.soloader.SoLoader -internal object DefaultSoLoader { - @Synchronized - @JvmStatic - fun maybeLoadSoLibrary() { - SoLoader.loadLibrary("react_newarchdefaults") - try { - SoLoader.loadLibrary("appmodules") - } catch (e: UnsatisfiedLinkError) { - // ignore: DefaultTurboModuleManagerDelegate is still used in apps that don't have - // appmodules.so +internal class DefaultSoLoader { + companion object { + @Synchronized + @JvmStatic + fun maybeLoadSoLibrary() { + SoLoader.loadLibrary("react_newarchdefaults") + try { + SoLoader.loadLibrary("appmodules") + } catch (e: UnsatisfiedLinkError) { + // ignore: DefaultTurboModuleManagerDelegate is still used in apps that don't have + // appmodules.so + } } } } --- packages/react-native/ReactAndroid/src/main/java/com/facebook/react/fabric/FabricSoLoader.kt @@ -20,7 +20,8 @@ public object FabricSoLoader { if (didInit) { return } - Systrace.beginSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE, "FabricSoLoader") + Systrace.beginSection( + Systrace.TRACE_TAG_REACT_JAVA_BRIDGE, "FabricSoLoader.staticInit::load:fabricjni") ReactMarker.logMarker(ReactMarkerConstants.LOAD_REACT_NATIVE_SO_FILE_START) SoLoader.loadLibrary("fabricjni") ReactMarker.logMarker(ReactMarkerConstants.LOAD_REACT_NATIVE_SO_FILE_END)
react-native
facebook
C++
C++
120,863
24,536
A framework for building native applications using React
facebook_react-native
BUG_FIX
Code change: exception handling
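The ReactBridge diff above centers on a synchronized, run-once initializer that records library load start/end times. A minimal Python sketch of that pattern, assuming a stand-in `loader` callable in place of `SoLoader.loadLibrary("reactnativejni")`; all names here are illustrative, not from the repo:

```python
import threading
import time

_lock = threading.Lock()
_did_init = False
load_start_time = 0.0
load_end_time = 0.0

def static_init(loader):
    """Run `loader` exactly once, recording monotonic start/end times."""
    global _did_init, load_start_time, load_end_time
    with _lock:                      # mirrors @Synchronized on staticInit()
        if _did_init:                # repeat calls return immediately
            return
        load_start_time = time.monotonic()
        loader()                     # stands in for SoLoader.loadLibrary(...)
        load_end_time = time.monotonic()
        _did_init = True             # flipped last, once the load succeeded

static_init(lambda: time.sleep(0.01))   # illustrative "library load"
print(load_end_time - load_start_time >= 0.01)  # True
```

Setting the flag only after the load completes means a failed load leaves the object uninitialized, so a later call retries rather than reporting success.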
221feb01c813ed3115fa301a09d2070ec973fc58
2023-11-01 02:58:01
Vinicius Souza
Update main.yml add tests
false
17
0
17
--- .github/workflows/main.yml @@ -16,20 +16,3 @@ jobs: source: README.md output: html/index.html github-corners: https://github.com/jaywcjlove/markdown-to-html-cli - - - - name: Markdown-HTML - # You may pin to the exact commit or the version. - # uses: ZacJW/markdown-html-action@581e6df07c787a1eb980cb2fd6c0c82ace9c9c82 - uses: ZacJW/[email protected] - with: - input_files: "README.md" - output_files: true - - - name: Converts Markdown to HTML - uses: jaywcjlove/markdown-to-html-cli@main - with: - source: README-zh.md - output: coverage/action.html - github-corners: https://github.com/jaywcjlove/markdown-to-html-cli - favicon: data:image/svg+xml,<svg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 100 100%22><text y=%22.9em%22 font-size=%2290%22>🌐</text></svg>
awesome-ios
vsouza
Swift
Swift
48,363
6,877
A curated list of awesome iOS ecosystem, including Objective-C and Swift Projects
vsouza_awesome-ios
CONFIG_CHANGE
Obvious
2d7d3c9c5cde3dc77d5e2061539e6bacd70ac774
2025-03-05 20:48:35
Niklas Mischkulnig
Turbopack: don't use HashMap in manifests (#76833) Should make the output more deterministic. We could change these to instead use our own `SliceMap`, but these aren't stored anywhere.
false
18
19
37
--- crates/next-core/src/next_manifests/mod.rs @@ -3,6 +3,7 @@ pub mod client_reference_manifest; use anyhow::{Context, Result}; +use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; use turbo_rcstr::RcStr; use turbo_tasks::{ @@ -21,7 +22,7 @@ use crate::next_config::{CrossOriginConfig, Rewrites, RouteHas}; #[derive(Serialize, Default, Debug)] pub struct PagesManifest { #[serde(flatten)] - pub pages: FxIndexMap<RcStr, RcStr>, + pub pages: FxHashMap<RcStr, RcStr>, } #[derive(Debug, Default)] @@ -232,16 +233,16 @@ pub enum Regions { #[derive(Serialize, Default, Debug)] pub struct MiddlewaresManifestV2 { pub sorted_middleware: Vec<RcStr>, - pub middleware: FxIndexMap<RcStr, EdgeFunctionDefinition>, + pub middleware: FxHashMap<RcStr, EdgeFunctionDefinition>, pub instrumentation: Option<InstrumentationDefinition>, - pub functions: FxIndexMap<RcStr, EdgeFunctionDefinition>, + pub functions: FxHashMap<RcStr, EdgeFunctionDefinition>, } #[derive(Serialize, Default, Debug)] #[serde(rename_all = "camelCase")] pub struct ReactLoadableManifest { #[serde(flatten)] - pub manifest: FxIndexMap<RcStr, ReactLoadableManifestEntry>, + pub manifest: FxHashMap<RcStr, ReactLoadableManifestEntry>, } #[derive(Serialize, Default, Debug)] @@ -254,8 +255,8 @@ pub struct ReactLoadableManifestEntry { #[derive(Serialize, Default, Debug)] #[serde(rename_all = "camelCase")] pub struct NextFontManifest { - pub pages: FxIndexMap<RcStr, Vec<RcStr>>, - pub app: FxIndexMap<RcStr, Vec<RcStr>>, + pub pages: FxHashMap<RcStr, Vec<RcStr>>, + pub app: FxHashMap<RcStr, Vec<RcStr>>, pub app_using_size_adjust: bool, pub pages_using_size_adjust: bool, } @@ -284,9 +285,9 @@ pub struct LoadableManifest { #[serde(rename_all = "camelCase")] pub struct ServerReferenceManifest<'a> { /// A map from hashed action name to the runtime module we that exports it. - pub node: FxIndexMap<&'a str, ActionManifestEntry<'a>>, + pub node: FxHashMap<&'a str, ActionManifestEntry<'a>>, /// A map from hashed action name to the runtime module we that exports it. - pub edge: FxIndexMap<&'a str, ActionManifestEntry<'a>>, + pub edge: FxHashMap<&'a str, ActionManifestEntry<'a>>, } #[derive(Serialize, Default, Debug)] @@ -294,9 +295,9 @@ pub struct ServerReferenceManifest<'a> { pub struct ActionManifestEntry<'a> { /// A mapping from the page that uses the server action to the runtime /// module that exports it. - pub workers: FxIndexMap<&'a str, ActionManifestWorkerEntry<'a>>, + pub workers: FxHashMap<&'a str, ActionManifestWorkerEntry<'a>>, - pub layer: FxIndexMap<&'a str, ActionLayer>, + pub layer: FxHashMap<&'a str, ActionLayer>, } #[derive(Serialize, Debug)] @@ -344,22 +345,22 @@ pub struct ClientReferenceManifest { pub client_modules: ManifestNode, /// Mapping of client module ID to corresponding SSR module ID and required /// SSR chunks. - pub ssr_module_mapping: FxIndexMap<ModuleId, ManifestNode>, + pub ssr_module_mapping: FxHashMap<ModuleId, ManifestNode>, /// Same as `ssr_module_mapping`, but for Edge SSR. #[serde(rename = "edgeSSRModuleMapping")] - pub edge_ssr_module_mapping: FxIndexMap<ModuleId, ManifestNode>, + pub edge_ssr_module_mapping: FxHashMap<ModuleId, ManifestNode>, /// Mapping of client module ID to corresponding RSC module ID and required /// RSC chunks. - pub rsc_module_mapping: FxIndexMap<ModuleId, ManifestNode>, + pub rsc_module_mapping: FxHashMap<ModuleId, ManifestNode>, /// Same as `rsc_module_mapping`, but for Edge RSC. 
#[serde(rename = "edgeRscModuleMapping")] - pub edge_rsc_module_mapping: FxIndexMap<ModuleId, ManifestNode>, + pub edge_rsc_module_mapping: FxHashMap<ModuleId, ManifestNode>, /// Mapping of server component path to required CSS client chunks. #[serde(rename = "entryCSSFiles")] - pub entry_css_files: FxIndexMap<RcStr, FxIndexSet<CssResource>>, + pub entry_css_files: FxHashMap<RcStr, FxIndexSet<CssResource>>, /// Mapping of server component path to required JS client chunks. #[serde(rename = "entryJSFiles")] - pub entry_js_files: FxIndexMap<RcStr, FxIndexSet<RcStr>>, + pub entry_js_files: FxHashMap<RcStr, FxIndexSet<RcStr>>, } #[derive(Serialize, Debug, Clone, Eq, Hash, PartialEq)] @@ -382,7 +383,7 @@ pub struct ModuleLoading { pub struct ManifestNode { /// Mapping of export name to manifest node entry. #[serde(flatten)] - pub module_exports: FxIndexMap<RcStr, ManifestNodeEntry>, + pub module_exports: FxHashMap<RcStr, ManifestNodeEntry>, } #[derive(Serialize, Debug, Clone)] @@ -489,7 +490,7 @@ pub struct ClientBuildManifest<'a> { pub sorted_pages: &'a [RcStr], #[serde(flatten)] - pub pages: FxIndexMap<RcStr, Vec<&'a str>>, + pub pages: FxHashMap<RcStr, Vec<&'a str>>, } #[cfg(test)]
next.js
vercel
JavaScript
JavaScript
129,891
27,821
The React Framework
vercel_next.js
PERF_IMPROVEMENT
replaces FxIndexMap with FxHashMap in Turbopack manifests for more deterministic output
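For context on the determinism claim in the message: `rustc_hash::FxHashMap` uses a fixed, non-randomized hasher, so its iteration (and hence serialization) order is a function of the key set alone, whereas an insertion-ordered `FxIndexMap` reflects whatever order a parallel build happened to produce the entries in. A rough Python analogue, using `zlib.crc32` as a stand-in fixed hash function:

```python
import zlib

def fx_order(keys):
    # iteration order determined only by the keys, like a fixed-hasher map
    return sorted(keys, key=lambda k: zlib.crc32(k.encode()))

run_a = ["/about", "/index", "/blog"]    # entries produced in one order...
run_b = ["/blog", "/about", "/index"]    # ...and in another on a later build

assert fx_order(run_a) == fx_order(run_b)  # hash order is build-independent
assert run_a != run_b                      # insertion order is not
print(fx_order(run_a))
```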
93f464a1455831695858d1f2298727a33bc9ab27
null
PatrickJS
feat(change_detection.ts): export PipeFactory Closes #2245
false
1
1
0
--- change_detection.ts @@ -47,7 +47,7 @@ export {DynamicChangeDetector} from './src/change_detection/dynamic_change_detec export {ChangeDetectorRef} from './src/change_detection/change_detector_ref'; export {PipeRegistry} from './src/change_detection/pipes/pipe_registry'; export {uninitialized} from './src/change_detection/change_detection_util'; -export {WrappedValue, Pipe} from './src/change_detection/pipes/pipe'; +export {WrappedValue, Pipe, PipeFactory} from './src/change_detection/pipes/pipe'; export {NullPipe, NullPipeFactory} from './src/change_detection/pipes/null_pipe'; export { defaultPipes,
angular_angular.json
null
null
null
null
null
null
angular_angular.json
NEW_FEAT
5, labeled a new feature since the commit message uses the feat prefix
c08b925d282e35ea7bd9a8f4c29de2ece0debb49
2023-03-23 19:04:43
Michele Bologna
fix(uninstall): abort uninstall if unable to change shell (#10357)
false
12
11
23
--- tools/uninstall.sh @@ -1,15 +1,3 @@ -if hash chsh >/dev/null 2>&1 && [ -f ~/.shell.pre-oh-my-zsh ]; then - old_shell=$(cat ~/.shell.pre-oh-my-zsh) - echo "Switching your shell back to '$old_shell':" - if chsh -s "$old_shell"; then - rm -f ~/.shell.pre-oh-my-zsh - else - echo "Could not change default shell. Change it manually by running chsh" - echo "or editing the /etc/passwd file." - exit - fi -fi - read -r -p "Are you sure you want to remove Oh My Zsh? [y/N] " confirmation if [ "$confirmation" != y ] && [ "$confirmation" != Y ]; then echo "Uninstall cancelled" @@ -37,5 +25,16 @@ else echo "No original zsh config found" fi +if hash chsh >/dev/null 2>&1 && [ -f ~/.shell.pre-oh-my-zsh ]; then + old_shell=$(cat ~/.shell.pre-oh-my-zsh) + echo "Switching your shell back to '$old_shell':" + if chsh -s "$old_shell"; then + rm -f ~/.shell.pre-oh-my-zsh + else + echo "Could not change default shell. Change it manually by running chsh" + echo "or editing the /etc/passwd file." + fi +fi + echo "Thanks for trying out Oh My Zsh. It's been uninstalled." echo "Don't forget to restart your terminal!"
ohmyzsh
ohmyzsh
Shell
Shell
176,465
26,013
🙃 A delightful community-driven (with 2,400+ contributors) framework for managing your zsh configuration. Includes 300+ optional plugins (rails, git, macOS, hub, docker, homebrew, node, php, python, etc), 140+ themes to spice up your morning, and an auto-update tool that makes it easy to keep up with the latest updates from the community.
ohmyzsh_ohmyzsh
BUG_FIX
obvious
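The reordering in the diff means the shell switch runs after the irreversible cleanup and can no longer abort it; on failure the script now only prints guidance. A hedged Python sketch of the same ordering, with the cleanup steps stubbed out (both stub names are invented for illustration):

```python
import shutil
import subprocess

def remove_oh_my_zsh():      # stub for the irreversible removal steps
    pass

def restore_zshrc_backup():  # stub for restoring the original zsh config
    pass

def uninstall(old_shell):
    remove_oh_my_zsh()
    restore_zshrc_backup()
    # attempt the shell switch last, and degrade gracefully on failure
    if shutil.which("chsh"):
        try:
            subprocess.run(["chsh", "-s", old_shell], check=True)
        except subprocess.CalledProcessError:
            print("Could not change default shell. Change it manually "
                  "by running chsh or editing /etc/passwd.")
```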
b23992033ce67ace887044140efb2df70aa1766d
2024-01-31 00:29:47
Jordan Harband
[eslint config] [patch] `jsx-a11y/no-redundant-roles`: allow `nav` to have `navigation` role
false
3
1
4
--- packages/eslint-config-airbnb/rules/react-a11y.js @@ -201,9 +201,7 @@ module.exports = { // ensure HTML elements do not specify redundant ARIA roles // https://github.com/jsx-eslint/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-redundant-roles.md - 'jsx-a11y/no-redundant-roles': ['error', { - nav: ['navigation'], - }], + 'jsx-a11y/no-redundant-roles': 'error', // Enforce that DOM elements without semantic behavior not have interaction handlers // https://github.com/jsx-eslint/eslint-plugin-jsx-a11y/blob/master/docs/rules/no-static-element-interactions.md
javascript
airbnb
JavaScript
JavaScript
146,197
26,671
JavaScript Style Guide
airbnb_javascript
CODE_IMPROVEMENT
adjusts the jsx-a11y/no-redundant-roles rule configuration in the React a11y ruleset
b67ea0ff9d33e6d182551aceb5573c82eac4481a
2025-02-08 00:45:26
Thomas Guillem
es_out: drain from ES_OUT_DRAIN
false
17
0
17
--- src/input/es_out.c @@ -230,7 +230,6 @@ typedef struct vlc_tick_t i_buffering_extra_initial; vlc_tick_t i_buffering_extra_stream; vlc_tick_t i_buffering_extra_system; - bool b_draining; /* Record */ sout_stream_t *p_sout_record; @@ -263,7 +262,6 @@ static void EsOutChangePosition(es_out_sys_t *out, bool b_flush, es_out_id_t *p_ static void EsOutProgramChangePause(es_out_sys_t *out, bool b_paused, vlc_tick_t i_date); static void EsOutProgramsChangeRate(es_out_sys_t *out); static void EsOutDecodersStopBuffering(es_out_sys_t *out, bool b_forced); -static void EsOutDrainDecoder(es_out_sys_t *p_sys, es_out_id_t *es, bool wait); static void EsOutGlobalMeta(es_out_sys_t *p_out, const vlc_meta_t *p_meta); static void EsOutMeta(es_out_sys_t *p_out, const vlc_meta_t *p_meta, const vlc_meta_t *p_progmeta); static int EsOutEsUpdateFmt(es_out_id_t *es, const es_format_t *fmt); @@ -726,15 +724,6 @@ static int EsOutDrain(es_out_sys_t *p_sys) return VLC_EGENERIC; } - if( !p_sys->b_draining ) - { - es_out_id_t *id; - foreach_es_then_es_slaves(id) - if (id->p_dec != NULL) - EsOutDrainDecoder(p_sys, id, false); - p_sys->b_draining = true; - } - return VLC_SUCCESS; } @@ -966,7 +955,6 @@ static void EsOutChangePosition(es_out_sys_t *p_sys, bool b_flush, p_sys->i_buffering_extra_initial = 0; p_sys->i_buffering_extra_stream = 0; p_sys->i_buffering_extra_system = 0; - p_sys->b_draining = false; p_sys->i_preroll_end = -1; p_sys->i_prev_stream_level = -1; } @@ -2948,7 +2936,6 @@ static int EsOutSend(es_out_t *out, es_out_id_t *es, block_t *p_block ) } vlc_mutex_lock( &p_sys->lock ); - assert( !p_sys->b_draining ); /* Shift all slaves timestamps with the main source normal time. This will * allow to synchronize 2 demuxers with different time bases. Remove the @@ -3991,12 +3978,9 @@ static int EsOutVaPrivControlLocked(es_out_sys_t *p_sys, input_source_t *source, { EsOutDrain(p_sys); es_out_id_t *id; - if( p_sys->b_draining ) - return VLC_SUCCESS; foreach_es_then_es_slaves(id) if (id->p_dec != NULL) EsOutDrainDecoder(p_sys, id, false); - p_sys->b_draining = true; return VLC_SUCCESS; } case ES_OUT_PRIV_SET_VBI_PAGE: @@ -4128,7 +4112,6 @@ input_EsOutNew(input_thread_t *p_input, input_source_t *main_source, float rate, p_sys->rate = rate; p_sys->b_buffering = true; - p_sys->b_draining = false; p_sys->i_preroll_end = -1; p_sys->i_prev_stream_level = -1;
vlc
null
C
C
null
null
Video player
_vlc
CODE_IMPROVEMENT
simplify decoder draining logic
21240ed96251ed0085e9c3e4b96fe38ae29b12fb
2022-04-14 08:54:36
fatedier
some improvements
false
9
3
12
--- client/service.go @@ -357,13 +357,8 @@ func (svr *Service) ReloadConf(pxyCfgs map[string]config.ProxyConf, visitorCfgs svr.cfgMu.Unlock() svr.ctlMu.RLock() - ctl := svr.ctl - svr.ctlMu.RUnlock() - - if ctl != nil { - return svr.ctl.ReloadConf(pxyCfgs, visitorCfgs) - } - return nil + defer svr.ctlMu.RUnlock() + return svr.ctl.ReloadConf(pxyCfgs, visitorCfgs) } func (svr *Service) Close() { --- cmd/frpc/sub/root.go @@ -115,7 +115,6 @@ var rootCmd = &cobra.Command{ return nil } wg.Add(1) - time.Sleep(time.Millisecond) go func() { defer wg.Done() err := runClient(path) --- pkg/util/vhost/http.go @@ -89,7 +89,7 @@ func NewHTTPReverseProxy(option HTTPReverseProxyOptions, vhostRouter *Routers) * BufferPool: newWrapPool(), ErrorLog: log.New(newWrapLogger(), "", 0), ErrorHandler: func(rw http.ResponseWriter, req *http.Request, err error) { - frpLog.Warn("do http proxy request [host: %s] error: %v", req.Host, err) + frpLog.Warn("do http proxy request error: %v", err) rw.WriteHeader(http.StatusNotFound) rw.Write(getNotFoundPageContent()) },
frp
fatedier
Go
Go
91,116
13,769
A fast reverse proxy to help you expose a local server behind a NAT or firewall to the internet.
fatedier_frp
BUG_FIX
this commit fixes/polishes an earlier feature
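The `ReloadConf` hunk swaps a copy-the-field-then-unlock sequence for `defer svr.ctlMu.RUnlock()`, so the read lock stays held across the call instead of releasing before it. A loose Python analogue of that scoping, assuming a hypothetical `Service` and using a plain mutex (the stdlib has no reader/writer lock):

```python
import threading

class Service:
    def __init__(self, ctl):
        self._ctl_mu = threading.Lock()   # stand-in for sync.RWMutex
        self._ctl = ctl

    def reload_conf(self, pxy_cfgs, visitor_cfgs):
        # `with` releases on return or exception, like defer RUnlock()
        with self._ctl_mu:
            return self._ctl.reload_conf(pxy_cfgs, visitor_cfgs)

class _Ctl:                               # illustrative collaborator
    def reload_conf(self, p, v):
        return "ok"

print(Service(_Ctl()).reload_conf({}, {}))  # "ok"
```

Holding the lock for the duration closes the window in which the field could be swapped between the read and the call.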
f8031caa2fe608bfb5e1a640c39aed2568c7fa52
2022-10-10 13:08:38
Yangshun Tay
[ui][slide out] add className prop
false
6
1
7
--- packages/ui/src/SlideOut/SlideOut.tsx @@ -8,7 +8,6 @@ export type SlideOutEnterFrom = 'end' | 'start'; type Props = Readonly<{ children: React.ReactNode; - className: string; enterFrom?: SlideOutEnterFrom; isShown?: boolean; onClose?: () => void; @@ -41,7 +40,6 @@ const enterFromClasses: Record< export default function SlideOut({ children, - className, enterFrom = 'end', isShown = false, size, @@ -52,10 +50,7 @@ export default function SlideOut({ return ( <Transition.Root as={Fragment} show={isShown}> - <Dialog - as="div" - className={clsx('relative z-40', className)} - onClose={() => onClose?.()}> + <Dialog as="div" className="relative z-40" onClose={() => onClose?.()}> <Transition.Child as={Fragment} enter="transition-opacity ease-linear duration-300"
tech-interview-handbook
yangshun
TypeScript
TypeScript
122,353
15,039
💯 Curated coding interview preparation materials for busy software engineers
yangshun_tech-interview-handbook
NEW_FEAT
seems like a new feature to the ui
92817d4f0e0986ad6b3fb34735b3d988d2455e1f
2024-03-12 07:11:48
macro
Update PmsProductAttributeParam.java
false
1
1
2
--- mall-admin/src/main/java/com/macro/mall/dto/PmsProductAttributeParam.java @@ -35,7 +35,7 @@ public class PmsProductAttributeParam { @ApiModelProperty("检索类型;0->不需要进行检索;1->关键字检索;2->范围检索") @FlagValidator({"0","1","2"}) private Integer searchType; - @ApiModelProperty("相同属性商品是否关联;0->不关联;1->关联") + @ApiModelProperty("相同属性产品是否关联;0->不关联;1->关联") @FlagValidator({"0","1"}) private Integer relatedStatus; @ApiModelProperty("是否支持手动新增;0->不支持;1->支持")
mall
macrozheng
Java
Java
79,319
29,052
The mall project is a complete e-commerce system, comprising a storefront and a back-office management system, built on Spring Boot + MyBatis and deployed with Docker containers. The storefront includes home portal, product recommendation, product search, product display, shopping cart, order flow, member center, customer service, and help center modules. The back office includes product management, order management, member management, promotion management, operations management, content management, statistical reports, finance management, permission management, and settings modules.
macrozheng_mall
CONFIG_CHANGE
Very small changes
e3926720118f76e4b1352a75e332026e49154f34
null
Mohsen Azimi
Remove extra out argument sent to Parcel and reduce logging in output (#22) Use log-level=1 for parcel.
false
1
1
0
--- Makefile @@ -54,7 +54,7 @@ msg.pb.d.ts: msg.pb.js node_modules dist/main.js: $(TS_FILES) node_modules ./node_modules/.bin/tsc --noEmit # Only for type checking. - ./node_modules/.bin/parcel build --out-dir=dist/ --no-minify main.ts + ./node_modules/.bin/parcel build --out-dir=dist/ --log-level=1 --no-minify main.ts node_modules: yarn
denoland_deno.json
null
null
null
null
null
null
denoland_deno.json
PERF_IMPROVEMENT
4, reducing logging will probably improve performance
c32339509336b1ba0ddb08eee369c82f85fe76f1
2025-02-06 21:14:29
Hadley Wickham
Make claude tests more robust (#311)
false
10
4
14
--- R/provider-claude.R @@ -14,7 +14,7 @@ NULL #' [developer account](https://console.anthropic.com/) #' #' ## Authentication -#' +#' #' To authenticate, we recommend saving your #' [API key](https://console.anthropic.com/account/keys) to #' the `ANTHROPIC_API_KEY` env var in your `.Renviron` @@ -87,12 +87,7 @@ method(chat_request, ProviderClaude) <- function(provider, # <https://docs.anthropic.com/en/api/getting-started#authentication> req <- req_headers_redacted(req, `x-api-key` = provider@api_key) # <https://docs.anthropic.com/en/api/rate-limits> - req <- req_retry( - req, - # <https://docs.anthropic.com/en/api/errors#http-errors> - is_transient = function(resp) resp_status(resp) %in% c(429, 503, 529), - max_tries = 2 - ) + req <- req_retry(req, max_tries = 2) # <https://docs.anthropic.com/en/api/errors> req <- req_error(req, body = function(resp) { --- tests/testthat/test-provider-claude.R @@ -27,11 +27,10 @@ test_that("respects turns interface", { test_that("all tool variations work", { chat_fun <- chat_claude - retry_test(test_tools_simple(chat_fun)) + test_tools_simple(chat_fun) test_tools_async(chat_fun) test_tools_parallel(chat_fun) - # Claude sometimes returns an empty string - retry_test(test_tools_sequential(chat_fun, total_calls = 6)) + test_tools_sequential(chat_fun, total_calls = 6) }) test_that("can extract data", {
ellmer
tidyverse
R
R
401
55
Call LLM APIs from R
tidyverse_ellmer
NEW_FEAT
Obvious
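The dropped `is_transient` callback had encoded Anthropic's documented transient statuses (429, 503, 529). For reference, a hedged Python sketch of the same retry rule using `requests` plus urllib3's `Retry` (argument names per urllib3 ≥ 1.26); the endpoint and method are illustrative:

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry = Retry(
    total=2,                            # small retry budget, cf. max_tries = 2
    status_forcelist=[429, 503, 529],   # statuses the removed callback retried
    allowed_methods=["POST"],           # chat completions are POST requests
    backoff_factor=0.5,
)
session = requests.Session()
session.mount("https://", HTTPAdapter(max_retries=retry))
# session.post("https://api.anthropic.com/v1/messages", json=...) would now
# be retried automatically on 429/503/529 responses.
```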
15404ecab4b5cca0d02f5d8e357123e5bb3d26d6
2024-07-30 09:05:39
fufesou
fix: clipboard, windows, controlled side, formats (#8885) * fix: clipboard, windows, controlled side, formats Signed-off-by: fufesou <[email protected]> * Clipboard, reuse ipc conn and send_raw() Signed-off-by: fufesou <[email protected]> * Clipboard, merge content buffer Signed-off-by: fufesou <[email protected]> * refact: clipboard service, ipc stream Signed-off-by: fufesou <[email protected]> --------- Signed-off-by: fufesou <[email protected]>
false
216
27
243
--- src/clipboard.rs @@ -16,11 +16,10 @@ lazy_static::lazy_static! { static ref ARBOARD_MTX: Arc<Mutex<()>> = Arc::new(Mutex::new(())); // cache the clipboard msg static ref LAST_MULTI_CLIPBOARDS: Arc<Mutex<MultiClipboards>> = Arc::new(Mutex::new(MultiClipboards::new())); - // For updating in server and getting content in cm. - // Clipboard on Linux is "server--clients" mode. + // Clipboard on Linux is "server--clients" mode. // The clipboard content is owned by the server and passed to the clients when requested. // Plain text is the only exception, it does not require the server to be present. - static ref CLIPBOARD_CTX: Arc<Mutex<Option<ClipboardContext>>> = Arc::new(Mutex::new(None)); + static ref CLIPBOARD_UPDATE_CTX: Arc<Mutex<Option<ClipboardContext>>> = Arc::new(Mutex::new(None)); } const SUPPORTED_FORMATS: &[ClipboardFormat] = &[ @@ -160,34 +159,12 @@ pub fn check_clipboard( None } -#[cfg(target_os = "windows")] -pub fn check_clipboard_cm() -> ResultType<MultiClipboards> { - let mut ctx = CLIPBOARD_CTX.lock().unwrap(); - if ctx.is_none() { - match ClipboardContext::new() { - Ok(x) => { - *ctx = Some(x); - } - Err(e) => { - hbb_common::bail!("Failed to create clipboard context: {}", e); - } - } - } - if let Some(ctx) = ctx.as_mut() { - let content = ctx.get(ClipboardSide::Host, false)?; - let clipboards = proto::create_multi_clipboards(content); - Ok(clipboards) - } else { - hbb_common::bail!("Failed to create clipboard context"); - } -} - fn update_clipboard_(multi_clipboards: Vec<Clipboard>, side: ClipboardSide) { let mut to_update_data = proto::from_multi_clipbards(multi_clipboards); if to_update_data.is_empty() { return; } - let mut ctx = CLIPBOARD_CTX.lock().unwrap(); + let mut ctx = CLIPBOARD_UPDATE_CTX.lock().unwrap(); if ctx.is_none() { match ClipboardContext::new() { Ok(x) => { --- src/ipc.rs @@ -1,3 +1,10 @@ +use std::{ + collections::HashMap, + sync::atomic::{AtomicBool, Ordering}, +}; +#[cfg(not(windows))] +use std::{fs::File, io::prelude::*}; + use crate::{ privacy_mode::PrivacyModeState, ui_interface::{get_local_option, set_local_option}, @@ -7,12 +14,6 @@ use parity_tokio_ipc::{ Connection as Conn, ConnectionClient as ConnClient, Endpoint, Incoming, SecurityAttributes, }; use serde_derive::{Deserialize, Serialize}; -use std::{ - collections::HashMap, - sync::atomic::{AtomicBool, Ordering}, -}; -#[cfg(not(windows))] -use std::{fs::File, io::prelude::*}; #[cfg(all(feature = "flutter", feature = "plugin_framework"))] #[cfg(not(any(target_os = "android", target_os = "ios")))] @@ -25,11 +26,8 @@ use hbb_common::{ config::{self, Config, Config2}, futures::StreamExt as _, futures_util::sink::SinkExt, - log, password_security as password, timeout, - tokio::{ - self, - io::{AsyncRead, AsyncWrite}, - }, + log, password_security as password, timeout, tokio, + tokio::io::{AsyncRead, AsyncWrite}, tokio_util::codec::Framed, ResultType, }; @@ -102,20 +100,6 @@ pub enum FS { }, } -#[cfg(target_os = "windows")] -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(tag = "t")] -pub struct ClipboardNonFile { - pub compress: bool, - pub content: bytes::Bytes, - pub content_len: usize, - pub next_raw: bool, - pub width: i32, - pub height: i32, - // message.proto: ClipboardFormat - pub format: i32, -} - #[cfg(not(any(target_os = "android", target_os = "ios")))] #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(tag = "t", content = "c")] @@ -223,8 +207,6 @@ pub enum Data { #[cfg(not(any(target_os = "android", target_os = "ios")))] ClipboardFile(ClipboardFile), 
ClipboardFileEnabled(bool), - #[cfg(target_os = "windows")] - ClipboardNonFile(Option<(String, Vec<ClipboardNonFile>)>), PrivacyModeState((i32, PrivacyModeState, String)), TestRendezvousServer, #[cfg(not(any(target_os = "android", target_os = "ios")))] --- src/server/clipboard_service.rs @@ -3,8 +3,6 @@ pub use crate::clipboard::{ check_clipboard, ClipboardContext, ClipboardSide, CLIPBOARD_INTERVAL as INTERVAL, CLIPBOARD_NAME as NAME, }; -#[cfg(windows)] -use crate::ipc::{self, ClipboardFile, ClipboardNonFile, Data}; use clipboard_master::{CallbackResult, ClipboardHandler}; use std::{ io, @@ -16,8 +14,6 @@ struct Handler { sp: EmptyExtraFieldService, ctx: Option<ClipboardContext>, tx_cb_result: Sender<CallbackResult>, - #[cfg(target_os = "windows")] - stream: Option<ipc::ConnectionTmpl<parity_tokio_ipc::ConnectionClient>>, } pub fn new() -> GenericService { @@ -32,8 +28,6 @@ fn run(sp: EmptyExtraFieldService) -> ResultType<()> { sp: sp.clone(), ctx: Some(ClipboardContext::new()?), tx_cb_result, - #[cfg(target_os = "windows")] - stream: None, }; let (tx_start_res, rx_start_res) = channel(); @@ -70,10 +64,8 @@ fn run(sp: EmptyExtraFieldService) -> ResultType<()> { impl ClipboardHandler for Handler { fn on_clipboard_change(&mut self) -> CallbackResult { self.sp.snapshot(|_sps| Ok(())).ok(); - if self.sp.ok() { - if let Some(msg) = self.get_clipboard_msg() { - self.sp.send(msg); - } + if let Some(msg) = check_clipboard(&mut self.ctx, ClipboardSide::Host, false) { + self.sp.send(msg); } CallbackResult::Next } @@ -85,107 +77,3 @@ impl ClipboardHandler for Handler { CallbackResult::Next } } - -impl Handler { - fn get_clipboard_msg(&mut self) -> Option<Message> { - #[cfg(target_os = "windows")] - if crate::common::is_server() && crate::platform::is_root() { - match self.read_clipboard_from_cm_ipc() { - Err(e) => { - log::error!("Failed to read clipboard from cm: {}", e); - } - Ok(data) => { - let mut msg = Message::new(); - let multi_clipboards = MultiClipboards { - clipboards: data - .into_iter() - .map(|c| Clipboard { - compress: c.compress, - content: c.content, - width: c.width, - height: c.height, - format: ClipboardFormat::from_i32(c.format) - .unwrap_or(ClipboardFormat::Text) - .into(), - ..Default::default() - }) - .collect(), - ..Default::default() - }; - msg.set_multi_clipboards(multi_clipboards); - return Some(msg); - } - } - } - check_clipboard(&mut self.ctx, ClipboardSide::Host, false) - } - - // It's ok to do async operation in the clipboard service because: - // 1. the clipboard is not used frequently. - // 2. the clipboard handle is sync and will not block the main thread. - #[cfg(windows)] - #[tokio::main(flavor = "current_thread")] - async fn read_clipboard_from_cm_ipc(&mut self) -> ResultType<Vec<ClipboardNonFile>> { - let mut is_sent = false; - if let Some(stream) = &mut self.stream { - // If previous stream is still alive, reuse it. - // If the previous stream is dead, `is_sent` will trigger reconnect. - is_sent = stream.send(&Data::ClipboardNonFile(None)).await.is_ok(); - } - if !is_sent { - let mut stream = crate::ipc::connect(100, "_cm").await?; - stream.send(&Data::ClipboardNonFile(None)).await?; - self.stream = Some(stream); - } - - if let Some(stream) = &mut self.stream { - loop { - match stream.next_timeout(800).await? 
{ - Some(Data::ClipboardNonFile(Some((err, mut contents)))) => { - if !err.is_empty() { - bail!("{}", err); - } else { - if contents.iter().any(|c| c.next_raw) { - match timeout(1000, stream.next_raw()).await { - Ok(Ok(mut data)) => { - for c in &mut contents { - if c.next_raw { - if c.content_len <= data.len() { - c.content = - data.split_off(c.content_len).into(); - } else { - // Reconnect the next time to avoid the next raw data mismatch. - self.stream = None; - bail!("failed to get raw clipboard data: invalid size"); - } - } - } - } - Ok(Err(e)) => { - // reset by peer - self.stream = None; - bail!("failed to get raw clipboard data: {}", e); - } - Err(e) => { - // Reconnect to avoid the next raw data remaining in the buffer. - self.stream = None; - log::debug!("failed to get raw clipboard data: {}", e); - } - } - } - return Ok(contents); - } - } - Some(Data::ClipboardFile(ClipboardFile::MonitorReady)) => { - // ClipboardFile::MonitorReady is the first message sent by cm. - } - _ => { - bail!("failed to get clipboard data from cm"); - } - } - } - } - // unreachable! - bail!("failed to get clipboard data from cm"); - } -} --- src/ui_cm_interface.rs @@ -1,5 +1,16 @@ -#[cfg(target_os = "windows")] -use crate::ipc::ClipboardNonFile; +#[cfg(any(target_os = "android", target_os = "ios", feature = "flutter"))] +use std::iter::FromIterator; +#[cfg(any(target_os = "windows", target_os = "linux", target_os = "macos"))] +use std::sync::Arc; +use std::{ + collections::HashMap, + ops::{Deref, DerefMut}, + sync::{ + atomic::{AtomicI64, Ordering}, + RwLock, + }, +}; + #[cfg(not(any(target_os = "android", target_os = "ios")))] use crate::ipc::Connection; #[cfg(not(any(target_os = "ios")))] @@ -25,18 +36,6 @@ use hbb_common::{ #[cfg(any(target_os = "windows", target_os = "linux", target_os = "macos"))] use hbb_common::{tokio::sync::Mutex as TokioMutex, ResultType}; use serde_derive::Serialize; -#[cfg(any(target_os = "android", target_os = "ios", feature = "flutter"))] -use std::iter::FromIterator; -#[cfg(any(target_os = "windows", target_os = "linux", target_os = "macos"))] -use std::sync::Arc; -use std::{ - collections::HashMap, - ops::{Deref, DerefMut}, - sync::{ - atomic::{AtomicI64, Ordering}, - RwLock, - }, -}; #[derive(Serialize, Clone)] pub struct Client { @@ -487,41 +486,6 @@ impl<T: InvokeUiCM> IpcTaskRunner<T> { Data::CloseVoiceCall(reason) => { self.cm.voice_call_closed(self.conn_id, reason.as_str()); } - #[cfg(target_os = "windows")] - Data::ClipboardNonFile(_) => { - match crate::clipboard::check_clipboard_cm() { - Ok(multi_clipoards) => { - let mut raw_contents = bytes::BytesMut::new(); - let mut main_data = vec![]; - for c in multi_clipoards.clipboards.into_iter() { - let (content, content_len, next_raw) = { - // TODO: find out a better threshold - let content_len = c.content.len(); - if content_len > 1024 * 3 { - (c.content, content_len, false) - } else { - raw_contents.extend(c.content); - (bytes::Bytes::new(), content_len, true) - } - }; - main_data.push(ClipboardNonFile { - compress: c.compress, - content, - content_len, - next_raw, - width: c.width, - height: c.height, - format: c.format.value(), - }); - } - allow_err!(self.stream.send(&Data::ClipboardNonFile(Some(("".to_owned(), main_data)))).await); - allow_err!(self.stream.send_raw(raw_contents.into()).await); - } - Err(e) => { - allow_err!(self.stream.send(&Data::ClipboardNonFile(Some((format!("{}", e), vec![])))).await); - } - } - } _ => { }
rustdesk
rustdesk
Rust
Rust
83,345
11,693
An open-source remote desktop application designed for self-hosting, as an alternative to TeamViewer.
rustdesk_rustdesk
BUG_FIX
obvious
df9015ab477767af2d6449a446926395a9923ace
2025-01-14 00:44:06
Prabhakar Yadav
fix(curriculum): replaced the last question in css quiz (#58045) Co-authored-by: Naomi <[email protected]>
false
5
5
10
--- curriculum/challenges/english/25-front-end-development/quiz-basic-css/66ed8fa2f45ce3ece4053eab.md @@ -435,21 +435,21 @@ Which rule applies `24px` padding to the top and bottom? #### --text-- -For `padding: 10px 20px 30px 40px`, what is the correct order of values? +What does the `line-height` property do? #### --distractors-- -Right, Top, Left, Bottom +Controls the size of the font. --- -Top, Left, Bottom, Right +Controls the responsiveness of text. --- -Top, Bottom, Right, Left +Controls how much space is between the letters of text. #### --answer-- -Top, Right, Bottom, Left +Controls how much space is between the lines of text.
freecodecamp
freecodecamp
TypeScript
TypeScript
410,748
39,092
freeCodeCamp.org's open-source codebase and curriculum. Learn to code for free.
freecodecamp_freecodecamp
DOC_CHANGE
Obvious
c2e3363165f775cce19164384b856b239ca5f490
2024-03-27 17:39:42
Easy
Switch to the CC-BY-NC-SA license
false
437
24
461
--- LICENSE @@ -1,437 +1,24 @@ -Attribution-NonCommercial-ShareAlike 4.0 International - -======================================================================= - -Creative Commons Corporation ("Creative Commons") is not a law firm and -does not provide legal services or legal advice. Distribution of -Creative Commons public licenses does not create a lawyer-client or -other relationship. Creative Commons makes its licenses and related -information available on an "as-is" basis. Creative Commons gives no -warranties regarding its licenses, any material licensed under their -terms and conditions, or any related information. Creative Commons -disclaims all liability for damages resulting from their use to the -fullest extent possible. - -Using Creative Commons Public Licenses - -Creative Commons public licenses provide a standard set of terms and -conditions that creators and other rights holders may use to share -original works of authorship and other material subject to copyright -and certain other rights specified in the public license below. The -following considerations are for informational purposes only, are not -exhaustive, and do not form part of our licenses. - - Considerations for licensors: Our public licenses are - intended for use by those authorized to give the public - permission to use material in ways otherwise restricted by - copyright and certain other rights. Our licenses are - irrevocable. Licensors should read and understand the terms - and conditions of the license they choose before applying it. - Licensors should also secure all rights necessary before - applying our licenses so that the public can reuse the - material as expected. Licensors should clearly mark any - material not subject to the license. This includes other CC- - licensed material, or material used under an exception or - limitation to copyright. More considerations for licensors: - wiki.creativecommons.org/Considerations_for_licensors - - Considerations for the public: By using one of our public - licenses, a licensor grants the public permission to use the - licensed material under specified terms and conditions. If - the licensor's permission is not necessary for any reason--for - example, because of any applicable exception or limitation to - copyright--then that use is not regulated by the license. Our - licenses grant only permissions under copyright and certain - other rights that a licensor has authority to grant. Use of - the licensed material may still be restricted for other - reasons, including because others have copyright or other - rights in the material. A licensor may make special requests, - such as asking that all changes be marked or described. - Although not required by our licenses, you are encouraged to - respect those requests where reasonable. More considerations - for the public: - wiki.creativecommons.org/Considerations_for_licensees - -======================================================================= - -Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International -Public License - -By exercising the Licensed Rights (defined below), You accept and agree -to be bound by the terms and conditions of this Creative Commons -Attribution-NonCommercial-ShareAlike 4.0 International Public License -("Public License"). 
To the extent this Public License may be -interpreted as a contract, You are granted the Licensed Rights in -consideration of Your acceptance of these terms and conditions, and the -Licensor grants You such rights in consideration of benefits the -Licensor receives from making the Licensed Material available under -these terms and conditions. - - -Section 1 -- Definitions. - - a. Adapted Material means material subject to Copyright and Similar - Rights that is derived from or based upon the Licensed Material - and in which the Licensed Material is translated, altered, - arranged, transformed, or otherwise modified in a manner requiring - permission under the Copyright and Similar Rights held by the - Licensor. For purposes of this Public License, where the Licensed - Material is a musical work, performance, or sound recording, - Adapted Material is always produced where the Licensed Material is - synched in timed relation with a moving image. - - b. Adapter's License means the license You apply to Your Copyright - and Similar Rights in Your contributions to Adapted Material in - accordance with the terms and conditions of this Public License. - - c. BY-NC-SA Compatible License means a license listed at - creativecommons.org/compatiblelicenses, approved by Creative - Commons as essentially the equivalent of this Public License. - - d. Copyright and Similar Rights means copyright and/or similar rights - closely related to copyright including, without limitation, - performance, broadcast, sound recording, and Sui Generis Database - Rights, without regard to how the rights are labeled or - categorized. For purposes of this Public License, the rights - specified in Section 2(b)(1)-(2) are not Copyright and Similar - Rights. - - e. Effective Technological Measures means those measures that, in the - absence of proper authority, may not be circumvented under laws - fulfilling obligations under Article 11 of the WIPO Copyright - Treaty adopted on December 20, 1996, and/or similar international - agreements. - - f. Exceptions and Limitations means fair use, fair dealing, and/or - any other exception or limitation to Copyright and Similar Rights - that applies to Your use of the Licensed Material. - - g. License Elements means the license attributes listed in the name - of a Creative Commons Public License. The License Elements of this - Public License are Attribution, NonCommercial, and ShareAlike. - - h. Licensed Material means the artistic or literary work, database, - or other material to which the Licensor applied this Public - License. - - i. Licensed Rights means the rights granted to You subject to the - terms and conditions of this Public License, which are limited to - all Copyright and Similar Rights that apply to Your use of the - Licensed Material and that the Licensor has authority to license. - - j. Licensor means the individual(s) or entity(ies) granting rights - under this Public License. - - k. NonCommercial means not primarily intended for or directed towards - commercial advantage or monetary compensation. For purposes of - this Public License, the exchange of the Licensed Material for - other material subject to Copyright and Similar Rights by digital - file-sharing or similar means is NonCommercial provided there is - no payment of monetary compensation in connection with the - exchange. - - l. 
Share means to provide material to the public by any means or - process that requires permission under the Licensed Rights, such - as reproduction, public display, public performance, distribution, - dissemination, communication, or importation, and to make material - available to the public including in ways that members of the - public may access the material from a place and at a time - individually chosen by them. - - m. Sui Generis Database Rights means rights other than copyright - resulting from Directive 96/9/EC of the European Parliament and of - the Council of 11 March 1996 on the legal protection of databases, - as amended and/or succeeded, as well as other essentially - equivalent rights anywhere in the world. - - n. You means the individual or entity exercising the Licensed Rights - under this Public License. Your has a corresponding meaning. - - -Section 2 -- Scope. - - a. License grant. - - 1. Subject to the terms and conditions of this Public License, - the Licensor hereby grants You a worldwide, royalty-free, - non-sublicensable, non-exclusive, irrevocable license to - exercise the Licensed Rights in the Licensed Material to: - - a. reproduce and Share the Licensed Material, in whole or - in part, for NonCommercial purposes only; and - - b. produce, reproduce, and Share Adapted Material for - NonCommercial purposes only. - - 2. Exceptions and Limitations. For the avoidance of doubt, where - Exceptions and Limitations apply to Your use, this Public - License does not apply, and You do not need to comply with - its terms and conditions. - - 3. Term. The term of this Public License is specified in Section - 6(a). - - 4. Media and formats; technical modifications allowed. The - Licensor authorizes You to exercise the Licensed Rights in - all media and formats whether now known or hereafter created, - and to make technical modifications necessary to do so. The - Licensor waives and/or agrees not to assert any right or - authority to forbid You from making technical modifications - necessary to exercise the Licensed Rights, including - technical modifications necessary to circumvent Effective - Technological Measures. For purposes of this Public License, - simply making modifications authorized by this Section 2(a) - (4) never produces Adapted Material. - - 5. Downstream recipients. - - a. Offer from the Licensor -- Licensed Material. Every - recipient of the Licensed Material automatically - receives an offer from the Licensor to exercise the - Licensed Rights under the terms and conditions of this - Public License. - - b. Additional offer from the Licensor -- Adapted Material. - Every recipient of Adapted Material from You - automatically receives an offer from the Licensor to - exercise the Licensed Rights in the Adapted Material - under the conditions of the Adapter's License You apply. - - c. No downstream restrictions. You may not offer or impose - any additional or different terms or conditions on, or - apply any Effective Technological Measures to, the - Licensed Material if doing so restricts exercise of the - Licensed Rights by any recipient of the Licensed - Material. - - 6. No endorsement. Nothing in this Public License constitutes or - may be construed as permission to assert or imply that You - are, or that Your use of the Licensed Material is, connected - with, or sponsored, endorsed, or granted official status by, - the Licensor or others designated to receive attribution as - provided in Section 3(a)(1)(A)(i). - - b. Other rights. - - 1. 
Moral rights, such as the right of integrity, are not - licensed under this Public License, nor are publicity, - privacy, and/or other similar personality rights; however, to - the extent possible, the Licensor waives and/or agrees not to - assert any such rights held by the Licensor to the limited - extent necessary to allow You to exercise the Licensed - Rights, but not otherwise. - - 2. Patent and trademark rights are not licensed under this - Public License. - - 3. To the extent possible, the Licensor waives any right to - collect royalties from You for the exercise of the Licensed - Rights, whether directly or through a collecting society - under any voluntary or waivable statutory or compulsory - licensing scheme. In all other cases the Licensor expressly - reserves any right to collect such royalties, including when - the Licensed Material is used other than for NonCommercial - purposes. - - -Section 3 -- License Conditions. - -Your exercise of the Licensed Rights is expressly made subject to the -following conditions. - - a. Attribution. - - 1. If You Share the Licensed Material (including in modified - form), You must: - - a. retain the following if it is supplied by the Licensor - with the Licensed Material: - - i. identification of the creator(s) of the Licensed - Material and any others designated to receive - attribution, in any reasonable manner requested by - the Licensor (including by pseudonym if - designated); - - ii. a copyright notice; - - iii. a notice that refers to this Public License; - - iv. a notice that refers to the disclaimer of - warranties; - - v. a URI or hyperlink to the Licensed Material to the - extent reasonably practicable; - - b. indicate if You modified the Licensed Material and - retain an indication of any previous modifications; and - - c. indicate the Licensed Material is licensed under this - Public License, and include the text of, or the URI or - hyperlink to, this Public License. - - 2. You may satisfy the conditions in Section 3(a)(1) in any - reasonable manner based on the medium, means, and context in - which You Share the Licensed Material. For example, it may be - reasonable to satisfy the conditions by providing a URI or - hyperlink to a resource that includes the required - information. - 3. If requested by the Licensor, You must remove any of the - information required by Section 3(a)(1)(A) to the extent - reasonably practicable. - - b. ShareAlike. - - In addition to the conditions in Section 3(a), if You Share - Adapted Material You produce, the following conditions also apply. - - 1. The Adapter's License You apply must be a Creative Commons - license with the same License Elements, this version or - later, or a BY-NC-SA Compatible License. - - 2. You must include the text of, or the URI or hyperlink to, the - Adapter's License You apply. You may satisfy this condition - in any reasonable manner based on the medium, means, and - context in which You Share Adapted Material. - - 3. You may not offer or impose any additional or different terms - or conditions on, or apply any Effective Technological - Measures to, Adapted Material that restrict exercise of the - rights granted under the Adapter's License You apply. - - -Section 4 -- Sui Generis Database Rights. - -Where the Licensed Rights include Sui Generis Database Rights that -apply to Your use of the Licensed Material: - - a. 
for the avoidance of doubt, Section 2(a)(1) grants You the right - to extract, reuse, reproduce, and Share all or a substantial - portion of the contents of the database for NonCommercial purposes - only; - - b. if You include all or a substantial portion of the database - contents in a database in which You have Sui Generis Database - Rights, then the database in which You have Sui Generis Database - Rights (but not its individual contents) is Adapted Material, - including for purposes of Section 3(b); and - - c. You must comply with the conditions in Section 3(a) if You Share - all or a substantial portion of the contents of the database. - -For the avoidance of doubt, this Section 4 supplements and does not -replace Your obligations under this Public License where the Licensed -Rights include other Copyright and Similar Rights. - - -Section 5 -- Disclaimer of Warranties and Limitation of Liability. - - a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE - EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS - AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF - ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, - IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, - WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR - PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, - ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT - KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT - ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. - - b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE - TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, - NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, - INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, - COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR - USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN - ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR - DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR - IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. - - c. The disclaimer of warranties and limitation of liability provided - above shall be interpreted in a manner that, to the extent - possible, most closely approximates an absolute disclaimer and - waiver of all liability. - - -Section 6 -- Term and Termination. - - a. This Public License applies for the term of the Copyright and - Similar Rights licensed here. However, if You fail to comply with - this Public License, then Your rights under this Public License - terminate automatically. - - b. Where Your right to use the Licensed Material has terminated under - Section 6(a), it reinstates: - - 1. automatically as of the date the violation is cured, provided - it is cured within 30 days of Your discovery of the - violation; or - - 2. upon express reinstatement by the Licensor. - - For the avoidance of doubt, this Section 6(b) does not affect any - right the Licensor may have to seek remedies for Your violations - of this Public License. - - c. For the avoidance of doubt, the Licensor may also offer the - Licensed Material under separate terms or conditions or stop - distributing the Licensed Material at any time; however, doing so - will not terminate this Public License. - - d. Sections 1, 5, 6, 7, and 8 survive termination of this Public - License. - - -Section 7 -- Other Terms and Conditions. - - a. 
The Licensor shall not be bound by any additional or different - terms or conditions communicated by You unless expressly agreed. - - b. Any arrangements, understandings, or agreements regarding the - Licensed Material not stated herein are separate from and - independent of the terms and conditions of this Public License. - - -Section 8 -- Interpretation. - - a. For the avoidance of doubt, this Public License does not, and - shall not be interpreted to, reduce, limit, restrict, or impose - conditions on any use of the Licensed Material that could lawfully - be made without permission under this Public License. - - b. To the extent possible, if any provision of this Public License is - deemed unenforceable, it shall be automatically reformed to the - minimum extent necessary to make it enforceable. If the provision - cannot be reformed, it shall be severed from this Public License - without affecting the enforceability of the remaining terms and - conditions. - - c. No term or condition of this Public License will be waived and no - failure to comply consented to unless expressly agreed to by the - Licensor. - - d. Nothing in this Public License constitutes or may be interpreted - as a limitation upon, or waiver of, any privileges and immunities - that apply to the Licensor or You, including from the legal - processes of any jurisdiction or authority. - -======================================================================= - -Creative Commons is not a party to its public -licenses. Notwithstanding, Creative Commons may elect to apply one of -its public licenses to material it publishes and in those instances -will be considered the “Licensor.” The text of the Creative Commons -public licenses is dedicated to the public domain under the CC0 Public -Domain Dedication. Except for the limited purpose of indicating that -material is shared under a Creative Commons public license or as -otherwise permitted by the Creative Commons policies published at -creativecommons.org/policies, Creative Commons does not authorize the -use of the trademark "Creative Commons" or any other trademark or logo -of Creative Commons without its prior written consent including, -without limitation, in connection with any unauthorized modifications -to any of its public licenses or any other arrangements, -understandings, or agreements concerning use of licensed material. For -the avoidance of doubt, this paragraph does not form part of the -public licenses. - -Creative Commons may be contacted at creativecommons.org. +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to <https://unlicense.org>
one-person-businesses-methodology-v2.0
easychen
PHP
PHP
5,272
464
"One-Person Business Methodology" (《一人企业方法论》), 2nd edition; also suitable for non-technical readers running other side businesses such as self-media, e-commerce, or digital products.
easychen_one-person-businesses-methodology-v2.0
NEW_FEAT
Large additions with few deletions
75a908aacfe9ad86a18f3930cfff9c4636ffd644
2022-09-19 10:51:48
Jordan Harband
[eslint config] [deps] update `eslint-plugin-jsx-a11y`, `eslint-plugin-react`, `eslint-plugin-react-hooks`
false
6
6
12
--- packages/eslint-config-airbnb/package.json @@ -78,9 +78,9 @@ "eslint": "^7.32.0 || ^8.2.0", "eslint-find-rules": "^4.1.0", "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.6.1", - "eslint-plugin-react": "^7.31.8", - "eslint-plugin-react-hooks": "^4.6.0", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.30.0", + "eslint-plugin-react-hooks": "^4.5.0", "in-publish": "^2.0.1", "react": ">= 0.13.0", "safe-publish-latest": "^2.0.0", @@ -89,9 +89,9 @@ "peerDependencies": { "eslint": "^7.32.0 || ^8.2.0", "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.6.1", - "eslint-plugin-react": "^7.31.8", - "eslint-plugin-react-hooks": "^4.6.0" + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.30.0", + "eslint-plugin-react-hooks": "^4.5.0" }, "engines": { "node": "^10.12.0 || ^12.22.0 || ^14.17.0 || >=16.0.0"
javascript
airbnb
JavaScript
JavaScript
146,197
26,671
JavaScript Style Guide
airbnb_javascript
CODE_IMPROVEMENT
bumps eslint plugin dependency version ranges in package.json
426d508551f970f7a4bc93de6460e6fdaa9c8a0f
2025-02-01 16:33:18
Fatih Uzunoglu
qml: prevent assigning intermediate value to source size in `BannnerCone.qml` Partially changing a value type changes the whole value. In image case, this means that the images can get re-loaded with indeterminate size just to discard them after the size is determined. Fortunately this does not happen currently, because the sizes are determined before the component is complete and QQuickImage only loads if the component is complete. However, due to the declarative nature of QML, bindings are evaluated and assigned one by one, this means that source size would be changed two times even if .width and .height are pending re-evaluation at the same time (such as both depend on DPR). At the same time, the order is also not defined, so with such a setup as following: sourceSize.width: width * eDPR sourceSize.height: height * eDPR When eDPR changes, Qt evaluates the binding for width or height, adjusts sourceSize, sourceSize changes and change signal is signalled, then evaluates the other sub-part of the value type (width or height), adjusts sourceSize, sourceSize changes again and change signal is signalled. Qt could technically optimize this, but as of Qt 6.8.1 it is not the case. Meanwhile with the following: sourceSize: Qt.size(width * eDPR, height * eDPR) When eDPR changes, Qt evaluates the binding and adjusts the source size, sourceSize changes and change signal is signalled. Source size does not change two times, and the image would not be loaded two times.
false
1
2
3
--- modules/gui/qt/widgets/qml/BannerCone.qml @@ -30,7 +30,8 @@ Image { required property color color property alias csdMenuVisible: csdMenu.menuVisible - sourceSize: Qt.size(VLCStyle.icon_normal, VLCStyle.icon_normal) + sourceSize.width: VLCStyle.icon_normal + sourceSize.height: VLCStyle.icon_normal mipmap: MainCtx.useXmasCone() source: MainCtx.useXmasCone() ? "qrc:///logo/vlc48-xmas.png" // TODO: new xmas cone for designs?
vlc
null
C
C
null
null
Video player
_vlc
PERF_IMPROVEMENT
probably a performance improvement: avoids redundant image loads
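The commit message's argument is general: partially assigning a grouped value-type property changes the whole value once per sub-field, firing one change notification each time, while assigning a complete value fires one. Transposed to Python with an illustrative `Image` class (nothing here is Qt API):

```python
class Image:
    def __init__(self):
        self._source_size = (0, 0)
        self.reloads = 0               # stand-in for "image re-loaded"

    @property
    def source_size(self):
        return self._source_size

    @source_size.setter
    def source_size(self, value):
        self._source_size = value
        self.reloads += 1              # one change notification per assignment

img, w, h = Image(), 48, 48

# per-component updates: (48, 0) is an intermediate size, observed once
img.source_size = (w, img.source_size[1])
img.source_size = (img.source_size[0], h)
assert img.reloads == 2

img.reloads = 0
img.source_size = (w, h)               # whole-value update, one notification
assert img.reloads == 1
```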
104876840d2dd7533e00248039c44acf39135767
2025-01-24 14:37:52
Fatih Uzunoglu
qml: do not use `Screen.devicePixelRatio` in `VideoInfoExpandPanel.qml` `Screen.devicePixelRatio` is not reported correctly on Wayland, unlike window's effective device pixel ratio.
false
4
2
6
--- modules/gui/qt/medialibrary/qml/VideoInfoExpandPanel.qml @@ -114,12 +114,10 @@ FocusScope { anchors.fill: parent source: model.thumbnail || VLCStyle.noArtVideoCover - sourceSize.width: width * eDPR - sourceSize.height: height * eDPR + sourceSize.width: width * Screen.devicePixelRatio + sourceSize.height: height * Screen.devicePixelRatio radius: VLCStyle.gridCover_radius - readonly property real eDPR: MainCtx.effectiveDevicePixelRatio(Window.window) - Widgets.DefaultShadow { anchors.centerIn: parent
vlc
null
C
C
null
null
Video player
_vlc
BUG_FIX
correcting display behavior under Wayland
0ea3078c9698bb3026eab8a7e1ae6243ea8e5761
2023-12-28 21:08:46
dependabot[bot]
chore(deps): bump xunit from 2.4.1 to 2.6.4 in /libraries/csharp (#72) Bumps [xunit](https://github.com/xunit/xunit) from 2.4.1 to 2.6.4. - [Commits](https://github.com/xunit/xunit/compare/2.4.1...2.6.4) --- updated-dependencies: - dependency-name: xunit dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <[email protected]> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
false
1
1
2
--- libraries/csharp/StandardWebhooks.Tests/StandardWebhooks.Tests.csproj @@ -9,7 +9,7 @@ <ItemGroup> <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" /> <PackageReference Include="Moq" Version="4.16.1" /> - <PackageReference Include="xunit" Version="2.6.4" /> + <PackageReference Include="xunit" Version="2.4.1" /> <PackageReference Include="xunit.runner.visualstudio" Version="2.4.3"> <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets> <PrivateAssets>all</PrivateAssets>
standard-webhooks
standard-webhooks
Elixir
Elixir
1,390
37
The Standard Webhooks specification
standard-webhooks_standard-webhooks
CONFIG_CHANGE
Obvious
530dc5395da7d81f1edb4a416a674ba842990638
null
Akshay Agrawal
Delete equivalence between step and learning rate; the two are different concepts.
false
1
1
0
--- eager.ipynb @@ -534,7 +534,7 @@ "source": [ "### Create an optimizer\n", "\n", - "An *[optimizer](https://developers.google.com/machine-learning/crash-course/glossary#optimizer)* applies the computed gradients to the model's variables to minimize the `loss` function. You can think of a curved surface (see Figure 3) and we want to find its lowest point by walking around. The gradients point in the direction of steepest ascent—so we'll travel the opposite way and move down the hill. By iteratively calculating the loss and gradients for each *step* (or *[learning rate](https://developers.google.com/machine-learning/crash-course/glossary#learning_rate)*), we'll adjust the model during training. Gradually, the model will find the best combination of weights and bias to minimize loss. And the lower the loss, the better the model's predictions.\n", + "An *[optimizer](https://developers.google.com/machine-learning/crash-course/glossary#optimizer)* applies the computed gradients to the model's variables to minimize the `loss` function. You can think of a curved surface (see Figure 3) and we want to find its lowest point by walking around. The gradients point in the direction of steepest ascent—so we'll travel the opposite way and move down the hill. By iteratively calculating the loss and gradient for each example, we'll adjust the model during training. Gradually, the model will find the best combination of weights and bias to minimize loss. And the lower the loss, the better the model's predictions.\n", "\n", "<table>\n", " <tr><td>\n",
tensorflow_models.json
null
null
null
null
null
null
tensorflow_models.json
DOC_CHANGE
readme or comment change
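The notebook text edited above describes plain gradient descent: compute the loss and its gradient, then move the parameters a small step against the gradient. A self-contained sketch of that loop on a one-dimensional quadratic, in plain Python rather than the notebook's TensorFlow, purely for illustration:

```python
# Minimize loss(w) = (w - 3)^2; its gradient is 2 * (w - 3).
def loss(w: float) -> float:
    return (w - 3.0) ** 2

def grad(w: float) -> float:
    return 2.0 * (w - 3.0)

w = 0.0    # initial weight
lr = 0.1   # learning rate: the size of each downhill step
for _ in range(100):
    w -= lr * grad(w)  # one step: move against the gradient

print(round(w, 4), round(loss(w), 8))  # w approaches 3, loss approaches 0
```

The sketch also shows why the commit's wording change is right: a step is one parameter update, while the learning rate is the scalar that sizes it; the two are different concepts.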
631556b35b3670eacc38da4bcfe458bdbab2cd2a
2024-01-28 20:17:03
Zezhong Li
Update README.md add introduction
false
24
11
35
--- README.md @@ -1,41 +1,28 @@ -# Awesome Time Series Segmentation Papers +# Awesome Multivariate Time Series Segmentation Papers [![Awesome](https://awesome.re/badge.svg)](https://awesome.re) -This repository contains a reading list of papers on **Time Series Segmentation**. **This repository is still being continuously improved. ** - -As a crucial time series preprocessing technique, **semantic segmentation** divides poorly understood time series into several discrete and homogeneous segments. This approach aims to uncover latent temporal evolution patterns, detect unexpected regularities and regimes, thereby rendering the analysis of massive time series data more manageable. - -Time series segmentation often intertwines with research in many domains. Firstly, the relationship between **time series segmentation, time series change point detection,** and some aspects of **time series anomaly/outlier detection** is somewhat ambiguous. Therefore, this repository includes a selection of papers from these areas. Secondly, time series segmentation can be regarded as a process of information compression in time series, hence papers in this field often incorporate concepts from **information theory** (e.g., using minimum description length to guide the design of unsupervised time series segmentation models). Additionally, the task of decomposing human actions into a series of plausible motion primitives can be addressed through methods for segmenting sensor time series. Consequently, papers related to motion capture from the fields of **computer vision** and **ubiquitous computing** are also included in this collection. - -Generally, the subjects of unsupervised semantic segmentation can be categorized into: - -* ![univariate time series forecasting](https://img.shields.io/badge/-Univariate-brightgreen) univariate time series: ![](https://latex.codecogs.com/svg.image?\inline&space;1\times&space;T), where ![](https://latex.codecogs.com/svg.image?\inline&space;T) is the length of the time series. -* ![multivariate time series forecasting](https://img.shields.io/badge/-Multivariate-red) multivariate time series: ![](https://latex.codecogs.com/svg.image?\inline&space;M\times&space;T), where ![](https://latex.codecogs.com/svg.image?\inline&space;M) is the number of variables (channels). -* ![spatio-temporal forecasting](https://img.shields.io/badge/-Tensor-blue) tensor: ![](https://latex.codecogs.com/svg.image?\inline&space;N_{1}&space;\times&space;\cdots&space;\times&space;N_{k}&space;\times&space;M\times&space;T), where ![](https://latex.codecogs.com/svg.image?\inline&space;N_{1}&space;\times&space;\cdots&space;\times&space;N_{k}) denotes the dimensions other than time and variables. - -In the field of time series research, unlike time series forecasting, anomaly detection, and classification/clustering, the number of papers on time series segmentation has been somewhat lukewarm in recent years (this observation may carry a degree of subjectivity from the author). Additionally, deep learning methods do not seem to dominate this area as they do in others. Some classic but solid algorithms remain highly competitive even today, with quite a few originating from the same research group. Therefore, in the following paper list, I will introduce them indexed by well-known researchers and research groups in this field. +This repository contains a reading list of papers on **Multivariate Time Series Segmentation (aka. Change point detection)**. 
These papers are mainly categorized according to the type of model. **This repository is still being continuously improved. ** ## Some Additional Information. -🚩 2024/1/27: **I have marked some recommended papers / dataset / implementation with 🌟 (Just my personal preference 😉).** +🚩 2024/1/27: **I have marked some recommended papers with 🌟 (Just my personal preference 😉).** -## Survey & Evaluation +## Survey & Evaluation NOTE: the ranking has no particular order. | TYPE | Venue | Paper Title and Paper Interpretation | Code | | :-----------: | :-------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| Dataset | *DARLI-AP@EDBT/ICDT '23* | Time Series Segmentation Applied to a New Data Set for Mobile Sensing of Human Activities 🌟 | [MOSAD](https://github.com/ermshaua/mobile-sensing-human-activity-data-set) | -| Dataset | *ECML-PKDD Workshop '23* | Human Activity Segmentation Challenge@ECML/PKDD’23 🌟 | [Challenge Link](https://ecml-aaltd.github.io/aaltd2023/challenge.html) | -| Visualization | *IEEE TVCG '21* | MultiSegVA Using Visual Analytics to Segment Biologging Time Series on Multiple Scales | None | -| Survey | *IEEE J. Sel. Areas Commun. '21* | Sequential (Quickest) Change Detection Classical Results and New Directions | None | -| Survey | *Signal Process. '20* | Selective review of offline change point detection methods 🌟 | [Ruptures](https://centre-borelli.github.io/ruptures-docs/) | -| Evaluation | *Arxiv '20* | An Evaluation of Change Point Detection Algorithms 🌟 | [TCPDBench](https://github.com/alan-turing-institute/TCPDBench) | | Survey | *Knowl. Inf. Syst. '17* | A survey of methods for time series change point detection 🌟 | None | +| Evaluation | *Arxiv '20* | An Evaluation of Change Point Detection Algorithms 🌟 | [TCPDBench](https://github.com/alan-turing-institute/TCPDBench) | | Evaluation | *Inf. Syst. '17* | An evaluation of combinations of lossy compression and change-detection approaches for time-series data | None | +| Dataset | *ECML-PKDD Workshop '23* | Human Activity Segmentation Challenge@ECML/PKDD’23 🌟 | [Challenge Link](https://ecml-aaltd.github.io/aaltd2023/challenge.html) | | Survey | *IEEE Trans Hum. Mach. Syst. '16* | Movement Primitive Segmentation for Human Motion Modeling A Framework for Analysis 🌟 | None | +| Visualization | *IEEE TVCG '21* | MultiSegVA Using Visual Analytics to Segment Biologging Time Series on Multiple Scales | None | +| Survey | *Signal Process. '20* | Selective review of offline change point detection methods 🌟 | [Ruptures](https://centre-borelli.github.io/ruptures-docs/) | +| Survey | *IEEE J. Sel. Areas Commun. '21* | Sequential (Quickest) Change Detection Classical Results and New Directions | None | | Survey | *EAAI '11* | A review on time series data mining | None | | Survey | *CSUR '11* | Time-series data mining | None | -| Dataset | *GI '04* | Segmenting Motion Capture Data into Distinct Behaviors | [Website](http://graphics.cs.cmu.edu/projects/segmentation/) 🌟 | -| | | | | +| Dataset | *DARLI-AP@EDBT/ICDT '23* | Time Series Segmentation Applied to a New Data Set for Mobile Sensing of Human Activities 🌟 | [MOSAD](https://github.com/ermshaua/mobile-sensing-human-activity-data-set) | +| **Dataset** | *GI '04* | Segmenting Motion Capture Data into Distinct Behaviors 🌟 | [Website](http://graphics.cs.cmu.edu/projects/segmentation/) |
awesome-time-series-segmentation-papers
lzz19980125
MATLAB
MATLAB
454
8
This repository contains a reading list of papers on Time Series Segmentation. This repository is still being continuously improved.
lzz19980125_awesome-time-series-segmentation-papers
DOC_CHANGE
changes in readme
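The survey table in the diff recommends the `ruptures` package for offline change point detection. A minimal usage sketch, assuming I recall the library's `Pelt` API correctly (worth double-checking against the linked docs):

```python
import numpy as np
import ruptures as rpt  # pip install ruptures

# Piecewise-constant toy signal with change points at 100 and 200.
rng = np.random.default_rng(0)
signal = np.concatenate([
    rng.normal(0.0, 1.0, 100),
    rng.normal(5.0, 1.0, 100),
    rng.normal(1.0, 1.0, 100),
])

# PELT with an RBF cost; the penalty trades segment count against fit.
algo = rpt.Pelt(model="rbf").fit(signal)
breakpoints = algo.predict(pen=10)
print(breakpoints)  # expected roughly [100, 200, 300]; the last index closes the series
```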
d5bba18b5d81f234657586865248c5b6849599cd
2024-10-09 19:57:04
Ruslan Lesiutin
fix[react-devtools]: record timeline data only when supported (#31154) Stacked on https://github.com/facebook/react/pull/31132. See last commit. There are 2 issues: 1. We've been recording timeline events, even if Timeline Profiler was not supported by the Host. We've been doing this for React Native, for example, which would significantly regress perf of recording a profiling session, but we were not even using this data. 2. Currently, we are generating component stack for every state update event. This is extremely expensive, and we should not be doing this. We can't currently fix the second one, because we would still need to generate all these stacks, and this would still take quite a lot of time. As of right now, we can't generate a component stack lazily without relying on the fact that reference to the Fiber is not stale. With `enableOwnerStacks` we could populate component stacks in some collection, which would be cached at the Backend, and then returned only once Frontend asks for it. This approach also eliminates the need for keeping a reference to a Fiber.
false
126
62
188
--- packages/react-devtools-shared/src/backend/agent.js @@ -153,17 +153,12 @@ export default class Agent extends EventEmitter<{ _persistedSelection: PersistedSelection | null = null; _persistedSelectionMatch: PathMatch | null = null; _traceUpdatesEnabled: boolean = false; - _onReloadAndProfile: - | ((recordChangeDescriptions: boolean, recordTimeline: boolean) => void) - | void; + _onReloadAndProfile: ((recordChangeDescriptions: boolean) => void) | void; constructor( bridge: BackendBridge, isProfiling: boolean = false, - onReloadAndProfile?: ( - recordChangeDescriptions: boolean, - recordTimeline: boolean, - ) => void, + onReloadAndProfile?: (recordChangeDescriptions: boolean) => void, ) { super(); @@ -663,19 +658,17 @@ export default class Agent extends EventEmitter<{ this._bridge.send('isReloadAndProfileSupportedByBackend', true); }; - reloadAndProfile: ({ - recordChangeDescriptions: boolean, - recordTimeline: boolean, - }) => void = ({recordChangeDescriptions, recordTimeline}) => { - if (typeof this._onReloadAndProfile === 'function') { - this._onReloadAndProfile(recordChangeDescriptions, recordTimeline); - } + reloadAndProfile: (recordChangeDescriptions: boolean) => void = + recordChangeDescriptions => { + if (typeof this._onReloadAndProfile === 'function') { + this._onReloadAndProfile(recordChangeDescriptions); + } - // This code path should only be hit if the shell has explicitly told the Store that it supports profiling. - // In that case, the shell must also listen for this specific message to know when it needs to reload the app. - // The agent can't do this in a way that is renderer agnostic. - this._bridge.send('reloadAppForProfiling'); - }; + // This code path should only be hit if the shell has explicitly told the Store that it supports profiling. + // In that case, the shell must also listen for this specific message to know when it needs to reload the app. + // The agent can't do this in a way that is renderer agnostic. 
+ this._bridge.send('reloadAppForProfiling'); + }; renamePath: RenamePathParams => void = ({ hookID, @@ -747,19 +740,17 @@ export default class Agent extends EventEmitter<{ this.removeAllListeners(); }; - startProfiling: ({ - recordChangeDescriptions: boolean, - recordTimeline: boolean, - }) => void = ({recordChangeDescriptions, recordTimeline}) => { - this._isProfiling = true; - for (const rendererID in this._rendererInterfaces) { - const renderer = ((this._rendererInterfaces[ - (rendererID: any) - ]: any): RendererInterface); - renderer.startProfiling(recordChangeDescriptions, recordTimeline); - } - this._bridge.send('profilingStatus', this._isProfiling); - }; + startProfiling: (recordChangeDescriptions: boolean) => void = + recordChangeDescriptions => { + this._isProfiling = true; + for (const rendererID in this._rendererInterfaces) { + const renderer = ((this._rendererInterfaces[ + (rendererID: any) + ]: any): RendererInterface); + renderer.startProfiling(recordChangeDescriptions); + } + this._bridge.send('profilingStatus', this._isProfiling); + }; stopProfiling: () => void = () => { this._isProfiling = false; --- packages/react-devtools-shared/src/backend/fiber/renderer.js @@ -5035,7 +5035,6 @@ export function attach( let isProfiling: boolean = false; let profilingStartTime: number = 0; let recordChangeDescriptions: boolean = false; - let recordTimeline: boolean = false; let rootToCommitProfilingMetadataMap: CommitProfilingMetadataMap | null = null; @@ -5177,16 +5176,12 @@ export function attach( } } - function startProfiling( - shouldRecordChangeDescriptions: boolean, - shouldRecordTimeline: boolean, - ) { + function startProfiling(shouldRecordChangeDescriptions: boolean) { if (isProfiling) { return; } recordChangeDescriptions = shouldRecordChangeDescriptions; - recordTimeline = shouldRecordTimeline; // Capture initial values as of the time profiling starts. // It's important we snapshot both the durations and the id-to-root map, @@ -5217,7 +5212,7 @@ export function attach( rootToCommitProfilingMetadataMap = new Map(); if (toggleProfilingStatus !== null) { - toggleProfilingStatus(true, recordTimeline); + toggleProfilingStatus(true); } } @@ -5226,18 +5221,13 @@ export function attach( recordChangeDescriptions = false; if (toggleProfilingStatus !== null) { - toggleProfilingStatus(false, recordTimeline); + toggleProfilingStatus(false); } - - recordTimeline = false; } // Automatically start profiling so that we don't miss timing info from initial "mount". 
if (shouldStartProfilingNow) { - startProfiling( - profilingSettings.recordChangeDescriptions, - profilingSettings.recordTimeline, - ); + startProfiling(profilingSettings.recordChangeDescriptions); } function getNearestFiber(devtoolsInstance: DevToolsInstance): null | Fiber { --- packages/react-devtools-shared/src/backend/profilingHooks.js @@ -97,10 +97,7 @@ export function setPerformanceMock_ONLY_FOR_TESTING( } export type GetTimelineData = () => TimelineData | null; -export type ToggleProfilingStatus = ( - value: boolean, - recordTimeline?: boolean, -) => void; +export type ToggleProfilingStatus = (value: boolean) => void; type Response = { getTimelineData: GetTimelineData, @@ -842,10 +839,7 @@ export function createProfilingHooks({ } } - function toggleProfilingStatus( - value: boolean, - recordTimeline: boolean = false, - ) { + function toggleProfilingStatus(value: boolean) { if (isProfiling !== value) { isProfiling = value; @@ -881,45 +875,34 @@ export function createProfilingHooks({ currentReactComponentMeasure = null; currentReactMeasuresStack = []; currentFiberStacks = new Map(); - if (recordTimeline) { - currentTimelineData = { - // Session wide metadata; only collected once. - internalModuleSourceToRanges, - laneToLabelMap: laneToLabelMap || new Map(), - reactVersion, - - // Data logged by React during profiling session. - componentMeasures: [], - schedulingEvents: [], - suspenseEvents: [], - thrownErrors: [], - - // Data inferred based on what React logs. - batchUIDToMeasuresMap: new Map(), - duration: 0, - laneToReactMeasureMap, - startTime: 0, - - // Data only available in Chrome profiles. - flamechart: [], - nativeEvents: [], - networkMeasures: [], - otherUserTimingMarks: [], - snapshots: [], - snapshotHeight: 0, - }; - } + currentTimelineData = { + // Session wide metadata; only collected once. + internalModuleSourceToRanges, + laneToLabelMap: laneToLabelMap || new Map(), + reactVersion, + + // Data logged by React during profiling session. + componentMeasures: [], + schedulingEvents: [], + suspenseEvents: [], + thrownErrors: [], + + // Data inferred based on what React logs. + batchUIDToMeasuresMap: new Map(), + duration: 0, + laneToReactMeasureMap, + startTime: 0, + + // Data only available in Chrome profiles. + flamechart: [], + nativeEvents: [], + networkMeasures: [], + otherUserTimingMarks: [], + snapshots: [], + snapshotHeight: 0, + }; nextRenderShouldStartNewBatch = true; } else { - // This is __EXPENSIVE__. - // We could end up with hundreds of state updated, and for each one of them - // would try to create a component stack with possibly hundreds of Fibers. - // Creating a cache of component stacks won't help, generating a single stack is already expensive enough. - // We should find a way to lazily generate component stacks on demand, when user inspects a specific event. - // If we succeed with moving React DevTools Timeline Profiler to Performance panel, then Timeline Profiler would probably be removed. - // If not, then once enableOwnerStacks is adopted, revisit this again and cache component stacks per Fiber, - // but only return them when needed, sending hundreds of component stacks is beyond the Bridge's bandwidth. 
- // Postprocess Profile data if (currentTimelineData !== null) { currentTimelineData.schedulingEvents.forEach(event => { --- packages/react-devtools-shared/src/backend/types.js @@ -419,10 +419,7 @@ export type RendererInterface = { renderer: ReactRenderer | null, setTraceUpdatesEnabled: (enabled: boolean) => void, setTrackedPath: (path: Array<PathFrame> | null) => void, - startProfiling: ( - recordChangeDescriptions: boolean, - recordTimeline: boolean, - ) => void, + startProfiling: (recordChangeDescriptions: boolean) => void, stopProfiling: () => void, storeAsGlobal: ( id: number, @@ -490,7 +487,6 @@ export type DevToolsBackend = { export type ProfilingSettings = { recordChangeDescriptions: boolean, - recordTimeline: boolean, }; export type DevToolsHook = { --- packages/react-devtools-shared/src/bridge.js @@ -16,7 +16,6 @@ import type { ProfilingDataBackend, RendererID, DevToolsHookSettings, - ProfilingSettings, } from 'react-devtools-shared/src/backend/types'; import type {StyleAndLayout as StyleAndLayoutPayload} from 'react-devtools-shared/src/backend/NativeStyleEditor/types'; @@ -207,9 +206,6 @@ export type BackendEvents = { hookSettings: [$ReadOnly<DevToolsHookSettings>], }; -type StartProfilingParams = ProfilingSettings; -type ReloadAndProfilingParams = ProfilingSettings; - type FrontendEvents = { clearErrorsAndWarnings: [{rendererID: RendererID}], clearErrorsForElementID: [ElementAndRendererID], @@ -230,13 +226,13 @@ type FrontendEvents = { overrideSuspense: [OverrideSuspense], overrideValueAtPath: [OverrideValueAtPath], profilingData: [ProfilingDataBackend], - reloadAndProfile: [ReloadAndProfilingParams], + reloadAndProfile: [boolean], renamePath: [RenamePath], savedPreferences: [SavedPreferencesParams], setTraceUpdatesEnabled: [boolean], shutdown: [], startInspectingHost: [], - startProfiling: [StartProfilingParams], + startProfiling: [boolean], stopInspectingHost: [boolean], stopProfiling: [], storeAsGlobal: [StoreAsGlobalParams], --- packages/react-devtools-shared/src/constants.js @@ -41,8 +41,6 @@ export const LOCAL_STORAGE_PARSE_HOOK_NAMES_KEY = 'React::DevTools::parseHookNames'; export const SESSION_STORAGE_RECORD_CHANGE_DESCRIPTIONS_KEY = 'React::DevTools::recordChangeDescriptions'; -export const SESSION_STORAGE_RECORD_TIMELINE_KEY = - 'React::DevTools::recordTimeline'; export const SESSION_STORAGE_RELOAD_AND_PROFILE_KEY = 'React::DevTools::reloadAndProfile'; export const LOCAL_STORAGE_BROWSER_THEME = 'React::DevTools::theme'; --- packages/react-devtools-shared/src/devtools/ProfilerStore.js @@ -191,10 +191,7 @@ export default class ProfilerStore extends EventEmitter<{ } startProfiling(): void { - this._bridge.send('startProfiling', { - recordChangeDescriptions: this._store.recordChangeDescriptions, - recordTimeline: this._store.supportsTimeline, - }); + this._bridge.send('startProfiling', this._store.recordChangeDescriptions); this._isProfilingBasedOnUserInput = true; this.emit('isProfiling'); --- packages/react-devtools-shared/src/devtools/views/Profiler/ReloadAndProfileButton.js @@ -54,11 +54,8 @@ export default function ReloadAndProfileButton({ // For now, let's just skip doing it entirely to avoid paying snapshot costs for data we don't need. 
// startProfiling(); - bridge.send('reloadAndProfile', { - recordChangeDescriptions, - recordTimeline: store.supportsTimeline, - }); - }, [bridge, recordChangeDescriptions, store]); + bridge.send('reloadAndProfile', recordChangeDescriptions); + }, [bridge, recordChangeDescriptions]); if (!supportsReloadAndProfile) { return null; --- packages/react-devtools-shared/src/hook.js @@ -52,7 +52,6 @@ const targetConsole: Object = console; const defaultProfilingSettings: ProfilingSettings = { recordChangeDescriptions: false, - recordTimeline: false, }; export function installHook( --- packages/react-devtools-shared/src/utils.js @@ -38,7 +38,6 @@ import { LOCAL_STORAGE_OPEN_IN_EDITOR_URL, SESSION_STORAGE_RELOAD_AND_PROFILE_KEY, SESSION_STORAGE_RECORD_CHANGE_DESCRIPTIONS_KEY, - SESSION_STORAGE_RECORD_TIMELINE_KEY, } from './constants'; import { ComponentFilterElementType, @@ -1003,28 +1002,18 @@ export function getProfilingSettings(): ProfilingSettings { recordChangeDescriptions: sessionStorageGetItem(SESSION_STORAGE_RECORD_CHANGE_DESCRIPTIONS_KEY) === 'true', - recordTimeline: - sessionStorageGetItem(SESSION_STORAGE_RECORD_TIMELINE_KEY) === 'true', }; } -export function onReloadAndProfile( - recordChangeDescriptions: boolean, - recordTimeline: boolean, -): void { +export function onReloadAndProfile(recordChangeDescriptions: boolean): void { sessionStorageSetItem(SESSION_STORAGE_RELOAD_AND_PROFILE_KEY, 'true'); sessionStorageSetItem( SESSION_STORAGE_RECORD_CHANGE_DESCRIPTIONS_KEY, recordChangeDescriptions ? 'true' : 'false', ); - sessionStorageSetItem( - SESSION_STORAGE_RECORD_TIMELINE_KEY, - recordTimeline ? 'true' : 'false', - ); } export function onReloadAndProfileFlagsReset(): void { sessionStorageRemoveItem(SESSION_STORAGE_RELOAD_AND_PROFILE_KEY); sessionStorageRemoveItem(SESSION_STORAGE_RECORD_CHANGE_DESCRIPTIONS_KEY); - sessionStorageRemoveItem(SESSION_STORAGE_RECORD_TIMELINE_KEY); }
react
facebook
JavaScript
JavaScript
232,878
47,794
The library for web and native user interfaces.
facebook_react
BUG_FIX
fix stated in the commit message: record timeline data only when the host supports it
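The commit message above makes two performance points: skip timeline bookkeeping entirely on hosts that cannot use it, and avoid eagerly building expensive component stacks for every event. A generic Python sketch of that pattern, gating recording behind a capability flag and deferring plus caching the expensive derivation; the class and names are illustrative, not React DevTools' actual backend:

```python
class Profiler:
    def __init__(self, supports_timeline: bool) -> None:
        self.supports_timeline = supports_timeline
        self.events: list[dict] = []
        self._stack_cache: dict[int, str] = {}

    def record(self, event: dict) -> None:
        # Skip all timeline bookkeeping on hosts that cannot use it,
        # instead of paying to record data nobody will ever read.
        if not self.supports_timeline:
            return
        # Store only cheap data; do not build an expensive component
        # stack for every state-update event at record time.
        self.events.append(event)

    def stack_for(self, node_id: int) -> str:
        # Expensive derivation, computed lazily when the frontend asks
        # and cached so repeated inspection stays cheap.
        if node_id not in self._stack_cache:
            self._stack_cache[node_id] = f"<stack for node {node_id}>"
        return self._stack_cache[node_id]

p = Profiler(supports_timeline=False)
p.record({"type": "schedule-state-update"})
assert p.events == []  # nothing recorded on an unsupported host
```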
330fdd2b045343bd07620876cf3db553d46ded6c
null
Brian Anderson
Fix a typo in test expr-alt-generic-box1.rs
false
1
1
0
--- expr-alt-generic-box1.rs @@ -14,7 +14,7 @@ fn test_generic[T](@T expected, &compare[T] eq) { fn test_box() { fn compare_box(@bool b1, @bool b2) -> bool { - ret *b1 == b2; + ret *b1 == *b2; } auto eq = bind compare_box(_, _); test_generic[bool](@true, eq);
rust-lang_rust.json
null
null
null
null
null
null
rust-lang_rust.json
BUG_FIX
obvious: adds the missing dereference in the box comparison
c3d944a367c0d9e4e125c7006e52f352e75776dc
2025-03-26 19:01:48
Christian Eggers
ARM: 9444/1: add KEEP() keyword to ARM_VECTORS Without this, the vectors are removed if LD_DEAD_CODE_DATA_ELIMINATION is enabled. At startup, the CPU (silently) hangs in the undefined instruction exception as soon as the first timer interrupt arrives. On my setup, the system also boots fine without the 2nd and 3rd KEEP() statements, so I cannot tell whether these are actually required. [nathan: Use OVERLAY_KEEP() to avoid breaking old ld.lld versions] Cc: [email protected] Fixes: ed0f94102251 ("ARM: 9404/1: arm32: enable HAVE_LD_DEAD_CODE_DATA_ELIMINATION") Signed-off-by: Christian Eggers <[email protected]> Reviewed-by: Linus Walleij <[email protected]> Signed-off-by: Nathan Chancellor <[email protected]> Signed-off-by: Russell King (Oracle) <[email protected]>
false
3
3
6
--- arch/arm/include/asm/vmlinux.lds.h @@ -131,13 +131,13 @@ __vectors_lma = .; \ OVERLAY 0xffff0000 : NOCROSSREFS AT(__vectors_lma) { \ .vectors { \ - OVERLAY_KEEP(*(.vectors)) \ + *(.vectors) \ } \ .vectors.bhb.loop8 { \ - OVERLAY_KEEP(*(.vectors.bhb.loop8)) \ + *(.vectors.bhb.loop8) \ } \ .vectors.bhb.bpiall { \ - OVERLAY_KEEP(*(.vectors.bhb.bpiall)) \ + *(.vectors.bhb.bpiall) \ } \ } \ ARM_LMA(__vectors, .vectors); \
linux
torvalds
C
C
189,022
55,340
Linux kernel source tree
torvalds_linux
BUG_FIX
fixes a startup hang by keeping the vector sections during linker dead-code elimination
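The commit message describes the linker's dead-code elimination: any section not reachable from a known root is discarded unless the script pins it with KEEP(), and the ARM exception vectors are entered by the CPU rather than referenced by code, so the garbage collector cannot see them. A toy Python model of that mark-and-sweep over sections; the section graph is invented for illustration and the real mechanism lives in the linker:

```python
# Section graph: name -> names of sections it references (invented).
sections = {
    ".text.start": [".text.timer_irq"],
    ".text.timer_irq": [],
    ".vectors": [],  # entered by the CPU, never referenced by code
}
roots = {".text.start"}  # entry points the linker knows about

def gc_sections(sections, roots, keep):
    # KEEP() effectively adds extra roots the sweep may not discard.
    live, todo = set(), list(roots | keep)
    while todo:
        s = todo.pop()
        if s in live:
            continue
        live.add(s)
        todo.extend(sections[s])
    return live

print(gc_sections(sections, roots, keep={".vectors"}))  # .vectors survives
print(gc_sections(sections, roots, keep=set()))         # .vectors is dropped
```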
c7ef85a92fcf283e95438c86210118c94ed47de7
2025-02-08 15:12:54
arkology
Use `FlowContainer` for `EditorNetworkProfiler` bar
false
17
9
26
--- modules/multiplayer/editor/editor_network_profiler.cpp @@ -35,7 +35,6 @@ #include "editor/gui/editor_run_bar.h" #include "editor/themes/editor_scale.h" #include "scene/gui/check_box.h" -#include "scene/gui/flow_container.h" void EditorNetworkProfiler::_bind_methods() { ADD_SIGNAL(MethodInfo("enable_profiling", PropertyInfo(Variant::BOOL, "enable"))); @@ -298,37 +297,30 @@ bool EditorNetworkProfiler::is_profiling() { } EditorNetworkProfiler::EditorNetworkProfiler() { - FlowContainer *container = memnew(FlowContainer); - container->add_theme_constant_override(SNAME("h_separation"), 8 * EDSCALE); - container->add_theme_constant_override(SNAME("v_separation"), 2 * EDSCALE); - add_child(container); + HBoxContainer *hb = memnew(HBoxContainer); + hb->add_theme_constant_override("separation", 8 * EDSCALE); + add_child(hb); activate = memnew(Button); activate->set_toggle_mode(true); activate->set_text(TTR("Start")); activate->set_disabled(true); activate->connect(SceneStringName(pressed), callable_mp(this, &EditorNetworkProfiler::_activate_pressed)); - container->add_child(activate); + hb->add_child(activate); clear_button = memnew(Button); clear_button->set_text(TTR("Clear")); clear_button->set_disabled(true); clear_button->connect(SceneStringName(pressed), callable_mp(this, &EditorNetworkProfiler::_clear_pressed)); - container->add_child(clear_button); + hb->add_child(clear_button); CheckBox *autostart_checkbox = memnew(CheckBox); autostart_checkbox->set_text(TTR("Autostart")); autostart_checkbox->set_pressed(EditorSettings::get_singleton()->get_project_metadata("debug_options", "autostart_network_profiler", false)); autostart_checkbox->connect(SceneStringName(toggled), callable_mp(this, &EditorNetworkProfiler::_autostart_toggled)); - container->add_child(autostart_checkbox); - - Control *c = memnew(Control); - c->set_h_size_flags(SIZE_EXPAND_FILL); - container->add_child(c); + hb->add_child(autostart_checkbox); - HBoxContainer *hb = memnew(HBoxContainer); - hb->add_theme_constant_override(SNAME("separation"), 8 * EDSCALE); - container->add_child(hb); + hb->add_spacer(); Label *lb = memnew(Label); // TRANSLATORS: This is the label for the network profiler's incoming bandwidth. @@ -367,7 +359,7 @@ EditorNetworkProfiler::EditorNetworkProfiler() { // RPC counters_display = memnew(Tree); - counters_display->set_custom_minimum_size(Size2(280, 0) * EDSCALE); + counters_display->set_custom_minimum_size(Size2(320, 0) * EDSCALE); counters_display->set_v_size_flags(SIZE_EXPAND_FILL); counters_display->set_h_size_flags(SIZE_EXPAND_FILL); counters_display->set_hide_folding(true); @@ -390,7 +382,7 @@ EditorNetworkProfiler::EditorNetworkProfiler() { // Replication replication_display = memnew(Tree); - replication_display->set_custom_minimum_size(Size2(280, 0) * EDSCALE); + replication_display->set_custom_minimum_size(Size2(320, 0) * EDSCALE); replication_display->set_v_size_flags(SIZE_EXPAND_FILL); replication_display->set_h_size_flags(SIZE_EXPAND_FILL); replication_display->set_hide_folding(true);
godot
godotengine
C++
C++
94,776
21,828
Godot Engine – Multi-platform 2D and 3D game engine
godotengine_godot
CODE_IMPROVEMENT
replaces the previous HBoxContainer with a FlowContainer for the profiler bar
c0132a3f4c6043345840c85a25b3cc3829d9f885
2024-06-23 23:06:45
Aditya Vishwakarma
Fix DNS routing hyperlinks (#872)
false
2
2
4
--- README.md @@ -601,8 +601,8 @@ Services such as [CloudFlare](https://www.cloudflare.com/dns/) and [Route 53](ht * Prevent traffic from going to servers under maintenance * Balance between varying cluster sizes * A/B testing -* [Latency-based](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy-latency.html) -* [Geolocation-based](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy-geo.html) +* [Latency-based](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html#routing-policy-latency) +* [Geolocation-based](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html#routing-policy-geo) ### Disadvantage(s): DNS
system-design-primer
donnemartin
Python
Python
290,909
48,355
Learn how to design large-scale systems. Prep for the system design interview. Includes Anki flashcards.
donnemartin_system-design-primer
DOC_CHANGE
changes in readme
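The README lines touched above belong to a list of DNS routing policies such as latency-based and geolocation-based routing. A small sketch of what a latency-based answer reduces to, returning the endpoint of the region that currently looks fastest; the latencies and addresses are hypothetical:

```python
# Measured client-to-region latencies in milliseconds (hypothetical).
latency_ms = {"us-east-1": 82.0, "eu-west-1": 24.0, "ap-south-1": 190.0}

# Record data per region (TEST-NET addresses, purely illustrative).
endpoints = {
    "us-east-1": "203.0.113.10",
    "eu-west-1": "203.0.113.20",
    "ap-south-1": "203.0.113.30",
}

def latency_based_answer(latency_ms: dict[str, float],
                         endpoints: dict[str, str]) -> str:
    # Answer the query with the record of the fastest-looking region.
    best_region = min(latency_ms, key=latency_ms.get)
    return endpoints[best_region]

print(latency_based_answer(latency_ms, endpoints))  # 203.0.113.20
```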
90543e90a025d84b5bdcb1740a556ca26b071d0a
2025-03-21 05:57:28
Sam Larsen
Fix broken dynamo_timed test due to python_version field (#149659) Pull Request resolved: https://github.com/pytorch/pytorch/pull/149659 Approved by: https://github.com/ppanchalia
false
3
0
3
--- test/dynamo/test_utils.py @@ -273,7 +273,6 @@ class TestDynamoTimed(TestCase): e.inductor_config = None e.cuda_version = None e.triton_version = None - e.python_version = None # First event is for the forward. Formatting makes reading diffs # much easier. @@ -339,7 +338,6 @@ class TestDynamoTimed(TestCase): 'num_triton_bundles': None, 'post_grad_pass_time_us': 0, 'pre_grad_pass_time_us': 0, - 'python_version': None, 'recompile_reason': None, 'remote_cache_time_saved_s': None, 'remote_cache_version': None, @@ -426,7 +424,6 @@ class TestDynamoTimed(TestCase): 'num_triton_bundles': None, 'post_grad_pass_time_us': 0, 'pre_grad_pass_time_us': None, - 'python_version': None, 'recompile_reason': None, 'remote_cache_time_saved_s': None, 'remote_cache_version': None,
pytorch
null
python
Python
null
null
Tensors and Dynamic neural networks in Python with strong GPU acceleration
_pytorch
BUG_FIX
Obvious
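The test fix above adjusts a snapshot-style comparison after a volatile field changed. The general technique, nulling out environment-dependent fields before comparing an event to a golden snapshot, can be sketched like this (illustrative, not PyTorch's actual harness):

```python
VOLATILE = {"cuda_version", "triton_version", "inductor_config"}

def normalized(event: dict) -> dict:
    # Replace environment-dependent fields with None so the comparison
    # only exercises the stable parts of the logged event.
    return {k: (None if k in VOLATILE else v) for k, v in event.items()}

golden = {"name": "forward", "cuda_version": None,
          "triton_version": None, "inductor_config": None}
event = {"name": "forward", "cuda_version": "12.4",
         "triton_version": "3.0", "inductor_config": "{...}"}

assert normalized(event) == golden
```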
ca88316f6109727f6ef1d1d1e350ebcb3f89e489
2023-08-31 19:27:03
Ben Pasquariello
Update README.md
false
1
1
2
--- README.md @@ -85,7 +85,7 @@ Complimentary 2-hour Simulink tutorials for <td><ul> Check out our Cheat Sheet Repository to help you learn the following topics -- [MATLAB Basic Functions](https://github.com/mathworks/awesome-matlab-students/blob/main/CheatSheets/matlab-basic-functions-reference.pdf) +- [MATLAB Basic Fucntions](https://github.com/mathworks/awesome-matlab-students/blob/main/CheatSheets/matlab-basic-functions-reference.pdf) - [Visualization](https://github.com/mathworks/awesome-matlab-students/blob/main/CheatSheets/MATLAB_Visualization_Reference_EN.pdf) - [Importing data](https://github.com/mathworks/awesome-matlab-students/blob/main/CheatSheets/importing-exporting-data-cheat-sheet.pdf) - [Deep Learning](https://github.com/mathworks/awesome-matlab-students/blob/main/CheatSheets/deep-learning-with-matlab-quick-start-guide.pdf)
awesome-matlab-students
mathworks
MATLAB
MATLAB
393
42
An awesome list of helpful resources for students learning MATLAB & Simulink. List includes tips & tricks, tutorials, videos, cheat sheets, and opportunities to learn MATLAB & Simulink.
mathworks_awesome-matlab-students
DOC_CHANGE
typo fix in the README
e5becd01c848fedc1a10ea3a7afb476385674b02
2025-02-27 12:34:07
Monokaix
correct ResyncPeriod comments Signed-off-by: Monokaix <[email protected]>
false
2
4
6
--- cmd/kube-controller-manager/app/controllermanager.go @@ -169,7 +169,8 @@ controller, and serviceaccounts controller.`, } // ResyncPeriod returns a function which generates a duration each time it is -// invoked; this is because that multiple controllers don't get into lock-step. +// invoked; this is so that multiple controllers don't get into lock-step and all +// hammer the apiserver with list requests simultaneously. func ResyncPeriod(c *config.CompletedConfig) func() time.Duration { return func() time.Duration { factor := rand.Float64() + 1 --- staging/src/k8s.io/cloud-provider/app/controllermanager.go @@ -499,7 +499,8 @@ func CreateControllerContext(s *cloudcontrollerconfig.CompletedConfig, clientBui } // ResyncPeriod returns a function which generates a duration each time it is -// invoked; this is because that multiple controllers don't get into lock-step. +// invoked; this is so that multiple controllers don't get into lock-step and all +// hammer the apiserver with list requests simultaneously. func ResyncPeriod(c *cloudcontrollerconfig.CompletedConfig) func() time.Duration { return func() time.Duration { factor := rand.Float64() + 1
kubernetes
kubernetes
Go
Go
113,460
40,344
Production-Grade Container Scheduling and Management
kubernetes_kubernetes
CODE_IMPROVEMENT
only the comments changed
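The corrected comment explains why ResyncPeriod returns a factory of randomized durations: identical periods would put every controller in lock-step, hammering the apiserver simultaneously. The Go code in the diff scales a base period by a random factor in [1, 2); the same jitter re-expressed as a Python sketch:

```python
import random

def resync_period(base_seconds: float):
    """Factory producing jittered resync periods in [base, 2*base).

    Each controller draws its own duration, so their relists spread
    out instead of hitting the apiserver at the same instant.
    """
    def next_period() -> float:
        factor = random.random() + 1.0  # uniform in [1.0, 2.0)
        return base_seconds * factor
    return next_period

period = resync_period(12 * 60 * 60)  # e.g. a 12-hour base resync
print(period(), period())             # two different jittered durations
```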
6333e3157e936f155acb5ff223b38fbdaf989b57
2024-03-28 22:23:17
Seraphima Zykova
[FancyZones]Use RawInput to detect Shift key, fix locking out Shift key (#32116) * init RawInputDevice * static * handle input * replace keyboard hooks (ctrl, shift) * keep ctrl hook * spellcheck
false
119
29
148
--- src/modules/fancyzones/FancyZonesLib/DraggingState.cpp @@ -8,6 +8,8 @@ DraggingState::DraggingState(const std::function<void()>& keyUpdateCallback) : m_secondaryMouseState(false), m_middleMouseState(false), m_mouseHook(std::bind(&DraggingState::OnSecondaryMouseDown, this), std::bind(&DraggingState::OnMiddleMouseDown, this)), + m_leftShiftKeyState(keyUpdateCallback), + m_rightShiftKeyState(keyUpdateCallback), m_ctrlKeyState(keyUpdateCallback), m_keyUpdateCallback(keyUpdateCallback) { @@ -20,30 +22,49 @@ void DraggingState::Enable() m_mouseHook.enable(); } + m_leftShiftKeyState.enable(); + m_rightShiftKeyState.enable(); m_ctrlKeyState.enable(); } void DraggingState::Disable() { + const bool leftShiftPressed = m_leftShiftKeyState.state(); + const bool rightShiftPressed = m_rightShiftKeyState.state(); + + if (FancyZonesSettings::settings().shiftDrag) + { + if (leftShiftPressed) + { + FancyZonesUtils::SwallowKey(VK_LSHIFT); + } + + if (rightShiftPressed) + { + FancyZonesUtils::SwallowKey(VK_RSHIFT); + } + } + m_dragging = false; m_secondaryMouseState = false; m_middleMouseState = false; - m_shift = false; m_mouseHook.disable(); + m_leftShiftKeyState.disable(); + m_rightShiftKeyState.disable(); m_ctrlKeyState.disable(); } void DraggingState::UpdateDraggingState() noexcept { - // This updates m_dragging depending on if the shift key is being held down + // This updates m_dragEnabled depending on if the shift key is being held down if (FancyZonesSettings::settings().shiftDrag) { - m_dragging = (m_shift ^ m_secondaryMouseState); + m_dragging = ((m_leftShiftKeyState.state() || m_rightShiftKeyState.state()) ^ m_secondaryMouseState); } else { - m_dragging = !(m_shift ^ m_secondaryMouseState); + m_dragging = !((m_leftShiftKeyState.state() || m_rightShiftKeyState.state()) ^ m_secondaryMouseState); } } @@ -75,10 +96,4 @@ bool DraggingState::IsDragging() const noexcept bool DraggingState::IsSelectManyZonesState() const noexcept { return m_ctrlKeyState.state() || m_middleMouseState; -} - -void DraggingState::SetShiftState(bool value) noexcept -{ - m_shift = value; - m_keyUpdateCallback(); -} +} \ No newline at end of file --- src/modules/fancyzones/FancyZonesLib/DraggingState.h @@ -16,8 +16,6 @@ public: bool IsDragging() const noexcept; bool IsSelectManyZonesState() const noexcept; - void SetShiftState(bool value) noexcept; - private: void OnSecondaryMouseDown(); void OnMiddleMouseDown(); @@ -25,10 +23,9 @@ private: std::atomic<bool> m_secondaryMouseState; std::atomic<bool> m_middleMouseState; MouseButtonsHook m_mouseHook; + KeyState<VK_LSHIFT> m_leftShiftKeyState; + KeyState<VK_RSHIFT> m_rightShiftKeyState; KeyState<VK_LCONTROL, VK_RCONTROL> m_ctrlKeyState; - - bool m_shift{}; - std::function<void()> m_keyUpdateCallback; bool m_dragging{}; // True if we should be showing zone hints while dragging --- src/modules/fancyzones/FancyZonesLib/FancyZones.cpp @@ -21,7 +21,6 @@ #include <FancyZonesLib/FancyZonesWindowProcessing.h> #include <FancyZonesLib/FancyZonesWindowProperties.h> #include <FancyZonesLib/FancyZonesWinHookEventIDs.h> -#include <FancyZonesLib/KeyboardInput.h> #include <FancyZonesLib/MonitorUtils.h> #include <FancyZonesLib/on_thread_executor.h> #include <FancyZonesLib/Settings.h> @@ -144,7 +143,6 @@ public: void ToggleEditor() noexcept; LRESULT WndProc(HWND, UINT, WPARAM, LPARAM) noexcept; - void OnKeyboardInput(WPARAM flags, HRAWINPUT hInput) noexcept; void OnDisplayChange(DisplayChangeType changeType) noexcept; bool AddWorkArea(HMONITOR monitor, const FancyZonesDataTypes::WorkAreaId& id, 
const FancyZonesUtils::Rect& rect) noexcept; @@ -222,13 +220,7 @@ FancyZones::Run() noexcept m_window = CreateWindowExW(WS_EX_TOOLWINDOW, NonLocalizable::ToolWindowClassName, L"", WS_POPUP, 0, 0, 0, 0, nullptr, nullptr, m_hinstance, this); if (!m_window) { - Logger::critical(L"Failed to create FancyZones window"); - return; - } - - if (!KeyboardInput::Initialize(m_window)) - { - Logger::critical(L"Failed to register raw input device"); + Logger::error(L"Failed to create FancyZones window"); return; } @@ -588,12 +580,6 @@ LRESULT FancyZones::WndProc(HWND window, UINT message, WPARAM wparam, LPARAM lpa } break; - case WM_INPUT: - { - OnKeyboardInput(wparam, reinterpret_cast<HRAWINPUT>(lparam)); - } - break; - case WM_SETTINGCHANGE: { if (wparam == SPI_SETWORKAREA) @@ -731,26 +717,6 @@ LRESULT FancyZones::WndProc(HWND window, UINT message, WPARAM wparam, LPARAM lpa return 0; } -void FancyZones::OnKeyboardInput(WPARAM /*flags*/, HRAWINPUT hInput) noexcept -{ - auto input = KeyboardInput::OnKeyboardInput(hInput); - if (!input.has_value()) - { - return; - } - - switch (input.value().vkKey) - { - case VK_SHIFT: - { - m_draggingState.SetShiftState(input.value().pressed); - } - break; - default: - break; - } -} - void FancyZones::OnDisplayChange(DisplayChangeType changeType) noexcept { Logger::info(L"Display changed, type: {}", DisplayChangeTypeName(changeType)); --- src/modules/fancyzones/FancyZonesLib/FancyZonesLib.vcxproj @@ -53,7 +53,6 @@ <ClInclude Include="FancyZonesData.h" /> <ClInclude Include="GuidUtils.h" /> <ClInclude Include="JsonHelpers.h" /> - <ClInclude Include="KeyboardInput.h" /> <ClInclude Include="KeyState.h" /> <ClInclude Include="FancyZonesData\LayoutHotkeys.h" /> <ClInclude Include="Layout.h" /> @@ -115,7 +114,6 @@ <ClCompile Include="FancyZonesData\LayoutHotkeys.cpp"> <PrecompiledHeaderFile>../pch.h</PrecompiledHeaderFile> </ClCompile> - <ClCompile Include="KeyboardInput.cpp" /> <ClCompile Include="Layout.cpp" /> <ClCompile Include="LayoutConfigurator.cpp" /> <ClCompile Include="LayoutAssignedWindows.cpp" /> --- src/modules/fancyzones/FancyZonesLib/FancyZonesLib.vcxproj.filters @@ -168,9 +168,6 @@ <ClInclude Include="FancyZonesData\LastUsedVirtualDesktop.h"> <Filter>Header Files\FancyZonesData</Filter> </ClInclude> - <ClInclude Include="KeyboardInput.h"> - <Filter>Header Files</Filter> - </ClInclude> </ItemGroup> <ItemGroup> <ClCompile Include="pch.cpp"> @@ -281,9 +278,6 @@ <ClCompile Include="FancyZonesWindowProcessing.cpp"> <Filter>Source Files</Filter> </ClCompile> - <ClCompile Include="KeyboardInput.cpp"> - <Filter>Source Files</Filter> - </ClCompile> </ItemGroup> <ItemGroup> <None Include="packages.config" /> --- src/modules/fancyzones/FancyZonesLib/KeyboardInput.cpp @@ -1,43 +0,0 @@ -#include "pch.h" -#include "KeyboardInput.h" - -#include <hidUsage.h> - -#include <common/logger/logger.h> -#include <common/utils/winapi_error.h> - -bool KeyboardInput::Initialize(HWND window) -{ - RAWINPUTDEVICE inputDevice{}; - inputDevice.usUsagePage = HID_USAGE_PAGE_GENERIC; - inputDevice.usUsage = HID_USAGE_GENERIC_KEYBOARD; - inputDevice.dwFlags = RIDEV_INPUTSINK; - inputDevice.hwndTarget = window; - - bool res = RegisterRawInputDevices(&inputDevice, 1, sizeof(inputDevice)); - if (!res) - { - Logger::error(L"RegisterRawInputDevices error: {}", get_last_error_or_default(GetLastError())); - } - - return res; -} - -std::optional<KeyboardInput::Key> KeyboardInput::OnKeyboardInput(HRAWINPUT hInput) -{ - RAWINPUT input; - UINT size = sizeof(input); - auto result = GetRawInputData(hInput, 
RID_INPUT, &input, &size, sizeof(RAWINPUTHEADER)); - if (result < sizeof(RAWINPUTHEADER)) - { - return std::nullopt; - } - - if (input.header.dwType == RIM_TYPEKEYBOARD) - { - bool pressed = (input.data.keyboard.Flags & RI_KEY_BREAK) == 0; - return KeyboardInput::Key{ input.data.keyboard.VKey, pressed }; - } - - return std::nullopt; -} \ No newline at end of file --- src/modules/fancyzones/FancyZonesLib/KeyboardInput.h @@ -1,17 +0,0 @@ -#pragma once - -class KeyboardInput -{ -public: - struct Key - { - USHORT vkKey{}; - bool pressed{}; - }; - - KeyboardInput() = default; - ~KeyboardInput() = default; - - static bool Initialize(HWND window); - static std::optional<Key> OnKeyboardInput(HRAWINPUT hInput); -};
powertoys
microsoft
C#
C#
115,301
6,789
Windows system utilities to maximize productivity
microsoft_powertoys
BUG_FIX
fixes the Shift key being locked out after zone dragging
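Both sides of the diff compute the same predicate for whether dragging should show zones: an XOR of the Shift state and the secondary mouse button, inverted when the shiftDrag setting is off. That truth table is easy to get wrong, so here it is as a Python sketch with the cases spelled out (illustrative, not the PowerToys source):

```python
def is_dragging(shift_drag_setting: bool, shift: bool,
                secondary_mouse: bool) -> bool:
    # With shiftDrag enabled, either Shift or the secondary button
    # activates zones (both together cancel out); with it disabled,
    # the same XOR is inverted.
    toggled = shift ^ secondary_mouse
    return toggled if shift_drag_setting else not toggled

# shiftDrag on: holding Shift while dragging shows zones...
assert is_dragging(True, shift=True, secondary_mouse=False)
# ...and adding the secondary button on top hides them again.
assert not is_dragging(True, shift=True, secondary_mouse=True)
# shiftDrag off: zones show by default and Shift temporarily hides them.
assert is_dragging(False, shift=False, secondary_mouse=False)
assert not is_dragging(False, shift=True, secondary_mouse=False)
```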
d770c075e010bd1932cf02d176a65db0c1a67069
2025-03-13 14:43:10
Claudio Cambra
macosx: Remove now unused stateForNode method Signed-off-by: Claudio Cambra <[email protected]>
false
0
9
9
--- modules/gui/macosx/library/media-source/VLCLibraryMediaSourceViewNavigationStack.h @@ -45,6 +45,7 @@ typedef struct input_item_node_t input_item_node_t; - (void)clear; - (void)installHandlersOnMediaSource:(VLCMediaSource *)mediaSource; +- (nullable VLCLibraryMediaSourceViewNavigationState *)stateForNode:(input_item_node_t *)node; @end --- modules/gui/macosx/library/media-source/VLCLibraryMediaSourceViewNavigationStack.m @@ -250,4 +250,12 @@ - (void)clear [self updateDelegateNavigationButtons]; } +- (nullable VLCLibraryMediaSourceViewNavigationState *)stateForNode:(input_item_node_t *)node +{ + const NSUInteger index = [_navigationStates indexOfObjectPassingTest:^BOOL(VLCLibraryMediaSourceViewNavigationState * const _Nonnull obj, const NSUInteger idx, BOOL * const _Nonnull stop) { + return obj.currentNodeDisplayed.vlcInputItemNode == node; + }]; + return index == NSNotFound ? nil : _navigationStates[index]; +} + @end
vlc
null
C
C
null
null
Video player
_vlc
CODE_IMPROVEMENT
removes a now-unused method (dead code cleanup)
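The stateForNode method in the diff scans the navigation stack for the first state whose displayed node matches a given input node, returning nil when there is none. The same first-match lookup in Python, with stand-in types since the real code works on input_item_node_t pointers:

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class NavigationState:
    node: str   # stand-in for the input_item_node_t pointer
    title: str

def state_for_node(states: list[NavigationState],
                   node: str) -> Optional[NavigationState]:
    # First state currently displaying `node`, or None; the analogue
    # of indexOfObjectPassingTest returning NSNotFound.
    return next((s for s in states if s.node == node), None)

stack = [NavigationState("root", "Library"), NavigationState("n42", "Albums")]
assert state_for_node(stack, "n42").title == "Albums"
assert state_for_node(stack, "n99") is None
```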
00d25994d430a988bab84a7fa15050b7e8318f43
2025-01-19 19:01:03
github-actions[bot]
chore(main): release 2.44.1 (#700) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
false
7
0
7
--- CHANGELOG.md @@ -1,12 +1,5 @@ # Changelog -## [2.44.1](https://github.com/ellite/Wallos/compare/v2.44.0...v2.44.1) (2025-01-19) - - -### Bug Fixes - -* error setting date of last exchange rates update ([#699](https://github.com/ellite/Wallos/issues/699)) ([d2f68c4](https://github.com/ellite/Wallos/commit/d2f68c457e9b1328caf983ddc6e2827430855aa6)) - ## [2.44.0](https://github.com/ellite/Wallos/compare/v2.43.1...v2.44.0) (2025-01-12)
wallos
ellite
PHP
PHP
4,155
178
Wallos: Open-Source Personal Subscription Tracker
ellite_wallos
DOC_CHANGE
changes in md file
f8ab85074d99665d6d0a98c3c29124924190d0e0
2022-02-07 12:39:04
Hugo
Add Postali to Geocoding (#3047)
false
1
0
1
--- README.md @@ -983,7 +983,6 @@ API | Description | Auth | HTTPS | CORS | | [OpenStreetMap](http://wiki.openstreetmap.org/wiki/API) | Navigation, geolocation and geographical data | `OAuth` | No | Unknown | | [Pinball Map](https://pinballmap.com/api/v1/docs) | A crowdsourced map of public pinball machines | No | Yes | Yes | | [positionstack](https://positionstack.com/) | Forward & Reverse Batch Geocoding REST API | `apiKey` | Yes | Unknown | -| [Postali](https://postali.app/api) | Mexico Zip Codes API | No | Yes | Yes | | [PostcodeData.nl](http://api.postcodedata.nl/v1/postcode/?postcode=1211EP&streetnumber=60&ref=domeinnaam.nl&type=json) | Provide geolocation data based on postcode for Dutch addresses | No | No | Unknown | | [Postcodes.io](https://postcodes.io) | Postcode lookup & Geolocation for the UK | No | Yes | Yes | | [Queimadas INPE](https://queimadas.dgi.inpe.br/queimadas/dados-abertos) | Access to heat focus data (probable wildfire) | No | Yes | Unknown |
public-apis
public-apis
Python
Python
329,015
34,881
A collective list of free APIs
public-apis_public-apis
DOC_CHANGE
Obvious
956fbcf0b7a678e5485afee7f9c3e8d11ecba974
2022-04-13 07:05:45
macro
Update MemberProductCollectionController.java
false
0
2
2
--- mall-portal/src/main/java/com/macro/mall/portal/controller/MemberProductCollectionController.java @@ -11,6 +11,8 @@ import org.springframework.data.domain.Page; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; +import java.util.List; + /** * 会员商品收藏管理Controller * Created by macro on 2018/8/2.
mall
macrozheng
Java
Java
79,319
29,052
mall项目是一套电商系统,包括前台商城系统及后台管理系统,基于Spring Boot+MyBatis实现,采用Docker容器化部署。 前台商城系统包含首页门户、商品推荐、商品搜索、商品展示、购物车、订单流程、会员中心、客户服务、帮助中心等模块。 后台管理系统包含商品管理、订单管理、会员管理、促销管理、运营管理、内容管理、统计报表、财务管理、权限管理、设置等模块。
macrozheng_mall
CODE_IMPROVEMENT
removes an unused import
0f3e8e8ae713dd08183147f6aa373704c621171c
2023-03-03 20:49:57
Corentin de Maupeou
Update README.md (#17) Providing more information in README.md (edited) Co-authored-by: Michael de Hoog <[email protected]>
false
7
1
8
--- README.md @@ -26,18 +26,12 @@ This repository contains the relevant Docker builds to run your own node on the [![GitHub pull requests by-label](https://img.shields.io/github/issues-pr-raw/base-org/node)](https://github.com/base-org/node/pulls) [![GitHub Issues](https://img.shields.io/github/issues-raw/base-org/node.svg)](https://github.com/base-org/node/issues) -### Hardware requirements +### Requirements We recommend you this configuration to run a node: - at least 16 GB RAM - an SSD drive with at least 100 GB free -### Troubleshooting - -If you encounter problems with your node, please open a [GitHub issue](https://github.com/base-org/node/issues/new/choose) or reach out on our Discord: -- Once you've joined, in the Discord app go to `server menu` > `Linked Roles` > `connect GitHub` and connect your GitHub account so you can gain access to our special developer channels -- Report your issue in `#🛟|node-support` - ### Supported networks | Ethereum Network | Status |
node
base
Shell
Shell
68,555
2,658
Everything required to run your own Base node
base_node
DOC_CHANGE
changes in readme
2ced774b98fc56dba8ed1f12c1e76d16bb99e14f
2025-01-15 21:19:00
Aaron Jacobs
Fix handling of bare DATABRICKS_HOST environment variables. (#252) Some environments (e.g. Workbench) set `DATABRICKS_HOST` to the bare hostname and omit the HTTPS prefix; this breaks when we later expect it to be an actual URL. So this commit updates `databricks_workspace()` to handle both cases. Similar code exists in all of the other Databricks packages and SDKs that I am aware of, so this is nothing new; I just missed it. Unit tests are included. Signed-off-by: Aaron Jacobs <[email protected]>
false
25
1
26
--- NEWS.md @@ -7,9 +7,6 @@ * `chat_azure()` now has a `credentials` argument to make authentication against Azure more flexible (#248, @atheriel). -* `chat_databricks()` now handles the `DATABRICKS_HOST` environment variable - correctly whether it includes an HTTPS prefix or not (#252, @atheriel). - # ellmer 0.1.0 * New `chat_vllm()` to chat with models served by vLLM (#140). --- R/provider-databricks.R @@ -167,11 +167,7 @@ method(as_json, list(ProviderDatabricks, ContentText)) <- function(provider, x) } databricks_workspace <- function() { - host <- key_get("DATABRICKS_HOST") - if (!is.null(host) && !grepl("^https?://", host)) { - host <- paste0("https://", host) - } - host + key_get("DATABRICKS_HOST") } # Try various ways to get Databricks credentials. This implements a subset of --- tests/testthat/test-provider-databricks.R @@ -83,20 +83,3 @@ test_that("M2M authentication requests look correct", { }) expect_equal(databricks_token(), "token") }) - -test_that("workspace detection handles URLs with and without an https prefix", { - withr::with_envvar( - c(DATABRICKS_HOST = "example.cloud.databricks.com"), - expect_equal( - databricks_workspace(), - "https://example.cloud.databricks.com" - ) - ) - withr::with_envvar( - c(DATABRICKS_HOST = "https://example.cloud.databricks.com"), - expect_equal( - databricks_workspace(), - "https://example.cloud.databricks.com" - ) - ) -})
ellmer
tidyverse
R
R
401
55
Call LLM APIs from R
tidyverse_ellmer
BUG_FIX
obvious
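The commit message explains the fix: some environments set DATABRICKS_HOST to a bare hostname while others include the https:// prefix, so the value is normalized before use. The R change prefixes the scheme only when it is missing; the same normalization as a Python sketch:

```python
import os
import re

def databricks_workspace(env=os.environ) -> str:
    # Accept both "example.cloud.databricks.com" and the same value
    # with an explicit scheme; always return a full https URL.
    host = env["DATABRICKS_HOST"]
    if not re.match(r"^https?://", host):
        host = "https://" + host
    return host

assert (databricks_workspace({"DATABRICKS_HOST": "example.cloud.databricks.com"})
        == "https://example.cloud.databricks.com")
assert (databricks_workspace({"DATABRICKS_HOST": "https://example.cloud.databricks.com"})
        == "https://example.cloud.databricks.com")
```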
0f9fb63ec8b76178a1fd442b04b7797d5fe51a55
2022-10-25 20:58:26
Bogdan Bystritskiy
Add awesome-ios-books
false
2
0
2
--- README.md @@ -3368,8 +3368,6 @@ Most of these are paid services, some have free tiers. - [Awesome iOS Interview question list](https://github.com/dashvlas/awesome-ios-interview) - Guide for interviewers and interviewees. Review these iOS interview questions - and get some practical tips along the way. - [Top App Developers](https://github.com/app-developers/top) - A list of top iOS app developers. - [awesome-ios-developer](https://github.com/jphong1111/awesome-ios-developer) - Useful knowledges and stuff for ios developer. -- [awesome-ios-books](https://github.com/bystritskiy/awesome-ios-books) - A list of books for Apple developers. - ## Contributing and License - [See the guide](https://github.com/vsouza/awesome-ios/blob/master/.github/CONTRIBUTING.md)
awesome-ios
vsouza
Swift
Swift
48,363
6,877
A curated list of awesome iOS ecosystem, including Objective-C and Swift Projects
vsouza_awesome-ios
DOC_CHANGE
changes in readme
27096cdc05d89b61b2372b4e4a3018c87f240ab8
2025-01-25 15:57:29
Fabian-Lars
fix(cli): don't force native-tls feature on desktop (#12445)
false
36
44
80
--- .changes/cli-core-native-tls.md @@ -1,6 +0,0 @@ ---- -tauri-cli: patch:bug -tauri: patch:bug ---- - -Fixed an issue that caused Tauri's CLI to enable tauri's `native-tls` feature even though it wasn't needed. Moved `reqwest` to a mobile-only dependency in `tauri` and enabled its `rustls-tls` feature flag. --- Cargo.lock @@ -6937,7 +6937,7 @@ dependencies = [ "once_cell", "socket2", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] --- crates/tauri-cli/src/interface/rust.rs @@ -360,6 +360,35 @@ fn lookup<F: FnMut(FileType, PathBuf)>(dir: &Path, mut f: F) { } } +fn shared_options( + desktop_dev: bool, + mobile: bool, + args: &mut Vec<String>, + features: &mut Option<Vec<String>>, + app_settings: &RustAppSettings, +) { + if mobile { + args.push("--lib".into()); + features + .get_or_insert(Vec::new()) + .push("tauri/rustls-tls".into()); + } else { + if !desktop_dev { + args.push("--bins".into()); + } + let all_features = app_settings + .manifest + .lock() + .unwrap() + .all_enabled_features(if let Some(f) = features { f } else { &[] }); + if !all_features.contains(&"tauri/rustls-tls".into()) { + features + .get_or_insert(Vec::new()) + .push("tauri/native-tls".into()); + } + } +} + fn dev_options( mobile: bool, args: &mut Vec<String>, @@ -380,11 +409,7 @@ fn dev_options( } *args = dev_args; - if mobile { - args.push("--lib".into()); - } else { - args.push("--bins".into()); - } + shared_options(true, mobile, args, features, app_settings); if !args.contains(&"--no-default-features".into()) { let manifest_features = app_settings.manifest.lock().unwrap().features(); @@ -464,9 +489,7 @@ impl Rust { features .get_or_insert(Vec::new()) .push("tauri/custom-protocol".into()); - if mobile { - args.push("--lib".into()); - } + shared_options(false, mobile, args, features, &self.app_settings); } fn run_dev<F: Fn(Option<i32>, ExitReason) + Send + Sync + 'static>( --- crates/tauri/Cargo.toml @@ -68,6 +68,11 @@ serde_repr = "0.1" http = "1" dirs = "6" percent-encoding = "2" +reqwest = { version = "0.12", default-features = false, features = [ + "json", + "stream", +] } +bytes = { version = "1", features = ["serde"] } raw-window-handle = { version = "0.6", features = ["std"] } glob = "0.3" urlpattern = "0.3" @@ -84,16 +89,13 @@ specta = { version = "^2.0.0-rc.16", optional = true, default-features = false, "function", "derive", ] } - -# desktop -[target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd", target_os = "netbsd", target_os = "windows", target_os = "macos"))'.dependencies] +[target."cfg(any(target_os = \"linux\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"openbsd\", target_os = \"netbsd\", target_os = \"windows\", target_os = \"macos\"))".dependencies] muda = { version = "0.15", default-features = false, features = ["serde"] } tray-icon = { version = "0.19", default-features = false, features = [ "serde", ], optional = true } -# linux -[target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd", target_os = "netbsd"))'.dependencies] +[target."cfg(any(target_os = \"linux\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"openbsd\", target_os = \"netbsd\"))".dependencies] gtk = { version = "0.18", features = ["v3_24"] } webkit2gtk = { version = "=2.0.1", features = ["v2_40"] } @@ -118,23 +120,15 @@ objc2-app-kit = { version = "0.2", default-features = false, features = [ ] } window-vibrancy = "0.5" -# windows 
[target."cfg(windows)".dependencies] webview2-com = "0.34" window-vibrancy = "0.5" -windows = { version = "0.58", features = ["Win32_Foundation"] } -# mobile -[target.'cfg(any(target_os = "android", all(target_vendor = "apple", not(target_os = "macos"))))'.dependencies] -bytes = { version = "1", features = ["serde"] } -reqwest = { version = "0.12", default-features = false, features = [ - "json", - "stream", - "rustls-tls", -] } +[target."cfg(windows)".dependencies.windows] +version = "0.58" +features = ["Win32_Foundation"] -# android -[target.'cfg(target_os = "android")'.dependencies] +[target."cfg(target_os = \"android\")".dependencies] jni = "0.21" # UIKit, i.e. iOS/tvOS/watchOS/visionOS @@ -185,11 +179,9 @@ objc-exception = ["tauri-runtime-wry/objc-exception"] linux-libxdo = ["tray-icon/libxdo", "muda/libxdo"] isolation = ["tauri-utils/isolation", "tauri-macros/isolation", "uuid"] custom-protocol = ["tauri-macros/custom-protocol"] -# TODO: Remove these flags in v3 and/or enable them by default behind a mobile flag https://github.com/tauri-apps/tauri/issues/12384 -# For now those feature flags keep enabling reqwest features in case some users depend on that by accident. native-tls = ["reqwest/native-tls"] native-tls-vendored = ["reqwest/native-tls-vendored"] -rustls-tls = [] +rustls-tls = ["reqwest/rustls-tls"] devtools = ["tauri-runtime/devtools", "tauri-runtime-wry/devtools"] process-relaunch-dangerous-allow-symlink-macos = [ "tauri-utils/process-relaunch-dangerous-allow-symlink-macos", --- crates/tauri/src/protocol/tauri.rs @@ -115,7 +115,6 @@ fn get_response<R: Runtime>( ); let mut proxy_builder = reqwest::ClientBuilder::new() - .use_rustls_tls() .build() .unwrap() .request(request.method().clone(), &url);
tauri
tauri-apps
Rust
Rust
90,101
2,752
Build smaller, faster, and more secure desktop and mobile applications with a web frontend.
tauri-apps_tauri
BUG_FIX
bug fix: stops forcing the native-tls feature on desktop builds
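The shared_options helper in the diff chooses a TLS backend per target: mobile builds always get tauri/rustls-tls, while desktop builds fall back to tauri/native-tls only when no enabled feature already pulls in rustls. That decision logic, re-expressed as a Python sketch (illustrative, not the CLI's Rust code):

```python
def tls_features(mobile: bool, enabled_features: set[str]) -> list[str]:
    """Extra cargo features a build would get under the described rules.

    Mobile pins rustls; desktop only forces native-tls when rustls is
    not already enabled, so users who opted into rustls are not handed
    a second TLS stack.
    """
    if mobile:
        return ["tauri/rustls-tls"]
    if "tauri/rustls-tls" in enabled_features:
        return []
    return ["tauri/native-tls"]

assert tls_features(True, set()) == ["tauri/rustls-tls"]
assert tls_features(False, {"tauri/rustls-tls"}) == []
assert tls_features(False, set()) == ["tauri/native-tls"]
```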
cccc277dd933bf06f2186e894130f5b78c4d448e
null
Evan You
fix eslint
false
2
0
2
--- compile.spec.js @@ -25,6 +25,7 @@ describe('compile class', () => { it('should compile data bindings with children', () => { const { render, staticRenderFns, errors } = compile(`<foo :a="b"><text>Hello</text></foo>`) expect(render).toEqual(`with(this){return _h('foo',{attrs:{"a":b}},[_h('text',["Hello"])])}`) + expect(staticRenderFns).toEqual([]) expect(errors).toEqual([]) }) @@ -38,6 +39,7 @@ describe('compile class', () => { </refresh> `) expect(render).toEqual(`with(this){return _h('refresh',{staticClass:["refresh"],staticStyle:{flexDirection:"row"},attrs:{"display":displayRefresh},on:{"refresh":handleRefresh}},[_h('loading-indicator'),_h('text',{staticStyle:{marginLeft:"36px",color:"#eee"}},["Load more..."])])}`) + expect(staticRenderFns).toEqual([]) expect(errors).toEqual([]) }) })
vuejs_vue.json
null
null
null
null
null
null
vuejs_vue.json
BUG_FIX
fix stated in the commit message