Mirror of https://github.com/pomerium/pomerium.git (synced 2025-04-29 02:16:28 +02:00)
replace xxhash with xxh3 (#5457)
* update config file paths hash
* update filemgr
* use xxh3 for hashutil.Hash
* update hashutil digest, fix trace buffer test
* update comments
* update namegen, go mod tidy
This commit is contained in:
parent 5e94b2f8f1
commit dc9a6bdb81
18 changed files with 76 additions and 66 deletions
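The commit swaps github.com/cespare/xxhash/v2 for github.com/zeebo/xxh3 everywhere a non-cryptographic 64-bit hash is computed. The two libraries expose near-identical APIs but produce different values, which is why every hash-derived fixture in the tests below was regenerated. A minimal sketch of the correspondence (not part of the commit; the input strings are made up):

```go
package main

import (
	"fmt"

	"github.com/cespare/xxhash/v2" // old dependency, now indirect only
	"github.com/zeebo/xxh3"        // new dependency
)

func main() {
	data := []byte("example input")

	// One-shot hashing: xxhash.Sum64 / Sum64String correspond to xxh3.Hash / HashString.
	fmt.Println(xxhash.Sum64(data), xxh3.Hash(data))
	fmt.Println(xxhash.Sum64String("s"), xxh3.HashString("s"))

	// Streaming hashing: both packages expose a hash.Hash64-style hasher via New().
	h := xxh3.New()
	_, _ = h.Write(data)
	fmt.Println(h.Sum64()) // differs from the xxhash digest of the same input
}
```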
@@ -76,6 +76,9 @@ issues:
     - text: "G112:"
       linters:
         - gosec
+    - text: "G115:"
+      linters:
+        - gosec
     - text: "G402: TLS MinVersion too low."
       linters:
         - gosec
@@ -7,9 +7,9 @@ import (
     "os"
     "sync"
 
-    "github.com/cespare/xxhash/v2"
     "github.com/google/uuid"
     "github.com/rs/zerolog"
+    "github.com/zeebo/xxh3"
 
     "github.com/pomerium/pomerium/internal/events"
     "github.com/pomerium/pomerium/internal/fileutil"

@@ -264,7 +264,7 @@ func (src *FileWatcherSource) onFileChange(ctx context.Context) {
 
 func getAllConfigFilePathsHash(cfg *Config) uint64 {
     // read all the config files and build a hash from their contents
-    h := xxhash.New()
+    h := xxh3.New()
     for _, f := range getAllConfigFilePaths(cfg) {
         _, _ = h.Write([]byte{0})
         f, err := os.Open(f)
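The watcher folds the contents of every referenced config file into one digest. Below is a self-contained sketch of that streaming pattern with xxh3; the error handling and paths are assumptions, not the exact code from the hunk above:

```go
package main

import (
	"fmt"
	"io"
	"os"

	"github.com/zeebo/xxh3"
)

// hashFiles mirrors the shape of getAllConfigFilePathsHash: write a zero-byte
// separator per file, then stream the file contents into the hasher.
func hashFiles(paths []string) uint64 {
	h := xxh3.New()
	for _, p := range paths {
		_, _ = h.Write([]byte{0})
		f, err := os.Open(p)
		if err != nil {
			continue // assumption: unreadable files simply contribute no contents
		}
		_, _ = io.Copy(h, f)
		f.Close()
	}
	return h.Sum64()
}

func main() {
	// Hypothetical path, for illustration only.
	fmt.Println(hashFiles([]string{"/etc/pomerium/config.yaml"}))
}
```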
@@ -36,7 +36,7 @@ func Test_BuildClusters(t *testing.T) {
 func Test_buildPolicyTransportSocket(t *testing.T) {
     ctx := context.Background()
     cacheDir, _ := os.UserCacheDir()
-    customCA := filepath.Join(cacheDir, "pomerium", "envoy", "files", "custom-ca-57394a4e5157303436544830.pem")
+    customCA := filepath.Join(cacheDir, "pomerium", "envoy", "files", "custom-ca-3133535332543131503345494c.pem")
 
     b := New("local-grpc", "local-http", "local-metrics", filemgr.NewManager(), nil)
     rootCABytes, _ := getCombinedCertificateAuthority(ctx, &config.Config{Options: &config.Options{}})

@@ -433,10 +433,10 @@ func Test_buildPolicyTransportSocket(t *testing.T) {
     },
     "tlsCertificates": [{
         "certificateChain":{
-            "filename": "`+filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-crt-32375a484d4f49594c4d374830.pem")+`"
+            "filename": "`+filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-crt-5a353247453159375849565a.pem")+`"
         },
         "privateKey": {
-            "filename": "`+filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-key-33393156483053584631414836.pem")+`"
+            "filename": "`+filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-key-3159554e32473758435257364b.pem")+`"
         }
     }],
     "validationContext": {
@@ -10,6 +10,8 @@ import (
 )
 
 func Test(t *testing.T) {
+    t.Parallel()
+
     dir := t.TempDir()
 
     t.Run("bytes", func(t *testing.T) {

@@ -17,7 +19,7 @@ func Test(t *testing.T) {
         ds := mgr.BytesDataSource("test.txt", []byte{1, 2, 3, 4, 5})
         assert.Equal(t, &envoy_config_core_v3.DataSource{
             Specifier: &envoy_config_core_v3.DataSource_Filename{
-                Filename: filepath.Join(dir, "test-32354837325a545944534a4537.txt"),
+                Filename: filepath.Join(dir, "test-31443434314d425355414b4539.txt"),
             },
         }, ds)
         mgr.ClearCache()

@@ -32,7 +34,7 @@ func Test(t *testing.T) {
         ds := mgr.FileDataSource(tmpFilePath)
         assert.Equal(t, &envoy_config_core_v3.DataSource{
             Specifier: &envoy_config_core_v3.DataSource_Filename{
-                Filename: filepath.Join(dir, "test-474136555958463735414951.txt"),
+                Filename: filepath.Join(dir, "test-3246454c394658475133414f35.txt"),
             },
         }, ds)
 

@@ -41,7 +43,7 @@ func Test(t *testing.T) {
         ds = mgr.FileDataSource(tmpFilePath)
         assert.Equal(t, &envoy_config_core_v3.DataSource{
             Specifier: &envoy_config_core_v3.DataSource_Filename{
-                Filename: filepath.Join(dir, "test-3331324c4a35574d5439444d4c.txt"),
+                Filename: filepath.Join(dir, "test-33343439385257475847375443.txt"),
             },
         }, ds)
 
@@ -4,14 +4,14 @@ import (
     "fmt"
     "path/filepath"
 
-    "github.com/cespare/xxhash/v2"
     "github.com/martinlindhe/base36"
+    "github.com/zeebo/xxh3"
 )
 
 // GetFileNameWithBytesHash constructs a filename using a base filename and a hash of
 // the data. For example: GetFileNameWithBytesHash("example.txt", []byte{...}) ==> "example-abcd1234.txt"
 func GetFileNameWithBytesHash(base string, data []byte) string {
-    h := xxhash.Sum64(data)
+    h := xxh3.Hash(data)
     he := base36.Encode(h)
     ext := filepath.Ext(base)
     return fmt.Sprintf("%s-%x%s", base[:len(base)-len(ext)], he, ext)
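GetFileNameWithBytesHash is almost entirely visible in the hunk above; here it is as a runnable sketch. Note that the base36-encoded digest is passed through %x, so the suffix in the generated name is the hex encoding of the base36 string, which is why the fixture filenames elsewhere in this diff are long hex runs:

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/martinlindhe/base36"
	"github.com/zeebo/xxh3"
)

// fileNameWithBytesHash follows the hunk above: hash the data with xxh3,
// base36-encode the 64-bit sum, and splice its hex form into the filename.
func fileNameWithBytesHash(base string, data []byte) string {
	h := xxh3.Hash(data)
	he := base36.Encode(h)
	ext := filepath.Ext(base)
	return fmt.Sprintf("%s-%x%s", base[:len(base)-len(ext)], he, ext)
}

func main() {
	fmt.Println(fileNameWithBytesHash("example.txt", []byte{1, 2, 3, 4, 5}))
	// Because the hash function changed, every generated name in the test
	// fixtures above changed as well.
}
```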
@@ -122,8 +122,8 @@ func TestBuildListeners(t *testing.T) {
 
 func Test_buildMetricsHTTPConnectionManagerFilter(t *testing.T) {
     cacheDir, _ := os.UserCacheDir()
-    certFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-crt-32375a484d4f49594c4d374830.pem")
-    keyFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-key-33393156483053584631414836.pem")
+    certFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-crt-5a353247453159375849565a.pem")
+    keyFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-key-3159554e32473758435257364b.pem")
 
     b := New("local-grpc", "local-http", "local-metrics", filemgr.NewManager(), nil)
     li, err := b.buildMetricsListener(&config.Config{
@@ -83,7 +83,7 @@ func TestBuilder_buildMainRouteConfiguration(t *testing.T) {
     ],
     "route": {
         "autoHostRewrite": true,
-        "cluster": "route-b8e37dd1f9d65ddd",
+        "cluster": "route-5fbd81d8f19363f4",
         "hashPolicy": [
             { "header": { "headerName": "x-pomerium-routing-key" }, "terminal": true },
             { "connectionProperties": { "sourceIp": true }, "terminal": true }

@@ -100,7 +100,7 @@ func TestBuilder_buildMainRouteConfiguration(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "13322630463485271517"
+            "route_id": "6898812972967355380"
         }
     }
 }

@@ -140,7 +140,7 @@ func TestBuilder_buildMainRouteConfiguration(t *testing.T) {
     ],
     "route": {
         "autoHostRewrite": true,
-        "cluster": "route-b8e37dd1f9d65ddd",
+        "cluster": "route-5fbd81d8f19363f4",
         "hashPolicy": [
             { "header": { "headerName": "x-pomerium-routing-key" }, "terminal": true },
             { "connectionProperties": { "sourceIp": true }, "terminal": true }

@@ -157,7 +157,7 @@ func TestBuilder_buildMainRouteConfiguration(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "13322630463485271517"
+            "route_id": "6898812972967355380"
         }
     }
 }
@@ -392,14 +392,14 @@ func Test_buildPolicyRoutes(t *testing.T) {
         },
     }
     routeIDs := []string{
-        1: "772697672458217856",
-        2: "6032229746964560472",
-        3: "13317665674438641304",
-        4: "9768293332770157550",
-        5: "13317665674438641304", // same as 3
-        6: "6032229746964560472", // same as 2
-        7: "6032229746964560472", // same as 2
-        8: "1591581179179639728",
+        1: "13553029590470792156",
+        2: "7129118097581932399",
+        3: "11039710722247768205",
+        4: "658592019741814826",
+        5: "11039710722247768205", // same as 3
+        6: "7129118097581932399", // same as 2
+        7: "7129118097581932399", // same as 2
+        8: "3463414089682043373",
     }
 
     b := &Builder{filemgr: filemgr.NewManager(), reproxy: reproxy.New()}
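The expected route IDs here, and the cluster names, header values, and file names elsewhere in the diff, change because they are derived from a hash of the route or policy configuration; switching the hash function regenerates all of them. A standalone illustration of that effect (the input string is invented and this is not the actual derivation pomerium uses):

```go
package main

import (
	"fmt"

	"github.com/cespare/xxhash/v2"
	"github.com/zeebo/xxh3"
)

// Illustration only: an identifier derived from a hash changes wholesale when
// the hash function changes, so the fixtures were regenerated rather than
// edited by hand.
func main() {
	key := "from.example.com|/some/path" // hypothetical routing key
	fmt.Printf("xxhash-derived id: %d\n", xxhash.Sum64String(key))
	fmt.Printf("xxh3-derived id:   %d\n", xxh3.HashString(key))
}
```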
@@ -1196,7 +1196,7 @@ func Test_buildPolicyRoutes(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "11959552038839924732"
+            "route_id": "11022856234610764131"
         }
     }
 }

@@ -1272,7 +1272,7 @@ func Test_buildPolicyRoutes(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "9444248534316924938"
+            "route_id": "9302002763161476568"
         }
     }
 }

@@ -1369,7 +1369,7 @@ func Test_buildPolicyRoutes(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "8231718688890004616"
+            "route_id": "12468817303959353203"
         }
     }
 }

@@ -1471,7 +1471,7 @@ func Test_buildPolicyRoutes(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "5652544858774142715"
+            "route_id": "1158488049891246013"
         }
     }
 }

@@ -1547,14 +1547,14 @@ func Test_buildPolicyRoutes(t *testing.T) {
     "appendAction": "OVERWRITE_IF_EXISTS_OR_ADD",
     "header": {
         "key": "x-pomerium-reproxy-policy",
-        "value": "5799631121007486501"
+        "value": "12114237825990386381"
     }
 },
 {
     "appendAction": "OVERWRITE_IF_EXISTS_OR_ADD",
     "header": {
         "key": "x-pomerium-reproxy-policy-hmac",
-        "value": "v4w8DAUFdw2qw7RJLUZYBHWndqBOdz5Me6A+1vbDQPY="
+        "value": "pe3ai+2H8rHB5zgHi8+ryY6VDcuZZ5pf9Rfkrw0NdBE="
     }
 }
 ],

@@ -1586,7 +1586,7 @@ func Test_buildPolicyRoutes(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "5799631121007486501"
+            "route_id": "12114237825990386381"
         }
     }
 }

@@ -1720,7 +1720,7 @@ func Test_buildPolicyRoutesRewrite(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "1410576726089372267"
+            "route_id": "5575146962731507525"
         }
     }
 }

@@ -1795,7 +1795,7 @@ func Test_buildPolicyRoutesRewrite(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "1410576726089372267"
+            "route_id": "5575146962731507525"
         }
     }
 }

@@ -1875,7 +1875,7 @@ func Test_buildPolicyRoutesRewrite(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "1410576726089372267"
+            "route_id": "5575146962731507525"
         }
     }
 }

@@ -1950,7 +1950,7 @@ func Test_buildPolicyRoutesRewrite(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "1410576726089372267"
+            "route_id": "5575146962731507525"
         }
     }
 }

@@ -2025,7 +2025,7 @@ func Test_buildPolicyRoutesRewrite(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "1410576726089372267"
+            "route_id": "5575146962731507525"
         }
     }
 }

@@ -2105,7 +2105,7 @@ func Test_buildPolicyRoutesRewrite(t *testing.T) {
     "checkSettings": {
         "contextExtensions": {
             "internal": "false",
-            "route_id": "1410576726089372267"
+            "route_id": "5575146962731507525"
         }
     }
 }
@@ -1,5 +1,5 @@
 {
-    "name": "metrics-ingress-18010634919562279975",
+    "name": "metrics-ingress-2557141950503822122",
     "perConnectionBufferLimitBytes": 32768,
     "address": {
         "socketAddress": {
@@ -85,7 +85,7 @@ func Test_buildDownstreamTLSContext(t *testing.T) {
     b := New("local-grpc", "local-http", "local-metrics", filemgr.NewManager(), nil)
 
     cacheDir, _ := os.UserCacheDir()
-    clientCAFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "client-ca-313754424855313435355a5348.pem")
+    clientCAFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "client-ca-4e4c564e5a36544a4a33385a.pem")
 
     t.Run("no-validation", func(t *testing.T) {
         downstreamTLSContext, err := b.buildDownstreamTLSContextMulti(context.Background(), &config.Config{Options: &config.Options{}}, nil)
@@ -675,7 +675,7 @@ func (p *Policy) Validate() error {
     return nil
 }
 
-// Checksum returns the xxhash hash for the policy.
+// Checksum returns the xxh3 hash for the policy.
 func (p *Policy) Checksum() uint64 {
     return hashutil.MustHash(p)
 }
go.mod (4 changed lines)
@@ -13,7 +13,6 @@ require (
     github.com/bits-and-blooms/bitset v1.20.0
     github.com/caddyserver/certmagic v0.21.4
     github.com/cenkalti/backoff/v4 v4.3.0
-    github.com/cespare/xxhash/v2 v2.3.0
     github.com/cloudflare/circl v1.5.0
     github.com/coreos/go-oidc/v3 v3.11.0
     github.com/docker/docker v27.4.1+incompatible

@@ -67,6 +66,7 @@ require (
     github.com/tniswong/go.rfcx v0.0.0-20181019234604-07783c52761f
     github.com/volatiletech/null/v9 v9.0.0
     github.com/yuin/gopher-lua v1.1.1
+    github.com/zeebo/xxh3 v1.0.2
     go.opencensus.io v0.24.0
     go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.57.0
     go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0

@@ -135,6 +135,7 @@ require (
     github.com/aws/smithy-go v1.22.1 // indirect
     github.com/beorn7/perks v1.0.1 // indirect
     github.com/caddyserver/zerossl v0.1.3 // indirect
+    github.com/cespare/xxhash/v2 v2.3.0 // indirect
     github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3 // indirect
     github.com/containerd/log v0.1.0 // indirect
     github.com/containerd/platforms v0.2.1 // indirect

@@ -229,7 +230,6 @@ require (
     github.com/yusufpapurcu/wmi v1.2.4 // indirect
     github.com/zeebo/assert v1.3.1 // indirect
     github.com/zeebo/blake3 v0.2.4 // indirect
-    github.com/zeebo/xxh3 v1.0.2 // indirect
     go.opentelemetry.io/auto/sdk v1.1.0 // indirect
     go.opentelemetry.io/contrib/detectors/gcp v1.31.0 // indirect
     go.opentelemetry.io/contrib/propagators/aws v1.32.0 // indirect
@@ -1,18 +1,16 @@
 // Package hashutil provides NON-CRYPTOGRAPHIC utility functions for hashing.
 //
-// http://cyan4973.github.io/xxHash/
-//
 //nolint:errcheck
 package hashutil
 
 import (
     "encoding/binary"
 
-    "github.com/cespare/xxhash/v2"
     "github.com/mitchellh/hashstructure/v2"
+    "github.com/zeebo/xxh3"
 )
 
-// MustHash returns the xxhash of an arbitrary value or struct. Returns 0
+// MustHash returns the xxh3 hash of an arbitrary value or struct. Returns 0
 // on error.
 // NOT SUITABLE FOR CRYTOGRAPHIC HASHING.
 func MustHash(v any) uint64 {

@@ -23,17 +21,17 @@ func MustHash(v any) uint64 {
     return hash
 }
 
-// Hash returns the xxhash of an arbitrary value or struct.
+// Hash returns the xxh3 hash of an arbitrary value or struct.
 // NOT SUITABLE FOR CRYTOGRAPHIC HASHING.
 func Hash(v any) (uint64, error) {
     opts := &hashstructure.HashOptions{
-        Hasher: xxhash.New(),
+        Hasher: xxh3.New(),
     }
     return hashstructure.Hash(v, hashstructure.FormatV2, opts)
 }
 
 type Digest struct {
-    xxhash.Digest
+    xxh3.Hasher
 }
 
 func NewDigest() *Digest {
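After this change, hashutil.Hash feeds hashstructure's FormatV2 walk into an xxh3 streaming hasher (xxh3.New() returns a hasher satisfying hash.Hash64), and MustHash wraps it. A self-contained sketch of the same pattern outside the package; the value being hashed is made up:

```go
package main

import (
	"fmt"

	"github.com/mitchellh/hashstructure/v2"
	"github.com/zeebo/xxh3"
)

// hashValue mirrors hashutil.Hash as shown in the hunk above: hashstructure
// walks the value and writes it into an xxh3 hasher instead of an xxhash one.
func hashValue(v any) (uint64, error) {
	opts := &hashstructure.HashOptions{
		Hasher: xxh3.New(), // *xxh3.Hasher implements hash.Hash64
	}
	return hashstructure.Hash(v, hashstructure.FormatV2, opts)
}

func main() {
	sum, err := hashValue(struct {
		Name  string
		Ports []int
	}{"example", []int{80, 443}})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%016x\n", sum)
}
```

This also explains the new expected values in the test table below: the same inputs now run through a different hash function.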
@@ -1,10 +1,11 @@
-// Package hashutil provides NON-CRYPTOGRAPHIC utility functions for hashing
-package hashutil
+package hashutil_test
 
 import (
     "testing"
 
     "github.com/stretchr/testify/assert"
+
+    "github.com/pomerium/pomerium/internal/hashutil"
 )
 
 func TestHash(t *testing.T) {

@@ -15,8 +16,8 @@ func TestHash(t *testing.T) {
         want uint64
         wantErr bool
     }{
-        {"string", "string", 6134271061086542852, false},
-        {"num", 7, 609900476111905877, false},
+        {"string", "string", 15613163272824911089, false},
+        {"num", 7, 9324454920402081455, false},
         {
             "compound struct",
             struct {

@@ -26,7 +27,7 @@ func TestHash(t *testing.T) {
                 []string{"Battletoads", "Mega Man 1", "Clash at Demonhead"},
                 12,
             },
-            1349584765528830812, false,
+            9585735524299267794, false,
         },
         {
             "compound struct with embedded func (errors!)",

@@ -40,10 +41,10 @@ func TestHash(t *testing.T) {
     }
     for _, tt := range tests {
         t.Run(tt.name, func(t *testing.T) {
-            if got := MustHash(tt.v); got != tt.want {
+            if got := hashutil.MustHash(tt.v); got != tt.want {
                 t.Errorf("MustHash() = %v, want %v", got, tt.want)
             }
-            got, err := Hash(tt.v)
+            got, err := hashutil.Hash(tt.v)
             if tt.wantErr {
                 assert.Error(t, err)
             } else {
@@ -7,11 +7,12 @@ import (
     "slices"
     "sync"
 
-    "github.com/pomerium/pomerium/internal/hashutil"
     commonv1 "go.opentelemetry.io/proto/otlp/common/v1"
     resourcev1 "go.opentelemetry.io/proto/otlp/resource/v1"
     tracev1 "go.opentelemetry.io/proto/otlp/trace/v1"
     "google.golang.org/protobuf/proto"
+
+    "github.com/pomerium/pomerium/internal/hashutil"
 )
 
 type ScopeBuffer struct {

@@ -31,6 +32,7 @@ func NewScopeBuffer(scope *ScopeInfo) *ScopeBuffer {
 
 type ResourceBuffer struct {
     resource *ResourceInfo
+    scopeIDs []string
     spansByScope map[string]*ScopeBuffer
 }
 

@@ -48,14 +50,15 @@ func (rb *ResourceBuffer) Insert(scope *ScopeInfo, span *tracev1.Span) {
     } else {
         spans = NewScopeBuffer(scope)
         rb.spansByScope[scope.ID()] = spans
+        rb.scopeIDs = append(rb.scopeIDs, scope.ID())
     }
     spans.Insert(span)
 }
 
 func (rb *ResourceBuffer) Flush() []*tracev1.ScopeSpans {
     out := make([]*tracev1.ScopeSpans, 0, len(rb.spansByScope))
-    for _, key := range slices.Sorted(maps.Keys(rb.spansByScope)) {
-        spans := rb.spansByScope[key]
+    for _, scopeID := range rb.scopeIDs {
+        spans := rb.spansByScope[scopeID]
         slices.SortStableFunc(spans.spans, func(a, b *tracev1.Span) int {
             return cmp.Compare(a.StartTimeUnixNano, b.StartTimeUnixNano)
         })

@@ -66,6 +69,7 @@ func (rb *ResourceBuffer) Flush() []*tracev1.ScopeSpans {
         }
         out = append(out, scopeSpans)
     }
+    rb.scopeIDs = nil
     clear(rb.spansByScope)
     return out
 }

@@ -73,11 +77,13 @@ func (rb *ResourceBuffer) Flush() []*tracev1.ScopeSpans {
 func (rb *ResourceBuffer) Merge(other *ResourceBuffer) {
     for scope, otherSpans := range other.spansByScope {
         if ourSpans, ok := rb.spansByScope[scope]; !ok {
+            rb.scopeIDs = append(rb.scopeIDs, scope)
            rb.spansByScope[scope] = otherSpans
         } else {
             ourSpans.Insert(otherSpans.spans...)
         }
     }
+    other.scopeIDs = nil
     clear(other.spansByScope)
 }
 
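The trace buffer now records scope IDs in a slice so Flush and Merge walk scopes in insertion order instead of sorted map-key order; scope IDs are hash-derived, so their sort order would otherwise have shifted with the new hash function. A simplified, self-contained sketch of that pattern with stand-in types (not the pomerium ones):

```go
package main

import "fmt"

// orderedBuffer keeps a map for lookup plus a slice that remembers the order
// in which keys were first inserted, mirroring ResourceBuffer's scopeIDs field.
type orderedBuffer struct {
	ids  []string
	data map[string][]string
}

func newOrderedBuffer() *orderedBuffer {
	return &orderedBuffer{data: map[string][]string{}}
}

func (b *orderedBuffer) insert(id, value string) {
	if _, ok := b.data[id]; !ok {
		b.ids = append(b.ids, id) // record the id the first time it appears
	}
	b.data[id] = append(b.data[id], value)
}

func (b *orderedBuffer) flush() [][]string {
	out := make([][]string, 0, len(b.data))
	for _, id := range b.ids { // insertion order, not sorted key order
		out = append(out, b.data[id])
	}
	b.ids = nil
	clear(b.data)
	return out
}

func main() {
	b := newOrderedBuffer()
	b.insert("scope-b", "span1")
	b.insert("scope-a", "span2")
	b.insert("scope-b", "span3")
	fmt.Println(b.flush()) // [[span1 span3] [span2]]
}
```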
@@ -10,7 +10,7 @@ import (
     "strconv"
     "strings"
 
-    "github.com/cespare/xxhash/v2"
+    "github.com/zeebo/xxh3"
 )
 
 func GenerateCertName(cert *x509.Certificate) *string {

@@ -242,7 +242,7 @@ func differentiateRoutes[T Route](subdomain string, routes []T) iter.Seq2[T, str
         b.WriteString(pathSuffix)
     }
 
-    sum := xxhash.Sum64String(b.String())
+    sum := xxh3.HashString(b.String())
     nameCounts[sum]++
     if c := nameCounts[sum]; c > 1 {
         b.WriteString(" (")
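The name generator builds up a display name, hashes it with xxh3.HashString, and appends a counter when the same hash has already been seen. A simplified sketch of that bookkeeping; the counter formatting beyond the opening parenthesis and the sample names are assumptions, not code from the repository:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"

	"github.com/zeebo/xxh3"
)

// differentiate appends " (n)" to names whose hash was already seen,
// echoing the nameCounts bookkeeping visible in the hunk above.
func differentiate(names []string) []string {
	nameCounts := map[uint64]int{}
	out := make([]string, 0, len(names))
	for _, name := range names {
		var b strings.Builder
		b.WriteString(name)
		sum := xxh3.HashString(b.String())
		nameCounts[sum]++
		if c := nameCounts[sum]; c > 1 {
			b.WriteString(" (")
			b.WriteString(strconv.Itoa(c))
			b.WriteString(")")
		}
		out = append(out, b.String())
	}
	return out
}

func main() {
	fmt.Println(differentiate([]string{"verify", "verify", "httpbin"}))
	// [verify verify (2) httpbin]
}
```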
@@ -40,7 +40,7 @@ func TestProxy_routesPortalJSON(t *testing.T) {
     assert.Equal(t, "application/json", w.Header().Get("Content-Type"))
     assert.JSONEq(t, `{"routes":[
         {
-            "id": "4e71df99c0317efb",
+            "id": "1013c6be524d7fbd",
             "name": "public",
             "from": "https://from.example.com",
             "type": "http",

@@ -20,7 +20,7 @@ func TestRouteFromConfigRoute(t *testing.T) {
 
     assert.Equal(t, []portal.Route{
         {
-            ID: "4e71df99c0317efb",
+            ID: "1013c6be524d7fbd",
             Name: "from",
             Type: "http",
             From: "https://from.example.com",

@@ -28,20 +28,20 @@ func TestRouteFromConfigRoute(t *testing.T) {
             LogoURL: "https://logo.example.com",
         },
         {
-            ID: "7c377f11cdb9700e",
+            ID: "15fa6bb41b1f0bd2",
             Name: "from-path",
             Type: "http",
             From: "https://from.example.com",
         },
         {
-            ID: "708e3cbd0bbe8547",
+            ID: "773f5c76f710b230",
             Name: "postgres",
             Type: "tcp",
             From: "tcp+https://postgres.example.com:5432",
             ConnectCommand: "pomerium-cli tcp postgres.example.com:5432",
         },
         {
-            ID: "2dd08d87486e051a",
+            ID: "74961d605a24b812",
             Name: "dns",
             Type: "udp",
             From: "udp+https://dns.example.com:53",