feature/databroker: user data and session refactor project (#926)

* databroker: add databroker, identity manager, update cache (#864)

* databroker: add databroker, identity manager, update cache

* fix cache tests

* directory service (#885)

* directory: add google and okta

* add onelogin

* add directory provider

* initialize before sync, update google provider, remove dead code

* add azure provider

* fix azure provider

* fix gitlab

* add gitlab test, fix azure test

* hook up okta

* remove dead code

* fix tests

* fix flaky test

* authorize: use databroker data for rego policy (#904)

* wip

* update authorize to use databroker data

* implement signed jwt

* wait for session and user to appear

* fix test

* remove log line

* only redirect when no session id exists

* prepare rego query as part of create (see the Rego sketch after this commit list)

* return on ctx done

* retry on disconnect for sync (see the backoff sketch after this commit list)

* move jwt signing

* use !=

* use parent ctx for wait

* remove session state, remove logs

* rename function

* add log message

* pre-allocate slice

* use errgroup

* return nil on eof for sync

* move check

* disable timeout on gRPC requests in envoy

* fix gitlab test

* use v4 backoff

* authenticate: databroker changes (#914)

* fix dashboard

* delete session on logout

* permanently delete sessions once they are marked as deleted

* remove permanent delete

* fix tests

* remove groups and refresh test

* databroker: remove dead code, rename cache url, move dashboard (#925)

* remove cache service

* remove kv

* remove refresh docs

* remove obsolete cache docs

* add databroker url option

* cache: use memberlist to detect multiple instances

* add databroker service url

* wip

* remove groups and refresh test

* fix redirect, signout

* remove databroker client from proxy

* remove unused method

* remove user dashboard test

* handle missing session ids

* session: reject sessions with no id

* sessions: invalidate old sessions via databroker server version (#930)

* session: add a version field tied to the databroker server version that can be used to invalidate sessions (see the session-version sketch after this commit list)

* fix tests

* add log

* authenticate: create user record immediately, call "get" directly in authorize (#931)
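
The #904 work above prepares the Rego query once when the authorize state is created ("prepare rego query as part of create") rather than re-parsing the policy on every request, and evaluates it against databroker-backed data. A minimal sketch of that pattern using OPA's Go API; the query string, the tiny policy module, and the input are illustrative stand-ins, not pomerium's actual authorization policy.

package example

import (
    "context"
    "fmt"

    "github.com/open-policy-agent/opa/rego"
)

// Illustrative policy module; pomerium's real policy is far more involved.
const module = `
package pomerium.authz

default allow = false

allow {
    input.session.user_email != ""
}
`

func evaluate() error {
    ctx := context.Background()

    // Prepare the query once, at construction time, so each authorize call
    // only pays for input binding and evaluation.
    pq, err := rego.New(
        rego.Query("result = data.pomerium.authz"),
        rego.Module("authz.rego", module),
    ).PrepareForEval(ctx)
    if err != nil {
        return err
    }

    // Evaluate the prepared query against per-request input; in pomerium this
    // is where the databroker-sourced session and user data would go.
    rs, err := pq.Eval(ctx, rego.EvalInput(map[string]interface{}{
        "session": map[string]interface{}{"user_email": "user@example.com"},
    }))
    if err != nil {
        return err
    }
    fmt.Println(rs[0].Bindings["result"])
    return nil
}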
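
The "retry on disconnect for sync", "use v4 backoff", and "return nil on eof for sync" commits keep the databroker sync stream alive by retrying with exponential backoff when the connection drops and treating EOF as a clean shutdown. A rough sketch of that loop, assuming cenkalti/backoff/v4 and a placeholder syncOnce function standing in for one run of the gRPC sync stream.

package example

import (
    "context"
    "errors"
    "io"

    backoff "github.com/cenkalti/backoff/v4"
)

// syncOnce stands in for a single run of the databroker sync stream; it
// returns io.EOF when the server closes the stream normally.
func syncOnce(ctx context.Context) error {
    <-ctx.Done()
    return io.EOF
}

// runSync re-establishes the sync stream with exponential backoff whenever it
// drops, and stops when the stream ends cleanly or the context is cancelled.
func runSync(ctx context.Context) error {
    bo := backoff.NewExponentialBackOff()
    bo.MaxElapsedTime = 0 // never give up; only context cancellation stops the retries

    return backoff.Retry(func() error {
        err := syncOnce(ctx)
        if err == nil || errors.Is(err, io.EOF) {
            return nil // clean shutdown: stop retrying
        }
        return err // transient failure: retry after the next backoff interval
    }, backoff.WithContext(bo, ctx))
}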
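
The #930 change gives every stored session a version field tied to the databroker server's version, so a databroker restart (or version bump) invalidates all previously issued sessions, and sessions without an id are rejected outright. A minimal sketch of that check, using hypothetical Session and version types rather than pomerium's actual protobuf messages.

package example

import "errors"

// Session is a stand-in for the stored session record. Version holds the
// databroker server version captured when the session was created.
type Session struct {
    ID      string
    Version string
}

var (
    errNoID        = errors.New("session: missing id")
    errInvalidated = errors.New("session: databroker server version changed")
)

// validateSession rejects sessions with no id and sessions whose recorded
// server version no longer matches the databroker's current version.
func validateSession(s *Session, currentServerVersion string) error {
    if s == nil || s.ID == "" {
        return errNoID
    }
    if s.Version != currentServerVersion {
        return errInvalidated
    }
    return nil
}
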
Caleb Doxsey authored on 2020-06-19 07:52:44 -06:00, committed by GitHub
parent 39cdb31170
commit dbd7f55b20
115 changed files with 8479 additions and 3584 deletions

cache/cache.go (126 changed lines)

@@ -5,24 +5,34 @@ package cache

import (
    "context"
    "errors"
    "fmt"
    stdlog "log"
    "net"

    "google.golang.org/grpc"
    "gopkg.in/tomb.v2"

    "github.com/pomerium/pomerium/config"
    "github.com/pomerium/pomerium/internal/cryptutil"
    "github.com/pomerium/pomerium/internal/kv"
    "github.com/pomerium/pomerium/internal/kv/autocache"
    "github.com/pomerium/pomerium/internal/kv/bolt"
    "github.com/pomerium/pomerium/internal/kv/redis"
    "github.com/pomerium/pomerium/internal/log"
    "github.com/pomerium/pomerium/internal/directory"
    "github.com/pomerium/pomerium/internal/grpc/databroker"
    "github.com/pomerium/pomerium/internal/grpc/session"
    "github.com/pomerium/pomerium/internal/grpc/user"
    "github.com/pomerium/pomerium/internal/identity"
    "github.com/pomerium/pomerium/internal/identity/manager"
    "github.com/pomerium/pomerium/internal/urlutil"
)

// Cache represents the cache service. The cache service is a simple interface
// for storing keyed blobs (bytes) of unstructured data.
type Cache struct {
    cache               kv.Store
    dataBrokerServer    *DataBrokerServer
    sessionServer       *SessionServer
    userServer          *UserServer
    manager             *manager.Manager
    localListener       net.Listener
    localGRPCServer     *grpc.Server
    localGRPCConnection *grpc.ClientConn
}
// New creates a new cache service.
@@ -31,58 +41,80 @@ func New(opts config.Options) (*Cache, error) {
        return nil, fmt.Errorf("cache: bad option: %w", err)
    }

    cache, err := newCacheStore(opts.CacheStore, &opts)
    authenticator, err := identity.NewAuthenticator(opts.GetOauthOptions())
    if err != nil {
        return nil, fmt.Errorf("cache: failed to create authenticator: %w", err)
    }

    directoryProvider := directory.GetProvider(&opts)

    localListener, err := net.Listen("tcp", "127.0.0.1:0")
    if err != nil {
        return nil, err
    }

    localGRPCServer := grpc.NewServer()
    localGRPCConnection, err := grpc.DialContext(context.Background(), localListener.Addr().String(),
        grpc.WithInsecure())
    if err != nil {
        return nil, err
    }

    dataBrokerServer := NewDataBrokerServer(localGRPCServer)
    dataBrokerClient := databroker.NewDataBrokerServiceClient(localGRPCConnection)

    sessionServer := NewSessionServer(localGRPCServer, dataBrokerClient)
    sessionClient := session.NewSessionServiceClient(localGRPCConnection)

    userServer := NewUserServer(localGRPCServer, dataBrokerClient)
    userClient := user.NewUserServiceClient(localGRPCConnection)

    manager := manager.New(authenticator, directoryProvider, sessionClient, userClient, dataBrokerClient)

    return &Cache{
        cache:               cache,
        dataBrokerServer:    dataBrokerServer,
        sessionServer:       sessionServer,
        userServer:          userServer,
        manager:             manager,
        localListener:       localListener,
        localGRPCServer:     localGRPCServer,
        localGRPCConnection: localGRPCConnection,
    }, nil
}

// Register registers all the gRPC services with the given server.
func (c *Cache) Register(grpcServer *grpc.Server) {
    databroker.RegisterDataBrokerServiceServer(grpcServer, c.dataBrokerServer)
    session.RegisterSessionServiceServer(grpcServer, c.sessionServer)
    user.RegisterUserServiceServer(grpcServer, c.userServer)
}

// Run runs the cache components.
func (c *Cache) Run(ctx context.Context) error {
    t, ctx := tomb.WithContext(ctx)
    t.Go(func() error {
        return c.runMemberList(ctx)
    })
    t.Go(func() error {
        return c.localGRPCServer.Serve(c.localListener)
    })
    t.Go(func() error {
        <-ctx.Done()
        c.localGRPCServer.Stop()
        return nil
    })
    t.Go(func() error {
        return c.manager.Run(ctx)
    })
    return t.Wait()
}
// validate checks that proper configuration settings are set to create
// a cache instance
func validate(o config.Options) error {
    if _, err := cryptutil.NewAEADCipherFromBase64(o.SharedKey); err != nil {
        return fmt.Errorf("invalid 'SHARED_SECRET': %w", err)
    }
    if err := urlutil.ValidateURL(o.CacheURL); err != nil {
        return fmt.Errorf("invalid 'CACHE_SERVICE_URL': %w", err)
    if err := urlutil.ValidateURL(o.DataBrokerURL); err != nil {
        return fmt.Errorf("invalid 'DATA_BROKER_SERVICE_URL': %w", err)
    }
    return nil
}
// newCacheStore creates a new cache store by name and given a set of
// configuration options.
func newCacheStore(name string, o *config.Options) (s kv.Store, err error) {
    switch name {
    case bolt.Name:
        s, err = bolt.New(&bolt.Options{Path: o.CacheStorePath})
    case redis.Name:
        s, err = redis.New(&redis.Options{
            Addr:     o.CacheStoreAddr,
            Password: o.CacheStorePassword,
        })
    case autocache.Name:
        acLog := log.Logger.With().Str("service", autocache.Name).Logger()
        s, err = autocache.New(&autocache.Options{
            SharedKey:     o.SharedKey,
            Log:           stdlog.New(acLog, "", 0),
            ClusterDomain: o.GetCacheURL().Hostname(),
        })
    default:
        return nil, fmt.Errorf("cache: unknown store: %s", name)
    }
    if err != nil {
        return nil, err
    }
    return s, nil
}
// Close shuts down the underlying cache store, services, or both -- if any.
func (c *Cache) Close() error {
    if c.cache == nil {
        return errors.New("cache: cannot close nil cache")
    }
    return c.cache.Close(context.TODO())
}
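
For orientation, a sketch (not the project's actual wiring) of how a caller might use the Cache surface shown above: build the service from options, register its databroker, session, and user gRPC services on a server, run it, and close it on shutdown. The listener address is illustrative, and opts is assumed to come from pomerium's normal configuration loading.

package example

import (
    "context"
    "net"

    "google.golang.org/grpc"

    "github.com/pomerium/pomerium/cache"
    "github.com/pomerium/pomerium/config"
)

func runCache(ctx context.Context, opts config.Options) error {
    // New validates the options (including DATA_BROKER_SERVICE_URL) and wires
    // up the databroker, session, user, and identity-manager components.
    c, err := cache.New(opts)
    if err != nil {
        return err
    }
    defer c.Close()

    lis, err := net.Listen("tcp", "127.0.0.1:5443") // illustrative address
    if err != nil {
        return err
    }

    srv := grpc.NewServer()
    c.Register(srv) // expose the databroker, session, and user services

    go func() {
        <-ctx.Done()
        srv.GracefulStop()
    }()
    go func() { _ = srv.Serve(lis) }()

    // Run blocks until the context is cancelled or a component fails.
    return c.Run(ctx)
}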
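
Run above also starts runMemberList, introduced by the "cache: use memberlist to detect multiple instances" commit, presumably so the service can notice when more than one cache/databroker instance is running against state that is not shared. A rough sketch of that kind of detection with hashicorp/memberlist; the configuration and the reaction to extra members are assumptions, not pomerium's actual implementation.

package example

import (
    "fmt"

    "github.com/hashicorp/memberlist"
)

// detectMultipleInstances joins a gossip pool and reports whether more than
// one instance is present.
func detectMultipleInstances(knownAddrs []string) (bool, error) {
    list, err := memberlist.Create(memberlist.DefaultLANConfig())
    if err != nil {
        return false, err
    }
    defer list.Shutdown()

    if len(knownAddrs) > 0 {
        if _, err := list.Join(knownAddrs); err != nil {
            return false, fmt.Errorf("cache: failed to join memberlist: %w", err)
        }
    }

    // More than one member means another instance is gossiping on the same
    // cluster, which an in-memory databroker cannot safely share state with.
    return list.NumMembers() > 1, nil
}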