From ca7ffbda4f833acd213e4c8cb641da9e6d8c192c Mon Sep 17 00:00:00 2001
From: Hemanth Nakkina
Date: Wed, 3 Jan 2024 15:00:23 +0530
Subject: [PATCH 01/27] Add manifest entrypoint and database tables

* Create a manifest table with fields id, applieddate and manifest data.
  The applied date will be populated by SQL when the record is inserted.
* Add APIs to list all manifests, get a manifest by id and delete a
  manifest by id.
* Add an additional API to get the latest inserted record when the
  manifest id is specified with the keyword latest.
* The TIMESTAMP type in the database does not store nanosecond precision;
  values are saved only to second precision. So retrieving the latest
  inserted record fetches all records with the latest timestamp and
  returns the last one.
---
 sunbeam-microcluster/api/endpoints.go | 2 +
 sunbeam-microcluster/api/manifests.go | 91 +++++++
 sunbeam-microcluster/api/types/manifests.go | 12 +
 .../database/config.mapper.go | 2 +-
 .../database/jujuuser.mapper.go | 2 +-
 sunbeam-microcluster/database/manifest.go | 119 +++++++++
 .../database/manifest.mapper.go | 246 ++++++++++++++++++
 sunbeam-microcluster/database/node.mapper.go | 2 +-
 sunbeam-microcluster/database/schema.go | 19 ++
 sunbeam-microcluster/sunbeam/manifests.go | 103 ++++++++
 10 files changed, 595 insertions(+), 3 deletions(-)
 create mode 100644 sunbeam-microcluster/api/manifests.go
 create mode 100644 sunbeam-microcluster/api/types/manifests.go
 create mode 100644 sunbeam-microcluster/database/manifest.go
 create mode 100644 sunbeam-microcluster/database/manifest.mapper.go
 create mode 100644 sunbeam-microcluster/sunbeam/manifests.go

diff --git a/sunbeam-microcluster/api/endpoints.go b/sunbeam-microcluster/api/endpoints.go
index 02877aea..20f25cf2 100644
--- a/sunbeam-microcluster/api/endpoints.go
+++ b/sunbeam-microcluster/api/endpoints.go
@@ -18,4 +18,6 @@ var Endpoints = []rest.Endpoint{
 	jujuusersCmd,
 	jujuuserCmd,
 	configCmd,
+	manifestsCmd,
+	manifestCmd,
 }
diff --git a/sunbeam-microcluster/api/manifests.go b/sunbeam-microcluster/api/manifests.go
new file mode 100644
index 00000000..88921891
--- /dev/null
+++ b/sunbeam-microcluster/api/manifests.go
@@ -0,0 +1,91 @@
+package api
+
+import (
+	"encoding/json"
+	"net/http"
+	"net/url"
+
+	"github.com/canonical/lxd/lxd/response"
+	"github.com/canonical/lxd/shared/api"
+	"github.com/canonical/microcluster/rest"
+	"github.com/canonical/microcluster/state"
+	"github.com/gorilla/mux"
+
+	"github.com/openstack-snaps/snap-openstack/sunbeam-microcluster/api/types"
+	"github.com/openstack-snaps/snap-openstack/sunbeam-microcluster/sunbeam"
+)
+
+// /1.0/manifests endpoint.
+var manifestsCmd = rest.Endpoint{
+	Path: "manifests",
+
+	Get:  rest.EndpointAction{Handler: cmdManifestsGetAll, ProxyTarget: true},
+	Post: rest.EndpointAction{Handler: cmdManifestsPost, ProxyTarget: true},
+}
+
+// /1.0/manifests/ endpoint.
+// /1.0/manifests/latest will give the latest inserted manifest record +var manifestCmd = rest.Endpoint{ + Path: "manifests/{manifestid}", + + Get: rest.EndpointAction{Handler: cmdManifestsGet, ProxyTarget: true}, + Delete: rest.EndpointAction{Handler: cmdManifestsDelete, ProxyTarget: true}, +} + +func cmdManifestsGetAll(s *state.State, _ *http.Request) response.Response { + + manifests, err := sunbeam.ListManifests(s) + if err != nil { + return response.InternalError(err) + } + + return response.SyncResponse(true, manifests) +} + +func cmdManifestsGet(s *state.State, r *http.Request) response.Response { + var manifestid string + manifestid, err := url.PathUnescape(mux.Vars(r)["manifestid"]) + if err != nil { + return response.InternalError(err) + } + manifest, err := sunbeam.GetManifest(s, manifestid) + if err != nil { + if err, ok := err.(api.StatusError); ok { + if err.Status() == http.StatusNotFound { + return response.NotFound(err) + } + } + return response.InternalError(err) + } + + return response.SyncResponse(true, manifest) +} + +func cmdManifestsPost(s *state.State, r *http.Request) response.Response { + var req types.Manifest + + err := json.NewDecoder(r.Body).Decode(&req) + if err != nil { + return response.InternalError(err) + } + + err = sunbeam.AddManifest(s, req.ManifestID, req.Data) + if err != nil { + return response.InternalError(err) + } + + return response.EmptySyncResponse +} + +func cmdManifestsDelete(s *state.State, r *http.Request) response.Response { + manifestid, err := url.PathUnescape(mux.Vars(r)["manifestid"]) + if err != nil { + return response.SmartError(err) + } + err = sunbeam.DeleteManifest(s, manifestid) + if err != nil { + return response.InternalError(err) + } + + return response.EmptySyncResponse +} diff --git a/sunbeam-microcluster/api/types/manifests.go b/sunbeam-microcluster/api/types/manifests.go new file mode 100644 index 00000000..d8d3239c --- /dev/null +++ b/sunbeam-microcluster/api/types/manifests.go @@ -0,0 +1,12 @@ +// Package types provides shared types and structs. +package types + +// Manifests holds list of manifest type +type Manifests []Manifest + +// Manifest structure to hold manifest applytime and manifest data +type Manifest struct { + ManifestID string `json:"manifestid" yaml:"manifestid"` + AppliedDate string `json:"applieddate" yaml:"applieddate"` + Data string `json:"data" yaml:"data"` +} diff --git a/sunbeam-microcluster/database/config.mapper.go b/sunbeam-microcluster/database/config.mapper.go index 1e0e3af1..f887c476 100644 --- a/sunbeam-microcluster/database/config.mapper.go +++ b/sunbeam-microcluster/database/config.mapper.go @@ -80,7 +80,7 @@ func getConfigItems(ctx context.Context, stmt *sql.Stmt, args ...any) ([]ConfigI return objects, nil } -// getConfigItems can be used to run handwritten query strings to return a slice of objects. +// getConfigItemsRaw can be used to run handwritten query strings to return a slice of objects. 
func getConfigItemsRaw(ctx context.Context, tx *sql.Tx, sql string, args ...any) ([]ConfigItem, error) { objects := make([]ConfigItem, 0) diff --git a/sunbeam-microcluster/database/jujuuser.mapper.go b/sunbeam-microcluster/database/jujuuser.mapper.go index eacf7ed3..fd62ce8d 100644 --- a/sunbeam-microcluster/database/jujuuser.mapper.go +++ b/sunbeam-microcluster/database/jujuuser.mapper.go @@ -80,7 +80,7 @@ func getJujuUsers(ctx context.Context, stmt *sql.Stmt, args ...any) ([]JujuUser, return objects, nil } -// getJujuUsers can be used to run handwritten query strings to return a slice of objects. +// getJujuUsersRaw can be used to run handwritten query strings to return a slice of objects. func getJujuUsersRaw(ctx context.Context, tx *sql.Tx, sql string, args ...any) ([]JujuUser, error) { objects := make([]JujuUser, 0) diff --git a/sunbeam-microcluster/database/manifest.go b/sunbeam-microcluster/database/manifest.go new file mode 100644 index 00000000..185de493 --- /dev/null +++ b/sunbeam-microcluster/database/manifest.go @@ -0,0 +1,119 @@ +package database + +import ( + "context" + "database/sql" + "fmt" + "net/http" + + "github.com/canonical/lxd/shared/api" + "github.com/canonical/microcluster/cluster" +) + +//go:generate -command mapper lxd-generate db mapper -t manifest.mapper.go +//go:generate mapper reset +// +//go:generate mapper stmt -d github.com/canonical/microcluster/cluster -e ManifestItem objects table=manifest +//go:generate mapper stmt -d github.com/canonical/microcluster/cluster -e ManifestItem objects-by-ManifestID table=manifest +//go:generate mapper stmt -d github.com/canonical/microcluster/cluster -e ManifestItem id table=manifest +//go:generate mapper stmt -d github.com/canonical/microcluster/cluster -e ManifestItem delete-by-ManifestID table=manifest + +// +//go:generate mapper method -i -d github.com/canonical/microcluster/cluster -e ManifestItem GetMany table=manifest +//go:generate mapper method -i -d github.com/canonical/microcluster/cluster -e ManifestItem GetOne table=manifest +//go:generate mapper method -i -d github.com/canonical/microcluster/cluster -e ManifestItem ID table=manifest +//go:generate mapper method -i -d github.com/canonical/microcluster/cluster -e ManifestItem Exists table=manifest +//go:generate mapper method -i -d github.com/canonical/microcluster/cluster -e ManifestItem DeleteOne-by-ManifestID table=manifest + +// ManifestItem is used to save the Sunbeam manifests provided by user. +// AppliedDate is saved as Timestamp in database but retreived as string +// Probable Bug: https://github.com/mattn/go-sqlite3/issues/951 +type ManifestItem struct { + ID int + ManifestID string `db:"primary=yes"` + AppliedDate string + Data string +} + +// ManifestItemFilter is a required struct for use with lxd-generate. It is used for filtering fields on database fetches. +type ManifestItemFilter struct { + ManifestID *string +} + +var manifestItemCreate = cluster.RegisterStmt(` +INSERT INTO manifest (manifest_id, data) + VALUES (?, ?) +`) + +var latestManifestItemObject = cluster.RegisterStmt(` +SELECT manifest.id, manifest.manifest_id, manifest.applied_date, manifest.data + FROM manifest + WHERE manifest.applied_date = (SELECT MAX(applied_date) FROM manifest) +`) + +// CreateManifestItem adds a new ManifestItem to the database. +// generator: ManifestItem Create +func CreateManifestItem(ctx context.Context, tx *sql.Tx, object ManifestItem) (int64, error) { + // Check if a ManifestItem with the same key exists. 
+ exists, err := ManifestItemExists(ctx, tx, object.ManifestID) + if err != nil { + return -1, fmt.Errorf("Failed to check for duplicates: %w", err) + } + + if exists { + return -1, api.StatusErrorf(http.StatusConflict, "This \"manifest\" entry already exists") + } + + args := make([]any, 2) + + // Populate the statement arguments. + args[0] = object.ManifestID + args[1] = object.Data + + // Prepared statement to use. + stmt, err := cluster.Stmt(tx, manifestItemCreate) + if err != nil { + return -1, fmt.Errorf("Failed to get \"manifestItemCreate\" prepared statement: %w", err) + } + + // Execute the statement. + result, err := stmt.Exec(args...) + if err != nil { + return -1, fmt.Errorf("Failed to create \"manifest\" entry: %w", err) + } + + id, err := result.LastInsertId() + if err != nil { + return -1, fmt.Errorf("Failed to fetch \"manifest\" entry ID: %w", err) + } + + return id, nil +} + +// GetLatestManifestItem returns the latest inserted record in manifest table. +func GetLatestManifestItem(ctx context.Context, tx *sql.Tx) (*ManifestItem, error) { + var err error + + // Pick the prepared statement and arguments to use based on active criteria. + var sqlStmt *sql.Stmt + + sqlStmt, err = cluster.Stmt(tx, latestManifestItemObject) + if err != nil { + return nil, fmt.Errorf("Failed to get \"manifestItemObjects\" prepared statement: %w", err) + } + + // Result slice. + // objects := make([]ManifestItem, 0) + objects, err := getManifestItems(ctx, sqlStmt) + if err != nil { + return nil, fmt.Errorf("Failed to fetch from \"manifest\" table: %w", err) + } + + objectsLen := len(objects) + switch objectsLen { + case 0: + return nil, api.StatusErrorf(http.StatusNotFound, "ManifestItem not found") + default: + return &objects[objectsLen-1], nil + } +} diff --git a/sunbeam-microcluster/database/manifest.mapper.go b/sunbeam-microcluster/database/manifest.mapper.go new file mode 100644 index 00000000..d372d88f --- /dev/null +++ b/sunbeam-microcluster/database/manifest.mapper.go @@ -0,0 +1,246 @@ +package database + +// The code below was generated by lxd-generate - DO NOT EDIT! + +import ( + "context" + "database/sql" + "errors" + "fmt" + "net/http" + "strings" + + "github.com/canonical/lxd/lxd/db/query" + "github.com/canonical/lxd/shared/api" + "github.com/canonical/microcluster/cluster" +) + +var _ = api.ServerEnvironment{} + +var manifestItemObjects = cluster.RegisterStmt(` +SELECT manifest.id, manifest.manifest_id, manifest.applied_date, manifest.data + FROM manifest + ORDER BY manifest.manifest_id +`) + +var manifestItemObjectsByManifestID = cluster.RegisterStmt(` +SELECT manifest.id, manifest.manifest_id, manifest.applied_date, manifest.data + FROM manifest + WHERE ( manifest.manifest_id = ? ) + ORDER BY manifest.manifest_id +`) + +var manifestItemID = cluster.RegisterStmt(` +SELECT manifest.id FROM manifest + WHERE manifest.manifest_id = ? +`) + +var manifestItemDeleteByManifestID = cluster.RegisterStmt(` +DELETE FROM manifest WHERE manifest_id = ? +`) + +// manifestItemColumns returns a string of column names to be used with a SELECT statement for the entity. +// Use this function when building statements to retrieve database entries matching the ManifestItem entity. +func manifestItemColumns() string { + return "manifest.id, manifest.manifest_id, manifest.applied_date, manifest.data" +} + +// getManifestItems can be used to run handwritten sql.Stmts to return a slice of objects. 
+func getManifestItems(ctx context.Context, stmt *sql.Stmt, args ...any) ([]ManifestItem, error) { + objects := make([]ManifestItem, 0) + + dest := func(scan func(dest ...any) error) error { + m := ManifestItem{} + err := scan(&m.ID, &m.ManifestID, &m.AppliedDate, &m.Data) + if err != nil { + return err + } + + objects = append(objects, m) + + return nil + } + + err := query.SelectObjects(ctx, stmt, dest, args...) + if err != nil { + return nil, fmt.Errorf("Failed to fetch from \"manifest\" table: %w", err) + } + + return objects, nil +} + +// getManifestItemsRaw can be used to run handwritten query strings to return a slice of objects. +func getManifestItemsRaw(ctx context.Context, tx *sql.Tx, sql string, args ...any) ([]ManifestItem, error) { + objects := make([]ManifestItem, 0) + + dest := func(scan func(dest ...any) error) error { + m := ManifestItem{} + err := scan(&m.ID, &m.ManifestID, &m.AppliedDate, &m.Data) + if err != nil { + return err + } + + objects = append(objects, m) + + return nil + } + + err := query.Scan(ctx, tx, sql, dest, args...) + if err != nil { + return nil, fmt.Errorf("Failed to fetch from \"manifest\" table: %w", err) + } + + return objects, nil +} + +// GetManifestItems returns all available ManifestItems. +// generator: ManifestItem GetMany +func GetManifestItems(ctx context.Context, tx *sql.Tx, filters ...ManifestItemFilter) ([]ManifestItem, error) { + var err error + + // Result slice. + objects := make([]ManifestItem, 0) + + // Pick the prepared statement and arguments to use based on active criteria. + var sqlStmt *sql.Stmt + args := []any{} + queryParts := [2]string{} + + if len(filters) == 0 { + sqlStmt, err = cluster.Stmt(tx, manifestItemObjects) + if err != nil { + return nil, fmt.Errorf("Failed to get \"manifestItemObjects\" prepared statement: %w", err) + } + } + + for i, filter := range filters { + if filter.ManifestID != nil { + args = append(args, []any{filter.ManifestID}...) + if len(filters) == 1 { + sqlStmt, err = cluster.Stmt(tx, manifestItemObjectsByManifestID) + if err != nil { + return nil, fmt.Errorf("Failed to get \"manifestItemObjectsByManifestID\" prepared statement: %w", err) + } + + break + } + + query, err := cluster.StmtString(manifestItemObjectsByManifestID) + if err != nil { + return nil, fmt.Errorf("Failed to get \"manifestItemObjects\" prepared statement: %w", err) + } + + parts := strings.SplitN(query, "ORDER BY", 2) + if i == 0 { + copy(queryParts[:], parts) + continue + } + + _, where, _ := strings.Cut(parts[0], "WHERE") + queryParts[0] += "OR" + where + } else if filter.ManifestID == nil { + return nil, fmt.Errorf("Cannot filter on empty ManifestItemFilter") + } else { + return nil, fmt.Errorf("No statement exists for the given Filter") + } + } + + // Select. + if sqlStmt != nil { + objects, err = getManifestItems(ctx, sqlStmt, args...) + } else { + queryStr := strings.Join(queryParts[:], "ORDER BY") + objects, err = getManifestItemsRaw(ctx, tx, queryStr, args...) + } + + if err != nil { + return nil, fmt.Errorf("Failed to fetch from \"manifest\" table: %w", err) + } + + return objects, nil +} + +// GetManifestItem returns the ManifestItem with the given key. 
+// generator: ManifestItem GetOne +func GetManifestItem(ctx context.Context, tx *sql.Tx, manifestID string) (*ManifestItem, error) { + filter := ManifestItemFilter{} + filter.ManifestID = &manifestID + + objects, err := GetManifestItems(ctx, tx, filter) + if err != nil { + return nil, fmt.Errorf("Failed to fetch from \"manifest\" table: %w", err) + } + + switch len(objects) { + case 0: + return nil, api.StatusErrorf(http.StatusNotFound, "ManifestItem not found") + case 1: + return &objects[0], nil + default: + return nil, fmt.Errorf("More than one \"manifest\" entry matches") + } +} + +// GetManifestItemID return the ID of the ManifestItem with the given key. +// generator: ManifestItem ID +func GetManifestItemID(ctx context.Context, tx *sql.Tx, manifestID string) (int64, error) { + stmt, err := cluster.Stmt(tx, manifestItemID) + if err != nil { + return -1, fmt.Errorf("Failed to get \"manifestItemID\" prepared statement: %w", err) + } + + row := stmt.QueryRowContext(ctx, manifestID) + var id int64 + err = row.Scan(&id) + if errors.Is(err, sql.ErrNoRows) { + return -1, api.StatusErrorf(http.StatusNotFound, "ManifestItem not found") + } + + if err != nil { + return -1, fmt.Errorf("Failed to get \"manifest\" ID: %w", err) + } + + return id, nil +} + +// ManifestItemExists checks if a ManifestItem with the given key exists. +// generator: ManifestItem Exists +func ManifestItemExists(ctx context.Context, tx *sql.Tx, manifestID string) (bool, error) { + _, err := GetManifestItemID(ctx, tx, manifestID) + if err != nil { + if api.StatusErrorCheck(err, http.StatusNotFound) { + return false, nil + } + + return false, err + } + + return true, nil +} + +// DeleteManifestItem deletes the ManifestItem matching the given key parameters. +// generator: ManifestItem DeleteOne-by-ManifestID +func DeleteManifestItem(_ context.Context, tx *sql.Tx, manifestID string) error { + stmt, err := cluster.Stmt(tx, manifestItemDeleteByManifestID) + if err != nil { + return fmt.Errorf("Failed to get \"manifestItemDeleteByManifestID\" prepared statement: %w", err) + } + + result, err := stmt.Exec(manifestID) + if err != nil { + return fmt.Errorf("Delete \"manifest\": %w", err) + } + + n, err := result.RowsAffected() + if err != nil { + return fmt.Errorf("Fetch affected rows: %w", err) + } + + if n == 0 { + return api.StatusErrorf(http.StatusNotFound, "ManifestItem not found") + } else if n > 1 { + return fmt.Errorf("Query deleted %d ManifestItem rows instead of 1", n) + } + + return nil +} diff --git a/sunbeam-microcluster/database/node.mapper.go b/sunbeam-microcluster/database/node.mapper.go index 75557119..fefaf78c 100644 --- a/sunbeam-microcluster/database/node.mapper.go +++ b/sunbeam-microcluster/database/node.mapper.go @@ -106,7 +106,7 @@ func getNodes(ctx context.Context, stmt *sql.Stmt, args ...any) ([]Node, error) return objects, nil } -// getNodes can be used to run handwritten query strings to return a slice of objects. +// getNodesRaw can be used to run handwritten query strings to return a slice of objects. 
func getNodesRaw(ctx context.Context, tx *sql.Tx, sql string, args ...any) ([]Node, error) { objects := make([]Node, 0) diff --git a/sunbeam-microcluster/database/schema.go b/sunbeam-microcluster/database/schema.go index 4ee6a434..7cb4a1c4 100644 --- a/sunbeam-microcluster/database/schema.go +++ b/sunbeam-microcluster/database/schema.go @@ -14,6 +14,7 @@ var SchemaExtensions = map[int]schema.Update{ 1: NodesSchemaUpdate, 2: ConfigSchemaUpdate, 3: JujuUserSchemaUpdate, + 4: ManifestsSchemaUpdate, } // NodesSchemaUpdate is schema for table nodes @@ -66,3 +67,21 @@ CREATE TABLE jujuuser ( return err } + +// ManifestsSchemaUpdate is schema for table manifest +// TOCHK: TIMESTAMP(6) not storing nano seconds +func ManifestsSchemaUpdate(_ context.Context, tx *sql.Tx) error { + stmt := ` +CREATE TABLE manifest ( + id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + manifest_id TEXT NOT NULL, + applied_date TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP, + data TEXT, + UNIQUE(manifest_id) +); + ` + + _, err := tx.Exec(stmt) + + return err +} diff --git a/sunbeam-microcluster/sunbeam/manifests.go b/sunbeam-microcluster/sunbeam/manifests.go new file mode 100644 index 00000000..ad87d12d --- /dev/null +++ b/sunbeam-microcluster/sunbeam/manifests.go @@ -0,0 +1,103 @@ +package sunbeam + +import ( + "context" + "database/sql" + "fmt" + + "github.com/canonical/microcluster/state" + + "github.com/openstack-snaps/snap-openstack/sunbeam-microcluster/api/types" + "github.com/openstack-snaps/snap-openstack/sunbeam-microcluster/database" +) + +// ListManifests return all the manifests +func ListManifests(s *state.State) (types.Manifests, error) { + manifests := types.Manifests{} + + // Get the manifests from the database. + err := s.Database.Transaction(s.Context, func(ctx context.Context, tx *sql.Tx) error { + records, err := database.GetManifestItems(ctx, tx) + if err != nil { + return fmt.Errorf("Failed to fetch manifests: %w", err) + } + + for _, manifest := range records { + manifests = append(manifests, types.Manifest{ + ManifestID: manifest.ManifestID, + AppliedDate: manifest.AppliedDate, + Data: manifest.Data, + }) + } + + return nil + }) + if err != nil { + return nil, err + } + + return manifests, nil +} + +// GetManifest returns a Manifest with the given id +func GetManifest(s *state.State, manifestid string) (types.Manifest, error) { + manifest := types.Manifest{} + + err := s.Database.Transaction(s.Context, func(ctx context.Context, tx *sql.Tx) error { + var record *database.ManifestItem + var err error + // If manifest id is latest, retrieve the latest inserted record. + if manifestid == "latest" { + record, err = database.GetLatestManifestItem(ctx, tx) + } else { + record, err = database.GetManifestItem(ctx, tx, manifestid) + } + if err != nil { + return err + } + + manifest.ManifestID = record.ManifestID + manifest.AppliedDate = record.AppliedDate + manifest.Data = record.Data + + return nil + }) + + return manifest, err +} + +// AddManifest adds a manifest to the database +func AddManifest(s *state.State, manifestid string, data string) error { + // Add manifest to the database. 
+ err := s.Database.Transaction(s.Context, func(ctx context.Context, tx *sql.Tx) error { + _, err := database.CreateManifestItem(ctx, tx, database.ManifestItem{ManifestID: manifestid, Data: data}) + if err != nil { + return fmt.Errorf("Failed to record manifest: %w", err) + } + + return nil + }) + if err != nil { + return err + } + + return nil +} + +// DeleteManifest deletes a manifest from database +func DeleteManifest(s *state.State, manifestid string) error { + // Delete manifest from the database. + err := s.Database.Transaction(s.Context, func(ctx context.Context, tx *sql.Tx) error { + err := database.DeleteManifestItem(ctx, tx, manifestid) + if err != nil { + return fmt.Errorf("Failed to delete manifest: %w", err) + } + + return nil + }) + if err != nil { + return err + } + + return nil +} From a0b74d0c672b557c1d04e3207bd9f10c221c5bf3 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Wed, 3 Jan 2024 17:29:30 +0530 Subject: [PATCH 02/27] Add helper functions for manifest crud operations Add python helper functions to create/retreive/ delete manifest from cluster database. --- sunbeam-python/sunbeam/clusterd/cluster.py | 25 ++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/sunbeam-python/sunbeam/clusterd/cluster.py b/sunbeam-python/sunbeam/clusterd/cluster.py index 47b37086..a1cfd371 100644 --- a/sunbeam-python/sunbeam/clusterd/cluster.py +++ b/sunbeam-python/sunbeam/clusterd/cluster.py @@ -194,6 +194,31 @@ def unlock_terraform_plan(self, plan: str, lock: dict) -> None: """Unlock plan.""" self._put(f"/1.0/terraformunlock/{plan}", data=json.dumps(lock)) + def add_manifest(self, data: str) -> str: + """Add manifest to cluster database.""" + manifest_id = secrets.token_hex(16) + data = {"manifestid": manifest_id, "data": data} + self._post("/1.0/manifests", data=json.dumps(data)) + return manifest_id + + def list_manifests(self) -> list: + """List all manifests.""" + manifests = self._get("/1.0/manifests") + return manifests.get("metadata") + + def get_manifest(self, manifest_id: str) -> dict: + """Get manifest info along with data.""" + manifest = self._get(f"/1.0/manifests/{manifest_id}") + return manifest + + def get_latest_manifest(self) -> dict: + """Get latest manifest.""" + return self.get_manifest("latest") + + def delete_manifest(self, manifest_id: str) -> None: + """Remove manifest from database.""" + self._delete(f"/1.0/manifest/{manifest_id}") + class ClusterService(MicroClusterService, ExtendedAPIService): """Lists and manages cluster.""" From 3743323d53428b9528bf8316805365905fbbc6bd Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Fri, 5 Jan 2024 13:40:10 +0530 Subject: [PATCH 03/27] Add manifest option to bootstrap command Add manifest option to sunbeam bootstrap. Add step to write the manifest to cluster db. 
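The AddManifestStep added here drives the cluster client helpers introduced in
the previous patch. For orientation, those helpers can be exercised roughly as
follows; this is an illustrative sketch only, assuming a reachable clusterd
endpoint, and the manifest body passed in is an arbitrary example:

    # Illustrative use of the manifest CRUD helpers; not part of this series.
    import yaml

    from sunbeam.clusterd.client import Client as clusterClient

    client = clusterClient()
    manifest_id = client.cluster.add_manifest(data=yaml.safe_dump({"charms": {}}))
    manifests = client.cluster.list_manifests()
    latest = client.cluster.get_latest_manifest()  # same record as get_manifest("latest")
    client.cluster.delete_manifest(manifest_id)
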
--- sunbeam-python/requirements.txt | 3 + sunbeam-python/sunbeam/clusterd/cluster.py | 1 + sunbeam-python/sunbeam/commands/bootstrap.py | 16 ++++ sunbeam-python/sunbeam/jobs/manifest.py | 93 ++++++++++++++++++++ 4 files changed, 113 insertions(+) create mode 100644 sunbeam-python/sunbeam/jobs/manifest.py diff --git a/sunbeam-python/requirements.txt b/sunbeam-python/requirements.txt index 0b417c72..f570569e 100644 --- a/sunbeam-python/requirements.txt +++ b/sunbeam-python/requirements.txt @@ -38,3 +38,6 @@ GitPython # Regression introduced in 1.3.3 macaroonbakery!=1.3.3 + +# For Manifest dataclasses +pydantic diff --git a/sunbeam-python/sunbeam/clusterd/cluster.py b/sunbeam-python/sunbeam/clusterd/cluster.py index a1cfd371..73554711 100644 --- a/sunbeam-python/sunbeam/clusterd/cluster.py +++ b/sunbeam-python/sunbeam/clusterd/cluster.py @@ -15,6 +15,7 @@ import json import logging +import secrets from typing import Any, List, Optional, Union from requests import codes diff --git a/sunbeam-python/sunbeam/commands/bootstrap.py b/sunbeam-python/sunbeam/commands/bootstrap.py index d07b6eb9..3e36e954 100644 --- a/sunbeam-python/sunbeam/commands/bootstrap.py +++ b/sunbeam-python/sunbeam/commands/bootstrap.py @@ -81,6 +81,7 @@ validate_roles, ) from sunbeam.jobs.juju import CONTROLLER, JujuHelper +from sunbeam.jobs.manifest import AddManifestStep, Manifest LOG = logging.getLogger(__name__) console = Console() @@ -95,6 +96,12 @@ help="Preseed file.", type=click.Path(exists=True, dir_okay=False, path_type=Path), ) +@click.option( + "-m", + "--manifest", + help="Manifest file.", + type=click.Path(exists=True, dir_okay=False, path_type=Path), +) @click.option( "--role", "roles", @@ -128,6 +135,7 @@ def bootstrap( roles: List[Role], topology: str, database: str, + manifest: Optional[Path] = None, preseed: Optional[Path] = None, accept_defaults: bool = False, ) -> None: @@ -135,6 +143,12 @@ def bootstrap( Initialize the sunbeam cluster. """ + # Validate manifest file + manifest_obj = None + if manifest: + manifest_obj = Manifest.load(manifest_file=manifest) + LOG.debug(f"Manifest object created with no errors: {manifest_obj}") + # Bootstrap node must always have the control role if Role.CONTROL not in roles: LOG.debug("Enabling control role for bootstrap") @@ -188,6 +202,8 @@ def bootstrap( plan = [] plan.append(JujuLoginStep(data_location)) plan.append(ClusterInitStep(roles_to_str_list(roles))) + if manifest: + plan.append(AddManifestStep(manifest)) plan.append( BootstrapJujuStep( cloud_name, diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py new file mode 100644 index 00000000..61a19efe --- /dev/null +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -0,0 +1,93 @@ +# Copyright (c) 2023 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +from pathlib import Path +from typing import Any, Dict, List, Optional + +import yaml +from pydantic import Field, ValidationError +from pydantic.dataclasses import dataclass + +from sunbeam.clusterd.client import Client as clusterClient +from sunbeam.jobs.common import BaseStep, Result, ResultType, Status + +LOG = logging.getLogger(__name__) + + +@dataclass +class JujuManifest: + bootstrap_args: List[str] = Field( + alias="bootstrap-args", description="Extra args for juju bootstrap" + ) + + +@dataclass +class CharmsManifest: + channel: Optional[str] = Field(default=None, description="Channel for the charm") + revision: Optional[int] = Field( + default=None, description="Revision number of the charm" + ) + rocks: Optional[Dict[str, str]] = Field( + default=None, description="Rock images for the charm" + ) + config: Optional[Dict[str, Any]] = Field( + default=None, description="Config options of the charm" + ) + source: Optional[Path] = Field(default=None, description="Local charm bundle path") + + +@dataclass +class TerraformManifest: + source: Path = Field(description="Path to Terraform plan") + + +@dataclass +class Manifest: + juju: Optional[JujuManifest] = None + charms: Optional[Dict[str, CharmsManifest]] = None + terraform_plans: Optional[Dict[str, TerraformManifest]] = Field( + default=None, alias="terraform-plans" + ) + + @classmethod + def load(cls, manifest_file: Path) -> "Manifest": + try: + with manifest_file.open() as file: + return Manifest(**yaml.safe_load(file)) + except FileNotFoundError as e: + raise e + except ValidationError as e: + raise e + + +class AddManifestStep(BaseStep): + """Add Manifest file to cluster database""" + + def __init__(self, manifest: Path): + super().__init__("Write Manifest to database", "Writing Manifest to database") + self.manifest = manifest + self.client = clusterClient() + + def run(self, status: Optional[Status] = None) -> Result: + """Write manifest to cluster db""" + try: + with self.manifest.open("r") as file: + data = yaml.safe_load(file) + id = self.client.cluster.add_manifest(data=yaml.safe_dump(data)) + return Result(ResultType.COMPLETED, id) + except Exception as e: + LOG.warning(str(e)) + return Result(ResultType.FAILED, str(e)) From ed03381b50666906fca08707377a9ea44b42f063 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Mon, 8 Jan 2024 10:59:13 +0530 Subject: [PATCH 04/27] Add manifest to refresh command Add following options manifest and clear-manifest to refresh command. clear-manifest option updates cluster database manifest table with empty manifest. --- sunbeam-python/sunbeam/commands/refresh.py | 48 +++++++++++++++++++++- sunbeam-python/sunbeam/jobs/manifest.py | 8 ++++ 2 files changed, 55 insertions(+), 1 deletion(-) diff --git a/sunbeam-python/sunbeam/commands/refresh.py b/sunbeam-python/sunbeam/commands/refresh.py index 7f71b906..15d7ba1a 100644 --- a/sunbeam-python/sunbeam/commands/refresh.py +++ b/sunbeam-python/sunbeam/commands/refresh.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging +import tempfile +from pathlib import Path +from typing import Optional import click from rich.console import Console @@ -21,7 +24,9 @@ from sunbeam.commands.terraform import TerraformHelper from sunbeam.commands.upgrades.inter_channel import ChannelUpgradeCoordinator from sunbeam.commands.upgrades.intra_channel import LatestInChannelCoordinator +from sunbeam.jobs.common import run_plan from sunbeam.jobs.juju import JujuHelper +from sunbeam.jobs.manifest import EMPTY_MANIFEST, AddManifestStep, Manifest LOG = logging.getLogger(__name__) console = Console() @@ -29,6 +34,20 @@ @click.command() +@click.option( + "-c", + "--clear-manifest", + is_flag=True, + default=False, + help="Clear the manifest file.", + type=bool, +) +@click.option( + "-m", + "--manifest", + help="Manifest file.", + type=click.Path(exists=True, dir_okay=False, path_type=Path), +) @click.option( "--upgrade-release", is_flag=True, @@ -36,12 +55,39 @@ default=False, help="Upgrade OpenStack release.", ) -def refresh(upgrade_release) -> None: +def refresh( + upgrade_release: bool, + manifest: Optional[Path] = None, + clear_manifest: bool = False, +) -> None: """Refresh deployment. Refresh the deployment. If --upgrade-release is supplied then charms are upgraded the channels aligned with this snap revision """ + if clear_manifest and manifest: + raise click.ClickException( + "Options manifest and clear_manifest are mutually exclusive" + ) + + # Validate manifest file + manifest_obj = None + if clear_manifest: + with tempfile.NamedTemporaryFile(mode="w+t") as tmpfile: + tmpfile.write(EMPTY_MANIFEST) + tmpfile.seek(0) + manifest_obj = Manifest.load(manifest_file=Path(tmpfile.name)) + LOG.debug(f"Manifest object created with no errors: {manifest_obj}") + run_plan([AddManifestStep(Path(tmpfile.name))], console) + elif manifest: + manifest_obj = Manifest.load(manifest_file=manifest) + LOG.debug(f"Manifest object created with no errors: {manifest_obj}") + run_plan([AddManifestStep(manifest)], console) + else: + LOG.debug("Getting latest manifest") + manifest_obj = Manifest.load_latest_from_cluserdb() + LOG.debug(f"Manifest object created with no errors: {manifest_obj}") + tfplan = "deploy-openstack" data_location = snap.paths.user_data tfhelper = TerraformHelper( diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 61a19efe..6ec08f57 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -25,6 +25,9 @@ from sunbeam.jobs.common import BaseStep, Result, ResultType, Status LOG = logging.getLogger(__name__) +EMPTY_MANIFEST = """charms: {} +terraform-plans: {} +""" @dataclass @@ -72,6 +75,11 @@ def load(cls, manifest_file: Path) -> "Manifest": except ValidationError as e: raise e + @classmethod + def load_latest_from_cluserdb(cls) -> "Manifest": + manifest_latest = clusterClient().cluster.get_latest_manifest() + return Manifest(**manifest_latest) + class AddManifestStep(BaseStep): """Add Manifest file to cluster database""" From b6e725b8cfa935ef3a4aef9ccd225d1692170900 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Tue, 9 Jan 2024 11:23:46 +0530 Subject: [PATCH 05/27] Support adding local terraform plans to manifest Add terraform to manifest file. terraform will be a dictionary of terraform plan and local path of the terraform plan. 
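As a rough sketch of the shape this introduces (the plan name and local path
below are illustrative examples, not shipped defaults), a manifest with a
terraform section is consumed by looking up the plan directory name and
falling back to the copy bundled in the snap when no override is present:

    # Illustrative sketch of the lookup pattern used throughout this patch.
    import yaml

    from sunbeam.jobs.manifest import Manifest

    manifest_obj = Manifest(**yaml.safe_load(
        "terraform:\n"
        "  deploy-openstack:\n"
        "    source: /home/ubuntu/plans/deploy-openstack\n"
    ))

    manifest_tfplans = manifest_obj.terraform or {}
    plan = manifest_tfplans.get("deploy-openstack")
    if plan:
        src = plan.source  # local plan directory supplied via the manifest
    else:
        src = None  # callers fall back to snap.paths.snap / "etc" / <plan dir>
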
--- sunbeam-microcluster/api/manifests.go | 8 +-- sunbeam-python/sunbeam/clusterd/cluster.py | 2 +- sunbeam-python/sunbeam/commands/bootstrap.py | 8 ++- sunbeam-python/sunbeam/commands/configure.py | 9 ++- sunbeam-python/sunbeam/commands/node.py | 9 ++- sunbeam-python/sunbeam/commands/resize.py | 9 ++- sunbeam-python/sunbeam/jobs/manifest.py | 65 ++++++++++++++----- sunbeam-python/sunbeam/jobs/plugin.py | 11 ++++ sunbeam-python/sunbeam/plugins/caas/plugin.py | 4 ++ .../sunbeam/plugins/interface/v1/base.py | 4 ++ .../sunbeam/plugins/interface/v1/openstack.py | 16 ++++- .../sunbeam/plugins/observability/plugin.py | 17 ++++- sunbeam-python/sunbeam/plugins/pro/plugin.py | 12 +++- 13 files changed, 144 insertions(+), 30 deletions(-) diff --git a/sunbeam-microcluster/api/manifests.go b/sunbeam-microcluster/api/manifests.go index 88921891..c952e041 100644 --- a/sunbeam-microcluster/api/manifests.go +++ b/sunbeam-microcluster/api/manifests.go @@ -28,8 +28,8 @@ var manifestsCmd = rest.Endpoint{ var manifestCmd = rest.Endpoint{ Path: "manifests/{manifestid}", - Get: rest.EndpointAction{Handler: cmdManifestsGet, ProxyTarget: true}, - Delete: rest.EndpointAction{Handler: cmdManifestsDelete, ProxyTarget: true}, + Get: rest.EndpointAction{Handler: cmdManifestGet, ProxyTarget: true}, + Delete: rest.EndpointAction{Handler: cmdManifestDelete, ProxyTarget: true}, } func cmdManifestsGetAll(s *state.State, _ *http.Request) response.Response { @@ -42,7 +42,7 @@ func cmdManifestsGetAll(s *state.State, _ *http.Request) response.Response { return response.SyncResponse(true, manifests) } -func cmdManifestsGet(s *state.State, r *http.Request) response.Response { +func cmdManifestGet(s *state.State, r *http.Request) response.Response { var manifestid string manifestid, err := url.PathUnescape(mux.Vars(r)["manifestid"]) if err != nil { @@ -77,7 +77,7 @@ func cmdManifestsPost(s *state.State, r *http.Request) response.Response { return response.EmptySyncResponse } -func cmdManifestsDelete(s *state.State, r *http.Request) response.Response { +func cmdManifestDelete(s *state.State, r *http.Request) response.Response { manifestid, err := url.PathUnescape(mux.Vars(r)["manifestid"]) if err != nil { return response.SmartError(err) diff --git a/sunbeam-python/sunbeam/clusterd/cluster.py b/sunbeam-python/sunbeam/clusterd/cluster.py index 73554711..86ad6032 100644 --- a/sunbeam-python/sunbeam/clusterd/cluster.py +++ b/sunbeam-python/sunbeam/clusterd/cluster.py @@ -210,7 +210,7 @@ def list_manifests(self) -> list: def get_manifest(self, manifest_id: str) -> dict: """Get manifest info along with data.""" manifest = self._get(f"/1.0/manifests/{manifest_id}") - return manifest + return manifest.get("metadata") def get_latest_manifest(self) -> dict: """Get latest manifest.""" diff --git a/sunbeam-python/sunbeam/commands/bootstrap.py b/sunbeam-python/sunbeam/commands/bootstrap.py index 3e36e954..c78d8286 100644 --- a/sunbeam-python/sunbeam/commands/bootstrap.py +++ b/sunbeam-python/sunbeam/commands/bootstrap.py @@ -148,6 +148,8 @@ def bootstrap( if manifest: manifest_obj = Manifest.load(manifest_file=manifest) LOG.debug(f"Manifest object created with no errors: {manifest_obj}") + else: + manifest_obj = Manifest() # Bootstrap node must always have the control role if Role.CONTROL not in roles: @@ -179,8 +181,12 @@ def bootstrap( "deploy-openstack-hypervisor", ] ) + manifest_tfplans = manifest_obj.terraform for tfplan_dir in tfplan_dirs: - src = snap.paths.snap / "etc" / tfplan_dir + if manifest_tfplans and 
manifest_tfplans.get(tfplan_dir): + src = manifest_tfplans.get(tfplan_dir).source + else: + src = snap.paths.snap / "etc" / tfplan_dir dst = snap.paths.user_common / "etc" / tfplan_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) diff --git a/sunbeam-python/sunbeam/commands/configure.py b/sunbeam-python/sunbeam/commands/configure.py index c37e231c..16c7bec1 100644 --- a/sunbeam-python/sunbeam/commands/configure.py +++ b/sunbeam-python/sunbeam/commands/configure.py @@ -52,6 +52,7 @@ ModelNotFoundException, run_sync, ) +from sunbeam.jobs.manifest import Manifest CLOUD_CONFIG_SECTION = "CloudConfig" LOG = logging.getLogger(__name__) @@ -727,9 +728,15 @@ def _configure( preflight_checks.append(VerifyBootstrappedCheck()) run_preflight_checks(preflight_checks, console) + manifest_obj = Manifest.load_latest_from_clusterdb() + name = utils.get_fqdn() snap = Snap() - src = snap.paths.snap / "etc" / "demo-setup/" + manifest_tfplans = manifest_obj.terraform + if manifest_tfplans and manifest_tfplans.get("demo-setup"): + src = manifest_tfplans.get("demo-setup").source + else: + src = snap.paths.snap / "etc" / "demo-setup" dst = snap.paths.user_common / "etc" / "demo-setup" try: os.mkdir(dst) diff --git a/sunbeam-python/sunbeam/commands/node.py b/sunbeam-python/sunbeam/commands/node.py index b5d39603..6267688f 100644 --- a/sunbeam-python/sunbeam/commands/node.py +++ b/sunbeam-python/sunbeam/commands/node.py @@ -83,6 +83,7 @@ validate_roles, ) from sunbeam.jobs.juju import CONTROLLER, JujuHelper +from sunbeam.jobs.manifest import Manifest LOG = logging.getLogger(__name__) console = Console() @@ -207,14 +208,20 @@ def join( controller = CONTROLLER data_location = snap.paths.user_data + manifest_obj = Manifest.load_latest_from_cluserdb() + # NOTE: install to user writable location tfplan_dirs = ["deploy-sunbeam-machine"] if is_control_node: tfplan_dirs.extend(["deploy-microk8s", "deploy-microceph", "deploy-openstack"]) if is_compute_node: tfplan_dirs.extend(["deploy-openstack-hypervisor"]) + manifest_tfplans = manifest_obj.terraform for tfplan_dir in tfplan_dirs: - src = snap.paths.snap / "etc" / tfplan_dir + if manifest_tfplans and manifest_tfplans.get(tfplan_dir): + src = manifest_tfplans.get(tfplan_dir).source + else: + src = snap.paths.snap / "etc" / tfplan_dir dst = snap.paths.user_common / "etc" / tfplan_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) diff --git a/sunbeam-python/sunbeam/commands/resize.py b/sunbeam-python/sunbeam/commands/resize.py index adf41bd8..8c3341fd 100644 --- a/sunbeam-python/sunbeam/commands/resize.py +++ b/sunbeam-python/sunbeam/commands/resize.py @@ -23,6 +23,7 @@ from sunbeam.commands.terraform import TerraformHelper, TerraformInitStep from sunbeam.jobs.common import click_option_topology, run_plan from sunbeam.jobs.juju import JujuHelper +from sunbeam.jobs.manifest import Manifest LOG = logging.getLogger(__name__) console = Console() @@ -37,8 +38,14 @@ def resize(topology: str, force: bool = False) -> None: """Expand the control plane to fit available nodes.""" + manifest_obj = Manifest.load_latest_from_clusterdb() + tfplan = "deploy-openstack" - src = snap.paths.snap / "etc" / tfplan + manifest_tfplans = manifest_obj.terraform + if manifest_tfplans and manifest_tfplans.get(tfplan): + src = manifest_tfplans.get(tfplan).source + else: + src = snap.paths.snap / "etc" / tfplan dst = snap.paths.user_common / "etc" / tfplan LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, 
dst, dirs_exist_ok=True) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 6ec08f57..1914a41a 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023 Canonical Ltd. +# Copyright (c) 2024 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,16 +18,25 @@ from typing import Any, Dict, List, Optional import yaml -from pydantic import Field, ValidationError +from pydantic import Field from pydantic.dataclasses import dataclass from sunbeam.clusterd.client import Client as clusterClient from sunbeam.jobs.common import BaseStep, Result, ResultType, Status +from sunbeam.jobs.plugin import PluginManager LOG = logging.getLogger(__name__) EMPTY_MANIFEST = """charms: {} terraform-plans: {} """ +VALID_CORE_TERRAFORM_PLANS = { + "deploy-sunbeam-machine", + "deploy-microk8s", + "deploy-microceph", + "deploy-openstack", + "deploy-openstack-hypervisor", + "demo-setup", +} @dataclass @@ -61,24 +70,50 @@ class TerraformManifest: class Manifest: juju: Optional[JujuManifest] = None charms: Optional[Dict[str, CharmsManifest]] = None - terraform_plans: Optional[Dict[str, TerraformManifest]] = Field( - default=None, alias="terraform-plans" - ) + terraform: Optional[Dict[str, TerraformManifest]] = None @classmethod def load(cls, manifest_file: Path) -> "Manifest": - try: - with manifest_file.open() as file: - return Manifest(**yaml.safe_load(file)) - except FileNotFoundError as e: - raise e - except ValidationError as e: - raise e + with manifest_file.open() as file: + return Manifest(**yaml.safe_load(file)) @classmethod - def load_latest_from_cluserdb(cls) -> "Manifest": - manifest_latest = clusterClient().cluster.get_latest_manifest() - return Manifest(**manifest_latest) + def load_latest_from_clusterdb(cls) -> "Manifest": + try: + manifest_latest = clusterClient().cluster.get_latest_manifest() + return Manifest(**yaml.safe_load(manifest_latest.get("data"))) + except Exception as e: + LOG.debug(f"Got error in creating latest manifest object: {str(e)}") + return Manifest() + + """ + # field_validator supported only in pydantix 2.x + @field_validator("terraform", "mode_after") + def validate_terraform(cls, terraform): + if terraform: + tf_keys = list(terraform.keys()) + if not set(tf_keys) <= set(VALID_TERRAFORM_PLANS): + raise ValueError( + f"Terraform keys should be one of {VALID_TERRAFORM_PLANS}" + ) + + return terraform + """ + + def validate_terraform_keys(self): + if self.terraform: + tf_keys = set(self.terraform.keys()) + plugin_terraform_plans = PluginManager().get_all_terraform_plan_dir_names() + LOG.debug( + f"Plugin terraform plan directory names: {plugin_terraform_plans}" + ) + all_tfplans = VALID_CORE_TERRAFORM_PLANS.union(plugin_terraform_plans) + if not tf_keys <= all_tfplans: + raise ValueError(f"Terraform keys should be one of {all_tfplans} ") + + def __post_init__(self): + # Add custom validations + self.validate_terraform_keys() class AddManifestStep(BaseStep): diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index e079b41e..ae055484 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -253,6 +253,17 @@ def enabled_plugins(cls, repos: Optional[list] = []) -> list: LOG.debug(f"Enabledplugins in repos {repos}: {enabled_plugins}") return enabled_plugins + @classmethod + def 
get_all_terraform_plan_dir_names(cls) -> set: + tf_plans = set() + plugins = cls.get_all_plugin_classes() + for klass in plugins: + plugin = klass() + for plan in plugin.get_terraform_plan_dir_names(): + tf_plans.add(plan) + + return tf_plans + @classmethod def register(cls, cli: click.Group) -> None: """Register the plugins. diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index 1efbe72b..e4db6e93 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -87,6 +87,10 @@ def __init__(self) -> None: ) self.configure_plan = "caas-setup" + def get_terraform_plan_dir_names(self) -> set: + """Return all terraform plan directory names.""" + return {f"deploy-{self.tfplan}", self.configure_plan} + def set_application_names(self) -> list: """Application names handled by the terraform plan.""" apps = ["magnum", "magnum-mysql-router"] diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/base.py b/sunbeam-python/sunbeam/plugins/interface/v1/base.py index a108534b..fa4436cc 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/base.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/base.py @@ -189,6 +189,10 @@ def fetch_plugin_version(self, plugin: str) -> Version: return Version(version) + def get_terraform_plan_dir_names(self) -> set: + """Return all terraform plan directory names.""" + return set() + def get_terraform_plans_base_path(self) -> Path: """Return Terraform plan base location.""" return Snap().paths.user_common diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py index c5e1c695..c7d4f0be 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py @@ -58,6 +58,7 @@ TimeoutException, run_sync, ) +from sunbeam.jobs.manifest import Manifest from sunbeam.plugins.interface.v1.base import EnableDisablePlugin LOG = logging.getLogger(__name__) @@ -123,11 +124,16 @@ def __init__(self, name: str, tf_plan_location: TerraformPlanLocation) -> None: def _get_tf_plan_full_path(self) -> Path: """Returns terraform plan absolute path.""" - if self.tf_plan_location == TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO: - return self.snap.paths.snap / "etc" / f"deploy-{self.tfplan}" + manifest_obj = Manifest.load_latest_from_clusterdb() + manifest_tfplans = manifest_obj.terraform + tfplan_dir = f"deploy-{self.tfplan}" + if manifest_tfplans and manifest_tfplans.get(tfplan_dir): + return manifest_tfplans.get(tfplan_dir).source + elif self.tf_plan_location == TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO: + return self.snap.paths.snap / "etc" / tfplan_dir else: plugin_class_dir = Path(inspect.getfile(self.__class__)).parent - return plugin_class_dir / "etc" / f"deploy-{self.tfplan}" + return plugin_class_dir / "etc" / tfplan_dir def _get_plan_name(self) -> str: """Returns plan name in format defined in cluster db.""" @@ -144,6 +150,10 @@ def get_terraform_openstack_plan_path(self) -> Path: """Return Terraform OpenStack plan location.""" return self.get_terraform_plans_base_path() / "etc" / "deploy-openstack" + def get_terraform_plan_dir_names(self) -> set: + """Return all terraform plan directory names.""" + return {f"deploy-{self.tfplan}"} + def pre_checks(self) -> None: """Perform preflight checks before enabling the plugin. 
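The get_terraform_plan_dir_names() hooks added to the plugin interfaces feed
the manifest validation shown earlier. Roughly, and assuming the sunbeam
package and its plugins are importable, the allowed keys for the terraform
section come together like this:

    # Sketch of how the allowed terraform plan names are assembled.
    from sunbeam.jobs.manifest import VALID_CORE_TERRAFORM_PLANS
    from sunbeam.jobs.plugin import PluginManager

    plugin_plans = PluginManager.get_all_terraform_plan_dir_names()
    allowed = VALID_CORE_TERRAFORM_PLANS.union(plugin_plans)
    # Manifest.validate_terraform_keys() raises ValueError for any terraform
    # key outside this set.
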
diff --git a/sunbeam-python/sunbeam/plugins/observability/plugin.py b/sunbeam-python/sunbeam/plugins/observability/plugin.py index 5fe5b8d1..a5e0b5fe 100644 --- a/sunbeam-python/sunbeam/plugins/observability/plugin.py +++ b/sunbeam-python/sunbeam/plugins/observability/plugin.py @@ -62,6 +62,7 @@ TimeoutException, run_sync, ) +from sunbeam.jobs.manifest import Manifest from sunbeam.plugins.interface.v1.base import EnableDisablePlugin, PluginRequirement from sunbeam.plugins.interface.v1.openstack import ( OPENSTACK_TERRAFORM_PLAN, @@ -436,13 +437,25 @@ def __init__(self) -> None: self.tfplan_cos = "deploy-cos" self.tfplan_grafana_agent = "deploy-grafana-agent" + def get_terraform_plan_dir_names(self) -> set: + """Return all terraform plan directory names.""" + return {self.tfplan_cos, self.tfplan_grafana_agent} + def pre_enable(self): - src = Path(__file__).parent / "etc" / self.tfplan_cos + manifest_obj = Manifest.load_latest_from_clusterdb() + manifest_tfplans = manifest_obj.terraform + if manifest_tfplans and manifest_tfplans.get(self.tfplan_cos): + src = manifest_tfplans.get(self.tfplan_cos).source + else: + src = Path(__file__).parent / "etc" / self.tfplan_cos dst = self.snap.paths.user_common / "etc" / self.tfplan_cos LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) - src = Path(__file__).parent / "etc" / self.tfplan_grafana_agent + if manifest_tfplans and manifest_tfplans.get(self.tfplan_grafana_agent): + src = manifest_tfplans.get(self.tfplan_grafana_agent).source + else: + src = Path(__file__).parent / "etc" / self.tfplan_grafana_agent dst = self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) diff --git a/sunbeam-python/sunbeam/plugins/pro/plugin.py b/sunbeam-python/sunbeam/plugins/pro/plugin.py index 0042d4e5..ef528d4f 100644 --- a/sunbeam-python/sunbeam/plugins/pro/plugin.py +++ b/sunbeam-python/sunbeam/plugins/pro/plugin.py @@ -34,6 +34,7 @@ ) from sunbeam.jobs.common import BaseStep, Result, ResultType, run_plan from sunbeam.jobs.juju import MODEL, JujuHelper, TimeoutException, run_sync +from sunbeam.jobs.manifest import Manifest from sunbeam.plugins.interface.v1.base import EnableDisablePlugin LOG = logging.getLogger(__name__) @@ -153,8 +154,17 @@ def __init__(self) -> None: self.snap = Snap() self.tfplan = f"deploy-{self.name}" + def get_terraform_plan_dir_names(self) -> set: + """Return all terraform plan directory names.""" + return {self.tfplan} + def pre_enable(self): - src = Path(__file__).parent / "etc" / self.tfplan + manifest_obj = Manifest.load_latest_from_clusterdb() + manifest_tfplans = manifest_obj.terraform + if manifest_tfplans and manifest_tfplans.get(self.tfplan): + src = manifest_tfplans.get(self.tfplan).source + else: + src = Path(__file__).parent / "etc" / self.tfplan dst = self.snap.paths.user_common / "etc" / self.tfplan LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) From 12eb74733316723bd4f0715df19fe2f7b9872077 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Thu, 11 Jan 2024 07:11:12 +0530 Subject: [PATCH 06/27] Add revision/config to terraform plans Add revision/config to microk8s, microceph, openstack-hypervisor terraform plans. 
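The new charm_*_config variables are folded into the hard-coded defaults with
Terraform's merge(), where later arguments take precedence on duplicate keys,
so a user-supplied config can override a default such as snap-channel. A
Python analogue of that precedence (the values here are arbitrary examples):

    # merge({snap-channel = ...}, var.charm_microceph_config) behaves like:
    defaults = {"snap-channel": "latest/stable"}
    charm_config = {"snap-channel": "reef/stable", "debug": "true"}  # user-supplied
    merged = {**defaults, **charm_config}  # later mapping wins on key collisions
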
--- cloud/etc/deploy-microceph/main.tf | 11 ++++++----- cloud/etc/deploy-microceph/variables.tf | 13 +++++++++++++ cloud/etc/deploy-microk8s/main.tf | 11 ++++++----- cloud/etc/deploy-microk8s/variables.tf | 13 +++++++++++++ cloud/etc/deploy-openstack-hypervisor/main.tf | 11 ++++++----- cloud/etc/deploy-openstack-hypervisor/variables.tf | 12 ++++++++++++ cloud/etc/deploy-sunbeam-machine/main.tf | 9 ++++++--- cloud/etc/deploy-sunbeam-machine/variables.tf | 12 ++++++++++++ 8 files changed, 74 insertions(+), 18 deletions(-) diff --git a/cloud/etc/deploy-microceph/main.tf b/cloud/etc/deploy-microceph/main.tf index 35e75f99..d4d5921c 100644 --- a/cloud/etc/deploy-microceph/main.tf +++ b/cloud/etc/deploy-microceph/main.tf @@ -37,14 +37,15 @@ resource "juju_application" "microceph" { units = length(var.machine_ids) # need to manage the number of units charm { - name = "microceph" - channel = var.charm_microceph_channel - series = "jammy" + name = "microceph" + channel = var.charm_microceph_channel + revision = var.charm_microceph_revision + series = "jammy" } - config = { + config = merge({ snap-channel = var.microceph_channel - } + }, var.charm_microceph_config) } # juju_offer.microceph_offer will be created diff --git a/cloud/etc/deploy-microceph/variables.tf b/cloud/etc/deploy-microceph/variables.tf index 9d12d794..7495dd6d 100644 --- a/cloud/etc/deploy-microceph/variables.tf +++ b/cloud/etc/deploy-microceph/variables.tf @@ -15,9 +15,22 @@ variable "charm_microceph_channel" { description = "Operator channel for microceph deployment" + type = string default = "edge" } +variable "charm_microceph_revision" { + description = "Operator channel revision for microceph deployment" + type = number + default = null +} + +variable "charm_microceph_config" { + description = "Operator config for microceph deployment" + type = map(string) + default = {} +} + variable "microceph_channel" { description = "K8S channel to deploy, not the operator channel" default = "latest/stable" diff --git a/cloud/etc/deploy-microk8s/main.tf b/cloud/etc/deploy-microk8s/main.tf index 60cad3ff..88827798 100644 --- a/cloud/etc/deploy-microk8s/main.tf +++ b/cloud/etc/deploy-microk8s/main.tf @@ -37,15 +37,16 @@ resource "juju_application" "microk8s" { units = length(var.machine_ids) # need to manage the number of units charm { - name = "microk8s" - channel = var.charm_microk8s_channel - series = "jammy" + name = "microk8s" + channel = var.charm_microk8s_channel + revision = var.charm_microk8s_revision + series = "jammy" } - config = { + config = merge({ channel = var.microk8s_channel addons = join(" ", [for key, value in var.addons : "${key}:${value}"]) disable_cert_reissue = true kubelet_serialize_image_pulls = false - } + }, var.charm_microk8s_config) } diff --git a/cloud/etc/deploy-microk8s/variables.tf b/cloud/etc/deploy-microk8s/variables.tf index 714ff159..fe05f087 100644 --- a/cloud/etc/deploy-microk8s/variables.tf +++ b/cloud/etc/deploy-microk8s/variables.tf @@ -15,9 +15,22 @@ variable "charm_microk8s_channel" { description = "Operator channel for microk8s deployment" + type = string default = "legacy/stable" } +variable "charm_microk8s_revision" { + description = "Operator channel revision for microk8s deployment" + type = number + default = null +} + +variable "charm_microk8s_config" { + description = "Operator config for microk8s deployment" + type = map(string) + default = {} +} + variable "microk8s_channel" { description = "K8S channel to deploy, not the operator channel" default = "1.28-strict/stable" diff --git 
a/cloud/etc/deploy-openstack-hypervisor/main.tf b/cloud/etc/deploy-openstack-hypervisor/main.tf index e1ec5c76..854a3cea 100644 --- a/cloud/etc/deploy-openstack-hypervisor/main.tf +++ b/cloud/etc/deploy-openstack-hypervisor/main.tf @@ -38,14 +38,15 @@ resource "juju_application" "openstack-hypervisor" { units = length(var.machine_ids) # need to manage the number of units charm { - name = "openstack-hypervisor" - channel = var.charm_channel - series = "jammy" + name = "openstack-hypervisor" + channel = var.charm_channel + revision = var.charm_revision + series = "jammy" } - config = { + config = merge({ snap-channel = var.snap_channel - } + }, var.charm_config) } diff --git a/cloud/etc/deploy-openstack-hypervisor/variables.tf b/cloud/etc/deploy-openstack-hypervisor/variables.tf index a60737ff..3b0ed46e 100644 --- a/cloud/etc/deploy-openstack-hypervisor/variables.tf +++ b/cloud/etc/deploy-openstack-hypervisor/variables.tf @@ -31,6 +31,18 @@ variable "charm_channel" { default = "2023.2/edge" } +variable "charm_revision" { + description = "Charm channel revision to deploy openstack-hypervisor charm from" + type = number + default = null +} + +variable "charm_config" { + description = "Charm config to deploy openstack-hypervisor charm from" + type = map(string) + default = {} +} + variable "openstack_model" { description = "Name of OpenStack model." type = string diff --git a/cloud/etc/deploy-sunbeam-machine/main.tf b/cloud/etc/deploy-sunbeam-machine/main.tf index 6cd417ee..b02d94f6 100644 --- a/cloud/etc/deploy-sunbeam-machine/main.tf +++ b/cloud/etc/deploy-sunbeam-machine/main.tf @@ -33,9 +33,12 @@ resource "juju_application" "sunbeam-machine" { units = length(var.machine_ids) # need to manage the number of units charm { - name = "sunbeam-machine" - channel = var.charm_channel - series = "jammy" + name = "sunbeam-machine" + channel = var.charm_channel + revision = var.charm_revision + series = "jammy" } + config = var.charm_config + } diff --git a/cloud/etc/deploy-sunbeam-machine/variables.tf b/cloud/etc/deploy-sunbeam-machine/variables.tf index 61842869..4784c610 100644 --- a/cloud/etc/deploy-sunbeam-machine/variables.tf +++ b/cloud/etc/deploy-sunbeam-machine/variables.tf @@ -25,6 +25,18 @@ variable "charm_channel" { default = "latest/edge" } +variable "charm_revision" { + description = "Charm channel revision to deploy openstack-hypervisor charm from" + type = number + default = null +} + +variable "charm_config" { + description = "Charm config to deploy openstack-hypervisor charm from" + type = map(string) + default = {} +} + variable "machine_model" { description = "Name of model to deploy sunbeam-machine into." 
type = string From df441b90d880417299245b8f7fda1b316a1aef92 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Sun, 14 Jan 2024 11:24:09 +0530 Subject: [PATCH 07/27] Use manifest info for terraform plans * Create default manifest * Add terraform plan location to default manifest * Define manifest part and tfvar/manifest map in plugins * Remove configurable tfplan options * Use latest manifest to populate terraform vars in openstack, configure plan * Use latest manifest to populate terraform vars in plugins --- sunbeam-python/sunbeam/clusterd/service.py | 8 ++ sunbeam-python/sunbeam/commands/bootstrap.py | 29 ++-- sunbeam-python/sunbeam/commands/configure.py | 16 +-- sunbeam-python/sunbeam/commands/hypervisor.py | 3 + sunbeam-python/sunbeam/commands/node.py | 24 ++-- sunbeam-python/sunbeam/commands/openstack.py | 9 +- sunbeam-python/sunbeam/commands/refresh.py | 20 ++- sunbeam-python/sunbeam/commands/resize.py | 17 ++- sunbeam-python/sunbeam/jobs/manifest.py | 129 ++++++++++++++---- sunbeam-python/sunbeam/jobs/plugin.py | 22 ++- sunbeam-python/sunbeam/jobs/steps.py | 3 + sunbeam-python/sunbeam/plugins/caas/plugin.py | 33 ++++- sunbeam-python/sunbeam/plugins/dns/plugin.py | 32 ++++- .../sunbeam/plugins/interface/v1/base.py | 42 +++++- .../sunbeam/plugins/interface/v1/openstack.py | 34 ++--- sunbeam-python/sunbeam/plugins/ldap/plugin.py | 28 +++- .../sunbeam/plugins/loadbalancer/plugin.py | 17 +++ .../sunbeam/plugins/observability/plugin.py | 71 +++++----- .../sunbeam/plugins/orchestration/plugin.py | 17 +++ sunbeam-python/sunbeam/plugins/pro/plugin.py | 30 ++-- .../sunbeam/plugins/secrets/plugin.py | 17 +++ .../sunbeam/plugins/telemetry/plugin.py | 41 +++++- .../sunbeam/plugins/vault/plugin.py | 17 +++ sunbeam-python/sunbeam/utils.py | 15 ++ sunbeam-python/sunbeam/versions.py | 90 +++++++++++- .../unit/sunbeam/commands/test_hypervisor.py | 13 +- .../unit/sunbeam/commands/test_openstack.py | 17 ++- .../sunbeam/commands/upgrades/test_base.py | 2 + .../tests/unit/sunbeam/jobs/test_steps.py | 29 +++- .../unit/sunbeam/plugins/test_openstack.py | 19 ++- 30 files changed, 651 insertions(+), 193 deletions(-) diff --git a/sunbeam-python/sunbeam/clusterd/service.py b/sunbeam-python/sunbeam/clusterd/service.py index 6396fff5..49381428 100644 --- a/sunbeam-python/sunbeam/clusterd/service.py +++ b/sunbeam-python/sunbeam/clusterd/service.py @@ -49,6 +49,12 @@ class ConfigItemNotFoundException(RemoteException): pass +class ManifestItemNotFoundException(RemoteException): + """Raise when ManifestItem cannot be found on the remote""" + + pass + + class NodeAlreadyExistsException(RemoteException): """Raised when the node already exists""" @@ -171,6 +177,8 @@ def _request(self, method, path, **kwargs): ) elif "ConfigItem not found" in error: raise ConfigItemNotFoundException("ConfigItem not found") + elif "ManifestItem not found" in error: + raise ManifestItemNotFoundException("ManifestItem not found") raise e return response.json() diff --git a/sunbeam-python/sunbeam/commands/bootstrap.py b/sunbeam-python/sunbeam/commands/bootstrap.py index c78d8286..eda5bdf3 100644 --- a/sunbeam-python/sunbeam/commands/bootstrap.py +++ b/sunbeam-python/sunbeam/commands/bootstrap.py @@ -82,6 +82,7 @@ ) from sunbeam.jobs.juju import CONTROLLER, JujuHelper from sunbeam.jobs.manifest import AddManifestStep, Manifest +from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -146,10 +147,11 @@ def bootstrap( # Validate manifest file manifest_obj = None if manifest: - manifest_obj = 
Manifest.load(manifest_file=manifest) - LOG.debug(f"Manifest object created with no errors: {manifest_obj}") + manifest_obj = Manifest.load(manifest_file=manifest, on_default=True) else: - manifest_obj = Manifest() + manifest_obj = Manifest.get_default_manifest() + + LOG.debug(f"Manifest used for deployment: {manifest_obj}") # Bootstrap node must always have the control role if Role.CONTROL not in roles: @@ -171,23 +173,20 @@ def bootstrap( data_location = snap.paths.user_data # NOTE: install to user writable location - tfplan_dirs = ["deploy-sunbeam-machine"] + tfplans = ["sunbeam-machine-plan"] if is_control_node: - tfplan_dirs.extend( + tfplans.extend( [ - "deploy-microk8s", - "deploy-microceph", - "deploy-openstack", - "deploy-openstack-hypervisor", + "microk8s-plan", + "microceph-plan", + "openstack-plan", + "hypervisor-plan", ] ) manifest_tfplans = manifest_obj.terraform - for tfplan_dir in tfplan_dirs: - if manifest_tfplans and manifest_tfplans.get(tfplan_dir): - src = manifest_tfplans.get(tfplan_dir).source - else: - src = snap.paths.snap / "etc" / tfplan_dir - dst = snap.paths.user_common / "etc" / tfplan_dir + for tfplan in tfplans: + src = manifest_tfplans.get(tfplan).source + dst = snap.paths.user_common / "etc" / TERRAFORM_DIR_NAMES.get(tfplan, tfplan) LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) diff --git a/sunbeam-python/sunbeam/commands/configure.py b/sunbeam-python/sunbeam/commands/configure.py index 16c7bec1..6769a4f0 100644 --- a/sunbeam-python/sunbeam/commands/configure.py +++ b/sunbeam-python/sunbeam/commands/configure.py @@ -53,6 +53,7 @@ run_sync, ) from sunbeam.jobs.manifest import Manifest +from sunbeam.versions import TERRAFORM_DIR_NAMES CLOUD_CONFIG_SECTION = "CloudConfig" LOG = logging.getLogger(__name__) @@ -728,16 +729,15 @@ def _configure( preflight_checks.append(VerifyBootstrappedCheck()) run_preflight_checks(preflight_checks, console) - manifest_obj = Manifest.load_latest_from_clusterdb() + manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) name = utils.get_fqdn() + tfplan = "demo-setup" + tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) snap = Snap() manifest_tfplans = manifest_obj.terraform - if manifest_tfplans and manifest_tfplans.get("demo-setup"): - src = manifest_tfplans.get("demo-setup").source - else: - src = snap.paths.snap / "etc" / "demo-setup" - dst = snap.paths.user_common / "etc" / "demo-setup" + src = manifest_tfplans.get(tfplan).source + dst = snap.paths.user_common / "etc" / tfplan_dir try: os.mkdir(dst) except FileExistsError: @@ -755,9 +755,9 @@ def _configure( raise click.ClickException("Please run `sunbeam cluster bootstrap` first") admin_credentials = retrieve_admin_credentials(jhelper, OPENSTACK_MODEL) tfhelper = TerraformHelper( - path=snap.paths.user_common / "etc" / "demo-setup", + path=snap.paths.user_common / "etc" / tfplan_dir, env=admin_credentials, - plan="demo-setup", + plan=tfplan, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/commands/hypervisor.py b/sunbeam-python/sunbeam/commands/hypervisor.py index 00221938..a67d436e 100644 --- a/sunbeam-python/sunbeam/commands/hypervisor.py +++ b/sunbeam-python/sunbeam/commands/hypervisor.py @@ -38,6 +38,7 @@ TimeoutException, run_sync, ) +from sunbeam.jobs.manifest import Manifest LOG = logging.getLogger(__name__) CONFIG_KEY = "TerraformVarsHypervisor" @@ -105,6 +106,8 @@ def run(self, status: Optional[Status] = None) -> Result: "openstack-state-config": openstack_backend_config, } ) + m = 
Manifest.load_latest_from_clusterdb(on_default=True) + tfvars.update(m.get_tfvars(self.tfhelper.plan)) update_config(self.client, CONFIG_KEY, tfvars) self.tfhelper.write_tfvars(tfvars) diff --git a/sunbeam-python/sunbeam/commands/node.py b/sunbeam-python/sunbeam/commands/node.py index 6267688f..b88f06cc 100644 --- a/sunbeam-python/sunbeam/commands/node.py +++ b/sunbeam-python/sunbeam/commands/node.py @@ -84,6 +84,7 @@ ) from sunbeam.jobs.juju import CONTROLLER, JujuHelper from sunbeam.jobs.manifest import Manifest +from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -208,32 +209,31 @@ def join( controller = CONTROLLER data_location = snap.paths.user_data - manifest_obj = Manifest.load_latest_from_cluserdb() + manifest_obj = Manifest.load_latest_from_cluserdb_on_default() # NOTE: install to user writable location - tfplan_dirs = ["deploy-sunbeam-machine"] + tfplans = ["sunbeam-machine-plan"] if is_control_node: - tfplan_dirs.extend(["deploy-microk8s", "deploy-microceph", "deploy-openstack"]) + tfplans.extend(["microk8s-plan", "microceph-plan", "openstack-plan"]) if is_compute_node: - tfplan_dirs.extend(["deploy-openstack-hypervisor"]) + tfplans.extend(["hypervisor-plan"]) manifest_tfplans = manifest_obj.terraform - for tfplan_dir in tfplan_dirs: - if manifest_tfplans and manifest_tfplans.get(tfplan_dir): - src = manifest_tfplans.get(tfplan_dir).source - else: - src = snap.paths.snap / "etc" / tfplan_dir - dst = snap.paths.user_common / "etc" / tfplan_dir + for tfplan in tfplans: + src = manifest_tfplans.get(tfplan).source + dst = snap.paths.user_common / "etc" / TERRAFORM_DIR_NAMES.get(tfplan, tfplan) LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) tfhelper_openstack_deploy = TerraformHelper( - path=snap.paths.user_common / "etc" / "deploy-openstack", + path=snap.paths.user_common / "etc" / TERRAFORM_DIR_NAMES.get("openstack-plan"), plan="openstack-plan", backend="http", data_location=data_location, ) tfhelper_hypervisor_deploy = TerraformHelper( - path=snap.paths.user_common / "etc" / "deploy-openstack-hypervisor", + path=snap.paths.user_common + / "etc" # noqa: W503 + / TERRAFORM_DIR_NAMES.get("hypervisor-plan"), # noqa: W503 plan="hypervisor-plan", backend="http", data_location=data_location, diff --git a/sunbeam-python/sunbeam/commands/openstack.py b/sunbeam-python/sunbeam/commands/openstack.py index b4ffcb4e..160927ef 100644 --- a/sunbeam-python/sunbeam/commands/openstack.py +++ b/sunbeam-python/sunbeam/commands/openstack.py @@ -51,7 +51,7 @@ TimeoutException, run_sync, ) -from sunbeam.versions import OPENSTACK_CHANNEL, OVN_CHANNEL, RABBITMQ_CHANNEL +from sunbeam.jobs.manifest import Manifest LOG = logging.getLogger(__name__) OPENSTACK_MODEL = "openstack" @@ -214,10 +214,6 @@ def run(self, status: Optional[Status] = None) -> Result: tfvars.update( { "model": self.model, - # Make these channel options configurable by the user - "openstack-channel": OPENSTACK_CHANNEL, - "ovn-channel": OVN_CHANNEL, - "rabbitmq-channel": RABBITMQ_CHANNEL, "cloud": self.cloud, "credential": f"{self.cloud}{CREDENTIAL_SUFFIX}", "config": {"workload-storage": MICROK8S_DEFAULT_STORAGECLASS}, @@ -225,6 +221,9 @@ def run(self, status: Optional[Status] = None) -> Result: } ) tfvars.update(self.get_storage_tfvars()) + m = Manifest.load_latest_from_clusterdb(on_default=True) + LOG.debug(f"Latest manifest in openstack: {m}") + tfvars.update(m.get_tfvars(self.tfhelper.plan)) update_config(self.client, self._CONFIG, tfvars) 
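The m.get_tfvars(self.tfhelper.plan) call above is driven by the CHARM_MANIFEST_TFVARS_MAP introduced in sunbeam/versions.py later in this patch: for each charm in the plan's map, any channel/revision/config set in the manifest is copied into the matching terraform variable. A simplified, self-contained sketch of that lookup (the function name get_tfvars_sketch and the single ovn-central entry are illustrative; the real map covers every charm in the plan):

tfvar_map = {
    "openstack-plan": {
        "ovn-central": {
            "channel": "ovn-central-channel",
            "revision": "ovn-central-revision",
            "config": "ovn-central-config",
        },
    },
}


def get_tfvars_sketch(charms: dict, plan: str) -> dict:
    """Copy channel/revision/config set on a manifest charm into terraform vars."""
    tfvars = {}
    for charm, attr_map in tfvar_map.get(plan, {}).items():
        manifest_charm = charms.get(charm) or {}
        for attr, tfvar in attr_map.items():
            if manifest_charm.get(attr):
                tfvars[tfvar] = manifest_charm[attr]
    return tfvars


# With the default manifest, ovn-central carries OVN_CHANNEL ("23.09/edge"), so:
# get_tfvars_sketch({"ovn-central": {"channel": "23.09/edge"}}, "openstack-plan")
# returns {"ovn-central-channel": "23.09/edge"}.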
self.tfhelper.write_tfvars(tfvars) self.update_status(status, "deploying services") diff --git a/sunbeam-python/sunbeam/commands/refresh.py b/sunbeam-python/sunbeam/commands/refresh.py index 15d7ba1a..a49c94c1 100644 --- a/sunbeam-python/sunbeam/commands/refresh.py +++ b/sunbeam-python/sunbeam/commands/refresh.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -import tempfile from pathlib import Path from typing import Optional @@ -26,7 +25,8 @@ from sunbeam.commands.upgrades.intra_channel import LatestInChannelCoordinator from sunbeam.jobs.common import run_plan from sunbeam.jobs.juju import JujuHelper -from sunbeam.jobs.manifest import EMPTY_MANIFEST, AddManifestStep, Manifest +from sunbeam.jobs.manifest import AddManifestStep, Manifest +from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -73,26 +73,22 @@ def refresh( # Validate manifest file manifest_obj = None if clear_manifest: - with tempfile.NamedTemporaryFile(mode="w+t") as tmpfile: - tmpfile.write(EMPTY_MANIFEST) - tmpfile.seek(0) - manifest_obj = Manifest.load(manifest_file=Path(tmpfile.name)) - LOG.debug(f"Manifest object created with no errors: {manifest_obj}") - run_plan([AddManifestStep(Path(tmpfile.name))], console) + run_plan([AddManifestStep()], console) elif manifest: manifest_obj = Manifest.load(manifest_file=manifest) LOG.debug(f"Manifest object created with no errors: {manifest_obj}") run_plan([AddManifestStep(manifest)], console) else: LOG.debug("Getting latest manifest") - manifest_obj = Manifest.load_latest_from_cluserdb() + manifest_obj = Manifest.load_latest_from_cluserdb(on_default=True) LOG.debug(f"Manifest object created with no errors: {manifest_obj}") - tfplan = "deploy-openstack" + tfplan = "openstack-plan" + tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) data_location = snap.paths.user_data tfhelper = TerraformHelper( - path=snap.paths.user_common / "etc" / tfplan, - plan="openstack-plan", + path=snap.paths.user_common / "etc" / tfplan_dir, + plan=tfplan, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/commands/resize.py b/sunbeam-python/sunbeam/commands/resize.py index 8c3341fd..13c3e000 100644 --- a/sunbeam-python/sunbeam/commands/resize.py +++ b/sunbeam-python/sunbeam/commands/resize.py @@ -24,6 +24,7 @@ from sunbeam.jobs.common import click_option_topology, run_plan from sunbeam.jobs.juju import JujuHelper from sunbeam.jobs.manifest import Manifest +from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -38,22 +39,20 @@ def resize(topology: str, force: bool = False) -> None: """Expand the control plane to fit available nodes.""" - manifest_obj = Manifest.load_latest_from_clusterdb() + manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) - tfplan = "deploy-openstack" + tfplan = "openstack-plan" + tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) manifest_tfplans = manifest_obj.terraform - if manifest_tfplans and manifest_tfplans.get(tfplan): - src = manifest_tfplans.get(tfplan).source - else: - src = snap.paths.snap / "etc" / tfplan - dst = snap.paths.user_common / "etc" / tfplan + src = manifest_tfplans.get(tfplan).source + dst = snap.paths.user_common / "etc" / tfplan_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) data_location = snap.paths.user_data tfhelper = TerraformHelper( - path=snap.paths.user_common / "etc" / tfplan, 
- plan="openstack-plan", + path=snap.paths.user_common / "etc" / tfplan_dir, + plan=tfplan, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 1914a41a..47edf6a1 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -13,30 +13,32 @@ # See the License for the specific language governing permissions and # limitations under the License. +import copy import logging +from dataclasses import asdict from pathlib import Path from typing import Any, Dict, List, Optional import yaml from pydantic import Field from pydantic.dataclasses import dataclass +from snaphelpers import Snap +from sunbeam import utils from sunbeam.clusterd.client import Client as clusterClient +from sunbeam.clusterd.service import ManifestItemNotFoundException from sunbeam.jobs.common import BaseStep, Result, ResultType, Status from sunbeam.jobs.plugin import PluginManager +from sunbeam.versions import ( + CHARM_MANIFEST_TFVARS_MAP, + MANIFEST_CHARM_VERSIONS, + TERRAFORM_DIR_NAMES, +) LOG = logging.getLogger(__name__) EMPTY_MANIFEST = """charms: {} terraform-plans: {} """ -VALID_CORE_TERRAFORM_PLANS = { - "deploy-sunbeam-machine", - "deploy-microk8s", - "deploy-microceph", - "deploy-openstack", - "deploy-openstack-hypervisor", - "demo-setup", -} @dataclass @@ -73,19 +75,79 @@ class Manifest: terraform: Optional[Dict[str, TerraformManifest]] = None @classmethod - def load(cls, manifest_file: Path) -> "Manifest": + def load(cls, manifest_file: Path, on_default: bool = False) -> "Manifest": + """Load the manifest with the provided file input""" + if on_default: + return cls.load_on_default() + with manifest_file.open() as file: return Manifest(**yaml.safe_load(file)) @classmethod - def load_latest_from_clusterdb(cls) -> "Manifest": + def load_latest_from_clusterdb(cls, on_default: bool = False) -> "Manifest": + """Load the latest manifest from clusterdb + + If on_default is True, load this manifest data over the default + values. 
+ """ + if on_default: + return cls.load_latest_from_clusterdb_on_default() + try: manifest_latest = clusterClient().cluster.get_latest_manifest() return Manifest(**yaml.safe_load(manifest_latest.get("data"))) - except Exception as e: - LOG.debug(f"Got error in creating latest manifest object: {str(e)}") + except ManifestItemNotFoundException as e: + LOG.debug(f"Error in getting latest manifest from cluster DB: {str(e)}") return Manifest() + @classmethod + def load_on_default(cls, manifest_file: Path) -> "Manifest": + """Load manifest and override the default manifest""" + with manifest_file.open() as file: + override = yaml.safe_load(file) + default = cls.get_default_manifest_as_dict() + utils.merge_dict(default, override) + return Manifest(**default) + + @classmethod + def load_latest_from_clusterdb_on_default(cls) -> "Manifest": + """Load the latest manifest from clusterdb""" + default = cls.get_default_manifest_as_dict() + try: + manifest_latest = clusterClient().cluster.get_latest_manifest() + override = yaml.safe_load(manifest_latest.get("data")) + except ManifestItemNotFoundException as e: + LOG.debug(f"Error in getting latest manifest from cluster DB: {str(e)}") + override = {} + + utils.merge_dict(default, override) + m = Manifest(**default) + LOG.debug(f"Latest applied manifest with defaults: {m}") + return m + + @classmethod + def get_default_manifest_as_dict(cls) -> dict: + snap = Snap() + m = {"juju": None, "charms": {}, "terraform": {}} + m["charms"] = { + charm: {"channel": channel} + for charm, channel in MANIFEST_CHARM_VERSIONS.items() + } + m["terraform"] = { + tfplan: {"source": Path(snap.paths.snap / "etc" / tfplan_dir)} + for tfplan, tfplan_dir in TERRAFORM_DIR_NAMES.items() + } + + # Update manifests from plugins + m_plugin = PluginManager().get_all_plugin_manifests() + utils.merge_dict(m, m_plugin) + + return copy.deepcopy(m) + + @classmethod + def get_default_manifest(cls) -> "Manifest": + return Manifest(**cls.get_default_manifest_as_dict()) + """ # field_validator supported only in pydantix 2.x @field_validator("terraform", "mode_after") @@ -100,37 +162,56 @@ def validate_terraform(cls, terraform): return terraform """ - def validate_terraform_keys(self): + def validate_terraform_keys(self, default_manifest: dict): if self.terraform: tf_keys = set(self.terraform.keys()) - plugin_terraform_plans = PluginManager().get_all_terraform_plan_dir_names() - LOG.debug( - f"Plugin terraform plan directory names: {plugin_terraform_plans}" - ) - all_tfplans = VALID_CORE_TERRAFORM_PLANS.union(plugin_terraform_plans) + all_tfplans = default_manifest.get("terraform", {}).keys() if not tf_keys <= all_tfplans: raise ValueError(f"Terraform keys should be one of {all_tfplans} ") def __post_init__(self): + LOG.debug("Calling __post__init__") + manifest_dict = self.get_default_manifest_as_dict() # Add custom validations - self.validate_terraform_keys() + self.validate_terraform_keys(manifest_dict) + + def get_tfvars(self, plan: str) -> dict: + tfvars = {} + tfvar_map = copy.deepcopy(CHARM_MANIFEST_TFVARS_MAP) + tfvar_map_plugin = PluginManager().get_all_plugin_manfiest_tfvar_map() + utils.merge_dict(tfvar_map, tfvar_map_plugin) + + for charm, value in tfvar_map.get(plan, {}).items(): + manifest_charm = asdict(self.charms.get(charm)) + for key, val in value.items(): + if manifest_charm.get(key): + tfvars[val] = manifest_charm.get(key) + + return tfvars class AddManifestStep(BaseStep): """Add Manifest file to cluster database""" - def __init__(self, manifest: Path): + def __init__(self, 
manifest: Optional[Path] = None): super().__init__("Write Manifest to database", "Writing Manifest to database") + # Write EMPTY_MANIFEST if manifest not provided self.manifest = manifest self.client = clusterClient() def run(self, status: Optional[Status] = None) -> Result: """Write manifest to cluster db""" try: - with self.manifest.open("r") as file: - data = yaml.safe_load(file) - id = self.client.cluster.add_manifest(data=yaml.safe_dump(data)) - return Result(ResultType.COMPLETED, id) + if self.manifest: + with self.manifest.open("r") as file: + data = yaml.safe_load(file) + id = self.client.cluster.add_manifest(data=yaml.safe_dump(data)) + else: + id = self.client.cluster.add_manifest( + data=yaml.safe_dump(EMPTY_MANIFEST) + ) + + return Result(ResultType.COMPLETED, id) except Exception as e: LOG.warning(str(e)) return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index ae055484..ad029b28 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -23,6 +23,7 @@ import yaml from snaphelpers import Snap +from sunbeam import utils from sunbeam.clusterd.client import Client from sunbeam.clusterd.service import ( ClusterServiceUnavailableException, @@ -254,15 +255,26 @@ def enabled_plugins(cls, repos: Optional[list] = []) -> list: return enabled_plugins @classmethod - def get_all_terraform_plan_dir_names(cls) -> set: - tf_plans = set() + def get_all_plugin_manifests(cls) -> dict: + manifest = {} plugins = cls.get_all_plugin_classes() for klass in plugins: plugin = klass() - for plan in plugin.get_terraform_plan_dir_names(): - tf_plans.add(plan) + m_dict = plugin.manifest() + utils.merge_dict(manifest, m_dict) - return tf_plans + return manifest + + @classmethod + def get_all_plugin_manfiest_tfvar_map(cls) -> dict: + tfvar_map = {} + plugins = cls.get_all_plugin_classes() + for klass in plugins: + plugin = klass() + m_dict = plugin.charm_manifest_tfvar_map() + utils.merge_dict(tfvar_map, m_dict) + + return tfvar_map @classmethod def register(cls, cli: click.Group) -> None: diff --git a/sunbeam-python/sunbeam/jobs/steps.py b/sunbeam-python/sunbeam/jobs/steps.py index a0fe7d14..1b425473 100644 --- a/sunbeam-python/sunbeam/jobs/steps.py +++ b/sunbeam-python/sunbeam/jobs/steps.py @@ -31,6 +31,7 @@ TimeoutException, run_sync, ) +from sunbeam.jobs.manifest import Manifest LOG = logging.getLogger(__name__) @@ -90,6 +91,8 @@ def run(self, status: Optional[Status] = None) -> Result: tfvars = {} tfvars.update(self.extra_tfvars()) + m = Manifest.load_latest_from_clusterdb(on_default=True) + tfvars.update(m.get_tfvars(self.tfhelper.plan)) tfvars.update({"machine_ids": machine_ids}) update_config(self.client, self.config, tfvars) self.tfhelper.write_tfvars(tfvars) diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index e4db6e93..44fb0fd8 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -36,6 +36,7 @@ ) from sunbeam.jobs.common import BaseStep, Result, ResultType, read_config, run_plan from sunbeam.jobs.juju import JujuHelper +from sunbeam.jobs.manifest import Manifest from sunbeam.plugins.interface.v1.base import PluginRequirement from sunbeam.plugins.interface.v1.openstack import ( OpenStackControlPlanePlugin, @@ -43,6 +44,7 @@ ) from sunbeam.plugins.orchestration.plugin import OrchestrationPlugin from sunbeam.plugins.secrets.plugin import SecretsPlugin +from 
sunbeam.versions import OPENSTACK_CHANNEL LOG = logging.getLogger(__name__) console = Console() @@ -87,9 +89,30 @@ def __init__(self) -> None: ) self.configure_plan = "caas-setup" - def get_terraform_plan_dir_names(self) -> set: - """Return all terraform plan directory names.""" - return {f"deploy-{self.tfplan}", self.configure_plan} + def manifest(self) -> dict: + """Manifest in dict format.""" + return { + "charms": { + "magnum": {"channel": OPENSTACK_CHANNEL}, + }, + "terraform": { + self.configure_plan: { + "source": Path(__file__).parent / "etc" / self.configure_plan + }, + }, + } + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "magnum": { + "channel": "magnum-channel", + "revision": "magnum-revision", + "config": "magnum-config", + } + } + } def set_application_names(self) -> list: """Application names handled by the terraform plan.""" @@ -157,7 +180,9 @@ def disable_plugin(self) -> None: @click.command() def configure(self): """Configure Cloud for Container as a Service use.""" - src = Path(__file__).parent / "etc" / self.configure_plan + manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) + manifest_tfplans = manifest_obj.terraform + src = manifest_tfplans.get(self.configure_plan).source dst = self.snap.paths.user_common / "etc" / self.configure_plan LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) diff --git a/sunbeam-python/sunbeam/plugins/dns/plugin.py b/sunbeam-python/sunbeam/plugins/dns/plugin.py index c071ffcf..306af9ae 100644 --- a/sunbeam-python/sunbeam/plugins/dns/plugin.py +++ b/sunbeam-python/sunbeam/plugins/dns/plugin.py @@ -31,7 +31,7 @@ OpenStackControlPlanePlugin, TerraformPlanLocation, ) -from sunbeam.versions import OPENSTACK_CHANNEL +from sunbeam.versions import BIND_CHANNEL, OPENSTACK_CHANNEL LOG = logging.getLogger(__name__) console = Console() @@ -52,12 +52,38 @@ def __init__(self) -> None: ) self.nameservers = None + def manifest(self) -> dict: + """Manifest in dict format.""" + return { + "charms": { + "designate": {"channel": OPENSTACK_CHANNEL}, + "bind": {"channel": BIND_CHANNEL}, + } + } + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "designate": { + "channel": "desginate-channel", + "revision": "designate-revision", + "config": "designate-config", + }, + "bind": { + "channel": "bind-channel", + "revision": "bind-revision", + "config": "bind-config", + }, + } + } + def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}", - plan=self._get_plan_name(), + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/base.py b/sunbeam-python/sunbeam/plugins/interface/v1/base.py index fa4436cc..b9c949ea 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/base.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/base.py @@ -189,9 +189,45 @@ def fetch_plugin_version(self, plugin: str) -> Version: return Version(version) - def get_terraform_plan_dir_names(self) -> set: - """Return all terraform plan directory names.""" - return set() + def manifest(self) -> dict: + """Return manifest part of the plugin. 
+ + Define manifest charms involved and default values for charm attributes + and terraform plan. + Sample manifest: + { + "charms": { + "heat": { + "channel": <>. + "revision": <>, + "config": <>, + } + }, + "terraform": { + "-plan": { + "source": , + } + } + } + """ + return {} + + def charm_manifest_tfvar_map(self) -> dict: + """Return terraform var map for the manifest. + + Map terraform variable for each Charm manifest attribute. + Sample return value: + { + : { + "heat": { + "channel": , + "revision": , + "config": , + } + } + } + """ + return {} def get_terraform_plans_base_path(self) -> Path: """Return Terraform plan base location.""" diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py index c7d4f0be..93fd40c5 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py @@ -14,7 +14,6 @@ # limitations under the License. -import inspect import logging import shutil from abc import abstractmethod @@ -116,28 +115,19 @@ def __init__(self, name: str, tf_plan_location: TerraformPlanLocation) -> None: # Based on terraform plan location, tfplan will be either # openstack or plugin name if self.tf_plan_location == TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO: - self.tfplan = OPENSTACK_TERRAFORM_PLAN + self.tfplan = f"{OPENSTACK_TERRAFORM_PLAN}-plan" + self.tfplan_dir = f"deploy-{OPENSTACK_TERRAFORM_PLAN}" else: - self.tfplan = self.name + self.tfplan = f"{self.name}-plan" + self.tfplan_dir = f"deploy-{self.name}" self.snap = Snap() def _get_tf_plan_full_path(self) -> Path: """Returns terraform plan absolute path.""" - manifest_obj = Manifest.load_latest_from_clusterdb() + manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) manifest_tfplans = manifest_obj.terraform - tfplan_dir = f"deploy-{self.tfplan}" - if manifest_tfplans and manifest_tfplans.get(tfplan_dir): - return manifest_tfplans.get(tfplan_dir).source - elif self.tf_plan_location == TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO: - return self.snap.paths.snap / "etc" / tfplan_dir - else: - plugin_class_dir = Path(inspect.getfile(self.__class__)).parent - return plugin_class_dir / "etc" / tfplan_dir - - def _get_plan_name(self) -> str: - """Returns plan name in format defined in cluster db.""" - return f"{self.tfplan}-plan" + return manifest_tfplans.get(self.tfplan).source def is_openstack_control_plane(self) -> bool: """Is plugin deploys openstack control plane. @@ -150,10 +140,6 @@ def get_terraform_openstack_plan_path(self) -> Path: """Return Terraform OpenStack plan location.""" return self.get_terraform_plans_base_path() / "etc" / "deploy-openstack" - def get_terraform_plan_dir_names(self) -> set: - """Return all terraform plan directory names.""" - return {f"deploy-{self.tfplan}"} - def pre_checks(self) -> None: """Perform preflight checks before enabling the plugin. 
@@ -163,7 +149,7 @@ def pre_checks(self) -> None: preflight_checks.append(VerifyBootstrappedCheck()) run_preflight_checks(preflight_checks, console) src = self._get_tf_plan_full_path() - dst = self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}" + dst = self.snap.paths.user_common / "etc" / self.tfplan_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) @@ -175,8 +161,8 @@ def pre_enable(self) -> None: def get_tfhelper(self): data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}", - plan=self._get_plan_name(), + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) @@ -463,6 +449,8 @@ def run(self, status: Optional[Status] = None) -> Result: except ConfigItemNotFoundException: tfvars = {} tfvars.update(self.plugin.set_tfvars_on_enable()) + m = Manifest.load_latest_from_clusterdb(on_default=True) + tfvars.update(m.get_tfvars(self.tfhelper.plan)) update_config(self.client, config_key, tfvars) self.tfhelper.write_tfvars(tfvars) diff --git a/sunbeam-python/sunbeam/plugins/ldap/plugin.py b/sunbeam-python/sunbeam/plugins/ldap/plugin.py index 249952ca..da8d1236 100644 --- a/sunbeam-python/sunbeam/plugins/ldap/plugin.py +++ b/sunbeam-python/sunbeam/plugins/ldap/plugin.py @@ -49,6 +49,7 @@ OpenStackControlPlanePlugin, TerraformPlanLocation, ) +from sunbeam.versions import OPENSTACK_CHANNEL LOG = logging.getLogger(__name__) console = Console() @@ -270,6 +271,21 @@ def __init__(self) -> None: ) self.config_flags = None + def manifest(self) -> dict: + """Manifest in dict format.""" + return {"charms": {"keystone-ldap": {"channel": OPENSTACK_CHANNEL}}} + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "keystone-ldap": { + "channel": "ldap-channel", + "revision": "ldap-revision", + } + } + } + def set_tfvars_on_enable(self) -> dict: """Set terraform variables to enable the application.""" return { @@ -343,8 +359,8 @@ def add_domain( } data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}", - plan=self._get_plan_name(), + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) @@ -388,8 +404,8 @@ def update_domain( charm_config["tls-ca-ldap"] = ca data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}", - plan=self._get_plan_name(), + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) @@ -407,8 +423,8 @@ def remove_domain(self, domain_name: str) -> None: """Remove LDAP backed domain.""" data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}", - plan=self._get_plan_name(), + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py index 8e736af4..e83437af 100644 --- a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py +++ b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py @@ -22,6 +22,7 @@ OpenStackControlPlanePlugin, TerraformPlanLocation, ) 
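The per-plugin manifest() and charm_manifest_tfvar_map() hooks added in these plugins feed PluginManager.get_all_plugin_manifests() and are folded into the default manifest with the merge_dict helper added to sunbeam/utils.py in this patch. A small usage sketch of that merge, reusing the keystone-ldap defaults shown above (the override values are made up for illustration):

from sunbeam.utils import merge_dict

defaults = {"charms": {"keystone-ldap": {"channel": "2023.2/edge"}}}
override = {"charms": {"keystone-ldap": {"channel": "2023.2/stable", "revision": 123}}}

merge_dict(defaults, override)
# Nested mappings are merged and truthy leaf values win, so defaults is now:
# {"charms": {"keystone-ldap": {"channel": "2023.2/stable", "revision": 123}}}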
+from sunbeam.versions import OPENSTACK_CHANNEL LOG = logging.getLogger(__name__) @@ -35,6 +36,22 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) + def manifest(self) -> dict: + """Manifest in dict format.""" + return {"charms": {"octavia": {"channel": OPENSTACK_CHANNEL}}} + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "octavia": { + "channel": "octavia-channel", + "revision": "octavia-revision", + "config": "octavia-config", + } + } + } + def set_application_names(self) -> list: """Application names handled by the terraform plan.""" apps = ["octavia", "octavia-mysql-router"] diff --git a/sunbeam-python/sunbeam/plugins/observability/plugin.py b/sunbeam-python/sunbeam/plugins/observability/plugin.py index a5e0b5fe..a97f5ad6 100644 --- a/sunbeam-python/sunbeam/plugins/observability/plugin.py +++ b/sunbeam-python/sunbeam/plugins/observability/plugin.py @@ -434,50 +434,58 @@ class ObservabilityPlugin(EnableDisablePlugin): def __init__(self) -> None: super().__init__(name="observability") self.snap = Snap() - self.tfplan_cos = "deploy-cos" - self.tfplan_grafana_agent = "deploy-grafana-agent" - - def get_terraform_plan_dir_names(self) -> set: - """Return all terraform plan directory names.""" - return {self.tfplan_cos, self.tfplan_grafana_agent} + self.tfplan_cos = "cos-plan" + self.tfplan_cos_dir = "deploy-cos" + self.tfplan_grafana_agent = "grafana-agent-plan" + self.tfplan_grafana_agent_dir = "deploy-grafana-agent" + + def manifest(self) -> dict: + """Manifest in dict format.""" + return { + "terraform": { + self.tfplan_cos: { + "source": Path(__file__).parent / "etc" / self.tfplan_cos_dir + }, + self.tfplan_grafana_agent: { + "source": Path(__file__).parent + / "etc" # noqa: W503 + / self.tfplan_grafana_agent_dir # noqa: W503 + }, + } + } def pre_enable(self): - manifest_obj = Manifest.load_latest_from_clusterdb() + manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) manifest_tfplans = manifest_obj.terraform - if manifest_tfplans and manifest_tfplans.get(self.tfplan_cos): - src = manifest_tfplans.get(self.tfplan_cos).source - else: - src = Path(__file__).parent / "etc" / self.tfplan_cos - dst = self.snap.paths.user_common / "etc" / self.tfplan_cos + src = manifest_tfplans.get(self.tfplan_cos).source + dst = self.snap.paths.user_common / "etc" / self.tfplan_cos_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) - if manifest_tfplans and manifest_tfplans.get(self.tfplan_grafana_agent): - src = manifest_tfplans.get(self.tfplan_grafana_agent).source - else: - src = Path(__file__).parent / "etc" / self.tfplan_grafana_agent - dst = self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent + src = manifest_tfplans.get(self.tfplan_grafana_agent).source + dst = self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) def run_enable_plans(self): data_location = self.snap.paths.user_data tfhelper_cos = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_cos, - plan="cos-plan", + path=self.snap.paths.user_common / "etc" / self.tfplan_cos_dir, + plan=self.tfplan_cos, backend="http", data_location=data_location, ) tfhelper_grafana_agent = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent, - plan="grafana-agent-plan", + path=self.snap.paths.user_common / "etc" / 
self.tfplan_grafana_agent_dir, + plan=self.tfplan_grafana_agent, backend="http", data_location=data_location, ) - openstack_plan = "deploy-" + OPENSTACK_TERRAFORM_PLAN + tfplan_openstack = f"{OPENSTACK_TERRAFORM_PLAN}-plan" + tfplan_openstack_dir = f"deploy-{OPENSTACK_TERRAFORM_PLAN}" tfhelper_openstack = TerraformHelper( - path=self.snap.paths.user_common / "etc" / openstack_plan, - plan=OPENSTACK_TERRAFORM_PLAN + "-plan", + path=self.snap.paths.user_common / "etc" / tfplan_openstack_dir, + plan=tfplan_openstack, backend="http", data_location=data_location, ) @@ -507,21 +515,22 @@ def pre_disable(self): def run_disable_plans(self): data_location = self.snap.paths.user_data tfhelper_cos = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_cos, - plan="cos-plan", + path=self.snap.paths.user_common / "etc" / self.tfplan_cos_dir, + plan=self.tfplan_cos, backend="http", data_location=data_location, ) tfhelper_grafana_agent = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent, - plan="grafana-agent-plan", + path=self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent_dir, + plan=self.tfplan_grafana_agent, backend="http", data_location=data_location, ) - openstack_plan = "deploy-" + OPENSTACK_TERRAFORM_PLAN + tfplan_openstack = f"{OPENSTACK_TERRAFORM_PLAN}-plan" + tfplan_openstack_dir = f"deploy-{OPENSTACK_TERRAFORM_PLAN}" tfhelper_openstack = TerraformHelper( - path=self.snap.paths.user_common / "etc" / openstack_plan, - plan=OPENSTACK_TERRAFORM_PLAN + "-plan", + path=self.snap.paths.user_common / "etc" / tfplan_openstack_dir, + plan=tfplan_openstack, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py index 393ef70b..367e2761 100644 --- a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py +++ b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py @@ -22,6 +22,7 @@ OpenStackControlPlanePlugin, TerraformPlanLocation, ) +from sunbeam.versions import OPENSTACK_CHANNEL LOG = logging.getLogger(__name__) @@ -35,6 +36,22 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) + def manifest(self) -> dict: + """Manifest in dict format.""" + return {"charms": {"heat": {"channel": OPENSTACK_CHANNEL}}} + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "heat": { + "channel": "heat-channel", + "revision": "heat-revision", + "config": "heat-config", + } + } + } + def set_application_names(self) -> list: """Application names handled by the terraform plan.""" apps = ["heat", "heat-mysql-router"] diff --git a/sunbeam-python/sunbeam/plugins/pro/plugin.py b/sunbeam-python/sunbeam/plugins/pro/plugin.py index ef528d4f..834c9690 100644 --- a/sunbeam-python/sunbeam/plugins/pro/plugin.py +++ b/sunbeam-python/sunbeam/plugins/pro/plugin.py @@ -152,28 +152,30 @@ def __init__(self) -> None: super().__init__(name="pro") self.token = None self.snap = Snap() - self.tfplan = f"deploy-{self.name}" + self.tfplan = "ubuntu-pro-plan" + self.tfplan_dir = f"deploy-{self.name}" - def get_terraform_plan_dir_names(self) -> set: - """Return all terraform plan directory names.""" - return {self.tfplan} + def manifest(self) -> dict: + """Manifest in dict format.""" + return { + "terraform": { + self.tfplan: {"source": Path(__file__).parent / "etc" / self.tfplan_dir} + } + } def pre_enable(self): - manifest_obj = 
Manifest.load_latest_from_clusterdb() + manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) manifest_tfplans = manifest_obj.terraform - if manifest_tfplans and manifest_tfplans.get(self.tfplan): - src = manifest_tfplans.get(self.tfplan).source - else: - src = Path(__file__).parent / "etc" / self.tfplan - dst = self.snap.paths.user_common / "etc" / self.tfplan + src = manifest_tfplans.get(self.tfplan).source + dst = self.snap.paths.user_common / "etc" / self.tfplan_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) def run_enable_plans(self): data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan, - plan="ubuntu-pro-plan", + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) @@ -197,8 +199,8 @@ def pre_disable(self): def run_disable_plans(self): data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan, - plan="ubuntu-pro-plan", + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/plugins/secrets/plugin.py b/sunbeam-python/sunbeam/plugins/secrets/plugin.py index daccf2f8..b9ac1d5a 100644 --- a/sunbeam-python/sunbeam/plugins/secrets/plugin.py +++ b/sunbeam-python/sunbeam/plugins/secrets/plugin.py @@ -24,6 +24,7 @@ TerraformPlanLocation, ) from sunbeam.plugins.vault.plugin import VaultPlugin +from sunbeam.versions import OPENSTACK_CHANNEL class SecretsPlugin(OpenStackControlPlanePlugin): @@ -37,6 +38,22 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) + def manifest(self) -> dict: + """Manifest in dict format.""" + return {"charms": {"barbican": {"channel": OPENSTACK_CHANNEL}}} + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "barbican": { + "channel": "barbican-channel", + "revision": "barbican-revision", + "config": "barbican-config", + } + } + } + def set_application_names(self) -> list: """Application names handled by the terraform plan.""" apps = ["barbican", "barbican-mysql-router"] diff --git a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py index 17239620..e4719bde 100644 --- a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py +++ b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py @@ -29,6 +29,7 @@ OpenStackControlPlanePlugin, TerraformPlanLocation, ) +from sunbeam.versions import OPENSTACK_CHANNEL LOG = logging.getLogger(__name__) console = Console() @@ -43,12 +44,44 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) + def manifest(self) -> dict: + """Manifest in dict format.""" + return { + "charms": { + "aodh": {"channel": OPENSTACK_CHANNEL}, + "gnocchi": {"channel": OPENSTACK_CHANNEL}, + "ceilometer": {"channel": OPENSTACK_CHANNEL}, + } + } + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "aodh": { + "channel": "aodh-channel", + "revision": "aodh-revision", + "config": "aodh-config", + }, + "gnocchi": { + "channel": "gnocchi-channel", + "revision": "gnocchi-revision", + "config": "gnocchi-config", + }, + "ceilometer": { + "channel": "ceilometer-channel", + "revision": "ceilometer-revision", + "config": 
"ceilometer-config", + }, + } + } + def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}", - plan=self._get_plan_name(), + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) @@ -73,8 +106,8 @@ def run_disable_plans(self) -> None: """Run plans to disable the plugin.""" data_location = self.snap.paths.user_data tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / f"deploy-{self.tfplan}", - plan=self._get_plan_name(), + path=self.snap.paths.user_common / "etc" / self.tfplan_dir, + plan=self.tfplan, backend="http", data_location=data_location, ) diff --git a/sunbeam-python/sunbeam/plugins/vault/plugin.py b/sunbeam-python/sunbeam/plugins/vault/plugin.py index 92cfa6c6..e0d062f2 100644 --- a/sunbeam-python/sunbeam/plugins/vault/plugin.py +++ b/sunbeam-python/sunbeam/plugins/vault/plugin.py @@ -28,6 +28,7 @@ OpenStackControlPlanePlugin, TerraformPlanLocation, ) +from sunbeam.versions import VAULT_CHANNEL LOG = logging.getLogger(__name__) @@ -41,6 +42,22 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) + def manifest(self) -> dict: + """Manifest in dict format.""" + return {"charms": {"vault": {"channel": VAULT_CHANNEL}}} + + def charm_manifest_tfvar_map(self) -> dict: + """Charm manifest terraformvars map.""" + return { + self.tfplan: { + "vault": { + "channel": "vault-channel", + "revision": "vault-revision", + "config": "vault-config", + } + } + } + def set_application_names(self) -> list: """Application names handled by the terraform plan.""" return ["vault"] diff --git a/sunbeam-python/sunbeam/utils.py b/sunbeam-python/sunbeam/utils.py index 9242e8d1..5be0f7d2 100644 --- a/sunbeam-python/sunbeam/utils.py +++ b/sunbeam-python/sunbeam/utils.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import collections.abc import glob import ipaddress import logging @@ -287,3 +288,17 @@ def __call__(self, *args, **kwargs): LOG.warn(message) LOG.error("Error: %s", e) sys.exit(1) + + +def merge_dict(d: dict, u: dict) -> dict: + """Merges nested dicts and updates the first dict.""" + for k, v in u.items(): + if not d.get(k): + d[k] = v + elif isinstance(v, collections.abc.Mapping): + d[k] = merge_dict(d.get(k, {}), v) + else: + if v: + d[k] = v + + return d diff --git a/sunbeam-python/sunbeam/versions.py b/sunbeam-python/sunbeam/versions.py index 1b2cf2b4..715500f9 100644 --- a/sunbeam-python/sunbeam/versions.py +++ b/sunbeam-python/sunbeam/versions.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + OPENSTACK_CHANNEL = "2023.2/edge" OVN_CHANNEL = "23.09/edge" RABBITMQ_CHANNEL = "3.12/edge" @@ -22,6 +23,8 @@ MICROK8S_CHANNEL = "legacy/stable" MYSQL_CHANNEL = "8.0/candidate" CERT_AUTH_CHANNEL = "latest/beta" +BIND_CHANNEL = "9/edge" +VAULT_CHANNEL = "latest/edge" # The lists of services are needed for switching charm channels outside # of the terraform provider. 
If it ok to upgrade in one big-bang and @@ -41,8 +44,8 @@ "ovn-central": OVN_CHANNEL, "ovn-relay": OVN_CHANNEL, } -MYSQL_SERVICES_K8S = { - "mysql": MYSQL_CHANNEL, +MYSQL_SERVICES_K8S = {"mysql": MYSQL_CHANNEL} +MYSQL_ROUTER_SERVICES_K8S = { "cinder-ceph-mysql-router": MYSQL_CHANNEL, "cinder-mysql-router": MYSQL_CHANNEL, "glance-mysql-router": MYSQL_CHANNEL, @@ -71,8 +74,91 @@ K8S_SERVICES |= OPENSTACK_SERVICES_K8S K8S_SERVICES |= OVN_SERVICES_K8S K8S_SERVICES |= MYSQL_SERVICES_K8S +K8S_SERVICES |= MYSQL_ROUTER_SERVICES_K8S K8S_SERVICES |= MISC_SERVICES_K8S CHARM_VERSIONS = {} CHARM_VERSIONS |= K8S_SERVICES CHARM_VERSIONS |= MACHINE_SERVICES + +# Similar to CHARM_VERSIONS except this is not per service +# but per charm. So all *-mysql-router wont be included +# and instead only mysql-router is included. Same is the +# case of traefik charm. +MANIFEST_CHARM_VERSIONS = {} +MANIFEST_CHARM_VERSIONS |= OPENSTACK_SERVICES_K8S +MANIFEST_CHARM_VERSIONS |= OVN_SERVICES_K8S +MANIFEST_CHARM_VERSIONS |= MYSQL_SERVICES_K8S +MANIFEST_CHARM_VERSIONS |= MISC_SERVICES_K8S +MANIFEST_CHARM_VERSIONS |= MACHINE_SERVICES +MANIFEST_CHARM_VERSIONS |= {"mysql-router": MYSQL_CHANNEL} +MANIFEST_CHARM_VERSIONS.pop("traefik-public") + + +# : +TERRAFORM_DIR_NAMES = { + "sunbeam-machine-plan": "deploy-sunbeam-machine", + "microk8s-plan": "deploy-microk8s", + "microceph-plan": "deploy-microceph", + "openstack-plan": "deploy-openstack", + "hypervisor-plan": "deploy-openstack-hypervisor", + "demo-setup": "demo-setup", +} + +K8S_CHARMS = {} +K8S_CHARMS |= OPENSTACK_SERVICES_K8S +K8S_CHARMS |= OVN_SERVICES_K8S +K8S_CHARMS |= MYSQL_SERVICES_K8S +K8S_CHARMS |= MISC_SERVICES_K8S +DEPLOY_OPENSTACK_TFVAR_MAP = { + svc: { + "channel": f"{svc}-channel", + "revision": f"{svc}-revision", + "config": f"{svc}-config", + } + for svc, channel in K8S_CHARMS.items() +} +DEPLOY_OPENSTACK_TFVAR_MAP.pop("traefik-public") +DEPLOY_OPENSTACK_TFVAR_MAP["mysql-router"] = { + "channel": "mysql-router-channel", + "revision": "mysql-router-revision", + "config": "mysql-router-config", +} + +DEPLOY_MICROK8S_TFVAR_MAP = { + "microk8s": { + "channel": "charm_microk8s_channel", + "revision": "charm_microk8s_revision", + "config": "charm_microk8s_config", + } +} +DEPLOY_MICROCEPH_TFVAR_MAP = { + "microceph": { + "channel": "charm_microceph_channel", + "revision": "charm_microceph_revision", + "config": "charm_microceph_config", + } +} +DEPLOY_OPENSTACK_HYPERVISOR_TFVAR_MAP = { + "openstack-hypervisor": { + "channel": "charm_channel", + "revision": "charm_revision", + "config": "charm_config", + } +} +DEPLOY_SUNBEAM_MACHINE_TFVAR_MAP = { + "sunbeam-machine": { + "channel": "charm_channel", + "revision": "charm_revision", + "config": "charm_config", + } +} + + +CHARM_MANIFEST_TFVARS_MAP = { + "sunbeam-machine-plan": DEPLOY_SUNBEAM_MACHINE_TFVAR_MAP, + "microk8s-plan": DEPLOY_MICROK8S_TFVAR_MAP, + "microceph-plan": DEPLOY_MICROCEPH_TFVAR_MAP, + "openstack-plan": DEPLOY_OPENSTACK_TFVAR_MAP, + "hypervisor-plan": DEPLOY_OPENSTACK_HYPERVISOR_TFVAR_MAP, +} diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py b/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py index 98a45c7f..cbc44d86 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py @@ -29,6 +29,7 @@ from sunbeam.commands.terraform import TerraformException from sunbeam.jobs.common import ResultType from sunbeam.jobs.juju import ApplicationNotFoundException, TimeoutException +from 
sunbeam.jobs.manifest import Manifest @pytest.fixture(autouse=True) @@ -94,7 +95,9 @@ def test_is_skip_app_already_deployed(self): self.jhelper.get_application.assert_called_once() assert result.result_type == ResultType.SKIPPED - def test_run_pristine_installation(self): + @patch("sunbeam.jobs.manifest.PluginManager") + @patch.object(Manifest, "load_latest_from_clusterdb_on_default") + def test_run_pristine_installation(self, manifest, pluginmanager): self.jhelper.get_application.side_effect = ApplicationNotFoundException( "not found" ) @@ -108,7 +111,9 @@ def test_run_pristine_installation(self): self.tfhelper.apply.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_tf_apply_failed(self): + @patch("sunbeam.jobs.manifest.PluginManager") + @patch.object(Manifest, "load_latest_from_clusterdb_on_default") + def test_run_tf_apply_failed(self, manifest, pluginmanager): self.tfhelper.apply.side_effect = TerraformException("apply failed...") step = DeployHypervisorApplicationStep( @@ -120,7 +125,9 @@ def test_run_tf_apply_failed(self): assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." - def test_run_waiting_timed_out(self): + @patch("sunbeam.jobs.manifest.PluginManager") + @patch.object(Manifest, "load_latest_from_clusterdb_on_default") + def test_run_waiting_timed_out(self, manifest, pluginmanager): self.jhelper.wait_application_ready.side_effect = TimeoutException("timed out") step = DeployHypervisorApplicationStep( diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py index fd9b4077..120f56c4 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py @@ -37,6 +37,7 @@ JujuWaitException, TimeoutException, ) +from sunbeam.jobs.manifest import Manifest TOPOLOGY = "single" DATABASE = "single" @@ -65,8 +66,10 @@ def setUp(self): self.jhelper = AsyncMock() self.tfhelper = Mock(path=Path()) + @patch("sunbeam.jobs.manifest.PluginManager") + @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_pristine_installation(self, client): + def test_run_pristine_installation(self, client, manifest, pluginmanager): self.jhelper.get_application.side_effect = ApplicationNotFoundException( "not found" ) @@ -82,8 +85,10 @@ def test_run_pristine_installation(self, client): self.tfhelper.apply.assert_called_once() assert result.result_type == ResultType.COMPLETED + @patch("sunbeam.jobs.manifest.PluginManager") + @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_tf_apply_failed(self, client): + def test_run_tf_apply_failed(self, client, manifest, pluginmanager): self.tfhelper.apply.side_effect = TerraformException("apply failed...") step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) @@ -97,8 +102,10 @@ def test_run_tf_apply_failed(self, client): assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
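The patching pattern used in these tests generalises to any step that now loads the manifest: stub out both PluginManager and the cluster-DB lookup so unit tests never touch a real snap or database. A minimal standalone sketch (the test name and the canned tfvars are illustrative):

from unittest.mock import patch

from sunbeam.jobs.manifest import Manifest


@patch("sunbeam.jobs.manifest.PluginManager")
@patch.object(Manifest, "load_latest_from_clusterdb_on_default")
def test_step_reads_manifest_tfvars(manifest_mock, pluginmanager_mock):
    # load_latest_from_clusterdb(on_default=True) delegates to the patched
    # classmethod, so the code under test sees whatever tfvars are canned here.
    manifest_mock.return_value.get_tfvars.return_value = {
        "ovn-central-channel": "23.09/edge"
    }
    m = Manifest.load_latest_from_clusterdb(on_default=True)
    assert m.get_tfvars("openstack-plan") == {"ovn-central-channel": "23.09/edge"}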
+ @patch("sunbeam.jobs.manifest.PluginManager") + @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_waiting_timed_out(self, client): + def test_run_waiting_timed_out(self, client, manifest, pluginmanager): self.jhelper.wait_until_active.side_effect = TimeoutException("timed out") step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) @@ -112,8 +119,10 @@ def test_run_waiting_timed_out(self, client): assert result.result_type == ResultType.FAILED assert result.message == "timed out" + @patch("sunbeam.jobs.manifest.PluginManager") + @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_unit_in_error_state(self, client): + def test_run_unit_in_error_state(self, client, manifest, pluginmanager): self.jhelper.wait_until_active.side_effect = JujuWaitException( "Unit in error: placement/0" ) diff --git a/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py b/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py index f1de50f2..9bf43f49 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py @@ -16,6 +16,7 @@ from sunbeam.commands.upgrades.inter_channel import BaseUpgrade from sunbeam.versions import ( + MYSQL_ROUTER_SERVICES_K8S, MYSQL_SERVICES_K8S, OPENSTACK_SERVICES_K8S, OVN_SERVICES_K8S, @@ -28,6 +29,7 @@ def setup_method(self): self.tfhelper = Mock() self.upgrade_service = ( list(MYSQL_SERVICES_K8S.keys()) # noqa + + list(MYSQL_ROUTER_SERVICES_K8S.keys()) # noqa + list(OVN_SERVICES_K8S.keys()) # noqa + list(OPENSTACK_SERVICES_K8S.keys()) # noqa ) diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py index 1b843914..bc6bf347 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py @@ -23,6 +23,7 @@ from sunbeam.commands.terraform import TerraformException from sunbeam.jobs.common import ResultType from sunbeam.jobs.juju import ApplicationNotFoundException, TimeoutException +from sunbeam.jobs.manifest import Manifest from sunbeam.jobs.steps import ( AddMachineUnitStep, DeployMachineApplicationStep, @@ -67,6 +68,18 @@ def read_config(): yield p +@pytest.fixture() +def manifest(): + with patch.object(Manifest, "load_latest_from_clusterdb_on_default") as p: + yield p + + +@pytest.fixture() +def pluginmanager(): + with patch("sunbeam.jobs.manifest.PluginManager") as p: + yield p + + class TestDeployMachineApplicationStep: def test_is_skip(self, cclient, jhelper, tfhelper): jhelper.get_application.side_effect = ApplicationNotFoundException("not found") @@ -88,7 +101,9 @@ def test_is_skip_application_already_deployed(self, cclient, jhelper, tfhelper): jhelper.get_application.assert_called_once() assert result.result_type == ResultType.SKIPPED - def test_run_pristine_installation(self, cclient, jhelper, tfhelper, read_config): + def test_run_pristine_installation( + self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager + ): jhelper.get_application.side_effect = ApplicationNotFoundException("not found") step = DeployMachineApplicationStep( @@ -101,7 +116,9 @@ def test_run_pristine_installation(self, cclient, jhelper, tfhelper, read_config tfhelper.apply.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_already_deployed(self, cclient, 
jhelper, tfhelper, read_config): + def test_run_already_deployed( + self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager + ): machines = ["1", "2"] application = Mock(units=[Mock(machine=Mock(id=m)) for m in machines]) jhelper.get_application.return_value = application @@ -116,7 +133,9 @@ def test_run_already_deployed(self, cclient, jhelper, tfhelper, read_config): tfhelper.apply.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_tf_apply_failed(self, cclient, jhelper, tfhelper, read_config): + def test_run_tf_apply_failed( + self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager + ): tfhelper.apply.side_effect = TerraformException("apply failed...") step = DeployMachineApplicationStep( @@ -128,7 +147,9 @@ def test_run_tf_apply_failed(self, cclient, jhelper, tfhelper, read_config): assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." - def test_run_waiting_timed_out(self, cclient, jhelper, tfhelper, read_config): + def test_run_waiting_timed_out( + self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager + ): jhelper.wait_application_ready.side_effect = TimeoutException("timed out") step = DeployMachineApplicationStep( diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py index 4d84ebb7..f8eb7c0d 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py @@ -22,6 +22,7 @@ from sunbeam.commands.terraform import TerraformException from sunbeam.jobs.common import ResultType from sunbeam.jobs.juju import TimeoutException +from sunbeam.jobs.manifest import Manifest @pytest.fixture(autouse=True) @@ -70,6 +71,18 @@ def osplugin(): yield p +@pytest.fixture() +def manifest(): + with patch.object(Manifest, "load_latest_from_clusterdb_on_default") as p: + yield p + + +@pytest.fixture() +def pluginmanager(): + with patch("sunbeam.jobs.manifest.PluginManager") as p: + yield p + + class TestEnableOpenStackApplicationStep: def test_run( self, @@ -78,6 +91,8 @@ def test_run( jhelper, tfhelper, osplugin, + manifest, + pluginmanager, ): step = openstack.EnableOpenStackApplicationStep(tfhelper, jhelper, osplugin) result = step.run() @@ -88,7 +103,7 @@ def test_run( assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed( - self, cclient, read_config, jhelper, tfhelper, osplugin + self, cclient, read_config, jhelper, tfhelper, osplugin, manifest, pluginmanager ): tfhelper.apply.side_effect = TerraformException("apply failed...") @@ -102,7 +117,7 @@ def test_run_tf_apply_failed( assert result.message == "apply failed..." def test_run_waiting_timed_out( - self, cclient, read_config, jhelper, tfhelper, osplugin + self, cclient, read_config, jhelper, tfhelper, osplugin, manifest, pluginmanager ): jhelper.wait_until_active.side_effect = TimeoutException("timed out") From bdd4ec556a6c94484a772a19c26db916983d4b27 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Wed, 17 Jan 2024 15:38:18 +0530 Subject: [PATCH 08/27] Generalise terraform functions into manifest * All the commands have logic to copy the teraform plans to snap user common and creation of terraform helper object. Move this logic as part of Manifest class. * All the plans more or less get terraform vars from cluster db, update with generated tfvars based on plan, update with tfvars from manifest, write tfvars to the terraform plan and apply the plan. 
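  In outline, a deploy step is now expected to drive a plan roughly as follows
  (illustrative sketch, not an excerpt from the diff; CONFIG_KEY stands in for the
  plan-specific cluster-db tfvars key that each command module already defines):

      from sunbeam.jobs.manifest import Manifest

      manifest = Manifest.load_latest_from_clusterdb(on_default=True)

      # First use of a plan copies its source under the snap's user-common etc
      # directory and caches a TerraformHelper; later calls return the cached helper.
      tfhelper = manifest.get_tfhelper("openstack-plan")

      # Merge the tfvars stored in the cluster db with the step's extra_tfvars and
      # the manifest-derived tfvars (manifest values take precedence), persist the
      # result back under CONFIG_KEY, write the tfvars file and run terraform apply.
      manifest.update_tfvar_and_apply_tf(
          tfplan="openstack-plan",
          tfvar_config=CONFIG_KEY,                 # the plan's tfvars key in the cluster db
          extra_tfvars={"machine_ids": ["0", "1"]},  # step-specific variables
      )

  Because get_tfhelper caches one helper per plan, plan sources are copied lazily on
  first use instead of being copied up front by every command.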
Move this logic as part of Manifest class --- sunbeam-python/sunbeam/commands/bootstrap.py | 80 ++--------- sunbeam-python/sunbeam/commands/hypervisor.py | 65 ++++----- sunbeam-python/sunbeam/commands/microceph.py | 7 +- sunbeam-python/sunbeam/commands/microk8s.py | 12 +- sunbeam-python/sunbeam/commands/node.py | 41 +----- sunbeam-python/sunbeam/commands/openstack.py | 66 ++++----- sunbeam-python/sunbeam/commands/resize.py | 21 +-- .../sunbeam/commands/sunbeam_machine.py | 7 +- sunbeam-python/sunbeam/jobs/manifest.py | 81 +++++++++-- sunbeam-python/sunbeam/jobs/plugin.py | 2 +- sunbeam-python/sunbeam/jobs/steps.py | 26 ++-- sunbeam-python/sunbeam/plugins/caas/plugin.py | 24 +--- sunbeam-python/sunbeam/plugins/dns/plugin.py | 16 +-- .../sunbeam/plugins/interface/v1/base.py | 2 +- .../sunbeam/plugins/interface/v1/openstack.py | 92 ++++--------- sunbeam-python/sunbeam/plugins/ldap/plugin.py | 56 ++------ .../sunbeam/plugins/loadbalancer/plugin.py | 5 +- .../sunbeam/plugins/observability/plugin.py | 84 ++++------- .../sunbeam/plugins/orchestration/plugin.py | 5 +- sunbeam-python/sunbeam/plugins/pro/plugin.py | 78 +++++------ .../sunbeam/plugins/secrets/plugin.py | 5 +- .../sunbeam/plugins/telemetry/plugin.py | 43 ++---- .../sunbeam/plugins/vault/plugin.py | 5 +- .../unit/sunbeam/commands/test_hypervisor.py | 75 ++++------ .../unit/sunbeam/commands/test_openstack.py | 66 ++++----- .../tests/unit/sunbeam/jobs/test_steps.py | 60 +++----- .../tests/unit/sunbeam/plugins/test_ldap.py | 130 +++++++----------- .../unit/sunbeam/plugins/test_openstack.py | 43 +++--- .../tests/unit/sunbeam/plugins/test_pro.py | 57 +++++--- 29 files changed, 474 insertions(+), 780 deletions(-) diff --git a/sunbeam-python/sunbeam/commands/bootstrap.py b/sunbeam-python/sunbeam/commands/bootstrap.py index eda5bdf3..e1b7166e 100644 --- a/sunbeam-python/sunbeam/commands/bootstrap.py +++ b/sunbeam-python/sunbeam/commands/bootstrap.py @@ -15,7 +15,6 @@ import logging -import shutil from pathlib import Path from typing import List, Optional @@ -62,7 +61,7 @@ AddSunbeamMachineUnitStep, DeploySunbeamMachineApplicationStep, ) -from sunbeam.commands.terraform import TerraformHelper, TerraformInitStep +from sunbeam.commands.terraform import TerraformInitStep from sunbeam.jobs.checks import ( DaemonGroupCheck, JujuSnapCheck, @@ -82,7 +81,6 @@ ) from sunbeam.jobs.juju import CONTROLLER, JujuHelper from sunbeam.jobs.manifest import AddManifestStep, Manifest -from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -172,24 +170,6 @@ def bootstrap( data_location = snap.paths.user_data - # NOTE: install to user writable location - tfplans = ["sunbeam-machine-plan"] - if is_control_node: - tfplans.extend( - [ - "microk8s-plan", - "microceph-plan", - "openstack-plan", - "hypervisor-plan", - ] - ) - manifest_tfplans = manifest_obj.terraform - for tfplan in tfplans: - src = manifest_tfplans.get(tfplan).source - dst = snap.paths.user_common / "etc" / TERRAFORM_DIR_NAMES.get(tfplan, tfplan) - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) - preflight_checks = [] preflight_checks.append(SystemRequirementsCheck()) preflight_checks.append(JujuSnapCheck()) @@ -233,57 +213,27 @@ def bootstrap( plan3.append(SaveJujuUserLocallyStep(fqdn, data_location)) run_plan(plan3, console) - tfhelper = TerraformHelper( - path=snap.paths.user_common / "etc" / "deploy-microk8s", - plan="microk8s-plan", - backend="http", - data_location=data_location, - ) - 
tfhelper_openstack_deploy = TerraformHelper( - path=snap.paths.user_common / "etc" / "deploy-openstack", - plan="openstack-plan", - backend="http", - data_location=data_location, - ) - tfhelper_hypervisor_deploy = TerraformHelper( - path=snap.paths.user_common / "etc" / "deploy-openstack-hypervisor", - plan="hypervisor-plan", - backend="http", - data_location=data_location, - ) - tfhelper_microceph_deploy = TerraformHelper( - path=snap.paths.user_common / "etc" / "deploy-microceph", - plan="microceph-plan", - backend="http", - data_location=data_location, - ) - tfhelper_sunbeam_machine = TerraformHelper( - path=snap.paths.user_common / "etc" / "deploy-sunbeam-machine", - plan="sunbeam-machine-plan", - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan4 = [] plan4.append(RegisterJujuUserStep(fqdn, CONTROLLER, data_location, replace=True)) # Deploy sunbeam machine charm - plan4.append(TerraformInitStep(tfhelper_sunbeam_machine)) - plan4.append(DeploySunbeamMachineApplicationStep(tfhelper_sunbeam_machine, jhelper)) + plan4.append(TerraformInitStep(manifest_obj.get_tfhelper("sunbeam-machine-plan"))) + plan4.append(DeploySunbeamMachineApplicationStep(manifest_obj, jhelper)) plan4.append(AddSunbeamMachineUnitStep(fqdn, jhelper)) # Deploy Microk8s application during bootstrap irrespective of node role. - plan4.append(TerraformInitStep(tfhelper)) + plan4.append(TerraformInitStep(manifest_obj.get_tfhelper("microk8s-plan"))) plan4.append( DeployMicrok8sApplicationStep( - tfhelper, jhelper, accept_defaults=accept_defaults, preseed_file=preseed + manifest_obj, jhelper, accept_defaults=accept_defaults, preseed_file=preseed ) ) plan4.append(AddMicrok8sUnitStep(fqdn, jhelper)) plan4.append(StoreMicrok8sConfigStep(jhelper)) plan4.append(AddMicrok8sCloudStep(jhelper)) # Deploy Microceph application during bootstrap irrespective of node role. - plan4.append(TerraformInitStep(tfhelper_microceph_deploy)) - plan4.append(DeployMicrocephApplicationStep(tfhelper_microceph_deploy, jhelper)) + plan4.append(TerraformInitStep(manifest_obj.get_tfhelper("microceph-plan"))) + plan4.append(DeployMicrocephApplicationStep(manifest_obj, jhelper)) if is_storage_node: plan4.append(AddMicrocephUnitStep(fqdn, jhelper)) @@ -294,12 +244,8 @@ def bootstrap( ) if is_control_node: - plan4.append(TerraformInitStep(tfhelper_openstack_deploy)) - plan4.append( - DeployControlPlaneStep( - tfhelper_openstack_deploy, jhelper, topology, database - ) - ) + plan4.append(TerraformInitStep(manifest_obj.get_tfhelper("openstack-plan"))) + plan4.append(DeployControlPlaneStep(manifest_obj, jhelper, topology, database)) run_plan(plan4, console) @@ -312,12 +258,8 @@ def bootstrap( # NOTE(jamespage): # As with MicroCeph, always deploy the openstack-hypervisor charm # and add a unit to the bootstrap node if required. 
- plan5.append(TerraformInitStep(tfhelper_hypervisor_deploy)) - plan5.append( - DeployHypervisorApplicationStep( - tfhelper_hypervisor_deploy, tfhelper_openstack_deploy, jhelper - ) - ) + plan5.append(TerraformInitStep(manifest_obj.get_tfhelper("hypervisor-plan"))) + plan5.append(DeployHypervisorApplicationStep(manifest_obj, jhelper)) if is_compute_node: plan5.append(AddHypervisorUnitStep(fqdn, jhelper)) diff --git a/sunbeam-python/sunbeam/commands/hypervisor.py b/sunbeam-python/sunbeam/commands/hypervisor.py index a67d436e..5e20f4e0 100644 --- a/sunbeam-python/sunbeam/commands/hypervisor.py +++ b/sunbeam-python/sunbeam/commands/hypervisor.py @@ -28,7 +28,7 @@ from sunbeam.commands.juju import JujuStepHelper from sunbeam.commands.openstack import OPENSTACK_MODEL from sunbeam.commands.openstack_api import guests_on_hypervisor, remove_hypervisor -from sunbeam.commands.terraform import TerraformException, TerraformHelper +from sunbeam.commands.terraform import TerraformException from sunbeam.jobs.common import BaseStep, Result, ResultType, read_config, update_config from sunbeam.jobs.juju import ( CONTROLLER_MODEL, @@ -52,22 +52,23 @@ class DeployHypervisorApplicationStep(BaseStep, JujuStepHelper): """Deploy openstack-hyervisor application using Terraform cloud""" + _CONFIG = CONFIG_KEY + def __init__( self, - tfhelper: TerraformHelper, - tfhelper_openstack: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, ): super().__init__( "Deploy OpenStack Hypervisor", "Deploying OpenStack Hypervisor", ) - self.tfhelper = tfhelper - self.tfhelper_openstack = tfhelper_openstack + self.manifest = manifest self.jhelper = jhelper self.client = Client() self.hypervisor_model = CONTROLLER_MODEL.split("/")[-1] self.openstack_model = OPENSTACK_MODEL + self.tfplan = "hypervisor-plan" def is_skip(self, status: Optional[Status] = None) -> Result: """Determines if the step should be skipped or not. 
@@ -91,28 +92,20 @@ def run(self, status: Optional[Status] = None) -> Result: except ApplicationNotFoundException as e: LOG.debug(str(e)) - try: - tfvars = read_config(self.client, CONFIG_KEY) - except ConfigItemNotFoundException: - tfvars = {} - - openstack_backend_config = self.tfhelper_openstack.backend_config() - tfvars.update( - { - "hypervisor_model": self.hypervisor_model, - "openstack_model": self.openstack_model, - "machine_ids": machine_ids, - "openstack-state-backend": self.tfhelper_openstack.backend, - "openstack-state-config": openstack_backend_config, - } - ) - m = Manifest.load_latest_from_clusterdb(on_default=True) - tfvars.update(m.get_tfvars(self.tfhelper.plan)) - update_config(self.client, CONFIG_KEY, tfvars) - self.tfhelper.write_tfvars(tfvars) + tfhelper_openstack = self.manifest.get_tfhelper("openstack-plan") + openstack_backend_config = tfhelper_openstack.backend_config() + extra_tfvars = { + "hypervisor_model": self.hypervisor_model, + "openstack_model": self.openstack_model, + "machine_ids": machine_ids, + "openstack-state-backend": tfhelper_openstack.backend, + "openstack-state-config": openstack_backend_config, + } try: - self.tfhelper.apply() + self.manifest.update_tfvar_and_apply_tf( + tfplan=self.tfplan, tfvar_config=self._CONFIG, extra_tfvars=extra_tfvars + ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -320,9 +313,11 @@ def run(self, status: Optional[Status] = None) -> Result: class ReapplyHypervisorTerraformPlanStep(BaseStep): """Reapply openstack-hyervisor terraform plan""" + _CONFIG = CONFIG_KEY + def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, extra_tfvars: dict = {}, ): @@ -330,10 +325,11 @@ def __init__( "Reapply OpenStack Hypervisor Terraform plan", "Reapply OpenStack Hypervisor Terraform plan", ) - self.tfhelper = tfhelper + self.manifest = manifest self.jhelper = jhelper self.extra_tfvars = extra_tfvars self.client = Client() + self.tfplan = "hypervisor-plan" def is_skip(self, status: Optional[Status] = None) -> Result: """Determines if the step should be skipped or not. 
@@ -349,16 +345,11 @@ def is_skip(self, status: Optional[Status] = None) -> Result: def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy hypervisor""" try: - tfvars = read_config(self.client, CONFIG_KEY) - except ConfigItemNotFoundException: - tfvars = {} - - tfvars.update(self.extra_tfvars) - update_config(self.client, CONFIG_KEY, tfvars) - self.tfhelper.write_tfvars(tfvars) - - try: - self.tfhelper.apply() + self.manifest.update_tfvar_and_apply_tf( + tfplan=self.tfplan, + tfvar_config=self._CONFIG, + extra_tfvars=self.extra_tfvars, + ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/commands/microceph.py b/sunbeam-python/sunbeam/commands/microceph.py index 80a3d1eb..9221860d 100644 --- a/sunbeam-python/sunbeam/commands/microceph.py +++ b/sunbeam-python/sunbeam/commands/microceph.py @@ -23,7 +23,6 @@ from rich.status import Status from sunbeam.clusterd.client import Client -from sunbeam.commands.terraform import TerraformHelper from sunbeam.jobs import questions from sunbeam.jobs.common import BaseStep, Result, ResultType from sunbeam.jobs.juju import ( @@ -33,6 +32,7 @@ UnitNotFoundException, run_sync, ) +from sunbeam.jobs.manifest import Manifest from sunbeam.jobs.steps import ( AddMachineUnitStep, DeployMachineApplicationStep, @@ -63,15 +63,16 @@ class DeployMicrocephApplicationStep(DeployMachineApplicationStep): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, ): super().__init__( - tfhelper, + manifest, jhelper, CONFIG_KEY, APPLICATION, MODEL, + "microceph-plan", "Deploy MicroCeph", "Deploying MicroCeph", ) diff --git a/sunbeam-python/sunbeam/commands/microk8s.py b/sunbeam-python/sunbeam/commands/microk8s.py index c32efd8d..be6c0741 100644 --- a/sunbeam-python/sunbeam/commands/microk8s.py +++ b/sunbeam-python/sunbeam/commands/microk8s.py @@ -25,7 +25,6 @@ from sunbeam.clusterd.client import Client from sunbeam.clusterd.service import ConfigItemNotFoundException from sunbeam.commands.juju import JujuStepHelper -from sunbeam.commands.terraform import TerraformHelper from sunbeam.jobs import questions from sunbeam.jobs.common import BaseStep, Result, ResultType, read_config, update_config from sunbeam.jobs.juju import ( @@ -37,6 +36,7 @@ UnsupportedKubeconfigException, run_sync, ) +from sunbeam.jobs.manifest import Manifest from sunbeam.jobs.steps import ( AddMachineUnitStep, DeployMachineApplicationStep, @@ -91,24 +91,24 @@ class DeployMicrok8sApplicationStep(DeployMachineApplicationStep): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, preseed_file: Optional[Path] = None, accept_defaults: bool = False, ): super().__init__( - tfhelper, + manifest, jhelper, MICROK8S_CONFIG_KEY, APPLICATION, MODEL, + "microk8s-plan", "Deploy MicroK8S", "Deploying MicroK8S", ) self.preseed_file = preseed_file self.accept_defaults = accept_defaults - self.answer_file = self.tfhelper.path / "addons.auto.tfvars.json" self.variables = {} def get_application_timeout(self) -> int: @@ -144,7 +144,9 @@ def prompt(self, console: Optional[Console] = None) -> None: LOG.debug(self.variables) questions.write_answers(self.client, self._ADDONS_CONFIG, self.variables) # Write answers to terraform location as a separate variables file - self.tfhelper.write_tfvars(self.variables, self.answer_file) + tfhelper = self.manifest.get_tfhelper(self.tfplan) + answer_file = tfhelper.path / "addons.auto.tfvars.json" + 
tfhelper.write_tfvars(self.variables, answer_file) def has_prompts(self) -> bool: """Returns true if the step has prompts that it can ask the user. diff --git a/sunbeam-python/sunbeam/commands/node.py b/sunbeam-python/sunbeam/commands/node.py index b88f06cc..30c45365 100644 --- a/sunbeam-python/sunbeam/commands/node.py +++ b/sunbeam-python/sunbeam/commands/node.py @@ -14,7 +14,6 @@ # limitations under the License. import logging -import shutil from pathlib import Path from typing import List, Optional @@ -59,7 +58,7 @@ AddSunbeamMachineUnitStep, RemoveSunbeamMachineStep, ) -from sunbeam.commands.terraform import TerraformHelper, TerraformInitStep +from sunbeam.commands.terraform import TerraformInitStep from sunbeam.jobs.checks import ( DaemonGroupCheck, JujuSnapCheck, @@ -84,7 +83,6 @@ ) from sunbeam.jobs.juju import CONTROLLER, JujuHelper from sunbeam.jobs.manifest import Manifest -from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -208,37 +206,8 @@ def join( controller = CONTROLLER data_location = snap.paths.user_data - - manifest_obj = Manifest.load_latest_from_cluserdb_on_default() - - # NOTE: install to user writable location - tfplans = ["sunbeam-machine-plan"] - if is_control_node: - tfplans.extend(["microk8s-plan", "microceph-plan", "openstack-plan"]) - if is_compute_node: - tfplans.extend(["hypervisor-plan"]) - manifest_tfplans = manifest_obj.terraform - for tfplan in tfplans: - src = manifest_tfplans.get(tfplan).source - dst = snap.paths.user_common / "etc" / TERRAFORM_DIR_NAMES.get(tfplan, tfplan) - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) - - tfhelper_openstack_deploy = TerraformHelper( - path=snap.paths.user_common / "etc" / TERRAFORM_DIR_NAMES.get("openstack-plan"), - plan="openstack-plan", - backend="http", - data_location=data_location, - ) - tfhelper_hypervisor_deploy = TerraformHelper( - path=snap.paths.user_common - / "etc" # noqa: W503 - / TERRAFORM_DIR_NAMES.get("hypervisor-plan"), # noqa: W503 - plan="hypervisor-plan", - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) + manifest_obj = Manifest.load_latest_from_cluserdb(on_default=True) plan1 = [ JujuLoginStep(data_location), @@ -275,10 +244,8 @@ def join( if is_compute_node: plan2.extend( [ - TerraformInitStep(tfhelper_hypervisor_deploy), - DeployHypervisorApplicationStep( - tfhelper_hypervisor_deploy, tfhelper_openstack_deploy, jhelper - ), + TerraformInitStep(manifest_obj.get_tfhelper("hypervisor-plan")), + DeployHypervisorApplicationStep(manifest_obj, jhelper), AddHypervisorUnitStep(name, jhelper), SetLocalHypervisorOptions( name, jhelper, join_mode=True, preseed_file=preseed diff --git a/sunbeam-python/sunbeam/commands/openstack.py b/sunbeam-python/sunbeam/commands/openstack.py index 160927ef..237e310f 100644 --- a/sunbeam-python/sunbeam/commands/openstack.py +++ b/sunbeam-python/sunbeam/commands/openstack.py @@ -32,7 +32,7 @@ MICROK8S_DEFAULT_STORAGECLASS, MICROK8S_KUBECONFIG_KEY, ) -from sunbeam.commands.terraform import TerraformException, TerraformHelper +from sunbeam.commands.terraform import TerraformException from sunbeam.jobs.common import ( RAM_32_GB_IN_KB, BaseStep, @@ -130,7 +130,7 @@ class DeployControlPlaneStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, topology: str, database: str, @@ -139,13 +139,14 @@ def __init__( "Deploying OpenStack Control Plane", "Deploying OpenStack Control 
Plane to Kubernetes (this may take a while)", ) - self.tfhelper = tfhelper + self.manifest = manifest self.jhelper = jhelper self.topology = topology self.database = database self.model = OPENSTACK_MODEL self.cloud = MICROK8S_CLOUD self.client = Client() + self.tfplan = "openstack-plan" def get_storage_tfvars(self) -> dict: """Create terraform variables related to storage.""" @@ -203,15 +204,8 @@ def run(self, status: Optional[Status] = None) -> Result: {"topology": self.topology, "database": self.database}, ) - # Always get terraform variables from cluster database and - # update them. This is to ensure not to skip the terraform - # variables updated by plugins. - try: - tfvars = read_config(self.client, self._CONFIG) - except ConfigItemNotFoundException: - tfvars = {} - - tfvars.update( + extra_tfvars = self.get_storage_tfvars() + extra_tfvars.update( { "model": self.model, "cloud": self.cloud, @@ -220,22 +214,18 @@ def run(self, status: Optional[Status] = None) -> Result: "many-mysql": self.database == "multi", } ) - tfvars.update(self.get_storage_tfvars()) - m = Manifest.load_latest_from_clusterdb(on_default=True) - LOG.debug(f"Latest manifest in openstack: {m}") - tfvars.update(m.get_tfvars(self.tfhelper.plan)) - update_config(self.client, self._CONFIG, tfvars) - self.tfhelper.write_tfvars(tfvars) - self.update_status(status, "deploying services") try: - self.tfhelper.apply() + self.update_status(status, "deploying services") + self.manifest.update_tfvar_and_apply_tf( + tfplan=self.tfplan, tfvar_config=self._CONFIG, extra_tfvars=extra_tfvars + ) except TerraformException as e: LOG.exception("Error configuring cloud") return Result(ResultType.FAILED, str(e)) # Remove cinder-ceph from apps to wait on if ceph is not enabled apps = run_sync(self.jhelper.get_application_names(self.model)) - if not tfvars.get("enable-ceph") and "cinder-ceph" in apps: + if not extra_tfvars.get("enable-ceph") and "cinder-ceph" in apps: apps.remove("cinder-ceph") LOG.debug(f"Application monitored for readiness: {apps}") task = run_sync(update_status_background(self, apps, status)) @@ -264,7 +254,7 @@ class ResizeControlPlaneStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, topology: str, force: bool, @@ -273,11 +263,12 @@ def __init__( "Resizing OpenStack Control Plane", "Resizing OpenStack Control Plane to match appropriate topology", ) - self.tfhelper = tfhelper + self.manifest = manifest self.jhelper = jhelper self.topology = topology self.force = force self.model = OPENSTACK_MODEL + self.tfplan = "openstack-plan" def is_skip(self, status: Optional[Status] = None) -> Result: """Determines if the step should be skipped or not. 
@@ -321,30 +312,27 @@ def run(self, status: Optional[Status] = None) -> Result: TOPOLOGY_KEY, topology_dict, ) - tf_vars = read_config(client, self._CONFIG) control_nodes = client.cluster.list_nodes_by_role("control") storage_nodes = client.cluster.list_nodes_by_role("storage") # NOTE(jamespage) # When dedicated control nodes are used, ceph is not enabled during # bootstrap - however storage nodes may be added later so re-assess - tf_vars.update( - { - "ha-scale": compute_ha_scale(topology, len(control_nodes)), - "os-api-scale": compute_os_api_scale(topology, len(control_nodes)), - "ingress-scale": compute_ingress_scale(topology, len(control_nodes)), - "ceph-osd-replication-count": compute_ceph_replica_scale( - topology, len(storage_nodes) - ), - "enable-ceph": len(storage_nodes) > 0, - "ceph-offer-url": f"{CONTROLLER_MODEL}.{MICROCEPH_APPLICATION}", - } - ) + extra_tfvars = { + "ha-scale": compute_ha_scale(topology, len(control_nodes)), + "os-api-scale": compute_os_api_scale(topology, len(control_nodes)), + "ingress-scale": compute_ingress_scale(topology, len(control_nodes)), + "ceph-osd-replication-count": compute_ceph_replica_scale( + topology, len(storage_nodes) + ), + "enable-ceph": len(storage_nodes) > 0, + "ceph-offer-url": f"{CONTROLLER_MODEL}.{MICROCEPH_APPLICATION}", + } - update_config(client, self._CONFIG, tf_vars) self.update_status(status, "scaling services") - self.tfhelper.write_tfvars(tf_vars) try: - self.tfhelper.apply() + self.manifest.update_tfvar_and_apply_tf( + tfplan=self.tfplan, tfvar_config=self._CONFIG, extra_tfvars=extra_tfvars + ) except TerraformException as e: LOG.exception("Error resizing control plane") return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/commands/resize.py b/sunbeam-python/sunbeam/commands/resize.py index 13c3e000..ed7d198a 100644 --- a/sunbeam-python/sunbeam/commands/resize.py +++ b/sunbeam-python/sunbeam/commands/resize.py @@ -13,18 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -import shutil import click from rich.console import Console from snaphelpers import Snap from sunbeam.commands.openstack import ResizeControlPlaneStep -from sunbeam.commands.terraform import TerraformHelper, TerraformInitStep +from sunbeam.commands.terraform import TerraformInitStep from sunbeam.jobs.common import click_option_topology, run_plan from sunbeam.jobs.juju import JujuHelper from sunbeam.jobs.manifest import Manifest -from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -42,24 +40,11 @@ def resize(topology: str, force: bool = False) -> None: manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) tfplan = "openstack-plan" - tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) - manifest_tfplans = manifest_obj.terraform - src = manifest_tfplans.get(tfplan).source - dst = snap.paths.user_common / "etc" / tfplan_dir - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) - data_location = snap.paths.user_data - tfhelper = TerraformHelper( - path=snap.paths.user_common / "etc" / tfplan_dir, - plan=tfplan, - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - ResizeControlPlaneStep(tfhelper, jhelper, topology, force), + TerraformInitStep(manifest_obj.get_tfhelper(tfplan)), + ResizeControlPlaneStep(manifest_obj, jhelper, topology, force), ] run_plan(plan, console) diff --git a/sunbeam-python/sunbeam/commands/sunbeam_machine.py b/sunbeam-python/sunbeam/commands/sunbeam_machine.py index 9299c7ab..e4c2775a 100644 --- a/sunbeam-python/sunbeam/commands/sunbeam_machine.py +++ b/sunbeam-python/sunbeam/commands/sunbeam_machine.py @@ -15,8 +15,8 @@ import logging -from sunbeam.commands.terraform import TerraformHelper from sunbeam.jobs.juju import MODEL, JujuHelper +from sunbeam.jobs.manifest import Manifest from sunbeam.jobs.steps import ( AddMachineUnitStep, DeployMachineApplicationStep, @@ -37,15 +37,16 @@ class DeploySunbeamMachineApplicationStep(DeployMachineApplicationStep): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, ): super().__init__( - tfhelper, + manifest, jhelper, CONFIG_KEY, APPLICATION, MODEL, + "sunbeam-machine-plan", "Deploy sunbeam-machine", "Deploying Sunbeam Machine", ) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 47edf6a1..b2090668 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -15,6 +15,7 @@ import copy import logging +import shutil from dataclasses import asdict from pathlib import Path from typing import Any, Dict, List, Optional @@ -26,8 +27,19 @@ from sunbeam import utils from sunbeam.clusterd.client import Client as clusterClient -from sunbeam.clusterd.service import ManifestItemNotFoundException -from sunbeam.jobs.common import BaseStep, Result, ResultType, Status +from sunbeam.clusterd.service import ( + ConfigItemNotFoundException, + ManifestItemNotFoundException, +) +from sunbeam.commands.terraform import TerraformHelper +from sunbeam.jobs.common import ( + BaseStep, + Result, + ResultType, + Status, + read_config, + update_config, +) from sunbeam.jobs.plugin import PluginManager from sunbeam.versions import ( CHARM_MANIFEST_TFVARS_MAP, @@ -54,13 +66,15 @@ class CharmsManifest: revision: Optional[int] = Field( default=None, description="Revision number of the charm" ) - rocks: Optional[Dict[str, str]] = Field( - 
default=None, description="Rock images for the charm" - ) + # rocks: Optional[Dict[str, str]] = Field( + # default=None, description="Rock images for the charm" + # ) config: Optional[Dict[str, Any]] = Field( default=None, description="Config options of the charm" ) - source: Optional[Path] = Field(default=None, description="Local charm bundle path") + # source: Optional[Path] = Field( + # default=None, description="Local charm bundle path" + # ) @dataclass @@ -78,7 +92,7 @@ class Manifest: def load(cls, manifest_file: Path, on_default: bool = False) -> "Manifest": """Load the manifest with the provided file input""" if on_default: - return cls.load_on_default() + return cls.load_on_default(manifest_file) with manifest_file.open() as file: return Manifest(**yaml.safe_load(file)) @@ -171,17 +185,62 @@ def validate_terraform_keys(self, default_manifest: dict): def __post_init__(self): LOG.debug("Calling __post__init__") - manifest_dict = self.get_default_manifest_as_dict() + self.default_manifest_dict = self.get_default_manifest_as_dict() # Add custom validations - self.validate_terraform_keys(manifest_dict) + self.validate_terraform_keys(self.default_manifest_dict) - def get_tfvars(self, plan: str) -> dict: + # Add object variables to store + self.tf_helpers = {} + self.snap = Snap() + self.data_location = self.snap.paths.user_data + self.client = clusterClient() + + # Terraform helper classes + def get_tfhelper(self, tfplan: str) -> TerraformHelper: + if self.tf_helpers.get(tfplan): + return self.tf_helpers.get(tfplan) + + tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan, tfplan) + src = self.terraform.get(tfplan).source + dst = self.snap.paths.user_common / "etc" / tfplan_dir + LOG.debug(f"Updating {dst} from {src}...") + shutil.copytree(src, dst, dirs_exist_ok=True) + + self.tf_helpers[tfplan] = TerraformHelper( + path=self.snap.paths.user_common / "etc" / tfplan_dir, + plan=tfplan, + backend="http", + data_location=self.data_location, + ) + + return self.tf_helpers[tfplan] + + def update_tfvar_and_apply_tf( + self, tfplan: str, tfvar_config: Optional[str] = None, extra_tfvars: dict = {} + ) -> None: + tfvars = {} + if tfvar_config: + try: + tfvars = read_config(self.client, tfvar_config) + except ConfigItemNotFoundException: + pass + + tfvars.update(extra_tfvars) + tfvars.update(self.get_tfvars(tfplan)) + if tfvar_config: + update_config(self.client, tfvar_config, tfvars) + + tfhelper = self.get_tfhelper(tfplan) + tfhelper.write_tfvars(tfvars) + tfhelper.apply() + + def get_tfvars(self, tfplan: str) -> dict: tfvars = {} tfvar_map = copy.deepcopy(CHARM_MANIFEST_TFVARS_MAP) tfvar_map_plugin = PluginManager().get_all_plugin_manfiest_tfvar_map() utils.merge_dict(tfvar_map, tfvar_map_plugin) - for charm, value in tfvar_map.get(plan, {}).items(): + for charm, value in tfvar_map.get(tfplan, {}).items(): manifest_charm = asdict(self.charms.get(charm)) for key, val in value.items(): if manifest_charm.get(key): diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index ad029b28..33cfc28a 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -260,7 +260,7 @@ def get_all_plugin_manifests(cls) -> dict: plugins = cls.get_all_plugin_classes() for klass in plugins: plugin = klass() - m_dict = plugin.manifest() + m_dict = plugin.manifest_part() utils.merge_dict(manifest, m_dict) return manifest diff --git a/sunbeam-python/sunbeam/jobs/steps.py b/sunbeam-python/sunbeam/jobs/steps.py index 1b425473..ba50f0cc 100644 --- 
a/sunbeam-python/sunbeam/jobs/steps.py +++ b/sunbeam-python/sunbeam/jobs/steps.py @@ -23,7 +23,7 @@ ConfigItemNotFoundException, NodeNotExistInClusterException, ) -from sunbeam.commands.terraform import TerraformException, TerraformHelper +from sunbeam.commands.terraform import TerraformException from sunbeam.jobs.common import BaseStep, Result, ResultType, read_config, update_config from sunbeam.jobs.juju import ( ApplicationNotFoundException, @@ -41,21 +41,23 @@ class DeployMachineApplicationStep(BaseStep): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, config: str, application: str, model: str, + tfplan: str, banner: str = "", description: str = "", ): super().__init__(banner, description) - self.tfhelper = tfhelper + self.manifest = manifest self.jhelper = jhelper self.config = config self.application = application self.model = model self.client = Client() + self.tfplan = tfplan def extra_tfvars(self) -> dict: return {} @@ -86,19 +88,11 @@ def run(self, status: Optional[Status] = None) -> Result: LOG.debug(str(e)) try: - tfvars = read_config(self.client, self.config) - except ConfigItemNotFoundException: - tfvars = {} - - tfvars.update(self.extra_tfvars()) - m = Manifest.load_latest_from_clusterdb(on_default=True) - tfvars.update(m.get_tfvars(self.tfhelper.plan)) - tfvars.update({"machine_ids": machine_ids}) - update_config(self.client, self.config, tfvars) - self.tfhelper.write_tfvars(tfvars) - - try: - self.tfhelper.apply() + extra_tfvars = self.extra_tfvars() + extra_tfvars.update({"machine_ids": machine_ids}) + self.manifest.update_tfvar_and_apply_tf( + tfplan=self.tfplan, tfvar_config=self.config, extra_tfvars=extra_tfvars + ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index 44fb0fd8..16a83cf2 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -14,7 +14,6 @@ # limitations under the License. 
import logging -import shutil from pathlib import Path from typing import Optional @@ -36,7 +35,6 @@ ) from sunbeam.jobs.common import BaseStep, Result, ResultType, read_config, run_plan from sunbeam.jobs.juju import JujuHelper -from sunbeam.jobs.manifest import Manifest from sunbeam.plugins.interface.v1.base import PluginRequirement from sunbeam.plugins.interface.v1.openstack import ( OpenStackControlPlanePlugin, @@ -89,8 +87,8 @@ def __init__(self) -> None: ) self.configure_plan = "caas-setup" - def manifest(self) -> dict: - """Manifest in dict format.""" + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return { "charms": { "magnum": {"channel": OPENSTACK_CHANNEL}, @@ -125,7 +123,6 @@ def set_application_names(self) -> list: def set_tfvars_on_enable(self) -> dict: """Set terraform variables to enable the application.""" return { - "magnum-channel": "2023.2/edge", "enable-magnum": True, **self.add_horizon_plugin_to_tfvars("magnum"), } @@ -180,23 +177,12 @@ def disable_plugin(self) -> None: @click.command() def configure(self): """Configure Cloud for Container as a Service use.""" - manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) - manifest_tfplans = manifest_obj.terraform - src = manifest_tfplans.get(self.configure_plan).source - dst = self.snap.paths.user_common / "etc" / self.configure_plan - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) - data_location = self.snap.paths.user_data jhelper = JujuHelper(data_location) admin_credentials = retrieve_admin_credentials(jhelper, OPENSTACK_MODEL) - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.configure_plan, - env=admin_credentials, - plan="caas-plan", - backend="http", - data_location=data_location, - ) + + tfhelper = self.manifest.get_tfplan(self.configure_plan) + tfhelper.env = admin_credentials plan = [ TerraformInitStep(tfhelper), CaasConfigureStep(tfhelper), diff --git a/sunbeam-python/sunbeam/plugins/dns/plugin.py b/sunbeam-python/sunbeam/plugins/dns/plugin.py index 306af9ae..0851bbb9 100644 --- a/sunbeam-python/sunbeam/plugins/dns/plugin.py +++ b/sunbeam-python/sunbeam/plugins/dns/plugin.py @@ -22,7 +22,7 @@ from sunbeam.clusterd.service import ClusterServiceUnavailableException from sunbeam.commands.openstack import OPENSTACK_MODEL, PatchLoadBalancerServicesStep -from sunbeam.commands.terraform import TerraformHelper, TerraformInitStep +from sunbeam.commands.terraform import TerraformInitStep from sunbeam.jobs.common import run_plan from sunbeam.jobs.juju import JujuHelper, run_sync from sunbeam.plugins.interface.v1.openstack import ( @@ -52,8 +52,8 @@ def __init__(self) -> None: ) self.nameservers = None - def manifest(self) -> dict: - """Manifest in dict format.""" + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return { "charms": { "designate": {"channel": OPENSTACK_CHANNEL}, @@ -81,16 +81,10 @@ def charm_manifest_tfvar_map(self) -> dict: def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - EnableOpenStackApplicationStep(tfhelper, jhelper, self), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + EnableOpenStackApplicationStep(jhelper, self), 
PatchBindLoadBalancerStep(), ] diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/base.py b/sunbeam-python/sunbeam/plugins/interface/v1/base.py index b9c949ea..c867fcad 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/base.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/base.py @@ -189,7 +189,7 @@ def fetch_plugin_version(self, plugin: str) -> Version: return Version(version) - def manifest(self) -> dict: + def manifest_part(self) -> dict: """Return manifest part of the plugin. Define manifest charms involved and default values for charm attributes diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py index 93fd40c5..bf266ec8 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py @@ -15,7 +15,6 @@ import logging -import shutil from abc import abstractmethod from enum import Enum from pathlib import Path @@ -34,11 +33,7 @@ OPENSTACK_MODEL, determine_target_topology_at_bootstrap, ) -from sunbeam.commands.terraform import ( - TerraformException, - TerraformHelper, - TerraformInitStep, -) +from sunbeam.commands.terraform import TerraformException, TerraformInitStep from sunbeam.jobs.checks import VerifyBootstrappedCheck from sunbeam.jobs.common import ( BaseStep, @@ -122,12 +117,15 @@ def __init__(self, name: str, tf_plan_location: TerraformPlanLocation) -> None: self.tfplan_dir = f"deploy-{self.name}" self.snap = Snap() + self._manifest = None + + @property + def manifest(self) -> Manifest: + if self._manifest: + return self._manifest - def _get_tf_plan_full_path(self) -> Path: - """Returns terraform plan absolute path.""" - manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) - manifest_tfplans = manifest_obj.terraform - return manifest_tfplans.get(self.tfplan).source + self._manifest = Manifest.load_latest_from_clusterdb(on_default=True) + return self._manifest def is_openstack_control_plane(self) -> bool: """Is plugin deploys openstack control plane. 
@@ -148,34 +146,19 @@ def pre_checks(self) -> None: preflight_checks = [] preflight_checks.append(VerifyBootstrappedCheck()) run_preflight_checks(preflight_checks, console) - src = self._get_tf_plan_full_path() - dst = self.snap.paths.user_common / "etc" / self.tfplan_dir - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) def pre_enable(self) -> None: """Handler to perform tasks before enabling the plugin.""" self.pre_checks() super().pre_enable() - def get_tfhelper(self): - data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) - return tfhelper - def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data - tfhelper = self.get_tfhelper() jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - EnableOpenStackApplicationStep(tfhelper, jhelper, self), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + EnableOpenStackApplicationStep(jhelper, self), ] run_plan(plan, console) @@ -189,11 +172,10 @@ def pre_disable(self) -> None: def run_disable_plans(self) -> None: """Run plans to disable the plugin.""" data_location = self.snap.paths.user_data - tfhelper = self.get_tfhelper() jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - DisableOpenStackApplicationStep(tfhelper, jhelper, self), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + DisableOpenStackApplicationStep(jhelper, self), ] run_plan(plan, console) @@ -313,10 +295,9 @@ def upgrade_hook(self, upgrade_release: bool = False): :param upgrade_release: Whether to upgrade release """ data_location = self.snap.paths.user_data - tfhelper = self.get_tfhelper() jhelper = JujuHelper(data_location) plan = [ - UpgradeApplicationStep(tfhelper, jhelper, self, upgrade_release), + UpgradeApplicationStep(jhelper, self, upgrade_release), ] run_plan(plan, console) @@ -325,14 +306,12 @@ def upgrade_hook(self, upgrade_release: bool = False): class UpgradeApplicationStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, jhelper: JujuHelper, plugin: OpenStackControlPlanePlugin, upgrade_release: bool = False, ) -> None: """Constructor for the generic plan. - :param tfhelper: Terraform helper pointing to terraform plan :param jhelper: Juju helper with loaded juju credentials :param plugin: Plugin that uses this plan to perform callbacks to plugin. @@ -341,11 +320,11 @@ def __init__( f"Refresh OpenStack {plugin.name}", f"Refresh OpenStack {plugin.name} application", ) - self.tfhelper = tfhelper self.jhelper = jhelper self.plugin = plugin self.model = OPENSTACK_MODEL self.upgrade_release = upgrade_release + self.tfhelper = self.plugin.manifest.get_tfhelper(self.plugin.tfplan) def terraform_sync(self, config_key: str, tfvars_delta: dict): """Sync the running state back to the Terraform state file. @@ -419,13 +398,11 @@ class EnableOpenStackApplicationStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, jhelper: JujuHelper, plugin: OpenStackControlPlanePlugin, ) -> None: """Constructor for the generic plan. - :param tfhelper: Terraform helper pointing to terraform plan :param jhelper: Juju helper with loaded juju credentials :param plugin: Plugin that uses this plan to perform callbacks to plugin. 
@@ -434,7 +411,6 @@ def __init__( f"Enable OpenStack {plugin.name}", f"Enabling OpenStack {plugin.name} application", ) - self.tfhelper = tfhelper self.jhelper = jhelper self.plugin = plugin self.model = OPENSTACK_MODEL @@ -443,19 +419,14 @@ def __init__( def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy openstack application""" config_key = self.plugin.get_tfvar_config_key() + extra_tfvars = self.plugin.set_tfvars_on_enable() try: - tfvars = read_config(self.client, config_key) - except ConfigItemNotFoundException: - tfvars = {} - tfvars.update(self.plugin.set_tfvars_on_enable()) - m = Manifest.load_latest_from_clusterdb(on_default=True) - tfvars.update(m.get_tfvars(self.tfhelper.plan)) - update_config(self.client, config_key, tfvars) - self.tfhelper.write_tfvars(tfvars) - - try: - self.tfhelper.apply() + self.plugin.manifest.update_tfvar_and_apply_tf( + tfplan=self.plugin.tfplan, + tfvar_config=config_key, + extra_tfvars=extra_tfvars, + ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -481,13 +452,11 @@ class DisableOpenStackApplicationStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, jhelper: JujuHelper, plugin: OpenStackControlPlanePlugin, ) -> None: """Constructor for the generic plan. - :param tfhelper: Terraform helper pointing to terraform plan :param jhelper: Juju helper with loaded juju credentials :param plugin: Plugin that uses this plan to perform callbacks to plugin. @@ -496,7 +465,6 @@ def __init__( f"Disable OpenStack {plugin.name}", f"Disabling OpenStack {plugin.name} application", ) - self.tfhelper = tfhelper self.jhelper = jhelper self.plugin = plugin self.model = OPENSTACK_MODEL @@ -506,22 +474,20 @@ def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to remove openstack application""" config_key = self.plugin.get_tfvar_config_key() - try: - tfvars = read_config(self.client, config_key) - except ConfigItemNotFoundException: - tfvars = {} - try: if self.plugin.tf_plan_location == TerraformPlanLocation.PLUGIN_REPO: # Just destroy the terraform plan - self.tfhelper.destroy() + tfhelper = self.manifest.get_tfhelper(self.plugin.tfplan) + tfhelper.destroy() delete_config(self.client, config_key) else: # Update terraform variables to disable the application - tfvars.update(self.plugin.set_tfvars_on_disable()) - update_config(self.client, config_key, tfvars) - self.tfhelper.write_tfvars(tfvars) - self.tfhelper.apply() + extra_tfvars = self.plugin.set_tfvars_on_disable() + self.plugin.manifest.update_tfvar_and_apply_tf( + tfplan=self.plugin.tfplan, + tfvar_config=config_key, + extra_tfvars=extra_tfvars, + ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/plugins/ldap/plugin.py b/sunbeam-python/sunbeam/plugins/ldap/plugin.py index da8d1236..bda7414d 100644 --- a/sunbeam-python/sunbeam/plugins/ldap/plugin.py +++ b/sunbeam-python/sunbeam/plugins/ldap/plugin.py @@ -31,11 +31,7 @@ ) from sunbeam.commands.juju import JujuStepHelper from sunbeam.commands.openstack import OPENSTACK_MODEL -from sunbeam.commands.terraform import ( - TerraformException, - TerraformHelper, - TerraformInitStep, -) +from sunbeam.commands.terraform import TerraformException, TerraformInitStep from sunbeam.jobs.common import ( BaseStep, Result, @@ -63,14 +59,12 @@ class DisableLDAPDomainStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, jhelper: JujuHelper, plugin: 
OpenStackControlPlanePlugin, domain_name: str, ) -> None: """Constructor for the generic plan. - :param tfhelper: Terraform helper pointing to terraform plan :param jhelper: Juju helper with loaded juju credentials :param plugin: Plugin that uses this plan to perform callbacks to plugin. @@ -79,12 +73,12 @@ def __init__( f"Enable OpenStack {plugin.name}", f"Enabling OpenStack {plugin.name} application", ) - self.tfhelper = tfhelper self.jhelper = jhelper self.plugin = plugin self.model = OPENSTACK_MODEL self.domain_name = domain_name self.client = Client() + self.tfhelper = self.plugin.manifest.get_tfhelper(self.plugin.tfplan) def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy openstack application""" @@ -132,14 +126,12 @@ def run(self, status: Optional[Status] = None) -> Result: class UpdateLDAPDomainStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, jhelper: JujuHelper, plugin: OpenStackControlPlanePlugin, charm_config: str, ) -> None: """Constructor for the generic plan. - :param tfhelper: Terraform helper pointing to terraform plan :param jhelper: Juju helper with loaded juju credentials :param plugin: Plugin that uses this plan to perform callbacks to plugin. @@ -148,12 +140,12 @@ def __init__( f"Enable OpenStack {plugin.name}", f"Enabling OpenStack {plugin.name} application", ) - self.tfhelper = tfhelper self.jhelper = jhelper self.plugin = plugin self.model = OPENSTACK_MODEL self.charm_config = charm_config self.client = Client() + self.tfhelper = self.plugin.manifest.get_tfhelper(self.plugin.tfplan) def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy openstack application""" @@ -200,14 +192,12 @@ class AddLDAPDomainStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, jhelper: JujuHelper, plugin: OpenStackControlPlanePlugin, charm_config: str, ) -> None: """Constructor for the generic plan. - :param tfhelper: Terraform helper pointing to terraform plan :param jhelper: Juju helper with loaded juju credentials :param plugin: Plugin that uses this plan to perform callbacks to plugin. 
@@ -216,12 +206,12 @@ def __init__( f"Enable OpenStack {plugin.name}", f"Enabling OpenStack {plugin.name} application", ) - self.tfhelper = tfhelper self.jhelper = jhelper self.plugin = plugin self.model = OPENSTACK_MODEL self.charm_config = charm_config self.client = Client() + self.tfhelper = self.plugin.manifest.get_tfhelper(self.plugin.tfplan) def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy openstack application""" @@ -271,8 +261,8 @@ def __init__(self) -> None: ) self.config_flags = None - def manifest(self) -> dict: - """Manifest in dict format.""" + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return {"charms": {"keystone-ldap": {"channel": OPENSTACK_CHANNEL}}} def charm_manifest_tfvar_map(self) -> dict: @@ -288,9 +278,7 @@ def charm_manifest_tfvar_map(self) -> dict: def set_tfvars_on_enable(self) -> dict: """Set terraform variables to enable the application.""" - return { - "ldap-channel": "2023.2/edge", - } + return {} def set_tfvars_on_disable(self) -> dict: """Set terraform variables to disable the application.""" @@ -358,16 +346,10 @@ def add_domain( "tls-ca-ldap": ca, } data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - AddLDAPDomainStep(tfhelper, jhelper, self, charm_config), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + AddLDAPDomainStep(jhelper, self, charm_config), ] run_plan(plan, console) @@ -403,16 +385,10 @@ def update_domain( ca = f.read() charm_config["tls-ca-ldap"] = ca data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - UpdateLDAPDomainStep(tfhelper, jhelper, self, charm_config), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + UpdateLDAPDomainStep(jhelper, self, charm_config), ] run_plan(plan, console) @@ -422,16 +398,10 @@ def update_domain( def remove_domain(self, domain_name: str) -> None: """Remove LDAP backed domain.""" data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - DisableLDAPDomainStep(tfhelper, jhelper, self, domain_name), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + DisableLDAPDomainStep(jhelper, self, domain_name), ] run_plan(plan, console) click.echo(f"{domain_name} removed.") diff --git a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py index e83437af..c3bb2fb3 100644 --- a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py +++ b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py @@ -36,8 +36,8 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest(self) -> dict: - """Manifest in dict format.""" + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return {"charms": {"octavia": {"channel": OPENSTACK_CHANNEL}}} def charm_manifest_tfvar_map(self) -> dict: @@ -63,7 +63,6 @@ def 
set_application_names(self) -> list: def set_tfvars_on_enable(self) -> dict: """Set terraform variables to enable the application.""" return { - "octavia-channel": "2023.2/edge", "enable-octavia": True, **self.add_horizon_plugin_to_tfvars("octavia"), } diff --git a/sunbeam-python/sunbeam/plugins/observability/plugin.py b/sunbeam-python/sunbeam/plugins/observability/plugin.py index a97f5ad6..ee676beb 100644 --- a/sunbeam-python/sunbeam/plugins/observability/plugin.py +++ b/sunbeam-python/sunbeam/plugins/observability/plugin.py @@ -19,7 +19,6 @@ """ import logging -import shutil from pathlib import Path from typing import Optional @@ -438,9 +437,18 @@ def __init__(self) -> None: self.tfplan_cos_dir = "deploy-cos" self.tfplan_grafana_agent = "grafana-agent-plan" self.tfplan_grafana_agent_dir = "deploy-grafana-agent" + self._manifest = None - def manifest(self) -> dict: - """Manifest in dict format.""" + @property + def manifest(self) -> Manifest: + if self._manifest: + return self._manifest + + self._manifest = Manifest.load_latest_from_clusterdb(on_default=True) + return self._manifest + + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return { "terraform": { self.tfplan_cos: { @@ -454,44 +462,16 @@ def manifest(self) -> dict: } } - def pre_enable(self): - manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) - manifest_tfplans = manifest_obj.terraform - src = manifest_tfplans.get(self.tfplan_cos).source - dst = self.snap.paths.user_common / "etc" / self.tfplan_cos_dir - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) - - src = manifest_tfplans.get(self.tfplan_grafana_agent).source - dst = self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent_dir - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) - def run_enable_plans(self): data_location = self.snap.paths.user_data - tfhelper_cos = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_cos_dir, - plan=self.tfplan_cos, - backend="http", - data_location=data_location, - ) - tfhelper_grafana_agent = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent_dir, - plan=self.tfplan_grafana_agent, - backend="http", - data_location=data_location, - ) - tfplan_openstack = f"{OPENSTACK_TERRAFORM_PLAN}-plan" - tfplan_openstack_dir = f"deploy-{OPENSTACK_TERRAFORM_PLAN}" - tfhelper_openstack = TerraformHelper( - path=self.snap.paths.user_common / "etc" / tfplan_openstack_dir, - plan=tfplan_openstack, - backend="http", - data_location=data_location, - ) - jhelper = JujuHelper(data_location) + tfhelper_cos = self.manifest.get_tfhelper(self.tfplan_cos) + tfhelper_grafana_agent = self.manifest.get_tfhelper(self.tfplan_grafana_agent) + tfhelper_openstack = self.manifest.get_tfhelper( + f"{OPENSTACK_TERRAFORM_PLAN}-plan" + ) + cos_plan = [ TerraformInitStep(tfhelper_cos), DeployObservabilityStackStep(self, tfhelper_cos, jhelper), @@ -509,34 +489,16 @@ def run_enable_plans(self): click.echo("Observability enabled.") - def pre_disable(self): - self.pre_enable() - def run_disable_plans(self): data_location = self.snap.paths.user_data - tfhelper_cos = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_cos_dir, - plan=self.tfplan_cos, - backend="http", - data_location=data_location, - ) - tfhelper_grafana_agent = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_grafana_agent_dir, - plan=self.tfplan_grafana_agent, - 
backend="http", - data_location=data_location, - ) - tfplan_openstack = f"{OPENSTACK_TERRAFORM_PLAN}-plan" - tfplan_openstack_dir = f"deploy-{OPENSTACK_TERRAFORM_PLAN}" - tfhelper_openstack = TerraformHelper( - path=self.snap.paths.user_common / "etc" / tfplan_openstack_dir, - plan=tfplan_openstack, - backend="http", - data_location=data_location, - ) - jhelper = JujuHelper(data_location) + tfhelper_cos = self.manifest.get_tfhelper(self.tfplan_cos) + tfhelper_grafana_agent = self.manifest.get_tfhelper(self.tfplan_grafana_agent) + tfhelper_openstack = self.manifest.get_tfhelper( + f"{OPENSTACK_TERRAFORM_PLAN}-plan" + ) + cos_plan = [ TerraformInitStep(tfhelper_cos), RemoveObservabilityIntegrationStep(tfhelper_openstack, jhelper), diff --git a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py index 367e2761..fabc421f 100644 --- a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py +++ b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py @@ -36,8 +36,8 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest(self) -> dict: - """Manifest in dict format.""" + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return {"charms": {"heat": {"channel": OPENSTACK_CHANNEL}}} def charm_manifest_tfvar_map(self) -> dict: @@ -63,7 +63,6 @@ def set_application_names(self) -> list: def set_tfvars_on_enable(self) -> dict: """Set terraform variables to enable the application.""" return { - "heat-channel": "2023.2/edge", "enable-heat": True, **self.add_horizon_plugin_to_tfvars("heat"), } diff --git a/sunbeam-python/sunbeam/plugins/pro/plugin.py b/sunbeam-python/sunbeam/plugins/pro/plugin.py index 834c9690..9e2e9e05 100644 --- a/sunbeam-python/sunbeam/plugins/pro/plugin.py +++ b/sunbeam-python/sunbeam/plugins/pro/plugin.py @@ -16,7 +16,6 @@ """Ubuntu Pro subscription management plugin.""" import logging -import shutil from pathlib import Path from typing import Optional @@ -27,11 +26,7 @@ from snaphelpers import Snap from sunbeam.commands.juju import JujuStepHelper -from sunbeam.commands.terraform import ( - TerraformException, - TerraformHelper, - TerraformInitStep, -) +from sunbeam.commands.terraform import TerraformException, TerraformInitStep from sunbeam.jobs.common import BaseStep, Result, ResultType, run_plan from sunbeam.jobs.juju import MODEL, JujuHelper, TimeoutException, run_sync from sunbeam.jobs.manifest import Manifest @@ -50,14 +45,16 @@ class EnableUbuntuProApplicationStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, jhelper: JujuHelper, token: str, + tfplan: str, ): super().__init__("Enable Ubuntu Pro", "Enabling Ubuntu Pro support") - self.tfhelper = tfhelper + self.manifest = manifest self.jhelper = jhelper self.token = token + self.tfplan = tfplan def has_prompts(self) -> bool: """Returns true if the step has prompts that it can ask the user.""" @@ -73,9 +70,11 @@ def is_skip(self, status: Optional[Status] = None) -> Result: def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy ubuntu-pro""" - self.tfhelper.write_tfvars({"token": self.token}) + extra_tfvars = {"token": self.token} try: - self.tfhelper.apply() + self.manifest.update_tfvar_and_apply_tf( + tfplan=self.tfplan, tfvar_config=None, extra_tfvars=extra_tfvars + ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -117,10 +116,12 @@ class 
DisableUbuntuProApplicationStep(BaseStep, JujuStepHelper): def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, + tfplan: str, ): super().__init__("Disable Ubuntu Pro", "Disabling Ubuntu Pro support") - self.tfhelper = tfhelper + self.manifest = manifest + self.tfplan = tfplan def has_prompts(self) -> bool: """Returns true if the step has prompts that it can ask the user.""" @@ -136,9 +137,11 @@ def is_skip(self, status: Optional[Status] = None) -> Result: def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to disable ubuntu-pro""" - self.tfhelper.write_tfvars({"token": ""}) + extra_tfvars = {"token": ""} try: - self.tfhelper.apply() + self.manifest.update_tfvar_and_apply_tf( + tfplan=self.tfplan, tfvar_config=None, extra_tfvars=extra_tfvars + ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -154,35 +157,32 @@ def __init__(self) -> None: self.snap = Snap() self.tfplan = "ubuntu-pro-plan" self.tfplan_dir = f"deploy-{self.name}" + self._manifest = None + + @property + def manifest(self) -> Manifest: + if self._manifest: + return self._manifest - def manifest(self) -> dict: - """Manifest in dict format.""" + self._manifest = Manifest.load_latest_from_clusterdb(on_default=True) + return self._manifest + + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return { "terraform": { self.tfplan: {"source": Path(__file__).parent / "etc" / self.tfplan_dir} } } - def pre_enable(self): - manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) - manifest_tfplans = manifest_obj.terraform - src = manifest_tfplans.get(self.tfplan).source - dst = self.snap.paths.user_common / "etc" / self.tfplan_dir - LOG.debug(f"Updating {dst} from {src}...") - shutil.copytree(src, dst, dirs_exist_ok=True) - def run_enable_plans(self): data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - EnableUbuntuProApplicationStep(tfhelper, jhelper, self.token), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + EnableUbuntuProApplicationStep( + self.manifest, jhelper, self.token, self.tfplan + ), ] run_plan(plan, console) @@ -193,20 +193,10 @@ def run_enable_plans(self): ) click.echo("Ubuntu Pro enabled.") - def pre_disable(self): - self.pre_enable() - def run_disable_plans(self): - data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) plan = [ - TerraformInitStep(tfhelper), - DisableUbuntuProApplicationStep(tfhelper), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + DisableUbuntuProApplicationStep(self.manifest, self.tfplan), ] run_plan(plan, console) diff --git a/sunbeam-python/sunbeam/plugins/secrets/plugin.py b/sunbeam-python/sunbeam/plugins/secrets/plugin.py index b9ac1d5a..95f91d54 100644 --- a/sunbeam-python/sunbeam/plugins/secrets/plugin.py +++ b/sunbeam-python/sunbeam/plugins/secrets/plugin.py @@ -38,8 +38,8 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest(self) -> dict: - """Manifest in dict format.""" + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return {"charms": {"barbican": {"channel": 
OPENSTACK_CHANNEL}}} def charm_manifest_tfvar_map(self) -> dict: @@ -66,7 +66,6 @@ def set_tfvars_on_enable(self) -> dict: """Set terraform variables to enable the application.""" return { "enable-barbican": True, - "barbican-channel": "2023.2/edge", } def set_tfvars_on_disable(self) -> dict: diff --git a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py index e4719bde..787f4795 100644 --- a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py +++ b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py @@ -20,7 +20,7 @@ from rich.console import Console from sunbeam.commands.hypervisor import ReapplyHypervisorTerraformPlanStep -from sunbeam.commands.terraform import TerraformHelper, TerraformInitStep +from sunbeam.commands.terraform import TerraformInitStep from sunbeam.jobs.common import run_plan from sunbeam.jobs.juju import JujuHelper, ModelNotFoundException, run_sync from sunbeam.plugins.interface.v1.openstack import ( @@ -44,8 +44,8 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest(self) -> dict: - """Manifest in dict format.""" + def manifest_part(self) -> dict: + """Manifest plugin part in dict format.""" return { "charms": { "aodh": {"channel": OPENSTACK_CHANNEL}, @@ -79,24 +79,12 @@ def charm_manifest_tfvar_map(self) -> dict: def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) - tfhelper_hypervisor_deploy = TerraformHelper( - path=self.snap.paths.user_common / "etc" / "deploy-openstack-hypervisor", - plan="hypervisor-plan", - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - EnableOpenStackApplicationStep(tfhelper, jhelper, self), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + EnableOpenStackApplicationStep(jhelper, self), # No need to pass any extra terraform vars for this plugin - ReapplyHypervisorTerraformPlanStep(tfhelper_hypervisor_deploy, jhelper), + ReapplyHypervisorTerraformPlanStep(self.manifest, jhelper), ] run_plan(plan, console) @@ -105,23 +93,11 @@ def run_enable_plans(self) -> None: def run_disable_plans(self) -> None: """Run plans to disable the plugin.""" data_location = self.snap.paths.user_data - tfhelper = TerraformHelper( - path=self.snap.paths.user_common / "etc" / self.tfplan_dir, - plan=self.tfplan, - backend="http", - data_location=data_location, - ) - tfhelper_hypervisor_deploy = TerraformHelper( - path=self.snap.paths.user_common / "etc" / "deploy-openstack-hypervisor", - plan="hypervisor-plan", - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(data_location) plan = [ - TerraformInitStep(tfhelper), - DisableOpenStackApplicationStep(tfhelper, jhelper, self), - ReapplyHypervisorTerraformPlanStep(tfhelper_hypervisor_deploy, jhelper), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + DisableOpenStackApplicationStep(jhelper, self), + ReapplyHypervisorTerraformPlanStep(self.manifest, jhelper), ] run_plan(plan, console) @@ -162,7 +138,6 @@ def set_application_names(self) -> list: def set_tfvars_on_enable(self) -> dict: """Set terraform variables to enable the application.""" return { - "telemetry-channel": "2023.2/edge", "enable-telemetry": True, **self._get_observability_offer_endpoints(), } diff --git 
a/sunbeam-python/sunbeam/plugins/vault/plugin.py b/sunbeam-python/sunbeam/plugins/vault/plugin.py
index e0d062f2..9cc75bab 100644
--- a/sunbeam-python/sunbeam/plugins/vault/plugin.py
+++ b/sunbeam-python/sunbeam/plugins/vault/plugin.py
@@ -42,8 +42,8 @@ def __init__(self) -> None:
             tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO,
         )
 
-    def manifest(self) -> dict:
-        """Manifest in dict format."""
+    def manifest_part(self) -> dict:
+        """Manifest plugin part in dict format."""
         return {"charms": {"vault": {"channel": VAULT_CHANNEL}}}
 
     def charm_manifest_tfvar_map(self) -> dict:
@@ -66,7 +66,6 @@ def set_tfvars_on_enable(self) -> dict:
         """Set terraform variables to enable the application."""
         return {
             "enable-vault": True,
-            "vault-channel": "latest/edge",
         }
 
     def set_tfvars_on_disable(self) -> dict:
diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py b/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py
index cbc44d86..c72d7c2c 100644
--- a/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py
+++ b/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py
@@ -29,7 +29,6 @@
 from sunbeam.commands.terraform import TerraformException
 from sunbeam.jobs.common import ResultType
 from sunbeam.jobs.juju import ApplicationNotFoundException, TimeoutException
-from sunbeam.jobs.manifest import Manifest
 
 
 @pytest.fixture(autouse=True)
@@ -68,6 +67,7 @@ def setUp(self):
         self.tfhelper_openstack = Mock(output=Mock(return_value={}))
         self.tfhelper_openstack.backend = "http"
         self.tfhelper_openstack.backend_config.return_value = {}
+        self.manifest = Mock()
 
     def tearDown(self):
         self.client.stop()
@@ -78,61 +78,46 @@ def test_is_skip(self):
             "not found"
         )
 
-        step = DeployHypervisorApplicationStep(
-            self.tfhelper, self.tfhelper_openstack, self.jhelper
-        )
+        step = DeployHypervisorApplicationStep(self.manifest, self.jhelper)
         result = step.is_skip()
         self.jhelper.get_application.assert_called_once()
         assert result.result_type == ResultType.COMPLETED
 
     def test_is_skip_app_already_deployed(self):
-        step = DeployHypervisorApplicationStep(
-            self.tfhelper, self.tfhelper_openstack, self.jhelper
-        )
+        step = DeployHypervisorApplicationStep(self.manifest, self.jhelper)
         result = step.is_skip()
         self.jhelper.get_application.assert_called_once()
         assert result.result_type == ResultType.SKIPPED
 
-    @patch("sunbeam.jobs.manifest.PluginManager")
-    @patch.object(Manifest, "load_latest_from_clusterdb_on_default")
-    def test_run_pristine_installation(self, manifest, pluginmanager):
+    def test_run_pristine_installation(self):
         self.jhelper.get_application.side_effect = ApplicationNotFoundException(
             "not found"
         )
 
-        step = DeployHypervisorApplicationStep(
-            self.tfhelper, self.tfhelper_openstack, self.jhelper
-        )
+        step = DeployHypervisorApplicationStep(self.manifest, self.jhelper)
         result = step.run()
 
-        self.tfhelper.write_tfvars.assert_called_once()
-        self.tfhelper.apply.assert_called_once()
+        self.manifest.update_tfvar_and_apply_tf.assert_called_once()
         assert result.result_type == ResultType.COMPLETED
 
-    @patch("sunbeam.jobs.manifest.PluginManager")
-    @patch.object(Manifest, "load_latest_from_clusterdb_on_default")
-    def test_run_tf_apply_failed(self, manifest, pluginmanager):
-        self.tfhelper.apply.side_effect = TerraformException("apply failed...")
-
-        step = DeployHypervisorApplicationStep(
-            self.tfhelper, self.tfhelper_openstack, self.jhelper
+    def test_run_tf_apply_failed(self):
+        self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException(
+            "apply failed..."
) + + step = DeployHypervisorApplicationStep(self.manifest, self.jhelper) result = step.run() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." - @patch("sunbeam.jobs.manifest.PluginManager") - @patch.object(Manifest, "load_latest_from_clusterdb_on_default") - def test_run_waiting_timed_out(self, manifest, pluginmanager): + def test_run_waiting_timed_out(self): self.jhelper.wait_application_ready.side_effect = TimeoutException("timed out") - step = DeployHypervisorApplicationStep( - self.tfhelper, self.tfhelper_openstack, self.jhelper - ) + step = DeployHypervisorApplicationStep(self.manifest, self.jhelper) result = step.run() self.jhelper.wait_application_ready.assert_called_once() @@ -378,30 +363,17 @@ class TestReapplyHypervisorTerraformPlanStep(unittest.TestCase): def __init__(self, methodName: str = "runTest") -> None: super().__init__(methodName) self.client = patch("sunbeam.commands.hypervisor.Client", Mock()) - self.read_config = patch( - "sunbeam.commands.hypervisor.read_config", - Mock( - return_value={ - "openstack_model": "openstack", - } - ), - ) def setUp(self): self.client.start() - self.read_config.start() self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) - self.tfhelper_openstack = Mock(output=Mock(return_value={})) - self.tfhelper_openstack.backend = "http" - self.tfhelper_openstack.backend_config.return_value = {} + self.manifest = Mock() def tearDown(self): self.client.stop() - self.read_config.stop() def test_is_skip(self): - step = ReapplyHypervisorTerraformPlanStep(self.tfhelper, self.jhelper) + step = ReapplyHypervisorTerraformPlanStep(self.manifest, self.jhelper) result = step.is_skip() assert result.result_type == ResultType.COMPLETED @@ -411,27 +383,28 @@ def test_run_pristine_installation(self): "not found" ) - step = ReapplyHypervisorTerraformPlanStep(self.tfhelper, self.jhelper) + step = ReapplyHypervisorTerraformPlanStep(self.manifest, self.jhelper) result = step.run() - self.tfhelper.write_tfvars.assert_called_once() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed(self): - self.tfhelper.apply.side_effect = TerraformException("apply failed...") + self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + "apply failed..." + ) - step = ReapplyHypervisorTerraformPlanStep(self.tfhelper, self.jhelper) + step = ReapplyHypervisorTerraformPlanStep(self.manifest, self.jhelper) result = step.run() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
def test_run_waiting_timed_out(self): self.jhelper.wait_application_ready.side_effect = TimeoutException("timed out") - step = ReapplyHypervisorTerraformPlanStep(self.tfhelper, self.jhelper) + step = ReapplyHypervisorTerraformPlanStep(self.manifest, self.jhelper) result = step.run() self.jhelper.wait_application_ready.assert_called_once() diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py index 120f56c4..f2e4017a 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py @@ -14,7 +14,6 @@ import asyncio import unittest -from pathlib import Path from unittest.mock import AsyncMock, Mock, patch import pytest @@ -37,7 +36,6 @@ JujuWaitException, TimeoutException, ) -from sunbeam.jobs.manifest import Manifest TOPOLOGY = "single" DATABASE = "single" @@ -64,51 +62,46 @@ def __init__(self, methodName: str = "runTest") -> None: def setUp(self): self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) + self.manifest = Mock() - @patch("sunbeam.jobs.manifest.PluginManager") - @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_pristine_installation(self, client, manifest, pluginmanager): + def test_run_pristine_installation(self, client): self.jhelper.get_application.side_effect = ApplicationNotFoundException( "not found" ) - step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) + step = DeployControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, DATABASE) with patch( "sunbeam.commands.openstack.read_config", Mock(return_value={}), ): result = step.run() - self.tfhelper.write_tfvars.assert_called_once() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED - @patch("sunbeam.jobs.manifest.PluginManager") - @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_tf_apply_failed(self, client, manifest, pluginmanager): - self.tfhelper.apply.side_effect = TerraformException("apply failed...") + def test_run_tf_apply_failed(self, client): + self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + "apply failed..." + ) - step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) + step = DeployControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, DATABASE) with patch( "sunbeam.commands.openstack.read_config", Mock(return_value={}), ): result = step.run() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
- @patch("sunbeam.jobs.manifest.PluginManager") - @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_waiting_timed_out(self, client, manifest, pluginmanager): + def test_run_waiting_timed_out(self, client): self.jhelper.wait_until_active.side_effect = TimeoutException("timed out") - step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) + step = DeployControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, DATABASE) with patch( "sunbeam.commands.openstack.read_config", Mock(return_value={}), @@ -119,15 +112,13 @@ def test_run_waiting_timed_out(self, client, manifest, pluginmanager): assert result.result_type == ResultType.FAILED assert result.message == "timed out" - @patch("sunbeam.jobs.manifest.PluginManager") - @patch.object(Manifest, "load_latest_from_clusterdb_on_default") @patch("sunbeam.commands.openstack.Client") - def test_run_unit_in_error_state(self, client, manifest, pluginmanager): + def test_run_unit_in_error_state(self, client): self.jhelper.wait_until_active.side_effect = JujuWaitException( "Unit in error: placement/0" ) - step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) + step = DeployControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, DATABASE) with patch( "sunbeam.commands.openstack.read_config", Mock(return_value={}), @@ -140,7 +131,7 @@ def test_run_unit_in_error_state(self, client, manifest, pluginmanager): @patch("sunbeam.commands.openstack.Client") def test_is_skip_pristine(self, client): - step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) + step = DeployControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, DATABASE) with patch( "sunbeam.commands.openstack.read_config", Mock(side_effect=ConfigItemNotFoundException("not found")), @@ -151,7 +142,7 @@ def test_is_skip_pristine(self, client): @patch("sunbeam.commands.openstack.Client") def test_is_skip_subsequent_run(self, client): - step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) + step = DeployControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, DATABASE) with patch( "sunbeam.commands.openstack.read_config", Mock(return_value={"topology": "single", "database": "single"}), @@ -162,7 +153,7 @@ def test_is_skip_subsequent_run(self, client): @patch("sunbeam.commands.openstack.Client") def test_is_skip_database_changed(self, client): - step = DeployControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, DATABASE) + step = DeployControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, DATABASE) with patch( "sunbeam.commands.openstack.read_config", Mock(return_value={"topology": "single", "database": "multi"}), @@ -185,7 +176,7 @@ def setUp(self): self.client.start() self.read_config.start() self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) + self.manifest = Mock() def tearDown(self): self.client.stop() @@ -196,27 +187,28 @@ def test_run_pristine_installation(self): "not found" ) - step = ResizeControlPlaneStep(self.tfhelper, self.jhelper, "single", False) + step = ResizeControlPlaneStep(self.manifest, self.jhelper, "single", False) result = step.run() - self.tfhelper.write_tfvars.assert_called_once() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed(self): - self.tfhelper.apply.side_effect = TerraformException("apply failed...") + self.manifest.update_tfvar_and_apply_tf.side_effect = 
TerraformException( + "apply failed..." + ) - step = ResizeControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, False) + step = ResizeControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, False) result = step.run() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." def test_run_waiting_timed_out(self): self.jhelper.wait_until_active.side_effect = TimeoutException("timed out") - step = ResizeControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, False) + step = ResizeControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, False) result = step.run() self.jhelper.wait_until_active.assert_called_once() @@ -228,7 +220,7 @@ def test_run_unit_in_error_state(self): "Unit in error: placement/0" ) - step = ResizeControlPlaneStep(self.tfhelper, self.jhelper, TOPOLOGY, False) + step = ResizeControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, False) result = step.run() self.jhelper.wait_until_active.assert_called_once() @@ -236,14 +228,14 @@ def test_run_unit_in_error_state(self): assert result.message == "Unit in error: placement/0" def test_run_incompatible_topology(self): - step = ResizeControlPlaneStep(self.tfhelper, self.jhelper, "large", False) + step = ResizeControlPlaneStep(self.manifest, self.jhelper, "large", False) result = step.run() assert result.result_type == ResultType.FAILED assert "Cannot resize control plane to large" in result.message def test_run_force_incompatible_topology(self): - step = ResizeControlPlaneStep(self.tfhelper, self.jhelper, "large", True) + step = ResizeControlPlaneStep(self.manifest, self.jhelper, "large", True) result = step.run() self.jhelper.wait_until_active.assert_called_once() diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py index bc6bf347..3433605f 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py @@ -14,7 +14,6 @@ # limitations under the License. 
import asyncio -from pathlib import Path from unittest.mock import AsyncMock, Mock, patch import pytest @@ -57,11 +56,6 @@ def jhelper(): yield AsyncMock() -@pytest.fixture() -def tfhelper(): - yield Mock(path=Path()) - - @pytest.fixture() def read_config(): with patch("sunbeam.jobs.steps.read_config", return_value={}) as p: @@ -74,86 +68,76 @@ def manifest(): yield p -@pytest.fixture() -def pluginmanager(): - with patch("sunbeam.jobs.manifest.PluginManager") as p: - yield p - - class TestDeployMachineApplicationStep: - def test_is_skip(self, cclient, jhelper, tfhelper): + def test_is_skip(self, cclient, jhelper): jhelper.get_application.side_effect = ApplicationNotFoundException("not found") step = DeployMachineApplicationStep( - tfhelper, jhelper, "tfconfig", "app1", "model1" + manifest, jhelper, "tfconfig", "app1", "model1", "fake-plan" ) result = step.is_skip() jhelper.get_application.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_is_skip_application_already_deployed(self, cclient, jhelper, tfhelper): + def test_is_skip_application_already_deployed(self, cclient, jhelper): step = DeployMachineApplicationStep( - tfhelper, jhelper, "tfconfig", "app1", "model1" + manifest, jhelper, "tfconfig", "app1", "model1", "fake-plan" ) result = step.is_skip() jhelper.get_application.assert_called_once() assert result.result_type == ResultType.SKIPPED - def test_run_pristine_installation( - self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager - ): + def test_run_pristine_installation(self, cclient, jhelper, manifest): jhelper.get_application.side_effect = ApplicationNotFoundException("not found") step = DeployMachineApplicationStep( - tfhelper, jhelper, "tfconfig", "app1", "model1" + manifest, jhelper, "tfconfig", "app1", "model1", "fake-plan" ) result = step.run() jhelper.get_application.assert_called_once() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_already_deployed( - self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager - ): + def test_run_already_deployed(self, cclient, jhelper, manifest): + tfconfig = "tfconfig" + tfplan = "fake-plan" machines = ["1", "2"] application = Mock(units=[Mock(machine=Mock(id=m)) for m in machines]) jhelper.get_application.return_value = application step = DeployMachineApplicationStep( - tfhelper, jhelper, "tfconfig", "app1", "model1" + manifest, jhelper, tfconfig, "app1", "model1", tfplan ) result = step.run() jhelper.get_application.assert_called_once() - tfhelper.write_tfvars.assert_called_with({"machine_ids": machines}) - tfhelper.apply.assert_called_once() + manifest.update_tfvar_and_apply_tf.assert_called_with( + tfplan=tfplan, tfvar_config=tfconfig, extra_tfvars={"machine_ids": machines} + ) assert result.result_type == ResultType.COMPLETED - def test_run_tf_apply_failed( - self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager - ): - tfhelper.apply.side_effect = TerraformException("apply failed...") + def test_run_tf_apply_failed(self, cclient, jhelper, manifest): + manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + "apply failed..." 
+ ) step = DeployMachineApplicationStep( - tfhelper, jhelper, "tfconfig", "app1", "model1" + manifest, jhelper, "tfconfig", "app1", "model1", "fake-plan" ) result = step.run() - tfhelper.apply.assert_called_once() + manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." - def test_run_waiting_timed_out( - self, cclient, jhelper, tfhelper, read_config, manifest, pluginmanager - ): + def test_run_waiting_timed_out(self, cclient, jhelper, manifest): jhelper.wait_application_ready.side_effect = TimeoutException("timed out") step = DeployMachineApplicationStep( - tfhelper, jhelper, "tfconfig", "app1", "model1" + manifest, jhelper, "tfconfig", "app1", "model1", "fake-plan" ) result = step.run() diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_ldap.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_ldap.py index 7c9172b4..7bd4a22f 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_ldap.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_ldap.py @@ -13,7 +13,6 @@ # limitations under the License. import asyncio -from pathlib import Path from unittest.mock import AsyncMock, Mock, patch import pytest @@ -80,96 +79,80 @@ def __init__(self): self.name = "ldap" self.app_name = self.name.capitalize() self.tf_plan_location = 1 + self.tfplan = "fake-plan" + self._manifest = Mock() class TestAddLDAPDomainStep: def setup_method(self): self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) self.charm_config = {"domain-name": "dom1"} + self.plugin = FakeLDAPPlugin() def test_is_skip(self, cclient): - self.plugin = FakeLDAPPlugin() - step = AddLDAPDomainStep(self.tfhelper, self.jhelper, self.plugin, {}) + step = AddLDAPDomainStep(self.jhelper, self.plugin, {}) result = step.is_skip() assert result.result_type == ResultType.COMPLETED def test_has_prompts(self, cclient): - self.plugin = FakeLDAPPlugin() - step = AddLDAPDomainStep(self.tfhelper, self.jhelper, self.plugin, {}) + step = AddLDAPDomainStep(self.jhelper, self.plugin, {}) assert not step.has_prompts() def test_enable_first_domain(self, cclient, read_config, update_config, snap): - self.plugin = FakeLDAPPlugin() read_config.return_value = {} - step = AddLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + step = AddLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) result = step.run() - self.tfhelper.write_tfvars.assert_called_with( + step.tfhelper.write_tfvars.assert_called_with( { - "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } ) - self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() self.jhelper.wait_until_active.assert_called_once_with( "openstack", ["keystone", "keystone-ldap-dom1"], timeout=900 ) assert result.result_type == ResultType.COMPLETED def test_enable_second_domain(self, cclient, read_config, update_config, snap): - self.plugin = FakeLDAPPlugin() read_config.return_value = { - "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = AddLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, {"domain-name": "dom2"} - ) + step = AddLDAPDomainStep(self.jhelper, self.plugin, {"domain-name": "dom2"}) result = step.run() - self.tfhelper.write_tfvars.assert_called_with( + step.tfhelper.write_tfvars.assert_called_with( { - "ldap-channel": "2023.2/edge", "ldap-apps": { "dom1": {"domain-name": "dom1"}, "dom2": {"domain-name": "dom2"}, }, } ) - 
self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() self.jhelper.wait_until_active.assert_called_once_with( "openstack", ["keystone", "keystone-ldap-dom2"], timeout=900 ) assert result.result_type == ResultType.COMPLETED def test_enable_tf_apply_failed(self, cclient, read_config, update_config, snap): - self.plugin = FakeLDAPPlugin() read_config.return_value = {} - self.tfhelper.apply.side_effect = TerraformException("apply failed...") - step = AddLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + step = AddLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) + step.tfhelper.apply.side_effect = TerraformException("apply failed...") result = step.run() - self.tfhelper.apply.assert_called_once() + step.tfhelper.apply.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." def test_enable_waiting_timed_out(self, cclient, read_config, update_config, snap): self.jhelper.wait_until_active.side_effect = TimeoutException("timed out") - self.plugin = FakeLDAPPlugin() read_config.return_value = {} - step = AddLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + step = AddLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) result = step.run() - self.tfhelper.write_tfvars.assert_called_with( + step.tfhelper.write_tfvars.assert_called_with( { - "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } ) - self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() self.jhelper.wait_until_active.assert_called_once_with( "openstack", ["keystone", "keystone-ldap-dom1"], timeout=900 ) @@ -180,56 +163,51 @@ def test_enable_waiting_timed_out(self, cclient, read_config, update_config, sna class TestDisableLDAPDomainStep: def setup_method(self): self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) self.charm_config = {"domain-name": "dom1"} + self.plugin = FakeLDAPPlugin() def test_is_skip(self, cclient): - self.plugin = FakeLDAPPlugin() - step = DisableLDAPDomainStep(self.tfhelper, self.jhelper, self.plugin, "dom1") + step = DisableLDAPDomainStep(self.jhelper, self.plugin, "dom1") result = step.is_skip() assert result.result_type == ResultType.COMPLETED def test_has_prompts(self, cclient): - self.plugin = FakeLDAPPlugin() - step = DisableLDAPDomainStep(self.tfhelper, self.jhelper, self.plugin, "dom1") + step = DisableLDAPDomainStep(self.jhelper, self.plugin, "dom1") assert not step.has_prompts() def test_disable(self, cclient, read_config, update_config, snap): - self.plugin = FakeLDAPPlugin() read_config.return_value = { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = DisableLDAPDomainStep(self.tfhelper, self.jhelper, self.plugin, "dom1") + step = DisableLDAPDomainStep(self.jhelper, self.plugin, "dom1") step.run() - self.tfhelper.write_tfvars.assert_called_with( + step.tfhelper.write_tfvars.assert_called_with( {"ldap-channel": "2023.2/edge", "ldap-apps": {}} ) - self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() def test_disable_tf_apply_failed(self, cclient, read_config, update_config, snap): - self.tfhelper.apply.side_effect = TerraformException("apply failed...") - self.plugin = FakeLDAPPlugin() read_config.return_value = { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = DisableLDAPDomainStep(self.tfhelper, self.jhelper, self.plugin, 
"dom1") + step = DisableLDAPDomainStep(self.jhelper, self.plugin, "dom1") + step.tfhelper.apply.side_effect = TerraformException("apply failed...") result = step.run() - self.tfhelper.write_tfvars.assert_called_with( + step.tfhelper.write_tfvars.assert_called_with( {"ldap-channel": "2023.2/edge", "ldap-apps": {}} ) - self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." def test_disable_wrong_domain(self, cclient, read_config, update_config, snap): - self.plugin = FakeLDAPPlugin() read_config.return_value = { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = DisableLDAPDomainStep(self.tfhelper, self.jhelper, self.plugin, "dom2") + step = DisableLDAPDomainStep(self.jhelper, self.plugin, "dom2") result = step.run() assert result.result_type == ResultType.FAILED assert result.message == "Domain not found" @@ -238,86 +216,74 @@ def test_disable_wrong_domain(self, cclient, read_config, update_config, snap): class TestUpdateLDAPDomainStep: def setup_method(self): self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) self.charm_config = {"domain-name": "dom1"} + self.plugin = FakeLDAPPlugin() def test_is_skip(self, cclient): - self.plugin = FakeLDAPPlugin() - step = UpdateLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + step = UpdateLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) result = step.is_skip() assert result.result_type == ResultType.COMPLETED def test_has_prompts(self, cclient): - self.plugin = FakeLDAPPlugin() - step = UpdateLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + step = UpdateLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) assert not step.has_prompts() def test_update_domain(self, cclient, read_config, update_config, snap): - self.plugin = FakeLDAPPlugin() read_config.return_value = { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = UpdateLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + step = UpdateLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) result = step.run() - self.tfhelper.write_tfvars.assert_called_with( + step.tfhelper.write_tfvars.assert_called_with( { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } ) - self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() self.jhelper.wait_until_active.assert_called_once_with( "openstack", ["keystone", "keystone-ldap-dom1"], timeout=900 ) assert result.result_type == ResultType.COMPLETED def test_update_wrong_domain(self, cclient, read_config, update_config, snap): - self.plugin = FakeLDAPPlugin() read_config.return_value = { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = UpdateLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, {"domain-name": "dom2"} - ) + step = UpdateLDAPDomainStep(self.jhelper, self.plugin, {"domain-name": "dom2"}) result = step.run() assert result.result_type == ResultType.FAILED assert result.message == "Domain not found" def test_tf_apply_failed(self, cclient, read_config, update_config, snap): - self.tfhelper.apply.side_effect = TerraformException("apply failed...") - self.plugin = FakeLDAPPlugin() read_config.return_value = { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = 
UpdateLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + + step = UpdateLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) + + step.tfhelper.apply.side_effect = TerraformException("apply failed...") + result = step.run() - self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." def test_update_waiting_timed_out(self, cclient, read_config, update_config, snap): - self.jhelper.wait_until_active.side_effect = TimeoutException("timed out") - self.tfhelper.apply.side_effect = TerraformException("apply failed...") - self.plugin = FakeLDAPPlugin() read_config.return_value = { "ldap-channel": "2023.2/edge", "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } - step = UpdateLDAPDomainStep( - self.tfhelper, self.jhelper, self.plugin, self.charm_config - ) + + step = UpdateLDAPDomainStep(self.jhelper, self.plugin, self.charm_config) + + self.jhelper.wait_until_active.side_effect = TimeoutException("timed out") + step.tfhelper.apply.side_effect = TerraformException("apply failed...") + result = step.run() - self.tfhelper.apply.assert_called_once_with() + step.tfhelper.apply.assert_called_once_with() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py index f8eb7c0d..b55ebc95 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py @@ -87,31 +87,27 @@ class TestEnableOpenStackApplicationStep: def test_run( self, cclient, - read_config, jhelper, - tfhelper, osplugin, - manifest, - pluginmanager, ): - step = openstack.EnableOpenStackApplicationStep(tfhelper, jhelper, osplugin) + step = openstack.EnableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed( self, cclient, read_config, jhelper, tfhelper, osplugin, manifest, pluginmanager ): - tfhelper.apply.side_effect = TerraformException("apply failed...") + osplugin.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + "apply failed..." + ) - step = openstack.EnableOpenStackApplicationStep(tfhelper, jhelper, osplugin) + step = openstack.EnableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_not_called() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
@@ -121,34 +117,31 @@ def test_run_waiting_timed_out( ): jhelper.wait_until_active.side_effect = TimeoutException("timed out") - step = openstack.EnableOpenStackApplicationStep(tfhelper, jhelper, osplugin) + step = openstack.EnableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "timed out" class TestDisableOpenStackApplicationStep: - def test_run(self, cclient, read_config, jhelper, tfhelper, osplugin): - step = openstack.DisableOpenStackApplicationStep(tfhelper, jhelper, osplugin) + def test_run(self, cclient, jhelper, osplugin): + step = openstack.DisableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_tf_apply_failed( - self, cclient, read_config, jhelper, tfhelper, osplugin - ): - tfhelper.apply.side_effect = TerraformException("apply failed...") + def test_run_tf_apply_failed(self, cclient, jhelper, osplugin): + osplugin.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + "apply failed..." + ) - step = openstack.DisableOpenStackApplicationStep(tfhelper, jhelper, osplugin) + step = openstack.DisableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py index 32ce6f7b..b98f5e12 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py @@ -14,7 +14,6 @@ import asyncio import unittest -from pathlib import Path from unittest.mock import AsyncMock, Mock import pytest @@ -46,40 +45,54 @@ def run_sync(coro): class TestEnableUbuntuProApplicationStep(unittest.TestCase): def setUp(self): self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) + self.manifest = Mock() + self.tfplan = "fake-plan" self.token = "TOKENFORTESTING" def test_is_skip(self): - step = EnableUbuntuProApplicationStep(self.tfhelper, self.jhelper, self.token) + step = EnableUbuntuProApplicationStep( + self.manifest, self.jhelper, self.token, self.tfplan + ) result = step.is_skip() assert result.result_type == ResultType.COMPLETED def test_has_prompts(self): - step = EnableUbuntuProApplicationStep(self.tfhelper, self.jhelper, self.token) + step = EnableUbuntuProApplicationStep( + self.manifest, self.jhelper, self.token, self.tfplan + ) assert not step.has_prompts() def test_enable(self): - step = EnableUbuntuProApplicationStep(self.tfhelper, self.jhelper, self.token) + step = EnableUbuntuProApplicationStep( + self.manifest, self.jhelper, self.token, self.tfplan + ) result = step.run() - self.tfhelper.write_tfvars.assert_called_with({"token": self.token}) - self.tfhelper.apply.assert_called_once_with() + self.manifest.update_tfvar_and_apply_tf.assert_called_with( + tfplan=self.tfplan, tfvar_config=None, extra_tfvars={"token": self.token} + ) self.jhelper.wait_application_ready.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_enable_tf_apply_failed(self): - self.tfhelper.apply.side_effect = TerraformException("apply failed...") + self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + "apply failed..." + ) - step = EnableUbuntuProApplicationStep(self.tfhelper, self.jhelper, self.token) + step = EnableUbuntuProApplicationStep( + self.manifest, self.jhelper, self.token, self.tfplan + ) result = step.run() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
def test_enable_waiting_timed_out(self): self.jhelper.wait_application_ready.side_effect = TimeoutException("timed out") - step = EnableUbuntuProApplicationStep(self.tfhelper, self.jhelper, self.token) + step = EnableUbuntuProApplicationStep( + self.manifest, self.jhelper, self.token, self.tfplan + ) result = step.run() self.jhelper.wait_application_ready.assert_called_once() @@ -90,30 +103,34 @@ def test_enable_waiting_timed_out(self): class TestDisableUbuntuProApplicationStep(unittest.TestCase): def setUp(self): self.jhelper = AsyncMock() - self.tfhelper = Mock(path=Path()) + self.manifest = Mock() + self.tfplan = "fake-plan" def test_is_skip(self): - step = DisableUbuntuProApplicationStep(self.tfhelper) + step = DisableUbuntuProApplicationStep(self.manifest, self.tfplan) result = step.is_skip() assert result.result_type == ResultType.COMPLETED def test_has_prompts(self): - step = DisableUbuntuProApplicationStep(self.tfhelper) + step = DisableUbuntuProApplicationStep(self.manifest, self.tfplan) assert not step.has_prompts() def test_disable(self): - step = DisableUbuntuProApplicationStep(self.tfhelper) + step = DisableUbuntuProApplicationStep(self.manifest, self.tfplan) result = step.run() - self.tfhelper.write_tfvars.assert_called_with({"token": ""}) - self.tfhelper.apply.assert_called_once_with() + self.manifest.update_tfvar_and_apply_tf.assert_called_with( + tfplan=self.tfplan, tfvar_config=None, extra_tfvars={"token": ""} + ) assert result.result_type == ResultType.COMPLETED def test_disable_tf_apply_failed(self): - self.tfhelper.apply.side_effect = TerraformException("apply failed...") + self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + "apply failed..." + ) - step = DisableUbuntuProApplicationStep(self.tfhelper) + step = DisableUbuntuProApplicationStep(self.manifest, self.tfplan) result = step.run() - self.tfhelper.apply.assert_called_once() + self.manifest.update_tfvar_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
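# A minimal sketch (not part of the patch) of the calling pattern the diffs in
# this patch converge on: steps receive the Manifest and a plan name instead of
# a TerraformHelper, ask the manifest for the helper, and delegate tfvar
# handling and apply to the manifest. The Mock stand-ins and the "fake-plan"
# name are hypothetical, mirroring the unit tests above.
from unittest.mock import Mock

manifest = Mock()  # stands in for sunbeam.jobs.manifest.Manifest
tfplan = "fake-plan"

# Plugins no longer construct TerraformHelper themselves; the manifest caches
# and hands out one helper per plan:
tfhelper = manifest.get_tfhelper(tfplan)
tfhelper.init()

# Steps merge and apply terraform variables through the manifest rather than
# calling write_tfvars()/apply() on a helper directly:
manifest.update_tfvar_and_apply_tf(
    tfplan=tfplan,
    tfvar_config=None,
    extra_tfvars={"token": "TOKENFORTESTING"},
)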
From 6a334139d0d0562742abf1746a0d029dc3029d49 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Thu, 18 Jan 2024 11:48:04 +0530 Subject: [PATCH 09/27] Add unit tests to Manifest Add unit tests to Manifest class Rename few functions and arguments --- sunbeam-python/sunbeam/commands/bootstrap.py | 2 +- sunbeam-python/sunbeam/commands/configure.py | 2 +- sunbeam-python/sunbeam/commands/hypervisor.py | 10 +- sunbeam-python/sunbeam/commands/node.py | 2 +- sunbeam-python/sunbeam/commands/openstack.py | 12 +- sunbeam-python/sunbeam/commands/refresh.py | 2 +- sunbeam-python/sunbeam/commands/resize.py | 2 +- sunbeam-python/sunbeam/jobs/manifest.py | 79 +++- sunbeam-python/sunbeam/jobs/plugin.py | 2 +- sunbeam-python/sunbeam/jobs/steps.py | 6 +- sunbeam-python/sunbeam/plugins/caas/plugin.py | 2 +- sunbeam-python/sunbeam/plugins/dns/plugin.py | 2 +- .../sunbeam/plugins/interface/v1/base.py | 6 +- .../sunbeam/plugins/interface/v1/openstack.py | 10 +- sunbeam-python/sunbeam/plugins/ldap/plugin.py | 2 +- .../sunbeam/plugins/loadbalancer/plugin.py | 2 +- .../sunbeam/plugins/observability/plugin.py | 4 +- .../sunbeam/plugins/orchestration/plugin.py | 2 +- sunbeam-python/sunbeam/plugins/pro/plugin.py | 12 +- .../sunbeam/plugins/secrets/plugin.py | 2 +- .../sunbeam/plugins/telemetry/plugin.py | 2 +- .../sunbeam/plugins/vault/plugin.py | 2 +- sunbeam-python/sunbeam/versions.py | 33 ++ .../unit/sunbeam/commands/test_configure.py | 3 - .../unit/sunbeam/commands/test_hypervisor.py | 12 +- .../unit/sunbeam/commands/test_openstack.py | 12 +- sunbeam-python/tests/unit/sunbeam/conftest.py | 8 +- .../tests/unit/sunbeam/jobs/test_manifest.py | 361 ++++++++++++++++++ .../tests/unit/sunbeam/jobs/test_steps.py | 12 +- .../unit/sunbeam/plugins/test_openstack.py | 14 +- .../tests/unit/sunbeam/plugins/test_pro.py | 16 +- 31 files changed, 548 insertions(+), 90 deletions(-) create mode 100644 sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py diff --git a/sunbeam-python/sunbeam/commands/bootstrap.py b/sunbeam-python/sunbeam/commands/bootstrap.py index e1b7166e..23103bfb 100644 --- a/sunbeam-python/sunbeam/commands/bootstrap.py +++ b/sunbeam-python/sunbeam/commands/bootstrap.py @@ -145,7 +145,7 @@ def bootstrap( # Validate manifest file manifest_obj = None if manifest: - manifest_obj = Manifest.load(manifest_file=manifest, on_default=True) + manifest_obj = Manifest.load(manifest_file=manifest, include_defaults=True) else: manifest_obj = Manifest.get_default_manifest() diff --git a/sunbeam-python/sunbeam/commands/configure.py b/sunbeam-python/sunbeam/commands/configure.py index 6769a4f0..a8d0b25b 100644 --- a/sunbeam-python/sunbeam/commands/configure.py +++ b/sunbeam-python/sunbeam/commands/configure.py @@ -729,7 +729,7 @@ def _configure( preflight_checks.append(VerifyBootstrappedCheck()) run_preflight_checks(preflight_checks, console) - manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) + manifest_obj = Manifest.load_latest_from_clusterdb(include_defaults=True) name = utils.get_fqdn() tfplan = "demo-setup" diff --git a/sunbeam-python/sunbeam/commands/hypervisor.py b/sunbeam-python/sunbeam/commands/hypervisor.py index 5e20f4e0..69c449e0 100644 --- a/sunbeam-python/sunbeam/commands/hypervisor.py +++ b/sunbeam-python/sunbeam/commands/hypervisor.py @@ -103,8 +103,10 @@ def run(self, status: Optional[Status] = None) -> Result: } try: - self.manifest.update_tfvar_and_apply_tf( - tfplan=self.tfplan, tfvar_config=self._CONFIG, extra_tfvars=extra_tfvars + self.manifest.update_tfvars_and_apply_tf( + 
tfplan=self.tfplan, + tfvar_config=self._CONFIG, + override_tfvars=extra_tfvars, ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -345,10 +347,10 @@ def is_skip(self, status: Optional[Status] = None) -> Result: def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy hypervisor""" try: - self.manifest.update_tfvar_and_apply_tf( + self.manifest.update_tfvars_and_apply_tf( tfplan=self.tfplan, tfvar_config=self._CONFIG, - extra_tfvars=self.extra_tfvars, + override_tfvars=self.extra_tfvars, ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/commands/node.py b/sunbeam-python/sunbeam/commands/node.py index 30c45365..3a6abf9e 100644 --- a/sunbeam-python/sunbeam/commands/node.py +++ b/sunbeam-python/sunbeam/commands/node.py @@ -207,7 +207,7 @@ def join( controller = CONTROLLER data_location = snap.paths.user_data jhelper = JujuHelper(data_location) - manifest_obj = Manifest.load_latest_from_cluserdb(on_default=True) + manifest_obj = Manifest.load_latest_from_cluserdb(include_defaults=True) plan1 = [ JujuLoginStep(data_location), diff --git a/sunbeam-python/sunbeam/commands/openstack.py b/sunbeam-python/sunbeam/commands/openstack.py index 237e310f..0021e086 100644 --- a/sunbeam-python/sunbeam/commands/openstack.py +++ b/sunbeam-python/sunbeam/commands/openstack.py @@ -216,8 +216,10 @@ def run(self, status: Optional[Status] = None) -> Result: ) try: self.update_status(status, "deploying services") - self.manifest.update_tfvar_and_apply_tf( - tfplan=self.tfplan, tfvar_config=self._CONFIG, extra_tfvars=extra_tfvars + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, + tfvar_config=self._CONFIG, + override_tfvars=extra_tfvars, ) except TerraformException as e: LOG.exception("Error configuring cloud") @@ -330,8 +332,10 @@ def run(self, status: Optional[Status] = None) -> Result: self.update_status(status, "scaling services") try: - self.manifest.update_tfvar_and_apply_tf( - tfplan=self.tfplan, tfvar_config=self._CONFIG, extra_tfvars=extra_tfvars + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, + tfvar_config=self._CONFIG, + override_tfvars=extra_tfvars, ) except TerraformException as e: LOG.exception("Error resizing control plane") diff --git a/sunbeam-python/sunbeam/commands/refresh.py b/sunbeam-python/sunbeam/commands/refresh.py index a49c94c1..08c96fd5 100644 --- a/sunbeam-python/sunbeam/commands/refresh.py +++ b/sunbeam-python/sunbeam/commands/refresh.py @@ -80,7 +80,7 @@ def refresh( run_plan([AddManifestStep(manifest)], console) else: LOG.debug("Getting latest manifest") - manifest_obj = Manifest.load_latest_from_cluserdb(on_default=True) + manifest_obj = Manifest.load_latest_from_cluserdb(include_defaults=True) LOG.debug(f"Manifest object created with no errors: {manifest_obj}") tfplan = "openstack-plan" diff --git a/sunbeam-python/sunbeam/commands/resize.py b/sunbeam-python/sunbeam/commands/resize.py index ed7d198a..b3dd5fed 100644 --- a/sunbeam-python/sunbeam/commands/resize.py +++ b/sunbeam-python/sunbeam/commands/resize.py @@ -37,7 +37,7 @@ def resize(topology: str, force: bool = False) -> None: """Expand the control plane to fit available nodes.""" - manifest_obj = Manifest.load_latest_from_clusterdb(on_default=True) + manifest_obj = Manifest.load_latest_from_clusterdb(include_defaults=True) tfplan = "openstack-plan" data_location = snap.paths.user_data diff --git a/sunbeam-python/sunbeam/jobs/manifest.py 
b/sunbeam-python/sunbeam/jobs/manifest.py
index b2090668..30e5ac19 100644
--- a/sunbeam-python/sunbeam/jobs/manifest.py
+++ b/sunbeam-python/sunbeam/jobs/manifest.py
@@ -53,6 +53,12 @@
 """
 
 
+class MissingTerraformInfoException(Exception):
+    """An Exception raised when terraform information is missing in manifest"""
+
+    pass
+
+
 @dataclass
 class JujuManifest:
     bootstrap_args: List[str] = Field(
@@ -89,22 +95,25 @@ class Manifest:
     terraform: Optional[Dict[str, TerraformManifest]] = None
 
     @classmethod
-    def load(cls, manifest_file: Path, on_default: bool = False) -> "Manifest":
-        """Load the manifest with the provided file input"""
-        if on_default:
+    def load(cls, manifest_file: Path, include_defaults: bool = False) -> "Manifest":
+        """Load the manifest with the provided file input
+
+        If include_defaults is True, load the manifest over the default manifest.
+        """
+        if include_defaults:
             return cls.load_on_default(manifest_file)
 
         with manifest_file.open() as file:
             return Manifest(**yaml.safe_load(file))
 
     @classmethod
-    def load_latest_from_clusterdb(cls, on_default: bool = False) -> "Manifest":
+    def load_latest_from_clusterdb(cls, include_defaults: bool = False) -> "Manifest":
         """Load the latest manifest from clusterdb
 
-        If on_default is True, load this manifest data over the default
+        If include_defaults is True, load the manifest over the default
         values.
         """
-        if on_default:
+        if include_defaults:
             return cls.load_latest_from_clusterdb_on_default()
 
         try:
@@ -200,6 +209,11 @@ def get_tfhelper(self, tfplan: str) -> TerraformHelper:
         if self.tf_helpers.get(tfplan):
             return self.tf_helpers.get(tfplan)
 
+        if not (self.terraform and self.terraform.get(tfplan)):
+            raise MissingTerraformInfoException(
+                f"Terraform information missing in manifest for {tfplan}"
+            )
+
         tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan, tfplan)
         src = self.terraform.get(tfplan).source
         dst = self.snap.paths.user_common / "etc" / tfplan_dir
@@ -215,9 +229,27 @@ def get_tfhelper(self, tfplan: str) -> TerraformHelper:
 
         return self.tf_helpers[tfplan]
 
-    def update_tfvar_and_apply_tf(
-        self, tfplan: str, tfvar_config: Optional[str] = None, extra_tfvars: dict = {}
+    def update_tfvars_and_apply_tf(
+        self,
+        tfplan: str,
+        tfvar_config: Optional[str] = None,
+        override_tfvars: dict = {},
     ) -> None:
+        """Update terraform vars and apply the terraform plan.
+
+        Gather tfvars from the cluster db (keyed by tfvar_config), from the
+        manifest (via the charm manifest tfvar maps of core and plugins) and
+        from the caller-provided override_tfvars, merged in that order so that
+        override_tfvars takes the highest priority.
+        Get the tfhelper object for tfplan, write the tfvars and apply the plan.
+ + :param tfplan: Terraform plan to use to get tfhelper + :type tfplan: str + :param tfvar_config: TerraformVar key name used to save tfvar in clusterdb + :type tfvar_config: str or None + :param override_tfvars: Terraform vars to override + :type override_tfvars: dict + """ tfvars = {} if tfvar_config: try: @@ -225,8 +257,12 @@ def update_tfvar_and_apply_tf( except ConfigItemNotFoundException: pass - tfvars.update(extra_tfvars) - tfvars.update(self.get_tfvars(tfplan)) + # NOTE: It is expected for Manifest to contain all previous changes + # So override tfvars from configdb to defaults if not specified in + # manifest file + tfvars.update(self._get_tfvars(tfplan)) + + tfvars.update(override_tfvars) if tfvar_config: update_config(self.client, tfvar_config, tfvars) @@ -234,17 +270,28 @@ def update_tfvar_and_apply_tf( tfhelper.write_tfvars(tfvars) tfhelper.apply() - def get_tfvars(self, tfplan: str) -> dict: + def _get_tfvars(self, tfplan: str) -> dict: + """Get tfvars from the manifest. + + CHARM_MANIFEST_TFVARS_MAP holds the mapping of CharmManifest and the + terraform variable name for each CharmManifest attribute. + For each terraform variable in CHARM_MANIFEST_TFVARS_MAP, get the + corresponding value from Manifest and return all terraform variables + as dict. + """ tfvars = {} tfvar_map = copy.deepcopy(CHARM_MANIFEST_TFVARS_MAP) tfvar_map_plugin = PluginManager().get_all_plugin_manfiest_tfvar_map() utils.merge_dict(tfvar_map, tfvar_map_plugin) - for charm, value in tfvar_map.get(tfplan, {}).items(): - manifest_charm = asdict(self.charms.get(charm)) - for key, val in value.items(): - if manifest_charm.get(key): - tfvars[val] = manifest_charm.get(key) + for charm, per_charm_tfvar_map in tfvar_map.get(tfplan, {}).items(): + charm_ = self.charms.get(charm) + if charm_: + manifest_charm = asdict(charm_) + for charm_attribute, tfvar_name in per_charm_tfvar_map.items(): + charm_attribute_ = manifest_charm.get(charm_attribute) + if charm_attribute_: + tfvars[tfvar_name] = charm_attribute_ return tfvars diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index 33cfc28a..f278a7cd 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -260,7 +260,7 @@ def get_all_plugin_manifests(cls) -> dict: plugins = cls.get_all_plugin_classes() for klass in plugins: plugin = klass() - m_dict = plugin.manifest_part() + m_dict = plugin.manifest_defaults() utils.merge_dict(manifest, m_dict) return manifest diff --git a/sunbeam-python/sunbeam/jobs/steps.py b/sunbeam-python/sunbeam/jobs/steps.py index ba50f0cc..0b55f0df 100644 --- a/sunbeam-python/sunbeam/jobs/steps.py +++ b/sunbeam-python/sunbeam/jobs/steps.py @@ -90,8 +90,10 @@ def run(self, status: Optional[Status] = None) -> Result: try: extra_tfvars = self.extra_tfvars() extra_tfvars.update({"machine_ids": machine_ids}) - self.manifest.update_tfvar_and_apply_tf( - tfplan=self.tfplan, tfvar_config=self.config, extra_tfvars=extra_tfvars + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, + tfvar_config=self.config, + override_tfvars=extra_tfvars, ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index 16a83cf2..458fc956 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -87,7 +87,7 @@ def __init__(self) -> None: ) self.configure_plan = "caas-setup" - def manifest_part(self) 
-> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "charms": { diff --git a/sunbeam-python/sunbeam/plugins/dns/plugin.py b/sunbeam-python/sunbeam/plugins/dns/plugin.py index 0851bbb9..f71fa4de 100644 --- a/sunbeam-python/sunbeam/plugins/dns/plugin.py +++ b/sunbeam-python/sunbeam/plugins/dns/plugin.py @@ -52,7 +52,7 @@ def __init__(self) -> None: ) self.nameservers = None - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "charms": { diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/base.py b/sunbeam-python/sunbeam/plugins/interface/v1/base.py index c867fcad..8bc41db8 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/base.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/base.py @@ -189,7 +189,7 @@ def fetch_plugin_version(self, plugin: str) -> Version: return Version(version) - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Return manifest part of the plugin. Define manifest charms involved and default values for charm attributes @@ -209,6 +209,8 @@ def manifest_part(self) -> dict: } } } + + The plugins that uses terraform plan should override this function. """ return {} @@ -226,6 +228,8 @@ def charm_manifest_tfvar_map(self) -> dict: } } } + + The plugins that uses terraform plan should override this function. """ return {} diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py index bf266ec8..14e12816 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py @@ -124,7 +124,7 @@ def manifest(self) -> Manifest: if self._manifest: return self._manifest - self._manifest = Manifest.load_latest_from_clusterdb(on_default=True) + self._manifest = Manifest.load_latest_from_clusterdb(include_defaults=True) return self._manifest def is_openstack_control_plane(self) -> bool: @@ -422,10 +422,10 @@ def run(self, status: Optional[Status] = None) -> Result: extra_tfvars = self.plugin.set_tfvars_on_enable() try: - self.plugin.manifest.update_tfvar_and_apply_tf( + self.plugin.manifest.update_tfvars_and_apply_tf( tfplan=self.plugin.tfplan, tfvar_config=config_key, - extra_tfvars=extra_tfvars, + override_tfvars=extra_tfvars, ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -483,10 +483,10 @@ def run(self, status: Optional[Status] = None) -> Result: else: # Update terraform variables to disable the application extra_tfvars = self.plugin.set_tfvars_on_disable() - self.plugin.manifest.update_tfvar_and_apply_tf( + self.plugin.manifest.update_tfvars_and_apply_tf( tfplan=self.plugin.tfplan, tfvar_config=config_key, - extra_tfvars=extra_tfvars, + override_tfvars=extra_tfvars, ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/sunbeam/plugins/ldap/plugin.py b/sunbeam-python/sunbeam/plugins/ldap/plugin.py index bda7414d..d0081199 100644 --- a/sunbeam-python/sunbeam/plugins/ldap/plugin.py +++ b/sunbeam-python/sunbeam/plugins/ldap/plugin.py @@ -261,7 +261,7 @@ def __init__(self) -> None: ) self.config_flags = None - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"keystone-ldap": {"channel": OPENSTACK_CHANNEL}}} diff --git a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py index 
c3bb2fb3..64133a5a 100644 --- a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py +++ b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py @@ -36,7 +36,7 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"octavia": {"channel": OPENSTACK_CHANNEL}}} diff --git a/sunbeam-python/sunbeam/plugins/observability/plugin.py b/sunbeam-python/sunbeam/plugins/observability/plugin.py index ee676beb..c95b93a3 100644 --- a/sunbeam-python/sunbeam/plugins/observability/plugin.py +++ b/sunbeam-python/sunbeam/plugins/observability/plugin.py @@ -444,10 +444,10 @@ def manifest(self) -> Manifest: if self._manifest: return self._manifest - self._manifest = Manifest.load_latest_from_clusterdb(on_default=True) + self._manifest = Manifest.load_latest_from_clusterdb(include_defaults=True) return self._manifest - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "terraform": { diff --git a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py index fabc421f..52ddba36 100644 --- a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py +++ b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py @@ -36,7 +36,7 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"heat": {"channel": OPENSTACK_CHANNEL}}} diff --git a/sunbeam-python/sunbeam/plugins/pro/plugin.py b/sunbeam-python/sunbeam/plugins/pro/plugin.py index 9e2e9e05..639259f4 100644 --- a/sunbeam-python/sunbeam/plugins/pro/plugin.py +++ b/sunbeam-python/sunbeam/plugins/pro/plugin.py @@ -72,8 +72,8 @@ def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to deploy ubuntu-pro""" extra_tfvars = {"token": self.token} try: - self.manifest.update_tfvar_and_apply_tf( - tfplan=self.tfplan, tfvar_config=None, extra_tfvars=extra_tfvars + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, tfvar_config=None, override_tfvars=extra_tfvars ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -139,8 +139,8 @@ def run(self, status: Optional[Status] = None) -> Result: """Apply terraform configuration to disable ubuntu-pro""" extra_tfvars = {"token": ""} try: - self.manifest.update_tfvar_and_apply_tf( - tfplan=self.tfplan, tfvar_config=None, extra_tfvars=extra_tfvars + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, tfvar_config=None, override_tfvars=extra_tfvars ) except TerraformException as e: return Result(ResultType.FAILED, str(e)) @@ -164,10 +164,10 @@ def manifest(self) -> Manifest: if self._manifest: return self._manifest - self._manifest = Manifest.load_latest_from_clusterdb(on_default=True) + self._manifest = Manifest.load_latest_from_clusterdb(include_defaults=True) return self._manifest - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "terraform": { diff --git a/sunbeam-python/sunbeam/plugins/secrets/plugin.py b/sunbeam-python/sunbeam/plugins/secrets/plugin.py index 95f91d54..58738808 100644 --- a/sunbeam-python/sunbeam/plugins/secrets/plugin.py +++ b/sunbeam-python/sunbeam/plugins/secrets/plugin.py @@ -38,7 +38,7 @@ 
def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"barbican": {"channel": OPENSTACK_CHANNEL}}} diff --git a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py index 787f4795..54dfb038 100644 --- a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py +++ b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py @@ -44,7 +44,7 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "charms": { diff --git a/sunbeam-python/sunbeam/plugins/vault/plugin.py b/sunbeam-python/sunbeam/plugins/vault/plugin.py index 9cc75bab..80824837 100644 --- a/sunbeam-python/sunbeam/plugins/vault/plugin.py +++ b/sunbeam-python/sunbeam/plugins/vault/plugin.py @@ -42,7 +42,7 @@ def __init__(self) -> None: tf_plan_location=TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO, ) - def manifest_part(self) -> dict: + def manifest_defaults(self) -> dict: """Manifest pluing part in dict format.""" return {"charms": {"vault": {"channel": VAULT_CHANNEL}}} diff --git a/sunbeam-python/sunbeam/versions.py b/sunbeam-python/sunbeam/versions.py index 715500f9..84279950 100644 --- a/sunbeam-python/sunbeam/versions.py +++ b/sunbeam-python/sunbeam/versions.py @@ -105,6 +105,39 @@ "demo-setup": "demo-setup", } + +""" +Format of CHARM_MANIFEST_TFVAR_MAP +{ + : { + : { + : + ... + ... + }, + ... + }, + ... +} + +Example: +{ + "openstack-plan": { + "keystone": { + "channel": "keystone-channel", + "revision": "keystone-revision", + "config": "keystone-config" + }, + }, + "microk8s-plan": { + "microk8s": { + "channel": "charm_microk8s_channel", + "revision": "charm_microk8s_revision", + "config": "charm_microk8s_config", + }, + }, +} +""" K8S_CHARMS = {} K8S_CHARMS |= OPENSTACK_SERVICES_K8S K8S_CHARMS |= OVN_SERVICES_K8S diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_configure.py b/sunbeam-python/tests/unit/sunbeam/commands/test_configure.py index ad8197f0..950c4cb2 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_configure.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_configure.py @@ -462,7 +462,6 @@ def test_good_choice( ) assert name == "eth1" output = console.file.getvalue() - print(repr(output)) assert output == self.short_question def test_good_choice_default( @@ -478,7 +477,6 @@ def test_good_choice_default( ) assert name == "eth2" output = console.file.getvalue() - print(repr(output)) expected = "Short Question [eth1/eth2] (eth2): " assert output == expected @@ -496,5 +494,4 @@ def test_default_missing_from_machine( # The default eth3 does not exist so it was discarded. 
assert name == "eth1" output = console.file.getvalue() - print(repr(output)) assert output == self.short_question diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py b/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py index c72d7c2c..098eb1b7 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_hypervisor.py @@ -99,18 +99,18 @@ def test_run_pristine_installation(self): step = DeployHypervisorApplicationStep(self.manifest, self.jhelper) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed(self): - self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + self.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) step = DeployHypervisorApplicationStep(self.manifest, self.jhelper) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." @@ -386,18 +386,18 @@ def test_run_pristine_installation(self): step = ReapplyHypervisorTerraformPlanStep(self.manifest, self.jhelper) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed(self): - self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + self.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) step = ReapplyHypervisorTerraformPlanStep(self.manifest, self.jhelper) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py index f2e4017a..5315d55a 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py @@ -77,12 +77,12 @@ def test_run_pristine_installation(self, client): ): result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED @patch("sunbeam.commands.openstack.Client") def test_run_tf_apply_failed(self, client): - self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + self.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) @@ -93,7 +93,7 @@ def test_run_tf_apply_failed(self, client): ): result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
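The tests above exercise steps that delegate to update_tfvars_and_apply_tf; per its docstring earlier in this patch, tfvars are merged from the cluster DB first, then from the manifest, then from the caller's override_tfvars, so overrides win. A minimal sketch of that ordering (the helper name and sample keys below are illustrative, not code from the patch):

    import copy

    def merge_tfvars(config_db_tfvars: dict, manifest_tfvars: dict, override_tfvars: dict) -> dict:
        # Lowest priority: tfvars previously stored in the cluster config DB.
        tfvars = copy.deepcopy(config_db_tfvars)
        # Manifest-derived tfvars replace stored values, so the manifest stays
        # the source of truth for anything it specifies.
        tfvars.update(manifest_tfvars)
        # Caller-supplied overrides win over everything else.
        tfvars.update(override_tfvars)
        return tfvars

    # Example: an override passed by a step takes precedence over both sources.
    merged = merge_tfvars(
        {"keystone-channel": "2023.1/edge", "machine_ids": []},
        {"keystone-channel": "2023.1/stable"},
        {"machine_ids": ["0", "1"]},
    )
    assert merged == {"keystone-channel": "2023.1/stable", "machine_ids": ["0", "1"]}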
@@ -190,18 +190,18 @@ def test_run_pristine_installation(self): step = ResizeControlPlaneStep(self.manifest, self.jhelper, "single", False) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed(self): - self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + self.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) step = ResizeControlPlaneStep(self.manifest, self.jhelper, TOPOLOGY, False) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." diff --git a/sunbeam-python/tests/unit/sunbeam/conftest.py b/sunbeam-python/tests/unit/sunbeam/conftest.py index 5f2ce4e4..57f74bd6 100644 --- a/sunbeam-python/tests/unit/sunbeam/conftest.py +++ b/sunbeam-python/tests/unit/sunbeam/conftest.py @@ -31,7 +31,7 @@ def snap_env(): "SNAP_INSTANCE_NAME": "", "SNAP_NAME": "mysnap", "SNAP_REVISION": "2", - "SNAP_USER_COMMON": "", + "SNAP_USER_COMMON": "/var/snap/mysnap/usercommon", "SNAP_USER_DATA": "", "SNAP_VERSION": "1.2.3", "SNAP_REAL_HOME": "/home/ubuntu", @@ -68,3 +68,9 @@ def check_output(): def environ(): with patch("os.environ") as p: yield p + + +@pytest.fixture +def copytree(): + with patch("shutil.copytree") as p: + yield p diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py new file mode 100644 index 00000000..607c9a7d --- /dev/null +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py @@ -0,0 +1,361 @@ +# Copyright (c) 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from pathlib import Path +from unittest.mock import patch + +import pytest +import yaml +from pydantic import ValidationError + +import sunbeam.commands.terraform as terraform +import sunbeam.jobs.manifest as manifest +from sunbeam.clusterd.service import ClusterServiceUnavailableException +from sunbeam.jobs.common import ResultType +from sunbeam.versions import OPENSTACK_CHANNEL, TERRAFORM_DIR_NAMES + +test_manifest = """ +charms: + keystone: + channel: 2023.1/stable + revision: 234 + config: + debug: True + glance: + channel: 2023.1/stable + revision: 134 +terraform: + openstack-plan: + source: /home/ubuntu/openstack-tf + hypervisor-plan: + source: /home/ubuntu/hypervisor-tf +""" + +malformed_test_manifest = """ +charms: + keystone: + channel: 2023.1/stable + revision: 234 + conf +""" + +test_manifest_invalid_values = """ +charms: + keystone: + channel: 2023.1/stable + revision: 234 + # Config value should be dictionary but provided str + config: debug +""" + +test_manifest_incorrect_terraform_key = """ +charms: + keystone: + channel: 2023.1/stable + revision: 234 + config: + debug: True +terraform: + fake-plan: + source: /home/ubuntu/tfplan +""" + + +@pytest.fixture() +def cclient(): + with patch("sunbeam.jobs.manifest.clusterClient") as p: + yield p + + +@pytest.fixture() +def pluginmanager(): + with patch("sunbeam.jobs.manifest.PluginManager") as p: + yield p + + +@pytest.fixture() +def tfhelper(): + with patch("sunbeam.jobs.manifest.TerraformHelper") as p: + yield p + + +@pytest.fixture() +def read_config(): + with patch("sunbeam.jobs.manifest.read_config") as p: + yield p + + +@pytest.fixture() +def update_config(): + with patch("sunbeam.jobs.manifest.update_config") as p: + yield p + + +class TestManifest: + def test_load(self, mocker, snap, cclient, pluginmanager, tmpdir): + mocker.patch.object(manifest, "Snap", return_value=snap) + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(test_manifest) + manifest_obj = manifest.Manifest.load(manifest_file) + ks_manifest = manifest_obj.charms.get("keystone") + assert ks_manifest.channel == "2023.1/stable" + assert ks_manifest.revision == 234 + assert ks_manifest.config == {"debug": True} + + # Assert defaults does not exist + assert "nova" not in manifest_obj.charms.keys() + + def test_load_on_default(self, mocker, snap, cclient, pluginmanager, tmpdir): + mocker.patch.object(manifest, "Snap", return_value=snap) + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(test_manifest) + manifest_obj = manifest.Manifest.load(manifest_file, include_defaults=True) + + # Check updates from manifest file + ks_manifest = manifest_obj.charms.get("keystone") + assert ks_manifest.channel == "2023.1/stable" + assert ks_manifest.revision == 234 + assert ks_manifest.config == {"debug": True} + + # Check default ones + nova_manifest = manifest_obj.charms.get("nova") + assert nova_manifest.channel == OPENSTACK_CHANNEL + assert nova_manifest.revision is None + assert nova_manifest.config is None + + def test_load_latest_from_clusterdb(self, mocker, snap, cclient, pluginmanager): + mocker.patch.object(manifest, "Snap", return_value=snap) + cclient().cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_obj = manifest.Manifest.load_latest_from_clusterdb() + ks_manifest = manifest_obj.charms.get("keystone") + assert ks_manifest.channel == "2023.1/stable" + assert ks_manifest.revision == 234 + assert ks_manifest.config == {"debug": True} + + # Assert 
defaults does not exist + assert "nova" not in manifest_obj.charms.keys() + + def test_load_latest_from_clusterdb_on_default( + self, mocker, snap, cclient, pluginmanager + ): + mocker.patch.object(manifest, "Snap", return_value=snap) + cclient().cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_obj = manifest.Manifest.load_latest_from_clusterdb( + include_defaults=True + ) + ks_manifest = manifest_obj.charms.get("keystone") + assert ks_manifest.channel == "2023.1/stable" + assert ks_manifest.revision == 234 + assert ks_manifest.config == {"debug": True} + + # Check default ones + nova_manifest = manifest_obj.charms.get("nova") + assert nova_manifest.channel == OPENSTACK_CHANNEL + assert nova_manifest.revision is None + assert nova_manifest.config is None + + def test_get_default_manifest(self, mocker, snap, cclient, pluginmanager): + mocker.patch.object(manifest, "Snap", return_value=snap) + default_manifest = manifest.Manifest.get_default_manifest() + nova_manifest = default_manifest.charms.get("nova") + assert nova_manifest.channel == OPENSTACK_CHANNEL + assert nova_manifest.revision is None + assert nova_manifest.config is None + + def test_malformed_manifest(self, mocker, snap, cclient, pluginmanager, tmpdir): + mocker.patch.object(manifest, "Snap", return_value=snap) + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(malformed_test_manifest) + with pytest.raises(yaml.scanner.ScannerError): + manifest.Manifest.load(manifest_file) + + def test_load_manifest_invalid_values( + self, mocker, snap, cclient, pluginmanager, tmpdir + ): + mocker.patch.object(manifest, "Snap", return_value=snap) + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(test_manifest_invalid_values) + with pytest.raises(ValidationError): + manifest.Manifest.load(manifest_file) + + def test_validate_terraform_keys( + self, mocker, snap, cclient, pluginmanager, tmpdir + ): + mocker.patch.object(manifest, "Snap", return_value=snap) + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(test_manifest_incorrect_terraform_key) + with pytest.raises(ValueError): + manifest.Manifest.load(manifest_file) + + def test_get_tfhelper(self, mocker, snap, copytree, cclient, pluginmanager): + tfplan = "microk8s-plan" + mocker.patch.object(manifest, "Snap", return_value=snap) + mocker.patch.object(terraform, "Snap", return_value=snap) + cclient().cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_obj = manifest.Manifest.load_latest_from_clusterdb( + include_defaults=True + ) + tfhelper = manifest_obj.get_tfhelper(tfplan) + tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) + copytree.assert_called_once_with( + Path(snap.paths.snap / "etc" / tfplan_dir), + Path(snap.paths.user_common / "etc" / tfplan_dir), + dirs_exist_ok=True, + ) + assert tfhelper.plan == tfplan + + def test_get_tfhelper_tfplan_override_in_manifest( + self, mocker, snap, copytree, cclient, pluginmanager + ): + tfplan = "openstack-plan" + mocker.patch.object(manifest, "Snap", return_value=snap) + mocker.patch.object(terraform, "Snap", return_value=snap) + cclient().cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_obj = manifest.Manifest.load_latest_from_clusterdb( + include_defaults=True + ) + tfhelper = manifest_obj.get_tfhelper(tfplan) + tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) + test_manifest_dict = yaml.safe_load(test_manifest) + copytree.assert_called_once_with( + 
Path( + test_manifest_dict.get("terraform", {}) + .get("openstack-plan", {}) + .get("source") + ), + Path(snap.paths.user_common / "etc" / tfplan_dir), + dirs_exist_ok=True, + ) + assert tfhelper.plan == tfplan + + def test_get_tfhelper_multiple_calls( + self, mocker, snap, copytree, cclient, pluginmanager + ): + tfplan = "openstack-plan" + mocker.patch.object(manifest, "Snap", return_value=snap) + mocker.patch.object(terraform, "Snap", return_value=snap) + cclient().cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_obj = manifest.Manifest.load_latest_from_clusterdb( + include_defaults=True + ) + manifest_obj.get_tfhelper(tfplan) + assert copytree.call_count == 1 + # Calling second time should return the value from cache instead of creating + # new object + manifest_obj.get_tfhelper(tfplan) + assert copytree.call_count == 1 + + def test_get_tfhelper_missing_terraform_source( + self, mocker, snap, copytree, cclient, pluginmanager + ): + tfplan = "microk8s-plan" + mocker.patch.object(manifest, "Snap", return_value=snap) + mocker.patch.object(terraform, "Snap", return_value=snap) + cclient().cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_obj = manifest.Manifest.load_latest_from_clusterdb( + include_defaults=False + ) + with pytest.raises(manifest.MissingTerraformInfoException): + manifest_obj.get_tfhelper(tfplan) + copytree.assert_not_called() + + def test_update_tfvars_and_apply_tf( + self, + mocker, + snap, + copytree, + cclient, + pluginmanager, + tfhelper, + read_config, + update_config, + ): + tfplan = "openstack-plan" + extra_tfvars = { + "ldap-apps": {"dom2": {"domain-name": "dom2"}}, + "glance-revision": 555, + } + read_config.return_value = { + "keystone-channel": OPENSTACK_CHANNEL, + "neutron-channel": "2023.1/stable", + "ldap-apps": {"dom1": {"domain-name": "dom1"}}, + } + mocker.patch.object(manifest, "Snap", return_value=snap) + mocker.patch.object(terraform, "Snap", return_value=snap) + cclient().cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_obj = manifest.Manifest.load_latest_from_clusterdb( + include_defaults=True + ) + manifest_obj.update_tfvars_and_apply_tf(tfplan, "fake-config", extra_tfvars) + manifest_obj.tf_helpers.get(tfplan).write_tfvars.assert_called_once() + manifest_obj.tf_helpers.get(tfplan).apply.assert_called_once() + applied_tfvars = manifest_obj.tf_helpers.get( + tfplan + ).write_tfvars.call_args.args[0] + + # Assert values coming from manifest and not in config db + assert applied_tfvars.get("glance-channel") == "2023.1/stable" + + # Assert values coming from manifest and in config db + assert applied_tfvars.get("keystone-channel") == "2023.1/stable" + assert applied_tfvars.get("keystone-revision") == 234 + assert applied_tfvars.get("keystone-config") == {"debug": True} + + # Assert values coming from default not in config db + assert applied_tfvars.get("nova-channel") == OPENSTACK_CHANNEL + + # Assert values coming from default and in config db + assert applied_tfvars.get("neutron-channel") == OPENSTACK_CHANNEL + + # Assert values coming from extra_tfvars and in config db + assert applied_tfvars.get("ldap-apps") == extra_tfvars.get("ldap-apps") + + # Assert values coming from extra_tfvars and in manifest + assert applied_tfvars.get("glance-revision") == 555 + + +class TestAddManifestStep: + def test_run(self, cclient, tmpdir): + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(test_manifest) + step = 
manifest.AddManifestStep(manifest_file) + result = step.run() + + test_manifest_dict = yaml.safe_load(test_manifest) + cclient().cluster.add_manifest.assert_called_once_with( + data=yaml.safe_dump(test_manifest_dict) + ) + assert result.result_type == ResultType.COMPLETED + + def test_run_with_no_manifest(self, cclient): + step = manifest.AddManifestStep() + result = step.run() + + cclient().cluster.add_manifest.assert_called_once_with( + data=yaml.safe_dump(manifest.EMPTY_MANIFEST) + ) + assert result.result_type == ResultType.COMPLETED + + def test_run_with_no_connection_to_clusterdb(self, cclient): + cclient().cluster.add_manifest.side_effect = ClusterServiceUnavailableException( + "Cluster unavailable.." + ) + step = manifest.AddManifestStep() + result = step.run() + + cclient().cluster.add_manifest.assert_called_once_with( + data=yaml.safe_dump(manifest.EMPTY_MANIFEST) + ) + assert result.result_type == ResultType.FAILED diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py index 3433605f..be91a08d 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py @@ -98,7 +98,7 @@ def test_run_pristine_installation(self, cclient, jhelper, manifest): result = step.run() jhelper.get_application.assert_called_once() - manifest.update_tfvar_and_apply_tf.assert_called_once() + manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_already_deployed(self, cclient, jhelper, manifest): @@ -114,13 +114,15 @@ def test_run_already_deployed(self, cclient, jhelper, manifest): result = step.run() jhelper.get_application.assert_called_once() - manifest.update_tfvar_and_apply_tf.assert_called_with( - tfplan=tfplan, tfvar_config=tfconfig, extra_tfvars={"machine_ids": machines} + manifest.update_tfvars_and_apply_tf.assert_called_with( + tfplan=tfplan, + tfvar_config=tfconfig, + override_tfvars={"machine_ids": machines}, ) assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed(self, cclient, jhelper, manifest): - manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) @@ -129,7 +131,7 @@ def test_run_tf_apply_failed(self, cclient, jhelper, manifest): ) result = step.run() - manifest.update_tfvar_and_apply_tf.assert_called_once() + manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
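The mapping format documented in versions.py and the reworked loop in _get_tfvars translate manifest charm entries into per-plan terraform variables. A standalone sketch of that translation, using plain dicts in place of the CharmsManifest dataclass (the map contents and function name here are illustrative only):

    # Per-plan map: charm name -> {manifest attribute -> terraform variable name}
    TFVAR_MAP = {
        "openstack-plan": {
            "keystone": {
                "channel": "keystone-channel",
                "revision": "keystone-revision",
                "config": "keystone-config",
            },
        },
    }

    # Charm entries as they would appear after loading a manifest.
    manifest_charms = {
        "keystone": {"channel": "2023.1/stable", "revision": 234, "config": None},
    }

    def tfvars_for_plan(tfplan: str) -> dict:
        tfvars = {}
        for charm, attribute_map in TFVAR_MAP.get(tfplan, {}).items():
            charm_entry = manifest_charms.get(charm)
            if not charm_entry:
                continue  # charm not present in the manifest, nothing to map
            for attribute, tfvar_name in attribute_map.items():
                value = charm_entry.get(attribute)
                if value:  # unset/None attributes are skipped, as in the patch
                    tfvars[tfvar_name] = value
        return tfvars

    assert tfvars_for_plan("openstack-plan") == {
        "keystone-channel": "2023.1/stable",
        "keystone-revision": 234,
    }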
diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py index b55ebc95..2519a493 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py @@ -93,21 +93,21 @@ def test_run( step = openstack.EnableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() + osplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed( self, cclient, read_config, jhelper, tfhelper, osplugin, manifest, pluginmanager ): - osplugin.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + osplugin.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) step = openstack.EnableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() + osplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_not_called() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." @@ -120,7 +120,7 @@ def test_run_waiting_timed_out( step = openstack.EnableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() + osplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "timed out" @@ -131,17 +131,17 @@ def test_run(self, cclient, jhelper, osplugin): step = openstack.DisableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() + osplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_run_tf_apply_failed(self, cclient, jhelper, osplugin): - osplugin.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + osplugin.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) step = openstack.DisableOpenStackApplicationStep(jhelper, osplugin) result = step.run() - osplugin.manifest.update_tfvar_and_apply_tf.assert_called_once() + osplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
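The plugin test updates around here all rely on the same pattern: the manifest is a MagicMock, so a step's call to update_tfvars_and_apply_tf can be asserted or forced to raise. A self-contained sketch of that pattern with a hypothetical step and exception class (the real steps and TerraformException live in the sunbeam packages):

    from unittest.mock import MagicMock

    class FakeTerraformException(Exception):
        pass

    class FakeStep:
        # Hypothetical stand-in for a plugin enable/disable step.
        def __init__(self, manifest, tfplan):
            self.manifest = manifest
            self.tfplan = tfplan

        def run(self):
            try:
                self.manifest.update_tfvars_and_apply_tf(
                    tfplan=self.tfplan, tfvar_config=None, override_tfvars={"token": "x"}
                )
            except FakeTerraformException as e:
                return ("FAILED", str(e))
            return ("COMPLETED", "")

    manifest = MagicMock()
    assert FakeStep(manifest, "fake-plan").run()[0] == "COMPLETED"
    manifest.update_tfvars_and_apply_tf.assert_called_once_with(
        tfplan="fake-plan", tfvar_config=None, override_tfvars={"token": "x"}
    )

    # Simulate a terraform apply failure, as the tests do via side_effect.
    manifest.update_tfvars_and_apply_tf.side_effect = FakeTerraformException("apply failed...")
    assert FakeStep(manifest, "fake-plan").run() == ("FAILED", "apply failed...")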
diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py index b98f5e12..2bd965bb 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_pro.py @@ -67,14 +67,14 @@ def test_enable(self): self.manifest, self.jhelper, self.token, self.tfplan ) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_with( - tfplan=self.tfplan, tfvar_config=None, extra_tfvars={"token": self.token} + self.manifest.update_tfvars_and_apply_tf.assert_called_with( + tfplan=self.tfplan, tfvar_config=None, override_tfvars={"token": self.token} ) self.jhelper.wait_application_ready.assert_called_once() assert result.result_type == ResultType.COMPLETED def test_enable_tf_apply_failed(self): - self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + self.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) @@ -83,7 +83,7 @@ def test_enable_tf_apply_failed(self): ) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." @@ -118,19 +118,19 @@ def test_has_prompts(self): def test_disable(self): step = DisableUbuntuProApplicationStep(self.manifest, self.tfplan) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_with( - tfplan=self.tfplan, tfvar_config=None, extra_tfvars={"token": ""} + self.manifest.update_tfvars_and_apply_tf.assert_called_with( + tfplan=self.tfplan, tfvar_config=None, override_tfvars={"token": ""} ) assert result.result_type == ResultType.COMPLETED def test_disable_tf_apply_failed(self): - self.manifest.update_tfvar_and_apply_tf.side_effect = TerraformException( + self.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( "apply failed..." ) step = DisableUbuntuProApplicationStep(self.manifest, self.tfplan) result = step.run() - self.manifest.update_tfvar_and_apply_tf.assert_called_once() + self.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." From 7c65aa8d2eda4a4e42cd7100873ba67b77abf19c Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Fri, 19 Jan 2024 09:51:38 +0530 Subject: [PATCH 10/27] Add manifest override for juju bootstrap args Add bootstrap args from the manifest to the juju bootstrap command. The bootstrap args will be appended to the juju bootstrap command. 
Manifest for juju bootstrap args looks like: juju: bootstrap_args: - --agent-version=3.2.4 --- sunbeam-python/sunbeam/commands/bootstrap.py | 2 ++ sunbeam-python/sunbeam/commands/juju.py | 11 +++++------ sunbeam-python/sunbeam/jobs/manifest.py | 10 ++++++++-- .../tests/unit/sunbeam/jobs/test_manifest.py | 8 ++++++++ 4 files changed, 23 insertions(+), 8 deletions(-) diff --git a/sunbeam-python/sunbeam/commands/bootstrap.py b/sunbeam-python/sunbeam/commands/bootstrap.py index 23103bfb..fb84389d 100644 --- a/sunbeam-python/sunbeam/commands/bootstrap.py +++ b/sunbeam-python/sunbeam/commands/bootstrap.py @@ -167,6 +167,7 @@ def bootstrap( cloud_type = snap.config.get("juju.cloud.type") cloud_name = snap.config.get("juju.cloud.name") + juju_bootstrap_args = manifest_obj.juju.bootstrap_args data_location = snap.paths.user_data @@ -194,6 +195,7 @@ def bootstrap( cloud_name, cloud_type, CONTROLLER, + bootstrap_args=juju_bootstrap_args, accept_defaults=accept_defaults, preseed_file=preseed, ) diff --git a/sunbeam-python/sunbeam/commands/juju.py b/sunbeam-python/sunbeam/commands/juju.py index 3626fc4e..338d2061 100644 --- a/sunbeam-python/sunbeam/commands/juju.py +++ b/sunbeam-python/sunbeam/commands/juju.py @@ -268,6 +268,7 @@ def __init__( cloud_name: str, cloud_type: str, controller: str, + bootstrap_args: list = [], preseed_file: Optional[Path] = None, accept_defaults: bool = False, ): @@ -276,6 +277,7 @@ def __init__( self.cloud = cloud_name self.cloud_type = cloud_type self.controller = controller + self.bootstrap_args = bootstrap_args self.preseed_file = preseed_file self.accept_defaults = accept_defaults self.juju_clouds = [] @@ -356,12 +358,9 @@ def run(self, status: Optional["Status"] = None) -> Result: if not result: return Result(ResultType.FAILED, "Not able to create cloud") - cmd = [ - self._get_juju_binary(), - "bootstrap", - self.cloud, - self.controller, - ] + cmd = [self._get_juju_binary(), "bootstrap"] + cmd.extend(self.bootstrap_args) + cmd.extend([self.cloud, self.controller]) LOG.debug(f'Running command {" ".join(cmd)}') process = subprocess.run(cmd, capture_output=True, text=True, check=True) LOG.debug( diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 30e5ac19..e0e82d16 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -61,8 +61,14 @@ class MissingTerraformInfoException(Exception): @dataclass class JujuManifest: + # Setting Field alias not supported in pydantic 1.10.0 + # Old version of pydantic is used due to dependencies + # with older version of paramiko from python-libjuju + # Newer version of pydantic can be used once the below + # PR is released + # https://github.com/juju/python-libjuju/pull/1005 bootstrap_args: List[str] = Field( - alias="bootstrap-args", description="Extra args for juju bootstrap" + default=[], description="Extra args for juju bootstrap" ) @@ -151,7 +157,7 @@ def load_latest_from_clusterdb_on_default(cls) -> "Manifest": @classmethod def get_default_manifest_as_dict(cls) -> dict: snap = Snap() - m = {"juju": None, "charms": {}, "terraform": {}} + m = {"juju": {"bootstrap_args": []}, "charms": {}, "terraform": {}} m["charms"] = { charm: {"channel": channel} for charm, channel in MANIFEST_CHARM_VERSIONS.items() diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py index 607c9a7d..80712e45 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py +++ 
b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py @@ -27,6 +27,9 @@ from sunbeam.versions import OPENSTACK_CHANNEL, TERRAFORM_DIR_NAMES test_manifest = """ +juju: + bootstrap_args: + - --agent-version=3.2.4 charms: keystone: channel: 2023.1/stable @@ -117,6 +120,11 @@ def test_load(self, mocker, snap, cclient, pluginmanager, tmpdir): # Assert defaults does not exist assert "nova" not in manifest_obj.charms.keys() + test_manifest_dict = yaml.safe_load(test_manifest) + assert manifest_obj.juju.bootstrap_args == test_manifest_dict.get( + "juju", {} + ).get("bootstrap_args", []) + def test_load_on_default(self, mocker, snap, cclient, pluginmanager, tmpdir): mocker.patch.object(manifest, "Snap", return_value=snap) manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") From 45ff59f4bae7015552343989ec070ae5284fb39a Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Fri, 19 Jan 2024 11:20:43 +0530 Subject: [PATCH 11/27] [manifest] miscellaneous fixes Fix typos in the dns plugin. Create manifest after cluster is joined in join command. --- sunbeam-python/sunbeam/commands/node.py | 4 +++- sunbeam-python/sunbeam/plugins/dns/plugin.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/sunbeam-python/sunbeam/commands/node.py b/sunbeam-python/sunbeam/commands/node.py index 3a6abf9e..f058589c 100644 --- a/sunbeam-python/sunbeam/commands/node.py +++ b/sunbeam-python/sunbeam/commands/node.py @@ -207,7 +207,6 @@ def join( controller = CONTROLLER data_location = snap.paths.user_data jhelper = JujuHelper(data_location) - manifest_obj = Manifest.load_latest_from_cluserdb(include_defaults=True) plan1 = [ JujuLoginStep(data_location), @@ -218,6 +217,9 @@ def join( ] plan1_results = run_plan(plan1, console) + # Get manifest object once the cluster is joined + manifest_obj = Manifest.load_latest_from_clusterdb(include_defaults=True) + machine_id = -1 machine_id_result = get_step_message(plan1_results, AddJujuMachineStep) if machine_id_result is not None: diff --git a/sunbeam-python/sunbeam/plugins/dns/plugin.py b/sunbeam-python/sunbeam/plugins/dns/plugin.py index f71fa4de..17c753ed 100644 --- a/sunbeam-python/sunbeam/plugins/dns/plugin.py +++ b/sunbeam-python/sunbeam/plugins/dns/plugin.py @@ -66,7 +66,7 @@ def charm_manifest_tfvar_map(self) -> dict: return { self.tfplan: { "designate": { - "channel": "desginate-channel", + "channel": "designate-channel", "revision": "designate-revision", "config": "designate-config", }, From 5d593aeacae725004ac7669da2b0389d98a49de4 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Sat, 20 Jan 2024 12:16:02 +0530 Subject: [PATCH 12/27] Add manifest management commands Add manifest list and manifest show commands. --- sunbeam-python/sunbeam/commands/manifest.py | 89 +++++++++++++++++++++ sunbeam-python/sunbeam/main.py | 12 +++ 2 files changed, 101 insertions(+) create mode 100644 sunbeam-python/sunbeam/commands/manifest.py diff --git a/sunbeam-python/sunbeam/commands/manifest.py b/sunbeam-python/sunbeam/commands/manifest.py new file mode 100644 index 00000000..5f5d94ef --- /dev/null +++ b/sunbeam-python/sunbeam/commands/manifest.py @@ -0,0 +1,89 @@ +# Copyright (c) 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import logging + +import click +import yaml +from rich.console import Console +from rich.table import Table + +from sunbeam.clusterd.client import Client +from sunbeam.clusterd.service import ( + ClusterServiceUnavailableException, + ManifestItemNotFoundException, +) +from sunbeam.jobs.checks import DaemonGroupCheck +from sunbeam.jobs.common import FORMAT_TABLE, FORMAT_YAML, run_preflight_checks + +LOG = logging.getLogger(__name__) +console = Console() + + +@click.command() +@click.option( + "-f", + "--format", + type=click.Choice([FORMAT_TABLE, FORMAT_YAML]), + default=FORMAT_TABLE, + help="Output format.", +) +def list(format: str) -> None: + """List manifests""" + client = Client() + manifests = [] + + preflight_checks = [DaemonGroupCheck()] + run_preflight_checks(preflight_checks, console) + + try: + manifests = client.cluster.list_manifests() + except ClusterServiceUnavailableException: + click.echo("Error: Not able to connect to Cluster DB") + return + + if format == FORMAT_TABLE: + table = Table() + table.add_column("ID", justify="left") + table.add_column("Applied Date", justify="left") + for manifest in manifests: + table.add_row(manifest.get("manifestid"), manifest.get("applieddate")) + console.print(table) + elif format == FORMAT_YAML: + for manifest in manifests: + manifest.pop("data") + click.echo(yaml.dump(manifests)) + + +@click.command() +@click.option("--id", type=str, prompt=True, help="Manifest ID") +def show(id: str) -> None: + """Show Manifest data. + + Use '--id=latest' to get the last committed manifest. 
+ """ + client = Client() + + preflight_checks = [DaemonGroupCheck()] + run_preflight_checks(preflight_checks, console) + + try: + manifest = client.cluster.get_manifest(id) + click.echo(manifest.get("data")) + except ClusterServiceUnavailableException: + click.echo("Error: Not able to connect to Cluster DB") + except ManifestItemNotFoundException: + click.echo(f"Error: No manifest exists with id {id}") diff --git a/sunbeam-python/sunbeam/main.py b/sunbeam-python/sunbeam/main.py index f847f83c..dc4e53d0 100644 --- a/sunbeam-python/sunbeam/main.py +++ b/sunbeam-python/sunbeam/main.py @@ -26,6 +26,7 @@ from sunbeam.commands import generate_preseed as generate_preseed_cmds from sunbeam.commands import inspect as inspect_cmds from sunbeam.commands import launch as launch_cmds +from sunbeam.commands import manifest as manifest_commands from sunbeam.commands import node as node_cmds from sunbeam.commands import openrc as openrc_cmds from sunbeam.commands import prepare_node as prepare_node_cmds @@ -64,6 +65,12 @@ def cluster(ctx): """Manage the Sunbeam Cluster""" +@click.group("manifest", context_settings=CONTEXT_SETTINGS, cls=CatchGroup) +@click.pass_context +def manifest(ctx): + """Manage manifests (read-only commands)""" + + @click.group("enable", context_settings=CONTEXT_SETTINGS, cls=CatchGroup) @click.pass_context def enable(ctx): @@ -105,6 +112,11 @@ def main(): cluster.add_command(refresh_cmds.refresh) cluster.add_command(resize_cmds.resize) + # Manifst management + cli.add_command(manifest) + manifest.add_command(manifest_commands.list) + manifest.add_command(manifest_commands.show) + cli.add_command(enable) cli.add_command(disable) From ffa3259e6df07a76c439e23c13be1b889faf025c Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Fri, 19 Jan 2024 15:01:31 +0530 Subject: [PATCH 13/27] [maifest] Add caas_config section * Add add_manifest_section to plugin interface so that plugins can add new manifest attributes. 
* Add caas_config section to manifest as part of caas plugin * Update caas plugin terraform plan to introduce variables that can be overriden by manifest * Update plan for caas configure to read manifest caas_config section and use them as override_tfvars --- sunbeam-python/sunbeam/jobs/manifest.py | 28 ++++-- sunbeam-python/sunbeam/jobs/plugin.py | 9 +- .../plugins/caas/etc/caas-setup/main.tf | 31 ++++-- .../plugins/caas/etc/caas-setup/variables.tf | 48 +++++++++ sunbeam-python/sunbeam/plugins/caas/plugin.py | 92 +++++++++++++---- sunbeam-python/sunbeam/plugins/dns/plugin.py | 26 ++--- .../sunbeam/plugins/interface/v1/base.py | 33 +++++-- sunbeam-python/sunbeam/plugins/ldap/plugin.py | 12 ++- .../sunbeam/plugins/loadbalancer/plugin.py | 14 +-- .../sunbeam/plugins/orchestration/plugin.py | 14 +-- .../sunbeam/plugins/secrets/plugin.py | 14 +-- .../sunbeam/plugins/telemetry/plugin.py | 36 +++---- .../sunbeam/plugins/vault/plugin.py | 14 +-- sunbeam-python/sunbeam/versions.py | 99 ++++++++++++------- 14 files changed, 330 insertions(+), 140 deletions(-) create mode 100644 sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/variables.tf diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index e0e82d16..3ed5689d 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -42,7 +42,7 @@ ) from sunbeam.jobs.plugin import PluginManager from sunbeam.versions import ( - CHARM_MANIFEST_TFVARS_MAP, + MANIFEST_ATTRIBUTES_TFVAR_MAP, MANIFEST_CHARM_VERSIONS, TERRAFORM_DIR_NAMES, ) @@ -94,7 +94,7 @@ class TerraformManifest: source: Path = Field(description="Path to Terraform plan") -@dataclass +@dataclass(config=dict(extra="allow")) class Manifest: juju: Optional[JujuManifest] = None charms: Optional[Dict[str, CharmsManifest]] = None @@ -157,7 +157,11 @@ def load_latest_from_clusterdb_on_default(cls) -> "Manifest": @classmethod def get_default_manifest_as_dict(cls) -> dict: snap = Snap() - m = {"juju": {"bootstrap_args": []}, "charms": {}, "terraform": {}} + m = { + "juju": {"bootstrap_args": []}, + "charms": {}, + "terraform": {}, + } m["charms"] = { charm: {"channel": channel} for charm, channel in MANIFEST_CHARM_VERSIONS.items() @@ -200,6 +204,7 @@ def validate_terraform_keys(self, default_manifest: dict): def __post_init__(self): LOG.debug("Calling __post__init__") + PluginManager().add_manifest_section(self) self.default_manifest_dict = self.get_default_manifest_as_dict() # Add custom validations self.validate_terraform_keys(self.default_manifest_dict) @@ -273,24 +278,27 @@ def update_tfvars_and_apply_tf( update_config(self.client, tfvar_config, tfvars) tfhelper = self.get_tfhelper(tfplan) + LOG.debug(f"Writing tfvars {tfvars}") tfhelper.write_tfvars(tfvars) tfhelper.apply() def _get_tfvars(self, tfplan: str) -> dict: """Get tfvars from the manifest. - CHARM_MANIFEST_TFVARS_MAP holds the mapping of CharmManifest and the - terraform variable name for each CharmManifest attribute. - For each terraform variable in CHARM_MANIFEST_TFVARS_MAP, get the - corresponding value from Manifest and return all terraform variables - as dict. + MANIFEST_ATTRIBUTES_TFVAR_MAP holds the mapping of Manifest attributes + and the terraform variable name. For each terraform variable in + MANIFEST_ATTRIBUTES_TFVAR_MAP, get the corresponding value from Manifest + and return all terraform variables as dict. 
""" tfvars = {} - tfvar_map = copy.deepcopy(CHARM_MANIFEST_TFVARS_MAP) + tfvar_map = copy.deepcopy(MANIFEST_ATTRIBUTES_TFVAR_MAP) tfvar_map_plugin = PluginManager().get_all_plugin_manfiest_tfvar_map() utils.merge_dict(tfvar_map, tfvar_map_plugin) - for charm, per_charm_tfvar_map in tfvar_map.get(tfplan, {}).items(): + charms_tfvar_map = tfvar_map.get(tfplan, {}).get("charms", {}) + + # handle tfvars for charms section + for charm, per_charm_tfvar_map in charms_tfvar_map.items(): charm_ = self.charms.get(charm) if charm_: manifest_charm = asdict(charm_) diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index f278a7cd..0bf8d89a 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -271,11 +271,18 @@ def get_all_plugin_manfiest_tfvar_map(cls) -> dict: plugins = cls.get_all_plugin_classes() for klass in plugins: plugin = klass() - m_dict = plugin.charm_manifest_tfvar_map() + m_dict = plugin.manifest_attributes_tfvar_map() utils.merge_dict(tfvar_map, m_dict) return tfvar_map + @classmethod + def add_manifest_section(cls, manifest) -> None: + plugins = cls.get_all_plugin_classes() + for klass in plugins: + plugin = klass() + plugin.add_manifest_section(manifest) + @classmethod def register(cls, cli: click.Group) -> None: """Register the plugins. diff --git a/sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/main.tf b/sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/main.tf index 5c362a16..2d667f95 100644 --- a/sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/main.tf +++ b/sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/main.tf @@ -1,3 +1,18 @@ +# Copyright (c) 2023 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + terraform { required_version = ">= 0.14.0" required_providers { @@ -10,16 +25,12 @@ terraform { provider "openstack" {} -resource "openstack_images_image_v2" "fedora-coreos" { - name = "fedora-coreos-38" - image_source_url = "https://builds.coreos.fedoraproject.org/prod/streams/stable/builds/38.20230806.3.0/x86_64/fedora-coreos-38.20230806.3.0-openstack.x86_64.qcow2.xz" - container_format = "bare" - disk_format = "qcow2" +resource "openstack_images_image_v2" "caas-image" { + name = var.image-name + image_source_url = var.image-source-url + container_format = var.image-container-format + disk_format = var.image-disk-format decompress = true visibility = "public" - properties = { - os_distro = "fedora-coreos" - architecture = "x86_64" - hypervisor_type = "qemu" - } + properties = var.image-properties } diff --git a/sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/variables.tf b/sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/variables.tf new file mode 100644 index 00000000..c234a22e --- /dev/null +++ b/sunbeam-python/sunbeam/plugins/caas/etc/caas-setup/variables.tf @@ -0,0 +1,48 @@ +# Copyright (c) 2024 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +variable "image-name" { + description = "Image name to set in glance" + type = string + default = "fedora-coreos-38" +} + +variable "image-source-url" { + description = "Image URL to upload to glance" + type = string + default = "https://builds.coreos.fedoraproject.org/prod/streams/stable/builds/38.20230806.3.0/x86_64/fedora-coreos-38.20230806.3.0-openstack.x86_64.qcow2.xz" +} + +variable "image-container-format" { + description = "Image container format" + type = string + default = "bare" +} + +variable "image-disk-format" { + description = "Image disk format" + type = string + default = "qcow2" +} + +variable "image-properties" { + description = "Properties to set on image in glance" + type = map(string) + default = { + os_distro = "fedora-coreos" + architecture = "x86_64" + hypervisor_type = "qemu" + } +} diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index 458fc956..b010e865 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -14,11 +14,14 @@ # limitations under the License. import logging +from dataclasses import asdict from pathlib import Path from typing import Optional import click from packaging.version import Version +from pydantic import Field +from pydantic.dataclasses import dataclass from rich.console import Console from rich.status import Status @@ -28,13 +31,10 @@ ) from sunbeam.commands.configure import retrieve_admin_credentials from sunbeam.commands.openstack import OPENSTACK_MODEL -from sunbeam.commands.terraform import ( - TerraformException, - TerraformHelper, - TerraformInitStep, -) +from sunbeam.commands.terraform import TerraformException, TerraformInitStep from sunbeam.jobs.common import BaseStep, Result, ResultType, read_config, run_plan from sunbeam.jobs.juju import JujuHelper +from sunbeam.jobs.manifest import Manifest from sunbeam.plugins.interface.v1.base import PluginRequirement from sunbeam.plugins.interface.v1.openstack import ( OpenStackControlPlanePlugin, @@ -48,23 +48,59 @@ console = Console() +@dataclass +class CaasConfig: + image_name: Optional[str] = Field(default=None, description="CAAS Image name") + image_url: Optional[str] = Field( + default=None, description="CAAS Image URL to upload to glance" + ) + container_format: Optional[str] = Field( + default=None, description="Image container format" + ) + disk_format: Optional[str] = Field(default=None, description="Image disk format") + properties: dict = Field( + default={}, description="Properties to set for image in glance" + ) + + class CaasConfigureStep(BaseStep): """Configure CaaS service.""" def __init__( self, - tfhelper: TerraformHelper, + manifest: Manifest, + tfplan: str, + tfvar_map: dict, ): super().__init__( "Configure Container as a Service", "Configure Cloud for Container as a Service use", ) - self.tfhelper = tfhelper + self.manifest = manifest + self.tfplan = tfplan + self.tfvar_map = tfvar_map def run(self, status: Optional[Status] = None) -> Result: """Execute configuration using terraform.""" try: - self.tfhelper.apply() + override_tfvars = {} + 
try: + manifest_caas_config = asdict(self.manifest.caas_config) + for caas_config_attribute, tfvar_name in ( + self.tfvar_map.get(self.tfplan, {}).get("caas_config", {}).items() + ): + caas_config_attribute_ = manifest_caas_config.get( + caas_config_attribute + ) + if caas_config_attribute_: + override_tfvars[tfvar_name] = caas_config_attribute_ + except AttributeError: + # caas_config not defined in manifest, ignore + pass + + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, override_tfvars=override_tfvars + ) except TerraformException as e: LOG.exception("Error configuring Container as a Service plugin.") return Result(ResultType.FAILED, str(e)) @@ -100,18 +136,38 @@ def manifest_defaults(self) -> dict: }, } - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" return { self.tfplan: { - "magnum": { - "channel": "magnum-channel", - "revision": "magnum-revision", - "config": "magnum-config", + "charms": { + "magnum": { + "channel": "magnum-channel", + "revision": "magnum-revision", + "config": "magnum-config", + } } - } + }, + self.configure_plan: { + "caas_config": { + "image_name": "image-name", + "image_url": "image-source-url", + "container_format": "image-container-format", + "disk_format": "image-disk-format", + "properties": "image-properties", + } + }, } + def add_manifest_section(self, manifest: Manifest) -> None: + """Adds manifest section""" + try: + _caas_config = manifest.caas_config + manifest.caas_config = CaasConfig(**_caas_config) + except AttributeError: + # Attribute not defined in manifest + pass + def set_application_names(self) -> list: """Application names handled by the terraform plan.""" apps = ["magnum", "magnum-mysql-router"] @@ -181,11 +237,13 @@ def configure(self): jhelper = JujuHelper(data_location) admin_credentials = retrieve_admin_credentials(jhelper, OPENSTACK_MODEL) - tfhelper = self.manifest.get_tfplan(self.configure_plan) + tfhelper = self.manifest.get_tfhelper(self.configure_plan) tfhelper.env = admin_credentials plan = [ TerraformInitStep(tfhelper), - CaasConfigureStep(tfhelper), + CaasConfigureStep( + self.manifest, self.configure_plan, self.manifest_attributes_tfvar_map() + ), ] run_plan(plan, console) diff --git a/sunbeam-python/sunbeam/plugins/dns/plugin.py b/sunbeam-python/sunbeam/plugins/dns/plugin.py index 17c753ed..38152df0 100644 --- a/sunbeam-python/sunbeam/plugins/dns/plugin.py +++ b/sunbeam-python/sunbeam/plugins/dns/plugin.py @@ -61,20 +61,22 @@ def manifest_defaults(self) -> dict: } } - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" return { self.tfplan: { - "designate": { - "channel": "designate-channel", - "revision": "designate-revision", - "config": "designate-config", - }, - "bind": { - "channel": "bind-channel", - "revision": "bind-revision", - "config": "bind-config", - }, + "charms": { + "designate": { + "channel": "designate-channel", + "revision": "designate-revision", + "config": "designate-config", + }, + "bind": { + "channel": "bind-channel", + "revision": "bind-revision", + "config": "bind-config", + }, + } } } diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/base.py b/sunbeam-python/sunbeam/plugins/interface/v1/base.py index 8bc41db8..087a4389 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/base.py +++ 
b/sunbeam-python/sunbeam/plugins/interface/v1/base.py @@ -214,17 +214,34 @@ def manifest_defaults(self) -> dict: """ return {} - def charm_manifest_tfvar_map(self) -> dict: - """Return terraform var map for the manifest. + def add_manifest_section(self, manifest) -> None: + """Add manifest section. - Map terraform variable for each Charm manifest attribute. + Any new attributes to the manifest introduced by the plugin will be read as + dict. This function should convert the new attribute to a dataclass if + required and reassign it to manifest object. This will also help in + validation of new attributes. + """ + pass + + def manifest_attributes_tfvar_map(self) -> dict: + """Return terraform var map for the manifest attributes. + + Map terraform variable for each manifest attribute. Sample return value: { - : { - "heat": { - "channel": , - "revision": , - "config": , + : { + "charms": { + "heat": { + "channel": , + "revision": , + "config": , + } + } + }, + : { + "caas-config": { + "image-url": } } } diff --git a/sunbeam-python/sunbeam/plugins/ldap/plugin.py b/sunbeam-python/sunbeam/plugins/ldap/plugin.py index d0081199..55e214b0 100644 --- a/sunbeam-python/sunbeam/plugins/ldap/plugin.py +++ b/sunbeam-python/sunbeam/plugins/ldap/plugin.py @@ -265,13 +265,15 @@ def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"keystone-ldap": {"channel": OPENSTACK_CHANNEL}}} - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" return { self.tfplan: { - "keystone-ldap": { - "channel": "ldap-channel", - "revision": "ldap-revision", + "charms": { + "keystone-ldap": { + "channel": "ldap-channel", + "revision": "ldap-revision", + } } } } diff --git a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py index 64133a5a..2dbcb9fc 100644 --- a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py +++ b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py @@ -40,14 +40,16 @@ def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"octavia": {"channel": OPENSTACK_CHANNEL}}} - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" return { self.tfplan: { - "octavia": { - "channel": "octavia-channel", - "revision": "octavia-revision", - "config": "octavia-config", + "charms": { + "octavia": { + "channel": "octavia-channel", + "revision": "octavia-revision", + "config": "octavia-config", + } } } } diff --git a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py index 52ddba36..80dbd136 100644 --- a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py +++ b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py @@ -40,14 +40,16 @@ def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"heat": {"channel": OPENSTACK_CHANNEL}}} - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" return { self.tfplan: { - "heat": { - "channel": "heat-channel", - "revision": "heat-revision", - "config": "heat-config", + "charms": { + "heat": { + "channel": "heat-channel", + "revision": "heat-revision", 
+ "config": "heat-config", + } } } } diff --git a/sunbeam-python/sunbeam/plugins/secrets/plugin.py b/sunbeam-python/sunbeam/plugins/secrets/plugin.py index 58738808..d834a285 100644 --- a/sunbeam-python/sunbeam/plugins/secrets/plugin.py +++ b/sunbeam-python/sunbeam/plugins/secrets/plugin.py @@ -42,14 +42,16 @@ def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return {"charms": {"barbican": {"channel": OPENSTACK_CHANNEL}}} - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" return { self.tfplan: { - "barbican": { - "channel": "barbican-channel", - "revision": "barbican-revision", - "config": "barbican-config", + "charms": { + "barbican": { + "channel": "barbican-channel", + "revision": "barbican-revision", + "config": "barbican-config", + } } } } diff --git a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py index 54dfb038..72897f54 100644 --- a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py +++ b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py @@ -54,25 +54,27 @@ def manifest_defaults(self) -> dict: } } - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" return { self.tfplan: { - "aodh": { - "channel": "aodh-channel", - "revision": "aodh-revision", - "config": "aodh-config", - }, - "gnocchi": { - "channel": "gnocchi-channel", - "revision": "gnocchi-revision", - "config": "gnocchi-config", - }, - "ceilometer": { - "channel": "ceilometer-channel", - "revision": "ceilometer-revision", - "config": "ceilometer-config", - }, + "charms": { + "aodh": { + "channel": "aodh-channel", + "revision": "aodh-revision", + "config": "aodh-config", + }, + "gnocchi": { + "channel": "gnocchi-channel", + "revision": "gnocchi-revision", + "config": "gnocchi-config", + }, + "ceilometer": { + "channel": "ceilometer-channel", + "revision": "ceilometer-revision", + "config": "ceilometer-config", + }, + } } } diff --git a/sunbeam-python/sunbeam/plugins/vault/plugin.py b/sunbeam-python/sunbeam/plugins/vault/plugin.py index 80824837..d549d9c0 100644 --- a/sunbeam-python/sunbeam/plugins/vault/plugin.py +++ b/sunbeam-python/sunbeam/plugins/vault/plugin.py @@ -46,14 +46,16 @@ def manifest_defaults(self) -> dict: """Manifest pluing part in dict format.""" return {"charms": {"vault": {"channel": VAULT_CHANNEL}}} - def charm_manifest_tfvar_map(self) -> dict: - """Charm manifest terraformvars map.""" + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attrbitues to terraformvars map.""" return { self.tfplan: { - "vault": { - "channel": "vault-channel", - "revision": "vault-revision", - "config": "vault-config", + "charms": { + "vault": { + "channel": "vault-channel", + "revision": "vault-revision", + "config": "vault-config", + } } } } diff --git a/sunbeam-python/sunbeam/versions.py b/sunbeam-python/sunbeam/versions.py index 84279950..f2b992f2 100644 --- a/sunbeam-python/sunbeam/versions.py +++ b/sunbeam-python/sunbeam/versions.py @@ -107,15 +107,22 @@ """ -Format of CHARM_MANIFEST_TFVAR_MAP +Format of MANIFEST_ATTRIBUTES_TFVAR_MAP { : { - : { - : + "charms": { + : { + : + ... + ... + }, + ... + }, + "caas_config": { + : ... ... }, - ... }, ... 
} @@ -123,19 +130,29 @@ Example: { "openstack-plan": { - "keystone": { - "channel": "keystone-channel", - "revision": "keystone-revision", - "config": "keystone-config" + "charms": { + "keystone": { + "channel": "keystone-channel", + "revision": "keystone-revision", + "config": "keystone-config" + }, }, }, "microk8s-plan": { - "microk8s": { - "channel": "charm_microk8s_channel", - "revision": "charm_microk8s_revision", - "config": "charm_microk8s_config", + "charms": { + "microk8s": { + "channel": "charm_microk8s_channel", + "revision": "charm_microk8s_revision", + "config": "charm_microk8s_config", + }, }, }, + "caas-setup": { + "caas_config": { + "image_name": "image-name", + "image_url": "image-source-url" + } + } } """ K8S_CHARMS = {} @@ -144,51 +161,61 @@ K8S_CHARMS |= MYSQL_SERVICES_K8S K8S_CHARMS |= MISC_SERVICES_K8S DEPLOY_OPENSTACK_TFVAR_MAP = { - svc: { - "channel": f"{svc}-channel", - "revision": f"{svc}-revision", - "config": f"{svc}-config", + "charms": { + svc: { + "channel": f"{svc}-channel", + "revision": f"{svc}-revision", + "config": f"{svc}-config", + } + for svc, channel in K8S_CHARMS.items() } - for svc, channel in K8S_CHARMS.items() } -DEPLOY_OPENSTACK_TFVAR_MAP.pop("traefik-public") -DEPLOY_OPENSTACK_TFVAR_MAP["mysql-router"] = { +DEPLOY_OPENSTACK_TFVAR_MAP.get("charms").pop("traefik-public") +DEPLOY_OPENSTACK_TFVAR_MAP["charms"]["mysql-router"] = { "channel": "mysql-router-channel", "revision": "mysql-router-revision", "config": "mysql-router-config", } DEPLOY_MICROK8S_TFVAR_MAP = { - "microk8s": { - "channel": "charm_microk8s_channel", - "revision": "charm_microk8s_revision", - "config": "charm_microk8s_config", + "charms": { + "microk8s": { + "channel": "charm_microk8s_channel", + "revision": "charm_microk8s_revision", + "config": "charm_microk8s_config", + } } } DEPLOY_MICROCEPH_TFVAR_MAP = { - "microceph": { - "channel": "charm_microceph_channel", - "revision": "charm_microceph_revision", - "config": "charm_microceph_config", + "charms": { + "microceph": { + "channel": "charm_microceph_channel", + "revision": "charm_microceph_revision", + "config": "charm_microceph_config", + } } } DEPLOY_OPENSTACK_HYPERVISOR_TFVAR_MAP = { - "openstack-hypervisor": { - "channel": "charm_channel", - "revision": "charm_revision", - "config": "charm_config", + "charms": { + "openstack-hypervisor": { + "channel": "charm_channel", + "revision": "charm_revision", + "config": "charm_config", + } } } DEPLOY_SUNBEAM_MACHINE_TFVAR_MAP = { - "sunbeam-machine": { - "channel": "charm_channel", - "revision": "charm_revision", - "config": "charm_config", + "charms": { + "sunbeam-machine": { + "channel": "charm_channel", + "revision": "charm_revision", + "config": "charm_config", + } } } -CHARM_MANIFEST_TFVARS_MAP = { +MANIFEST_ATTRIBUTES_TFVAR_MAP = { "sunbeam-machine-plan": DEPLOY_SUNBEAM_MACHINE_TFVAR_MAP, "microk8s-plan": DEPLOY_MICROK8S_TFVAR_MAP, "microceph-plan": DEPLOY_MICROCEPH_TFVAR_MAP, From d8ba0b762f6911b4b26454e5f082b2b8cfadf293 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Tue, 23 Jan 2024 10:48:05 +0530 Subject: [PATCH 14/27] [manifest] fix terraformvar updates terraform vars for the plan are created by merging a. tfvars from cluster db b. From manifest along with defaults c. override_tfvars from the plan step. If any attribute in manifest is removed in subsequent update of manifest, the merged tfvar map still contains the value from cluster db (step a). To avoid this, remove all the tfvar keys that can come from manifest in step a. 
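In other words, the merged tfvars for a plan are now built roughly as in the
sketch below (a simplified illustration using the helpers added by this patch,
not the verbatim implementation):

    db_tfvars = read_config(self.client, tfvar_config)    # (a) previous vars from cluster db
    manifest_keys = self._get_tfvar_names(tfplan)          # tfvar keys the manifest can manage
    tfvars = {k: v for k, v in db_tfvars.items()
              if k not in manifest_keys}                   # drop stale manifest-managed keys
    tfvars.update(self._get_tfvars(tfplan))                # (b) manifest values plus defaults
    tfvars.update(override_tfvars)                         # (c) per-step overrides win last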
--- sunbeam-python/sunbeam/jobs/manifest.py | 29 +++++++++++++++---- .../tests/unit/sunbeam/jobs/test_manifest.py | 5 ++++ 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 3ed5689d..1836c297 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -214,6 +214,13 @@ def __post_init__(self): self.snap = Snap() self.data_location = self.snap.paths.user_data self.client = clusterClient() + self.tfvar_map = self._get_all_tfvar_map() + + def _get_all_tfvar_map(self) -> dict: + tfvar_map = copy.deepcopy(MANIFEST_ATTRIBUTES_TFVAR_MAP) + tfvar_map_plugin = PluginManager().get_all_plugin_manfiest_tfvar_map() + utils.merge_dict(tfvar_map, tfvar_map_plugin) + return tfvar_map # Terraform helper classes def get_tfhelper(self, tfplan: str) -> TerraformHelper: @@ -264,7 +271,13 @@ def update_tfvars_and_apply_tf( tfvars = {} if tfvar_config: try: - tfvars = read_config(self.client, tfvar_config) + tfvars_from_config = read_config(self.client, tfvar_config) + # Exclude all default tfvar keys from the previous terraform + # vars applied to the plan. + _tfvar_names = self._get_tfvar_names(tfplan) + tfvars = { + k: v for k, v in tfvars_from_config.items() if k not in _tfvar_names + } except ConfigItemNotFoundException: pass @@ -291,11 +304,8 @@ def _get_tfvars(self, tfplan: str) -> dict: and return all terraform variables as dict. """ tfvars = {} - tfvar_map = copy.deepcopy(MANIFEST_ATTRIBUTES_TFVAR_MAP) - tfvar_map_plugin = PluginManager().get_all_plugin_manfiest_tfvar_map() - utils.merge_dict(tfvar_map, tfvar_map_plugin) - charms_tfvar_map = tfvar_map.get(tfplan, {}).get("charms", {}) + charms_tfvar_map = self.tfvar_map.get(tfplan, {}).get("charms", {}) # handle tfvars for charms section for charm, per_charm_tfvar_map in charms_tfvar_map.items(): @@ -309,6 +319,15 @@ def _get_tfvars(self, tfplan: str) -> dict: return tfvars + def _get_tfvar_names(self, tfplan: str) -> list: + return [ + tfvar_name + for charm, per_charm_tfvar_map in self.tfvar_map.get(tfplan, {}) + .get("charms", {}) + .items() + for charm_attribute, tfvar_name in per_charm_tfvar_map.items() + ] + class AddManifestStep(BaseStep): """Add Manifest file to cluster database""" diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py index 80712e45..d6489eac 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py @@ -298,6 +298,7 @@ def test_update_tfvars_and_apply_tf( read_config.return_value = { "keystone-channel": OPENSTACK_CHANNEL, "neutron-channel": "2023.1/stable", + "neutron-revision": 123, "ldap-apps": {"dom1": {"domain-name": "dom1"}}, } mocker.patch.object(manifest, "Snap", return_value=snap) @@ -333,6 +334,10 @@ def test_update_tfvars_and_apply_tf( # Assert values coming from extra_tfvars and in manifest assert applied_tfvars.get("glance-revision") == 555 + # Assert remove keys from read_config if not present in manifest+defaults + # or override + assert "neutron-revision" not in applied_tfvars.keys() + class TestAddManifestStep: def test_run(self, cclient, tmpdir): From 44214eaa614439ed51eb28a423681ccd03878c24 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Tue, 23 Jan 2024 07:01:16 +0530 Subject: [PATCH 15/27] [manifest] support for intra channel refresh * Use manifest for refresh intra channel updates * Refresh the apps in openstack and 
controller only when channel is specified in manifest (including default values) and no revision is specified. This will take care of app refresh for new revisions on the same channel. The logic to check if there is any latest revision on the channel is removed since the juju refresh should take care of the logic. * Add Reapply terraform plan for Control plane. * Add flag refresh to DeployMachineApplication so that the terraform plan can be reapplied when refresh is true. This covers terraform plans for microk8s, microceph. sunbeam-machine. * Use ReapplyHypervisorTerraformPlan for openstack-hypervisor. * Add all the above plans to the list of plans to execute on refresh * For the plugins, do not apply any terraform plan if the plan is part of sunbeam-terraform as the control plane refresh should take care. * Do not apply terraform plans if there is no change in current terraform vars and updated terraform vars. --- sunbeam-python/sunbeam/commands/juju.py | 19 +++ sunbeam-python/sunbeam/commands/microceph.py | 2 + sunbeam-python/sunbeam/commands/microk8s.py | 6 + sunbeam-python/sunbeam/commands/openstack.py | 58 ++++++++ sunbeam-python/sunbeam/commands/refresh.py | 30 ++-- .../sunbeam/commands/sunbeam_machine.py | 2 + .../sunbeam/commands/upgrades/base.py | 8 +- .../commands/upgrades/intra_channel.py | 128 ++++++++++++------ sunbeam-python/sunbeam/jobs/manifest.py | 25 ++-- sunbeam-python/sunbeam/jobs/steps.py | 6 + sunbeam-python/sunbeam/plugins/caas/plugin.py | 4 +- sunbeam-python/sunbeam/plugins/dns/plugin.py | 8 +- .../sunbeam/plugins/interface/v1/base.py | 4 +- .../sunbeam/plugins/interface/v1/openstack.py | 14 ++ sunbeam-python/sunbeam/plugins/ldap/plugin.py | 4 +- .../sunbeam/plugins/loadbalancer/plugin.py | 4 +- .../sunbeam/plugins/orchestration/plugin.py | 4 +- .../sunbeam/plugins/secrets/plugin.py | 4 +- .../sunbeam/plugins/telemetry/plugin.py | 12 +- .../sunbeam/plugins/vault/plugin.py | 4 +- sunbeam-python/sunbeam/versions.py | 72 ++++++---- .../unit/sunbeam/commands/test_openstack.py | 70 ++++++++++ .../tests/unit/sunbeam/jobs/test_manifest.py | 26 ++-- .../tests/unit/sunbeam/jobs/test_steps.py | 16 +++ 24 files changed, 400 insertions(+), 130 deletions(-) diff --git a/sunbeam-python/sunbeam/commands/juju.py b/sunbeam-python/sunbeam/commands/juju.py index 2c9f9a2b..dddb63cb 100644 --- a/sunbeam-python/sunbeam/commands/juju.py +++ b/sunbeam-python/sunbeam/commands/juju.py @@ -202,6 +202,25 @@ def revision_update_needed( ) return bool(available_revision > deployed_revision) + def get_charm_deployed_versions(self, model: str) -> dict: + """Return charm deployed info for all the applications in model. + + For each application, return a tuple of charm name, channel and revision. + Example output: + {"keystone": ("keystone-k8s", "2023.2/stable", 234)} + """ + _status = run_sync(self.jhelper.get_model_status_full(model)) + status = json.loads(_status.to_json()) + + apps = {} + for app_name, app_status in status.get("applications", {}).items(): + charm_name = self._extract_charm_name(app_status["charm"]) + deployed_channel = self.normalise_channel(app_status["charm-channel"]) + deployed_revision = int(self._extract_charm_revision(app_status["charm"])) + apps[app_name] = (charm_name, deployed_channel, deployed_revision) + + return apps + def normalise_channel(self, channel: str) -> str: """Expand channel if it is using abbreviation. 
diff --git a/sunbeam-python/sunbeam/commands/microceph.py b/sunbeam-python/sunbeam/commands/microceph.py index a46dac02..2e05f151 100644 --- a/sunbeam-python/sunbeam/commands/microceph.py +++ b/sunbeam-python/sunbeam/commands/microceph.py @@ -66,6 +66,7 @@ def __init__( client: Client, manifest: Manifest, jhelper: JujuHelper, + refresh: bool = False, ): super().__init__( client, @@ -77,6 +78,7 @@ def __init__( "microceph-plan", "Deploy MicroCeph", "Deploying MicroCeph", + refresh, ) def get_application_timeout(self) -> int: diff --git a/sunbeam-python/sunbeam/commands/microk8s.py b/sunbeam-python/sunbeam/commands/microk8s.py index 4640fef0..bd5cb942 100644 --- a/sunbeam-python/sunbeam/commands/microk8s.py +++ b/sunbeam-python/sunbeam/commands/microk8s.py @@ -96,6 +96,7 @@ def __init__( jhelper: JujuHelper, preseed_file: Optional[Path] = None, accept_defaults: bool = False, + refresh: bool = False, ): super().__init__( client, @@ -107,6 +108,7 @@ def __init__( "microk8s-plan", "Deploy MicroK8S", "Deploying MicroK8S", + refresh, ) self.preseed_file = preseed_file @@ -156,6 +158,10 @@ def has_prompts(self) -> bool: :return: True if the step can ask the user for prompts, False otherwise """ + # No need to prompt for questions in case of refresh + if self.refresh: + return False + return True diff --git a/sunbeam-python/sunbeam/commands/openstack.py b/sunbeam-python/sunbeam/commands/openstack.py index ff6766eb..309400bb 100644 --- a/sunbeam-python/sunbeam/commands/openstack.py +++ b/sunbeam-python/sunbeam/commands/openstack.py @@ -419,3 +419,61 @@ def run(self, status: Optional[Status] = None) -> Result: self.kube.patch(Service, service_name, obj=service) return Result(ResultType.COMPLETED) + + +class ReapplyOpenStackTerraformPlanStep(BaseStep, JujuStepHelper): + """Reapply OpenStack Terraform plan""" + + _CONFIG = CONFIG_KEY + + def __init__( + self, + client: Client, + manifest: Manifest, + jhelper: JujuHelper, + ): + super().__init__( + "Applying Control plane Terraform plan", + "Applying Control plane Terraform plan (this may take a while)", + ) + self.manifest = manifest + self.jhelper = jhelper + self.client = client + self.tfplan = "openstack-plan" + self.model = OPENSTACK_MODEL + + def run(self, status: Optional[Status] = None) -> Result: + """Reapply Terraform plan if there are changes in tfvars.""" + try: + self.update_status(status, "deploying services") + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, + tfvar_config=self._CONFIG, + ) + except TerraformException as e: + LOG.exception("Error reconfiguring cloud") + return Result(ResultType.FAILED, str(e)) + + storage_nodes = self.client.cluster.list_nodes_by_role("storage") + # Remove cinder-ceph from apps to wait on if ceph is not enabled + apps = run_sync(self.jhelper.get_application_names(self.model)) + if not storage_nodes and "cinder-ceph" in apps: + apps.remove("cinder-ceph") + LOG.debug(f"Application monitored for readiness: {apps}") + task = run_sync(update_status_background(self, apps, status)) + try: + run_sync( + self.jhelper.wait_until_active( + self.model, + apps, + timeout=OPENSTACK_DEPLOY_TIMEOUT, + ) + ) + except (JujuWaitException, TimeoutException) as e: + LOG.debug(str(e)) + return Result(ResultType.FAILED, str(e)) + finally: + if not task.done(): + task.cancel() + + return Result(ResultType.COMPLETED) diff --git a/sunbeam-python/sunbeam/commands/refresh.py b/sunbeam-python/sunbeam/commands/refresh.py index 55940a63..8289759b 100644 --- a/sunbeam-python/sunbeam/commands/refresh.py +++ 
b/sunbeam-python/sunbeam/commands/refresh.py @@ -21,13 +21,11 @@ from snaphelpers import Snap from sunbeam.clusterd.client import Client -from sunbeam.commands.terraform import TerraformHelper from sunbeam.commands.upgrades.inter_channel import ChannelUpgradeCoordinator from sunbeam.commands.upgrades.intra_channel import LatestInChannelCoordinator from sunbeam.jobs.common import run_plan from sunbeam.jobs.juju import JujuHelper from sunbeam.jobs.manifest import AddManifestStep, Manifest -from sunbeam.versions import TERRAFORM_DIR_NAMES LOG = logging.getLogger(__name__) console = Console() @@ -80,28 +78,24 @@ def refresh( if clear_manifest: run_plan([AddManifestStep(client)], console) elif manifest: - manifest_obj = Manifest.load(client, manifest_file=manifest) - LOG.debug(f"Manifest object created with no errors: {manifest_obj}") + manifest_obj = Manifest.load( + client, manifest_file=manifest, include_defaults=True + ) run_plan([AddManifestStep(client, manifest)], console) - else: - LOG.debug("Getting latest manifest") - manifest_obj = Manifest.load_latest_from_cluserdb(client, include_defaults=True) - LOG.debug(f"Manifest object created with no errors: {manifest_obj}") - tfplan = "openstack-plan" - tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) + if not manifest_obj: + LOG.debug("Getting latest manifest from cluster db") + manifest_obj = Manifest.load_latest_from_clusterdb( + client, include_defaults=True + ) + + LOG.debug(f"Manifest object used for refresh: {manifest_obj}") data_location = snap.paths.user_data - tfhelper = TerraformHelper( - path=snap.paths.user_common / "etc" / tfplan_dir, - plan=tfplan, - backend="http", - data_location=data_location, - ) jhelper = JujuHelper(client, data_location) if upgrade_release: - a = ChannelUpgradeCoordinator(client, jhelper, tfhelper) + a = ChannelUpgradeCoordinator(client, jhelper, manifest_obj) a.run_plan() else: - a = LatestInChannelCoordinator(client, jhelper, tfhelper) + a = LatestInChannelCoordinator(client, jhelper, manifest_obj) a.run_plan() click.echo("Refresh complete.") diff --git a/sunbeam-python/sunbeam/commands/sunbeam_machine.py b/sunbeam-python/sunbeam/commands/sunbeam_machine.py index dab87f30..798cc971 100644 --- a/sunbeam-python/sunbeam/commands/sunbeam_machine.py +++ b/sunbeam-python/sunbeam/commands/sunbeam_machine.py @@ -41,6 +41,7 @@ def __init__( client: Client, manifest: Manifest, jhelper: JujuHelper, + refresh: bool = False, ): super().__init__( client, @@ -52,6 +53,7 @@ def __init__( "sunbeam-machine-plan", "Deploy sunbeam-machine", "Deploying Sunbeam Machine", + refresh, ) def extra_tfvars(self) -> dict: diff --git a/sunbeam-python/sunbeam/commands/upgrades/base.py b/sunbeam-python/sunbeam/commands/upgrades/base.py index 90a64b7a..c035db9f 100644 --- a/sunbeam-python/sunbeam/commands/upgrades/base.py +++ b/sunbeam-python/sunbeam/commands/upgrades/base.py @@ -23,6 +23,7 @@ from sunbeam.commands.terraform import TerraformHelper from sunbeam.jobs.common import BaseStep, Result, ResultType, run_plan from sunbeam.jobs.juju import JujuHelper +from sunbeam.jobs.manifest import Manifest from sunbeam.jobs.plugin import PluginManager LOG = logging.getLogger(__name__) @@ -62,7 +63,7 @@ def __init__( self, client: Client, jhelper: JujuHelper, - tfhelper: TerraformHelper, + manifest: Manifest, channel: str | None = None, ): """Upgrade coordinator. 
@@ -71,13 +72,14 @@ def __init__( :client: Helper for interacting with clusterd :jhelper: Helper for interacting with pylibjuju - :tfhelper: Helper for interaction with Terraform + :manifest: Manifest object :channel: OpenStack channel to upgrade charms to """ self.client = client self.channel = channel self.jhelper = jhelper - self.tfhelper = tfhelper + self.manifest = manifest + self.tfhelper = self.manifest.get_tfhelper("openstack-plan") def get_plan(self) -> list[BaseStep]: """Return the plan for this upgrade. diff --git a/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py b/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py index 6cfe83b5..055c4a29 100644 --- a/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py +++ b/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py @@ -13,25 +13,29 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json import logging from typing import Optional from rich.console import Console from rich.status import Status +from sunbeam.commands.hypervisor import ReapplyHypervisorTerraformPlanStep from sunbeam.commands.juju import JujuStepHelper +from sunbeam.commands.microceph import DeployMicrocephApplicationStep +from sunbeam.commands.microk8s import DeployMicrok8sApplicationStep +from sunbeam.commands.openstack import ReapplyOpenStackTerraformPlanStep +from sunbeam.commands.sunbeam_machine import DeploySunbeamMachineApplicationStep +from sunbeam.commands.terraform import TerraformInitStep from sunbeam.commands.upgrades.base import UpgradeCoordinator, UpgradePlugins from sunbeam.jobs.common import BaseStep, Result, ResultType from sunbeam.jobs.juju import run_sync -from sunbeam.versions import K8S_SERVICES, MACHINE_SERVICES LOG = logging.getLogger(__name__) console = Console() class LatestInChannel(BaseStep, JujuStepHelper): - def __init__(self, jhelper): + def __init__(self, jhelper, manifest): """Upgrade all charms to latest in current channel. 
:jhelper: Helper for interacting with pylibjuju @@ -40,47 +44,73 @@ def __init__(self, jhelper): "In channel upgrade", "Upgrade charms to latest revision in current channel" ) self.jhelper = jhelper - - def get_charm_update(self, applications, model) -> list[str]: - """Return a list applications that need to be refreshed.""" - candidates = [] - _status = run_sync(self.jhelper.get_model_status_full(model)) - status = json.loads(_status.to_json()) - for app_name in applications: - if self.revision_update_needed(app_name, model, status=status): - candidates.append(app_name) - else: - LOG.debug(f"{app_name} already at latest version in current channel") - return candidates - - def get_charm_update_candidates_k8s(self) -> list[str]: - """Return a list of all k8s charms that need to be refreshed.""" - return self.get_charm_update(K8S_SERVICES.keys(), "openstack") - - def get_charm_update_candidates_machine(self) -> list[str]: - """Return a list of all machine charms that need to be refreshed.""" - return self.get_charm_update(MACHINE_SERVICES.keys(), "controller") + self.manifest = manifest def is_skip(self, status: Optional[Status] = None) -> Result: """Step can be skipped if nothing needs refreshing.""" - if ( - self.get_charm_update_candidates_k8s() - or self.get_charm_update_candidates_machine() # noqa - ): - return Result(ResultType.COMPLETED) - else: - return Result(ResultType.SKIPPED) + return Result(ResultType.COMPLETED) + + def is_track_changed_for_any_charm(self, deployed_apps: dict): + """Check if chanel track is same in manifest and deployed app.""" + for app_name, (charm, channel, revision) in deployed_apps.items(): + if not self.manifest.charms.get(charm): + LOG.debug(f"Charm not present in manifest: {charm}") + continue + + channel_from_manifest = self.manifest.charms.get(charm).channel or "" + track_from_manifest = channel_from_manifest.split("/")[0] + track_from_deployed_app = channel.split("/")[0] + # Compare tracks + if track_from_manifest != track_from_deployed_app: + LOG.debug( + "Channel track for app {app_name} different in manifest " + "and actual deployed" + ) + return True + + return False + + def refresh_apps(self, apps: dict, model: str) -> None: + """Refresh apps in the model. + + If the charm has no revision in manifest and channel mentioned in manifest + and the deployed app is same, run juju refresh. + Otherwise ignore so that terraform plan apply will take care of charm upgrade. + """ + for app_name, (charm, channel, revision) in apps.items(): + manifest_charm = self.manifest.charms.get(charm) + if not manifest_charm: + continue + + if not manifest_charm.revision and manifest_charm.channel == channel: + app = run_sync(self.jhelper.get_application(app_name, model)) + LOG.debug(f"Running refresh for app {app_name}") + # refresh() checks for any new revision and updates if available + run_sync(app.refresh()) def run(self, status: Optional[Status] = None) -> Result: - """Refresh all charms identified as needing a refresh.""" - for app_name in self.get_charm_update_candidates_k8s(): - LOG.debug(f"Refreshing {app_name}") - app = run_sync(self.jhelper.get_application(app_name, "openstack")) - run_sync(app.refresh()) - for app_name in self.get_charm_update_candidates_machine(): - LOG.debug(f"Refreshing {app_name}") - app = run_sync(self.jhelper.get_application(app_name, "controller")) - run_sync(app.refresh()) + """Refresh all charms identified as needing a refresh. + + If the manifest has charm channel and revision, terraform apply should update + the charms. 
+ If the manifest has only charm, then juju refresh is required if channel is + same as deployed charm, otherwise juju upgrade charm. + """ + deployed_k8s_apps = self.get_charm_deployed_versions("openstack") + deployed_machine_apps = self.get_charm_deployed_versions("controller") + + all_deployed_apps = deployed_k8s_apps.copy() + all_deployed_apps.update(deployed_machine_apps) + LOG.debug(f"Al deployed apps: {all_deployed_apps}") + if self.is_track_changed_for_any_charm(all_deployed_apps): + error_msg = ( + "Manifest has track values that require upgrades, rerun with " + "option --upgrade-release for release upgrades." + ) + return Result(ResultType.FAILED, error_msg) + + self.refresh_apps(deployed_k8s_apps, "openstack") + self.refresh_apps(deployed_machine_apps, "controller") return Result(ResultType.COMPLETED) @@ -89,7 +119,25 @@ class LatestInChannelCoordinator(UpgradeCoordinator): def get_plan(self) -> list[BaseStep]: return [ - LatestInChannel(self.jhelper), + LatestInChannel(self.jhelper, self.manifest), + TerraformInitStep(self.manifest.get_tfhelper("openstack-plan")), + ReapplyOpenStackTerraformPlanStep(self.client, self.manifest, self.jhelper), + TerraformInitStep(self.manifest.get_tfhelper("sunbeam-machine-plan")), + DeploySunbeamMachineApplicationStep( + self.client, self.manifest, self.jhelper, refresh=True + ), + TerraformInitStep(self.manifest.get_tfhelper("microk8s-plan")), + DeployMicrok8sApplicationStep( + self.client, self.manifest, self.jhelper, refresh=True + ), + TerraformInitStep(self.manifest.get_tfhelper("microceph-plan")), + DeployMicrocephApplicationStep( + self.client, self.manifest, self.jhelper, refresh=True + ), + TerraformInitStep(self.manifest.get_tfhelper("hypervisor-plan")), + ReapplyHypervisorTerraformPlanStep( + self.client, self.manifest, self.jhelper + ), UpgradePlugins( self.client, self.jhelper, self.tfhelper, upgrade_release=False ), diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index dd93f78b..7600013b 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -271,15 +271,16 @@ def update_tfvars_and_apply_tf( :param override_tfvars: Terraform vars to override :type override_tfvars: dict """ - tfvars = {} + current_tfvars = {} + updated_tfvars = {} if tfvar_config: try: - tfvars_from_config = read_config(self.client, tfvar_config) + current_tfvars = read_config(self.client, tfvar_config) # Exclude all default tfvar keys from the previous terraform # vars applied to the plan. _tfvar_names = self._get_tfvar_names(tfplan) - tfvars = { - k: v for k, v in tfvars_from_config.items() if k not in _tfvar_names + updated_tfvars = { + k: v for k, v in current_tfvars.items() if k not in _tfvar_names } except ConfigItemNotFoundException: pass @@ -287,15 +288,21 @@ def update_tfvars_and_apply_tf( # NOTE: It is expected for Manifest to contain all previous changes # So override tfvars from configdb to defaults if not specified in # manifest file - tfvars.update(self._get_tfvars(tfplan)) + updated_tfvars.update(self._get_tfvars(tfplan)) + + updated_tfvars.update(override_tfvars) + + # No need to apply plan if there is no change in terraform vars. 
+ if current_tfvars == updated_tfvars: + LOG.debug(f"Not running plan {tfplan} as there is no change in tfvars") + return - tfvars.update(override_tfvars) if tfvar_config: - update_config(self.client, tfvar_config, tfvars) + update_config(self.client, tfvar_config, updated_tfvars) tfhelper = self.get_tfhelper(tfplan) - LOG.debug(f"Writing tfvars {tfvars}") - tfhelper.write_tfvars(tfvars) + tfhelper.write_tfvars(updated_tfvars) + LOG.debug(f"Applying plan {tfplan} with tfvars {updated_tfvars}") tfhelper.apply() def _get_tfvars(self, tfplan: str) -> dict: diff --git a/sunbeam-python/sunbeam/jobs/steps.py b/sunbeam-python/sunbeam/jobs/steps.py index 495e215d..319525a1 100644 --- a/sunbeam-python/sunbeam/jobs/steps.py +++ b/sunbeam-python/sunbeam/jobs/steps.py @@ -50,6 +50,7 @@ def __init__( tfplan: str, banner: str = "", description: str = "", + refresh: bool = False, ): super().__init__(banner, description) self.manifest = manifest @@ -59,6 +60,8 @@ def __init__( self.model = model self.client = client self.tfplan = tfplan + # Set refresh flag to True to redeploy the application + self.refresh = refresh def extra_tfvars(self) -> dict: return {} @@ -72,6 +75,9 @@ def is_skip(self, status: Optional[Status] = None) -> Result: :return: ResultType.SKIPPED if the Step should be skipped, ResultType.COMPLETED or ResultType.FAILED otherwise """ + if self.refresh: + return Result(ResultType.COMPLETED) + try: run_sync(self.jhelper.get_application(self.application, self.model)) except ApplicationNotFoundException: diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index 1447a730..e3520283 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -124,7 +124,7 @@ def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "charms": { - "magnum": {"channel": OPENSTACK_CHANNEL}, + "magnum-k8s": {"channel": OPENSTACK_CHANNEL}, }, "terraform": { self.configure_plan: { @@ -138,7 +138,7 @@ def manifest_attributes_tfvar_map(self) -> dict: return { self.tfplan: { "charms": { - "magnum": { + "magnum-k8s": { "channel": "magnum-channel", "revision": "magnum-revision", "config": "magnum-config", diff --git a/sunbeam-python/sunbeam/plugins/dns/plugin.py b/sunbeam-python/sunbeam/plugins/dns/plugin.py index 406a802c..13eff6cb 100644 --- a/sunbeam-python/sunbeam/plugins/dns/plugin.py +++ b/sunbeam-python/sunbeam/plugins/dns/plugin.py @@ -58,8 +58,8 @@ def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "charms": { - "designate": {"channel": OPENSTACK_CHANNEL}, - "bind": {"channel": BIND_CHANNEL}, + "designate-k8s": {"channel": OPENSTACK_CHANNEL}, + "designate-bind-k8s": {"channel": BIND_CHANNEL}, } } @@ -68,12 +68,12 @@ def manifest_attributes_tfvar_map(self) -> dict: return { self.tfplan: { "charms": { - "designate": { + "designate-k8s": { "channel": "designate-channel", "revision": "designate-revision", "config": "designate-config", }, - "bind": { + "designate-bind-k8s": { "channel": "bind-channel", "revision": "bind-revision", "config": "bind-config", diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/base.py b/sunbeam-python/sunbeam/plugins/interface/v1/base.py index 58642ff7..57ad1a7a 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/base.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/base.py @@ -198,7 +198,7 @@ def manifest_defaults(self) -> dict: Sample manifest: { "charms": { - "heat": { + "heat-k8s": { "channel": <>. 
"revision": <>, "config": <>, @@ -233,7 +233,7 @@ def manifest_attributes_tfvar_map(self) -> dict: { : { "charms": { - "heat": { + "heat-k8s": { "channel": , "revision": , "config": , diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py index 50a45a1f..f00c15ee 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py @@ -298,6 +298,20 @@ def upgrade_hook(self, upgrade_release: bool = False): :param upgrade_release: Whether to upgrade release """ + # For Intra channel upgrade, if the plan is openstack-plan, + # upgrade already applied at core and not required at plugin + # level + if ( + not upgrade_release + and self.tf_plan_location # noqa W503 + == TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO # noqa: W503 + ): + LOG.debug( + f"Ignore upgrade_hook for plugin {self.name}, the corresponding apps" + f" will be refreshed as part of Control plane refresh" + ) + return + data_location = self.snap.paths.user_data jhelper = JujuHelper(self.client, data_location) plan = [ diff --git a/sunbeam-python/sunbeam/plugins/ldap/plugin.py b/sunbeam-python/sunbeam/plugins/ldap/plugin.py index 938c2c0f..bfbea0c2 100644 --- a/sunbeam-python/sunbeam/plugins/ldap/plugin.py +++ b/sunbeam-python/sunbeam/plugins/ldap/plugin.py @@ -264,14 +264,14 @@ def __init__(self, client: Client) -> None: def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" - return {"charms": {"keystone-ldap": {"channel": OPENSTACK_CHANNEL}}} + return {"charms": {"keystone-ldap-k8s": {"channel": OPENSTACK_CHANNEL}}} def manifest_attributes_tfvar_map(self) -> dict: """Manifest attributes terraformvars map.""" return { self.tfplan: { "charms": { - "keystone-ldap": { + "keystone-ldap-k8s": { "channel": "ldap-channel", "revision": "ldap-revision", } diff --git a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py index 6f2827bc..5da5eaff 100644 --- a/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py +++ b/sunbeam-python/sunbeam/plugins/loadbalancer/plugin.py @@ -40,14 +40,14 @@ def __init__(self, client: Client) -> None: def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" - return {"charms": {"octavia": {"channel": OPENSTACK_CHANNEL}}} + return {"charms": {"octavia-k8s": {"channel": OPENSTACK_CHANNEL}}} def manifest_attributes_tfvar_map(self) -> dict: """Manifest attributes terraformvars map.""" return { self.tfplan: { "charms": { - "octavia": { + "octavia-k8s": { "channel": "octavia-channel", "revision": "octavia-revision", "config": "octavia-config", diff --git a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py index 153a2769..a0e337f4 100644 --- a/sunbeam-python/sunbeam/plugins/orchestration/plugin.py +++ b/sunbeam-python/sunbeam/plugins/orchestration/plugin.py @@ -40,14 +40,14 @@ def __init__(self, client: Client) -> None: def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" - return {"charms": {"heat": {"channel": OPENSTACK_CHANNEL}}} + return {"charms": {"heat-k8s": {"channel": OPENSTACK_CHANNEL}}} def manifest_attributes_tfvar_map(self) -> dict: """Manifest attributes terraformvars map.""" return { self.tfplan: { "charms": { - "heat": { + "heat-k8s": { "channel": "heat-channel", "revision": "heat-revision", "config": "heat-config", diff --git 
a/sunbeam-python/sunbeam/plugins/secrets/plugin.py b/sunbeam-python/sunbeam/plugins/secrets/plugin.py index b2322dd1..79906321 100644 --- a/sunbeam-python/sunbeam/plugins/secrets/plugin.py +++ b/sunbeam-python/sunbeam/plugins/secrets/plugin.py @@ -39,14 +39,14 @@ def __init__(self, client: Client) -> None: def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" - return {"charms": {"barbican": {"channel": OPENSTACK_CHANNEL}}} + return {"charms": {"barbican-k8s": {"channel": OPENSTACK_CHANNEL}}} def manifest_attributes_tfvar_map(self) -> dict: """Manifest attributes terraformvars map.""" return { self.tfplan: { "charms": { - "barbican": { + "barbican-k8s": { "channel": "barbican-channel", "revision": "barbican-revision", "config": "barbican-config", diff --git a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py index 4c8b1b58..bb2cc09a 100644 --- a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py +++ b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py @@ -50,9 +50,9 @@ def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { "charms": { - "aodh": {"channel": OPENSTACK_CHANNEL}, - "gnocchi": {"channel": OPENSTACK_CHANNEL}, - "ceilometer": {"channel": OPENSTACK_CHANNEL}, + "aodh-k8s": {"channel": OPENSTACK_CHANNEL}, + "gnocchi-k8s": {"channel": OPENSTACK_CHANNEL}, + "ceilometer-k8s": {"channel": OPENSTACK_CHANNEL}, } } @@ -61,17 +61,17 @@ def manifest_attributes_tfvar_map(self) -> dict: return { self.tfplan: { "charms": { - "aodh": { + "aodh-k8s": { "channel": "aodh-channel", "revision": "aodh-revision", "config": "aodh-config", }, - "gnocchi": { + "gnocchi-k8s": { "channel": "gnocchi-channel", "revision": "gnocchi-revision", "config": "gnocchi-config", }, - "ceilometer": { + "ceilometer-k8s": { "channel": "ceilometer-channel", "revision": "ceilometer-revision", "config": "ceilometer-config", diff --git a/sunbeam-python/sunbeam/plugins/vault/plugin.py b/sunbeam-python/sunbeam/plugins/vault/plugin.py index b66ceeaf..fc156a27 100644 --- a/sunbeam-python/sunbeam/plugins/vault/plugin.py +++ b/sunbeam-python/sunbeam/plugins/vault/plugin.py @@ -46,14 +46,14 @@ def __init__(self, client: Client) -> None: def manifest_defaults(self) -> dict: """Manifest pluing part in dict format.""" - return {"charms": {"vault": {"channel": VAULT_CHANNEL}}} + return {"charms": {"vault-k8s": {"channel": VAULT_CHANNEL}}} def manifest_attributes_tfvar_map(self) -> dict: """Manifest attrbitues to terraformvars map.""" return { self.tfplan: { "charms": { - "vault": { + "vault-k8s": { "channel": "vault-channel", "revision": "vault-revision", "config": "vault-config", diff --git a/sunbeam-python/sunbeam/versions.py b/sunbeam-python/sunbeam/versions.py index 92f6f34b..be9916c8 100644 --- a/sunbeam-python/sunbeam/versions.py +++ b/sunbeam-python/sunbeam/versions.py @@ -85,14 +85,46 @@ # but per charm. So all *-mysql-router wont be included # and instead only mysql-router is included. Same is the # case of traefik charm. 
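+# Charm name -> default channel maps; keys are the full charm names (e.g. keystone-k8s).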
+OPENSTACK_CHARMS_K8S = { + "cinder-ceph-k8s": OPENSTACK_CHANNEL, + "cinder-k8s": OPENSTACK_CHANNEL, + "glance-k8s": OPENSTACK_CHANNEL, + "horizon-k8s": OPENSTACK_CHANNEL, + "keystone-k8s": OPENSTACK_CHANNEL, + "neutron-k8s": OPENSTACK_CHANNEL, + "nova-k8s": OPENSTACK_CHANNEL, + "placement-k8s": OPENSTACK_CHANNEL, +} +OVN_CHARMS_K8S = { + "ovn-central-k8s": OVN_CHANNEL, + "ovn-relay-k8s": OVN_CHANNEL, +} +MYSQL_CHARMS_K8S = { + "mysql-k8s": MYSQL_CHANNEL, + "mysql-router-k8s": MYSQL_CHANNEL, +} +MISC_CHARMS_K8S = { + "self-signed-certificates": CERT_AUTH_CHANNEL, + "rabbitmq-k8s": RABBITMQ_CHANNEL, + "traefik-k8s": TRAEFIK_CHANNEL, +} +MACHINE_CHARMS = { + "microceph": MICROCEPH_CHANNEL, + "microk8s": MICROK8S_CHANNEL, + "openstack-hypervisor": OPENSTACK_CHANNEL, + "sunbeam-machine": SUNBEAM_MACHINE_CHANNEL, +} + + +K8S_CHARMS = {} +K8S_CHARMS |= OPENSTACK_CHARMS_K8S +K8S_CHARMS |= OVN_CHARMS_K8S +K8S_CHARMS |= MYSQL_CHARMS_K8S +K8S_CHARMS |= MISC_CHARMS_K8S + MANIFEST_CHARM_VERSIONS = {} -MANIFEST_CHARM_VERSIONS |= OPENSTACK_SERVICES_K8S -MANIFEST_CHARM_VERSIONS |= OVN_SERVICES_K8S -MANIFEST_CHARM_VERSIONS |= MYSQL_SERVICES_K8S -MANIFEST_CHARM_VERSIONS |= MISC_SERVICES_K8S -MANIFEST_CHARM_VERSIONS |= MACHINE_SERVICES -MANIFEST_CHARM_VERSIONS |= {"mysql-router": MYSQL_CHANNEL} -MANIFEST_CHARM_VERSIONS.pop("traefik-public") +MANIFEST_CHARM_VERSIONS |= K8S_CHARMS +MANIFEST_CHARM_VERSIONS |= MACHINE_CHARMS # : @@ -131,7 +163,7 @@ { "openstack-plan": { "charms": { - "keystone": { + "keystone-k8s": { "channel": "keystone-channel", "revision": "keystone-revision", "config": "keystone-config" @@ -155,26 +187,20 @@ } } """ -K8S_CHARMS = {} -K8S_CHARMS |= OPENSTACK_SERVICES_K8S -K8S_CHARMS |= OVN_SERVICES_K8S -K8S_CHARMS |= MYSQL_SERVICES_K8S -K8S_CHARMS |= MISC_SERVICES_K8S DEPLOY_OPENSTACK_TFVAR_MAP = { "charms": { - svc: { - "channel": f"{svc}-channel", - "revision": f"{svc}-revision", - "config": f"{svc}-config", + charm: { + "channel": f"{charm.removesuffix('-k8s')}-channel", + "revision": f"{charm.removesuffix('-k8s')}-revision", + "config": f"{charm.removesuffix('-k8s')}-config", } - for svc, channel in K8S_CHARMS.items() + for charm, channel in K8S_CHARMS.items() } } -DEPLOY_OPENSTACK_TFVAR_MAP.get("charms").pop("traefik-public") -DEPLOY_OPENSTACK_TFVAR_MAP["charms"]["mysql-router"] = { - "channel": "mysql-router-channel", - "revision": "mysql-router-revision", - "config": "mysql-router-config", +DEPLOY_OPENSTACK_TFVAR_MAP["charms"]["self-signed-certificates"] = { + "channel": "certificate-authority-channel", + "revision": "certificate-authority-revision", + "config": "certificate-authority-config", } DEPLOY_MICROK8S_TFVAR_MAP = { diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py index 413cc89e..5fde8be7 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py @@ -23,6 +23,7 @@ METALLB_ANNOTATION, DeployControlPlaneStep, PatchLoadBalancerServicesStep, + ReapplyOpenStackTerraformPlanStep, ResizeControlPlaneStep, compute_ceph_replica_scale, compute_ha_scale, @@ -421,3 +422,72 @@ def test_compute_ingress_scale(topology, control_nodes, scale): ) def test_compute_ceph_replica_scale(topology, storage_nodes, scale): assert compute_ceph_replica_scale(topology, storage_nodes) == scale + + +class TestReapplyOpenStackTerraformPlanStep(unittest.TestCase): + def __init__(self, methodName: str = "runTest") -> None: + 
super().__init__(methodName) + self.read_config = patch( + "sunbeam.commands.openstack.read_config", + Mock(return_value={"topology": "single", "database": "single"}), + ) + + def setUp(self): + self.client = Mock( + cluster=Mock(list_nodes_by_role=Mock(return_value=[1, 2, 3, 4])) + ) + self.read_config.start() + self.jhelper = AsyncMock() + self.manifest = Mock() + + def tearDown(self): + self.read_config.stop() + + def test_run(self): + step = ReapplyOpenStackTerraformPlanStep( + self.client, self.manifest, self.jhelper + ) + result = step.run() + + self.manifest.update_tfvars_and_apply_tf.assert_called_once() + assert result.result_type == ResultType.COMPLETED + + def test_run_tf_apply_failed(self): + self.manifest.update_tfvars_and_apply_tf.side_effect = TerraformException( + "apply failed..." + ) + + step = ReapplyOpenStackTerraformPlanStep( + self.client, self.manifest, self.jhelper + ) + result = step.run() + + self.manifest.update_tfvars_and_apply_tf.assert_called_once() + assert result.result_type == ResultType.FAILED + assert result.message == "apply failed..." + + def test_run_waiting_timed_out(self): + self.jhelper.wait_until_active.side_effect = TimeoutException("timed out") + + step = ReapplyOpenStackTerraformPlanStep( + self.client, self.manifest, self.jhelper + ) + result = step.run() + + self.jhelper.wait_until_active.assert_called_once() + assert result.result_type == ResultType.FAILED + assert result.message == "timed out" + + def test_run_unit_in_error_state(self): + self.jhelper.wait_until_active.side_effect = JujuWaitException( + "Unit in error: placement/0" + ) + + step = ReapplyOpenStackTerraformPlanStep( + self.client, self.manifest, self.jhelper + ) + result = step.run() + + self.jhelper.wait_until_active.assert_called_once() + assert result.result_type == ResultType.FAILED + assert result.message == "Unit in error: placement/0" diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py index 3e9a3bd6..90d050b0 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py @@ -31,12 +31,12 @@ bootstrap_args: - --agent-version=3.2.4 charms: - keystone: + keystone-k8s: channel: 2023.1/stable revision: 234 config: debug: True - glance: + glance-k8s: channel: 2023.1/stable revision: 134 terraform: @@ -48,7 +48,7 @@ malformed_test_manifest = """ charms: - keystone: + keystone-k8s: channel: 2023.1/stable revision: 234 conf @@ -56,7 +56,7 @@ test_manifest_invalid_values = """ charms: - keystone: + keystone-k8s: channel: 2023.1/stable revision: 234 # Config value should be dictionary but provided str @@ -65,7 +65,7 @@ test_manifest_incorrect_terraform_key = """ charms: - keystone: + keystone-k8s: channel: 2023.1/stable revision: 234 config: @@ -111,7 +111,7 @@ def test_load(self, mocker, snap, cclient, pluginmanager, tmpdir): manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") manifest_file.write(test_manifest) manifest_obj = manifest.Manifest.load(cclient, manifest_file) - ks_manifest = manifest_obj.charms.get("keystone") + ks_manifest = manifest_obj.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} @@ -133,13 +133,13 @@ def test_load_on_default(self, mocker, snap, cclient, pluginmanager, tmpdir): ) # Check updates from manifest file - ks_manifest = manifest_obj.charms.get("keystone") + ks_manifest = 
manifest_obj.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} # Check default ones - nova_manifest = manifest_obj.charms.get("nova") + nova_manifest = manifest_obj.charms.get("nova-k8s") assert nova_manifest.channel == OPENSTACK_CHANNEL assert nova_manifest.revision is None assert nova_manifest.config is None @@ -148,13 +148,13 @@ def test_load_latest_from_clusterdb(self, mocker, snap, cclient, pluginmanager): mocker.patch.object(manifest, "Snap", return_value=snap) cclient.cluster.get_latest_manifest.return_value = {"data": test_manifest} manifest_obj = manifest.Manifest.load_latest_from_clusterdb(cclient) - ks_manifest = manifest_obj.charms.get("keystone") + ks_manifest = manifest_obj.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} # Assert defaults does not exist - assert "nova" not in manifest_obj.charms.keys() + assert "nova-k8s" not in manifest_obj.charms.keys() def test_load_latest_from_clusterdb_on_default( self, mocker, snap, cclient, pluginmanager @@ -164,13 +164,13 @@ def test_load_latest_from_clusterdb_on_default( manifest_obj = manifest.Manifest.load_latest_from_clusterdb( cclient, include_defaults=True ) - ks_manifest = manifest_obj.charms.get("keystone") + ks_manifest = manifest_obj.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} # Check default ones - nova_manifest = manifest_obj.charms.get("nova") + nova_manifest = manifest_obj.charms.get("nova-k8s") assert nova_manifest.channel == OPENSTACK_CHANNEL assert nova_manifest.revision is None assert nova_manifest.config is None @@ -178,7 +178,7 @@ def test_load_latest_from_clusterdb_on_default( def test_get_default_manifest(self, mocker, snap, cclient, pluginmanager): mocker.patch.object(manifest, "Snap", return_value=snap) default_manifest = manifest.Manifest.get_default_manifest(cclient) - nova_manifest = default_manifest.charms.get("nova") + nova_manifest = default_manifest.charms.get("nova-k8s") assert nova_manifest.channel == OPENSTACK_CHANNEL assert nova_manifest.revision is None assert nova_manifest.config is None diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py index 08d56411..16113afd 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_steps.py @@ -88,6 +88,22 @@ def test_is_skip_application_already_deployed(self, cclient, jhelper): jhelper.get_application.assert_called_once() assert result.result_type == ResultType.SKIPPED + def test_is_skip_application_refresh(self, cclient, jhelper): + step = DeployMachineApplicationStep( + cclient, + manifest, + jhelper, + "tfconfig", + "app1", + "model1", + "fake-plan", + refresh=True, + ) + result = step.is_skip() + + jhelper.get_application.assert_not_called() + assert result.result_type == ResultType.COMPLETED + def test_run_pristine_installation(self, cclient, jhelper, manifest): jhelper.get_application.side_effect = ApplicationNotFoundException("not found") From 9dfdd08460afc93c5c3156e63ba238af31bbc6d6 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Tue, 30 Jan 2024 12:21:46 +0530 Subject: [PATCH 16/27] [manifest] Support for upgrade-release Use manifest for Upgrade releases. 
For control plane, upgrade charms in following order - mysql charms, openstack core charms, openstack plugins. In each upgrade, only update the terraform variables corresponding to charms and apply the terraform plan. Followed similar approach for machine charms as well. Split machine charms to individual ones as each machine charm may have pre and post steps during upgrade. Upgrade plugins using terraform plan openstack-plan as part of control plane upgrade. --- sunbeam-python/sunbeam/commands/juju.py | 13 + sunbeam-python/sunbeam/commands/node.py | 8 +- .../sunbeam/commands/upgrades/base.py | 10 - .../commands/upgrades/inter_channel.py | 480 ++++++++++-------- .../commands/upgrades/intra_channel.py | 6 +- sunbeam-python/sunbeam/jobs/juju.py | 87 +++- sunbeam-python/sunbeam/jobs/manifest.py | 72 ++- sunbeam-python/sunbeam/jobs/plugin.py | 14 + .../sunbeam/plugins/interface/v1/openstack.py | 111 ++-- sunbeam-python/sunbeam/versions.py | 60 +-- .../sunbeam/commands/upgrades/test_base.py | 138 +++-- .../unit/sunbeam/plugins/test_openstack.py | 82 +++ 12 files changed, 646 insertions(+), 435 deletions(-) diff --git a/sunbeam-python/sunbeam/commands/juju.py b/sunbeam-python/sunbeam/commands/juju.py index dddb63cb..6deeddfd 100644 --- a/sunbeam-python/sunbeam/commands/juju.py +++ b/sunbeam-python/sunbeam/commands/juju.py @@ -221,6 +221,19 @@ def get_charm_deployed_versions(self, model: str) -> dict: return apps + def get_apps_filter_by_charms(self, model: str, charms: list) -> list: + """Return apps filtered by given charms. + + Get all apps from the model and return only the apps deployed with + charms in the provided list. + """ + deployed_all_apps = self.get_charm_deployed_versions(model) + return [ + app_name + for app_name, (charm, channel, revision) in deployed_all_apps.items() + if charm in charms + ] + def normalise_channel(self, channel: str) -> str: """Expand channel if it is using abbreviation. diff --git a/sunbeam-python/sunbeam/commands/node.py b/sunbeam-python/sunbeam/commands/node.py index e6caa4f9..f0e4fe7b 100644 --- a/sunbeam-python/sunbeam/commands/node.py +++ b/sunbeam-python/sunbeam/commands/node.py @@ -300,9 +300,11 @@ def list(ctx: click.Context, format: str) -> None: for name, node in nodes.items(): table.add_row( name, - "[green]up[/green]" - if node.get("status") == "ONLINE" - else "[red]down[/red]", + ( + "[green]up[/green]" + if node.get("status") == "ONLINE" + else "[red]down[/red]" + ), "x" if "control" in node.get("roles", []) else "", "x" if "compute" in node.get("roles", []) else "", "x" if "storage" in node.get("roles", []) else "", diff --git a/sunbeam-python/sunbeam/commands/upgrades/base.py b/sunbeam-python/sunbeam/commands/upgrades/base.py index c035db9f..4b8dd770 100644 --- a/sunbeam-python/sunbeam/commands/upgrades/base.py +++ b/sunbeam-python/sunbeam/commands/upgrades/base.py @@ -20,7 +20,6 @@ from rich.status import Status from sunbeam.clusterd.client import Client -from sunbeam.commands.terraform import TerraformHelper from sunbeam.jobs.common import BaseStep, Result, ResultType, run_plan from sunbeam.jobs.juju import JujuHelper from sunbeam.jobs.manifest import Manifest @@ -34,21 +33,15 @@ class UpgradePlugins(BaseStep): def __init__( self, client: Client, - jhelper: JujuHelper, - tfhelper: TerraformHelper, upgrade_release: bool = False, ): """Upgrade plugins. 
:client: Helper for interacting with clusterd - :jhelper: Helper for interacting with pylibjuju - :tfhelper: Helper for interaction with Terraform :upgrade_release: Whether to upgrade channel """ super().__init__("Validation", "Running pre-upgrade validation") self.client = client - self.jhelper = jhelper - self.tfhelper = tfhelper self.upgrade_release = upgrade_release def run(self, status: Optional[Status] = None) -> Result: @@ -64,7 +57,6 @@ def __init__( client: Client, jhelper: JujuHelper, manifest: Manifest, - channel: str | None = None, ): """Upgrade coordinator. @@ -73,10 +65,8 @@ def __init__( :client: Helper for interacting with clusterd :jhelper: Helper for interacting with pylibjuju :manifest: Manifest object - :channel: OpenStack channel to upgrade charms to """ self.client = client - self.channel = channel self.jhelper = jhelper self.manifest = manifest self.tfhelper = self.manifest.get_tfhelper("openstack-plan") diff --git a/sunbeam-python/sunbeam/commands/upgrades/inter_channel.py b/sunbeam-python/sunbeam/commands/upgrades/inter_channel.py index 6d7ec6e5..a740d307 100644 --- a/sunbeam-python/sunbeam/commands/upgrades/inter_channel.py +++ b/sunbeam-python/sunbeam/commands/upgrades/inter_channel.py @@ -14,59 +14,42 @@ # limitations under the License. import logging -from typing import Callable, Dict, List, Optional, TypedDict, Union +from typing import List, Optional from rich.console import Console from rich.status import Status from sunbeam.clusterd.client import Client +from sunbeam.commands.hypervisor import CONFIG_KEY as HYPERVISOR_CONFIG_KEY from sunbeam.commands.juju import JujuStepHelper -from sunbeam.commands.terraform import TerraformHelper +from sunbeam.commands.microceph import CONFIG_KEY as MICROCEPH_CONFIG_KEY +from sunbeam.commands.microk8s import MICROK8S_CONFIG_KEY +from sunbeam.commands.openstack import CONFIG_KEY as OPENSTACK_CONFIG_KEY +from sunbeam.commands.openstack import OPENSTACK_DEPLOY_TIMEOUT +from sunbeam.commands.sunbeam_machine import CONFIG_KEY as SUNBEAM_MACHINE_CONFIG_KEY +from sunbeam.commands.terraform import TerraformException from sunbeam.commands.upgrades.base import UpgradeCoordinator, UpgradePlugins -from sunbeam.commands.upgrades.intra_channel import LatestInChannel from sunbeam.jobs.common import ( BaseStep, Result, ResultType, - read_config, run_plan, - update_config, + update_status_background, ) -from sunbeam.jobs.juju import ChannelUpdate, JujuHelper, run_sync +from sunbeam.jobs.juju import JujuHelper, JujuWaitException, TimeoutException, run_sync +from sunbeam.jobs.manifest import Manifest +from sunbeam.jobs.plugin import PluginManager from sunbeam.versions import ( - CHARM_VERSIONS, - MACHINE_SERVICES, - MISC_SERVICES_K8S, - OPENSTACK_SERVICES_K8S, - OVN_SERVICES_K8S, + MISC_CHARMS_K8S, + MYSQL_CHARMS_K8S, + OPENSTACK_CHARMS_K8S, + OVN_CHARMS_K8S, ) LOG = logging.getLogger(__name__) console = Console() -class UpgradeStrategy(TypedDict): - """A strategy for upgrading applications. - - The strategy is a list of dicts. Each dict consists of: - { - "upgrade_f": f, - "applications": [apps] - }, - - upgrade_f is the function to be applied to each application (in - parallel) to perform the upgrade. - - applications is a list of applications that can be upgraded in parallel. - - Currently only apps that are upgraded with the same function can be - grouped together. 
- """ - - upgrade_f: Callable[[list[str], str], None] - applications: list[str] - - class BaseUpgrade(BaseStep, JujuStepHelper): def __init__( self, @@ -74,249 +57,345 @@ def __init__( description: str, client: Client, jhelper: JujuHelper, - tfhelper: TerraformHelper, + manifest: Manifest, model: str, ): """Create instance of BaseUpgrade class. :client: Client for interacting with clusterd :jhelper: Helper for interacting with pylibjuju - :tfhelper: Helper for interaction with Terraform + :manifest: Manifest object :model: Name of model containing charms. """ super().__init__(name, description) self.client = client self.jhelper = jhelper - self.tfhelper = tfhelper + self.manifest = manifest self.model = model - def get_upgrade_strategy_steps(self) -> UpgradeStrategy: - """Return a strategy for performing the upgrade.""" - - raise NotImplementedError - def run(self, status: Optional[Status] = None) -> Result: """Run control plane and machine charm upgrade.""" - self.pre_upgrade_tasks() - for step in self.get_upgrade_strategy_steps(): - step["upgrade_f"](step["applications"], self.model) - self.post_upgrade_tasks() - return Result(ResultType.COMPLETED) + result = self.pre_upgrade_tasks(status) + if result.result_type == ResultType.FAILED: + return result + + self.upgrade_tasks(status) + if result.result_type == ResultType.FAILED: + return result - def pre_upgrade_tasks(self) -> None: + result = self.post_upgrade_tasks(status) + return result + + def pre_upgrade_tasks(self, status: Optional[Status] = None) -> Result: """Tasks to run before the upgrade.""" - return + return Result(ResultType.COMPLETED) - def post_upgrade_tasks(self) -> None: + def post_upgrade_tasks(self, status: Optional[Status] = None) -> Result: """Tasks to run after the upgrade.""" - return + return Result(ResultType.COMPLETED) def upgrade_applications( self, - application_list: List[str], + apps: List[str], + charms: List[str], model: str, - expect_wls: Optional[Dict[str, list[str]]] = None, - ) -> None: + tfplan: str, + config: str, + timeout: int, + status: Optional[Status] = None, + ) -> Result: """Upgrade applications. - :param application_list: List of applications to be upgraded + :param apps: List of applications to be upgraded + :param charms: List of charms :param model: Name of model - :param expect_wls: The expected workload status after charm upgrade. 
+ :param tfplan: Name of plan + :param config: Terraform config key used to store config in clusterdb + :param timeout: Timeout to wait for apps in expected status + :param status: Status object to update charm status """ - if not expect_wls: - expect_wls = {"workload": ["blocked", "active"]} - batch = {} - for app_name in application_list: - new_channel = self.get_new_channel(app_name, model) - if new_channel: - LOG.debug(f"Upgrade needed for {app_name}") - batch[app_name] = ChannelUpdate( - channel=new_channel, - expected_status=expect_wls, + expected_wls = ["active", "blocked", "unknown"] + LOG.debug(f"Upgrading applications using terraform plan {tfplan}: {apps}") + try: + self.manifest.update_partial_tfvars_and_apply_tf(charms, tfplan, config) + except TerraformException as e: + LOG.exception("Error upgrading cloud") + return Result(ResultType.FAILED, str(e)) + + task = run_sync(update_status_background(self, apps, status)) + try: + run_sync( + self.jhelper.wait_until_desired_status( + model, + apps, + expected_wls, + timeout=timeout, ) - else: - LOG.debug(f"{app_name} no channel upgrade needed") - run_sync(self.jhelper.update_applications_channel(model, batch)) + ) + except (JujuWaitException, TimeoutException) as e: + LOG.debug(str(e)) + return Result(ResultType.FAILED, str(e)) + finally: + if not task.done(): + task.cancel() - def get_new_channel(self, application_name: str, model: str) -> Union[str, None]: - """Check application to see if an upgrade is needed. + return Result(ResultType.COMPLETED) - Check application to see if an upgrade is needed. A 'None' - returned indicates no upgrade is needed. - :param application_name: Name of application - :param model: Model application is in +class UpgradeControlPlane(BaseUpgrade): + def __init__( + self, + client: Client, + jhelper: JujuHelper, + manifest: Manifest, + model: str, + ): + """Create instance of BaseUpgrade class. + + :client: Client for interacting with clusterd + :jhelper: Helper for interacting with pylibjuju + :manifest: Manifest object + :model: Name of model containing charms. 
""" - new_channel = None - current_channel = run_sync( - self.jhelper.get_charm_channel(application_name, model) + super().__init__( + "Upgrade Openstack charms", + "Upgrading Openstack charms", + client, + jhelper, + manifest, + model, ) - new_channel = CHARM_VERSIONS.get(application_name) - if current_channel and new_channel: - if self.channel_update_needed(current_channel, new_channel): - return new_channel - else: - return None - else: - # No current_channel indicates application is missing - return new_channel + self.tfplan = "openstack-plan" + self.config = OPENSTACK_CONFIG_KEY + + def upgrade_tasks(self, status: Optional[Status] = None) -> Result: + # Step 1: Upgrade mysql charms + LOG.debug("Upgrading Mysql charms") + charms = list(MYSQL_CHARMS_K8S.keys()) + apps = self.get_apps_filter_by_charms(self.model, charms) + result = self.upgrade_applications( + apps, charms, self.model, self.tfplan, self.config, 1200, status + ) + if result.result_type == ResultType.FAILED: + return result + + # Step 2: Upgrade all openstack core charms + LOG.debug("Upgrading openstack core charms") + charms = ( + list(MISC_CHARMS_K8S.keys()) + + list(OVN_CHARMS_K8S.keys()) # noqa: W503 + + list(OPENSTACK_CHARMS_K8S.keys()) # noqa: W503 + ) + apps = self.get_apps_filter_by_charms(self.model, charms) + result = self.upgrade_applications( + apps, + charms, + self.model, + self.tfplan, + self.config, + OPENSTACK_DEPLOY_TIMEOUT, + status, + ) + if result.result_type == ResultType.FAILED: + return result + + # Step 3: Upgrade all plugins that uses openstack-plan + LOG.debug("Upgrading openstack plugins that are enabled") + charms = PluginManager().get_all_charms_in_openstack_plan(self.client) + apps = self.get_apps_filter_by_charms(self.model, charms) + result = self.upgrade_applications( + apps, + charms, + self.model, + self.tfplan, + self.config, + OPENSTACK_DEPLOY_TIMEOUT, + status, + ) + return result - def terraform_sync(self, config_key: str, tfvars_delta: dict) -> None: - """Sync the running state back to the Terraform state file. - :param config_key: The config key used to access the data in microcluster - :param tfvars_delta: The delta of changes to be applied to the terraform - vars stored in microcluster. +class UpgradeMachineCharm(BaseUpgrade): + def __init__( + self, + name: str, + description: str, + client: Client, + jhelper: JujuHelper, + manifest: Manifest, + model: str, + charms: list, + tfplan: str, + config: str, + timeout: int, + ): + """Create instance of BaseUpgrade class. + + :jhelper: Helper for interacting with pylibjuju + :manifest: Manifest object + :model: Name of model containing charms. 
+ :charms: List of charms to upgrade + :tfplan: Terraform plan to reapply + :config: Config key used to save tfvars in clusterdb + :timeout: Time to wait for apps to come to desired status """ - tfvars = read_config(self.client, config_key) - tfvars.update(tfvars_delta) - update_config(self.client, config_key, tfvars) - self.tfhelper.write_tfvars(tfvars) - self.tfhelper.sync() + super().__init__( + name, + description, + client, + jhelper, + manifest, + model, + ) + self.charms = charms + self.tfplan = tfplan + self.config = config + self.timeout = timeout + + def upgrade_tasks(self, status: Optional[Status] = None) -> Result: + apps = self.get_apps_filter_by_charms(self.model, self.charms) + result = self.upgrade_applications( + apps, + self.charms, + self.model, + self.tfplan, + self.config, + self.timeout, + status, + ) + return result -class UpgradeControlPlane(BaseUpgrade): + +class UpgradeMicrocephCharm(UpgradeMachineCharm): def __init__( self, client: Client, jhelper: JujuHelper, - tfhelper: TerraformHelper, + manifest: Manifest, model: str, ): - """Create instance of BaseUpgrade class. + """Create instance of UpgradeMicrocephCharm class. - :client: Client for interacting with clusterd + :client: Client to connect to clusterdb :jhelper: Helper for interacting with pylibjuju - :tfhelper: Helper for interaction with Terraform + :manifest: Manifest object :model: Name of model containing charms. """ super().__init__( - "Upgrade K8S charms", - "Upgrade K8S charms channels to align with snap", + "Upgrade Microceph charm", + "Upgrading microceph charm", client, jhelper, - tfhelper, + manifest, model, + ["microceph"], + "microceph-plan", + MICROCEPH_CONFIG_KEY, + 1200, ) - def get_upgrade_strategy_steps(self) -> List[Dict[str, Union[Callable, List]]]: - """Return a strategy for performing the upgrade. - Upgrade all control plane applications in parallel. - """ - upgrade_strategy_steps = [ - UpgradeStrategy( - upgrade_f=self.upgrade_applications, - applications=list(MISC_SERVICES_K8S.keys()) - + list(OVN_SERVICES_K8S.keys()) # noqa - + list(OPENSTACK_SERVICES_K8S.keys()), # noqa - ), - ] - return upgrade_strategy_steps +class UpgradeMicrok8sCharm(UpgradeMachineCharm): + def __init__( + self, + client: Client, + jhelper: JujuHelper, + manifest: Manifest, + model: str, + ): + """Create instance of UpgradeMicrok8sCharm class. - def post_upgrade_tasks(self) -> None: - """Update channels in terraform vars db.""" - tfvars_delta = { - "openstack-channel": run_sync( - self.jhelper.get_charm_channel("keystone", "openstack") - ), - "ovn-channel": run_sync( - self.jhelper.get_charm_channel("ovn-central", "openstack") - ), - "rabbitmq-channel": run_sync( - self.jhelper.get_charm_channel("rabbitmq", "openstack") - ), - "traefik-channel": run_sync( - self.jhelper.get_charm_channel("traefik", "openstack") - ), - } - self.terraform_sync("TerraformVarsOpenstack", tfvars_delta) + :client: Client to connect to clusterdb + :jhelper: Helper for interacting with pylibjuju + :manifest: Manifest object + :model: Name of model containing charms. + """ + super().__init__( + "Upgrade Microk8s charm", + "Upgrading microk8s charm", + client, + jhelper, + manifest, + model, + ["microk8s"], + "microk8s-plan", + MICROK8S_CONFIG_KEY, + 1200, + ) -class UpgradeMachineCharms(BaseUpgrade): +class UpgradeOpenstackHypervisorCharm(UpgradeMachineCharm): def __init__( self, client: Client, jhelper: JujuHelper, - tfhelper: TerraformHelper, + manifest: Manifest, model: str, ): - """Create instance of BaseUpgrade class. 
+ """Create instance of UpgradeOpenstackHypervisorCharm class. + :client: Client to connect to clusterdb :jhelper: Helper for interacting with pylibjuju - :tfhelper: Helper for interaction with Terraform + :manifest: Manifest object :model: Name of model containing charms. """ super().__init__( - "Upgrade Machine charms", - "Upgrade machine charms channels to align with snap", + "Upgrade hypervisor charm", + "Upgrading hypervisor charm", client, jhelper, - tfhelper, + manifest, model, + ["openstack-hypervisor"], + "hypervisor-plan", + HYPERVISOR_CONFIG_KEY, + 1200, ) - def get_upgrade_strategy_steps(self) -> List[Dict[str, Union[Callable, List]]]: - """Return a strategy for performing the upgrade. - Upgrade all machine applications in parallel. +class UpgradeSunbeamMachineCharm(UpgradeMachineCharm): + def __init__( + self, + client: Client, + jhelper: JujuHelper, + manifest: Manifest, + model: str, + ): + """Create instance of UpgradeSunbeamMachineCharm class. + + :client: Client to connect to clusterdb + :jhelper: Helper for interacting with pylibjuju + :manifest: Manifest object + :model: Name of model containing charms. """ - upgrade_strategy_steps = [ - UpgradeStrategy( - upgrade_f=self.upgrade_applications, applications=MACHINE_SERVICES - ), - ] - return upgrade_strategy_steps - - def post_upgrade_tasks(self) -> None: - """Update channels in terraform vars db.""" - self.terraform_sync( - "TerraformVarsMicrocephPlan", - { - "microceph_channel": run_sync( - self.jhelper.get_charm_channel("microceph", "controller") - ) - }, - ) - self.terraform_sync( - "TerraformVarsSunbeamMachine", - { - "charm_channel": run_sync( - self.jhelper.get_charm_channel("sunbeam-machine", "controller") - ) - }, - ) - self.terraform_sync( - "TerraformVarsHypervisor", - { - "charm_channel": run_sync( - self.jhelper.get_charm_channel("openstack-hypervisor", "controller") - ) - }, - ) - self.terraform_sync( - "TerraformVarsMicrok8sAddons", - { - "microk8s_channel": run_sync( - self.jhelper.get_charm_channel("microk8s", "controller") - ) - }, + super().__init__( + "Upgrade sunbeam-machine charm", + "Upgrading sunbeam-machine charm", + client, + jhelper, + manifest, + model, + ["sunbeam-machine"], + "sunbeam-machine-plan", + SUNBEAM_MACHINE_CONFIG_KEY, + 1200, ) class ChannelUpgradeCoordinator(UpgradeCoordinator): - def __init__(self, client: Client, jhelper: JujuHelper, tfhelper: TerraformHelper): + def __init__(self, client: Client, jhelper: JujuHelper, manifest: Manifest): """Upgrade coordinator. Execute plan for conducting an upgrade. :client: Client for interacting with clusterd :jhelper: Helper for interacting with pylibjuju - :tfhelper: Helper for interaction with Terraform + :manifest: Manifest object """ self.client = client self.jhelper = jhelper - self.tfhelper = tfhelper + self.manifest = manifest def get_plan(self) -> list[BaseStep]: """Return the plan for this upgrade. @@ -324,15 +403,21 @@ def get_plan(self) -> list[BaseStep]: Return the steps to complete this upgrade. 
""" plan = [ - ValidationCheck(self.jhelper, self.tfhelper), - LatestInChannel(self.jhelper), - UpgradeControlPlane(self.client, self.jhelper, self.tfhelper, "openstack"), - UpgradeMachineCharms( - self.client, self.jhelper, self.tfhelper, "controller" + ValidationCheck(self.jhelper, self.manifest), + UpgradeControlPlane(self.client, self.jhelper, self.manifest, "openstack"), + UpgradeMicrocephCharm( + self.client, self.jhelper, self.manifest, "controller" + ), + UpgradeMicrok8sCharm( + self.client, self.jhelper, self.manifest, "controller" + ), + UpgradeOpenstackHypervisorCharm( + self.client, self.jhelper, self.manifest, "controller" ), - UpgradePlugins( - self.client, self.jhelper, self.tfhelper, upgrade_release=True + UpgradeSunbeamMachineCharm( + self.client, self.jhelper, self.manifest, "controller" ), + UpgradePlugins(self.client, upgrade_release=True), ] return plan @@ -343,18 +428,17 @@ def run_plan(self) -> None: class ValidationCheck(BaseStep): - def __init__(self, jhelper: JujuHelper, tfhelper: TerraformHelper): + def __init__(self, jhelper: JujuHelper, manifest: Manifest): """Run validation on the deployment. Check whether the requested upgrade is possible. :jhelper: Helper for interacting with pylibjuju - :tfhelper: Helper for interaction with Terraform - :channel: OpenStack channel to upgrade charms to + :manifest: Manifest object """ super().__init__("Validation", "Running pre-upgrade validation") self.jhelper = jhelper - self.tfhelper = tfhelper + self.manifest = manifest def run(self, status: Optional[Status] = None) -> Result: """Run validation check.""" diff --git a/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py b/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py index 055c4a29..1479eebd 100644 --- a/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py +++ b/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py @@ -101,7 +101,7 @@ def run(self, status: Optional[Status] = None) -> Result: all_deployed_apps = deployed_k8s_apps.copy() all_deployed_apps.update(deployed_machine_apps) - LOG.debug(f"Al deployed apps: {all_deployed_apps}") + LOG.debug(f"All deployed apps: {all_deployed_apps}") if self.is_track_changed_for_any_charm(all_deployed_apps): error_msg = ( "Manifest has track values that require upgrades, rerun with " @@ -138,7 +138,5 @@ def get_plan(self) -> list[BaseStep]: ReapplyHypervisorTerraformPlanStep( self.client, self.manifest, self.jhelper ), - UpgradePlugins( - self.client, self.jhelper, self.tfhelper, upgrade_release=False - ), + UpgradePlugins(self.client, upgrade_release=False), ] diff --git a/sunbeam-python/sunbeam/jobs/juju.py b/sunbeam-python/sunbeam/jobs/juju.py index 1c172a14..dc7fe41e 100644 --- a/sunbeam-python/sunbeam/jobs/juju.py +++ b/sunbeam-python/sunbeam/jobs/juju.py @@ -18,7 +18,7 @@ import json import logging from dataclasses import asdict, dataclass -from datetime import datetime +from datetime import datetime, timedelta from functools import wraps from pathlib import Path from typing import Awaitable, Dict, List, Optional, TypedDict, TypeVar, cast @@ -721,6 +721,91 @@ async def wait_until_active( f"Timed out while waiting for model {model!r} to be ready" ) from e + @controller + async def wait_until_desired_status( + self, + model: str, + apps: list, + status: list = ["active"], + timeout: int = 10 * 60, + ) -> None: + """Wait for all agents in model to reach desired status + + :model: Name of the model to wait for readiness + :apps: Applications to check the status for + :status: Desired status list + 
:timeout: Waiting timeout in seconds + """ + check_freq = 0.5 + idle_period = 15 + model_impl = await self.get_model(model) + + timeout = timedelta(seconds=timeout) + idle_period = timedelta(seconds=idle_period) + start_time = datetime.now() + + idle_times = {} + units_ready = set() # The units that are in the desired state + last_log_time = None + log_interval = timedelta(seconds=30) + + try: + while True: + busy = [] + for app_name in apps: + if app_name not in model_impl.applications: + busy.append(app_name + " (missing)") + continue + app = model_impl.applications[app_name] + app_status = await app.get_status() + + for unit in app.units: + need_to_wait_more_for_a_particular_status = ( + unit.workload_status not in status + ) + app_is_in_desired_status = app_status in status + if ( + not need_to_wait_more_for_a_particular_status + and unit.agent_status == "idle" # noqa: W503 + and app_is_in_desired_status # noqa: W503 + ): + units_ready.add(unit.name) + now = datetime.now() + idle_start = idle_times.setdefault(unit.name, now) + + if now - idle_start < idle_period: + busy.append( + f"{unit.name} [{unit.agent_status}] " + f"{unit.workload_status}: " + f"{unit.workload_status_message}" + ) + else: + idle_times.pop(unit.name, None) + busy.append( + f"{unit.name} [{unit.agent_status}] " + f"{unit.workload_status}: " + f"{unit.workload_status_message}" + ) + + if not busy: + break + busy = "\n ".join(busy) + if timeout is not None and datetime.now() - start_time > timeout: + raise TimeoutException( + f"Timed out while waiting for model {model!r} to be ready: " + f"{busy}" + ) + if ( + last_log_time is None + or datetime.now() - last_log_time > log_interval # noqa: W503 + ): + last_log_time = datetime.now() + await asyncio.sleep(check_freq) + except (JujuMachineError, JujuAgentError, JujuUnitError, JujuAppError) as e: + raise JujuWaitException( + f"Error while waiting for model {model!r} to be ready: {str(e)}" + ) from e + @controller async def set_application_config(self, model: str, app: str, config: dict): """Update application configuration diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 7600013b..5497a75d 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -48,9 +48,7 @@ ) LOG = logging.getLogger(__name__) -EMPTY_MANIFEST = """charms: {} -terraform-plans: {} -""" +EMPTY_MANIFEST = {"charms": {}, "terraform": {}} class MissingTerraformInfoException(Exception): @@ -250,6 +248,39 @@ def get_tfhelper(self, tfplan: str) -> TerraformHelper: return self.tf_helpers[tfplan] + def update_partial_tfvars_and_apply_tf( + self, charms: List[str], tfplan: str, tfvar_config: Optional[str] = None + ) -> None: + """Updates tfvars for specific charms and apply the plan.""" + current_tfvars = {} + updated_tfvars = {} + if tfvar_config: + try: + current_tfvars = read_config(self.client, tfvar_config) + # Exclude all default tfvar keys from the previous terraform + # vars applied to the plan. + _tfvar_names = self._get_tfvar_names(tfplan, charms) + updated_tfvars = { + k: v for k, v in current_tfvars.items() if k not in _tfvar_names + } + except ConfigItemNotFoundException: + pass + + updated_tfvars.update(self._get_tfvars(tfplan, charms)) + + # No need to apply plan if there is no change in terraform vars. 
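+        # (Any channel/revision/config change for the selected charms in the
+        # manifest produces a different dict here, so plain equality is enough
+        # to decide whether a re-apply is needed.)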
+ if current_tfvars == updated_tfvars: + LOG.debug(f"Not running plan {tfplan} as there is no change in tfvars") + return + + if tfvar_config: + update_config(self.client, tfvar_config, updated_tfvars) + + tfhelper = self.get_tfhelper(tfplan) + tfhelper.write_tfvars(updated_tfvars) + LOG.debug(f"Applying plan {tfplan} with tfvars {updated_tfvars}") + tfhelper.apply() + def update_tfvars_and_apply_tf( self, tfplan: str, @@ -305,17 +336,24 @@ def update_tfvars_and_apply_tf( LOG.debug(f"Applying plan {tfplan} with tfvars {updated_tfvars}") tfhelper.apply() - def _get_tfvars(self, tfplan: str) -> dict: + def _get_tfvars(self, tfplan: str, charms: Optional[list] = None) -> dict: """Get tfvars from the manifest. MANIFEST_ATTRIBUTES_TFVAR_MAP holds the mapping of Manifest attributes and the terraform variable name. For each terraform variable in MANIFEST_ATTRIBUTES_TFVAR_MAP, get the corresponding value from Manifest and return all terraform variables as dict. + + If charms is passed as input, filter the charms based on the list + provided. """ tfvars = {} charms_tfvar_map = self.tfvar_map.get(tfplan, {}).get("charms", {}) + if charms: + charms_tfvar_map = { + k: v for k, v in charms_tfvar_map.items() if k in charms + } # handle tfvars for charms section for charm, per_charm_tfvar_map in charms_tfvar_map.items(): @@ -329,14 +367,24 @@ def _get_tfvars(self, tfplan: str) -> dict: return tfvars - def _get_tfvar_names(self, tfplan: str) -> list: - return [ - tfvar_name - for charm, per_charm_tfvar_map in self.tfvar_map.get(tfplan, {}) - .get("charms", {}) - .items() - for charm_attribute, tfvar_name in per_charm_tfvar_map.items() - ] + def _get_tfvar_names(self, tfplan: str, charms: Optional[list] = None) -> list: + if charms: + return [ + tfvar_name + for charm, per_charm_tfvar_map in self.tfvar_map.get(tfplan, {}) + .get("charms", {}) + .items() + for charm_attribute, tfvar_name in per_charm_tfvar_map.items() + if charm in charms + ] + else: + return [ + tfvar_name + for charm, per_charm_tfvar_map in self.tfvar_map.get(tfplan, {}) + .get("charms", {}) + .items() + for charm_attribute, tfvar_name in per_charm_tfvar_map.items() + ] class AddManifestStep(BaseStep): diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index a28f3dec..cc87542c 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -282,6 +282,20 @@ def add_manifest_section(cls, client, manifest) -> None: plugin = klass(client) plugin.add_manifest_section(manifest) + @classmethod + def get_all_charms_in_openstack_plan(cls, client: Client) -> list: + charms = [] + plugins = cls.get_all_plugin_classes() + for klass in plugins: + plugin = klass(client) + m_dict = plugin.manifest_attributes_tfvar_map() + charms_from_plugin = list( + m_dict.get("openstack-plan", {}).get("charms", {}).keys() + ) + charms.extend(charms_from_plugin) + + return charms + @classmethod def register( cls, diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py index f00c15ee..39e6c65d 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py @@ -43,15 +43,9 @@ read_config, run_plan, run_preflight_checks, - update_config, -) -from sunbeam.jobs.juju import ( - ChannelUpdate, - JujuHelper, - JujuWaitException, - TimeoutException, - run_sync, + update_status_background, ) +from sunbeam.jobs.juju import JujuHelper, JujuWaitException, 
TimeoutException, run_sync from sunbeam.jobs.manifest import Manifest from sunbeam.plugins.interface.v1.base import EnableDisablePlugin @@ -298,12 +292,11 @@ def upgrade_hook(self, upgrade_release: bool = False): :param upgrade_release: Whether to upgrade release """ - # For Intra channel upgrade, if the plan is openstack-plan, - # upgrade already applied at core and not required at plugin - # level + # Nothig to do if the plan is openstack-plan as it is taken + # care during control plane refresh if ( not upgrade_release - and self.tf_plan_location # noqa W503 + or self.tf_plan_location # noqa W503 == TerraformPlanLocation.SUNBEAM_TERRAFORM_REPO # noqa: W503 ): LOG.debug( @@ -315,13 +308,13 @@ def upgrade_hook(self, upgrade_release: bool = False): data_location = self.snap.paths.user_data jhelper = JujuHelper(self.client, data_location) plan = [ - UpgradeApplicationStep(jhelper, self, upgrade_release), + UpgradeOpenStackApplicationStep(jhelper, self, upgrade_release), ] run_plan(plan, console) -class UpgradeApplicationStep(BaseStep, JujuStepHelper): +class UpgradeOpenStackApplicationStep(BaseStep, JujuStepHelper): def __init__( self, jhelper: JujuHelper, @@ -345,69 +338,41 @@ def __init__( self.tfhelper = self.plugin.manifest.get_tfhelper(self.plugin.tfplan) self.client = self.plugin.client - def terraform_sync(self, config_key: str, tfvars_delta: dict): - """Sync the running state back to the Terraform state file. - - :param config_key: The config key used to access the data in - microcluster - :param tfvars_delta: The delta of changes to be applied to the - terraform vars stored in microcluster. - """ - tfvars = read_config(self.client, config_key) - tfvars.update(tfvars_delta) - update_config(self.client, config_key, tfvars) - self.tfhelper.write_tfvars(tfvars) - self.tfhelper.sync() + def run(self, status: Optional[Status] = None) -> Result: + """Run plugin upgrade.""" + LOG.debug(f"Upgrading plugin {self.plugin.name}") + expected_wls = ["active", "blocked", "unknown"] + tfvar_map = self.plugin.manifest_attributes_tfvar_map() + charms = list(tfvar_map.get(self.plugin.tfplan, {}).get("charms", {}).keys()) + apps = self.get_apps_filter_by_charms(self.model, charms) + config = self.plugin.get_tfvar_config_key() + timeout = self.plugin.set_application_timeout_on_enable() - def upgrade_charms( - self, - application_data: dict[str, dict[str, str]], - model: str, - ): - """Upgrade applications. 
+ try: + self.plugin.manifest.update_partial_tfvars_and_apply_tf( + charms, self.plugin.tfplan, config + ) + except TerraformException as e: + LOG.exception(f"Error upgrading plugin {self.plugin.name}") + return Result(ResultType.FAILED, str(e)) - :param application_data: Mapping of applications to their required channels - :param model: Name of model applications are in - """ - for application_name, config in application_data.items(): - if self.revision_update_needed(application_name, model): - run_sync(self.jhelper.charm_refresh(application_name, model)) - if self.upgrade_release: - batch = {} - expect_wls = {"workload": ["blocked", "active"]} - for application_name, config in application_data.items(): - current_channel = run_sync( - self.jhelper.get_charm_channel(application_name, model) - ) - new_channel = config["channel"] - LOG.debug( - f"new_channel: {new_channel} current_channel: {current_channel}" - ) - if self.channel_update_needed(current_channel, new_channel): - batch[application_name] = ChannelUpdate( - channel=new_channel, - expected_status=expect_wls, - ) - else: - LOG.debug(f"{application_name} no channel upgrade needed") - run_sync(self.jhelper.update_applications_channel(model, batch)) - - def terraform_sync_channel_updates(self, application_data): - for application_name, config in application_data.items(): - if config.get("tfvars_channel_var"): - self.terraform_sync( - self.plugin.get_tfvar_config_key(), - {config["tfvars_channel_var"]: config["channel"]}, + task = run_sync(update_status_background(self, apps, status)) + try: + run_sync( + self.jhelper.wait_until_desired_status( + self.model, + apps, + expected_wls, + timeout=timeout, ) + ) + except (JujuWaitException, TimeoutException) as e: + LOG.debug(str(e)) + return Result(ResultType.FAILED, str(e)) + finally: + if not task.done(): + task.cancel() - def run(self, status: Optional[Status] = None) -> Result: - """Run plugin upgrade.""" - self.upgrade_charms(self.plugin.k8s_application_data, "openstack") - if self.upgrade_release: - self.terraform_sync_channel_updates(self.plugin.k8s_application_data) - self.upgrade_charms(self.plugin.machine_application_data, "controller") - if self.upgrade_release: - self.terraform_sync_channel_updates(self.plugin.machine_application_data) return Result(ResultType.COMPLETED) diff --git a/sunbeam-python/sunbeam/versions.py b/sunbeam-python/sunbeam/versions.py index be9916c8..a5b12aa7 100644 --- a/sunbeam-python/sunbeam/versions.py +++ b/sunbeam-python/sunbeam/versions.py @@ -26,65 +26,7 @@ BIND_CHANNEL = "9/edge" VAULT_CHANNEL = "latest/edge" -# The lists of services are needed for switching charm channels outside -# of the terraform provider. If it ok to upgrade in one big-bang and -# the juju terraform provider supports it then the upgrades can be -# done by simply updating the tfvars and these lists are not needed. 
-OPENSTACK_SERVICES_K8S = { - "cinder-ceph": OPENSTACK_CHANNEL, - "cinder": OPENSTACK_CHANNEL, - "glance": OPENSTACK_CHANNEL, - "horizon": OPENSTACK_CHANNEL, - "keystone": OPENSTACK_CHANNEL, - "neutron": OPENSTACK_CHANNEL, - "nova": OPENSTACK_CHANNEL, - "placement": OPENSTACK_CHANNEL, -} -OVN_SERVICES_K8S = { - "ovn-central": OVN_CHANNEL, - "ovn-relay": OVN_CHANNEL, -} -MYSQL_SERVICES_K8S = {"mysql": MYSQL_CHANNEL} -MYSQL_ROUTER_SERVICES_K8S = { - "cinder-ceph-mysql-router": MYSQL_CHANNEL, - "cinder-mysql-router": MYSQL_CHANNEL, - "glance-mysql-router": MYSQL_CHANNEL, - "horizon-mysql-router": MYSQL_CHANNEL, - "keystone-mysql-router": MYSQL_CHANNEL, - "neutron-mysql-router": MYSQL_CHANNEL, - "nova-api-mysql-router": MYSQL_CHANNEL, - "nova-cell-mysql-router": MYSQL_CHANNEL, - "nova-mysql-router": MYSQL_CHANNEL, - "placement-mysql-router": MYSQL_CHANNEL, -} -MISC_SERVICES_K8S = { - "certificate-authority": CERT_AUTH_CHANNEL, - "rabbitmq": RABBITMQ_CHANNEL, - "traefik": TRAEFIK_CHANNEL, - "traefik-public": TRAEFIK_CHANNEL, -} -MACHINE_SERVICES = { - "microceph": MICROCEPH_CHANNEL, - "microk8s": MICROK8S_CHANNEL, - "openstack-hypervisor": OPENSTACK_CHANNEL, - "sunbeam-machine": SUNBEAM_MACHINE_CHANNEL, -} - -K8S_SERVICES = {} -K8S_SERVICES |= OPENSTACK_SERVICES_K8S -K8S_SERVICES |= OVN_SERVICES_K8S -K8S_SERVICES |= MYSQL_SERVICES_K8S -K8S_SERVICES |= MYSQL_ROUTER_SERVICES_K8S -K8S_SERVICES |= MISC_SERVICES_K8S - -CHARM_VERSIONS = {} -CHARM_VERSIONS |= K8S_SERVICES -CHARM_VERSIONS |= MACHINE_SERVICES - -# Similar to CHARM_VERSIONS except this is not per service -# but per charm. So all *-mysql-router wont be included -# and instead only mysql-router is included. Same is the -# case of traefik charm. +# List of charms with default channels OPENSTACK_CHARMS_K8S = { "cinder-ceph-k8s": OPENSTACK_CHANNEL, "cinder-k8s": OPENSTACK_CHANNEL, diff --git a/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py b/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py index ae0441c4..3ab3a2ac 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/upgrades/test_base.py @@ -12,116 +12,104 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock +from sunbeam.commands.terraform import TerraformException from sunbeam.commands.upgrades.inter_channel import BaseUpgrade -from sunbeam.versions import ( - MYSQL_ROUTER_SERVICES_K8S, - MYSQL_SERVICES_K8S, - OPENSTACK_SERVICES_K8S, - OVN_SERVICES_K8S, -) +from sunbeam.jobs.common import ResultType +from sunbeam.jobs.juju import TimeoutException class TestBaseUpgrade: def setup_method(self): self.client = Mock() self.jhelper = AsyncMock() - self.tfhelper = Mock() - self.upgrade_service = ( - list(MYSQL_SERVICES_K8S.keys()) # noqa - + list(MYSQL_ROUTER_SERVICES_K8S.keys()) # noqa - + list(OVN_SERVICES_K8S.keys()) # noqa - + list(OPENSTACK_SERVICES_K8S.keys()) # noqa - ) + self.manifest = Mock() def test_upgrade_applications(self): - def _get_new_channel_mock(app_name, model): - channels = {"nova": "2023.2/edge", "neutron": None} - return channels[app_name] + model = "openstack" + apps = ["nova"] + charms = ["nova-k8s"] + tfplan = "openstack-plan" + config = "openstackterraformvar" + timeout = 60 upgrader = BaseUpgrade( "test name", "test description", self.client, self.jhelper, - self.tfhelper, - "openstack", + self.manifest, + model, + ) + + result = upgrader.upgrade_applications( + apps, charms, model, tfplan, config, timeout ) - get_new_channel_mock = Mock() - get_new_channel_mock.side_effect = _get_new_channel_mock - with patch.object(BaseUpgrade, "get_new_channel", get_new_channel_mock): - upgrader.upgrade_applications(["nova"], "openstack") - self.jhelper.update_applications_channel.assert_called_once_with( - "openstack", - { - "nova": { - "channel": "2023.2/edge", - "expected_status": {"workload": ["blocked", "active"]}, - } - }, + self.manifest.update_partial_tfvars_and_apply_tf.assert_called_once_with( + charms, tfplan, config ) + self.jhelper.wait_until_desired_status.assert_called_once() + assert result.result_type == ResultType.COMPLETED - def test_get_new_channel_os_service(self, mocker): - self.jhelper.get_charm_channel.return_value = "2023.1/edge" - upgrader = BaseUpgrade( - "test name", - "test description", - self.client, - self.jhelper, - self.tfhelper, - "openstack", + def test_upgrade_applications_tf_failed(self): + self.manifest.update_partial_tfvars_and_apply_tf.side_effect = ( + TerraformException("apply failed...") ) - new_channel = upgrader.get_new_channel("cinder", "openstack") - assert new_channel == "2023.2/edge" - def test_get_new_channel_os_service_same(self, mocker): - self.jhelper.get_charm_channel.return_value = "2023.2/edge" + model = "openstack" + apps = ["nova"] + charms = ["nova-k8s"] + tfplan = "openstack-plan" + config = "openstackterraformvar" + timeout = 60 + upgrader = BaseUpgrade( "test name", "test description", self.client, self.jhelper, - self.tfhelper, - "openstack", + self.manifest, + model, ) - new_channel = upgrader.get_new_channel("cinder", "openstack") - assert new_channel is None - def test_get_new_channel_os_downgrade(self, mocker): - self.jhelper.get_charm_channel.return_value = "2023.2/edge" - upgrader = BaseUpgrade( - "test name", - "test description", - self.client, - self.jhelper, - self.tfhelper, - "openstack", + result = upgrader.upgrade_applications( + apps, charms, model, tfplan, config, timeout + ) + self.manifest.update_partial_tfvars_and_apply_tf.assert_called_once_with( + charms, tfplan, config ) - new_channel = upgrader.get_new_channel("cinder", "openstack") - assert new_channel is None + 
self.jhelper.wait_until_desired_status.assert_not_called() + assert result.result_type == ResultType.FAILED + assert result.message == "apply failed..." - def test_get_new_channel_nonos_service(self, mocker): - self.jhelper.get_charm_channel.return_value = "3.8/stable" - upgrader = BaseUpgrade( - "test name", - "test description", - self.client, - self.jhelper, - self.tfhelper, - "openstack", + def test_upgrade_applications_waiting_timed_out(self): + self.jhelper.wait_until_desired_status.side_effect = TimeoutException( + "timed out" ) - new_channel = upgrader.get_new_channel("rabbitmq", "openstack") - assert new_channel == "3.12/edge" - def test_get_new_channel_unknown(self, mocker): + model = "openstack" + apps = ["nova"] + charms = ["nova-k8s"] + tfplan = "openstack-plan" + config = "openstackterraformvar" + timeout = 60 + upgrader = BaseUpgrade( "test name", "test description", self.client, self.jhelper, - self.tfhelper, - "openstack", + self.manifest, + model, + ) + + result = upgrader.upgrade_applications( + apps, charms, model, tfplan, config, timeout + ) + self.manifest.update_partial_tfvars_and_apply_tf.assert_called_once_with( + charms, tfplan, config ) - new_channel = upgrader.get_new_channel("foo", "openstack") - assert new_channel is None + self.jhelper.wait_until_desired_status.assert_called_once() + assert result.result_type == ResultType.FAILED + assert result.message == "timed out" diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py index 7ae0f9dc..daf9352d 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_openstack.py @@ -13,6 +13,7 @@ # limitations under the License. import asyncio +import json from pathlib import Path from unittest.mock import AsyncMock, Mock, patch @@ -144,3 +145,84 @@ def test_run_tf_apply_failed(self, cclient, jhelper, osplugin): osplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
+ + +class MockStatus: + def __init__(self, value: dict): + self.status = value + + def to_json(self): + return json.dumps(self.status) + + +class TestUpgradeOpenStackApplicationStep: + def test_run( + self, + cclient, + jhelper, + osplugin, + ): + jhelper.get_model_status_full.return_value = MockStatus( + { + "applications": { + "keystone": { + "charm": "ch:amd64/jammy/keystone-k8s-148", + "charm-channel": "2023.2/stable", + } + } + } + ) + step = openstack.UpgradeOpenStackApplicationStep(jhelper, osplugin) + result = step.run() + + osplugin.manifest.update_partial_tfvars_and_apply_tf.assert_called_once() + jhelper.wait_until_desired_status.assert_called_once() + assert result.result_type == ResultType.COMPLETED + + def test_run_tf_apply_failed( + self, cclient, read_config, jhelper, tfhelper, osplugin, manifest, pluginmanager + ): + osplugin.manifest.update_partial_tfvars_and_apply_tf.side_effect = ( + TerraformException("apply failed...") + ) + + jhelper.get_model_status_full.return_value = MockStatus( + { + "applications": { + "keystone": { + "charm": "ch:amd64/jammy/keystone-k8s-148", + "charm-channel": "2023.2/stable", + } + } + } + ) + step = openstack.UpgradeOpenStackApplicationStep(jhelper, osplugin) + result = step.run() + + osplugin.manifest.update_partial_tfvars_and_apply_tf.assert_called_once() + jhelper.wait_until_desired_status.assert_not_called() + assert result.result_type == ResultType.FAILED + assert result.message == "apply failed..." + + def test_run_waiting_timed_out( + self, cclient, read_config, jhelper, tfhelper, osplugin, manifest, pluginmanager + ): + jhelper.wait_until_desired_status.side_effect = TimeoutException("timed out") + + jhelper.get_model_status_full.return_value = MockStatus( + { + "applications": { + "keystone": { + "charm": "ch:amd64/jammy/keystone-k8s-148", + "charm-channel": "2023.2/stable", + } + } + } + ) + step = openstack.UpgradeOpenStackApplicationStep(jhelper, osplugin) + result = step.run() + + osplugin.manifest.update_partial_tfvars_and_apply_tf.assert_called_once() + jhelper.wait_until_desired_status.assert_called_once() + assert result.result_type == ResultType.FAILED + assert result.message == "timed out" From 9a4b12cbbd9aa2dfbe3b5458849f3ef5c3ecb2a7 Mon Sep 17 00:00:00 2001 From: Chi Wai Chan Date: Wed, 31 Jan 2024 10:49:42 +0800 Subject: [PATCH 17/27] Fix regression from #91. Add missing client argument to `get_all_external_repos`. --- sunbeam-python/sunbeam/jobs/plugin.py | 10 +++++++--- sunbeam-python/sunbeam/plugins/repo/plugin.py | 6 +++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index d5251725..90135744 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -143,6 +143,7 @@ def get_all_external_repos(cls, client: Client, detail: bool = False) -> list: ... ] + :param client: Clusterd client object. :param detail: If true, includes repo path and branch as well. :returns: List of repos. """ @@ -162,7 +163,7 @@ def get_all_external_repos(cls, client: Client, detail: bool = False) -> list: return [] @classmethod - def get_plugins(cls, repos: Optional[list] = []) -> dict: + def get_plugins(cls, client: Client, repos: Optional[list] = []) -> dict: """Returns list of plugin name and description. Get all plugins information for each repo specified in repos. @@ -170,6 +171,7 @@ def get_plugins(cls, repos: Optional[list] = []) -> dict: including the internal plugins in snap-openstack repo. 
Repo name core is reserved for internal plugins in snap-openstack repo. + :param client: Clusterd client object. :param repos: List of repos :returns: Dictionary of repo with plugin name and description @@ -185,7 +187,7 @@ def get_plugins(cls, repos: Optional[list] = []) -> dict: """ if not repos: repos.append("core") - repos.extend(cls.get_all_external_repos()) + repos.extend(cls.get_all_external_repos(client)) plugins = {} for repo in repos: @@ -219,13 +221,14 @@ def enabled_plugins(cls, client: Client, repos: Optional[list] = []) -> list: If repos is None or empty list, get plugins from all repos defined in cluster db including the internal plugins. + :param client: Clusterd client object. :param repos: List of repos :returns: List of enabled plugins """ enabled_plugins = [] if not repos: repos.append("core") - repos.extend(cls.get_all_external_repos()) + repos.extend(cls.get_all_external_repos(client)) for repo in repos: if repo == "core": @@ -329,6 +332,7 @@ def update_plugins( upgrade hooks if the plugin is enabled and version is changed. Do not run any upgrade hooks if repos is empty list. + :param client: Clusterd client object. :param repos: List of repos """ if not repos: diff --git a/sunbeam-python/sunbeam/plugins/repo/plugin.py b/sunbeam-python/sunbeam/plugins/repo/plugin.py index 1b2558c4..a756d1c3 100644 --- a/sunbeam-python/sunbeam/plugins/repo/plugin.py +++ b/sunbeam-python/sunbeam/plugins/repo/plugin.py @@ -322,18 +322,18 @@ def list(self, format: str, plugins: bool, include_core: bool) -> None: repo_names = PluginManager.get_all_external_repos(self.client) if include_core: click.echo("Core plugins:") - plugins = PluginManager.get_plugins(["core"]) + plugins = PluginManager.get_plugins(self.client, ["core"]) self._print_plugins_table(plugins.get("core")) for repo in repo_names: click.echo(f"Plugins in repo {repo}:") - plugins = PluginManager.get_plugins([repo]) + plugins = PluginManager.get_plugins(self.client, [repo]) self._print_plugins_table(plugins.get(repo)) elif format == FORMAT_YAML: # Add plugins to the repos list if plugins: - plugins = PluginManager.get_plugins() + plugins = PluginManager.get_plugins(self.client) if include_core: repos.append({"name": "core"}) From 425c69866f46b1507e1aea0876d6422c836ce30f Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Wed, 31 Jan 2024 10:41:47 +0530 Subject: [PATCH 18/27] [manifest] Add option manifest to plugin enable Add manifest option to enable group. Plugins of type EnableDisablePlugin should read the user provided manifest during enable of the plugin. --- sunbeam-python/sunbeam/main.py | 9 +++++- sunbeam-python/sunbeam/plugins/dns/plugin.py | 17 +++++++---- .../sunbeam/plugins/interface/v1/base.py | 3 ++ .../sunbeam/plugins/interface/v1/openstack.py | 28 +++++++++++++------ .../sunbeam/plugins/observability/plugin.py | 7 ++++- .../sunbeam/plugins/telemetry/plugin.py | 19 +++++++++---- 6 files changed, 62 insertions(+), 21 deletions(-) diff --git a/sunbeam-python/sunbeam/main.py b/sunbeam-python/sunbeam/main.py index 807fb7f6..fff0f3af 100644 --- a/sunbeam-python/sunbeam/main.py +++ b/sunbeam-python/sunbeam/main.py @@ -14,6 +14,7 @@ # limitations under the License. 
import logging +from pathlib import Path import click from snaphelpers import Snap @@ -73,8 +74,14 @@ def manifest(ctx): @click.group("enable", context_settings=CONTEXT_SETTINGS, cls=CatchGroup) +@click.option( + "-m", + "--manifest", + help="Manifest file.", + type=click.Path(exists=True, dir_okay=False, path_type=Path), +) @click.pass_context -def enable(ctx): +def enable(ctx, manifest: Path | None = None): """Enable plugins""" diff --git a/sunbeam-python/sunbeam/plugins/dns/plugin.py b/sunbeam-python/sunbeam/plugins/dns/plugin.py index 13eff6cb..2fd4e7fc 100644 --- a/sunbeam-python/sunbeam/plugins/dns/plugin.py +++ b/sunbeam-python/sunbeam/plugins/dns/plugin.py @@ -26,6 +26,7 @@ from sunbeam.commands.terraform import TerraformInitStep from sunbeam.jobs.common import run_plan from sunbeam.jobs.juju import JujuHelper, run_sync +from sunbeam.jobs.manifest import AddManifestStep from sunbeam.plugins.interface.v1.openstack import ( ApplicationChannelData, EnableOpenStackApplicationStep, @@ -86,11 +87,17 @@ def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data jhelper = JujuHelper(self.client, data_location) - plan = [ - TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), - EnableOpenStackApplicationStep(jhelper, self), - PatchBindLoadBalancerStep(self.client), - ] + + plan = [] + if self.user_manifest: + plan.append(AddManifestStep(self.client, self.user_manifest)) + plan.extend( + [ + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + EnableOpenStackApplicationStep(jhelper, self), + PatchBindLoadBalancerStep(self.client), + ] + ) run_plan(plan, console) click.echo(f"OpenStack {self.name!r} application enabled.") diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/base.py b/sunbeam-python/sunbeam/plugins/interface/v1/base.py index 57ad1a7a..d546006d 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/base.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/base.py @@ -479,6 +479,7 @@ def __init__(self, name: str, client: Client) -> None: :param name: Name of the plugin """ super().__init__(name, client) + self.user_manifest = None @property def enabled(self) -> bool: @@ -637,6 +638,8 @@ def run_enable_plans(self) -> None: @abstractmethod def enable_plugin(self) -> None: """Enable plugin command.""" + current_click_context = click.get_current_context() + self.user_manifest = current_click_context.parent.params.get("manifest") self.pre_enable() self.run_enable_plans() self.post_enable() diff --git a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py index 39e6c65d..8aa9417b 100644 --- a/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py +++ b/sunbeam-python/sunbeam/plugins/interface/v1/openstack.py @@ -46,7 +46,7 @@ update_status_background, ) from sunbeam.jobs.juju import JujuHelper, JujuWaitException, TimeoutException, run_sync -from sunbeam.jobs.manifest import Manifest +from sunbeam.jobs.manifest import AddManifestStep, Manifest from sunbeam.plugins.interface.v1.base import EnableDisablePlugin LOG = logging.getLogger(__name__) @@ -120,9 +120,15 @@ def manifest(self) -> Manifest: if self._manifest: return self._manifest - self._manifest = Manifest.load_latest_from_clusterdb( - self.client, include_defaults=True - ) + if self.user_manifest: + self._manifest = Manifest.load( + self.client, manifest_file=self.user_manifest, include_defaults=True + ) + else: + self._manifest = Manifest.load_latest_from_clusterdb( + self.client, 
include_defaults=True + ) + return self._manifest def is_openstack_control_plane(self) -> bool: @@ -154,10 +160,16 @@ def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data jhelper = JujuHelper(self.client, data_location) - plan = [ - TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), - EnableOpenStackApplicationStep(jhelper, self), - ] + + plan = [] + if self.user_manifest: + plan.append(AddManifestStep(self.client, self.user_manifest)) + plan.extend( + [ + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + EnableOpenStackApplicationStep(jhelper, self), + ] + ) run_plan(plan, console) click.echo(f"OpenStack {self.name} application enabled.") diff --git a/sunbeam-python/sunbeam/plugins/observability/plugin.py b/sunbeam-python/sunbeam/plugins/observability/plugin.py index 5c104228..9dccbb48 100644 --- a/sunbeam-python/sunbeam/plugins/observability/plugin.py +++ b/sunbeam-python/sunbeam/plugins/observability/plugin.py @@ -61,7 +61,7 @@ TimeoutException, run_sync, ) -from sunbeam.jobs.manifest import Manifest +from sunbeam.jobs.manifest import AddManifestStep, Manifest from sunbeam.plugins.interface.v1.base import EnableDisablePlugin, PluginRequirement from sunbeam.plugins.interface.v1.openstack import ( OPENSTACK_TERRAFORM_PLAN, @@ -476,6 +476,10 @@ def run_enable_plans(self): f"{OPENSTACK_TERRAFORM_PLAN}-plan" ) + plan = [] + if self.user_manifest: + plan.append(AddManifestStep(self.client, self.user_manifest)) + cos_plan = [ TerraformInitStep(tfhelper_cos), DeployObservabilityStackStep(self, tfhelper_cos, jhelper), @@ -490,6 +494,7 @@ def run_enable_plans(self): DeployGrafanaAgentStep(self, tfhelper_grafana_agent, tfhelper_cos, jhelper), ] + run_plan(plan, console) run_plan(cos_plan, console) run_plan(grafana_agent_plan, console) diff --git a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py index bb2cc09a..3aad963a 100644 --- a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py +++ b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py @@ -24,6 +24,7 @@ from sunbeam.commands.terraform import TerraformInitStep from sunbeam.jobs.common import run_plan from sunbeam.jobs.juju import JujuHelper, ModelNotFoundException, run_sync +from sunbeam.jobs.manifest import AddManifestStep from sunbeam.plugins.interface.v1.openstack import ( DisableOpenStackApplicationStep, EnableOpenStackApplicationStep, @@ -84,12 +85,18 @@ def run_enable_plans(self) -> None: """Run plans to enable plugin.""" data_location = self.snap.paths.user_data jhelper = JujuHelper(self.client, data_location) - plan = [ - TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), - EnableOpenStackApplicationStep(jhelper, self), - # No need to pass any extra terraform vars for this plugin - ReapplyHypervisorTerraformPlanStep(self.client, self.manifest, jhelper), - ] + + plan = [] + if self.user_manifest: + plan.append(AddManifestStep(self.client, self.user_manifest)) + plan.extend( + [ + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan)), + EnableOpenStackApplicationStep(jhelper, self), + # No need to pass any extra terraform vars for this plugin + ReapplyHypervisorTerraformPlanStep(self.client, self.manifest, jhelper), + ] + ) run_plan(plan, console) click.echo(f"OpenStack {self.name} application enabled.") From 82bf7371707a87d007b03c8d22293e09aa02672c Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Wed, 31 Jan 2024 14:46:07 +0530 Subject: [PATCH 19/27] Add manifest support to observability 
plugin Expand terraform variables for observability plugin to parameterise channel, revision, config. Update manifest_default and corresponding tfvar map in the plugin. Update plugin to use manifest functions instead of terraform helper functions. --- .../observability/etc/deploy-cos/main.tf | 59 +++-- .../observability/etc/deploy-cos/variables.tf | 108 +++++++++ .../etc/deploy-grafana-agent/main.tf | 13 +- .../etc/deploy-grafana-agent/variables.tf | 17 +- .../sunbeam/plugins/observability/plugin.py | 228 +++++++++--------- .../sunbeam/plugins/test_observability.py | 124 ++++------ 6 files changed, 321 insertions(+), 228 deletions(-) diff --git a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/main.tf b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/main.tf index d9ff6de4..5633bda8 100644 --- a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/main.tf +++ b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/main.tf @@ -20,7 +20,7 @@ terraform { required_providers { juju = { source = "juju/juju" - version = "= 0.8.0" + version = "= 0.10.1" } } } @@ -43,12 +43,14 @@ resource "juju_application" "traefik" { model = juju_model.cos.name charm { - name = "traefik-k8s" - channel = var.cos-channel - series = "focal" + name = "traefik-k8s" + channel = var.traefik-channel == null ? var.cos-channel : var.traefik-channel + revision = var.traefik-revision + base = "ubuntu@20.04" } - units = var.ingress-scale + config = var.traefik-config + units = var.ingress-scale } resource "juju_application" "alertmanager" { @@ -57,12 +59,14 @@ resource "juju_application" "alertmanager" { model = juju_model.cos.name charm { - name = "alertmanager-k8s" - channel = var.cos-channel - series = "focal" + name = "alertmanager-k8s" + channel = var.alertmanager-channel == null ? var.cos-channel : var.alertmanager-channel + revision = var.alertmanager-revision + base = "ubuntu@20.04" } - units = var.alertmanager-scale + config = var.alertmanager-config + units = var.alertmanager-scale } resource "juju_application" "prometheus" { @@ -71,12 +75,14 @@ resource "juju_application" "prometheus" { model = juju_model.cos.name charm { - name = "prometheus-k8s" - channel = var.cos-channel - series = "focal" + name = "prometheus-k8s" + channel = var.prometheus-channel == null ? var.cos-channel : var.prometheus-channel + revision = var.prometheus-revision + base = "ubuntu@20.04" } - units = var.prometheus-scale + config = var.prometheus-config + units = var.prometheus-scale } resource "juju_application" "grafana" { @@ -85,12 +91,14 @@ resource "juju_application" "grafana" { model = juju_model.cos.name charm { - name = "grafana-k8s" - channel = var.cos-channel - series = "focal" + name = "grafana-k8s" + channel = var.grafana-channel == null ? var.cos-channel : var.grafana-channel + revision = var.grafana-revision + base = "ubuntu@20.04" } - units = var.grafana-scale + config = var.grafana-config + units = var.grafana-scale } resource "juju_application" "catalogue" { @@ -100,15 +108,16 @@ resource "juju_application" "catalogue" { charm { name = "catalogue-k8s" - channel = var.cos-channel - series = "focal" + channel = var.catalogue-channel == null ? var.cos-channel : var.catalogue-channel + revision = var.catalogue-revision + base = "ubuntu@20.04" } - config = { + config = merge({ title = "Canonical Observability Stack" tagline = "Model-driven Observability Stack deployed with a single command." 
description = " Canonical Observability Stack Lite, or COS Lite, is a light-weight, highly-integrated, Juju-based observability suite running on Kubernetes." - } + }, var.catalogue-config) units = var.catalogue-scale } @@ -120,11 +129,13 @@ resource "juju_application" "loki" { charm { name = "loki-k8s" - channel = var.cos-channel - series = "focal" + channel = var.loki-channel == null ? var.cos-channel : var.loki-channel + revision = var.loki-revision + base = "ubuntu@20.04" } - units = var.loki-scale + config = var.loki-config + units = var.loki-scale } # juju integrate traefik prometheus diff --git a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/variables.tf b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/variables.tf index 22a88a56..d2e8dadf 100644 --- a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/variables.tf +++ b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-cos/variables.tf @@ -41,6 +41,114 @@ variable "cos-channel" { default = "1.0/stable" } +variable "traefik-channel" { + description = "Operator channel for COS Lite Traefik deployment" + type = string + default = "1.0/stable" +} + +variable "traefik-revision" { + description = "Operator channel revision for COS Lite Traefik deployment" + type = number + default = null +} + +variable "traefik-config" { + description = "Operator config for COS Lite Traefik deployment" + type = map(string) + default = {} +} + +variable "alertmanager-channel" { + description = "Operator channel for COS Lite Alert Manager deployment" + type = string + default = "1.0/stable" +} + +variable "alertmanager-revision" { + description = "Operator channel revision for COS Lite Alert Manager deployment" + type = number + default = null +} + +variable "alertmanager-config" { + description = "Operator config for COS Lite Alert Manager deployment" + type = map(string) + default = {} +} + +variable "prometheus-channel" { + description = "Operator channel for COS Lite Prometheus deployment" + type = string + default = "1.0/stable" +} + +variable "prometheus-revision" { + description = "Operator channel revision for COS Lite Prometheus deployment" + type = number + default = null +} + +variable "prometheus-config" { + description = "Operator config for COS Lite Prometheus deployment" + type = map(string) + default = {} +} + +variable "grafana-channel" { + description = "Operator channel for COS Lite Grafana deployment" + type = string + default = "1.0/stable" +} + +variable "grafana-revision" { + description = "Operator channel revision for COS Lite Grafana deployment" + type = number + default = null +} + +variable "grafana-config" { + description = "Operator config for COS Lite Grafana deployment" + type = map(string) + default = {} +} + +variable "catalogue-channel" { + description = "Operator channel for COS Lite Catalogue deployment" + type = string + default = "1.0/stable" +} + +variable "catalogue-revision" { + description = "Operator channel revision for COS Lite Catalogue deployment" + type = number + default = null +} + +variable "catalogue-config" { + description = "Operator config for COS Lite Catalogue deployment" + type = map(string) + default = {} +} + +variable "loki-channel" { + description = "Operator channel for COS Lite Loki deployment" + type = string + default = "1.0/stable" +} + +variable "loki-revision" { + description = "Operator channel revision for COS Lite Loki deployment" + type = number + default = null +} + +variable "loki-config" { + description = "Operator config for COS Lite Loki 
deployment" + type = map(string) + default = {} +} + variable "ingress-scale" { description = "Scale of ingress deployment" default = 1 diff --git a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/main.tf b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/main.tf index 03399b37..f1130d1b 100644 --- a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/main.tf +++ b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/main.tf @@ -19,7 +19,7 @@ terraform { required_providers { juju = { source = "juju/juju" - version = "= 0.8.0" + version = "= 0.10.1" } } } @@ -36,10 +36,13 @@ resource "juju_application" "grafana-agent" { units = 0 charm { - name = "grafana-agent" - channel = var.grafana-agent-channel - series = "jammy" + name = "grafana-agent" + channel = var.grafana-agent-channel + revision = var.grafana-agent-revision + base = "ubuntu@22.04" } + + config = var.grafana-agent-config } # juju integrate :cos-agent grafana-agent:cos-agent @@ -94,4 +97,4 @@ resource "juju_integration" "grafana-agent-to-cos-grafana" { application { offer_url = data.terraform_remote_state.cos.outputs.grafana-dashboard-offer-url } -} \ No newline at end of file +} diff --git a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/variables.tf b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/variables.tf index 0051f8e7..ddc270f9 100644 --- a/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/variables.tf +++ b/sunbeam-python/sunbeam/plugins/observability/etc/deploy-grafana-agent/variables.tf @@ -26,9 +26,22 @@ variable "principal-application-model" { variable "grafana-agent-channel" { description = "Channel to use when deploying grafana agent machine charm" + type = string # Note: Currently, latest/stable is not available for grafana-agent. So, # defaulting to latest/candidate. 
- default = "latest/candidate" + default = "latest/candidate" +} + +variable "grafana-agent-revision" { + description = "Channel revision to use when deploying grafana agent machine charm" + type = number + default = null +} + +variable "grafana-agent-config" { + description = "Config to use when deploying grafana agent machine charm" + type = map(string) + default = {} } variable "cos-state-backend" { @@ -39,4 +52,4 @@ variable "cos-state-backend" { variable "cos-state-config" { type = map(any) -} \ No newline at end of file +} diff --git a/sunbeam-python/sunbeam/plugins/observability/plugin.py b/sunbeam-python/sunbeam/plugins/observability/plugin.py index 9dccbb48..605ba9a7 100644 --- a/sunbeam-python/sunbeam/plugins/observability/plugin.py +++ b/sunbeam-python/sunbeam/plugins/observability/plugin.py @@ -74,6 +74,13 @@ OBSERVABILITY_MODEL = "observability" OBSERVABILITY_DEPLOY_TIMEOUT = 1200 # 20 minutes CONTROLLER_MODEL = CONTROLLER_MODEL.split("/")[-1] +COS_TFPLAN = "cos-plan" +GRAFANA_AGENT_TFPLAN = "grafana-agent-plan" +COS_CONFIG_KEY = "TerraformVarsPluginObservabilityPlanCos" +GRAFANA_AGENT_CONFIG_KEY = "TerraformVarsPluginObservabilityPlanGrafanaAgent" + +COS_CHANNEL = "1.0/candidate" +GRAFANA_AGENT_CHANNEL = "latest/edge" class FillObservabilityOffersStep(BaseStep): @@ -164,47 +171,37 @@ def run(self, status: Optional[Status] = None) -> Result: class DeployObservabilityStackStep(BaseStep, JujuStepHelper): """Deploy Observability Stack using Terraform""" + _CONFIG = COS_CONFIG_KEY + def __init__( self, plugin: "ObservabilityPlugin", - tfhelper: TerraformHelper, jhelper: JujuHelper, ): super().__init__("Deploy Observability Stack", "Deploying Observability Stack") - self.tfhelper = tfhelper + self.plugin = plugin self.jhelper = jhelper + self.manifest = self.plugin.manifest + self.tfplan = self.plugin.tfplan_cos self.model = OBSERVABILITY_MODEL self.cloud = MICROK8S_CLOUD - self.read_config = lambda: plugin.get_plugin_info().get( - "observability-stack-config", {} - ) - self.update_config = lambda c: plugin.update_plugin_info( - {"observability-stack-config": c} - ) def run(self, status: Optional[Status] = None) -> Result: """Execute configuration using terraform.""" - - try: - config = self.read_config() - except ConfigItemNotFoundException as e: - LOG.exception("Failed deploying Observability Stack: unable to read config") - return Result(ResultType.FAILED, str(e)) - - tfvars = { + extra_tfvars = { "model": self.model, - "cos-channel": "1.0/candidate", "cloud": self.cloud, "credential": f"{self.cloud}{CREDENTIAL_SUFFIX}", "config": {"workload-storage": MICROK8S_DEFAULT_STORAGECLASS}, } - config.update(tfvars) - self.update_config(config) - self.tfhelper.write_tfvars(tfvars) - self.update_status(status, "deploying services") try: - self.tfhelper.apply() + self.update_status(status, "deploying services") + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, + tfvar_config=self._CONFIG, + override_tfvars=extra_tfvars, + ) except TerraformException as e: LOG.exception("Error deploying Observability Stack") return Result(ResultType.FAILED, str(e)) @@ -233,50 +230,40 @@ def run(self, status: Optional[Status] = None) -> Result: class DeployGrafanaAgentStep(BaseStep, JujuStepHelper): """Deploy Grafana Agent using Terraform""" + _CONFIG = GRAFANA_AGENT_CONFIG_KEY + def __init__( self, plugin: "ObservabilityPlugin", - tfhelper: TerraformHelper, - tfhelper_cos: TerraformHelper, jhelper: JujuHelper, ): super().__init__("Deploy Grafana Agent", "Deploy Grafana Agent") - 
self.tfhelper = tfhelper - self.tfhelper_cos = tfhelper_cos + self.plugin = plugin self.jhelper = jhelper + self.manifest = self.plugin.manifest + self.tfplan = self.plugin.tfplan_grafana_agent self.model = CONTROLLER_MODEL - self.read_config = lambda: plugin.get_plugin_info().get( - "grafana-agent-config", {} - ) - self.update_config = lambda c: plugin.update_plugin_info( - {"grafana-agent-config": c} - ) def run(self, status: Optional[Status] = None) -> Result: """Execute configuration using terraform.""" + tfhelper_cos = self.manifest.get_tfhelper(COS_TFPLAN) + cos_backend = tfhelper_cos.backend + cos_backend_config = tfhelper_cos.backend_config() - cos_backend = self.tfhelper_cos.backend - cos_backend_config = self.tfhelper_cos.backend_config() - try: - config = self.read_config() - except ConfigItemNotFoundException as e: - LOG.exception("Failed deploying grafana agent: unable to read config") - return Result(ResultType.FAILED, str(e)) - - tfvars = { - "grafana-agent-channel": "latest/edge", + extra_tfvars = { "principal-application-model": self.model, "cos-state-backend": cos_backend, "cos-state-config": cos_backend_config, "principal-application": "openstack-hypervisor", } - config.update(tfvars) - self.update_config(config) - self.tfhelper.write_tfvars(tfvars) - self.update_status(status, "deploying application") try: - self.tfhelper.apply() + self.update_status(status, "deploying services") + self.manifest.update_tfvars_and_apply_tf( + tfplan=self.tfplan, + tfvar_config=self._CONFIG, + override_tfvars=extra_tfvars, + ) except TerraformException as e: LOG.exception("Error deploying grafana agent") return Result(ResultType.FAILED, str(e)) @@ -304,42 +291,22 @@ class RemoveObservabilityStackStep(BaseStep, JujuStepHelper): def __init__( self, plugin: "ObservabilityPlugin", - tfhelper: TerraformHelper, jhelper: JujuHelper, ): super().__init__("Remove Observability Stack", "Removing Observability Stack") - self.tfhelper = tfhelper + self.plugin = plugin + self.manifest = self.plugin.manifest + self.tfplan = self.plugin.tfplan_cos self.jhelper = jhelper self.model = OBSERVABILITY_MODEL self.cloud = MICROK8S_CLOUD - self.read_config = lambda: plugin.get_plugin_info().get( - "observability-stack-config", {} - ) - self.update_config = lambda c: plugin.update_plugin_info( - {"observability-stack-config": c} - ) def run(self, status: Optional[Status] = None) -> Result: """Execute configuration using terraform.""" - + tfhelper = self.manifest.get_tfhelper(self.tfplan) + print(tfhelper) try: - config = self.read_config() - except ConfigItemNotFoundException as e: - LOG.exception("Failed removing Observability Stack: unable to read config") - return Result(ResultType.FAILED, str(e)) - - tfvars = { - "model": self.model, - "cos-channel": "1.0/candidate", - "cloud": self.cloud, - "credential": f"{self.cloud}{CREDENTIAL_SUFFIX}", - "config": {"workload-storage": MICROK8S_DEFAULT_STORAGECLASS}, - } - config.update(tfvars) - self.update_config(config) - self.tfhelper.write_tfvars(tfvars) - try: - self.tfhelper.destroy() + tfhelper.destroy() except TerraformException as e: LOG.exception("Error destroying Observability Stack") return Result(ResultType.FAILED, str(e)) @@ -364,45 +331,20 @@ class RemoveGrafanaAgentStep(BaseStep, JujuStepHelper): def __init__( self, plugin: "ObservabilityPlugin", - tfhelper: TerraformHelper, - tfhelper_cos: TerraformHelper, jhelper: JujuHelper, ): super().__init__("Remove Grafana Agent", "Removing Grafana Agent") - self.tfhelper = tfhelper - self.tfhelper_cos = 
tfhelper_cos + self.plugin = plugin + self.manifest = self.plugin.manifest + self.tfplan = self.plugin.tfplan_grafana_agent self.jhelper = jhelper self.model = CONTROLLER_MODEL - self.read_config = lambda: plugin.get_plugin_info().get( - "grafana-agent-config", {} - ) - self.update_config = lambda c: plugin.update_plugin_info( - {"grafana-agent-config": c} - ) def run(self, status: Optional[Status] = None) -> Result: """Execute configuration using terraform.""" - - cos_backend = self.tfhelper_cos.backend - cos_backend_config = self.tfhelper_cos.backend_config() + tfhelper = self.manifest.get_tfhelper(self.tfplan) try: - config = self.read_config() - except ConfigItemNotFoundException as e: - LOG.exception("Failed removing grafana agent: unable to read config") - return Result(ResultType.FAILED, str(e)) - - tfvars = { - "grafana-agent-channel": "latest/edge", - "principal-application-model": self.model, - "cos-state-backend": cos_backend, - "cos-state-config": cos_backend_config, - "principal-application": "openstack-hypervisor", - } - config.update(tfvars) - self.update_config(config) - self.tfhelper.write_tfvars(tfvars) - try: - self.tfhelper.destroy() + tfhelper.destroy() except TerraformException as e: LOG.exception("Error destroying grafana agent") return Result(ResultType.FAILED, str(e)) @@ -435,9 +377,9 @@ class ObservabilityPlugin(EnableDisablePlugin): def __init__(self, client: Client) -> None: super().__init__("observability", client) self.snap = Snap() - self.tfplan_cos = "cos-plan" + self.tfplan_cos = COS_TFPLAN self.tfplan_cos_dir = "deploy-cos" - self.tfplan_grafana_agent = "grafana-agent-plan" + self.tfplan_grafana_agent = GRAFANA_AGENT_TFPLAN self.tfplan_grafana_agent_dir = "deploy-grafana-agent" self._manifest = None @@ -454,6 +396,15 @@ def manifest(self) -> Manifest: def manifest_defaults(self) -> dict: """Manifest plugin part in dict format.""" return { + "charms": { + "cos-traefik-k8s": {"channel": COS_CHANNEL}, + "alertmanager-k8s": {"channel": COS_CHANNEL}, + "grafana-k8s": {"channel": COS_CHANNEL}, + "catalogue-k8s": {"channel": COS_CHANNEL}, + "prometheus-k8s": {"channel": COS_CHANNEL}, + "loki-k8s": {"channel": COS_CHANNEL}, + "grafana-agent": {"channel": GRAFANA_AGENT_CHANNEL}, + }, "terraform": { self.tfplan_cos: { "source": Path(__file__).parent / "etc" / self.tfplan_cos_dir @@ -463,7 +414,55 @@ def manifest_defaults(self) -> dict: / "etc" # noqa: W503 / self.tfplan_grafana_agent_dir # noqa: W503 }, - } + }, + } + + def manifest_attributes_tfvar_map(self) -> dict: + """Manifest attributes terraformvars map.""" + return { + self.tfplan_cos: { + "charms": { + "cos-traefik-k8s": { + "channel": "traefik-channel", + "revision": "traefik-revision", + "config": "traefik-config", + }, + "alertmanager-k8s": { + "channel": "alertmanager-channel", + "revision": "alertmanager-revision", + "config": "alertmanager-config", + }, + "grafana-k8s": { + "channel": "grafana-channel", + "revision": "grafana-revision", + "config": "grafana-config", + }, + "catalogue-k8s": { + "channel": "catalogue-channel", + "revision": "catalogue-revision", + "config": "catalogue-config", + }, + "prometheus-k8s": { + "channel": "prometheus-channel", + "revision": "prometheus-revision", + "config": "prometheus-config", + }, + "loki-k8s": { + "channel": "loki-channel", + "revision": "loki-revision", + "config": "loki-config", + }, + } + }, + self.tfplan_grafana_agent: { + "charms": { + "grafana-agent": { + "channel": "grafana-agent-channel", + "revision": "grafana-agent-revision", + "config": 
"grafana-agent-config", + } + } + }, } def run_enable_plans(self): @@ -471,7 +470,6 @@ def run_enable_plans(self): jhelper = JujuHelper(self.client, data_location) tfhelper_cos = self.manifest.get_tfhelper(self.tfplan_cos) - tfhelper_grafana_agent = self.manifest.get_tfhelper(self.tfplan_grafana_agent) tfhelper_openstack = self.manifest.get_tfhelper( f"{OPENSTACK_TERRAFORM_PLAN}-plan" ) @@ -482,7 +480,7 @@ def run_enable_plans(self): cos_plan = [ TerraformInitStep(tfhelper_cos), - DeployObservabilityStackStep(self, tfhelper_cos, jhelper), + DeployObservabilityStackStep(self, jhelper), PatchCosLoadBalancerStep(self.client), FillObservabilityOffersStep( self.client, tfhelper_openstack, tfhelper_cos, jhelper @@ -490,8 +488,8 @@ def run_enable_plans(self): ] grafana_agent_plan = [ - TerraformInitStep(tfhelper_grafana_agent), - DeployGrafanaAgentStep(self, tfhelper_grafana_agent, tfhelper_cos, jhelper), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan_grafana_agent)), + DeployGrafanaAgentStep(self, jhelper), ] run_plan(plan, console) @@ -504,23 +502,21 @@ def run_disable_plans(self): data_location = self.snap.paths.user_data jhelper = JujuHelper(self.client, data_location) - tfhelper_cos = self.manifest.get_tfhelper(self.tfplan_cos) - tfhelper_grafana_agent = self.manifest.get_tfhelper(self.tfplan_grafana_agent) tfhelper_openstack = self.manifest.get_tfhelper( f"{OPENSTACK_TERRAFORM_PLAN}-plan" ) cos_plan = [ - TerraformInitStep(tfhelper_cos), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan_cos)), RemoveObservabilityIntegrationStep( self.client, tfhelper_openstack, jhelper ), - RemoveObservabilityStackStep(self, tfhelper_cos, jhelper), + RemoveObservabilityStackStep(self, jhelper), ] grafana_agent_plan = [ - TerraformInitStep(tfhelper_grafana_agent), - RemoveGrafanaAgentStep(self, tfhelper_grafana_agent, tfhelper_cos, jhelper), + TerraformInitStep(self.manifest.get_tfhelper(self.tfplan_grafana_agent)), + RemoveGrafanaAgentStep(self, jhelper), ] run_plan(grafana_agent_plan, console) diff --git a/sunbeam-python/tests/unit/sunbeam/plugins/test_observability.py b/sunbeam-python/tests/unit/sunbeam/plugins/test_observability.py index f7b83115..7acb494a 100644 --- a/sunbeam-python/tests/unit/sunbeam/plugins/test_observability.py +++ b/sunbeam-python/tests/unit/sunbeam/plugins/test_observability.py @@ -13,8 +13,7 @@ # limitations under the License. 
import asyncio -from pathlib import Path -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, patch import pytest @@ -39,22 +38,11 @@ def run_sync(coro): loop.close() -@pytest.fixture() -def cclient(): - with patch("sunbeam.plugins.interface.v1.base.Client") as p: - yield p - - @pytest.fixture() def jhelper(): yield AsyncMock() -@pytest.fixture() -def tfhelper(): - yield Mock(path=Path()) - - @pytest.fixture() def observabilityplugin(): with patch("sunbeam.plugins.observability.plugin.ObservabilityPlugin") as p: @@ -62,87 +50,80 @@ def observabilityplugin(): class TestDeployObservabilityStackStep: - def test_run(self, cclient, jhelper, tfhelper, observabilityplugin): + def test_run(self, jhelper, observabilityplugin): step = observability_plugin.DeployObservabilityStackStep( - observabilityplugin, tfhelper, jhelper + observabilityplugin, jhelper ) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + observabilityplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_tf_apply_failed(self, cclient, jhelper, tfhelper, observabilityplugin): - tfhelper.apply.side_effect = TerraformException("apply failed...") + def test_run_tf_apply_failed(self, jhelper, observabilityplugin): + observabilityplugin.manifest.update_tfvars_and_apply_tf.side_effect = ( + TerraformException("apply failed...") + ) step = observability_plugin.DeployObservabilityStackStep( - observabilityplugin, tfhelper, jhelper + observabilityplugin, jhelper ) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + observabilityplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_not_called() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
- def test_run_waiting_timed_out( - self, cclient, jhelper, tfhelper, observabilityplugin - ): + def test_run_waiting_timed_out(self, jhelper, observabilityplugin): jhelper.wait_until_active.side_effect = TimeoutException("timed out") step = observability_plugin.DeployObservabilityStackStep( - observabilityplugin, tfhelper, jhelper + observabilityplugin, jhelper ) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + observabilityplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_until_active.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "timed out" class TestRemoveObservabilityStackStep: - def test_run(self, cclient, jhelper, tfhelper, observabilityplugin): + def test_run(self, jhelper, observabilityplugin): + tfhelper = observabilityplugin.manifest.get_tfhelper() step = observability_plugin.RemoveObservabilityStackStep( - observabilityplugin, tfhelper, jhelper + observabilityplugin, jhelper ) result = step.run() - tfhelper.write_tfvars.assert_called_once() tfhelper.destroy.assert_called_once() jhelper.wait_model_gone.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_tf_destroy_failed( - self, cclient, jhelper, tfhelper, observabilityplugin - ): + def test_run_tf_destroy_failed(self, jhelper, observabilityplugin): + tfhelper = observabilityplugin.manifest.get_tfhelper() tfhelper.destroy.side_effect = TerraformException("destroy failed...") step = observability_plugin.RemoveObservabilityStackStep( - observabilityplugin, tfhelper, jhelper + observabilityplugin, jhelper ) result = step.run() - tfhelper.write_tfvars.assert_called_once() tfhelper.destroy.assert_called_once() jhelper.wait_model_gone.assert_not_called() assert result.result_type == ResultType.FAILED assert result.message == "destroy failed..." 
- def test_run_waiting_timed_out( - self, cclient, jhelper, tfhelper, observabilityplugin - ): + def test_run_waiting_timed_out(self, jhelper, observabilityplugin): + tfhelper = observabilityplugin.manifest.get_tfhelper() jhelper.wait_model_gone.side_effect = TimeoutException("timed out") step = observability_plugin.RemoveObservabilityStackStep( - observabilityplugin, tfhelper, jhelper + observabilityplugin, jhelper ) result = step.run() - tfhelper.write_tfvars.assert_called_once() tfhelper.destroy.assert_called_once() jhelper.wait_model_gone.assert_called_once() assert result.result_type == ResultType.FAILED @@ -150,87 +131,68 @@ def test_run_waiting_timed_out( class TestDeployGrafanaAgentStep: - def test_run(self, cclient, jhelper, tfhelper, observabilityplugin): - step = observability_plugin.DeployGrafanaAgentStep( - observabilityplugin, tfhelper, tfhelper, jhelper - ) + def test_run(self, jhelper, observabilityplugin): + step = observability_plugin.DeployGrafanaAgentStep(observabilityplugin, jhelper) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + observabilityplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_application_ready.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_tf_apply_failed(self, cclient, jhelper, tfhelper, observabilityplugin): - tfhelper.apply.side_effect = TerraformException("apply failed...") - - step = observability_plugin.DeployGrafanaAgentStep( - observabilityplugin, tfhelper, tfhelper, jhelper + def test_run_tf_apply_failed(self, jhelper, observabilityplugin): + observabilityplugin.manifest.update_tfvars_and_apply_tf.side_effect = ( + TerraformException("apply failed...") ) + + step = observability_plugin.DeployGrafanaAgentStep(observabilityplugin, jhelper) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + observabilityplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_application_ready.assert_not_called() assert result.result_type == ResultType.FAILED assert result.message == "apply failed..." 
- def test_run_waiting_timed_out( - self, cclient, jhelper, tfhelper, observabilityplugin - ): + def test_run_waiting_timed_out(self, jhelper, observabilityplugin): jhelper.wait_application_ready.side_effect = TimeoutException("timed out") - step = observability_plugin.DeployGrafanaAgentStep( - observabilityplugin, tfhelper, tfhelper, jhelper - ) + step = observability_plugin.DeployGrafanaAgentStep(observabilityplugin, jhelper) result = step.run() - tfhelper.write_tfvars.assert_called_once() - tfhelper.apply.assert_called_once() + observabilityplugin.manifest.update_tfvars_and_apply_tf.assert_called_once() jhelper.wait_application_ready.assert_called_once() assert result.result_type == ResultType.FAILED assert result.message == "timed out" class TestRemoveGrafanaAgentStep: - def test_run(self, cclient, jhelper, tfhelper, observabilityplugin): - step = observability_plugin.RemoveGrafanaAgentStep( - observabilityplugin, tfhelper, tfhelper, jhelper - ) + def test_run(self, jhelper, observabilityplugin): + tfhelper = observabilityplugin.manifest.get_tfhelper() + step = observability_plugin.RemoveGrafanaAgentStep(observabilityplugin, jhelper) result = step.run() - tfhelper.write_tfvars.assert_called_once() tfhelper.destroy.assert_called_once() jhelper.wait_application_gone.assert_called_once() assert result.result_type == ResultType.COMPLETED - def test_run_tf_destroy_failed( - self, cclient, jhelper, tfhelper, observabilityplugin - ): + def test_run_tf_destroy_failed(self, jhelper, observabilityplugin): + tfhelper = observabilityplugin.manifest.get_tfhelper() tfhelper.destroy.side_effect = TerraformException("destroy failed...") - step = observability_plugin.RemoveGrafanaAgentStep( - observabilityplugin, tfhelper, tfhelper, jhelper - ) + step = observability_plugin.RemoveGrafanaAgentStep(observabilityplugin, jhelper) result = step.run() - tfhelper.write_tfvars.assert_called_once() tfhelper.destroy.assert_called_once() jhelper.wait_application_gone.assert_not_called() assert result.result_type == ResultType.FAILED assert result.message == "destroy failed..." - def test_run_waiting_timed_out( - self, cclient, jhelper, tfhelper, observabilityplugin - ): + def test_run_waiting_timed_out(self, jhelper, observabilityplugin): + tfhelper = observabilityplugin.manifest.get_tfhelper() jhelper.wait_application_gone.side_effect = TimeoutException("timed out") - step = observability_plugin.RemoveGrafanaAgentStep( - observabilityplugin, tfhelper, tfhelper, jhelper - ) + step = observability_plugin.RemoveGrafanaAgentStep(observabilityplugin, jhelper) result = step.run() - tfhelper.write_tfvars.assert_called_once() tfhelper.destroy.assert_called_once() jhelper.wait_application_gone.assert_called_once() assert result.result_type == ResultType.FAILED From 5d236b84ecaefe216f08b6df7588bcf705977582 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Wed, 31 Jan 2024 17:21:18 +0530 Subject: [PATCH 20/27] [manifest] Validate charms section in manifest file Validate the charms in the manifest file are valid charms known by the sunbeam defaults. If not, raise an error. 
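The check mirrors the existing terraform-key validation in the same module: the charm names supplied by the user must form a subset of the charm names aggregated from the defaults. A minimal standalone sketch of the idea (the charm names below are illustrative, not the actual default list):

    # Sketch only: user-supplied charm keys must be a subset of the defaults' charm keys.
    def validate_charm_keys(user_charms: dict, default_manifest: dict) -> None:
        known = set(default_manifest.get("charms", {}).keys())
        unknown = set(user_charms.keys()) - known
        if unknown:
            raise ValueError(f"Manifest charms keys should be one of {known}")

    defaults = {"charms": {"example-k8s": {"channel": "1.0/stable"}}}
    validate_charm_keys({"example-k8s": {"channel": "1.0/edge"}}, defaults)  # accepted
    # validate_charm_keys({"unknown-k8s": {}}, defaults)  # raises ValueError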
--- sunbeam-python/sunbeam/jobs/manifest.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 5497a75d..695b033e 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -201,7 +201,16 @@ def validate_terraform_keys(self, default_manifest: dict): tf_keys = set(self.terraform.keys()) all_tfplans = default_manifest.get("terraform", {}).keys() if not tf_keys <= all_tfplans: - raise ValueError(f"Terraform keys should be one of {all_tfplans} ") + raise ValueError( + f"Manifest Terraform keys should be one of {all_tfplans} " + ) + + def validate_charm_keys(self, default_manifest: dict): + if self.charms: + charms_keys = set(self.charms.keys()) + all_charms = default_manifest.get("charms", {}).keys() + if not charms_keys <= all_charms: + raise ValueError(f"Manifest charms keys should be one of {all_charms} ") def __post_init__(self, client: Client): LOG.debug("Calling __post__init__") @@ -209,6 +218,7 @@ def __post_init__(self, client: Client): self.default_manifest_dict = self.get_default_manifest_as_dict(client) # Add custom validations self.validate_terraform_keys(self.default_manifest_dict) + self.validate_charm_keys(self.default_manifest_dict) # Add object variables to store self.tf_helpers = {} From ce9e3544b466ffb929da503ea563ca179d65a8d6 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Thu, 1 Feb 2024 12:43:34 +0530 Subject: [PATCH 21/27] Skip writing manifest to db if there is no change Skip the step AddManifest that writes manifest to cluster db if the content of provided manifest and the latest one in cluster db is same. --- sunbeam-python/sunbeam/jobs/manifest.py | 28 +++++++--- .../tests/unit/sunbeam/jobs/test_manifest.py | 56 ++++++++++++++++++- 2 files changed, 75 insertions(+), 9 deletions(-) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 695b033e..409c814f 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -405,19 +405,33 @@ def __init__(self, client: Client, manifest: Optional[Path] = None): # Write EMPTY_MANIFEST if manifest not provided self.manifest = manifest self.client = client + self.manifest_content = None - def run(self, status: Optional[Status] = None) -> Result: - """Write manifest to cluster db""" + def is_skip(self, status: Optional[Status] = None) -> Result: + """Skip if the user provided manifest and the latest from db are same.""" try: if self.manifest: with self.manifest.open("r") as file: - data = yaml.safe_load(file) - id = self.client.cluster.add_manifest(data=yaml.safe_dump(data)) + self.manifest_content = yaml.safe_load(file) else: - id = self.client.cluster.add_manifest( - data=yaml.safe_dump(EMPTY_MANIFEST) - ) + self.manifest_content = EMPTY_MANIFEST + + latest_manifest = self.client.cluster.get_latest_manifest() + except Exception as e: + LOG.debug(str(e)) + return Result(ResultType.FAILED, str(e)) + if yaml.safe_load(latest_manifest.get("data")) == self.manifest_content: + return Result(ResultType.SKIPPED) + + return Result(ResultType.COMPLETED) + + def run(self, status: Optional[Status] = None) -> Result: + """Write manifest to cluster db""" + try: + id = self.client.cluster.add_manifest( + data=yaml.safe_dump(self.manifest_content) + ) return Result(ResultType.COMPLETED, id) except Exception as e: LOG.warning(str(e)) diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py 
b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py index 90d050b0..a6601b02 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py @@ -341,20 +341,71 @@ def test_update_tfvars_and_apply_tf( class TestAddManifestStep: + def test_is_skip(self, cclient, tmpdir): + # Manifest in cluster DB different from user provided manifest + cclient.cluster.get_latest_manifest.return_value = {"data": "charms: {}"} + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(test_manifest) + step = manifest.AddManifestStep(cclient, manifest_file) + result = step.is_skip() + + assert result.result_type == ResultType.COMPLETED + + def test_is_skip_apply_same_manifest(self, cclient, tmpdir): + # Manifest in cluster DB same as user provided manifest + cclient.cluster.get_latest_manifest.return_value = {"data": test_manifest} + manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") + manifest_file.write(test_manifest) + step = manifest.AddManifestStep(cclient, manifest_file) + result = step.is_skip() + + assert result.result_type == ResultType.SKIPPED + + def test_is_skip_no_manifest(self, cclient): + # Manifest in cluster DB same as user provided manifest + cclient.cluster.get_latest_manifest.return_value = {"data": test_manifest} + step = manifest.AddManifestStep(cclient) + result = step.is_skip() + + assert step.manifest_content == manifest.EMPTY_MANIFEST + assert result.result_type == ResultType.COMPLETED + + def test_is_skip_no_manifest_apply_same(self, cclient): + # Manifest in cluster DB same as user provided manifest + empty_manifest_str = yaml.safe_dump(manifest.EMPTY_MANIFEST) + cclient.cluster.get_latest_manifest.return_value = {"data": empty_manifest_str} + step = manifest.AddManifestStep(cclient) + result = step.is_skip() + + assert step.manifest_content == manifest.EMPTY_MANIFEST + assert result.result_type == ResultType.SKIPPED + + def test_is_skip_no_connection_to_clusterdb(self, cclient): + cclient.cluster.get_latest_manifest.side_effect = ( + ClusterServiceUnavailableException("Cluster unavailable..") + ) + step = manifest.AddManifestStep(cclient) + result = step.is_skip() + + assert result.result_type == ResultType.FAILED + def test_run(self, cclient, tmpdir): + cclient.cluster.get_latest_manifest.return_value = {"data": "charms: {}"} manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") manifest_file.write(test_manifest) step = manifest.AddManifestStep(cclient, manifest_file) + step.manifest_content = yaml.safe_load(test_manifest) result = step.run() - test_manifest_dict = yaml.safe_load(test_manifest) cclient.cluster.add_manifest.assert_called_once_with( - data=yaml.safe_dump(test_manifest_dict) + data=yaml.safe_dump(step.manifest_content) ) assert result.result_type == ResultType.COMPLETED def test_run_with_no_manifest(self, cclient): + cclient.cluster.get_latest_manifest.return_value = {"data": test_manifest} step = manifest.AddManifestStep(cclient) + step.manifest_content = manifest.EMPTY_MANIFEST result = step.run() cclient.cluster.add_manifest.assert_called_once_with( @@ -367,6 +418,7 @@ def test_run_with_no_connection_to_clusterdb(self, cclient): "Cluster unavailable.." 
) step = manifest.AddManifestStep(cclient) + step.manifest_content = manifest.EMPTY_MANIFEST result = step.run() cclient.cluster.add_manifest.assert_called_once_with( From e5a5806eafc16234ccbd5b5d5e295101bbe5e404 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Fri, 2 Feb 2024 12:35:28 +0530 Subject: [PATCH 22/27] Add command to generate manifest Add subcommand generate in sunbeam manifest. Write the manifest to user provided file with $HOME/.config/openstack/manifest.yaml as default. Generate manifest with the current configuration by default. If the cluster is not bootstrapped, generate manifest with default values. --- sunbeam-python/sunbeam/commands/manifest.py | 66 ++++++++++++++++++- sunbeam-python/sunbeam/jobs/manifest.py | 14 ++-- sunbeam-python/sunbeam/main.py | 1 + sunbeam-python/sunbeam/plugins/caas/plugin.py | 2 +- .../sunbeam/plugins/observability/plugin.py | 1 - sunbeam-python/sunbeam/utils.py | 44 +++++++++++++ .../tests/unit/sunbeam/test_utils.py | 48 ++++++++++++++ 7 files changed, 167 insertions(+), 9 deletions(-) diff --git a/sunbeam-python/sunbeam/commands/manifest.py b/sunbeam-python/sunbeam/commands/manifest.py index b7eeb62b..e4961068 100644 --- a/sunbeam-python/sunbeam/commands/manifest.py +++ b/sunbeam-python/sunbeam/commands/manifest.py @@ -15,6 +15,8 @@ import logging +import os +from pathlib import Path import click import yaml @@ -26,8 +28,10 @@ ClusterServiceUnavailableException, ManifestItemNotFoundException, ) -from sunbeam.jobs.checks import DaemonGroupCheck +from sunbeam.jobs.checks import DaemonGroupCheck, VerifyBootstrappedCheck from sunbeam.jobs.common import FORMAT_TABLE, FORMAT_YAML, run_preflight_checks +from sunbeam.jobs.manifest import Manifest +from sunbeam.utils import asdict_with_extra_fields LOG = logging.getLogger(__name__) console = Console() @@ -89,3 +93,63 @@ def show(ctx: click.Context, id: str) -> None: click.echo("Error: Not able to connect to Cluster DB") except ManifestItemNotFoundException: click.echo(f"Error: No manifest exists with id {id}") + + +@click.command() +@click.option( + "-f", + "--manifest-file", + help="Output file for manifest, defaults to $HOME/.config/openstack/manifest.yaml", + type=click.Path(dir_okay=False, path_type=Path), +) +@click.pass_context +def generate( + ctx: click.Context, + manifest_file: Path | None = None, +): + """Generate manifest file. + + Generate manifest file with the deployed configuration. + If the cluster is not bootstrapped, fallback to default + configuration. + """ + client: Client = ctx.obj + + # TODO(hemanth): Add an option schema to print the JsonSchema for the + # Manifest. 
This will be easier when moved to pydantic 2.x + + if not manifest_file: + home = os.environ.get("SNAP_REAL_HOME") + manifest_file = Path(home) / ".config" / "openstack" / "manifest.yaml" + + LOG.debug(f"Creating {manifest_file} parent directory if it does not exist") + manifest_file.parent.mkdir(mode=0o775, parents=True, exist_ok=True) + + try: + preflight_checks = [DaemonGroupCheck(), VerifyBootstrappedCheck(client)] + run_preflight_checks(preflight_checks, console) + manifest_obj = Manifest.load_latest_from_clusterdb( + client, include_defaults=True + ) + except (click.ClickException, ClusterServiceUnavailableException) as e: + LOG.debug(e) + LOG.debug("Fallback to generating manifest with defaults") + manifest_obj = Manifest.get_default_manifest(client) + + try: + manifest_dict = asdict_with_extra_fields(manifest_obj) + LOG.debug(f"Manifest dict with extra fields: {manifest_dict}") + manifest_yaml = yaml.safe_dump(manifest_dict, sort_keys=False) + + # add comment to each line + manifest_lines = (f"# {line}" for line in manifest_yaml.split("\n")) + manifest_yaml_commented = "\n".join(manifest_lines) + + with manifest_file.open("w") as file: + file.write("# Generated Sunbeam Deployment Manifest\n\n") + file.write(manifest_yaml_commented) + except Exception as e: + LOG.debug(e) + raise click.ClickException(f"Manifest generation failed: {str(e)}") + + click.echo(f"Generated manifest is at {str(manifest_file)}") diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 409c814f..f3508cba 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -21,7 +21,7 @@ from typing import Any, Dict, List, Optional import yaml -from pydantic import Field +from pydantic import Field, PrivateAttr from pydantic.dataclasses import dataclass from snaphelpers import Snap @@ -95,6 +95,9 @@ class TerraformManifest: @dataclass(config=dict(extra="allow")) class Manifest: client: InitVar[Client] + tf_helpers: dict = PrivateAttr(default={}) + tfvar_map: dict = PrivateAttr(default={}) + default_manifest_dict: dict = PrivateAttr(default={}) juju: Optional[JujuManifest] = None charms: Optional[Dict[str, CharmsManifest]] = None terraform: Optional[Dict[str, TerraformManifest]] = None @@ -222,8 +225,6 @@ def __post_init__(self, client: Client): # Add object variables to store self.tf_helpers = {} - self.snap = Snap() - self.data_location = self.snap.paths.user_data self.tfvar_map = self._get_all_tfvar_map(client) self.client = client @@ -235,6 +236,7 @@ def _get_all_tfvar_map(self, client: Client) -> dict: # Terraform helper classes def get_tfhelper(self, tfplan: str) -> TerraformHelper: + snap = Snap() if self.tf_helpers.get(tfplan): return self.tf_helpers.get(tfplan) @@ -245,15 +247,15 @@ def get_tfhelper(self, tfplan: str) -> TerraformHelper: tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan, tfplan) src = self.terraform.get(tfplan).source - dst = self.snap.paths.user_common / "etc" / tfplan_dir + dst = snap.paths.user_common / "etc" / tfplan_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) self.tf_helpers[tfplan] = TerraformHelper( - path=self.snap.paths.user_common / "etc" / tfplan_dir, + path=snap.paths.user_common / "etc" / tfplan_dir, plan=tfplan, backend="http", - data_location=self.data_location, + data_location=snap.paths.user_data, ) return self.tf_helpers[tfplan] diff --git a/sunbeam-python/sunbeam/main.py b/sunbeam-python/sunbeam/main.py index fff0f3af..568e11d5 100644 --- 
a/sunbeam-python/sunbeam/main.py +++ b/sunbeam-python/sunbeam/main.py @@ -124,6 +124,7 @@ def main(): cli.add_command(manifest) manifest.add_command(manifest_commands.list) manifest.add_command(manifest_commands.show) + manifest.add_command(manifest_commands.generate) cli.add_command(enable) cli.add_command(disable) diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index e3520283..0a42499e 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -163,7 +163,7 @@ def add_manifest_section(self, manifest: Manifest) -> None: manifest.caas_config = CaasConfig(**_caas_config) except AttributeError: # Attribute not defined in manifest - pass + manifest.caas_config = CaasConfig() def set_application_names(self) -> list: """Application names handled by the terraform plan.""" diff --git a/sunbeam-python/sunbeam/plugins/observability/plugin.py b/sunbeam-python/sunbeam/plugins/observability/plugin.py index 605ba9a7..33876d37 100644 --- a/sunbeam-python/sunbeam/plugins/observability/plugin.py +++ b/sunbeam-python/sunbeam/plugins/observability/plugin.py @@ -304,7 +304,6 @@ def __init__( def run(self, status: Optional[Status] = None) -> Result: """Execute configuration using terraform.""" tfhelper = self.manifest.get_tfhelper(self.tfplan) - print(tfhelper) try: tfhelper.destroy() except TerraformException as e: diff --git a/sunbeam-python/sunbeam/utils.py b/sunbeam-python/sunbeam/utils.py index 5be0f7d2..67460a0b 100644 --- a/sunbeam-python/sunbeam/utils.py +++ b/sunbeam-python/sunbeam/utils.py @@ -20,12 +20,14 @@ import re import socket import sys +from dataclasses import InitVar, dataclass, is_dataclass from pathlib import Path from typing import Dict, List, Optional import click import netifaces import pwgen +from pydantic.fields import ModelPrivateAttr from pyroute2 import IPDB, NDB from sunbeam.plugins.interface.v1.base import PluginError @@ -302,3 +304,45 @@ def merge_dict(d: dict, u: dict) -> dict: d[k] = v return d + + +def asdict_with_extra_fields(dc: dataclass) -> dict: + """Returns dataclass in dict format. + + dataclasses.asdict only returns the fields defined in + the dataclass. If any new fields are added dynamically + they are not added as part of returned dictionary. + In sunbeam, Manifest dataclass can get additional fields + dynamically by the plugins. + This function returns all the fields as dict including + extra fields. However InitVar and Private fields are + dropped from the dict. + """ + + def handle_dict(obj: dict, ignored_fields: list = ["__pydantic_initialised__"]): + output = {} + for name, value in obj.items(): + if name in ignored_fields: + continue + + if is_dataclass(value): + value = asdict_with_extra_fields(value) + elif isinstance(value, dict): + value = handle_dict(value) + elif isinstance(value, Path): + value = str(value) + + output[name] = value + + return output + + obj = dc.__dict__ + + # Ignore InitVars and Private Attributes while generating manifest. 
+ for field, value in dc.__dataclass_fields__.items(): + if isinstance(value.type, InitVar) or isinstance( + value.default, ModelPrivateAttr + ): + obj.pop(field, None) + + return handle_dict(obj) diff --git a/sunbeam-python/tests/unit/sunbeam/test_utils.py b/sunbeam-python/tests/unit/sunbeam/test_utils.py index 35dc94b9..44098dc0 100644 --- a/sunbeam-python/tests/unit/sunbeam/test_utils.py +++ b/sunbeam-python/tests/unit/sunbeam/test_utils.py @@ -11,10 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import copy import textwrap +from dataclasses import InitVar +from typing import Dict from unittest.mock import Mock, mock_open, patch import pytest +from pydantic import PrivateAttr +from pydantic.dataclasses import dataclass import sunbeam.utils as utils @@ -54,6 +59,25 @@ def ifaddresses(): yield p +@dataclass +class B: + b_str: str | None = None + b_dict: dict | None = None + + +@dataclass +class A: + a_init: InitVar[str] + a_priv: str = PrivateAttr(default="a_priv") + a_int: int | None = None + a_str: str | None = None + a_dict: dict | None = None + a_dict_b: Dict[str, B] | None = None + + def __post_init__(self, a_init): + self.a_priv = "a_priv" + + class TestUtils: def test_is_nic_connected(self, mocker): context_manager = mocker.patch("sunbeam.utils.IPDB") @@ -249,3 +273,27 @@ def test_generate_password(self, mocker): generate_password = mocker.patch("sunbeam.utils.generate_password") generate_password.return_value = "abcdefghijkl" assert utils.generate_password() == "abcdefghijkl" + + def test_asdict_with_extra_fields_no_extra_fields(self): + b = {"b_str": "b", "b_dict": {}} + b_dc = B(**b) + a = {"a_int": 1, "a_str": "a", "a_dict": {}, "a_dict_b": {"b1": b_dc}} + a_dc = A("initvar", **a) + a_dict_with_extra = utils.asdict_with_extra_fields(a_dc) + + a_dict_expected = copy.deepcopy(a) + a_dict_expected["a_dict_b"]["b1"] = b + assert a_dict_with_extra == a_dict_expected + + def test_asdict_with_extra_fields_with_extra_fields(self): + b = {"b_str": "b", "b_dict": {}} + b_dc = B(**b) + a = {"a_int": 1, "a_str": "a", "a_dict": {}, "a_dict_b": {"b1": b_dc}} + a_dc = A("initvar", **a) + a_dc.a_extra = "a_extra" + a_dict_with_extra = utils.asdict_with_extra_fields(a_dc) + + a_dict_expected = copy.deepcopy(a) + a_dict_expected["a_dict_b"]["b1"] = b + a_dict_expected["a_extra"] = "a_extra" + assert a_dict_with_extra == a_dict_expected From 30cb80c387924157ccc2497b64242f9d166bc417 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Sun, 4 Feb 2024 12:58:35 +0530 Subject: [PATCH 23/27] Fix writing manifest to cluster db Handle catching ManifestItemNotFound exception in getting latest manfiest from cluster db and return result as completed for AddManifest is_skip. 
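In other words, a missing manifest in the cluster database is the normal first-run case and should fall through to writing the new manifest, while only genuine service or parsing failures abort the step. A rough sketch of the branch structure this gives is_skip(), paraphrased from the hunk below rather than copied verbatim (exception classes come from sunbeam.clusterd.service, Result/ResultType from sunbeam's job framework):

    # Paraphrase of the is_skip() decision flow introduced by this fix.
    try:
        latest = client.cluster.get_latest_manifest()
    except ManifestItemNotFoundException:
        return Result(ResultType.COMPLETED)       # no manifest yet: write it
    except (ClusterServiceUnavailableException, yaml.YAMLError, IOError) as e:
        return Result(ResultType.FAILED, str(e))  # real error: fail the step
    if yaml.safe_load(latest.get("data")) == manifest_content:
        return Result(ResultType.SKIPPED)         # unchanged: skip the write
    return Result(ResultType.COMPLETED)           # changed: write the new manifest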
--- sunbeam-python/sunbeam/jobs/manifest.py | 9 ++++++--- .../tests/unit/sunbeam/jobs/test_manifest.py | 14 +++++++++++++- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index f3508cba..e0bfa27d 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -28,6 +28,7 @@ from sunbeam import utils from sunbeam.clusterd.client import Client from sunbeam.clusterd.service import ( + ClusterServiceUnavailableException, ConfigItemNotFoundException, ManifestItemNotFoundException, ) @@ -419,8 +420,10 @@ def is_skip(self, status: Optional[Status] = None) -> Result: self.manifest_content = EMPTY_MANIFEST latest_manifest = self.client.cluster.get_latest_manifest() - except Exception as e: - LOG.debug(str(e)) + except ManifestItemNotFoundException: + return Result(ResultType.COMPLETED) + except (ClusterServiceUnavailableException, yaml.YAMLError, IOError) as e: + LOG.debug(e) return Result(ResultType.FAILED, str(e)) if yaml.safe_load(latest_manifest.get("data")) == self.manifest_content: @@ -436,5 +439,5 @@ def run(self, status: Optional[Status] = None) -> Result: ) return Result(ResultType.COMPLETED, id) except Exception as e: - LOG.warning(str(e)) + LOG.debug(e) return Result(ResultType.FAILED, str(e)) diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py index a6601b02..ceeaef1d 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py @@ -22,7 +22,10 @@ import sunbeam.commands.terraform as terraform import sunbeam.jobs.manifest as manifest -from sunbeam.clusterd.service import ClusterServiceUnavailableException +from sunbeam.clusterd.service import ( + ClusterServiceUnavailableException, + ManifestItemNotFoundException, +) from sunbeam.jobs.common import ResultType from sunbeam.versions import OPENSTACK_CHANNEL, TERRAFORM_DIR_NAMES @@ -389,6 +392,15 @@ def test_is_skip_no_connection_to_clusterdb(self, cclient): assert result.result_type == ResultType.FAILED + def test_is_skip_with_no_manifest_in_db(self, cclient): + cclient.cluster.get_latest_manifest.side_effect = ManifestItemNotFoundException( + "Manifest Item not found." + ) + step = manifest.AddManifestStep(cclient) + result = step.is_skip() + + assert result.result_type == ResultType.COMPLETED + def test_run(self, cclient, tmpdir): cclient.cluster.get_latest_manifest.return_value = {"data": "charms: {}"} manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") From 84876b2bb7a4bdffa7ca23e8931fd4653052949e Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Mon, 5 Feb 2024 14:32:45 +0530 Subject: [PATCH 24/27] Ignore default terraform sources in manifest generation Donot print default terraform sources in the generated manifest. 
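Default plans ship inside the snap, so their absolute paths are not useful in a user-facing manifest; the filter simply nulls out any terraform source under the snap prefix before the YAML is dumped. A small standalone illustration (the plan names and paths here are examples only):

    # Illustration of the source filter applied before dumping the generated manifest.
    manifest_dict = {
        "terraform": {
            "shipped-plan": {"source": "/snap/openstack/current/etc/deploy-example"},
            "custom-plan": {"source": "/home/ubuntu/my-plan"},
        }
    }
    for name, value in manifest_dict.get("terraform", {}).items():
        if value.get("source", "").startswith("/snap/openstack"):
            value["source"] = None  # default shipped with the snap: drop it
    # "custom-plan" keeps its source; "shipped-plan" is emitted without one.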
--- sunbeam-python/sunbeam/commands/manifest.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/sunbeam-python/sunbeam/commands/manifest.py b/sunbeam-python/sunbeam/commands/manifest.py index e4961068..77fdf22d 100644 --- a/sunbeam-python/sunbeam/commands/manifest.py +++ b/sunbeam-python/sunbeam/commands/manifest.py @@ -139,6 +139,15 @@ def generate( try: manifest_dict = asdict_with_extra_fields(manifest_obj) LOG.debug(f"Manifest dict with extra fields: {manifest_dict}") + + # Remove terraform default sources + manifest_terraform_dict = manifest_dict.get("terraform", {}) + for name, value in manifest_terraform_dict.items(): + if value.get("source") and value.get("source").startswith( + "/snap/openstack" + ): + value["source"] = None + manifest_yaml = yaml.safe_dump(manifest_dict, sort_keys=False) # add comment to each line From 3fc153d480aa1f6749c9564cc21fc00e5d3b1065 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Mon, 5 Feb 2024 20:14:33 +0530 Subject: [PATCH 25/27] Merge manifest and preseed files Use single file for both manifest and preseed. The file should have deployment as key for preseed entries and software for software config entries. Remove --preseed option from commands and pass the preseed dict from manifest to the plans. --- sunbeam-python/sunbeam/commands/bootstrap.py | 19 +- sunbeam-python/sunbeam/commands/configure.py | 59 ++--- sunbeam-python/sunbeam/commands/juju.py | 10 +- sunbeam-python/sunbeam/commands/manifest.py | 185 +++++++++++++-- sunbeam-python/sunbeam/commands/microceph.py | 21 +- sunbeam-python/sunbeam/commands/microk8s.py | 11 +- sunbeam-python/sunbeam/commands/node.py | 15 +- sunbeam-python/sunbeam/commands/refresh.py | 2 +- .../commands/upgrades/intra_channel.py | 8 +- sunbeam-python/sunbeam/jobs/manifest.py | 215 +++++++++++------- sunbeam-python/sunbeam/jobs/plugin.py | 4 +- sunbeam-python/sunbeam/plugins/caas/plugin.py | 12 +- .../sunbeam/plugins/telemetry/plugin.py | 6 + .../tests/unit/sunbeam/jobs/test_manifest.py | 105 +++++---- 14 files changed, 425 insertions(+), 247 deletions(-) diff --git a/sunbeam-python/sunbeam/commands/bootstrap.py b/sunbeam-python/sunbeam/commands/bootstrap.py index 75776f81..38da007a 100644 --- a/sunbeam-python/sunbeam/commands/bootstrap.py +++ b/sunbeam-python/sunbeam/commands/bootstrap.py @@ -90,12 +90,6 @@ @click.command() @click.option("-a", "--accept-defaults", help="Accept all defaults.", is_flag=True) -@click.option( - "-p", - "--preseed", - help="Preseed file.", - type=click.Path(exists=True, dir_okay=False, path_type=Path), -) @click.option( "-m", "--manifest", @@ -138,7 +132,6 @@ def bootstrap( topology: str, database: str, manifest: Optional[Path] = None, - preseed: Optional[Path] = None, accept_defaults: bool = False, ) -> None: """Bootstrap the local node. 
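As described in the commit message above, a single file now carries both halves: preseed answers live under a top-level "deployment" key and charm/terraform configuration under "software". A hedged example of the shape such a file might take, expressed as a Python sketch (the deployment/software split and the bootstrap/user sections follow this patch; the nested keys and values are illustrative only):

    # Sketch of the merged manifest structure this series expects.
    import yaml

    example = yaml.safe_load("""
    deployment:
      bootstrap:
        management_cidr: 10.20.20.0/24
      user:
        remote_access_location: local
    software:
      charms:
        keystone-k8s:
          channel: 2023.2/stable
    """)
    assert set(example) == {"deployment", "software"}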
@@ -156,7 +149,9 @@ def bootstrap( else: manifest_obj = Manifest.get_default_manifest(client) - LOG.debug(f"Manifest used for deployment: {manifest_obj}") + LOG.debug(f"Manifest used for deployment - preseed: {manifest_obj.deployment}") + LOG.debug(f"Manifest used for deployment - software: {manifest_obj.software}") + preseed = manifest_obj.deployment # Bootstrap node must always have the control role if Role.CONTROL not in roles: @@ -174,7 +169,7 @@ def bootstrap( cloud_type = snap.config.get("juju.cloud.type") cloud_name = snap.config.get("juju.cloud.name") - juju_bootstrap_args = manifest_obj.juju.bootstrap_args + juju_bootstrap_args = manifest_obj.software.juju.bootstrap_args data_location = snap.paths.user_data preflight_checks = [] @@ -204,7 +199,7 @@ def bootstrap( CONTROLLER, bootstrap_args=juju_bootstrap_args, accept_defaults=accept_defaults, - preseed_file=preseed, + deployment_preseed=preseed, ) ) run_plan(plan, console) @@ -240,7 +235,7 @@ def bootstrap( manifest_obj, jhelper, accept_defaults=accept_defaults, - preseed_file=preseed, + deployment_preseed=preseed, ) ) plan4.append(AddMicrok8sUnitStep(client, fqdn, jhelper)) @@ -258,7 +253,7 @@ def bootstrap( fqdn, jhelper, accept_defaults=accept_defaults, - preseed_file=preseed, + deployment_preseed=preseed, ) ) diff --git a/sunbeam-python/sunbeam/commands/configure.py b/sunbeam-python/sunbeam/commands/configure.py index 9ac8ce7e..052dcce2 100644 --- a/sunbeam-python/sunbeam/commands/configure.py +++ b/sunbeam-python/sunbeam/commands/configure.py @@ -434,7 +434,7 @@ def __init__( self, client: Client, answer_file: str, - preseed_file: str | None = None, + deployment_preseed: dict | None = None, accept_defaults: bool = False, ): super().__init__( @@ -442,7 +442,7 @@ def __init__( ) self.client = client self.accept_defaults = accept_defaults - self.preseed_file = preseed_file + self.preseed = deployment_preseed or {} self.answer_file = answer_file def has_prompts(self) -> bool: @@ -462,14 +462,11 @@ def prompt(self, console: Optional[Console] = None) -> None: for section in ["user", "external_network"]: if not self.variables.get(section): self.variables[section] = {} - if self.preseed_file: - preseed = sunbeam.jobs.questions.read_preseed(Path(self.preseed_file)) - else: - preseed = {} + user_bank = sunbeam.jobs.questions.QuestionBank( questions=user_questions(), console=console, - preseed=preseed.get("user"), + preseed=self.preseed.get("user"), previous_answers=self.variables.get("user"), accept_defaults=self.accept_defaults, ) @@ -481,7 +478,7 @@ def prompt(self, console: Optional[Console] = None) -> None: ext_net_bank = sunbeam.jobs.questions.QuestionBank( questions=ext_net_questions_local_only(), console=console, - preseed=preseed.get("external_network"), + preseed=self.preseed.get("external_network"), previous_answers=self.variables.get("external_network"), accept_defaults=self.accept_defaults, ) @@ -489,7 +486,7 @@ def prompt(self, console: Optional[Console] = None) -> None: ext_net_bank = sunbeam.jobs.questions.QuestionBank( questions=ext_net_questions(), console=console, - preseed=preseed.get("external_network"), + preseed=self.preseed.get("external_network"), previous_answers=self.variables.get("external_network"), accept_defaults=self.accept_defaults, ) @@ -633,7 +630,7 @@ def __init__( name: str, jhelper: JujuHelper, join_mode: bool = False, - preseed_file: Path | None = None, + deployment_preseed: dict | None = None, ): super().__init__( "Apply local hypervisor settings", "Applying local hypervisor settings" @@ -642,8 
+639,7 @@ def __init__( self.name = name self.jhelper = jhelper self.join_mode = join_mode - self.preseed_file = preseed_file - self.preseed_file = preseed_file + self.preseed = deployment_preseed or {} def has_prompts(self) -> bool: return True @@ -686,14 +682,10 @@ def prompt(self, console: Optional[Console] = None) -> None: remote_access_location = self.variables.get("user", {}).get( "remote_access_location" ) - if self.preseed_file: - preseed = sunbeam.jobs.questions.read_preseed(self.preseed_file) - else: - preseed = {} # If adding new nodes to the cluster then local access makes no sense # so always prompt for the nic. if self.join_mode or remote_access_location == utils.REMOTE_ACCESS: - ext_net_preseed = preseed.get("external_network", {}) + ext_net_preseed = self.preseed.get("external_network", {}) # If nic is in the preseed assume the user knows what they are doing and # bypass validation if ext_net_preseed.get("nic"): @@ -730,7 +722,7 @@ def run(self, status: Optional[Status] = None) -> Result: def _configure( client: Client, openrc: Optional[Path] = None, - preseed: Optional[Path] = None, + manifest: Optional[Path] = None, accept_defaults: bool = False, ): preflight_checks = [] @@ -738,13 +730,26 @@ def _configure( preflight_checks.append(VerifyBootstrappedCheck(client)) run_preflight_checks(preflight_checks, console) - manifest_obj = Manifest.load_latest_from_clusterdb(client, include_defaults=True) + # Validate manifest file + manifest_obj = None + if manifest: + manifest_obj = Manifest.load( + client, manifest_file=manifest, include_defaults=True + ) + else: + manifest_obj = Manifest.load_latest_from_clusterdb( + client, include_defaults=True + ) + + LOG.debug(f"Manifest used for deployment - preseed: {manifest_obj.deployment}") + LOG.debug(f"Manifest used for deployment - software: {manifest_obj.software}") + preseed = manifest_obj.deployment or {} name = utils.get_fqdn() tfplan = "demo-setup" tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan) snap = Snap() - manifest_tfplans = manifest_obj.terraform + manifest_tfplans = manifest_obj.software.terraform src = manifest_tfplans.get(tfplan).source dst = snap.paths.user_common / "etc" / tfplan_dir try: @@ -776,7 +781,7 @@ def _configure( UserQuestions( client, answer_file=answer_file, - preseed_file=preseed, + deployment_preseed=preseed, accept_defaults=accept_defaults, ), TerraformDemoInitStep(client, tfhelper), @@ -805,7 +810,7 @@ def _configure( # Accept preseed file but do not allow 'accept_defaults' as nic # selection may vary from machine to machine and is potentially # destructive if it takes over an unintended nic. 
- preseed_file=preseed, + deployment_preseed=preseed, ) ) run_plan(plan, console) @@ -815,9 +820,9 @@ def _configure( @click.pass_context @click.option("-a", "--accept-defaults", help="Accept all defaults.", is_flag=True) @click.option( - "-p", - "--preseed", - help="Preseed file.", + "-m", + "--manifest", + help="Manifest file.", type=click.Path(exists=True, dir_okay=False, path_type=Path), ) @click.option( @@ -829,14 +834,14 @@ def _configure( def configure( ctx: click.Context, openrc: Optional[Path] = None, - preseed: Optional[Path] = None, + manifest: Optional[Path] = None, accept_defaults: bool = False, ) -> None: """Configure cloud with some sensible defaults.""" if ctx.invoked_subcommand is not None: return client: Client = ctx.obj - _configure(client, openrc, preseed, accept_defaults) + _configure(client, openrc, manifest, accept_defaults) for name, command in configure.commands.items(): LOG.debug("Running configure %r", name) cmd_ctx = click.Context( diff --git a/sunbeam-python/sunbeam/commands/juju.py b/sunbeam-python/sunbeam/commands/juju.py index 6deeddfd..f6a7dc9c 100644 --- a/sunbeam-python/sunbeam/commands/juju.py +++ b/sunbeam-python/sunbeam/commands/juju.py @@ -303,7 +303,7 @@ def __init__( cloud_type: str, controller: str, bootstrap_args: list = [], - preseed_file: Optional[Path] = None, + deployment_preseed: dict | None = None, accept_defaults: bool = False, ): super().__init__("Bootstrap Juju", "Bootstrapping Juju onto machine") @@ -312,7 +312,7 @@ def __init__( self.cloud_type = cloud_type self.controller = controller self.bootstrap_args = bootstrap_args - self.preseed_file = preseed_file + self.preseed = deployment_preseed or {} self.accept_defaults = accept_defaults self.juju_clouds = [] self.client = client @@ -330,14 +330,10 @@ def prompt(self, console: Optional[Console] = None) -> None: self.variables = questions.load_answers(self.client, self._CONFIG) self.variables.setdefault("bootstrap", {}) - if self.preseed_file: - preseed = questions.read_preseed(self.preseed_file) - else: - preseed = {} bootstrap_bank = questions.QuestionBank( questions=bootstrap_questions(), console=console, # type: ignore - preseed=preseed.get("bootstrap"), + preseed=self.preseed.get("bootstrap"), previous_answers=self.variables.get("bootstrap", {}), accept_defaults=self.accept_defaults, ) diff --git a/sunbeam-python/sunbeam/commands/manifest.py b/sunbeam-python/sunbeam/commands/manifest.py index 77fdf22d..af777e18 100644 --- a/sunbeam-python/sunbeam/commands/manifest.py +++ b/sunbeam-python/sunbeam/commands/manifest.py @@ -23,20 +23,176 @@ from rich.console import Console from rich.table import Table +from sunbeam import utils from sunbeam.clusterd.client import Client from sunbeam.clusterd.service import ( ClusterServiceUnavailableException, ManifestItemNotFoundException, ) +from sunbeam.commands.configure import ( + CLOUD_CONFIG_SECTION, + ext_net_questions, + ext_net_questions_local_only, + user_questions, +) +from sunbeam.commands.juju import BOOTSTRAP_CONFIG_KEY, bootstrap_questions +from sunbeam.commands.microceph import microceph_questions +from sunbeam.commands.microk8s import ( + MICROK8S_ADDONS_CONFIG_KEY, + microk8s_addons_questions, +) from sunbeam.jobs.checks import DaemonGroupCheck, VerifyBootstrappedCheck from sunbeam.jobs.common import FORMAT_TABLE, FORMAT_YAML, run_preflight_checks from sunbeam.jobs.manifest import Manifest +from sunbeam.jobs.questions import QuestionBank, load_answers from sunbeam.utils import asdict_with_extra_fields LOG = logging.getLogger(__name__) 
console = Console() +def show_questions( + question_bank, + section=None, + subsection=None, + section_description=None, + comment_out=False, +) -> list: + lines = [] + space = " " + indent = "" + outer_indent = space * 2 + if comment_out: + comment = "# " + else: + comment = "" + if section: + if section_description: + lines.append(f"{outer_indent}{comment}{indent}# {section_description}") + lines.append(f"{outer_indent}{comment}{indent}{section}:") + indent = space * 2 + if subsection: + lines.append(f"{outer_indent}{comment}{indent}{subsection}:") + indent = space * 4 + for key, question in question_bank.questions.items(): + default = question.calculate_default() or "" + lines.append(f"{outer_indent}{comment}{indent}# {question.question}") + lines.append(f"{outer_indent}{comment}{indent}{key}: {default}") + + return lines + + +def generate_deployment_preseed(client: Client) -> str: + """Generate deployment preseed section.""" + name = utils.get_fqdn() + preseed_content = ["deployment:"] + try: + variables = load_answers(client, BOOTSTRAP_CONFIG_KEY) + except ClusterServiceUnavailableException: + variables = {} + bootstrap_bank = QuestionBank( + questions=bootstrap_questions(), + console=console, + previous_answers=variables.get("bootstrap", {}), + ) + preseed_content.extend(show_questions(bootstrap_bank, section="bootstrap")) + try: + variables = load_answers(client, MICROK8S_ADDONS_CONFIG_KEY) + except ClusterServiceUnavailableException: + variables = {} + microk8s_addons_bank = QuestionBank( + questions=microk8s_addons_questions(), + console=console, + previous_answers=variables.get("addons", {}), + ) + preseed_content.extend(show_questions(microk8s_addons_bank, section="addons")) + user_bank = QuestionBank( + questions=user_questions(), + console=console, + previous_answers=variables.get("user"), + ) + try: + variables = load_answers(client, CLOUD_CONFIG_SECTION) + except ClusterServiceUnavailableException: + variables = {} + preseed_content.extend(show_questions(user_bank, section="user")) + ext_net_bank_local = QuestionBank( + questions=ext_net_questions_local_only(), + console=console, + previous_answers=variables.get("external_network"), + ) + preseed_content.extend( + show_questions( + ext_net_bank_local, + section="external_network", + section_description="Local Access", + ) + ) + ext_net_bank_remote = QuestionBank( + questions=ext_net_questions(), + console=console, + previous_answers=variables.get("external_network"), + ) + preseed_content.extend( + show_questions( + ext_net_bank_remote, + section="external_network", + section_description="Remote Access", + comment_out=True, + ) + ) + microceph_config_bank = QuestionBank( + questions=microceph_questions(), + console=console, + previous_answers=variables.get("microceph_config", {}).get(name), + ) + preseed_content.extend( + show_questions( + microceph_config_bank, + section="microceph_config", + subsection=name, + section_description="MicroCeph config", + ) + ) + + preseed_content_final = "\n".join(preseed_content) + return preseed_content_final + + +def generate_software_manifest(manifest: Manifest) -> str: + space = " " + indent = space * 2 + comment = "# " + + try: + software_dict = asdict_with_extra_fields(manifest.software) + LOG.debug(f"Manifest software dict with extra fields: {software_dict}") + + # Remove terraform default sources + manifest_terraform_dict = software_dict.get("terraform", {}) + for name, value in manifest_terraform_dict.items(): + if value.get("source") and value.get("source").startswith( + 
"/snap/openstack" + ): + value["source"] = None + + software_yaml = yaml.safe_dump(software_dict, sort_keys=False) + + # TODO(hemanth): Add an option schema to print the JsonSchema for the + # Manifest. This will be easier when moved to pydantic 2.x + + # add comment to each line + software_lines = ( + f"{indent}{comment}{line}" for line in software_yaml.split("\n") + ) + software_yaml_commented = "\n".join(software_lines) + software_content = f"software:\n{software_yaml_commented}" + return software_content + except Exception as e: + LOG.debug(e) + raise click.ClickException(f"Manifest generation failed: {str(e)}") + + @click.command() @click.option( "-f", @@ -115,9 +271,6 @@ def generate( """ client: Client = ctx.obj - # TODO(hemanth): Add an option schema to print the JsonSchema for the - # Manifest. This will be easier when moved to pydantic 2.x - if not manifest_file: home = os.environ.get("SNAP_REAL_HOME") manifest_file = Path(home) / ".config" / "openstack" / "manifest.yaml" @@ -136,28 +289,16 @@ def generate( LOG.debug("Fallback to generating manifest with defaults") manifest_obj = Manifest.get_default_manifest(client) - try: - manifest_dict = asdict_with_extra_fields(manifest_obj) - LOG.debug(f"Manifest dict with extra fields: {manifest_dict}") - - # Remove terraform default sources - manifest_terraform_dict = manifest_dict.get("terraform", {}) - for name, value in manifest_terraform_dict.items(): - if value.get("source") and value.get("source").startswith( - "/snap/openstack" - ): - value["source"] = None - - manifest_yaml = yaml.safe_dump(manifest_dict, sort_keys=False) - - # add comment to each line - manifest_lines = (f"# {line}" for line in manifest_yaml.split("\n")) - manifest_yaml_commented = "\n".join(manifest_lines) + preseed_content = generate_deployment_preseed(client) + software_content = generate_software_manifest(manifest_obj) + try: with manifest_file.open("w") as file: file.write("# Generated Sunbeam Deployment Manifest\n\n") - file.write(manifest_yaml_commented) - except Exception as e: + file.write(preseed_content) + file.write("\n") + file.write(software_content) + except IOError as e: LOG.debug(e) raise click.ClickException(f"Manifest generation failed: {str(e)}") diff --git a/sunbeam-python/sunbeam/commands/microceph.py b/sunbeam-python/sunbeam/commands/microceph.py index 2e05f151..a1c2d807 100644 --- a/sunbeam-python/sunbeam/commands/microceph.py +++ b/sunbeam-python/sunbeam/commands/microceph.py @@ -15,7 +15,6 @@ import ast import logging -from pathlib import Path from typing import Optional import click @@ -133,14 +132,14 @@ def __init__( client: Client, name: str, jhelper: JujuHelper, - preseed_file: Optional[Path] = None, + deployment_preseed: dict | None = None, accept_defaults: bool = False, ): super().__init__("Configure MicroCeph storage", "Configuring MicroCeph storage") self.client = client self.name = name self.jhelper = jhelper - self.preseed_file = preseed_file + self.preseed = deployment_preseed self.accept_defaults = accept_defaults self.variables = {} self.machine_id = "" @@ -197,24 +196,22 @@ def prompt(self, console: Optional[Console] = None) -> None: self.variables.setdefault("microceph_config", {}) self.variables["microceph_config"].setdefault(self.name, {"osd_devices": ""}) - if self.preseed_file: - preseed = questions.read_preseed(self.preseed_file) - else: - preseed = {} # Set defaults - preseed.setdefault("microceph_config", {}) - preseed["microceph_config"].setdefault(self.name, {"osd_devices": None}) + 
self.preseed.setdefault("microceph_config", {}) + self.preseed["microceph_config"].setdefault(self.name, {"osd_devices": None}) # Preseed can have osd_devices as list. If so, change to comma separated str - osd_devices = preseed.get("microceph_config").get(self.name).get("osd_devices") + osd_devices = ( + self.preseed.get("microceph_config").get(self.name).get("osd_devices") + ) if isinstance(osd_devices, list): osd_devices_str = ",".join(osd_devices) - preseed["microceph_config"][self.name]["osd_devices"] = osd_devices_str + self.preseed["microceph_config"][self.name]["osd_devices"] = osd_devices_str microceph_config_bank = questions.QuestionBank( questions=self.microceph_config_questions(), console=console, # type: ignore - preseed=preseed.get("microceph_config").get(self.name), + preseed=self.preseed.get("microceph_config").get(self.name), previous_answers=self.variables.get("microceph_config").get(self.name), accept_defaults=self.accept_defaults, ) diff --git a/sunbeam-python/sunbeam/commands/microk8s.py b/sunbeam-python/sunbeam/commands/microk8s.py index bd5cb942..62847702 100644 --- a/sunbeam-python/sunbeam/commands/microk8s.py +++ b/sunbeam-python/sunbeam/commands/microk8s.py @@ -15,7 +15,6 @@ import ipaddress import logging -from pathlib import Path from typing import Optional import yaml @@ -94,7 +93,7 @@ def __init__( client: Client, manifest: Manifest, jhelper: JujuHelper, - preseed_file: Optional[Path] = None, + deployment_preseed: dict | None = None, accept_defaults: bool = False, refresh: bool = False, ): @@ -111,7 +110,7 @@ def __init__( refresh, ) - self.preseed_file = preseed_file + self.preseed = deployment_preseed or {} self.accept_defaults = accept_defaults self.variables = {} @@ -128,14 +127,10 @@ def prompt(self, console: Optional[Console] = None) -> None: self.variables = questions.load_answers(self.client, self._ADDONS_CONFIG) self.variables.setdefault("addons", {}) - if self.preseed_file: - preseed = questions.read_preseed(self.preseed_file) - else: - preseed = {} microk8s_addons_bank = questions.QuestionBank( questions=microk8s_addons_questions(), console=console, # type: ignore - preseed=preseed.get("addons"), + preseed=self.preseed.get("addons"), previous_answers=self.variables.get("addons", {}), accept_defaults=self.accept_defaults, ) diff --git a/sunbeam-python/sunbeam/commands/node.py b/sunbeam-python/sunbeam/commands/node.py index f0e4fe7b..e3bb8d46 100644 --- a/sunbeam-python/sunbeam/commands/node.py +++ b/sunbeam-python/sunbeam/commands/node.py @@ -14,8 +14,7 @@ # limitations under the License. import logging -from pathlib import Path -from typing import List, Optional +from typing import List import click import yaml @@ -155,12 +154,6 @@ def _print_output(token): @click.command() @click.option("-a", "--accept-defaults", help="Accept all defaults.", is_flag=True) -@click.option( - "-p", - "--preseed", - help="Preseed file.", - type=click.Path(exists=True, dir_okay=False, path_type=Path), -) @click.option("--token", type=str, help="Join token") @click.option( "--role", @@ -176,7 +169,6 @@ def join( ctx: click.Context, token: str, roles: List[Role], - preseed: Optional[Path] = None, accept_defaults: bool = False, ) -> None: """Join node to the cluster. 
@@ -225,6 +217,7 @@ def join( # Get manifest object once the cluster is joined manifest_obj = Manifest.load_latest_from_clusterdb(client, include_defaults=True) + preseed = manifest_obj.deployment machine_id = -1 machine_id_result = get_step_message(plan1_results, AddJujuMachineStep) @@ -249,7 +242,7 @@ def join( name, jhelper, accept_defaults=accept_defaults, - preseed_file=preseed, + deployment_preseed=preseed, ) ) @@ -260,7 +253,7 @@ def join( DeployHypervisorApplicationStep(client, manifest_obj, jhelper), AddHypervisorUnitStep(client, name, jhelper), SetLocalHypervisorOptions( - client, name, jhelper, join_mode=True, preseed_file=preseed + client, name, jhelper, join_mode=True, deployment_preseed=preseed ), ] ) diff --git a/sunbeam-python/sunbeam/commands/refresh.py b/sunbeam-python/sunbeam/commands/refresh.py index 8289759b..e979ee34 100644 --- a/sunbeam-python/sunbeam/commands/refresh.py +++ b/sunbeam-python/sunbeam/commands/refresh.py @@ -89,7 +89,7 @@ def refresh( client, include_defaults=True ) - LOG.debug(f"Manifest object used for refresh: {manifest_obj}") + LOG.debug(f"Manifest used for deployment - software: {manifest_obj.software}") data_location = snap.paths.user_data jhelper = JujuHelper(client, data_location) if upgrade_release: diff --git a/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py b/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py index 1479eebd..0163cf4a 100644 --- a/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py +++ b/sunbeam-python/sunbeam/commands/upgrades/intra_channel.py @@ -53,11 +53,13 @@ def is_skip(self, status: Optional[Status] = None) -> Result: def is_track_changed_for_any_charm(self, deployed_apps: dict): """Check if chanel track is same in manifest and deployed app.""" for app_name, (charm, channel, revision) in deployed_apps.items(): - if not self.manifest.charms.get(charm): + if not self.manifest.software.charms.get(charm): LOG.debug(f"Charm not present in manifest: {charm}") continue - channel_from_manifest = self.manifest.charms.get(charm).channel or "" + channel_from_manifest = ( + self.manifest.software.charms.get(charm).channel or "" + ) track_from_manifest = channel_from_manifest.split("/")[0] track_from_deployed_app = channel.split("/")[0] # Compare tracks @@ -78,7 +80,7 @@ def refresh_apps(self, apps: dict, model: str) -> None: Otherwise ignore so that terraform plan apply will take care of charm upgrade. 
""" for app_name, (charm, channel, revision) in apps.items(): - manifest_charm = self.manifest.charms.get(charm) + manifest_charm = self.manifest.software.charms.get(charm) if not manifest_charm: continue diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index e0bfa27d..8bb71a85 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -21,7 +21,7 @@ from typing import Any, Dict, List, Optional import yaml -from pydantic import Field, PrivateAttr +from pydantic import Field from pydantic.dataclasses import dataclass from snaphelpers import Snap @@ -94,15 +94,92 @@ class TerraformManifest: @dataclass(config=dict(extra="allow")) -class Manifest: +class SoftwareConfig: client: InitVar[Client] - tf_helpers: dict = PrivateAttr(default={}) - tfvar_map: dict = PrivateAttr(default={}) - default_manifest_dict: dict = PrivateAttr(default={}) + plugin_manager: InitVar[PluginManager] juju: Optional[JujuManifest] = None charms: Optional[Dict[str, CharmsManifest]] = None terraform: Optional[Dict[str, TerraformManifest]] = None + """ + # field_validator supported only in pydantix 2.x + @field_validator("terraform", "mode_after") + def validate_terraform(cls, terraform): + if terraform: + tf_keys = list(terraform.keys()) + if not set(tf_keys) <= set(VALID_TERRAFORM_PLANS): + raise ValueError( + f"Terraform keys should be one of {VALID_TERRAFORM_PLANS}" + ) + + return terraform + """ + + def validate_terraform_keys(self, default_software_config: dict): + if self.terraform: + tf_keys = set(self.terraform.keys()) + all_tfplans = default_software_config.get("terraform", {}).keys() + if not tf_keys <= all_tfplans: + raise ValueError( + f"Manifest Software Terraform keys should be one of {all_tfplans} " + ) + + def validate_charm_keys(self, default_software_config: dict): + if self.charms: + charms_keys = set(self.charms.keys()) + all_charms = default_software_config.get("charms", {}).keys() + if not charms_keys <= all_charms: + raise ValueError( + f"Manifest Software charms keys should be one of {all_charms} " + ) + + def __post_init__(self, client: Client, plugin_manager: PluginManager): + LOG.debug("Calling __post__init__") + plugin_manager.add_manifest_section(client, self) + default_software_config = self.get_default_software_as_dict( + client, plugin_manager + ) + # Add custom validations + self.validate_terraform_keys(default_software_config) + self.validate_charm_keys(default_software_config) + + @classmethod + def get_default_software_as_dict( + cls, client: Client, plugin_manager: PluginManager + ) -> dict: + snap = Snap() + software = {"juju": {"bootstrap_args": []}} + software["charms"] = { + charm: {"channel": channel} + for charm, channel in MANIFEST_CHARM_VERSIONS.items() + } + software["terraform"] = { + tfplan: {"source": Path(snap.paths.snap / "etc" / tfplan_dir)} + for tfplan, tfplan_dir in TERRAFORM_DIR_NAMES.items() + } + + # Update manifests from plugins + software_from_plugins = plugin_manager.get_all_plugin_manifests(client) + utils.merge_dict(software, software_from_plugins) + return copy.deepcopy(software) + + +class Manifest: + + def __init__( + self, + client: Client, + plugin_manager: PluginManager, + deployment: dict, + software: dict, + ): + self.client = client + self.plugin_manager = plugin_manager + self.deployment = deployment + self.software = SoftwareConfig(client, plugin_manager, **software) + self.tf_helpers = {} + self.tfvar_map = self._get_all_tfvar_map(client, plugin_manager) + 
@classmethod def load( cls, client: Client, manifest_file: Path, include_defaults: bool = False @@ -114,8 +191,15 @@ def load( if include_defaults: return cls.load_on_default(client, manifest_file) + plugin_manager = PluginManager() with manifest_file.open() as file: - return Manifest(client, **yaml.safe_load(file)) + override = yaml.safe_load(file) + return Manifest( + client, + plugin_manager, + override.get("deployment", {}), + override.get("software", {}), + ) @classmethod def load_latest_from_clusterdb( @@ -129,26 +213,45 @@ def load_latest_from_clusterdb( if include_defaults: return cls.load_latest_from_clusterdb_on_default(client) + plugin_manager = PluginManager() try: manifest_latest = client.cluster.get_latest_manifest() - return Manifest(client, **yaml.safe_load(manifest_latest.get("data"))) + override = yaml.safe_load(manifest_latest.get("data")) + return Manifest( + client, + plugin_manager, + override.get("deployment", {}), + override.get("software", {}), + ) except ManifestItemNotFoundException as e: LOG.debug(f"Error in getting latest manifest from cluster DB: {str(e)}") - return Manifest(client) + return Manifest(client, plugin_manager, {}, {}) @classmethod def load_on_default(cls, client: Client, manifest_file: Path) -> "Manifest": """Load manifest and override the default manifest""" + plugin_manager = PluginManager() with manifest_file.open() as file: override = yaml.safe_load(file) - default = cls.get_default_manifest_as_dict(client) - utils.merge_dict(default, override) - return Manifest(client, **default) + override_deployment = override.get("deployment") or {} + override_software = override.get("software") or {} + default_software = SoftwareConfig.get_default_software_as_dict( + client, plugin_manager + ) + LOG.warning(default_software) + LOG.warning(override_software) + utils.merge_dict(default_software, override_software) + return Manifest( + client, plugin_manager, override_deployment, default_software + ) @classmethod def load_latest_from_clusterdb_on_default(cls, client: Client) -> "Manifest": """Load the latest manifest from clusterdb""" - default = cls.get_default_manifest_as_dict(client) + plugin_manager = PluginManager() + default_software = SoftwareConfig.get_default_software_as_dict( + client, plugin_manager + ) try: manifest_latest = client.cluster.get_latest_manifest() override = yaml.safe_load(manifest_latest.get("data")) @@ -156,82 +259,22 @@ def load_latest_from_clusterdb_on_default(cls, client: Client) -> "Manifest": LOG.debug(f"Error in getting latest manifest from cluster DB: {str(e)}") override = {} - utils.merge_dict(default, override) - return Manifest(client, **default) - - @classmethod - def get_default_manifest_as_dict(cls, client: Client) -> dict: - snap = Snap() - m = { - "juju": {"bootstrap_args": []}, - "charms": {}, - "terraform": {}, - } - m["charms"] = { - charm: {"channel": channel} - for charm, channel in MANIFEST_CHARM_VERSIONS.items() - } - m["terraform"] = { - tfplan: {"source": Path(snap.paths.snap / "etc" / tfplan_dir)} - for tfplan, tfplan_dir in TERRAFORM_DIR_NAMES.items() - } - - # Update manifests from plugins - m_plugin = PluginManager().get_all_plugin_manifests(client) - utils.merge_dict(m, m_plugin) - - return copy.deepcopy(m) + override_deployment = override.get("deployment") or {} + override_software = override.get("software") or {} + utils.merge_dict(default_software, override_software) + return Manifest(client, plugin_manager, override_deployment, default_software) @classmethod def get_default_manifest(cls, client: 
Client) -> "Manifest": - return Manifest(client, **cls.get_default_manifest_as_dict(client)) - - """ - # field_validator supported only in pydantix 2.x - @field_validator("terraform", "mode_after") - def validate_terraform(cls, terraform): - if terraform: - tf_keys = list(terraform.keys()) - if not set(tf_keys) <= set(VALID_TERRAFORM_PLANS): - raise ValueError( - f"Terraform keys should be one of {VALID_TERRAFORM_PLANS}" - ) - - return terraform - """ - - def validate_terraform_keys(self, default_manifest: dict): - if self.terraform: - tf_keys = set(self.terraform.keys()) - all_tfplans = default_manifest.get("terraform", {}).keys() - if not tf_keys <= all_tfplans: - raise ValueError( - f"Manifest Terraform keys should be one of {all_tfplans} " - ) - - def validate_charm_keys(self, default_manifest: dict): - if self.charms: - charms_keys = set(self.charms.keys()) - all_charms = default_manifest.get("charms", {}).keys() - if not charms_keys <= all_charms: - raise ValueError(f"Manifest charms keys should be one of {all_charms} ") - - def __post_init__(self, client: Client): - LOG.debug("Calling __post__init__") - PluginManager().add_manifest_section(client, self) - self.default_manifest_dict = self.get_default_manifest_as_dict(client) - # Add custom validations - self.validate_terraform_keys(self.default_manifest_dict) - self.validate_charm_keys(self.default_manifest_dict) - - # Add object variables to store - self.tf_helpers = {} - self.tfvar_map = self._get_all_tfvar_map(client) - self.client = client + plugin_manager = PluginManager() + default_software = SoftwareConfig.get_default_software_as_dict( + client, plugin_manager + ) + return Manifest(client, plugin_manager, {}, default_software) - def _get_all_tfvar_map(self, client: Client) -> dict: + def _get_all_tfvar_map(self, client: Client, plugin_manager: PluginManager) -> dict: tfvar_map = copy.deepcopy(MANIFEST_ATTRIBUTES_TFVAR_MAP) - tfvar_map_plugin = PluginManager().get_all_plugin_manfiest_tfvar_map(client) + tfvar_map_plugin = plugin_manager.get_all_plugin_manfiest_tfvar_map(client) utils.merge_dict(tfvar_map, tfvar_map_plugin) return tfvar_map @@ -241,13 +284,13 @@ def get_tfhelper(self, tfplan: str) -> TerraformHelper: if self.tf_helpers.get(tfplan): return self.tf_helpers.get(tfplan) - if not (self.terraform and self.terraform.get(tfplan)): + if not (self.software.terraform and self.software.terraform.get(tfplan)): raise MissingTerraformInfoException( f"Terraform information missing in manifest for {tfplan}" ) tfplan_dir = TERRAFORM_DIR_NAMES.get(tfplan, tfplan) - src = self.terraform.get(tfplan).source + src = self.software.terraform.get(tfplan).source dst = snap.paths.user_common / "etc" / tfplan_dir LOG.debug(f"Updating {dst} from {src}...") shutil.copytree(src, dst, dirs_exist_ok=True) @@ -370,7 +413,7 @@ def _get_tfvars(self, tfplan: str, charms: Optional[list] = None) -> dict: # handle tfvars for charms section for charm, per_charm_tfvar_map in charms_tfvar_map.items(): - charm_ = self.charms.get(charm) + charm_ = self.software.charms.get(charm) if charm_: manifest_charm = asdict(charm_) for charm_attribute, tfvar_name in per_charm_tfvar_map.items(): diff --git a/sunbeam-python/sunbeam/jobs/plugin.py b/sunbeam-python/sunbeam/jobs/plugin.py index 567d91df..5ebc3fdf 100644 --- a/sunbeam-python/sunbeam/jobs/plugin.py +++ b/sunbeam-python/sunbeam/jobs/plugin.py @@ -279,11 +279,11 @@ def get_all_plugin_manfiest_tfvar_map(cls, client: Client) -> dict: return tfvar_map @classmethod - def add_manifest_section(cls, client, 
manifest) -> None: + def add_manifest_section(cls, client, software_config) -> None: plugins = cls.get_all_plugin_classes() for klass in plugins: plugin = klass(client) - plugin.add_manifest_section(manifest) + plugin.add_manifest_section(software_config) @classmethod def get_all_charms_in_openstack_plan(cls, client: Client) -> list: diff --git a/sunbeam-python/sunbeam/plugins/caas/plugin.py b/sunbeam-python/sunbeam/plugins/caas/plugin.py index 0a42499e..9858ed10 100644 --- a/sunbeam-python/sunbeam/plugins/caas/plugin.py +++ b/sunbeam-python/sunbeam/plugins/caas/plugin.py @@ -32,7 +32,7 @@ from sunbeam.commands.terraform import TerraformException, TerraformInitStep from sunbeam.jobs.common import BaseStep, Result, ResultType, run_plan from sunbeam.jobs.juju import JujuHelper -from sunbeam.jobs.manifest import Manifest +from sunbeam.jobs.manifest import Manifest, SoftwareConfig from sunbeam.plugins.interface.v1.base import PluginRequirement from sunbeam.plugins.interface.v1.openstack import ( OpenStackControlPlanePlugin, @@ -81,7 +81,7 @@ def run(self, status: Optional[Status] = None) -> Result: try: override_tfvars = {} try: - manifest_caas_config = asdict(self.manifest.caas_config) + manifest_caas_config = asdict(self.manifest.software.caas_config) for caas_config_attribute, tfvar_name in ( self.tfvar_map.get(self.tfplan, {}).get("caas_config", {}).items() ): @@ -156,14 +156,14 @@ def manifest_attributes_tfvar_map(self) -> dict: }, } - def add_manifest_section(self, manifest: Manifest) -> None: + def add_manifest_section(self, software_config: SoftwareConfig) -> None: """Adds manifest section""" try: - _caas_config = manifest.caas_config - manifest.caas_config = CaasConfig(**_caas_config) + _caas_config = software_config.caas_config + software_config.caas_config = CaasConfig(**_caas_config) except AttributeError: # Attribute not defined in manifest - manifest.caas_config = CaasConfig() + software_config.caas_config = CaasConfig() def set_application_names(self) -> list: """Application names handled by the terraform plan.""" diff --git a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py index 3aad963a..b2af7771 100644 --- a/sunbeam-python/sunbeam/plugins/telemetry/plugin.py +++ b/sunbeam-python/sunbeam/plugins/telemetry/plugin.py @@ -54,6 +54,7 @@ def manifest_defaults(self) -> dict: "aodh-k8s": {"channel": OPENSTACK_CHANNEL}, "gnocchi-k8s": {"channel": OPENSTACK_CHANNEL}, "ceilometer-k8s": {"channel": OPENSTACK_CHANNEL}, + "openstack-exporter-k8s": {"channel": OPENSTACK_CHANNEL}, } } @@ -77,6 +78,11 @@ def manifest_attributes_tfvar_map(self) -> dict: "revision": "ceilometer-revision", "config": "ceilometer-config", }, + "openstack-exporter-k8s": { + "channel": "openstack-exporter-channel", + "revision": "openstack-exporter-revision", + "config": "openstack-exporter-config", + }, } } } diff --git a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py index ceeaef1d..57ec974a 100644 --- a/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py +++ b/sunbeam-python/tests/unit/sunbeam/jobs/test_manifest.py @@ -30,52 +30,56 @@ from sunbeam.versions import OPENSTACK_CHANNEL, TERRAFORM_DIR_NAMES test_manifest = """ -juju: - bootstrap_args: - - --agent-version=3.2.4 -charms: - keystone-k8s: - channel: 2023.1/stable - revision: 234 - config: - debug: True - glance-k8s: - channel: 2023.1/stable - revision: 134 -terraform: - openstack-plan: - source: /home/ubuntu/openstack-tf - 
hypervisor-plan: - source: /home/ubuntu/hypervisor-tf +software: + juju: + bootstrap_args: + - --agent-version=3.2.4 + charms: + keystone-k8s: + channel: 2023.1/stable + revision: 234 + config: + debug: True + glance-k8s: + channel: 2023.1/stable + revision: 134 + terraform: + openstack-plan: + source: /home/ubuntu/openstack-tf + hypervisor-plan: + source: /home/ubuntu/hypervisor-tf """ malformed_test_manifest = """ -charms: - keystone-k8s: - channel: 2023.1/stable - revision: 234 - conf +software: + charms: + keystone-k8s: + channel: 2023.1/stable + revision: 234 + conf """ test_manifest_invalid_values = """ -charms: - keystone-k8s: - channel: 2023.1/stable - revision: 234 - # Config value should be dictionary but provided str - config: debug +software: + charms: + keystone-k8s: + channel: 2023.1/stable + revision: 234 + # Config value should be dictionary but provided str + config: debug """ test_manifest_incorrect_terraform_key = """ -charms: - keystone-k8s: - channel: 2023.1/stable - revision: 234 - config: - debug: True -terraform: - fake-plan: - source: /home/ubuntu/tfplan +software: + charms: + keystone-k8s: + channel: 2023.1/stable + revision: 234 + config: + debug: True + terraform: + fake-plan: + source: /home/ubuntu/tfplan """ @@ -114,18 +118,18 @@ def test_load(self, mocker, snap, cclient, pluginmanager, tmpdir): manifest_file = tmpdir.mkdir("manifests").join("test_manifest.yaml") manifest_file.write(test_manifest) manifest_obj = manifest.Manifest.load(cclient, manifest_file) - ks_manifest = manifest_obj.charms.get("keystone-k8s") + ks_manifest = manifest_obj.software.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} # Assert defaults does not exist - assert "nova" not in manifest_obj.charms.keys() + assert "nova" not in manifest_obj.software.charms.keys() test_manifest_dict = yaml.safe_load(test_manifest) - assert manifest_obj.juju.bootstrap_args == test_manifest_dict.get( - "juju", {} - ).get("bootstrap_args", []) + assert manifest_obj.software.juju.bootstrap_args == test_manifest_dict.get( + "software", {} + ).get("juju", {}).get("bootstrap_args", []) def test_load_on_default(self, mocker, snap, cclient, pluginmanager, tmpdir): mocker.patch.object(manifest, "Snap", return_value=snap) @@ -136,13 +140,13 @@ def test_load_on_default(self, mocker, snap, cclient, pluginmanager, tmpdir): ) # Check updates from manifest file - ks_manifest = manifest_obj.charms.get("keystone-k8s") + ks_manifest = manifest_obj.software.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} # Check default ones - nova_manifest = manifest_obj.charms.get("nova-k8s") + nova_manifest = manifest_obj.software.charms.get("nova-k8s") assert nova_manifest.channel == OPENSTACK_CHANNEL assert nova_manifest.revision is None assert nova_manifest.config is None @@ -151,13 +155,13 @@ def test_load_latest_from_clusterdb(self, mocker, snap, cclient, pluginmanager): mocker.patch.object(manifest, "Snap", return_value=snap) cclient.cluster.get_latest_manifest.return_value = {"data": test_manifest} manifest_obj = manifest.Manifest.load_latest_from_clusterdb(cclient) - ks_manifest = manifest_obj.charms.get("keystone-k8s") + ks_manifest = manifest_obj.software.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} # Assert 
defaults does not exist - assert "nova-k8s" not in manifest_obj.charms.keys() + assert "nova-k8s" not in manifest_obj.software.charms.keys() def test_load_latest_from_clusterdb_on_default( self, mocker, snap, cclient, pluginmanager @@ -167,13 +171,13 @@ def test_load_latest_from_clusterdb_on_default( manifest_obj = manifest.Manifest.load_latest_from_clusterdb( cclient, include_defaults=True ) - ks_manifest = manifest_obj.charms.get("keystone-k8s") + ks_manifest = manifest_obj.software.charms.get("keystone-k8s") assert ks_manifest.channel == "2023.1/stable" assert ks_manifest.revision == 234 assert ks_manifest.config == {"debug": True} # Check default ones - nova_manifest = manifest_obj.charms.get("nova-k8s") + nova_manifest = manifest_obj.software.charms.get("nova-k8s") assert nova_manifest.channel == OPENSTACK_CHANNEL assert nova_manifest.revision is None assert nova_manifest.config is None @@ -181,7 +185,7 @@ def test_load_latest_from_clusterdb_on_default( def test_get_default_manifest(self, mocker, snap, cclient, pluginmanager): mocker.patch.object(manifest, "Snap", return_value=snap) default_manifest = manifest.Manifest.get_default_manifest(cclient) - nova_manifest = default_manifest.charms.get("nova-k8s") + nova_manifest = default_manifest.software.charms.get("nova-k8s") assert nova_manifest.channel == OPENSTACK_CHANNEL assert nova_manifest.revision is None assert nova_manifest.config is None @@ -243,7 +247,8 @@ def test_get_tfhelper_tfplan_override_in_manifest( test_manifest_dict = yaml.safe_load(test_manifest) copytree.assert_called_once_with( Path( - test_manifest_dict.get("terraform", {}) + test_manifest_dict.get("software", "") + .get("terraform", {}) .get("openstack-plan", {}) .get("source") ), From 9cce59d232109e35c1f1dd06bff90f097e260164 Mon Sep 17 00:00:00 2001 From: Guillaume Boutry Date: Fri, 9 Feb 2024 02:18:18 +0100 Subject: [PATCH 26/27] Compute number of OSDs to decide number of replicas Since recent charm-microceph update, replicas size 1 is available under limited conditions, get the number of OSDs configured on bootstrap and resize to update the number of pool replicas for glance and cinder. 
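A condensed sketch of the logic this change adds (the leader-unit lookup and error handling from _get_number_of_osds are left out here, and jhelper stands in for the JujuHelper instance used in sunbeam.commands.microceph):

    import ast

    def compute_ceph_replica_scale(osds: int) -> int:
        # Pool replication follows the number of OSDs, capped at 3.
        return min(osds, 3)

    async def get_number_of_osds(jhelper, model: str, unit: str) -> int:
        # The microceph 'list-disks' action returns stringified lists, so the
        # 'osds' entry is parsed the same way the new list_disks() helper does.
        action_result = await jhelper.run_action(unit, model, "list-disks")
        osds = ast.literal_eval(action_result.get("osds", "[]"))
        return len(osds)

    # e.g. a bootstrap node with two OSDs ends up with a replication count of 2:
    # compute_ceph_replica_scale(2) == 2
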
--- cloud/etc/deploy-microceph/variables.tf | 2 +- sunbeam-python/sunbeam/commands/microceph.py | 26 ++++++++++++++----- sunbeam-python/sunbeam/commands/openstack.py | 26 ++++++++++++------- .../unit/sunbeam/commands/test_openstack.py | 16 +++++++----- 4 files changed, 46 insertions(+), 24 deletions(-) diff --git a/cloud/etc/deploy-microceph/variables.tf b/cloud/etc/deploy-microceph/variables.tf index 7495dd6d..dfbe888f 100644 --- a/cloud/etc/deploy-microceph/variables.tf +++ b/cloud/etc/deploy-microceph/variables.tf @@ -33,7 +33,7 @@ variable "charm_microceph_config" { variable "microceph_channel" { description = "K8S channel to deploy, not the operator channel" - default = "latest/stable" + default = "reef/stable" } variable "machine_ids" { diff --git a/sunbeam-python/sunbeam/commands/microceph.py b/sunbeam-python/sunbeam/commands/microceph.py index a1c2d807..548b1085 100644 --- a/sunbeam-python/sunbeam/commands/microceph.py +++ b/sunbeam-python/sunbeam/commands/microceph.py @@ -57,6 +57,22 @@ def microceph_questions(): } +async def list_disks(jhelper: JujuHelper, model: str, unit: str) -> tuple[dict, dict]: + """Call list-disks action on an unit.""" + LOG.debug("Running list-disks on : %r", unit) + action_result = await jhelper.run_action(unit, model, "list-disks") + LOG.debug( + "Result after running action list-disks on %r: %r", + unit, + action_result, + ) + osds = ast.literal_eval(action_result.get("osds", "[]")) + unpartitioned_disks = ast.literal_eval( + action_result.get("unpartitioned-disks", "[]") + ) + return osds, unpartitioned_disks + + class DeployMicrocephApplicationStep(DeployMachineApplicationStep): """Deploy Microceph application using Terraform""" @@ -165,14 +181,10 @@ def get_unpartitioned_disks(self) -> list: unit = run_sync( self.jhelper.get_unit_from_machine(APPLICATION, self.machine_id, MODEL) ) - LOG.debug(f"Running action list-disks on {unit.entity_id}") - action_result = run_sync( - self.jhelper.run_action(unit.entity_id, MODEL, "list-disks") + _, unpartitioned_disks = run_sync( + list_disks(self.jhelper, MODEL, unit.entity_id) ) - LOG.debug(f"Result after running action list-disks: {action_result}") - - disks = ast.literal_eval(action_result.get("unpartitioned-disks", "[]")) - unpartitioned_disks = [disk.get("path") for disk in disks] + unpartitioned_disks = [disk.get("path") for disk in unpartitioned_disks] # Remove duplicates if any unpartitioned_disks = list(set(unpartitioned_disks)) if OSD_PATH_PREFIX in unpartitioned_disks: diff --git a/sunbeam-python/sunbeam/commands/openstack.py b/sunbeam-python/sunbeam/commands/openstack.py index 309400bb..c1e97650 100644 --- a/sunbeam-python/sunbeam/commands/openstack.py +++ b/sunbeam-python/sunbeam/commands/openstack.py @@ -22,10 +22,10 @@ from lightkube.resources.core_v1 import Service from rich.status import Status +import sunbeam.commands.microceph as microceph from sunbeam.clusterd.client import Client from sunbeam.clusterd.service import ConfigItemNotFoundException from sunbeam.commands.juju import JujuStepHelper -from sunbeam.commands.microceph import APPLICATION as MICROCEPH_APPLICATION from sunbeam.commands.microk8s import ( CREDENTIAL_SUFFIX, MICROK8S_CLOUD, @@ -117,10 +117,15 @@ def compute_ingress_scale(topology: str, control_nodes: int) -> int: return control_nodes -def compute_ceph_replica_scale(topology: str, storage_nodes: int) -> int: - if topology == "single" or storage_nodes < 2: - return 1 - return min(storage_nodes, 3) +def compute_ceph_replica_scale(osds: int) -> int: + return min(osds, 3) + + 
+async def _get_number_of_osds(jhelper: JujuHelper) -> int: + """Fetch the number of osds from the microceph application""" + leader = await jhelper.get_leader_unit(microceph.APPLICATION, microceph.MODEL) + osds, _ = await microceph.list_disks(jhelper, microceph.MODEL, leader) + return len(osds) class DeployControlPlaneStep(BaseStep, JujuStepHelper): @@ -154,8 +159,11 @@ def get_storage_tfvars(self) -> dict: tfvars = {} storage_nodes = self.client.cluster.list_nodes_by_role("storage") if storage_nodes: + tfvars["ceph-osd-replication-count"] = compute_ceph_replica_scale( + run_sync(_get_number_of_osds(self.jhelper)) + ) tfvars["enable-ceph"] = True - tfvars["ceph-offer-url"] = f"{CONTROLLER_MODEL}.{MICROCEPH_APPLICATION}" + tfvars["ceph-offer-url"] = f"{CONTROLLER_MODEL}.{microceph.APPLICATION}" else: tfvars["enable-ceph"] = False @@ -325,11 +333,11 @@ def run(self, status: Optional[Status] = None) -> Result: "ha-scale": compute_ha_scale(topology, len(control_nodes)), "os-api-scale": compute_os_api_scale(topology, len(control_nodes)), "ingress-scale": compute_ingress_scale(topology, len(control_nodes)), + "enable-ceph": len(storage_nodes) > 0, + "ceph-offer-url": f"{CONTROLLER_MODEL}.{microceph.APPLICATION}", "ceph-osd-replication-count": compute_ceph_replica_scale( - topology, len(storage_nodes) + run_sync(_get_number_of_osds(self.jhelper)) ), - "enable-ceph": len(storage_nodes) > 0, - "ceph-offer-url": f"{CONTROLLER_MODEL}.{MICROCEPH_APPLICATION}", } self.update_status(status, "scaling services") diff --git a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py index 5fde8be7..7a904f4d 100644 --- a/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py +++ b/sunbeam-python/tests/unit/sunbeam/commands/test_openstack.py @@ -63,6 +63,7 @@ def __init__(self, methodName: str = "runTest") -> None: def setUp(self): self.jhelper = AsyncMock() + self.jhelper.run_action.return_value = {} self.manifest = Mock() self.client = Mock() @@ -186,6 +187,7 @@ def setUp(self): ) self.read_config.start() self.jhelper = AsyncMock() + self.jhelper.run_action.return_value = {} self.manifest = Mock() def tearDown(self): @@ -412,16 +414,16 @@ def test_compute_ingress_scale(topology, control_nodes, scale): @pytest.mark.parametrize( - "topology,storage_nodes,scale", + "osds,scale", [ - ("single", 1, 1), - ("multi", 1, 1), - ("multi", 9, 3), - ("multi", 2, 2), + (1, 1), + (1, 1), + (9, 3), + (2, 2), ], ) -def test_compute_ceph_replica_scale(topology, storage_nodes, scale): - assert compute_ceph_replica_scale(topology, storage_nodes) == scale +def test_compute_ceph_replica_scale(osds, scale): + assert compute_ceph_replica_scale(osds) == scale class TestReapplyOpenStackTerraformPlanStep(unittest.TestCase): From e55c2c889c677fa34169c804e345f41461275da9 Mon Sep 17 00:00:00 2001 From: Hemanth Nakkina Date: Fri, 9 Feb 2024 07:32:42 +0530 Subject: [PATCH 27/27] Remove unnecessary warnings --- sunbeam-python/sunbeam/jobs/manifest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sunbeam-python/sunbeam/jobs/manifest.py b/sunbeam-python/sunbeam/jobs/manifest.py index 8bb71a85..ca4a8544 100644 --- a/sunbeam-python/sunbeam/jobs/manifest.py +++ b/sunbeam-python/sunbeam/jobs/manifest.py @@ -238,8 +238,6 @@ def load_on_default(cls, client: Client, manifest_file: Path) -> "Manifest": default_software = SoftwareConfig.get_default_software_as_dict( client, plugin_manager ) - LOG.warning(default_software) - LOG.warning(override_software) 
utils.merge_dict(default_software, override_software) return Manifest( client, plugin_manager, override_deployment, default_software