Refactor + Multi admiralty test
@@ -14,61 +14,60 @@ import (
 	tools "cloud.o-forge.io/core/oc-lib/tools"
 )


 type AdmiraltySetter struct {
-	Id string // ID to identify the execution, correspond to workflow_executions id
-	NodeName string // Allows to retrieve the name of the node used for this execution on each peer {"peerId": "nodeName"}
+	Id       string // ID to identify the execution, correspond to workflow_executions id
+	NodeName string // Allows to retrieve the name of the node used for this execution on each peer {"peerId": "nodeName"}
 }

-func (s *AdmiraltySetter) InitializeAdmiralty(localPeerID string,remotePeerID string) error {
+func (s *AdmiraltySetter) InitializeAdmiralty(localPeerID string, remotePeerID string) error {

 	logger := logs.GetLogger()

-	data := oclib.NewRequest(oclib.LibDataEnum(oclib.PEER),"",localPeerID,nil,nil).LoadOne(remotePeerID)
+	data := oclib.NewRequest(oclib.LibDataEnum(oclib.PEER), "", localPeerID, nil, nil).LoadOne(remotePeerID)
 	if data.Code != 200 {
 		logger.Error().Msg("Error while trying to instantiate remote peer " + remotePeerID)
 		return fmt.Errorf(data.Err)
 	}
 	remotePeer := data.ToPeer()

-	data = oclib.NewRequest(oclib.LibDataEnum(oclib.PEER),"",localPeerID,nil,nil).LoadOne(localPeerID)
-	if data.Code != 200 {
-		logger.Error().Msg("Error while trying to instantiate local peer " + remotePeerID)
-		return fmt.Errorf(data.Err)
-	}
-	localPeer := data.ToPeer()
+	data = oclib.NewRequest(oclib.LibDataEnum(oclib.PEER), "", localPeerID, nil, nil).LoadOne(localPeerID)
+	if data.Code != 200 {
+		logger.Error().Msg("Error while trying to instantiate local peer " + remotePeerID)
+		return fmt.Errorf(data.Err)
+	}
+	localPeer := data.ToPeer()

-	caller := tools.NewHTTPCaller(
-		map[tools.DataType]map[tools.METHOD]string{
-			tools.ADMIRALTY_SOURCE: {
-				tools.POST :"/:id",
-			},
-			tools.ADMIRALTY_KUBECONFIG: {
-				tools.GET:"/:id",
-			},
-			tools.ADMIRALTY_SECRET: {
-				tools.POST:"/:id/" + remotePeerID,
-			},
-			tools.ADMIRALTY_TARGET: {
-				tools.POST:"/:id/" + remotePeerID,
-			},
-			tools.ADMIRALTY_NODES: {
-				tools.GET:"/:id/" + remotePeerID,
-			},
-		},
-	)
+	caller := tools.NewHTTPCaller(
+		map[tools.DataType]map[tools.METHOD]string{
+			tools.ADMIRALTY_SOURCE: {
+				tools.POST: "/:id",
+			},
+			tools.ADMIRALTY_KUBECONFIG: {
+				tools.GET: "/:id",
+			},
+			tools.ADMIRALTY_SECRET: {
+				tools.POST: "/:id/" + remotePeerID,
+			},
+			tools.ADMIRALTY_TARGET: {
+				tools.POST: "/:id/" + remotePeerID,
+			},
+			tools.ADMIRALTY_NODES: {
+				tools.GET: "/:id/" + remotePeerID,
+			},
+		},
+	)

 	logger.Info().Msg("\n\n Creating the Admiralty Source on " + remotePeerID + " ns-" + s.Id)
-	_ = s.callRemoteExecution(remotePeer, []int{http.StatusCreated, http.StatusConflict},caller, s.Id, tools.ADMIRALTY_SOURCE, tools.POST, nil, true)
+	_ = s.callRemoteExecution(remotePeer, []int{http.StatusCreated, http.StatusConflict}, caller, s.Id, tools.ADMIRALTY_SOURCE, tools.POST, nil, true)
 	logger.Info().Msg("\n\n Retrieving kubeconfig with the secret on " + remotePeerID + " ns-" + s.Id)
 	kubeconfig := s.getKubeconfig(remotePeer, caller)
 	logger.Info().Msg("\n\n Creating a secret from the kubeconfig " + localPeerID + " ns-" + s.Id)
-	_ = s.callRemoteExecution(localPeer, []int{http.StatusCreated}, caller,s.Id, tools.ADMIRALTY_SECRET, tools.POST,kubeconfig, true)
-	logger.Info().Msg("\n\n Creating the Admiralty Target on " + localPeerID + " in namespace " + s.Id )
-	_ = s.callRemoteExecution(localPeer,[]int{http.StatusCreated, http.StatusConflict},caller,s.Id,tools.ADMIRALTY_TARGET,tools.POST, nil, true)
+	_ = s.callRemoteExecution(localPeer, []int{http.StatusCreated}, caller, s.Id, tools.ADMIRALTY_SECRET, tools.POST, kubeconfig, true)
+	logger.Info().Msg("\n\n Creating the Admiralty Target on " + localPeerID + " in namespace " + s.Id)
+	_ = s.callRemoteExecution(localPeer, []int{http.StatusCreated, http.StatusConflict}, caller, s.Id, tools.ADMIRALTY_TARGET, tools.POST, nil, true)
 	logger.Info().Msg("\n\n Checking for the creation of the admiralty node on " + localPeerID + " ns-" + s.Id)
-	s.checkNodeStatus(localPeer,caller)
+	s.checkNodeStatus(localPeer, caller)

 	return nil
 }

@@ -90,7 +89,7 @@ func (s *AdmiraltySetter) getKubeconfig(peer *peer.Peer, caller *tools.HTTPCalle
 	return kubedata
 }

-func (*AdmiraltySetter) callRemoteExecution(peer *peer.Peer, expectedCode []int,caller *tools.HTTPCaller, dataID string, dt tools.DataType, method tools.METHOD, body interface{}, panicCode bool) *peer.PeerExecution {
+func (*AdmiraltySetter) callRemoteExecution(peer *peer.Peer, expectedCode []int, caller *tools.HTTPCaller, dataID string, dt tools.DataType, method tools.METHOD, body interface{}, panicCode bool) *peer.PeerExecution {
 	l := utils.GetLogger()
 	resp, err := peer.LaunchPeerExecution(peer.UUID, dataID, dt, method, body, caller)
 	if err != nil {
@@ -112,7 +111,7 @@ func (*AdmiraltySetter) callRemoteExecution(peer *peer.Peer, expectedCode []int,
 	return resp
 }

-func (s *AdmiraltySetter) storeNodeName(caller *tools.HTTPCaller){
+func (s *AdmiraltySetter) storeNodeName(caller *tools.HTTPCaller) {
 	var data map[string]interface{}
 	if resp, ok := caller.LastResults["body"]; ok {
 		json.Unmarshal(resp.([]byte), &data)
@@ -129,10 +128,10 @@ func (s *AdmiraltySetter) storeNodeName(caller *tools.HTTPCaller){
 	}
 }

-func (s *AdmiraltySetter) checkNodeStatus(localPeer *peer.Peer, caller *tools.HTTPCaller){
-	for i := range(5) {
+func (s *AdmiraltySetter) checkNodeStatus(localPeer *peer.Peer, caller *tools.HTTPCaller) {
+	for i := range 5 {
 		time.Sleep(10 * time.Second) // let some time for kube to generate the node
-		_ = s.callRemoteExecution(localPeer,[]int{http.StatusOK},caller,s.Id,tools.ADMIRALTY_NODES,tools.GET, nil, false)
+		_ = s.callRemoteExecution(localPeer, []int{http.StatusOK}, caller, s.Id, tools.ADMIRALTY_NODES, tools.GET, nil, false)
 		if caller.LastResults["code"] == 200 {
 			s.storeNodeName(caller)
 			return
@@ -143,5 +142,5 @@ func (s *AdmiraltySetter) checkNodeStatus(localPeer *peer.Peer, caller *tools.HT
 		}
 		logger.Info().Msg("Could not verify that node is up. Retrying...")
 	}
-
-}
+
+}
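For context, a minimal usage sketch of the refactored setter, assuming it is driven from workflow execution code: only the AdmiraltySetter type and the InitializeAdmiralty signature come from the diff above; the package wrapper, execution ID and peer UUIDs are hypothetical placeholders.

package main

import "fmt"

func main() {
	// Hypothetical driver code: the IDs below are placeholders, not values from the repository.
	setter := &AdmiraltySetter{Id: "<workflow-execution-id>"}
	if err := setter.InitializeAdmiralty("<local-peer-uuid>", "<remote-peer-uuid>"); err != nil {
		fmt.Println("admiralty setup between peers failed:", err)
		return
	}
	// Per the struct comment, NodeName is presumably filled once the admiralty node is detected.
	fmt.Println("admiralty node registered as:", setter.NodeName)
}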