test of a lighter formula of code
@@ -1,10 +1,8 @@
package processing

import (
	"encoding/json"

	"cloud.o-forge.io/core/oc-lib/models/resource_model"
	"cloud.o-forge.io/core/oc-lib/models/resources/compute"
	"cloud.o-forge.io/core/oc-lib/models/resources/resource_model"
	"cloud.o-forge.io/core/oc-lib/models/utils"
	"cloud.o-forge.io/core/oc-lib/tools"
)
@@ -29,39 +27,20 @@ type Expose struct {
*/
type ProcessingResource struct {
	resource_model.AbstractResource
	IsService    bool           `json:"is_service,omitempty" bson:"is_service,omitempty"`       // IsService is a flag that indicates if the processing is a service
	CPUs         []*compute.CPU `bson:"cpus,omitempty" json:"cp_us,omitempty"`                  // CPUs is the list of CPUs
	GPUs         []*compute.GPU `bson:"gpus,omitempty" json:"gp_us,omitempty"`                  // GPUs is the list of GPUs
	RAM          *compute.RAM   `bson:"ram,omitempty" json:"ram,omitempty"`                     // RAM is the RAM
	Storage      uint           `bson:"storage,omitempty" json:"storage,omitempty"`             // Storage is the storage
	Parallel     bool           `bson:"parallel,omitempty" json:"parallel,omitempty"`           // Parallel is a flag that indicates if the processing is parallel
	ScalingModel uint           `bson:"scaling_model,omitempty" json:"scaling_model,omitempty"` // ScalingModel is the scaling model
	DiskIO       string         `bson:"disk_io,omitempty" json:"disk_io,omitempty"`             // DiskIO is the disk IO
	Container    *Container     `bson:"container,omitempty" json:"container,omitempty"`         // Container is the container
	Expose       []Expose       `bson:"expose,omitempty" json:"expose,omitempty"`               // Expose is the execution
}
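Note the tag asymmetry above: the BSON keys stay "cpus"/"gpus" while the JSON keys are "cp_us"/"gp_us". A minimal sketch of what that means for JSON output, assuming it lives inside this processing package (the helper name and field values are illustrative, not part of the commit; "fmt" would need to be imported):

	// printKeys is a hypothetical helper, only here to surface the JSON key names
	// produced by the struct tags above; it is not part of the repository.
	func printKeys() {
		p := ProcessingResource{
			IsService: true,
			CPUs:      []*compute.CPU{{}}, // one empty CPU, just to show the "cp_us" key
			Storage:   10,                 // illustrative value
		}
		b, _ := json.Marshal(p) // "encoding/json" is already imported by this file
		fmt.Println(string(b))  // e.g. {"is_service":true,"cp_us":[{}],"storage":10,...} plus whatever AbstractResource contributes
	}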
func (dma *ProcessingResource) Deserialize(j map[string]interface{}) utils.DBObject {
	b, err := json.Marshal(j)
	if err != nil {
		return nil
	}
	json.Unmarshal(b, dma)
	return dma
}
func (dma *ProcessingResource) Serialize() map[string]interface{} {
	var m map[string]interface{}
	b, err := json.Marshal(dma)
	if err != nil {
		return nil
	}
	json.Unmarshal(b, &m)
	return m
}
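Deserialize and Serialize above are mirror operations through encoding/json. A minimal round-trip sketch, assuming it runs inside the same package (the helper name and values are illustrative, not part of the commit):

	// roundTrip is a hypothetical helper showing how the two methods mirror each other.
	func roundTrip() {
		src := &ProcessingResource{
			IsService: true,
			Parallel:  true,
			Storage:   512, // illustrative value
		}
		m := src.Serialize()                          // struct -> map[string]interface{} via a JSON round-trip
		dst := (&ProcessingResource{}).Deserialize(m) // map -> utils.DBObject; nil if marshalling fails
		_ = dst
	}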
func (d *ProcessingResource) GetAccessor(caller *tools.HTTPCaller) utils.Accessor {
	data := New()                                // Create a new instance of the accessor
	data.Init(tools.PROCESSING_RESOURCE, caller) // Initialize the accessor with the PROCESSING_RESOURCE model type
func (d *ProcessingResource) GetAccessor(peerID string, groups []string, caller *tools.HTTPCaller) utils.Accessor {
	data := New()                                                // Create a new instance of the accessor
	data.Init(tools.PROCESSING_RESOURCE, peerID, groups, caller) // Initialize the accessor with the PROCESSING_RESOURCE model type
	return data
}
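A sketch of calling the peerID/groups variant of GetAccessor shown above; the peer ID and group values are placeholders, not part of the commit:

	// exampleAccessor is a hypothetical call site for the new signature.
	func exampleAccessor(caller *tools.HTTPCaller) utils.Accessor {
		res := &ProcessingResource{}
		return res.GetAccessor("peer-id-placeholder", []string{"group-placeholder"}, caller)
	}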