commit 26157b12c7
parent ca4ac988b6
@@ -48,6 +48,25 @@ type (
	}
)
/******************ImportTask end*************************/

/******************ExportTask start*************************/
type (
	CreateExportTaskReq {
		ProjectId  string `json:"project_id"`
		DatasetId  string `json:"dataset_id"`
		ExportPath string `json:"export_path"`
		// AnnotationFormat string `json:"annotation_format"`
		// ExportFormat int64 `json:"export_format"`
		// ExportParams ExportParams `json:"export_params"`
	}

	CreateExportTaskResp {
		TaskId string `json:"task_id"`
	}
)

/******************ExportTask end*************************/

/******************taskList start*************************/
type (
	ListImportTasksReq {
@@ -280,6 +299,121 @@ type (
)
/******************ListTrainingJobs end*************************/

/******************Create Service Start*************************/
type (
	CreateServiceReq {
		WorkspaceId     string          `json:"workspaceId"`
		Schedule        Scheduler       `json:"schedule"`
		ClusterId       string          `json:"clusterId"`
		InferType       string          `json:"inferType"`
		VpcId           string          `json:"vpcId"`
		ServiceName     string          `json:"serviceName"`
		Description     string          `json:"description"`
		SecurityGroupId string          `json:"securityGroupId"`
		SubnetNetworkId string          `json:"subnetNetworkId"`
		Config          []ServiceConfig `json:"config"`
		ProjectId       string          `json:"projectId"`
	}

	CreateServiceResp {
		Code        uint32   `json:"code"`
		ServiceId   string   `json:"serviceId"`
		ResourceIds []string `json:"resourceIds"`
	}
)

type Scheduler {
	Duration int32  `json:"duration"`
	TimeUnit string `json:"timeUnit"`
	Type     string `json:"type"`
}

type ServiceConfig {
	CustomSpec    CustomSpec        `json:"customSpec"`
	Envs          map[string]string `json:"envs"`
	Specification string            `json:"specification"`
	Weight        int32             `json:"weight"`
	ModelId       string            `json:"modelId"`
	SrcPath       string            `json:"srcPath"`
	ReqUri        string            `json:"reqUri"`
	MappingType   string            `json:"mappingType"`
	ClusterId     string            `json:"clusterId"`
	Nodes         []string          `json:"nodes"`
	SrcType       string            `json:"srcType"`
	DestPath      string            `json:"destPath"`
	InstanceCount int32             `json:"instanceCount"`
}

type CustomSpec {
	GpuP4      float64 `json:"gpuP4"`
	Memory     int64   `json:"memory"`
	Cpu        float64 `json:"cpu"`
	AscendA310 int64   `json:"ascendA310"`
}
/******************Create Service End*************************/
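For orientation, here is a minimal sketch of how a request built against these definitions nests Scheduler, ServiceConfig and CustomSpec. The structs below are local stand-ins copied from the field names and json tags above (goctl generates the real ones in internal/types), only a subset of fields is shown, and every concrete value is an illustrative placeholder.

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of the .api definitions above; trimmed to the fields used here.
type Scheduler struct {
	Duration int32  `json:"duration"`
	TimeUnit string `json:"timeUnit"`
	Type     string `json:"type"`
}

type CustomSpec struct {
	Cpu    float64 `json:"cpu"`
	Memory int64   `json:"memory"`
}

type ServiceConfig struct {
	CustomSpec    CustomSpec `json:"customSpec"`
	Specification string     `json:"specification"`
	Weight        int32      `json:"weight"`
	ModelId       string     `json:"modelId"`
	InstanceCount int32      `json:"instanceCount"`
}

type CreateServiceReq struct {
	ProjectId   string          `json:"projectId"`
	ServiceName string          `json:"serviceName"`
	InferType   string          `json:"inferType"`
	Schedule    Scheduler       `json:"schedule"`
	Config      []ServiceConfig `json:"config"`
}

func main() {
	// All values are placeholders, not taken from this commit.
	req := CreateServiceReq{
		ProjectId:   "my-project-id",
		ServiceName: "demo-infer",
		InferType:   "real-time",
		Schedule:    Scheduler{Duration: 1, TimeUnit: "HOURS", Type: "stop"},
		Config: []ServiceConfig{{
			ModelId:       "my-model-id",
			Specification: "custom",
			InstanceCount: 1,
			Weight:        100,
			CustomSpec:    CustomSpec{Cpu: 2, Memory: 4096},
		}},
	}
	body, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(body))
}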

/******************Delete Service Start*************************/
type (
	deleteServiceReq {
		ProjectId string `json:"projectId"`
		ServiceId string `json:"serviceId"`
	}

	deleteServiceResp {
	}
)
/******************Delete Service End*************************/

/******************Show Service Start*************************/
type (
	showServiceReq {
		ProjectId string `json:"projectId"`
		ServiceId string `json:"serviceId"`
	}

	showServiceResp {
	}
)
/******************Show Service End*************************/

/******************List Clusters Start*************************/
type (
	ListClustersReq {
		ProjectId   string `json:"project_id"`
		ClusterName string `json:"cluster_name,optional"`
		Offset      int64  `json:"offset,optional"`
		Limit       int64  `json:"limit,optional"`
		SortBy      string `json:"sort_by,optional"`
		Order       string `json:"order,optional"`
	}

	ListClustersResp {
		Code     int32     `json:"code,omitempty"`
		Count    int32     `json:"count,omitempty"`
		Clusters []Cluster `json:"clusters,omitempty"`
	}
)

type ClusterNode {
	Specification  string `json:"specification,omitempty"`
	Count          int32  `json:"count,omitempty"`
	AvailableCount int32  `json:"available_count,omitempty"`
}

type Cluster {
	ClusterId           string      `json:"cluster_id,omitempty"`
	ClusterName         string      `json:"cluster_name,omitempty"`
	Description         string      `json:"description,omitempty"`
	Tenant              string      `json:"tenant,omitempty"`
	Project             string      `json:"project,omitempty"`
	Owner               string      `json:"owner,omitempty"`
	CreatedAt           int32       `json:"created_at,omitempty"`
	Status              string      `json:"status,omitempty"`
	Nodes               ClusterNode `json:"nodes,omitempty"`
	AllocatableCpuCores float64     `json:"allocatable_cpu_cores,omitempty"`
	AllocatableMemory   int64       `json:"allocatable_memory,omitempty"`
	PeriodNum           int32       `json:"period_num,omitempty"`
	PeriodType          string      `json:"period_type,omitempty"`
	OrderId             string      `json:"order_id,omitempty"`
}
/******************List Clusters end*************************/

/******************CreateTrainingJob start*************************/
type (
	CreateTrainingJobReq {
@@ -305,4 +439,13 @@ service AICore-api {
	// CreateTrainingJob create a training job
	@handler CreateTrainingJobHandler
	post /CreateTrainingJob (CreateTrainingJobReq) returns (CreateTrainingJobResp)

	// CreateExportTask create a dataset export task
	@handler CreateExportTaskHandler
	post /CreateExportTask (CreateExportTaskReq) returns (CreateExportTaskResp)

	@handler CreateServiceHandler
	post /createService (CreateServiceReq) returns (CreateServiceResp)

	@handler ListClustersHandler
	get /ListClusters (ListClustersReq) returns (ListClustersResp)
}
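A quick way to exercise the new CreateExportTask route once the gateway is running; this is a sketch, the host and port are assumptions (not taken from this commit), and the field values are placeholders. The JSON keys follow CreateExportTaskReq as defined above.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Keys match the json tags of CreateExportTaskReq; values are placeholders.
	payload := map[string]string{
		"project_id":  "my-project-id",
		"dataset_id":  "my-dataset-id",
		"export_path": "/my-bucket/export/",
	}
	body, _ := json.Marshal(payload)

	// localhost:8888 is an assumed listen address for the AICore api service.
	resp, err := http.Post("http://localhost:8888/CreateExportTask", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		TaskId string `json:"task_id"`
	}
	_ = json.NewDecoder(resp.Body).Decode(&out)
	fmt.Println("task_id:", out.TaskId)
}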

@@ -0,0 +1,42 @@
package logic

import (
	"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
	"PCM/common/tool"
	"PCM/common/xerr"
	"context"

	"github.com/jinzhu/copier"
	"github.com/pkg/errors"

	"PCM/adaptor/AIComputing/AICore/api/internal/svc"
	"PCM/adaptor/AIComputing/AICore/api/internal/types"

	"github.com/zeromicro/go-zero/core/logx"
)

type CreateExportTaskLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

func NewCreateExportTaskLogic(ctx context.Context, svcCtx *svc.ServiceContext) *CreateExportTaskLogic {
	return &CreateExportTaskLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
}

func (l *CreateExportTaskLogic) CreateExportTask(req *types.CreateExportTaskReq) (resp *types.CreateExportTaskResp, err error) {
	modelartsReq := &modelarts.GetExportTasksOfDatasetReq{}
	err = copier.CopyWithOption(modelartsReq, req, copier.Option{Converters: tool.Converters})
	if err != nil {
		return nil, errors.Wrapf(xerr.NewErrMsg("Failed to build export task request"), "Failed to build export task request err: %v, req: %+v", err, req)
	}
	rpcResp, err := l.svcCtx.ModelArtsRpc.GetExportTasksOfDataset(l.ctx, modelartsReq)
	if err != nil {
		return nil, errors.Wrapf(xerr.NewErrMsg("Failed to create export task"), "Failed to create export task err: %v, req: %+v", err, req)
	}
	resp = &types.CreateExportTaskResp{}
	err = copier.CopyWithOption(resp, rpcResp, copier.Option{Converters: tool.Converters})
	return resp, err
}

@@ -0,0 +1,42 @@
package logic

import (
	"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
	"PCM/common/tool"
	"PCM/common/xerr"
	"context"

	"github.com/jinzhu/copier"
	"github.com/pkg/errors"

	"PCM/adaptor/AIComputing/AICore/api/internal/svc"
	"PCM/adaptor/AIComputing/AICore/api/internal/types"

	"github.com/zeromicro/go-zero/core/logx"
)

type CreateServiceLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

func NewCreateServiceLogic(ctx context.Context, svcCtx *svc.ServiceContext) *CreateServiceLogic {
	return &CreateServiceLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
}

func (l *CreateServiceLogic) CreateService(req *types.CreateServiceReq) (resp *types.CreateServiceResp, err error) {
	modelartsReq := &modelarts.CreateServiceReq{}
	err = copier.CopyWithOption(modelartsReq, req, copier.Option{Converters: tool.Converters})
	if err != nil {
		return nil, errors.Wrapf(xerr.NewErrMsg("Failed to build CreateService request"), "Failed to build CreateService request err: %v, req: %+v", err, req)
	}
	rpcResp, err := l.svcCtx.ModelArtsRpc.CreateService(l.ctx, modelartsReq)
	if err != nil {
		return nil, errors.Wrapf(xerr.NewErrMsg("Failed to create service"), "Failed to create service err: %v, req: %+v", err, req)
	}
	resp = &types.CreateServiceResp{}
	err = copier.CopyWithOption(resp, rpcResp, copier.Option{Converters: tool.Converters})
	return resp, err
}

@@ -0,0 +1,42 @@
package logic

import (
	"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
	"PCM/common/tool"
	"PCM/common/xerr"
	"context"

	"github.com/jinzhu/copier"
	"github.com/pkg/errors"

	"PCM/adaptor/AIComputing/AICore/api/internal/svc"
	"PCM/adaptor/AIComputing/AICore/api/internal/types"

	"github.com/zeromicro/go-zero/core/logx"
)

type ListClustersLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

func NewListClustersLogic(ctx context.Context, svcCtx *svc.ServiceContext) *ListClustersLogic {
	return &ListClustersLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
}

func (l *ListClustersLogic) ListClusters(req *types.ListClustersReq) (resp *types.ListClustersResp, err error) {
	modelartsReq := &modelarts.ListClustersReq{}
	err = copier.CopyWithOption(modelartsReq, req, copier.Option{Converters: tool.Converters})
	if err != nil {
		return nil, errors.Wrapf(xerr.NewErrMsg("Failed to build ListClusters request"), "Failed to build ListClusters request err: %v, req: %+v", err, req)
	}
	rpcResp, err := l.svcCtx.ModelArtsRpc.ListClusters(l.ctx, modelartsReq)
	if err != nil {
		return nil, errors.Wrapf(xerr.NewErrMsg("Failed to list clusters"), "Failed to list clusters err: %v, req: %+v", err, req)
	}
	resp = &types.ListClustersResp{}
	err = copier.CopyWithOption(resp, rpcResp, copier.Option{Converters: tool.Converters})
	return resp, err
}
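All three new logic files follow the same shape: copy the API request into the ModelArts RPC request with copier, invoke the RPC, then copy the RPC response back into the API response. Below is a standalone sketch of just that copy step using github.com/jinzhu/copier; the structs are made-up stand-ins, and the project's real converter set (tool.Converters from PCM/common/tool) is not reproduced here.

package main

import (
	"fmt"

	"github.com/jinzhu/copier"
)

// Stand-in structs: apiReq mimics a generated API type, rpcReq a ModelArts RPC type.
type apiReq struct {
	ProjectId string
	Limit     int64
}

type rpcReq struct {
	ProjectId string
	Limit     int64
}

func main() {
	src := apiReq{ProjectId: "my-project-id", Limit: 10}
	dst := &rpcReq{}

	// Same call shape as the logic files; in the project the Option would carry tool.Converters.
	if err := copier.CopyWithOption(dst, &src, copier.Option{DeepCopy: true}); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", *dst)
}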