Merge branch 'pcm_modelarts_1.0' into pcm_modelarts_1.0_xd

# Conflicts:
#	adaptor/AIComputing/AICore/api/AICore.api
Diva123456 2023-03-06 16:59:04 +08:00
commit 9aeade8bc7
25 changed files with 2918 additions and 692 deletions

View File

@ -36,6 +36,25 @@ type (
)
/******************find datasetList end*************************/
/******************Create dataset start*************************/
type CreateDataSetReq {
DatasetId string `json:"datasetId"`
DatasetType int32 `json:"datasetType"`
Description string `json:"description"`
WorkPath string `json:"workPath"`
DatasetName string `json:"datasetName"`
WorkPathType string `json:"workPathType"`
ProjectId string `json:"projectId"`
DataSources []DataSources `json:"DataSources"`
}
type CreateDataSetResp {
Dataset_id string `json:"datasetId"`
}
/******************Create dataset end*************************/
/******************ImportTask start*************************/
type (
ImportTaskDataReq {
@ -423,6 +442,104 @@ type (
}
)
/******************CreateTrainingJob end*************************/
/******************Notebook Type start*************************/
type (
ListNotebookReq {
Project_id string `json:"project_id"`
Param ListNotebookParam `json:"param"`
}
ListNotebookResp {
Current int32 `json:"current"`
Data []NotebookResp `json:"data"`
Pages int32 `json:"pages"`
Size int32 `json:"size"`
Total int64 `json:"total"`
}
ListNotebookParam {
Feature string `json:"feature"`
Limit int32 `json:"limit"`
Name string `json:"name"`
Pool_id string `json:"pool_id"`
Offset int32 `json:"offset"`
Owner string `json:"owner"`
Sort_dir string `json:"sort_dir"`
Sort_key string `json:"sort_key"`
Status string `json:"status"`
WorkspaceId string `json:"workspaceId"`
}
NotebookResp {
Action_progress []JobProgress `json:"action_progress"`
Description string `json:"description"`
Endpoints []EndpointsRes `json:"endpoints"`
Fail_reason string `json:"fail_reason"`
Flavor string `json:"flavor"`
Id string `json:"id"`
Image Image `json:"image"`
Lease Lease `json:"lease"`
Name string `json:"name"`
Pool Pool `json:"pool"`
Status string `json:"status"`
Token string `json:"token"`
Url string `json:"url"`
Volume VolumeRes `json:"volume"`
Workspace_id string `json:"workspace_id"`
Feature string `json:"feature"`
}
JobProgress {
Notebook_id string `json:"notebook_id"`
Status string `json:"status"`
Step int32 `json:"step"`
Step_description string `json:"step_description"`
}
EndpointsRes {
Allowed_access_ips []string `json:"allowed_access_ips"`
Dev_service string `json:"dev_service"`
Ssh_keys []string `json:"ssh_keys"`
}
Image {
Arch string `json:"arch"`
Create_at int64 `json:"create_at"`
Description string `json:"description"`
Dev_services []string `json:"dev_services"`
Id string `json:"id"`
Name string `json:"name"`
Namespace string `json:"namespace"`
Origin string `json:"origin"`
Resource_categories []string `json:"resource_categories"`
Service_type string `json:"service_type"`
Size int64 `json:"size"`
Status string `json:"status"`
Status_message string `json:"status_message"`
Support_res_categories []string `json:"support_res_categories"`
Swr_path string `json:"swr_path"`
Tag string `json:"tag"`
Type_image string `json:"type"`
Update_at int64 `json:"update_at"`
Visibility string `json:"visibility"`
Workspace_id string `json:"workspace_id"`
}
Lease {
Create_at int64 `json:"create_at"`
Duration int64 `json:"duration"`
Enable bool `json:"enable"`
Type_lease string `json:"type"`
Update_at int64 `json:"update_at"`
}
Pool {
Id string `json:"id"`
Name string `json:"name"`
}
VolumeRes {
Capacity int64 `json:"capacity"`
Category string `json:"category"`
Mount_path string `json:"mount_path"`
Ownership string `json:"ownership"`
Status string `json:"status"`
}
)
/******************Notebook Type end*************************/
service AICore-api {
@handler listDataSetHandler
@ -434,12 +551,12 @@ service AICore-api {
@handler ListImportHandler
get /ListImport (ListImportTasksReq) returns (ListImportTasksResp)
// ListTrainingJobs queries the training job list
@handler ListTrainingJobsreq
@handler GetListTrainingJobsHandler
get /GetListTrainingJobs (ListTrainingJobsreq) returns (ListTrainingJobsresp)
// CreateTrainingJob creates a training job
@handler CreateTrainingJobHandler
post /CreateTrainingJob (CreateTrainingJobReq) returns (CreateTrainingJobResp)
// CreateExportTask creates a data export task
@handler CreateExportTaskHandler
post /CreateExportTask (CreateExportTaskReq) returns (CreateExportTaskResp)
@ -447,5 +564,11 @@ service AICore-api {
get /createService (CreateServiceReq) returns (CreateServiceResp)
@handler ListClustersHandler
get /ListClusters (ListClustersReq) returns (ListClustersResp)
/******************Notebook Method start*************************/
@handler listNotebookHandler
get /listNotebook (ListNotebookReq) returns (ListNotebookResp)
/******************Notebook Method end*************************/
}

View File

@ -0,0 +1,28 @@
package handler
import (
"net/http"
"PCM/adaptor/AIComputing/AICore/api/internal/logic"
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
func CreateTrainingJobHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var req types.CreateTrainingJobReq
if err := httpx.Parse(r, &req); err != nil {
httpx.ErrorCtx(r.Context(), w, err)
return
}
l := logic.NewCreateTrainingJobLogic(r.Context(), svcCtx)
resp, err := l.CreateTrainingJob(&req)
if err != nil {
httpx.ErrorCtx(r.Context(), w, err)
} else {
httpx.OkJsonCtx(r.Context(), w, resp)
}
}
}

View File

@ -0,0 +1,28 @@
package handler
import (
"net/http"
"PCM/adaptor/AIComputing/AICore/api/internal/logic"
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
func GetListTrainingJobsHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var req types.ListTrainingJobsreq
if err := httpx.Parse(r, &req); err != nil {
httpx.ErrorCtx(r.Context(), w, err)
return
}
l := logic.NewGetListTrainingJobsLogic(r.Context(), svcCtx)
resp, err := l.GetListTrainingJobs(&req)
if err != nil {
httpx.ErrorCtx(r.Context(), w, err)
} else {
httpx.OkJsonCtx(r.Context(), w, resp)
}
}
}

View File

@ -0,0 +1,28 @@
package handler
import (
"net/http"
"PCM/adaptor/AIComputing/AICore/api/internal/logic"
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
func ListImportHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var req types.ListImportTasksReq
if err := httpx.Parse(r, &req); err != nil {
httpx.ErrorCtx(r.Context(), w, err)
return
}
l := logic.NewListImportLogic(r.Context(), svcCtx)
resp, err := l.ListImport(&req)
if err != nil {
httpx.ErrorCtx(r.Context(), w, err)
} else {
httpx.OkJsonCtx(r.Context(), w, resp)
}
}
}

View File

@ -0,0 +1,28 @@
package handler
import (
"net/http"
"PCM/adaptor/AIComputing/AICore/api/internal/logic"
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"github.com/zeromicro/go-zero/rest/httpx"
)
func listNotebookHandler(svcCtx *svc.ServiceContext) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var req types.ListNotebookReq
if err := httpx.Parse(r, &req); err != nil {
httpx.ErrorCtx(r.Context(), w, err)
return
}
l := logic.NewListNotebookLogic(r.Context(), svcCtx)
resp, err := l.ListNotebook(&req)
if err != nil {
httpx.ErrorCtx(r.Context(), w, err)
} else {
httpx.OkJsonCtx(r.Context(), w, resp)
}
}
}

View File

@ -19,13 +19,28 @@ func RegisterHandlers(server *rest.Server, serverCtx *svc.ServiceContext) {
},
{
Method: http.MethodPost,
Path: "/createDataSet",
Handler: CreateDataSetHandler(serverCtx),
Path: "/CreateTask",
Handler: CreateTaskHandler(serverCtx),
},
{
Method: http.MethodGet,
Path: "/ListImport",
Handler: ListImportHandler(serverCtx),
},
{
Method: http.MethodGet,
Path: "/GetListTrainingJobs",
Handler: GetListTrainingJobsHandler(serverCtx),
},
{
Method: http.MethodPost,
Path: "/CreateTask",
Handler: CreateTaskHandler(serverCtx),
Path: "/CreateTrainingJob",
Handler: CreateTrainingJobHandler(serverCtx),
},
{
Method: http.MethodGet,
Path: "/listNotebook",
Handler: listNotebookHandler(serverCtx),
},
},
)

View File

@ -0,0 +1,45 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"PCM/common/tool"
"PCM/common/xerr"
"context"
"github.com/jinzhu/copier"
"github.com/pkg/errors"
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
type CreateDataSetLogic struct {
logx.Logger
ctx context.Context
svcCtx *svc.ServiceContext
}
func NewCreateDataSetLogic(ctx context.Context, svcCtx *svc.ServiceContext) *CreateDataSetLogic {
return &CreateDataSetLogic{
Logger: logx.WithContext(ctx),
ctx: ctx,
svcCtx: svcCtx,
}
}
func (l *CreateDataSetLogic) CreateDataSet(req *types.CreateDataSetReq) (resp *types.CreateDataSetResp, err error) {
modelartsReq := &modelarts.CreateDataSetReq{}
err = copier.CopyWithOption(modelartsReq, req, copier.Option{Converters: tool.Converters})
CreateDataSetResp, err := l.svcCtx.ModelArtsRpc.CreateDataSet(l.ctx, modelartsReq)
if err != nil {
return nil, errors.Wrapf(xerr.NewErrMsg("Failed to create DataSet"), "Failed to create DataSet err: %v, req: %+v", err, req)
}
resp = &types.CreateDataSetResp{}
err = copier.CopyWithOption(&resp, &CreateDataSetResp, copier.Option{Converters: tool.Converters})
return resp, nil
}

View File

@ -1,16 +1,14 @@
package logic
import (
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"PCM/common/tool"
"PCM/common/xerr"
"context"
"github.com/jinzhu/copier"
"github.com/pkg/errors"
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
@ -29,7 +27,6 @@ func NewCreateTaskLogic(ctx context.Context, svcCtx *svc.ServiceContext) *Create
}
func (l *CreateTaskLogic) CreateTask(req *types.ImportTaskDataReq) (resp *types.ImportTaskDataResp, err error) {
modelartsReq := &modelarts.ImportTaskDataReq{}
err = copier.CopyWithOption(modelartsReq, req, copier.Option{Converters: tool.Converters})
ImportTaskDataResp, err := l.svcCtx.ModelArtsRpc.CreateTask(l.ctx, modelartsReq)
@ -39,12 +36,6 @@ func (l *CreateTaskLogic) CreateTask(req *types.ImportTaskDataReq) (resp *types.
resp = &types.ImportTaskDataResp{}
err = copier.CopyWithOption(&resp, &ImportTaskDataResp, copier.Option{Converters: tool.Converters})
/*if err != nil {
return nil, err
}
for i := range resp.DataSet {
resp.DataSet[i]. = "modelarts"
}*/
return resp, nil
}

View File

@ -14,21 +14,21 @@ import (
"github.com/zeromicro/go-zero/core/logx"
)
type ListTrainingJobsreqLogic struct {
type GetListTrainingJobsLogic struct {
logx.Logger
ctx context.Context
svcCtx *svc.ServiceContext
}
func NewListTrainingJobsreqLogic(ctx context.Context, svcCtx *svc.ServiceContext) *ListTrainingJobsreqLogic {
return &ListTrainingJobsreqLogic{
func NewGetListTrainingJobsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetListTrainingJobsLogic {
return &GetListTrainingJobsLogic{
Logger: logx.WithContext(ctx),
ctx: ctx,
svcCtx: svcCtx,
}
}
func (l *ListTrainingJobsreqLogic) ListTrainingJobsreq(req *types.ListTrainingJobsreq) (resp *types.ListTrainingJobsresp, err error) {
func (l *GetListTrainingJobsLogic) GetListTrainingJobs(req *types.ListTrainingJobsreq) (resp *types.ListTrainingJobsresp, err error) {
modelartsReq := &modelarts.ListTrainingJobsreq{}
err = copier.CopyWithOption(modelartsReq, req, copier.Option{Converters: tool.Converters})
listDataSetResp, err := l.svcCtx.ModelArtsRpc.GetListTrainingJobs(l.ctx, modelartsReq)

View File

@ -0,0 +1,30 @@
package logic
import (
"context"
"PCM/adaptor/AIComputing/AICore/api/internal/svc"
"PCM/adaptor/AIComputing/AICore/api/internal/types"
"github.com/zeromicro/go-zero/core/logx"
)
type ListNotebookLogic struct {
logx.Logger
ctx context.Context
svcCtx *svc.ServiceContext
}
func NewListNotebookLogic(ctx context.Context, svcCtx *svc.ServiceContext) *ListNotebookLogic {
return &ListNotebookLogic{
Logger: logx.WithContext(ctx),
ctx: ctx,
svcCtx: svcCtx,
}
}
func (l *ListNotebookLogic) ListNotebook(req *types.ListNotebookReq) (resp *types.ListNotebookResp, err error) {
// todo: add your logic here and delete this line
return
}
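The ListNotebook logic on the API side is still an empty stub. A minimal sketch of how it could forward the request to the ModelArts RPC client, following the same copier-based pattern as CreateDataSetLogic and CreateTaskLogic in this commit (the error wrapping and the extra imports for modelarts, copier, tool, xerr and errors are assumptions, not part of the change):

func (l *ListNotebookLogic) ListNotebook(req *types.ListNotebookReq) (resp *types.ListNotebookResp, err error) {
    // Map the API request onto the RPC request using the shared converters.
    modelartsReq := &modelarts.ListNotebookReq{}
    if err = copier.CopyWithOption(modelartsReq, req, copier.Option{Converters: tool.Converters}); err != nil {
        return nil, err
    }
    // Call the ModelArts RPC service registered in the service context.
    listNotebookResp, err := l.svcCtx.ModelArtsRpc.ListNotebook(l.ctx, modelartsReq)
    if err != nil {
        return nil, errors.Wrapf(xerr.NewErrMsg("Failed to list notebooks"), "Failed to list notebooks err: %v, req: %+v", err, req)
    }
    // Map the RPC response back onto the API response type.
    resp = &types.ListNotebookResp{}
    err = copier.CopyWithOption(resp, listNotebookResp, copier.Option{Converters: tool.Converters})
    return resp, err
}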

View File

@ -27,16 +27,6 @@ type ListDataSetResp struct {
DataSet []DataSet `json:"dataSets"`
}
type ImportTaskDataReq struct {
ProjectId string `json:"projectId"`
DatasetId string `json:"datasetId"`
ImportPath string `json:"importPath"`
}
type ImportTaskDataResp struct {
TaskId string `json:"TaskId"`
}
type CreateDataSetReq struct {
DatasetId string `json:"datasetId"`
DatasetType int32 `json:"datasetType"`
@ -51,3 +41,378 @@ type CreateDataSetReq struct {
type CreateDataSetResp struct {
Dataset_id string `json:"datasetId"`
}
type ImportTaskDataReq struct {
ProjectId string `json:"projectId"`
DatasetId string `json:"datasetId"`
ImportPath string `json:"importPath"`
}
type ImportTaskDataResp struct {
TaskId string `json:"taskId"`
}
type ListImportTasksReq struct {
ProjectId string `json:"projectId"`
DatasetId string `json:"datasetId"`
Limit int32 `json:"limit,optional"`
Offset int32 `json:"offset,optional"`
}
type ListImportTasksResp struct {
TotalCount uint32 `json:"totalCount"`
ImportTasks []ImportTasks `json:"importTasks"`
}
type ImportTasks struct {
Status string `json:"status,omitempty"`
TaskId string `json:"task_id,omitempty"`
DatasetId string `json:"dataset_id,omitempty"`
ImportPath string `json:"import_path,omitempty"`
ImportType int32 `json:"import_type,omitempty"`
TotalSampleCount uint32 `json:"total_sample_count,omitempty"`
ImportedSampleCount uint32 `json:"imported_sample_count,omitempty"`
AnnotatedSampleCount uint32 `json:"annotated_sample_count,omitempty"`
TotalSubSampleCount uint32 `json:"total_sub_sample_count,omitempty"`
ImportedSubSampleCount uint32 `json:"imported_sub_sample_count,omitempty"`
TotalFileSize uint32 `json:"total_file_size,omitempty"`
FinishedFileCount uint32 `json:"finished_file_count,omitempty"`
FinishedFileSize uint32 `json:"finished_file_size,omitempty"`
TotalFileCount uint32 `json:"total_file_count,omitempty"`
CreateTime uint32 `json:"create_time,omitempty"`
ElapsedTime uint32 `json:"elapsed_time,omitempty"`
AnnotationFormatConfig []interface{} `json:"annotation_format_config"`
}
type Annotations struct {
JobTemplate string `json:"job_template"`
KeyTask string `json:"key_task"`
}
type TrainingExperimentReference struct {
}
type Metadata struct {
ID string `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
CreateTime uint32 `json:"create_time"`
WorkspaceID string `json:"workspace_id"`
AiProject string `json:"ai_project"`
UserName string `json:"user_name"`
Annotations Annotations `json:"annotations"`
TrainingExperimentReference TrainingExperimentReference `json:"training_experiment_reference"`
Tags []interface{} `json:"tags"`
}
type CPUUsage struct {
Average int32 `json:"average"`
Max int32 `json:"max"`
Min int32 `json:"min"`
}
type MemUsage struct {
Average int32 `json:"average"`
Max int32 `json:"max"`
Min int32 `json:"min"`
}
type Util struct {
Average int32 `json:"average"`
Max int32 `json:"max"`
Min int32 `json:"min"`
}
type Gpu struct {
MemUsage MemUsage `json:"mem_usage"`
Util Util `json:"util"`
UnitNum int32 `json:"unit_num"`
ProductName string `json:"product_name"`
Memory string `json:"memory"`
}
type MetricsStatistics struct {
CPUUsage CPUUsage `json:"cpu_usage"`
Gpu Gpu `json:"gpu"`
MemUsage MemUsage `json:"mem_usage"`
}
type Status struct {
Phase string `json:"phase"`
SecondaryPhase string `json:"secondary_phase"`
Duration int32 `json:"duration"`
IsHanged bool `json:"is_hanged"`
RetryCount int32 `json:"retry_count"`
StartTime int32 `json:"start_time"`
Tasks []string `json:"tasks"`
MetricsStatistics MetricsStatistics `json:"metrics_statistics"`
}
type Constraint struct {
Type string `json:"type"`
Editable bool `json:"editable"`
Required bool `json:"required"`
Sensitive bool `json:"sensitive"`
ValidType string `json:"valid_type"`
ValidRange interface{} `json:"valid_range"`
}
type Parameters struct {
Name string `json:"name"`
Description string `json:"description"`
I18NDescription interface{} `json:"i18n_description"`
Value string `json:"value"`
Constraint Constraint `json:"constraint"`
}
type Obs struct {
ObsURL string `json:"obs_url"`
}
type Remote struct {
Obs Obs `json:"obs"`
}
type Attributes struct {
DataFormat []string `json:"data_format"`
DataSegmentation []string `json:"data_segmentation"`
DatasetType []string `json:"dataset_type"`
IsFree string `json:"is_free"`
MaxFreeJobCount string `json:"max_free_job_count"`
}
type RemoteConstraints struct {
DataType string `json:"data_type"`
Attributes Attributes `json:"attributes,omitempty"`
}
type Inputs struct {
Name string `json:"name"`
Description string `json:"description"`
LocalDir string `json:"local_dir"`
AccessMethod string `json:"access_method"`
Remote Remote `json:"remote"`
RemoteConstraints []RemoteConstraints `json:"remote_constraints"`
}
type Outputs struct {
Name string `json:"name"`
LocalDir string `json:"local_dir"`
AccessMethod string `json:"access_method"`
Remote Remote `json:"remote"`
Mode string `json:"mode"`
Period int32 `json:"period"`
PrefetchToLocal bool `json:"prefetch_to_local"`
}
type Engine struct {
EngineID string `json:"engine_id"`
EngineName string `json:"engine_name"`
EngineVersion string `json:"engine_version"`
V1Compatible bool `json:"v1_compatible"`
RunUser string `json:"run_user"`
ImageSource bool `json:"image_source"`
}
type Policies struct {
}
type Algorithm struct {
ID string `json:"id"`
Name string `json:"name"`
V1Algorithm bool `json:"v1_algorithm"`
SubscriptionID string `json:"subscription_id"`
ItemVersionID string `json:"item_version_id"`
ContentID string `json:"content_id"`
Parameters []Parameters `json:"parameters"`
ParametersCustomization bool `json:"parameters_customization"`
Inputs []Inputs `json:"inputs"`
Outputs []Outputs `json:"outputs"`
Engine Engine `json:"engine"`
Policies Policies `json:"policies"`
}
type Billing struct {
Code string `json:"code"`
UnitNum int32 `json:"unit_num"`
}
type CPU struct {
Arch string `json:"arch"`
CoreNum int32 `json:"core_num"`
}
type Memory struct {
Size int `json:"size"`
Unit string `json:"unit"`
}
type Disk struct {
Size int32 `json:"size"`
Unit string `json:"unit"`
}
type FlavorInfo struct {
CPU CPU `json:"cpu"`
Gpu Gpu `json:"gpu"`
Memory Memory `json:"memory"`
Disk Disk `json:"disk"`
}
type FlavorDetail struct {
FlavorType string `json:"flavor_type"`
Billing Billing `json:"billing"`
Attributes Attributes `json:"attributes"`
FlavorInfo FlavorInfo `json:"flavor_info"`
}
type Resource struct {
Policy string `json:"policy"`
FlavorID string `json:"flavor_id"`
FlavorName string `json:"flavor_name"`
NodeCount int32 `json:"node_count"`
FlavorDetail FlavorDetail `json:"flavor_detail"`
}
type LogExportPath struct {
}
type Spec struct {
Resource Resource `json:"resource"`
LogExportPath LogExportPath `json:"log_export_path"`
IsHostedLog bool `json:"is_hosted_log"`
}
type Items struct {
Kind string `json:"kind"`
Metadata Metadata `json:"metadata"`
Status Status `json:"status"`
Algorithm Algorithm `json:"algorithm,omitempty"`
Spec Spec `json:"spec"`
}
type ListTrainingJobsreq struct {
ProjectId string `json:"projectId"`
}
type ListTrainingJobsresp struct {
Total int32 `json:"total"`
Count int32 `json:"count"`
Limit int32 `json:"limit"`
Offset int32 `json:"offset"`
SortBy string `json:"sort_by"`
Order string `json:"order"`
GroupBy string `json:"group_by"`
WorkspaceID string `json:"workspace_id"`
AiProject string `json:"ai_project"`
Items []Items `json:"items"`
}
type CreateTrainingJobReq struct {
ProjectId string `json:"projectId"`
}
type CreateTrainingJobResp struct {
}
type ListNotebookReq struct {
Project_id string `json:"project_id"`
Param ListNotebookParam `json:"param"`
}
type ListNotebookResp struct {
Current int32 `json:"current"`
Data []NotebookResp `json:"data"`
Pages int32 `json:"pages"`
Size int32 `json:"size"`
Total int64 `json:"total"`
}
type ListNotebookParam struct {
Feature string `json:"feature"`
Limit int32 `json:"limit"`
Name string `json:"name"`
Pool_id string `json:"pool_id"`
Offset int32 `json:"offset"`
Owner string `json:"owner"`
Sort_dir string `json:"sort_dir"`
Sort_key string `json:"sort_key"`
Status string `json:"status"`
WorkspaceId string `json:"workspaceId"`
}
type NotebookResp struct {
Action_progress []JobProgress `json:"action_progress"`
Description string `json:"description"`
Endpoints []EndpointsRes `json:"endpoints"`
Fail_reason string `json:"fail_reason"`
Flavor string `json:"flavor"`
Id string `json:"id"`
Image Image `json:"image"`
Lease Lease `json:"lease"`
Name string `json:"name"`
Pool Pool `json:"pool"`
Status string `json:"status"`
Token string `json:"token"`
Url string `json:"url"`
Volume VolumeRes `json:"volume"`
Workspace_id string `json:"workspace_id"`
Feature string `json:"feature"`
}
type JobProgress struct {
Notebook_id string `json:"notebook_id"`
Status string `json:"status"`
Step int32 `json:"step"`
Step_description string `json:"step_description"`
}
type EndpointsRes struct {
Allowed_access_ips []string `json:"allowed_access_ips"`
Dev_service string `json:"dev_service"`
Ssh_keys []string `json:"ssh_keys"`
}
type Image struct {
Arch string `json:"arch"`
Create_at int64 `json:"create_at"`
Description string `json:"description"`
Dev_services []string `json:"dev_services"`
Id string `json:"id"`
Name string `json:"name"`
Namespace string `json:"namespace"`
Origin string `json:"origin"`
Resource_categories []string `json:"resource_categories"`
Service_type string `json:"service_type"`
Size int64 `json:"size"`
Status string `json:"status"`
Status_message string `json:"status_message"`
Support_res_categories []string `json:"support_res_categories"`
Swr_path string `json:"swr_path"`
Tag string `json:"tag"`
Type_image string `json:"type"`
Update_at int64 `json:"update_at"`
Visibility string `json:"visibility"`
Workspace_id string `json:"workspace_id"`
}
type Lease struct {
Create_at int64 `json:"create_at"`
Duration int64 `json:"duration"`
Enable bool `json:"enable"`
Type_lease string `json:"type"`
Update_at int64 `json:"update_at"`
}
type Pool struct {
Id string `json:"id"`
Name string `json:"name"`
}
type VolumeRes struct {
Capacity int64 `json:"capacity"`
Category string `json:"category"`
Mount_path string `json:"mount_path"`
Ownership string `json:"ownership"`
Status string `json:"status"`
}

View File

@ -0,0 +1,46 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type CreateNotebookLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewCreateNotebookLogic(ctx context.Context, svcCtx *svc.ServiceContext) *CreateNotebookLogic {
return &CreateNotebookLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
func (l *CreateNotebookLogic) CreateNotebook(in *modelarts.CreateNotebookReq) (*modelarts.CreateNotebookResp, error) {
var resp modelarts.CreateNotebookResp
createUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/notebooks"
createUrl = strings.Replace(createUrl, "{project_id}", in.ProjectId, -1)
reqByte, err := json.Marshal(in.Param)
if err != nil {
panic(err.Error())
}
payload := strings.NewReader(string(reqByte))
token := common.GetToken()
body, err := tool.HttpClient(tool.POST, createUrl, payload, token)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp.NotebookResp)
return &resp, nil
}

View File

@ -0,0 +1,47 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type CreateVisualizationJobLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewCreateVisualizationJobLogic(ctx context.Context, svcCtx *svc.ServiceContext) *CreateVisualizationJobLogic {
return &CreateVisualizationJobLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
func (l *CreateVisualizationJobLogic) CreateVisualizationJob(in *modelarts.CreateVisualizationJobReq) (*modelarts.CreateVisualizationJobResp, error) {
var resp modelarts.CreateVisualizationJobResp
createVisualJobUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/visualization-jobs"
createVisualJobUrl = strings.Replace(createVisualJobUrl, "{project_id}", in.ProjectId, -1)
reqByte, err := json.Marshal(in.Param)
if err != nil {
panic(err.Error())
}
payload := strings.NewReader(string(reqByte))
token := common.GetToken()
body, err := tool.HttpClient(tool.POST, createVisualJobUrl, payload, token)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp)
return &resp, nil
}

View File

@ -0,0 +1,47 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type GetNotebookStorageLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewGetNotebookStorageLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetNotebookStorageLogic {
return &GetNotebookStorageLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
func (l *GetNotebookStorageLogic) GetNotebookStorage(in *modelarts.GetNotebookStorageReq) (*modelarts.GetNotebookStorageResp, error) {
var resp modelarts.GetNotebookStorageResp
getObsUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/notebooks/{instance_id}/storage"
getObsUrl = strings.Replace(getObsUrl, "{project_id}", in.ProjectId, -1)
getObsUrl = strings.Replace(getObsUrl, "{instance_id}", in.InstanceId, -1)
token := common.GetToken()
//empty struct
var e struct{}
body, err := tool.HttpClientWithQueries(tool.GET, getObsUrl, nil, token, e)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp)
return &resp, nil
}

View File

@ -0,0 +1,44 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type GetVisualizationJobLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewGetVisualizationJobLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetVisualizationJobLogic {
return &GetVisualizationJobLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
// visualization-jobs
func (l *GetVisualizationJobLogic) GetVisualizationJob(in *modelarts.GetVisualizationJobReq) (*modelarts.GetVisualizationJobResp, error) {
var resp modelarts.GetVisualizationJobResp
getVisualJobUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/visualization-jobs"
getVisualJobUrl = strings.Replace(getVisualJobUrl, "{project_id}", in.ProjectId, -1)
token := common.GetToken()
body, err := tool.HttpClientWithQueries(tool.GET, getVisualJobUrl, nil, token, in.Param)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp)
return &resp, nil
}

View File

@ -0,0 +1,44 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type ListNotebookLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewListNotebookLogic(ctx context.Context, svcCtx *svc.ServiceContext) *ListNotebookLogic {
return &ListNotebookLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
// notebook task
func (l *ListNotebookLogic) ListNotebook(in *modelarts.ListNotebookReq) (*modelarts.ListNotebookResp, error) {
var resp modelarts.ListNotebookResp
getUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/notebooks"
getUrl = strings.Replace(getUrl, "{project_id}", in.ProjectId, -1)
token := common.GetToken()
body, err := tool.HttpClientWithQueries(tool.GET, getUrl, nil, token, in.Param)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp)
return &resp, nil
}

View File

@ -0,0 +1,48 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type MountNotebookStorageLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewMountNotebookStorageLogic(ctx context.Context, svcCtx *svc.ServiceContext) *MountNotebookStorageLogic {
return &MountNotebookStorageLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
func (l *MountNotebookStorageLogic) MountNotebookStorage(in *modelarts.MountNotebookStorageReq) (*modelarts.MountNotebookStorageResp, error) {
var resp modelarts.MountNotebookStorageResp
mountUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/notebooks/{instance_id}/storage"
mountUrl = strings.Replace(mountUrl, "{project_id}", in.ProjectId, -1)
mountUrl = strings.Replace(mountUrl, "{instance_id}", in.InstanceId, -1)
reqByte, err := json.Marshal(in.Param)
if err != nil {
panic(err.Error())
}
payload := strings.NewReader(string(reqByte))
token := common.GetToken()
body, err := tool.HttpClient(tool.POST, mountUrl, payload, token)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp)
return &resp, nil
}

View File

@ -0,0 +1,43 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type StartNotebookLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewStartNotebookLogic(ctx context.Context, svcCtx *svc.ServiceContext) *StartNotebookLogic {
return &StartNotebookLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
func (l *StartNotebookLogic) StartNotebook(in *modelarts.StartNotebookReq) (*modelarts.StartNotebookResp, error) {
var resp modelarts.StartNotebookResp
startUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/notebooks/{id}/start"
startUrl = strings.Replace(startUrl, "{project_id}", in.ProjectId, -1)
startUrl = strings.Replace(startUrl, "{id}", in.Id, -1)
token := common.GetToken()
body, err := tool.HttpClientWithQueries(tool.POST, startUrl, nil, token, in.Param)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp.NotebookResp)
return &resp, nil
}

View File

@ -0,0 +1,46 @@
package logic
import (
"PCM/adaptor/AIComputing/modelarts/rpc/internal/common"
"PCM/common/tool"
"context"
"encoding/json"
"strings"
"PCM/adaptor/AIComputing/modelarts/rpc/internal/svc"
"PCM/adaptor/AIComputing/modelarts/rpc/modelarts"
"github.com/zeromicro/go-zero/core/logx"
)
type StopNotebookLogic struct {
ctx context.Context
svcCtx *svc.ServiceContext
logx.Logger
}
func NewStopNotebookLogic(ctx context.Context, svcCtx *svc.ServiceContext) *StopNotebookLogic {
return &StopNotebookLogic{
ctx: ctx,
svcCtx: svcCtx,
Logger: logx.WithContext(ctx),
}
}
func (l *StopNotebookLogic) StopNotebook(in *modelarts.StopNotebookReq) (*modelarts.StopNotebookResp, error) {
var resp modelarts.StopNotebookResp
stopUrl := "https://modelarts.cn-east-3.myhuaweicloud.com/v1/{project_id}/notebooks/{id}/stop"
stopUrl = strings.Replace(stopUrl, "{project_id}", in.ProjectId, -1)
stopUrl = strings.Replace(stopUrl, "{id}", in.Id, -1)
token := common.GetToken()
//empty struct
var e struct{}
body, err := tool.HttpClientWithQueries(tool.POST, stopUrl, nil, token, e)
if err != nil {
return nil, err
}
json.Unmarshal(body, &resp.NotebookResp)
return &resp, nil
}

View File

@ -34,6 +34,12 @@ func (s *ModelArtsServer) GetDatasetList(ctx context.Context, in *modelarts.Data
return l.GetDatasetList(in)
}
// create DataSet
func (s *ModelArtsServer) CreateDataSet(ctx context.Context, in *modelarts.CreateDataSetReq) (*modelarts.CreateDataSetResq, error) {
l := logic.NewCreateDataSetLogic(ctx, s.svcCtx)
return l.CreateDataSet(in)
}
// create data import task
func (s *ModelArtsServer) CreateTask(ctx context.Context, in *modelarts.ImportTaskDataReq) (*modelarts.ImportTaskDataResp, error) {
l := logic.NewCreateTaskLogic(ctx, s.svcCtx)
@ -198,3 +204,14 @@ func (s *ModelArtsServer) MountNotebookStorage(ctx context.Context, in *modelart
l := logic.NewMountNotebookStorageLogic(ctx, s.svcCtx)
return l.MountNotebookStorage(in)
}
// visualization-jobs
func (s *ModelArtsServer) GetVisualizationJob(ctx context.Context, in *modelarts.GetVisualizationJobReq) (*modelarts.GetVisualizationJobResp, error) {
l := logic.NewGetVisualizationJobLogic(ctx, s.svcCtx)
return l.GetVisualizationJob(in)
}
func (s *ModelArtsServer) CreateVisualizationJob(ctx context.Context, in *modelarts.CreateVisualizationJobReq) (*modelarts.CreateVisualizationJobResp, error) {
l := logic.NewCreateVisualizationJobLogic(ctx, s.svcCtx)
return l.CreateVisualizationJob(in)
}

File diff suppressed because it is too large

View File

@ -22,55 +22,60 @@ const _ = grpc.SupportPackageIsVersion7
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type ModelArtsClient interface {
//get modelarts Token
// get modelarts Token
GetToken(ctx context.Context, in *TokenReq, opts ...grpc.CallOption) (*TokenResp, error)
//get modelarts Token
// get modelarts Token
GetDatasetList(ctx context.Context, in *DatasetReq, opts ...grpc.CallOption) (*DatasetResp, error)
//creat task 创建导入任务
// create DateSet
CreateDataSet(ctx context.Context, in *CreateDataSetReq, opts ...grpc.CallOption) (*CreateDataSetResq, error)
// creat task 创建导入任务
CreateTask(ctx context.Context, in *ImportTaskDataReq, opts ...grpc.CallOption) (*ImportTaskDataResp, error)
//get taskList 查询数据集导入任务列表
// get taskList 查询数据集导入任务列表
GetImportTaskList(ctx context.Context, in *ListImportTasksReq, opts ...grpc.CallOption) (*ListImportTasksResp, error)
// ListTrainingJobs 查询训练作业列表
// ListTrainingJobs 查询训练作业列表
GetListTrainingJobs(ctx context.Context, in *ListTrainingJobsreq, opts ...grpc.CallOption) (*ListTrainingJobsresp, error)
// CreateTrainingJob 创建训练作业
// CreateTrainingJob 创建训练作业
CreateTrainingJob(ctx context.Context, in *CreateTrainingJobReq, opts ...grpc.CallOption) (*CreateTrainingJobResp, error)
// DeleteTrainingJobConfig 删除训练作业
// DeleteTrainingJobConfig 删除训练作业
DeleteTrainingJob(ctx context.Context, in *DeleteTrainingJobReq, opts ...grpc.CallOption) (*DeleteTrainingJobResp, error)
// CreateTrainingJobConfig 创建训练作业参数
// CreateTrainingJobConfig 创建训练作业参数
CreateTrainingJobConfig(ctx context.Context, in *CreateTrainingJobConfigReq, opts ...grpc.CallOption) (*CreateTrainingJobConfigResp, error)
// DeleteTrainingJobConfig 删除训练作业参数
// DeleteTrainingJobConfig 删除训练作业参数
DeleteTrainingJobConfig(ctx context.Context, in *DeleteTrainingJobConfigReq, opts ...grpc.CallOption) (*DeleteTrainingJobConfigResp, error)
// ListTrainingJobConfig 查询训练作业参数
// ListTrainingJobConfig 查询训练作业参数
ListTrainingJobConfig(ctx context.Context, in *ListTrainingJobConfigReq, opts ...grpc.CallOption) (*ListTrainingJobConfigResp, error)
// CreateAlgorithm 创建算法
// CreateAlgorithm 创建算法
CreateAlgorithm(ctx context.Context, in *CreateAlgorithmReq, opts ...grpc.CallOption) (*CreateAlgorithmResp, error)
// ListAlgorithms 查询算法
// ListAlgorithms 查询算法
ListAlgorithms(ctx context.Context, in *ListAlgorithmsReq, opts ...grpc.CallOption) (*ListAlgorithmsResp, error)
//export task
ExportTask(ctx context.Context, in *ExportTaskReq, opts ...grpc.CallOption) (*ExportTaskResp, error)
GetExportTasksOfDataset(ctx context.Context, in *GetExportTasksOfDatasetReq, opts ...grpc.CallOption) (*GetExportTasksOfDatasetResp, error)
GetExportTaskStatusOfDataset(ctx context.Context, in *GetExportTaskStatusOfDatasetReq, opts ...grpc.CallOption) (*GetExportTaskStatusOfDatasetResp, error)
//processor task
// processor task
CreateProcessorTask(ctx context.Context, in *CreateProcessorTaskReq, opts ...grpc.CallOption) (*CreateProcessorTaskResp, error)
DescribeProcessorTask(ctx context.Context, in *DescribeProcessorTaskReq, opts ...grpc.CallOption) (*DescribeProcessorTaskResp, error)
//model management
// model management
CreateModel(ctx context.Context, in *CreateModelReq, opts ...grpc.CallOption) (*CreateModelResp, error)
DeleteModel(ctx context.Context, in *DeleteModelReq, opts ...grpc.CallOption) (*DeleteModelResp, error)
ListModels(ctx context.Context, in *ListModelReq, opts ...grpc.CallOption) (*ListModelResp, error)
ShowModels(ctx context.Context, in *ShowModelReq, opts ...grpc.CallOption) (*ShowModelResp, error)
//service management
// service management
CreateService(ctx context.Context, in *CreateServiceReq, opts ...grpc.CallOption) (*CreateServiceResp, error)
ListServices(ctx context.Context, in *ListServicesReq, opts ...grpc.CallOption) (*ListServicesResp, error)
ShowService(ctx context.Context, in *ShowServiceReq, opts ...grpc.CallOption) (*ShowServiceResp, error)
DeleteService(ctx context.Context, in *DeleteServiceReq, opts ...grpc.CallOption) (*DeleteServiceResp, error)
ListClusters(ctx context.Context, in *ListClustersReq, opts ...grpc.CallOption) (*ListClustersResp, error)
//notebook task
// notebook task
ListNotebook(ctx context.Context, in *ListNotebookReq, opts ...grpc.CallOption) (*ListNotebookResp, error)
CreateNotebook(ctx context.Context, in *CreateNotebookReq, opts ...grpc.CallOption) (*CreateNotebookResp, error)
StartNotebook(ctx context.Context, in *StartNotebookReq, opts ...grpc.CallOption) (*StartNotebookResp, error)
StopNotebook(ctx context.Context, in *StopNotebookReq, opts ...grpc.CallOption) (*StopNotebookResp, error)
GetNotebookStorage(ctx context.Context, in *GetNotebookStorageReq, opts ...grpc.CallOption) (*GetNotebookStorageResp, error)
MountNotebookStorage(ctx context.Context, in *MountNotebookStorageReq, opts ...grpc.CallOption) (*MountNotebookStorageResp, error)
// visualization-jobs
GetVisualizationJob(ctx context.Context, in *GetVisualizationJobReq, opts ...grpc.CallOption) (*GetVisualizationJobResp, error)
CreateVisualizationJob(ctx context.Context, in *CreateVisualizationJobReq, opts ...grpc.CallOption) (*CreateVisualizationJobResp, error)
}
type modelArtsClient struct {
@ -99,6 +104,15 @@ func (c *modelArtsClient) GetDatasetList(ctx context.Context, in *DatasetReq, op
return out, nil
}
func (c *modelArtsClient) CreateDataSet(ctx context.Context, in *CreateDataSetReq, opts ...grpc.CallOption) (*CreateDataSetResq, error) {
out := new(CreateDataSetResq)
err := c.cc.Invoke(ctx, "/modelarts.ModelArts/CreateDataSet", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *modelArtsClient) CreateTask(ctx context.Context, in *ImportTaskDataReq, opts ...grpc.CallOption) (*ImportTaskDataResp, error) {
out := new(ImportTaskDataResp)
err := c.cc.Invoke(ctx, "/modelarts.ModelArts/createTask", in, out, opts...)
@ -369,59 +383,82 @@ func (c *modelArtsClient) MountNotebookStorage(ctx context.Context, in *MountNot
return out, nil
}
func (c *modelArtsClient) GetVisualizationJob(ctx context.Context, in *GetVisualizationJobReq, opts ...grpc.CallOption) (*GetVisualizationJobResp, error) {
out := new(GetVisualizationJobResp)
err := c.cc.Invoke(ctx, "/modelarts.ModelArts/GetVisualizationJob", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *modelArtsClient) CreateVisualizationJob(ctx context.Context, in *CreateVisualizationJobReq, opts ...grpc.CallOption) (*CreateVisualizationJobResp, error) {
out := new(CreateVisualizationJobResp)
err := c.cc.Invoke(ctx, "/modelarts.ModelArts/CreateVisualizationJob", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// ModelArtsServer is the server API for ModelArts service.
// All implementations must embed UnimplementedModelArtsServer
// for forward compatibility
type ModelArtsServer interface {
//get modelarts Token
// get modelarts Token
GetToken(context.Context, *TokenReq) (*TokenResp, error)
//get modelarts Token
// get modelarts Token
GetDatasetList(context.Context, *DatasetReq) (*DatasetResp, error)
//creat task 创建导入任务
// create DateSet
CreateDataSet(context.Context, *CreateDataSetReq) (*CreateDataSetResq, error)
// creat task 创建导入任务
CreateTask(context.Context, *ImportTaskDataReq) (*ImportTaskDataResp, error)
//get taskList 查询数据集导入任务列表
// get taskList 查询数据集导入任务列表
GetImportTaskList(context.Context, *ListImportTasksReq) (*ListImportTasksResp, error)
// ListTrainingJobs 查询训练作业列表
// ListTrainingJobs 查询训练作业列表
GetListTrainingJobs(context.Context, *ListTrainingJobsreq) (*ListTrainingJobsresp, error)
// CreateTrainingJob 创建训练作业
// CreateTrainingJob 创建训练作业
CreateTrainingJob(context.Context, *CreateTrainingJobReq) (*CreateTrainingJobResp, error)
// DeleteTrainingJobConfig 删除训练作业
// DeleteTrainingJobConfig 删除训练作业
DeleteTrainingJob(context.Context, *DeleteTrainingJobReq) (*DeleteTrainingJobResp, error)
// CreateTrainingJobConfig 创建训练作业参数
// CreateTrainingJobConfig 创建训练作业参数
CreateTrainingJobConfig(context.Context, *CreateTrainingJobConfigReq) (*CreateTrainingJobConfigResp, error)
// DeleteTrainingJobConfig 删除训练作业参数
// DeleteTrainingJobConfig 删除训练作业参数
DeleteTrainingJobConfig(context.Context, *DeleteTrainingJobConfigReq) (*DeleteTrainingJobConfigResp, error)
// ListTrainingJobConfig 查询训练作业参数
// ListTrainingJobConfig 查询训练作业参数
ListTrainingJobConfig(context.Context, *ListTrainingJobConfigReq) (*ListTrainingJobConfigResp, error)
// CreateAlgorithm 创建算法
// CreateAlgorithm 创建算法
CreateAlgorithm(context.Context, *CreateAlgorithmReq) (*CreateAlgorithmResp, error)
// ListAlgorithms 查询算法
// ListAlgorithms 查询算法
ListAlgorithms(context.Context, *ListAlgorithmsReq) (*ListAlgorithmsResp, error)
//export task
ExportTask(context.Context, *ExportTaskReq) (*ExportTaskResp, error)
GetExportTasksOfDataset(context.Context, *GetExportTasksOfDatasetReq) (*GetExportTasksOfDatasetResp, error)
GetExportTaskStatusOfDataset(context.Context, *GetExportTaskStatusOfDatasetReq) (*GetExportTaskStatusOfDatasetResp, error)
//processor task
// processor task
CreateProcessorTask(context.Context, *CreateProcessorTaskReq) (*CreateProcessorTaskResp, error)
DescribeProcessorTask(context.Context, *DescribeProcessorTaskReq) (*DescribeProcessorTaskResp, error)
//model management
// model management
CreateModel(context.Context, *CreateModelReq) (*CreateModelResp, error)
DeleteModel(context.Context, *DeleteModelReq) (*DeleteModelResp, error)
ListModels(context.Context, *ListModelReq) (*ListModelResp, error)
ShowModels(context.Context, *ShowModelReq) (*ShowModelResp, error)
//service management
// service management
CreateService(context.Context, *CreateServiceReq) (*CreateServiceResp, error)
ListServices(context.Context, *ListServicesReq) (*ListServicesResp, error)
ShowService(context.Context, *ShowServiceReq) (*ShowServiceResp, error)
DeleteService(context.Context, *DeleteServiceReq) (*DeleteServiceResp, error)
ListClusters(context.Context, *ListClustersReq) (*ListClustersResp, error)
//notebook task
// notebook task
ListNotebook(context.Context, *ListNotebookReq) (*ListNotebookResp, error)
CreateNotebook(context.Context, *CreateNotebookReq) (*CreateNotebookResp, error)
StartNotebook(context.Context, *StartNotebookReq) (*StartNotebookResp, error)
StopNotebook(context.Context, *StopNotebookReq) (*StopNotebookResp, error)
GetNotebookStorage(context.Context, *GetNotebookStorageReq) (*GetNotebookStorageResp, error)
MountNotebookStorage(context.Context, *MountNotebookStorageReq) (*MountNotebookStorageResp, error)
// visualization-jobs
GetVisualizationJob(context.Context, *GetVisualizationJobReq) (*GetVisualizationJobResp, error)
CreateVisualizationJob(context.Context, *CreateVisualizationJobReq) (*CreateVisualizationJobResp, error)
mustEmbedUnimplementedModelArtsServer()
}
@ -435,6 +472,9 @@ func (UnimplementedModelArtsServer) GetToken(context.Context, *TokenReq) (*Token
func (UnimplementedModelArtsServer) GetDatasetList(context.Context, *DatasetReq) (*DatasetResp, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetDatasetList not implemented")
}
func (UnimplementedModelArtsServer) CreateDataSet(context.Context, *CreateDataSetReq) (*CreateDataSetResq, error) {
return nil, status.Errorf(codes.Unimplemented, "method CreateDataSet not implemented")
}
func (UnimplementedModelArtsServer) CreateTask(context.Context, *ImportTaskDataReq) (*ImportTaskDataResp, error) {
return nil, status.Errorf(codes.Unimplemented, "method CreateTask not implemented")
}
@ -525,6 +565,12 @@ func (UnimplementedModelArtsServer) GetNotebookStorage(context.Context, *GetNote
func (UnimplementedModelArtsServer) MountNotebookStorage(context.Context, *MountNotebookStorageReq) (*MountNotebookStorageResp, error) {
return nil, status.Errorf(codes.Unimplemented, "method MountNotebookStorage not implemented")
}
func (UnimplementedModelArtsServer) GetVisualizationJob(context.Context, *GetVisualizationJobReq) (*GetVisualizationJobResp, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetVisualizationJob not implemented")
}
func (UnimplementedModelArtsServer) CreateVisualizationJob(context.Context, *CreateVisualizationJobReq) (*CreateVisualizationJobResp, error) {
return nil, status.Errorf(codes.Unimplemented, "method CreateVisualizationJob not implemented")
}
func (UnimplementedModelArtsServer) mustEmbedUnimplementedModelArtsServer() {}
// UnsafeModelArtsServer may be embedded to opt out of forward compatibility for this service.
@ -574,6 +620,24 @@ func _ModelArts_GetDatasetList_Handler(srv interface{}, ctx context.Context, dec
return interceptor(ctx, in, info, handler)
}
func _ModelArts_CreateDataSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(CreateDataSetReq)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ModelArtsServer).CreateDataSet(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/modelarts.ModelArts/CreateDataSet",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ModelArtsServer).CreateDataSet(ctx, req.(*CreateDataSetReq))
}
return interceptor(ctx, in, info, handler)
}
func _ModelArts_CreateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ImportTaskDataReq)
if err := dec(in); err != nil {
@ -1114,6 +1178,42 @@ func _ModelArts_MountNotebookStorage_Handler(srv interface{}, ctx context.Contex
return interceptor(ctx, in, info, handler)
}
func _ModelArts_GetVisualizationJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetVisualizationJobReq)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ModelArtsServer).GetVisualizationJob(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/modelarts.ModelArts/GetVisualizationJob",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ModelArtsServer).GetVisualizationJob(ctx, req.(*GetVisualizationJobReq))
}
return interceptor(ctx, in, info, handler)
}
func _ModelArts_CreateVisualizationJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(CreateVisualizationJobReq)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ModelArtsServer).CreateVisualizationJob(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/modelarts.ModelArts/CreateVisualizationJob",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ModelArtsServer).CreateVisualizationJob(ctx, req.(*CreateVisualizationJobReq))
}
return interceptor(ctx, in, info, handler)
}
// ModelArts_ServiceDesc is the grpc.ServiceDesc for ModelArts service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
@ -1129,6 +1229,10 @@ var ModelArts_ServiceDesc = grpc.ServiceDesc{
MethodName: "GetDatasetList",
Handler: _ModelArts_GetDatasetList_Handler,
},
{
MethodName: "CreateDataSet",
Handler: _ModelArts_CreateDataSet_Handler,
},
{
MethodName: "createTask",
Handler: _ModelArts_CreateTask_Handler,
@ -1249,6 +1353,14 @@ var ModelArts_ServiceDesc = grpc.ServiceDesc{
MethodName: "MountNotebookStorage",
Handler: _ModelArts_MountNotebookStorage_Handler,
},
{
MethodName: "GetVisualizationJob",
Handler: _ModelArts_GetVisualizationJob_Handler,
},
{
MethodName: "CreateVisualizationJob",
Handler: _ModelArts_CreateVisualizationJob_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "modelarts.proto",

View File

@ -57,6 +57,9 @@ type (
CreateTrainingJobConfigResp = modelarts.CreateTrainingJobConfigResp
CreateTrainingJobReq = modelarts.CreateTrainingJobReq
CreateTrainingJobResp = modelarts.CreateTrainingJobResp
CreateVisualizationJobParam = modelarts.CreateVisualizationJobParam
CreateVisualizationJobReq = modelarts.CreateVisualizationJobReq
CreateVisualizationJobResp = modelarts.CreateVisualizationJobResp
CustomHooks = modelarts.CustomHooks
CustomSpec = modelarts.CustomSpec
DataSource = modelarts.DataSource
@ -90,6 +93,7 @@ type (
ExportTaskResp = modelarts.ExportTaskResp
ExportTaskStatus = modelarts.ExportTaskStatus
FileStatistics = modelarts.FileStatistics
Flavor = modelarts.Flavor
FlavorDetail = modelarts.FlavorDetail
FlavorInfo = modelarts.FlavorInfo
FlavorResponse = modelarts.FlavorResponse
@ -99,6 +103,9 @@ type (
GetExportTasksOfDatasetResp = modelarts.GetExportTasksOfDatasetResp
GetNotebookStorageReq = modelarts.GetNotebookStorageReq
GetNotebookStorageResp = modelarts.GetNotebookStorageResp
GetVisualizationJobParam = modelarts.GetVisualizationJobParam
GetVisualizationJobReq = modelarts.GetVisualizationJobReq
GetVisualizationJobResp = modelarts.GetVisualizationJobResp
Gpu = modelarts.Gpu
GuideDoc = modelarts.GuideDoc
I18NDescription = modelarts.I18NDescription
@ -118,6 +125,7 @@ type (
JobMetadata = modelarts.JobMetadata
JobProgress = modelarts.JobProgress
JobResponse = modelarts.JobResponse
Jobs = modelarts.Jobs
Lease = modelarts.Lease
LeaseReq = modelarts.LeaseReq
ListAlgorithmsReq = modelarts.ListAlgorithmsReq
@ -183,6 +191,7 @@ type (
ResourceRequirements = modelarts.ResourceRequirements
ResourceS = modelarts.ResourceS
RewardAttrs = modelarts.RewardAttrs
Schedule = modelarts.Schedule
Scheduler = modelarts.Scheduler
SchemaMaps = modelarts.SchemaMaps
Scope = modelarts.Scope
@ -226,6 +235,8 @@ type (
GetToken(ctx context.Context, in *TokenReq, opts ...grpc.CallOption) (*TokenResp, error)
// get modelarts Token
GetDatasetList(ctx context.Context, in *DatasetReq, opts ...grpc.CallOption) (*DatasetResp, error)
// create DataSet
CreateDataSet(ctx context.Context, in *CreateDataSetReq, opts ...grpc.CallOption) (*CreateDataSetResq, error)
// create data import task
CreateTask(ctx context.Context, in *ImportTaskDataReq, opts ...grpc.CallOption) (*ImportTaskDataResp, error)
// get taskList 查询数据集导入任务列表
@ -271,6 +282,9 @@ type (
StopNotebook(ctx context.Context, in *StopNotebookReq, opts ...grpc.CallOption) (*StopNotebookResp, error)
GetNotebookStorage(ctx context.Context, in *GetNotebookStorageReq, opts ...grpc.CallOption) (*GetNotebookStorageResp, error)
MountNotebookStorage(ctx context.Context, in *MountNotebookStorageReq, opts ...grpc.CallOption) (*MountNotebookStorageResp, error)
// visualization-jobs
GetVisualizationJob(ctx context.Context, in *GetVisualizationJobReq, opts ...grpc.CallOption) (*GetVisualizationJobResp, error)
CreateVisualizationJob(ctx context.Context, in *CreateVisualizationJobReq, opts ...grpc.CallOption) (*CreateVisualizationJobResp, error)
}
defaultModelArts struct {
@ -296,6 +310,12 @@ func (m *defaultModelArts) GetDatasetList(ctx context.Context, in *DatasetReq, o
return client.GetDatasetList(ctx, in, opts...)
}
// create DataSet
func (m *defaultModelArts) CreateDataSet(ctx context.Context, in *CreateDataSetReq, opts ...grpc.CallOption) (*CreateDataSetResq, error) {
client := modelarts.NewModelArtsClient(m.cli.Conn())
return client.CreateDataSet(ctx, in, opts...)
}
// create data import task
func (m *defaultModelArts) CreateTask(ctx context.Context, in *ImportTaskDataReq, opts ...grpc.CallOption) (*ImportTaskDataResp, error) {
client := modelarts.NewModelArtsClient(m.cli.Conn())
@ -460,3 +480,14 @@ func (m *defaultModelArts) MountNotebookStorage(ctx context.Context, in *MountNo
client := modelarts.NewModelArtsClient(m.cli.Conn())
return client.MountNotebookStorage(ctx, in, opts...)
}
// visualization-jobs
func (m *defaultModelArts) GetVisualizationJob(ctx context.Context, in *GetVisualizationJobReq, opts ...grpc.CallOption) (*GetVisualizationJobResp, error) {
client := modelarts.NewModelArtsClient(m.cli.Conn())
return client.GetVisualizationJob(ctx, in, opts...)
}
func (m *defaultModelArts) CreateVisualizationJob(ctx context.Context, in *CreateVisualizationJobReq, opts ...grpc.CallOption) (*CreateVisualizationJobResp, error) {
client := modelarts.NewModelArtsClient(m.cli.Conn())
return client.CreateVisualizationJob(ctx, in, opts...)
}

View File

@ -1543,27 +1543,27 @@ message CreateDataSetResq{
/******************Notebook Start*************************/
message ListNotebookReq{
string project_id = 1;
ListNotebookParam param = 2;
string project_id = 1; // @gotags: copier:"project_id"
ListNotebookParam param = 2; // @gotags: copier:"param"
}
message ListNotebookResp{
int32 current = 1;
repeated NotebookResp data = 2;
int32 pages = 3;
int32 size = 4;
int64 total = 5;
int32 current = 1; // @gotags: copier:"current"
repeated NotebookResp data = 2; // @gotags: copier:"data"
int32 pages = 3; // @gotags: copier:"pages"
int32 size = 4; // @gotags: copier:"size"
int64 total = 5; // @gotags: copier:"total"
}
message ListNotebookParam{
string feature = 1;
int32 limit = 2;
string name = 3;
string pool_id = 4;
int32 offset = 5;
string owner = 6;
string sort_dir = 7;
string sort_key = 8;
string status = 9;
string workspaceId = 10;
string feature = 1; // @gotags: copier:"feature"
int32 limit = 2; // @gotags: copier:"limit"
string name = 3; // @gotags: copier:"name"
string pool_id = 4; // @gotags: copier:"pool_id"
int32 offset = 5; // @gotags: copier:"offset"
string owner = 6; // @gotags: copier:"owner"
string sort_dir = 7; // @gotags: copier:"sort_dir"
string sort_key = 8; // @gotags: copier:"sort_key"
string status = 9; // @gotags: copier:"status"
string workspaceId = 10; // @gotags: copier:"workspaceId"
}
message CreateNotebookReq{
@ -1591,6 +1591,8 @@ message CreateNotebookParam{
message StartNotebookReq{
string id = 1;
string project_id = 2;
StartNotebookParam param = 3;
}
message StartNotebookResp{
NotebookResp notebookResp = 1;
@ -1647,73 +1649,73 @@ message DataVolumesRes{
}
message NotebookResp{
repeated JobProgress action_progress = 1;
string description = 2;
repeated EndpointsRes endpoints = 3;
string fail_reason = 4;
string flavor = 5;
string id = 6;
Image image = 7;
Lease lease = 8;
string name = 9;
Pool pool = 10;
string status = 11;
string token = 12;
string url = 13;
VolumeRes volume = 14;
string workspace_id = 15;
string feature = 16;
repeated JobProgress action_progress = 1; // @gotags: copier:"action_progress"
string description = 2; // @gotags: copier:"description"
repeated EndpointsRes endpoints = 3; // @gotags: copier:"endpoints"
string fail_reason = 4; // @gotags: copier:"fail_reason"
string flavor = 5; // @gotags: copier:"flavor"
string id = 6; // @gotags: copier:"id"
Image image = 7; // @gotags: copier:"image"
Lease lease = 8; // @gotags: copier:"lease"
string name = 9; // @gotags: copier:"name"
Pool pool = 10; // @gotags: copier:"pool"
string status = 11; // @gotags: copier:"status"
string token = 12; // @gotags: copier:"token"
string url = 13; // @gotags: copier:"url"
VolumeRes volume = 14; // @gotags: copier:"volume"
string workspace_id = 15; // @gotags: copier:"workspace_id"
string feature = 16; // @gotags: copier:"feature"
}
message JobProgress{
string notebook_id = 1;
string status = 2;
int32 step = 3;
string step_description = 4;
string notebook_id = 1; // @gotags: copier:"notebook_id"
string status = 2; // @gotags: copier:"status"
int32 step = 3; // @gotags: copier:"step"
string step_description = 4; // @gotags: copier:"step_description"
}
message EndpointsRes{
repeated string allowed_access_ips = 1;
string dev_service = 2;
repeated string ssh_keys = 3;
repeated string allowed_access_ips = 1; // @gotags: copier:"allowed_access_ips"
string dev_service = 2; // @gotags: copier:"dev_service"
repeated string ssh_keys = 3; // @gotags: copier:"ssh_keys"
}
message Image{
string arch = 1;
int64 create_at = 2;
string description = 3;
repeated string dev_services = 4;
string id = 5;
string name = 6;
string namespace = 7;
string origin = 8;
repeated string resource_categories = 9;
string service_type = 10;
int64 size = 11;
string status = 12;
string status_message = 13;
repeated string support_res_categories = 14;
string swr_path = 15;
string tag = 16;
string type = 17;
int64 update_at = 18;
string visibility = 19;
string workspace_id = 20;
string arch = 1; // @gotags: copier:"arch"
int64 create_at = 2; // @gotags: copier:"create_at"
string description = 3; // @gotags: copier:"description"
repeated string dev_services = 4; // @gotags: copier:"dev_services"
string id = 5; // @gotags: copier:"id"
string name = 6; // @gotags: copier:"name"
string namespace = 7; // @gotags: copier:"namespace"
string origin = 8; // @gotags: copier:"origin"
repeated string resource_categories = 9; // @gotags: copier:"resource_categories"
string service_type = 10; // @gotags: copier:"service_type"
int64 size = 11; // @gotags: copier:"size"
string status = 12; // @gotags: copier:"status"
string status_message = 13; // @gotags: copier:"status_message"
repeated string support_res_categories = 14; // @gotags: copier:"support_res_categories"
string swr_path = 15; // @gotags: copier:"swr_path"
string tag = 16; // @gotags: copier:"tag"
string type = 17; // @gotags: copier:"type_image"
int64 update_at = 18; // @gotags: copier:"update_at"
string visibility = 19; // @gotags: copier:"visibility"
string workspace_id = 20; // @gotags: copier:"workspace_id"
}
message Lease{
int64 create_at = 1;
int64 duration = 2;
bool enable = 3;
string type = 4;
int64 update_at = 5;
int64 create_at = 1; // @gotags: copier:"create_at"
int64 duration = 2; // @gotags: copier:"duration"
bool enable = 3; // @gotags: copier:"enable"
string type = 4; // @gotags: copier:"type_lease"
int64 update_at = 5; // @gotags: copier:"update_at"
}
message Pool{
string id = 1;
string name = 2;
string id = 1; // @gotags: copier:"id"
string name = 2; // @gotags: copier:"name"
}
message VolumeRes{
int64 capacity = 1;
string category = 2;
string mount_path = 3;
string ownership = 4;
string status = 5;
int64 capacity = 1; // @gotags: copier:"capacity"
string category = 2; // @gotags: copier:"category"
string mount_path = 3; // @gotags: copier:"mount_path"
string ownership = 4; // @gotags: copier:"ownership"
string status = 5; // @gotags: copier:"status"
}
message EndpointsReq{
repeated string allowed_access_ips = 1;
@ -1796,8 +1798,16 @@ message CreateVisualizationJobParam{
string job_desc = 2;
string train_url = 3;
string job_type = 4;
//flavor
//schedule
flavor flavor = 5;
schedule schedule = 6;
}
message flavor{
string code = 1;
}
message schedule{
string type = 1;
string time_unit = 2;
int32 duration = 3;
}
/******************Visualization Job End*************************/
@ -1809,6 +1819,8 @@ service ModelArts {
rpc GetToken(TokenReq) returns (TokenResp);
//get modelarts Token
rpc GetDatasetList(datasetReq) returns (datasetResp);
//create DataSet
rpc CreateDataSet(CreateDataSetReq) returns (CreateDataSetResq);
//create task
rpc createTask(ImportTaskDataReq) returns(ImportTaskDataResp);

View File

@ -4,6 +4,8 @@ import (
"io"
"log"
"net/http"
"reflect"
"strconv"
)
const (
@ -18,6 +20,7 @@ func HttpClient(method string, url string, payload io.Reader, token string) ([]b
request.Header.Add("Content-Type", "application/json")
request.Header.Add("User-Agent", "API Explorer")
request.Header.Add("x-auth-token", token)
client := &http.Client{}
res, err := client.Do(request)
if err != nil {
@ -31,3 +34,48 @@ func HttpClient(method string, url string, payload io.Reader, token string) ([]b
return body, err
}
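// HttpClientWithQueries behaves like HttpClient, but additionally encodes the non-zero fields of param into the request's URL query string before sending.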
func HttpClientWithQueries[T any](method string, url string, payload io.Reader, token string, param T) ([]byte, error) {
request, err := http.NewRequest(method, url, payload)
if err != nil {
return nil, err
}
request.Header.Add("Content-Type", "application/json")
request.Header.Add("User-Agent", "API Explorer")
request.Header.Add("x-auth-token", token)
convertStructToQueryUrl(request, param)
client := &http.Client{}
res, err := client.Do(request)
if err != nil {
log.Fatal(err)
}
defer res.Body.Close()
body, err := io.ReadAll(res.Body)
if err != nil {
log.Fatal(err)
}
return body, err
}
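// convertStructToQueryUrl reflects over param and appends each non-zero field to the request's query string, using the Go field name as the key; a zero-valued param is left untouched.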
func convertStructToQueryUrl[T any](request *http.Request, param T) {
if reflect.ValueOf(param).IsZero() {
return
}
query := request.URL.Query()
values := reflect.ValueOf(param)
types := values.Type()
for i := 0; i < values.NumField(); i++ {
if !values.Field(i).IsZero() {
if values.Field(i).CanInt() {
query.Add(types.Field(i).Name, strconv.FormatInt(values.Field(i).Int(), 10))
} else if values.Field(i).Kind() == reflect.Bool {
query.Add(types.Field(i).Name, strconv.FormatBool(values.Field(i).Bool()))
} else {
query.Add(types.Field(i).Name, values.Field(i).String())
}
}
}
request.URL.RawQuery = query.Encode()
}
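A minimal usage sketch of the new generic helper (the param struct, URL and caller below are illustrative, not part of this commit), showing that the exported Go field names become the query keys:

// Hypothetical caller: Limit=10 and Name="demo" end up as ?Limit=10&Name=demo,
// while the zero-valued Offset field is skipped by convertStructToQueryUrl.
type listParam struct {
    Limit  int32
    Offset int32
    Name   string
}

func listNotebooksDemo(token string) ([]byte, error) {
    return HttpClientWithQueries(GET, "https://example.com/v1/demo/notebooks", nil, token, listParam{Limit: 10, Name: "demo"})
}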