@@ -41,7 +41,7 @@ type Attachment struct {
     Size int64 `xorm:"DEFAULT 0"`
     IsPrivate bool `xorm:"DEFAULT false"`
     DecompressState int32 `xorm:"DEFAULT 0"`
-    Type int `xorm:"DEFAULT 0"`
+    Type            int   `xorm:"DEFAULT 0"`
     CreatedUnix timeutil.TimeStamp `xorm:"created"`
 }
@@ -7,24 +7,26 @@ import (
 )
 type BlockChainIssueStatus int
 const (
     BlockChainIssueInit BlockChainIssueStatus = iota
     BlockChainIssueSuccess
     BlockChainIssueFailed
 )
 type BlockChainIssue struct {
-    ID int64 `xorm:"pk autoincr"`
-    IssueID int64 `xorm:"INDEX NOT NULL unique"`
-    Contributor string `xorm:"INDEX NOT NULL"`
-    ContractAddress string `xorm:"INDEX NOT NULL"`
-    Status BlockChainIssueStatus `xorm:"INDEX NOT NULL DEFAULT 0"`
-    Amount int64 `xorm:"INDEX"`
-    UserID int64 `xorm:"INDEX"`
-    RepoID int64 `xorm:"INDEX"`
-    TransactionHash string `xorm:"INDEX"`
-    CreatedUnix timeutil.TimeStamp `xorm:"created"`
-    UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
-    DeletedAt time.Time `xorm:"deleted"`
+    ID              int64                 `xorm:"pk autoincr"`
+    IssueID         int64                 `xorm:"INDEX NOT NULL unique"`
+    Contributor     string                `xorm:"INDEX NOT NULL"`
+    ContractAddress string                `xorm:"INDEX NOT NULL"`
+    Status          BlockChainIssueStatus `xorm:"INDEX NOT NULL DEFAULT 0"`
+    Amount          int64                 `xorm:"INDEX"`
+    UserID          int64                 `xorm:"INDEX"`
+    RepoID          int64                 `xorm:"INDEX"`
+    TransactionHash string                `xorm:"INDEX"`
+    CreatedUnix     timeutil.TimeStamp    `xorm:"created"`
+    UpdatedUnix     timeutil.TimeStamp    `xorm:"updated"`
+    DeletedAt       time.Time             `xorm:"deleted"`
     User *User `xorm:"-"`
     Repo *Repository `xorm:"-"`
@@ -15,19 +15,19 @@ const (
 )
 type BlockChain struct {
-    ID int64 `xorm:"pk autoincr"`
-    PrID int64 `xorm:"INDEX NOT NULL unique"`
-    CommitID string `xorm:"INDEX NOT NULL unique"`
-    Contributor string `xorm:"INDEX NOT NULL"`
-    ContractAddress string `xorm:"INDEX NOT NULL"`
-    Status BlockChainCommitStatus `xorm:"INDEX NOT NULL DEFAULT 0"`
-    Amount int64 `xorm:"INDEX"`
-    UserID int64 `xorm:"INDEX"`
-    RepoID int64 `xorm:"INDEX"`
-    TransactionHash string `xorm:"INDEX"`
-    CreatedUnix timeutil.TimeStamp `xorm:"created"`
-    UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
-    DeletedAt time.Time `xorm:"deleted"`
+    ID              int64                  `xorm:"pk autoincr"`
+    PrID            int64                  `xorm:"INDEX NOT NULL unique"`
+    CommitID        string                 `xorm:"INDEX NOT NULL unique"`
+    Contributor     string                 `xorm:"INDEX NOT NULL"`
+    ContractAddress string                 `xorm:"INDEX NOT NULL"`
+    Status          BlockChainCommitStatus `xorm:"INDEX NOT NULL DEFAULT 0"`
+    Amount          int64                  `xorm:"INDEX"`
+    UserID          int64                  `xorm:"INDEX"`
+    RepoID          int64                  `xorm:"INDEX"`
+    TransactionHash string                 `xorm:"INDEX"`
+    CreatedUnix     timeutil.TimeStamp     `xorm:"created"`
+    UpdatedUnix     timeutil.TimeStamp     `xorm:"updated"`
+    DeletedAt       time.Time              `xorm:"deleted"`
     User *User `xorm:"-"`
     Repo *Repository `xorm:"-"`
@@ -23,25 +23,25 @@ const (
     JobFailed CloudbrainStatus = "FAILED"
     JobRunning CloudbrainStatus = "RUNNING"
-    JobTypeDebug JobType = "DEBUG"
-    JobTypeBenchmark JobType = "BENCHMARK"
+    JobTypeDebug     JobType = "DEBUG"
+    JobTypeBenchmark JobType = "BENCHMARK"
     JobTypeSnn4imagenet JobType = "SNN4IMAGENET"
-    ModelArtsCreateQueue ModelArtsJobStatus = "CREATE_QUEUING" //queuing for creation (free resource)
-    ModelArtsCreating ModelArtsJobStatus = "CREATING" //creating
-    ModelArtsCreateFailed ModelArtsJobStatus = "CREATE_FAILED" //creation failed
-    ModelArtsStartQueuing ModelArtsJobStatus = "START_QUEUING" //queuing for start (free resource)
-    ModelArtsReadyToStart ModelArtsJobStatus = "READY_TO_START" //waiting to start (free resource)
-    ModelArtsStarting ModelArtsJobStatus = "STARTING" //starting
-    ModelArtsRestarting ModelArtsJobStatus = "RESTARTING" //restarting
-    ModelArtsStartFailed ModelArtsJobStatus = "START_FAILED" //start failed
-    ModelArtsRunning ModelArtsJobStatus = "RUNNING" //running
-    ModelArtsStopping ModelArtsJobStatus = "STOPPING" //stopping
-    ModelArtsStopped ModelArtsJobStatus = "STOPPED" //stopped
-    ModelArtsUnavailable ModelArtsJobStatus = "UNAVAILABLE" //faulty
-    ModelArtsDeleted ModelArtsJobStatus = "DELETED" //deleted
-    ModelArtsResizing ModelArtsJobStatus = "RESIZING" //flavor change in progress
-    ModelArtsResizFailed ModelArtsJobStatus = "RESIZE_FAILED" //flavor change failed
+    ModelArtsCreateQueue  ModelArtsJobStatus = "CREATE_QUEUING" //queuing for creation (free resource)
+    ModelArtsCreating     ModelArtsJobStatus = "CREATING"       //creating
+    ModelArtsCreateFailed ModelArtsJobStatus = "CREATE_FAILED"  //creation failed
+    ModelArtsStartQueuing ModelArtsJobStatus = "START_QUEUING"  //queuing for start (free resource)
+    ModelArtsReadyToStart ModelArtsJobStatus = "READY_TO_START" //waiting to start (free resource)
+    ModelArtsStarting     ModelArtsJobStatus = "STARTING"       //starting
+    ModelArtsRestarting   ModelArtsJobStatus = "RESTARTING"     //restarting
+    ModelArtsStartFailed  ModelArtsJobStatus = "START_FAILED"   //start failed
+    ModelArtsRunning      ModelArtsJobStatus = "RUNNING"        //running
+    ModelArtsStopping     ModelArtsJobStatus = "STOPPING"       //stopping
+    ModelArtsStopped      ModelArtsJobStatus = "STOPPED"        //stopped
+    ModelArtsUnavailable  ModelArtsJobStatus = "UNAVAILABLE"    //faulty
+    ModelArtsDeleted      ModelArtsJobStatus = "DELETED"        //deleted
+    ModelArtsResizing     ModelArtsJobStatus = "RESIZING"       //flavor change in progress
+    ModelArtsResizFailed  ModelArtsJobStatus = "RESIZE_FAILED"  //flavor change failed
 )
 type Cloudbrain struct {
@@ -59,7 +59,7 @@ type Cloudbrain struct {
     UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
     DeletedAt time.Time `xorm:"deleted"`
     CanDebug bool `xorm:"-"`
-    Type int `xorm:"INDEX DEFAULT 0"`
+    Type        int                `xorm:"INDEX DEFAULT 0"`
     User *User `xorm:"-"`
     Repo *Repository `xorm:"-"`
@@ -118,14 +118,14 @@ type GetJobResult struct {
 }
 type GetImagesResult struct {
-    Code string `json:"code"`
-    Msg string `json:"msg"`
-    Payload GetImagesPayload `json:"payload"`
+    Code    string           `json:"code"`
+    Msg     string           `json:"msg"`
+    Payload GetImagesPayload `json:"payload"`
 }
 type GetImagesPayload struct {
-    Count int `json:"count"`
-    ImageInfo []*ImageInfo `json:"rows"`
+    Count     int          `json:"count"`
+    ImageInfo []*ImageInfo `json:"rows"`
 }
 type CloudbrainsOptions struct {
@@ -136,7 +136,7 @@ type CloudbrainsOptions struct {
     SortType string
     CloudbrainIDs []int64
     // JobStatus CloudbrainStatus
-    Type int
+    Type          int
 }
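Not part of the patch, just an illustration: with the Type field added to CloudbrainsOptions, a caller can restrict a listing to one backend. The sketch below mirrors the CloudBrainIndex and ModelArtsIndex call sites further down in this diff; the function name and Page/PageSize values are placeholders.

// sketch only; assumes this project's models package is imported
func listCloudbrainOneTasks(repoID int64) ([]*models.Cloudbrain, int64, error) {
    return models.Cloudbrains(&models.CloudbrainsOptions{
        ListOptions: models.ListOptions{Page: 1, PageSize: 10},
        RepoID:      repoID,
        // models.TypeCloudBrainTwo would select the ModelArts backend instead
        Type: models.TypeCloudBrainOne,
    })
}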
 type TaskPod struct {
     TaskRoleStatus struct {
@@ -162,11 +162,11 @@ type TaskPod struct {
 }
 type TaskInfo struct {
-    Username string `json:"username"`
-    TaskName string `json:"task_name"`
-    CodeName string `json:"code_name"`
+    Username          string   `json:"username"`
+    TaskName          string   `json:"task_name"`
+    CodeName          string   `json:"code_name"`
     BenchmarkCategory []string `json:"selected_category"`
-    CodeLink string `json:"code_link"`
+    CodeLink          string   `json:"code_link"`
 }
 func ConvertToTaskPod(input map[string]interface{}) (TaskPod, error) {
@@ -268,11 +268,11 @@ type ImageInfo struct {
 }
 type Categories struct {
-    Category []*Category `json:"category"`
+    Category []*Category `json:"category"`
 }
 type Category struct {
-    Id int `json:"id"`
+    Id    int    `json:"id"`
     Value string `json:"value"`
 }
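Not part of the patch: a minimal sketch of how the GetImagesResult / GetImagesPayload structs above decode a JSON response. The function name is hypothetical; only the json tag names come from the code.

// sketch only; assumes encoding/json and this project's models package are imported
func decodeImages(body []byte) (int, error) {
    var result models.GetImagesResult
    if err := json.Unmarshal(body, &result); err != nil {
        return 0, err
    }
    // result.Payload.ImageInfo carries the entries of the "rows" array
    return result.Payload.Count, nil
}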
@@ -295,42 +295,42 @@ type StopJobResult struct {
 }
 type CreateNotebookParams struct {
-    JobName string `json:"name"`
-    Description string `json:"description"`
-    ProfileID string `json:"profile_id"`
-    Flavor string `json:"flavor"`
-    Spec Spec `json:"spec"`
-    Workspace Workspace `json:"workspace"`
-    Pool Pool `json:"pool"`
+    JobName     string    `json:"name"`
+    Description string    `json:"description"`
+    ProfileID   string    `json:"profile_id"`
+    Flavor      string    `json:"flavor"`
+    Spec        Spec      `json:"spec"`
+    Workspace   Workspace `json:"workspace"`
+    Pool        Pool      `json:"pool"`
 }
 type Pool struct {
-    ID string `json:"id"`
-    Name string `json:"name"`
-    Type string `json:"type"`
+    ID   string `json:"id"`
+    Name string `json:"name"`
+    Type string `json:"type"`
 }
 type Workspace struct {
-    ID string `json:"id"`
+    ID string `json:"id"`
 }
 type Spec struct {
-    Storage Storage `json:"storage"`
-    AutoStop AutoStop `json:"auto_stop"`
+    Storage  Storage  `json:"storage"`
+    AutoStop AutoStop `json:"auto_stop"`
 }
 type AutoStop struct {
-    Enable bool `json:"enable"`
-    Duration int `json:"duration"`
+    Enable   bool `json:"enable"`
+    Duration int  `json:"duration"`
 }
 type Storage struct {
-    Type string `json:"type"`
-    Location Location `json:"location"`
+    Type     string   `json:"type"`
+    Location Location `json:"location"`
 }
 type Location struct {
-    Path string `json:"path"`
+    Path string `json:"path"`
 }
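Not part of the patch: the nesting of CreateNotebookParams above is easiest to read from the body it marshals to. A sketch under these assumptions: the string values are placeholders, while "obs", "USER_DEFINED" and the 4-hour duration reuse constants that appear later in this diff.

// sketch only; assumes encoding/json and this project's models package are imported
func buildNotebookBody() ([]byte, error) {
    params := models.CreateNotebookParams{
        JobName:   "debug-task",
        ProfileID: "some-profile-id",
        Flavor:    "some-flavor",
        Pool:      models.Pool{ID: "some-pool-id", Name: "some-pool", Type: "USER_DEFINED"},
        Spec: models.Spec{
            Storage:  models.Storage{Type: "obs", Location: models.Location{Path: "bucket/base/path/"}},
            AutoStop: models.AutoStop{Enable: true, Duration: 4 * 60 * 60},
        },
    }
    // roughly: {"name":"debug-task","description":"","profile_id":"some-profile-id","flavor":"some-flavor",
    //   "spec":{"storage":{"type":"obs","location":{"path":"bucket/base/path/"}},
    //           "auto_stop":{"enable":true,"duration":14400}},
    //   "workspace":{"id":""},"pool":{"id":"some-pool-id","name":"some-pool","type":"USER_DEFINED"}}
    return json.Marshal(params)
}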
 type NotebookResult struct {
@@ -339,119 +339,119 @@ type NotebookResult struct {
 }
 type CreateNotebookResult struct {
-    ErrorCode string `json:"error_code"`
-    ErrorMsg string `json:"error_msg"`
-    ID string `json:"id"`
-    Name string `json:"name"`
-    Description string `json:"description"`
-    Status string `json:"status"`
-    CreationTimestamp string `json:"creation_timestamp"`
-    LatestUpdateTimestamp string `json:"latest_update_timestamp"`
-    Profile struct {
-        ID string `json:"id"`
-        Name string `json:"name"`
-        Description string `json:"description"`
-        DeType string `json:"de_type"`
-        FlavorType string `json:"flavor_type"`
+    ErrorCode             string `json:"error_code"`
+    ErrorMsg              string `json:"error_msg"`
+    ID                    string `json:"id"`
+    Name                  string `json:"name"`
+    Description           string `json:"description"`
+    Status                string `json:"status"`
+    CreationTimestamp     string `json:"creation_timestamp"`
+    LatestUpdateTimestamp string `json:"latest_update_timestamp"`
+    Profile               struct {
+        ID          string `json:"id"`
+        Name        string `json:"name"`
+        Description string `json:"description"`
+        DeType      string `json:"de_type"`
+        FlavorType  string `json:"flavor_type"`
     } `json:"profile"`
-    Flavor string `json:"flavor"`
-    FlavorDetails struct{
-        Name string `json:"name"`
-        Status string `json:"status"`
-        QueuingNum int `json:"queuing_num"`
-        QueueLeftTime int `json:"queue_left_time"` //s
-        Duration int `json:"duration"` //auto_stop_time s
+    Flavor        string `json:"flavor"`
+    FlavorDetails struct {
+        Name          string `json:"name"`
+        Status        string `json:"status"`
+        QueuingNum    int    `json:"queuing_num"`
+        QueueLeftTime int    `json:"queue_left_time"` //s
+        Duration      int    `json:"duration"`        //auto_stop_time s
     } `json:"flavor_details"`
 }
 type GetNotebookResult struct {
-    ErrorCode string `json:"error_code"`
-    ErrorMsg string `json:"error_msg"`
-    ID string `json:"id"`
-    Name string `json:"name"`
-    Description string `json:"description"`
-    Status string `json:"status"`
-    CreationTimestamp string `json:"creation_timestamp"`
-    CreateTime string
-    LatestUpdateTimestamp string `json:"latest_update_timestamp"`
-    LatestUpdateTime string
-    Profile struct {
-        ID string `json:"id"`
-        Name string `json:"name"`
-        Description string `json:"description"`
-        DeType string `json:"de_type"`
-        FlavorType string `json:"flavor_type"`
+    ErrorCode             string `json:"error_code"`
+    ErrorMsg              string `json:"error_msg"`
+    ID                    string `json:"id"`
+    Name                  string `json:"name"`
+    Description           string `json:"description"`
+    Status                string `json:"status"`
+    CreationTimestamp     string `json:"creation_timestamp"`
+    CreateTime            string
+    LatestUpdateTimestamp string `json:"latest_update_timestamp"`
+    LatestUpdateTime      string
+    Profile               struct {
+        ID          string `json:"id"`
+        Name        string `json:"name"`
+        Description string `json:"description"`
+        DeType      string `json:"de_type"`
+        FlavorType  string `json:"flavor_type"`
     } `json:"profile"`
-    Flavor string `json:"flavor"`
-    FlavorDetails struct{
-        Name string `json:"name"`
-        Status string `json:"status"`
-        QueuingNum int `json:"queuing_num"`
-        QueueLeftTime int `json:"queue_left_time"` //s
-        Duration int `json:"duration"` //auto_stop_time s
+    Flavor        string `json:"flavor"`
+    FlavorDetails struct {
+        Name          string `json:"name"`
+        Status        string `json:"status"`
+        QueuingNum    int    `json:"queuing_num"`
+        QueueLeftTime int    `json:"queue_left_time"` //s
+        Duration      int    `json:"duration"`        //auto_stop_time s
     } `json:"flavor_details"`
-    QueuingInfo struct{
-        ID string `json:"id"`
-        Name string `json:"name"`
-        Flavor string `json:"flavor"`
-        DeType string `json:"de_type"`
-        Status string `json:"status"`
-        BeginTimestamp int `json:"begin_timestamp"`//time of instance begin in queue
-        BeginTime string
-        RemainTime int `json:"remain_time"` //remain time of instance
-        EndTimestamp int `json:"end_timestamp"` //
-        EndTime string
-        Rank int `json:"rank"` //rank of instance in queue
+    QueuingInfo struct {
+        ID             string `json:"id"`
+        Name           string `json:"name"`
+        Flavor         string `json:"flavor"`
+        DeType         string `json:"de_type"`
+        Status         string `json:"status"`
+        BeginTimestamp int    `json:"begin_timestamp"` //time of instance begin in queue
+        BeginTime      string
+        RemainTime     int    `json:"remain_time"`   //remain time of instance
+        EndTimestamp   int    `json:"end_timestamp"` //
+        EndTime        string
+        Rank           int    `json:"rank"` //rank of instance in queue
     } `json:"queuing_info"`
-    Spec struct{
-        Annotations struct{
-            TargetDomain string `json:"target_domain"`
-            Url string `json:"url"`
+    Spec struct {
+        Annotations struct {
+            TargetDomain string `json:"target_domain"`
+            Url          string `json:"url"`
         } `json:"annotations"`
     } `json:"spec"`
 }
 type GetTokenParams struct {
-    Auth Auth `json:"auth"`
+    Auth Auth `json:"auth"`
 }
 type Auth struct {
-    Identity Identity `json:"identity"`
-    Scope Scope `json:"scope"`
+    Identity Identity `json:"identity"`
+    Scope    Scope    `json:"scope"`
 }
 type Scope struct {
-    Project Project `json:"project"`
+    Project Project `json:"project"`
 }
 type Project struct {
-    Name string `json:"name"`
+    Name string `json:"name"`
 }
 type Identity struct {
-    Methods []string `json:"methods"`
-    Password Password `json:"password"`
+    Methods  []string `json:"methods"`
+    Password Password `json:"password"`
 }
 type Password struct {
-    User NotebookUser `json:"user"`
+    User NotebookUser `json:"user"`
 }
 type NotebookUser struct {
-    Name string `json:"name"`
-    Password string `json:"password"`
-    Domain Domain `json:"domain"`
+    Name     string `json:"name"`
+    Password string `json:"password"`
+    Domain   Domain `json:"domain"`
 }
 type Domain struct {
-    Name string `json:"name"`
+    Name string `json:"name"`
 }
 const (
-    ActionStart = "start"
-    ActionStop = "stop"
+    ActionStart   = "start"
+    ActionStop    = "stop"
     ActionRestart = "restart"
-    ActionQueue = "queue"
+    ActionQueue   = "queue"
     ActionDequeue = "dequeue"
 )
@@ -460,20 +460,20 @@ type NotebookAction struct {
 }
 type NotebookActionResult struct {
-    ErrorCode string `json:"error_code"`
-    ErrorMsg string `json:"error_msg"`
-    CurrentStatus string `json:"current_status"`
-    PreviousState string `json:"previous_state"`
+    ErrorCode     string `json:"error_code"`
+    ErrorMsg      string `json:"error_msg"`
+    CurrentStatus string `json:"current_status"`
+    PreviousState string `json:"previous_state"`
 }
 type NotebookGetJobTokenResult struct {
-    ErrorCode string `json:"error_code"`
-    ErrorMsg string `json:"error_msg"`
-    Token string `json:"token"`
+    ErrorCode string `json:"error_code"`
+    ErrorMsg  string `json:"error_msg"`
+    Token     string `json:"token"`
 }
 type NotebookDelResult struct {
-    InstanceID string `json:"instance_id"`
+    InstanceID string `json:"instance_id"`
 }
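Not part of the patch: the GetTokenParams tree above is what getToken (further down in this diff) sends to the IAM token endpoint. A sketch with placeholder credentials; only the field nesting and the "password" method name come from the code.

// sketch only; field nesting follows the structs above, values are placeholders
func buildTokenBody(user, pass, domain, project string) models.GetTokenParams {
    return models.GetTokenParams{
        Auth: models.Auth{
            Identity: models.Identity{
                Methods: []string{"password"},
                Password: models.Password{
                    User: models.NotebookUser{
                        Name:     user,
                        Password: pass,
                        Domain:   models.Domain{Name: domain},
                    },
                },
            },
            Scope: models.Scope{Project: models.Project{Name: project}},
        },
    }
}
// marshals to {"auth":{"identity":{...},"scope":{"project":{"name":"..."}}}}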
 func Cloudbrains(opts *CloudbrainsOptions) ([]*Cloudbrain, int64, error) {
@@ -196,7 +196,7 @@ func (s datasetMetaSearch) Less(i, j int) bool {
     return s.ID[i] < s.ID[j]
 }
-func GetDatasetAttachments(typeCloudBrain int ,rels ...*Dataset) (err error) {
+func GetDatasetAttachments(typeCloudBrain int, rels ...*Dataset) (err error) {
     return getDatasetAttachments(x, typeCloudBrain, rels...)
 }
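Not part of the patch: a one-line sketch of calling the signature above; the helper name is hypothetical and TypeCloudBrainOne is the same constant used by the cloudbrain call sites in this diff.

// sketch only
func loadCloudbrainOneAttachments(dataset *models.Dataset) error {
    return models.GetDatasetAttachments(models.TypeCloudBrainOne, dataset)
}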
@@ -24,7 +24,7 @@ type FileChunk struct {
     TotalChunks int
     Size int64
     UserID int64 `xorm:"INDEX"`
-    Type int `xorm:"INDEX DEFAULT 0"`
+    Type        int   `xorm:"INDEX DEFAULT 0"`
     CompletedParts []string `xorm:"DEFAULT ''"` // chunkNumber+etag eg: ,1-asqwewqe21312312.2-123hjkas
     CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
     UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
@@ -68,8 +68,8 @@ type Issue struct {
     IsLocked bool `xorm:"NOT NULL DEFAULT false"`
     //block_chain
-    Amount int64
-    IsTransformed bool `xorm:"INDEX NOT NULL DEFAULT false"`
+    Amount        int64
+    IsTransformed bool `xorm:"INDEX NOT NULL DEFAULT false"`
 }
 var (
@@ -38,9 +38,9 @@ const (
 const (
     PullRequestAmountZero int = 0
-    PullRequestAmountOne int = 100
-    PullRequestAmountTwo int = 200
-    PullRequestAmountMax int = 300
+    PullRequestAmountOne  int = 100
+    PullRequestAmountTwo  int = 200
+    PullRequestAmountMax  int = 300
 )
 // PullRequest represents relation between pull request and repositories.
@@ -74,8 +74,8 @@ type PullRequest struct {
     isHeadRepoLoaded bool `xorm:"-"`
     //block_chain
-    IsTransformed bool `xorm:"INDEX NOT NULL DEFAULT false"`
-    Amount int `xorm:"INDEX NOT NULL DEFAULT 0"`
+    IsTransformed bool `xorm:"INDEX NOT NULL DEFAULT false"`
+    Amount        int  `xorm:"INDEX NOT NULL DEFAULT 0"`
 }
 // MustHeadUserName returns the HeadRepo's username if failed return blank
@@ -175,7 +175,7 @@ func (prs PullRequestList) InvalidateCodeComments(doer *User, repo *git.Reposito
 func GetUnTransformedMergedPullRequests() ([]*PullRequest, error) {
     prs := make([]*PullRequest, 0, 10)
     return prs, x.
-        Where("has_merged = ? AND pull_request.is_transformed = ? AND to_timestamp(merged_unix) >= ?",true, false, setting.CommitValidDate).
+        Where("has_merged = ? AND pull_request.is_transformed = ? AND to_timestamp(merged_unix) >= ?", true, false, setting.CommitValidDate).
         Join("INNER", "issue", "issue.id = pull_request.issue_id").
         Find(&prs)
 }
@@ -206,9 +206,9 @@ type Repository struct {
     Avatar string `xorm:"VARCHAR(64)"`
     //blockchain
-    ContractAddress string `xorm:"INDEX"`
-    Balance string `xorm:"NOT NULL DEFAULT '0'"`
-    BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"`
+    ContractAddress  string               `xorm:"INDEX"`
+    Balance          string               `xorm:"NOT NULL DEFAULT '0'"`
+    BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"`
     // git clone total count
     CloneCnt int64 `xorm:"NOT NULL DEFAULT 0"`
@@ -292,7 +292,7 @@ var (
         UnitTypeExternalWiki: UnitExternalWiki,
         UnitTypeDatasets: UnitDataset,
         UnitTypeCloudBrain: UnitCloudBrain,
-        UnitTypeBlockChain: UnitBlockChain,
+        UnitTypeBlockChain:   UnitBlockChain,
     }
 )
@@ -174,8 +174,8 @@ type User struct {
     Token string `xorm:"VARCHAR(1024)"`
     //BlockChain
-    PublicKey string `xorm:"INDEX"`
-    PrivateKey string `xorm:"INDEX"`
+    PublicKey  string `xorm:"INDEX"`
+    PrivateKey string `xorm:"INDEX"`
 }
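Not part of the patch and heavily hypothetical: one way the new PublicKey/PrivateKey columns might be populated, assuming a wrapper around the createAccount endpoint (UrlCreateAccount below); that wrapper and its result fields are not shown in this diff.

// sketch only; blockchain.CreateAccount and its result fields are hypothetical
func assignBlockChainAccount(user *models.User) error {
    account, err := blockchain.CreateAccount() // assumed wrapper around UrlCreateAccount
    if err != nil {
        return err
    }
    user.PublicKey = account.PublicKey   // hypothetical field
    user.PrivateKey = account.PrivateKey // hypothetical field
    return models.UpdateUser(user)
}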
 // SearchOrganizationsOptions options to filter organizations
@@ -6,12 +6,12 @@ import (
 )
 type CreateCloudBrainForm struct {
-    JobName string `form:"job_name" binding:"Required"`
-    Image string `form:"image" binding:"Required"`
-    Command string `form:"command" binding:"Required"`
-    Attachment string `form:"attachment" binding:"Required"`
-    JobType string `form:"job_type" binding:"Required"`
-    BenchmarkCategory string `form:"get_benchmark_category"`
+    JobName           string `form:"job_name" binding:"Required"`
+    Image             string `form:"image" binding:"Required"`
+    Command           string `form:"command" binding:"Required"`
+    Attachment        string `form:"attachment" binding:"Required"`
+    JobType           string `form:"job_type" binding:"Required"`
+    BenchmarkCategory string `form:"get_benchmark_category"`
 }
 type CommitImageCloudBrainForm struct {
@@ -6,8 +6,8 @@ import (
 )
 type CreateModelArtsForm struct {
-    JobName string `form:"job_name" binding:"Required"`
-    Attachment string `form:"attachment" binding:"Required"`
+    JobName     string `form:"job_name" binding:"Required"`
+    Attachment  string `form:"attachment" binding:"Required"`
     Description string `form:"description"`
 }
@@ -369,7 +369,7 @@ type CreateIssueForm struct {
     AssigneeID int64
     Content string
     Files []string
-    Rewards int64
+    Rewards    int64
 }
 // Validate validates the fields
@@ -14,10 +14,10 @@ var (
 const (
     UrlCreateAccount = "createAccount"
-    UrlGetBalance = "getBalance"
-    UrlNewRepo = "newRepo"
-    UrlContribute = "contribute"
-    UrlSetIssue = "setIssue"
+    UrlGetBalance    = "getBalance"
+    UrlNewRepo       = "newRepo"
+    UrlContribute    = "contribute"
+    UrlSetIssue      = "setIssue"
     Success = 0
 )
@@ -29,9 +29,9 @@ type CreateAccountResult struct {
 }
 type GetBalanceResult struct {
-    Code int `json:"code"`
-    Msg string `json:"message"`
-    Data string `json:"data"`
+    Code int    `json:"code"`
+    Msg  string `json:"message"`
+    Data string `json:"data"`
 }
 type NewRepoResult struct {
@@ -41,14 +41,14 @@ type NewRepoResult struct {
 }
 type ContributeResult struct {
-    Code int `json:"code"`
-    Msg string `json:"message"`
+    Code int    `json:"code"`
+    Msg  string `json:"message"`
     //Payload map[string]interface{} `json:"data"`
 }
 type SetIssueResult struct {
-    Code int `json:"code"`
-    Msg string `json:"message"`
+    Code int    `json:"code"`
+    Msg  string `json:"message"`
     //Data string `json:"data"`
 }
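Not part of the patch: GetBalanceResult above pairs with the UrlGetBalance constant; a wrapper would presumably follow the same resty pattern as Contribute and SetIssue below. The helper name and the query-parameter name here are assumptions.

// sketch only; the real wrapper (if any) may differ
func getBalanceSketch(client *resty.Client, contractAddress string) (*GetBalanceResult, error) {
    var result GetBalanceResult
    _, err := client.R().
        SetHeader("Accept", "application/json").
        SetQueryParam("contractAddress", contractAddress). // assumed parameter name
        SetResult(&result).
        Get(setting.BlockChainHost + UrlGetBalance)
    if err != nil {
        return nil, err
    }
    return &result, nil
}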
@@ -136,10 +136,10 @@ func Contribute(contractAddress, contributor, commitId string, amount int64) (*C
     res, err := client.R().
         SetHeader("Accept", "application/json").
         SetQueryParams(map[string]string{
-            "contractAddress" : contractAddress,
-            "contributor" : contributor,
-            "commitId": commitId,
-            "amount": strAmount,
+            "contractAddress": contractAddress,
+            "contributor":     contributor,
+            "commitId":        commitId,
+            "amount":          strAmount,
         }).
         SetResult(&result).
         Get(setting.BlockChainHost + UrlContribute)
@@ -164,10 +164,10 @@ func SetIssue(contractAddress, contributor string, issueId int64, amount int64)
     res, err := client.R().
         SetHeader("Accept", "application/json").
         SetQueryParams(map[string]string{
-            "contractAddress" : contractAddress,
-            "contributor" : contributor,
-            "issueId": strIssue,
-            "amount": strAmount,
+            "contractAddress": contractAddress,
+            "contributor":     contributor,
+            "issueId":         strIssue,
+            "amount":          strAmount,
         }).
         SetResult(&result).
         Get(setting.BlockChainHost + UrlSetIssue)
@@ -10,13 +10,13 @@ import (
 )
 const (
-    Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
-    CodeMountPath = "/code"
-    DataSetMountPath = "/dataset"
-    ModelMountPath = "/model"
-    BenchMarkMountPath = "/benchmark"
+    Command               = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
+    CodeMountPath         = "/code"
+    DataSetMountPath      = "/dataset"
+    ModelMountPath        = "/model"
+    BenchMarkMountPath    = "/benchmark"
     Snn4imagenetMountPath = "/snn4imagenet"
-    TaskInfoName = "/taskInfo"
+    TaskInfoName          = "/taskInfo"
     SubTaskName = "task1"
@@ -106,7 +106,7 @@ func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath,
         JobName: jobName,
         SubTaskName: SubTaskName,
         JobType: jobType,
-        Type: models.TypeCloudBrainOne,
+        Type:        models.TypeCloudBrainOne,
     })
     if err != nil {
@@ -10,46 +10,45 @@ import (
 )
 const (
-    storageTypeOBS = "obs"
+    storageTypeOBS   = "obs"
     autoStopDuration = 4 * 60 * 60
-    flavor = "modelarts.kat1.xlarge"
+    flavor = "modelarts.kat1.xlarge"
     //profileID = "Python3-ascend910-arm"
     profileID = "efa847c0-7359-11eb-b34f-0255ac100057"
-    poolID = "pool1328035d"
-    poolName = "train-private-1"
-    poolType = "USER_DEFINED"
+    poolID   = "pool1328035d"
+    poolName = "train-private-1"
+    poolType = "USER_DEFINED"
     DataSetMountPath = "/home/ma-user/work"
-    NotebookEnv = "Python3"
-    NotebookType = "Ascend"
-    FlavorInfo = "Ascend: 1*Ascend 910 CPU: 24 核 96GiB (modelarts.kat1.xlarge)"
+    NotebookEnv      = "Python3"
+    NotebookType     = "Ascend"
+    FlavorInfo       = "Ascend: 1*Ascend 910 CPU: 24 核 96GiB (modelarts.kat1.xlarge)"
 )
 func GenerateTask(ctx *context.Context, jobName, uuid, description string) error {
     dataActualPath := setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + "/"
     jobResult, err := CreateJob(models.CreateNotebookParams{
-        JobName: jobName,
-        Description:description,
-        ProfileID: profileID,
-        Flavor: flavor,
+        JobName:     jobName,
+        Description: description,
+        ProfileID:   profileID,
+        Flavor:      flavor,
         Pool: models.Pool{
-            ID: poolID,
+            ID:   poolID,
             Name: poolName,
             Type: poolType,
         },
         Spec: models.Spec{
             Storage: models.Storage{
                 Type: storageTypeOBS,
-                Location:models.Location{
+                Location: models.Location{
                     Path: dataActualPath,
                 },
             },
             AutoStop: models.AutoStop{
-                Enable: true,
+                Enable:   true,
                 Duration: autoStopDuration,
             },
         },
     })
     if err != nil {
         log.Error("CreateJob failed: %v", err.Error())
@@ -57,13 +56,13 @@ func GenerateTask(ctx *context.Context, jobName, uuid, description string) error
     }
     err = models.CreateCloudbrain(&models.Cloudbrain{
-        Status: string(models.JobWaiting),
-        UserID: ctx.User.ID,
-        RepoID: ctx.Repo.Repository.ID,
-        JobID: jobResult.ID,
-        JobName: jobName,
-        JobType: string(models.JobTypeDebug),
-        Type: models.TypeCloudBrainTwo,
+        Status:  string(models.JobWaiting),
+        UserID:  ctx.User.ID,
+        RepoID:  ctx.Repo.Repository.ID,
+        JobID:   jobResult.ID,
+        JobName: jobName,
+        JobType: string(models.JobTypeDebug),
+        Type:    models.TypeCloudBrainTwo,
     })
     if err != nil {
@@ -21,10 +21,11 @@ var (
 const (
     methodPassword = "password"
-    urlGetToken = "/v3/auth/tokens"
-    urlNotebook = "/demanager/instances"
+    urlGetToken    = "/v3/auth/tokens"
+    urlNotebook    = "/demanager/instances"
     errorCodeExceedLimit = "ModelArts.0118"
 )
 func getRestyClient() *resty.Client {
     if restyClient == nil {
         restyClient = resty.New()
@@ -54,7 +55,7 @@ func getToken() error {
         Methods: []string{methodPassword},
         Password: models.Password{
             User: models.NotebookUser{
-                Name: setting.ModelArtsUsername,
+                Name:     setting.ModelArtsUsername,
                 Password: setting.ModelArtsPassword,
                 Domain: models.Domain{
                     Name: setting.ModelArtsDomain,
@@ -790,7 +790,6 @@ func (obsClient ObsClient) GetBucketRequestPaymentWithSignedUrl(signedUrl string
     return
 }
 func (obsClient ObsClient) CreateUploadPartSignedUrl(bucketName, objectKey, uploadId string, partNumber int, partSize int64) (*http.Request, error) {
     var req *http.Request
@@ -831,7 +830,7 @@ func (obsClient ObsClient) CreateUploadPartSignedUrl(bucketName, objectKey, uplo
         }
     }
-    headers["Content-Length"] = []string{com.ToStr(partNumber,10)}
+    headers["Content-Length"] = []string{com.ToStr(partNumber, 10)}
     requestURL, err := obsClient.doAuth(HTTP_PUT, bucketName, objectKey, params, headers, "")
     if err != nil {
@@ -443,7 +443,7 @@ var (
     IsBenchmarkEnabled bool
     BenchmarkCode string
     BenchmarkServerHost string
-    BenchmarkCategory string
+    BenchmarkCategory   string
     //snn4imagenet config
     IsSnn4imagenetEnabled bool
@@ -464,13 +464,13 @@ var (
     //RealPath string
     //modelarts config
-    ModelArtsHost string
-    IamHost string
-    ProjectID string
-    ProjectName string
-    ModelArtsUsername string
-    ModelArtsPassword string
-    ModelArtsDomain string
+    ModelArtsHost     string
+    IamHost           string
+    ProjectID         string
+    ProjectName       string
+    ModelArtsUsername string
+    ModelArtsPassword string
+    ModelArtsDomain   string
 )
 // DateLang transforms standard language locale name to corresponding value in datetime plugin.
@@ -19,7 +19,7 @@ import (
 //todo: change the query method
 func ObsHasObject(path string) (bool, error) {
     hasObject := false
-    output, err := ObsCli.ListObjects(&obs.ListObjectsInput{Bucket:setting.Bucket})
+    output, err := ObsCli.ListObjects(&obs.ListObjectsInput{Bucket: setting.Bucket})
     if err != nil {
         log.Error("ListObjects failed:%v", err)
         return hasObject, err
@@ -40,9 +40,9 @@ func GetObsPartInfos(uuid string, uploadID string) (string, error) {
     key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/")
     output, err := ObsCli.ListParts(&obs.ListPartsInput{
-        Bucket: setting.Bucket,
-        Key: key,
-        UploadId: uploadID,
+        Bucket:   setting.Bucket,
+        Key:      key,
+        UploadId: uploadID,
     })
     if err != nil {
         log.Error("ListParts failed:", err.Error())
@@ -77,9 +77,9 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error {
     input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
     input.UploadId = uploadID
     output, err := ObsCli.ListParts(&obs.ListPartsInput{
-        Bucket: setting.Bucket,
-        Key: input.Key,
-        UploadId: uploadID,
+        Bucket:   setting.Bucket,
+        Key:      input.Key,
+        UploadId: uploadID,
     })
     if err != nil {
         log.Error("ListParts failed:", err.Error())
@@ -111,8 +111,8 @@ func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, file
     input.Method = obs.HttpMethodPut
     input.QueryParams = map[string]string{
-        "partNumber": com.ToStr(partNumber,10),
-        "uploadId": uploadId,
+        "partNumber": com.ToStr(partNumber, 10),
+        "uploadId":   uploadId,
         //"partSize": com.ToStr(partSize,10),
     }
@@ -131,7 +131,7 @@ func ObsGetPreSignedUrl(uuid, fileName string) (string, error) {
     input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
     input.Bucket = setting.Bucket
     input.Expires = 60 * 60
     reqParams := make(map[string]string)
     reqParams["response-content-disposition"] = "attachment; filename=\"" + fileName + "\""
     input.QueryParams = reqParams
@@ -42,7 +42,7 @@ func Copy(dstStorage ObjectStorage, dstPath string, srcStorage ObjectStorage, sr
 var (
     // Attachments represents attachments storage
     Attachments ObjectStorage
-    ObsCli *obs.ObsClient
+    ObsCli      *obs.ObsClient
 )
 // Init init the stoarge
@@ -38,8 +38,8 @@ func GetModelArtsTask(ctx *context.APIContext) {
     }
     ctx.JSON(http.StatusOK, map[string]interface{}{
-        "JobID": jobID,
-        "JobStatus": result.Status,
+        "JobID":     jobID,
+        "JobStatus": result.Status,
     })
 }
@@ -42,12 +42,12 @@ type CloudBrainDataset struct {
 }
 type UploadForm struct {
-    UploadID string `form:"uploadId"`
-    UuID string `form:"uuid"`
-    PartSize int64 `form:"size"`
-    Offset int64 `form:"offset"`
-    PartNumber int `form:"chunkNumber"`
-    PartFile multipart.File `form:"file"`
+    UploadID   string         `form:"uploadId"`
+    UuID       string         `form:"uuid"`
+    PartSize   int64          `form:"size"`
+    Offset     int64          `form:"offset"`
+    PartNumber int            `form:"chunkNumber"`
+    PartFile   multipart.File `form:"file"`
 }
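Not part of the patch: the form tags on UploadForm above define the keys a chunked-upload client sends. A sketch of building the matching query; the helper name is hypothetical, and the chunk bytes themselves travel as the multipart "file" field (PartFile).

// sketch only; assumes net/url and strconv are imported
func buildChunkQuery(uploadID, uuid string, size, offset int64, chunkNumber int) url.Values {
    vals := url.Values{}
    vals.Set("uploadId", uploadID)
    vals.Set("uuid", uuid)
    vals.Set("size", strconv.FormatInt(size, 10))
    vals.Set("offset", strconv.FormatInt(offset, 10))
    vals.Set("chunkNumber", strconv.Itoa(chunkNumber))
    return vals
}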
 func RenderAttachmentSettings(ctx *context.Context) {
@@ -326,7 +326,7 @@ func AddAttachment(ctx *context.Context) {
         Name: ctx.Query("file_name"),
         Size: ctx.QueryInt64("size"),
         DatasetID: ctx.QueryInt64("dataset_id"),
-        Type: typeCloudBrain,
+        Type:      typeCloudBrain,
     })
     if err != nil {
@@ -562,7 +562,7 @@ func NewMultipart(ctx *context.Context) {
         Md5: ctx.Query("md5"),
         Size: fileSize,
         TotalChunks: totalChunkCounts,
-        Type: typeCloudBrain,
+        Type:        typeCloudBrain,
     })
     if err != nil {
@@ -624,12 +624,12 @@ func GetObsKey(ctx *context.Context) {
     key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/")
     ctx.JSON(200, map[string]string{
-        "uuid": uuid,
-        "key": key,
-        "access_key_id": setting.AccessKeyID,
+        "uuid":              uuid,
+        "key":               key,
+        "access_key_id":     setting.AccessKeyID,
         "secret_access_key": setting.SecretAccessKey,
-        "server": setting.Endpoint,
-        "bucket": setting.Bucket,
+        "server":            setting.Endpoint,
+        "bucket":            setting.Bucket,
     })
 }
@@ -684,7 +684,7 @@ func CompleteMultipart(ctx *context.Context) {
         Name: fileName,
        Size: ctx.QueryInt64("size"),
         DatasetID: ctx.QueryInt64("dataset_id"),
-        Type: typeCloudBrain,
+        Type:      typeCloudBrain,
     })
     if err != nil {
@@ -20,13 +20,14 @@ type BlockChainCommitNotify struct {
     CommitID string `json:"commitId"`
     TransactionHash string `json:"txHash"`
 }
 const (
     tplBlockChainIndex base.TplName = "repo/blockchain/index"
 )
 func BlockChainIndex(ctx *context.Context) {
     repo := ctx.Repo.Repository
-    if repo.ContractAddress == "" || ctx.User.PublicKey == ""{
+    if repo.ContractAddress == "" || ctx.User.PublicKey == "" {
         log.Error("the repo(%d) or the user(%d) has not been initialized in block_chain", repo.RepoID, ctx.User.ID)
         ctx.HTML(http.StatusInternalServerError, tplBlockChainIndex)
         return
@@ -245,14 +246,14 @@ func HandleBlockChainMergedPulls() {
     }
     blockChain := models.BlockChain{
-        Contributor : poster.PublicKey,
-        PrID : pr.ID,
-        CommitID : pr.MergedCommitID,
-        ContractAddress : repo.ContractAddress,
-        Status : models.BlockChainCommitInit,
-        Amount : int64(pr.Amount),
-        UserID : poster.ID,
-        RepoID : pr.HeadRepoID,
+        Contributor:     poster.PublicKey,
+        PrID:            pr.ID,
+        CommitID:        pr.MergedCommitID,
+        ContractAddress: repo.ContractAddress,
+        Status:          models.BlockChainCommitInit,
+        Amount:          int64(pr.Amount),
+        UserID:          poster.ID,
+        RepoID:          pr.HeadRepoID,
     }
     _, err = models.InsertBlockChain(&blockChain)
     if err != nil {
@@ -46,7 +46,7 @@ func CloudBrainIndex(ctx *context.Context) {
             PageSize: setting.UI.IssuePagingNum,
         },
         RepoID: repo.ID,
-        Type: models.TypeCloudBrainOne,
+        Type:   models.TypeCloudBrainOne,
     })
     if err != nil {
         ctx.ServerError("Cloudbrain", err)
@@ -148,7 +148,7 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
     jobType := form.JobType
     codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
-    if jobType != string(models.JobTypeBenchmark) && jobType != string(models.JobTypeDebug) && jobType != string(models.JobTypeSnn4imagenet){
+    if jobType != string(models.JobTypeBenchmark) && jobType != string(models.JobTypeDebug) && jobType != string(models.JobTypeSnn4imagenet) {
         log.Error("jobtype error:", jobType)
         ctx.RenderWithErr("jobtype error", tplCloudBrainNew, &form)
         return
@@ -365,11 +365,11 @@ func downloadRateCode(repo *models.Repository, taskName, gitPath, codePath, benc
     defer f.Close()
     data, err := json.Marshal(models.TaskInfo{
-        Username: repo.Owner.Name,
-        TaskName: taskName,
-        CodeName: repo.Name,
+        Username:          repo.Owner.Name,
+        TaskName:          taskName,
+        CodeName:          repo.Name,
         BenchmarkCategory: strings.Split(benchmarkCategory, ","),
-        CodeLink: strings.TrimSuffix(repo.CloneLink().HTTPS, ".git"),
+        CodeLink:          strings.TrimSuffix(repo.CloneLink().HTTPS, ".git"),
     })
     if err != nil {
         log.Error("json.Marshal failed", err.Error())
@@ -43,7 +43,7 @@ func ModelArtsIndex(ctx *context.Context) {
             PageSize: setting.UI.IssuePagingNum,
         },
         RepoID: repo.ID,
-        Type: models.TypeCloudBrainTwo,
+        Type:   models.TypeCloudBrainTwo,
     })
     if err != nil {
         ctx.ServerError("Cloudbrain", err)
@@ -165,7 +165,6 @@ func ModelArtsDebug(ctx *context.Context) {
         return
     }
     urls := strings.Split(result.Spec.Annotations.Url, "/")
     urlPrefix := result.Spec.Annotations.TargetDomain
     for i, url := range urls {
@@ -244,4 +243,3 @@ func ModelArtsDel(ctx *context.Context) {
     ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts")
 }